# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

from typing import List, Optional

from typing_extensions import Literal

from .._models import BaseModel
from .completion_choice import CompletionChoice
from .completion_usage import CompletionUsage

__all__ = ["Completion"]


class Completion(BaseModel):
    """A text-completion response returned by the API.

    Note: the streamed and non-streamed variants of this response share a
    single shape (unlike the chat endpoint, where they differ).
    """

    id: str
    """Unique identifier assigned to this completion."""

    choices: List[CompletionChoice]
    """Completion choices the model generated for the given input prompt."""

    created: int
    """Unix timestamp (in seconds) marking when the completion was created."""

    model: str
    """Name of the model that produced this completion."""

    object: Literal["text_completion"]
    """The object type, which is always "text_completion" """

    system_fingerprint: Optional[str] = None
    """Fingerprint of the backend configuration the model ran with.

    Compare it against the `seed` request parameter to detect backend changes
    that could affect determinism.
    """

    usage: Optional[CompletionUsage] = None
    """Token usage statistics for this completion request."""