From 9321b9b9fa9b26ebdedbb0d2d9f7583d1cc61c22 Mon Sep 17 00:00:00 2001
From: Stainless Bot
Date: Thu, 4 Jan 2024 02:27:14 +0000
Subject: [PATCH] feat: add `None` default value to nullable response properties

---
 src/openai/_exceptions.py                          |  4 ++--
 src/openai/types/beta/assistant.py                 |  8 ++++----
 src/openai/types/beta/thread.py                    |  2 +-
 src/openai/types/beta/threads/run.py               | 14 +++++++-------
 .../types/beta/threads/runs/function_tool_call.py  |  2 +-
 src/openai/types/beta/threads/runs/run_step.py     | 12 ++++++------
 src/openai/types/beta/threads/thread_message.py    |  6 +++---
 src/openai/types/chat/chat_completion.py           |  4 ++--
 src/openai/types/chat/chat_completion_chunk.py     |  4 ++--
 src/openai/types/chat/chat_completion_message.py   |  2 +-
 .../types/chat/chat_completion_token_logprob.py    |  4 ++--
 src/openai/types/completion_choice.py              |  2 +-
 src/openai/types/fine_tune.py                      |  2 +-
 src/openai/types/fine_tuning/fine_tuning_job.py    | 12 ++++++------
 14 files changed, 39 insertions(+), 39 deletions(-)

diff --git a/src/openai/_exceptions.py b/src/openai/_exceptions.py
index 40b163270d..d7ded1248f 100644
--- a/src/openai/_exceptions.py
+++ b/src/openai/_exceptions.py
@@ -40,8 +40,8 @@ class APIError(OpenAIError):
     If there was no response associated with this error then it will be `None`.
     """
 
-    code: Optional[str]
-    param: Optional[str]
+    code: Optional[str] = None
+    param: Optional[str] = None
     type: Optional[str]
 
     def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None:
diff --git a/src/openai/types/beta/assistant.py b/src/openai/types/beta/assistant.py
index a21206765a..89e45d4806 100644
--- a/src/openai/types/beta/assistant.py
+++ b/src/openai/types/beta/assistant.py
@@ -37,7 +37,7 @@ class Assistant(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the assistant was created."""
 
-    description: Optional[str]
+    description: Optional[str] = None
     """The description of the assistant. The maximum length is 512 characters."""
 
     file_ids: List[str]
@@ -47,13 +47,13 @@
     assistant. Files are ordered by their creation date in ascending order.
     """
 
-    instructions: Optional[str]
+    instructions: Optional[str] = None
     """The system instructions that the assistant uses.
 
     The maximum length is 32768 characters.
     """
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
@@ -71,7 +71,7 @@
     descriptions of them.
     """
 
-    name: Optional[str]
+    name: Optional[str] = None
     """The name of the assistant. The maximum length is 256 characters."""
 
     object: Literal["assistant"]
diff --git a/src/openai/types/beta/thread.py b/src/openai/types/beta/thread.py
index a340bffd60..474527033a 100644
--- a/src/openai/types/beta/thread.py
+++ b/src/openai/types/beta/thread.py
@@ -16,7 +16,7 @@ class Thread(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the thread was created."""
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
diff --git a/src/openai/types/beta/threads/run.py b/src/openai/types/beta/threads/run.py
index ffbba1e504..b6d66bd8dd 100644
--- a/src/openai/types/beta/threads/run.py
+++ b/src/openai/types/beta/threads/run.py
@@ -72,10 +72,10 @@ class Run(BaseModel):
     execution of this run.
""" - cancelled_at: Optional[int] + cancelled_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run was cancelled.""" - completed_at: Optional[int] + completed_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run was completed.""" created_at: int @@ -84,7 +84,7 @@ class Run(BaseModel): expires_at: int """The Unix timestamp (in seconds) for when the run will expire.""" - failed_at: Optional[int] + failed_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run failed.""" file_ids: List[str] @@ -101,10 +101,10 @@ class Run(BaseModel): this run. """ - last_error: Optional[LastError] + last_error: Optional[LastError] = None """The last error associated with this run. Will be `null` if there are no errors.""" - metadata: Optional[builtins.object] + metadata: Optional[builtins.object] = None """Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in a @@ -122,13 +122,13 @@ class Run(BaseModel): object: Literal["thread.run"] """The object type, which is always `thread.run`.""" - required_action: Optional[RequiredAction] + required_action: Optional[RequiredAction] = None """Details on the action required to continue the run. Will be `null` if no action is required. """ - started_at: Optional[int] + started_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run was started.""" status: Literal[ diff --git a/src/openai/types/beta/threads/runs/function_tool_call.py b/src/openai/types/beta/threads/runs/function_tool_call.py index f4cf8bbdd0..bbd3cb7052 100644 --- a/src/openai/types/beta/threads/runs/function_tool_call.py +++ b/src/openai/types/beta/threads/runs/function_tool_call.py @@ -15,7 +15,7 @@ class Function(BaseModel): name: str """The name of the function.""" - output: Optional[str] + output: Optional[str] = None """The output of the function. This will be `null` if the outputs have not been diff --git a/src/openai/types/beta/threads/runs/run_step.py b/src/openai/types/beta/threads/runs/run_step.py index 5f8723b71a..1d95e9d6eb 100644 --- a/src/openai/types/beta/threads/runs/run_step.py +++ b/src/openai/types/beta/threads/runs/run_step.py @@ -33,31 +33,31 @@ class RunStep(BaseModel): associated with the run step. """ - cancelled_at: Optional[int] + cancelled_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run step was cancelled.""" - completed_at: Optional[int] + completed_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run step completed.""" created_at: int """The Unix timestamp (in seconds) for when the run step was created.""" - expired_at: Optional[int] + expired_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run step expired. A step is considered expired if the parent run is expired. """ - failed_at: Optional[int] + failed_at: Optional[int] = None """The Unix timestamp (in seconds) for when the run step failed.""" - last_error: Optional[LastError] + last_error: Optional[LastError] = None """The last error associated with this run step. Will be `null` if there are no errors. """ - metadata: Optional[builtins.object] + metadata: Optional[builtins.object] = None """Set of 16 key-value pairs that can be attached to an object. 
     This can be useful for storing additional information about the object in a
diff --git a/src/openai/types/beta/threads/thread_message.py b/src/openai/types/beta/threads/thread_message.py
index 0f782ef845..8f1ac07d0a 100644
--- a/src/openai/types/beta/threads/thread_message.py
+++ b/src/openai/types/beta/threads/thread_message.py
@@ -17,7 +17,7 @@ class ThreadMessage(BaseModel):
     id: str
     """The identifier, which can be referenced in API endpoints."""
 
-    assistant_id: Optional[str]
+    assistant_id: Optional[str] = None
     """
     If applicable, the ID of the
     [assistant](https://platform.openai.com/docs/api-reference/assistants) that
@@ -37,7 +37,7 @@
     that can access files. A maximum of 10 files can be attached to a message.
     """
 
-    metadata: Optional[builtins.object]
+    metadata: Optional[builtins.object] = None
     """Set of 16 key-value pairs that can be attached to an object.
 
     This can be useful for storing additional information about the object in a
@@ -51,7 +51,7 @@
     role: Literal["user", "assistant"]
     """The entity that produced the message. One of `user` or `assistant`."""
 
-    run_id: Optional[str]
+    run_id: Optional[str] = None
     """
     If applicable, the ID of the
     [run](https://platform.openai.com/docs/api-reference/runs) associated with the
diff --git a/src/openai/types/chat/chat_completion.py b/src/openai/types/chat/chat_completion.py
index 055280c347..dc63d84945 100644
--- a/src/openai/types/chat/chat_completion.py
+++ b/src/openai/types/chat/chat_completion.py
@@ -12,7 +12,7 @@
 
 
 class ChoiceLogprobs(BaseModel):
-    content: Optional[List[ChatCompletionTokenLogprob]]
+    content: Optional[List[ChatCompletionTokenLogprob]] = None
     """A list of message content tokens with log probability information."""
 
 
@@ -30,7 +30,7 @@ class Choice(BaseModel):
     index: int
     """The index of the choice in the list of choices."""
 
-    logprobs: Optional[ChoiceLogprobs]
+    logprobs: Optional[ChoiceLogprobs] = None
     """Log probability information for the choice."""
 
     message: ChatCompletionMessage
diff --git a/src/openai/types/chat/chat_completion_chunk.py b/src/openai/types/chat/chat_completion_chunk.py
index ccc7ad79ec..95013e7a4f 100644
--- a/src/openai/types/chat/chat_completion_chunk.py
+++ b/src/openai/types/chat/chat_completion_chunk.py
@@ -73,7 +73,7 @@ class ChoiceDelta(BaseModel):
 
 
 class ChoiceLogprobs(BaseModel):
-    content: Optional[List[ChatCompletionTokenLogprob]]
+    content: Optional[List[ChatCompletionTokenLogprob]] = None
     """A list of message content tokens with log probability information."""
 
 
@@ -81,7 +81,7 @@ class Choice(BaseModel):
     delta: ChoiceDelta
     """A chat completion delta generated by streamed model responses."""
 
-    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]]
+    finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None
     """The reason the model stopped generating tokens.
 
     This will be `stop` if the model hit a natural stop point or a provided stop
diff --git a/src/openai/types/chat/chat_completion_message.py b/src/openai/types/chat/chat_completion_message.py
index 4749798a33..da8b2fcd5c 100644
--- a/src/openai/types/chat/chat_completion_message.py
+++ b/src/openai/types/chat/chat_completion_message.py
@@ -23,7 +23,7 @@ class FunctionCall(BaseModel):
 
 
 class ChatCompletionMessage(BaseModel):
-    content: Optional[str]
+    content: Optional[str] = None
     """The contents of the message."""
 
     role: Literal["assistant"]
diff --git a/src/openai/types/chat/chat_completion_token_logprob.py b/src/openai/types/chat/chat_completion_token_logprob.py
index 8896da8b85..728845fb33 100644
--- a/src/openai/types/chat/chat_completion_token_logprob.py
+++ b/src/openai/types/chat/chat_completion_token_logprob.py
@@ -11,7 +11,7 @@ class TopLogprob(BaseModel):
     token: str
     """The token."""
 
-    bytes: Optional[List[int]]
+    bytes: Optional[List[int]] = None
     """A list of integers representing the UTF-8 bytes representation of the token.
 
     Useful in instances where characters are represented by multiple tokens and
@@ -27,7 +27,7 @@ class ChatCompletionTokenLogprob(BaseModel):
     token: str
     """The token."""
 
-    bytes: Optional[List[int]]
+    bytes: Optional[List[int]] = None
     """A list of integers representing the UTF-8 bytes representation of the token.
 
     Useful in instances where characters are represented by multiple tokens and
diff --git a/src/openai/types/completion_choice.py b/src/openai/types/completion_choice.py
index 71de0f9247..7b08582bfd 100644
--- a/src/openai/types/completion_choice.py
+++ b/src/openai/types/completion_choice.py
@@ -30,6 +30,6 @@ class CompletionChoice(BaseModel):
 
     index: int
 
-    logprobs: Optional[Logprobs]
+    logprobs: Optional[Logprobs] = None
 
     text: str
diff --git a/src/openai/types/fine_tune.py b/src/openai/types/fine_tune.py
index de1e097ee4..d1a063a065 100644
--- a/src/openai/types/fine_tune.py
+++ b/src/openai/types/fine_tune.py
@@ -50,7 +50,7 @@ class FineTune(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the fine-tuning job was created."""
 
-    fine_tuned_model: Optional[str]
+    fine_tuned_model: Optional[str] = None
     """The name of the fine-tuned model that is being created."""
 
     hyperparams: Hyperparams
diff --git a/src/openai/types/fine_tuning/fine_tuning_job.py b/src/openai/types/fine_tuning/fine_tuning_job.py
index 3897176a47..5aa4f07eb1 100644
--- a/src/openai/types/fine_tuning/fine_tuning_job.py
+++ b/src/openai/types/fine_tuning/fine_tuning_job.py
@@ -15,7 +15,7 @@ class Error(BaseModel):
     message: str
     """A human-readable error message."""
 
-    param: Optional[str]
+    param: Optional[str] = None
     """The parameter that was invalid, usually `training_file` or `validation_file`.
 
     This field will be null if the failure was not parameter-specific.
@@ -39,19 +39,19 @@ class FineTuningJob(BaseModel):
     created_at: int
     """The Unix timestamp (in seconds) for when the fine-tuning job was created."""
 
-    error: Optional[Error]
+    error: Optional[Error] = None
     """
     For fine-tuning jobs that have `failed`, this will contain more information on
     the cause of the failure.
     """
 
-    fine_tuned_model: Optional[str]
+    fine_tuned_model: Optional[str] = None
     """The name of the fine-tuned model that is being created.
 
     The value will be null if the fine-tuning job is still running.
     """
 
-    finished_at: Optional[int]
+    finished_at: Optional[int] = None
     """The Unix timestamp (in seconds) for when the fine-tuning job was finished.
 
     The value will be null if the fine-tuning job is still running.
@@ -86,7 +86,7 @@ class FineTuningJob(BaseModel):
     `validating_files`, `queued`, `running`, `succeeded`, `failed`, or `cancelled`.
     """
 
-    trained_tokens: Optional[int]
+    trained_tokens: Optional[int] = None
     """The total number of billable tokens processed by this fine-tuning job.
 
     The value will be null if the fine-tuning job is still running.
@@ -99,7 +99,7 @@
     [Files API](https://platform.openai.com/docs/api-reference/files/retrieve-contents).
     """
 
-    validation_file: Optional[str]
+    validation_file: Optional[str] = None
     """The file ID used for validation.
 
     You can retrieve the validation results with the