From f14cba6c1f87c1fca3fedbefb5783d5981d50d75 Mon Sep 17 00:00:00 2001
From: vedpatwardhan
Date: Thu, 19 Sep 2024 11:31:06 +0000
Subject: [PATCH] updated docs

---
 api-reference/llm_queries/chat_completions.mdx | 6 +++---
 api-reference/openapi.json                     | 4 ++--
 python/types.mdx                               | 2 +-
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/api-reference/llm_queries/chat_completions.mdx b/api-reference/llm_queries/chat_completions.mdx
index ac9b01459..74572d894 100644
--- a/api-reference/llm_queries/chat_completions.mdx
+++ b/api-reference/llm_queries/chat_completions.mdx
@@ -194,8 +194,8 @@ curl --request POST \
     "response_format": "{ \"type\": \"json_mode\"}",
     "seed": 11,
     "stream_options": [
-        true,
-        "include_usage"
+        "include_usage",
+        true
     ],
     "top_p": 0.5,
     "tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}",
@@ -218,7 +218,7 @@
 url = "https://api.unify.ai/v0/chat/completions"
 
 headers = {"Authorization": "Bearer "}
 
-json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ "type": "json_mode"}", "seed": 11, "stream_options": [True, "include_usage"], "top_p": 0.5, "tool_choice": "{"type": "function", "function": {"name": "my_function"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": ["user123", "CompanyABC"], "drop_params": True}
+json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ "type": "json_mode"}", "seed": 11, "stream_options": ["include_usage", True], "top_p": 0.5, "tool_choice": "{"type": "function", "function": {"name": "my_function"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": ["user123", "CompanyABC"], "drop_params": True}
 
 response = requests.request("POST", url, json=json_input, headers=headers)
diff --git a/api-reference/openapi.json b/api-reference/openapi.json
index 6440459a8..54747fe2b 100644
--- a/api-reference/openapi.json
+++ b/api-reference/openapi.json
@@ -4036,8 +4036,8 @@
           "title": "Stream Options",
           "description": "Options for streaming response. Only set this when you set `stream: true`.",
           "example": [
-            true,
-            "include_usage"
+            "include_usage",
+            true
           ]
         },
         "top_p": {
diff --git a/python/types.mdx b/python/types.mdx
index c1fbb4b56..59d6ad455 100644
--- a/python/types.mdx
+++ b/python/types.mdx
@@ -174,7 +174,7 @@
 types on screen. Can be either "verbose" or "concise".
 
 ## ReprMode
 
 ```python
-class ReprMode(str)
+class ReprMode()
 ```
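
For context on the example this patch reorders, below is a minimal runnable sketch of the updated Python request. It is not part of the patch: it assumes the `requests` library, uses a placeholder bearer token, keeps only a subset of the parameters shown in the diff, and escapes the inner quotes of the nested JSON-string value (`response_format`), which the page source currently leaves unescaped.

```python
import requests

url = "https://api.unify.ai/v0/chat/completions"

# Placeholder token; substitute a real Unify API key.
headers = {"Authorization": "Bearer <UNIFY_API_KEY>"}

# Subset of the payload from the updated example. response_format is a JSON
# string, so its inner quotes are escaped here to keep the literal valid Python.
json_input = {
    "messages": [{"content": "Tell me a joke", "role": "user"}],
    "model": "gpt-4o-mini@openai",
    "max_tokens": 1024,
    "stream": False,
    "temperature": 0.9,
    "seed": 11,
    "response_format": "{ \"type\": \"json_mode\"}",
    "stream_options": ["include_usage", True],  # order as updated by this patch
    "top_p": 0.5,
    "drop_params": True,
}

response = requests.request("POST", url, json=json_input, headers=headers)
print(response.status_code, response.text)
```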