Commit
updated docs
vedpatwardhan committed Sep 19, 2024
1 parent f14cba6 commit 7719897
Showing 3 changed files with 6 additions and 6 deletions.
6 changes: 3 additions & 3 deletions api-reference/llm_queries/chat_completions.mdx
@@ -194,8 +194,8 @@ curl --request POST \
"response_format": "{ \"type\": \"json_mode\"}",
"seed": 11,
"stream_options": [
"include_usage",
true
true,
"include_usage"
],
"top_p": 0.5,
"tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}",
@@ -218,7 +218,7 @@ url = "https://api.unify.ai/v0/chat/completions"

headers = {"Authorization": "Bearer <token>"}

- json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ \"type\": \"json_mode\"}", "seed": 11, "stream_options": ["include_usage", True], "top_p": 0.5, "tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": ["user123", "CompanyABC"], "drop_params": True}
+ json_input = {"messages": [{"content": "Tell me a joke", "role": "user"}], "model": "gpt-4o-mini@openai", "max_tokens": 1024, "stop": ["The End.", " is the answer."], "stream": False, "temperature": 0.9, "frequency_penalty": 1.5, "logit_bias": {"0": 10, "1": -75, "2": 90}, "logprobs": False, "top_logprobs": 15, "n": 15, "presence_penalty": -1.1, "response_format": "{ \"type\": \"json_mode\"}", "seed": 11, "stream_options": [True, "include_usage"], "top_p": 0.5, "tool_choice": "{\"type\": \"function\", \"function\": {\"name\": \"my_function\"}}", "parallel_tool_calls": True, "user": "some_user", "signature": "python", "use_custom_keys": True, "tags": ["user123", "CompanyABC"], "drop_params": True}

response = requests.request("POST", url, json=json_input, headers=headers)

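For reference, a minimal sketch of the streaming variant of the call documented above, using the updated `stream_options` ordering. The URL, headers, model, message, and `stream_options` values come from the docs excerpt; the line-by-line consumption loop is an assumption about how the streamed response can be read, not the documented parsing format.

```python
import requests

url = "https://api.unify.ai/v0/chat/completions"
headers = {"Authorization": "Bearer <token>"}

# stream_options is only honoured together with `stream: true`;
# the updated docs order the example as [True, "include_usage"].
json_input = {
    "messages": [{"content": "Tell me a joke", "role": "user"}],
    "model": "gpt-4o-mini@openai",
    "stream": True,
    "stream_options": [True, "include_usage"],
}

# Assumption: the endpoint emits newline-delimited chunks that can be
# read incrementally; adjust the parsing to the actual response format.
response = requests.request("POST", url, json=json_input, headers=headers, stream=True)
for line in response.iter_lines():
    if line:
        print(line.decode("utf-8"))
```

The `include_usage` flag is typically used to request token-usage statistics in the final streamed chunk.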
4 changes: 2 additions & 2 deletions api-reference/openapi.json
@@ -4036,8 +4036,8 @@
"title": "Stream Options",
"description": "Options for streaming response. Only set this when you set `stream: true`.",
"example": [
"include_usage",
true
true,
"include_usage"
]
},
"top_p": {
2 changes: 1 addition & 1 deletion python/types.mdx
@@ -42,7 +42,7 @@ Create Prompt instance.
## ChatCompletion

```python
-class ChatCompletion(_FormattedBaseModel, _ChatCompletion)
+class ChatCompletion(_FormattedBaseModel)
```

<a id="types.ChatCompletion.__init__"></a>
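As a purely hypothetical illustration of the simplified signature above (not the library's actual implementation): a pydantic-style base that controls formatting, with `ChatCompletion` declaring its own fields instead of mixing in a private `_ChatCompletion` base. The field names and the pretty-printing behaviour are assumptions, and pydantic v2 is assumed.

```python
# Hypothetical sketch only -- names and fields are illustrative, not the library's code.
from typing import List, Optional
from pydantic import BaseModel


class _FormattedBaseModel(BaseModel):
    """Assumed base whose only job here is readable display of instances."""

    def __repr__(self) -> str:
        # Indented JSON keeps nested completion objects readable (pydantic v2 API).
        return self.model_dump_json(indent=4)


class ChatCompletion(_FormattedBaseModel):
    """Illustrative completion type with OpenAI-style field names."""

    id: str
    model: str
    choices: List[dict]
    usage: Optional[dict] = None
```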
