From 76565a588b731f9aef1737913b2eaac1c7a1f72d Mon Sep 17 00:00:00 2001
From: Deborah Mendes Ferreira
Date: Mon, 18 Dec 2023 09:54:50 +0000
Subject: [PATCH] adding TimeOut option

---
 +llms/+internal/callOpenAIChatAPI.m |  4 +++-
 +llms/+internal/sendRequest.m       | 36 ++++++++++++++++++++---------
 openAIChat.m                        | 20 +++++++++++-----
 tests/topenAIChat.m                 | 18 +++++++++++++--
 4 files changed, 58 insertions(+), 20 deletions(-)

diff --git a/+llms/+internal/callOpenAIChatAPI.m b/+llms/+internal/callOpenAIChatAPI.m
index 9ebafb1..af000cb 100644
--- a/+llms/+internal/callOpenAIChatAPI.m
+++ b/+llms/+internal/callOpenAIChatAPI.m
@@ -18,6 +18,7 @@
 % - PresencePenalty (presence_penalty)
 % - FrequencyPenalty (frequence_penalty)
 % - ApiKey
+% - TimeOut
 % More details on the parameters: https://platform.openai.com/docs/api-reference/chat/create
 %
 % Example
@@ -63,13 +64,14 @@
     nvp.PresencePenalty = 0
     nvp.FrequencyPenalty = 0
     nvp.ApiKey = ""
+    nvp.TimeOut = 10
 end
 
 END_POINT = "https://api.openai.com/v1/chat/completions";
 
 parameters = buildParametersCall(messages, functions, nvp);
 
-response = llms.internal.sendRequest(parameters,nvp.ApiKey, END_POINT);
+response = llms.internal.sendRequest(parameters,nvp.ApiKey, END_POINT, nvp.TimeOut);
 
 % If call errors, "choices" will not be part of response.Body.Data, instead
 % we get response.Body.Data.error
diff --git a/+llms/+internal/sendRequest.m b/+llms/+internal/sendRequest.m
index 96ccd36..e5fbcc9 100644
--- a/+llms/+internal/sendRequest.m
+++ b/+llms/+internal/sendRequest.m
@@ -1,18 +1,32 @@
-function response = sendRequest(parameters, token, endpoint)
+function response = sendRequest(parameters, token, endpoint, timeout)
 % This function is undocumented and will change in a future release
 
-%sendRequest Sends a request to an ENDPOINT using PARAMETERS and
-% api key TOKEN.
+%sendRequest Sends a request to an ENDPOINT using PARAMETERS and
+% api key TOKEN. TIMEOUT is the number of seconds to wait for the initial
+% server connection.
 
 % Copyright 2023 The MathWorks, Inc.
 
-    % Define the headers for the API request
-    
-    headers = [matlab.net.http.HeaderField('Content-Type', 'application/json')...
-        matlab.net.http.HeaderField('Authorization', "Bearer " + token)];
-    % Define the request message
-    request = matlab.net.http.RequestMessage('post',headers,parameters);
-    
-    % Send the request and store the response
-    response = send(request, matlab.net.URI(endpoint));
+arguments
+    parameters
+    token
+    endpoint
+    timeout
 end
+
+% Define the headers for the API request
+
+headers = [matlab.net.http.HeaderField('Content-Type', 'application/json')...
+    matlab.net.http.HeaderField('Authorization', "Bearer " + token)];
+
+% Define the request message
+request = matlab.net.http.RequestMessage('post',headers,parameters);
+
+% Create an HTTPOptions object
+httpOpts = matlab.net.http.HTTPOptions;
+
+% Set the ConnectTimeout option
+httpOpts.ConnectTimeout = timeout;
+% Send the request and store the response
+response = send(request, matlab.net.URI(endpoint),httpOpts);
+end
\ No newline at end of file
diff --git a/openAIChat.m b/openAIChat.m
index 629a8c0..5ab5534 100644
--- a/openAIChat.m
+++ b/openAIChat.m
@@ -54,6 +54,9 @@
 %
 %   FunctionNames           - Names of the functions that the model can
 %                             request calls.
+%
+%   TimeOut                 - Connection Timeout in seconds (default: 10 secs)
+%
 
 % Copyright 2023 The MathWorks, Inc.
 
@@ -71,10 +74,13 @@
         PresencePenalty
 
         %FREQUENCYPENALTY Penalty for using a token that is frequent in the training data.
-        FrequencyPenalty
+        FrequencyPenalty
     end
 
-    properties(SetAccess=private)
+    properties(SetAccess=private)
+        %TIMEOUT Connection timeout in seconds (default 10 secs)
+        TimeOut
+
         %FUNCTIONNAMES Names of the functions that the model can request calls
         FunctionNames
 
@@ -88,7 +94,7 @@
     properties(Access=private)
         Functions
         FunctionsStruct
-        ApiKey
+        ApiKey
     end
 
     methods
@@ -97,14 +103,15 @@
             systemPrompt {llms.utils.mustBeTextOrEmpty} = []
             nvp.Functions (1,:) {mustBeA(nvp.Functions, "openAIFunction")} = openAIFunction.empty
             nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ...
-                "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k",...
-                "gpt-3.5-turbo-16k-0613", "gpt-4-1106-preview"])} = "gpt-3.5-turbo"
+                "gpt-3.5-turbo", "gpt-3.5-turbo-16k",...
+                "gpt-4-1106-preview","gpt-3.5-turbo-1106"])} = "gpt-3.5-turbo"
             nvp.Temperature {mustBeValidTemperature} = 1
             nvp.TopProbabilityMass {mustBeValidTopP} = 1
             nvp.StopSequences {mustBeValidStop} = {}
             nvp.ApiKey {mustBeNonzeroLengthTextScalar}
             nvp.PresencePenalty {mustBeValidPenalty} = 0
             nvp.FrequencyPenalty {mustBeValidPenalty} = 0
+            nvp.TimeOut (1,1) {mustBeReal,mustBePositive} = 10
         end
 
         if ~isempty(nvp.Functions)
@@ -130,6 +137,7 @@
             this.PresencePenalty = nvp.PresencePenalty;
             this.FrequencyPenalty = nvp.FrequencyPenalty;
             this.ApiKey = llms.internal.getApiKeyFromNvpOrEnv(nvp);
+            this.TimeOut = nvp.TimeOut;
         end
 
         function [text, message, response] = generate(this, messages, nvp)
@@ -174,7 +182,7 @@
                 TopProbabilityMass=this.TopProbabilityMass, NumCompletions=nvp.NumCompletions,...
                 StopSequences=this.StopSequences, MaxNumTokens=nvp.MaxNumTokens, ...
                 PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
-                ApiKey=this.ApiKey);
+                ApiKey=this.ApiKey,TimeOut=this.TimeOut);
         end
 
         function this = set.Temperature(this, temperature)
diff --git a/tests/topenAIChat.m b/tests/topenAIChat.m
index ce6ccb9..0d313d8 100644
--- a/tests/topenAIChat.m
+++ b/tests/topenAIChat.m
@@ -51,9 +51,10 @@ function constructChatWithAllNVP(testCase)
             presenceP = -2;
             frequenceP = 2;
             systemPrompt = "This is a system prompt";
+            timeout = 3;
             chat = openAIChat(systemPrompt, Functions=functions, ModelName=modelName, ...
                 Temperature=temperature, TopProbabilityMass=topP, StopSequences=stop, ApiKey=apiKey,...
-                FrequencyPenalty=frequenceP, PresencePenalty=presenceP);
+                FrequencyPenalty=frequenceP, PresencePenalty=presenceP, TimeOut=timeout);
             testCase.verifyEqual(chat.ModelName, modelName);
             testCase.verifyEqual(chat.Temperature, temperature);
             testCase.verifyEqual(chat.TopProbabilityMass, topP);
@@ -62,6 +63,11 @@ function constructChatWithAllNVP(testCase)
             testCase.verifyEqual(chat.PresencePenalty, presenceP);
         end
 
+        function verySmallTimeOutErrors(testCase)
+            chat = openAIChat(TimeOut=0.0001, ApiKey="false-key");
+            testCase.verifyError(@()generate(chat, "hi"), "MATLAB:webservices:Timeout")
+        end
+
         function errorsWhenPassingFunctionCallWithEmptyFunctions(testCase)
             chat = openAIChat(ApiKey="this-is-not-a-real-key");
             testCase.verifyError(@()generate(chat,"input", FunctionCall="bla"), "llms:mustSetFunctionsForCall");
@@ -90,7 +96,7 @@ function assignValueToProperty(property, value)
 
 function invalidValuesSetters = iGetInvalidValuesSetters
 
-invalidValuesSetters = struct( ...
+invalidValuesSetters = struct( ...
     "InvalidTemperatureType", struct( ...
         "Property", "Temperature", ...
         "Value", "2", ...
@@ -195,6 +201,14 @@ function assignValueToProperty(property, value)
 function invalidConstructorInput = iGetInvalidConstructorInput
 validFunction = openAIFunction("funName");
 invalidConstructorInput = struct( ...
+    "InvalidTimeOutType", struct( ...
+        "Input",{{"TimeOut", "2" }},...
+        "Error", "MATLAB:validators:mustBeReal"), ...
+    ...
+    "InvalidTimeOutSize", struct( ...
+        "Input",{{"TimeOut", [1 1 1] }},...
+        "Error", "MATLAB:validation:IncompatibleSize"), ...
+    ...
     "WrongTypeSystemPrompt",struct( ...
        "Input",{{ 123 }},...
        "Error","MATLAB:validators:mustBeTextScalar"),...
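
Usage note (illustrative, not part of the patch): a minimal sketch of the new TimeOut
name-value argument added by this change. The system prompt, the placeholder API key,
and the 30-second value below are assumptions chosen for the example; any supported
model and a valid OpenAI API key would work the same way.

    % Construct a chat object that waits up to 30 seconds for the initial
    % server connection (the default introduced by this patch is 10 seconds).
    % "my-api-key" is a placeholder; supply a real OpenAI API key.
    chat = openAIChat("You are a helpful assistant", ...
        ApiKey="my-api-key", TimeOut=30);

    % Generate a completion. If the connection cannot be established within
    % TimeOut seconds, the call errors with "MATLAB:webservices:Timeout",
    % as exercised by the new verySmallTimeOutErrors test.
    [text, message, response] = generate(chat, "Why is the sky blue?");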