Merge pull request #18 from matlab-deep-learning/dev-update-040924models
Reflecting the updated models released on April 9, 2024
toshiakit authored Apr 16, 2024
2 parents 2ef309d + e9acd81 commit d3e7389
Showing 10 changed files with 67 additions and 50 deletions.
22 changes: 13 additions & 9 deletions +llms/+internal/callOpenAIChatAPI.m
@@ -119,18 +119,16 @@

parameters.stream = ~isempty(nvp.StreamFun);

if ~isempty(functions) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview')
if ~isempty(functions)
parameters.tools = functions;
end

if ~isempty(nvp.ToolChoice) && ~strcmp(nvp.ModelName,'gpt-4-vision-preview')
if ~isempty(nvp.ToolChoice)
parameters.tool_choice = nvp.ToolChoice;
end

if ismember(nvp.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"])
if strcmp(nvp.ResponseFormat,"json")
parameters.response_format = struct('type','json_object');
end
if strcmp(nvp.ResponseFormat,"json")
parameters.response_format = struct('type','json_object');
end

if ~isempty(nvp.Seed)
@@ -142,15 +140,21 @@
dict = mapNVPToParameters;

nvpOptions = keys(dict);
if strcmp(nvp.ModelName,'gpt-4-vision-preview')
nvpOptions(ismember(nvpOptions,"StopSequences")) = [];
end

for opt = nvpOptions.'
if isfield(nvp, opt)
parameters.(dict(opt)) = nvp.(opt);
end
end

if isempty(nvp.StopSequences)
parameters = rmfield(parameters,"stop");
end

if nvp.MaxNumTokens == Inf
parameters = rmfield(parameters,"max_tokens");
end

end

function dict = mapNVPToParameters()
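The practical effect of this hunk: instead of special-casing gpt-4-vision-preview, optional request fields that are still at their defaults are now dropped before the request is sent. A minimal sketch of that default-stripping step — the struct below is hypothetical and stands in for the real `parameters`:

```matlab
% Sketch of the default-stripping logic above, on a stand-alone struct.
parameters = struct;
parameters.model      = "gpt-3.5-turbo";
parameters.stop       = {};    % StopSequences left at its default
parameters.max_tokens = Inf;   % MaxNumTokens left at its default

if isempty(parameters.stop)
    parameters = rmfield(parameters,"stop");        % do not send an empty stop list
end
if parameters.max_tokens == Inf
    parameters = rmfield(parameters,"max_tokens");  % let the API apply its own limit
end

disp(fieldnames(parameters))   % only 'model' remains
```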
2 changes: 2 additions & 0 deletions +llms/+utils/errorMessageCatalog.m
@@ -49,8 +49,10 @@
catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or an openAIMessages object.";
catalog("llms:invalidOptionAndValueForModel") = "'{1}' with value '{2}' is not supported for ModelName '{3}'";
catalog("llms:invalidOptionForModel") = "{1} is not supported for ModelName '{2}'";
catalog("llms:invalidContentTypeForModel") = "{1} is not supported for ModelName '{2}'";
catalog("llms:functionNotAvailableForModel") = "This function is not supported for ModelName '{1}'";
catalog("llms:promptLimitCharacter") = "Prompt must have a maximum length of {1} characters for ModelName '{2}'";
catalog("llms:pngExpected") = "Argument must be a PNG image.";
catalog("llms:warningJsonInstruction") = "When using JSON mode, you must also prompt the model to produce JSON yourself via a system or user message.";
catalog("llms:apiReturnedError") = "OpenAI API Error: {1}";
end
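The two new catalog entries follow the same holed-message pattern as the existing ones and are consumed through `getMessage`, as in the `openAIChat.m` changes further down. A small sketch; the error text passed to the second call is a hypothetical API message:

```matlab
% Sketch: filling the holes in the new catalog messages.
msg1 = llms.utils.errorMessageCatalog.getMessage( ...
    "llms:invalidContentTypeForModel", "Image content", "gpt-3.5-turbo");
% -> "Image content is not supported for ModelName 'gpt-3.5-turbo'"

msg2 = llms.utils.errorMessageCatalog.getMessage( ...
    "llms:apiReturnedError", "Rate limit reached for requests");
% -> "OpenAI API Error: Rate limit reached for requests"
```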
28 changes: 14 additions & 14 deletions README.md
@@ -5,11 +5,11 @@ This repository contains example code to demonstrate how to connect MATLAB to th
The functionality shown here serves as an interface to the ChatGPT and DALL·E APIs. To start using the OpenAI APIs, you first need to obtain OpenAI API keys. You are responsible for any fees OpenAI may charge for the use of their APIs. You should be familiar with the limitations and risks associated with using this technology, and you agree that you shall be solely responsible for full compliance with any terms that may apply to your use of the OpenAI APIs.

Some of the current LLMs supported are:
- gpt-3.5-turbo, gpt-3.5-turbo-1106
- gpt-4, gpt-4-1106-preview
- gpt-4-vision-preview (a.k.a. GPT-4 Turbo with Vision)
- gpt-3.5-turbo, gpt-3.5-turbo-1106, gpt-3.5-turbo-0125
- gpt-4-turbo, gpt-4-turbo-2024-04-09 (GPT-4 Turbo with Vision)
- gpt-4, gpt-4-0613
- dall-e-2, dall-e-3

For details on the specification of each model, check the official [OpenAI documentation](https://platform.openai.com/docs/models).
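For example, a model from this list is selected through the `ModelName` name-value argument when constructing the chat object — a minimal sketch, assuming the repository is on the MATLAB path and an API key is configured:

```matlab
% Minimal sketch: pick one of the listed models; gpt-3.5-turbo is the default.
chat = openAIChat("You are a helpful assistant.", ModelName="gpt-4-turbo");
txt = generate(chat, "Which model family are you?");
```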

## Requirements
@@ -52,15 +52,15 @@ To use this repository with a local installation of MATLAB, first clone the repo

Set up your OpenAI API key. Create a `.env` file in the project root directory with the following content.

```
OPENAI_API_KEY=<your key>
```
```
OPENAI_API_KEY=<your key>
```
Then load your `.env` file as follows:
Then load your `.env` file as follows:
```matlab
loadenv(".env")
```
```matlab
loadenv(".env")
```
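An optional sanity check after loading — a sketch using the built-in `getenv`:

```matlab
% Confirm the key is now visible to MATLAB.
~isempty(getenv("OPENAI_API_KEY"))   % logical 1 when the key was loaded
```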

## Getting Started with Chat Completion API

@@ -287,13 +287,13 @@ You can extract the arguments and write the data to a table, for example.

### Understand the content of an image

You can use gpt-4-vision-preview to experiment with image understanding.
You can use gpt-4-turbo to experiment with image understanding.
```matlab
chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-vision-preview");
chat = openAIChat("You are an AI assistant.", ModelName="gpt-4-turbo");
image_path = "peppers.png";
messages = openAIMessages;
messages = addUserMessageWithImages(messages,"What is in the image?",image_path);
[txt,response] = generate(chat,messages);
[txt,response] = generate(chat,messages,MaxNumTokens=4096);
% Should output the description of the image
```

Binary file modified examples/AnalyzeSentimentinTextUsingChatGPTinJSONMode.mlx
Binary file modified examples/DescribeImagesUsingChatGPT.mlx
Binary file modified examples/ExampleParallelFunctionCalls.mlx
Binary file modified examples/UsingDALLEToEditImages.mlx
Binary file modified examples/UsingDALLEToGenerateImages.mlx
47 changes: 22 additions & 25 deletions openAIChat.m
@@ -114,10 +114,10 @@
arguments
systemPrompt {llms.utils.mustBeTextOrEmpty} = []
nvp.Tools (1,:) {mustBeA(nvp.Tools, "openAIFunction")} = openAIFunction.empty
nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ...
"gpt-3.5-turbo", "gpt-4-1106-preview", ...
"gpt-3.5-turbo-1106", "gpt-4-vision-preview", ...
"gpt-4-turbo-preview"])} = "gpt-3.5-turbo"
nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4-turbo", ...
"gpt-4-turbo-2024-04-09","gpt-4","gpt-4-0613", ...
"gpt-3.5-turbo","gpt-3.5-turbo-0125", ...
"gpt-3.5-turbo-1106"])} = "gpt-3.5-turbo"
nvp.Temperature {mustBeValidTemperature} = 1
nvp.TopProbabilityMass {mustBeValidTopP} = 1
nvp.StopSequences {mustBeValidStop} = {}
@@ -131,10 +131,6 @@

if isfield(nvp,"StreamFun")
this.StreamFun = nvp.StreamFun;
if strcmp(nvp.ModelName,'gpt-4-vision-preview')
error("llms:invalidOptionForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StreamFun", nvp.ModelName));
end
else
this.StreamFun = [];
end
@@ -146,10 +142,6 @@
else
this.Tools = nvp.Tools;
[this.FunctionsStruct, this.FunctionNames] = functionAsStruct(nvp.Tools);
if strcmp(nvp.ModelName,'gpt-4-vision-preview')
error("llms:invalidOptionForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "Tools", nvp.ModelName));
end
end

if ~isempty(systemPrompt)
@@ -163,20 +155,15 @@
this.Temperature = nvp.Temperature;
this.TopProbabilityMass = nvp.TopProbabilityMass;
this.StopSequences = nvp.StopSequences;
if ~isempty(nvp.StopSequences) && strcmp(nvp.ModelName,'gpt-4-vision-preview')
error("llms:invalidOptionForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "StopSequences", nvp.ModelName));
end


% ResponseFormat is supported in the latest models only
if (nvp.ResponseFormat == "json")
if ismember(this.ModelName,["gpt-3.5-turbo-1106","gpt-4-1106-preview"])
warning("llms:warningJsonInstruction", ...
llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction"))
else
if ismember(this.ModelName,["gpt-4","gpt-4-0613"])
error("llms:invalidOptionAndValueForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionAndValueForModel", "ResponseFormat", "json", this.ModelName));
else
warning("llms:warningJsonInstruction", ...
llms.utils.errorMessageCatalog.getMessage("llms:warningJsonInstruction"))
end

end
@@ -222,17 +209,20 @@
end

toolChoice = convertToolChoice(this, nvp.ToolChoice);
if ~isempty(nvp.ToolChoice) && strcmp(this.ModelName,'gpt-4-vision-preview')
error("llms:invalidOptionForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidOptionForModel", "ToolChoice", this.ModelName));
end

if isstring(messages) && isscalar(messages)
messagesStruct = {struct("role", "user", "content", messages)};
else
messagesStruct = messages.Messages;
end

if iscell(messagesStruct{end}.content) && any(cellfun(@(x) isfield(x,"image_url"), messagesStruct{end}.content))
if ~ismember(this.ModelName,["gpt-4-turbo","gpt-4-turbo-2024-04-09"])
error("llms:invalidContentTypeForModel", ...
llms.utils.errorMessageCatalog.getMessage("llms:invalidContentTypeForModel", "Image content", this.ModelName));
end
end

if ~isempty(this.SystemPrompt)
messagesStruct = horzcat(this.SystemPrompt, messagesStruct);
end
@@ -244,6 +234,13 @@
PresencePenalty=this.PresencePenalty, FrequencyPenalty=this.FrequencyPenalty, ...
ResponseFormat=this.ResponseFormat,Seed=nvp.Seed, ...
ApiKey=this.ApiKey,TimeOut=this.TimeOut, StreamFun=this.StreamFun);

if isfield(response.Body.Data,"error")
err = response.Body.Data.error.message;
text = llms.utils.errorMessageCatalog.getMessage("llms:apiReturnedError",err);
message = struct("role","assistant","content",text);
end

end

function this = set.Temperature(this, temperature)
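Taken together, the changes above move the model-specific checks out of the constructor: ResponseFormat="json" now errors only for the base gpt-4 models, and image content is validated when `generate` is called rather than when the chat object is built. A sketch of both paths, mirroring the tests below — the key is deliberately fake, peppers.png is the demo image used in the README, and neither call reaches the API:

```matlab
% ResponseFormat="json" is rejected for the base GPT-4 models at construction time.
try
    openAIChat(ApiKey="not-a-real-key", ModelName="gpt-4", ResponseFormat="json");
catch err
    disp(err.identifier)   % llms:invalidOptionAndValueForModel
end

% Image content is now checked when generate is called, not when the chat is built.
chat = openAIChat(ApiKey="not-a-real-key");   % defaults to gpt-3.5-turbo
messages = addUserMessageWithImages(openAIMessages, "What is in the image?", "peppers.png");
try
    generate(chat, messages);
catch err
    disp(err.identifier)   % llms:invalidContentTypeForModel
end
```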
18 changes: 16 additions & 2 deletions tests/topenAIChat.m
@@ -38,7 +38,7 @@ function generateAcceptsMessagesAsInput(testCase)
end

function constructMdlWithInvalidParameters(testCase)
testCase.verifyError(@()openAIChat(ApiKey="this-is-not-a-real-key", ResponseFormat="json"), "llms:invalidOptionAndValueForModel");
testCase.verifyError(@()openAIChat(ApiKey="this-is-not-a-real-key", ModelName="gpt-4", ResponseFormat="json"), "llms:invalidOptionAndValueForModel");
end

function keyNotFound(testCase)
@@ -100,7 +100,21 @@ function assignValueToProperty(property, value)
end

testCase.verifyError(@()assignValueToProperty(InvalidValuesSetters.Property,InvalidValuesSetters.Value), InvalidValuesSetters.Error);
end
end

function invalidGenerateInputforModel(testCase)
chat = openAIChat(ApiKey="this-is-not-a-real-key");
image_path = "peppers.png";
emptyMessages = openAIMessages;
inValidMessages = addUserMessageWithImages(emptyMessages,"What is in the image?",image_path);
testCase.verifyError(@()generate(chat,inValidMessages), "llms:invalidContentTypeForModel")
end

function noStopSequencesNoMaxNumTokens(testCase)
chat = openAIChat(ApiKey="this-is-not-a-real-key");
testCase.verifyWarningFree(@()generate(chat,"This is okay"));
end

end
end
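The updated test class can be exercised with the standard unit-test runner — a sketch, assuming the repository root is the current folder:

```matlab
% Sketch: run the updated test class from the repository root.
addpath("tests")
results = runtests("topenAIChat");
table(results)
```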

