From 95ed0b127aa2d26397f14e6441c4b682f9f09ae0 Mon Sep 17 00:00:00 2001 From: Deborah Ferreira Date: Mon, 9 Oct 2023 17:14:43 +0100 Subject: [PATCH 1/8] Create setup_matlab.yml --- .github/workflows/setup_matlab.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/setup_matlab.yml diff --git a/.github/workflows/setup_matlab.yml b/.github/workflows/setup_matlab.yml new file mode 100644 index 0000000..8a82dee --- /dev/null +++ b/.github/workflows/setup_matlab.yml @@ -0,0 +1,16 @@ +name: Run MATLAB Tests on GitHub-Hosted Runner +on: [push] +jobs: + my-job: + name: Run MATLAB Tests and Generate Artifacts + runs-on: ubuntu-latest + steps: + - name: Check out repository + uses: actions/checkout@v3 + - name: Set up MATLAB + uses: matlab-actions/setup-matlab@v1 + - name: Run tests and generate artifacts + uses: matlab-actions/run-tests@v1 + with: + test-results-junit: test-results/results.xml + code-coverage-cobertura: code-coverage/coverage.xml From da1c112e2d58e65b1b768110c23ddd084bdc6455 Mon Sep 17 00:00:00 2001 From: Christopher Creutzig Date: Wed, 11 Oct 2023 10:21:52 +0200 Subject: [PATCH 2/8] replace regexp with extractBetween --- examples/ExampleFunctionCalling.m | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/examples/ExampleFunctionCalling.m b/examples/ExampleFunctionCalling.m index 69b8687..3da0029 100644 --- a/examples/ExampleFunctionCalling.m +++ b/examples/ExampleFunctionCalling.m @@ -39,20 +39,18 @@ % Extract individual paper entries from the API response and use ChatGPT % to determine whether each paper is related to the specified topic. -pattern = '(.*?)'; - % ChatGPT will parse the XML file, so we only need to extract the relevant % entries. -matches = regexp(code, pattern, 'tokens'); +entries = extractBetween(code, '', ''); % Determine the topic of interest topic = "Embedding documents or sentences"; % Loop over the entries and see if they are relevant to the topic of % interest. -for i = 1:length(matches) +for i = 1:length(entries) prompt = "Given the following paper:" + newline +... - string(matches{i})+ newline +... + string(entries{i})+ newline +... "Is it related to the topic: "+ topic +"?" + ... " Answer 'yes' or 'no'."; [text, response] = generate(chat, prompt); @@ -60,7 +58,7 @@ % If the model classifies this entry as relevant, then it tries to % request a function call. if contains("yes", text, IgnoreCase=true) - prompt = "Given the following paper:" + newline + string(matches{i})+ newline +... + prompt = "Given the following paper:" + newline + string(entries{i})+ newline +... "Given the topic: "+ topic + newline + "Write the details to a table."; [text, response] = generate(chat, prompt); From d6fbeef9a77e496c5de7a4f45608663710085dd4 Mon Sep 17 00:00:00 2001 From: Deborah Mendes Ferreira Date: Mon, 9 Oct 2023 14:43:00 +0100 Subject: [PATCH 3/8] Bug fixes. 
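The fixes below correct the environment variable named in the API-key error
message (OPENAI_API_KEY), validate text arguments such as ApiKey, message
names, and message content as nonzero-length text scalars, and store message
fields as strings, so both string scalars and char vectors are accepted. A
minimal sketch of the intended behaviour after this patch (the API key is a
placeholder, and generate is never called, so no request is sent):

    % String scalars and char vectors are both accepted as text input.
    chat = openAIChat("You are a helpful assistant.", ApiKey='this-is-not-a-real-key');
    msgs = openAIMessages;
    msgs = addUserMessage(msgs, "Hello");   % string scalar
    msgs = addUserMessage(msgs, 'Hello');   % char vector, stored as string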
--- +llms/+utils/errorMessageCatalog.m | 2 +- openAIChat.m | 19 +++++++------------ openAIMessages.m | 23 ++++++++++++++--------- tests/topenAIChat.m | 7 ++++--- tests/topenAIMessages.m | 24 +++++++++++++++++------- 5 files changed, 43 insertions(+), 32 deletions(-) diff --git a/+llms/+utils/errorMessageCatalog.m b/+llms/+utils/errorMessageCatalog.m index ad7ddf9..0908106 100644 --- a/+llms/+utils/errorMessageCatalog.m +++ b/+llms/+utils/errorMessageCatalog.m @@ -45,7 +45,7 @@ catalog("llms:assistantMustHaveTextNameAndArguments") = "Fields 'name' and 'arguments' must be text with one or more characters."; catalog("llms:mustBeValidIndex") = "Value is larger than the number of elements in Messages ({1})."; catalog("llms:stopSequencesMustHaveMax4Elements") = "Number of elements must not be larger than 4."; -catalog("llms:keyMustBeSpecified") = "API key not found as environment variable OPEN_API_KEY and not specified via ApiKey parameter."; +catalog("llms:keyMustBeSpecified") = "API key not found as environment variable OPENAI_API_KEY and not specified via ApiKey parameter."; catalog("llms:mustHaveMessages") = "Value must contain at least one message in Messages."; catalog("llms:mustSetFunctionsForCall") = "When no functions are defined, FunctionCall must not be specified."; catalog("llms:mustBeMessagesOrTxt") = "Messages must be text with one or more characters or an openAIMessages objects."; diff --git a/openAIChat.m b/openAIChat.m index f62f85e..000fa62 100644 --- a/openAIChat.m +++ b/openAIChat.m @@ -52,8 +52,6 @@ % % SystemPrompt - System prompt. % -% AvailableModels - List of available models. -% % FunctionNames - Names of the functions that the model can % request calls. @@ -93,25 +91,18 @@ ApiKey end - properties(Constant) - %AVAILABLEMODELS List of available models. - AvailableModels = ["gpt-4", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613",... - "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k",... - "gpt-3.5-turbo-16k-0613"] - end - methods function this = openAIChat(systemPrompt, nvp) arguments systemPrompt {llms.utils.mustBeTextOrEmpty} = [] nvp.Functions (1,:) {mustBeA(nvp.Functions, "openAIFunction")} = openAIFunction.empty - nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", "gpt-4-32k-0613",... + nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ... "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k",... 
"gpt-3.5-turbo-16k-0613"])} = "gpt-3.5-turbo" nvp.Temperature (1,1) {mustBeValidTemperature} = 1 nvp.TopProbabilityMass (1,1) {mustBeValidTopP} = 1 nvp.StopSequences (1,:) {mustBeValidStop} = {} - nvp.ApiKey (1,1) {mustBeNonzeroLengthText} + nvp.ApiKey {mustBeNonzeroLengthTextScalar} nvp.PresencePenalty (1,1) {mustBeValidPenalty} = 0 nvp.FrequencyPenalty (1,1) {mustBeValidPenalty} = 0 end @@ -249,6 +240,10 @@ function mustBeValidFunctionCall(this, functionCall) end end +function mustBeNonzeroLengthTextScalar(content) +mustBeNonzeroLengthText(content) +mustBeTextScalar(content) +end function [functionsStruct, functionNames] = functionAsStruct(functions) numFunctions = numel(functions); @@ -268,7 +263,7 @@ function mustBeValidMsgs(value) end else try - mustBeNonzeroLengthText(value); + mustBeNonzeroLengthTextScalar(value); catch ME error("llms:mustBeMessagesOrTxt", llms.utils.errorMessageCatalog.getMessage("llms:mustBeMessagesOrTxt")); end diff --git a/openAIMessages.m b/openAIMessages.m index 8662c9d..e897f8a 100644 --- a/openAIMessages.m +++ b/openAIMessages.m @@ -39,11 +39,11 @@ arguments this (1,1) openAIMessages - name (1,1) {mustBeNonzeroLengthText} - content (1,1) {mustBeNonzeroLengthText} + name {mustBeNonzeroLengthTextScalar} + content {mustBeNonzeroLengthTextScalar} end - newMessage = struct("role", "system", "name", name, "content", content); + newMessage = struct("role", "system", "name", string(name), "content", string(content)); this.Messages{end+1} = newMessage; end @@ -62,10 +62,10 @@ arguments this (1,1) openAIMessages - content (1,1) {mustBeNonzeroLengthText} + content {mustBeNonzeroLengthTextScalar} end - newMessage = struct("role", "user", "content", content); + newMessage = struct("role", "user", "content", string(content)); this.Messages{end+1} = newMessage; end @@ -86,11 +86,11 @@ arguments this (1,1) openAIMessages - name (1,1) {mustBeNonzeroLengthText} - content (1,1) {mustBeNonzeroLengthText} + name {mustBeNonzeroLengthTextScalar} + content {mustBeNonzeroLengthTextScalar} end - newMessage = struct("role", "function", "name", name, "content", content); + newMessage = struct("role", "function", "name", string(name), "content", string(content)); this.Messages{end+1} = newMessage; end @@ -133,7 +133,7 @@ if isfield(messageStruct, "function_call") funCall = messageStruct.function_call; validateAssistantWithFunctionCall(funCall) - this = addAssistantMessage(this,funCall.name, funCall.arguments); + this = addAssistantMessage(this, funCall.name, funCall.arguments); else % Simple assistant response validateRegularAssistant(messageStruct.content); @@ -197,6 +197,11 @@ end end +function mustBeNonzeroLengthTextScalar(content) +mustBeNonzeroLengthText(content) +mustBeTextScalar(content) +end + function validateRegularAssistant(content) try mustBeNonzeroLengthText(content) diff --git a/tests/topenAIChat.m b/tests/topenAIChat.m index 2f17215..5d0732a 100644 --- a/tests/topenAIChat.m +++ b/tests/topenAIChat.m @@ -26,6 +26,8 @@ function saveEnvVar(testCase) function generateAcceptsSingleStringAsInput(testCase) chat = openAIChat(ApiKey="this-is-not-a-real-key"); testCase.verifyWarningFree(@()generate(chat,"This is okay")); + chat = openAIChat(ApiKey='this-is-not-a-real-key'); + testCase.verifyWarningFree(@()generate(chat,"This is okay")); end function generateAcceptsMessagesAsInput(testCase) @@ -307,7 +309,7 @@ function assignValueToProperty(property, value) ... "InvalidApiKeySize",struct( ... "Input",{{ "ApiKey" ["abc" "abc"] }},... 
- "Error","MATLAB:validation:IncompatibleSize")); + "Error","MATLAB:validators:mustBeTextScalar")); end function invalidGenerateInput = iGetInvalidGenerateInput @@ -354,5 +356,4 @@ function assignValueToProperty(property, value) "InvalidFunctionCallSize",struct( ... "Input",{{ validMessages "FunctionCall" ["validfunction", "validfunction"] }},... "Error","MATLAB:validators:mustBeTextScalar")); -end - +end \ No newline at end of file diff --git a/tests/topenAIMessages.m b/tests/topenAIMessages.m index e465b03..39db370 100644 --- a/tests/topenAIMessages.m +++ b/tests/topenAIMessages.m @@ -9,6 +9,7 @@ InvalidInputsSystemPrompt = iGetInvalidInputsSystemPrompt; InvalidInputsResponseMessage = iGetInvalidInputsResponseMessage; InvalidRemoveMessage = iGetInvalidRemoveMessage; + ValidTextInput = {"This is okay"; 'this is ok'}; end methods(Test) @@ -17,6 +18,15 @@ function constructorStartsWithEmptyMessages(testCase) testCase.verifyTrue(isempty(msgs.Messages)); end + function differentInputTextAccepted(testCase, ValidTextInput) + msgs = openAIMessages; + testCase.verifyWarningFree(@()addSystemMessage(msgs, ValidTextInput, ValidTextInput)); + testCase.verifyWarningFree(@()addSystemMessage(msgs, ValidTextInput, ValidTextInput)); + testCase.verifyWarningFree(@()addUserMessage(msgs, ValidTextInput)); + testCase.verifyWarningFree(@()addFunctionMessage(msgs, ValidTextInput, ValidTextInput)); + end + + function systemMessageIsAdded(testCase) prompt = "Here is a system prompt"; name = "example"; @@ -56,7 +66,7 @@ function assistantFunctionCallMessageIsAdded(testCase) msgs = openAIMessages; args = "{""arg1"": 1, ""arg2"": 2, ""arg3"": ""3""}"; funCall = struct("name", functionName, "arguments", args); - functionCallPrompt = struct("role", "assistant", "content", [], "function_call", funCall); + functionCallPrompt = struct("role", "assistant", "content", "", "function_call", funCall); msgs = addResponseMessage(msgs, functionCallPrompt); testCase.verifyEqual(msgs.Messages{1}, functionCallPrompt); end @@ -65,7 +75,7 @@ function assistantFunctionCallMessageWithoutArgsIsAdded(testCase) functionName = "functionName"; msgs = openAIMessages; funCall = struct("name", functionName, "arguments", "{}"); - functionCallPrompt = struct("role", "assistant", "content", [], "function_call", funCall); + functionCallPrompt = struct("role", "assistant", "content", "", "function_call", funCall); msgs = addResponseMessage(msgs, functionCallPrompt); testCase.verifyEqual(msgs.Messages{1}, functionCallPrompt); end @@ -145,11 +155,11 @@ function invalidInputsResponsePrompt(testCase, InvalidInputsResponseMessage) ... "NonScalarInputName", ... struct("Input", {{["name1" "name2"], "content"}}, ... - "Error", "MATLAB:validation:IncompatibleSize"),... + "Error", "MATLAB:validators:mustBeTextScalar"),... ... "NonScalarInputContent", ... struct("Input", {{"name", ["content1", "content2"]}}, ... - "Error", "MATLAB:validation:IncompatibleSize")); + "Error", "MATLAB:validators:mustBeTextScalar")); end function invalidInputsUserPrompt = iGetInvalidInputsUserPrompt @@ -160,7 +170,7 @@ function invalidInputsResponsePrompt(testCase, InvalidInputsResponseMessage) ... "NonScalarInput", ... struct("Input", {{["prompt1" "prompt2"]}}, ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:validators:mustBeTextScalar"), ... ... "EmptyInput", ... struct("Input", {{""}}, ... @@ -187,11 +197,11 @@ function invalidInputsResponsePrompt(testCase, InvalidInputsResponseMessage) ... "NonScalarInputName", ... 
struct("Input", {{["name1" "name2"], "content"}}, ... - "Error", "MATLAB:validation:IncompatibleSize"),... + "Error", "MATLAB:validators:mustBeTextScalar"),... ... "NonScalarInputContent", ... struct("Input", {{"name", ["content1", "content2"]}}, ... - "Error", "MATLAB:validation:IncompatibleSize")); + "Error", "MATLAB:validators:mustBeTextScalar")); end function invalidRemoveMessage = iGetInvalidRemoveMessage From d586395b45dd8e468bdcb80aecf53160811f84f1 Mon Sep 17 00:00:00 2001 From: Deborah Ferreira Date: Sat, 21 Oct 2023 16:23:17 +0100 Subject: [PATCH 4/8] Update setup_matlab.yml --- .github/workflows/setup_matlab.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/setup_matlab.yml b/.github/workflows/setup_matlab.yml index 8a82dee..87a90ac 100644 --- a/.github/workflows/setup_matlab.yml +++ b/.github/workflows/setup_matlab.yml @@ -14,3 +14,4 @@ jobs: with: test-results-junit: test-results/results.xml code-coverage-cobertura: code-coverage/coverage.xml + source-folder: . From ebe08c6e330edfb1873cd1b46cbd4250f8fe75e0 Mon Sep 17 00:00:00 2001 From: Deborah Mendes Ferreira Date: Sun, 22 Oct 2023 16:07:33 +0000 Subject: [PATCH 5/8] Summary limit --- examples/ExampleSummarization.m | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/examples/ExampleSummarization.m b/examples/ExampleSummarization.m index 27dba15..847596c 100644 --- a/examples/ExampleSummarization.m +++ b/examples/ExampleSummarization.m @@ -39,11 +39,19 @@ % Looping process to gradually summarize the text chunk by chunk, reducing % the chunk size with each iteration. +numCalls = 0; while numel(chunks)>1 summarizedChunks = strings(size(chunks)); + numCalls = numCalls + numel(chunks); + % Add a limit to the number of calls, to ensure you are not making + % more calls than what is expected. You can change this value to match + % what is needed for your application. + if numCalls > 20 + error("Document is too long to be summarized.") + end for i = 1:length(chunks) - summarizedChunks(i) = generate(summarizer, "Summarize this content:" + newline + chunks(i)); + summarizedChunks(i) = generate(summarizer, "Summarize this content:" + newline + chunks(i)); end % Merging the summarized chunks to serve as the base for the next iteration From 6a91fedc23f2ba43d699b2418f581b0a8378672a Mon Sep 17 00:00:00 2001 From: Deborah Mendes Ferreira Date: Mon, 23 Oct 2023 09:36:24 +0000 Subject: [PATCH 6/8] Fixing arg parsing bugs. --- openAIChat.m | 64 +++++++++++++++++++++++------------------- tests/topenAIChat.m | 68 ++++++++++++++++++++++----------------------- 2 files changed, 69 insertions(+), 63 deletions(-) diff --git a/openAIChat.m b/openAIChat.m index 000fa62..b3cf296 100644 --- a/openAIChat.m +++ b/openAIChat.m @@ -57,10 +57,7 @@ % Copyright 2023 The MathWorks, Inc. - properties - %MODELNAME Model name. - ModelName - + properties %TEMPERATURE Temperature of generation. Temperature @@ -74,15 +71,18 @@ PresencePenalty %FREQUENCYPENALTY Penalty for using a token that is frequent in the training data. - FrequencyPenalty - - %SYSTEMPROMPT System prompt. - SystemPrompt = [] + FrequencyPenalty end properties(SetAccess=private) %FUNCTIONNAMES Names of the functions that the model can request calls FunctionNames + + %MODELNAME Model name. + ModelName + + %SYSTEMPROMPT System prompt. + SystemPrompt = [] end properties(Access=private) @@ -99,12 +99,12 @@ nvp.ModelName (1,1) {mustBeMember(nvp.ModelName,["gpt-4", "gpt-4-0613", "gpt-4-32k", ... "gpt-3.5-turbo", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k",... 
"gpt-3.5-turbo-16k-0613"])} = "gpt-3.5-turbo" - nvp.Temperature (1,1) {mustBeValidTemperature} = 1 - nvp.TopProbabilityMass (1,1) {mustBeValidTopP} = 1 - nvp.StopSequences (1,:) {mustBeValidStop} = {} - nvp.ApiKey {mustBeNonzeroLengthTextScalar} - nvp.PresencePenalty (1,1) {mustBeValidPenalty} = 0 - nvp.FrequencyPenalty (1,1) {mustBeValidPenalty} = 0 + nvp.Temperature {mustBeValidTemperature} = 1 + nvp.TopProbabilityMass {mustBeValidTopP} = 1 + nvp.StopSequences {mustBeValidStop} = {} + nvp.ApiKey {mustBeNonzeroLengthTextScalar} + nvp.PresencePenalty {mustBeValidPenalty} = 0 + nvp.FrequencyPenalty {mustBeValidPenalty} = 0 end if ~isempty(nvp.Functions) @@ -180,40 +180,46 @@ function this = set.Temperature(this, temperature) arguments this openAIChat - temperature (1,1) {mustBeValidTemperature} + temperature end + mustBeValidTemperature(temperature); + this.Temperature = temperature; end function this = set.TopProbabilityMass(this,topP) arguments this openAIChat - topP (1,1) {mustBeValidTopP} + topP end + mustBeValidTopP(topP); this.TopProbabilityMass = topP; end function this = set.StopSequences(this,stop) arguments this openAIChat - stop (1,:) {mustBeValidStop} + stop end + mustBeValidStop(stop); this.StopSequences = stop; end function this = set.PresencePenalty(this,penalty) arguments this openAIChat - penalty (1,1) {mustBeValidPenalty} + penalty end + mustBeValidPenalty(penalty) this.PresencePenalty = penalty; end function this = set.FrequencyPenalty(this,penalty) arguments this openAIChat - penalty (1,1) {mustBeValidPenalty} + penalty end + mustBeValidPenalty(penalty) this.FrequencyPenalty = penalty; end end @@ -271,24 +277,24 @@ function mustBeValidMsgs(value) end function mustBeValidPenalty(value) -mustBeLessThanOrEqual(value,2); -mustBeGreaterThanOrEqual(value,-2); +validateattributes(value, {'numeric'}, {'real', 'scalar', 'nonsparse', '<=', 2, '>=', -2}) end function mustBeValidTopP(value) -mustBeNonnegative(value); -mustBeLessThanOrEqual(value,1); +validateattributes(value, {'numeric'}, {'real', 'scalar', 'nonnegative', 'nonsparse', '<=', 1}) end function mustBeValidTemperature(value) -mustBeNonnegative(value); -mustBeLessThanOrEqual(value,2) +validateattributes(value, {'numeric'}, {'real', 'scalar', 'nonnegative', 'nonsparse', '<=', 2}) end function mustBeValidStop(value) -mustBeNonzeroLengthText(value); -% This restriction is set by the OpenAI API -if numel(value)>4 - error("llms:stopSequencesMustHaveMax4Elements", llms.utils.errorMessageCatalog.getMessage("llms:stopSequencesMustHaveMax4Elements")); +if ~isempty(value) + mustBeVector(value); + mustBeNonzeroLengthText(value); + % This restriction is set by the OpenAI API + if numel(value)>4 + error("llms:stopSequencesMustHaveMax4Elements", llms.utils.errorMessageCatalog.getMessage("llms:stopSequencesMustHaveMax4Elements")); + end end end \ No newline at end of file diff --git a/tests/topenAIChat.m b/tests/topenAIChat.m index 5d0732a..ce6ccb9 100644 --- a/tests/topenAIChat.m +++ b/tests/topenAIChat.m @@ -94,42 +94,42 @@ function assignValueToProperty(property, value) "InvalidTemperatureType", struct( ... "Property", "Temperature", ... "Value", "2", ... - "Error", "MATLAB:validators:mustBeNumericOrLogical"), ... + "Error", "MATLAB:invalidType"), ... ... "InvalidTemperatureSize", struct( ... "Property", "Temperature", ... "Value", [1 1 1], ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:expectedScalar"), ... ... "TemperatureTooLarge", struct( ... "Property", "Temperature", ... "Value", 20, ... 
- "Error", "MATLAB:validators:mustBeLessThanOrEqual"), ... + "Error", "MATLAB:notLessEqual"), ... ... "TemperatureTooSmall", struct( ... "Property", "Temperature", ... "Value", -20, ... - "Error", "MATLAB:validators:mustBeNonnegative"), ... + "Error", "MATLAB:expectedNonnegative"), ... ... "InvalidTopProbabilityMassType", struct( ... "Property", "TopProbabilityMass", ... "Value", "2", ... - "Error", "MATLAB:validators:mustBeNumericOrLogical"), ... + "Error", "MATLAB:invalidType"), ... ... "InvalidTopProbabilityMassSize", struct( ... "Property", "TopProbabilityMass", ... "Value", [1 1 1], ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:expectedScalar"), ... ... "TopProbabilityMassTooLarge", struct( ... "Property", "TopProbabilityMass", ... "Value", 20, ... - "Error", "MATLAB:validators:mustBeLessThanOrEqual"), ... + "Error", "MATLAB:notLessEqual"), ... ... "TopProbabilityMassTooSmall", struct( ... "Property", "TopProbabilityMass", ... "Value", -20, ... - "Error", "MATLAB:validators:mustBeNonnegative"), ... + "Error", "MATLAB:expectedNonnegative"), ... ... "WrongTypeStopSequences", struct( ... "Property", "StopSequences", ... @@ -139,7 +139,7 @@ function assignValueToProperty(property, value) "WrongSizeStopNonVector", struct( ... "Property", "StopSequences", ... "Value", repmat("stop", 4), ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:validators:mustBeVector"), ... ... "EmptyStopSequences", struct( ... "Property", "StopSequences", ... @@ -154,42 +154,42 @@ function assignValueToProperty(property, value) "InvalidPresencePenalty", struct( ... "Property", "PresencePenalty", ... "Value", "2", ... - "Error", "MATLAB:validators:mustBeNumericOrLogical"), ... + "Error", "MATLAB:invalidType"), ... ... "InvalidPresencePenaltySize", struct( ... "Property", "PresencePenalty", ... "Value", [1 1 1], ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:expectedScalar"), ... ... "PresencePenaltyTooLarge", struct( ... "Property", "PresencePenalty", ... "Value", 20, ... - "Error", "MATLAB:validators:mustBeLessThanOrEqual"), ... + "Error", "MATLAB:notLessEqual"), ... ... "PresencePenaltyTooSmall", struct( ... "Property", "PresencePenalty", ... "Value", -20, ... - "Error", "MATLAB:validators:mustBeGreaterThanOrEqual"), ... + "Error", "MATLAB:notGreaterEqual"), ... ... "InvalidFrequencyPenalty", struct( ... "Property", "FrequencyPenalty", ... "Value", "2", ... - "Error", "MATLAB:validators:mustBeNumericOrLogical"), ... + "Error", "MATLAB:invalidType"), ... ... "InvalidFrequencyPenaltySize", struct( ... "Property", "FrequencyPenalty", ... "Value", [1 1 1], ... - "Error", "MATLAB:validation:IncompatibleSize"), ... + "Error", "MATLAB:expectedScalar"), ... ... "FrequencyPenaltyTooLarge", struct( ... "Property", "FrequencyPenalty", ... "Value", 20, ... - "Error", "MATLAB:validators:mustBeLessThanOrEqual"), ... + "Error", "MATLAB:notLessEqual"), ... ... "FrequencyPenaltyTooSmall", struct( ... "Property", "FrequencyPenalty", ... "Value", -20, ... - "Error", "MATLAB:validators:mustBeGreaterThanOrEqual")); + "Error", "MATLAB:notGreaterEqual")); end function invalidConstructorInput = iGetInvalidConstructorInput @@ -225,35 +225,35 @@ function assignValueToProperty(property, value) ... "InvalidTemperatureType",struct( ... "Input",{{ "Temperature" "2" }},... - "Error","MATLAB:validators:mustBeNumericOrLogical"),... + "Error","MATLAB:invalidType"),... ... "InvalidTemperatureSize",struct( ... "Input",{{ "Temperature" [1 1 1] }},... 
- "Error","MATLAB:validation:IncompatibleSize"),... + "Error","MATLAB:expectedScalar"),... ... "TemperatureTooLarge",struct( ... "Input",{{ "Temperature" 20 }},... - "Error","MATLAB:validators:mustBeLessThanOrEqual"),... + "Error","MATLAB:notLessEqual"),... ... "TemperatureTooSmall",struct( ... "Input",{{ "Temperature" -20 }},... - "Error","MATLAB:validators:mustBeNonnegative"),... + "Error","MATLAB:expectedNonnegative"),... ... "InvalidTopProbabilityMassType",struct( ... "Input",{{ "TopProbabilityMass" "2" }},... - "Error","MATLAB:validators:mustBeNumericOrLogical"),... + "Error","MATLAB:invalidType"),... ... "InvalidTopProbabilityMassSize",struct( ... "Input",{{ "TopProbabilityMass" [1 1 1] }},... - "Error","MATLAB:validation:IncompatibleSize"),... + "Error","MATLAB:expectedScalar"),... ... "TopProbabilityMassTooLarge",struct( ... "Input",{{ "TopProbabilityMass" 20 }},... - "Error","MATLAB:validators:mustBeLessThanOrEqual"),... + "Error","MATLAB:notLessEqual"),... ... "TopProbabilityMassTooSmall",struct( ... "Input",{{ "TopProbabilityMass" -20 }},... - "Error","MATLAB:validators:mustBeNonnegative"),... + "Error","MATLAB:expectedNonnegative"),... ... "WrongTypeStopSequences",struct( ... "Input",{{ "StopSequences" 123}},... @@ -261,7 +261,7 @@ function assignValueToProperty(property, value) ... "WrongSizeStopNonVector",struct( ... "Input",{{ "StopSequences" repmat("stop", 4) }},... - "Error","MATLAB:validation:IncompatibleSize"),... + "Error","MATLAB:validators:mustBeVector"),... ... "EmptyStopSequences",struct( ... "Input",{{ "StopSequences" ""}},... @@ -273,35 +273,35 @@ function assignValueToProperty(property, value) ... "InvalidPresencePenalty",struct( ... "Input",{{ "PresencePenalty" "2" }},... - "Error","MATLAB:validators:mustBeNumericOrLogical"),... + "Error","MATLAB:invalidType"),... ... "InvalidPresencePenaltySize",struct( ... "Input",{{ "PresencePenalty" [1 1 1] }},... - "Error","MATLAB:validation:IncompatibleSize"),... + "Error","MATLAB:expectedScalar"),... ... "PresencePenaltyTooLarge",struct( ... "Input",{{ "PresencePenalty" 20 }},... - "Error","MATLAB:validators:mustBeLessThanOrEqual"),... + "Error","MATLAB:notLessEqual"),... ... "PresencePenaltyTooSmall",struct( ... "Input",{{ "PresencePenalty" -20 }},... - "Error","MATLAB:validators:mustBeGreaterThanOrEqual"),... + "Error","MATLAB:notGreaterEqual"),... ... "InvalidFrequencyPenalty",struct( ... "Input",{{ "FrequencyPenalty" "2" }},... - "Error","MATLAB:validators:mustBeNumericOrLogical"),... + "Error","MATLAB:invalidType"),... ... "InvalidFrequencyPenaltySize",struct( ... "Input",{{ "FrequencyPenalty" [1 1 1] }},... - "Error","MATLAB:validation:IncompatibleSize"),... + "Error","MATLAB:expectedScalar"),... ... "FrequencyPenaltyTooLarge",struct( ... "Input",{{ "FrequencyPenalty" 20 }},... - "Error","MATLAB:validators:mustBeLessThanOrEqual"),... + "Error","MATLAB:notLessEqual"),... ... "FrequencyPenaltyTooSmall",struct( ... "Input",{{ "FrequencyPenalty" -20 }},... - "Error","MATLAB:validators:mustBeGreaterThanOrEqual"),... + "Error","MATLAB:notGreaterEqual"),... ... "InvalidApiKeyType",struct( ... "Input",{{ "ApiKey" 123 }},... From 5e5618ec8c857b9aeb70de3898127b553643ed17 Mon Sep 17 00:00:00 2001 From: Deborah Mendes Ferreira Date: Tue, 24 Oct 2023 21:10:08 +0100 Subject: [PATCH 7/8] updating readme with new links. 
--- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 147ea9b..197d32f 100644 --- a/README.md +++ b/README.md @@ -119,7 +119,7 @@ history = addUserMessage(history,"Generate MATLAB code that computes that"); ### Calling MATLAB functions with the API The optional parameter `functions` can be used to provide function specifications to the API. The purpose of this is to enable models to generate function arguments which adhere to the provided specifications. -Note that the API is not able to directly call any function, so you should call the function and pass the values to the API directly. This process can be automated as shown in [ExampleFunctionCalling.m](/examples/ExampleFunctionCalling.m), but it's important to consider that ChatGPT can hallucinate function names, so avoid executing any arbitrary generated functions and only allow the execution of functions that you have defined. +Note that the API is not able to directly call any function, so you should call the function and pass the values to the API directly. This process can be automated as shown in [ExampleFunctionCalling.mlx](/examples/ExampleFunctionCalling.mlx), but it's important to consider that ChatGPT can hallucinate function names, so avoid executing any arbitrary generated functions and only allow the execution of functions that you have defined. For example, if you want to use the API for mathematical operations such as `sind`, instead of letting the model generate the result and risk running into hallucinations, you can give the model direct access to the function as follows: @@ -237,9 +237,9 @@ You can extract the arguments and write the data to a table, for example. ## Examples To learn how to use this in your workflows, see [Examples](/examples/). -- [ExampleSummarization.m](/examples/ExampleSummarization.m): Learn to create concise summaries of long texts with ChatGPT. (Requires Text Analytics Toolbox™) -- [ExampleChatBot.m](/examples/ExampleChatBot.m): Build a conversational chatbot capable of handling various dialogue scenarios using ChatGPT. (Requires Text Analytics Toolbox) -- [ExampleFunctionCalling.m](/examples/ExampleFunctionCalling.m): Learn how to create agents capable of executing MATLAB functions. +- [ExampleSummarization.mlx](/examples/ExampleSummarization.mlx): Learn to create concise summaries of long texts with ChatGPT. (Requires Text Analytics Toolbox™) +- [ExampleChatBot.mlx](/examples/ExampleChatBot.mlx): Build a conversational chatbot capable of handling various dialogue scenarios using ChatGPT. (Requires Text Analytics Toolbox) +- [ExampleFunctionCalling.mlx](/examples/ExampleFunctionCalling.mlx): Learn how to create agents capable of executing MATLAB functions. 
## License From 616303729a4019e1517683ba890e1bb1ee8fcd4a Mon Sep 17 00:00:00 2001 From: Deborah Mendes Ferreira Date: Tue, 24 Oct 2023 21:08:45 +0100 Subject: [PATCH 8/8] replacing examples with mlx --- examples/ExampleChatBot.m | 74 ------------------- examples/ExampleChatBot.mlx | Bin 0 -> 4891 bytes examples/ExampleFunctionCalling.m | 108 ---------------------------- examples/ExampleFunctionCalling.mlx | Bin 0 -> 7934 bytes examples/ExampleSummarization.m | 96 ------------------------- examples/ExampleSummarization.mlx | Bin 0 -> 6735 bytes 6 files changed, 278 deletions(-) delete mode 100644 examples/ExampleChatBot.m create mode 100644 examples/ExampleChatBot.mlx delete mode 100644 examples/ExampleFunctionCalling.m create mode 100644 examples/ExampleFunctionCalling.mlx delete mode 100644 examples/ExampleSummarization.m create mode 100644 examples/ExampleSummarization.mlx diff --git a/examples/ExampleChatBot.m b/examples/ExampleChatBot.m deleted file mode 100644 index 425f3a3..0000000 --- a/examples/ExampleChatBot.m +++ /dev/null @@ -1,74 +0,0 @@ -%% Creating a Chatbot -% This script orchestrates a chat interaction with the OpenAI Chat Completions API, taking user -% inputs, maintaining a word count, and ensuring the chat remains within a -% predefined word limit. -% -% Running this example will start an interactive ChatBot in your terminal and it can be ended -% by pressing Ctrl+C or typing "end". - -% Set the maximum allowable number of words per chat session -wordLimit = 2000; - -% Define the keyword that, when entered by the user, ends the chat session -stopWord = "end"; - -modelName = "gpt-3.5-turbo"; -chat = openAIChat("You are a helpful assistant.", ModelName=modelName); - -messages = openAIMessages; - -query = ""; -totalWords = 0; -messagesSizes = []; - -% Main loop: continues indefinitely until the user inputs the stop word -while true - % Prompt the user for input and convert it to a string - query = input("User: ", "s"); - query = string(query); - - % If the user inputs the stop word, display a farewell message and exit the loop - if query == stopWord - disp("AI: Closing the chat. Have a great day!") - break; - end - - - numWordsQuery = countNumWords(query); - - % If the query exceeds the word limit, display an error message and halt execution - if numWordsQuery>wordLimit - error("Your query should have less than 2000 words. 
You query had " + numWordsQuery + " words") - end - - % Keep track of the size of each message and the total number of words used so far - messagesSizes = [messagesSizes; numWordsQuery]; %#ok - totalWords = totalWords + numWordsQuery; - - % If the total word count exceeds the limit, remove messages from the start of the session until it no longer does - while totalWords > wordLimit - totalWords = totalWords - messagesSizes(1); - messages = removeMessage(messages, 1); - messagesSizes(1) = []; - end - - % Add the user's message to the session and generate a response using the OpenAI API - messages = addUserMessage(messages, query); - [text, response] = generate(chat, messages); - - disp("AI: " + text) - - % Count the number of words in the AI's response and update the total word count - numWordsResponse = countNumWords(text); - messagesSizes = [messagesSizes; numWordsResponse]; %#ok - totalWords = totalWords + numWordsResponse; - - % Add the AI's response to the session - messages = addResponseMessage(messages, response); -end - -%% countNumWords function -% Function to count the number of words in a text string -function numWords = countNumWords(text) - numWords = doclength(tokenizedDocument(text)); -end \ No newline at end of file diff --git a/examples/ExampleChatBot.mlx b/examples/ExampleChatBot.mlx new file mode 100644 index 0000000000000000000000000000000000000000..68d56b93029e8f2d457c845a13e28621cae10a81 GIT binary patch literal 4891 zcmaKw1yoe+*2hO0q`MJ8a_E$h?j8^Z0m%`mp}SG(cp-`;kD&4Ty0K?r~-H|&&8`N{3X`V+4sdta6MOTl5AVB z3oGB=-<^@7^3rI+Q&%gU3l&;A{NbG^ZBLj>I5ma>I3Z`7S8k6^*@wkN{4%})?QqXO!Xb`Kk3pr-ZIY_eIV~w zr6%%ZIAU8;yX5>9VSkLF{RicyQ?X?>BxQOu0N_57vZ)Ki-j#>@*Zpl`2a+zX^udov zwhJLU5f*6&Pn2?z*S(l&-FB7rDoI85*S`G0id}geBz>QcPF!iNcjcxNL^iA-BX)UZR>n6Nol z$RYePO!%FQxZ$Q9uF91LGjl8!7b}(|Ep7k!Qgg5UV_Of!IJQM~?Ne_oa|EVH(L+ERrLcIq&61e*I5`+wcM2X2j*qiNj<6bADKx~NLdZ9bGJ>P_ zYupZxFE8h0SV+kJ8fc1Dy~;f4Yg&<_c&9D{3lrg|!{l~mM?=lHum1Ql;q zSOieVRup9;4S@?6h=>qja&C+xflv|2>+6v=FU>)>l+tCwI z7x=sUny{*T6&KdJ4tqoEBGf4JP*T+bT1qMq&?39hi*vrU`uuewXUJ19@5^k8;d3;| z9;I#z~J_VqsB7|yCpoY|a)H>b6~b5pjF@y@iBSV*18+*M(Sb49Y82d(OY8X*WiLIidxX;Lk#crW| zq7eKr=yj=I;fv9tUsZ0mX&i5r6l-(X4BLCVq#X$aroCr%0z79HB$u~|ID}19$0*3IK zo2sWZ)7|pOI3eJtOi?>vT1O7o_?wQ6re=C~G0au29M6Z7VhqGohXvZp_61Z{M> zxrxs}m4(T<*u<=5wXr18L+L1DD1rFhO&x2q4zMcJ)<>)hBH@MdOij#U1(sFK$jM-dI2nzZ(%Yl4(=pLigho9fp zY`(wW5TPW$V^In(hqLd?;X)JhiE^p=!0m23Oyj#s;VxmO@C*Y%VmR zPnfvbZi8Yi1(Yp8&_W88;dBKWqf&I!k#VengT6Y)c9vAmLkR8MHA?8eK=Eh2=YKIYd9Ujz4-EtRvpM8!vjP`hKQr5^6*s z9^?_VUWhPOH#)0OeUl1`!L~1Q4{m*`le!MWFf~b{#@3~ds7MJH9y9ZvtI!Y`)#zT} z55R%zh(BJScslM-ipS+wm;B9hHzH@Ip~XWFKB^g9sx#sCZTbT{8%MA3^NIsvQp!)J z%jd#<1}Kdu335Ex-x_O4s-ho)qCXNjat@GX3$*rL3n3;mJ>)*Un(|2QNTH8MBdmq@ znBzfvCrb`J*du(@yT%2&=kpko6zGY1{p)NZx~~y04Om5kMqhb7zxJV=w5uG2vd`rq zlIL|(5hDwTi6?~k{F@Q`mjZK-p9ehKlc~p&Qc{7R zzQtcqYJ*K^(_rjvL8CSsna9j=Y3JgQ;eFm6eFvZC~5$J2)UO2 zC-pnJyE(c4U*g|0?0~Kb6SiJ(QkwdxFwIrdS$D7^p{~m03URv5K=%I=5YQjT9RTa;!K*K`Zp+``Js0EaM@s3960J%+K4& zG9NoTDa+C;)mo^690|fR<#Qo}lCGU5Y`^4I>b%ax-K<{tUNO8eV5x`7ZOINV51f+FO zOg$ZZPuBX}vQ0WbFh8}4?BQ1l2G7I5ya|i%v0Q;ba7DeSH8<*+)yNE;N=lH0T1q~! 
zjbMZ2w%X-w-;=*onYAi)gJGbM61P0G+Kemi!dAOD(^mh>{eT`~6RIfdZw)e}ZZ{Lw zQucDVG!Y_~vQROekwRo{G^=>X102dVoE!{S(EKkZ54;8^`gjU%cL3CXgv z+pQ8!cMDVD(^7$m39P|O7&E!`lnSOQQ|xg<>%&jPsDXITGA0RFsVJ4VKj#`LJ(=L_ z^y4O7#br}d-++ui%HfiHn;^XMO$D1_>1eq>{O zJuvfq?*9s^vz3~eMx$<1|Ndum;+N`2o8%<91_RN@%=nVwiAVbe1YC8H*)CzbdiEYU z`m-wu1Z=Hckh<3@WI8rCAPSn9NzqzVd2|0U~+X2Phq*T%91U--|Wb z@UQ-Fhh@h5Mf?PybO!olxF*N6499es5~Mk?|0V7E>WUnbz?{Y=zxw9!Ez5X7bzA3# ztw`$ORBk}#z;kR|)FQ=u_O$KXs58t#8mKeCAQSW|Z@@?V4O(Vm3w|6OD)%ON$rYcC zX5Dm{oqVAr@mv2Q85tKtvjqknGHZ};3xweKbywd&UYVX_w`1Ao+ zs`z9fL6s(cbElvR9|~8f{?~V%BSRdYdOv%=Gg>5|=4inT8?A{|HBX}iN+3Fd&WLGo zSoYF*$rgPaDH%2GWt>ydWW$^Uz&-w6h*(PQMK4TdwQYdh~nh;AQ$}q_p4c8g!2nmKKFLjje4``UGQY3-Zwia1>A6RRV@?ohr0P z6@MD}bdf(c`XaESLRi5Z*bFPw`jcn7&Kl=1lA{KaDc#?BI(Ys!QIjtnw#W_yW7#*S z^YW9Z29KpE_g-EpG&(^t$Tq5XA|=KFv9Y&Rt(KU1IeN>;>?yTaEnZ@eisv`(k;S-U z1~kF^DM@eo{P6(#=b-zqQFHlRKj_As63`I`6ch51MWsItKW;Att!5G$YHc$%(D$EW z?W7pdwS?z6Iw_2LW=1Ft14_Vw1bP>pjvx%DfMWp1>Kuk+kPr47ktEV}nF zAk5faId`lD0{lu3i%fr7-geE~tL{7SqFBitz4xTRqV4B?e7irlaujdM0DoFaWd*4p zp56pY>7?R>o{6o@_a@$ruPx=OQ0D6b<(k_<^42?RW6#t^PZM&OJji)x`={Z8AodV* zSBMJqh2yUQyKBPnLwE^WPso@qYU;}VnxN)cY#~f#>Hx)8rIW|x@-V5jc=?`8t;~6v zbg0?r)(!QZ(~V$Eeir7;@UzT~^(96anIEAuI?)?z3pVIa+k`bLIVpds?RC1HuZBB0pADVhhZJ$sUxqL5 zADlfM*Nj*Y!0ZQfwW3lbt((BdBRq9`x#PFc6yD#2{#PcsD|I*eeoHOk|AW*Yv3OVWu8aTH947b&%{vaS Vt%i>A%MuHDiX!J{i163jzW|VUY(D@1 literal 0 HcmV?d00001 diff --git a/examples/ExampleFunctionCalling.m b/examples/ExampleFunctionCalling.m deleted file mode 100644 index 3da0029..0000000 --- a/examples/ExampleFunctionCalling.m +++ /dev/null @@ -1,108 +0,0 @@ -%% Using ChatGPT with function calls -% This script automatically analyzes recent scientific papers from the -% ArXiv API, filtering papers based on the topic and using ChatGPT -% functions feature to extract relevant information on the papers. - -%% Initialize OpenAI API Function and Chat -% Set up the function to store paper details and initiate a chat with the OpenAI API -% with a defined role as a scientific paper expert. - -% Define the function that you want the model to have access to. The -% function is defined at the end of the example. -f = openAIFunction("writePaperDetails", "Function to write paper details to a table."); -f = addParameter(f, "name", type="string", description="Name of the paper."); -f = addParameter(f, "url", type="string", description="URL containing the paper."); -f = addParameter(f, "explanation", type="string", description="Explanation on why the paper is related to the given topic."); - -chat = openAIChat("You are an expert in filtering scientific papers. " + ... - "Given a certain topic, you are able to decide if the paper" + ... - " fits the given topic or not.", Functions=f); - -%% Query ArXiv API for Recent Papers -% Specify the category of interest, the date range for the query, and the maximum -% number of results to retrieve from the ArXiv API. - -category = "cs.CL"; -endDate = datetime("today", "Format","uuuuMMdd"); -startDate = datetime("today", "Format","uuuuMMdd") - 5; -maxResults = 40; -urlQuery = "https://export.arxiv.org/api/query?search_query=" + ... - "cat:" + category + ... - "&submittedDate=["+string(startDate)+"+TO+"+string(endDate)+"]"+... - "&max_results=" + maxResults + ... 
- "&sortBy=submittedDate&sortOrder=descending"; - -options = weboptions('Timeout',160); -code = webread(urlQuery,options); - -%% Extract Paper Entries and Filter by Topic -% Extract individual paper entries from the API response and use ChatGPT -% to determine whether each paper is related to the specified topic. - -% ChatGPT will parse the XML file, so we only need to extract the relevant -% entries. -entries = extractBetween(code, '', ''); - -% Determine the topic of interest -topic = "Embedding documents or sentences"; - -% Loop over the entries and see if they are relevant to the topic of -% interest. -for i = 1:length(entries) - prompt = "Given the following paper:" + newline +... - string(entries{i})+ newline +... - "Is it related to the topic: "+ topic +"?" + ... - " Answer 'yes' or 'no'."; - [text, response] = generate(chat, prompt); - - % If the model classifies this entry as relevant, then it tries to - % request a function call. - if contains("yes", text, IgnoreCase=true) - prompt = "Given the following paper:" + newline + string(entries{i})+ newline +... - "Given the topic: "+ topic + newline + "Write the details to a table."; - [text, response] = generate(chat, prompt); - - % If function_call if part of the response, it means the model is - % requesting a function call. The function call request should - % contain the needed arguments to call the function specified at - % the end of this example and defined with openAIFunctions - if isfield(response, "function_call") - funCall = response.function_call; - functionCallAttempt(funCall); - end - end -end - -%% Function to Handle Function Call Attempts -% This function handles function call attempts from the model, checking -% the function name and arguments before calling the appropriate function to -% store the paper details. - -function functionCallAttempt(funCall) -% The model can sometimes hallucinate function names, so you need to ensure -% that it's suggesting the correct name. -if funCall.name == "writePaperDetails" - try - % The model can sometimes return improperly formed JSON, which - % needs to be handled - funArgs = jsondecode(funCall.arguments); - catch ME - error("Model returned improperly formed JSON."); - end - % The model can hallucinate arguments. The code needs to ensure the - % arguments have been defined before calling the function. - if isfield(funArgs, "name") && isfield(funArgs, "url") && isfield(funArgs,"explanation") - writePaperDetails(string(funArgs.name), string(funArgs.url), string(funArgs.explanation)); - end -end -end - -%% Function to Write Paper Details to CSV File -% This function takes the details of a scientific paper and writes them to -% a CSV file for further review. 
- -function writePaperDetails(name, url, desc) -filename = "papers_to_read.csv"; -T = table(name, url, desc, VariableNames=["Name", "URL", "Description"]); -writetable(T, filename, WriteMode="append"); -end \ No newline at end of file diff --git a/examples/ExampleFunctionCalling.mlx b/examples/ExampleFunctionCalling.mlx new file mode 100644 index 0000000000000000000000000000000000000000..ee82aa0bc6f3ca5eab21e275ea4ce57bc6feab53 GIT binary patch literal 7934 zcmaKR1yEeew)Wrzm*B3!JvhO2aCaDBfPukXg9ZjCNJ0p1g9QkI;10pvNpL5)1qkqS z{`cygd-CqPdspvW)m7`OUSF?Xy}Cb5AQJL(004mg)aU>UeG>4O2mnAO5&%H>)MFy+ z;_L}__O#ITa|L^tarrty>rzJ4yLs>;j)O3U3{o3hrO=rULup``a&YmEb!a9$!D%5= zFepG!jTHq4`I)nu_fF=i+`zV(bq}g+Mb^lcpJ*Clr8n5gpfS+s`wIhYIYg5%H)4;* zZkFCa!2x2iM6K$@K2v?U2jfyWJCwF~v|wvsG7>d95mY3;`?5mQOts{liWtzIH7}da zj*#xNiK&dE(s!(a>}=r4@-%sLZD~AbyK4#(KeqjqSWQ`w_y!8=k!UfN&pIU4q2*aY zc_rq-yd;^gS|he9uw?1Y%axORzg$VT_&lyt{_JXoIuathU3~K364cM`oHyx{Id}|? z<6M^ONE%2pzAWq0&!4H#YTjG9eZQ-_SClRp?M2_Py&IeTcFgyxTkC8Gls!Q!7g!Ax zdNm5$mC!7{dR&B_p=$nv<;DxqRhB1~sgVHy%qNyD+`&)}Zmz$+l_{N1>|#kB-^Z|A zzjPF0mU8kzEES6ALwn!jSYD@`S_r)h;0smg&SfL~)_;2LL1}j&JNH&dpk9lwL|ME9 znaLnOSx?gHSC7EMUk*6X6#dx_+IhohBD6@Lkt|qJ=W(0OJLLGYqfe#a zBDs=}O8B(^(Nu?zCLM6+oQPi*m6Kiuk%LuUU5%AMMbMJXQ{?g^v3Z7O%duwD{8Z2h z{4~G5-RL-NVc~swLeTb2pL$V>yy5L|!A}F&g^bLl`t@V+i8KYiAr|s@C;+Wu$pd^c z@GDC2leC!8wj-AEtr{a^0y^glbO}nz!Ksy|KB$O;w?ZPzva05VA38CxQEyIZIbH>4 zmEl7D;)5!h$EacTwEP9mKMDO|gDn;E6v1PZ{}r}QAWtaBirdD;+RN!F zdjAUB5hEuTibnj0PZphANrquwZ&T`{x<*A$7Di_xgWR09jxf4;;ASf8;CJ8OnhrGO zKeqDA9>v-e)fgYRB0`mc?!r~LIeE*iuZQ^sN%~YO*LO!&X{}_KMAIC|6>}75+HQas z55In_^UNq`U~Z8P8(g{KEAsEebgzvw^Cx?VjYn$o2u?8X;Iw_%bInsTCe1>3ls3)A zUZJK86dP{T#gCyp@Pr+s+&2qecps!^SjKjXjIH-8zk}IFBegTmbgQaTzM`0o-OUku zh-j{!BJ)|XNSv2xBFju=bF(A^`lcJ1uG{CG4z+)7_VpZF4ird9U+C%Y;x!k=R!?f! 
zd+XL)yH(Z2$3WnjtH#(e08=&W#5@ttB+cR%BK+C&n`7ta=578MGoMFSt~6-!y-6}3 z56TNsvkv!9+WMbJ%@#2EP<)CyGq8qv#NDc2GjCH0Bn>J)qasAURYNuLw`Ue+9<0ss z%qvV0Pie=v!k^sQM05hvl+or%*bi`Rc(PNnV}5Q_$2m#%T14iq))nUB2kt9P&Ohrj zLi})3(bda&zeg4YCM>0uQwVpUOqYZS$RR#7JGHi?1=$t$eh7l!XupGoPJGKVEuoif zwhP1=WH!t{@#;`G^h%oGf`UtC)$`< z{T_1WnnwA<9#lv;xBHEsCKo=Ik6TGP8Bs^tWj#ts6aH4~7#{BK_rctw+XLZpCMyIB zB}Y2lgqS{|qB}pls`O+=wc!_%YH3_Cb{j{fz=AhQRgunn^6Tvgy><|P>>u158J31mHg9XnHm-85_0u$@z;~x-U-Fd!hm+ZnjbGuiGf;3E|ZaP$|=Urw?JQ+*z$fuc!V5%r7oHSL@p;TFBa~Y8CRBxmc znKX34?2Ck>*%hfc&uVtwTpBmbQBUyUBVeYYkr8Mp;sFwsPuS%7ePA97Pnwb@`7B_f(Z>XHD zcA3vf6kUzVwDcW*@N}IHmAOVh^-97tqLtO8B?n|@DvG?o=)n|WjB5KDmT7Y(WwD+% zjoOTJD(z(9__L1lTcfAuXSjPH$>Cko@vQB|Rl)tlw|jqDUu5bP#k+V)%||-q1aFs& z+Rs$*#~*y=3u)BB0;N2>R@l%Gd4Ode=GaTCdc2L3qGln)sQt;q%ay;FJU!6?!@M*H@t(uSjj&ms*@`oO6vb+o9tk3aLnMHZ$j*`11Li;+Vx2KG2e0{(%nV2m%Vd9fo`*f2fdDFC%ROaHB{B>6qJyCRQx-3@Otyne8e(NQuhrf!&)I@{&}!FYhwPvYPVhn=actg&v5WAYapi zM!O`OlwmdZ%eYEa{fW5?y6@^(Jw@nZKTLTnwlqxe=(*_De1`Ug1N+y+u5+^$t44~T z<+o(_a-10WRoK{rTAUsx?Q?BaEtet7GKvUoO=TXXJ(m+}jjMz8R!mMnZCA-#TM5K+ zPI@ODl%Y}2j3Q26I+ZV&^df@Ew&Zxo93?aCl@1;_ZgT`*CVi~vZ~9VbbN8D|u`Qg) zDN;HJC&RvT915C8>$ebH>dRnQm%zh_Hm76`b@Xs&)?oe7oa{nE^D($1D!gYjFKJAUIn*C<4jE_p zF3Yw1inH5-ZbwS{KKdi7s@5k3eV4v zDyWLIx0oan-JfJ>@j=SLM0!qo>>%CP{v-Hz9(%D%AYAdVwp#K0)z zZi0uLGidBC`J>To^2rEQ&b}37({p=%Dg5heM%{*W=c^RKoIDop>Ar(#&?VA(^^M!v zFUfxvEa0m0x06@^fP)?Yfd5nh{&&IR;^pb;_5aJ2L(>E2r9Og(l@i*E0(HQ^=Rg?Y zJFT^OXM&E#4t>{PWa%1u1A;_luIA>{N@Bl+SL1#)=OY{tEk37GSPC2#O`ER&eIa9^ zF_0fP{&>j-Ypt1P*4~>^u=6Ni8xYV0rQ+lWE?)*aN;v;syaZ3a1F>`qHIBGDHU-mH z3KQ$EmefRhfOzk}W3V^Flsxe74sYBfi3!t8u2C?R+Rdm=iBgKW8}}SoyI?EXWanzc zinA(as{LOEQ;SiWOfs8;o6ml@jQv>3K0C0dcHm^r&ayd!$d2!zGQz<0I!l+31~Fn$ zf_uYk6Ku0zPShN>Vk!(gtX|gxj}ObI!3l@lT@(-z-$kW}c=<<5#*LqqgJ;9FawTgh zH9XspZZ!r1i6rQ6@h|Ns*mC)tyX7lfN_VA7HbOm>!I_e`tNY@&I%oO8&A+!Dy&z2* zzXcyJk%)Gb9hw>nek&0|8o9W8`ESZKni-pycPx6^YK)6y3D#aVfJ2LAudDm=^;WX7 zg5%}q<|Hg?n|(Rdatq?b{aQ2GgYV@YN1RuVS02iEGc{IEgW(lbmvwEy?Q80iZo9u6 zH*OXN4!So|kd7OP7vNsV$i)R6Vnlm9B(i(0h`MpDp_9vvacGt2;wk==WMuw!d{dLF z!3r76rO!oCbk9#3UsOR4#>2EFzc!>nGUJ=juO@1EKMRW4e~oO5>C05XUCMV^ToTKw z92qsYI~ZuF)%re9A>TKI+;9T5vBrCBqPO(ow3e3D_w!~vebMdUuqu+OelAP283B%u zl83$GX%)yc$Neg{OvXt85nn5Ez`Al?lK?Fv;inWv<{Ii5a%(C}sz@Wyf2kn;Md^P! zEUu{`>!Os}s=IC6^UIn$t&OLrMJ7Qb#G;dK+N#(A29${Qg?rj4!a$cO^tB?LZrm!T zR8nH(vcdKkOla`jt7@%P{m7!N(5;t9X_FFD%9`8{6NW(5m&1XhEUc+86>0QE!3u|M zUix(A!V_&8&D^U^JsXbGH^$b5G9PmFUQw>0!B_A~&SHws`=GPr8`oYz9Ht**cqXwc z`o+>Ss7`IOVHaW7q?uj<1jfklxqL|oyhPr?zN*k!g3tzk6MI=1vT~UnC`OYI-)tIK zUaRMIItifn4aYbSx$n{KL068m-6N^*a>=;?2V~xEgqt64f<#~u8ihQgZ)KBgFeCKH z`O4&bEc3WI)iB+yF`9?t3rh`qIlCNpWjY0Z_`Y*3V2LCRoiuW8dFa9HUyxVrQ_buM zOkPTF)&^tqbq5s|OND7HA548UCkA_-LoM-Zw1lV>ZCLulyDrP(!k&NQ0deg{u{fnI z`%@v~mAxI-PD(-`q+Jd(ilrkL5`ZwX_F>bj_IoIZAj&zi=#+{CzhzD(mJDMLE-m*w z-C)_2@Qr8DSd>hmzhGvYTjnp~3cgH;jhK#MV#+kI8Ci_W9}e(y(zAJ8BE#S}Y&!~W z2_hp|S+rPP*OSGvBU3NNQsl>SWL+obGgfU(^vxLy!By9=$cXw`oAxm_F%-*#bAe}1 zEG~l{ICb!izQO)-MlV^3MTfrkjh5x{YD`*G+DG=L3^x7lX9#cdNzhWFu3H9d{QcR? 
zsp_wC+;mTFV;*%#xIAK{I$k3n8cG){^Pw_~Ha+VkQU^yJCx|1VmAM52!c})=a)x&C zJEhq<)*4vX2DA7pBHv>5wkF9a6&KW?GgDiU-3VSZ?X^m4EBHF|QEZu`B^6KVBzn>?Y?~3#fx?NGOkj{+j2bQ66Z+F+Yz0h1) zszlk>5=~e7E~~D{;58tq@2cwBl!RWdYcpU>SN5p_gv8?=72#4uTsye_>;T0hbF>SA z$X;J2T#<%r#|beu_0n*;6%}QKorXOJoicT) z&{Ank3u74*`mtUSu+PcCRdof9|BEO^DHcoHh+L0O%biUXRIJMyDz?)Eqq}8HbSZLw zSNGrRbmw8lXYJ^20)~zFOBcC9!ok0~d!ij?3R(mi zf%r2V8|zudxvyU4ey<5sOi=Jg{3v(j`%pi(xbWSvGyg8;FjT?2@oxGn;uVo_Bny=` zK@sEogn5iun&MSwoGz2XIe}XSy8y4A=t?qws9PL6j1N2jLSSEr6vjfxpzT+%l_M(6 zq)a!_bP7o}L3(Z7vd^JM(tNCI;z{)WwY$jIK>2Jp;!!9WOXUDs&p$>|3GdCSyT9$=z4S{;bP*#C} zm#hYKdSWsm)c8V&1j&uvfV8OxY^fMTYk8Ep*51#gWOA%D`Lw&(Q6c8?XeBZesV5jH zXc98J`RrctR!|9Vk~=+J*0nliVt3(GwCB$wdz7-%iFE|!jWWO1#_rx6AHwHm)^jGc z+shM=R3@5Ui6ZGDMAYsv_lHb zis#HPfx=`Yr}D=j*eg%KyFHRfu?%BB=5Y>+6)?A3+xdvK?&t}uQ#Abb+QEj#1%OM! z=YaYpSN4FH1}Oz^I-R8D?}wm`a;qDK+8Sj0lIGd5j*6Vn#&>p>)+EqH<6644#r#Kr;&5je=xGU!aN1!)Q_!&UTWPX z223A8kARf)+ycFa_o6zcG|#FMBP7?9<`+&SMOe-3bFwmTf`hVej>X12(`(A)Z*EBs z14I=(>aQvjm5KHF0*5_-I=d$P?XNbBa!xDO$;Qg+XlGvttqcvQZFHlxOGwD9UVn-B zy%)WL+|_qV+)P_1kG0$OSl(I11fdi_B>SdhR~=3A`wHJb679{6M(tYh;|q*KxEo*t zBDhRIa>c}!P~O*hhr0#%;bOmP?Ll|wV^?xs_|@n7DAIF_i6Ey>;=-_s@0yRSL^mXI z`laaF+p+@-j42OUzCfbL>EJyaywReSPj`^*2U(!?SoIQ#mq(f8Qz;y&wp>Nclx8tx zuq$kxOFeM#G5#psVCCo0-QmI9>KFEFpYn|~gI&)c&ZyP((d)hOrQnYIp%!(-6Ah?- zRA9y%7t|eR_;kmxVNJSqY4g@sySAwzXJ0DJz~22I-57Hg(Sahy!5rp0-TJh4UX!0x z0^c#B6yz|e*yXV#I>`Hd@y~~HKL@w#r{2#Hg8%Atl=o}#&;QBwyr@=>9cUMD8L?o( zq1xK)sXuN(%}HFm3=(DE<-LoIEcMA4#kd{b=e|Bz@$Fp>aU>V?Z8L`Kq ze!p~yZyA8XAZ6!vwekrUVTqj-Q8X5zW7NA+K#cvpgJ>*o$Eb6wK>4mvET}`hb>(N! zWzsfAM011@783Og#bUu0mJ?g&?8th_dmbHPJvKV>oB;6tBf>uqK%K=_g``hc;ADsZ z0L5PiAh0LM2IL9iwsvs`Yr4C*g55nKV2{6UZsU`sHNtsF-yHvrDfVGOskcY^nY*A& zNXSd>qIT0q4uLGPEwz8`#x6Npt?|n_9+*4kG!UC9z^zutY3dSC=0)Uuqrd;DOJhW| zt*hVflgTnJIa>=_)I?c=3g|s4hxmMF$Rzdzk^D^1>C024Odh zF?(G7#@Lh8;my&34DxThWalu9FVk;b*%2BN@YoW)nK@i>IWcgq>Xumj zMqQjUUw*0;7_$D*XEZU+#O|KXWez-Ze2CJCj1fFrnRKl0HS_b>+Vc2d(rOR)*WB1H z9$%Tq%H>k*^Pw-Lo%;x7Hz@y%tZINRci9sG%TJB@KM^7C`=o4nKwO;v3No_&T$3@Pe!t7wo)BNG%4R;1q3vn4@)~!O!r5lXSQ?v{vqcDFNxGQkQsO;%O);} z+E%zCK%3z_D#NoW?g!?f}lf3VBk&17;dv z`5yfyqkpc^+wF@v$zz1!9mF|&nLsl|J)gJ%b#D}{M)9QIaAbV&fD+nxam7dsBT(c% zAHU>L(dz>7pWgP)ZuK@kdCC1W?JWPvTWv5D4DtXgJKMVa<*%3e08SVWUh6p#!}aGc z@T6&S?&TJ|cMP4{$<-Oe@j2WKz!u+t^Vzi}AG1y%MCZ#$-xbvu>P){N`-ed5oP+>^ zMHA2|3Y{ypsiweJ{W(ZfgQBZa^Ig$QQRsXBR5LpLG?jQnncK|@<_(oFKM=x6GG404 z^eN?_J}~Fa_blIw-8>-H%=awkX+Cfv7xk~`)uZD}y(x9r4}P>kKzAz=S?Z?Qz@)h2 znxb?=f?-HGKg8{NWr{`YqS=rjEaXn#leI z=-(xhKcW6ye1AiAlm7>(e=Nm6G5-wlzcFi`c8>qqIQ|^o9|5lkL_z(_68-5Fewv@^ JC#ets_1 - summarizedChunks = strings(size(chunks)); - numCalls = numCalls + numel(chunks); - % Add a limit to the number of calls, to ensure you are not making - % more calls than what is expected. You can change this value to match - % what is needed for your application. 
- if numCalls > 20 - error("Document is too long to be summarized.") - end - - for i = 1:length(chunks) - summarizedChunks(i) = generate(summarizer, "Summarize this content:" + newline + chunks(i)); - end - - % Merging the summarized chunks to serve as the base for the next iteration - incrementalSummary = join(summarizedChunks); - - % Forming new chunks with a reduced size for the subsequent iteration - chunks = createChunks(incrementalSummary, limitChunkWords); -end - -% Compiling the final summary by combining the summaries from all the chunks -fullSummary = generate(summarizer, "Combine these summaries:" + newline + incrementalSummary) - -%% CreateChunks function -% This function segments a long text into smaller parts of a predefined size -% to facilitate easier summarization. It preserves the structure of -% sentences. The chunkSize should be large enough to fit at least one -% sentence. - -function chunks = createChunks(text, chunkSize) - % Tokenizing the input text for processing - text = tokenizedDocument(text); - - % Splitting the tokenized text into individual sentences - text = splitSentences(text); - chunks = []; - currentChunk = ""; - currentChunkSize = 0; - - % Iterating through the sentences to aggregate them into chunks until the chunk - % attains the predefined size, after which a new chunk is started - for i=1:length(text) - newChunkSize = currentChunkSize + doclength(text(i)); - if newChunkSize < chunkSize - currentChunkSize = currentChunkSize + doclength(text(i)); - currentChunk = currentChunk + " " + joinWords(text(i)); - else - chunks = [chunks; currentChunk]; %#ok - currentChunkSize = doclength(text(i)); - currentChunk = joinWords(text(i)); - end - end -end \ No newline at end of file diff --git a/examples/ExampleSummarization.mlx b/examples/ExampleSummarization.mlx new file mode 100644 index 0000000000000000000000000000000000000000..0f5674840fd987c48a160eae23a6706febe4a63e GIT binary patch literal 6735 zcmaJ`1yCK!wgnFEPH+f*u;32C65QSG1a}XR5D4yY2=1=IJpqEd9Nb-kOK|xo|GldF zZvOjsW_qS(s`skdt9S3(y;?;c0FMs?1A`20)G+hCBE>wgFfbJW7#KY07kx=b2RD#| zo3Vzs6Ufz&&CA}dK4ClOCr6F<};ZE%y*Yh=2kPn2P3QpLg@8+K{M(f0Fcyna5s zicAQY@F)(>9=mC0QUg1N=G};rNcBcdY7lzf9P<5%F@FoJ&v0x#(=Y}=drVS;ptOD&| zPK?+~u^B@_zGyL+XZ7^aJ6p^-Du?ZiE3=BGo`?W@4~OJK5n_!C>+P4xENmLvaW<0; zfHGj(i*aKLA4QHz<AwE)t$5LB5Ax=_`>~n66Hd`C)$?7`%n2$f-zs@N(b2Fy z5tYKL=LNfSM3uj>+9lj> zG?k2bV`}x~$^lH-nz*j-rQ`=%LTfDTI&W$9`4(`M<9LheUGFkEhHMWzdgbyi<0?4G z`9E?KKsr2CsO5Jr2)HB>Szk)Pu`o$1DKg>8@tOeL1b(Fx8l|YTov5_TLHtgO&vF~u zbx#uK=ab8#{dR79l?oE1b?%1q4z-UfU(WI79 zax-9EDa-f5Jt-i$j_OoRNnc|AozV1;qb*#}2o@#(SJ>K{y4jhUv0FHryW2yf_jlNi z=o&gMHsSh>eOP=SWeAhJV74GLyYq%cwFV7Y3A7-5V90qP1GH5Ypn8Lt>9K)yAX2 zgZwOVkvj%h#GHh^;c|j~qhyhzG0`^quV-WDFjKziRnYfulUe)j(uXk>2@p(!V-m)_ zqRf}p9zrwx?UB*&6+^J$bgAQAqgTtL)+jF;9d8E4y(9Y3y;rT=p4Pj1sL!s8B4FkP z^ByP4eeGl6W5G@H+1v|Fa`g5FX*^FaX`;HX>XflfzL&dikjkV5A@xZc^d^glR0$%j z@)jqDX?0NtIgU=6wvwfb*>NCTphuYG6jd zPfPb2RwYO>qkkkEP{th}mk8%q*_h1ylMLCmnlK&3(+-tW&6-|--k|2?fUO@A+@y75 zT2h?C`Hhl)FOaaaond1kFM8cK_RFN^(I!e#kldan0~^F=PfdTLaU)}bX+8jC{!FUZ zS;uT$0XvQKv#olEMuDJxHuEyTju+=^qPs#8Cq`i)g>Y+|yR>}0aI7xS4;URas?htC8dke z*xLf{G;j|?t%=w~)!TqhZ8DX}lZ}%b?O!ha13rsZ;}#RI-;K(ZX{MN&LaJ)n@pG|@ zZ2)e~s~rcjnF{>Zs>MQZkzA;tK!1E8oTPw#BRzG%+jKI`9A9l&T?wg63R-o_KDi>) z?UEgZlu&^(oU@SwDn5*QA&^gdT58~KrOtzpTe^doWK0iGTLo)AAeJF^5OKc+J~p7S zSsx>eHm_7U=8nS8PN%hpUZuqzapfDofPgqqN_#9-iK^VGE96J7+1nqD>$j9u_j#|M z8!^`8ZiV6DxC!2W7iB%?P&Vdo6+Ni-Bj%G0p+`0cHngRr@+WaIn-d;cxUYRqR?oga zHAQzSa-XEPWnT|Slk@4Gu{5kV2ic$4!q*JRRi_V>nXb2H?1K)Su;||7yWe 
zClSUNqK2Dl+MpLj_a-=_<;003C^U1^OFa;6#BownWT6mojpxfdxN6@g+{+BE?O%1cWL5EH=kl^OVeE2_GSpI3olg2x<|_3~MZ2WR>7U zt^4&kf=IVABrb~x_fbe6$7~?>1cq$6Ax2G%q#&fVmxs!_4w|6{^j=?I?Zwh?7GmS0 zdP6b~e+F%)sGVd!lP~$g_mL$lRjS+ypv=RA7o$gJK5;5EtGs8xx1!rjj(W=8jD5SR z=$irj;;wp_V$eXV5s-j z&KovjLFrq9AR;lIdHgI)Vh**s;EHQCs#oy(LW#7v(Ecel;-hN$ZUCh@s*lFlbl<)9 zn~gv$r{%*VMl>24B3OuNp_sqsWbavUnQ*^h2T043sqb)KHi&+2c}9giSuQNMt1%eUA~6-25=iUvI|qX6zJ<^RNwW$FHX=q z=1M(66&%vrvrsp;H7o~;*H@dHX_no*oirVrK!eK$Bq7_P(GH@=Idv65;y1isR~@35 zm!m0gc>@RNjULs1bhs{l-=~})zIl?o$Zi;>>s0&BPb%y?%-iQtT(srRLM<*!D*44x z0zW)^m{Xn@fb(^*BCc%4=vRL;0Z_zPuzAPE32nigI*GrA*ng{VF9>#whK_N$4z0FQ z7rDVR3fbeA;(L>A4&(Vtk6_;H#gRuvwJ$1A9E$j2Dnx z!}q@!!C+l*o?LHX-%208!R7l9b-Apu!i`|;B>RHo_y@uVbn_y~Y5(=CyW)))>r`FfcgKqV3-`hNHWill%Wx z7~8swE~I?W3PU^lCZ1>>-+CaaltVrITUs#-!@Nw1a3Gf=rq&CS05qn%yepckp)2lJ zk{F34Dw?g^X47|_sj#2kWI>jT($vnDCG&cM)3s;XvWAT^mElt+wFb*ql|QuJ-$yhi zoDd@L$d>|3KSbw5b+U&C2RtwacE2Sp$wTM=Kx&aKO``~INftF^uUndbaV`%Unu|s0 zack0>v&b~gHRg|>u1uyVC-^=Mgm+o~G&4UV_}#+!={z?Xb0uOw;Tz;_R_zG$?v~PL zL-`Yi_$-Fkd0~%w_-$=-GDTZz5PPS!yyU{3W48ti3rRF%YF6;OgT(EpxP6GwE~_CTYUR5(@64vbXvQ$80G zk?1}rH)j}seR_!A>eQEXT#Xx5A+xCn?~xr)#@=pP)a{O4z-=b`@_lJM9ilZ2n@y+o zyy3dJE?QU?l_UqvE#lV)Vq?P+&SH$PH$c388Fgrw&uJyzAGFG+gpEt`6-1-Q5xr*= z!Cc?sZEbUDz)94r=s(+Dc@rWcQRaht`BuaJTy~M?fuk;nIu~_Jv<2hn)I{HfvqR8J z;?2!rW@ACT9yiP@6Db>eQ@BX}EBOvshM`xd`StcwlVORI-J*P)7B2-?ucNb$u$a}E z;PIUC^bh75CrR-2ED76mO;zAA(`~ONaS}3a$5z<#CQnmlePtGLtG{17G)n2x4CxW$ zrsi`AY=P{jD65MOcCLOZEcvz#^3D^*4H2g~tG*eLtqvH1n>hQV?d2Lqf({e;OtUm( zFlA%dUd+#=!?iXg+9!-4hJ5>^!>j%)MSI~LO3XSM=R=q?sg29PoaoLP6W$p^eoB;q zU}{H&f%-XOe%7z8cXRV>vtNlkc*;XfUF42fMU4uCL;ERce=$`^Ifh`QLBGp~Ub}4c9&>rwR>4sy1y~=Y$(zyez8J+twr=4V z53t?)vrlRh{f3KIy;B@LQ0K6Iyzk4c+)Y|>pp9yvUaGrP)8o(pUk`Vg1FVDsVU?}F z4!-{W$vLiOmZp2)mTtKd4*{e)2J^}o!RVeM#YfEJ-3`axkAw*FEej6j!hK6sJ#x|n z!n?lj72EW1CHYw|H#(Sj7ot~kvaT|$1?RrMD1XM%pRLD9VCDwg%|w*h0Ln@v5^>t~ z)#Cv7(^4#+8^H~sJ4j8oJaTLqIN504s`C}%NvI zE)O?Em>X-UDZ}YkWvA`SH<)*Oy+6kNGa@mU$|w4z37Q|AOb~KL-{E*AJXnkghI1`+ zUw-;1a4b7OD@Y$qf^6om1K=71S7Z4FItON+`WKE%bzLAiTTmK)D8D?eB#;kZEiK&N zShGD~Oc;CLCGb)p5_HKv9?k@b6?$zWoj#0|d4b)fxF{xfXQ>-TS?fzD28ow!`?wW{ zp+`rYYBho4f_x*JSei(!9j>lfV4WNez?@rp8>ypm&Gj9!_K6h4YA-mPwOZznQAldA z6^t33!F=88Z*JLUN^aI@ZQS z_B+gR50pVZC-J^+Laf63HL4K_47KzTnkm>5+tDnR7q(X9yP$oRpG?At50r8J=p6Yv z78h)teN6AOIWdPVJ!iilrd$Lr*7J0nQ_WwbTp(@J9Onj}Q|;9s1#EebuBCiLaVOO1 zS(3fuKIKl=K;(rl^NR*g`Ig9;8z%Tg{Hfa+wwpbqSkrFdd%3IU0V$@+Pv$Jq&?@ zo39PhH_J?x`T38xraA;=uv;HsxD%r(mb=*Qws#m~lt zg%KB)k&2K`cEB2-Vtvf`>0NgBQY)iy!Mu<;EH0IZ4sbl=gVFL%L1si?{c+{tBwQs$ z8`n=atR$tEcW``%#&f>!y)w#o>La&E@D>2%E}Z1#_-|v~qMx)?-o4FfBM&;iy38Jq zk?`h2-Co`o_jbo0JoLp!W6vTgpO1t{XCAU+&bq$9so{5`^R$4)O)XksCLVH*f6@~8H6gvrsb{l?hR~%Xm zdqN#P6IgRGm}JXhu#Q1~Hxl~Pe5*_ z7N%~d?BqYMpjZC)!UsdeA z2N0|T_F3sN6x119VNw`4b|N59NGF^bETB3{R;6RJH)aQoy)-pzI?f=Z7F`b3pd14W! z=W4#NDGWC45ANyjd2~H@`FPF$fk$)4i5aCy2^$#W!A2em!y1nHRKExoW_NH%Bd8<> zMQ|4}wOXAA)CNEfX5$}HpVz0VbL;QEf>o8+1%win0(9tZA)pLjxWDt+p}NUD3guA| z$|+P<`ioC{&;QG);drMFRJKE0^U3^i^POLj-JGB8D7(lZGG4^rCZc!hOO*Y>_R(&& zd_vmI#4|Omr`SrhP@x?F!x!R_PQ7o!Vu@vkGhd!@Dh3di#~ZzMvB$19Q1;6W<*p=>q4_s7qeeo z#^vdNveAFXRXi;e+qqTIFF~9-r$a=3sK(6O2zD`1psKPgTjmvDPrb9~9wYG}kqC#j#T)>?>{9 zNi>~maaAdVQCaLX@@-x|7c=XNo`#+B*6MX5y;tO8h|~SN%&ETSS(6O>doKTJH;{3qB>k2OCmrCCWv?N!)Ff({IWJc$>nW#$kg6rq0L)wdAvc zmomIA_j2UXrRd01%1`2V*6pd7$Jx%l_-Up55(1o%_k{%!d$fOpWxe`wu*8vd!`{$HZ1u=MwEN08*&T{(ly1f13Z9LH{yuh3k literal 0 HcmV?d00001
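To check these patches locally in a way that mirrors the setup_matlab.yml
workflow added above, the unit tests under tests/ can be run with runtests; a
minimal sketch, assuming the repository root is the current folder:

    % Run the repository's unit tests locally, similar to what the
    % matlab-actions/run-tests step does on the GitHub-hosted runner.
    addpath(pwd);
    results = runtests("tests");
    assertSuccess(results);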