Skip to content

Commit

Permalink
Trace/replay llms.internal.sendRequest
Browse files Browse the repository at this point in the history
To decouple `texampleTests.m` from availability and speed of external servers,
record calls to `llms.internal.sendRequest` (on dev machine) and replay
(during most test runs, including CI). See tests/recordings/README.md for
instructions.
  • Loading branch information
ccreutzi committed Aug 5, 2024
1 parent 062538c commit 2565f27
Show file tree
Hide file tree
Showing 26 changed files with 151 additions and 18 deletions.
2 changes: 1 addition & 1 deletion +llms/+internal/callAzureChatAPI.m
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@

parameters = buildParametersCall(messages, functions, nvp);

[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
Expand Down
2 changes: 1 addition & 1 deletion +llms/+internal/callOllamaChatAPI.m
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@

parameters = buildParametersCall(model, messages, nvp);

[response, streamedText] = llms.internal.sendRequest(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
Expand Down
2 changes: 1 addition & 1 deletion +llms/+internal/callOpenAIChatAPI.m
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@

parameters = buildParametersCall(messages, functions, nvp);

[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
Expand Down
5 changes: 5 additions & 0 deletions +llms/+internal/sendRequestWrapper.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
function [response, streamedText] = sendRequestWrapper(varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
% Production pass-through: forwards all arguments unchanged to
% llms.internal.sendRequest and returns its outputs as-is. During test runs
% this file is shadowed by the doubles in tests/private/recording-doubles
% and tests/private/replaying-doubles, which record or replay the traffic.
[response, streamedText] = llms.internal.sendRequest(varargin{:});
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
*.env
*.asv
*.mat
!tests/recordings/*.mat
startup.m
papers_to_read.csv
data/*
Expand Down
2 changes: 1 addition & 1 deletion extractOpenAIEmbeddings.m
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@
end


response = llms.internal.sendRequest(parameters,key, END_POINT, nvp.TimeOut);
response = llms.internal.sendRequestWrapper(parameters,key, END_POINT, nvp.TimeOut);

if isfield(response.Body.Data, "data")
emb = [response.Body.Data.data.embedding];
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
% Recording double: forwards each call to llms.internal.sendRequest and
% appends {inputs, outputs} to a persistent log. Two control messages drive
% the lifecycle:
%   sendRequestWrapper("open", filename)  - set the recording file name
%   sendRequestWrapper("close")           - save the log to filename+".mat"
%                                           and reset it
% seenCalls: n-by-2 cell array; column 1 holds call inputs, column 2 holds
% {response, streamFunCalls, streamedText} for replay.
persistent seenCalls
if isempty(seenCalls)
seenCalls = cell(0,2);
end

persistent filename

% "close": persist the accumulated log, then start a fresh one.
% NOTE(review): calling "close" before any "open" leaves filename empty and
% save will error — assumes the test harness always pairs open/close.
if nargin == 1 && isequal(parameters,"close")
save(filename+".mat","seenCalls");
seenCalls = cell(0,2);
return
end

% "open": remember where the eventual "close" should save the recording.
if nargin==2 && isequal(parameters,"open")
filename = token;
return
end

% Normal call: if the caller supplied a streaming callback (positionally the
% third trailing argument), wrap it so every invocation is also logged.
streamFunCalls = {};
hasCallback = nargin >= 5 && isa(varargin{3},'function_handle');
if hasCallback
streamFun = varargin{3};
end
% Nested function: records the callback arguments, then forwards to the
% real callback. NOTE(review): the cell assignment assumes the callback is
% invoked with exactly one argument — TODO confirm for all stream functions.
function wrappedStreamFun(varargin)
streamFunCalls(end+1) = varargin;
streamFun(varargin{:});
end
if hasCallback
varargin{3} = @wrappedStreamFun;
end


% Perform the real request with the (possibly wrapped) arguments.
[response, streamedText] = llms.internal.sendRequest(parameters, token, varargin{:});

% Log inputs and outputs for later replay by the replaying double.
seenCalls(end+1,:) = {{parameters},{response,streamFunCalls,streamedText}};
end
2 changes: 2 additions & 0 deletions tests/private/recording-doubles/addpath.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
function addpath(~)
% ignore addpath calls in examples
% Shadows the built-in addpath: while this folder is on the path (via the
% test fixture), addpath calls inside the example scripts become no-ops so
% the examples cannot alter the test run's path.
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
% Replaying double: instead of contacting any server, serves responses
% recorded earlier by the recording double. Control messages:
%   sendRequestWrapper("open", filename)  - load filename+".mat" into the
%                                           persistent seenCalls queue
%   sendRequestWrapper("close")           - discard any remaining entries
% Each normal call pops the front entry of the queue (FIFO).
persistent seenCalls
if isempty(seenCalls)
seenCalls = cell(0,2);
end

% "close": drop whatever is left of the recording.
if nargin == 1 && isequal(parameters,"close")
seenCalls = cell(0,2);
return
end

% "open": load the recorded queue; the variable name "seenCalls" must match
% what the recording double saved.
if nargin==2 && isequal(parameters,"open")
load(token+".mat","seenCalls");
return
end

% Replay the oldest recorded call.
% NOTE(review): replay is purely positional — the incoming parameters are
% never compared against the recorded ones, so out-of-order or extra calls
% silently receive the wrong recording (or error once the queue is empty).
result = seenCalls{1,2};
response = result{1};
streamFunCalls = result{2};
streamedText = result{3};

% If the caller passed a streaming callback, re-drive it with the recorded
% stream chunks so streaming examples behave as they did when recorded.
if nargin >= 5 && isa(varargin{3},'function_handle')
streamFun = varargin{3};
cellfun(streamFun, streamFunCalls);
end

% Consume the entry just replayed.
seenCalls(1,:) = [];
2 changes: 2 additions & 0 deletions tests/private/replaying-doubles/addpath.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
function addpath(~)
% ignore addpath calls in examples
% Shadows the built-in addpath: while this folder is on the path (via the
% test fixture), addpath calls inside the example scripts become no-ops so
% the examples cannot alter the test run's path.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added tests/recordings/CreateSimpleChatBot.mat
Binary file not shown.
Binary file added tests/recordings/CreateSimpleOllamaChatBot.mat
Binary file not shown.
Binary file added tests/recordings/DescribeImagesUsingChatGPT.mat
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
12 changes: 12 additions & 0 deletions tests/recordings/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Test Double Recordings

Testing the examples typically takes a long time and relatively often produces false negatives, mostly due to timeout errors.

The point of testing the examples is not to test that we can connect to the servers. We have other test points for that. Hence, we insert a “test double” while testing the examples that keeps recordings of previous interactions with the servers and just replays the responses.

This directory contains those recordings.

## Generating Recordings

To generate or re-generate recordings (e.g., after changing an example, or making relevant software changes), open [`texampleTests.m`](../texampleTests.m) and in `setUpAndTearDowns`, change `capture = false;` to `capture = true;`. Then, run the test points relevant to the example(s) in question, and change `capture` back to `false`.

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file added tests/recordings/UsingDALLEToEditImages.mat
Binary file not shown.
Binary file added tests/recordings/UsingDALLEToGenerateImages.mat
Binary file not shown.
69 changes: 55 additions & 14 deletions tests/texampleTests.m
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,25 @@
ChatBotExample = {"CreateSimpleChatBot", "CreateSimpleOllamaChatBot"};
end

properties
TestDir;
end

methods (TestClassSetup)
function setUpAndTearDowns(testCase)
% Capture and replay server interactions
testCase.TestDir = fileparts(mfilename("fullpath"));
import matlab.unittest.fixtures.PathFixture
capture = false; % run in capture or replay mode, cf. recordings/README.md

if capture
testCase.applyFixture(PathFixture( ...
fullfile(testCase.TestDir,"private","recording-doubles")));
else
testCase.applyFixture(PathFixture( ...
fullfile(testCase.TestDir,"private","replaying-doubles")));
end

import matlab.unittest.fixtures.CurrentFolderFixture
testCase.applyFixture(CurrentFolderFixture("../examples/mlx-scripts"));

Expand All @@ -29,22 +45,39 @@ function setUpAndTearDowns(testCase)
testCase.addTeardown(@() iCloseAll());
end
end


methods
function startCapture(testCase,testName)
llms.internal.sendRequestWrapper("open", ...
fullfile(testCase.TestDir,"recordings",testName));
end
end

methods(TestMethodTeardown)
function closeCapture(~)
llms.internal.sendRequestWrapper("close");
end
end

methods(Test)
function testAnalyzeScientificPapersUsingFunctionCalls(~)
function testAnalyzeScientificPapersUsingFunctionCalls(testCase)
testCase.startCapture("AnalyzeScientificPapersUsingFunctionCalls");
AnalyzeScientificPapersUsingFunctionCalls;
end

function testAnalyzeSentimentinTextUsingChatGPTinJSONMode(testCase)
testCase.startCapture("AnalyzeSentimentinTextUsingChatGPTinJSONMode");
testCase.verifyWarning(@AnalyzeSentimentinTextUsingChatGPTinJSONMode,...
"llms:warningJsonInstruction");
end

function testAnalyzeTextDataUsingParallelFunctionCallwithChatGPT(~)
function testAnalyzeTextDataUsingParallelFunctionCallwithChatGPT(testCase)
testCase.startCapture("AnalyzeTextDataUsingParallelFunctionCallwithChatGPT");
AnalyzeTextDataUsingParallelFunctionCallwithChatGPT;
end

function testCreateSimpleChatBot(testCase,ChatBotExample)
testCase.startCapture(ChatBotExample);
% set up a fake input command, returning canned user prompts
count = 0;
prompts = [
Expand Down Expand Up @@ -85,43 +118,51 @@ function testCreateSimpleChatBot(testCase,ChatBotExample)
testCase.verifySize(messages.Messages,[1 2*(count-1)]);
end

function testDescribeImagesUsingChatGPT(~)
function testDescribeImagesUsingChatGPT(testCase)
testCase.startCapture("DescribeImagesUsingChatGPT");
DescribeImagesUsingChatGPT;
end

function testInformationRetrievalUsingOpenAIDocumentEmbedding(~)
function testInformationRetrievalUsingOpenAIDocumentEmbedding(testCase)
testCase.startCapture("InformationRetrievalUsingOpenAIDocumentEmbedding");
InformationRetrievalUsingOpenAIDocumentEmbedding;
end

function testProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode(~)
function testProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode(testCase)
testCase.startCapture("ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode");
ProcessGeneratedTextinRealTimebyUsingChatGPTinStreamingMode;
end

function testProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode(~)
function testProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode(testCase)
testCase.startCapture("ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode");
ProcessGeneratedTextInRealTimeByUsingOllamaInStreamingMode;
end

function testRetrievalAugmentedGenerationUsingChatGPTandMATLAB(~)
function testRetrievalAugmentedGenerationUsingChatGPTandMATLAB(testCase)
testCase.startCapture("RetrievalAugmentedGenerationUsingChatGPTandMATLAB");
RetrievalAugmentedGenerationUsingChatGPTandMATLAB;
end

function testRetrievalAugmentedGenerationUsingOllamaAndMATLAB(~)
function testRetrievalAugmentedGenerationUsingOllamaAndMATLAB(testCase)
testCase.startCapture("RetrievalAugmentedGenerationUsingOllamaAndMATLAB");
RetrievalAugmentedGenerationUsingOllamaAndMATLAB;
end

function testSummarizeLargeDocumentsUsingChatGPTandMATLAB(~)
function testSummarizeLargeDocumentsUsingChatGPTandMATLAB(testCase)
testCase.startCapture("SummarizeLargeDocumentsUsingChatGPTandMATLAB");
SummarizeLargeDocumentsUsingChatGPTandMATLAB;
end

function testUsingDALLEToEditImages(~)
function testUsingDALLEToEditImages(testCase)
testCase.startCapture("UsingDALLEToEditImages");
UsingDALLEToEditImages;
end

function testUsingDALLEToGenerateImages(~)
function testUsingDALLEToGenerateImages(testCase)
testCase.startCapture("UsingDALLEToGenerateImages");
UsingDALLEToGenerateImages;
end
end

end
end

function iCloseAll()
Expand Down

0 comments on commit 2565f27

Please sign in to comment.