Trace/replay llms.internal.sendRequest
To decouple `texampleTests.m` from the availability and speed of external servers,
record calls to `llms.internal.sendRequest` (on a dev machine) and replay them
(during most test runs, including CI). See tests/recording/README.md for
instructions.
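
As a rough sketch of how the record/replay seam added below could be driven (the real hookup lives in the test suite and tests/recording/README.md; the recording name and the explicit addpath calls here are illustrative assumptions, not taken from the commit):

% Recording pass, on a developer machine with live API access:
addpath("test-utils/recording-doubles");                                  % recording double shadows llms.internal.sendRequestWrapper
llms.internal.sendRequestWrapper("open", "tests/recordings/MyExample");   % base name of the .mat file to write
% ... run the example/test; every LLM call goes through the double and is captured ...
llms.internal.sendRequestWrapper("close");                                % writes tests/recordings/MyExample.mat

% Replay pass, e.g. in CI, with no network access needed:
addpath("test-utils/replaying-doubles");                                  % replaying double shadows the wrapper instead
llms.internal.sendRequestWrapper("open", "tests/recordings/MyExample");   % loads the recorded calls
% ... run the same example/test; responses come back from the recording, in order ...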
ccreutzi committed Aug 5, 2024
1 parent 062538c commit 5d4ee2d
Showing 28 changed files with 3,828 additions and 18 deletions.
2 changes: 1 addition & 1 deletion +llms/+internal/callAzureChatAPI.m
@@ -64,7 +64,7 @@

parameters = buildParametersCall(messages, functions, nvp);

-[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, URL, nvp.TimeOut, nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
2 changes: 1 addition & 1 deletion +llms/+internal/callOllamaChatAPI.m
@@ -53,7 +53,7 @@

parameters = buildParametersCall(model, messages, nvp);

-[response, streamedText] = llms.internal.sendRequest(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,[],URL,nvp.TimeOut,nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
2 changes: 1 addition & 1 deletion +llms/+internal/callOpenAIChatAPI.m
@@ -62,7 +62,7 @@

parameters = buildParametersCall(messages, functions, nvp);

-[response, streamedText] = llms.internal.sendRequest(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);
+[response, streamedText] = llms.internal.sendRequestWrapper(parameters,nvp.APIKey, END_POINT, nvp.TimeOut, nvp.StreamFun);

% If call errors, "choices" will not be part of response.Body.Data, instead
% we get response.Body.Data.error
5 changes: 5 additions & 0 deletions +llms/+internal/sendRequestWrapper.m
@@ -0,0 +1,5 @@
function [response, streamedText] = sendRequestWrapper(varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
[response, streamedText] = llms.internal.sendRequest(varargin{:});
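
This wrapper is the seam the rest of the commit plugs into: whichever +llms/+internal folder sits first on the MATLAB path supplies sendRequestWrapper, so the test doubles below replace it without touching any caller. A quick map of the three variants (my summary of the diff, not text from the commit):

% +llms/+internal/sendRequestWrapper.m (this file)                   -> forwards straight to llms.internal.sendRequest
% test-utils/recording-doubles/+llms/+internal/sendRequestWrapper.m  -> forwards to sendRequest and records each call to a .mat file
% test-utils/replaying-doubles/+llms/+internal/sendRequestWrapper.m  -> skips the network and returns the next recorded call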
1 change: 1 addition & 0 deletions .gitignore
@@ -1,6 +1,7 @@
*.env
*.asv
*.mat
+!tests/recordings/*.mat
startup.m
papers_to_read.csv
data/*
2 changes: 1 addition & 1 deletion extractOpenAIEmbeddings.m
@@ -47,7 +47,7 @@
end


-response = llms.internal.sendRequest(parameters,key, END_POINT, nvp.TimeOut);
+response = llms.internal.sendRequestWrapper(parameters,key, END_POINT, nvp.TimeOut);

if isfield(response.Body.Data, "data")
emb = [response.Body.Data.data.embedding];
40 changes: 40 additions & 0 deletions test-utils/recording-doubles/+llms/+internal/sendRequestWrapper.m
@@ -0,0 +1,40 @@
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
persistent seenCalls
if isempty(seenCalls)
seenCalls = cell(0,2);
end

persistent filename

if nargin == 1 && isequal(parameters,"close")
save(filename+".mat","seenCalls");
seenCalls = cell(0,2);
return
end

if nargin==2 && isequal(parameters,"open")
filename = token;
return
end

streamFunCalls = {};
hasCallback = nargin >= 5 && isa(varargin{3},'function_handle');
if hasCallback
streamFun = varargin{3};
end
function wrappedStreamFun(varargin)
streamFunCalls(end+1) = varargin;
streamFun(varargin{:});
end
if hasCallback
varargin{3} = @wrappedStreamFun;
end


[response, streamedText] = llms.internal.sendRequest(parameters, token, varargin{:});

seenCalls(end+1,:) = {{parameters},{response,streamFunCalls,streamedText}};
end
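
The recording file is nothing more than the seenCalls cell array saved above, so a recording could be inspected by hand along these lines (the file name is a made-up example, not part of the commit):

S = load("tests/recordings/MyExample.mat");
size(S.seenCalls, 1)      % number of recorded sendRequest calls
S.seenCalls{1,1}          % {parameters} passed in the first call
S.seenCalls{1,2}          % {response, streamFunCalls, streamedText} captured for it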
2 changes: 2 additions & 0 deletions test-utils/recording-doubles/addpath.m
@@ -0,0 +1,2 @@
function addpath(~)
% ignore addpath calls in examples
30 changes: 30 additions & 0 deletions test-utils/replaying-doubles/+llms/+internal/sendRequestWrapper.m
@@ -0,0 +1,30 @@
function [response, streamedText] = sendRequestWrapper(parameters, token, varargin)
% This function is undocumented and will change in a future release

% A wrapper around sendRequest to have a test seam
persistent seenCalls
if isempty(seenCalls)
seenCalls = cell(0,2);
end

if nargin == 1 && isequal(parameters,"close")
seenCalls = cell(0,2);
return
end

if nargin==2 && isequal(parameters,"open")
load(token+".mat","seenCalls");
return
end

result = seenCalls{1,2};
response = result{1};
streamFunCalls = result{2};
streamedText = result{3};

if nargin >= 5 && isa(varargin{3},'function_handle')
streamFun = varargin{3};
cellfun(streamFun, streamFunCalls);
end

seenCalls(1,:) = [];
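
Note that the replaying double never compares the incoming parameters against the recording; it just pops the next row of seenCalls, so a replayed test must make its requests in the same order as the recording run. A minimal sketch of a replay sequence (argument names and the recording name are assumptions):

llms.internal.sendRequestWrapper("open", "tests/recordings/MyExample");          % load recorded rows
[response1, text1] = llms.internal.sendRequestWrapper(params1, key, url, 120);   % returns row 1
[response2, text2] = llms.internal.sendRequestWrapper(params2, key, url, 120);   % returns row 2, whatever params2 is
llms.internal.sendRequestWrapper("close");                                       % discards any leftover rows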
2 changes: 2 additions & 0 deletions test-utils/replaying-doubles/addpath.m
@@ -0,0 +1,2 @@
function addpath(~)
% ignore addpath calls in examples