ci.yml
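# CI workflow: on every push, run the repository's MATLAB test suite on a
# GitHub-hosted Ubuntu runner against locally served Ollama models (plus
# OpenAI/Azure endpoints configured via secrets) and upload coverage to Codecov.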
name: Run MATLAB Tests on GitHub-Hosted Runner
on: [push]
jobs:
  test:
    name: Run MATLAB Tests and Generate Artifacts
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository
        uses: actions/checkout@v4
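      # Install the Ollama CLI so models can be served locally on the runner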
      - name: Install Ollama
        run: |
          curl -fsSL https://ollama.com/install.sh | sudo -E sh
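      # Start two Ollama servers: the default one on port 11434 and a second
      # on port 11435 so the tests can exercise a non-default endpoint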
      - name: Start serving
        run: |
          # Run in the background; there is no way to daemonise at the moment
          ollama serve &
          # Run a second server to test a different endpoint
          OLLAMA_HOST=127.0.0.1:11435 OLLAMA_MODELS=/tmp/ollama/models ollama serve &
          # A short pause is required before the HTTP port is opened
          sleep 5
          # This endpoint blocks until ready
          time curl -i http://localhost:11434
          time curl -i http://localhost:11435
          # For debugging, record the Ollama version
          ollama --version
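      # Pull the models the tests rely on: mistral and bakllava on the default
      # server, qwen2:0.5b on the second server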
      - name: Pull models
        run: |
          ollama pull mistral
          ollama pull bakllava
          OLLAMA_HOST=127.0.0.1:11435 ollama pull qwen2:0.5b
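      # Install MATLAB and the Text Analytics Toolbox; cache: true reuses the
      # installation across runs to shorten setup time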
      - name: Set up MATLAB
        uses: matlab-actions/setup-matlab@v2
        with:
          products: Text_Analytics_Toolbox
          cache: true
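      # Run the test suite; OpenAI and Azure OpenAI credentials and the second
      # Ollama endpoint are supplied through environment variables, and JUnit
      # results plus Cobertura coverage are written out as artifacts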
      - name: Run tests and generate artifacts
        env:
          OPENAI_KEY: ${{ secrets.OPENAI_KEY }}
          OPENAI_API_KEY: ${{ secrets.OPENAI_KEY }}
          AZURE_OPENAI_DEPLOYMENT: ${{ secrets.AZURE_DEPLOYMENT }}
          AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_ENDPOINT }}
          AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_KEY }}
          SECOND_OLLAMA_ENDPOINT: 127.0.0.1:11435
        uses: matlab-actions/run-tests@v2
        with:
          test-results-junit: test-results/results.xml
          code-coverage-cobertura: code-coverage/coverage.xml
          source-folder: .
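      # Publish the Cobertura coverage report to Codecov for the
      # matlab-deep-learning/llms-with-matlab project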
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          slug: matlab-deep-learning/llms-with-matlab