name: LLM Translation Testing for Release Candidates

on:
  workflow_dispatch:
    inputs:
      release_candidate_tag:
        description: 'Release candidate tag/version'
        required: true
        type: string
  push:
    tags:
      - 'v*-rc*' # Triggers on release candidate tags like v1.0.0-rc1

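# Example manual trigger via the GitHub CLI (tag value is illustrative):
#   gh workflow run "LLM Translation Testing for Release Candidates" -f release_candidate_tag=v1.0.0-rc1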
jobs:
  llm-translation-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 120

    steps:
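      # Check out the RC tag supplied to workflow_dispatch; on tag pushes the
      # input is empty and the expression falls back to the pushed ref.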
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.release_candidate_tag || github.ref }}

      - name: Set up Python 3.11
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

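      # Cache key changes whenever requirements.txt changes; restore-keys lets
      # the job fall back to the most recent pip cache for this runner OS.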
      - name: Cache pip dependencies
        uses: actions/cache@v3
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-

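      # Installs project requirements plus pinned test tooling:
      # pytest-xdist (parallel workers), pytest-cov (coverage), pytest-asyncio,
      # pytest-html (self-contained HTML report), respx (httpx mocking),
      # and pytest-retry (re-running flaky tests).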
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install -r requirements.txt
          pip install "pytest==7.3.1"
          pip install "pytest-retry==1.6.3"
          pip install "pytest-cov==5.0.0"
          pip install "pytest-asyncio==0.21.1"
          pip install "respx==0.22.0"
          pip install "pytest-xdist==3.6.1"
          pip install "pytest-html==4.1.1"

      - name: Create test results directory
        run: mkdir -p test-results

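      # -n 4 spreads the suite across four xdist workers; -x stops at the first
      # failure. continue-on-error keeps the job alive so the summary and
      # artifact-upload steps below still run when tests fail.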
      - name: Run LLM Translation Tests
        run: |
          python -m pytest -vv tests/llm_translation \
            --cov=litellm \
            --cov-report=xml \
            --cov-report=html \
            --junitxml=test-results/junit.xml \
            --html=test-results/report.html \
            --self-contained-html \
            --durations=10 \
            -n 4 \
            -x
        continue-on-error: true

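      # Builds test-results/summary.md from the JUnit XML so headline
      # pass/fail counts ship with the uploaded artifact.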
      - name: Generate test summary
        run: |
          echo "# LLM Translation Testing Results" > test-results/summary.md
          echo "" >> test-results/summary.md
          echo "**Release Candidate:** ${{ github.event.inputs.release_candidate_tag || github.ref_name }}" >> test-results/summary.md
          echo "**Run Date:** $(date)" >> test-results/summary.md
          echo "**Commit:** ${{ github.sha }}" >> test-results/summary.md
          echo "" >> test-results/summary.md

          # Parse junit.xml for test statistics if it exists
          if [ -f "test-results/junit.xml" ]; then
            python -c "
          import xml.etree.ElementTree as ET
          try:
              tree = ET.parse('test-results/junit.xml')
              root = tree.getroot()
              # pytest puts the counts on <testsuite>, nested under a <testsuites> root
              suite = root.find('testsuite')
              if suite is None:
                  suite = root
              tests = suite.get('tests', '0')
              failures = suite.get('failures', '0')
              errors = suite.get('errors', '0')
              skipped = suite.get('skipped', '0')
              time = suite.get('time', '0')

              print(f'**Total Tests:** {tests}')
              print(f'**Passed:** {int(tests) - int(failures) - int(errors) - int(skipped)}')
              print(f'**Failed:** {failures}')
              print(f'**Errors:** {errors}')
              print(f'**Skipped:** {skipped}')
              print(f'**Duration:** {time} seconds')
          except Exception as e:
              print(f'Could not parse test results: {e}')
          " >> test-results/summary.md
          fi

          echo "" >> test-results/summary.md
          echo "## Test Files Covered" >> test-results/summary.md
          ls tests/llm_translation/*.py | sed 's/^/- /' >> test-results/summary.md

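      # if: always() ensures results are uploaded even when the test step
      # failed; artifacts are retained for 30 days.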
      - name: Upload test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: llm-translation-test-results-${{ github.event.inputs.release_candidate_tag || github.ref_name }}
          path: |
            test-results/
            coverage.xml
            htmlcov/
            .coverage
          retention-days: 30

      - name: Upload JUnit test results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: junit-xml-${{ github.event.inputs.release_candidate_tag || github.ref_name }}
          path: test-results/junit.xml
          retention-days: 30