
Commit ec5b214

RUIJIEZHONG66166 authored and mengfei25 committed
[CI] Update UT result check and add Reproduce Command for UT (#1984)
- Update UT result check with xml info
- Add reproduce command for UT
- Add UT test case number check

disable_e2e

---------

Co-authored-by: mengfei25 <[email protected]>
1 parent 789f59d commit ec5b214

4 files changed, +284 -124 lines changed


.github/actions/linux-uttest/action.yml

Lines changed: 10 additions & 0 deletions

@@ -20,6 +20,8 @@ runs:
         cd pytorch/third_party/torch-xpu-ops/test/regressions
         pytest --junit-xml=${{ github.workspace }}/ut_log/op_regression.xml \
           2> ${log_dir}/op_regression_test_error.log |tee ${log_dir}/op_regression_test.log
+        echo -e "File Path: cd pytorch/third_party/torch-xpu-ops/test/regressions" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_regression.log
+        echo -e "Reproduce Command: pytest -sv failed_case" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_regression.log
     - name: op_regression_dev1
       shell: timeout 300 bash -xe {0}
       if: ${{ inputs.ut_name == 'op_regression_dev1' || inputs.ut_name == 'basic' }}
@@ -30,6 +32,8 @@ runs:
         timeout 180 pytest test_operation_on_device_1.py \
           --junit-xml=${{ github.workspace }}/ut_log/op_regression_dev1.xml \
           2> ${log_dir}/op_regression_dev1_test_error.log |tee ${log_dir}/op_regression_dev1_test.log
+        echo -e "File Path: cd pytorch/third_party/torch-xpu-ops/test/regressions" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_regression_dev1.log
+        echo -e "Reproduce Command: pytest -sv failed_case" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_regression_dev1.log
     - name: op_transformers
       shell: timeout 3600 bash -xe {0}
       if: ${{ inputs.ut_name == 'op_transformers' || inputs.ut_name == 'basic' }}
@@ -41,6 +45,8 @@ runs:
         pytest test/test_transformers.py -k xpu \
           --junit-xml=${{ github.workspace }}/ut_log/op_transformers.xml \
           2> ${log_dir}/op_transformers_test_error.log |tee ${log_dir}/op_transformers_test.log
+        echo -e "File Path: cd pytorch" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_transformers.log
+        echo -e "Reproduce Command: pytest -sv test/failed_case -k xpu" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_transformers.log
     - name: op_extended
       shell: timeout 3600 bash -xe {0}
       if: ${{ inputs.ut_name == 'op_extended' || inputs.ut_name == 'basic' }}
@@ -53,6 +59,8 @@
           2> ${log_dir}/op_extended_test_error.log |tee ${log_dir}/op_extended_test.log
         ls -al
         cp *.xml ${{ github.workspace }}/ut_log
+        echo -e "File Path: cd pytorch/third_party/torch-xpu-ops/test/xpu/extended" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_extended.log
+        echo -e "Reproduce Command: pytest -sv failed_case" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_extended.log
     - name: op_ut
       shell: timeout 18000 bash -xe {0}
       if: ${{ inputs.ut_name == 'op_ut' }}
@@ -89,6 +97,8 @@
           tee ${{ github.workspace }}/ut_log/op_ut/op_ut_with_only_test.log
         ls -al
         cp *.xml ${{ github.workspace }}/ut_log
+        echo -e "File Path: cd pytorch/third_party/torch-xpu-ops/test/xpu" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_ut.log
+        echo -e "Reproduce Command: pytest -sv failed_case" | tee -a ${{ github.workspace }}/ut_log/reproduce_op_ut.log
     - name: torch_xpu
       shell: timeout 3600 bash -xe {0}
       if: ${{ inputs.ut_name == 'torch_xpu' }}
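
Note: the net effect of these additions is that every UT step appends a two-line reproduce hint to its own log under ${{ github.workspace }}/ut_log/. As a rough illustration (the literal failed_case token is the placeholder echoed by the step itself, standing in for the actual failing test file or selector), reproduce_op_regression.log would end up containing:

    File Path: cd pytorch/third_party/torch-xpu-ops/test/regressions
    Reproduce Command: pytest -sv failed_case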

.github/scripts/check-ut.py

Lines changed: 105 additions & 4 deletions

@@ -3,13 +3,24 @@
 import os
 import re
 from junitparser import JUnitXml, Error, Failure, Skipped
+from collections import defaultdict

 parser = argparse.ArgumentParser(description='Test results analyzer')
 parser.add_argument('input_files', nargs='+', help='JUnit XML files or log files')
 args = parser.parse_args()

 failures = []
 summaries = []
+failures_by_category = defaultdict(list)
+passed_cases = []
+passed_by_category = defaultdict(list)
+category_totals = defaultdict(lambda: {
+    'Test cases': 0,
+    'Passed': 0,
+    'Skipped': 0,
+    'Failures': 0,
+    'Errors': 0
+})

 error_types = [
     "RuntimeError",
@@ -38,6 +49,14 @@ def get_name(case):
         return case.get('name', '')
     return ' '.join(case.name.split())

+def get_category_from_case(case):
+    if isinstance(case, dict):
+        return case.get('category', 'unknown')
+    else:
+        if hasattr(case, '_file_category'):
+            return case._file_category
+        return 'unknown'
+
 def get_result(case):
     if isinstance(case, dict):
         return case.get('status', 'failed')
@@ -108,6 +127,7 @@ def print_failures(failure_list=None):
     print_header = True
     for case in failures:
         print_md_row({
+            'Category': get_category_from_case(case),
             'Class name': get_classname(case),
             'Test name': get_name(case),
             'Status': get_result(case),
@@ -116,13 +136,34 @@
         }, print_header, failure_list=failure_list)
         print_header = False

+def generate_failures_log():
+    if not failures:
+        print("No failures found, skipping log file creation.")
+        return
+
+    for case in failures:
+        category = get_category_from_case(case)
+        failures_by_category[category].append(case)
+
+    for category, category_failures in failures_by_category.items():
+        if not category_failures:
+            continue
+
+        log_filename = f"failures_{category}.log"
+        with open(log_filename, "w", encoding='utf-8') as log_file:
+            for case in category_failures:
+                class_name = get_classname(case)
+                test_name = get_name(case)
+                log_file.write(f"{category},{class_name},{test_name}\n")
+
 def parse_log_file(log_file):
     with open(log_file, encoding='utf-8') as f:
         content = f.read()

     ut_name = os.path.splitext(os.path.basename(log_file))[0]
+    category = determine_category(ut_name)
     summary = {
-        'Category': determine_category(ut_name),
+        'Category': category,
         'UT': ut_name,
         'Test cases': 0,
         'Passed': 0,
@@ -170,19 +211,29 @@ def parse_log_file(log_file):
            for match in error_matches:
                error_msg.append(match.group(0).strip())

-            failures.append({
+            failure_case = {
                 'classname': ut_name,
                 'name': f"{case_match.group(2)}:{test_name}",
                 'error': " ".join(error_msg),
                 'status': 'failed',
-                'source': 'Log'
-            })
+                'source': 'Log',
+                'category': category
+            }
+            failures.append(failure_case)
+            failures_by_category[category].append(failure_case)
             failures_number += 1

     if failures_number > summary['Failures']:
         summary['Failures'] = failures_number
         summary['Passed'] = summary['Test cases'] - summary['Failures'] - summary['Skipped']

+    # Update category totals
+    category_totals[category]['Test cases'] += summary['Test cases']
+    category_totals[category]['Passed'] += summary['Passed']
+    category_totals[category]['Skipped'] += summary['Skipped']
+    category_totals[category]['Failures'] += summary['Failures']
+    category_totals[category]['Errors'] += summary['Errors']
+
     return summary

 def determine_category(ut):
@@ -192,6 +243,8 @@ def determine_category(ut):
         return 'op_regression_dev1'
     elif ut == 'op_extended':
         return 'op_extended'
+    elif ut == 'op_transformers':
+        return 'op_transformers'
     elif 'op_ut' in ut:
         return 'op_ut'
     else:
@@ -223,12 +276,56 @@ def process_xml_file(xml_file):
            }
            summaries.append(suite_summary)

+            # Update category totals
+            category_totals[category]['Test cases'] += suite_summary['Test cases']
+            category_totals[category]['Passed'] += suite_summary['Passed']
+            category_totals[category]['Skipped'] += suite_summary['Skipped']
+            category_totals[category]['Failures'] += suite_summary['Failures']
+            category_totals[category]['Errors'] += suite_summary['Errors']
+
            for case in suite:
                if get_result(case) not in ["passed", "skipped"]:
+                    case._file_category = category
                    failures.append(case)
+                elif get_result(case) == "passed":
+                    case._file_category = category
+                    passed_cases.append(case)
+                    passed_by_category[category].append(case)
     except Exception as e:
         print(f"Error processing {xml_file}: {e}", file=sys.stderr)

+def generate_passed_log():
+    if not passed_cases:
+        print("No passed cases found, skipping log file creation.")
+        return
+
+    for category, category_passed in passed_by_category.items():
+        if not category_passed:
+            continue
+
+        log_filename = f"passed_{category}.log"
+        with open(log_filename, "w", encoding='utf-8') as log_file:
+            for case in category_passed:
+                class_name = get_classname(case)
+                test_name = get_name(case)
+                status = get_result(case)
+                log_file.write(f"{category},{class_name},{test_name}\n")
+
+def generate_category_totals_log():
+    """Generate log files with category totals"""
+    for category, totals in category_totals.items():
+        if totals['Test cases'] == 0:
+            continue
+
+        log_filename = f"category_{category}.log"
+        with open(log_filename, "w", encoding='utf-8') as log_file:
+            log_file.write(f"Category: {category}\n")
+            log_file.write(f"Test cases: {totals['Test cases']}\n")
+            log_file.write(f"Passed: {totals['Passed']}\n")
+            log_file.write(f"Skipped: {totals['Skipped']}\n")
+            log_file.write(f"Failures: {totals['Failures']}\n")
+            log_file.write(f"Errors: {totals['Errors']}\n")
+
 def print_summary():
     print("### Results Summary")
     print_header = True
@@ -276,6 +373,10 @@ def main():

     with open("ut_failure_list.csv", "w") as failure_list:
         print_failures(failure_list=failure_list)
+
+    generate_failures_log()
+    generate_passed_log()
+    generate_category_totals_log()
     print_summary()
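
Note: with these helpers, check-ut.py writes per-category artifacts alongside the existing markdown summary and ut_failure_list.csv. A minimal sketch of the resulting files, assuming the script is run over the collected JUnit XML and log files (the invocation below, and all class/test names and counts, are illustrative placeholders, not taken from this commit):

    # hypothetical invocation from the directory holding the collected results
    python check-ut.py *.xml op_regression.log

    # failures_<category>.log and passed_<category>.log: one CSV-style row per case
    # (category,classname,testname as written by generate_failures_log / generate_passed_log)
    op_regression,TestSomeOpsXPU,test_some_case_xpu

    # category_<category>.log: aggregated counters per category, as written by generate_category_totals_log
    Category: op_regression
    Test cases: 100
    Passed: 97
    Skipped: 2
    Failures: 1
    Errors: 0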