From 5d5e6af08168d54111061f87663df4c946c45756 Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Thu, 27 Nov 2025 01:56:01 -0800
Subject: [PATCH 01/10] [CI] Collect All UT cases

---
 .github/actions/linux-uttest/action.yml | 31 +++++++-------
 .github/scripts/check-ut.py             | 55 +++++++++++++++++++++----
 .github/workflows/_linux_ut.yml         |  1 +
 3 files changed, 64 insertions(+), 23 deletions(-)

diff --git a/.github/actions/linux-uttest/action.yml b/.github/actions/linux-uttest/action.yml
index 847d71b8ac..dd02edb3c4 100644
--- a/.github/actions/linux-uttest/action.yml
+++ b/.github/actions/linux-uttest/action.yml
@@ -180,20 +180,23 @@ runs:
         else
           echo -e "No Failure logs"
         fi
-        # Copied the passed logs
-        if ls passed*.log 1> /dev/null 2>&1; then
-          cp passed*.log ${{ github.workspace }}/ut_log
-          echo -e "Passed logs Copied"
-        else
-          echo -e "No Passed logs"
-        fi
-        # Copied the Summary logs
-        if ls category*.log 1> /dev/null 2>&1; then
-          cp category*.log ${{ github.workspace }}/ut_log
-          echo -e "Category logs Copied"
-        else
-          echo -e "No Category logs"
-        fi
+
+        log_dir="${{ github.workspace }}/ut_log"
+        copy_logs() {
+          local pattern=$1
+          local name=$2
+
+          if ls ${pattern} 1> /dev/null 2>&1; then
+            cp ${pattern} "$log_dir"
+            echo -e "${name} logs Copied"
+          else
+            echo -e "No ${name} logs"
+          fi
+        }
+        copy_logs "passed*.log" "Passed"
+        copy_logs "category*.log" "Category"
+        copy_logs "all_cases*.log" "All cases"
+
         if [ -e ut_failure_list.csv ];then
           cp ut_failure_list.csv ${{ github.workspace }}/ut_log/ut_failure_list.csv || true
         fi
diff --git a/.github/scripts/check-ut.py b/.github/scripts/check-ut.py
index 50b112a180..b85f1761c2 100644
--- a/.github/scripts/check-ut.py
+++ b/.github/scripts/check-ut.py
@@ -15,6 +15,8 @@
 failures_by_category = defaultdict(list)
 passed_cases = []
 passed_by_category = defaultdict(list)
+all_cases = []
+all_cases_by_category = defaultdict(list)
 category_totals = defaultdict(lambda: {
     'Test cases': 0,
     'Passed': 0,
@@ -156,6 +158,22 @@ def generate_failures_log():
             test_name = get_name(case)
             log_file.write(f"{category},{class_name},{test_name}\n")
 
+def generate_all_cases_log():
+    if not all_cases:
+        return
+
+    for category, category_cases in all_cases_by_category.items():
+        if not category_cases:
+            continue
+
+        log_filename = f"all_cases_{category}.log"
+        with open(log_filename, "w", encoding='utf-8') as log_file:
+            for case in category_cases:
+                class_name = get_classname(case)
+                test_name = get_name(case)
+                status = get_result(case)
+                log_file.write(f"{category},{class_name},{test_name}\n")
+
 def parse_log_file(log_file):
     with open(log_file, encoding='utf-8') as f:
         content = f.read()
@@ -267,7 +285,30 @@ def process_xml_file(xml_file):
         parts_category = os.path.basename(xml_file).split('.')[0]
         category = determine_category(parts_category)
 
+        def process_suite(suite, category):
+            suite_cases_count = 0
+
+            for case in suite:
+                if hasattr(case, 'tests'):
+                    suite_cases_count += process_suite(case, category)
+                else:
+                    case._file_category = category
+                    all_cases.append(case)
+                    all_cases_by_category[category].append(case)
+                    suite_cases_count += 1
+
+                    if get_result(case) not in ["passed", "skipped"]:
+                        case._file_category = category
+                        failures.append(case)
+                    elif get_result(case) == "passed":
+                        case._file_category = category
+                        passed_cases.append(case)
+                        passed_by_category[category].append(case)
+    
+            return suite_cases_count
+
         for suite in xml:
+            actual_cases_count = process_suite(suite, category)
             suite_summary = {
                 'Category': category,
                 'UT': ut,
@@ -286,15 +327,10 @@ def process_xml_file(xml_file):
             category_totals[category]['Skipped'] += suite_summary['Skipped']
             category_totals[category]['Failures'] += suite_summary['Failures']
             category_totals[category]['Errors'] += suite_summary['Errors']
-
-            for case in suite:
-                if get_result(case) not in ["passed", "skipped"]:
-                    case._file_category = category
-                    failures.append(case)
-                elif get_result(case) == "passed":
-                    case._file_category = category
-                    passed_cases.append(case)
-                    passed_by_category[category].append(case)
+    
+            if suite.tests != actual_cases_count:
+                print(f"Warning: Suite '{ut}' has {suite.tests} tests in summary but {actual_cases_count} cases were processed", 
+                      file=sys.stderr)
     except Exception as e:
         print(f"Error processing {xml_file}: {e}", file=sys.stderr)
 
@@ -379,6 +415,7 @@ def main():
 
     generate_failures_log()
     generate_passed_log()
+    generate_all_cases_log() 
     generate_category_totals_log()
     print_summary()
 
diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index dc10647124..b9af33064f 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -158,6 +158,7 @@ jobs:
             \( -name "failures_*.log" -o \
                -name "passed_*.log" -o \
                -name "category_*.log" -o \
+               -name "all_cases_*.log" -o \
                -name "reproduce_*.log" \) \
             -exec mv {} ./ \; || true
           cp ${{ github.workspace }}/.github/scripts/ut_result_check.sh ./

From 0dfaf6f789e95b39bb3d5108f719149b317ca2fa Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Thu, 27 Nov 2025 02:01:33 -0800
Subject: [PATCH 02/10] align the lint check

---
 .github/scripts/check-ut.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/scripts/check-ut.py b/.github/scripts/check-ut.py
index b85f1761c2..fb06493376 100644
--- a/.github/scripts/check-ut.py
+++ b/.github/scripts/check-ut.py
@@ -304,7 +304,7 @@ def process_suite(suite, category):
                         case._file_category = category
                         passed_cases.append(case)
                         passed_by_category[category].append(case)
-    
+
             return suite_cases_count
 
         for suite in xml:
@@ -327,9 +327,9 @@ def process_suite(suite, category):
             category_totals[category]['Skipped'] += suite_summary['Skipped']
             category_totals[category]['Failures'] += suite_summary['Failures']
             category_totals[category]['Errors'] += suite_summary['Errors']
-    
+
             if suite.tests != actual_cases_count:
-                print(f"Warning: Suite '{ut}' has {suite.tests} tests in summary but {actual_cases_count} cases were processed", 
+                print(f"Warning: Suite '{ut}' has {suite.tests} tests in summary but {actual_cases_count} cases were processed",
                       file=sys.stderr)
     except Exception as e:
         print(f"Error processing {xml_file}: {e}", file=sys.stderr)
@@ -415,7 +415,7 @@ def main():
 
     generate_failures_log()
     generate_passed_log()
-    generate_all_cases_log() 
+    generate_all_cases_log()
     generate_category_totals_log()
     print_summary()
 

From ab75f36a9e3edf523a45790c627aa30495a0f56c Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Mon, 1 Dec 2025 02:29:10 -0800
Subject: [PATCH 03/10] add UT failures classify

---
 .github/scripts/check-ut.py            |  19 ++-
 .github/scripts/ut_result_check.sh     | 173 +++++++++++++++++++++++++
 .github/workflows/_linux_ut.yml        |  57 ++++++++
 .github/workflows/nightly_ondemand.yml |   1 +
 .github/workflows/pull.yml             |   1 +
 5 files changed, 246 insertions(+), 5 deletions(-)

diff --git a/.github/scripts/check-ut.py b/.github/scripts/check-ut.py
index fb06493376..1b1d7f9d74 100644
--- a/.github/scripts/check-ut.py
+++ b/.github/scripts/check-ut.py
@@ -8,8 +8,11 @@
 parser = argparse.ArgumentParser(description='Test results analyzer')
 parser.add_argument('-n', '--ut-name', type=str, default='', help='UT name')
 parser.add_argument('-i', '--input-files', nargs='+', help='JUnit XML files or log files')
+parser.add_argument('-o', '--output-dir', type=str, default='.', help='Output directory for log files (default: current directory)')
 args = parser.parse_args()
 
+os.makedirs(args.output_dir, exist_ok=True)
+
 failures = []
 summaries = []
 failures_by_category = defaultdict(list)
@@ -121,6 +124,8 @@ def print_md_row(row, print_header=False, failure_list=None):
     if failure_list is not None:
         failure_list.write(f"| {row_values} |\n")
 
+def get_output_path(filename):
+    return os.path.join(args.output_dir, filename)
 
 def print_failures(failure_list=None):
     if not failures:
         return
@@ -151,7 +156,7 @@ def generate_failures_log():
         if not category_failures:
             continue
 
-        log_filename = f"failures_{category}.log"
+        log_filename = get_output_path(f"failures_{category}.log")
         with open(log_filename, "w", encoding='utf-8') as log_file:
             for case in category_failures:
                 class_name = get_classname(case)
@@ -166,7 +171,7 @@ def generate_all_cases_log():
         if not category_cases:
             continue
 
-        log_filename = f"all_cases_{category}.log"
+        log_filename = get_output_path(f"all_cases_{category}.log")
         with open(log_filename, "w", encoding='utf-8') as log_file:
             for case in category_cases:
                 class_name = get_classname(case)
@@ -342,7 +347,7 @@ def generate_passed_log():
         if not category_passed:
             continue
 
-        log_filename = f"passed_{category}.log"
+        log_filename = get_output_path(f"passed_{category}.log")
         with open(log_filename, "w", encoding='utf-8') as log_file:
             for case in category_passed:
                 class_name = get_classname(case)
@@ -356,7 +361,7 @@ def generate_category_totals_log():
         if totals['Test cases'] == 0:
             continue
 
-        log_filename = f"category_{category}.log"
+        log_filename = get_output_path(f"category_{category}.log")
         with open(log_filename, "w", encoding='utf-8') as log_file:
             log_file.write(f"Category: {category}\n")
             log_file.write(f"Test cases: {totals['Test cases']}\n")
@@ -402,6 +407,8 @@ def print_summary():
     print_md_row(totals)
 
 def main():
+    os.makedirs(args.output_dir, exist_ok=True)
+    
     for input_file in args.input_files:
         if input_file.endswith('.log'):
             process_log_file(input_file)
@@ -409,8 +416,10 @@ def main():
             process_xml_file(input_file)
         else:
             print(f"Skipping unknown file type: {input_file}", file=sys.stderr)
+    
     if args.ut_name != "skipped_ut":
-        with open("ut_failure_list.csv", "w") as failure_list:
+        failure_list_path = get_output_path("ut_failure_list.csv")
+        with open(failure_list_path, "w", encoding='utf-8') as failure_list:
             print_failures(failure_list=failure_list)
 
     generate_failures_log()
diff --git a/.github/scripts/ut_result_check.sh b/.github/scripts/ut_result_check.sh
index 68cb1e327a..5acb4761d6 100644
--- a/.github/scripts/ut_result_check.sh
+++ b/.github/scripts/ut_result_check.sh
@@ -174,6 +174,178 @@ check_skipped_ut() {
     fi
 }
 
+categorize_failures() {
+    local failures_log="$1"
+    local all_ut_log="$2"
+    local output_dir="${3:-.}"
+    
+    # Check if required parameters are provided
+    if [[ $# -lt 2 ]]; then
+        echo "Usage: categorize_failures <failures_log> <all_ut_log> [output_dir]"
+        echo "Example: categorize_failures failures.txt all_ut.txt ./output"
+        return 1
+    fi
+    
+    # Check if files exist
+    if [[ ! -f "$failures_log" ]]; then
+        echo "Error: Failures log file not found: $failures_log"
+        return 1
+    fi
+    
+    if [[ ! -f "$all_ut_log" ]]; then
+        echo "Error: All UT log file not found: $all_ut_log"
+        return 1
+    fi
+    
+    # Create output directory
+    mkdir -p "$output_dir"
+    
+    # Output file paths
+    local regression_file="$output_dir/regression_ut.txt"
+    local new_issue_file="$output_dir/new_issue_ut.txt"
+    local summary_file="$output_dir/summary.txt"
+    
+    # Clear output files (if they exist)
+    > "$regression_file"
+    > "$new_issue_file"
+    > "$summary_file"
+    
+    # Counters
+    local regression_count=0
+    local new_issue_count=0
+    local total_failures=0
+    
+    echo "Starting UT failure analysis..."
+    echo "Failures log: $failures_log"
+    echo "All UT log: $all_ut_log"
+    echo "Output directory: $output_dir"
+    echo ""
+    
+    # Process failures log line by line
+    while IFS= read -r line || [[ -n "$line" ]]; do
+        # Skip empty lines
+        if [[ -z "$line" ]]; then
+            continue
+        fi
+    
+        total_failures=$((total_failures + 1))
+    
+        # Check if this line exists in all UT log
+        # Using grep -Fxq: -F fixed strings, -x whole line match, -q quiet mode
+        if grep -Fxq "$line" "$all_ut_log" 2>/dev/null; then
+            # Exists in all UT log -> Regression issue
+            regression_count=$((regression_count + 1))
+            echo "$line" >> "$regression_file"
+        else
+            # Not found in all UT log -> New issue
+            new_issue_count=$((new_issue_count + 1))
+            echo "$line" >> "$new_issue_file"
+        fi
+    done < "$failures_log"
+    
+    # Generate summary report
+    local timestamp
+    timestamp=$(date '+%Y-%m-%d %H:%M:%S')
+
+    echo ""
+    echo "Analysis completed!"
+    echo "================================="
+    echo "Total New failed UTs: $total_failures"
+    echo "Regression issues: $regression_count"
+    echo "New UTs issues: $new_issue_count"
+    echo "================================="
+    echo ""
+
+    # Display regression cases
+    if [[ $regression_count -gt 0 ]]; then
+        echo "REGRESSION CASES ISSUE ($regression_count):"
+        echo "---------------------------------"
+        cat "$regression_file" | while IFS= read -r line; do
+            echo "  $line"
+        done
+        echo ""
+    else
+        echo "✅ No regression cases found."
+        echo ""
+    fi
+
+    # Display new issue cases
+    if [[ $new_issue_count -gt 0 ]]; then
+        echo "NEW UT CASES ISSUE ($new_issue_count):"
+        echo "--------------------------------"
+        cat "$new_issue_file" | while IFS= read -r line; do
+            echo "  $line"
+        done
+        echo ""
+    else
+        echo "✅ No new UT cases issue found."
+        echo ""
+    fi
+
+    cat > "$summary_file" << EOF
+Failed UT Categorization Report
+================================
+Generated: $timestamp
+Failures log file: $(basename "$failures_log")
+All UT log file: $(basename "$all_ut_log")
+
+Statistics:
+-----------
+Total New failed UTs: $total_failures
+Regression issues: $regression_count
+New UTs issues: $new_issue_count
+
+Output Files:
+-------------
+Regression UT list: $(basename "$regression_file") ($regression_count items)
+New issue UT list: $(basename "$new_issue_file") ($new_issue_count items)
+
+Detailed Lists:
+---------------
+
+EOF
+    
+    # Add regression UT list to summary
+    if [[ $regression_count -gt 0 ]]; then
+        echo "Regression Issues:" >> "$summary_file"
+        echo "------------------" >> "$summary_file"
+        cat "$regression_file" >> "$summary_file"
+        echo "" >> "$summary_file"
+    else
+        echo "✅ No regression issues found" >> "$summary_file"
+        echo "" >> "$summary_file"
+    fi
+    
+    # Add new issue UT list to summary
+    if [[ $new_issue_count -gt 0 ]]; then
+        echo "New Issues:" >> "$summary_file"
+        echo "-----------" >> "$summary_file"
+        cat "$new_issue_file" >> "$summary_file"
+    else
+        echo "✅ No new issues found" >> "$summary_file"
+    fi
+    
+    # Print summary to console
+    echo ""
+    echo "Analysis completed!"
+    echo "================================="
+    echo "Total New failed UTs: $total_failures"
+    echo "Regression issues: $regression_count"
+    echo "New UTs issues: $new_issue_count"
+    echo "================================="
+    echo ""
+    echo "Output files:"
+    echo "  Regression UT list: $regression_file"
+    echo "  New issue UT list: $new_issue_file"
+    echo "  Detailed summary: $summary_file"
+    
+    # Show warning if no failures were found
+    if [[ $total_failures -eq 0 ]]; then
+        echo ""
+        echo "Note: No failed UT records found in the failures log file."
+    fi
+}
+
 # Main test runner for standard test suites (op_regression, op_extended, etc.)
 run_main_tests() {
     local suite="$1"
@@ -216,6 +388,7 @@ run_main_tests() {
     local failed_count=0 passed_count=0
     if [[ -f "failures_${suite}_filtered.log" ]]; then
         failed_count=$(wc -l < "failures_${suite}_filtered.log")
+        categorize_failures failures_${suite}_filtered.log all_cases_${suite}_reference.log categorize_failures
     fi
     if [[ -f "passed_${suite}.log" ]]; then
         passed_count=$(wc -l < "passed_${suite}.log")
diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index b9af33064f..b66f1e15f1 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -23,6 +23,10 @@ on:
         required: true
        type: string
        description: UT scope. one of `op_regression,op_transformers,op_extended,op_ut,skipped_ut,torch_xpu,op_regression_dev1`
+      test_type:
+        type: string
+        default: "build-cicd"
+        description: Test type. The default is for CI tests; other values combine (build or wheel) with (nightly, weekly or ondemand)
 
 permissions: read-all
 
@@ -140,6 +144,8 @@ jobs:
     timeout-minutes: 30
     permissions:
       issues: write
+    env:
+      REFERENCE_ISSUE_ID: 2440
     steps:
       - name: Checkout torch-xpu-ops
         uses: actions/checkout@v4
@@ -148,6 +154,23 @@ jobs:
         with:
           pattern: Inductor-XPU-UT-Data-${{ github.event.pull_request.number || github.sha }}-${{ inputs.ut }}-*
           path: ${{ github.workspace }}/ut_log
+      - name: Download Baseline Artifact
+        run: |
+          mkdir baseline/
+          cd baseline/
+          if [[ "${{ inputs.test_type }}" != *"ly" ]];then
+            artifact_type="$(echo ${{ inputs.test_type }} |awk -F '-' '{print $1}')-nightly"
+          else
+            artifact_type="${{ inputs.test_type }}"
+          fi
+          gh --repo intel/torch-xpu-ops issue view ${REFERENCE_ISSUE_ID} --json body -q .body 2>&1 |tee body.txt
+          REFERENCE_RUN_ID="$(cat body.txt |grep "Inductor-${artifact_type}-LTS2" |sed 's/.*: *//' || echo '')"
+          if [ "${REFERENCE_RUN_ID}" != "" ];then
+            gh --repo intel/torch-xpu-ops run download ${REFERENCE_RUN_ID} -p "Inductor-XPU-UT-Data-*"
+            find Inductor-XPU-UT-Data-*/ -maxdepth 1 -mindepth 1 -type d |sort -V |\
+            while read line; do rsync -az --delete $line/ $(basename $line)/; done
+            rm -rf Inductor-XPU-UT-Data-* || true
+          fi
       - name: Check UT Results
         run: |
           ls -al ${{ github.workspace }}/ut_log
@@ -161,6 +184,29 @@ jobs:
                -name "all_cases_*.log" -o \
                -name "reproduce_*.log" \) \
             -exec mv {} ./ \; || true
+
+          if find "${{ github.workspace }}/baseline" -type f -name "all_cases_*.log" | grep -q .; then
+            echo -e "All cases logs collected"
+            find "${{ github.workspace }}/baseline" -type f -name "all_cases_*.log" \
+              -exec sh -c 'for file; do
+                filename=$(basename "$file")
+                newname="${filename%.log}_reference.log"
+                mv "$file" "./$newname"
+              done' _ {} + 2>/dev/null || true
+          else
+            echo -e "No all cases log"
+            mkdir -p ${{ github.workspace }}/ut_log/baseline
+            find "${{ github.workspace }}/baseline/" -type f \
+              \( -name "*.xml" \) \
+              -exec mv {} ${{ github.workspace }}/ut_log/baseline \; || true
+            python ${{ github.workspace }}/.github/scripts/check-ut.py -n ${{ inputs.ut }} -i ${{ github.workspace }}/ut_log/baseline/*.xml -o ${{ github.workspace }}/ut_log/baseline
+            find "${{ github.workspace }}/ut_log/baseline" -type f -name "all_cases_*.log" \
+              -exec sh -c 'for file; do
+                filename=$(basename "$file")
+                newname="${filename%.log}_reference.log"
+                mv "$file" "./$newname"
+              done' _ {} + 2>/dev/null || true
+          fi
           cp ${{ github.workspace }}/.github/scripts/ut_result_check.sh ./
           # get distributed known issues
           gh --repo intel/torch-xpu-ops issue view $UT_SKIP_ISSUE --json body -q .body |sed -E '/^(#|$)/d' > Known_issue.log.tmp
@@ -187,6 +233,17 @@ jobs:
             sed -i 's/[[:space:]]*$//g' Known_issue.log
             bash ut_result_check.sh "${ut_name}" "${{ inputs.pytorch }}"
           done
+      - name: Upload Reference Run ID
+        if: ${{ endsWith(inputs.test_type, 'ly') }}
+        run: |
+          gh --repo ${GITHUB_REPOSITORY} issue view ${REFERENCE_ISSUE_ID} --json body -q .body 2>&1 |tee new_body.txt
+          has_or_not="$(grep -c 'Inductor-${{ inputs.test_type }}-LTS2' new_body.txt || true)"
+          if [ ${has_or_not} -ne 0 ];then
+            sed -i "s/Inductor-${{ inputs.test_type }}-LTS2:.*/Inductor-${{ inputs.test_type }}-LTS2: ${GITHUB_RUN_ID}/" new_body.txt
+          else
+            echo "Inductor-${{ inputs.test_type }}-LTS2: ${GITHUB_RUN_ID}" |tee -a new_body.txt
+          fi
+          gh --repo ${GITHUB_REPOSITORY} issue edit ${REFERENCE_ISSUE_ID} --body-file new_body.txt
       - name: Upload Inductor XPU UT Log
         if: ${{ ! cancelled() }}
         uses: actions/upload-artifact@v4
diff --git a/.github/workflows/nightly_ondemand.yml b/.github/workflows/nightly_ondemand.yml
index 932ad56a46..b21593aa34 100644
--- a/.github/workflows/nightly_ondemand.yml
+++ b/.github/workflows/nightly_ondemand.yml
@@ -157,6 +157,7 @@ jobs:
       torch_xpu_ops: ${{ needs.Conditions-Filter.outputs.torch_xpu_ops }}
       python: ${{ needs.Conditions-Filter.outputs.python }}
       ut: ${{ matrix.ut_name }}
+      test_type: ${{ needs.Conditions-Filter.outputs.test_type }}
 
   Linux-Nightly-Ondemand-E2E-Tests:
     if: ${{ github.event_name == 'schedule' || contains(inputs.suite, 'e') }}
diff --git a/.github/workflows/pull.yml b/.github/workflows/pull.yml
index 6c26af3c71..e2646a655a 100644
--- a/.github/workflows/pull.yml
+++ b/.github/workflows/pull.yml
@@ -118,6 +118,7 @@ jobs:
       pytorch: ${{ needs.conditions-filter.outputs.pytorch }}
       torch_xpu_ops: ${{ needs.conditions-filter.outputs.pytorch == 'nightly_wheel' && 'pinned' || 'main' }}
       ut: ${{ matrix.ut_name }}
+      test_type: ${{ needs.conditions-filter.outputs.pytorch == 'nightly_wheel' && 'wheel-cicd' || 'build-cicd' }}
 
   linux-distributed:
     needs: [conditions-filter, linux-build]

From c36e319d527ec76ccbe6c4a3cf533ae0a179422e Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Mon, 1 Dec 2025 02:46:27 -0800
Subject: [PATCH 04/10] align the lint check

---
 .github/scripts/check-ut.py        |  4 ++--
 .github/scripts/ut_result_check.sh | 58 ++++++++++++++++--------
 2 files changed, 33 insertions(+), 29 deletions(-)

diff --git a/.github/scripts/check-ut.py b/.github/scripts/check-ut.py
index 1b1d7f9d74..fa999ae7c8 100644
--- a/.github/scripts/check-ut.py
+++ b/.github/scripts/check-ut.py
@@ -408,7 +408,7 @@ def print_summary():
 
 def main():
     os.makedirs(args.output_dir, exist_ok=True)
-    
+
     for input_file in args.input_files:
         if input_file.endswith('.log'):
             process_log_file(input_file)
@@ -416,7 +416,7 @@ def main():
             process_xml_file(input_file)
         else:
             print(f"Skipping unknown file type: {input_file}", file=sys.stderr)
-    
+
     if args.ut_name != "skipped_ut":
         failure_list_path = get_output_path("ut_failure_list.csv")
         with open(failure_list_path, "w", encoding='utf-8') as failure_list:
diff --git a/.github/scripts/ut_result_check.sh b/.github/scripts/ut_result_check.sh
index 5acb4761d6..a75109c957 100644
--- a/.github/scripts/ut_result_check.sh
+++ b/.github/scripts/ut_result_check.sh
@@ -178,58 +178,58 @@ categorize_failures() {
     local failures_log="$1"
     local all_ut_log="$2"
     local output_dir="${3:-.}"
-    
+
     # Check if required parameters are provided
     if [[ $# -lt 2 ]]; then
         echo "Usage: categorize_failures <failures_log> <all_ut_log> [output_dir]"
         echo "Example: categorize_failures failures.txt all_ut.txt ./output"
         return 1
     fi
-    
+
     # Check if files exist
     if [[ ! -f "$failures_log" ]]; then
         echo "Error: Failures log file not found: $failures_log"
         return 1
     fi
-    
+
     if [[ ! -f "$all_ut_log" ]]; then
         echo "Error: All UT log file not found: $all_ut_log"
         return 1
     fi
-    
+
     # Create output directory
     mkdir -p "$output_dir"
-    
+
     # Output file paths
     local regression_file="$output_dir/regression_ut.txt"
     local new_issue_file="$output_dir/new_issue_ut.txt"
     local summary_file="$output_dir/summary.txt"
-    
+
     # Clear output files (if they exist)
-    > "$regression_file"
-    > "$new_issue_file"
-    > "$summary_file"
-    
+    true > "$regression_file"
+    true > "$new_issue_file"
+    true > "$summary_file"
+
     # Counters
     local regression_count=0
     local new_issue_count=0
     local total_failures=0
-    
+
     echo "Starting UT failure analysis..."
     echo "Failures log: $failures_log"
     echo "All UT log: $all_ut_log"
     echo "Output directory: $output_dir"
     echo ""
-    
+
     # Process failures log line by line
     while IFS= read -r line || [[ -n "$line" ]]; do
         # Skip empty lines
         if [[ -z "$line" ]]; then
             continue
         fi
-    
+
         total_failures=$((total_failures + 1))
-    
+
         # Check if this line exists in all UT log
         # Using grep -Fxq: -F fixed strings, -x whole line match, -q quiet mode
         if grep -Fxq "$line" "$all_ut_log" 2>/dev/null; then
@@ -242,7 +242,7 @@ categorize_failures() {
             echo "$line" >> "$new_issue_file"
         fi
     done < "$failures_log"
-    
+
     # Generate summary report
     local timestamp
     timestamp=$(date '+%Y-%m-%d %H:%M:%S')
@@ -260,9 +260,9 @@ categorize_failures() {
     if [[ $regression_count -gt 0 ]]; then
         echo "REGRESSION CASES ISSUE ($regression_count):"
         echo "---------------------------------"
-        cat "$regression_file" | while IFS= read -r line; do
+        while IFS= read -r line; do
             echo "  $line"
-        done
+        done < "$regression_file"
         echo ""
     else
         echo "✅ No regression cases found."
@@ -273,9 +273,9 @@ categorize_failures() {
     if [[ $new_issue_count -gt 0 ]]; then
         echo "NEW UT CASES ISSUE ($new_issue_count):"
         echo "--------------------------------"
-        cat "$new_issue_file" | while IFS= read -r line; do
+        while IFS= read -r line; do
             echo "  $line"
-        done
+        done < "$new_issue_file"
         echo ""
     else
         echo "✅ No new UT cases issue found."
@@ -304,27 +304,31 @@ Detailed Lists:
 ---------------
 
 EOF
-    
+
     # Add regression UT list to summary
     if [[ $regression_count -gt 0 ]]; then
-        echo "Regression Issues:" >> "$summary_file"
-        echo "------------------" >> "$summary_file"
+        {
+            echo "Regression Issues:"
+            echo "-----------"
+        } >> "$summary_file"
         cat "$regression_file" >> "$summary_file"
         echo "" >> "$summary_file"
     else
         echo "✅ No regression issues found" >> "$summary_file"
         echo "" >> "$summary_file"
     fi
-    
+
     # Add new issue UT list to summary
     if [[ $new_issue_count -gt 0 ]]; then
-        echo "New Issues:" >> "$summary_file"
-        echo "-----------" >> "$summary_file"
+        {
+            echo "New Issues:"
+            echo "-----------"
+        } >> "$summary_file"
         cat "$new_issue_file" >> "$summary_file"
     else
         echo "✅ No new issues found" >> "$summary_file"
     fi
-    
+
     # Print summary to console
     echo ""
     echo "Analysis completed!"
@@ -338,7 +342,7 @@ EOF
     echo "  Regression UT list: $regression_file"
     echo "  New issue UT list: $new_issue_file"
     echo "  Detailed summary: $summary_file"
-    
+
     # Show warning if no failures were found
     if [[ $total_failures -eq 0 ]]; then
         echo ""

From e50639c0755985a378d01f51c72dd06bfb6d5733 Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Mon, 1 Dec 2025 02:49:49 -0800
Subject: [PATCH 05/10] align the lint check

---
 .github/scripts/ut_result_check.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/scripts/ut_result_check.sh b/.github/scripts/ut_result_check.sh
index a75109c957..8a276fe2d9 100644
--- a/.github/scripts/ut_result_check.sh
+++ b/.github/scripts/ut_result_check.sh
@@ -310,8 +310,8 @@ EOF
         {
             echo "Regression Issues:"
             echo "-----------"
+            cat "$regression_file"
         } >> "$summary_file"
-        cat "$regression_file" >> "$summary_file"
         echo "" >> "$summary_file"
     else
         echo "✅ No regression issues found" >> "$summary_file"
@@ -323,8 +323,8 @@ EOF
         {
             echo "New Issues:"
             echo "-----------"
+            cat "$new_issue_file"
         } >> "$summary_file"
-        cat "$new_issue_file" >> "$summary_file"
     else
         echo "✅ No new issues found" >> "$summary_file"
     fi

From 65a8ade1c51c82614455ee5b06fc4550b1f9f8d4 Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Mon, 1 Dec 2025 19:22:22 -0800
Subject: [PATCH 06/10] fix the path issue

---
 .github/workflows/_linux_ut.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index b66f1e15f1..668df46001 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -166,10 +166,10 @@ jobs:
           gh --repo intel/torch-xpu-ops issue view ${REFERENCE_ISSUE_ID} --json body -q .body 2>&1 |tee body.txt
           REFERENCE_RUN_ID="$(cat body.txt |grep "Inductor-${artifact_type}-LTS2" |sed 's/.*: *//' || echo '')"
           if [ "${REFERENCE_RUN_ID}" != "" ];then
-            gh --repo intel/torch-xpu-ops run download ${REFERENCE_RUN_ID} -p "Inductor-XPU-UT-Data-*"
-            find Inductor-XPU-UT-Data-*/ -maxdepth 1 -mindepth 1 -type d |sort -V |\
-            while read line; do rsync -az --delete $line/ $(basename $line)/; done
-            rm -rf Inductor-XPU-UT-Data-* || true
+            gh --repo intel/torch-xpu-ops run download ${REFERENCE_RUN_ID} -p "Inductor-XPU-UT-Data-*${{ inputs.ut }}*"
+            find Inductor-XPU-UT-Data-*${{ inputs.ut }}*/ -type f -path "*/Inductor-XPU-UT-Data-*/*" -print0 | \
+            xargs -0 -I {} cp {} .
+            rm -rf Inductor-XPU-UT-Data-*${{ inputs.ut }}* || true
           fi
       - name: Check UT Results
         run: |

From 2af4b57e04a74b62e478e1b87d8bdd45ac8331c9 Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Mon, 1 Dec 2025 21:14:27 -0800
Subject: [PATCH 07/10] fix the dependency issue

---
 .github/workflows/_linux_ut.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index 668df46001..16195fb2c4 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -199,6 +199,7 @@ jobs:
             find "${{ github.workspace }}/baseline/" -type f \
               \( -name "*.xml" \) \
               -exec mv {} ${{ github.workspace }}/ut_log/baseline \; || true
+            pip install junitparser
             python ${{ github.workspace }}/.github/scripts/check-ut.py -n ${{ inputs.ut }} -i ${{ github.workspace }}/ut_log/baseline/*.xml -o ${{ github.workspace }}/ut_log/baseline
             find "${{ github.workspace }}/ut_log/baseline" -type f -name "all_cases_*.log" \
               -exec sh -c 'for file; do

From 8e7163edb5c0f2b85306aa58bbb8fb3cfa2831fc Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Tue, 2 Dec 2025 23:30:11 -0800
Subject: [PATCH 08/10] fix the path issue

---
 .github/workflows/_linux_ut.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index 16195fb2c4..4d73a4429a 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -166,10 +166,10 @@ jobs:
           gh --repo intel/torch-xpu-ops issue view ${REFERENCE_ISSUE_ID} --json body -q .body 2>&1 |tee body.txt
           REFERENCE_RUN_ID="$(cat body.txt |grep "Inductor-${artifact_type}-LTS2" |sed 's/.*: *//' || echo '')"
           if [ "${REFERENCE_RUN_ID}" != "" ];then
-            gh --repo intel/torch-xpu-ops run download ${REFERENCE_RUN_ID} -p "Inductor-XPU-UT-Data-*${{ inputs.ut }}*"
-            find Inductor-XPU-UT-Data-*${{ inputs.ut }}*/ -type f -path "*/Inductor-XPU-UT-Data-*/*" -print0 | \
+            gh --repo intel/torch-xpu-ops run download ${REFERENCE_RUN_ID} -p "Inductor-XPU-UT-Data-*"
+            find Inductor-XPU-UT-Data-*/ -type f -path "*/Inductor-XPU-UT-Data-*/*" -print0 | \
             xargs -0 -I {} cp {} .
-            rm -rf Inductor-XPU-UT-Data-*${{ inputs.ut }}* || true
+            rm -rf Inductor-XPU-UT-Data-* || true
           fi
       - name: Check UT Results
         run: |

From 05d687d169f12621ce717059c211150b6c5ca3ba Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Tue, 2 Dec 2025 23:42:49 -0800
Subject: [PATCH 09/10] add run id check

---
 .github/workflows/_linux_ut.yml | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/.github/workflows/_linux_ut.yml b/.github/workflows/_linux_ut.yml
index 4d73a4429a..8a28c6c4f2 100644
--- a/.github/workflows/_linux_ut.yml
+++ b/.github/workflows/_linux_ut.yml
@@ -240,6 +240,11 @@ jobs:
           gh --repo ${GITHUB_REPOSITORY} issue view ${REFERENCE_ISSUE_ID} --json body -q .body 2>&1 |tee new_body.txt
           has_or_not="$(grep -c 'Inductor-${{ inputs.test_type }}-LTS2' new_body.txt || true)"
           if [ ${has_or_not} -ne 0 ];then
+            existing_run_id=$(grep -o "Inductor-${{ inputs.test_type }}-LTS2: [0-9]*" new_body.txt | grep -o "[0-9]*$" || echo "")
+            if [ "${existing_run_id}" = "${GITHUB_RUN_ID}" ]; then
+              echo "Existing Run ID (${existing_run_id}) is the same as new Run ID (${GITHUB_RUN_ID}). No update needed."
+              exit 0
+            fi
             sed -i "s/Inductor-${{ inputs.test_type }}-LTS2:.*/Inductor-${{ inputs.test_type }}-LTS2: ${GITHUB_RUN_ID}/" new_body.txt
           else
             echo "Inductor-${{ inputs.test_type }}-LTS2: ${GITHUB_RUN_ID}" |tee -a new_body.txt

From 5544f52a2656c48ee82198c0df938e31b418fb9d Mon Sep 17 00:00:00 2001
From: "Zhong, Ruijie"
Date: Wed, 3 Dec 2025 00:41:29 -0800
Subject: [PATCH 10/10] update ut check

---
 .github/scripts/check-ut.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/.github/scripts/check-ut.py b/.github/scripts/check-ut.py
index fa999ae7c8..d094367783 100644
--- a/.github/scripts/check-ut.py
+++ b/.github/scripts/check-ut.py
@@ -131,6 +131,9 @@ def print_failures(failure_list=None):
     if not failures:
         return
 
+    if args.output_dir != '.':
+        return
+
     print("### Test Failures")
     print_header = True
     for case in failures:
@@ -371,6 +374,9 @@ def generate_category_totals_log():
         log_file.write(f"Errors: {totals['Errors']}\n")
 
 def print_summary():
+    if args.output_dir != '.':
+        return
+
     print("### Results Summary")
     print_header = True
 
@@ -426,7 +432,9 @@ def main():
     generate_failures_log()
     generate_passed_log()
     generate_all_cases_log()
     generate_category_totals_log()
-    print_summary()
+
+    if args.output_dir == '.':
+        print_summary()
 
 if __name__ == "__main__":