|
33 | 33 | run: mvn -B install -P gradlePlugin --no-transfer-progress |
34 | 34 | env: |
35 | 35 | BUILD_LOG_LEVEL: 'ERROR' |
# Summarize JUnit results into the GitHub Actions job summary.
# Runs even when the build step failed (if: always()) so failures are reported.
- name: 📊 Test Report
  if: always()
  run: |
    echo "## 🧪 Test Summary (${{ matrix.os }} - Java ${{ matrix.java-version }})" >> $GITHUB_STEP_SUMMARY

    # Use Python to safely parse the JUnit XML files and write to the summary
    python3 -c '
    import xml.etree.ElementTree as ET
    import glob, os, sys

    # Find all JUnit XML reports produced by Maven Surefire/Failsafe
    files = glob.glob("**/target/**/TEST-*.xml", recursive=True)

    tests, failures, errors, skipped = 0, 0, 0, 0
    failed_tests = set()
    skipped_tests = set()

    for report in files:
        try:
            tree = ET.parse(report)
            root = tree.getroot()

            # Handle both <testsuite> and <testsuites> root elements
            suites = [root] if root.tag == "testsuite" else root.findall(".//testsuite")

            for suite in suites:
                # "or 0" guards against empty attribute values, which int() rejects
                tests += int(suite.attrib.get("tests", 0) or 0)
                failures += int(suite.attrib.get("failures", 0) or 0)
                errors += int(suite.attrib.get("errors", 0) or 0)
                skipped += int(suite.attrib.get("skipped", 0) or 0)

                # Collect names of failing and skipped tests; strip the package
                # path from the classname for a cleaner display
                for case in suite.findall("testcase"):
                    cls = case.attrib.get("classname", "UnknownClass").split(".")[-1]
                    name = case.attrib.get("name", "UnknownMethod")
                    if case.find("failure") is not None or case.find("error") is not None:
                        failed_tests.add(f"- `{cls}.{name}`")
                    elif case.find("skipped") is not None:
                        skipped_tests.add(f"- `{cls}.{name}`")
        except Exception as e:
            # Best-effort: a single malformed report must not fail the step
            print(f"Error parsing {report}: {e}")

    passed = tests - failures - errors - skipped
    summary_file = os.environ.get("GITHUB_STEP_SUMMARY")
    if not summary_file:
        # Outside GitHub Actions (or on a misconfigured runner) there is no
        # summary file to append to; exit cleanly instead of crashing on open(None)
        print("GITHUB_STEP_SUMMARY is not set; skipping summary generation")
        sys.exit(0)

    with open(summary_file, "a", encoding="utf-8") as f:
        if not files:
            f.write("⚠️ **Could not find any `TEST-*.xml` files.**\n")
        else:
            # Draw the Markdown Table
            f.write("| Result | Count |\n")
            f.write("|--------|-------|\n")
            f.write(f"| ✅ **Passed** | **{passed}** |\n")
            f.write(f"| ❌ **Failed/Errors** | **{failures + errors}** |\n")
            f.write(f"| ⚠️ **Skipped** | **{skipped}** |\n")
            f.write(f"| 📊 **Total Tests** | **{tests}** |\n\n")

            # Provide specific feedback for failures
            if failures > 0 or errors > 0:
                f.write("### 🚨 Test Failures Detected!\n")
                f.write("The following tests did not pass:\n")
                for test in sorted(failed_tests):
                    f.write(f"{test}\n")
            else:
                f.write("### 🎉 100% Pass Rate!\n")
                f.write(f"The build is green across all {tests} tests.\n")

            # Provide specific feedback for skips
            if skipped > 0:
                f.write("\n### ⚠️ Skipped Tests\n")
                for test in sorted(skipped_tests):
                    f.write(f"{test}\n")
    '
0 commit comments