KAFKA-18748 Run new tests separately in PRs (#18770)

Split the JUnit tests into "new", "flaky", and the remainder. 

On PR builds, "new" tests are anything that do not exist on trunk. They are run with zero tolerance for flakiness. 

On trunk builds, "new" tests are anything added in the last 7 days. They are run with some tolerance for flakiness.

Another change included here is that we will not update the test catalog if any test job fails on a trunk build. We have had difficulty determining whether all the tests had run (due to timeouts or failures in upstream Gradle tasks). By requiring green ":test" jobs, we can be sure that the resulting catalog will be valid.

---

The purpose of this change is to discourage contributors from adding flaky tests, but give some leeway for trunk so we have successful builds.

The "quarantinedTest" Gradle target has been consolidated into the regular "test" target. There are now some
runtime properties to control what tests are run.

* kafka.test.catalog.file: path to test catalog
* kafka.test.run.new: include new tests. This selection depends on the age of the loaded test catalog
* kafka.test.run.flaky: include tests marked as `@Flaky` (replaces the `excludeTags 'flaky'` directive)
* kafka.test.verbose: include additional logging from new JUnit classes (enabled by default if re-running GitHub workflow with debug logging enabled)
* maxTestRetries: how many retries to allow via Develocity retry plugin (default 0)
* maxTestRetryFailures: how many failures to allow before stopping retries (default 0)
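
As a rough sketch of how these properties combine, mirroring the CI invocation in the `run-gradle` action below (the catalog path is only a placeholder), a PR-style "new tests" run looks like:

    ./gradlew test \
      -Pkafka.test.catalog.file=/path/to/combined-test-catalog.txt \
      -Pkafka.test.run.new=true \
      -Pkafka.test.run.flaky=false \
      -Pkafka.test.verbose=true \
      -PmaxTestRetries=0 -PmaxTestRetryFailures=0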


Thanks to Jun Rao for inspiring the idea.

Reviewers: Chia-Ping Tsai <chia7712@gmail.com>, Ismael Juma <ismael@juma.me.uk>, Jun Rao <junrao@gmail.com>
David Arthur 2025-02-24 17:08:15 -05:00 committed by GitHub
parent 10da082184
commit cb33e98dfc
16 changed files with 547 additions and 361 deletions

View File

@ -22,7 +22,10 @@ inputs:
# Composite actions do not support typed parameters. Everything is treated as a string
# See: https://github.com/actions/runner/issues/2238
test-task:
description: "The test suite to run. Either 'test' or 'quarantinedTest'."
description: "The Gradle task name to run."
required: true
test-xml-output:
description: "Output directory for JUnit XML results"
required: true
timeout-minutes:
description: "The timeout for the tests, in minutes."
@ -33,10 +36,32 @@ inputs:
build-scan-artifact-name:
description: "The name to use for archiving the build scan."
required: true
test-retries:
description: "The number of retries for a given test should we allow"
required: true
default: "0"
test-repeat:
description: "The number of times to repeat the integration tests"
required: true
default: "1"
test-verbose:
description: "Enable additional logging by the JUnit infrastructure"
required: true
default: "false"
run-new-tests:
description: "Run tests not present in the given test catalog"
required: true
default: "false"
run-flaky-tests:
description: "Run tests marked as flaky"
required: true
default: "false"
outputs:
gradle-exitcode:
description: "The result of the Gradle test task."
value: ${{ steps.run-tests.outputs.exitcode }}
runs:
using: "composite"
steps:
@ -52,15 +77,25 @@ runs:
TIMEOUT_MINUTES: ${{ inputs.timeout-minutes}}
TEST_CATALOG: ${{ inputs.test-catalog-path }}
TEST_TASK: ${{ inputs.test-task }}
TEST_RETRIES: ${{ inputs.test-retries }}
TEST_REPEAT: ${{ inputs.test-repeat }}
RUN_NEW_TESTS: ${{ inputs.run-new-tests }}
RUN_FLAKY_TESTS: ${{ inputs.run-flaky-tests }}
TEST_XML_OUTPUT_DIR: ${{ inputs.test-xml-output }}
TEST_VERBOSE: ${{ inputs.test-verbose }}
run: |
set +e
./.github/scripts/thread-dump.sh &
timeout ${TIMEOUT_MINUTES}m ./gradlew --build-cache --continue --no-scan \
-PtestLoggingEvents=started,passed,skipped,failed \
-PmaxParallelForks=2 \
-PmaxTestRetries=1 -PmaxTestRetryFailures=10 \
-PmaxQuarantineTestRetries=3 -PmaxQuarantineTestRetryFailures=0 \
-PmaxTestRetries=$TEST_RETRIES -PmaxTestRetryFailures=10 \
-Pkafka.test.catalog.file=$TEST_CATALOG \
-Pkafka.test.run.new=$RUN_NEW_TESTS \
-Pkafka.test.run.flaky=$RUN_FLAKY_TESTS \
-Pkafka.test.xml.output.dir=$TEST_XML_OUTPUT_DIR \
-Pkafka.cluster.test.repeat=$TEST_REPEAT \
-Pkafka.test.verbose=$TEST_VERBOSE \
-PcommitId=xxxxxxxxxxxxxxxx \
$TEST_TASK
exitcode="$?"

View File

@ -208,12 +208,12 @@ def split_report_path(base_path: str, report_path: str) -> Tuple[str, str]:
"""
Parse a report XML and extract the module path. Test report paths look like:
build/junit-xml/module[/sub-module]/[task]/TEST-class.method.xml
build/junit-xml/module[/sub-module]/[test-job]/TEST-class.method.xml
This method strips off a base path and assumes all path segments leading up to the suite name
This method strips off a base path and assumes all path segments leading up to the job name
are part of the module path.
Returns a tuple of (module, task)
Returns a tuple of (module, job)
"""
rel_report_path = os.path.relpath(report_path, base_path)
path_segments = pathlib.Path(rel_report_path).parts
@ -238,7 +238,7 @@ if __name__ == "__main__":
parser.add_argument("--export-test-catalog",
required=False,
default="",
help="Optional path to dump all tests")
help="Optional path to dump all tests.")
if not os.getenv("GITHUB_WORKSPACE"):
print("This script is intended to by run by GitHub Actions.")
@ -249,6 +249,7 @@ if __name__ == "__main__":
glob_path = os.path.join(args.path, "**/*.xml")
reports = glob(pathname=glob_path, recursive=True)
logger.info(f"Found {len(reports)} JUnit results")
workspace_path = get_env("GITHUB_WORKSPACE") # e.g., /home/runner/work/apache/kafka
total_file_count = 0
@ -265,14 +266,15 @@ if __name__ == "__main__":
flaky_table = []
skipped_table = []
quarantined_table = []
new_table = []
exporter = TestCatalogExporter()
logger.debug(f"::group::Parsing {len(reports)} JUnit Report Files")
for report in reports:
with open(report, "r") as fp:
module_path, task = split_report_path(args.path, report)
logger.debug(f"Parsing file: {report}, module: {module_path}, task: {task}")
module_path, test_job = split_report_path(args.path, report)
logger.debug(f"Parsing file: {report}, module: {module_path}, job: {test_job}")
for suite in parse_report(workspace_path, report, fp):
total_skipped += suite.skipped
total_errors += suite.errors
@ -309,8 +311,8 @@ if __name__ == "__main__":
logger.debug(f"Found skipped test: {skipped_test}")
skipped_table.append((simple_class_name, skipped_test.test_name))
# Only collect quarantined tests from the "quarantinedTest" task
if task == "quarantinedTest":
# Only collect quarantined tests from the "flaky" test jobs
if re.match(r".*\bflaky\b.*", test_job) is not None:
for test in all_suite_passed.values():
simple_class_name = test.class_name.split(".")[-1]
quarantined_table.append((simple_class_name, test.test_name))
@ -318,6 +320,14 @@ if __name__ == "__main__":
simple_class_name = test.class_name.split(".")[-1]
quarantined_table.append((simple_class_name, test.test_name))
if re.match(r".*\bnew\b.*", test_job) is not None:
for test in all_suite_passed.values():
simple_class_name = test.class_name.split(".")[-1]
new_table.append((simple_class_name, test.test_name))
for test in all_suite_failed.values():
simple_class_name = test.class_name.split(".")[-1]
new_table.append((simple_class_name, test.test_name))
if args.export_test_catalog:
exporter.handle_suite(module_path, suite)
@ -331,14 +341,36 @@ if __name__ == "__main__":
duration = pretty_time_duration(total_time)
logger.info(f"Finished processing {len(reports)} reports")
# Print summary of the tests.
# Determine exit status. If we add anything to failure_messages, we will exit(1)
failure_messages = []
exit_code = get_env("GRADLE_TEST_EXIT_CODE", int)
junit_report_url = get_env("JUNIT_REPORT_URL")
thread_dump_url = get_env("THREAD_DUMP_URL")
if exit_code is None:
failure_messages.append("Missing required GRADLE_TEST_EXIT_CODE environment variable. Failing this script.")
elif exit_code == 124:
# Special handling for timeouts. The exit code 124 is emitted by 'timeout' command used in build.yml.
# A watchdog script "thread-dump.sh" will use jstack to force a thread dump for any Gradle process
# still running after the timeout. We capture the exit code of the test task and pass it to
# this script. If the task failed due to a timeout, we want to fail the overall build since it will not
# include all the test results
failure_messages.append(f"Gradle task had a timeout. Failing this script. These are partial results!")
elif exit_code > 0:
failure_messages.append(f"Gradle task had a failure exit code. Failing this script.")
if thread_dump_url:
failure_messages.append(f"Thread dump available at {thread_dump_url}. Failing this script.")
if junit_report_url:
report_md = f"Download [JUnit HTML report]({junit_report_url})"
else:
report_md = "No reports available. Environment variable JUNIT_REPORT_URL was not found."
# Print summary of the tests
# The stdout (print) goes to the workflow step console output.
# The stderr (logger) is redirected to GITHUB_STEP_SUMMARY which becomes part of the HTML job summary.
report_url = get_env("JUNIT_REPORT_URL")
if report_url:
report_md = f"Download [HTML report]({report_url})."
else:
report_md = "No report available. JUNIT_REPORT_URL was missing."
summary = (f"{total_run} tests cases run in {duration}.\n\n"
f"{total_success} {PASSED}, {total_failures} {FAILED}, "
f"{total_flaky} {FLAKY}, {total_skipped} {SKIPPED}, {len(quarantined_table)} {QUARANTINED}, and {total_errors} errors.")
@ -402,42 +434,28 @@ if __name__ == "__main__":
print("\n</details>")
logger.debug("::endgroup::")
if len(new_table) > 0:
print("<details>")
print(f"<summary>New Tests ({len(new_table)})</summary>\n")
print(f"| Module | Test |")
print(f"| ------ | ---- |")
logger.debug(f"::group::Found {len(new_table)} new tests")
for row in new_table:
row_joined = " | ".join(row)
print(f"| {row_joined} |")
logger.debug(f"{row[0]} > {row[1]}")
print("\n</details>")
logger.debug("::endgroup::")
print("<hr/>")
# Print special message if there was a timeout
test_exit_code = get_env("GRADLE_TEST_EXIT_CODE", int)
quarantined_test_exit_code = get_env("GRADLE_QUARANTINED_TEST_EXIT_CODE", int)
if test_exit_code == 124 or quarantined_test_exit_code == 124:
# Special handling for timeouts. The exit code 124 is emitted by 'timeout' command used in build.yml.
# A watchdog script "thread-dump.sh" will use jstack to force a thread dump for any Gradle process
# still running after the timeout. We capture the exit codes of the two test tasks and pass them to
# this script. If either "test" or "quarantinedTest" fails due to timeout, we want to fail the overall build.
thread_dump_url = get_env("THREAD_DUMP_URL")
if test_exit_code == 124:
logger.debug(f"Gradle task for 'test' timed out. These are partial results!")
else:
logger.debug(f"Gradle task for 'quarantinedTest' timed out. These are partial results!")
# Print errors and exit
for message in failure_messages:
logger.debug(message)
logger.debug(summary)
if thread_dump_url:
print(f"\nThe JUnit tests were cancelled due to a timeout. Thread dumps were generated before the job was cancelled. "
f"Download [thread dumps]({thread_dump_url}).\n")
logger.debug(f"Failing this step because the tests timed out. Thread dumps were taken and archived here: {thread_dump_url}")
else:
logger.debug(f"Failing this step because the tests timed out. Thread dumps were not archived, check logs in JUnit step.")
exit(1)
elif test_exit_code == 1 or quarantined_test_exit_code == 1:
logger.debug(summary)
if total_failures > 0:
logger.debug(f"Failing this step due to {total_failures} test failures")
exit(1)
elif total_errors > 0:
logger.debug(f"Failing this step due to {total_errors} test errors")
if len(failure_messages) > 0:
exit(1)
else:
logger.debug("There was an error during the test or quarantinedTest task. Please check the logs")
exit(1)
else:
# Normal exit
logger.debug(summary)
exit(0)

View File

@ -20,10 +20,12 @@ sleep $(($SLEEP_MINUTES*60));
echo "Timed out after $SLEEP_MINUTES minutes. Dumping threads now..."
mkdir thread-dumps
touch thread-dumps/pids.txt
sleep 5;
for GRADLE_WORKER_PID in `jps | grep GradleWorkerMain | awk -F" " '{print $1}'`;
do
echo $GRADLE_WORKER_PID >> thread-dumps/pids.txt
echo "Dumping threads for GradleWorkerMain pid $GRADLE_WORKER_PID into $FILENAME";
FILENAME="thread-dumps/GradleWorkerMain-$GRADLE_WORKER_PID.txt"
jstack $GRADLE_WORKER_PID > $FILENAME

View File

@ -31,7 +31,34 @@ on:
type: boolean
jobs:
configure:
runs-on: ubuntu-latest
name: Configure Workflow
outputs:
is-draft: ${{ steps.check-draft-pr.outputs.is-draft }}
test-catalog-days: ${{ steps.test-catalog.outputs.days }}
steps:
- name: Env
run: printenv
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Check for Draft PR
id: check-draft-pr
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.draft
run: echo "is-draft=true" >> "$GITHUB_OUTPUT"
- name: Test Catalog Age
id: test-catalog
run: |
if [ "${{ github.event_name }}" = "pull_request" ]; then
echo "days=0" >> "$GITHUB_OUTPUT"
else
echo "days=7" >> "$GITHUB_OUTPUT"
fi
load-catalog:
needs: [configure]
runs-on: ubuntu-latest
name: Load Test Catalog
steps:
@ -49,9 +76,12 @@ jobs:
path: test-catalog
- name: Checkout catalog at earlier date
if: ${{ needs.configure.outputs.test-catalog-days != '0' }}
env:
DAYS: ${{ needs.configure.outputs.test-catalog-days }}
run: |
cd test-catalog
SHA=$(git rev-list -1 --before 7.days.ago origin/test-catalog)
SHA=$(git rev-list -1 --before $DAYS.days.ago origin/test-catalog)
echo $SHA
git switch --detach $SHA
git show --no-patch
@ -76,21 +106,14 @@ jobs:
compression-level: 9
validate:
needs: [configure]
runs-on: ubuntu-latest
name: Compile and Check Java
outputs:
is-draft: ${{ steps.check-draft-pr.outputs.is-draft }}
steps:
- name: Env
run: printenv
env:
GITHUB_CONTEXT: ${{ toJson(github) }}
- name: Check for Draft PR
id: check-draft-pr
if: |
github.event_name == 'pull_request' &&
github.event.pull_request.draft
run: echo "is-draft=true" >> "$GITHUB_OUTPUT"
- name: Checkout code
uses: actions/checkout@v4
with:
@ -145,16 +168,22 @@ jobs:
fi
test:
needs: [validate, load-catalog]
if: ${{ ! needs.validate.outputs.is-draft }}
needs: [configure, validate, load-catalog]
if: ${{ ! needs.configure.outputs.is-draft }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
java: [ 23, 17 ] # If we change these, make sure to adjust ci-complete.yml
outputs:
timed-out: ${{ (steps.junit-test.outputs.gradle-exitcode == '124' || steps.junit-quarantined-test.outputs.gradle-exitcode == '124') }}
name: JUnit tests Java ${{ matrix.java }}
# If we change these, make sure to adjust ci-complete.yml
java: [ 23, 17 ]
run-flaky: [ true, false ]
run-new: [ true, false ]
exclude:
- run-flaky: true
run-new: true
env:
job-variation: ${{ matrix.java }}-${{ matrix.run-flaky == true && 'flaky' || 'noflaky' }}-${{ matrix.run-new == true && 'new' || 'nonew' }}
name: JUnit tests Java ${{ matrix.java }}${{ matrix.run-flaky == true && ' (flaky)' || '' }}${{ matrix.run-new == true && ' (new)' || '' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
@ -179,15 +208,6 @@ jobs:
with:
name: combined-test-catalog
- name: JUnit Quarantined Tests
id: junit-quarantined-test
uses: ./.github/actions/run-gradle
with:
test-task: quarantinedTest
timeout-minutes: 180
test-catalog-path: ${{ steps.load-test-catalog.outputs.download-path }}/combined-test-catalog.txt
build-scan-artifact-name: build-scan-quarantined-test-${{ matrix.java }}
- name: JUnit Tests
id: junit-test
uses: ./.github/actions/run-gradle
@ -195,13 +215,19 @@ jobs:
test-task: test
timeout-minutes: 180 # 3 hours
test-catalog-path: ${{ steps.load-test-catalog.outputs.download-path }}/combined-test-catalog.txt
build-scan-artifact-name: build-scan-test-${{ matrix.java }}
build-scan-artifact-name: build-scan-${{ env.job-variation }}
run-new-tests: ${{ matrix.run-new }}
run-flaky-tests: ${{ matrix.run-flaky }}
test-retries: ${{ matrix.run-flaky == true && '3' || '1' }}
test-xml-output: ${{ env.job-variation }}
test-repeat: ${{ !inputs.is-trunk && matrix.run-new && '3' || '1' }}
test-verbose: ${{ runner.debug == '1' }}
- name: Archive JUnit HTML reports
uses: actions/upload-artifact@v4
id: junit-upload-artifact
id: archive-junit-html
with:
name: junit-reports-${{ matrix.java }}
name: junit-reports-${{ env.job-variation }}
path: |
**/build/reports/tests/*
compression-level: 9
@ -210,45 +236,92 @@ jobs:
- name: Archive JUnit XML
uses: actions/upload-artifact@v4
with:
name: junit-xml-${{ matrix.java }}
name: junit-xml-${{ env.job-variation }}
path: |
build/junit-xml/**/*.xml
compression-level: 9
if-no-files-found: ignore
- name: Archive Thread Dumps
id: thread-dump-upload-artifact
if: always() && (steps.junit-test.outputs.gradle-exitcode == '124' || steps.junit-quarantined-test.outputs.gradle-exitcode == '124')
id: archive-thread-dump
if: steps.junit-test.outputs.gradle-exitcode == '124'
uses: actions/upload-artifact@v4
with:
name: junit-thread-dumps-${{ matrix.java }}
name: junit-thread-dumps-${{ env.job-variation }}
path: |
thread-dumps/*
compression-level: 9
if-no-files-found: ignore
- name: Parse JUnit tests
run: python .github/scripts/junit.py --export-test-catalog ./test-catalog >> $GITHUB_STEP_SUMMARY
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
JUNIT_REPORT_URL: ${{ steps.junit-upload-artifact.outputs.artifact-url }}
THREAD_DUMP_URL: ${{ steps.thread-dump-upload-artifact.outputs.artifact-url }}
JUNIT_REPORT_URL: ${{ steps.archive-junit-html.outputs.artifact-url }}
THREAD_DUMP_URL: ${{ steps.archive-thread-dump.outputs.artifact-url }}
GRADLE_TEST_EXIT_CODE: ${{ steps.junit-test.outputs.gradle-exitcode }}
GRADLE_QUARANTINED_TEST_EXIT_CODE: ${{ steps.junit-quarantined-test.outputs.gradle-exitcode }}
run: |
python .github/scripts/junit.py \
--path build/junit-xml >> $GITHUB_STEP_SUMMARY
# This job downloads all the JUnit XML files and thread dumps from the JDK 23 test runs.
# If any test job fails, we will not run this job. Also, if any thread dump artifacts
# are present, this means there was a timeout in the tests and so we will not proceed
# with catalog creation.
collate-test-catalog:
name: Collate Test Catalog
needs: test
runs-on: ubuntu-latest
outputs:
uploaded-test-catalog: ${{ steps.archive-test-catalog.outcome == 'success' }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Download Thread Dumps
uses: actions/download-artifact@v4
with:
pattern: junit-thread-dumps-23-*
path: thread-dumps
merge-multiple: true
- name: Check For Thread Dump
id: check-for-thread-dump
run: |
find .
if [ -d thread-dumps ]; then
echo "Found 'thread-dumps' directory. Will not proceed with test catalog collation.";
exit 1;
fi
- name: Download JUnit XMLs
uses: actions/download-artifact@v4
with:
pattern: junit-xml-23-* # Only look at JDK 23 tests for the test catalog
path: junit-xml
merge-multiple: true
- name: Collate Test Catalog
continue-on-error: true
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
GRADLE_TEST_EXIT_CODE: 0
run: |
python .github/scripts/junit.py \
--path junit-xml \
--export-test-catalog ./test-catalog >> $GITHUB_STEP_SUMMARY
- name: Archive Test Catalog
if: ${{ always() && matrix.java == '23' }}
id: archive-test-catalog
uses: actions/upload-artifact@v4
with:
name: test-catalog
path: test-catalog
compression-level: 9
if-no-files-found: ignore
if-no-files-found: error
# This job downloads the test catalog from the previous job and overlays it on the test-catalog branch.
# This will only run on trunk and only if the collate job did not detect a timeout.
update-test-catalog:
name: Update Test Catalog
needs: test
if: ${{ always() && inputs.is-trunk && needs.test.outputs.timed-out == 'false' }}
needs: collate-test-catalog
if: ${{ inputs.is-trunk && needs.collate-test-catalog.outputs.uploaded-test-catalog == 'true' }}
runs-on: ubuntu-latest
permissions:
contents: write

View File

@ -43,8 +43,18 @@ jobs:
strategy:
fail-fast: false
matrix:
# Make sure these match build.yml
java: [ 23, 17 ]
artifact-prefix: [ "build-scan-test-", "build-scan-quarantined-test-"]
run-flaky: [ true, false ]
run-new: [ true, false ]
exclude:
- run-flaky: true
run-new: true
env:
job-variation: ${{ matrix.java }}-${{ matrix.run-flaky == true && 'flaky' || 'noflaky' }}-${{ matrix.run-new == true && 'new' || 'nonew' }}
status-context: Java ${{ matrix.java }}${{ matrix.run-flaky == true && ' / Flaky' || '' }}${{ matrix.run-new == true && ' / New' || '' }}
steps:
- name: Env
run: printenv
@ -67,7 +77,7 @@ jobs:
with:
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
name: ${{ matrix.artifact-prefix }}${{ matrix.java }}
name: build-scan-${{ env.job-variation }}
path: ~/.gradle/build-scan-data # This is where Gradle buffers unpublished build scan data when --no-scan is given
- name: Handle missing scan
if: ${{ steps.download-build-scan.outcome == 'failure' }}
@ -78,7 +88,7 @@ jobs:
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: '${{ github.event.workflow_run.html_url }}'
description: 'Could not find build scan'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
context: Gradle Build Scan / ${{ env.status-context }}
state: 'error'
- name: Publish Scan
id: publish-build-scan
@ -106,7 +116,7 @@ jobs:
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: '${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }}'
description: 'The build scan failed to be published'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
context: Gradle Build Scan / ${{ env.status-context }}
state: 'error'
- name: Update Status Check
if: ${{ steps.publish-build-scan.outcome == 'success' }}
@ -117,5 +127,5 @@ jobs:
commit_sha: ${{ github.event.workflow_run.head_sha }}
url: ${{ steps.publish-build-scan.outputs.build-scan-url }}
description: 'The build scan was successfully published'
context: 'Gradle Build Scan / Java ${{ matrix.java }}'
context: Gradle Build Scan / ${{ env.status-context }}
state: 'success'

View File

@ -45,12 +45,17 @@ jobs:
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Setup Python
uses: ./.github/actions/setup-python
- name: Setup Gradle
uses: ./.github/actions/setup-gradle
with:
java-version: ${{ inputs.java-version }}
gradle-cache-read-only: true
develocity-access-key: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Test
timeout-minutes: 60
id: junit-test
@ -60,26 +65,52 @@ jobs:
TEST_PATTERN: ${{ inputs.test-pattern }}
run: |
set +e
./gradlew --info --build-cache --scan --continue \
./.github/scripts/thread-dump.sh &
timeout 60m ./gradlew --info --build-cache --scan --continue \
-PtestLoggingEvents=started,passed,skipped,failed \
-PignoreFailures=true -PmaxParallelForks=2 \
-PmaxParallelForks=2 \
-Pkafka.cluster.test.repeat=$TEST_REPEAT \
-PmaxTestRetries=$TEST_REPEAT -PmaxTestRetryFailures=0 \
-PmaxQuarantineTestRetries=$TEST_REPEAT -PmaxQuarantineTestRetryFailures=0 \
${TEST_MODULE}:test ${TEST_MODULE}:quarantinedTest --tests $TEST_PATTERN
${TEST_MODULE}:test --tests $TEST_PATTERN
exitcode="$?"
echo "exitcode=$exitcode" >> $GITHUB_OUTPUT
- name: Archive JUnit reports
- name: Archive JUnit HTML reports
uses: actions/upload-artifact@v4
id: junit-upload-artifact
id: archive-junit-html
with:
name: junit-reports-${{ inputs.java-version }}
name: junit-html-reports
path: |
**/build/reports/tests/test/*
**/build/reports/tests/*
compression-level: 9
if-no-files-found: ignore
- name: Archive JUnit XML
uses: actions/upload-artifact@v4
with:
name: junit-xml
path: |
build/junit-xml/**/*.xml
compression-level: 9
if-no-files-found: ignore
- name: Archive Thread Dumps
id: archive-thread-dump
if: steps.junit-test.outputs.gradle-exitcode == '124'
uses: actions/upload-artifact@v4
with:
name: junit-thread-dumps-${{ env.job-variation }}
path: |
thread-dumps/*
compression-level: 9
if-no-files-found: ignore
- name: Parse JUnit tests
run: python .github/scripts/junit.py >> $GITHUB_STEP_SUMMARY
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
REPORT_URL: ${{ steps.junit-upload-artifact.outputs.artifact-url }}
GRADLE_EXIT_CODE: ${{ steps.junit-test.outputs.exitcode }}
JUNIT_REPORT_URL: ${{ steps.archive-junit-html.outputs.artifact-url }}
THREAD_DUMP_URL: ${{ steps.archive-thread-dump.outputs.artifact-url }}
GRADLE_TEST_EXIT_CODE: ${{ steps.junit-test.outputs.gradle-exitcode }}
run: |
python .github/scripts/junit.py \
--path build/junit-xml >> $GITHUB_STEP_SUMMARY

View File

@ -42,7 +42,6 @@ Follow instructions in https://kafka.apache.org/quickstart
./gradlew test # runs both unit and integration tests
./gradlew unitTest
./gradlew integrationTest
./gradlew quarantinedTest # runs the quarantined tests
### Force re-running tests without code change ###
@ -77,10 +76,6 @@ The following example declares -PmaxTestRetries=1 and -PmaxTestRetryFailures=3 t
./gradlew test -PmaxTestRetries=1 -PmaxTestRetryFailures=3
The quarantinedTest task also has no retries by default, but you can set maxQuarantineTestRetries and maxQuarantineTestRetryFailures to enable retries, similar to the test task.
./gradlew quarantinedTest -PmaxQuarantineTestRetries=3 -PmaxQuarantineTestRetryFailures=20
See [Test Retry Gradle Plugin](https://github.com/gradle/test-retry-gradle-plugin) and [build.yml](.github/workflows/build.yml) for more details.
### Generating test coverage reports ###

View File

@ -82,9 +82,6 @@ ext {
userMaxTestRetries = project.hasProperty('maxTestRetries') ? maxTestRetries.toInteger() : 0
userMaxTestRetryFailures = project.hasProperty('maxTestRetryFailures') ? maxTestRetryFailures.toInteger() : 0
userMaxQuarantineTestRetries = project.hasProperty('maxQuarantineTestRetries') ? maxQuarantineTestRetries.toInteger() : 0
userMaxQuarantineTestRetryFailures = project.hasProperty('maxQuarantineTestRetryFailures') ? maxQuarantineTestRetryFailures.toInteger() : 0
skipSigning = project.hasProperty('skipSigning') && skipSigning.toBoolean()
shouldSign = !skipSigning && !version.endsWith("SNAPSHOT")
@ -105,7 +102,7 @@ ext {
throw new GradleException("Unexpected value for keepAliveMode property. Expected one of $keepAliveValues, but received: $userKeepAliveModeString")
}
// Used by :test and :quarantinedTest tasks
// Used by :test task
isGithubActions = System.getenv('GITHUB_ACTIONS') != null
// See README.md for details on this option and the reasoning for the default
@ -496,7 +493,7 @@ subprojects {
def testsToExclude = ['**/*Suite.class']
// These two tasks will copy JUnit XML files out of the sub-project's build directory and into
// This task will copy JUnit XML files out of the sub-project's build directory and into
// a top-level build/junit-xml directory. This is necessary to avoid reporting on tests which
// were not run, but instead were restored via FROM-CACHE. See KAFKA-17479 for more details.
def copyTestXml = tasks.register('copyTestXml') {
@ -504,13 +501,17 @@ subprojects {
onlyIf("Project '${project.name}:test' has sources") { ! test.state.noSource }
onlyIf("Task '${project.name}:test' did work") { test.state.didWork }
ext {
output = project.findProperty("kafka.test.xml.output.dir")
}
// Never cache this task
outputs.cacheIf { false }
outputs.upToDateWhen { false }
doLast {
def moduleDirPath = projectToJUnitXmlPath(project)
def dest = rootProject.layout.buildDirectory.dir("junit-xml/${moduleDirPath}/test").get().asFile
def dest = rootProject.layout.buildDirectory.dir("junit-xml/${moduleDirPath}/${output}").get().asFile
println "Copy JUnit XML for ${project.name} to $dest"
ant.copy(todir: "$dest") {
ant.fileset(dir: "${test.reports.junitXml.entryPoint}") {
@ -520,27 +521,6 @@ subprojects {
}
}
def copyQuarantinedTestXml = tasks.register('copyQuarantinedTestXml') {
onlyIf("Environment GITHUB_ACTIONS is set") { isGithubActions }
onlyIf("Project '${project.name}:quarantinedTest' has sources") { ! quarantinedTest.state.noSource }
onlyIf("Task '${project.name}:quarantinedTest' did work") { quarantinedTest.state.didWork }
// Never cache this task
outputs.cacheIf { false }
outputs.upToDateWhen { false }
doLast {
def moduleDirPath = projectToJUnitXmlPath(project)
def dest = rootProject.layout.buildDirectory.dir("junit-xml/${moduleDirPath}/quarantinedTest").get().asFile
println "Copy JUnit XML for ${project.name} to $dest"
ant.copy(todir: "$dest") {
ant.fileset(dir: "${quarantinedTest.reports.junitXml.entryPoint}") {
ant.include(name: "**/*.xml")
}
}
}
}
test {
maxParallelForks = maxTestForks
ignoreFailures = userIgnoreFailures
@ -551,7 +531,9 @@ subprojects {
// KAFKA-17433 Used by deflake.yml github action to repeat individual tests
systemProperty("kafka.cluster.test.repeat", project.findProperty("kafka.cluster.test.repeat"))
systemProperty("kafka.test.catalog.file", project.findProperty("kafka.test.catalog.file"))
systemProperty("kafka.test.run.quarantined", "false")
systemProperty("kafka.test.run.new", project.findProperty("kafka.test.run.new"))
systemProperty("kafka.test.run.flaky", project.findProperty("kafka.test.run.flaky"))
systemProperty("kafka.test.verbose", project.findProperty("kafka.test.verbose"))
testLogging {
events = userTestLoggingEvents ?: testLoggingEvents
@ -565,7 +547,6 @@ subprojects {
useJUnitPlatform {
includeEngines 'junit-jupiter'
excludeTags 'flaky'
}
develocity {
@ -578,45 +559,6 @@ subprojects {
finalizedBy("copyTestXml")
}
task quarantinedTest(type: Test, dependsOn: compileJava) {
// Disable caching and up-to-date for this task. We always want quarantined tests
// to run and never want to cache their results.
outputs.upToDateWhen { false }
outputs.cacheIf { false }
maxParallelForks = maxTestForks
ignoreFailures = userIgnoreFailures
maxHeapSize = defaultMaxHeapSize
jvmArgs = defaultJvmArgs
// KAFKA-17433 Used by deflake.yml github action to repeat individual tests
systemProperty("kafka.cluster.test.repeat", project.findProperty("kafka.cluster.test.repeat"))
systemProperty("kafka.test.catalog.file", project.findProperty("kafka.test.catalog.file"))
systemProperty("kafka.test.run.quarantined", "true")
testLogging {
events = userTestLoggingEvents ?: testLoggingEvents
showStandardStreams = userShowStandardStreams ?: testShowStandardStreams
exceptionFormat = testExceptionFormat
displayGranularity = 0
}
logTestStdout.rehydrate(delegate, owner, this)()
useJUnitPlatform {
includeEngines 'junit-jupiter'
}
develocity {
testRetry {
maxRetries = userMaxQuarantineTestRetries
maxFailures = userMaxQuarantineTestRetryFailures
}
}
finalizedBy("copyQuarantinedTestXml")
}
task integrationTest(type: Test, dependsOn: compileJava) {
maxParallelForks = maxTestForks
ignoreFailures = userIgnoreFailures

View File

@ -19,6 +19,9 @@ plugins {
}
def isGithubActions = System.getenv('GITHUB_ACTIONS') != null
def runNew = ext.find("kafka.test.run.new") == "true"
def runFlaky = ext.find("kafka.test.run.flaky") == "true"
def currentJvm = JavaVersion.current()
develocity {
@ -38,6 +41,13 @@ develocity {
tag "local"
}
tag "JDK$currentJvm"
if (runFlaky) {
tag "flaky"
}
if (runNew) {
tag "new"
}
}
}

View File

@ -34,7 +34,7 @@ public @interface Flaky {
String value();
/**
* Optional comment describing the reason for quarantined.
* Optional comment describing the reason.
*/
String comment() default "";
}

View File

@ -37,19 +37,25 @@ import java.util.Objects;
import java.util.Optional;
import java.util.Set;
public class AutoQuarantinedTestFilter implements Filter<TestDescriptor> {
/**
* A filter that selectively includes tests that are not present in a given test catalog.
* <p>
* The format of the test catalog is a text file where each line has the format of:
* <pre>
* FullyQualifiedClassName "#" MethodName "\n"
* </pre>
* If the catalog is missing, empty, or invalid, this filter will include all tests by default.
*/
public class CatalogTestFilter implements Filter<TestDescriptor> {
private static final Filter<TestDescriptor> INCLUDE_ALL_TESTS = testDescriptor -> FilterResult.included(null);
private static final Filter<TestDescriptor> EXCLUDE_ALL_TESTS = testDescriptor -> FilterResult.excluded(null);
private static final Filter<TestDescriptor> EXCLUDE_ALL_TESTS = testDescriptor -> FilterResult.excluded("missing catalog");
private static final Logger log = LoggerFactory.getLogger(AutoQuarantinedTestFilter.class);
private static final Logger log = LoggerFactory.getLogger(CatalogTestFilter.class);
private final Set<TestAndMethod> testCatalog;
private final boolean includeQuarantined;
AutoQuarantinedTestFilter(Set<TestAndMethod> testCatalog, boolean includeQuarantined) {
CatalogTestFilter(Set<TestAndMethod> testCatalog) {
this.testCatalog = Collections.unmodifiableSet(testCatalog);
this.includeQuarantined = includeQuarantined;
}
@Override
@ -67,54 +73,28 @@ public class AutoQuarantinedTestFilter implements Filter<TestDescriptor> {
MethodSource methodSource = (MethodSource) source;
TestAndMethod testAndMethod = new TestAndMethod(methodSource.getClassName(), methodSource.getMethodName());
if (includeQuarantined) {
if (testCatalog.contains(testAndMethod)) {
return FilterResult.excluded("exclude non-quarantined");
return FilterResult.excluded(null);
} else {
return FilterResult.included("auto-quarantined");
}
} else {
if (testCatalog.contains(testAndMethod)) {
return FilterResult.included(null);
} else {
return FilterResult.excluded("auto-quarantined");
}
}
}
private static Filter<TestDescriptor> defaultFilter(boolean includeQuarantined) {
if (includeQuarantined) {
return EXCLUDE_ALL_TESTS;
} else {
return INCLUDE_ALL_TESTS;
return FilterResult.included("new test");
}
}
/**
* Create a filter that excludes tests that are missing from a given test catalog file.
* If no test catalog is given, the default behavior depends on {@code includeQuarantined}.
* If true, this filter will exclude all tests. If false, this filter will include all tests.
* <p>
* The format of the test catalog is a text file where each line has the format of:
*
* <pre>
* FullyQualifiedClassName "#" MethodName "\n"
* </pre>
*
* @param testCatalogFileName path to a test catalog file
* @param includeQuarantined true if this filter should include only the auto-quarantined tests
*/
public static Filter<TestDescriptor> create(String testCatalogFileName, boolean includeQuarantined) {
public static Filter<TestDescriptor> create(String testCatalogFileName) {
if (testCatalogFileName == null || testCatalogFileName.isEmpty()) {
log.debug("No test catalog specified, will not quarantine any recently added tests.");
return defaultFilter(includeQuarantined);
log.debug("No test catalog specified, will not select any tests with this filter.");
return EXCLUDE_ALL_TESTS;
}
Path path = Paths.get(testCatalogFileName);
log.debug("Loading test catalog file {}.", path);
if (!Files.exists(path)) {
log.error("Test catalog file {} does not exist, will not quarantine any recently added tests.", path);
return defaultFilter(includeQuarantined);
log.error("Test catalog file {} does not exist, will not select any tests with this filter.", path);
return EXCLUDE_ALL_TESTS;
}
Set<TestAndMethod> allTests = new HashSet<>();
@ -126,16 +106,16 @@ public class AutoQuarantinedTestFilter implements Filter<TestDescriptor> {
line = reader.readLine();
}
} catch (IOException e) {
log.error("Error while reading test catalog file, will not quarantine any recently added tests.", e);
return defaultFilter(includeQuarantined);
log.error("Error while reading test catalog file, will not select any tests with this filter.", e);
return EXCLUDE_ALL_TESTS;
}
if (allTests.isEmpty()) {
log.error("Loaded an empty test catalog, will not quarantine any recently added tests.");
return defaultFilter(includeQuarantined);
log.error("Loaded an empty test catalog, will not select any tests with this filter.");
return EXCLUDE_ALL_TESTS;
} else {
log.debug("Loaded {} test methods from test catalog file {}.", allTests.size(), path);
return new AutoQuarantinedTestFilter(allTests, includeQuarantined);
return new CatalogTestFilter(allTests);
}
}

View File

@ -0,0 +1,152 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.test.junit;
import org.junit.platform.engine.Filter;
import org.junit.platform.engine.FilterResult;
import org.junit.platform.engine.TestDescriptor;
import org.junit.platform.engine.TestTag;
import org.junit.platform.launcher.PostDiscoveryFilter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A JUnit test filter that customizes which tests are run as part of different CI jobs.
* <p>
* Four system properties control the behavior of this filter:
* <ul>
* <li>kafka.test.run.new: only run newly added tests</li>
* <li>kafka.test.run.flaky: only run tests tagged with "flaky"</li>
* <li>kafka.test.catalog.file: location of a test catalog file</li>
* <li>kafka.test.verbose: enable additional log output</li>
* </ul>
* The test catalog is how we test for "new"-ness. During each CI build of "trunk", we
* produce a test catalog that includes every test run as part of the build. This is
* stored in the "test-catalog" branch of the repo. By loading the test catalog from a
* prior point in time, we can easily determine which tests were added within a certain
* time frame.
* <p>
* If no test catalog is given, or it is empty/invalid, "kafka.test.run.new" will not
* select any tests.
* <p>
* This filter is registered with JUnit using SPI. The test-common-runtime module
* includes a META-INF/services/org.junit.platform.launcher.PostDiscoveryFilter
* service file which registers this class.
*/
public class KafkaPostDiscoveryFilter implements PostDiscoveryFilter {
private static final TestTag FLAKY_TEST_TAG = TestTag.create("flaky");
public static final String RUN_NEW_PROP = "kafka.test.run.new";
public static final String RUN_FLAKY_PROP = "kafka.test.run.flaky";
public static final String CATALOG_FILE_PROP = "kafka.test.catalog.file";
public static final String VERBOSE_PROP = "kafka.test.verbose";
private static final Logger log = LoggerFactory.getLogger(KafkaPostDiscoveryFilter.class);
private final Filter<TestDescriptor> catalogFilter;
private final boolean runNew;
private final boolean runFlaky;
private final boolean verbose;
// No-arg public constructor for SPI
@SuppressWarnings("unused")
public KafkaPostDiscoveryFilter() {
runNew = System.getProperty(RUN_NEW_PROP, "false")
.equalsIgnoreCase("true");
runFlaky = System.getProperty(RUN_FLAKY_PROP, "false")
.equalsIgnoreCase("true");
verbose = System.getProperty(VERBOSE_PROP, "false")
.equalsIgnoreCase("true");
String testCatalogFileName = System.getProperty(CATALOG_FILE_PROP);
catalogFilter = CatalogTestFilter.create(testCatalogFileName);
}
// Visible for tests
KafkaPostDiscoveryFilter(
Filter<TestDescriptor> catalogFilter,
boolean runNew,
boolean runFlaky
) {
this.catalogFilter = catalogFilter;
this.runNew = runNew;
this.runFlaky = runFlaky;
this.verbose = false;
}
@Override
public FilterResult apply(TestDescriptor testDescriptor) {
boolean hasFlakyTag = testDescriptor.getTags().contains(FLAKY_TEST_TAG);
FilterResult catalogFilterResult = catalogFilter.apply(testDescriptor);
final FilterResult result;
if (runFlaky && runNew) {
// If selecting flaky and new tests, we first check for explicitly flaky tests.
// If no flaky tag is set, defer to the catalog filter.
if (hasFlakyTag) {
result = FilterResult.included("flaky");
} else {
result = catalogFilterResult;
}
} else if (runFlaky) {
// If selecting only flaky, just check the tag. Don't use the catalog filter
if (hasFlakyTag) {
result = FilterResult.included("flaky");
} else {
result = FilterResult.excluded("non-flaky");
}
} else if (runNew) {
// Running only new tests (per the catalog filter)
if (catalogFilterResult.included() && hasFlakyTag) {
result = FilterResult.excluded("flaky");
} else {
result = catalogFilterResult;
}
} else {
// The main test suite
if (hasFlakyTag) {
result = FilterResult.excluded("flaky");
} else if (catalogFilterResult.included()) {
result = FilterResult.excluded("new");
} else {
result = FilterResult.included(null);
}
}
if (verbose) {
log.info(
"{} Test '{}' with reason '{}'. Flaky tag is {}, catalog filter has {} this test.",
result.included() ? "Including" : "Excluding",
testDescriptor.getDisplayName(),
result.getReason().orElse("null"),
hasFlakyTag ? "present" : "not present",
catalogFilterResult.included() ? "included" : "not included"
);
}
return result;
}
}

View File

@ -1,87 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.test.junit;
import org.junit.platform.engine.Filter;
import org.junit.platform.engine.FilterResult;
import org.junit.platform.engine.TestDescriptor;
import org.junit.platform.engine.TestTag;
import org.junit.platform.launcher.PostDiscoveryFilter;
/**
* A JUnit test filter which can include or exclude discovered tests before
* they are sent off to the test engine for execution. The behavior of this
* filter is controlled by the system property "kafka.test.run.quarantined".
* If the property is set to "true", then only auto-quarantined and explicitly
* {@code @Flaky} tests will be included. If the property is set to "false", then
* only non-quarantined tests will be run.
* <p>
* This filter is registered with JUnit using SPI. The test-common-runtime module
* includes a META-INF/services/org.junit.platform.launcher.PostDiscoveryFilter
* service file which registers this class.
*/
public class QuarantinedPostDiscoveryFilter implements PostDiscoveryFilter {
private static final TestTag FLAKY_TEST_TAG = TestTag.create("flaky");
public static final String RUN_QUARANTINED_PROP = "kafka.test.run.quarantined";
public static final String CATALOG_FILE_PROP = "kafka.test.catalog.file";
private final Filter<TestDescriptor> autoQuarantinedFilter;
private final boolean runQuarantined;
// No-arg public constructor for SPI
@SuppressWarnings("unused")
public QuarantinedPostDiscoveryFilter() {
runQuarantined = System.getProperty(RUN_QUARANTINED_PROP, "false")
.equalsIgnoreCase("true");
String testCatalogFileName = System.getProperty(CATALOG_FILE_PROP);
autoQuarantinedFilter = AutoQuarantinedTestFilter.create(testCatalogFileName, runQuarantined);
}
// Visible for tests
QuarantinedPostDiscoveryFilter(Filter<TestDescriptor> autoQuarantinedFilter, boolean runQuarantined) {
this.autoQuarantinedFilter = autoQuarantinedFilter;
this.runQuarantined = runQuarantined;
}
@Override
public FilterResult apply(TestDescriptor testDescriptor) {
boolean hasTag = testDescriptor.getTags().contains(FLAKY_TEST_TAG);
FilterResult result = autoQuarantinedFilter.apply(testDescriptor);
if (runQuarantined) {
// If selecting quarantined tests, we first check for explicitly flaky tests. If no
// flaky tag is set, check the auto-quarantined filter. In the case of a missing test
// catalog, the auto-quarantined filter will exclude all tests.
if (hasTag) {
return FilterResult.included("flaky");
} else {
return result;
}
} else {
// If selecting non-quarantined tests, we exclude auto-quarantined tests and flaky tests
if (result.included() && hasTag) {
return FilterResult.excluded("flaky");
} else {
return result;
}
}
}
}

View File

@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
org.apache.kafka.common.test.junit.QuarantinedPostDiscoveryFilter
org.apache.kafka.common.test.junit.KafkaPostDiscoveryFilter

View File

@ -31,10 +31,10 @@ import java.util.List;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class AutoQuarantinedTestFilterTest {
public class CatalogTestFilterTest {
private TestDescriptor descriptor(String className, String methodName) {
return new QuarantinedPostDiscoveryFilterTest.MockTestDescriptor(className, methodName);
return new KafkaPostDiscoveryFilterTest.MockTestDescriptor(className, methodName);
}
@Test
@ -46,13 +46,7 @@ public class AutoQuarantinedTestFilterTest {
lines.add("o.a.k.Spam#testEggs");
Files.write(catalog, lines);
Filter<TestDescriptor> filter = AutoQuarantinedTestFilter.create(catalog.toString(), false);
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).included());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testNew")).excluded());
filter = AutoQuarantinedTestFilter.create(catalog.toString(), true);
Filter<TestDescriptor> filter = CatalogTestFilter.create(catalog.toString());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).excluded());
@ -64,19 +58,19 @@ public class AutoQuarantinedTestFilterTest {
Path catalog = tempDir.resolve("catalog.txt");
Files.write(catalog, Collections.emptyList());
Filter<TestDescriptor> filter = AutoQuarantinedTestFilter.create(catalog.toString(), false);
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).included());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testNew")).included());
Filter<TestDescriptor> filter = CatalogTestFilter.create(catalog.toString());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testNew")).excluded());
}
@Test
public void testMissingCatalog() {
Filter<TestDescriptor> filter = AutoQuarantinedTestFilter.create("does-not-exist.txt", false);
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).included());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).included());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testNew")).included());
Filter<TestDescriptor> filter = CatalogTestFilter.create("does-not-exist.txt");
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar1")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Foo", "testBar2")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testEggs")).excluded());
assertTrue(filter.apply(descriptor("o.a.k.Spam", "testNew")).excluded());
}
}

View File

@ -31,7 +31,7 @@ import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class QuarantinedPostDiscoveryFilterTest {
public class KafkaPostDiscoveryFilterTest {
static class MockTestDescriptor implements TestDescriptor {
@ -105,71 +105,102 @@ public class QuarantinedPostDiscoveryFilterTest {
}
}
QuarantinedPostDiscoveryFilter setupFilter(boolean runQuarantined) {
Set<AutoQuarantinedTestFilter.TestAndMethod> testCatalog = new HashSet<>();
testCatalog.add(new AutoQuarantinedTestFilter.TestAndMethod("o.a.k.Foo", "testBar1"));
testCatalog.add(new AutoQuarantinedTestFilter.TestAndMethod("o.a.k.Foo", "testBar2"));
testCatalog.add(new AutoQuarantinedTestFilter.TestAndMethod("o.a.k.Spam", "testEggs"));
KafkaPostDiscoveryFilter setupFilter(boolean runNew, boolean runFlaky) {
Set<CatalogTestFilter.TestAndMethod> testCatalog = new HashSet<>();
testCatalog.add(new CatalogTestFilter.TestAndMethod("o.a.k.Foo", "testBar1"));
testCatalog.add(new CatalogTestFilter.TestAndMethod("o.a.k.Foo", "testBar2"));
testCatalog.add(new CatalogTestFilter.TestAndMethod("o.a.k.Spam", "testEggs"));
AutoQuarantinedTestFilter autoQuarantinedTestFilter = new AutoQuarantinedTestFilter(testCatalog, runQuarantined);
return new QuarantinedPostDiscoveryFilter(autoQuarantinedTestFilter, runQuarantined);
CatalogTestFilter catalogTestFilter = new CatalogTestFilter(testCatalog);
return new KafkaPostDiscoveryFilter(catalogTestFilter, runNew, runFlaky);
}
@Test
public void testQuarantinedExistingTestNonFlaky() {
QuarantinedPostDiscoveryFilter filter = setupFilter(true);
public void testExcludeExistingNonFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(false, true);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).excluded());
}
@Test
public void testQuarantinedExistingTestFlaky() {
QuarantinedPostDiscoveryFilter filter = setupFilter(true);
public void testIncludeExistingFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(false, true);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1", "flaky")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2", "flaky")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs", "flaky", "integration")).included());
}
@Test
public void testQuarantinedNewTest() {
QuarantinedPostDiscoveryFilter filter = setupFilter(true);
public void testIncludeAutoQuarantinedAndFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(true, true);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar3")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggz", "flaky")).included());
}
@Test
public void testExistingTestNonFlaky() {
QuarantinedPostDiscoveryFilter filter = setupFilter(false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).included());
public void testIncludeAutoQuarantinedNoFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(true, false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar3")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggz", "flaky")).excluded());
}
@Test
public void testExcludeFlakyAndNew() {
KafkaPostDiscoveryFilter filter = setupFilter(false, false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar3")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggz", "flaky")).excluded());
}
@Test
public void testExistingTestFlaky() {
QuarantinedPostDiscoveryFilter filter = setupFilter(false);
public void testExcludeFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(false, false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs", "flaky", "integration")).excluded());
}
@Test
public void testNewTest() {
QuarantinedPostDiscoveryFilter filter = setupFilter(false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar3")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggz", "flaky")).excluded());
public void testExistingTestNonFlaky() {
KafkaPostDiscoveryFilter filter = setupFilter(false, false);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).included());
}
@Test
public void testNoCatalogQuarantinedTest() {
QuarantinedPostDiscoveryFilter filter = new QuarantinedPostDiscoveryFilter(
AutoQuarantinedTestFilter.create(null, true),
true
public void testNoCatalogRunFlakyTests() {
KafkaPostDiscoveryFilter filter = new KafkaPostDiscoveryFilter(
CatalogTestFilter.create(null),
false, true
);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1", "flaky")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2", "flaky")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).excluded());
}
@Test
public void testNoCatalogRunNewTest() {
KafkaPostDiscoveryFilter filter = new KafkaPostDiscoveryFilter(
CatalogTestFilter.create(null),
true, false
);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testNew")).excluded(),
"Should not select a new test because there is no catalog loaded");
}
@Test
public void testNoCatalogRunMainTests() {
KafkaPostDiscoveryFilter filter = new KafkaPostDiscoveryFilter(
CatalogTestFilter.create(null),
false, false
);
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar1", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Foo", "testBar2", "flaky")).excluded());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testEggs")).included());
assertTrue(filter.apply(new MockTestDescriptor("o.a.k.Spam", "testNew")).included());
}
}