id "com.diffplug.gradle.spotless" version "3.28.0"
id 'com.github.johnrengelman.shadow' version '4.0.3'
id 'com.install4j.gradle' version '10.0.3'
- id 'com.dorongold.task-tree' version '2.1.0' // only needed to display task dependency tree with gradle task1 [task2 ...] taskTree
+ id 'com.dorongold.task-tree' version '2.1.1' // only needed to display task dependency tree with gradle task1 [task2 ...] taskTree
id 'com.palantir.git-version' version '0.13.0' apply false
}
// JBP->BS should the print statement in doFirst refer to compile_target_compatibility ?
sourceCompatibility = compile_source_compatibility
targetCompatibility = compile_target_compatibility
- options.compilerArgs = additional_compiler_args
+ options.compilerArgs += additional_compiler_args
options.encoding = "UTF-8"
doFirst {
print ("Setting target compatibility to "+compile_target_compatibility+"\n")
compileTestJava {
sourceCompatibility = compile_source_compatibility
targetCompatibility = compile_target_compatibility
- options.compilerArgs = additional_compiler_args
+ options.compilerArgs += additional_compiler_args
doFirst {
print ("Setting target compatibility to "+targetCompatibility+"\n")
}
ext.testsFailed = false
/* testTask0 is the main test task */
task testTask0(type: Test) {
+ group = "Verification"
+ description = "The main test task. Runs all non-testTaskN-labelled tests (unless excluded)"
useTestNG() {
includeGroups testng_groups.split(",")
excludeGroups testng_excluded_groups.split(",")
/* separated tests */
task testTask1(type: Test) {
+ group = "Verification"
+ description = "Tests that need to be isolated from the main test run"
useTestNG() {
includeGroups name
excludeGroups testng_excluded_groups.split(",")
- tasks.withType(Test).matching {it.name.startsWith("testTask") && it.name != name}.all {t -> excludeGroups t.name}
preserveOrder true
useDefaultListeners=true
}
import groovy.time.TimeCategory
import org.gradle.api.tasks.testing.logging.TestExceptionFormat
import org.gradle.api.tasks.testing.logging.TestLogEvent
-
rootProject.ext.testsResults = [] // Container for tests summaries
-allprojects { project ->
- tasks.withType(Test).matching {t -> t.getName().startsWith("testTask")}.all { testTask ->
+tasks.withType(Test).matching {t -> t.getName().startsWith("testTask")}.all { testTask ->
- // run main tests first
-// if (!testTask.name.equals("testTask0"))
-// testTask.mustRunAfter testTask0
+ // from original test task
+ if (useClover) {
+ dependsOn cloverClasses
+ } else { //?
+ dependsOn testClasses //?
+ }
- testTask.testLogging { logging ->
- events TestLogEvent.FAILED,
- TestLogEvent.SKIPPED,
- TestLogEvent.STANDARD_OUT,
- TestLogEvent.STANDARD_ERROR
+ // run main tests first
+ if (!testTask.name.equals("testTask0"))
+ testTask.mustRunAfter "testTask0"
- exceptionFormat TestExceptionFormat.FULL
- showExceptions true
- showCauses true
- showStackTraces true
- }
+ testTask.testLogging { logging ->
+ events TestLogEvent.FAILED,
+ TestLogEvent.SKIPPED,
+ TestLogEvent.STANDARD_OUT,
+ TestLogEvent.STANDARD_ERROR
- ignoreFailures = true // Always try to run all tests for all modules
+ exceptionFormat TestExceptionFormat.FULL
+ showExceptions true
+ showCauses true
+ showStackTraces true
+ }
- afterSuite { desc, result ->
+ ignoreFailures = true // Always try to run all tests for all modules
- if (desc.parent) return // Only summarize results for whole modules
+ afterSuite { desc, result ->
- String summary = "${testTask.project.name}:${testTask.name} results: ${result.resultType} " +
- "(" +
- "${result.testCount} tests, " +
- "${result.successfulTestCount} successes, " +
- "${result.failedTestCount} failures, " +
- "${result.skippedTestCount} skipped" +
- ") " +
- "in ${TimeCategory.minus(new Date(result.endTime), new Date(result.startTime))}" +
- "\n" +
- "Report file: ${testTask.reports.html.entryPoint}"
+ if (desc.parent) return // Only summarize results for whole modules
- // Add reports in `testsResults`, keep failed suites at the end
- if (result.resultType == TestResult.ResultType.SUCCESS) {
- rootProject.ext.testsResults.add(0, summary)
- } else {
- rootProject.ext.testsResults += summary
- }
- if (result.resultType == TestResult.ResultType.FAILURE) {
- testsFailed = true
- }
- }
+ def summary = [testTask.project.name, testTask.name, result, TimeCategory.minus(new Date(result.endTime), new Date(result.startTime)), testTask.reports.html.entryPoint]
- // from original test task
- if (useClover) {
- dependsOn cloverClasses
- } else { //?
- dependsOn compileJava //?
+ // Add reports in `testsResults`, keep failed suites at the end
+ if (result.resultType == TestResult.ResultType.SUCCESS) {
+ rootProject.ext.testsResults.add(0, summary)
+ } else {
+ rootProject.ext.testsResults.add(summary)
}
- maxHeapSize = "1024m"
- workingDir = jalviewDir
- def testLaf = project.findProperty("test_laf")
- if (testLaf != null) {
- println("Setting Test LaF to '${testLaf}'")
- systemProperty "laf", testLaf
- }
- def testHiDPIScale = project.findProperty("test_HiDPIScale")
- if (testHiDPIScale != null) {
- println("Setting Test HiDPI Scale to '${testHiDPIScale}'")
- systemProperty "sun.java2d.uiScale", testHiDPIScale
+ if (result.resultType == TestResult.ResultType.FAILURE) {
+ testsFailed = true
}
- sourceCompatibility = compile_source_compatibility
- targetCompatibility = compile_target_compatibility
- jvmArgs += additional_compiler_args
+ }
- doFirst {
- if (useClover) {
- println("Running tests " + (useClover?"WITH":"WITHOUT") + " clover")
- }
- }
+ // from original test task
+ maxHeapSize = "1024m"
+ workingDir = jalviewDir
+ def testLaf = project.findProperty("test_laf")
+ if (testLaf != null) {
+ println("Setting Test LaF to '${testLaf}'")
+ systemProperty "laf", testLaf
+ }
+ def testHiDPIScale = project.findProperty("test_HiDPIScale")
+ if (testHiDPIScale != null) {
+ println("Setting Test HiDPI Scale to '${testHiDPIScale}'")
+ systemProperty "sun.java2d.uiScale", testHiDPIScale
+ }
+ sourceCompatibility = compile_source_compatibility
+ targetCompatibility = compile_target_compatibility
+ jvmArgs += additional_compiler_args
+
+ doFirst {
+ if (useClover) {
+ println("Running tests " + (useClover?"WITH":"WITHOUT") + " clover")
+ }
}
+
}
gradle.buildFinished {
if (!allResults.isEmpty()) {
printResults allResults
+ allResults.each {r ->
+ if (r[2].resultType == TestResult.ResultType.FAILURE)
+        throw new GradleException("Failed tests (buildFinished)!")
+ }
}
}
private static void printResults(allResults) {
- def maxLength = allResults*.readLines().flatten().collect { it.length() }.max()
+ // styler from https://stackoverflow.com/a/56139852
+ def styler = 'black red green yellow blue magenta cyan white'.split().toList().withIndex(30).collectEntries { key, val -> [(key) : { "\033[${val}m${it}\033[0m" }] }
+
+ def maxLength = 0
+ def failedTests = false
+ def summaryLines = []
+ allResults.each {
+ def projectName = it[0]
+ def taskName = it[1]
+ def result = it[2]
+ def time = it[3]
+ def report = it[4]
+ def colour = 'black'
+ switch(result.resultType) {
+ case TestResult.ResultType.SUCCESS:
+ colour = 'green'
+ break;
+ case TestResult.ResultType.FAILURE:
+ colour = 'red'
+ failedTests = true
+ break;
+ default:
+ colour = 'yellow'
+ break;
+ }
+ def summaryCol = "${projectName}:${taskName} results: ${styler[colour](result.resultType)} (" +
+ "${result.testCount} tests, " +
+ (result.successfulTestCount > 0 ? "${styler['green'](result.successfulTestCount)} successes" : "${result.successfulTestCount} successes") + ", " +
+ (result.failedTestCount > 0 ? "${styler['red'](result.failedTestCount)} failures" : "${result.failedTestCount} failures") + ", " +
+ "${result.skippedTestCount} skipped" +
+ ") " + "in ${time}"
+ def summaryPlain = "${projectName}:${taskName} results: ${result.resultType} (" +
+ "${result.testCount} tests, " +
+ "${result.successfulTestCount} successes, " +
+ "${result.failedTestCount} failures, " +
+ "${result.skippedTestCount} skipped" +
+ ") " + "in ${time}"
+ def reportLine = "Report file: ${report}"
+ def ls = summaryPlain.length()
+ def lr = reportLine.length()
+ def m = [ls, lr].max()
+ if (m > maxLength)
+ maxLength = m
+ def info = [ls, summaryCol, reportLine]
+ summaryLines.add(info)
+ }
println "┌${"${"─" * maxLength}"}┐"
- println allResults.collect {
- it.readLines().collect {
- "│" + it + " " * (maxLength - it.length()) + "│"
- }.join("\n")
- }.join("\n├${"${"─" * maxLength}"}┤\n")
+ println summaryLines.collect {info ->
+ def ls = info[0]
+ def summary = info[1]
+ def report = info[2]
+
+ return "│" + summary + " " * (maxLength - ls) + "│" + "\n" +
+ "│" + report + " " * (maxLength - report.length()) + "│"
+
+ }.join("\n├${"${"─" * maxLength}"}┤\n")
println "└${"${"─" * maxLength}"}┘"
}
/* END of test tasks results summary */
task verifyTestStatus {
+ group = "Verification"
+ description = "Task that FAILs the build if any tests failed"
doLast {
if (testsFailed) {
throw new GradleException("There were failing tests!")
}
test {
+ // from original test task
+ if (useClover) {
+ dependsOn cloverClasses
+ } else { //?
+ dependsOn testClasses
+ }
dependsOn tasks.withType(Test).matching {t -> t.getName().startsWith("testTask")}
finalizedBy verifyTestStatus
+
// not running tests in this task
exclude "**/*"
}