Best Kotest code snippet using io.kotest.runner.junit.platform.results
Source: JUnitTestEngineListener.kt
```kotlin
// ...

// contains a mapping of junit TestDescriptor's, so we can find previously registered tests
private val descriptors = mutableMapOf<Descriptor, TestDescriptor>()

private var started = false

private val startedTests = mutableSetOf<Descriptor.TestDescriptor>()

// the root tests are our entry point when outputting results
private val rootTests = mutableListOf<TestCase>()

private var failOnIgnoredTests = false

private val children = mutableMapOf<Descriptor, MutableList<TestCase>>()
private val results = mutableMapOf<Descriptor, TestResult>()
private val dummies = hashSetOf<String>()

override suspend fun engineStarted() {
   logger.log { Pair(null, "Engine started") }
   listener.executionStarted(root)
}

override suspend fun engineInitialized(context: EngineContext) {
   failOnIgnoredTests = context.configuration.failOnIgnoredTests
   formatter = getDisplayNameFormatter(context.configuration.registry, context.configuration)
}

override suspend fun engineFinished(t: List<Throwable>) {
   logger.log { Pair(null, "Engine finished; throwables=[${t}]") }
   registerExceptionPlaceholders(t)
   val result = if (failOnIgnoredTests && results.values.any { it.isIgnored }) {
      TestExecutionResult.failed(RuntimeException("Build contained ignored test"))
   } else {
      TestExecutionResult.successful()
   }
   logger.log { Pair(null, "Notifying junit that engine completed $root") }
   listener.executionFinished(root, result)
}

override suspend fun specStarted(kclass: KClass<*>) {
   markSpecStarted(kclass)
}

override suspend fun specFinished(kclass: KClass<*>, result: TestResult) {
   val t = result.errorOrNull
   when {
      // if we have a spec error before we even started the spec, we will start the spec, add a placeholder
      // to hold the error, mark that test as failed, and then fail the spec as well
      t != null && !started -> {
         val descriptor = markSpecStarted(kclass)
         addPlaceholderTest(descriptor, t, kclass)
         logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
         listener.executionFinished(descriptor, TestExecutionResult.failed(t))
      }
      // if we had an error in the spec, and we had no tests, we'll add the dummy and return
      t != null && rootTests.isEmpty() -> {
         val descriptor = getSpecDescriptor(kclass)
         addPlaceholderTest(descriptor, t, kclass)
         logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
         listener.executionFinished(descriptor, TestExecutionResult.failed(t))
      }
      else -> {
         val descriptor = getSpecDescriptor(kclass)
         val result = when (t) {
            null -> TestExecutionResult.successful()
            else -> {
               addPlaceholderTest(descriptor, t, kclass)
               TestExecutionResult.successful()
            }
         }
         logger.log { Pair(kclass.bestName(), "executionFinished: $descriptor") }
         listener.executionFinished(descriptor, result)
      }
   }
   reset()
}

override suspend fun specIgnored(kclass: KClass<*>, reason: String?) {
   logger.log { Pair(kclass.bestName(), "Spec is being flagged as ignored") }
   listener.executionSkipped(getSpecDescriptor(kclass), reason)
}

private fun markSpecStarted(kclass: KClass<*>): TestDescriptor {
   return try {
      val descriptor = getSpecDescriptor(root, kclass.toDescriptor(), formatter.format(kclass))
      logger.log { Pair(kclass.bestName(), "Registering dynamic spec $descriptor") }
      listener.dynamicTestRegistered(descriptor)
      logger.log { Pair(kclass.bestName(), "Spec executionStarted $descriptor") }
      listener.executionStarted(descriptor)
      started = true
      descriptor
   } catch (t: Throwable) {
      logger.log { Pair(kclass.bestName(), "Error in JUnit Platform listener $t") }
      throw t
   }
}

private fun reset() {
   rootTests.clear()
   children.clear()
   results.clear()
   started = false
   descriptors.clear()
   startedTests.clear()
}

private fun addPlaceholderTest(parent: TestDescriptor, t: Throwable, kclass: KClass<*>) {
   val (name, cause) = ExtensionExceptionExtractor.resolve(t)
   val descriptor = createTestDescriptor(
      parent.uniqueId.append(Segment.Test.value, name),
      name,
      TestDescriptor.Type.TEST,
      ClassSource.from(kclass.java),
      false
   )
   parent.addChild(descriptor)
   listener.dynamicTestRegistered(descriptor)
   listener.executionStarted(descriptor)
   listener.executionFinished(descriptor, TestResult.Error(Duration.ZERO, cause).toTestExecutionResult())
}

override suspend fun testStarted(testCase: TestCase) {
   // depending on the test type, we may want to wait to notify junit, because gradle doesn't work
   // properly with the junit test types. Ideally, we'd just set everything to CONTAINER_AND_TEST, which is
   // supposed to mean a test can contain other tests as well as being a test itself, which is exactly how
   // Kotest views tests, but unfortunately it doesn't work properly.
   //
   // Another approach is to wait until the spec finishes to see which tests contain children and which
   // don't, and set the test type appropriately; but junit doesn't give us a way to specify test duration
   // (instead it just calculates it itself from the time between marking a test as started and marking
   // it as finished), so this approach works but ends up reporting all tests as 0ms.
   //
   // So the approach we take is to use the TestType from the test definition, unless it is dynamic;
   // for dynamic tests we will calculate it later and accept the 0ms drawback.
   logger.log { Pair(testCase.name.testName, "test started") }
   if (testCase.parent == null) rootTests.add(testCase)
   addChild(testCase)
   when (testCase.type) {
      TestType.Container -> startTestIfNotStarted(testCase, TestDescriptor.Type.CONTAINER)
      TestType.Test -> startTestIfNotStarted(testCase, TestDescriptor.Type.TEST)
      TestType.Dynamic -> Unit
   }
}

// this test can be output now that it has completed, as we have everything we need to complete it
override suspend fun testFinished(testCase: TestCase, result: TestResult) {
   logger.log { Pair(testCase.name.testName, "test finished $result") }
   results[testCase.descriptor] = result
   val descriptor = getOrCreateTestDescriptor(testCase, null)
   // we need to ensure all parents have been started first
   startParents(testCase)
   startTestIfNotStarted(testCase, null)
   logger.log { Pair(testCase.name.testName, "executionFinished: $descriptor") }
   listener.executionFinished(descriptor, result.toTestExecutionResult())
}

override suspend fun testIgnored(testCase: TestCase, reason: String?) {
   logger.log { Pair(testCase.name.testName, "test ignored $reason") }
   if (testCase.parent == null) rootTests.add(testCase)
   addChild(testCase)
   results[testCase.descriptor] = TestResult.Ignored(reason)
   // we need to ensure all parents have been started first
   startParents(testCase)
   val descriptor = getOrCreateTestDescriptor(testCase, TestDescriptor.Type.TEST)
   logger.log { Pair(testCase.name.testName, "Registering dynamic test: $descriptor") }
   listener.dynamicTestRegistered(descriptor)
   logger.log { Pair(testCase.name.testName, "executionSkipped: $descriptor") }
   listener.executionSkipped(descriptor, reason)
}

private fun addChild(testCase: TestCase) {
   children.getOrPut(testCase.descriptor.parent) { mutableListOf() }.add(testCase)
}

private fun startParents(testCase: TestCase) {
   val parent = testCase.parent
   if (parent != null) {
      // ...
```
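Both testFinished and addPlaceholderTest lean on an extension that converts Kotest's TestResult into JUnit's TestExecutionResult. The real toTestExecutionResult() lives inside the Kotest runner; the sketch below is only a rough approximation of what the mapping amounts to, using just the TestResult members visible in the listing above (isIgnored, errorOrNull):

```kotlin
import io.kotest.core.test.TestResult
import org.junit.platform.engine.TestExecutionResult

// Illustrative sketch, not Kotest's actual implementation: success maps to
// successful(), any failure or error maps to failed(cause), and ignored
// results never reach this point because the listener reports them through
// executionSkipped instead of executionFinished.
fun TestResult.toTestExecutionResultSketch(): TestExecutionResult {
   val t = errorOrNull
   return when {
      isIgnored -> error("ignored results go through executionSkipped, not executionFinished")
      t != null -> TestExecutionResult.failed(t)
      else -> TestExecutionResult.successful()
   }
}
```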
Source: build.gradle.kts
```kotlin
// ...
    version = "2.8.1"
}

val allureAggregatedReport by tasks.creating(AllureReport::class) {
    doFirst {
        val results = mutableListOf<File>()
        subprojects.stream().forEach {
            it.allure.resultsDir?.let { dir ->
                results.add(dir)
            }
        }
        resultsDirs = results
    }
}

val allureAggregatedServe by tasks.creating(AllureServe::class) {
    doFirst {
        val results = mutableListOf<File>()
        subprojects.stream().forEach {
            it.allure.resultsDir?.let { dir ->
                results.add(dir)
            }
        }
        resultsDirs = results
    }
}
```
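Both aggregation tasks collect the same directories, so the lookup can be pulled into a shared helper. A minimal sketch under the same assumptions as the build script above (each subproject's allure extension exposes a nullable resultsDir, and the report tasks accept a resultsDirs list); the helper name is illustrative:

```kotlin
// Illustrative helper: gathers every subproject's Allure results directory,
// skipping subprojects that have produced no results (nullable resultsDir).
fun aggregatedAllureResults(): List<File> =
    subprojects.mapNotNull { it.allure.resultsDir }

val allureAggregatedReport by tasks.creating(AllureReport::class) {
    // resolving inside doFirst keeps the original behaviour of reading
    // resultsDir at execution time, after subprojects are configured
    doFirst { resultsDirs = aggregatedAllureResults() }
}
```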
Using AI Code Generation
```kotlin
import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe
import io.kotest.runner.junit.platform.KotestEngineLauncher
import org.junit.platform.engine.discovery.DiscoverySelectors
import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder
import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder.request
import org.junit.platform.launcher.core.LauncherFactory
import org.junit.platform.launcher.listeners.SummaryGeneratingListener
import org.junit.platform.launcher.listeners.TestExecutionSummary
import org.junit.platform.launcher.listeners.TestExecutionSummary.Failure
```
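The import list above gestures at driving Kotest through the standard JUnit Platform Launcher API rather than Kotest's own KotestEngineLauncher. A minimal runnable sketch of that idea (MySpec is an illustrative spec name; it assumes kotest-runner-junit5 is on the classpath so the Kotest engine is discovered):

```kotlin
import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe
import org.junit.platform.engine.discovery.DiscoverySelectors.selectClass
import org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder.request
import org.junit.platform.launcher.core.LauncherFactory
import org.junit.platform.launcher.listeners.SummaryGeneratingListener
import java.io.PrintWriter

// A trivial spec for the launcher to discover.
class MySpec : FunSpec({
   test("addition") {
      1 + 1 shouldBe 2
   }
})

fun main() {
   // Build a discovery request selecting the spec class; any engine on the
   // classpath (here, Kotest's JUnit Platform engine) can claim it.
   val discoveryRequest = request()
      .selectors(selectClass(MySpec::class.java))
      .build()

   // SummaryGeneratingListener aggregates results into a TestExecutionSummary.
   val listener = SummaryGeneratingListener()
   LauncherFactory.create().execute(discoveryRequest, listener)

   listener.summary.printTo(PrintWriter(System.out))
}
```

The summary's failures collection (TestExecutionSummary.Failure, imported above) can then be inspected programmatically, which is the usual reason to embed the launcher instead of relying on the build tool.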
Using AI Code Generation
```kotlin
import io.kotest.core.spec.Spec
import io.kotest.core.spec.style.FunSpec
import io.kotest.core.spec.style.scopes.ContainerScope
import io.kotest.core.spec.style.scopes.RootScope
import io.kotest.matchers.shouldBe
import io.kotest.runner.junit.platform.KotestEngineLauncher
```
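In day-to-day Kotest these scope types are rarely imported by hand; they are the receiver types that the spec-style DSL provides implicitly. A minimal sketch (ScopesSpec is an illustrative name) showing where the root scope, a container scope, and a test body appear in a FunSpec:

```kotlin
import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe

// The lambda passed to FunSpec runs against a root scope; context() opens a
// container scope, and test() registers a leaf test within it.
class ScopesSpec : FunSpec({
   context("a container scope") {
      test("a leaf test inside the container") {
         (2 + 2) shouldBe 4
      }
   }
})
```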