Best Kotest code snippet using io.kotest.core.spec.Spec.rootTests
Source:JUnitTestEngineListener.kt
...
private val descriptors = mutableMapOf<Descriptor, TestDescriptor>()
private var started = false
private val startedTests = mutableSetOf<Descriptor.TestDescriptor>()

// the root tests are our entry point when outputting results
private val rootTests = mutableListOf<TestCase>()

private var failOnIgnoredTests = false
private val children = mutableMapOf<Descriptor, MutableList<TestCase>>()
private val results = mutableMapOf<Descriptor, TestResult>()
private val dummies = hashSetOf<String>()

override suspend fun engineStarted() {
   logger.log { Pair(null, "Engine started") }
   listener.executionStarted(root)
}

override suspend fun engineInitialized(context: EngineContext) {
   failOnIgnoredTests = context.configuration.failOnIgnoredTests
   formatter = getDisplayNameFormatter(context.configuration.registry, context.configuration)
}

override suspend fun engineFinished(t: List<Throwable>) {
   logger.log { Pair(null, "Engine finished; throwables=[${t}]") }
   registerExceptionPlaceholders(t)
   val result = if (failOnIgnoredTests && results.values.any { it.isIgnored }) {
      TestExecutionResult.failed(RuntimeException("Build contained ignored test"))
   } else {
      TestExecutionResult.successful()
   }
   logger.log { Pair(null, "Notifying junit that engine completed $root") }
   listener.executionFinished(root, result)
}

override suspend fun specStarted(kclass: KClass<*>) {
   markSpecStarted(kclass)
}

override suspend fun specFinished(kclass: KClass<*>, result: TestResult) {
   val t = result.errorOrNull
   when {
      // if we have a spec error before we even started the spec, we will start the spec, add a placeholder
      // to hold the error, mark that test as failed, and then fail the spec as well
      t != null && !started -> {
         val descriptor = markSpecStarted(kclass)
         addPlaceholderTest(descriptor, t, kclass)
         logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
         listener.executionFinished(descriptor, TestExecutionResult.failed(t))
      }
      // if we had an error in the spec, and we had no tests, we'll add the dummy and return
      t != null && rootTests.isEmpty() -> {
         val descriptor = getSpecDescriptor(kclass)
         addPlaceholderTest(descriptor, t, kclass)
         logger.log { Pair(kclass.bestName(), "execution failed: $descriptor $t") }
         listener.executionFinished(descriptor, TestExecutionResult.failed(t))
      }
      else -> {
         val descriptor = getSpecDescriptor(kclass)
         val result = when (t) {
            null -> TestExecutionResult.successful()
            else -> {
               addPlaceholderTest(descriptor, t, kclass)
               TestExecutionResult.successful()
            }
         }
         logger.log { Pair(kclass.bestName(), "executionFinished: $descriptor") }
         listener.executionFinished(descriptor, result)
      }
   }
   reset()
}

override suspend fun specIgnored(kclass: KClass<*>, reason: String?) {
   logger.log { Pair(kclass.bestName(), "Spec is being flagged as ignored") }
   listener.executionSkipped(getSpecDescriptor(kclass), reason)
}

private fun markSpecStarted(kclass: KClass<*>): TestDescriptor {
   return try {
      val descriptor = getSpecDescriptor(root, kclass.toDescriptor(), formatter.format(kclass))
      logger.log { Pair(kclass.bestName(), "Registering dynamic spec $descriptor") }
      listener.dynamicTestRegistered(descriptor)
      logger.log { Pair(kclass.bestName(), "Spec executionStarted $descriptor") }
      listener.executionStarted(descriptor)
      started = true
      descriptor
   } catch (t: Throwable) {
      logger.log { Pair(kclass.bestName(), "Error in JUnit Platform listener $t") }
      throw t
   }
}

private fun reset() {
   rootTests.clear()
   children.clear()
   results.clear()
   started = false
   descriptors.clear()
   startedTests.clear()
}

private fun addPlaceholderTest(parent: TestDescriptor, t: Throwable, kclass: KClass<*>) {
   val (name, cause) = ExtensionExceptionExtractor.resolve(t)
   val descriptor = createTestDescriptor(
      parent.uniqueId.append(Segment.Test.value, name),
      name,
      TestDescriptor.Type.TEST,
      ClassSource.from(kclass.java),
      false
   )
   parent.addChild(descriptor)
   listener.dynamicTestRegistered(descriptor)
   listener.executionStarted(descriptor)
   listener.executionFinished(descriptor, TestResult.Error(Duration.ZERO, cause).toTestExecutionResult())
}

override suspend fun testStarted(testCase: TestCase) {
   // depending on the test type, we may want to wait to notify junit, because gradle doesn't work
   // properly with the junit test types. Ideally, we'd just set everything to CONTAINER_AND_TEST, which is
   // supposed to mean a test can contain other tests as well as being a test itself, which is exactly how
   // Kotest views tests, but unfortunately it doesn't work properly.
   //
   // Another approach is to wait until the spec finishes to see which tests contain children and which
   // don't, and set the test type appropriately, but junit doesn't give us a way to specify test duration
   // (instead it just calculates it itself from the time between marking a test as started and marking
   // it as finished), so this approach works but ends up reporting all tests as 0ms.
   //
   // So the approach we will take is to use the TestType from the test definition, unless it is dynamic;
   // for dynamic tests we will calculate it later, and accept the 0ms drawback.
   logger.log { Pair(testCase.name.testName, "test started") }
   if (testCase.parent == null) rootTests.add(testCase)
   addChild(testCase)
   when (testCase.type) {
      TestType.Container -> startTestIfNotStarted(testCase, TestDescriptor.Type.CONTAINER)
      TestType.Test -> startTestIfNotStarted(testCase, TestDescriptor.Type.TEST)
      TestType.Dynamic -> Unit
   }
}

// this test can be output now it has completed, as we have all we need to know to complete it
override suspend fun testFinished(testCase: TestCase, result: TestResult) {
   logger.log { Pair(testCase.name.testName, "test finished $result") }
   results[testCase.descriptor] = result
   val descriptor = getOrCreateTestDescriptor(testCase, null)
   // we need to ensure all parents have been started first
   startParents(testCase)
   startTestIfNotStarted(testCase, null)
   logger.log { Pair(testCase.name.testName, "executionFinished: $descriptor") }
   listener.executionFinished(descriptor, result.toTestExecutionResult())
}

override suspend fun testIgnored(testCase: TestCase, reason: String?) {
   logger.log { Pair(testCase.name.testName, "test ignored $reason") }
   if (testCase.parent == null) rootTests.add(testCase)
   addChild(testCase)
   results[testCase.descriptor] = TestResult.Ignored(reason)
   // we need to ensure all parents have been started first
   startParents(testCase)
   val descriptor = getOrCreateTestDescriptor(testCase, TestDescriptor.Type.TEST)
   logger.log { Pair(testCase.name.testName, "Registering dynamic test: $descriptor") }
   listener.dynamicTestRegistered(descriptor)
   logger.log { Pair(testCase.name.testName, "executionSkipped: $descriptor") }
   listener.executionSkipped(descriptor, reason)
}

private fun addChild(testCase: TestCase) {
   children.getOrPut(testCase.descriptor.parent) { mutableListOf() }.add(testCase)
}

private fun startParents(testCase: TestCase) {...
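The listener decides whether a test is a root test purely by whether it has a parent, which is how the rootTests list above is populated. A minimal sketch of the distinction, assuming FunSpec registers context blocks as containers (the spec name is illustrative):

import io.kotest.core.spec.style.FunSpec

class RootVsNestedSpec : FunSpec({
   // registered at the spec level: parent == null, so the listener records it in rootTests
   // and, being TestType.Container, starts it as TestDescriptor.Type.CONTAINER
   context("a root container") {
      // nested: parent != null, so it is tracked only via the children map
      test("a nested test") { }
   }
   // also a root test, with TestType.Test, so it starts as TestDescriptor.Type.TEST
   test("another root test") { }
})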
Source:SingleInstanceSpecRunner.kt  
...
private val logger = Logger(SingleInstanceSpecRunner::class)

override suspend fun execute(spec: Spec): Result<Map<TestCase, TestResult>> {
   logger.log { Pair(spec::class.bestName(), "executing spec $spec") }
   suspend fun interceptAndRun(context: CoroutineContext) = runCatching {
      val rootTests = materializer.materialize(spec)
      logger.log { Pair(spec::class.bestName(), "Materialized root tests: ${rootTests.size}") }
      launch(spec) {
         logger.log { Pair(it.name.testName, "Executing test $it") }
         runTest(it, context, null)
      }
   }
   try {
      return coroutineScope {
         extensions.beforeSpec(spec)
            .flatMap { interceptAndRun(coroutineContext) }
            .flatMap { SpecExtensions(configuration.registry).afterSpec(spec) }
            .map { results }
      }
   } catch (e: Exception) {
      e.printStackTrace()
...
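The beforeSpec → root tests → afterSpec pipeline above is chained with flatMap, so a failure at any stage short-circuits the remaining stages. kotlin.Result has no flatMap of its own; the one used above is an internal extension, and a sketch of such a helper (not the exact Kotest source) could be:

// propagate failure unchanged, otherwise feed the success value into the next step
fun <A, B> Result<A>.flatMap(f: (A) -> Result<B>): Result<B> =
   fold(onSuccess = { f(it) }, onFailure = { Result.failure(it) })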
Source:DslDrivenSpec.kt  
...
abstract class DslDrivenSpec : Spec(), RootScope {

   /**
    * Contains the [RootTest]s that have been registered on this spec.
    */
   private var rootTests = emptyList<RootTest>()

   private var sealed = false

   private val globalExtensions = mutableListOf<Extension>()

   /**
    * Marks that this spec has been instantiated and all root tests have been registered.
    * After this point, no further root tests are allowed to be defined.
    */
   fun seal() {
      sealed = true
   }

   override fun rootTests(): List<RootTest> {
      return rootTests
   }

   override fun globalExtensions(): List<Extension> {
      return globalExtensions.toList()
   }

   override fun add(test: RootTest) {
      if (sealed) throw InvalidDslException("Cannot add a root test after the spec has been instantiated: ${test.name.testName}")
      rootTests = rootTests + test
   }

   /**
    * Include the tests and extensions from the given [TestFactory] in this spec.
    * Tests are added in order from where this include was invoked, using the configuration
    * and settings at the time the method was invoked.
    */
   fun include(factory: TestFactory) {
      factory.tests.forEach { add(it.copy(factoryId = factory.factoryId)) }
      register(factory.extensions)
   }

   /**
    * Includes the tests from the given [TestFactory] in this spec or factory, with the given
    * prefix added to each test's name.
    */
...
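In practice, include is fed by a TestFactory built with one of the style builders. A short sketch, assuming the funSpec builder and illustrative names:

import io.kotest.core.spec.style.FunSpec
import io.kotest.core.spec.style.funSpec

val databaseTests = funSpec {
   test("connects") { }
   test("disconnects") { }
}

class ServiceSpec : FunSpec({
   include(databaseTests) // the factory's tests are added as root tests of this spec
   test("service specific behaviour") { }
})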
Source:KotestStringSpecTest.kt  
...
override suspend fun afterInvocation(testCase: TestCase, iteration: Int) {
   logger.info("afterInvocation ${testCase.displayName}")
}

override suspend fun afterSpec(spec: Spec) {
   val names = spec.rootTests().map { it.testCase.displayName }
   logger.info("afterSpec $names")
}

override suspend fun afterTest(testCase: TestCase, result: TestResult) {
   logger.info("afterTest ${testCase.displayName}, ${result.status} ${result.reason} ${result.error}")
}

override suspend fun beforeInvocation(testCase: TestCase, iteration: Int) {
   logger.info("beforeInvocation ${testCase.displayName}")
}

override suspend fun beforeSpec(spec: Spec) {
   val names = spec.rootTests().map { it.testCase.displayName }
   logger.info("beforeSpec $names")
}

override suspend fun beforeTest(testCase: TestCase) {
   logger.info("beforeTest ${testCase.displayName}")
}

override suspend fun finalizeSpec(kclass: KClass<out Spec>, results: Map<TestCase, TestResult>) {
   logger.info("finalizeSpec")
}

override suspend fun prepareSpec(kclass: KClass<out Spec>) {
   logger.info("prepareSpec")
}
}
...
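The callbacks above implement Kotest listener hooks. A sketch of attaching a similar listener to a single spec, assuming Kotest 5's TestListener and the overridable extensions() function (the object and spec names are illustrative):

import io.kotest.core.listeners.TestListener
import io.kotest.core.spec.Spec
import io.kotest.core.spec.style.FunSpec

object RootTestLogger : TestListener {
   override suspend fun beforeSpec(spec: Spec) {
      // rootTests() is available on the spec instance handed to the callback
      println("beforeSpec: " + spec.rootTests().map { it.name.testName })
   }
}

class LoggedSpec : FunSpec({
   test("a") { }
   test("b") { }
}) {
   override fun extensions() = listOf(RootTestLogger)
}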
Source:Materializer.kt  
...
 */
fun materialize(spec: Spec): List<TestCase> {
   val duplicateTestNameMode = spec.duplicateTestNameMode ?: configuration.duplicateTestNameMode
   val handler = DuplicateTestNameHandler(duplicateTestNameMode)
   val tests = spec.rootTests().map { rootTest ->
      val uniqueName = handler.handle(rootTest.name)
      val uniqueTestName = if (uniqueName == null) rootTest.name else rootTest.name.copy(testName = uniqueName)
      TestCase(
         descriptor = spec::class.toDescriptor().append(uniqueTestName),
         name = uniqueTestName,
         spec = spec,
         type = rootTest.type,
         source = rootTest.source,
         test = rootTest.test,
         config = resolveConfig(
            config = rootTest.config,
            xdisabled = rootTest.disabled,
            parent = null,
            spec = spec,
...
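The DuplicateTestNameHandler above is what lets two root tests share a name without producing clashing descriptors. A sketch of the spec-level setting it honours, assuming Kotest 5's DuplicateTestNameMode and that duplicateTestNameMode is assignable in the spec body (the uniquified name the handler produces is an implementation detail):

import io.kotest.core.names.DuplicateTestNameMode
import io.kotest.core.spec.style.FunSpec

class DuplicateNamesSpec : FunSpec({
   duplicateTestNameMode = DuplicateTestNameMode.Silent
   test("same name") { }
   test("same name") { } // materialized under a uniquified name instead of failing
})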
Source:ScriptRuntime.kt  
...
//   /**
//    * Stores root level tests added by a script.
//    * Should be cleared after each script has completed.
//    */
//   private val rootTests = mutableListOf<TestCase>()
//
//   /**
//    * Adds a new root [TestCase] with the given name and type.
//    *
//    * @param xdisabled if true then this test has been disabled by using an xKeyword method.
//    */
//   fun registerRootTest(
//      name: DescriptionName.TestName,
//      xdisabled: Boolean,
//      type: TestType,
//      test: suspend (testScope) -> Unit
//   ) {
//      log { "ScriptRuntime: registerRootTest $name" }
//      val config = if (xdisabled) TestCaseConfig().copy(enabled = false) else TestCaseConfig()
//      val description = spec.description().append(name, type)
//      rootTests.add(
//         TestCase(
//            spec = spec,
//            test = test,
//            source = sourceRef(),
//            type = type,
//            config = config,
//            factoryId = null,
//            descriptor = Descriptor.fromScriptClass(ScriptSpec::class).append(
//               Name(description.name.name),
//               DisplayName(description.name.displayName),
//               TestType.Test,
//               Source.TestSource(sourceRef().fileName, sourceRef().lineNumber),
//            ),
//            parent = null,
//         )
//      )
//   }
//
//   fun reset() {
//      rootTests.clear()
//      spec = ScriptSpec()
//   }
//
//   fun materializeRootTests(parent: Descriptor.SpecDescriptor): List<TestCase> {
//      // the test cases will have been registered with a placeholder spec description, since we don't know
//      // what that is until runtime. So now we must replace that.
//      return rootTests.toList().map {
//         it.copy(descriptor = it.descriptor!!.copy(parent = parent))
//      }
//   }
//}
...
Source:SpecRunner.kt  
...
/**
 * Schedules all the tests in this spec.
 */
protected suspend fun launch(spec: Spec, run: suspend (TestCase) -> Unit) {
   val rootTests = materializer.materialize(spec)
   logger.log { Pair(spec::class.bestName(), "Launching ${rootTests.size} root tests with launcher $scheduler") }
   scheduler.schedule(run, rootTests)
}

/**
 * Creates an instance of the supplied [Spec] by delegating to the project constructors,
 * and notifies the [TestEngineListener] of the instantiation event.
 */
protected suspend fun createInstance(kclass: KClass<out Spec>): Result<Spec> =
   createAndInitializeSpec(kclass, configuration.registry)
}
Source:CompositeSpec.kt  
package io.kotest.core.spec

import io.kotest.core.factory.TestFactory

abstract class CompositeSpec(private vararg val factories: TestFactory) : Spec() {
   override fun rootTests(): List<RootTest> {
      return factories.flatMap { it.tests }
   }
}
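A composite spec like this aggregates several factories into one runnable spec, with rootTests() returning the factories' tests in registration order. A sketch, assuming the funSpec builder and illustrative names:

import io.kotest.core.spec.style.funSpec

val mathTests = funSpec {
   test("addition") { }
}

val stringTests = funSpec {
   test("concatenation") { }
}

// rootTests() flattens both factories' tests into this spec's root tests
class AllTests : CompositeSpec(mathTests, stringTests)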
rootTests
Using AI Code Generation
class PrintRootTestsSpec : FunSpec() {
   init {
      test("test1") { }
      test("test2") { }
   }

   // prints the name of every root test registered on this spec
   fun printRootTests() {
      rootTests().forEach { println(it.name.testName) }
   }
}
rootTests
Using AI Code Generation
class DelegatingSpec : Spec() {

   private val factory = funSpec {
      test("test1") { println("test1") }
      test("test2") { println("test2") }
      test("test3") { println("test3") }
   }

   // expose the factory's tests as this spec's root tests
   override fun rootTests(): List<RootTest> = factory.tests
}
rootTests
Using AI Code Generation
class MySpec : FunSpec({
   test("my test") {
   }
})