Best Kotest code snippet using io.kotest.matchers.sql.resultset.test
SykefraversstatistikkImportTest.kt
Source:SykefraversstatistikkImportTest.kt
package no.nav.lydia.container.sykefraversstatistikk
import arrow.core.Either
import com.github.kittinunf.fuel.gson.responseObject
import com.github.kittinunf.result.getOrElse
import io.kotest.inspectors.forAtLeastOne
import io.kotest.matchers.ints.shouldBeExactly
import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual
import io.kotest.matchers.nulls.shouldBeNull
import io.kotest.matchers.nulls.shouldNotBeNull
import io.kotest.matchers.shouldBe
import no.nav.lydia.helper.*
import no.nav.lydia.helper.TestContainerHelper.Companion.performGet
import no.nav.lydia.helper.TestContainerHelper.Companion.withLydiaToken
import no.nav.lydia.helper.TestVirksomhet.Companion.TESTVIRKSOMHET_FOR_IMPORT
import no.nav.lydia.sykefraversstatistikk.api.Periode
import no.nav.lydia.sykefraversstatistikk.api.SYKEFRAVERSSTATISTIKK_PATH
import no.nav.lydia.sykefraversstatistikk.api.SykefraversstatistikkVirksomhetDto
import java.sql.ResultSet
import kotlin.test.Test
import kotlin.test.fail

class SykefraversstatistikkImportTest {
    private val lydiaApi = TestContainerHelper.lydiaApiContainer
    private val kafkaContainer = TestContainerHelper.kafkaContainerHelper
    private val postgres = TestContainerHelper.postgresContainer

    @Test
    fun `kan importere statistikk for flere kvartal`() {
        val gjeldendePeriode = Periode.gjeldendePeriode()
        val forrigePeriode = Periode.forrigePeriode()
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(importDto = SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
            .forExactlyOne {
                it.kvartal shouldBeExactly forrigePeriode.kvartal
                it.arstall shouldBeExactly forrigePeriode.årstall
                it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
            }
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(importDto = SykefraværsstatistikkTestData.testVirksomhetGjeldeneKvartal.sykefraværsstatistikkImportDto)
        val osloAndreOgTredjeKvart = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        osloAndreOgTredjeKvart.forExactlyOne {
            it.kvartal shouldBe forrigePeriode.kvartal
            it.arstall shouldBe forrigePeriode.årstall
            it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
        }
        osloAndreOgTredjeKvart.forExactlyOne {
            it.kvartal shouldBe gjeldendePeriode.kvartal
            it.arstall shouldBe gjeldendePeriode.årstall
            it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
        }
    }

    @Test
    fun `importerte data skal kunne hentes ut og være like`() {
        val sykefraværsstatistikk = SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(sykefraværsstatistikk)
        val dtos = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        dtos.size shouldBeGreaterThanOrEqual 1
        dtos.forAtLeastOne { dto ->
            dto.orgnr shouldBe sykefraværsstatistikk.virksomhetSykefravær.orgnr
            dto.arstall shouldBe sykefraværsstatistikk.virksomhetSykefravær.årstall
            dto.kvartal shouldBe sykefraværsstatistikk.virksomhetSykefravær.kvartal
            dto.sykefraversprosent shouldBe sykefraværsstatistikk.virksomhetSykefravær.prosent
            dto.antallPersoner shouldBe sykefraværsstatistikk.virksomhetSykefravær.antallPersoner.toInt()
            dto.muligeDagsverk shouldBe sykefraværsstatistikk.virksomhetSykefravær.muligeDagsverk
            dto.tapteDagsverk shouldBe sykefraværsstatistikk.virksomhetSykefravær.tapteDagsverk
        }
    }

    @Test
    fun `import av data er idempotent`() {
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val førsteLagredeStatistikk = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val andreLagredeStatistikk = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        andreLagredeStatistikk.forExactlyOne { dto ->
            dto.orgnr shouldBe førsteLagredeStatistikk[0].orgnr
            dto.arstall shouldBe førsteLagredeStatistikk[0].arstall
            dto.kvartal shouldBe førsteLagredeStatistikk[0].kvartal
            dto.sykefraversprosent shouldBe førsteLagredeStatistikk[0].sykefraversprosent
            dto.antallPersoner shouldBe førsteLagredeStatistikk[0].antallPersoner
            dto.muligeDagsverk shouldBe førsteLagredeStatistikk[0].muligeDagsverk
            dto.tapteDagsverk shouldBe førsteLagredeStatistikk[0].tapteDagsverk
        }
    }

    @Test
    fun `vi lagrer metadata ved import`() {
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val rs =
            postgres.performQuery("SELECT * FROM virksomhet_statistikk_metadata WHERE orgnr = '${TESTVIRKSOMHET_FOR_IMPORT.orgnr}'")
        rs.row shouldBe 1
    }

    @Test
    fun `sykefraværsstatistikk skal oppdateres om det kommer nye versjoner av samme nøkler`() {
        val virksomhet = TestVirksomhet.nyVirksomhet()
        val originalStatistikk = TestData().lagData(
            virksomhet = virksomhet,
            perioder = listOf(Periode.gjeldendePeriode()),
            antallPersoner = 100.0,
            tapteDagsverk = 20.0,
            sykefraværsProsent = 2.0
        )
...
JDBCConnectionTest.kt
Source:JDBCConnectionTest.kt
package dev.neeffect.nee.effects.jdbc
import io.kotest.core.spec.style.DescribeSpec
import io.kotest.matchers.shouldBe
import io.kotest.matchers.shouldNotBe
import io.vavr.collection.List
import io.vavr.control.Option
import java.sql.Connection
import java.sql.ResultSet

class JDBCConnectionTest : DescribeSpec({
    describe("jdbc connection") {
        val cfg = JDBCConfig("org.h2.Driver", "jdbc:h2:mem:test_mem", "sa")
        val provider = { JDBCProvider(cfg) }
        describe("connection") {
            val conn = provider().getConnection()
            it("is created") {
                conn shouldNotBe null
            }
            it("gives access to physical jdbc") {
                conn.getResource() shouldNotBe null
            }
            it("reads from dual") {
                val res = simpleQuery(conn.getResource(), "select 4 from dual")
                res[0] shouldBe "4"
            }
            it("should start with no transaction") {
...
ResultSetMatchersTest.kt
Source:ResultSetMatchersTest.kt
package io.kotest.matchers.sql
import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.collections.shouldContain
import io.kotest.matchers.collections.shouldContainAll
import io.kotest.matchers.collections.shouldContainExactly
import io.kotest.matchers.collections.shouldNotContain
import io.kotest.matchers.shouldBe
import io.mockk.clearMocks
import io.mockk.every
import io.mockk.mockk
import java.sql.ResultSet

class ResultSetMatchersTest : StringSpec() {
    private val resultSet = mockk<ResultSet>().also {
        every { it.row } returns 1
        every { it.metaData.columnCount } returns 1
        every { it.metaData.getColumnLabel(1) } returns TEST_COLUMN
        every { it.next() } returnsMany listOf(true, true, true, false)
        every { it.getObject(TEST_COLUMN) } returnsMany TEST_COLUMN_VALUES
    }
    init {
...
AuctionRepositoryExposedDbTest.kt
Source:AuctionRepositoryExposedDbTest.kt
package com.github.christophpickl.tbakotlinmasterproject.boundary.boundarydb
import com.github.christophpickl.tbakotlinmasterproject.commons.commonstest.Tags
import com.github.christophpickl.tbakotlinmasterproject.commons.commonstest.UUID_1
import io.kotest.assertions.arrow.core.shouldBeLeft
import io.kotest.assertions.arrow.core.shouldBeRight
import io.kotest.core.spec.Spec
import io.kotest.core.spec.style.DescribeSpec
import io.kotest.core.test.TestCase
import io.kotest.core.test.TestResult
import io.kotest.matchers.collections.shouldBeEmpty
import io.kotest.matchers.collections.shouldContainExactly
import io.kotest.matchers.shouldBe
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.transactions.TransactionManager
import java.sql.ResultSet
import java.util.UUID

internal class AuctionRepositoryExposedDbTest : DescribeSpec() {
    private val tableName = "auctions"
    private val dbo = AuctionDbo.any()
    private val repo = AuctionRepositoryExposed()
    private val uuid = UUID_1
    private lateinit var db: Database

    override fun tags() = setOf(Tags.Database)

    override fun beforeSpec(spec: Spec) {
        db = connectTestDb {}
    }

    override fun afterTest(testCase: TestCase, result: TestResult) {
        db.deleteAll()
    }

    override fun afterSpec(spec: Spec) {
        TransactionManager.closeAndUnregister(db)
    }

    init {
        describe("When insert") {
            it("Then one row existing") {
                val result = repo.insert(db, AuctionDbo.any())
                result.shouldBeRight()
                val rs = db.query("SELECT * FROM $tableName")
                rs.countRows() shouldBe 1
            }
            it("Then values inserted") {
...
ResultSetMapperTest.kt
Source:ResultSetMapperTest.kt
package com.github.imanushin.test.unit
import com.github.imanushin.DynamicResultSetMapperFactoryImpl
import com.github.imanushin.KotlinClassCompilationImpl
import com.github.imanushin.createForType
import com.github.imanushin.model.application.Email
import com.github.imanushin.model.application.EmailDomain
import com.github.imanushin.model.application.EmailUser
import com.github.imanushin.model.application.UserName
import com.github.imanushin.model.database.DbUser
import io.kotest.core.spec.style.FreeSpec
import io.kotest.matchers.shouldBe
import io.mockk.every
import io.mockk.mockk
import java.sql.ResultSet
import java.sql.ResultSetMetaData

/**
 * This is not a proper unit test, because it exercises several services at once.
 *
 * A proper implementation would be based on Spring IoC etc. and would be named "integration";
 * it is kept simple here for the sake of the example.
 */
class ResultSetMapperTest : FreeSpec() {
    init {
        "user should be parsed" {
            // Given
            val expectedResult = listOf(
                DbUser(
                    UserName("name1"),
                    Email(
                        EmailUser("name-1"),
...
resultset.kt
Source:resultset.kt
package io.kotest.matchers.sql
import io.kotest.matchers.Matcher
import io.kotest.matchers.MatcherResult
import io.kotest.matchers.should
import io.kotest.matchers.shouldNot
import java.sql.ResultSet

infix fun ResultSet.shouldHaveRows(rowCount: Int) = this should haveRowCount(rowCount)
infix fun ResultSet.shouldNotHaveRows(rowCount: Int) = this shouldNot haveRowCount(rowCount)

fun haveRowCount(rowCount: Int) = object : Matcher<ResultSet> {
    override fun test(value: ResultSet) =
        MatcherResult(
            value.row == rowCount,
            { "$value should have $rowCount rows" },
            { "$value should not have $rowCount rows" }
        )
}

infix fun ResultSet.shouldHaveColumns(columnCount: Int) = this should haveColumnCount(columnCount)
infix fun ResultSet.shouldNotHaveColumns(columnCount: Int) = this shouldNot haveColumnCount(columnCount)

fun haveColumnCount(columnCount: Int) = object : Matcher<ResultSet> {
    override fun test(value: ResultSet) =
        MatcherResult(
            value.metaData.columnCount == columnCount,
            { "$value should have $columnCount columns" },
            { "$value should not have $columnCount columns" }
        )
}

infix fun ResultSet.shouldContainColumn(columnName: String) = this should containColumn(columnName)
infix fun ResultSet.shouldNotContainColumn(columnName: String) = this shouldNot containColumn(columnName)

fun containColumn(columnName: String) = object : Matcher<ResultSet> {
    override fun test(value: ResultSet): MatcherResult {
        val metaData = value.metaData
        val colCount = metaData.columnCount
        return MatcherResult(
            // check the label of every column, not just the last one
            (1..colCount).any { metaData.getColumnLabel(it) == columnName },
            { "$value should have $columnName column" },
            { "$value should not have $columnName column" }
        )
    }
}

@Suppress("UNCHECKED_CAST")
fun <T> ResultSet.shouldHaveColumn(columnName: String, next: (List<T>) -> Unit) {
    this shouldContainColumn columnName
    val data = mutableListOf<T>()
    while (this.next()) {
...
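As a quick orientation before the generated examples further down, here is a minimal usage sketch of the matchers defined in resultset.kt. It stubs a ResultSet with MockK in the same style as ResultSetMatchersTest above; the spec name, the "NAME" column label, and the stubbed values are invented for illustration and do not come from any of the snippets on this page.

import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumns
import io.kotest.matchers.sql.shouldHaveRows
import io.kotest.matchers.sql.shouldNotContainColumn
import io.mockk.every
import io.mockk.mockk
import java.sql.ResultSet

class ResultSetMatchersUsageSketch : StringSpec({
    // Stubbed ResultSet: cursor on row 1, a single column labelled "NAME".
    val resultSet = mockk<ResultSet>().also {
        every { it.row } returns 1
        every { it.metaData.columnCount } returns 1
        every { it.metaData.getColumnLabel(1) } returns "NAME"
    }

    "row and column matchers" {
        resultSet shouldHaveRows 1          // haveRowCount compares against ResultSet.row
        resultSet shouldHaveColumns 1       // haveColumnCount compares against metaData.columnCount
        resultSet shouldContainColumn "NAME"
        resultSet shouldNotContainColumn "AGE"
    }
})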
BigDecimalTypeTest.kt
Source:BigDecimalTypeTest.kt
package com.github.thake.logminer.kafka.connect
import io.confluent.connect.avro.AvroData
import io.kotest.matchers.comparables.shouldBeEqualComparingTo
import io.kotest.matchers.nulls.shouldNotBeNull
import io.kotest.matchers.should
import io.kotest.matchers.shouldBe
import io.mockk.every
import io.mockk.mockk
import org.apache.avro.LogicalType
import org.apache.avro.LogicalTypes
import org.junit.jupiter.api.Test
import java.math.BigDecimal
import java.sql.ResultSet
import javax.xml.validation.Schema

class BigDecimalTypeTest {
    @Test
    fun testCorrectScaleForString() {
        val type = SchemaType.NumberType.BigDecimalType(13, 10)
        val str = "123.20"
        type.convert(str).should {
            it.scale().shouldBe(type.scale)
            it.shouldBeEqualComparingTo(str.toBigDecimal())
        }
    }

    @Test
    fun testCorrectScaleForResultSet() {
        val resultSet = mockk<ResultSet>()
        val columnIndex = 1
        val expectedDecimal = "234.123".toBigDecimal()
        every { resultSet.getBigDecimal(columnIndex) }.returns(expectedDecimal)
        val type = SchemaType.NumberType.BigDecimalType(ORACLE_UNQUALIFIED_NUMBER_PRECISION, ORACLE_UNQUALIFIED_NUMBER_SCALE)
        type.extract(columnIndex, resultSet).should {
            it.shouldNotBeNull()
            it.scale().shouldBe(type.scale)
            it.shouldBeEqualComparingTo(expectedDecimal)
        }
    }

    @Test
    fun testConversionToAvroSchema() {
        val type = SchemaType.NumberType.BigDecimalType(ORACLE_UNQUALIFIED_NUMBER_PRECISION, ORACLE_UNQUALIFIED_NUMBER_SCALE)
        val schema = type.createSchemaBuilder().build()
        val avroData = AvroData(10)
        val avroSchema = avroData.fromConnectSchema(schema)
        avroSchema.type.shouldBe(org.apache.avro.Schema.Type.BYTES)
        avroSchema.logicalType.shouldBe(LogicalTypes.decimal(ORACLE_UNQUALIFIED_NUMBER_PRECISION, ORACLE_UNQUALIFIED_NUMBER_SCALE))
    }
}
DemoApplicationTests.kt
Source:DemoApplicationTests.kt
package com.example.demo
import io.kotest.matchers.collections.shouldHaveSize
import io.kotest.matchers.string.shouldContain
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.test.context.DynamicPropertyRegistry
import org.springframework.test.context.DynamicPropertySource
import org.testcontainers.containers.PostgreSQLContainer
import org.testcontainers.junit.jupiter.Container
import org.testcontainers.junit.jupiter.Testcontainers
import java.sql.ResultSet

@SpringBootTest
@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
@Testcontainers
class DemoApplicationTests {
    companion object {
        @Container
        private val postgreSQLContainer = PostgreSQLContainer<Nothing>("postgres:latest")

        @DynamicPropertySource
        @JvmStatic
        fun registerDynamicProperties(registry: DynamicPropertyRegistry) {
            registry.add("spring.datasource.url", postgreSQLContainer::getJdbcUrl)
            registry.add("spring.datasource.username", postgreSQLContainer::getUsername)
            registry.add("spring.datasource.password", postgreSQLContainer::getPassword)
        }
    }

    @Autowired
    private lateinit var jdbcTemplate: JdbcTemplate

    @Test
    fun contextLoads() {
    }

    @Test
...
test
Using AI Code Generation
import io.kotest.matchers.shouldBe
import io.kotest.matchers.shouldNotBe
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumn
import io.kotest.matchers.sql.shouldHaveColumns
import io.kotest.matchers.sql.shouldHaveRows
import io.kotest.matchers.sql.shouldNotContainColumn
import io.kotest.matchers.sql.shouldNotHaveColumns
import io.kotest.matchers.sql.shouldNotHaveRows
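The ResultSet matchers live in io.kotest.matchers.sql, the package declared in resultset.kt above, so those are the imports shown here. A sketch of a test that exercises them end to end against an in-memory H2 database might look like the following; it assumes kotest-assertions-sql and the H2 driver on the test classpath, and the person table and its rows are invented for the example.

import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.collections.shouldContainExactly
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumn
import io.kotest.matchers.sql.shouldHaveColumns
import java.sql.DriverManager

class ResultSetMatchersH2Sketch : StringSpec({
    "asserts on a query against an in-memory H2 database" {
        DriverManager.getConnection("jdbc:h2:mem:matchers_demo", "sa", "").use { connection ->
            connection.createStatement().use { statement ->
                statement.execute("CREATE TABLE person(id INT PRIMARY KEY, name VARCHAR(64))")
                statement.execute("INSERT INTO person VALUES (1, 'Ada'), (2, 'Linus')")
                statement.executeQuery("SELECT id, name FROM person ORDER BY id").use { rs ->
                    rs shouldHaveColumns 2
                    rs shouldContainColumn "NAME" // H2 upper-cases unquoted identifiers
                    // shouldHaveColumn walks the ResultSet and hands back the column's values
                    rs.shouldHaveColumn<String>("NAME") { names ->
                        names shouldContainExactly listOf("Ada", "Linus")
                    }
                }
            }
        }
    }
})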
test
Using AI Code Generation
    val result = dataSource.connection.use { connection ->
        connection.createStatement().use { statement ->
            statement.executeQuery("SELECT 1").use { resultSet ->
                resultSet.next()
                resultSet.getInt(1)
            }
        }
    }
}

fun `test with sql server`() {
    val result = dataSource.connection.use { connection ->
        connection.createStatement().use { statement ->
            statement.executeQuery("SELECT 1").use { resultSet ->
                resultSet.next()
                resultSet.getInt(1)
            }
        }
    }
}
}
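The generated snippet above never actually touches the ResultSet matchers. A hedged variant of the same SELECT 1 check that does could look like this; dataSource is assumed to be an already configured javax.sql.DataSource, and the column-label assertion depends on how the database capitalises the unquoted alias (H2 and Oracle report ONE, PostgreSQL reports one).

import io.kotest.matchers.shouldBe
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumns
import javax.sql.DataSource

// Hypothetical helper: asserts on the shape and content of a simple SELECT 1 query.
fun assertSelectOne(dataSource: DataSource) {
    dataSource.connection.use { connection ->
        connection.createStatement().use { statement ->
            statement.executeQuery("SELECT 1 AS one").use { resultSet ->
                resultSet shouldHaveColumns 1       // single column in the projection
                resultSet shouldContainColumn "ONE" // label case is vendor-specific
                resultSet.next() shouldBe true
                resultSet.getInt(1) shouldBe 1
            }
        }
    }
}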