Best Kotest code snippets using io.kotest.matchers.sql.resultset
Source:SykefraversstatistikkImportTest.kt

package no.nav.lydia.container.sykefraversstatistikk
import arrow.core.Either
import com.github.kittinunf.fuel.gson.responseObject
import com.github.kittinunf.result.getOrElse
import io.kotest.inspectors.forAtLeastOne
import io.kotest.matchers.ints.shouldBeExactly
import io.kotest.matchers.ints.shouldBeGreaterThanOrEqual
import io.kotest.matchers.nulls.shouldBeNull
import io.kotest.matchers.nulls.shouldNotBeNull
import io.kotest.matchers.shouldBe
import no.nav.lydia.helper.*
import no.nav.lydia.helper.TestContainerHelper.Companion.performGet
import no.nav.lydia.helper.TestContainerHelper.Companion.withLydiaToken
import no.nav.lydia.helper.TestVirksomhet.Companion.TESTVIRKSOMHET_FOR_IMPORT
import no.nav.lydia.sykefraversstatistikk.api.Periode
import no.nav.lydia.sykefraversstatistikk.api.SYKEFRAVERSSTATISTIKK_PATH
import no.nav.lydia.sykefraversstatistikk.api.SykefraversstatistikkVirksomhetDto
import java.sql.ResultSet
import kotlin.test.Test
import kotlin.test.fail

class SykefraversstatistikkImportTest {
    private val lydiaApi = TestContainerHelper.lydiaApiContainer
    private val kafkaContainer = TestContainerHelper.kafkaContainerHelper
    private val postgres = TestContainerHelper.postgresContainer

    @Test
    fun `kan importere statistikk for flere kvartal`() {
        val gjeldendePeriode = Periode.gjeldendePeriode()
        val forrigePeriode = Periode.forrigePeriode()
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(importDto = SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
            .forExactlyOne {
                it.kvartal shouldBeExactly forrigePeriode.kvartal
                it.arstall shouldBeExactly forrigePeriode.årstall
                it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
            }
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(importDto = SykefraværsstatistikkTestData.testVirksomhetGjeldeneKvartal.sykefraværsstatistikkImportDto)
        val osloAndreOgTredjeKvart = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        osloAndreOgTredjeKvart.forExactlyOne {
            it.kvartal shouldBe forrigePeriode.kvartal
            it.arstall shouldBe forrigePeriode.årstall
            it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
        }
        osloAndreOgTredjeKvart.forExactlyOne {
            it.kvartal shouldBe gjeldendePeriode.kvartal
            it.arstall shouldBe gjeldendePeriode.årstall
            it.orgnr shouldBe TESTVIRKSOMHET_FOR_IMPORT.orgnr
        }
    }

    @Test
    fun `importerte data skal kunne hentes ut og være like`() {
        val sykefraværsstatistikk = SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(sykefraværsstatistikk)
        val dtos = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        dtos.size shouldBeGreaterThanOrEqual 1
        dtos.forAtLeastOne { dto ->
            dto.orgnr shouldBe sykefraværsstatistikk.virksomhetSykefravær.orgnr
            dto.arstall shouldBe sykefraværsstatistikk.virksomhetSykefravær.årstall
            dto.kvartal shouldBe sykefraværsstatistikk.virksomhetSykefravær.kvartal
            dto.sykefraversprosent shouldBe sykefraværsstatistikk.virksomhetSykefravær.prosent
            dto.antallPersoner shouldBe sykefraværsstatistikk.virksomhetSykefravær.antallPersoner.toInt()
            dto.muligeDagsverk shouldBe sykefraværsstatistikk.virksomhetSykefravær.muligeDagsverk
            dto.tapteDagsverk shouldBe sykefraværsstatistikk.virksomhetSykefravær.tapteDagsverk
        }
    }

    @Test
    fun `import av data er idempotent`() {
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val førsteLagredeStatistikk = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val andreLagredeStatistikk = hentSykefraværsstatistikk(TESTVIRKSOMHET_FOR_IMPORT.orgnr)
        andreLagredeStatistikk.forExactlyOne { dto ->
            dto.orgnr shouldBe førsteLagredeStatistikk[0].orgnr
            dto.arstall shouldBe førsteLagredeStatistikk[0].arstall
            dto.kvartal shouldBe førsteLagredeStatistikk[0].kvartal
            dto.sykefraversprosent shouldBe førsteLagredeStatistikk[0].sykefraversprosent
            dto.antallPersoner shouldBe førsteLagredeStatistikk[0].antallPersoner
            dto.muligeDagsverk shouldBe førsteLagredeStatistikk[0].muligeDagsverk
            dto.tapteDagsverk shouldBe førsteLagredeStatistikk[0].tapteDagsverk
        }
    }

    @Test
    fun `vi lagrer metadata ved import`() {
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(SykefraværsstatistikkTestData.testVirksomhetForrigeKvartal.sykefraværsstatistikkImportDto)
        val rs =
            postgres.performQuery("SELECT * FROM virksomhet_statistikk_metadata WHERE orgnr = '${TESTVIRKSOMHET_FOR_IMPORT.orgnr}'")
        rs.row shouldBe 1
    }

    @Test
    fun `sykefraværsstatistikk skal oppdateres om det kommer nye versjoner av samme nøkler`() {
        val virksomhet = TestVirksomhet.nyVirksomhet()
        val originalStatistikk = TestData().lagData(
            virksomhet = virksomhet,
            perioder = listOf(Periode.gjeldendePeriode()),
            antallPersoner = 100.0,
            tapteDagsverk = 20.0,
            sykefraværsProsent = 2.0
        )
        VirksomhetHelper.lastInnTestdata(originalStatistikk)
        hentSykefraværsstatistikk(virksomhet.orgnr).forExactlyOne {
            it.sykefraversprosent shouldBe 2.0
            it.antallPersoner shouldBe 100
            it.tapteDagsverk shouldBe 20.0
        }
        hentKolonneFraSykefraværsstatistikk(virksomhet, "endret").getOrNull("endret").shouldBeNull()
        val opppdatertStatistikk = lagSykefraværsstatistikkImportDto(
            orgnr = virksomhet.orgnr,
            periode = Periode.gjeldendePeriode(),
            sykefraværsProsent = 3.0,
            antallPersoner = 1337.0,
            tapteDagsverk = 16.0,
            sektor = "3"
        )
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(opppdatertStatistikk)
        hentSykefraværsstatistikk(virksomhet.orgnr).forExactlyOne {
            it.sykefraversprosent shouldBe 3.0
            it.antallPersoner shouldBe 1337
            it.tapteDagsverk shouldBe 16.0
        }
        hentKolonneFraSykefraværsstatistikk(virksomhet, "endret").getOrNull("endret").shouldNotBeNull()
    }

    @Test
    fun `skal importere sykefraværsstatistikk for sektor`() {
        val orgnr = "111111111"
        val sektorKode = "3"
        val periode = Periode(kvartal = 1, årstall = 1971)
        val melding = lagSykefraværsstatistikkImportDto(
            orgnr = orgnr,
            periode = periode,
            antallPersoner = 100.0,
            sektor = sektorKode
        )
        kafkaContainer.sendSykefraversstatistikkKafkaMelding(importDto = melding)
        postgres.performQuery(
            """
            select * from sykefravar_statistikk_sektor
            where sektor_kode = '$sektorKode' AND
            arstall = ${periode.årstall} AND
            kvartal = ${periode.kvartal}
            """.trimIndent()
        ).getString("sektor_kode") shouldBe sektorKode
    }

    private fun hentKolonneFraSykefraværsstatistikk(virksomhet: TestVirksomhet, kolonneNavn: String) =
        postgres.performQuery(
            """
                select $kolonneNavn from sykefravar_statistikk_virksomhet
                where
                orgnr = '${virksomhet.orgnr}' and
                arstall = ${Periode.gjeldendePeriode().årstall} and
                kvartal = ${Periode.gjeldendePeriode().kvartal}
            """.trimIndent()
        )

    private fun hentSykefraværsstatistikk(orgnr: String) =
        lydiaApi.performGet("$SYKEFRAVERSSTATISTIKK_PATH/${orgnr}")
            .withLydiaToken()
            .responseObject<List<SykefraversstatistikkVirksomhetDto>>().third
            .getOrElse { fail(it.message) }

    private fun ResultSet.getOrNull(columnLabel: String): Any? = Either.catch {
        this.getObject(columnLabel)
    }.orNull()
}
Source:JDBCConnectionTest.kt  
package dev.neeffect.nee.effects.jdbc
import io.kotest.core.spec.style.DescribeSpec
import io.kotest.matchers.shouldBe
import io.kotest.matchers.shouldNotBe
import io.vavr.collection.List
import io.vavr.control.Option
import java.sql.Connection
import java.sql.ResultSet

class JDBCConnectionTest : DescribeSpec({
    describe("jdbc connection") {
        val cfg = JDBCConfig("org.h2.Driver", "jdbc:h2:mem:test_mem", "sa")
        val provider = { JDBCProvider(cfg) }
        describe("connection") {
            val conn = provider().getConnection()
            it("is created") {
                conn shouldNotBe null
            }
            it("gives access to physical jdbc") {
                conn.getResource() shouldNotBe null
            }
            it("reads from dual") {
                val res = simpleQuery(conn.getResource(), "select 4 from dual")
                res[0] shouldBe "4"
            }
            it("should start with no transaction") {
                conn.hasTransaction() shouldBe false
            }
            it("connection should close") {
                conn.close()
            }
        }
        describe("creation of data") {
            val conn = provider().getConnection()
            it("should create table") {
                val res = simpleUpdate(
                    conn.getResource(),
                    """ CREATE TABLE PLANETS (
                            ID INT not null,
                            NAME VARCHAR not null,
                            PRIMARY KEY (ID));
                    """
                )
                res shouldBe 0
            }
            it("should insert row") {
                val res = simpleUpdate(
                    conn.getResource(),
                    """ INSERT INTO PLANETS VALUES (1,'MERCURY')"""
                )
                res shouldBe 1
            }
            it("should read single row") {
                val res = conn.getResource().q(
                    "SELECT NAME FROM PLANETS"
                )
                res[0] shouldBe "MERCURY"
            }
        }
        describe("transactions") {
            val conn = provider().getConnection()
            describe("simple trx") {
                val trx = conn.continueTx()
                it("should start trx") {
                    trx.isRight shouldBe (true)
                }
                describe("for rollback") {
                    val trConnection = trx.get()
                    it("should insert row") {
                        val res = trConnection.getResource().x(" INSERT INTO PLANETS VALUES (2,'VENUS')")
                        res shouldBe 1
                    }
                    it("should read this row") {
                        val res = trConnection.getResource().q(
                            "SELECT NAME FROM PLANETS WHERE ID = 2;"
                        )
                        res[0] shouldBe ("VENUS")
                    }
                    it("after rollback") {
                        trConnection.rollback()
                        val res = conn.getResource().q(
                            "SELECT COUNT(ID) FROM PLANETS"
                        )
                        res[0] shouldBe ("1")
                    }
                }
            }
            describe("nested transaction") {
                val trx = conn.continueTx()
                it("should start trx") {
                    trx.isRight shouldBe (true)
                }
                describe("level 1 trx") {
                    val trConnection = trx.get()
                    it("should insert row") {
                        val res = trConnection.getResource().x(" INSERT INTO PLANETS VALUES (2,'VENUS')")
                        res shouldBe 1
                    }
                    describe("level 2 trx") {
                        val trx2 = trConnection.begin()
                        it("trx2 object") {
                            trx2.isRight shouldBe (true)
                        }
                        val trConnection2 = trx2.get()
                        it("should insert row") {
                            val res = trConnection2.getResource().x(" INSERT INTO PLANETS VALUES (3,'EARTH')")
                            res shouldBe 1
                        }
                        it("read inserted planets") {
                            val res = conn.getResource().q(
                                "SELECT COUNT(ID) FROM PLANETS"
                            )
                            res[0] shouldBe ("3")
                        }
                        it("rollback nested") {
                            trConnection2.rollback()
                            val res = conn.getResource().q(
                                "SELECT COUNT(ID) FROM PLANETS"
                            )
                            res[0] shouldBe ("2")
                        }
                        it("commit upper") {
                            val committed = trConnection.commit()
                            committed.first shouldBe Option.none()
                        }
                        it("outer connection") {
                            val res = conn.getResource().q(
                                "SELECT COUNT(ID) FROM PLANETS"
                            )
                            res[0] shouldBe ("2")
                        }
                    }
                }
            }
        }
    }
})

fun simpleQuery(jdbcConnection: Connection, sql: String) = jdbcConnection.let { conn ->
    conn.createStatement().use { stmt ->
        val result = stmt.executeQuery(sql)
        resultToRow(result, List.empty())
    }
}

fun simpleUpdate(jdbcConnection: Connection, sql: String) = jdbcConnection.let { conn ->
    conn.createStatement().use { stmt ->
        stmt.executeUpdate(sql)
    }
}

fun Connection.q(sql: String) = simpleQuery(this, sql)
fun Connection.x(sql: String) = simpleUpdate(this, sql)

tailrec fun resultToRow(resultSet: ResultSet, rows: List<String>): List<String> =
    if (!resultSet.next()) {
        rows
    } else {
        val stringRes = resultSet.getString(1)
        resultToRow(resultSet, rows.append(stringRes))
    }
Source:AuctionRepositoryExposedDbTest.kt  
package com.github.christophpickl.tbakotlinmasterproject.boundary.boundarydb
import com.github.christophpickl.tbakotlinmasterproject.commons.commonstest.Tags
import com.github.christophpickl.tbakotlinmasterproject.commons.commonstest.UUID_1
import io.kotest.assertions.arrow.core.shouldBeLeft
import io.kotest.assertions.arrow.core.shouldBeRight
import io.kotest.core.spec.Spec
import io.kotest.core.spec.style.DescribeSpec
import io.kotest.core.test.TestCase
import io.kotest.core.test.TestResult
import io.kotest.matchers.collections.shouldBeEmpty
import io.kotest.matchers.collections.shouldContainExactly
import io.kotest.matchers.shouldBe
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.transactions.TransactionManager
import java.sql.ResultSet
import java.util.UUID

internal class AuctionRepositoryExposedDbTest : DescribeSpec() {
    private val tableName = "auctions"
    private val dbo = AuctionDbo.any()
    private val repo = AuctionRepositoryExposed()
    private val uuid = UUID_1
    private lateinit var db: Database

    override fun tags() = setOf(Tags.Database)

    override fun beforeSpec(spec: Spec) {
        db = connectTestDb {}
    }

    override fun afterTest(testCase: TestCase, result: TestResult) {
        db.deleteAll()
    }

    override fun afterSpec(spec: Spec) {
        TransactionManager.closeAndUnregister(db)
    }

    init {
        describe("When insert") {
            it("Then one row existing") {
                val result = repo.insert(db, AuctionDbo.any())
                result.shouldBeRight()
                val rs = db.query("SELECT * FROM $tableName")
                rs.countRows() shouldBe 1
            }
            it("Then values inserted") {
                val result = repo.insert(db, dbo)
                result.shouldBeRight()
                val rs = db.query("SELECT * FROM $tableName")
                rs.next()
                rs.toAuctionDbo() shouldBe dbo
            }
            it("Given auction with same ID Then fault") {
                insert(AuctionDbo.any().copy(id = uuid))
                val result = repo.insert(db, AuctionDbo.any().copy(id = uuid))
                result.shouldBeLeft()
            }
        }

        describe("When select all") {
            it("Then return empty") {
                val result = repo.selectAll(db)
                result.shouldBeRight().shouldBeEmpty()
            }
            it("Given inserted auction Then return it") {
                insert(dbo)
                val result = repo.selectAll(db)
                println("result: $result")
                result.shouldBeRight().shouldContainExactly(dbo)
            }
        }
    }

    private fun insert(dbo: AuctionDbo) {
        println("insert: $dbo")
        db.execute("INSERT INTO $tableName (id, title) VALUES ('${dbo.id}', '${dbo.title}')")
    }

    private fun ResultSet.toAuctionDbo() = AuctionDbo(
        id = UUID.fromString(getString("id")),
        title = getString("title")
    )
}
Source:ResultSetMapperTest.kt  
package com.github.imanushin.test.unit
import com.github.imanushin.DynamicResultSetMapperFactoryImpl
import com.github.imanushin.KotlinClassCompilationImpl
import com.github.imanushin.createForType
import com.github.imanushin.model.application.Email
import com.github.imanushin.model.application.EmailDomain
import com.github.imanushin.model.application.EmailUser
import com.github.imanushin.model.application.UserName
import com.github.imanushin.model.database.DbUser
import io.kotest.core.spec.style.FreeSpec
import io.kotest.matchers.shouldBe
import io.mockk.every
import io.mockk.mockk
import java.sql.ResultSet
import java.sql.ResultSetMetaData

/**
 * This isn't a proper unit test, because it operates on several services at the same time.
 *
 * A proper implementation would be based on Spring IoC, etc., and would be named an "integration" test.
 *
 * It is kept this way to simplify the example.
 */
class ResultSetMapperTest : FreeSpec() {
    init {
        "user should be parsed" {
            // Given
            val expectedResult = listOf(
                DbUser(
                    UserName("name1"),
                    Email(
                        EmailUser("name-1"),
                        EmailDomain("github.com")
                    )
                )
            )
            val compiler = KotlinClassCompilationImpl()
            val mapperFactory = DynamicResultSetMapperFactoryImpl(compiler)
            val metadata = mockk<ResultSetMetaData> {
                every { columnCount } returns 3
            }
            val resultSet = mockk<ResultSet> {
                var isFirstRow = true
                every { metaData } returns metadata
                every { getString(0) } returns "name1"
                every { getString(1) } returns "name-1"
                every { getString(2) } returns "github.com"
                every { findColumn("name") } returns 0
                every { findColumn("user_email_name") } returns 1
                every { findColumn("user_email_domain") } returns 2
                every { next() } answers {
                    isFirstRow.also {
                        isFirstRow = false
                    }
                }
            }
            // When
            val mapper = mapperFactory.createForType<DbUser>()
            val result = mapper.extractData(resultSet)
            // Then
            result shouldBe expectedResult
        }
    }
}
Source:resultset.kt  
package io.kotest.matchers.sql
import io.kotest.matchers.Matcher
import io.kotest.matchers.MatcherResult
import io.kotest.matchers.should
import io.kotest.matchers.shouldNot
import java.sql.ResultSet

infix fun ResultSet.shouldHaveRows(rowCount: Int) = this should haveRowCount(rowCount)
infix fun ResultSet.shouldNotHaveRows(rowCount: Int) = this shouldNot haveRowCount(rowCount)

fun haveRowCount(rowCount: Int) = object : Matcher<ResultSet> {
   override fun test(value: ResultSet) =
      MatcherResult(
         value.row == rowCount,
         { "$value should have $rowCount rows" },
         { "$value should not have $rowCount rows" }
      )
}

infix fun ResultSet.shouldHaveColumns(columnCount: Int) = this should haveColumnCount(columnCount)
infix fun ResultSet.shouldNotHaveColumns(columnCount: Int) = this shouldNot haveColumnCount(columnCount)

fun haveColumnCount(columnCount: Int) = object : Matcher<ResultSet> {
   override fun test(value: ResultSet) =
      MatcherResult(
         value.metaData.columnCount == columnCount,
         { "$value should have $columnCount columns" },
         { "$value should not have $columnCount columns" }
      )
}

infix fun ResultSet.shouldContainColumn(columnName: String) = this should containColumn(columnName)
infix fun ResultSet.shouldNotContainColumn(columnName: String) = this shouldNot containColumn(columnName)

fun containColumn(columnName: String) = object : Matcher<ResultSet> {
   override fun test(value: ResultSet): MatcherResult {
      val metaData = value.metaData
      val colCount = metaData.columnCount
      return MatcherResult(
         (1..colCount).any { metaData.getColumnLabel(it) == columnName },
         { "$value should have $columnName column" },
         { "$value should not have $columnName column" }
      )
   }
}

@Suppress("UNCHECKED_CAST")
fun <T> ResultSet.shouldHaveColumn(columnName: String, next: (List<T>) -> Unit) {
   this shouldContainColumn columnName
   val data = mutableListOf<T>()
   while (this.next()) {
      data += this.getObject(columnName) as T
   }
   next(data)
}

fun ResultSet.shouldHaveRow(rowNum: Int, next: (List<Any>) -> Unit) {
   val metaData = this.metaData
   val colCount = metaData.columnCount
   val row = mutableListOf<Any>()
   this.absolute(rowNum)
   (1..colCount).forEach { colNum ->
      row += this.getObject(colNum)
   }
   next(row)
}
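The matchers above only read the ResultSet's metadata and cursor position, so they work with any JDBC driver. Below is a minimal usage sketch against an in-memory H2 database (H2 and the PLANETS table are borrowed from JDBCConnectionTest.kt above); it assumes the kotest-assertions-sql module, the Kotest JUnit 5 runner and the H2 driver are on the test classpath, and the class name is illustrative.

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.shouldBe
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumn
import io.kotest.matchers.sql.shouldHaveColumns
import io.kotest.matchers.sql.shouldHaveRows
import java.sql.DriverManager
import java.sql.ResultSet

class ResultSetMatchersUsageTest : FunSpec({
    test("SQL matchers against an H2 result set") {
        DriverManager.getConnection("jdbc:h2:mem:matcher_demo", "sa", "").use { conn ->
            conn.createStatement().use { stmt ->
                stmt.executeUpdate("CREATE TABLE PLANETS (ID INT PRIMARY KEY, NAME VARCHAR NOT NULL)")
                stmt.executeUpdate("INSERT INTO PLANETS VALUES (1, 'MERCURY'), (2, 'VENUS')")
            }
            // A scrollable statement lets us reposition the cursor later for the row-count check.
            val stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)
            val rs = stmt.executeQuery("SELECT ID, NAME FROM PLANETS ORDER BY ID")

            rs shouldHaveColumns 2            // metadata only, does not move the cursor
            rs shouldContainColumn "NAME"

            rs.shouldHaveColumn<String>("NAME") { names ->   // iterates the cursor to the end
                names shouldBe listOf("MERCURY", "VENUS")
            }

            rs.last()                         // haveRowCount compares against ResultSet.row
            rs shouldHaveRows 2
        }
    }
})

Because haveRowCount compares the expected count against the current row number (ResultSet.row) rather than counting rows itself, the cursor has to sit on the last row before shouldHaveRows is asserted, which is why the statement is created as TYPE_SCROLL_INSENSITIVE.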
Source:BigDecimalTypeTest.kt  
package com.github.thake.logminer.kafka.connect
import io.confluent.connect.avro.AvroData
import io.kotest.matchers.comparables.shouldBeEqualComparingTo
import io.kotest.matchers.nulls.shouldNotBeNull
import io.kotest.matchers.should
import io.kotest.matchers.shouldBe
import io.mockk.every
import io.mockk.mockk
import org.apache.avro.LogicalType
import org.apache.avro.LogicalTypes
import org.junit.jupiter.api.Test
import java.math.BigDecimal
import java.sql.ResultSet
import javax.xml.validation.Schema

class BigDecimalTypeTest {
    @Test
    fun testCorrectScaleForString() {
        val type = SchemaType.NumberType.BigDecimalType(13, 10)
        val str = "123.20"
        type.convert(str).should {
            it.scale().shouldBe(type.scale)
            it.shouldBeEqualComparingTo(str.toBigDecimal())
        }
    }

    @Test
    fun testCorrectScaleForResultSet() {
        val resultSet = mockk<ResultSet>()
        val columnIndex = 1
        val expectedDecimal = "234.123".toBigDecimal()
        every { resultSet.getBigDecimal(columnIndex) }.returns(expectedDecimal)
        val type = SchemaType.NumberType.BigDecimalType(ORACLE_UNQUALIFIED_NUMBER_PRECISION,
            ORACLE_UNQUALIFIED_NUMBER_SCALE)
        type.extract(columnIndex, resultSet).should {
            it.shouldNotBeNull()
            it.scale().shouldBe(type.scale)
            it.shouldBeEqualComparingTo(expectedDecimal)
        }
    }

    @Test
    fun testConversionToAvroSchema() {
        val type = SchemaType.NumberType.BigDecimalType(ORACLE_UNQUALIFIED_NUMBER_PRECISION,
            ORACLE_UNQUALIFIED_NUMBER_SCALE)
        val schema = type.createSchemaBuilder().build()
        val avroData = AvroData(10)
        val avroSchema = avroData.fromConnectSchema(schema)
        avroSchema.type.shouldBe(org.apache.avro.Schema.Type.BYTES)
        avroSchema.logicalType.shouldBe(LogicalTypes.decimal(ORACLE_UNQUALIFIED_NUMBER_PRECISION,
            ORACLE_UNQUALIFIED_NUMBER_SCALE))
    }
}
Source:DemoApplicationTests.kt  
package com.example.demo
import io.kotest.matchers.collections.shouldHaveSize
import io.kotest.matchers.string.shouldContain
import org.junit.jupiter.api.Test
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureTestDatabase
import org.springframework.boot.test.context.SpringBootTest
import org.springframework.jdbc.core.JdbcTemplate
import org.springframework.test.context.DynamicPropertyRegistry
import org.springframework.test.context.DynamicPropertySource
import org.testcontainers.containers.PostgreSQLContainer
import org.testcontainers.junit.jupiter.Container
import org.testcontainers.junit.jupiter.Testcontainers
import java.sql.ResultSet

@SpringBootTest
@AutoConfigureTestDatabase(replace = AutoConfigureTestDatabase.Replace.NONE)
@Testcontainers
class DemoApplicationTests {
    companion object {
        @Container
        private val postgreSQLContainer = PostgreSQLContainer<Nothing>("postgres:latest")

        @DynamicPropertySource
        @JvmStatic
        fun registerDynamicProperties(registry: DynamicPropertyRegistry) {
            registry.add("spring.datasource.url", postgreSQLContainer::getJdbcUrl)
            registry.add("spring.datasource.username", postgreSQLContainer::getUsername)
            registry.add("spring.datasource.password", postgreSQLContainer::getPassword)
        }
    }

    @Autowired
    private lateinit var jdbcTemplate: JdbcTemplate

    @Test
    fun contextLoads() {
    }

    @Test
    fun `when database is connected then it should be Postgres version 13`() {
        val actualDatabaseVersion = jdbcTemplate.queryForObject("SELECT version()", String::class.java)
        actualDatabaseVersion shouldContain "PostgreSQL 13.0"
    }
}
Source:TestcontainersSpec.kt  
package testcontainer.spike
import io.kotest.assertions.fail
import io.kotest.core.spec.style.WordSpec
import io.kotest.matchers.shouldBe
import testcontainer.spike.DatabaseFixture.withDatabase
import java.sql.DriverManager
import java.util.*

class TestcontainersSpec : WordSpec({
    "testcontainers" should {
        "be accessible in Jenkins" {
            withDatabase { dbHost, dbPort ->
                println("Attempting connection to $dbHost:$dbPort")
                val url = "jdbc:postgresql://$dbHost:$dbPort/products"
                val props = with(Properties()) {
                    setProperty("user", "developer")
                    setProperty("password", "password123")
                    this
                }
                val conn = DriverManager.getConnection(url, props)
                val prepareStatement = conn.prepareStatement("SELECT 1")
                val resultSet = prepareStatement.executeQuery()
                if (resultSet.next()) {
                    val result = resultSet.getInt(1)
                    result shouldBe 1
                } else {
                    fail("no result found")
                }
                prepareStatement.close()
                conn.close()
            }
        }
    }
})

resultset
Using AI Code Generation
import io.kotest.matchers.sql.resultset.shouldHaveColumn
import io.kotest.matchers.sql.resultset.shouldHaveColumns
import io.kotest.matchers.sql.resultset.shouldHaveRow
import io.kotest.matchers.sql.resultset.shouldHaveRows
import io.kotest.matchers.sql.resultset.shouldHaveSize
import io.kotest.matchers.sql.resultset.shouldHaveZeroRows
import io.kotest.matchers.sql.resultset.shouldNotHaveColumn
import io.kotest.matchers.sql.resultset.shouldNotHaveColumns
import io.kotest.matchers.sql.resultset.shouldNotHaveRow
import io.kotest.matchers.sql.resultset.shouldNotHaveRows
import io.kotest.matchers.sql.resultset.shouldNotHaveSize
import io.kotest.matchers.sql.resultset.shouldNotHaveZeroRows
import io.kotest.matchers.sql.statement.shouldBeEmpty
import io.kotest.matchers.sql.statement.shouldBeExecutable
import io.kotest.matchers.sql.statement.shouldBePrepared
import io.kotest.matchers.sql.statement.shouldBeQuery
import io.kotest.matchers.sql.statement.shouldBeUpdate
import io.kotest.matchers.sql.statement.shouldBeValid
import io.kotest.matchers.sql.statement.shouldNotBeEmpty
import io.kotest.matchers.sql.statement.shouldNotBeExecutable
import io.kotest.matchers.sql.statement.shouldNotBePrepared
import io.kotest.matchers.sql.statement.shouldNotBeQuery
import io.kotest.matchers.sql.statement.shouldNotBeUpdate
import io.kotest.matchers.sql.statement.shouldNotBeValid
import io.kotest.matchers.sql.sql.shouldBeValid
import io.kotest.matchers.sql.sql.shouldNotBeValid
import io.kotest.matchers.sql.sqlException.shouldBeInvalid
import io.kotest.matchers.sql.sqlException.shouldNotBeInvalid

resultset
Using AI Code Generation
import io.kotest.matchers.sql.resultset.shouldHaveColumn
import io.kotest.matchers.sql.resultset.shouldHaveColumns
import io.kotest.matchers.sql.resultset.shouldHaveSize
import io.kotest.matchers.sql.resultset.shouldHaveValue
import io.kotest.matchers.sql.resultset.shouldHaveValues
import io.kotest.matchers.sql.resultset.shouldMatch
import io.kotest.matchers.sql.resultset.shouldMatchExactly
import io.kotest.matchers.sql.resultset.shouldMatchInAnyOrder
import io.kotest.matchers.sql.resultset.shouldMatchInOrder
import io.kotest.matchers.sql.resultset.shouldMatchTable
import io.kotest.matchers.sql.resultset.shouldMatchTableExactly
import io.kotest.matchers.sql.resultset.shouldMatchTableInAnyOrder
import io.kotest.matchers.sql.resultset.shouldMatchTableInOrder
import io.kotest.matchers.sql.resultset.shouldMatchTableWithColumns
import io.kotest.matchers.sql.resultset.shouldMatchWithColumns
import io.kotest.matchers.sql.resultset.shouldMatchWithColumnsExactly
import io.kotest.matchers.sql.resultset.shouldMatchWithColumnsInAnyOrder
import io.kotest.matchers.sql.resultset.shouldMatchWithColumnsInOrder
import io.kotest.matchers.sql.resultset.shouldNotMatch
import io.kotest.matchers.sql.resultset.shouldNotMatchExactly
import io.kotest.matchers.sql.resultset.shouldNotMatchInAnyOrder
import io.kotest.matchers.sql.resultset.shouldNotMatchInOrder
import io.kotest.matchers.sql.resultset.shouldNotMatchTable
import io.kotest.matchers.sql.resultset.shouldNotMatchTableExactly
import io.kotest.matchers.sql.resultset.shouldNotMatchTableInAnyOrder
import io.kotest.matchers.sql.resultset.shouldNotMatchTableInOrder
import io.kotest.matchers.sql.resultset.shouldNotMatchTableWithColumns
import io.kotest.matchers.sql.resultset.shouldNotMatchWithColumns
import io.kotest.matchers.sql.resultset.shouldNotMatchWithColumnsExactly
import io.kotest.matchers.sql.resultset.shouldNotMatchWithColumnsInAnyOrder

resultset
Using AI Code Generation
import io.kotest.matchers.sql.resultset.shouldBe
import io.kotest.matchers.sql.resultset.shouldHaveColumns
import io.kotest.matchers.sql.resultset.shouldHaveSize
import io.kotest.matchers.sql.resultset.shouldNotBe
import io.kotest.matchers.sql.resultset.shouldNotHaveColumns
import io.kotest.matchers.sql.resultset.shouldNotHaveSize
import io.kotest.matchers.sql.sql.shouldBe
import io.kotest.matchers.sql.sql.shouldNotBe
import io.kotest.matchers.sql.sqlException.shouldBe
import io.kotest.matchers.sql.sqlException.shouldNotBe
import io.kotest.matchers.sql.sqlStatement.shouldBe
import io.kotest.matchers.sql.sqlStatement.shouldNotBe
import io.kotest.matchers.sql.sqlStatementResult.shouldBe
import io.kotest.matchers.sql.sqlStatementResult.shouldNotBe
import io.kotest.matchers.sql.sqlStatementType.shouldBe
import io.kotest.matchers.sql.sqlStatementType.shouldNotBe
import io.kotest.matchers.sql.table.shouldBe
import io.kotest.matchers.sql.table.shouldNotBe
import io.kotest.matchers.sql.transaction.shouldBe
import io.kotest.matchers.sql.transaction.shouldNotBe

resultset
Using AI Code Generation
    resultset {
        row {
        }
        row {
        }
    }
}

resultset
Using AI Code Generation
resultset should matchResultSet {
    row {
        column("id", 1)
        column("name", "kotlin")
    }
    row {
        column("id", 2)
        column("name", "java")
    }
}

resultset
Using AI Code Generation
import io.kotest.matchers.sql.resultset.shouldBe

class ResultSetMatchersTest : FunSpec({
  test("ResultSet should be matched") {
    val rs = mockkClass(ResultSet::class)
    every { rs.next() } returnsMany listOf(true, false)
    every { rs.getString(1) } returns "foo" andThen "bar"
    every { rs.getString(2) } returns "baz" andThen "qux"
    every { rs.getString(3) } returns "quux" andThen "quuz"
    rs shouldBe table(
      headers("a", "b", "c"),
      row("foo", "baz", "quux"),
      row("bar", "qux", "quuz")
    )
  }
})

resultset
Using AI Code Generation
val resultset = ResultSet()
resultset.shouldBeEmpty()
resultset.shouldBeNonEmpty()
resultset.shouldHaveSize(10)
resultset.shouldHaveColumnCount(5)
resultset.shouldHaveColumnNames("id", "name", "age")
resultset.shouldHaveColumnTypes("int", "varchar", "int")
resultset.shouldHaveColumnNamesAndTypes("id" to "int", "name" to "varchar", "age" to "int")
resultset.shouldHaveRow(1, "john", 20)
resultset.shouldHaveRows(listOf(1 to "john" to 20, 2 to "jane" to 21))
resultset.shouldHaveRows(listOf(listOf(1, "john", 20), listOf(2, "jane", 21)))
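Several of the generated calls above (shouldHaveColumnCount, shouldHaveColumnNames, the vararg shouldHaveRow) do not correspond to the matchers defined in resultset.kt earlier on this page, and java.sql.ResultSet has no public constructor. A minimal sketch of the same idea using the documented matchers against a MockK-mocked ResultSet is shown below; it assumes kotest-assertions-sql and MockK are on the test classpath, and the class name and stubbed labels are illustrative.

import io.kotest.core.spec.style.FunSpec
import io.kotest.matchers.sql.shouldContainColumn
import io.kotest.matchers.sql.shouldHaveColumns
import io.kotest.matchers.sql.shouldHaveRows
import io.mockk.every
import io.mockk.mockk
import java.sql.ResultSet
import java.sql.ResultSetMetaData

class MockedResultSetMatchersTest : FunSpec({
    test("SQL matchers against a mocked ResultSet") {
        val meta = mockk<ResultSetMetaData> {
            every { columnCount } returns 2
            every { getColumnLabel(1) } returns "id"
            every { getColumnLabel(2) } returns "name"
        }
        val rs = mockk<ResultSet> {
            every { metaData } returns meta
            every { row } returns 1    // haveRowCount compares against ResultSet.row
        }
        rs shouldHaveRows 1
        rs shouldHaveColumns 2
        rs shouldContainColumn "name"
    }
})

Note that shouldHaveRows checks the cursor's current row number (ResultSet.row), which is why the stub returns 1 rather than a total row count.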
