How to use Test method of regression Package

Best Keploy code snippet using regression.Test

aggregate_builtins_test.go

Source: aggregate_builtins_test.go (GitHub)

copy

Full Screen

...32 aggFunc func([]*types.T, *tree.EvalContext, tree.Datums) tree.AggregateFunc,33 firstArgs []tree.Datum,34 otherArgs ...[]tree.Datum,35) {36 evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())37 defer evalCtx.Stop(context.Background())38 argTypes := []*types.T{firstArgs[0].ResolvedType()}39 otherArgs = flattenArgs(otherArgs...)40 if len(otherArgs) == 0 {41 otherArgs = make([][]tree.Datum, len(firstArgs))42 }43 for i := range otherArgs[0] {44 argTypes = append(argTypes, otherArgs[0][i].ResolvedType())45 }46 aggImpl := aggFunc(argTypes, evalCtx, nil)47 defer aggImpl.Close(context.Background())48 runningDatums := make([]tree.Datum, len(firstArgs))49 runningStrings := make([]string, len(firstArgs))50 for i := range firstArgs {51 if err := aggImpl.Add(context.Background(), firstArgs[i], otherArgs[i]...); err != nil {52 t.Fatal(err)53 }54 res, err := aggImpl.Result()55 if err != nil {56 t.Fatal(err)57 }58 runningDatums[i] = res59 runningStrings[i] = res.String()60 }61 finalStrings := make([]string, len(firstArgs))62 for i, d := range runningDatums {63 finalStrings[i] = d.String()64 }65 if !reflect.DeepEqual(runningStrings, finalStrings) {66 t.Errorf("Aggregate result mutated during future accumulation: initial results were %v,"+67 " later results were %v", runningStrings, finalStrings)68 }69}70func flattenArgs(args ...[]tree.Datum) [][]tree.Datum {71 if len(args) == 0 {72 return nil73 }74 res := make([][]tree.Datum, len(args[0]))75 for i := range args {76 for j := range args[i] {77 res[j] = append(res[j], args[i][j])78 }79 }80 return res81}82func TestAvgIntResultDeepCopy(t *testing.T) {83 defer leaktest.AfterTest(t)()84 testAggregateResultDeepCopy(t, newIntAvgAggregate, makeIntTestDatum(10))85}86func TestAvgFloatResultDeepCopy(t *testing.T) {87 defer leaktest.AfterTest(t)()88 testAggregateResultDeepCopy(t, newFloatAvgAggregate, makeFloatTestDatum(10))89}90func TestAvgDecimalResultDeepCopy(t *testing.T) {91 defer leaktest.AfterTest(t)()92 
testAggregateResultDeepCopy(t, newDecimalAvgAggregate, makeDecimalTestDatum(10))93}94func TestAvgIntervalResultDeepCopy(t *testing.T) {95 defer leaktest.AfterTest(t)()96 testAggregateResultDeepCopy(t, newIntervalAvgAggregate, makeIntervalTestDatum(10))97}98func TestBitAndIntResultDeepCopy(t *testing.T) {99 defer leaktest.AfterTest(t)()100 t.Run("all null", func(t *testing.T) {101 testAggregateResultDeepCopy(t, newIntBitAndAggregate, makeNullTestDatum(10))102 })103 t.Run("with null", func(t *testing.T) {104 testAggregateResultDeepCopy(t, newIntBitAndAggregate, makeTestWithNullDatum(10, makeIntTestDatum))105 })106 t.Run("without null", func(t *testing.T) {107 testAggregateResultDeepCopy(t, newIntBitAndAggregate, makeIntTestDatum(10))108 })109}110func TestBitAndBitResultDeepCopy(t *testing.T) {111 defer leaktest.AfterTest(t)()112 t.Run("all null", func(t *testing.T) {113 testAggregateResultDeepCopy(t, newBitBitAndAggregate, makeNullTestDatum(10))114 })115 t.Run("with null", func(t *testing.T) {116 testAggregateResultDeepCopy(t, newBitBitAndAggregate, makeTestWithNullDatum(10, makeBitTestDatum))117 })118 t.Run("without null", func(t *testing.T) {119 for i := 0; i < 1000; i++ {120 testAggregateResultDeepCopy(t, newBitBitAndAggregate, makeBitTestDatum(10))121 }122 })123}124func TestBitOrIntResultDeepCopy(t *testing.T) {125 defer leaktest.AfterTest(t)()126 t.Run("all null", func(t *testing.T) {127 testAggregateResultDeepCopy(t, newIntBitOrAggregate, makeNullTestDatum(10))128 })129 t.Run("with null", func(t *testing.T) {130 testAggregateResultDeepCopy(t, newIntBitOrAggregate, makeTestWithNullDatum(10, makeIntTestDatum))131 })132 t.Run("without null", func(t *testing.T) {133 testAggregateResultDeepCopy(t, newIntBitOrAggregate, makeIntTestDatum(10))134 })135}136func TestBitOrBitResultDeepCopy(t *testing.T) {137 defer leaktest.AfterTest(t)()138 t.Run("all null", func(t *testing.T) {139 testAggregateResultDeepCopy(t, newBitBitOrAggregate, makeNullTestDatum(10))140 })141 
t.Run("with null", func(t *testing.T) {142 testAggregateResultDeepCopy(t, newBitBitOrAggregate, makeTestWithNullDatum(10, makeBitTestDatum))143 })144 t.Run("without null", func(t *testing.T) {145 testAggregateResultDeepCopy(t, newBitBitOrAggregate, makeBitTestDatum(10))146 })147}148func TestBoolAndResultDeepCopy(t *testing.T) {149 defer leaktest.AfterTest(t)()150 testAggregateResultDeepCopy(t, newBoolAndAggregate, makeBoolTestDatum(10))151}152func TestBoolOrResultDeepCopy(t *testing.T) {153 defer leaktest.AfterTest(t)()154 testAggregateResultDeepCopy(t, newBoolOrAggregate, makeBoolTestDatum(10))155}156func TestCountResultDeepCopy(t *testing.T) {157 defer leaktest.AfterTest(t)()158 testAggregateResultDeepCopy(t, newCountAggregate, makeIntTestDatum(10))159}160func TestMaxIntResultDeepCopy(t *testing.T) {161 defer leaktest.AfterTest(t)()162 testAggregateResultDeepCopy(t, newMaxAggregate, makeIntTestDatum(10))163}164func TestMaxFloatResultDeepCopy(t *testing.T) {165 defer leaktest.AfterTest(t)()166 testAggregateResultDeepCopy(t, newMaxAggregate, makeFloatTestDatum(10))167}168func TestMaxDecimalResultDeepCopy(t *testing.T) {169 defer leaktest.AfterTest(t)()170 testAggregateResultDeepCopy(t, newMaxAggregate, makeDecimalTestDatum(10))171}172func TestMaxBoolResultDeepCopy(t *testing.T) {173 defer leaktest.AfterTest(t)()174 testAggregateResultDeepCopy(t, newMaxAggregate, makeBoolTestDatum(10))175}176func TestMinIntResultDeepCopy(t *testing.T) {177 defer leaktest.AfterTest(t)()178 testAggregateResultDeepCopy(t, newMinAggregate, makeIntTestDatum(10))179}180func TestMinFloatResultDeepCopy(t *testing.T) {181 defer leaktest.AfterTest(t)()182 testAggregateResultDeepCopy(t, newMinAggregate, makeFloatTestDatum(10))183}184func TestMinDecimalResultDeepCopy(t *testing.T) {185 defer leaktest.AfterTest(t)()186 testAggregateResultDeepCopy(t, newMinAggregate, makeDecimalTestDatum(10))187}188func TestMinBoolResultDeepCopy(t *testing.T) {189 defer leaktest.AfterTest(t)()190 
testAggregateResultDeepCopy(t, newMinAggregate, makeBoolTestDatum(10))191}192func TestSumSmallIntResultDeepCopy(t *testing.T) {193 defer leaktest.AfterTest(t)()194 testAggregateResultDeepCopy(t, newSmallIntSumAggregate, makeSmallIntTestDatum(10))195}196func TestSumIntResultDeepCopy(t *testing.T) {197 defer leaktest.AfterTest(t)()198 testAggregateResultDeepCopy(t, newIntSumAggregate, makeIntTestDatum(10))199}200func TestSumFloatResultDeepCopy(t *testing.T) {201 defer leaktest.AfterTest(t)()202 testAggregateResultDeepCopy(t, newFloatSumAggregate, makeFloatTestDatum(10))203}204func TestSumDecimalResultDeepCopy(t *testing.T) {205 defer leaktest.AfterTest(t)()206 testAggregateResultDeepCopy(t, newDecimalSumAggregate, makeDecimalTestDatum(10))207}208func TestSumIntervalResultDeepCopy(t *testing.T) {209 defer leaktest.AfterTest(t)()210 testAggregateResultDeepCopy(t, newIntervalSumAggregate, makeIntervalTestDatum(10))211}212func TestVarianceIntResultDeepCopy(t *testing.T) {213 defer leaktest.AfterTest(t)()214 testAggregateResultDeepCopy(t, newIntVarianceAggregate, makeIntTestDatum(10))215}216func TestVarianceFloatResultDeepCopy(t *testing.T) {217 defer leaktest.AfterTest(t)()218 testAggregateResultDeepCopy(t, newFloatVarianceAggregate, makeFloatTestDatum(10))219}220func TestVarianceDecimalResultDeepCopy(t *testing.T) {221 defer leaktest.AfterTest(t)()222 testAggregateResultDeepCopy(t, newDecimalVarianceAggregate, makeDecimalTestDatum(10))223}224func TestSqrDiffIntResultDeepCopy(t *testing.T) {225 defer leaktest.AfterTest(t)()226 testAggregateResultDeepCopy(t, newIntSqrDiffAggregate, makeIntTestDatum(10))227}228func TestSqrDiffFloatResultDeepCopy(t *testing.T) {229 defer leaktest.AfterTest(t)()230 testAggregateResultDeepCopy(t, newFloatSqrDiffAggregate, makeFloatTestDatum(10))231}232func TestSqrDiffDecimalResultDeepCopy(t *testing.T) {233 defer leaktest.AfterTest(t)()234 testAggregateResultDeepCopy(t, newDecimalSqrDiffAggregate, makeDecimalTestDatum(10))235}236func 
TestVarPopIntResultDeepCopy(t *testing.T) {237 defer leaktest.AfterTest(t)()238 testAggregateResultDeepCopy(t, newIntVarPopAggregate, makeIntTestDatum(10))239}240func TestVarPopFloatResultDeepCopy(t *testing.T) {241 defer leaktest.AfterTest(t)()242 testAggregateResultDeepCopy(t, newFloatVarPopAggregate, makeFloatTestDatum(10))243}244func TestVarPopDecimalResultDeepCopy(t *testing.T) {245 defer leaktest.AfterTest(t)()246 testAggregateResultDeepCopy(t, newDecimalVarPopAggregate, makeDecimalTestDatum(10))247}248func TestStdDevIntResultDeepCopy(t *testing.T) {249 defer leaktest.AfterTest(t)()250 testAggregateResultDeepCopy(t, newIntStdDevAggregate, makeIntTestDatum(10))251}252func TestStdDevFloatResultDeepCopy(t *testing.T) {253 defer leaktest.AfterTest(t)()254 testAggregateResultDeepCopy(t, newFloatStdDevAggregate, makeFloatTestDatum(10))255}256func TestStdDevDecimalResultDeepCopy(t *testing.T) {257 defer leaktest.AfterTest(t)()258 testAggregateResultDeepCopy(t, newDecimalStdDevAggregate, makeDecimalTestDatum(10))259}260func TestStdDevPopIntResultDeepCopy(t *testing.T) {261 defer leaktest.AfterTest(t)()262 testAggregateResultDeepCopy(t, newIntStdDevPopAggregate, makeIntTestDatum(10))263}264func TestStdDevPopFloatResultDeepCopy(t *testing.T) {265 defer leaktest.AfterTest(t)()266 testAggregateResultDeepCopy(t, newFloatStdDevPopAggregate, makeFloatTestDatum(10))267}268func TestStdDevPopDecimalResultDeepCopy(t *testing.T) {269 defer leaktest.AfterTest(t)()270 testAggregateResultDeepCopy(t, newDecimalStdDevPopAggregate, makeDecimalTestDatum(10))271}272func TestCorr(t *testing.T) {273 defer leaktest.AfterTest(t)()274 testRegressionAggregateFunctionResultDeepCopy(t, newCorrAggregate)275}276func TestCovarPop(t *testing.T) {277 defer leaktest.AfterTest(t)()278 testRegressionAggregateFunctionResultDeepCopy(t, newCovarPopAggregate)279}280func TestCovarSamp(t *testing.T) {281 defer leaktest.AfterTest(t)()282 testRegressionAggregateFunctionResultDeepCopy(t, 
newCovarSampAggregate)283}284func TestRegressionIntercept(t *testing.T) {285 defer leaktest.AfterTest(t)()286 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionInterceptAggregate)287}288func TestRegressionR2(t *testing.T) {289 defer leaktest.AfterTest(t)()290 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionR2Aggregate)291}292func TestRegressionSlope(t *testing.T) {293 defer leaktest.AfterTest(t)()294 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionSlopeAggregate)295}296func TestRegressionSXX(t *testing.T) {297 defer leaktest.AfterTest(t)()298 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionSXXAggregate)299}300func TestRegressionSXY(t *testing.T) {301 defer leaktest.AfterTest(t)()302 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionSXYAggregate)303}304func TestRegressionSYY(t *testing.T) {305 defer leaktest.AfterTest(t)()306 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionSYYAggregate)307}308func TestRegressionCount(t *testing.T) {309 defer leaktest.AfterTest(t)()310 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionCountAggregate)311}312func TestRegressionAvgX(t *testing.T) {313 defer leaktest.AfterTest(t)()314 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionAvgXAggregate)315}316func TestRegressionAvgY(t *testing.T) {317 defer leaktest.AfterTest(t)()318 testRegressionAggregateFunctionResultDeepCopy(t, newRegressionAvgYAggregate)319}320// testRegressionAggregateFunctionResultDeepCopy is a helper function321// for testing regression aggregate functions.322func testRegressionAggregateFunctionResultDeepCopy(323 t *testing.T, aggFunc func([]*types.T, *tree.EvalContext, tree.Datums) tree.AggregateFunc,324) {325 defer leaktest.AfterTest(t)()326 t.Run("float float", func(t *testing.T) {327 testAggregateResultDeepCopy(t, aggFunc, makeFloatTestDatum(10), makeFloatTestDatum(10))328 })329 t.Run("int int", func(t *testing.T) {330 testAggregateResultDeepCopy(t, aggFunc, 
makeIntTestDatum(10), makeIntTestDatum(10))331 })332 t.Run("decimal decimal", func(t *testing.T) {333 testAggregateResultDeepCopy(t, aggFunc, makeDecimalTestDatum(10), makeDecimalTestDatum(10))334 })335 t.Run("float int", func(t *testing.T) {336 testAggregateResultDeepCopy(t, aggFunc, makeFloatTestDatum(10), makeIntTestDatum(10))337 })338 t.Run("float decimal", func(t *testing.T) {339 testAggregateResultDeepCopy(t, aggFunc, makeFloatTestDatum(10), makeDecimalTestDatum(10))340 })341 t.Run("int float", func(t *testing.T) {342 testAggregateResultDeepCopy(t, aggFunc, makeIntTestDatum(10), makeFloatTestDatum(10))343 })344 t.Run("int decimal", func(t *testing.T) {345 testAggregateResultDeepCopy(t, aggFunc, makeIntTestDatum(10), makeDecimalTestDatum(10))346 })347 t.Run("decimal float", func(t *testing.T) {348 testAggregateResultDeepCopy(t, aggFunc, makeDecimalTestDatum(10), makeFloatTestDatum(10))349 })350 t.Run("decimal int", func(t *testing.T) {351 testAggregateResultDeepCopy(t, aggFunc, makeDecimalTestDatum(10), makeIntTestDatum(10))352 })353 t.Run("all null", func(t *testing.T) {354 testAggregateResultDeepCopy(t, aggFunc, makeNullTestDatum(10), makeNullTestDatum(10))355 })356 t.Run("with first arg null", func(t *testing.T) {357 testAggregateResultDeepCopy(t, aggFunc, makeTestWithNullDatum(10, makeIntTestDatum), makeIntTestDatum(10))358 })359 t.Run("with other arg null", func(t *testing.T) {360 testAggregateResultDeepCopy(t, aggFunc, makeIntTestDatum(10), makeTestWithNullDatum(10, makeIntTestDatum))361 })362}363// makeNullTestDatum will create an array of only DNull364// values to make sure the aggregation handles only nulls.365func makeNullTestDatum(count int) []tree.Datum {366 values := make([]tree.Datum, count)367 for i := range values {368 values[i] = tree.DNull369 }370 return values371}372func makeIntTestDatum(count int) []tree.Datum {373 rng, _ := randutil.NewTestRand()374 vals := make([]tree.Datum, count)375 for i := range vals {376 vals[i] = 
tree.NewDInt(tree.DInt(rng.Int63()))377 }378 return vals379}380func makeBitTestDatum(count int) []tree.Datum {381 rng, _ := randutil.NewTestRand()382 // Compute randWidth outside the loop so that all bit arrays are the same383 // length. Generate widths in the range [0, 64].384 vals := make([]tree.Datum, count)385 randWidth := uint(rng.Intn(65))386 for i := range vals {387 vals[i], _ = tree.NewDBitArrayFromInt(rng.Int63(), randWidth)388 }389 return vals390}391// makeTestWithNullDatum will call the maker function392// to generate an array of datums, and then a null datum393// will be placed randomly in the array of datums and394// returned. Use this to ensure proper partial null395// handling of aggregations.396func makeTestWithNullDatum(count int, maker func(count int) []tree.Datum) []tree.Datum {397 rng, _ := randutil.NewTestRand()398 values := maker(count)399 values[rng.Int()%count] = tree.DNull400 return values401}402// makeSmallIntTestDatum creates integers that are sufficiently403// smaller than 2^64-1 that they can be added to each other for a404// significant part of the test without overflow. 
This is meant to405// test the implementation of aggregates that can use an int64 to406// optimize computations small decimal values.407func makeSmallIntTestDatum(count int) []tree.Datum {408 rng, _ := randutil.NewTestRand()409 vals := make([]tree.Datum, count)410 for i := range vals {411 sign := int32(1)412 if rng.Int31()&1 == 0 {413 sign = -1414 }415 vals[i] = tree.NewDInt(tree.DInt(rng.Int31() * sign))416 }417 return vals418}419func makeFloatTestDatum(count int) []tree.Datum {420 rng, _ := randutil.NewTestRand()421 vals := make([]tree.Datum, count)422 for i := range vals {423 vals[i] = tree.NewDFloat(tree.DFloat(rng.Float64()))424 }425 return vals426}427func makeDecimalTestDatum(count int) []tree.Datum {428 rng, _ := randutil.NewTestRand()429 vals := make([]tree.Datum, count)430 for i := range vals {431 dd := &tree.DDecimal{}432 if _, err := dd.SetFloat64(rng.Float64()); err != nil {433 panic(err)434 }435 vals[i] = dd436 }437 return vals438}439func makeBoolTestDatum(count int) []tree.Datum {440 rng, _ := randutil.NewTestRand()441 vals := make([]tree.Datum, count)442 for i := range vals {443 vals[i] = tree.MakeDBool(tree.DBool(rng.Int31n(2) == 0))444 }445 return vals446}447func makeIntervalTestDatum(count int) []tree.Datum {448 rng, _ := randutil.NewTestRand()449 vals := make([]tree.Datum, count)450 for i := range vals {451 vals[i] = &tree.DInterval{Duration: duration.MakeDuration(rng.Int63n(1000000), rng.Int63n(1000), rng.Int63n(1000))}452 }453 return vals454}455func TestArrayAggNameOverload(t *testing.T) {456 defer leaktest.AfterTest(t)()457 testArrayAggAliasedTypeOverload(context.Background(), t, types.Name)458}459func TestArrayAggOidOverload(t *testing.T) {460 defer leaktest.AfterTest(t)()461 testArrayAggAliasedTypeOverload(context.Background(), t, types.Oid)462}463// testAliasedTypeOverload is a helper function for testing ARRAY_AGG's464// overloads that can take aliased scalar types like NAME and OID.465// These tests are necessary because some ORMs (e.g., 
sequelize) require466// ARRAY_AGG to work on these aliased types and produce a result with the467// correct type.468func testArrayAggAliasedTypeOverload(ctx context.Context, t *testing.T, expected *types.T) {469 defer tree.MockNameTypes(map[string]*types.T{470 "a": expected,471 })()472 exprStr := "array_agg(a)"473 expr, err := parser.ParseExpr(exprStr)474 if err != nil {475 t.Fatalf("%s: %v", exprStr, err)476 }477 typ := types.MakeArray(expected)478 typedExpr, err := tree.TypeCheck(ctx, expr, nil, typ)479 if err != nil {480 t.Fatalf("%s: %v", expr, err)481 }482 if !typedExpr.ResolvedType().ArrayContents().Identical(expected) {483 t.Fatalf(484 "Expression has incorrect type: expected %v but got %v",485 expected,486 typedExpr.ResolvedType(),487 )488 }489}490func runBenchmarkAggregate(491 b *testing.B,492 aggFunc func([]*types.T, *tree.EvalContext, tree.Datums) tree.AggregateFunc,493 firstArgs []tree.Datum,494 otherArgs ...[]tree.Datum,495) {496 evalCtx := tree.NewTestingEvalContext(cluster.MakeTestingClusterSettings())497 defer evalCtx.Stop(context.Background())498 argTypes := []*types.T{firstArgs[0].ResolvedType()}499 otherArgs = flattenArgs(otherArgs...)500 if len(otherArgs) == 0 {501 otherArgs = make([][]tree.Datum, len(firstArgs))502 }503 for i := range otherArgs[0] {504 argTypes = append(argTypes, otherArgs[0][i].ResolvedType())505 }506 b.ResetTimer()507 for i := 0; i < b.N; i++ {508 func() {509 aggImpl := aggFunc(argTypes, evalCtx, nil)510 defer aggImpl.Close(context.Background())511 for i := range firstArgs {512 if err := aggImpl.Add(context.Background(), firstArgs[i], otherArgs[i]...); err != nil {513 b.Fatal(err)514 }515 }516 res, err := aggImpl.Result()517 if err != nil || res == nil {518 b.Errorf("taking result of aggregate implementation %T failed", aggImpl)519 }520 }()521 }522}523const aggregateBuiltinsBenchmarkCount = 1000524func BenchmarkAvgAggregateInt(b *testing.B) {525 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b 
*testing.B) {526 runBenchmarkAggregate(b, newIntAvgAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))527 })528}529func BenchmarkAvgAggregateSmallInt(b *testing.B) {530 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {531 runBenchmarkAggregate(b, newIntAvgAggregate, makeSmallIntTestDatum(aggregateBuiltinsBenchmarkCount))532 })533}534func BenchmarkAvgAggregateFloat(b *testing.B) {535 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {536 runBenchmarkAggregate(b, newFloatAvgAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))537 })538}539func BenchmarkAvgAggregateDecimal(b *testing.B) {540 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {541 runBenchmarkAggregate(b, newDecimalAvgAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))542 })543}544func BenchmarkAvgAggregateInterval(b *testing.B) {545 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {546 runBenchmarkAggregate(b, newIntervalAvgAggregate, makeIntervalTestDatum(aggregateBuiltinsBenchmarkCount))547 })548}549func BenchmarkCountAggregate(b *testing.B) {550 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {551 runBenchmarkAggregate(b, newCountAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))552 })553}554func BenchmarkSumIntAggregateInt(b *testing.B) {555 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {556 runBenchmarkAggregate(b, newSmallIntSumAggregate, makeSmallIntTestDatum(aggregateBuiltinsBenchmarkCount))557 })558}559func BenchmarkSumAggregateInt(b *testing.B) {560 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {561 runBenchmarkAggregate(b, newIntSumAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))562 })563}564func BenchmarkSumAggregateSmallInt(b *testing.B) {565 b.Run(fmt.Sprintf("count=%d", 
aggregateBuiltinsBenchmarkCount), func(b *testing.B) {566 runBenchmarkAggregate(b, newIntSumAggregate, makeSmallIntTestDatum(aggregateBuiltinsBenchmarkCount))567 })568}569func BenchmarkSumAggregateFloat(b *testing.B) {570 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {571 runBenchmarkAggregate(b, newFloatSumAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))572 })573}574func BenchmarkSumAggregateDecimal(b *testing.B) {575 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {576 runBenchmarkAggregate(b, newDecimalSumAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))577 })578}579func BenchmarkMaxAggregateInt(b *testing.B) {580 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {581 runBenchmarkAggregate(b, newMaxAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))582 })583}584func BenchmarkMaxAggregateFloat(b *testing.B) {585 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {586 runBenchmarkAggregate(b, newMaxAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))587 })588}589func BenchmarkMaxAggregateDecimal(b *testing.B) {590 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {591 runBenchmarkAggregate(b, newMaxAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))592 })593}594func BenchmarkMinAggregateInt(b *testing.B) {595 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {596 runBenchmarkAggregate(b, newMinAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))597 })598}599func BenchmarkMinAggregateFloat(b *testing.B) {600 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {601 runBenchmarkAggregate(b, newMinAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))602 })603}604func BenchmarkMinAggregateDecimal(b *testing.B) {605 b.Run(fmt.Sprintf("count=%d", 
aggregateBuiltinsBenchmarkCount), func(b *testing.B) {606 runBenchmarkAggregate(b, newMinAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))607 })608}609func BenchmarkVarianceAggregateInt(b *testing.B) {610 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {611 runBenchmarkAggregate(b, newIntVarianceAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))612 })613}614func BenchmarkVarianceAggregateFloat(b *testing.B) {615 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {616 runBenchmarkAggregate(b, newFloatVarianceAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))617 })618}619func BenchmarkVarianceAggregateDecimal(b *testing.B) {620 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {621 runBenchmarkAggregate(b, newDecimalVarianceAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))622 })623}624func BenchmarkSqrDiffAggregateInt(b *testing.B) {625 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {626 runBenchmarkAggregate(b, newIntSqrDiffAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))627 })628}629func BenchmarkSqrDiffAggregateFloat(b *testing.B) {630 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {631 runBenchmarkAggregate(b, newFloatSqrDiffAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))632 })633}634func BenchmarkSqrDiffAggregateDecimal(b *testing.B) {635 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {636 runBenchmarkAggregate(b, newDecimalSqrDiffAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))637 })638}639func BenchmarkVarPopAggregateInt(b *testing.B) {640 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {641 runBenchmarkAggregate(b, newIntVarPopAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))642 })643}644func 
BenchmarkVarPopAggregateFloat(b *testing.B) {645 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {646 runBenchmarkAggregate(b, newFloatVarPopAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))647 })648}649func BenchmarkVarPopAggregateDecimal(b *testing.B) {650 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {651 runBenchmarkAggregate(b, newDecimalVarPopAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))652 })653}654func BenchmarkStdDevAggregateInt(b *testing.B) {655 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {656 runBenchmarkAggregate(b, newIntStdDevAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))657 })658}659func BenchmarkStdDevAggregateFloat(b *testing.B) {660 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {661 runBenchmarkAggregate(b, newFloatStdDevAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))662 })663}664func BenchmarkStdDevAggregateDecimal(b *testing.B) {665 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {666 runBenchmarkAggregate(b, newDecimalStdDevAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))667 })668}669func BenchmarkStdDevPopAggregateInt(b *testing.B) {670 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {671 runBenchmarkAggregate(b, newIntStdDevPopAggregate, makeIntTestDatum(aggregateBuiltinsBenchmarkCount))672 })673}674func BenchmarkStdDevPopAggregateFloat(b *testing.B) {675 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {676 runBenchmarkAggregate(b, newFloatStdDevPopAggregate, makeFloatTestDatum(aggregateBuiltinsBenchmarkCount))677 })678}679func BenchmarkStdDevPopAggregateDecimal(b *testing.B) {680 b.Run(fmt.Sprintf("count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {681 runBenchmarkAggregate(b, 
newDecimalStdDevPopAggregate, makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount))682 })683}684func BenchmarkCorrAggregate(b *testing.B) {685 runRegressionAggregateBenchmarks(b, newCorrAggregate)686}687func BenchmarkCovarPopAggregate(b *testing.B) {688 runRegressionAggregateBenchmarks(b, newCovarPopAggregate)689}690func BenchmarkCovarSampAggregate(b *testing.B) {691 runRegressionAggregateBenchmarks(b, newCovarSampAggregate)692}693func BenchmarkRegressionInterceptAggregate(b *testing.B) {694 runRegressionAggregateBenchmarks(b, newRegressionInterceptAggregate)695}696func BenchmarkRegressionR2Aggregate(b *testing.B) {697 runRegressionAggregateBenchmarks(b, newRegressionR2Aggregate)698}699func BenchmarkRegressionSlopeAggregate(b *testing.B) {700 runRegressionAggregateBenchmarks(b, newRegressionSlopeAggregate)701}702func BenchmarkRegressionSXXAggregate(b *testing.B) {703 runRegressionAggregateBenchmarks(b, newRegressionSXXAggregate)704}705func BenchmarkRegressionSXYAggregate(b *testing.B) {706 runRegressionAggregateBenchmarks(b, newRegressionSXYAggregate)707}708func BenchmarkRegressionSYYAggregate(b *testing.B) {709 runRegressionAggregateBenchmarks(b, newRegressionSYYAggregate)710}711func BenchmarkRegressionCountAggregate(b *testing.B) {712 runRegressionAggregateBenchmarks(b, newRegressionCountAggregate)713}714func BenchmarkRegressionAvgXAggregate(b *testing.B) {715 runRegressionAggregateBenchmarks(b, newRegressionAvgXAggregate)716}717func BenchmarkRegressionAvgYAggregate(b *testing.B) {718 runRegressionAggregateBenchmarks(b, newRegressionAvgYAggregate)719}720// runRegressionAggregateBenchmarks is a helper function for running721// benchmarks for regression aggregate functions.722func runRegressionAggregateBenchmarks(723 b *testing.B, aggFunc func([]*types.T, *tree.EvalContext, tree.Datums) tree.AggregateFunc,724) {725 b.Run(fmt.Sprintf("Ints count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {726 runBenchmarkAggregate(727 b,728 aggFunc,729 
makeIntTestDatum(aggregateBuiltinsBenchmarkCount),730 makeIntTestDatum(aggregateBuiltinsBenchmarkCount),731 )732 })733 b.Run(fmt.Sprintf("Floats count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {734 runBenchmarkAggregate(735 b,736 aggFunc,737 makeFloatTestDatum(aggregateBuiltinsBenchmarkCount),738 makeFloatTestDatum(aggregateBuiltinsBenchmarkCount),739 )740 })741 b.Run(fmt.Sprintf("Int Float count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {742 runBenchmarkAggregate(743 b,744 aggFunc,745 makeIntTestDatum(aggregateBuiltinsBenchmarkCount),746 makeFloatTestDatum(aggregateBuiltinsBenchmarkCount),747 )748 })749 b.Run(fmt.Sprintf("Decimals count=%d", aggregateBuiltinsBenchmarkCount), func(b *testing.B) {750 runBenchmarkAggregate(751 b,752 aggFunc,753 makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount),754 makeDecimalTestDatum(aggregateBuiltinsBenchmarkCount),755 )756 })757}...

Full Screen

Full Screen

LogisticRegression.go

Source: LogisticRegression.go (GitHub)

copy

Full Screen

package main

import (
	"fmt"
	"math"
)

// LogisticRegression is a multiclass (softmax) logistic-regression model.
// W is an n_out x n_in weight matrix and b an n_out bias vector; N is the
// number of training examples, used to scale each gradient step.
type LogisticRegression struct {
	N     int
	n_in  int
	n_out int
	W     [][]float64
	b     []float64
}

// LogisticRegression__construct initializes m with zeroed weights and biases
// for n_in inputs and n_out output classes, over N training examples.
func LogisticRegression__construct(m *LogisticRegression, N int, n_in int, n_out int) {
	m.N = N
	m.n_in = n_in
	m.n_out = n_out
	m.W = make([][]float64, n_out)
	for i := 0; i < n_out; i++ {
		m.W[i] = make([]float64, n_in)
	}
	m.b = make([]float64, n_out)
}

// LogisticRegression_train performs one stochastic-gradient-descent step on a
// single example: x is the input vector, y its one-hot label, and lr the
// learning rate. The update is (lr/N) * (y - p) per weight, where p = softmax(Wx + b).
func LogisticRegression_train(m *LogisticRegression, x []int, y []int, lr float64) {
	p_y_given_x := make([]float64, m.n_out)
	dy := make([]float64, m.n_out)

	// Forward pass: p_y_given_x = softmax(W*x + b).
	for i := 0; i < m.n_out; i++ {
		p_y_given_x[i] = 0
		for j := 0; j < m.n_in; j++ {
			p_y_given_x[i] += m.W[i][j] * float64(x[j])
		}
		p_y_given_x[i] += m.b[i]
	}
	LogisticRegression_softmax(m, p_y_given_x)

	// Backward pass: gradient of the cross-entropy loss is (y - p);
	// divide by N so the step size is independent of dataset size.
	for i := 0; i < m.n_out; i++ {
		dy[i] = float64(y[i]) - p_y_given_x[i]
		for j := 0; j < m.n_in; j++ {
			m.W[i][j] += lr * dy[i] * float64(x[j]) / float64(m.N)
		}
		m.b[i] += lr * dy[i] / float64(m.N)
	}
}

// LogisticRegression_softmax normalizes x in place into a probability
// distribution. The running max is subtracted before exponentiation for
// numerical stability (softmax is shift-invariant).
func LogisticRegression_softmax(m *LogisticRegression, x []float64) {
	var (
		max float64
		sum float64
	)
	for i := 0; i < m.n_out; i++ {
		if max < x[i] {
			max = x[i]
		}
	}
	for i := 0; i < m.n_out; i++ {
		x[i] = math.Exp(x[i] - max)
		sum += x[i]
	}
	for i := 0; i < m.n_out; i++ {
		x[i] /= sum
	}
}

// LogisticRegression_predict writes softmax(W*x + b) into y, the model's
// class-probability estimates for input x. y must have length n_out.
func LogisticRegression_predict(m *LogisticRegression, x []int, y []float64) {
	for i := 0; i < m.n_out; i++ {
		y[i] = 0
		for j := 0; j < m.n_in; j++ {
			y[i] += m.W[i][j] * float64(x[j])
		}
		y[i] += m.b[i]
	}
	LogisticRegression_softmax(m, y)
}

// test_lr trains a 6-input / 2-class model on six hand-written examples and
// prints the predicted class probabilities for two held-out inputs.
func test_lr() {
	learning_rate := 0.1
	n_epochs := 500

	train_N := 6
	test_N := 2
	n_in := 6
	n_out := 2

	// training data
	train_X := [][]int{
		{1, 1, 1, 0, 0, 0},
		{1, 0, 1, 0, 0, 0},
		{1, 1, 1, 0, 0, 0},
		{0, 0, 1, 1, 1, 0},
		{0, 0, 1, 1, 0, 0},
		{0, 0, 1, 1, 1, 0},
	}
	train_Y := [][]int{
		{1, 0},
		{1, 0},
		{1, 0},
		{0, 1},
		{0, 1},
		{0, 1},
	}

	// construct LogisticRegression
	var classifier LogisticRegression
	LogisticRegression__construct(&classifier, train_N, n_in, n_out)

	// train
	for epoch := 0; epoch < n_epochs; epoch++ {
		for i := 0; i < train_N; i++ {
			LogisticRegression_train(&classifier, train_X[i], train_Y[i], learning_rate)
		}
	}

	// test data
	test_X := [][]int{
		{1, 0, 1, 0, 0, 0},
		{0, 0, 1, 1, 1, 0},
	}

	test_Y := make([][]float64, test_N)
	for i := 0; i < test_N; i++ {
		test_Y[i] = make([]float64, n_out)
	}

	// test
	for i := 0; i < test_N; i++ {
		LogisticRegression_predict(&classifier, test_X[i], test_Y[i])
		for j := 0; j < n_out; j++ {
			fmt.Printf("%f ", test_Y[i][j])
		}
		fmt.Printf("\n")
	}
}

func main() {
	test_lr()
}

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r.SetObserved("Y")4 r.SetVar(0, "X")5 r.Train(regression.Data{6 regression.Var("X"), regression.Var("Y"),7 }, []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})8 r.Run()9 fmt.Printf("%0.2f\n", r.Coeff(0))10}11import (12func main() {13 r.SetObserved("Y")14 r.SetVar(0, "X")15 r.Train(regression.Data{16 regression.Var("X"), regression.Var("Y"),17 }, []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})18 r.Run()19 fmt.Printf("%0.2f\n", r.Coeff(1))20}21import (22func main() {23 r.SetObserved("Y")24 r.SetVar(0, "X")25 r.Train(regression.Data{26 regression.Var("X"), regression.Var("Y"),27 }, []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})28 r.Run()29 fmt.Printf("%0.2f\n", r.Formula)30}31import (32func main() {33 r.SetObserved("Y")34 r.SetVar(0, "

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r.SetObserved("y")4 r.SetVar(0, "x")5 r.Train(6 regression.DataPoint(2.71, []float64{1.41}),7 regression.DataPoint(4.27, []float64{2.72}),8 regression.DataPoint(5.68, []float64{3.14}),9 regression.DataPoint(8.83, []float64{4.13}),10 r.Run()11 fmt.Printf("%0.2f\n", r.Coeff(0))12 fmt.Printf("%0.2f\n", r.Coeff(1))13 fmt.Printf("%0.2f\n", r.Coeff(2))14 fmt.Printf("%0.2f\n", r.Coeff(3))15 fmt.Printf("%0.2f\n", r.Coeff(4))16 fmt.Printf("%0.2f\n", r.Coeff(5))17 fmt.Printf("%0.2f\n", r.Coeff(6))18 fmt.Printf("%0.2f\n", r.Coeff(7))19 fmt.Printf("%0.2f\n", r.Coeff(8))20 fmt.Printf("%0.2f\n", r.Coeff(9))21 fmt.Printf("%0.2f\n", r.Coeff(10))22 fmt.Printf("%0.2f\n", r.Coeff(11))23 fmt.Printf("%0.2f\n", r.Coeff(12))24 fmt.Printf("%0.2f\n", r.Coeff(13))25 fmt.Printf("%0.2f\n", r.Coeff(14))26 fmt.Printf("%0.2f\n", r.Coeff(15))27 fmt.Printf("%0.2f\n", r.Coeff(16))28 fmt.Printf("%0.2f\n", r.Coeff(17))29 fmt.Printf("%0.2f\n", r.Coeff(18))30 fmt.Printf("%0.2f\n", r.Coeff(19))31 fmt.Printf("%0.2f\n", r.Coeff(20))32 fmt.Printf("%0.2f\n", r.Coeff(21))33 fmt.Printf("%0.2f\n", r.Coeff(22))

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r.SetObserved("Price")4 r.SetVar(0, "Size")5 r.Train(6 regression.Data{7 X: []float64{1, 2, 3, 4},8 Y: []float64{1, 2, 3, 4},9 },10 r.Train(11 regression.Data{12 X: []float64{1, 2, 3, 4},13 Y: []float64{2, 4, 6, 8},14 },15 r.Run()16 fmt.Printf("\nRegression formula:\n%v\n", r.Formula)17 fmt.Printf("\nR²:\n%v\n", r.R2)18 fmt.Printf("\nStd. Error:\n%v\n", r.StdErr)19 fmt.Printf("\nParameters:\n%v\n", r.Coeffs)20 fmt.Printf("\nPrice of a 5 bedroom:\n%0.2f\n", r.Predict([]float64{5}))21}22import (23func main() {24 r.SetObserved("Price")25 r.SetVar(0, "Size")26 r.Train(27 regression.Data{28 X: []float64{1, 2, 3, 4},29 Y: []float64{1, 2, 3, 4},30 },31 r.Train(32 regression.Data{33 X: []float64{1, 2, 3, 4},34 Y: []float64{2, 4, 6, 8},35 },36 r.Run()37 fmt.Printf("\nRegression formula:\n%v\n", r.Formula)38 fmt.Printf("\nR²:\n%v\n", r.R2)39 fmt.Printf("\nStd. Error:\n%v\n", r.StdErr)40 fmt.Printf("\nParameters:\n%v\n", r.Coeffs)41 fmt.Printf("\nPrice of a 5 bedroom:\n%0.2f\n", r.Predict([]float64{5}))

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r := new(regression.Regression)4 r.SetObserved("Y")5 r.SetVar(0, "X")6 r.Train(regression.DataPoint(65.0, []float64{1.0}))7 r.Train(regression.DataPoint(72.0, []float64{2.0}))8 r.Train(regression.DataPoint(69.0, []float64{3.0}))9 r.Train(regression.DataPoint(76.0, []float64{4.0}))10 r.Train(regression.DataPoint(68.0, []float64{5.0}))11 r.Train(regression.DataPoint(80.0, []float64{6.0}))12 r.Train(regression.DataPoint(75.0, []float64{7.0}))13 r.Train(regression.DataPoint(79.0, []float64{8.0}))14 r.Run()15 fmt.Printf("\nRegression Formula:\n%v\n", r.Formula)16 fmt.Printf("\nR²: %v\n", r.R2)17 fmt.Printf("\nX=4.0, Predicted Y: %v\n", r.Predict([]float64{4.0}))18}

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 x = []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}4 y = []float64{2, 4, 6, 8, 10, 12, 14, 16, 18, 20}5 myreg := NewRegression(x, y, 2)6 fmt.Println(myreg.Test(x, y))7}8import (9func main() {10 x = []float64{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}11 y = []float64{2, 4, 6, 8, 10, 12, 14, 16, 18, 20}12 myreg := NewRegression(x, y, 2)13 fmt.Println(myreg.Test(x, y))14}

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r := new(regression.Regression)4 r.SetObserved("Y")5 r.SetVar(0, "X")6 r.Train(regression.DataPoint(65.0, []float64{23.0}))7 r.Train(regression.DataPoint(72.0, []float64{26.0}))8 r.Train(regression.DataPoint(78.0, []float64{30.0}))9 r.Train(regression.DataPoint(82.0, []float64{34.0}))10 r.Train(regression.DataPoint(86.0, []float64{43.0}))11 r.Train(regression.DataPoint(88.0, []float64{48.0}))12 r.Train(regression.DataPoint(90.0, []float64{52.0}))13 r.Train(regression.DataPoint(92.0, []float64{57.0}))14 r.Train(regression.DataPoint(94.0, []float64{58.0}))15 r.Train(regression.DataPoint(96.0, []float64{60.0}))16 r.Run()17 fmt.Printf("\nRegression Formula:\n%v\n", r.Formula)18 fmt.Printf("\nR2: %0.2f\n", r.R2)19 fmt.Printf("\nPredicted age at 73 inches: %0.2f\n", r.Predict([]float64{73.0}))20}

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 r := new(regression.Regression)4 r.SetObserved("Y")5 r.SetVar(0, "X")6 f, err := os.Open("data.csv")7 if err != nil {8 log.Fatal(err)9 }10 lines, err := regression.ParseCSV(f)11 if err != nil {12 log.Fatal(err)13 }14 for _, line := range lines {15 yVal, err := strconv.ParseFloat(line["Y"], 64)16 if err != nil {17 log.Fatal(err)18 }19 xVal, err := strconv.ParseFloat(line["X"], 64)20 if err != nil {21 log.Fatal(err)22 }23 r.Train(regression.DataPoint(yVal, []float64{xVal}))24 }25 r.Run()26 fmt.Print("Enter a value for x: ")27 fmt.Scanf("%f", &x)28 fmt.Printf("Predicted value for y: %0.2f\n", r.Predict([]float64{x}))29}30import (31func main() {32 r := new(regression.Regression)33 r.SetObserved("Y")34 r.SetVar(0, "X")35 f, err := os.Open("data.csv")36 if err != nil {37 log.Fatal(err)38 }39 lines, err := regression.ParseCSV(f)40 if err != nil {41 log.Fatal(err)42 }43 for _, line := range lines {44 yVal, err := strconv.ParseFloat(line["Y"], 64)45 if err != nil {46 log.Fatal(err)47 }48 xVal, err := strconv.ParseFloat(line["X"], 64)49 if err != nil {50 log.Fatal(err)51 }

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 rawData, err := LoadCSV("data.csv")4 if err != nil {5 log.Fatal(err)6 }7 r.SetObserved("MPG")8 r.SetVar(0, "Cylinders")9 r.SetVar(1, "Displacement")10 r.SetVar(2, "Horsepower")11 r.SetVar(3, "Weight")12 r.SetVar(4, "Acceleration")13 r.SetVar(5, "Model Year")14 r.SetVar(6, "Origin")15 for _, d := range rawData {16 r.Train(regression.DataPoint(d.Output, d.Input))17 }18 r.Run()19 for _, d := range rawData {20 fmt.Printf("Predicted: %v, Actual: %v\n", r.Predict(d.Input), d.Output)21 }22}23import (24func main() {25 rawData, err := LoadCSV("data.csv")26 if err != nil {27 log.Fatal(err)28 }29 r.SetObserved("MPG")30 r.SetVar(0, "Cylinders")31 r.SetVar(1, "Displacement")32 r.SetVar(2, "Horsepower")33 r.SetVar(3, "Weight")34 r.SetVar(4, "Acceleration")35 r.SetVar(5, "Model Year")36 r.SetVar(6, "Origin")37 for _, d := range rawData {38 r.Train(regression.DataPoint(d.Output, d.Input))39 }40 r.Run()41 for _, d := range rawData {42 fmt.Printf("Predicted: %v, Actual: %v\n", r.Predict(d.Input), d.Output)43 }44}45import (46func main() {

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 features := mat.NewDense(4, 2, []float64{4 })5 target := mat.NewDense(4, 1, []float64{6 })7 model := NewRegression(features, target)8 model.Train(0.01, 1000)9 x := mat.NewDense(1, 2, []float64{10 })11 y := model.Predict(x)12 fmt.Println(y)13}14import (15func main() {16 features := mat.NewDense(4, 2, []float64{17 })18 target := mat.NewDense(4, 1, []float64{19 })20 model := NewRegression(features, target)21 model.Train(0.01, 1000)22 x := mat.NewDense(1, 2, []float64{23 })24 y := model.Predict(x)25 fmt.Println(y)26}27import (28func main() {29 features := mat.NewDense(4, 2, []float64{

Full Screen

Full Screen

Test

Using AI Code Generation

copy

Full Screen

1import (2func main() {3 f, err := os.Create("test.svg")4 if err != nil {5 log.Fatal(err)6 }7 canvas := svg.New(f)8 canvas.Start(600, 600)9 canvas.Rect(0, 0, 600, 600, "fill:rgb(255,255,255)")10 canvas.Line(100, 100, 400, 400, "stroke:black;stroke-width:3")11 canvas.Circle(300, 300, 100, "fill:red")12 canvas.Text(300, 300, "Hello World!", "text-anchor:middle;font-size:30px;fill:white")13 canvas.End()14}15import (16func main() {17 f, err := os.Create("test.svg")18 if err != nil {19 log.Fatal(err)20 }21 canvas := svg.New(f)22 canvas.Start(600, 600)23 canvas.Rect(0, 0, 600, 600, "fill:rgb(255,255,255)")24 canvas.Line(100, 100, 400, 400, "stroke:black;stroke-width:3")25 canvas.Circle(300, 300, 100, "fill:red")26 canvas.Text(300, 300, "Hello World!", "text-anchor:middle;font-size:30px;fill:white")27 canvas.End()28}29import (

Full Screen

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub — right from setting up the prerequisites to run your first automation test, through following best practices, to diving deeper into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You could also refer to video tutorials over LambdaTest YouTube channel to get step by step demonstration from industry experts.

Run Keploy automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful