How to use the runAll method of the metrics package

Best K6 code snippets using metrics.runAll
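
The snippets below come from several different Go projects, so there is no single metrics.RunAll signature to point at. As a rough, hypothetical sketch of the shared pattern (the Collector, Registry, New, Add, and RunAll names here are illustrative and not taken from any of the sources), a RunAll method typically just walks a set of registered collectors and runs each one:

package metrics

// Collector is anything that can produce or report a metric.
type Collector interface {
	Run() error
}

// Registry holds a set of named collectors.
type Registry struct {
	collectors map[string]Collector
}

// New returns an empty registry.
func New() *Registry {
	return &Registry{collectors: map[string]Collector{}}
}

// Add registers a collector under a name.
func (r *Registry) Add(name string, c Collector) {
	r.collectors[name] = c
}

// RunAll runs every registered collector and returns the first error seen.
func (r *Registry) RunAll() error {
	for _, c := range r.collectors {
		if err := c.Run(); err != nil {
			return err
		}
	}
	return nil
}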

model.go

Source: model.go (GitHub)


// Package model provides an interface for machine learning models.
package model
import (
	"fmt"
	golog "log"
	"github.com/aunum/gold/pkg/v1/track"
	cgraph "github.com/aunum/goro/pkg/v1/common/graph"
	"github.com/aunum/goro/pkg/v1/layer"
	"github.com/aunum/log"
	g "gorgonia.org/gorgonia"
)
// Model is a prediction model.
type Model interface {
	// Compile the model.
	Compile(x InputOr, y *Input, opts ...Opt) error
	// Predict x.
	Predict(x g.Value) (prediction g.Value, err error)
	// Fit x to y.
	Fit(x ValueOr, y g.Value) error
	// FitBatch fits x to y as batches.
	FitBatch(x ValueOr, y g.Value) error
	// PredictBatch predicts x as a batch
	PredictBatch(x g.Value) (prediction g.Value, err error)
	// ResizeBatch resizes the batch graphs.
	ResizeBatch(n int) error
	// Visualize the model by graph name.
	Visualize(name string)
	// Graph returns the expression graph for the model.
	Graphs() map[string]*g.ExprGraph
	// X is the inputs to the model.
	X() InputOr
	// Y is the expected output of the model.
	Y() *Input
	// Learnables for the model.
	Learnables() g.Nodes
}
// Sequential model.
type Sequential struct {
	// Chain of layers in the model.
	Chain *layer.Chain
	// Tracker of values.
	Tracker *track.Tracker
	noTracker bool
	logger *log.Logger
	name string
	x Inputs
	y *Input
	fwd *Input
	trainChain, trainBatchChain *layer.Chain
	onlineChain, onlineBatchChain *layer.Chain
	backwardChain *layer.Chain
	trainGraph, trainBatchGraph *g.ExprGraph
	onlineGraph, onlineBatchGraph *g.ExprGraph
	backwardGraph *g.ExprGraph
	xTrain, xTrainBatch Inputs
	xTrainFwd, xTrainBatchFwd *Input
	xOnline, xOnlineBatch Inputs
	xOnlineFwd, xOnlineBatchFwd *Input
	yTrain, yTrainBatch *Input
	trainPredVal, trainBatchPredVal g.Value
	onlinePredVal, onlineBatchPredVal g.Value
	loss Loss
	trainLoss, trainBatchLoss Loss
	metrics Metrics
	batchSize int
	optimizer g.Solver
	trainVM, trainBatchVM g.VM
	onlineVM, onlineBatchVM g.VM
	backwardVM g.VM
	vmOpts []g.VMOpt
}
// NewSequential returns a new sequential model.
func NewSequential(name string) (*Sequential, error) {
	return &Sequential{
		Chain: layer.NewChain(),
		name: name,
		batchSize: 32,
		metrics: AllMetrics,
	}, nil
}
// Opt is a model option.
type Opt func(Model)
// Metric tracked by the model.
type Metric string
const (
	// TrainLossMetric is the metric for training loss.
	TrainLossMetric Metric = "train_loss"
	// TrainBatchLossMetric is the metric for batch training loss.
	TrainBatchLossMetric Metric = "train_batch_loss"
)
// Metrics is a set of metrics.
type Metrics []Metric
// Contains tells whether the set contains the given metric.
func (m Metrics) Contains(metric Metric) bool {
	for _, mt := range m {
		if mt == metric {
			return true
		}
	}
	return false
}
// AllMetrics are all metrics.
var AllMetrics = Metrics{TrainLossMetric, TrainBatchLossMetric}
// WithMetrics sets the metrics that the model should track.
// Defaults to AllMetrics.
func WithMetrics(metrics ...Metric) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.metrics = metrics
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithLoss uses a specific loss function with the model.
// Defaults to MSE.
func WithLoss(loss Loss) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.loss = loss
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithOptimizer uses a specific optimizer function.
// Defaults to Adam.
func WithOptimizer(optimizer g.Solver) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.optimizer = optimizer
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithTracker adds a tracker to the model, if not provided one will be created.
func WithTracker(tracker *track.Tracker) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.Tracker = tracker
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithoutTracker uses no tracking with the model.
func WithoutTracker() func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.noTracker = true
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithBatchSize sets the batch size for the model.
// Defaults to 32.
func WithBatchSize(size int) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.batchSize = size
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithGraphLogger adds a logger to the model which will print out the graph operations
// as they occur.
func WithGraphLogger(log *golog.Logger) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.vmOpts = append(t.vmOpts, g.WithLogger(log))
		default:
			log.Fatal("unknown model type")
		}
	}
}
// WithLogger adds a logger to the model.
func WithLogger(logger *log.Logger) func(Model) {
	return func(m Model) {
		switch t := m.(type) {
		case *Sequential:
			t.logger = logger
		default:
			log.Fatal("unknown model type")
		}
	}
}
// AddLayer adds a layer.
func (s *Sequential) AddLayer(layer layer.Config) {
	s.Chain.Add(layer)
}
// AddLayers adds a number of layers.
func (s *Sequential) AddLayers(layers ...layer.Config) {
	for _, layer := range layers {
		s.Chain.Add(layer)
	}
}
// Fwd tells the model which input should be sent through the layer.
// If not provided, the first input will be used.
func (s *Sequential) Fwd(x *Input) {
	s.fwd = x
}
// Compile the model.
func (s *Sequential) Compile(x InputOr, y *Input, opts ...Opt) error {
	s.x = x.Inputs()
	err := y.Validate()
	if err != nil {
		return err
	}
	s.y = y
	for _, opt := range opts {
		opt(s)
	}
	if s.logger == nil {
		s.logger = log.DefaultLogger
	}
	if s.loss == nil {
		s.loss = MSE
	}
	if s.optimizer == nil {
		s.optimizer = g.NewAdamSolver()
	}
	if s.Tracker == nil && !s.noTracker {
		tracker, err := track.NewTracker(track.WithLogger(s.logger))
		if err != nil {
			return err
		}
		s.Tracker = tracker
	}
	if s.fwd == nil {
		s.fwd = x.Inputs()[0]
		err = s.fwd.Validate()
		if err != nil {
			return err
		}
		s.logger.Infof("setting forward for layers to input %q", s.fwd.Name())
	}
	err = s.buildTrainGraph(s.x, s.y)
	if err != nil {
		return err
	}
	err = s.buildTrainBatchGraph(s.x, s.y)
	if err != nil {
		return err
	}
	err = s.buildOnlineGraph(s.x)
	if err != nil {
		return err
	}
	err = s.buildOnlineBatchGraph(s.x)
	if err != nil {
		return err
	}
	return nil
}
func (s *Sequential) buildTrainGraph(x Inputs, y *Input) (err error) {
	s.trainGraph = g.NewGraph()
	s.trainLoss = s.loss.CloneTo(s.trainGraph)
	for _, input := range x {
		if i, err := s.trainLoss.Inputs().Get(input.Name()); err == nil {
			s.xTrain = append(s.xTrain, i)
			continue
		}
		i := input.CloneTo(s.trainGraph)
		s.xTrain = append(s.xTrain, i)
	}
	s.xTrainFwd, err = s.xTrain.Get(s.fwd.Name())
	if err != nil {
		return err
	}
	s.trainLoss = s.loss.CloneTo(s.trainGraph)
	s.yTrain = y.Clone()
	s.yTrain.Compile(s.trainGraph)
	s.trainChain = s.Chain.Clone()
	s.trainChain.Compile(s.trainGraph)
	prediction, err := s.trainChain.Fwd(s.xTrainFwd.Node())
	if err != nil {
		return err
	}
	g.Read(prediction, &s.trainPredVal)
	loss, err := s.trainLoss.Compute(prediction, s.yTrain.Node())
	if err != nil {
		return err
	}
	if s.metrics.Contains(TrainLossMetric) {
		if s.Tracker != nil {
			s.Tracker.TrackValue("train_loss", loss, track.WithNamespace(s.name))
		}
	}
	_, err = g.Grad(loss, s.trainChain.Learnables()...)
	if err != nil {
		return err
	}
	vmOpts := []g.VMOpt{}
	copy(vmOpts, s.vmOpts)
	vmOpts = append(vmOpts, g.BindDualValues(s.trainChain.Learnables()...))
	s.trainVM = g.NewTapeMachine(s.trainGraph, vmOpts...)
	return nil
}
func (s *Sequential) buildTrainBatchGraph(x Inputs, y *Input) (err error) {
	s.trainBatchGraph = g.NewGraph()
	s.trainBatchLoss = s.loss.CloneTo(s.trainBatchGraph, AsBatch(s.batchSize))
	for _, input := range x {
		// TODO: need to validate input names for duplicates.
		if i, err := s.trainBatchLoss.Inputs().Get(input.Name()); err == nil {
			s.xTrainBatch = append(s.xTrainBatch, i)
			continue
		}
		i := input.CloneTo(s.trainBatchGraph, AsBatch(s.batchSize))
		s.xTrainBatch = append(s.xTrainBatch, i)
	}
	s.xTrainBatchFwd, err = s.xTrainBatch.Get(NameAsBatch(s.fwd.Name()))
	if err != nil {
		return err
	}
	s.yTrainBatch = s.y.AsBatch(s.batchSize)
	s.yTrainBatch.Compile(s.trainBatchGraph)
	s.trainBatchChain = s.Chain.Clone()
	s.trainBatchChain.Compile(s.trainBatchGraph, layer.WithSharedChainLearnables(s.trainChain), layer.WithLayerOpts(layer.AsBatch()))
	prediction, err := s.trainBatchChain.Fwd(s.xTrainBatchFwd.Node())
	if err != nil {
		return err
	}
	g.Read(prediction, &s.trainBatchPredVal)
	loss, err := s.trainBatchLoss.Compute(prediction, s.yTrainBatch.Node())
	if err != nil {
		return err
	}
	if s.metrics.Contains(TrainBatchLossMetric) {
		if s.Tracker != nil {
			s.Tracker.TrackValue("train_batch_loss", loss, track.WithNamespace(s.name))
		}
	}
	_, err = g.Grad(loss, s.trainBatchChain.Learnables()...)
	if err != nil {
		return err
	}
	vmOpts := []g.VMOpt{}
	copy(vmOpts, s.vmOpts)
	vmOpts = append(vmOpts, g.BindDualValues(s.trainBatchChain.Learnables()...))
	s.trainBatchVM = g.NewTapeMachine(s.trainBatchGraph, vmOpts...)
	return nil
}
func (s *Sequential) buildOnlineGraph(x Inputs) (err error) {
	s.onlineGraph = g.NewGraph()
	s.xOnline = s.x.Clone()
	s.xOnline.Compile(s.onlineGraph)
	s.xOnlineFwd, err = s.xOnline.Get(s.fwd.Name())
	if err != nil {
		return err
	}
	s.onlineChain = s.Chain.Clone()
	s.onlineChain.Compile(s.onlineGraph, layer.WithSharedChainLearnables(s.trainChain))
	prediction, err := s.onlineChain.Fwd(s.xOnlineFwd.Node())
	if err != nil {
		return err
	}
	g.Read(prediction, &s.onlinePredVal)
	vmOpts := []g.VMOpt{}
	copy(vmOpts, s.vmOpts)
	s.onlineVM = g.NewTapeMachine(s.onlineGraph, vmOpts...)
	return nil
}
func (s *Sequential) buildOnlineBatchGraph(x Inputs) (err error) {
	s.onlineBatchGraph = g.NewGraph()
	for _, input := range x {
		if input.Name() == s.fwd.Name() {
			s.xOnlineBatchFwd = input.AsBatch(s.batchSize)
			s.xOnlineBatchFwd.Compile(s.onlineBatchGraph)
			s.xOnlineBatch = append(s.xOnlineBatch, s.xOnlineBatchFwd)
			continue
		}
		i := input.CloneTo(s.onlineBatchGraph)
		s.xOnlineBatch = append(s.xOnlineBatch, i)
	}
	s.xOnlineBatchFwd, err = s.xOnlineBatch.Get(NameAsBatch(s.fwd.Name()))
	if err != nil {
		return err
	}
	s.onlineBatchChain = s.Chain.Clone()
	s.onlineBatchChain.Compile(s.onlineBatchGraph, layer.WithSharedChainLearnables(s.trainChain), layer.WithLayerOpts(layer.AsBatch()))
	prediction, err := s.onlineBatchChain.Fwd(s.xOnlineBatchFwd.Node())
	if err != nil {
		return err
	}
	g.Read(prediction, &s.onlineBatchPredVal)
	vmOpts := []g.VMOpt{}
	copy(vmOpts, s.vmOpts)
	s.onlineBatchVM = g.NewTapeMachine(s.onlineBatchGraph, vmOpts...)
	return nil
}
// ResizeBatch will resize the batch graph.
// Note: this is expensive as it recompiles the graph.
func (s *Sequential) ResizeBatch(n int) (err error) {
	log.Debugf("resizing batch graphs to %d", n)
	s.batchSize = n
	s.xTrainBatch = Inputs{}
	s.xTrainBatchFwd = nil
	err = s.buildTrainBatchGraph(s.x, s.y)
	if err != nil {
		return
	}
	s.xOnlineBatch = Inputs{}
	s.xOnlineBatchFwd = nil
	return s.buildOnlineBatchGraph(s.x)
}
// Predict x.
func (s *Sequential) Predict(x g.Value) (prediction g.Value, err error) {
	err = s.xOnlineFwd.Set(x)
	if err != nil {
		return prediction, err
	}
	err = s.onlineVM.RunAll()
	if err != nil {
		return prediction, err
	}
	prediction = s.onlinePredVal
	s.onlineVM.Reset()
	return
}
// PredictBatch predicts x as a batch.
func (s *Sequential) PredictBatch(x g.Value) (prediction g.Value, err error) {
	err = s.xOnlineBatchFwd.Set(x)
	if err != nil {
		return prediction, err
	}
	err = s.onlineBatchVM.RunAll()
	if err != nil {
		return prediction, err
	}
	prediction = s.onlineBatchPredVal
	s.onlineBatchVM.Reset()
	return
}
// Fit x to y.
func (s *Sequential) Fit(x ValueOr, y g.Value) error {
	err := s.yTrain.Set(y)
	if err != nil {
		return err
	}
	xVals := ValuesFrom(x)
	err = s.xTrain.Set(xVals)
	if err != nil {
		return err
	}
	err = s.trainVM.RunAll()
	if err != nil {
		return err
	}
	grads := g.NodesToValueGrads(s.trainChain.Learnables())
	s.optimizer.Step(grads)
	s.trainVM.Reset()
	return nil
}
// FitBatch fits x to y as a batch.
func (s *Sequential) FitBatch(x ValueOr, y g.Value) error {
	err := s.yTrainBatch.Set(y)
	if err != nil {
		return err
	}
	xVals := ValuesFrom(x)
	err = s.xTrainBatch.Set(xVals)
	if err != nil {
		return err
	}
	err = s.trainBatchVM.RunAll()
	if err != nil {
		return err
	}
	// log.Infovb("pred val", s.trainBatchPredVal)
	grads := g.NodesToValueGrads(s.trainBatchChain.Learnables())
	s.optimizer.Step(grads)
	s.trainBatchVM.Reset()
	return nil
}
// Visualize the model by graph name.
func (s *Sequential) Visualize(name string) {
	cgraph.Visualize(s.Graphs()[name])
}
// Graphs returns the expression graphs for the model.
func (s *Sequential) Graphs() map[string]*g.ExprGraph {
	return map[string]*g.ExprGraph{
		"train": s.trainGraph,
		"trainBatch": s.trainBatchGraph,
		"online": s.onlineGraph,
		"onlineBatch": s.onlineBatchGraph,
	}
}
// X is the input to the model.
func (s *Sequential) X() InputOr {
	return s.x
}
// Y is the output of the model.
func (s *Sequential) Y() *Input {
	return s.y
}
// Learnables are the model learnables.
func (s *Sequential) Learnables() g.Nodes {
	return s.trainChain.Learnables()
}
// CloneLearnablesTo another model.
func (s *Sequential) CloneLearnablesTo(to *Sequential) error {
	desired := s.trainChain.Learnables()
	destination := to.trainChain.Learnables()
	if len(desired) != len(destination) {
		return fmt.Errorf("models must be identical to clone learnables")
	}
	for i, learnable := range destination {
		c := desired[i].Clone()
		err := g.Let(learnable, c.(*g.Node).Value())
		if err != nil {
			return err
		}
	}
	new := to.trainChain.Learnables()
	shared := map[string]*layer.Chain{
		"trainBatch": to.trainBatchChain,
		"online": to.onlineChain,
		"onlineBatch": to.onlineBatchChain,
	}
	for name, chain := range shared {
		s.logger.Debugv("chain", name)
		for i, learnable := range chain.Learnables() {
			err := g.Let(learnable, new[i].Value())
			if err != nil {
				return err
			}
			s.logger.Debugvb(learnable.Name(), learnable.Value())
		}
	}
	return nil
}
// SetLearnables sets learnables on the model.
func (s *Sequential) SetLearnables(desired g.Nodes) error {
	destination := s.trainChain.Learnables()
	if len(desired) != len(destination) {
		return fmt.Errorf("cannot set learnables: number of desired nodes not equal to number of nodes in model")
	}
	for i, learnable := range destination {
		c := desired[i].Clone()
		err := g.Let(learnable, c.(*g.Node).Value())
		if err != nil {
			return err
		}
	}
	new := s.trainChain.Learnables()
	shared := map[string]*layer.Chain{
		"trainBatch": s.trainBatchChain,
		"online": s.onlineChain,
		"onlineBatch": s.onlineBatchChain,
	}
	for name, chain := range shared {
		s.logger.Debugv("chain", name)
		for i, learnable := range chain.Learnables() {
			err := g.Let(learnable, new[i].Value())
			if err != nil {
				return err
			}
			s.logger.Debugvb(learnable.Name(), learnable.Value())
		}
	}
	return nil
}
// Opts are options for a model.
type Opts struct {
	opts []Opt
}
// NewOpts returns a new set of options for a model.
func NewOpts() *Opts {
	return &Opts{opts: []Opt{}}
}
// Add an option to the options.
func (o *Opts) Add(opts ...Opt) {
	o.opts = append(o.opts, opts...)
}
// Values are the options.
func (o *Opts) Values() []Opt {
	return o.opts
}
...
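
Every Predict, PredictBatch, Fit, and FitBatch call above ultimately drives gorgonia's VM RunAll: the compiled expression graph is executed on a tape machine, the value captured by g.Read is returned, and the VM is reset for the next call. Below is a minimal, standalone sketch of that same RunAll/Reset cycle; it is a simplified example, not part of the model package above.

package main

import (
	"fmt"

	g "gorgonia.org/gorgonia"
)

func main() {
	graph := g.NewGraph()

	// Two scalar inputs and a node for their product.
	x := g.NewScalar(graph, g.Float64, g.WithName("x"))
	y := g.NewScalar(graph, g.Float64, g.WithName("y"))
	z, err := g.Mul(x, y)
	if err != nil {
		panic(err)
	}

	// Capture z's value each time the graph is run, as the model does
	// with g.Read(prediction, &s.trainPredVal).
	var zVal g.Value
	g.Read(z, &zVal)

	vm := g.NewTapeMachine(graph)
	defer vm.Close()

	// Bind inputs, run the whole graph, then reset for the next run,
	// mirroring what Sequential.Predict does with its online VM.
	g.Let(x, 2.0)
	g.Let(y, 3.5)
	if err := vm.RunAll(); err != nil {
		panic(err)
	}
	fmt.Println(zVal) // 7
	vm.Reset()
}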


poller.go

Source: poller.go (GitHub)


package active
import (
	"context"
	"fmt"
	"log"
	"net/http"
	"net/url"
	"regexp"
	"time"
	"cloud.google.com/go/storage"
	"github.com/googleapis/google-cloud-go-testing/storage/stiface"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promauto"
	"golang.org/x/sync/errgroup"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
	gardener "github.com/m-lab/etl-gardener/client/v2"
	"github.com/m-lab/etl-gardener/tracker"
	"github.com/m-lab/go/cloud/gcs"
	"github.com/m-lab/go/rtx"
	"github.com/m-lab/etl/metrics"
)
// JobFailures counts all the errors that result in test loss.
//
// Provides metrics:
//   etl_job_failures{prefix, year, kind}
// Example usage:
//   JobFailures.WithLabelValues("ndt/tcpinfo", "2019", "insert").Inc()
var JobFailures = promauto.NewCounterVec(
	prometheus.CounterOpts{
		Name: "etl_job_failures",
		Help: "Job level failures.",
	},
	// Parser type, error description.
	[]string{"prefix", "year", "type"},
)
// GardenerAPI encapsulates the backend paths and clients to connect to gardener and GCS.
type GardenerAPI struct {
	trackerBase url.URL
	gcs stiface.Client
	jobs *gardener.JobClient
}
// NewGardenerAPI creates a GardenerAPI.
func NewGardenerAPI(trackerBase url.URL, gcs stiface.Client) *GardenerAPI {
	c := gardener.NewJobClient(trackerBase)
	return &GardenerAPI{trackerBase: trackerBase, gcs: gcs, jobs: c}
}
// MustStorageClient creates a default GCS client.
func MustStorageClient(ctx context.Context) stiface.Client {
	c, err := storage.NewClient(ctx, option.WithScopes(storage.ScopeReadOnly))
	rtx.Must(err, "Failed to create storage client")
	return stiface.AdaptClient(c)
}
// RunAll will execute functions provided by Next() until there are no more,
// or the context is canceled.
func (g *GardenerAPI) RunAll(ctx context.Context, rSrc RunnableSource, jt *tracker.JobWithTarget) (*errgroup.Group, error) {
	eg := &errgroup.Group{}
	count := 0
	job := jt.Job
	for {
		run, err := rSrc.Next(ctx)
		if err != nil {
			if err == iterator.Done {
				debug.Printf("Dispatched total of %d archives for %s\n", count, job.String())
				return eg, nil
			} else {
				metrics.BackendFailureCount.WithLabelValues(
					job.Datatype, "rSrc.Next").Inc()
				log.Println(err, "processing", job.String())
				return eg, err
			}
		}
		if err := g.jobs.Heartbeat(ctx, jt.ID); err != nil {
			log.Println(err, "on heartbeat for", job.Path())
		}
		debug.Println("Starting func")
		f := func() (err error) {
			metrics.ActiveTasks.WithLabelValues(rSrc.Label()).Inc()
			defer metrics.ActiveTasks.WithLabelValues(rSrc.Label()).Dec()
			// Capture any panic and convert it to an error.
			defer func(tag string) {
				if err2 := metrics.PanicToErr(err, recover(), "Runall.f: "+tag); err2 != nil {
					err = err2
				}
			}(run.Info())
			err = run.Run(ctx)
			if err == nil {
				if err := g.jobs.Update(ctx, jt.ID, tracker.Parsing, run.Info()); err != nil {
					log.Println(err, "on update for", job.Path())
				}
			}
			return
		}
		count++
		eg.Go(f)
	}
}
func failMetric(j tracker.Job, label string) {
	JobFailures.WithLabelValues(
		j.Experiment+"/"+j.Datatype, j.Date.Format("2006"), label).Inc()
}
// JobFileSource creates a gcsSource for the job.
func (g *GardenerAPI) JobFileSource(ctx context.Context, job tracker.Job,
	toRunnable func(*storage.ObjectAttrs) Runnable) (*GCSSource, error) {
	filter, err := regexp.Compile(job.Filter)
	if err != nil {
		failMetric(job, "filter compile")
		return nil, err
	}
	bh, err := gcs.GetBucket(ctx, g.gcs, job.Bucket)
	if err != nil {
		failMetric(job, "filesource")
		return nil, err
	}
	prefix, err := job.Prefix()
	if err != nil {
		failMetric(job, "prefix")
		return nil, err
	}
	lister := FileListerFunc(bh, prefix, filter)
	gcsSource, err := NewGCSSource(ctx, job, lister, toRunnable)
	if err != nil {
		failMetric(job, "GCSSource")
		return nil, err
	}
	return gcsSource, nil
}
// NextJob requests a new job from Gardener service.
func (g *GardenerAPI) NextJob(ctx context.Context) (*tracker.JobWithTarget, error) {
	return g.jobs.Next(ctx)
}
func (g *GardenerAPI) pollAndRun(ctx context.Context,
	toRunnable func(o *storage.ObjectAttrs) Runnable, tokens TokenSource) error {
	jt, err := g.jobs.Next(ctx)
	if err != nil {
		log.Println(err, "on Gardener client.NextJob()")
		return err
	}
	log.Println(jt, "filter:", jt.Job.Filter)
	gcsSource, err := g.JobFileSource(ctx, jt.Job, toRunnable)
	if err != nil {
		log.Println(err, "on JobFileSource")
		return err
	}
	src := Throttle(gcsSource, tokens)
	log.Println("Running", jt.Job.Path())
	if err := g.jobs.Update(ctx, jt.ID, tracker.Parsing, "starting tasks"); err != nil {
		log.Println(err)
	}
	eg, err := g.RunAll(ctx, src, jt)
	if err != nil {
		log.Println(err)
	}
	// Once all are dispatched, we want to wait until all have completed
	// before posting the state change.
	go func() {
		log.Println("all tasks dispatched for", jt.Job.Path())
		err := eg.Wait()
		if err != nil {
			log.Println(err, "on wait for", jt.Job.Path())
		} else {
			log.Println("finished", jt.Job.Path())
		}
		if err := g.jobs.Update(ctx, jt.ID, tracker.ParseComplete, ""); err != nil {
			log.Println(err)
		}
	}()
	return err
}
// Poll requests work items from gardener, and processes them.
func (g *GardenerAPI) Poll(ctx context.Context,
	toRunnable func(o *storage.ObjectAttrs) Runnable, maxWorkers int, period time.Duration) {
	// Poll no faster than period.
	ticker := time.NewTicker(period)
	throttle := NewWSTokenSource(maxWorkers)
	for {
		select {
		case <-ctx.Done():
			log.Println("Poller context done")
			return
		default:
			err := g.pollAndRun(ctx, toRunnable, throttle)
			if err != nil {
				log.Println(err)
			}
		}
		<-ticker.C // Wait for next tick, to avoid fast spinning on errors.
	}
}
// Status adds a small amount of status info to w.
func (g *GardenerAPI) Status(w http.ResponseWriter) {
	fmt.Fprintf(w, "Gardener API: %s\n", g.trackerBase.String())
}
...
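
GardenerAPI.RunAll above drains a RunnableSource, dispatches each archive onto an errgroup.Group, and returns the group so the caller (pollAndRun) can Wait for completion in its own goroutine. The following is a stripped-down sketch of that dispatch-then-wait pattern using a plain channel in place of the etl RunnableSource; the runnable type and runAll helper here are illustrative, not the project's real API.

package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

// runnable is a stand-in for the etl Runnable/RunnableSource types.
type runnable func(ctx context.Context) error

// runAll mirrors the shape of GardenerAPI.RunAll: drain the source, dispatch
// each item on the errgroup, and hand the group back so the caller decides
// when to Wait.
func runAll(ctx context.Context, src <-chan runnable) *errgroup.Group {
	eg := &errgroup.Group{}
	count := 0
	for r := range src {
		r := r // capture the loop variable for the goroutine
		eg.Go(func() error { return r(ctx) })
		count++
	}
	fmt.Printf("dispatched %d tasks\n", count)
	return eg
}

func main() {
	src := make(chan runnable, 3)
	for i := 0; i < 3; i++ {
		i := i
		src <- func(ctx context.Context) error {
			fmt.Println("running task", i)
			return nil
		}
	}
	close(src)

	eg := runAll(context.Background(), src)
	if err := eg.Wait(); err != nil { // wait only after everything is dispatched
		fmt.Println("error:", err)
	}
}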


run_all.go

Source: run_all.go (GitHub)


package cmd
import (
	"time"
	"github.com/RichardKnop/pinglist-api/scheduler"
)
// RunAll runs both the scheduler and the app
func RunAll() error {
	cnf, db, err := initConfigDB(true, true)
	if err != nil {
		return err
	}
	defer db.Close()
	if err := initServices(cnf, db); err != nil {
		return err
	}
	// Init the scheduler
	theScheduler := scheduler.New(metricsService, alarmsService)
	// Init the app
	app, err := initApp(cnf, db)
	if err != nil {
		return err
	}
	// Run the scheduling goroutines
	alarmsInterval := time.Duration(10)     // alarms check interval = 10s
	partitionInterval := time.Duration(600) // partition / rotate interval = 10m
	stoppedChan := theScheduler.Start(alarmsInterval, partitionInterval)
	// Run the server on port 8080
	app.Run(":8080")
	// Stop the scheduler
	stoppedChan <- true
	return nil
}
...
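
RunAll above wires up the config and database, starts the scheduler goroutines, and then blocks in app.Run(":8080") until the server exits, so a caller typically invokes it straight from a CLI entry point and treats any returned error as fatal. A minimal sketch of such an entry point follows; the import path is assumed from the snippet's repository, and the real project may route this through its own CLI instead.

package main

import (
	"log"

	"github.com/RichardKnop/pinglist-api/cmd"
)

// Minimal entry point: RunAll blocks on the HTTP server, so any error
// it returns is fatal for the process.
func main() {
	if err := cmd.RunAll(); err != nil {
		log.Fatal(err)
	}
}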


runAll

Using AI Code Generation


1import "fmt"2func main() {3 fmt.Println("Hello, World!")4}5import "fmt"6func main() {7 fmt.Println("Hello, World!")8}9import "fmt"10func main() {11 fmt.Println("Hello, World!")12}13import "fmt"14func main() {15 fmt.Println("Hello, World!")16}17import "fmt"18func main() {19 fmt.Println("Hello, World!")20}21import "fmt"22func main() {23 fmt.Println("Hello, World!")24}25import "fmt"26func main() {27 fmt.Println("Hello, World!")28}29import "fmt"30func main() {31 fmt.Println("Hello, World!")32}33import "fmt"34func main() {35 fmt.Println("Hello, World!")36}37import "fmt"38func main() {39 fmt.Println("Hello, World!")40}41import "fmt"42func main() {43 fmt.Println("Hello, World!")44}45import "fmt"46func main() {47 fmt.Println("Hello, World!")48}49import "fmt"50func main() {51 fmt.Println("Hello, World!")52}53import


runAll

Using AI Code Generation


import (
	"fmt"
	"time"
	// NOTE: the metrics package import path was elided in the original snippet.
)

func main() {
	m := metrics.New()
	go m.RunAll()
	for {
		fmt.Println("Hello")
		time.Sleep(1 * time.Second)
	}
}


runAll

Using AI Code Generation


import (
	"fmt"
	// NOTE: the metrics package import path was elided in the original snippet.
)

func main() {
	fmt.Println("Starting program")
	m := metrics.New()
	m.RunAll()
	fmt.Println("Program ended")
}


runAll

Using AI Code Generation


import (
	"fmt"
	// NOTE: the import and the definition of the Metrics type were elided in the original snippet.
)

func main() {
	metrics := Metrics{}
	c := make(chan int)
	go metrics.runAll(c)
	fmt.Println(<-c)
}


runAll

Using AI Code Generation


import (
	"fmt"
	// NOTE: the imports and the construction of m were elided in the original snippets;
	// each func main below was a separate generated program.
)

func main() {
	m.RunAll()
	fmt.Println("All metrics are running")
}

func main() {
	m.Run("cpu")
	fmt.Println("cpu metric is running")
}

func main() {
	m.Run("memory")
	fmt.Println("memory metric is running")
}

func main() {
	m.Run("disk")
	fmt.Println("disk metric is running")
}

func main() {
	m.Run("network")
	fmt.Println("network metric is running")
}

func main() {
	m.Run("process")
	fmt.Println("process metric is running")
}

func main() {
	m.Run("cpu")
	m.Run("memory")
	m.Run("disk")
	m.Run("network")
	m.Run("process")
	fmt.Println("all metrics are running")
}

func main() {
	m.Run("cpu")


runAll

Using AI Code Generation


import (
	"fmt"
	"time"
	// NOTE: the metrics package import path was elided in the original snippet.
)

func main() {
	metricsObj := metrics.New()
	metricsObj.Add("metric1", "metric1", "metric1")
	metricsObj.Add("metric2", "metric2", "metric2")
	metricsObj.Add("metric3", "metric3", "metric3")
	metricsObj.StartAll()
	time.Sleep(5 * time.Second)
	metricsObj.StopAll()
	fmt.Println(metricsObj.RunAll())
}


runAll

Using AI Code Generation


import (
	"fmt"
	// NOTE: the metrics package import path was elided in the original snippet.
)

func main() {
	m := metrics.NewMetrics()
	m.RunAll()
	fmt.Println(m)
}

// Output:
// {1 1 1 1 1}

// The original snippet breaks off here:
m := metrics.NewMetrics("


runAll

Using AI Code Generation


import (
	"fmt"
	// NOTE: the metrics package import path was elided in the original snippets.
)

func main() {
	metric := metrics.New()
	metric.Add("cpu", 10)
	metric.Add("ram", 20)
	metric.Add("disk", 30)
	metricMap := metric.RunAll()
	fmt.Println(metricMap)
}

func main() {
	metric := metrics.New()
	metric.Add("cpu", 10)
	metric.Add("ram", 20)
	metric.Add("disk", 30)
	fmt.Println(metric.Run("cpu"))
}

