How to use the Put method of the regression package

Best Keploy code snippets using regression.Put

builder.go

Source: builder.go (GitHub)

...
		b.dbMeta = []*DbMeta{}
	}
	b.dbMeta = append(b.dbMeta, meta)
	var state = data.NewMap()
	state.Put("db", request.Name)
	init, err := b.NewAssetMap(assets, "init.yaml", state)
	if err != nil {
		return err
	}
	registerDb, err := b.NewAssetMap(assets, "register.yaml", state)
	if err != nil {
		return err
	}
	if len(b.registerDb) == 0 {
		b.registerDb = make([]Map, 0)
	}
	b.registerDb = append(b.registerDb, registerDb)
	//ddl/schema.ddl
	if meta.Schema != "" {
		var scriptURL = fmt.Sprintf("datastore/%v/schema.sql", request.Name)
		schema, ok := assets[meta.Schema]
		if !ok {
			return fmt.Errorf("unable to locate %v schema: %v", request.Driver, meta.Schema)
		}
		b.UploadToEndly(scriptURL, strings.NewReader(toolbox.AsString(schema)))
		state.Put("script", scriptURL)
		script, err := b.NewMapFromURI("datastore/script.yaml", state)
		if err != nil {
			return err
		}
		init.Put("scripts", script.Get("scripts"))
	}
	b.createDb.Put(request.Name, init)
	//dictionary
	if meta.Dictionary != "" {
		dictionaryURL := fmt.Sprintf("datastore/%v/dictionary", request.Name)
		for k, v := range assets {
			if strings.HasPrefix(k, meta.Dictionary) {
				k = string(k[len(meta.Dictionary):])
				assetURL := path.Join(dictionaryURL, k)
				_ = b.UploadToEndly(assetURL, strings.NewReader(v))
			}
		}
		state.Put("dictionary", dictionaryURL)
		prepare, err := b.NewMapFromURI("datastore/prepare.yaml", state)
		if err != nil {
			return err
		}
		b.populateDb.Put(request.Name, prepare)
	}
	for k, v := range assets {
		schemaURL := fmt.Sprintf("datastore/%v/", request.Name)
		if strings.HasPrefix(k, "schema/") {
			assetURL := path.Join(schemaURL, k)
			_ = b.UploadToEndly(assetURL, strings.NewReader(v))
		}
	}
	return nil
}

func (b *builder) addDatastoreService(assets map[string]string, meta *DbMeta, request *Datastore) error {
	if b.services.Has(request.Driver) || meta.Service == "" {
		return nil
	}
	var state = data.NewMap()
	state.Put("db", request.Name)
	state.Put("driver", request.Driver)
	state.Put("credentials", meta.Credentials)
	if _, has := assets[meta.Service]; !has {
		return fmt.Errorf("service was empty %v", meta.Service)
	}
	useConfig := request.Config && meta.Config != ""
	deploy, err := b.NewAssetMap(assets, meta.Service, state)
	if err != nil {
		return fmt.Errorf("failed to load service deployment: %v", err)
	}
	var service = NewMap()
	if deploy.Has("init") {
		aMap := toolbox.AsMap(deploy.Get("init"))
		for k, v := range aMap {
			b.serviceInit[k] = v
		}
	}
	if deploy.Has("deploy") {
		deployService := deploy.GetMap("deploy")
		if !useConfig {
			deployService = deployService.Remove("mount")
		}
		image := deployService.Get("image")
		if imageParts := strings.Split(toolbox.AsString(image), ":"); len(imageParts) == 2 {
			b.serviceImages = append(b.serviceImages, imageParts[0])
		}
		service = deployService
	}
	if useConfig {
		config, ok := assets[meta.Config]
		if !ok {
			return fmt.Errorf("unable to locate %v service config: %v", request.Driver, meta.Config)
		}
		var configURL = fmt.Sprintf("datastore/%v", meta.Config)
		_ = b.UploadToEndly(configURL, strings.NewReader(toolbox.AsString(config)))
		//service.Put("config", configURL)
		if deploy.Has("config") {
			copyMap := NewMap()
			copyMap.Put("action", "storage:copy")
			copyMap.Put("assets", deploy.Get("config"))
			b.services.Put(request.Driver+"-config", copyMap)
		}
	}
	b.services.Put(request.Driver, service)
	readIP, _ := b.NewMapFromURI("datastore/ip.yaml", state)
	b.services.Put(request.Driver+"-ip", readIP)
	return nil
}

func (b *builder) asMap(text string, state data.Map) (Map, error) {
	aMap := yaml.MapSlice{}
	if state != nil {
		text = state.ExpandAsText(text)
	}
	err := yaml.NewDecoder(strings.NewReader(text)).Decode(&aMap)
	if err != nil {
		err = fmt.Errorf("failed to decode %v, %v", text, err)
	}
	var result = mapSlice(aMap)
	return &result, err
}

func (b *builder) Download(URI string, state data.Map) (string, error) {
	var resource = url.NewResource(toolbox.URLPathJoin(b.baseURL, URI))
	text, err := resource.DownloadText()
	if err != nil {
		return "", err
	}
	if state != nil {
		text = state.ExpandAsText(text)
	}
	return text, nil
}

func (b *builder) getDeployUploadMap(meta *AppMeta) Map {
	var result = NewMap()
	result.Put("${releasePath}/${app}", "$appPath")
	if len(meta.Assets) == 0 {
		return result
	}
	for _, asset := range meta.Assets {
		result.Put(fmt.Sprintf("${releasePath}/%v", asset), fmt.Sprintf("${appPath}/%v", asset))
	}
	return result
}

func (b *builder) getBuildDownloadMap(meta *AppMeta) Map {
	var result = NewMap()
	if meta.hasAppDirectory {
		result.Put("${buildPath}/app/${app}", "$releasePath")
	} else {
		result.Put("${buildPath}/${app}", "$releasePath")
	}
	if len(meta.Assets) == 0 {
		return result
	}
	for _, asset := range meta.Assets {
		result.Put(fmt.Sprintf("${buildPath}/%v", asset), fmt.Sprintf("${releasePath}%v", asset))
	}
	return result
}

func hasKeyPrefix(keyPrefix string, assets map[string]string) bool {
	for candidate := range assets {
		if strings.HasPrefix(candidate, keyPrefix) {
			return true
		}
	}
	return false
}

func removeComments(assets map[string]string) {
	for k, code := range assets {
		if strings.HasSuffix(k, ".go") && strings.Contains(code, "/*remove") {
			code = strings.Replace(code, "/*remove", "", -1)
			assets[k] = strings.Replace(code, "remove*/", "", -1)
		}
	}
}

func (b *builder) buildApp(meta *AppMeta, sdkMeta *SdkMeta, request *RunRequest, assets map[string]string) error {
	buildRequest := request.Build
	var state = data.NewMap()
	state.Put("buildCmd", meta.BuildCmd)
	var err error
	removeComments(assets)
	request.Build.path = meta.Build
	if meta.UseSdkBuild {
		request.Build.path = sdkMeta.Build
	}
	var buildTemplateURL = toolbox.URLPathJoin(b.baseURL, request.Build.path)
	buildAssets, err := DownloadAll(buildTemplateURL)
	if err != nil {
		return err
	}
	var args = meta.GetArguments(buildRequest.Docker)
	var appFile = "app.yaml"
	var app string
	var appMap Map
	var originURL = meta.OriginURL
	if originURL == "" {
		originURL = request.Origin
	}
	appDirectory := ""
	dependency := ""
	if meta.Dependency != "" {
		dependency = fmt.Sprintf("\n - %v", strings.Replace(meta.Dependency, "\n", "", -1))
	}
	if meta.hasAppDirectory {
		appDirectory = "\n - cd ${buildPath}app"
	}
	state.Put("dependency", dependency)
	state.Put("originURL", fmt.Sprintf(`"%v"`, originURL))
	state.Put("appDirectory", appDirectory)
	var uploadDockerfile = buildRequest.Dockerfile
	if buildRequest.DockerCompose && buildRequest.Dockerfile {
		if buildRequest.Tag != nil {
			state.Put("app", buildRequest.Tag.Image)
			state.Put("image", buildRequest.Tag.Image)
			state.Put("appVersion", buildRequest.Tag.Version)
			state.Put("imageUsername", buildRequest.Tag.Username)
		}
		appFile = "docker/compose/app.yaml"
		if appMap, err = b.NewAssetMap(buildAssets, appFile, state); err != nil {
			return err
		}
		uploadDockerfile = false
	} else {
		if buildRequest.Docker {
			state.Put("args", args)
			appFile = "docker/app.yaml"
			if appMap, err = b.NewAssetMap(buildAssets, appFile, state); err != nil {
				return err
			}
		} else {
			if appMap, err = b.NewAssetMap(buildAssets, "app.yaml", state); err != nil {
				return err
			}
			start := appMap.SubMap("pipeline.start")
			start.Put("arguments", meta.Args)
			appMap.SubMap("pipeline.deploy").Put("upload", b.getDeployUploadMap(meta))
		}
		appMap.SubMap("pipeline.build").Put("download", b.getBuildDownloadMap(meta))
	}
	if app, err = toolbox.AsYamlText(appMap); err != nil {
		return err
	}
	_ = b.UploadToEndly("app.yaml", strings.NewReader(app))
	if uploadDockerfile {
		var dockerAssets = ""
		if len(meta.Assets) > 0 {
			for _, asset := range meta.Assets {
				if strings.Contains(asset, "config") {
					continue
				}
				if len(dockerAssets) > 0 {
					dockerAssets += "\n"
				}
				parent, _ := path.Split(asset)
				if parent == "" {
					dockerAssets += fmt.Sprintf("ADD %v /", asset)
				} else {
					dockerAssets += fmt.Sprintf("RUN mkdir -p %v\nADD %v /%v", parent, asset, parent)
				}
			}
		}
		state.Put("assets", dockerAssets)
		dockerfile, ok := buildAssets["docker/Dockerfile"]
		if !ok {
			return fmt.Errorf("failed to locate docker file %v", meta.Name)
		}
		dockerfile = state.ExpandAsText(dockerfile)
		_ = b.UploadToEndly("config/Dockerfile", strings.NewReader(dockerfile))
	}
	return err
}

func extractTag(composeContent string) *Tag {
	index := strings.Index(composeContent, "image:")
	if index == -1 {
		return nil
	}
	imageInfo := composeContent[index+6:]
	if breakIndex := strings.Index(imageInfo, "\n"); breakIndex != -1 {
		imageInfo = strings.TrimSpace(string(imageInfo[:breakIndex]))
	}
	var result = &Tag{}
	result.Version = "latest"
	result.Username = "endly"
	imageVersionPair := strings.SplitN(imageInfo, ":", 2)
	if len(imageVersionPair) > 1 {
		result.Version = imageVersionPair[1]
		userImagePair := strings.SplitN(imageVersionPair[0], "/", 2)
		if len(userImagePair) > 1 {
			result.Username = userImagePair[0]
			result.Image = userImagePair[1]
		}
	} else {
		result.Image = imageInfo
	}
	return result
}

//TODO java, node, react autodiscovery and initial test setup
func (b *builder) autoDiscover(request *Build, URL string) {
	service, err := storage.NewServiceForURL(request.Origin, "")
	if err != nil {
		return
	}
	objects, err := service.List(URL)
	if err != nil || len(objects) == 0 {
		return
	}
	for _, candidate := range objects {
		if request.DockerCompose && request.Dockerfile {
			return
		}
		if candidate.URL() == URL {
			continue
		}
		if candidate.FileInfo().Name() == "config" && candidate.IsFolder() {
			b.autoDiscover(request, candidate.URL())
		}
		if candidate.FileInfo().Name() == "Dockerfile" {
			if reader, err := service.Download(candidate); err == nil {
				defer reader.Close()
				if err := b.UploadToEndly("config/Dockerfile", reader); err == nil {
					request.Dockerfile = true
				}
			}
		}
		if candidate.FileInfo().Name() == "docker-compose.yml" || candidate.FileInfo().Name() == "docker-compose.yaml" {
			if reader, err := service.Download(candidate); err == nil {
				defer reader.Close()
				content, err := ioutil.ReadAll(reader)
				if err != nil {
					continue
				}
				request.Tag = extractTag(string(content))
				if err := b.UploadToEndly("config/docker-compose.yaml", bytes.NewReader(content)); err == nil {
					request.DockerCompose = true
				}
			}
		}
	}
}

func (b *builder) addSourceCode(meta *AppMeta, request *Build, assets map[string]string) error {
	var dbConfig Map
	if len(b.registerDb) > 0 {
		dbConfig = b.registerDb[0].GetMap("config")
	}
	if meta.DbConfigPath != "" && dbConfig != nil {
		if config, err := b.NewAssetMap(assets, meta.Config, nil); err == nil {
			config.Put(meta.DbConfigPath, dbConfig)
			if YAML, err := toolbox.AsYamlText(config); err == nil {
				assets[meta.Config] = YAML
			}
		}
	}
	for k, v := range assets {
		if k == "meta.yaml" || k == "regression" {
			continue
		}
		_ = b.Upload(k, strings.NewReader(v))
	}
	return nil
}

func (b *builder) Copy(state data.Map, URIs ...string) error {
	for _, URI := range URIs {
		var asset string
		var err error
		if state != nil && path.Ext(URI) == ".json" {
			var JSON = make([]interface{}, 0)
			resource := url.NewResource(toolbox.URLPathJoin(b.baseURL, URI))
			if err = resource.Decode(&JSON); err != nil {
				return err
			}
			expanded := state.Expand(JSON)
			asset, err = toolbox.AsIndentJSONText(expanded)
		} else {
			asset, err = b.Download(URI, state)
		}
		if err != nil {
			return err
		}
		_ = b.UploadToEndly(URI, strings.NewReader(asset))
	}
	return nil
}

func (b *builder) addRun(appMeta *AppMeta, request *RunRequest) error {
	run, err := b.NewMapFromURI("run.yaml", nil)
	if err != nil {
		return err
	}
	var init = run.GetMap("init")
	init.Put("sdk", request.Build.Sdk)
	init.Put("app", request.Build.App)
	var hasService bool
	for _, dbMeta := range b.dbMeta {
		if b.dbMeta != nil && dbMeta.Credentials != "" {
			var credentialName = dbMeta.Credentials
			credentialName = strings.Replace(credentialName, "$", "", 1)
			secret := strings.ToLower(strings.Replace(credentialName, "Credentials", "", 1))
			defaults := run.GetMap("defaults")
			defaults.Put(credentialName, "$"+credentialName)
			run.Put("defaults", defaults)
			init.Put(credentialName, secret)
		}
		if dbMeta.Service != "" {
			hasService = true
		}
	}
	if !hasService {
		pipeline := run.GetMap("pipeline")
		pipelineInit := pipeline.GetMap("init")
		pipeline.Put("init", pipelineInit.Remove("system"))
		pipelineDestroy := pipeline.GetMap("destroy")
		pipeline.Put("destroy", pipelineDestroy.Remove("system"))
		run.Put("pipeline", pipeline)
	}
	run.Put("init", init)
	if content, err := toolbox.AsYamlText(run); err == nil {
		if inlineWorkflowFormat == request.Testing.Regression {
			content = strings.Replace(content, "name: regression", "request: '@regression/regression'", 1)
		}
		_ = b.UploadToEndly("run.yaml", strings.NewReader(content))
	}
	return err
}

func (b *builder) NewMapFromURI(URI string, state data.Map) (Map, error) {
	var resource = url.NewResource(toolbox.URLPathJoin(b.baseURL, URI))
	text, err := resource.DownloadText()
	if err != nil {
		return nil, err
	}
	return b.asMap(text, state)
}

func (b *builder) NewAssetMap(assets map[string]string, URI string, state data.Map) (Map, error) {
	value, ok := assets[URI]
	if !ok {
		return nil, fmt.Errorf("unable to locate %v, available: %v", URI, toolbox.MapKeysToStringSlice(assets))
	}
	var text = state.ExpandAsText(toolbox.AsString(value))
	return b.asMap(text, state)
}

func (b *builder) buildSystem() error {
	system, err := b.NewMapFromURI("system/system.yaml", nil)
	if err != nil {
		return err
	}
	if len(b.serviceInit) > 0 {
		system.Put("init", b.serviceInit)
	} else {
		system = system.Remove("init")
	}
	initMap := system.SubMap("pipeline.init")
	initMap.Put("services", b.services)
	stopImagesMap := system.SubMap("pipeline.destroy.stop-images")
	stopImagesMap.Put("images", b.serviceImages)
	var content string
	if content, err = toolbox.AsYamlText(system); err == nil {
		_ = b.UploadToEndly("system.yaml", strings.NewReader(content))
	}
	return err
}

func (b *builder) buildDatastore() error {
	datastore, err := b.NewMapFromURI("datastore/datastore.yaml", nil)
	if err != nil {
		return err
	}
	pipeline := datastore.SubMap("pipeline")
	pipeline.Put("create-db", b.createDb)
	pipeline.Put("prepare", b.populateDb)
	var content string
	if content, err = toolbox.AsYamlText(datastore); err == nil {
		_ = b.UploadToEndly("datastore.yaml", strings.NewReader(content))
	}
	return err
}

func removeMatchedLines(text string, format string, matchExpressions ...string) string {
	if len(matchExpressions) > 1 {
		for i := 1; i < len(matchExpressions); i++ {
			text = removeMatchedLines(text, format, matchExpressions[i])
		}
	}
	text = strings.Replace(text, "\r", "", -1)
	var lines = strings.Split(text, "\n")
	var result = make([]string, 0)
	matchExpr := matchExpressions[0]
	if format == neatlyWorkflowFormat {
		for _, line := range lines {
			if strings.Contains(line, matchExpr) {
				continue
			}
			result = append(result, line)
		}
		return strings.Join(result, "\n")
	}
	return processBlockedText(text, matchExpr, "comments:", func(matched string) string {
		return ""
	})
}

func processBlockedText(text string, blockBeginExpr, blockEndExpr string, matchingBlockHandler func(matched string) string) string {
	text = strings.Replace(text, "\r", "", -1)
	var lines = strings.Split(text, "\n")
	var result = make([]string, 0)
	var matched = make([]string, 0)
	for _, line := range lines {
		if strings.Contains(line, blockBeginExpr) {
			matched = append(matched, line)
			continue
		}
		if len(matched) > 0 {
			matched = append(matched, line)
			if strings.Contains(line, blockEndExpr) {
				block := matchingBlockHandler(strings.Join(matched, "\n"))
				if block != "" {
					result = append(result, block)
				}
				matched = make([]string, 0)
			}
			continue
		}
		result = append(result, line)
	}
	return strings.Join(result, "\n")
}

func (b *builder) addUseCaseAssets(appMeta *AppMeta, request *RunRequest) error {
	b.Copy(nil,
		"regression/use_cases/001_xx_case/use_case.txt",
		"regression/use_cases/002_yy_case/use_case.txt")
	return nil
}

func (b *builder) buildSeleniumTestAssets(appMeta *AppMeta, request *RunRequest) error {
	b.Copy(nil,
		"regression/req/selenium_init.yaml",
		"regression/req/selenium_destroy.yaml")
	var aMap = map[string]interface{}{
		"in":       "name",
		"output":   "name",
		"expected": "empty",
		"url":      "http://127.0.0.1:8080/",
	}
	if len(appMeta.Selenium) > 0 {
		aMap, _ = util.NormalizeMap(appMeta.Selenium, true)
	}
	test, err := b.Download("regression/selenium_test.yaml", data.Map(aMap))
	if err != nil {
		return err
	}
	b.UploadToEndly("regression/use_cases/001_xx_case/selenium_test.yaml", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
	b.UploadToEndly("regression/use_cases/002_yy_case/selenium_test.yaml", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
	return nil
}

func (b *builder) buildDataTestAssets(appMeta *AppMeta, request *RunRequest) error {
	for i, dbMeta := range b.dbMeta {
		var setupSource = fmt.Sprintf("regression/%v/setup_data.json", strings.ToLower(dbMeta.Kind))
		datastore := request.Datastore[i]
		if datastore.MultiTableMapping {
			setupSource = fmt.Sprintf("regression/%v/v_setup_data.json", strings.ToLower(dbMeta.Kind))
		}
		if setupData, err := b.Download(setupSource, nil); err == nil {
			b.UploadToEndly(fmt.Sprintf("regression/use_cases/001_xx_case/%s_data.json", datastore.Name), strings.NewReader(strings.Replace(setupData, "$index", "1", 2)))
			b.UploadToEndly(fmt.Sprintf("regression/use_cases/002_yy_case/%s_data.json", datastore.Name), strings.NewReader(strings.Replace(setupData, "$index", "1", 2)))
			b.UploadToEndly(fmt.Sprintf("regression/%s_data.json", datastore.Name), strings.NewReader("[]"))
			b.UploadToEndly(fmt.Sprintf("regression/data/%s/dummy.json", datastore.Name), strings.NewReader("[]"))
		}
	}
	return nil
}

func (b *builder) buildTestUseCaseDataTestAssets(appMeta *AppMeta, request *RunRequest) error {
	for _, datastore := range request.Datastore {
		var dataSource = "dummy.json"
		if datastore.MultiTableMapping {
			dataSource = "v_dummy.json"
		}
		setupSource := fmt.Sprintf("regression/data/%v", dataSource)
		setupData, err := b.Download(setupSource, nil)
		if err == nil {
			err = b.UploadToEndly(fmt.Sprintf("regression/use_cases/001_xx_case/prepare/%v/%v", datastore.Name, dataSource), strings.NewReader(setupData))
		}
	}
	return nil
}

func (b *builder) buildStaticDataTestAssets(appMeta *AppMeta, request *RunRequest) error {
	for _, datastore := range request.Datastore {
		var dataSource = "dummy.json"
		if datastore.MultiTableMapping {
			dataSource = "v_dummy.json"
		}
		setupSource := fmt.Sprintf("regression/data/%v", dataSource)
		setupData, err := b.Download(setupSource, nil)
		if err == nil {
			b.UploadToEndly(fmt.Sprintf("regression/data/%v/%v", datastore.Name, dataSource), strings.NewReader(setupData))
		}
	}
	return nil
}

func (b *builder) buildHTTPTestAssets(appMeta *AppMeta, request *RunRequest) error {
	var requestMap = map[string]interface{}{
		"url": "http://127.0.0.1/",
	}
	var expectMap = map[string]interface{}{
		"Code": 200,
	}
	var http map[string]interface{}
	if len(appMeta.HTTP) > 0 {
		http, _ = util.NormalizeMap(appMeta.HTTP, true)
		if value, ok := http["request"]; ok {
			valueMap := toolbox.AsMap(value)
			util.Append(requestMap, valueMap, true)
		}
		if value, ok := http["expect"]; ok {
			valueMap := toolbox.AsMap(value)
			util.Append(expectMap, valueMap, true)
		}
	}
	var httpTest = map[string]interface{}{}
	var httpTestResource = url.NewResource(toolbox.URLPathJoin(b.baseURL, "regression/http_test.json"))
	if err := httpTestResource.Decode(&httpTest); err != nil {
		return err
	}
	var state = data.NewMap()
	state.Put("request", requestMap)
	state.Put("expect", expectMap)
	expandedHttpTest := state.Expand(httpTest)
	if test, err := toolbox.AsIndentJSONText(expandedHttpTest); err == nil {
		b.UploadToEndly("regression/use_cases/001_xx_case/http_test.json", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
		b.UploadToEndly("regression/use_cases/002_yy_case/http_test.json", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
	}
	return nil
}

func (b *builder) buildRESTTestAssets(appMeta *AppMeta, request *RunRequest) error {
	var requestMap = map[string]interface{}{}
	var requestURL = "http://127.0.0.1/"
	var method = "POST"
	var expectMap = map[string]interface{}{}
	var http map[string]interface{}
	if len(appMeta.REST) > 0 {
		http, _ = util.NormalizeMap(appMeta.REST, true)
		if value, ok := http["request"]; ok {
			valueMap := toolbox.AsMap(value)
			util.Append(requestMap, valueMap, true)
		}
		if value, ok := http["expect"]; ok {
			valueMap := toolbox.AsMap(value)
			util.Append(expectMap, valueMap, true)
		}
		if value, ok := http["url"]; ok {
			requestURL = toolbox.AsString(value)
		}
		if value, ok := http["method"]; ok {
			method = toolbox.AsString(value)
		}
	}
	var httpTest = map[string]interface{}{}
	var httpTestResource = url.NewResource(toolbox.URLPathJoin(b.baseURL, "regression/rest_test.json"))
	if err := httpTestResource.Decode(&httpTest); err != nil {
		return err
	}
	var state = data.NewMap()
	state.Put("request", requestMap)
	state.Put("expect", expectMap)
	state.Put("url", requestURL)
	state.Put("method", method)
	udf.Register(state)
	expandedHttpTest := state.Expand(httpTest)
	if test, err := toolbox.AsIndentJSONText(expandedHttpTest); err == nil {
		b.UploadToEndly("regression/use_cases/001_xx_case/rest_test.json", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
		b.UploadToEndly("regression/use_cases/002_yy_case/rest_test.json", strings.NewReader(strings.Replace(test, "$index", "1", 2)))
	}
	return nil
}

func (b *builder) addRegressionData(appMeta *AppMeta, request *RunRequest) error {
	if request.Datastore == nil {
		return nil
	}
	var state = data.NewMap()
	dataInit, err := b.NewMapFromURI("datastore/regression/data_init.yaml", state)
	if err != nil {
		return err
	}
	pipeline := dataInit.GetMap("pipeline")
	for i, datastore := range request.Datastore {
		state.Put("db", datastore.Name)
		state.Put("dbKey", "$"+datastore.Name)
		var prepare Map
		dump, err := b.Download("util/dump.yaml", state)
		if err == nil {
			_ = b.UploadToEndly(fmt.Sprintf("util/%v/dump.yaml", datastore.Name), strings.NewReader(dump))
		}
		if freeze, err := b.Download("util/freeze.yaml", state); err == nil {
			_ = b.UploadToEndly(fmt.Sprintf("util/%v/freeze.yaml", datastore.Name), strings.NewReader(freeze))
		}
		switch request.Testing.UseCaseData {
		case "preload":
			prepare, err = b.NewMapFromURI("datastore/regression/data.yaml", state)
		default:
			prepare, err = b.NewMapFromURI("datastore/regression/prepare.yaml", state)
		}
		if err != nil {
			return err
		}
		dbMeta := b.dbMeta[i]
		var tables interface{} = dbMeta.Tables
		if !datastore.MultiTableMapping {
			prepare = prepare.Remove("mapping")
		} else {
			tables = "$tables"
			mapping, err := b.Download("regression/mapping.json", nil)
			if err == nil {
				b.UploadToEndly(fmt.Sprintf("regression/%v/mapping.json", datastore.Name), strings.NewReader(mapping))
			}
		}
		switch request.Testing.UseCaseData {
		case "test":
			b.buildTestUseCaseDataTestAssets(appMeta, request)
		case "preload":
			if !dbMeta.Sequence || len(dbMeta.Tables) == 0 {
				prepare = prepare.Remove("sequence")
			} else {
				prepare.GetMap("sequence").Put("tables", tables)
			}
			b.buildDataTestAssets(appMeta, request)
		default:
			b.buildStaticDataTestAssets(appMeta, request)
		}
		state.Put("driver", datastore.Driver)
		state.Put("db", datastore.Name)
		dbNode, err := b.NewMapFromURI("datastore/regression/dbnode.yaml", state)
		if err != nil {
			return err
		}
		readIp, _ := b.NewMapFromURI("datastore/ip.yaml", state)
		prepareText, _ := toolbox.AsYamlText(prepare)
		prepareText = strings.Replace(prepareText, "${db}", datastore.Name, -1)
		prepareYAML, _ := b.asMap(prepareText, state)
		if b.dbMeta[i].Service == "" {
			dbNode = dbNode.Remove(fmt.Sprintf("%v-ip", datastore.Driver))
		} else {
			dbNode.Put(fmt.Sprintf("%v-ip", datastore.Driver), readIp)
		}
		dbNode.Put("register", b.registerDb[i])
		if request.Testing.UseCaseData == "test" {
			mapping, err := b.NewMapFromURI("datastore/regression/mapping.yaml", state)
			if err != nil {
				return err
			}
			if datastore.MultiTableMapping {
				dbNode.Put("mapping", mapping)
			}
			dbNode = dbNode.Remove("prepare")
		} else {
			dbNode.Put("prepare", prepareYAML)
		}
		pipeline.Put(datastore.Name, dbNode)
	}
	dataYAML, _ := toolbox.AsYamlText(dataInit)
	b.UploadToEndly("regression/data_init.yaml", strings.NewReader(dataYAML))
	return nil
}

func removePreloadUseCaseReference(regression string, format string) string {
	if format == inlineWorkflowFormat {
		return removeMatchedLines(regression, format, "data:")
	}
	regression = strings.Replace(regression, "/Data.db", "", 1)
	var lines = []string{}
	for _, line := range strings.Split(regression, "\n") {
		lines = append(lines, string(line[:len(line)-1]))
	}
	return strings.Join(lines, "\n")
}

func (b *builder) expandPrepareTestUseCaseData(regression, format string, request *RunRequest) string {
	if inlineWorkflowFormat == format {
		return processBlockedText(regression, "-prepare", "comments:", func(matched string) string {
			var result = make([]string, 0)
			for _, datastore := range request.Datastore {
				var state = data.NewMap()
				state.Put("datastore", datastore.Name)
				result = append(result, state.ExpandAsText(matched))
			}
			return strings.Join(result, "\n")
		})
	}
	var lines = make([]string, 0)
	for _, line := range strings.Split(regression, "\n") {
		if strings.Contains(line, "set initial test") {
			for _, datastore := range request.Datastore {
				var state = data.NewMap()
				state.Put("datastore", datastore.Name)
				lines = append(lines, state.ExpandAsText(line))
			}
		} else {
			lines = append(lines, line)
		}
	}
	return strings.Join(lines, "\n")
}

func (b *builder) expandExpectTestUseCaseData(regression, format string, request *RunRequest) string {
	if inlineWorkflowFormat == format {
		return processBlockedText(regression, "-expect", "comments:", func(matched string) string {
			var result = make([]string, 0)
			for _, datastore := range request.Datastore {
				var state = data.NewMap()
				state.Put("datastore", datastore.Name)
				result = append(result, state.ExpandAsText(matched))
			}
			return strings.Join(result, "\n")
		})
	}
	var lines = []string{}
	for _, line := range strings.Split(regression, "\n") {
		if strings.Contains(line, "verify test") {
			for _, datastore := range request.Datastore {
				var state = data.NewMap()
				state.Put("datastore", datastore.Name)
				lines = append(lines, state.ExpandAsText(line))
			}
		} else {
			lines = append(lines, line)
		}
	}
	return strings.Join(lines, "\n")
}

func (b *builder) expandPushPreloadedUseCaseData(regression string, format string, request *RunRequest) string {
	if inlineWorkflowFormat == format {
		return processBlockedText(regression, "data", "comments:", func(matched string) string {
			var result = make([]string, 0)
			for _, datastore := range request.Datastore {
				var state = data.NewMap()
				state.Put("dataTarget", fmt.Sprintf("%v.[]setup", datastore.Name))
				state.Put("dataFile", fmt.Sprintf("@%v_data", datastore.Name))
				result = append(result, state.ExpandAsText(matched))
			}
			return strings.Join(result, "\n")
		})
	}
	lines := strings.Split(regression, "\n")
	var before = []string{}
	var setupLine = ""
	//extract lines before setup_data and after
	var after = []string{}
	for i, line := range lines {
		if strings.Contains(lines[i], "@setup_data") {
			before = lines[:i]
			setupLine = line
...
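The builder above leans on one pairing throughout: values stored with state.Put become visible to $name/${name} placeholders when a template is expanded with ExpandAsText. Below is a minimal, self-contained sketch of that pattern, assuming the github.com/viant/toolbox/data package the snippet imports; the keys and template text are illustrative only, and the commented output is what expansion is expected to produce.

package main

import (
	"fmt"

	"github.com/viant/toolbox/data"
)

func main() {
	// Build a state map and register values, as builder.go does
	// before each NewAssetMap/NewMapFromURI call.
	state := data.NewMap()
	state.Put("db", "mydb")
	state.Put("driver", "mysql")

	// Template text of the kind the builder loads from its *.yaml assets.
	template := "datastore: ${db}\ndriver: ${driver}"
	fmt.Println(state.ExpandAsText(template))
	// expected output:
	// datastore: mydb
	// driver: mysql
}

This is why the builder keeps calling state.Put right before loading a template: the same asset can be re-expanded once per datastore with fresh values.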

regression_test.go

Source: regression_test.go (GitHub)

...
	destroyLocalBucket(httpclient, t, bucket)
}

func regressionBucket(httpclient *http.Client, t *testing.T, bucket string) {
	var (
		numPuts  = 10
		filesput = make(chan string, numPuts)
		errch    = make(chan error, 100)
		wg       = &sync.WaitGroup{}
		sgl      *dfc.SGLIO
		filesize = uint64(1024)
	)
	if usingSG {
		sgl = dfc.NewSGLIO(filesize)
		defer sgl.Free()
	}
	putRandomFiles(0, baseseed+2, filesize, numPuts, bucket, t, nil, errch, filesput, SmokeDir, SmokeStr, "", false, sgl)
	close(filesput)
	selectErr(errch, "put", t, false)
	getRandomFiles(0, 0, numPuts, bucket, SmokeStr+"/", t, nil, errch)
	selectErr(errch, "get", t, false)
	for fname := range filesput {
		if usingFile {
			err := os.Remove(SmokeDir + "/" + fname)
			if err != nil {
				t.Error(err)
			}
		}
		wg.Add(1)
		go client.Del(proxyurl, bucket, "smoke/"+fname, wg, errch, false)
	}
	wg.Wait()
	selectErr(errch, "delete", t, abortonerr)
	close(errch)
}

func regressionStats(t *testing.T) {
	smap := getClusterMap(httpclient, t)
	stats := getClusterStats(httpclient, t)
	for k, v := range stats.Target {
		tdstats := getDaemonStats(httpclient, t, smap.Smap[k].DirectURL)
		tdcapstats := tdstats["capacity"].(map[string]interface{})
		dcapstats := v.Capacity
		for fspath, fstats := range dcapstats {
			tfstats := tdcapstats[fspath].(map[string]interface{})
			used, err := tfstats["used"].(json.Number).Int64()
			if err != nil {
				t.Fatalf("Could not decode Target Stats: fstats.Used")
			}
			avail, err := tfstats["avail"].(json.Number).Int64()
			if err != nil {
				t.Fatalf("Could not decode Target Stats: fstats.Avail")
			}
			usedpct, err := tfstats["usedpct"].(json.Number).Int64()
			if err != nil {
				t.Fatalf("Could not decode Target Stats: fstats.Usedpct")
			}
			if uint64(used) != fstats.Used || uint64(avail) != fstats.Avail || uint32(usedpct) != fstats.Usedpct {
				t.Errorf("Stats are different when queried from Target and Proxy: "+
					"Used: %v, %v | Available: %v, %v | Percentage: %v, %v",
					tfstats["used"], fstats.Used, tfstats["avail"], fstats.Avail, tfstats["usedpct"], fstats.Usedpct)
			}
			if fstats.Usedpct > HighWaterMark {
				t.Error("Used Percentage above High Watermark")
			}
		}
	}
}

func regressionConfig(t *testing.T) {
	oconfig := getConfig(proxyurl+"/v1/daemon", httpclient, t)
	olruconfig := oconfig["lru_config"].(map[string]interface{})
	orebconfig := oconfig["rebalance_conf"].(map[string]interface{})
	oproxyconfig := oconfig["proxy"].(map[string]interface{})
	for k, v := range configRegression {
		setConfig(k, v, proxyurl+"/v1/cluster", httpclient, t)
	}
	nconfig := getConfig(proxyurl+"/v1/daemon", httpclient, t)
	nlruconfig := nconfig["lru_config"].(map[string]interface{})
	nrebconfig := nconfig["rebalance_conf"].(map[string]interface{})
	nproxyconfig := nconfig["proxy"].(map[string]interface{})
	if nconfig["stats_time"] != configRegression["stats_time"] {
		t.Errorf("StatsTime was not set properly: %v, should be: %v",
			nconfig["stats_time"], configRegression["stats_time"])
	} else {
		o := oconfig["stats_time"].(string)
		setConfig("stats_time", o, proxyurl+"/v1/cluster", httpclient, t)
	}
	if nlruconfig["dont_evict_time"] != configRegression["dont_evict_time"] {
		t.Errorf("DontEvictTime was not set properly: %v, should be: %v",
			nlruconfig["dont_evict_time"], configRegression["dont_evict_time"])
	} else {
		o := olruconfig["dont_evict_time"].(string)
		setConfig("dont_evict_time", o, proxyurl+"/v1/cluster", httpclient, t)
	}
	if nlruconfig["capacity_upd_time"] != configRegression["capacity_upd_time"] {
		t.Errorf("CapacityUpdTime was not set properly: %v, should be: %v",
			nlruconfig["capacity_upd_time"], configRegression["capacity_upd_time"])
	} else {
		o := olruconfig["capacity_upd_time"].(string)
		setConfig("capacity_upd_time", o, proxyurl+"/v1/cluster", httpclient, t)
	}
	if nrebconfig["startup_delay_time"] != configRegression["startup_delay_time"] {
		t.Errorf("StartupDelayTime was not set properly: %v, should be: %v",
			nrebconfig["startup_delay_time"], configRegression["startup_delay_time"])
	} else {
		o := orebconfig["startup_delay_time"].(string)
		setConfig("startup_delay_time", o, proxyurl+"/v1/cluster", httpclient, t)
	}
	if hw, err := strconv.Atoi(configRegression["highwm"]); err != nil {
		t.Fatalf("Error parsing HighWM: %v", err)
	} else if nlruconfig["highwm"] != float64(hw) {
		t.Errorf("HighWatermark was not set properly: %.0f, should be: %d",
			nlruconfig["highwm"], hw)
	} else {
		o := olruconfig["highwm"].(float64)
		setConfig("highwm", strconv.Itoa(int(o)), proxyurl+"/v1/cluster", httpclient, t)
	}
	if lw, err := strconv.Atoi(configRegression["lowwm"]); err != nil {
		t.Fatalf("Error parsing LowWM: %v", err)
	} else if nlruconfig["lowwm"] != float64(lw) {
		t.Errorf("LowWatermark was not set properly: %.0f, should be: %d",
			nlruconfig["lowwm"], lw)
	} else {
		o := olruconfig["lowwm"].(float64)
		setConfig("lowwm", strconv.Itoa(int(o)), proxyurl+"/v1/cluster", httpclient, t)
	}
	if pt, err := strconv.ParseBool(configRegression["passthru"]); err != nil {
		t.Fatalf("Error parsing Passthru: %v", err)
	} else if nproxyconfig["passthru"] != pt {
		t.Errorf("Proxy Passthru was not set properly: %v, should be %v",
			nproxyconfig["passthru"], pt)
	} else {
		o := oproxyconfig["passthru"].(bool)
		setConfig("passthru", strconv.FormatBool(o), proxyurl+"/v1/cluster", httpclient, t)
	}
	if pt, err := strconv.ParseBool(configRegression["lru_enabled"]); err != nil {
		t.Fatalf("Error parsing LRUEnabled: %v", err)
	} else if nlruconfig["lru_enabled"] != pt {
		t.Errorf("LRUEnabled was not set properly: %v, should be %v",
			nlruconfig["lru_enabled"], pt)
	} else {
		o := olruconfig["lru_enabled"].(bool)
		setConfig("lru_enabled", strconv.FormatBool(o), proxyurl+"/v1/cluster", httpclient, t)
	}
}

func regressionLRU(t *testing.T) {
	if failLRU != "" {
		t.Errorf(failLRU)
		t.Fail()
		return
	}
	var (
		errch   = make(chan error, 100)
		usedpct = uint32(100)
	)
	//
	// remember targets' watermarks
	//
	smap := getClusterMap(httpclient, t)
	lwms := make(map[string]interface{})
	hwms := make(map[string]interface{})
	bytesEvictedOrig := make(map[string]int64)
	filesEvictedOrig := make(map[string]int64)
	for k, di := range smap.Smap {
		cfg := getConfig(di.DirectURL+RestAPIDaemonSuffix, httpclient, t)
		lrucfg := cfg["lru_config"].(map[string]interface{})
		lwms[k] = lrucfg["lowwm"]
		hwms[k] = lrucfg["highwm"]
	}
	// add a few more
	getRandomFiles(0, 0, 3, clibucket, "", t, nil, errch)
	selectErr(errch, "get", t, true)
	//
	// find out min usage %% across all targets
	//
	stats := getClusterStats(httpclient, t)
	for k, v := range stats.Target {
		bytesEvictedOrig[k], filesEvictedOrig[k] = v.Core.Bytesevicted, v.Core.Filesevicted
		for _, c := range v.Capacity {
			usedpct = min(usedpct, c.Usedpct)
		}
	}
	tlogf("LRU: current min space usage in the cluster: %d%%\n", usedpct)
	var (
		lowwm  = usedpct - 5
		highwm = usedpct - 1
	)
	if int(lowwm) < 10 {
		t.Errorf("The current space usage is too low (%d) for the LRU to be tested", lowwm)
		t.Fail()
		return
	}
	oconfig := getConfig(proxyurl+"/v1/daemon", httpclient, t)
	if t.Failed() {
		return
	}
	//
	// all targets: set new watermarks; restore upon exit
	//
	olruconfig := oconfig["lru_config"].(map[string]interface{})
	defer func() {
		setConfig("dont_evict_time", olruconfig["dont_evict_time"].(string), proxyurl+"/v1/cluster", httpclient, t)
		setConfig("capacity_upd_time", olruconfig["capacity_upd_time"].(string), proxyurl+"/v1/cluster", httpclient, t)
		setConfig("highwm", fmt.Sprint(olruconfig["highwm"]), proxyurl+"/v1/cluster", httpclient, t)
		setConfig("lowwm", fmt.Sprint(olruconfig["lowwm"]), proxyurl+"/v1/cluster", httpclient, t)
		for k, di := range smap.Smap {
			setConfig("highwm", fmt.Sprint(hwms[k]), di.DirectURL+RestAPIDaemonSuffix, httpclient, t)
			setConfig("lowwm", fmt.Sprint(lwms[k]), di.DirectURL+RestAPIDaemonSuffix, httpclient, t)
		}
	}()
	//
	// cluster-wide reduce dont-evict-time
	//
	dontevicttimestr := "30s"
	capacityupdtimestr := "5s"
	sleeptime, err := time.ParseDuration(oconfig["stats_time"].(string)) // to make sure the stats get updated
	if err != nil {
		t.Fatalf("Failed to parse stats_time: %v", err)
	}
	setConfig("dont_evict_time", dontevicttimestr, proxyurl+"/v1/cluster", httpclient, t)
	setConfig("capacity_upd_time", capacityupdtimestr, proxyurl+"/v1/cluster", httpclient, t)
	if t.Failed() {
		return
	}
	setConfig("lowwm", fmt.Sprint(lowwm), proxyurl+"/v1/cluster", httpclient, t)
	if t.Failed() {
		return
	}
	setConfig("highwm", fmt.Sprint(highwm), proxyurl+"/v1/cluster", httpclient, t)
	if t.Failed() {
		return
	}
	waitProgressBar("LRU: ", sleeptime/2)
	getRandomFiles(0, 0, 1, clibucket, "", t, nil, errch)
	waitProgressBar("LRU: ", sleeptime/2)
	//
	// results
	//
	stats = getClusterStats(httpclient, t)
	testFsPaths := oconfig["test_fspaths"].(map[string]interface{})
	for k, v := range stats.Target {
		bytes := v.Core.Bytesevicted - bytesEvictedOrig[k]
		tlogf("Target %s: evicted %d files - %.2f MB (%dB) total\n",
			k, v.Core.Filesevicted-filesEvictedOrig[k], float64(bytes)/1000/1000, bytes)
		//
		// testingFSPpaths() - cannot reliably verify space utilization by tmpfs
		//
		if testFsPaths["count"].(float64) > 0 {
			continue
		}
		for mpath, c := range v.Capacity {
			if c.Usedpct < lowwm-1 || c.Usedpct > lowwm+1 {
				t.Errorf("Target %s failed to reach lwm %d%%: mpath %s, used space %d%%", k, lowwm, mpath, c.Usedpct)
			}
		}
	}
}

func regressionRebalance(t *testing.T) {
	var (
		sid      string
		numPuts  = 40
		filesput = make(chan string, numPuts)
		errch    = make(chan error, 100)
		wg       = &sync.WaitGroup{}
		sgl      *dfc.SGLIO
		filesize = uint64(1024 * 128)
	)
	filesSentOrig := make(map[string]int64)
	bytesSentOrig := make(map[string]int64)
	filesRecvOrig := make(map[string]int64)
	bytesRecvOrig := make(map[string]int64)
	stats := getClusterStats(httpclient, t)
	for k, v := range stats.Target {
		bytesSentOrig[k], filesSentOrig[k], bytesRecvOrig[k], filesRecvOrig[k] =
			v.Core.Numsentbytes, v.Core.Numsentfiles, v.Core.Numrecvbytes, v.Core.Numrecvfiles
	}
	//
	// step 1. config
	//
	oconfig := getConfig(proxyurl+"/v1/daemon", httpclient, t)
	orebconfig := oconfig["rebalance_conf"].(map[string]interface{})
	defer func() {
		setConfig("startup_delay_time", orebconfig["startup_delay_time"].(string), proxyurl+"/v1/cluster", httpclient, t)
	}()
	//
	// cluster-wide reduce startup_delay_time
	//
	startupdelaytimestr := "20s"
	setConfig("startup_delay_time", startupdelaytimestr, proxyurl+"/v1/cluster", httpclient, t) // NOTE: 1 second
	if t.Failed() {
		return
	}
	waitProgressBar("Rebalance: ", time.Second*10)
	//
	// step 2. unregister random target
	//
	smap := getClusterMap(httpclient, t)
	l := len(smap.Smap)
	if l < 3 { // NOTE: proxy is counted; FIXME: will have to be fixed for "multi-proxies"...
		if l == 0 {
			t.Fatal("DFC cluster is empty - zero targets")
		} else {
			t.Fatalf("Must have 2 or more targets in the cluster, have only %d", l)
		}
	}
	for sid = range smap.Smap {
		break
	}
	unregisterTarget(sid, t)
	tlogf("Unregistered %s: cluster size = %d (targets)\n", sid, l-1)
	//
	// step 3. put random files => (cluster - 1)
	//
	if usingSG {
		sgl = dfc.NewSGLIO(filesize)
		defer sgl.Free()
	}
	putRandomFiles(0, baseseed, filesize, numPuts, clibucket, t, nil, errch, filesput, SmokeDir, SmokeStr, "", false, sgl)
	selectErr(errch, "put", t, false)
	//
	// step 4. register back
	//
	registerTarget(sid, &smap, t)
	for i := 0; i < 10; i++ {
		time.Sleep(time.Second)
		smap = getClusterMap(httpclient, t)
		if len(smap.Smap) == l {
			break
		}
	}
	if len(smap.Smap) != l {
		t.Errorf("Re-registration timed out: target %s, original num targets %d\n", sid, l)
		return
	}
	tlogf("Re-registered %s: the cluster is now back to %d targets\n", sid, l)
	//
	// step 5. wait for rebalance to run its course
	//
	waitProgressBar("Rebalance: ", time.Second*10)
	//
	// step 6. statistics
	//
	stats = getClusterStats(httpclient, t)
	var bsent, fsent, brecv, frecv int64
	for k, v := range stats.Target {
		bsent += v.Core.Numsentbytes - bytesSentOrig[k]
		fsent += v.Core.Numsentfiles - filesSentOrig[k]
		brecv += v.Core.Numrecvbytes - bytesRecvOrig[k]
		frecv += v.Core.Numrecvfiles - filesRecvOrig[k]
	}
	//
	// step 7. cleanup
	//
	close(filesput) // to exit for-range
	for fname := range filesput {
		if usingFile {
			err := os.Remove(SmokeDir + "/" + fname)
			if err != nil {
				t.Error(err)
			}
		}
		wg.Add(1)
		go client.Del(proxyurl, clibucket, "smoke/"+fname, wg, errch, false)
	}
	wg.Wait()
	selectErr(errch, "delete", t, abortonerr)
	close(errch)
	if !t.Failed() && testing.Verbose() {
		fmt.Printf("Rebalance: sent %.2f MB in %d files\n", float64(bsent)/1000/1000, fsent)
		fmt.Printf("           received %.2f MB in %d files\n", float64(brecv)/1000/1000, frecv)
	}
}

func regressionRename(t *testing.T) {
	var (
		req       *http.Request
		r         *http.Response
		injson    []byte
		err       error
		numPuts   = 10
		filesput  = make(chan string, numPuts)
		errch     = make(chan error, numPuts)
		basenames = make([]string, 0, numPuts) // basenames
		bnewnames = make([]string, 0, numPuts) // new basenames
		sgl       *dfc.SGLIO
	)
	// create & put
	createLocalBucket(httpclient, t, RenameLocalBucketName)
	defer func() {
		// cleanup
		wg := &sync.WaitGroup{}
		for _, fname := range bnewnames {
			wg.Add(1)
			go client.Del(proxyurl, RenameLocalBucketName, RenameStr+"/"+fname, wg, errch, false)
		}
		if usingFile {
			for _, fname := range basenames {
				err = os.Remove(RenameDir + "/" + fname)
				if err != nil {
					t.Errorf("Failed to remove file %s: %v", fname, err)
				}
			}
		}
		wg.Wait()
		selectErr(errch, "delete", t, false)
		close(errch)
		destroyLocalBucket(httpclient, t, RenameLocalBucketName)
	}()
	time.Sleep(time.Second * 5)
	if err = dfc.CreateDir(RenameDir); err != nil {
		t.Errorf("Error creating dir: %v", err)
	}
	if usingSG {
		sgl = dfc.NewSGLIO(1024 * 1024)
		defer sgl.Free()
	}
	putRandomFiles(0, baseseed+1, 0, numPuts, RenameLocalBucketName, t, nil, nil, filesput, RenameDir, RenameStr, "", false, sgl)
	selectErr(errch, "put", t, false)
	close(filesput)
	for fname := range filesput {
		basenames = append(basenames, fname)
	}
	// rename
	for _, fname := range basenames {
		RenameMsg.Name = RenameStr + "/" + fname + ".renamed" // objname
		bnewnames = append(bnewnames, fname+".renamed")       // base name
		injson, err = json.Marshal(RenameMsg)
		if err != nil {
			t.Fatalf("Failed to marshal RenameMsg: %v", err)
		}
		req, err = http.NewRequest("POST", proxyurl+"/v1/files/"+RenameLocalBucketName+"/"+RenameStr+"/"+fname, bytes.NewBuffer(injson))
		if err != nil {
			t.Fatalf("Failed to create request: %v", err)
		}
		r, err = httpclient.Do(req)
		if r != nil {
			r.Body.Close()
		}
		s := fmt.Sprintf("Rename %s/%s => %s", RenameStr, fname, RenameMsg.Name)
		if testfail(err, s, r, nil, t) {
			destroyLocalBucket(httpclient, t, RenameLocalBucketName)
			return
		}
		tlogln(s)
	}
	// get renamed objects
	waitProgressBar("Rename/move: ", time.Second*5)
	for _, fname := range bnewnames {
		client.Get(proxyurl, RenameLocalBucketName, RenameStr+"/"+fname, nil, errch, false, false)
	}
	selectErr(errch, "get", t, false)
}

func regressionPrefetchList(t *testing.T) {
	var (
		toprefetch    = make(chan string, numfiles)
		netprefetches = int64(0)
	)
	// Skip the test when given a local bucket
	props, err := client.HeadBucket(proxyurl, clibucket)
	if err != nil {
		t.Errorf("Could not execute HeadBucket Request: %v", err)
		return
	}
	if props.CloudProvider == dfc.ProviderDfc {
		t.Skipf("Cannot prefetch from local bucket %s", clibucket)
	}
	// 1. Get initial number of prefetches
	smap := getClusterMap(httpclient, t)
	for _, v := range smap.Smap {
		stats := getDaemonStats(httpclient, t, v.DirectURL)
		corestats := stats["core"].(map[string]interface{})
		npf, err := corestats["numprefetch"].(json.Number).Int64()
		if err != nil {
			t.Fatalf("Could not decode target stats: numprefetch")
		}
		netprefetches -= npf
	}
	// 2. Get keys to prefetch
	n := int64(getMatchingKeys(match, clibucket, []chan string{toprefetch}, nil, t))
	close(toprefetch) // to exit for-range
	files := make([]string, 0)
	for i := range toprefetch {
		files = append(files, i)
	}
	// 3. Evict those objects from the cache and prefetch them
	tlogf("Evicting and Prefetching %d objects\n", len(files))
	err = client.EvictList(proxyurl, clibucket, files, true, 0)
	if err != nil {
		t.Error(err)
	}
	err = client.PrefetchList(proxyurl, clibucket, files, true, 0)
	if err != nil {
		t.Error(err)
	}
	// 5. Ensure that all the prefetches occurred.
	for _, v := range smap.Smap {
		stats := getDaemonStats(httpclient, t, v.DirectURL)
		corestats := stats["core"].(map[string]interface{})
		npf, err := corestats["numprefetch"].(json.Number).Int64()
		if err != nil {
			t.Fatalf("Could not decode target stats: numprefetch")
		}
		netprefetches += npf
	}
	if netprefetches != n {
		t.Errorf("Did not prefetch all files: Missing %d of %d\n", (n - netprefetches), n)
	}
}

func regressionPrefetchRange(t *testing.T) {
	var (
		netprefetches = int64(0)
		err           error
		rmin, rmax    int64
		re            *regexp.Regexp
	)
	// Skip the test when given a local bucket
	props, err := client.HeadBucket(proxyurl, clibucket)
	if err != nil {
		t.Errorf("Could not execute HeadBucket Request: %v", err)
		return
	}
	if props.CloudProvider == dfc.ProviderDfc {
		t.Skipf("Cannot prefetch from local bucket %s", clibucket)
	}
	// 1. Get initial number of prefetches
	smap := getClusterMap(httpclient, t)
	for _, v := range smap.Smap {
		stats := getDaemonStats(httpclient, t, v.DirectURL)
		corestats := stats["core"].(map[string]interface{})
		npf, err := corestats["numprefetch"].(json.Number).Int64()
		if err != nil {
			t.Fatalf("Could not decode target stats: numprefetch")
		}
		netprefetches -= npf
	}
	// 2. Parse arguments
	if prefetchRange != "" {
		ranges := strings.Split(prefetchRange, ":")
		if rmin, err = strconv.ParseInt(ranges[0], 10, 64); err != nil {
			t.Errorf("Error parsing range min: %v", err)
		}
		if rmax, err = strconv.ParseInt(ranges[1], 10, 64); err != nil {
			t.Errorf("Error parsing range max: %v", err)
		}
	}
	// 3. Discover the number of items we expect to be prefetched
	if re, err = regexp.Compile(prefetchRegex); err != nil {
		t.Errorf("Error compiling regex: %v", err)
	}
	msg := &dfc.GetMsg{GetPrefix: prefetchPrefix, GetPageSize: int(pagesize)}
	objsToFilter := testListBucket(t, clibucket, msg, 0)
	files := make([]string, 0)
	if objsToFilter != nil {
		for _, be := range objsToFilter.Entries {
			if oname := strings.TrimPrefix(be.Name, prefetchPrefix); oname != be.Name {
				s := re.FindStringSubmatch(oname)
				if s == nil {
					continue
				}
				if i, err := strconv.ParseInt(s[0], 10, 64); err != nil && s[0] != "" {
					continue
				} else if s[0] == "" || (rmin == 0 && rmax == 0) || (i >= rmin && i <= rmax) {
					files = append(files, be.Name)
				}
			}
		}
	}
	// 4. Evict those objects from the cache, and then prefetch them.
	tlogf("Evicting and Prefetching %d objects\n", len(files))
	err = client.EvictRange(proxyurl, clibucket, prefetchPrefix, prefetchRegex, prefetchRange, true, 0)
	if err != nil {
		t.Error(err)
	}
	err = client.PrefetchRange(proxyurl, clibucket, prefetchPrefix, prefetchRegex, prefetchRange, true, 0)
	if err != nil {
		t.Error(err)
	}
	// 5. Ensure that all the prefetches occurred.
	for _, v := range smap.Smap {
		stats := getDaemonStats(httpclient, t, v.DirectURL)
		corestats := stats["core"].(map[string]interface{})
		npf, err := corestats["numprefetch"].(json.Number).Int64()
		if err != nil {
			t.Fatalf("Could not decode target stats: numprefetch")
		}
		netprefetches += npf
	}
	if netprefetches != int64(len(files)) {
		t.Errorf("Did not prefetch all files: Missing %d of %d\n",
			(int64(len(files)) - netprefetches), len(files))
	}
}

func regressionDeleteRange(t *testing.T) {
	var (
		err            error
		prefix         = ListRangeStr + "/tstf-"
		quarter        = numfiles / 4
		third          = numfiles / 3
		smallrangesize = third - quarter + 1
		smallrange     = fmt.Sprintf("%d:%d", quarter, third)
		bigrange       = fmt.Sprintf("0:%d", numfiles)
		regex          = "\\d?\\d"
		wg             = &sync.WaitGroup{}
		errch          = make(chan error, numfiles)
	)
	// 1. Put files to delete:
	for i := 0; i < numfiles; i++ {
		r, err := readers.NewRandReader(fileSize, true /* withHash */)
		if err != nil {
			t.Fatal(err)
		}
		wg.Add(1)
		go client.PutAsync(wg, proxyurl, r, clibucket, fmt.Sprintf("%s%d", prefix, i), errch, false /* silent */)
	}
	wg.Wait()
	selectErr(errch, "put", t, true)
	// 2. Delete the small range of objects:
	err = client.DeleteRange(proxyurl, clibucket, prefix, regex, smallrange, true, 0)
	if err != nil {
		t.Error(err)
	}
	// 3. Check to see that the correct files have been deleted
	msg := &dfc.GetMsg{GetPrefix: prefix, GetPageSize: int(pagesize)}
	bktlst, err := client.ListBucket(proxyurl, clibucket, msg, 0)
	if len(bktlst.Entries) != numfiles-smallrangesize {
		t.Errorf("Incorrect number of remaining files: %d, should be %d", len(bktlst.Entries), numfiles-smallrangesize)
	}
	filemap := make(map[string]*dfc.BucketEntry)
	for _, entry := range bktlst.Entries {
		filemap[entry.Name] = entry
	}
	for i := 0; i < numfiles; i++ {
		keyname := fmt.Sprintf("%s%d", prefix, i)
		_, ok := filemap[keyname]
		if ok && i >= quarter && i <= third {
			t.Errorf("File exists that should have been deleted: %s", keyname)
		} else if !ok && (i < quarter || i > third) {
			t.Errorf("File does not exist that should not have been deleted: %s", keyname)
		}
	}
	// 4. Delete the big range of objects:
	err = client.DeleteRange(proxyurl, clibucket, prefix, regex, bigrange, true, 0)
	if err != nil {
		t.Error(err)
	}
	// 5. Check to see that all the files have been deleted
	bktlst, err = client.ListBucket(proxyurl, clibucket, msg, 0)
	if len(bktlst.Entries) != 0 {
		t.Errorf("Incorrect number of remaining files: %d, should be 0", len(bktlst.Entries))
	}
}

func regressionDeleteList(t *testing.T) {
	var (
		err    error
		prefix = ListRangeStr + "/tstf-"
		wg     = &sync.WaitGroup{}
		errch  = make(chan error, numfiles)
		files  = make([]string, 0)
	)
	// 1. Put files to delete:
	for i := 0; i < numfiles; i++ {
		r, err := readers.NewRandReader(fileSize, true /* withHash */)
		if err != nil {
			t.Fatal(err)
		}
		keyname := fmt.Sprintf("%s%d", prefix, i)
		wg.Add(1)
		go client.PutAsync(wg, proxyurl, r, clibucket, keyname, errch, false /* silent */)
		files = append(files, keyname)
	}
	wg.Wait()
	selectErr(errch, "put", t, true)
	// 2. Delete the objects
	err = client.DeleteList(proxyurl, clibucket, files, true, 0)
	if err != nil {
		t.Error(err)
	}
	// 3. Check to see that all the files have been deleted.
	msg := &dfc.GetMsg{GetPrefix: prefix, GetPageSize: int(pagesize)}
	bktlst, err := client.ListBucket(proxyurl, clibucket, msg, 0)
	if len(bktlst.Entries) != 0 {
		t.Errorf("Incorrect number of remaining files: %d, should be 0", len(bktlst.Entries))
...
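A pattern worth pulling out of these tests is the put/delete fan-out: one goroutine per object, a shared sync.WaitGroup, and a buffered error channel that is drained only after Wait returns. The sketch below reproduces just that skeleton with the standard library; deleteObject is a hypothetical stand-in for client.Del (which, in the real tests, receives wg and errch and signals completion itself).

package main

import (
	"fmt"
	"sync"
)

// deleteObject stands in for client.Del: do the work, report failures
// on the buffered channel instead of failing the whole run.
func deleteObject(name string, wg *sync.WaitGroup, errch chan error) {
	defer wg.Done()
	// ... issue the DELETE request here ...
	if name == "" {
		errch <- fmt.Errorf("empty object name")
	}
}

func main() {
	var (
		wg    = &sync.WaitGroup{}
		errch = make(chan error, 100) // buffered so senders never block
		names = []string{"smoke/obj1", "smoke/obj2", "smoke/obj3"}
	)
	for _, name := range names {
		wg.Add(1)
		go deleteObject(name, wg, errch)
	}
	wg.Wait()
	close(errch) // safe: all senders have returned
	for err := range errch {
		fmt.Println("delete failed:", err)
	}
}

Sizing the channel to the worst-case number of errors (here 100, numfiles in the tests) is what lets the goroutines send without blocking even though nothing reads until the end.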

blockchain_test.go

Source: blockchain_test.go (GitHub)

...
			header:    hdr,
			height:    x,
			totalWork: big.NewInt(0),
		}
		bc.db.Put(sh, true)
		last = hdr
	}
	return nil
}

var MockCreationTime time.Time

func TestNewBlockchain(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.MainNetParams)
	if err != nil {
		t.Error(err)
	}
	best, err := bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	bestHash := best.header.BlockHash()
	checkHash := mainnetCheckpoints[0].Header.BlockHash()
	if !bestHash.IsEqual(&checkHash) {
		t.Error("Blockchain failed to initialize with correct mainnet checkpoint")
	}
	if best.height != mainnetCheckpoints[0].Height {
		t.Error("Blockchain failed to initialize with correct mainnet checkpoint height")
	}
	if best.totalWork.Uint64() != 0 {
		t.Error("Blockchain failed to initialize with correct mainnet total work")
	}
	os.RemoveAll("headers.bin")
	bc, err = NewBlockchain("", MockCreationTime, &chaincfg.TestNet3Params)
	if err != nil {
		t.Error(err)
	}
	best, err = bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	bestHash = best.header.BlockHash()
	checkHash = testnet3Checkpoints[0].Header.BlockHash()
	if !bestHash.IsEqual(&checkHash) {
		t.Error("Blockchain failed to initialize with correct testnet checkpoint")
	}
	if best.height != testnet3Checkpoints[0].Height {
		t.Error("Blockchain failed to initialize with correct testnet checkpoint height")
	}
	if best.totalWork.Uint64() != 0 {
		t.Error("Blockchain failed to initialize with correct testnet total work")
	}
	os.RemoveAll("headers.bin")
	bc, err = NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	best, err = bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	bestHash = best.header.BlockHash()
	checkHash = regtestCheckpoint.Header.BlockHash()
	if !bestHash.IsEqual(&checkHash) {
		t.Error("Blockchain failed to initialize with correct regtest checkpoint")
	}
	if best.height != regtestCheckpoint.Height {
		t.Error("Blockchain failed to initialize with correct regtest checkpoint height")
	}
	if best.totalWork.Uint64() != 0 {
		t.Error("Blockchain failed to initialize with correct regtest total work")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_CommitHeader(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	var headers = []wire.BlockHeader{regtestCheckpoint.Header}
	for i, c := range chain {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		newTip, reorg, height, err := bc.CommitHeader(hdr)
		if err != nil {
			t.Error(err)
		}
		if !newTip {
			t.Error("Failed to set new tip when inserting header")
		}
		if reorg != nil {
			t.Error("Incorrectly set reorg when inserting header")
		}
		if height != uint32(i+1) {
			t.Error("Returned incorrect height when inserting header")
		}
		headers = append(headers, hdr)
	}
	best, err := bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	for i := len(headers) - 1; i >= 0; i-- {
		putHash := headers[i].BlockHash()
		retHash := best.header.BlockHash()
		if !putHash.IsEqual(&retHash) {
			t.Error("Header put failed")
		}
		best, err = bc.db.GetPreviousHeader(best.header)
	}
	os.RemoveAll("headers.bin")
}

func Test_Reorg(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	var headers = []wire.BlockHeader{regtestCheckpoint.Header}
	for i, c := range chain {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		newTip, reorg, height, err := bc.CommitHeader(hdr)
		if err != nil {
			t.Error(err)
		}
		if !newTip {
			t.Error("Failed to set new tip when inserting header")
		}
		if reorg != nil {
			t.Error("Incorrectly set reorg when inserting header")
		}
		if height != uint32(i+1) {
			t.Error("Returned incorrect height when inserting header")
		}
		if i < 5 {
			headers = append(headers, hdr)
		}
	}
	for i, c := range fork {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		newTip, reorg, height, err := bc.CommitHeader(hdr)
		if err != nil {
			t.Error(err)
		}
		if newTip && i+6 < 11 {
			t.Error("Incorrectly set new tip when inserting header")
		}
		if !newTip && i+6 >= 11 {
			t.Error("Failed to set new tip when inserting header")
		}
		if reorg != nil && i+6 != 11 {
			t.Error("Incorrectly set reorg when inserting header")
		}
		if reorg == nil && i+6 == 11 {
			t.Error("Failed to return reorg when inserting a header that caused a reorg")
		}
		if height != uint32(i+6) {
			t.Error("Returned incorrect height when inserting header")
		}
		headers = append(headers, hdr)
	}
	best, err := bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	for i := len(headers) - 1; i >= 0; i-- {
		putHash := headers[i].BlockHash()
		retHash := best.header.BlockHash()
		if !putHash.IsEqual(&retHash) {
			t.Error("Header put failed")
		}
		best, err = bc.db.GetPreviousHeader(best.header)
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_GetLastGoodHeader(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	var hdr wire.BlockHeader
	for _, c := range chain {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		hdr.Deserialize(bytes.NewReader(b))
		bc.CommitHeader(hdr)
	}
	prevBest := StoredHeader{header: hdr, height: 10}
	for i := 0; i < len(fork)-1; i++ {
		b, err := hex.DecodeString(fork[i])
		if err != nil {
			t.Error(err)
		}
		hdr.Deserialize(bytes.NewReader(b))
		bc.CommitHeader(hdr)
	}
	currentBest := StoredHeader{header: hdr, height: 11}
	last, err := bc.GetLastGoodHeader(currentBest, prevBest)
	if err != nil {
		t.Error(err)
	}
	if last.height != 5 {
		t.Error("Incorrect reorg height")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_CheckHeader(t *testing.T) {
	params := &chaincfg.RegressionNetParams
	bc, err := NewBlockchain("", MockCreationTime, params)
	if err != nil {
		t.Error(err)
	}
	// Test valid header
	header0, err := hex.DecodeString(chain[0])
	if err != nil {
		t.Error(err)
	}
	var buf bytes.Buffer
	buf.Write(header0)
	hdr0 := wire.BlockHeader{}
	hdr0.Deserialize(&buf)
	header1, err := hex.DecodeString(chain[1])
	if err != nil {
		t.Error(err)
	}
	buf.Write(header1)
	hdr1 := wire.BlockHeader{}
	hdr1.Deserialize(&buf)
	sh := StoredHeader{
		header:    hdr0,
		height:    0,
		totalWork: big.NewInt(0),
	}
	if !bc.CheckHeader(hdr1, sh) {
		t.Error("Check header incorrectly returned false")
	}
	// Test header doesn't link
	header2, err := hex.DecodeString(chain[2])
	if err != nil {
		t.Error(err)
	}
	buf.Write(header2)
	hdr2 := wire.BlockHeader{}
	hdr2.Deserialize(&buf)
	if bc.CheckHeader(hdr2, sh) {
		t.Error("Check header missed headers that don't link")
	}
	// Test invalid difficulty
	params.ReduceMinDifficulty = false
	invalidDiffHdr := hdr1
	invalidDiffHdr.Bits = 0
	if bc.CheckHeader(invalidDiffHdr, sh) {
		t.Error("Check header did not detect invalid PoW")
	}
	// Test invalid proof of work
	params.ReduceMinDifficulty = true
	invalidPoWHdr := hdr1
	invalidPoWHdr.Nonce = 0
	if bc.CheckHeader(invalidPoWHdr, sh) {
		t.Error("Check header did not detect invalid PoW")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_GetNPrevBlockHashes(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	var headers = []wire.BlockHeader{regtestCheckpoint.Header}
	for i, c := range chain {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		newTip, reorg, height, err := bc.CommitHeader(hdr)
		if err != nil {
			t.Error(err)
		}
		if !newTip {
			t.Error("Failed to set new tip when inserting header")
		}
		if reorg != nil {
			t.Error("Incorrectly set reorg when inserting header")
		}
		if height != uint32(i+1) {
			t.Error("Returned incorrect height when inserting header")
		}
		headers = append(headers, hdr)
	}
	nHashes := bc.GetNPrevBlockHashes(5)
	for i := 0; i < 5; i++ {
		h := headers[(len(headers)-1)-i].BlockHash()
		if !nHashes[i].IsEqual(&h) {
			t.Error("GetNPrevBlockHashes returned invalid hashes")
		}
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_checkProofOfWork(t *testing.T) {
	// Test valid
	header0, err := hex.DecodeString(chain[0])
	if err != nil {
		t.Error(err)
	}
	var buf bytes.Buffer
	buf.Write(header0)
	hdr0 := wire.BlockHeader{}
	hdr0.Deserialize(&buf)
	if !checkProofOfWork(hdr0, &chaincfg.RegressionNetParams) {
		t.Error("checkProofOfWork failed")
	}
	// Test negative target
	neg := hdr0
	neg.Bits = 1000000000
	if checkProofOfWork(neg, &chaincfg.RegressionNetParams) {
		t.Error("checkProofOfWork failed to detect negative target")
	}
	// Test too high diff
	params := chaincfg.RegressionNetParams
	params.PowLimit = big.NewInt(0)
	if checkProofOfWork(hdr0, &params) {
		t.Error("checkProofOfWork failed to detect above max PoW")
	}
	// Test too low work
	badHeader := "1" + chain[0][1:]
	header0, err = hex.DecodeString(badHeader)
	if err != nil {
		t.Error(err)
	}
	badHdr := wire.BlockHeader{}
	buf.Write(header0)
	badHdr.Deserialize(&buf)
	if checkProofOfWork(badHdr, &chaincfg.RegressionNetParams) {
		t.Error("checkProofOfWork failed to detect insufficient work")
	}
}

func TestBlockchain_SetChainState(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	bc.SetChainState(WAITING)
	if bc.ChainState() != WAITING {
		t.Error("Failed to set chainstate correctly")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_calcDiffAdjust(t *testing.T) {
	// Test calculation of next difficulty target with no constraints applying
	start := wire.BlockHeader{}
	end := wire.BlockHeader{}
	start.Timestamp = time.Unix(1261130161, 0) // Block #30240
	end.Timestamp = time.Unix(1262152739, 0)   // Block #32255
	end.Bits = 0x1d00ffff
	if calcDiffAdjust(start, end, &chaincfg.RegressionNetParams) != 0x1d00d86a {
		t.Error("calcDiffAdjust returned incorrect difficulty")
	}
	// Test the constraint on the upper bound for next work
	start = wire.BlockHeader{}
	end = wire.BlockHeader{}
	start.Timestamp = time.Unix(1279008237, 0) // Block #0
	end.Timestamp = time.Unix(1279297671, 0)   // Block #2015
	end.Bits = 0x1c05a3f4
	if calcDiffAdjust(start, end, &chaincfg.RegressionNetParams) != 0x1c0168fd {
		t.Error("calcDiffAdjust returned incorrect difficulty")
	}
	// Test the constraint on the lower bound for actual time taken
	start = wire.BlockHeader{}
	end = wire.BlockHeader{}
	start.Timestamp = time.Unix(1279008237, 0) // Block #66528
	end.Timestamp = time.Unix(1279297671, 0)   // Block #68543
	end.Bits = 0x1c05a3f4
	if calcDiffAdjust(start, end, &chaincfg.RegressionNetParams) != 0x1c0168fd {
		t.Error("calcDiffAdjust returned incorrect difficulty")
	}
	// Test the constraint on the upper bound for actual time taken
	start = wire.BlockHeader{}
	end = wire.BlockHeader{}
	start.Timestamp = time.Unix(1263163443, 0) // NOTE: Not an actual block time
	end.Timestamp = time.Unix(1269211443, 0)   // Block #46367
	end.Bits = 0x1c387f6f
	if calcDiffAdjust(start, end, &chaincfg.RegressionNetParams) != 0x1d00e1fd {
		t.Error("calcDiffAdjust returned incorrect difficulty")
	}
}

func TestBlockchain_GetBlockLocatorHashes(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	var headers = []wire.BlockHeader{regtestCheckpoint.Header}
	for i, c := range chain {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		bc.CommitHeader(hdr)
		if i < 5 {
			headers = append(headers, hdr)
		}
	}
	for _, c := range fork {
		b, err := hex.DecodeString(c)
		if err != nil {
			t.Error(err)
		}
		var hdr wire.BlockHeader
		hdr.Deserialize(bytes.NewReader(b))
		bc.CommitHeader(hdr)
		headers = append(headers, hdr)
	}
	nHashes := bc.GetBlockLocatorHashes()
	for i := 0; i < 10; i++ {
		h := headers[(len(headers)-1)-i].BlockHash()
		if !nHashes[i].IsEqual(&h) {
			t.Error("GetBlockLocatorHashes returned invalid hashes")
		}
	}
	if nHashes[10].String() != "13ae8e4687ffe9daa1d9d42d1100b398bc328e2d971af1f4464af37f412d3d7c" {
		t.Error("Error calculating locator hashes after step increase")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_GetEpoch(t *testing.T) {
	bc, err := NewBlockchain("", MockCreationTime, &chaincfg.RegressionNetParams)
	if err != nil {
		t.Error(err)
	}
	err = createBlockChain(bc)
	if err != nil {
		t.Error(err)
	}
	epoch, err := bc.GetEpoch()
	if err != nil {
		t.Error(err)
	}
	if epoch.BlockHash().String() != "0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206" {
		t.Error("Returned incorrect epoch")
	}
	os.RemoveAll("headers.bin")
}

func TestBlockchain_calcRequiredWork(t *testing.T) {
	params := &chaincfg.TestNet3Params
	bc, err := NewBlockchain("", MockCreationTime, params)
	if err != nil {
		t.Error(err)
	}
	err = createBlockChain(bc)
	if err != nil {
		t.Error(err)
	}
	best, err := bc.db.GetBestHeader()
	if err != nil {
		t.Error(err)
	}
	// Test during difficulty adjust period
	newHdr := wire.BlockHeader{}
	newHdr.PrevBlock = best.header.BlockHash()
	work, err := bc.calcRequiredWork(newHdr, 2016, best)
	if err != nil {
...
t.Error(err)539 }540 if work <= best.header.Bits {541 t.Error("Returned in correct bits")542 }543 newHdr.Bits = work544 sh := StoredHeader{545 header: newHdr,546 height: 2016,547 totalWork: blockchain.CompactToBig(work),548 }549 bc.db.Put(sh, true)550 // Test during normal adjustment551 params.ReduceMinDifficulty = false552 newHdr1 := wire.BlockHeader{}553 newHdr1.PrevBlock = newHdr.BlockHash()554 work1, err := bc.calcRequiredWork(newHdr1, 2017, sh)555 if err != nil {556 t.Error(err)557 }558 if work1 != work {559 t.Error("Returned in correct bits")560 }561 newHdr1.Bits = work1562 sh = StoredHeader{563 header: newHdr1,564 height: 2017,565 totalWork: blockchain.CompactToBig(work1),566 }567 bc.db.Put(sh, true)568 // Test with reduced difficult flag569 params.ReduceMinDifficulty = true570 newHdr2 := wire.BlockHeader{}571 newHdr2.PrevBlock = newHdr1.BlockHash()572 work2, err := bc.calcRequiredWork(newHdr2, 2018, sh)573 if err != nil {574 t.Error(err)575 }576 if work2 != work1 {577 t.Error("Returned in correct bits")578 }579 newHdr2.Bits = work2580 sh = StoredHeader{581 header: newHdr2,582 height: 2018,583 totalWork: blockchain.CompactToBig(work2),584 }585 bc.db.Put(sh, true)586 // Test testnet exemption587 newHdr3 := wire.BlockHeader{}588 newHdr3.PrevBlock = newHdr2.BlockHash()589 newHdr3.Timestamp = newHdr2.Timestamp.Add(time.Minute * 21)590 work3, err := bc.calcRequiredWork(newHdr3, 2019, sh)591 if err != nil {592 t.Error(err)593 }594 if work3 != params.PowLimitBits {595 t.Error("Returned in correct bits")596 }597 newHdr3.Bits = work3598 sh = StoredHeader{599 header: newHdr3,600 height: 2019,601 totalWork: blockchain.CompactToBig(work3),602 }603 bc.db.Put(sh, true)604 // Test multiple special difficulty blocks in a row605 params.ReduceMinDifficulty = true606 newHdr4 := wire.BlockHeader{}607 newHdr4.PrevBlock = newHdr3.BlockHash()608 work4, err := bc.calcRequiredWork(newHdr4, 2020, sh)609 if err != nil {610 t.Error(err)611 }612 if work4 != work2 {613 t.Error("Returned in correct bits")614 }615 os.RemoveAll("headers.bin")616}...
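The tests above reach the header database through a narrow surface: Put stores a StoredHeader and optionally marks it as the new best, while GetBestHeader and GetPreviousHeader walk the chain backwards. As a minimal sketch of that surface, assuming btcd's wire and chainhash types and an in-memory map (the HeaderStore interface name and memoryHeaders type are illustrative, not the package's actual declarations):

package headers

import (
	"errors"
	"math/big"

	"github.com/btcsuite/btcd/chaincfg/chainhash"
	"github.com/btcsuite/btcd/wire"
)

// StoredHeader mirrors the fields the tests above rely on.
type StoredHeader struct {
	header    wire.BlockHeader
	height    uint32
	totalWork *big.Int
}

// HeaderStore is an illustrative name for the surface the tests exercise.
type HeaderStore interface {
	Put(sh StoredHeader, newBestHeader bool) error
	GetBestHeader() (StoredHeader, error)
	GetPreviousHeader(header wire.BlockHeader) (StoredHeader, error)
}

// memoryHeaders is a toy in-memory implementation keyed by block hash.
type memoryHeaders struct {
	headers map[chainhash.Hash]StoredHeader
	best    chainhash.Hash
}

func newMemoryHeaders() *memoryHeaders {
	return &memoryHeaders{headers: make(map[chainhash.Hash]StoredHeader)}
}

// Put stores the header and, when newBestHeader is set, records it as the tip.
func (m *memoryHeaders) Put(sh StoredHeader, newBestHeader bool) error {
	hash := sh.header.BlockHash()
	m.headers[hash] = sh
	if newBestHeader {
		m.best = hash
	}
	return nil
}

func (m *memoryHeaders) GetBestHeader() (StoredHeader, error) {
	sh, ok := m.headers[m.best]
	if !ok {
		return StoredHeader{}, errors.New("best header not set")
	}
	return sh, nil
}

// GetPreviousHeader follows the PrevBlock link one step back.
func (m *memoryHeaders) GetPreviousHeader(header wire.BlockHeader) (StoredHeader, error) {
	sh, ok := m.headers[header.PrevBlock]
	if !ok {
		return StoredHeader{}, errors.New("header does not exist in store")
	}
	return sh, nil
}

Read this way, the boolean argument in bc.db.Put(sh, true) is what lets side-chain headers be stored without moving the best-header pointer until a reorg wins out.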


Put

Using AI Code Generation


package main

// Assumes the regression package here is github.com/sajari/regression,
// whose API matches the calls in this snippet; the original omitted the
// import block, the declaration of r, and the closing paren of Train.
import (
	"fmt"

	"github.com/sajari/regression"
)

func main() {
	var r regression.Regression
	r.SetObserved("Y")
	r.SetVar(0, "X")
	r.Train(
		regression.DataPoint(2.71, []float64{1.44}),
		regression.DataPoint(3.14, []float64{1.73}),
		regression.DataPoint(1.41, []float64{1.14}),
		regression.DataPoint(1.62, []float64{1.41}),
		regression.DataPoint(2.41, []float64{1.61}),
	)
	r.Run()
	fmt.Printf("\nRegression Formula:\n%v\n", r.Formula)
}


Put

Using AI Code Generation


package main

// The original snippet passed a regression.Data struct with X and Y slices
// to Train; github.com/sajari/regression (assumed here) instead takes
// variadic DataPoints, so the two batches are rewritten accordingly.
import (
	"fmt"
	"log"

	"github.com/sajari/regression"
)

func main() {
	r := new(regression.Regression)
	r.SetObserved("Y")
	r.SetVar(0, "X")
	// First batch: y = 2x for x = 1..5.
	for x := 1; x <= 5; x++ {
		r.Train(regression.DataPoint(float64(2*x), []float64{float64(x)}))
	}
	// Second batch: y = 2x for x = 6..10.
	for x := 6; x <= 10; x++ {
		r.Train(regression.DataPoint(float64(2*x), []float64{float64(x)}))
	}
	if err := r.Run(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("\nRegression Formula:\n%v\n", r.Formula)
}


Put

Using AI Code Generation


package main

// Assumes github.com/sajari/regression, as in the snippets above.
import (
	"fmt"
	"math/rand"
	"time"

	"github.com/sajari/regression"
)

func main() {
	var r regression.Regression
	r.SetObserved("y")
	r.SetVar(0, "x")
	rand.Seed(time.Now().UnixNano())
	// Train on noisy samples drawn around y = 5x + 10.
	for x := 0; x < 100; x++ {
		y := 5.0*float64(x) + 10.0 + rand.Float64()*100.0
		r.Train(regression.DataPoint(y, []float64{float64(x)}))
	}
	r.Run()
	fmt.Printf("\nRegression Formula:\n")
	fmt.Printf("%v\n\n", r.Formula)
	fmt.Printf("R²: %v\n", r.R2)
	fmt.Printf("\nx predicted residual\n")
	for x := 0.0; x <= 5.0; x += 1.0 {
		y, err := r.Predict([]float64{x})
		if err != nil {
			panic(err)
		}
		fmt.Printf("%0.2f %0.2f %0.2f\n", x, y, y-5.0*x-10.0)
	}
}


Put

Using AI Code Generation


package main

import "fmt"

// NewRegression and Put are not defined anywhere in this snippet; one
// hypothetical implementation is sketched below it.
func main() {
	r := NewRegression()
	r.Put(1, 1)
	fmt.Println(r)
}
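Since neither NewRegression nor Put is defined on this page, the snippet will not compile on its own. One plausible reading, purely as an illustrative sketch: Put records a single (x, y) observation and the model keeps the running sums needed for simple least squares. Every name below is an assumption, not the actual package API.

package main

import "fmt"

// Regression is a hypothetical type matching the snippet above: it
// accumulates (x, y) observations for simple linear regression.
type Regression struct {
	n                        int
	sumX, sumY, sumXY, sumXX float64
}

func NewRegression() *Regression { return &Regression{} }

// Put records a single (x, y) observation.
func (r *Regression) Put(x, y float64) {
	r.n++
	r.sumX += x
	r.sumY += y
	r.sumXY += x * y
	r.sumXX += x * x
}

// Coefficients returns the closed-form least-squares slope and intercept.
func (r *Regression) Coefficients() (slope, intercept float64) {
	if r.n == 0 {
		return 0, 0
	}
	n := float64(r.n)
	denom := n*r.sumXX - r.sumX*r.sumX
	if denom == 0 {
		return 0, r.sumY / n // degenerate: all x values identical
	}
	slope = (n*r.sumXY - r.sumX*r.sumY) / denom
	intercept = (r.sumY - slope*r.sumX) / n
	return slope, intercept
}

func main() {
	r := NewRegression()
	r.Put(1, 2)
	r.Put(2, 4)
	r.Put(3, 6)
	slope, intercept := r.Coefficients()
	fmt.Printf("y = %.2fx + %.2f\n", slope, intercept) // y = 2.00x + 0.00
}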


Put

Using AI Code Generation


package main

import (
	"fmt"
	"math"
	// The package providing NewRegression, Fit, and Predict is not
	// identified in the original snippet, so its import is omitted here.
)

func main() {
	x := [][]float64{{1, 1}, {1, 2}, {2, 2}, {2, 3}}
	y := []float64{6, 8, 9, 11}
	r := regression.NewRegression()
	r.Fit(x, y)
	test := []float64{3, 5}
	output := r.Predict(test)
	fmt.Println("Predicted output: ", math.Round(output))
}
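The Fit/Predict API in this snippet is likewise not tied to a known package. As a hedged sketch of what such a Fit could do under the hood, here is multiple linear regression with an intercept term, solved from the normal equations by Gauss-Jordan elimination; regressionModel and its methods are hypothetical stand-ins:

package main

import "fmt"

// regressionModel is a hypothetical stand-in for the NewRegression/Fit/Predict
// API above. Fit solves the least-squares normal equations (with an intercept)
// and Predict applies the fitted coefficients.
type regressionModel struct {
	coef []float64 // coef[0] is the intercept
}

func (m *regressionModel) Fit(x [][]float64, y []float64) {
	k := len(x[0]) + 1 // features plus intercept
	// Build the augmented normal-equation matrix [X'X | X'y].
	a := make([][]float64, k)
	for i := range a {
		a[i] = make([]float64, k+1)
	}
	for r, row := range x {
		v := append([]float64{1}, row...) // prepend the intercept column
		for i := 0; i < k; i++ {
			for j := 0; j < k; j++ {
				a[i][j] += v[i] * v[j]
			}
			a[i][k] += v[i] * y[r]
		}
	}
	// Gauss-Jordan elimination (no pivoting; adequate for this small,
	// well-conditioned system).
	for i := 0; i < k; i++ {
		p := a[i][i]
		for j := i; j <= k; j++ {
			a[i][j] /= p
		}
		for r := 0; r < k; r++ {
			if r == i {
				continue
			}
			f := a[r][i]
			for j := i; j <= k; j++ {
				a[r][j] -= f * a[i][j]
			}
		}
	}
	m.coef = make([]float64, k)
	for i := 0; i < k; i++ {
		m.coef[i] = a[i][k]
	}
}

func (m *regressionModel) Predict(v []float64) float64 {
	out := m.coef[0]
	for i, x := range v {
		out += m.coef[i+1] * x
	}
	return out
}

func main() {
	x := [][]float64{{1, 1}, {1, 2}, {2, 2}, {2, 3}}
	y := []float64{6, 8, 9, 11}
	m := &regressionModel{}
	m.Fit(x, y)
	fmt.Printf("predicted: %.2f\n", m.Predict([]float64{3, 5}))
}

For the four training rows above, this recovers y = 3 + x1 + 2*x2, so Predict([]float64{3, 5}) returns 16.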


Put

Using AI Code Generation


package main

import (
	"fmt"
)

// NewRegression, Put, and Get come from the regression package under test;
// its import path is not shown in the original snippet.
func main() {
	reg := regression.NewRegression()
	reg.Put(1, 1)
	reg.Put(2, 2)
	reg.Put(3, 3)
	fmt.Println(reg.Get(1))
}


Put

Using AI Code Generation


package main

import (
	"encoding/csv"
	"fmt"
	"log"
	"os"
)

func main() {
	f, err := os.Open("./housing.csv")
	if err != nil {
		log.Fatal(err)
	}
	r := csv.NewReader(f)
	// reg's declaration is elided in the original; it is assumed to be a
	// regression value whose Regress method consumes a *csv.Reader.
	reg.SetObserved("Housing Price")
	reg.SetVar(0, "House Age")
	reg.SetVar(1, "Distance to the nearest MRT station")
	reg.SetVar(2, "Number of convenience stores")
	reg.SetVar(3, "Latitude")
	reg.SetVar(4, "Longitude")
	if err := reg.Regress(r); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("\nRegression Formula:\n%v\n\n", reg.Formula)
	fmt.Printf("R2: %v\n\n", reg.R2)
	for i, x := range [][]float64{
		{32, 84.87882, 10, 24.98034, 121.53951},
		{19.5, 306.59470, 9, 24.98746, 121.54391},
		{13.3, 561.98450, 5, 24.97937, 121.54245},
	} {
		y, err := reg.Predict(x)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("Predicted value #%d: %0.2f\n", i, y)
	}
}
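If reg above is meant to be a *regression.Regression from github.com/sajari/regression (the package the earlier snippets use), note that its documented API has no CSV-aware Regress helper; the usual pattern is to parse each record yourself and feed DataPoints to Train. A sketch under that assumption, with a hypothetical column order of five features followed by the observed price:

package main

import (
	"encoding/csv"
	"fmt"
	"log"
	"os"
	"strconv"

	"github.com/sajari/regression"
)

func main() {
	f, err := os.Open("./housing.csv")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	records, err := csv.NewReader(f).ReadAll()
	if err != nil {
		log.Fatal(err)
	}

	reg := new(regression.Regression)
	reg.SetObserved("Housing Price")
	reg.SetVar(0, "House Age")
	reg.SetVar(1, "Distance to the nearest MRT station")
	reg.SetVar(2, "Number of convenience stores")
	reg.SetVar(3, "Latitude")
	reg.SetVar(4, "Longitude")

	// Assumed layout: five feature columns then the observed price,
	// with a header row to skip.
	for i, rec := range records {
		if i == 0 {
			continue
		}
		vals := make([]float64, len(rec))
		for j, s := range rec {
			v, err := strconv.ParseFloat(s, 64)
			if err != nil {
				log.Fatal(err)
			}
			vals[j] = v
		}
		// Last column is the observed price; the rest are features.
		reg.Train(regression.DataPoint(vals[len(vals)-1], vals[:len(vals)-1]))
	}

	if err := reg.Run(); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("\nRegression Formula:\n%v\n\n", reg.Formula)
	fmt.Printf("R2: %v\n", reg.R2)
}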


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub, from setting up the prerequisites and running your first automation test to following best practices and diving deeper into advanced test scenarios. The LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

Run Keploy automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest now and get 100 minutes of automation testing free!

