How to use the Write method of the config package

Best Gauge code snippet using config.Write
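
Most of the snippets below are scraped test code. As a quick orientation, here is a minimal, self-contained sketch of the pattern they revolve around: load a configuration, change a value, and call a Write method to persist it. This sketch uses spf13/viper (which several snippets below also use); it is an illustration, not the Gauge config package itself.

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	viper.SetConfigName("config") // looks for config.yaml, config.json, etc.
	viper.AddConfigPath(".")
	if err := viper.ReadInConfig(); err != nil {
		panic(fmt.Errorf("fatal error reading config file: %w", err))
	}
	viper.Set("retries", 3)
	// WriteConfig persists the in-memory state back to the loaded file.
	if err := viper.WriteConfig(); err != nil {
		panic(err)
	}
}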

dataretriever_test.go

Source: dataretriever_test.go (GitHub)


...
	collectionName := "testCollectionName"
	txID := "testTxID"
	committer.On("LedgerHeight").Return(uint64(1), nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	store.Persist(txID, 2, &transientstore.TxPvtReadWriteSetWithConfigInfo{
		PvtRwset: &rwset.TxPvtReadWriteSet{
			DataModel: rwset.TxReadWriteSet_KV,
			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
				pvtReadWriteSet(namespace, collectionName, []byte{1, 2}),
				pvtReadWriteSet(namespace, collectionName, []byte{3, 4}),
			},
		},
		CollectionConfigs: map[string]*peer.CollectionConfigPackage{
			namespace: {
				Config: []*peer.CollectionConfig{
					{
						Payload: &peer.CollectionConfig_StaticCollectionConfig{
							StaticCollectionConfig: &peer.StaticCollectionConfig{
								Name: collectionName,
							},
						},
					},
				},
			},
		},
	})
	// Request digest for private data which is greater than current ledger height
	// to make it query transient store for missed private data
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   2,
		TxId:       txID,
		SeqInBlock: 1,
	}}, 2)
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	dig2pvtRWSet := rwSets[privdatacommon.DigKey{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   2,
		TxId:       txID,
		SeqInBlock: 1,
	}]
	assertion.NotNil(dig2pvtRWSet)
	pvtRWSets := dig2pvtRWSet.RWSet
	assertion.Equal(2, len(pvtRWSets))
	var mergedRWSet []byte
	for _, rws := range pvtRWSets {
		mergedRWSet = append(mergedRWSet, rws...)
	}
	assertion.Equal([]byte{1, 2, 3, 4}, mergedRWSet)
}

/*
	Simple test case where available ledger height is greater than
	requested block sequence and therefore private data will be retrieved
	from the ledger rather than transient store as data being committed
*/
func TestNewDataRetriever_GetDataFromLedger(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	namespace := "testChaincodeName1"
	collectionName := "testCollectionName"
	result := []*ledger.TxPvtData{{
		WriteSet: &rwset.TxPvtReadWriteSet{
			DataModel: rwset.TxReadWriteSet_KV,
			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
				pvtReadWriteSet(namespace, collectionName, []byte{1, 2}),
				pvtReadWriteSet(namespace, collectionName, []byte{3, 4}),
			},
		},
		SeqInBlock: 1,
	}}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	historyRetreiver := &mocks.ConfigHistoryRetriever{}
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, namespace).Return(newCollectionConfig(collectionName), nil)
	committer.On("GetConfigHistoryRetriever").Return(historyRetreiver, nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	// Request digest for a block sequence below the current ledger height
	// to make it query the ledger for the missed private data
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, uint64(5))
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	pvtRWSet := rwSets[privdatacommon.DigKey{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   5,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}]
	assertion.NotEmpty(pvtRWSet)
	assertion.Equal(2, len(pvtRWSet.RWSet))
	var mergedRWSet []byte
	for _, rws := range pvtRWSet.RWSet {
		mergedRWSet = append(mergedRWSet, rws...)
	}
	assertion.Equal([]byte{1, 2, 3, 4}, mergedRWSet)
}

func TestNewDataRetriever_FailGetPvtDataFromLedger(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	namespace := "testChaincodeName1"
	collectionName := "testCollectionName"
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).
		Return(nil, errors.New("failing retrieving private data"))
	retriever := NewDataRetriever("testchannel", store.store, committer)
	// Request digest for a block sequence below the current ledger height
	// so the retriever queries the ledger, which fails in this test
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, uint64(5))
	assertion := require.New(t)
	assertion.Error(err)
	assertion.Empty(rwSets)
}

func TestNewDataRetriever_GetOnlyRelevantPvtData(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	namespace := "testChaincodeName1"
	collectionName := "testCollectionName"
	result := []*ledger.TxPvtData{{
		WriteSet: &rwset.TxPvtReadWriteSet{
			DataModel: rwset.TxReadWriteSet_KV,
			NsPvtRwset: []*rwset.NsPvtReadWriteSet{
				pvtReadWriteSet(namespace, collectionName, []byte{1}),
				pvtReadWriteSet(namespace, collectionName, []byte{2}),
				pvtReadWriteSet("invalidNamespace", collectionName, []byte{0, 0}),
				pvtReadWriteSet(namespace, "invalidCollectionName", []byte{0, 0}),
			},
		},
		SeqInBlock: 1,
	}}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	historyRetreiver := &mocks.ConfigHistoryRetriever{}
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, namespace).Return(newCollectionConfig(collectionName), nil)
	committer.On("GetConfigHistoryRetriever").Return(historyRetreiver, nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	// Request digest for a block sequence below the current ledger height
	// to make it query the ledger for the missed private data
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 5)
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	pvtRWSet := rwSets[privdatacommon.DigKey{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   5,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}]
	assertion.NotEmpty(pvtRWSet)
	assertion.Equal(2, len(pvtRWSet.RWSet))
	var mergedRWSet []byte
	for _, rws := range pvtRWSet.RWSet {
		mergedRWSet = append(mergedRWSet, rws...)
	}
	assertion.Equal([]byte{1, 2}, mergedRWSet)
}

func TestNewDataRetriever_GetMultipleDigests(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	ns1, ns2 := "testChaincodeName1", "testChaincodeName2"
	col1, col2 := "testCollectionName1", "testCollectionName2"
	result := []*ledger.TxPvtData{
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns1, col1, []byte{1}),
					pvtReadWriteSet(ns1, col1, []byte{2}),
					pvtReadWriteSet("invalidNamespace", col1, []byte{0, 0}),
					pvtReadWriteSet(ns1, "invalidCollectionName", []byte{0, 0}),
				},
			},
			SeqInBlock: 1,
		},
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns2, col2, []byte{3}),
					pvtReadWriteSet(ns2, col2, []byte{4}),
					pvtReadWriteSet("invalidNamespace", col2, []byte{0, 0}),
					pvtReadWriteSet(ns2, "invalidCollectionName", []byte{0, 0}),
				},
			},
			SeqInBlock: 2,
		},
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns1, col1, []byte{5}),
					pvtReadWriteSet(ns2, col2, []byte{6}),
					pvtReadWriteSet("invalidNamespace", col2, []byte{0, 0}),
					pvtReadWriteSet(ns2, "invalidCollectionName", []byte{0, 0}),
				},
			},
			SeqInBlock: 3,
		},
	}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	historyRetreiver := &mocks.ConfigHistoryRetriever{}
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, ns1).Return(newCollectionConfig(col1), nil)
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, ns2).Return(newCollectionConfig(col2), nil)
	committer.On("GetConfigHistoryRetriever").Return(historyRetreiver, nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	// Request digests for a block sequence below the current ledger height
	// to make it query the ledger for the missed private data
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}, {
		Namespace:  ns2,
		Collection: col2,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 2,
	}}, 5)
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	assertion.Equal(2, len(rwSets))
	pvtRWSet := rwSets[privdatacommon.DigKey{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   5,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}]
	assertion.NotEmpty(pvtRWSet)
	assertion.Equal(2, len(pvtRWSet.RWSet))
	var mergedRWSet []byte
	for _, rws := range pvtRWSet.RWSet {
		mergedRWSet = append(mergedRWSet, rws...)
	}
	pvtRWSet = rwSets[privdatacommon.DigKey{
		Namespace:  ns2,
		Collection: col2,
		BlockSeq:   5,
		TxId:       "testTxID",
		SeqInBlock: 2,
	}]
	assertion.NotEmpty(pvtRWSet)
	assertion.Equal(2, len(pvtRWSet.RWSet))
	for _, rws := range pvtRWSet.RWSet {
		mergedRWSet = append(mergedRWSet, rws...)
	}
	assertion.Equal([]byte{1, 2, 3, 4}, mergedRWSet)
}

func TestNewDataRetriever_EmptyWriteSet(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	ns1 := "testChaincodeName1"
	col1 := "testCollectionName1"
	result := []*ledger.TxPvtData{
		{
			SeqInBlock: 1,
		},
	}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	historyRetreiver := &mocks.ConfigHistoryRetriever{}
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, ns1).Return(newCollectionConfig(col1), nil)
	committer.On("GetConfigHistoryRetriever").Return(historyRetreiver, nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 5)
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	pvtRWSet := rwSets[privdatacommon.DigKey{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   5,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}]
	assertion.NotEmpty(pvtRWSet)
	assertion.Empty(pvtRWSet.RWSet)
}

func TestNewDataRetriever_FailedObtainConfigHistoryRetriever(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	ns1 := "testChaincodeName1"
	col1 := "testCollectionName1"
	result := []*ledger.TxPvtData{
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns1, col1, []byte{1}),
					pvtReadWriteSet(ns1, col1, []byte{2}),
				},
			},
			SeqInBlock: 1,
		},
	}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	committer.On("GetConfigHistoryRetriever").Return(nil, errors.New("failed to obtain ConfigHistoryRetriever"))
	retriever := NewDataRetriever("testchannel", store.store, committer)
	_, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 5)
	assertion := require.New(t)
	assertion.Contains(err.Error(), "failed to obtain ConfigHistoryRetriever")
}

func TestNewDataRetriever_NoCollectionConfig(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	ns1, ns2 := "testChaincodeName1", "testChaincodeName2"
	col1, col2 := "testCollectionName1", "testCollectionName2"
	result := []*ledger.TxPvtData{
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns1, col1, []byte{1}),
					pvtReadWriteSet(ns1, col1, []byte{2}),
				},
			},
			SeqInBlock: 1,
		},
		{
			WriteSet: &rwset.TxPvtReadWriteSet{
				DataModel: rwset.TxReadWriteSet_KV,
				NsPvtRwset: []*rwset.NsPvtReadWriteSet{
					pvtReadWriteSet(ns2, col2, []byte{3}),
					pvtReadWriteSet(ns2, col2, []byte{4}),
				},
			},
			SeqInBlock: 2,
		},
	}
	committer.On("LedgerHeight").Return(uint64(10), nil)
	committer.On("GetPvtDataByNum", uint64(5), mock.Anything).Return(result, nil)
	historyRetreiver := &mocks.ConfigHistoryRetriever{}
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, ns1).
		Return(newCollectionConfig(col1), errors.New("failed to obtain collection config"))
	historyRetreiver.On("MostRecentCollectionConfigBelow", mock.Anything, ns2).
		Return(nil, nil)
	committer.On("GetConfigHistoryRetriever").Return(historyRetreiver, nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	assertion := require.New(t)
	_, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 5)
	assertion.Error(err)
	assertion.Contains(err.Error(), "cannot find recent collection config update below block sequence")
	_, _, err = retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns2,
		Collection: col2,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 2,
	}}, 5)
	assertion.Error(err)
	assertion.Contains(err.Error(), "no collection config update below block sequence")
}

func TestNewDataRetriever_FailedGetLedgerHeight(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	ns1 := "testChaincodeName1"
	col1 := "testCollectionName1"
	committer.On("LedgerHeight").Return(uint64(0), errors.New("failed to read ledger height"))
	retriever := NewDataRetriever("testchannel", store.store, committer)
	_, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  ns1,
		Collection: col1,
		BlockSeq:   uint64(5),
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 5)
	assertion := require.New(t)
	assertion.Error(err)
	assertion.Contains(err.Error(), "failed to read ledger height")
}

func TestNewDataRetriever_EmptyPvtRWSetInTransientStore(t *testing.T) {
	committer := &mocks.Committer{}
	store := newTransientStore(t)
	defer store.tearDown()
	namespace := "testChaincodeName1"
	collectionName := "testCollectionName"
	committer.On("LedgerHeight").Return(uint64(1), nil)
	retriever := NewDataRetriever("testchannel", store.store, committer)
	rwSets, _, err := retriever.CollectionRWSet([]*gossip2.PvtDataDigest{{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   2,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}}, 2)
	assertion := require.New(t)
	assertion.NoError(err)
	assertion.NotEmpty(rwSets)
	assertion.Empty(rwSets[privdatacommon.DigKey{
		Namespace:  namespace,
		Collection: collectionName,
		BlockSeq:   2,
		TxId:       "testTxID",
		SeqInBlock: 1,
	}])
}

func newCollectionConfig(collectionName string) *ledger.CollectionConfigInfo {
	return &ledger.CollectionConfigInfo{
		CollectionConfig: &peer.CollectionConfigPackage{
			Config: []*peer.CollectionConfig{
				{
					Payload: &peer.CollectionConfig_StaticCollectionConfig{
						StaticCollectionConfig: &peer.StaticCollectionConfig{
							Name: collectionName,
						},
					},
				},
			},
		},
	}
}

func pvtReadWriteSet(ns string, collectionName string, data []byte) *rwset.NsPvtReadWriteSet {
	return &rwset.NsPvtReadWriteSet{
		Namespace: ns,
		CollectionPvtRwset: []*rwset.CollectionPvtReadWriteSet{{
			CollectionName: collectionName,
			Rwset:          data,
		}},
	}
}
...
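
The branching these tests exercise reduces to a height check. Schematically (this is a simplification for illustration, not Fabric's actual implementation):

// Schematic of the retrieval decision exercised above: a block sequence at
// or above the ledger height has not been committed yet, so its private
// data can only live in the transient store; otherwise it is read from the
// ledger and filtered to the requested namespace/collection.
func pvtDataSource(blockSeq, ledgerHeight uint64) string {
	if blockSeq >= ledgerHeight {
		return "transient store"
	}
	return "ledger"
}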


update_test.go

Source: update_test.go (GitHub)


...
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version:   original.Version + 1,
		Groups:    map[string]*cb.ConfigGroup{},
		Policies:  map[string]*cb.ConfigPolicy{},
		Values:    map[string]*cb.ConfigValue{},
		ModPolicy: updated.ModPolicy,
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestGroupPolicyModification(t *testing.T) {
	policy1Name := "foo"
	policy2Name := "bar"
	original := &cb.ConfigGroup{
		Version: 4,
		Policies: map[string]*cb.ConfigPolicy{
			policy1Name: {
				Version: 2,
				Policy: &cb.Policy{
					Type: 3,
				},
			},
			policy2Name: {
				Version: 1,
				Policy: &cb.Policy{
					Type: 5,
				},
			},
		},
	}
	updated := &cb.ConfigGroup{
		Policies: map[string]*cb.ConfigPolicy{
			policy1Name: original.Policies[policy1Name],
			policy2Name: {
				Policy: &cb.Policy{
					Type: 9,
				},
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version:  original.Version,
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version,
		Policies: map[string]*cb.ConfigPolicy{
			policy2Name: {
				Policy: &cb.Policy{
					Type: updated.Policies[policy2Name].Policy.Type,
				},
				Version: original.Policies[policy2Name].Version + 1,
			},
		},
		Values: map[string]*cb.ConfigValue{},
		Groups: map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestGroupValueModification(t *testing.T) {
	value1Name := "foo"
	value2Name := "bar"
	original := &cb.ConfigGroup{
		Version: 7,
		Values: map[string]*cb.ConfigValue{
			value1Name: {
				Version: 3,
				Value:   []byte("value1value"),
			},
			value2Name: {
				Version: 6,
				Value:   []byte("value2value"),
			},
		},
	}
	updated := &cb.ConfigGroup{
		Values: map[string]*cb.ConfigValue{
			value1Name: original.Values[value1Name],
			value2Name: {
				Value: []byte("updatedValued2Value"),
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version:  original.Version,
		Values:   map[string]*cb.ConfigValue{},
		Policies: map[string]*cb.ConfigPolicy{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version,
		Values: map[string]*cb.ConfigValue{
			value2Name: {
				Value:   updated.Values[value2Name].Value,
				Version: original.Values[value2Name].Version + 1,
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestGroupGroupsModification(t *testing.T) {
	subGroupName := "foo"
	original := &cb.ConfigGroup{
		Version: 7,
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Version: 3,
				Values: map[string]*cb.ConfigValue{
					"testValue": {
						Version: 3,
					},
				},
			},
		},
	}
	updated := &cb.ConfigGroup{
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version: original.Version,
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Version:  original.Groups[subGroupName].Version,
				Policies: map[string]*cb.ConfigPolicy{},
				Values:   map[string]*cb.ConfigValue{},
				Groups:   map[string]*cb.ConfigGroup{},
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version,
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Version:  original.Groups[subGroupName].Version + 1,
				Groups:   map[string]*cb.ConfigGroup{},
				Policies: map[string]*cb.ConfigPolicy{},
				Values:   map[string]*cb.ConfigValue{},
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestGroupValueAddition(t *testing.T) {
	value1Name := "foo"
	value2Name := "bar"
	original := &cb.ConfigGroup{
		Version: 7,
		Values: map[string]*cb.ConfigValue{
			value1Name: {
				Version: 3,
				Value:   []byte("value1value"),
			},
		},
	}
	updated := &cb.ConfigGroup{
		Values: map[string]*cb.ConfigValue{
			value1Name: original.Values[value1Name],
			value2Name: {
				Version: 9,
				Value:   []byte("newValue2"),
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version: original.Version,
		Values: map[string]*cb.ConfigValue{
			value1Name: {
				Version: original.Values[value1Name].Version,
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version + 1,
		Values: map[string]*cb.ConfigValue{
			value1Name: {
				Version: original.Values[value1Name].Version,
			},
			value2Name: {
				Value:   updated.Values[value2Name].Value,
				Version: 0,
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestGroupPolicySwap(t *testing.T) {
	policy1Name := "foo"
	policy2Name := "bar"
	original := &cb.ConfigGroup{
		Version: 4,
		Policies: map[string]*cb.ConfigPolicy{
			policy1Name: {
				Version: 2,
				Policy: &cb.Policy{
					Type: 3,
				},
			},
		},
	}
	updated := &cb.ConfigGroup{
		Policies: map[string]*cb.ConfigPolicy{
			policy2Name: {
				Version: 1,
				Policy: &cb.Policy{
					Type: 5,
				},
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version:  original.Version,
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
		Groups:   map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version + 1,
		Policies: map[string]*cb.ConfigPolicy{
			policy2Name: {
				Policy: &cb.Policy{
					Type: updated.Policies[policy2Name].Policy.Type,
				},
				Version: 0,
			},
		},
		Values: map[string]*cb.ConfigValue{},
		Groups: map[string]*cb.ConfigGroup{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestComplex(t *testing.T) {
	existingGroup1Name := "existingGroup1"
	existingGroup2Name := "existingGroup2"
	existingPolicyName := "existingPolicy"
	original := &cb.ConfigGroup{
		Version: 4,
		Groups: map[string]*cb.ConfigGroup{
			existingGroup1Name: {
				Version: 2,
			},
			existingGroup2Name: {
				Version: 2,
			},
		},
		Policies: map[string]*cb.ConfigPolicy{
			existingPolicyName: {
				Version: 8,
				Policy: &cb.Policy{
					Type: 5,
				},
			},
		},
	}
	newGroupName := "newGroup"
	newPolicyName := "newPolicy"
	newValueName := "newValue"
	updated := &cb.ConfigGroup{
		Groups: map[string]*cb.ConfigGroup{
			existingGroup1Name: {},
			newGroupName: {
				Values: map[string]*cb.ConfigValue{
					newValueName: {},
				},
			},
		},
		Policies: map[string]*cb.ConfigPolicy{
			existingPolicyName: {
				Policy: &cb.Policy{
					Type: 5,
				},
			},
			newPolicyName: {
				Version: 6,
				Policy: &cb.Policy{
					Type: 5,
				},
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version: original.Version,
		Policies: map[string]*cb.ConfigPolicy{
			existingPolicyName: {
				Version: original.Policies[existingPolicyName].Version,
			},
		},
		Values: map[string]*cb.ConfigValue{},
		Groups: map[string]*cb.ConfigGroup{
			existingGroup1Name: {
				Version: original.Groups[existingGroup1Name].Version,
			},
		},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version + 1,
		Policies: map[string]*cb.ConfigPolicy{
			existingPolicyName: {
				Version: original.Policies[existingPolicyName].Version,
			},
			newPolicyName: {
				Version: 0,
				Policy: &cb.Policy{
					Type: 5,
				},
			},
		},
		Groups: map[string]*cb.ConfigGroup{
			existingGroup1Name: {
				Version: original.Groups[existingGroup1Name].Version,
			},
			newGroupName: {
				Version: 0,
				Values: map[string]*cb.ConfigValue{
					newValueName: {},
				},
				Policies: map[string]*cb.ConfigPolicy{},
				Groups:   map[string]*cb.ConfigGroup{},
			},
		},
		Values: map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}

func TestTwiceNestedModification(t *testing.T) {
	subGroupName := "foo"
	subSubGroupName := "bar"
	valueName := "testValue"
	original := &cb.ConfigGroup{
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Groups: map[string]*cb.ConfigGroup{
					subSubGroupName: {
						Values: map[string]*cb.ConfigValue{
							valueName: {},
						},
					},
				},
			},
		},
	}
	updated := &cb.ConfigGroup{
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Groups: map[string]*cb.ConfigGroup{
					subSubGroupName: {
						Values: map[string]*cb.ConfigValue{
							valueName: {
								ModPolicy: "new",
							},
						},
					},
				},
			},
		},
	}
	cu, err := Compute(&cb.Config{
		ChannelGroup: original,
	}, &cb.Config{
		ChannelGroup: updated,
	})
	require.NoError(t, err)
	expectedReadSet := &cb.ConfigGroup{
		Version: original.Version,
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Groups: map[string]*cb.ConfigGroup{
					subSubGroupName: {
						Policies: map[string]*cb.ConfigPolicy{},
						Values:   map[string]*cb.ConfigValue{},
						Groups:   map[string]*cb.ConfigGroup{},
					},
				},
				Policies: map[string]*cb.ConfigPolicy{},
				Values:   map[string]*cb.ConfigValue{},
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedReadSet, cu.ReadSet, "Mismatched read set")
	expectedWriteSet := &cb.ConfigGroup{
		Version: original.Version,
		Groups: map[string]*cb.ConfigGroup{
			subGroupName: {
				Groups: map[string]*cb.ConfigGroup{
					subSubGroupName: {
						Values: map[string]*cb.ConfigValue{
							valueName: {
								Version:   original.Groups[subGroupName].Groups[subSubGroupName].Values[valueName].Version + 1,
								ModPolicy: updated.Groups[subGroupName].Groups[subSubGroupName].Values[valueName].ModPolicy,
							},
						},
						Policies: map[string]*cb.ConfigPolicy{},
						Groups:   map[string]*cb.ConfigGroup{},
					},
				},
				Policies: map[string]*cb.ConfigPolicy{},
				Values:   map[string]*cb.ConfigValue{},
			},
		},
		Policies: map[string]*cb.ConfigPolicy{},
		Values:   map[string]*cb.ConfigValue{},
	}
	require.Equal(t, expectedWriteSet, cu.WriteSet, "Mismatched write set")
}
...


update.go

Source: update.go (GitHub)


...
		if !ok {
			updatedMembers = true
			continue
		}
		groupReadSet, groupWriteSet, groupUpdated := computeGroupUpdate(originalGroup, updatedGroup)
		if !groupUpdated {
			sameSet[groupName] = groupReadSet
			continue
		}
		readSet[groupName] = groupReadSet
		writeSet[groupName] = groupWriteSet
	}
	for groupName, updatedGroup := range updated {
		if _, ok := original[groupName]; ok {
			// If the updatedGroup is in the original set of groups, it was already handled
			continue
		}
		updatedMembers = true
		_, groupWriteSet, _ := computeGroupUpdate(protoutil.NewConfigGroup(), updatedGroup)
		writeSet[groupName] = &cb.ConfigGroup{
			Version:   0,
			ModPolicy: updatedGroup.ModPolicy,
			Policies:  groupWriteSet.Policies,
			Values:    groupWriteSet.Values,
			Groups:    groupWriteSet.Groups,
		}
	}
	return
}

func computeGroupUpdate(original, updated *cb.ConfigGroup) (readSet, writeSet *cb.ConfigGroup, updatedGroup bool) {
	readSetPolicies, writeSetPolicies, sameSetPolicies, policiesMembersUpdated := computePoliciesMapUpdate(original.Policies, updated.Policies)
	readSetValues, writeSetValues, sameSetValues, valuesMembersUpdated := computeValuesMapUpdate(original.Values, updated.Values)
	readSetGroups, writeSetGroups, sameSetGroups, groupsMembersUpdated := computeGroupsMapUpdate(original.Groups, updated.Groups)
	// If the updated group is 'Equal' to the original group (none of the members nor the mod policy changed)
	if !(policiesMembersUpdated || valuesMembersUpdated || groupsMembersUpdated || original.ModPolicy != updated.ModPolicy) {
		// If there were no modified entries in any of the policies/values/groups maps
		if len(readSetPolicies) == 0 &&
			len(writeSetPolicies) == 0 &&
			len(readSetValues) == 0 &&
			len(writeSetValues) == 0 &&
			len(readSetGroups) == 0 &&
			len(writeSetGroups) == 0 {
			return &cb.ConfigGroup{
					Version: original.Version,
				}, &cb.ConfigGroup{
					Version: original.Version,
				}, false
		}
		return &cb.ConfigGroup{
				Version:  original.Version,
				Policies: readSetPolicies,
				Values:   readSetValues,
				Groups:   readSetGroups,
			}, &cb.ConfigGroup{
				Version:  original.Version,
				Policies: writeSetPolicies,
				Values:   writeSetValues,
				Groups:   writeSetGroups,
			}, true
	}
	for k, samePolicy := range sameSetPolicies {
		readSetPolicies[k] = samePolicy
		writeSetPolicies[k] = samePolicy
	}
	for k, sameValue := range sameSetValues {
		readSetValues[k] = sameValue
		writeSetValues[k] = sameValue
	}
	for k, sameGroup := range sameSetGroups {
		readSetGroups[k] = sameGroup
		writeSetGroups[k] = sameGroup
	}
	return &cb.ConfigGroup{
			Version:  original.Version,
			Policies: readSetPolicies,
			Values:   readSetValues,
			Groups:   readSetGroups,
		}, &cb.ConfigGroup{
			Version:   original.Version + 1,
			Policies:  writeSetPolicies,
			Values:    writeSetValues,
			Groups:    writeSetGroups,
			ModPolicy: updated.ModPolicy,
		}, true
}

// Compute derives a ConfigUpdate (read set + write set) from an original
// and an updated config.
func Compute(original, updated *cb.Config) (*cb.ConfigUpdate, error) {
	if original.ChannelGroup == nil {
		return nil, fmt.Errorf("no channel group included for original config")
	}
	if updated.ChannelGroup == nil {
		return nil, fmt.Errorf("no channel group included for updated config")
	}
	readSet, writeSet, groupUpdated := computeGroupUpdate(original.ChannelGroup, updated.ChannelGroup)
	if !groupUpdated {
		return nil, fmt.Errorf("no differences detected between original and updated config")
	}
	return &cb.ConfigUpdate{
		ReadSet:  readSet,
		WriteSet: writeSet,
	}, nil
}
...
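
Tying the two files together, here is a minimal sketch of driving Compute directly, mirroring the tests above. Names follow the snippets (cb is the common protobuf package); the surrounding protobuf wiring and the "BatchSize" key are assumptions for illustration.

func exampleComputeUpdate() {
	// Original channel config: one value at version 1 inside a group at version 4.
	original := &cb.Config{ChannelGroup: &cb.ConfigGroup{
		Version: 4,
		Values: map[string]*cb.ConfigValue{
			"BatchSize": {Version: 1, Value: []byte("old")},
		},
	}}
	// Updated config: same key, new value. As the tests show, versions on
	// the updated config are recomputed, not copied through.
	updated := &cb.Config{ChannelGroup: &cb.ConfigGroup{
		Values: map[string]*cb.ConfigValue{
			"BatchSize": {Value: []byte("new")},
		},
	}}
	cu, err := Compute(original, updated)
	if err != nil {
		panic(err)
	}
	// Per the rules above, the modified value lands in the write set with
	// its version bumped (1 -> 2); the group version stays at 4 because no
	// members were added or removed.
	fmt.Println(cu.WriteSet.Values["BatchSize"].Version) // 2
}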


Write

Using AI Code Generation


The snippet as scraped lost its import block and the config-loading call that produces err. A minimal reconstruction (assuming spf13/viper, which the calls match; the loading lines are an assumption):

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	fmt.Println("Hello, playground")
	viper.SetConfigName("config") // assumed: these loading lines were lost in scraping
	viper.AddConfigPath(".")
	if err := viper.ReadInConfig(); err != nil {
		panic(fmt.Errorf("Fatal error config file: %s \n", err))
	}
	viper.Set("newkey", "newvalue")
	viper.WriteConfig()
}

The trailing lines of the original snippet are runtime output from the panic path (no config file found):

panic(0x4a9c00, 0xc82000e0c0)
main.main()
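
viper.WriteConfig writes back to the file that ReadInConfig loaded, and fails if none was loaded. When that is not what you want, viper also documents explicit-path and non-clobbering variants, sketched here:

// Write to an explicit path, overwriting if present.
if err := viper.WriteConfigAs("./config.yaml"); err != nil {
	panic(err)
}
// SafeWriteConfigAs refuses to overwrite an existing file.
if err := viper.SafeWriteConfigAs("./config.backup.yaml"); err != nil {
	panic(err)
}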


Write

Using AI Code Generation


The original snippet simply alternates bare calls to the package's Write and Read methods; the fused line numbers are stripped and the identical repetitions are abbreviated here:

config.Write()
config.Read()
config.Write()
config.Read()
// ...the same Write/Read pair repeats eleven more times,
// and the snippet ends with a final config.Write()
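
The snippet gives no definition for config, so here is a hypothetical, self-contained stand-in that makes the alternating calls meaningful: a package whose Write persists in-memory settings to disk and whose Read loads them back. Every name below is invented for illustration.

package config

import (
	"encoding/json"
	"os"
)

// settings is a hypothetical in-memory configuration store.
var settings = map[string]string{"key": "value"}

const path = "config.json"

// Write persists the in-memory settings to disk as JSON.
func Write() error {
	data, err := json.MarshalIndent(settings, "", "  ")
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0o644)
}

// Read loads the settings back from disk.
func Read() error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	return json.Unmarshal(data, &settings)
}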


Write

Using AI Code Generation


This snippet also lost its import block and the read step that produces err; reconstructed (the ReadInConfig call is an assumption based on the error message):

package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	config := viper.New()
	config.SetConfigName("config")
	config.AddConfigPath(".")
	// assumed: the scrape dropped the read step that sets err
	if err := config.ReadInConfig(); err != nil {
		panic(fmt.Errorf("Fatal error config file: %s \n", err))
	}
	config.Set("age", 25)
}
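
As written, Set only changes the in-memory value; nothing reaches disk. To actually exercise a write on this viper instance, its documented persistence methods apply:

// Persist the change; WriteConfig writes back to the file found by
// ReadInConfig, while WriteConfigAs targets an explicit path.
if err := config.WriteConfig(); err != nil {
	panic(err)
}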


Write

Using AI Code Generation


The import list was elided in the original page, and the config package used here (with ParseYamlFile, Set, and a Write that takes a filename) is not identified. The first program, de-fused:

import (
	// import list elided in the original page
)

func main() {
	cfg, err := config.ParseYamlFile("config.yaml")
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
	cfg.Set("test", "test")
	cfg.Write("config.yaml")
}

The page then repeats the same program four more times, changing only the target passed to cfg.Write: "config.json" (followed by what appears to be the generated file, an empty JSON object "{ }"), "config.toml", "config.ini", and "config.properties".
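
The pattern these variants illustrate, choosing the output encoding from the file extension passed to Write, is easy to sketch with the standard library. Everything below is an illustrative stand-in, not the page's (unidentified) library; only JSON is wired up, since YAML/TOML/INI would need third-party encoders.

package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
)

// writeConfig encodes cfg according to the target file's extension.
func writeConfig(cfg map[string]string, path string) error {
	switch filepath.Ext(path) {
	case ".json":
		data, err := json.MarshalIndent(cfg, "", "  ")
		if err != nil {
			return err
		}
		return os.WriteFile(path, data, 0o644)
	default:
		return fmt.Errorf("unsupported config format: %s", filepath.Ext(path))
	}
}

func main() {
	if err := writeConfig(map[string]string{"test": "test"}, "config.json"); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}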


Write

Using AI Code Generation


1import "fmt"2import "github.com/robfig/config"3func main() {4 c, err := config.ReadDefault("config.ini")5 if err != nil {6 fmt.Println("Error: ", err)7 }8 c.AddSection("section1")9 c.AddOption("section1", "option1", "value1")10 c.AddOption("section1", "option2", "value2")11 c.AddOption("section1", "option3", "value3")12 c.AddSection("section2")13 c.AddOption("section2", "option1", "value1")14 c.AddOption("section2", "option2", "value2")15 c.AddOption("section2", "option3", "value3")16 c.Write()17}


Write

Using AI Code Generation


Two separate programs, as shown on the page (their import lists were elided in the scrape, and the config package is not identified):

import (
	// imports elided in the original page
)

func main() {
	config.Write()
}

import (
	// imports elided in the original page
)

func main() {
	config.Read()
}


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub: from setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios. The LambdaTest Learning Hub compiles step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, and TestNG.


You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

