How to use the CreateMessage method of the kafka package

Best Venom code snippet using kafka.CreateMessage
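The snippets on this page come from different projects, so CreateMessage means slightly different things in each one. As a baseline, here is a minimal, illustrative sketch of a CreateMessage-style helper built on the confluent-kafka-go client; the broker address and topic name are assumptions for the example, and this is not Venom's (or any listed project's) actual implementation.

package main

import (
	"github.com/confluentinc/confluent-kafka-go/kafka"
)

// createMessage builds a *kafka.Message for the given topic, key and value.
// It mirrors the shape of the createMessage helpers in the snippets below.
func createMessage(topic string, key, value []byte) *kafka.Message {
	return &kafka.Message{
		TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
		Key:            key,
		Value:          value,
	}
}

func main() {
	// broker address is an assumption for this example
	p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": "localhost:9092"})
	if err != nil {
		panic(err)
	}
	defer p.Close()

	if err := p.Produce(createMessage("demo-topic", []byte("key"), []byte("hello")), nil); err != nil {
		panic(err)
	}
	p.Flush(5000) // wait up to 5s for outstanding deliveries
}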

redeliverer_test.go

Source: redeliverer_test.go (GitHub)


package redelivery_test

import (
	"errors"
	"fmt"

	"github.com/splunk/kafka-mq-go/queue/internal/mocks"
	kafkamq "github.com/splunk/kafka-mq-go/queue/proto"
	. "github.com/splunk/kafka-mq-go/queue/redelivery"

	"github.com/cenkalti/backoff"
	confluentKafka "github.com/confluentinc/confluent-kafka-go/kafka"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("Redeliverer", func() {
	const (
		partition  = 1
		numRetries = 16
	)
	var (
		r                  Redeliverer
		messageSender      *mocks.MessageSender
		markerProducer     *mocks.MarkerProducer
		exponentialBackoff backoff.BackOff
	)
	BeforeEach(func() {
		r = nil
		messageSender = &mocks.MessageSender{}
		markerProducer = &mocks.MarkerProducer{}
		exponentialBackoff = backoff.NewExponentialBackOff()
	})
	AfterEach(func() {
		messageSender.AssertExpectations(GinkgoT())
		markerProducer.AssertExpectations(GinkgoT())
	})
	Describe("NewRedeliverer", func() {
		JustBeforeEach(func() {
			r = NewRedeliverer(partition, messageSender, markerProducer, exponentialBackoff)
		})
		Context("Happy path", func() {
			It("should return a Redeliverer", func() {
				Expect(r).ShouldNot(BeNil())
			})
		})
	})
	Describe("Redeliver", func() {
		var (
			markers       []*kafkamq.Marker
			queueID       = "test-queue"
			criticalError = errors.New("oops")
			expectedMsg   *confluentKafka.Message
		)
		BeforeEach(func() {
			r = NewRedeliverer(partition, messageSender, markerProducer, exponentialBackoff)
			Expect(r).ShouldNot(BeNil())
		})
		AfterEach(func() {
			expectedMsg = nil
		})
		JustBeforeEach(func() {
			r.Redeliver(markers)
		})
		When("the redelivery list is empty", func() {
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 0)
			})
			It("should not attempt any redeliveries", func() {
				// assertions are made on the calls to message and marker producers
			})
		})
		When("the redelivery list contains a single end marker", func() {
			BeforeEach(func() {
				markers = []*kafkamq.Marker{
					&kafkamq.Marker{
						Type: kafkamq.Marker_END,
						MessageId: &kafkamq.MessageID{
							Partition: int32(partition),
							Offset:    int64(0),
						},
					},
				}
			})
			It("should not attempt any redeliveries", func() {
				// assertions are made on the calls to message and marker producers
			})
		})
		When("the redelivery list contains a single keep-alive marker", func() {
			BeforeEach(func() {
				markers = []*kafkamq.Marker{
					&kafkamq.Marker{
						Type: kafkamq.Marker_KEEPALIVE,
						MessageId: &kafkamq.MessageID{
							Partition: int32(partition),
							Offset:    int64(0),
						},
					},
				}
			})
			It("should not attempt any redeliveries", func() {
				// assertions are made on the calls to message and marker producers
			})
		})
		When("the redelivery list contains a single start marker", func() {
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 1)
				expectedMsg = createMessage(partition, 0, []byte(queueID), []byte(fmt.Sprint(0)))
				messageSender.On("SendMessage", queueID, expectedMsg.Value).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg).Return(nil).Once()
			})
			It("should redeliver the message", func() {
				// assertions are made on the calls to message and marker producers
				Eventually(func() bool {
					return methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg.Value) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg)
				}).Should(BeTrue())
			})
		})
		When("the redelivery list contains a more than one start marker", func() {
			var (
				expectedMsg1 *confluentKafka.Message
				expectedMsg2 *confluentKafka.Message
				expectedMsg3 *confluentKafka.Message
			)
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 3)
				expectedMsg1 = createMessage(partition, 0, []byte(queueID), []byte(fmt.Sprint(0)))
				expectedMsg2 = createMessage(partition, 1, []byte(queueID), []byte(fmt.Sprint(1)))
				expectedMsg3 = createMessage(partition, 2, []byte(queueID), []byte(fmt.Sprint(2)))
				messageSender.On("SendMessage", queueID, expectedMsg1.Value).Return(nil).Once()
				messageSender.On("SendMessage", queueID, expectedMsg2.Value).Return(nil).Once()
				messageSender.On("SendMessage", queueID, expectedMsg3.Value).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg1).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg2).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg3).Return(nil).Once()
			})
			It("should redeliver the messages", func() {
				// assertions are made on the calls to message and marker producers
				Eventually(func() bool {
					return methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg1.Value) &&
						methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg2.Value) &&
						methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg3.Value) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg1) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg2) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg3)
				}).Should(BeTrue())
			})
		})
		When("the message cannot be redelivered", func() {
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 1)
				expectedMsg = createMessage(partition, 0, []byte(queueID), []byte(fmt.Sprint(0)))
				messageSender.On("SendMessage", queueID, expectedMsg.Value).Return(criticalError)
			})
			It("should not redeliver the message", func() {
				// assertions are made on the calls to message and marker producers
				Eventually(func() bool {
					return methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg.Value)
				}).Should(BeTrue())
			})
		})
		When("the message can be redelivered but the marker cannot", func() {
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 1)
				expectedMsg = createMessage(partition, 0, []byte(queueID), []byte(fmt.Sprint(0)))
				messageSender.On("SendMessage", queueID, expectedMsg.Value).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg).Return(criticalError)
			})
			It("should redeliver the message", func() {
				Eventually(func() bool {
					return methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg.Value) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg)
				}).Should(BeTrue())
				r.Stop()
			})
		})
		When("the message cannot be redelivered it will be retried", func() {
			BeforeEach(func() {
				markers = createRedeliveryList(queueID, partition, 1)
				expectedMsg = createMessage(partition, 0, []byte(queueID), []byte(fmt.Sprint(0)))
				messageSender.On("SendMessage", queueID, expectedMsg.Value).Return(criticalError).Once()
				messageSender.On("SendMessage", queueID, expectedMsg.Value).Return(nil).Once()
				markerProducer.On("SendEndMarker", queueID, expectedMsg).Return(nil).Once()
			})
			It("should redeliver the message", func() {
				// assertions are made on the calls to message and marker producers
				Eventually(func() bool {
					return methodWasCalled(&messageSender.Mock, "SendMessage", queueID, expectedMsg.Value) &&
						methodWasCalled(&markerProducer.Mock, "SendEndMarker", queueID, expectedMsg)
				}).Should(BeTrue())
			})
		})
	})
})

func createRedeliveryList(queueID string, partition int, n int) []*kafkamq.Marker {
	var ret = []*kafkamq.Marker{}
	for i := 0; i < n; i++ {
		ret = append(ret, &kafkamq.Marker{
			Type:  kafkamq.Marker_START,
			Value: []byte(fmt.Sprint(i)),
			Key:   []byte(queueID),
			MessageId: &kafkamq.MessageID{
				Partition: int32(partition),
				Offset:    int64(i),
			},
			RedeliverAfterMs: 600,
		})
	}
	return ret
}
...
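The excerpt above is cut off before the createMessage and methodWasCalled helpers it relies on. Based on how they are called, a plausible reconstruction might look like the following; this is a hypothetical sketch, not the actual splunk/kafka-mq-go test code, and it assumes the helpers build a confluent-kafka-go message and poll a testify mock.

package redelivery_test

import (
	"reflect"

	confluentKafka "github.com/confluentinc/confluent-kafka-go/kafka"
	"github.com/stretchr/testify/mock"
)

// createMessage builds the message the mocks are expected to receive
// (hypothetical reconstruction of the truncated helper).
func createMessage(partition int, offset int, key, value []byte) *confluentKafka.Message {
	return &confluentKafka.Message{
		TopicPartition: confluentKafka.TopicPartition{
			Partition: int32(partition),
			Offset:    confluentKafka.Offset(offset),
		},
		Key:   key,
		Value: value,
	}
}

// methodWasCalled reports whether the mock has recorded a call with the given
// arguments, without failing the test when it has not, so it can be polled
// from Eventually (hypothetical reconstruction of the truncated helper).
func methodWasCalled(m *mock.Mock, method string, args ...interface{}) bool {
	for _, call := range m.Calls {
		if call.Method == method && reflect.DeepEqual([]interface{}(call.Arguments), args) {
			return true
		}
	}
	return false
}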


catch_suite_test.go

Source: catch_suite_test.go (GitHub)


...
	mkr2, err := json.Marshal(kr2)
	Expect(err).ToNot(HaveOccurred())
	mkr3, err := json.Marshal(kr3)
	Expect(err).ToNot(HaveOccurred())
	producer.Input() <- kafka.CreateMessage(topic, mkr1)
	producer.Input() <- kafka.CreateMessage(topic, mkr2)
	producer.Input() <- kafka.CreateMessage(topic, mkr3)
	krChan1, err := f.Catch(group, topic, uuid1)
	Expect(err).ToNot(HaveOccurred())
	krChan2, err := f.Catch(group, topic, uuid2)
	Expect(err).ToNot(HaveOccurred())
	krChan3, err := f.Catch(group, topic, uuid3)
	Expect(err).ToNot(HaveOccurred())
	<-krChan1
	<-krChan2
	<-krChan3
	close(done)
}, 15)
})
Describe("test pre-request disabled", func() {
	It("should return nil if response is not already present", func() {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		f, err := NewFactory(ctx, Config{
			PreRequest:   false,
			KafkaBrokers: kafkaBrokers,
			TimeoutSec:   5,
		})
		Expect(err).ToNot(HaveOccurred())
		uuid1, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		uuid2, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		uuid3, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		krChan1, err := f.Catch(group, topic, uuid1)
		Expect(err).ToNot(HaveOccurred())
		krChan2, err := f.Catch(group, topic, uuid2)
		Expect(err).ToNot(HaveOccurred())
		krChan3, err := f.Catch(group, topic, uuid3)
		Expect(err).ToNot(HaveOccurred())
		Expect(<-krChan1).To(BeNil())
		Expect(<-krChan2).To(BeNil())
		Expect(<-krChan3).To(BeNil())
	})
	It("should return response if its present", func(done Done) {
		ctx, cancel := context.WithCancel(context.Background())
		f, err := NewFactory(ctx, Config{
			PreRequest:   false,
			KafkaBrokers: kafkaBrokers,
			TimeoutSec:   10,
		})
		Expect(err).ToNot(HaveOccurred())
		uuid1, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		uuid2, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		uuid3, err := uuuid.NewV4()
		Expect(err).ToNot(HaveOccurred())
		kr1 := model.KafkaResponse{UUID: uuid1}
		kr2 := model.KafkaResponse{UUID: uuid2}
		kr3 := model.KafkaResponse{UUID: uuid3}
		mkr1, err := json.Marshal(kr1)
		Expect(err).ToNot(HaveOccurred())
		mkr2, err := json.Marshal(kr2)
		Expect(err).ToNot(HaveOccurred())
		mkr3, err := json.Marshal(kr3)
		Expect(err).ToNot(HaveOccurred())
		krChan1, err := f.Catch(group, topic, uuid1)
		Expect(err).ToNot(HaveOccurred())
		krChan2, err := f.Catch(group, topic, uuid2)
		Expect(err).ToNot(HaveOccurred())
		krChan3, err := f.Catch(group, topic, uuid3)
		Expect(err).ToNot(HaveOccurred())
		go func() {
			defer GinkgoRecover()
			kr1 := <-krChan1
			kr2 := <-krChan2
			kr3 := <-krChan3
			cancel()
			Expect(kr1).ToNot(BeNil())
			Expect(kr2).ToNot(BeNil())
			Expect(kr3).ToNot(BeNil())
			Expect(kr1.UUID).To(Equal(uuid1))
			Expect(kr2.UUID).To(Equal(uuid2))
			Expect(kr3.UUID).To(Equal(uuid3))
			close(done)
		}()
		<-time.After(3 * time.Second)
		producer.Input() <- kafka.CreateMessage(topic, mkr1)
		producer.Input() <- kafka.CreateMessage(topic, mkr2)
		producer.Input() <- kafka.CreateMessage(topic, mkr3)
	}, 15)
})
Describe("testValidation", func() {
	It("should return error when empty arguments are passed to NewFactory", func() {
		_, err := NewFactory(nil, Config{})
		Expect(err).To(HaveOccurred())
		_, err = NewFactory(context.Background(), Config{})
		Expect(err).To(HaveOccurred())
	})
	It("should return error when empty arguments are passed to Catch", func() {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		f, err := NewFactory(ctx, Config{
			PreRequest: false,
...
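In this suite, producer.Input() is fed by kafka.CreateMessage(topic, payload), which suggests a thin wrapper that turns a raw payload into a message for a sarama AsyncProducer. A minimal sketch of such a wrapper, assuming sarama and that the project's kafka package exposes it this way, could be:

package kafka

import "github.com/Shopify/sarama"

// CreateMessage wraps a raw payload in a sarama.ProducerMessage for the given
// topic, ready to be pushed onto an AsyncProducer's Input() channel.
func CreateMessage(topic string, value []byte) *sarama.ProducerMessage {
	return &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.ByteEncoder(value),
	}
}

Each marshalled KafkaResponse would then be sent with producer.Input() <- kafka.CreateMessage(topic, payload), exactly as in the test above.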


producer.go

Source: producer.go (GitHub)


package studies

import (
	"fmt"
	"time"

	"github.com/confluentinc/confluent-kafka-go/kafka"
	log "github.com/sirupsen/logrus"
)

type producer struct {
	instance *kafka.Producer
	topics   []kafka.TopicPartition
	name     string
	stopChan chan bool
}

func NewProducer(name string, bootstrapServers string, topics []string) *producer {
	prod, err := kafka.NewProducer(&kafka.ConfigMap{
		"bootstrap.servers": bootstrapServers,
	})
	if err != nil {
		log.WithError(err).Error("Failed to create producer")
		panic(err)
	}
	partitions := make([]kafka.TopicPartition, 0)
	for _, topic := range topics {
		topic := topic // copy the range variable so &topic below does not point at the shared iterator
		partitions = append(partitions, kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny})
	}
	return &producer{
		instance: prod,
		topics:   partitions,
		name:     name,
	}
}

func (p *producer) createMessage(topic kafka.TopicPartition, key string, value string) *kafka.Message {
	return &kafka.Message{
		TopicPartition: topic,
		Value:          []byte(value),
		Key:            []byte(key),
	}
}

func (p *producer) Start(interval time.Duration) {
	p.stopChan = make(chan bool, 1)
	go func() {
		var count int
		for {
			select {
			case <-p.stopChan:
				p.instance.Close()
				return
			default:
				for _, topic := range p.topics {
					p.instance.ProduceChannel() <- p.createMessage(topic, p.name, fmt.Sprintf("Message number %d", count))
				}
				count++
				time.Sleep(interval)
			}
		}
	}()
}

func (p *producer) Stop() {
	log.Warnf("Stopping producer %s", p.name)
	p.stopChan <- true
}
...
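A short usage sketch for the producer above; the module path and broker address are assumptions for this example, not part of the original source.

package main

import (
	"time"

	"example.com/studies" // hypothetical module path for the studies package above
)

func main() {
	// publish one message per topic on a local broker
	p := studies.NewProducer("demo-producer", "localhost:9092", []string{"topic-a", "topic-b"})
	p.Start(2 * time.Second) // one message per topic every two seconds
	time.Sleep(10 * time.Second)
	p.Stop()
}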


CreateMessage

Using AI Code Generation


package main

import (
	"fmt"

	"github.com/Shopify/sarama"
)

func main() {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true // required by SyncProducer
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, config)
	if err != nil {
		fmt.Println("Error: ", err)
		return
	}
	defer producer.Close()
	msg := &sarama.ProducerMessage{
		Topic: "test", // topic assumed; it was omitted in the original snippet
		Value: sarama.StringEncoder("Hello World"),
	}
	partition, offset, err := producer.SendMessage(msg)
	if err != nil {
		fmt.Println("Error: ", err)
		return
	}
	fmt.Println("Partition: ", partition)
	fmt.Println("Offset: ", offset)
}

package main

import (
	"fmt"

	"github.com/Shopify/sarama"
)

func main() {
	config := sarama.NewConfig()
	consumer, err := sarama.NewConsumer([]string{"localhost:9092"}, config)
	if err != nil {
		fmt.Println("Error: ", err)
		return
	}
	defer consumer.Close()
	partitionConsumer, err := consumer.ConsumePartition("test", 0, sarama.OffsetNewest)
	if err != nil {
		fmt.Println("Error: ", err)
		return
	}
	defer partitionConsumer.Close()
	msg := <-partitionConsumer.Messages()
	fmt.Println("Message: ", string(msg.Value))
}


CreateMessage

Using AI Code Generation


import (

func main() {
	kafka := kafka.NewKafka()
	kafka.CreateMessage("test", "this is a test message")
}

import (

func main() {
	kafka := kafka.NewKafka()
	kafka.ConsumeMessage("test")
}

import (

func main() {
	kafka := kafka.NewKafka()
	kafka.DeleteMessage("test", 0)
}


CreateMessage

Using AI Code Generation


package main

import (
	"fmt"
	"log"
	"os"
	"os/signal"

	"github.com/Shopify/sarama"
)

func main() {
	// produce one message, then consume until interrupted
	CreateMessage()
	ConsumeMessage()
}

func CreateMessage() {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true // required by SyncProducer
	brokers := []string{"localhost:9092"}
	producer, err := sarama.NewSyncProducer(brokers, config)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := producer.Close(); err != nil {
			panic(err)
		}
	}()
	msg := &sarama.ProducerMessage{
		Topic: "test", // topic assumed; it was omitted in the original snippet
		Value: sarama.StringEncoder("testing 123"),
	}
	partition, offset, err := producer.SendMessage(msg)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Message is stored in topic(%s)/partition(%d)/offset(%d)\n", msg.Topic, partition, offset)
}

func ConsumeMessage() {
	config := sarama.NewConfig()
	brokers := []string{"localhost:9092"}
	master, err := sarama.NewConsumer(brokers, config)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := master.Close(); err != nil {
			panic(err)
		}
	}()
	consumer, err := master.ConsumePartition("test", 0, sarama.OffsetNewest)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := consumer.Close(); err != nil {
			panic(err)
		}
	}()
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)
	for {
		select {
		case err := <-consumer.Errors():
			log.Println(err)
		case msg := <-consumer.Messages():
			log.Printf("Received messages topic(%s)/partition(%d)/offset(%d): %s", msg.Topic, msg.Partition, msg.Offset, msg.Value)
		case <-signals:
			return
		}
	}
}


CreateMessage

Using AI Code Generation


import (

func main() {
	fmt.Println("Hello World")
	kafka := Kafka{}
	kafka.CreateMessage("test")
}


CreateMessage

Using AI Code Generation


package main

import (
	// the import path of the local kafka package was elided in the original snippet
)

func main() {
	kafka.CreateMessage("test", "test message")
}

package kafka // package name assumed from the call above

import (
	"fmt"

	"github.com/Shopify/sarama"
)

func CreateMessage(topic string, message string) {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true // required by SyncProducer
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, config)
	if err != nil {
		fmt.Println("Error: ", err)
		return
	}
	defer producer.Close()
	msg := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.StringEncoder(message),
	}
	_, _, err = producer.SendMessage(msg)
	if err != nil {
		fmt.Println("Error: ", err)
	}
}

2019/10/23 15:57:49 Error: kafka: client has run out of available brokers to talk to (Is your cluster reachable?)


CreateMessage

Using AI Code Generation


package main

import (
	"fmt"

	"github.com/Shopify/sarama"
)

func main() {
	config := sarama.NewConfig()
	config.ClientID = "my_client_id"
	config.Producer.Return.Successes = true // required by SyncProducer
	client, err := sarama.NewClient([]string{"localhost:9092"}, config)
	if err != nil {
		panic(err)
	}
	defer client.Close()
	producer, err := sarama.NewSyncProducerFromClient(client)
	if err != nil {
		panic(err)
	}
	defer func() {
		if err := producer.Close(); err != nil {
			panic(err)
		}
	}()
	message := &sarama.ProducerMessage{
		Topic: "my_topic",
		Value: sarama.StringEncoder("testing 123"),
	}
	partition, offset, err := producer.SendMessage(message)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Message is stored in topic(%s)/partition(%d)/offset(%d)\n", message.Topic, partition, offset)
}

Message is stored in topic(my_topic)/partition(0)/offset(0)


CreateMessage

Using AI Code Generation


package main

import (
	"fmt"
	"os"

	"github.com/Shopify/sarama"
)

func main() {
	config := sarama.NewConfig()
	config.Producer.Return.Successes = true // required by SyncProducer
	// sarama's low-level Broker type only accepts *ProduceRequest, not *ProducerMessage,
	// so a SyncProducer is used here to send the message
	producer, err := sarama.NewSyncProducer([]string{"localhost:9092"}, config)
	if err != nil {
		panic(err)
	}
	defer producer.Close()
	msg := &sarama.ProducerMessage{
		Topic: "my_topic",
		Key:   sarama.StringEncoder("key"),
		Value: sarama.StringEncoder("value"),
	}
	partition, offset, err := producer.SendMessage(msg)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Message is stored in topic(%s)/partition(%d)/offset(%d)\n", msg.Topic, partition, offset)
	consumer, err := sarama.NewConsumer([]string{"localhost:9092"}, config)
	if err != nil {
		panic(err)
	}
	defer consumer.Close()
	partitions, err := consumer.Partitions("my_topic")
	if err != nil {
		panic(err)
	}
	pc, err := consumer.ConsumePartition("my_topic", partitions[0], sarama.OffsetNewest)
	if err != nil {
		panic(err)
	}
	defer pc.AsyncClose()
	signals := make(chan os.Signal, 1)
...


CreateMessage

Using AI Code Generation


func main() {
	fmt.Println("Hello World")
	kafka := Kafka{}
	kafka.CreateMessage("Test Message")
}


Automation Testing Tutorials

Learn to execute automation testing from scratch with the LambdaTest Learning Hub. From setting up the prerequisites and running your first automation test to following best practices and diving into advanced test scenarios, the LambdaTest Learning Hubs compile step-by-step guides to help you become proficient with different test automation frameworks such as Selenium, Cypress, and TestNG.

LambdaTest Learning Hubs:

YouTube

You can also refer to the video tutorials on the LambdaTest YouTube channel for step-by-step demonstrations from industry experts.

