Following the article referenced below, these examples use Kafka kafka_2.13-3.4.1 with segmentio/kafka-go v0.4.47.
Kafka is installed as a single node in KRaft mode, together with kafka-ui.
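The examples below assume the quickstart-events topic already exists on the broker. If it does not (and the broker does not auto-create topics), it can be created with kafka-go itself. A minimal sketch, assuming the single-node broker listens on localhost:9092:
package main

import (
	"log"
	"net"
	"strconv"

	"github.com/segmentio/kafka-go"
)

func main() {
	// Connect to the single-node broker (assumed to be localhost:9092).
	conn, err := kafka.Dial("tcp", "localhost:9092")
	if err != nil {
		log.Fatal("failed to dial broker:", err)
	}
	defer conn.Close()

	// Topic creation has to go through the controller broker.
	controller, err := conn.Controller()
	if err != nil {
		log.Fatal("failed to get controller:", err)
	}
	controllerConn, err := kafka.Dial("tcp", net.JoinHostPort(controller.Host, strconv.Itoa(controller.Port)))
	if err != nil {
		log.Fatal("failed to dial controller:", err)
	}
	defer controllerConn.Close()

	// One partition with replication factor 1 is enough for a single-node setup.
	err = controllerConn.CreateTopics(kafka.TopicConfig{
		Topic:             "quickstart-events",
		NumPartitions:     1,
		ReplicationFactor: 1,
	})
	if err != nil {
		log.Fatal("failed to create topic:", err)
	}
}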
1. Producer
// https://juejin.cn/post/7171348874420813832
package main
import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	"github.com/segmentio/kafka-go"
)
func main() {
	type User struct {
		Id   int
		Name string
	}
	user := &User{
		Id:   1,
		Name: "A",
	}
	str, err := json.Marshal(user)
	if err != nil {
		fmt.Printf("failed to marshal user: %v\n", err)
		return
	}

	// Make a writer that produces to quickstart-events, using the least-bytes distribution.
	w := &kafka.Writer{
		Addr:     kafka.TCP("localhost:9092"),
		Topic:    "quickstart-events",
		Balancer: &kafka.LeastBytes{},
	}

	err = w.WriteMessages(context.Background(),
		kafka.Message{
			Value: str,
		},
		// kafka.Message{
		// 	Key:   []byte("Key-B"),
		// 	Value: []byte("One!"),
		// },
		// kafka.Message{
		// 	Key:   []byte("Key-C"),
		// 	Value: []byte("Two!"),
		// },
	)
	if err != nil {
		log.Fatal("failed to write messages:", err)
	}

	if err := w.Close(); err != nil {
		log.Fatal("failed to close writer:", err)
	}
}
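The LeastBytes balancer spreads messages across partitions by payload size and ignores keys. If messages with the same key should always land on the same partition (to preserve per-key ordering), a key-hashing balancer can be used instead. A minimal sketch, assuming the same broker and topic as above:
package main

import (
	"context"
	"log"

	"github.com/segmentio/kafka-go"
)

func main() {
	// kafka.Hash routes messages with equal keys to the same partition.
	w := &kafka.Writer{
		Addr:     kafka.TCP("localhost:9092"),
		Topic:    "quickstart-events",
		Balancer: &kafka.Hash{},
	}
	defer w.Close()

	err := w.WriteMessages(context.Background(),
		kafka.Message{Key: []byte("user-1"), Value: []byte("One!")},
		kafka.Message{Key: []byte("user-1"), Value: []byte("Two!")},
	)
	if err != nil {
		log.Fatal("failed to write keyed messages:", err)
	}
}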
2. Consumer
// https://juejin.cn/post/7171348874420813832
package main
import (
	"context"
	"encoding/json"
	"fmt"
	"log"
	"time"

	kafka "github.com/segmentio/kafka-go"
)
func main() {
	type User struct {
		Id   int
		Name string
	}

	// Make a new reader that consumes from quickstart-events as part of a consumer group.
	// When GroupID is set, committed offsets are stored in Kafka, so messages that were
	// already consumed are not delivered again on later runs. Partition must not be set
	// together with GroupID; the group coordinator assigns partitions automatically.
	r := kafka.NewReader(kafka.ReaderConfig{
		Brokers:        []string{"localhost:9092"},
		GroupID:        "xxxx",
		Topic:          "quickstart-events",
		MinBytes:       10e3,        // 10KB
		MaxBytes:       10e6,        // 10MB
		CommitInterval: time.Second, // flushes commits to Kafka every second; only takes effect when GroupID is set
	})
	// r.SetOffset(1) // SetOffset cannot be used together with GroupID.

	ctx := context.Background()
	for {
		m, err := r.ReadMessage(ctx)
		if err != nil {
			log.Printf("read error: %v", err)
			break
		}
		fmt.Println(m.Value) // raw message bytes
		var user User
		if err := json.Unmarshal(m.Value, &user); err != nil {
			fmt.Printf("json unmarshal err: %v\n", err)
		}
		fmt.Printf("--user-: %v\n", user)
		fmt.Printf("message at offset %d: %s = %s\n", m.Offset, string(m.Key), string(m.Value))
	}

	if err := r.Close(); err != nil {
		log.Fatal("failed to close reader:", err)
	}
}
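ReadMessage combined with GroupID and CommitInterval commits offsets automatically, so a message may be marked as consumed before it has actually been processed. When the commit should only happen after processing succeeds, kafka-go offers FetchMessage plus CommitMessages. A minimal sketch, assuming the same broker, topic, and group as above:
package main

import (
	"context"
	"fmt"
	"log"

	kafka "github.com/segmentio/kafka-go"
)

func main() {
	// FetchMessage does not commit offsets; CommitMessages is called only
	// after the message has been handled, giving at-least-once semantics.
	r := kafka.NewReader(kafka.ReaderConfig{
		Brokers: []string{"localhost:9092"},
		GroupID: "xxxx",
		Topic:   "quickstart-events",
	})
	defer r.Close()

	ctx := context.Background()
	for {
		m, err := r.FetchMessage(ctx)
		if err != nil {
			log.Printf("fetch error: %v", err)
			break
		}
		fmt.Printf("processing offset %d: %s\n", m.Offset, string(m.Value))
		if err := r.CommitMessages(ctx, m); err != nil {
			log.Printf("commit error: %v", err)
		}
	}
}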
Reference: "kafka-go库的使用" (Using the kafka-go library) - Juejin, https://juejin.cn/post/7171348874420813832