云消息隊(duì)列 Kafka 版 - 開發(fā)指南

Go SDK收發(fā)消息

本文介紹如何使用Go SDK通過接入點(diǎn)接入云消息隊(duì)列 Kafka 版并收發(fā)消息。

環(huán)境準(zhǔn)備

您已安裝Go。更多信息,請(qǐng)參見安裝Go。

說明

該kafka-confluent-go-demo不支持Windows系統(tǒng)。

準(zhǔn)備配置

  1. 可選:下載SSL根證書。如果是SSL接入點(diǎn),需下載該證書。

  2. 訪問aliware-kafka-demos,單擊code圖標(biāo),然后在下拉框選擇Download ZIP,下載Demo工程并解壓。

  3. 在解壓的Demo工程中,找到kafka-confluent-go-demo文件夾,將此文件夾上傳至Linux系統(tǒng)的/home路徑下。

  4. 登錄Linux系統(tǒng),進(jìn)入/home/kafka-confluent-go-demo路徑,修改配置文件conf/kafka.json。

    {
      "topic": "XXX",
      "group.id": "XXX",
      "bootstrap.servers" : "XXX:XX,XXX:XX,XXX:XX",
      "security.protocol" : "plaintext",
      "sasl.mechanism" : "XXX",
      "sasl.username" : "XXX",
      "sasl.password" : "XXX"
    }

    參數(shù)

    描述

    topic

    實(shí)例的Topic名稱。您可在云消息隊(duì)列 Kafka 版控制臺(tái)Topic 管理頁面獲取。

    group.id

    實(shí)例的Group。您可在云消息隊(duì)列 Kafka 版控制臺(tái)Group 管理頁面獲取。

    說明

    如果應(yīng)用運(yùn)行producer.go發(fā)送消息,該參數(shù)可以不配置;如果應(yīng)用運(yùn)行consumer.go訂閱消息,該參數(shù)必須配置。

    bootstrap.servers

    SSL接入點(diǎn)的IP地址以及端口。您可在云消息隊(duì)列 Kafka 版控制臺(tái)實(shí)例詳情頁面的接入點(diǎn)信息區(qū)域獲取。

    security.protocol

    SASL用戶認(rèn)證協(xié)議,默認(rèn)為plaintext。各類型接入點(diǎn)對(duì)應(yīng)取值如下:

    • 默認(rèn)接入點(diǎn):plaintext。

    • SSL接入點(diǎn):sasl_ssl。

    • SASL接入點(diǎn):sasl_plaintext。

    sasl.mechanism

    消息收發(fā)的機(jī)制。各類型接入點(diǎn)對(duì)應(yīng)取值如下:

    • 默認(rèn)接入點(diǎn):不涉及,無需配置。

    • SSL接入點(diǎn):PLAIN。

    • SASL接入點(diǎn):PLAIN機(jī)制需配置為PLAIN;SCRAM機(jī)制需配置為SCRAM-SHA-256。

    sasl.username

    SASL用戶名。如果是SSL接入點(diǎn)或SASL接入點(diǎn),需配置該參數(shù)。

    說明
    • 如果實(shí)例未開啟ACL,您可以在云消息隊(duì)列 Kafka 版控制臺(tái)實(shí)例詳情頁面的配置信息區(qū)域獲取默認(rèn)的用戶名密碼。
    • 如果實(shí)例已開啟ACL,請(qǐng)確保要使用的SASL用戶已被授予向云消息隊(duì)列 Kafka 版實(shí)例收發(fā)消息的權(quán)限。具體操作,請(qǐng)參見SASL用戶授權(quán)。

    sasl.password

    SASL用戶密碼。如果是SSL接入點(diǎn)或SASL接入點(diǎn),需配置該參數(shù)。

發(fā)送消息

執(zhí)行以下命令運(yùn)行producer.go發(fā)送消息。

go run -mod=vendor producer/producer.go

消息程序producer.go示例代碼如下:

package main

import (
    "encoding/json"
    "fmt"
    "github.com/confluentinc/confluent-kafka-go/kafka"
    "os"
    "path/filepath"
)

type KafkaConfig struct {
    Topic      string `json:"topic"`
    GroupId    string `json:"group.id"`
    BootstrapServers    string `json:"bootstrap.servers"`
    SecurityProtocol string `json:"security.protocol"`
    SslCaLocation string `json:"ssl.ca.location"`
    SaslMechanism string `json:"sasl.mechanism"`
    SaslUsername string `json:"sasl.username"`
    SaslPassword string `json:"sasl.password"`
}

// config should be a pointer to structure, if not, panic
func loadJsonConfig() *KafkaConfig {
    workPath, err := os.Getwd()
    if err != nil {
        panic(err)
    }
    configPath := filepath.Join(workPath, "conf")
    fullPath := filepath.Join(configPath, "kafka.json")
    file, err := os.Open(fullPath);
    if (err != nil) {
        msg := fmt.Sprintf("Can not load config at %s. Error: %v", fullPath, err)
        panic(msg)
    }

    defer file.Close()

    decoder := json.NewDecoder(file)
    var config = &KafkaConfig{}
    err = decoder.Decode(config);
    if (err != nil) {
        msg := fmt.Sprintf("Decode json fail for config file at %s. Error: %v", fullPath, err)
        panic(msg)
    }
    json.Marshal(config)
    return  config
}

// doInitProducer builds a kafka.Producer from cfg. The security settings
// depend on cfg.SecurityProtocol: "plaintext" (default access point),
// "sasl_ssl" (SSL access point) or "sasl_plaintext" (SASL access point).
// It panics on an unknown protocol or on producer creation failure.
func doInitProducer(cfg *KafkaConfig) *kafka.Producer {
	fmt.Print("init kafka producer, it may take a few seconds to init the connection\n")
	// Common producer tuning shared by all access point types.
	kafkaconf := &kafka.ConfigMap{
		"api.version.request": "true",
		"message.max.bytes":   1000000,
		"linger.ms":           10,
		"retries":             30,
		"retry.backoff.ms":    1000,
		"acks":                "1",
	}
	kafkaconf.SetKey("bootstrap.servers", cfg.BootstrapServers)

	switch cfg.SecurityProtocol {
	case "plaintext":
		kafkaconf.SetKey("security.protocol", "plaintext")
	case "sasl_ssl":
		kafkaconf.SetKey("security.protocol", "sasl_ssl")
		kafkaconf.SetKey("ssl.ca.location", "conf/ca-cert.pem")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
		// The demo uses the bundled CA file directly; certificate and
		// hostname verification are disabled for it.
		kafkaconf.SetKey("enable.ssl.certificate.verification", "false")
		kafkaconf.SetKey("ssl.endpoint.identification.algorithm", "None")
	case "sasl_plaintext":
		kafkaconf.SetKey("security.protocol", "sasl_plaintext")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		// Mechanism comes from config (PLAIN or SCRAM-SHA-256). The original
		// first hard-coded "PLAIN" and then immediately overwrote it with
		// cfg.SaslMechanism; the redundant write is removed.
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
	default:
		panic(kafka.NewError(kafka.ErrUnknownProtocol, "unknown protocol", true))
	}

	producer, err := kafka.NewProducer(kafkaconf)
	if err != nil {
		panic(err)
	}
	fmt.Print("init kafka producer success\n")
	return producer
}

// main loads conf/kafka.json, initializes a producer and asynchronously
// sends a handful of demo messages, then waits for delivery reports.
func main() {
	// Choose the correct protocol
	// 9092 for PLAINTEXT
	// 9093 for SASL_SSL, need to provide sasl.username and sasl.password
	// 9094 for SASL_PLAINTEXT, need to provide sasl.username and sasl.password
	cfg := loadJsonConfig()
	producer := doInitProducer(cfg)

	defer producer.Close()

	// Delivery report handler for produced messages.
	go func() {
		for e := range producer.Events() {
			switch ev := e.(type) {
			case *kafka.Message:
				if ev.TopicPartition.Error != nil {
					fmt.Printf("Delivery failed: %v\n", ev.TopicPartition)
				} else {
					fmt.Printf("Delivered message to %v\n", ev.TopicPartition)
				}
			}
		}
	}()

	// Produce messages to topic (asynchronously). The original discarded
	// the Produce error (e.g. a full local queue); report it instead.
	topic := cfg.Topic
	for _, word := range []string{"Welcome", "to", "the", "Confluent", "Kafka", "Golang", "client"} {
		err := producer.Produce(&kafka.Message{
			TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
			Value:          []byte(word),
		}, nil)
		if err != nil {
			fmt.Printf("Produce failed: %v\n", err)
		}
	}

	// Wait up to 15s for outstanding message deliveries before shutting down.
	producer.Flush(15 * 1000)
}
                        

訂閱消息

執(zhí)行以下命令運(yùn)行consumer.go訂閱消息。

go run -mod=vendor consumer/consumer.go

消息程序consumer.go示例代碼如下:

package main

import (
    "encoding/json"
    "fmt"
    "github.com/confluentinc/confluent-kafka-go/kafka"
    "os"
    "path/filepath"
)
type KafkaConfig struct {
    Topic      string `json:"topic"`
    GroupId    string `json:"group.id"`
    BootstrapServers    string `json:"bootstrap.servers"`
    SecurityProtocol string `json:"security.protocol"`
    SaslMechanism string `json:"sasl.mechanism"`
    SaslUsername string `json:"sasl.username"`
    SaslPassword string `json:"sasl.password"`
}

// config should be a pointer to structure, if not, panic
func loadJsonConfig() *KafkaConfig {
    workPath, err := os.Getwd()
    if err != nil {
        panic(err)
    }
    configPath := filepath.Join(workPath, "conf")
    fullPath := filepath.Join(configPath, "kafka.json")
    file, err := os.Open(fullPath);
    if (err != nil) {
        msg := fmt.Sprintf("Can not load config at %s. Error: %v", fullPath, err)
        panic(msg)
    }

    defer file.Close()

    decoder := json.NewDecoder(file)
    var config = &KafkaConfig{}
    err = decoder.Decode(config);
    if (err != nil) {
        msg := fmt.Sprintf("Decode json fail for config file at %s. Error: %v", fullPath, err)
        panic(msg)
    }
    json.Marshal(config)
    return  config
}


// doInitConsumer builds a kafka.Consumer from cfg. The security settings
// depend on cfg.SecurityProtocol: "plaintext" (default access point),
// "sasl_ssl" (SSL access point) or "sasl_plaintext" (SASL access point).
// It panics on an unknown protocol or on consumer creation failure.
func doInitConsumer(cfg *KafkaConfig) *kafka.Consumer {
	fmt.Print("init kafka consumer, it may take a few seconds to init the connection\n")
	// Common consumer tuning shared by all access point types.
	kafkaconf := &kafka.ConfigMap{
		"api.version.request":       "true",
		"auto.offset.reset":         "latest",
		"heartbeat.interval.ms":     3000,
		"session.timeout.ms":        30000,
		"max.poll.interval.ms":      120000,
		"fetch.max.bytes":           1024000,
		"max.partition.fetch.bytes": 256000,
	}
	kafkaconf.SetKey("bootstrap.servers", cfg.BootstrapServers)
	kafkaconf.SetKey("group.id", cfg.GroupId)

	switch cfg.SecurityProtocol {
	case "plaintext":
		kafkaconf.SetKey("security.protocol", "plaintext")
	case "sasl_ssl":
		kafkaconf.SetKey("security.protocol", "sasl_ssl")
		kafkaconf.SetKey("ssl.ca.location", "./conf/ca-cert.pem")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
		// The demo uses the bundled CA file directly; certificate and
		// hostname verification are disabled for it.
		kafkaconf.SetKey("ssl.endpoint.identification.algorithm", "None")
		kafkaconf.SetKey("enable.ssl.certificate.verification", "false")
	case "sasl_plaintext":
		kafkaconf.SetKey("security.protocol", "sasl_plaintext")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
	default:
		panic(kafka.NewError(kafka.ErrUnknownProtocol, "unknown protocol", true))
	}

	consumer, err := kafka.NewConsumer(kafkaconf)
	if err != nil {
		panic(err)
	}
	fmt.Print("init kafka consumer success\n")
	return consumer
}

// main loads conf/kafka.json, initializes a consumer, subscribes to the
// configured topic and prints every received message forever.
func main() {

	// Choose the correct protocol
	// 9092 for PLAINTEXT
	// 9093 for SASL_SSL, need to provide sasl.username and sasl.password
	// 9094 for SASL_PLAINTEXT, need to provide sasl.username and sasl.password
	cfg := loadJsonConfig()
	consumer := doInitConsumer(cfg)
	// The read loop below never exits normally, so the original trailing
	// consumer.Close() was unreachable (go vet flags it); defer it instead.
	defer consumer.Close()

	// The original ignored the SubscribeTopics error; without a subscription
	// the read loop below would block forever, so fail fast.
	if err := consumer.SubscribeTopics([]string{cfg.Topic}, nil); err != nil {
		panic(err)
	}

	for {
		msg, err := consumer.ReadMessage(-1)
		if err != nil {
			// The client will automatically try to recover from all errors.
			fmt.Printf("Consumer error: %v (%v)\n", err, msg)
			continue
		}
		fmt.Printf("Message on %s: %s\n", msg.TopicPartition, string(msg.Value))
	}
}