kafka

package module

README

Kafka Extension

The Kafka extension provides a production-ready Apache Kafka client with support for producers, consumers, and consumer groups.

Features

  • Producer Support: Synchronous and asynchronous message publishing
  • Consumer Support: Simple consumers and consumer groups
  • Admin Operations: Topic management and metadata queries
  • TLS/SSL: Secure connections with mTLS support
  • SASL Authentication: Multiple SASL mechanisms (PLAIN, SCRAM-SHA-256, SCRAM-SHA-512)
  • Compression: Support for gzip, snappy, lz4, and zstd
  • Idempotent Producer: Exactly-once semantics support
  • Consumer Groups: Automatic partition rebalancing
  • Metrics & Tracing: Built-in observability

Installation

go get github.com/xraph/forge/extensions/kafka

The extension builds on github.com/IBM/sarama for the Kafka protocol and github.com/xdg-go/scram for SCRAM authentication; Go modules pull these dependencies in automatically.

Basic Usage

package main

import (
    "context"
    "log"

    "github.com/IBM/sarama"
    "github.com/xraph/forge"
    "github.com/xraph/forge/extensions/kafka"
)

func main() {
    app := forge.New("my-app")

    // Add the Kafka extension
    app.AddExtension(kafka.NewExtension(
        kafka.WithBrokers("localhost:9092"),
        kafka.WithClientID("my-app"),
        kafka.WithProducer(true),
        kafka.WithConsumer(true),
    ))

    // Start the application
    if err := app.Start(context.Background()); err != nil {
        log.Fatal(err)
    }

    // Get the Kafka client from the DI container
    var client kafka.Kafka
    app.Container().Resolve(&client)

    // Produce a message
    if err := client.SendMessage("my-topic", []byte("key"), []byte("value")); err != nil {
        log.Fatal(err)
    }

    // Consume messages (the handler receives sarama's ConsumerMessage type)
    err := client.Consume(context.Background(), []string{"my-topic"}, func(msg *sarama.ConsumerMessage) error {
        log.Printf("Received: %s", string(msg.Value))
        return nil
    })
    if err != nil {
        log.Fatal(err)
    }

    app.Wait()
}

Configuration

YAML Configuration
kafka:
  brokers:
    - localhost:9092
    - localhost:9093
  client_id: my-app
  version: "3.0.0"
  
  # Producer settings
  producer_enabled: true
  producer_compression: snappy
  producer_idempotent: true
  producer_acks: all
  
  # Consumer settings
  consumer_enabled: true
  consumer_group_id: my-group
  consumer_offsets: newest
  consumer_group_rebalance: sticky
  
  # TLS settings
  enable_tls: true
  tls_cert_file: /path/to/cert.pem
  tls_key_file: /path/to/key.pem
  tls_ca_file: /path/to/ca.pem
  
  # SASL settings
  enable_sasl: true
  sasl_mechanism: SCRAM-SHA-512
  sasl_username: user
  sasl_password: pass
Programmatic Configuration
app.AddExtension(kafka.NewExtension(
    kafka.WithBrokers("localhost:9092"),
    kafka.WithVersion("3.0.0"),
    kafka.WithTLS("cert.pem", "key.pem", "ca.pem", false),
    kafka.WithSASL("SCRAM-SHA-512", "user", "pass"),
    kafka.WithCompression("snappy"),
    kafka.WithIdempotent(true),
    kafka.WithConsumerGroup("my-group"),
))

Producer Examples

Synchronous Publishing
err := client.SendMessage(
    "my-topic",
    []byte("key"),
    []byte("message-value"),
)
Asynchronous Publishing
err := client.SendMessageAsync(
    "my-topic",
    []byte("key"),
    []byte("message-value"),
)
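Because SendMessageAsync returns as soon as the message is enqueued, delivery failures surface later on the underlying sarama async producer's error channel. A minimal sketch for draining it, assuming the producer is configured to report errors (sarama's Producer.Return.Errors):

go func() {
    // perr is a *sarama.ProducerError carrying the failed message and the cause.
    for perr := range client.GetAsyncProducer().Errors() {
        log.Printf("async send failed (topic=%s): %v", perr.Msg.Topic, perr.Err)
    }
}()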
Batch Publishing
messages := []*kafka.ProducerMessage{
    {Topic: "topic1", Key: []byte("k1"), Value: []byte("v1")},
    {Topic: "topic2", Key: []byte("k2"), Value: []byte("v2")},
}

err := client.SendMessages(messages)

Consumer Examples

Simple Consumer
err := client.Consume(ctx, []string{"my-topic"}, func(msg *sarama.ConsumerMessage) error {
    log.Printf("Topic: %s, Partition: %d, Offset: %d, Value: %s",
        msg.Topic, msg.Partition, msg.Offset, string(msg.Value))
    return nil
})
Partition Consumer
err := client.ConsumePartition(
    ctx,
    "my-topic",
    0,  // partition
    sarama.OffsetNewest,
    func(msg *sarama.ConsumerMessage) error {
        // Process message
        return nil
    },
)
Consumer Group
type MyHandler struct{}

func (h *MyHandler) Setup(sarama.ConsumerGroupSession) error {
    return nil
}

func (h *MyHandler) Cleanup(sarama.ConsumerGroupSession) error {
    return nil
}

func (h *MyHandler) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
    for msg := range claim.Messages() {
        log.Printf("Message: %s", string(msg.Value))
        session.MarkMessage(msg, "")
    }
    return nil
}

// Join consumer group
err := client.JoinConsumerGroup(ctx, "my-group", []string{"my-topic"}, &MyHandler{})
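Depending on the implementation, JoinConsumerGroup may block while the group session runs. A hedged sketch that runs the session in the background and leaves the group on shutdown:

// Run the group session in the background.
go func() {
    if err := client.JoinConsumerGroup(ctx, "my-group", []string{"my-topic"}, &MyHandler{}); err != nil {
        log.Printf("consumer group error: %v", err)
    }
}()

// ... on shutdown, leave the group explicitly.
if err := client.LeaveConsumerGroup(context.Background()); err != nil {
    log.Printf("leave group failed: %v", err)
}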

Admin Operations

Create Topic
err := client.CreateTopic("my-topic", kafka.TopicConfig{
    NumPartitions:     3,
    ReplicationFactor: 2,
})
List Topics
topics, err := client.ListTopics()
Describe Topic
metadata, err := client.DescribeTopic("my-topic")
log.Printf("Partitions: %d", len(metadata.Partitions))
Delete Topic
err := client.DeleteTopic("my-topic")

Security

TLS Configuration
app.AddExtension(kafka.NewExtension(
    kafka.WithTLS(
        "/path/to/client-cert.pem",
        "/path/to/client-key.pem",
        "/path/to/ca-cert.pem",
        false, // skipVerify
    ),
))
SASL/PLAIN
app.AddExtension(kafka.NewExtension(
    kafka.WithSASL("PLAIN", "username", "password"),
))
SASL/SCRAM
app.AddExtension(kafka.NewExtension(
    kafka.WithSASL("SCRAM-SHA-512", "username", "password"),
))

Observability

Metrics

The extension automatically tracks:

  • kafka.messages.sent - Messages sent counter
  • kafka.messages.received - Messages received counter
  • kafka.bytes.sent - Bytes sent gauge
  • kafka.bytes.received - Bytes received gauge
Client Statistics
stats := client.GetStats()
log.Printf("Messages sent: %d", stats.MessagesSent)
log.Printf("Messages received: %d", stats.MessagesReceived)
log.Printf("Errors: %d", stats.Errors)

Best Practices

  1. Use Consumer Groups: For scalable consumption, prefer consumer groups over simple consumers
  2. Enable Idempotence: For exactly-once semantics, enable the idempotent producer
  3. Compression: Use compression (snappy or lz4) to improve throughput
  4. Batch Publishing: Use SendMessages() for batch operations
  5. Error Handling: Always check returned errors and implement retry logic
  6. Graceful Shutdown: Always call Close() on shutdown (see the sketch below)
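
A minimal shutdown sketch combining points 5 and 6, assuming the client was resolved as in Basic Usage:

// Stop any active consumer first; ErrNotConsuming just means there was none.
if err := client.StopConsume(); err != nil && !errors.Is(err, kafka.ErrNotConsuming) {
    log.Printf("stop consume: %v", err)
}
// Then close the client (always call Close on shutdown).
if err := client.Close(); err != nil {
    log.Printf("close kafka client: %v", err)
}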

Error Handling

if err := client.SendMessage(topic, key, value); err != nil {
    // Sentinel errors may be wrapped, so compare with errors.Is
    switch {
    case errors.Is(err, kafka.ErrProducerNotEnabled):
        log.Println("Producer is not enabled")
    case errors.Is(err, kafka.ErrConnectionFailed):
        log.Println("Connection failed, will retry")
    default:
        log.Printf("Send failed: %v", err)
    }
}

Testing

// Requires a Kafka broker reachable at localhost:9092.
func TestKafkaIntegration(t *testing.T) {
    app := forge.New("test-app")
    app.AddExtension(kafka.NewExtension(
        kafka.WithBrokers("localhost:9092"),
    ))
    
    ctx := context.Background()
    if err := app.Start(ctx); err != nil {
        t.Fatal(err)
    }
    defer app.Stop(ctx)
    
    var client kafka.Kafka
    app.Container().Resolve(&client)
    
    // Test producer
    err := client.SendMessage("test-topic", []byte("key"), []byte("value"))
    if err != nil {
        t.Fatalf("Send failed: %v", err)
    }
}

License

Part of the Forge framework.

Documentation

Constants

const (
	// ServiceKey is the DI key for the Kafka service.
	ServiceKey = "kafka"
)

DI container keys for Kafka extension services.

Variables

var (
	// ErrClientNotInitialized is returned when the client is not initialized
	ErrClientNotInitialized = errors.New("kafka: client not initialized")

	// ErrProducerNotEnabled is returned when producer operations are attempted but the producer is disabled
	ErrProducerNotEnabled = errors.New("kafka: producer not enabled")

	// ErrConsumerNotEnabled is returned when consumer operations are attempted but the consumer is disabled
	ErrConsumerNotEnabled = errors.New("kafka: consumer not enabled")

	// ErrAlreadyConsuming is returned when attempting to start consuming while already consuming
	ErrAlreadyConsuming = errors.New("kafka: already consuming")

	// ErrNotConsuming is returned when attempting to stop consuming while not consuming
	ErrNotConsuming = errors.New("kafka: not consuming")

	// ErrInConsumerGroup is returned when attempting consumer operations while already in a consumer group
	ErrInConsumerGroup = errors.New("kafka: already in consumer group")

	// ErrNotInConsumerGroup is returned when attempting to leave a consumer group while not in one
	ErrNotInConsumerGroup = errors.New("kafka: not in consumer group")

	// ErrSendFailed is returned when a message send fails
	ErrSendFailed = errors.New("kafka: send failed")

	// ErrConsumeFailed is returned when message consumption fails
	ErrConsumeFailed = errors.New("kafka: consume failed")

	// ErrTopicNotFound is returned when a topic doesn't exist
	ErrTopicNotFound = errors.New("kafka: topic not found")

	// ErrInvalidPartition is returned when a partition is invalid
	ErrInvalidPartition = errors.New("kafka: invalid partition")

	// ErrConnectionFailed is returned when the connection fails
	ErrConnectionFailed = errors.New("kafka: connection failed")

	// ErrClientClosed is returned when operations are attempted on a closed client
	ErrClientClosed = errors.New("kafka: client closed")
)

Functions

func NewExtension

func NewExtension(opts ...ConfigOption) forge.Extension

NewExtension creates a new Kafka extension

func NewExtensionWithConfig

func NewExtensionWithConfig(config Config) forge.Extension

NewExtensionWithConfig creates a new Kafka extension with a complete config
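A common pattern (a sketch, using fields from the Config struct documented below) is to start from DefaultConfig and override what you need:

cfg := kafka.DefaultConfig()
cfg.Brokers = []string{"localhost:9092"}
cfg.ClientID = "my-app"
cfg.ProducerEnabled = true
cfg.ProducerCompression = "snappy"
cfg.ConsumerEnabled = true
cfg.ConsumerGroupID = "my-group"

app.AddExtension(kafka.NewExtensionWithConfig(cfg))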

Types

type ClientStats

type ClientStats struct {
	Connected        bool
	ConnectTime      time.Time
	MessagesSent     int64
	MessagesReceived int64
	BytesSent        int64
	BytesReceived    int64
	Errors           int64
	LastError        error
	LastErrorTime    time.Time
	ActiveConsumers  int
	ActiveProducers  int
}

ClientStats contains client statistics

type Config

type Config struct {
	// Connection settings
	Brokers      []string      `json:"brokers" yaml:"brokers" mapstructure:"brokers"`
	ClientID     string        `json:"client_id" yaml:"client_id" mapstructure:"client_id"`
	Version      string        `json:"version" yaml:"version" mapstructure:"version"` // Kafka version (e.g., "3.0.0")
	DialTimeout  time.Duration `json:"dial_timeout" yaml:"dial_timeout" mapstructure:"dial_timeout"`
	ReadTimeout  time.Duration `json:"read_timeout" yaml:"read_timeout" mapstructure:"read_timeout"`
	WriteTimeout time.Duration `json:"write_timeout" yaml:"write_timeout" mapstructure:"write_timeout"`
	KeepAlive    time.Duration `json:"keep_alive" yaml:"keep_alive" mapstructure:"keep_alive"`

	// TLS/SASL
	EnableTLS     bool   `json:"enable_tls" yaml:"enable_tls" mapstructure:"enable_tls"`
	TLSCertFile   string `json:"tls_cert_file,omitempty" yaml:"tls_cert_file,omitempty" mapstructure:"tls_cert_file"`
	TLSKeyFile    string `json:"tls_key_file,omitempty" yaml:"tls_key_file,omitempty" mapstructure:"tls_key_file"`
	TLSCAFile     string `json:"tls_ca_file,omitempty" yaml:"tls_ca_file,omitempty" mapstructure:"tls_ca_file"`
	TLSSkipVerify bool   `json:"tls_skip_verify" yaml:"tls_skip_verify" mapstructure:"tls_skip_verify"`

	EnableSASL    bool   `json:"enable_sasl" yaml:"enable_sasl" mapstructure:"enable_sasl"`
	SASLMechanism string `json:"sasl_mechanism,omitempty" yaml:"sasl_mechanism,omitempty" mapstructure:"sasl_mechanism"` // PLAIN, SCRAM-SHA-256, SCRAM-SHA-512
	SASLUsername  string `json:"sasl_username,omitempty" yaml:"sasl_username,omitempty" mapstructure:"sasl_username"`
	SASLPassword  string `json:"sasl_password,omitempty" yaml:"sasl_password,omitempty" mapstructure:"sasl_password"`

	// Producer settings
	ProducerEnabled         bool          `json:"producer_enabled" yaml:"producer_enabled" mapstructure:"producer_enabled"`
	ProducerMaxMessageBytes int           `json:"producer_max_message_bytes" yaml:"producer_max_message_bytes" mapstructure:"producer_max_message_bytes"`
	ProducerCompression     string        `json:"producer_compression" yaml:"producer_compression" mapstructure:"producer_compression"` // none, gzip, snappy, lz4, zstd
	ProducerFlushMessages   int           `json:"producer_flush_messages" yaml:"producer_flush_messages" mapstructure:"producer_flush_messages"`
	ProducerFlushFrequency  time.Duration `json:"producer_flush_frequency" yaml:"producer_flush_frequency" mapstructure:"producer_flush_frequency"`
	ProducerRetryMax        int           `json:"producer_retry_max" yaml:"producer_retry_max" mapstructure:"producer_retry_max"`
	ProducerIdempotent      bool          `json:"producer_idempotent" yaml:"producer_idempotent" mapstructure:"producer_idempotent"`
	ProducerAcks            string        `json:"producer_acks" yaml:"producer_acks" mapstructure:"producer_acks"` // none, local, all

	// Consumer settings
	ConsumerEnabled   bool          `json:"consumer_enabled" yaml:"consumer_enabled" mapstructure:"consumer_enabled"`
	ConsumerGroupID   string        `json:"consumer_group_id,omitempty" yaml:"consumer_group_id,omitempty" mapstructure:"consumer_group_id"`
	ConsumerOffsets   string        `json:"consumer_offsets" yaml:"consumer_offsets" mapstructure:"consumer_offsets"` // newest, oldest
	ConsumerMaxWait   time.Duration `json:"consumer_max_wait" yaml:"consumer_max_wait" mapstructure:"consumer_max_wait"`
	ConsumerFetchMin  int32         `json:"consumer_fetch_min" yaml:"consumer_fetch_min" mapstructure:"consumer_fetch_min"`
	ConsumerFetchMax  int32         `json:"consumer_fetch_max" yaml:"consumer_fetch_max" mapstructure:"consumer_fetch_max"`
	ConsumerIsolation string        `json:"consumer_isolation" yaml:"consumer_isolation" mapstructure:"consumer_isolation"` // read_uncommitted, read_committed

	// Consumer group settings
	ConsumerGroupRebalance string        `json:"consumer_group_rebalance" yaml:"consumer_group_rebalance" mapstructure:"consumer_group_rebalance"` // range, roundrobin, sticky
	ConsumerGroupSession   time.Duration `json:"consumer_group_session" yaml:"consumer_group_session" mapstructure:"consumer_group_session"`
	ConsumerGroupHeartbeat time.Duration `json:"consumer_group_heartbeat" yaml:"consumer_group_heartbeat" mapstructure:"consumer_group_heartbeat"`

	// Metadata settings
	MetadataRetryMax     int           `json:"metadata_retry_max" yaml:"metadata_retry_max" mapstructure:"metadata_retry_max"`
	MetadataRetryBackoff time.Duration `json:"metadata_retry_backoff" yaml:"metadata_retry_backoff" mapstructure:"metadata_retry_backoff"`
	MetadataRefreshFreq  time.Duration `json:"metadata_refresh_freq" yaml:"metadata_refresh_freq" mapstructure:"metadata_refresh_freq"`
	MetadataFullRefresh  bool          `json:"metadata_full_refresh" yaml:"metadata_full_refresh" mapstructure:"metadata_full_refresh"`

	// Observability
	EnableMetrics bool `json:"enable_metrics" yaml:"enable_metrics" mapstructure:"enable_metrics"`
	EnableTracing bool `json:"enable_tracing" yaml:"enable_tracing" mapstructure:"enable_tracing"`
	EnableLogging bool `json:"enable_logging" yaml:"enable_logging" mapstructure:"enable_logging"`

	// Config loading flags
	RequireConfig bool `json:"-" yaml:"-" mapstructure:"-"`
}

Config contains configuration for the Kafka extension

func DefaultConfig

func DefaultConfig() Config

DefaultConfig returns default Kafka configuration

func (*Config) ToSaramaConfig

func (c *Config) ToSaramaConfig() (*sarama.Config, error)

ToSaramaConfig converts to Sarama configuration
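This is handy when you need a raw sarama component alongside the extension. A sketch, assuming you want a standalone sync producer built from the same settings:

saramaCfg, err := cfg.ToSaramaConfig()
if err != nil {
    log.Fatal(err)
}
// Reuse the derived settings with sarama directly.
producer, err := sarama.NewSyncProducer(cfg.Brokers, saramaCfg)
if err != nil {
    log.Fatal(err)
}
defer producer.Close()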

func (*Config) Validate

func (c *Config) Validate() error

Validate validates the configuration

type ConfigOption

type ConfigOption func(*Config)

ConfigOption is a functional option for Config

func WithBrokers

func WithBrokers(brokers ...string) ConfigOption

func WithClientID

func WithClientID(clientID string) ConfigOption

func WithCompression

func WithCompression(compression string) ConfigOption

func WithConfig

func WithConfig(config Config) ConfigOption

func WithConsumer

func WithConsumer(enabled bool) ConfigOption

func WithConsumerGroup

func WithConsumerGroup(groupID string) ConfigOption

func WithIdempotent

func WithIdempotent(enabled bool) ConfigOption

func WithMetrics

func WithMetrics(enable bool) ConfigOption

func WithProducer

func WithProducer(enabled bool) ConfigOption

func WithRequireConfig

func WithRequireConfig(require bool) ConfigOption

func WithSASL

func WithSASL(mechanism, username, password string) ConfigOption

func WithTLS

func WithTLS(certFile, keyFile, caFile string, skipVerify bool) ConfigOption

func WithTracing

func WithTracing(enable bool) ConfigOption

func WithVersion

func WithVersion(version string) ConfigOption

type ConsumerGroupHandler

type ConsumerGroupHandler interface {
	Setup(sarama.ConsumerGroupSession) error
	Cleanup(sarama.ConsumerGroupSession) error
	ConsumeClaim(sarama.ConsumerGroupSession, sarama.ConsumerGroupClaim) error
}

ConsumerGroupHandler handles consumer group messages

type Extension

type Extension struct {
	*forge.BaseExtension
	// contains filtered or unexported fields
}

Extension implements forge.Extension for Kafka functionality. The extension is a lightweight facade that loads configuration and registers services.

func (*Extension) Health

func (e *Extension) Health(ctx context.Context) error

Health checks the extension health. Service health is managed by Vessel through KafkaService.Health().

func (*Extension) Register

func (e *Extension) Register(app forge.App) error

Register registers the Kafka extension with the app

func (*Extension) Start

func (e *Extension) Start(ctx context.Context) error

Start marks the extension as started. The actual client is started by Vessel calling KafkaService.Start().

func (*Extension) Stop

func (e *Extension) Stop(ctx context.Context) error

Stop marks the extension as stopped. The actual client is stopped by Vessel calling KafkaService.Stop().

type HashGeneratorFcn

type HashGeneratorFcn func() hash.Hash

HashGeneratorFcn is a function type for generating hash functions

var (
	// SHA256 is a hash generator for SHA-256
	SHA256 HashGeneratorFcn = sha256.New

	// SHA512 is a hash generator for SHA-512
	SHA512 HashGeneratorFcn = sha512.New
)

type Kafka

type Kafka interface {
	// Producer operations
	SendMessage(topic string, key, value []byte) error
	SendMessageAsync(topic string, key, value []byte) error
	SendMessages(messages []*ProducerMessage) error

	// Consumer operations
	Consume(ctx context.Context, topics []string, handler MessageHandler) error
	ConsumePartition(ctx context.Context, topic string, partition int32, offset int64, handler MessageHandler) error
	StopConsume() error

	// Consumer group operations
	JoinConsumerGroup(ctx context.Context, groupID string, topics []string, handler ConsumerGroupHandler) error
	LeaveConsumerGroup(ctx context.Context) error

	// Admin operations
	CreateTopic(topic string, config TopicConfig) error
	DeleteTopic(topic string) error
	ListTopics() ([]string, error)
	DescribeTopic(topic string) (*TopicMetadata, error)
	GetPartitions(topic string) ([]int32, error)
	GetOffset(topic string, partition int32, time int64) (int64, error)

	// Client info
	GetProducer() sarama.SyncProducer
	GetAsyncProducer() sarama.AsyncProducer
	GetConsumer() sarama.Consumer
	GetConsumerGroup() sarama.ConsumerGroup
	GetClient() sarama.Client
	GetStats() ClientStats

	// Lifecycle
	Close() error

	// Health
	Ping(ctx context.Context) error
}

Kafka represents a unified Kafka client interface
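Ping and GetStats combine into a simple liveness probe; a minimal sketch:

ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()

if err := client.Ping(ctx); err != nil {
    log.Printf("kafka unreachable: %v", err)
    return
}
stats := client.GetStats()
log.Printf("connected=%v sent=%d received=%d errors=%d",
    stats.Connected, stats.MessagesSent, stats.MessagesReceived, stats.Errors)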

func NewKafkaClient

func NewKafkaClient(config Config, logger forge.Logger, metrics forge.Metrics) (Kafka, error)

NewKafkaClient creates a new Kafka client

type KafkaService

type KafkaService struct {
	// contains filtered or unexported fields
}

KafkaService wraps a Kafka client and provides lifecycle management. It implements Vessel's di.Service interface so Vessel can manage its lifecycle.

func NewKafkaService

func NewKafkaService(config Config, logger forge.Logger, metrics forge.Metrics) (*KafkaService, error)

NewKafkaService creates a new Kafka service with the given configuration. This is the constructor that will be registered with the DI container.

func (*KafkaService) Client

func (s *KafkaService) Client() Kafka

Client returns the underlying Kafka client.
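If the container also exposes the concrete service (an assumption; the examples above resolve the kafka.Kafka interface), you can reach both the lifecycle wrapper and the raw client:

var svc *kafka.KafkaService
app.Container().Resolve(&svc) // assumes *KafkaService is registered and resolvable

client := svc.Client() // underlying client for producer/consumer/admin calls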

func (*KafkaService) Close

func (s *KafkaService) Close() error

func (*KafkaService) Consume

func (s *KafkaService) Consume(ctx context.Context, topics []string, handler MessageHandler, opts ...ConsumeOption) error

func (*KafkaService) CreateTopic

func (s *KafkaService) CreateTopic(ctx context.Context, topic string, partitions int, replicationFactor int16) error

func (*KafkaService) DeleteTopic

func (s *KafkaService) DeleteTopic(ctx context.Context, topic string) error

func (*KafkaService) GetTopicMetadata

func (s *KafkaService) GetTopicMetadata(ctx context.Context, topic string) (*TopicMetadata, error)

func (*KafkaService) Health

func (s *KafkaService) Health(ctx context.Context) error

Health checks if the Kafka service is healthy.

func (*KafkaService) ListTopics

func (s *KafkaService) ListTopics(ctx context.Context) ([]string, error)

func (*KafkaService) Name

func (s *KafkaService) Name() string

Name returns the service name for Vessel's lifecycle management.

func (*KafkaService) Ping

func (s *KafkaService) Ping(ctx context.Context) error

func (*KafkaService) Produce

func (s *KafkaService) Produce(ctx context.Context, message Message) error

func (*KafkaService) ProduceBatch

func (s *KafkaService) ProduceBatch(ctx context.Context, messages []Message) error

func (*KafkaService) Start

func (s *KafkaService) Start(ctx context.Context) error

Start starts the Kafka service by verifying connection. This is called automatically by Vessel during container.Start().

func (*KafkaService) Stop

func (s *KafkaService) Stop(ctx context.Context) error

Stop stops the Kafka service by closing the client. This is called automatically by Vessel during container.Stop().

func (*KafkaService) StopConsuming

func (s *KafkaService) StopConsuming() error

type MessageHandler

type MessageHandler func(message *sarama.ConsumerMessage) error

MessageHandler processes incoming Kafka messages

type MessageHeader

type MessageHeader struct {
	Key   string
	Value []byte
}

MessageHeader represents a Kafka message header

type PartitionMetadata

type PartitionMetadata struct {
	ID       int32
	Leader   int32
	Replicas []int32
	Isr      []int32
}

PartitionMetadata contains partition metadata

type ProducerMessage

type ProducerMessage struct {
	Topic     string
	Key       []byte
	Value     []byte
	Headers   []MessageHeader
	Partition int32
	Offset    int64
	Timestamp time.Time
}

ProducerMessage represents a message to be produced

type TopicConfig

type TopicConfig struct {
	NumPartitions     int32
	ReplicationFactor int16
	ConfigEntries     map[string]*string
}

TopicConfig contains topic configuration
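ConfigEntries accepts standard Kafka topic configs as string pointers. A sketch setting a 24-hour retention via the standard retention.ms topic config:

retention := "86400000" // 24h in milliseconds
err := client.CreateTopic("events", kafka.TopicConfig{
    NumPartitions:     6,
    ReplicationFactor: 3,
    ConfigEntries: map[string]*string{
        "retention.ms": &retention,
    },
})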

type TopicMetadata

type TopicMetadata struct {
	Name       string
	Partitions []PartitionMetadata
	Config     map[string]string
}

TopicMetadata contains topic metadata

type XDGSCRAMClient

type XDGSCRAMClient struct {
	*scram.Client
	*scram.ClientConversation
	HashGeneratorFcn scram.HashGeneratorFcn
}

XDGSCRAMClient implements the SCRAM client interface

func (*XDGSCRAMClient) Begin

func (x *XDGSCRAMClient) Begin(userName, password, authzID string) (err error)

Begin starts the SCRAM authentication process

func (*XDGSCRAMClient) Done

func (x *XDGSCRAMClient) Done() bool

Done returns true if the authentication is complete

func (*XDGSCRAMClient) Step

func (x *XDGSCRAMClient) Step(challenge string) (response string, err error)

Step processes one step of the SCRAM authentication
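The extension wires this client into sarama internally when SASL/SCRAM is enabled; the sketch below only illustrates where the type fits if you assemble a raw sarama.Config yourself (scram.SHA512 comes from github.com/xdg-go/scram):

cfg := sarama.NewConfig()
cfg.Net.SASL.Enable = true
cfg.Net.SASL.Mechanism = sarama.SASLTypeSCRAMSHA512
cfg.Net.SASL.User = "user"
cfg.Net.SASL.Password = "pass"
cfg.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient {
    return &kafka.XDGSCRAMClient{HashGeneratorFcn: scram.SHA512}
}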
