Documentation
¶
Index ¶
- type ChangeStreamOperation
- type KafkaSource
- func (k *KafkaSource) Connect(ctx context.Context) error
- func (k *KafkaSource) Disconnect() error
- func (m *KafkaSource) Info() string
- func (m *KafkaSource) Init(args SourceConfig) error
- func (k *KafkaSource) Key() (string, error)
- func (m *KafkaSource) LoadInitialData(ctx context.Context, wg *sync.WaitGroup) (<-chan *models.Job, error)
- func (k *KafkaSource) Name() string
- func (k *KafkaSource) Read(ctx context.Context, wg *sync.WaitGroup) (<-chan *models.Job, error)
- type MongoSource
- func (m *MongoSource) Connect(ctx context.Context) error
- func (m *MongoSource) Disconnect() error
- func (m *MongoSource) Info() string
- func (m *MongoSource) Init(args SourceConfig) error
- func (m *MongoSource) Key() (string, error)
- func (m *MongoSource) LoadInitialData(ctx context.Context, wg *sync.WaitGroup) (<-chan *models.Job, error)
- func (m *MongoSource) Name() string
- func (m *MongoSource) Read(ctx context.Context, wg *sync.WaitGroup) (<-chan *models.Job, error)
- type SourceConfig
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type ChangeStreamOperation ¶
type ChangeStreamOperation struct {
ResumeToken map[string]interface{} `bson:"_id" json:"_id"`
ResumeTokenString string `bson:"_id._data" json:"_id._data"`
DocumentId map[string]interface{} `bson:"documentKey" json:"documentKey"`
DocumentIdString string `bson:"documentKey._id" json:"documentKey._id"`
OperationType string `bson:"operationType" json:"operationType"`
FullDocument map[string]interface{} `bson:"fullDocument" json:"fullDocument"`
WallTime time.Time `bson:"wallTime" json:"wallTime"`
Ns struct {
Db string `bson:"db" json:"db"`
Coll string `bson:"coll" json:"coll"`
} `bson:"ns" json:"ns"`
UpdateDescription struct {
UpdatedFields map[string]interface{} `bson:"updatedFields" json:"updatedFields"`
RemovedFields []interface{} `bson:"removedFields" json:"removedFields"`
TruncatedArrays []interface{} `bson:"truncatedArrays" json:"truncatedArrays"`
} `bson:"updateDescription" json:"updateDescription"`
ClusterTime primitive.Timestamp `json:"clusterTime" bson:"clusterTime"`
}
type KafkaSource ¶
type KafkaSource struct {
// contains filtered or unexported fields
}
func (*KafkaSource) Disconnect ¶
func (k *KafkaSource) Disconnect() error
func (*KafkaSource) Info ¶
func (m *KafkaSource) Info() string
func (*KafkaSource) Init ¶
func (m *KafkaSource) Init(args SourceConfig) error
func (*KafkaSource) Key ¶
func (k *KafkaSource) Key() (string, error)
func (*KafkaSource) LoadInitialData ¶
func (*KafkaSource) Name ¶
func (k *KafkaSource) Name() string
type MongoSource ¶
type MongoSource struct {
// contains filtered or unexported fields
}
func NewMongoSource ¶
func NewMongoSource() *MongoSource
NewMongoSource returns a new instance of MongoSource
func (*MongoSource) Disconnect ¶
func (m *MongoSource) Disconnect() error
func (*MongoSource) Info ¶
func (m *MongoSource) Info() string
func (*MongoSource) Init ¶
func (m *MongoSource) Init(args SourceConfig) error
func (*MongoSource) Key ¶
func (m *MongoSource) Key() (string, error)
func (*MongoSource) LoadInitialData ¶
func (m *MongoSource) LoadInitialData(ctx context.Context, wg *sync.WaitGroup) (<-chan *models.Job, error)
As of now this function is not optimized to handle large amounts of data; do not use it for huge datasets, as it holds the initially loaded data in memory.
func (*MongoSource) Name ¶
func (m *MongoSource) Name() string
Click to show internal directories.
Click to hide internal directories.