Documentation
¶
Index ¶
- Constants
- func LoadJobRunData(ctx context.Context, jobRunData *JobRunDataEvent) error
- func LoadJobRunDataCloudEvent(ctx context.Context, e event.Event) error
- func UnMarshalJSON(jsonb []byte, result interface{}) error
- type BigQueryDataItem
- type BigQueryLoader
- func (b *BigQueryLoader) FinalizeStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance)
- func (b *BigQueryLoader) FindExistingData(ctx context.Context, partitionTime time.Time, ...) (bool, error)
- func (b *BigQueryLoader) GetMetaData(ctx context.Context, di DataInstance) (bigquery.TableMetadata, error)
- func (b *BigQueryLoader) Init(ctx context.Context) error
- func (b *BigQueryLoader) InitializeStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance) error
- func (b *BigQueryLoader) LoadComplexDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
- func (b *BigQueryLoader) LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
- func (b *BigQueryLoader) LoadStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance, diRows []interface{}) error
- func (b *BigQueryLoader) ValidateTable(ctx context.Context, dataInstance DataInstance) error
- type BigQueryTableCache
- type ClientError
- type ClientsCache
- type ComplexInterval
- type DataFile
- type DataInstance
- type DataLoader
- type DataType
- type DataUploader
- type JobRunDataEvent
- type KeyValue
- type Locator
- type Message
- type OutputMetric
- type PrometheusData
- type PrometheusLabels
- type PrometheusMetric
- type PrometheusResult
- type PrometheusValue
- type SimpleUploader
- type StorageObjectData
Constants ¶
View Source
const (
	AutoDataLoaderSuffix  = "autodl.json"
	DataSetEnv            = "DATASET_ID"
	ProjectIdEnv          = "PROJECT_ID"
	GCSCredentialsFileEnv = "GCS_CREDENTIALS_FILE" // local testing only
	PRDataFiles           = "PR_DATA_FILES"
	MatchDataFiles        = "MATCH_DATA_FILES"
)
View Source
const DataPartitioningField = "PartitionTime"
View Source
const JobRunNameField = "JobRunName"
View Source
const SourceNameField = "Source"
Variables ¶
This section is empty.
Functions ¶
func LoadJobRunData ¶
func LoadJobRunData(ctx context.Context, jobRunData *JobRunDataEvent) error
func LoadJobRunDataCloudEvent ¶
LoadJobRunDataCloudEvent is the CloudEvent handler for gen2 functions.
func UnMarshalJSON ¶
Types ¶
type BigQueryDataItem ¶
type BigQueryDataItem struct {
Instance *DataInstance
Row map[string]string
InsertID string
}
type BigQueryLoader ¶
type BigQueryLoader struct {
ProjectID string
DataSetID string
Client *bigquery.Client
DryRun bool
// contains filtered or unexported fields
}
func (*BigQueryLoader) FinalizeStreamingComplexDataItems ¶
func (b *BigQueryLoader) FinalizeStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance)
func (*BigQueryLoader) FindExistingData ¶
func (*BigQueryLoader) GetMetaData ¶
func (b *BigQueryLoader) GetMetaData(ctx context.Context, di DataInstance) (bigquery.TableMetadata, error)
func (*BigQueryLoader) InitializeStreamingComplexDataItems ¶
func (b *BigQueryLoader) InitializeStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance) error
func (*BigQueryLoader) LoadComplexDataItems ¶
func (b *BigQueryLoader) LoadComplexDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
func (*BigQueryLoader) LoadDataItems ¶
func (b *BigQueryLoader) LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
func (*BigQueryLoader) LoadStreamingComplexDataItems ¶
func (b *BigQueryLoader) LoadStreamingComplexDataItems(ctx context.Context, dataInstance DataInstance, diRows []interface{}) error
func (*BigQueryLoader) ValidateTable ¶
func (b *BigQueryLoader) ValidateTable(ctx context.Context, dataInstance DataInstance) error
type BigQueryTableCache ¶
type BigQueryTableCache struct {
// contains filtered or unexported fields
}
type ClientError ¶
func (*ClientError) Error ¶
func (c *ClientError) Error() string
type ClientsCache ¶
type ClientsCache struct {
// contains filtered or unexported fields
}
type ComplexInterval ¶
type ComplexInterval struct {
Locator Locator `json:"locator" bigquery:"locator"`
Message Message `json:"message" bigquery:"message"`
From time.Time `json:"from_time" bigquery:"from_time"`
To time.Time `json:"to_time" bigquery:"to_time"`
IntervalSource string `json:"interval_source" bigquery:"interval_source"`
JobRunName string `json:"JobRunName" bigquery:"JobRunName"`
Source string `json:"source" bigquery:"source"`
}
type DataFile ¶
type DataFile struct {
TableName string `json:"table_name"`
Schema map[string]DataType `json:"schema"`
SchemaMapping map[string]string `json:"schema_mapping"`
Rows []map[string]string `json:"rows"`
ComplexRows []interface{} `json:"complex_rows"`
ExpirationDays int `json:"expiration_days"`
PartitionColumn string `json:"partition_column"`
PartitionType string `json:"partition_type"`
ChunkSize int `json:"chunk_size"`
}
type DataInstance ¶
type DataLoader ¶
type DataLoader interface {
ValidateTable(ctx context.Context, dataInstance DataInstance) error
FindExistingData(ctx context.Context, partitionTime time.Time, partitionColumn, tableName, jobRunName, source string) (bool, error)
LoadDataItems(ctx context.Context, dataInstance DataInstance) ([]interface{}, error)
}
type DataUploader ¶
type DataUploader struct {
// contains filtered or unexported fields
}
type JobRunDataEvent ¶
type JobRunDataEvent struct {
Job string
BuildID string
Filename string
Event *event.Event
Name string
Bucket string
TimeCreated time.Time
}
JobRunDataEvent contains the relevant event attributes.
type Locator ¶
type Locator struct {
Type string `json:"type" bigquery:"type"`
Hmsg string `json:"hmsg" bigquery:"hmsg"`
Namespace string `json:"namespace" bigquery:"namespace"`
Node string `json:"node" bigquery:"node"`
Pod string `json:"pod" bigquery:"pod"`
Uid string `json:"uid" bigquery:"uid"`
Container string `json:"container" bigquery:"container"`
E2eTest string `json:"e2eTest" bigquery:"e2eTest"`
BackendDisruptionName string `json:"backend_disruption_name" bigquery:"backend_disruption_name"`
Keys []KeyValue `json:"keys" bigquery:"keys"`
}
type Message ¶
type Message struct {
Reason string `json:"reason" bigquery:"reason"`
Cause string `json:"cause" bigquery:"cause"`
HumanMessage string `json:"human_message" bigquery:"human_message"`
Container string `json:"container" bigquery:"container"`
FirstTimestamp time.Time `json:"firstimestamp" bigquery:"firstTimestamp"`
LastTimestamp time.Time `json:"lastimestamp" bigquery:"lastTimestamp"`
Image string `json:"image" bigquery:"image"`
Constructed string `json:"constructed" bigquery:"constructed"`
Status string `json:"status" bigquery:"status"`
Node string `json:"node" bigquery:"node"`
Annotations []KeyValue `json:"annotations" bigquery:"annotations"`
}
type OutputMetric ¶
type PrometheusData ¶
type PrometheusData struct {
ResultType string `json:"resultType"`
Result []PrometheusMetric `json:"result"`
}
type PrometheusLabels ¶
PrometheusLabels avoids deserialization allocations.
func (PrometheusLabels) MarshalJSON ¶
func (l PrometheusLabels) MarshalJSON() ([]byte, error)
func (*PrometheusLabels) UnmarshalJSON ¶
func (l *PrometheusLabels) UnmarshalJSON(data []byte) error
type PrometheusMetric ¶
type PrometheusMetric struct {
Metric PrometheusLabels `json:"metric"`
Value PrometheusValue `json:"value"`
}
type PrometheusResult ¶
type PrometheusResult struct {
Status string `json:"status"`
Data PrometheusData `json:"data"`
}
type PrometheusValue ¶
func (*PrometheusValue) UnmarshalJSON ¶
func (l *PrometheusValue) UnmarshalJSON(data []byte) error
type SimpleUploader ¶
type SimpleUploader interface {
// contains filtered or unexported methods
}
type StorageObjectData ¶
type StorageObjectData struct {
Bucket string `json:"bucket"`
Name string `json:"name"`
Size int64 `json:"size,string"`
ContentType string `json:"contentType"`
TimeCreated time.Time `json:"timeCreated"`
Updated time.Time `json:"updated"`
MD5Hash string `json:"md5Hash"`
Metadata map[string]string `json:"metadata"`
}
StorageObjectData contains metadata about a Cloud Storage object.
Click to show internal directories.
Click to hide internal directories.