Documentation ¶
Overview ¶
These APIs allow you to manage Alerts, Dashboards, Data Sources, Dbsql Permissions, Queries, Query History, Warehouses, etc.
Index ¶
- type AccessControl
- type Alert
- type AlertOptions
- type AlertState
- type AlertsAPI
- func (a *AlertsAPI) AlertNameToIdMap(ctx context.Context) (map[string]string, error)
- func (a *AlertsAPI) Create(ctx context.Context, request EditAlert) (*Alert, error)
- func (a *AlertsAPI) CreateSchedule(ctx context.Context, request CreateRefreshSchedule) (*RefreshSchedule, error)
- func (a *AlertsAPI) Delete(ctx context.Context, request DeleteAlertRequest) error
- func (a *AlertsAPI) DeleteByAlertId(ctx context.Context, alertId string) error
- func (a *AlertsAPI) DeleteSchedule(ctx context.Context, request DeleteScheduleRequest) error
- func (a *AlertsAPI) DeleteScheduleByAlertIdAndScheduleId(ctx context.Context, alertId string, scheduleId string) error
- func (a *AlertsAPI) Get(ctx context.Context, request GetAlertRequest) (*Alert, error)
- func (a *AlertsAPI) GetByAlertId(ctx context.Context, alertId string) (*Alert, error)
- func (a *AlertsAPI) GetByName(ctx context.Context, name string) (*Alert, error)
- func (a *AlertsAPI) GetSubscriptions(ctx context.Context, request GetSubscriptionsRequest) ([]Subscription, error)
- func (a *AlertsAPI) GetSubscriptionsByAlertId(ctx context.Context, alertId string) ([]Subscription, error)
- func (a *AlertsAPI) Impl() AlertsService
- func (a *AlertsAPI) List(ctx context.Context) ([]Alert, error)
- func (a *AlertsAPI) ListSchedules(ctx context.Context, request ListSchedulesRequest) ([]RefreshSchedule, error)
- func (a *AlertsAPI) ListSchedulesByAlertId(ctx context.Context, alertId string) ([]RefreshSchedule, error)
- func (a *AlertsAPI) Subscribe(ctx context.Context, request CreateSubscription) (*Subscription, error)
- func (a *AlertsAPI) Unsubscribe(ctx context.Context, request UnsubscribeRequest) error
- func (a *AlertsAPI) UnsubscribeByAlertIdAndSubscriptionId(ctx context.Context, alertId string, subscriptionId string) error
- func (a *AlertsAPI) Update(ctx context.Context, request EditAlert) error
- func (a *AlertsAPI) WithImpl(impl AlertsService) *AlertsAPI
- type AlertsService
- type Channel
- type ChannelInfo
- type ChannelName
- type CreateDashboardRequest
- type CreateRefreshSchedule
- type CreateSubscription
- type CreateWarehouseRequest
- type CreateWarehouseResponse
- type Dashboard
- type DashboardOptions
- type DashboardsAPI
- func (a *DashboardsAPI) Create(ctx context.Context, request CreateDashboardRequest) (*Dashboard, error)
- func (a *DashboardsAPI) DashboardNameToIdMap(ctx context.Context, request ListDashboardsRequest) (map[string]string, error)
- func (a *DashboardsAPI) Delete(ctx context.Context, request DeleteDashboardRequest) error
- func (a *DashboardsAPI) DeleteByDashboardId(ctx context.Context, dashboardId string) error
- func (a *DashboardsAPI) Get(ctx context.Context, request GetDashboardRequest) (*Dashboard, error)
- func (a *DashboardsAPI) GetByDashboardId(ctx context.Context, dashboardId string) (*Dashboard, error)
- func (a *DashboardsAPI) GetByName(ctx context.Context, name string) (*Dashboard, error)
- func (a *DashboardsAPI) Impl() DashboardsService
- func (a *DashboardsAPI) ListAll(ctx context.Context, request ListDashboardsRequest) ([]Dashboard, error)
- func (a *DashboardsAPI) Restore(ctx context.Context, request RestoreDashboardRequest) error
- func (a *DashboardsAPI) WithImpl(impl DashboardsService) *DashboardsAPI
- type DashboardsService
- type DataSource
- type DataSourcesAPI
- func (a *DataSourcesAPI) DataSourceNameToIdMap(ctx context.Context) (map[string]string, error)
- func (a *DataSourcesAPI) GetByName(ctx context.Context, name string) (*DataSource, error)
- func (a *DataSourcesAPI) Impl() DataSourcesService
- func (a *DataSourcesAPI) List(ctx context.Context) ([]DataSource, error)
- func (a *DataSourcesAPI) WithImpl(impl DataSourcesService) *DataSourcesAPI
- type DataSourcesService
- type DbsqlPermissionsAPI
- func (a *DbsqlPermissionsAPI) Get(ctx context.Context, request GetDbsqlPermissionRequest) (*GetResponse, error)
- func (a *DbsqlPermissionsAPI) GetByObjectTypeAndObjectId(ctx context.Context, objectType ObjectTypePlural, objectId string) (*GetResponse, error)
- func (a *DbsqlPermissionsAPI) Impl() DbsqlPermissionsService
- func (a *DbsqlPermissionsAPI) Set(ctx context.Context, request SetRequest) (*SetResponse, error)
- func (a *DbsqlPermissionsAPI) TransferOwnership(ctx context.Context, request TransferOwnershipRequest) (*Success, error)
- func (a *DbsqlPermissionsAPI) WithImpl(impl DbsqlPermissionsService) *DbsqlPermissionsAPI
- type DbsqlPermissionsService
- type DeleteAlertRequest
- type DeleteDashboardRequest
- type DeleteQueryRequest
- type DeleteScheduleRequest
- type DeleteWarehouseRequest
- type Destination
- type DestinationType
- type EditAlert
- type EditWarehouseRequest
- type EndpointConfPair
- type EndpointHealth
- type EndpointInfo
- type EndpointTagPair
- type EndpointTags
- type GetAlertRequest
- type GetDashboardRequest
- type GetDbsqlPermissionRequest
- type GetQueryRequest
- type GetResponse
- type GetSubscriptionsRequest
- type GetWarehouseRequest
- type GetWarehouseResponse
- type GetWorkspaceWarehouseConfigResponse
- type GetWorkspaceWarehouseConfigResponseSecurityPolicy
- type ListDashboardsRequest
- type ListOrder
- type ListQueriesRequest
- type ListQueriesResponse
- type ListQueryHistoryRequest
- type ListResponse
- type ListSchedulesRequest
- type ListWarehousesRequest
- type ListWarehousesResponse
- type ObjectType
- type ObjectTypePlural
- type OdbcParams
- type OwnableObjectType
- type Parameter
- type ParameterType
- type PermissionLevel
- type PlansState
- type QueriesAPI
- func (a *QueriesAPI) Create(ctx context.Context, request QueryPostContent) (*Query, error)
- func (a *QueriesAPI) Delete(ctx context.Context, request DeleteQueryRequest) error
- func (a *QueriesAPI) DeleteByQueryId(ctx context.Context, queryId string) error
- func (a *QueriesAPI) Get(ctx context.Context, request GetQueryRequest) (*Query, error)
- func (a *QueriesAPI) GetByName(ctx context.Context, name string) (*Query, error)
- func (a *QueriesAPI) GetByQueryId(ctx context.Context, queryId string) (*Query, error)
- func (a *QueriesAPI) Impl() QueriesService
- func (a *QueriesAPI) ListAll(ctx context.Context, request ListQueriesRequest) ([]Query, error)
- func (a *QueriesAPI) QueryNameToIdMap(ctx context.Context, request ListQueriesRequest) (map[string]string, error)
- func (a *QueriesAPI) Restore(ctx context.Context, request RestoreQueryRequest) error
- func (a *QueriesAPI) Update(ctx context.Context, request QueryPostContent) (*Query, error)
- func (a *QueriesAPI) WithImpl(impl QueriesService) *QueriesAPI
- type QueriesService
- type Query
- type QueryFilter
- type QueryHistoryAPI
- type QueryHistoryService
- type QueryInfo
- type QueryInterval
- type QueryList
- type QueryMetrics
- type QueryOptions
- type QueryPostContent
- type QueryStatementType
- type QueryStatus
- type RefreshSchedule
- type RepeatedEndpointConfPairs
- type RestoreDashboardRequest
- type RestoreQueryRequest
- type SetRequest
- type SetResponse
- type SetWorkspaceWarehouseConfigRequest
- type SetWorkspaceWarehouseConfigRequestSecurityPolicy
- type SpotInstancePolicy
- type StartRequest
- type State
- type Status
- type StopRequest
- type Subscription
- type Success
- type SuccessMessage
- type TerminationReason
- type TerminationReasonCode
- type TerminationReasonType
- type TimeRange
- type TransferOwnershipObjectId
- type TransferOwnershipRequest
- type UnsubscribeRequest
- type User
- type Visualization
- type WarehouseType
- type WarehouseTypePair
- type WarehousesAPI
- func (a *WarehousesAPI) Create(ctx context.Context, request CreateWarehouseRequest) (*CreateWarehouseResponse, error)
- func (a *WarehousesAPI) CreateAndWait(ctx context.Context, createWarehouseRequest CreateWarehouseRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) Delete(ctx context.Context, request DeleteWarehouseRequest) error
- func (a *WarehousesAPI) DeleteAndWait(ctx context.Context, deleteWarehouseRequest DeleteWarehouseRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) DeleteById(ctx context.Context, id string) error
- func (a *WarehousesAPI) DeleteByIdAndWait(ctx context.Context, id string, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) Edit(ctx context.Context, request EditWarehouseRequest) error
- func (a *WarehousesAPI) EditAndWait(ctx context.Context, editWarehouseRequest EditWarehouseRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) EndpointInfoNameToIdMap(ctx context.Context, request ListWarehousesRequest) (map[string]string, error)
- func (a *WarehousesAPI) Get(ctx context.Context, request GetWarehouseRequest) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) GetAndWait(ctx context.Context, getWarehouseRequest GetWarehouseRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) GetById(ctx context.Context, id string) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) GetByIdAndWait(ctx context.Context, id string, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) GetByName(ctx context.Context, name string) (*EndpointInfo, error)
- func (a *WarehousesAPI) GetWorkspaceWarehouseConfig(ctx context.Context) (*GetWorkspaceWarehouseConfigResponse, error)
- func (a *WarehousesAPI) Impl() WarehousesService
- func (a *WarehousesAPI) ListAll(ctx context.Context, request ListWarehousesRequest) ([]EndpointInfo, error)
- func (a *WarehousesAPI) SetWorkspaceWarehouseConfig(ctx context.Context, request SetWorkspaceWarehouseConfigRequest) error
- func (a *WarehousesAPI) Start(ctx context.Context, request StartRequest) error
- func (a *WarehousesAPI) StartAndWait(ctx context.Context, startRequest StartRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) Stop(ctx context.Context, request StopRequest) error
- func (a *WarehousesAPI) StopAndWait(ctx context.Context, stopRequest StopRequest, ...) (*GetWarehouseResponse, error)
- func (a *WarehousesAPI) WithImpl(impl WarehousesService) *WarehousesAPI
- type WarehousesService
- type Widget
- type WidgetOptions
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type AccessControl ¶
type AccessControl struct {
GroupName string `json:"group_name,omitempty"`
// This describes an enum
PermissionLevel PermissionLevel `json:"permission_level,omitempty"`
UserName string `json:"user_name,omitempty"`
}
type Alert ¶
type Alert struct {
// Timestamp when the alert was created.
CreatedAt string `json:"created_at,omitempty"`
// ID of the alert.
Id string `json:"id,omitempty"`
// Timestamp when the alert was last triggered.
LastTriggeredAt string `json:"last_triggered_at,omitempty"`
// Name of the alert.
Name string `json:"name,omitempty"`
// Alert configuration options.
Options *AlertOptions `json:"options,omitempty"`
Query *Query `json:"query,omitempty"`
// Number of seconds after being triggered before the alert rearms itself
// and can be triggered again. If `null`, alert will never be triggered
// again.
Rearm int `json:"rearm,omitempty"`
// State of the alert. Possible values are: `unknown` (yet to be evaluated),
// `triggered` (evaluated and fulfilled trigger conditions), or `ok`
// (evaluated and did not fulfill trigger conditions).
State AlertState `json:"state,omitempty"`
// Timestamp when the alert was last updated.
UpdatedAt string `json:"updated_at,omitempty"`
User *User `json:"user,omitempty"`
}
type AlertOptions ¶
type AlertOptions struct {
// Name of column in the query result to compare in alert evaluation.
Column string `json:"column"`
// Custom body of alert notification, if it exists. See [here] for custom
// templating instructions.
//
// [here]: https://docs.databricks.com/sql/user/alerts/index.html
CustomBody string `json:"custom_body,omitempty"`
// Custom subject of alert notification, if it exists. This includes email
// subject, Slack notification header, etc. See [here] for custom templating
// instructions.
//
// [here]: https://docs.databricks.com/sql/user/alerts/index.html
CustomSubject string `json:"custom_subject,omitempty"`
// Whether or not the alert is muted. If an alert is muted, it will not
// notify users and alert destinations when triggered.
Muted bool `json:"muted,omitempty"`
// Operator used to compare in alert evaluation: `>`, `>=`, `<`, `<=`, `==`,
// `!=`
Op string `json:"op"`
// Number of failures encountered during alert refresh. This counter is used
// for sending aggregated alert failure email notifications.
ScheduleFailures int `json:"schedule_failures,omitempty"`
// Value used to compare in alert evaluation.
Value string `json:"value"`
}
Alert configuration options.
type AlertState ¶
type AlertState string
State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).
const AlertStateOk AlertState = `ok`
const AlertStateTriggered AlertState = `triggered`
const AlertStateUnknown AlertState = `unknown`
func (*AlertState) Set ¶
func (as *AlertState) Set(v string) error
Set raw string value and validate it against allowed values
func (*AlertState) String ¶
func (as *AlertState) String() string
String representation for fmt.Print
func (*AlertState) Type ¶
func (as *AlertState) Type() string
Type always returns AlertState to satisfy [pflag.Value] interface
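Because AlertState implements Set, String, and Type, it satisfies the [pflag.Value] interface and can also be used for simple validation on its own. A minimal, self-contained sketch (the import path of this package is assumed):

package main

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func main() {
	var state sql.AlertState
	// Set validates the raw value against the allowed enum values.
	if err := state.Set("triggered"); err != nil {
		panic(err)
	}
	fmt.Println(state.String(), state.Type()) // triggered AlertState

	// Values outside ok/triggered/unknown are rejected with an error.
	if err := state.Set("paused"); err != nil {
		fmt.Println("rejected:", err)
	}
}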
type AlertsAPI ¶
type AlertsAPI struct {
// contains filtered or unexported fields
}
The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or alert destinations if the condition was met.
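A minimal sketch of the create/update/delete flow. It assumes that the workspace client from the parent github.com/databricks/databricks-sdk-go module exposes this API as w.Alerts and that queryId identifies an existing Databricks SQL query; names such as "demo-alert" are placeholders.

package example

import (
	"context"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

func alertLifecycle(ctx context.Context, queryId string) error {
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		return err
	}
	// Fire when the first value of the `count` column exceeds 100.
	opts := sql.AlertOptions{Column: "count", Op: ">", Value: "100"}
	alert, err := w.Alerts.Create(ctx, sql.EditAlert{
		Name:    "demo-alert",
		QueryId: queryId,
		Options: opts,
	})
	if err != nil {
		return err
	}
	// Update in place: add a 10-minute rearm interval.
	if err := w.Alerts.Update(ctx, sql.EditAlert{
		AlertId: alert.Id,
		Name:    "demo-alert",
		QueryId: queryId,
		Options: opts,
		Rearm:   600,
	}); err != nil {
		return err
	}
	return w.Alerts.DeleteByAlertId(ctx, alert.Id)
}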
func NewAlerts ¶
func NewAlerts(client *client.DatabricksClient) *AlertsAPI
func (*AlertsAPI) AlertNameToIdMap ¶
func (a *AlertsAPI) AlertNameToIdMap(ctx context.Context) (map[string]string, error)
AlertNameToIdMap calls AlertsAPI.List and creates a map of results with Alert.Name as key and Alert.Id as value.
Returns an error if there's more than one Alert with the same .Name.
Note: All Alert instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*AlertsAPI) Create ¶
func (a *AlertsAPI) Create(ctx context.Context, request EditAlert) (*Alert, error)
Create an alert.
Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or alert destinations if the condition was met.
func (*AlertsAPI) CreateSchedule ¶
func (a *AlertsAPI) CreateSchedule(ctx context.Context, request CreateRefreshSchedule) (*RefreshSchedule, error)
Create a refresh schedule.
Creates a new refresh schedule for an alert.
**Note:** The structure of refresh schedules is subject to change.
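A short sketch of attaching a refresh schedule, assuming a is a *AlertsAPI (for example w.Alerts on a workspace client) and alertId refers to an existing alert; the cron expression is a placeholder.

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func scheduleAlert(ctx context.Context, a *sql.AlertsAPI, alertId string) error {
	// Refresh and evaluate the alert's query every 10 minutes. If
	// DataSourceId is left empty, the query's own SQL warehouse is used.
	schedule, err := a.CreateSchedule(ctx, sql.CreateRefreshSchedule{
		AlertId: alertId,
		Cron:    "*/10 * * * *",
	})
	if err != nil {
		return err
	}
	fmt.Printf("created schedule: %+v\n", schedule)
	return nil
}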
func (*AlertsAPI) Delete ¶
func (a *AlertsAPI) Delete(ctx context.Context, request DeleteAlertRequest) error
Delete an alert.
Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note:** Unlike queries and dashboards, alerts cannot be moved to the trash.
func (*AlertsAPI) DeleteByAlertId ¶
func (a *AlertsAPI) DeleteByAlertId(ctx context.Context, alertId string) error
Delete an alert.
Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note:** Unlike queries and dashboards, alerts cannot be moved to the trash.
func (*AlertsAPI) DeleteSchedule ¶
func (a *AlertsAPI) DeleteSchedule(ctx context.Context, request DeleteScheduleRequest) error
Delete a refresh schedule.
Deletes an alert's refresh schedule. The refresh schedule specifies when to refresh and evaluate the associated query result.
func (*AlertsAPI) DeleteScheduleByAlertIdAndScheduleId ¶
func (a *AlertsAPI) DeleteScheduleByAlertIdAndScheduleId(ctx context.Context, alertId string, scheduleId string) error
Delete a refresh schedule.
Deletes an alert's refresh schedule. The refresh schedule specifies when to refresh and evaluate the associated query result.
func (*AlertsAPI) GetByName ¶
func (a *AlertsAPI) GetByName(ctx context.Context, name string) (*Alert, error)
GetByName calls AlertsAPI.AlertNameToIdMap and returns a single Alert.
Returns an error if there's more than one Alert with the same .Name.
Note: All Alert instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*AlertsAPI) GetSubscriptions ¶
func (a *AlertsAPI) GetSubscriptions(ctx context.Context, request GetSubscriptionsRequest) ([]Subscription, error)
Get an alert's subscriptions.
Get the subscriptions for an alert. An alert subscription represents exactly one recipient being notified whenever the alert is triggered. The alert recipient is specified by either the `user` field or the `destination` field. The `user` field is ignored if `destination` is non-`null`.
func (*AlertsAPI) GetSubscriptionsByAlertId ¶
func (a *AlertsAPI) GetSubscriptionsByAlertId(ctx context.Context, alertId string) ([]Subscription, error)
Get an alert's subscriptions.
Get the subscriptions for an alert. An alert subscription represents exactly one recipient being notified whenever the alert is triggered. The alert recipient is specified by either the `user` field or the `destination` field. The `user` field is ignored if `destination` is non-`null`.
func (*AlertsAPI) Impl ¶
func (a *AlertsAPI) Impl() AlertsService
Impl returns low-level Alerts API implementation
func (*AlertsAPI) ListSchedules ¶
func (a *AlertsAPI) ListSchedules(ctx context.Context, request ListSchedulesRequest) ([]RefreshSchedule, error)
Get refresh schedules.
Gets the refresh schedules for the specified alert. Alerts can have refresh schedules that specify when to refresh and evaluate the associated query result.
**Note:** Although refresh schedules are returned in a list, only one refresh schedule per alert is currently supported. The structure of refresh schedules is subject to change.
func (*AlertsAPI) ListSchedulesByAlertId ¶
func (a *AlertsAPI) ListSchedulesByAlertId(ctx context.Context, alertId string) ([]RefreshSchedule, error)
Get refresh schedules.
Gets the refresh schedules for the specified alert. Alerts can have refresh schedules that specify when to refresh and evaluate the associated query result.
**Note:** Although refresh schedules are returned in a list, only one refresh schedule per alert is currently supported. The structure of refresh schedules is subject to change.
func (*AlertsAPI) Subscribe ¶
func (a *AlertsAPI) Subscribe(ctx context.Context, request CreateSubscription) (*Subscription, error)
Subscribe to an alert.
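A sketch of subscribing an alert destination, assuming a is a *AlertsAPI and both IDs already exist; to subscribe a user instead, set UserId rather than DestinationId.

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func subscribeDestination(ctx context.Context, a *sql.AlertsAPI, alertId, destinationId string) error {
	// Each subscription represents exactly one recipient of the alert.
	if _, err := a.Subscribe(ctx, sql.CreateSubscription{
		AlertId:       alertId,
		DestinationId: destinationId,
	}); err != nil {
		return err
	}
	// List every recipient (users and destinations) currently attached.
	subs, err := a.GetSubscriptionsByAlertId(ctx, alertId)
	if err != nil {
		return err
	}
	fmt.Printf("%d subscription(s)\n", len(subs))
	return nil
}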
func (*AlertsAPI) Unsubscribe ¶
func (a *AlertsAPI) Unsubscribe(ctx context.Context, request UnsubscribeRequest) error
Unsubscribe from an alert.
Unsubscribes a user or a destination from an alert.
func (*AlertsAPI) UnsubscribeByAlertIdAndSubscriptionId ¶
func (a *AlertsAPI) UnsubscribeByAlertIdAndSubscriptionId(ctx context.Context, alertId string, subscriptionId string) error
Unsubscribe from an alert.
Unsubscribes a user or a destination from an alert.
func (*AlertsAPI) WithImpl ¶
func (a *AlertsAPI) WithImpl(impl AlertsService) *AlertsAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type AlertsService ¶
type AlertsService interface {
// Create an alert.
//
// Creates an alert. An alert is a Databricks SQL object that periodically
// runs a query, evaluates a condition of its result, and notifies users or
// alert destinations if the condition was met.
Create(ctx context.Context, request EditAlert) (*Alert, error)
// Create a refresh schedule.
//
// Creates a new refresh schedule for an alert.
//
// **Note:** The structure of refresh schedules is subject to change.
CreateSchedule(ctx context.Context, request CreateRefreshSchedule) (*RefreshSchedule, error)
// Delete an alert.
//
// Deletes an alert. Deleted alerts are no longer accessible and cannot be
// restored. **Note:** Unlike queries and dashboards, alerts cannot be moved
// to the trash.
Delete(ctx context.Context, request DeleteAlertRequest) error
// Delete a refresh schedule.
//
// Deletes an alert's refresh schedule. The refresh schedule specifies when
// to refresh and evaluate the associated query result.
DeleteSchedule(ctx context.Context, request DeleteScheduleRequest) error
// Get an alert.
//
// Gets an alert.
Get(ctx context.Context, request GetAlertRequest) (*Alert, error)
// Get an alert's subscriptions.
//
// Get the subscriptions for an alert. An alert subscription represents
// exactly one recipient being notified whenever the alert is triggered. The
// alert recipient is specified by either the `user` field or the
// `destination` field. The `user` field is ignored if `destination` is
// non-`null`.
GetSubscriptions(ctx context.Context, request GetSubscriptionsRequest) ([]Subscription, error)
// Get alerts.
//
// Gets a list of alerts.
List(ctx context.Context) ([]Alert, error)
// Get refresh schedules.
//
// Gets the refresh schedules for the specified alert. Alerts can have
// refresh schedules that specify when to refresh and evaluate the
// associated query result.
//
// **Note:** Although refresh schedules are returned in a list, only one
// refresh schedule per alert is currently supported. The structure of
// refresh schedules is subject to change.
ListSchedules(ctx context.Context, request ListSchedulesRequest) ([]RefreshSchedule, error)
// Subscribe to an alert.
Subscribe(ctx context.Context, request CreateSubscription) (*Subscription, error)
// Unsubscribe from an alert.
//
// Unsubscribes a user or a destination from an alert.
Unsubscribe(ctx context.Context, request UnsubscribeRequest) error
// Update an alert.
//
// Updates an alert.
Update(ctx context.Context, request EditAlert) error
}
The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or alert destinations if the condition was met.
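WithImpl (together with Impl) makes the low-level AlertsService swappable, so a hand-written fake or a gomock-generated mock can stand in during tests. A minimal sketch that stubs only List by embedding the interface; it assumes the generated NewAlerts constructor tolerates a nil client, which is safe here only because the fake intercepts every call before the transport is touched.

package example

import (
	"context"
	"testing"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

// fakeAlerts satisfies AlertsService; embedding the interface means only the
// methods under test need to be overridden.
type fakeAlerts struct {
	sql.AlertsService
}

func (f fakeAlerts) List(ctx context.Context) ([]sql.Alert, error) {
	return []sql.Alert{{Id: "a1", Name: "disk-usage-high"}}, nil
}

func TestListAlerts(t *testing.T) {
	// The nil client is never used because the fake handles every call.
	api := sql.NewAlerts(nil).WithImpl(fakeAlerts{})
	alerts, err := api.List(context.Background())
	if err != nil || len(alerts) != 1 {
		t.Fatalf("unexpected result: %v %v", alerts, err)
	}
}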
type Channel ¶
type Channel struct {
DbsqlVersion string `json:"dbsql_version,omitempty"`
Name ChannelName `json:"name,omitempty"`
}
type ChannelInfo ¶
type ChannelInfo struct {
// DBSQL Version the channel is mapped to
DbsqlVersion string `json:"dbsql_version,omitempty"`
// Name of the channel
Name ChannelName `json:"name,omitempty"`
}
Channel information for the SQL warehouse at the time of query execution
type ChannelName ¶
type ChannelName string
Name of the channel
const ChannelNameChannelNameCurrent ChannelName = `CHANNEL_NAME_CURRENT`
const ChannelNameChannelNameCustom ChannelName = `CHANNEL_NAME_CUSTOM`
const ChannelNameChannelNamePreview ChannelName = `CHANNEL_NAME_PREVIEW`
const ChannelNameChannelNamePrevious ChannelName = `CHANNEL_NAME_PREVIOUS`
const ChannelNameChannelNameUnspecified ChannelName = `CHANNEL_NAME_UNSPECIFIED`
func (*ChannelName) Set ¶
func (cn *ChannelName) Set(v string) error
Set raw string value and validate it against allowed values
func (*ChannelName) String ¶
func (cn *ChannelName) String() string
String representation for fmt.Print
func (*ChannelName) Type ¶
func (cn *ChannelName) Type() string
Type always returns ChannelName to satisfy [pflag.Value] interface
type CreateDashboardRequest ¶
type CreateDashboardRequest struct {
// In the web application, query filters that share a name are coupled to a
// single selection box if this value is true.
DashboardFiltersEnabled bool `json:"dashboard_filters_enabled,omitempty"`
// Draft dashboards only appear in list views for their owners.
IsDraft bool `json:"is_draft,omitempty"`
// Indicates whether the dashboard is trashed. Trashed dashboards don't
// appear in list views.
IsTrashed bool `json:"is_trashed,omitempty"`
// The title of this dashboard that appears in list views and at the top of
// the dashboard page.
Name string `json:"name,omitempty"`
Tags []string `json:"tags,omitempty"`
// An array of widget objects. A complete description of widget objects can
// be found in the response to [Retrieve A Dashboard
// Definition](#operation/sql-analytics-fetch-dashboard). Databricks does
// not recommend creating new widgets via this API.
Widgets []Widget `json:"widgets,omitempty"`
}
Create a dashboard object
type CreateRefreshSchedule ¶
type CreateRefreshSchedule struct {
AlertId string `json:"-" url:"-"`
// Cron string representing the refresh schedule.
Cron string `json:"cron"`
// ID of the SQL warehouse to refresh with. If `null`, query's SQL warehouse
// will be used to refresh.
DataSourceId string `json:"data_source_id,omitempty"`
}
type CreateSubscription ¶
type CreateSubscription struct {
// ID of the alert.
AlertId string `json:"alert_id" url:"-"`
// ID of the alert subscriber (if subscribing an alert destination). Alert
// destinations can be configured by admins through the UI. See
// [here](/sql/admin/alert-destinations.html).
DestinationId string `json:"destination_id,omitempty"`
// ID of the alert subscriber (if subscribing a user).
UserId int64 `json:"user_id,omitempty"`
}
type CreateWarehouseRequest ¶
type CreateWarehouseRequest struct {
// The amount of time in minutes that a SQL Endpoint must be idle (i.e., no
// RUNNING queries) before it is automatically stopped.
//
// Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
//
// Defaults to 120 mins
AutoStopMins int `json:"auto_stop_mins,omitempty"`
// Channel Details
Channel *Channel `json:"channel,omitempty"`
// Size of the clusters allocated for this endpoint. Increasing the size of
// a spark cluster allows you to run larger queries on it. If you want to
// increase the number of concurrent queries, please tune max_num_clusters.
//
// Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large
// - 2X-Large - 3X-Large - 4X-Large
ClusterSize string `json:"cluster_size,omitempty"`
// endpoint creator name
CreatorName string `json:"creator_name,omitempty"`
// Configures whether the endpoint should use Photon optimized clusters.
//
// Defaults to false.
EnablePhoton bool `json:"enable_photon,omitempty"`
// Configures whether the endpoint should use Serverless Compute (aka
// Nephos)
//
// Defaults to value in global endpoint settings
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// Deprecated. Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// Maximum number of clusters that the autoscaler will create to handle
// concurrent queries.
//
// Supported values: - Must be >= min_num_clusters - Must be <= 30.
//
// Defaults to min_clusters if unset.
MaxNumClusters int `json:"max_num_clusters,omitempty"`
// Minimum number of available clusters that will be maintained for this SQL
// Endpoint. Increasing this will ensure that a larger number of clusters
// are always running and therefore may reduce the cold start time for new
// queries. This is similar to reserved vs. revocable cores in a resource
// manager.
//
// Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
//
// Defaults to 1
MinNumClusters int `json:"min_num_clusters,omitempty"`
// Logical name for the cluster.
//
// Supported values: - Must be unique within an org. - Must be less than 100
// characters.
Name string `json:"name,omitempty"`
// Configures whether the warehouse should use spot instances.
SpotInstancePolicy SpotInstancePolicy `json:"spot_instance_policy,omitempty"`
// A set of key-value pairs that will be tagged on all resources (e.g., AWS
// instances and EBS volumes) associated with this SQL endpoint.
//
// Supported values: - Number of tags < 45.
Tags *EndpointTags `json:"tags,omitempty"`
WarehouseType WarehouseType `json:"warehouse_type,omitempty"`
}
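A sketch of a typical create request, assuming the workspace client exposes this API as w.Warehouses; the warehouse name and sizing values are placeholders. CreateAndWait blocks until the new warehouse is reported as running (its trailing variadic options are omitted here).

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

func createWarehouse(ctx context.Context) error {
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		return err
	}
	// Create a small autoscaling warehouse and block until it is running.
	info, err := w.Warehouses.CreateAndWait(ctx, sql.CreateWarehouseRequest{
		Name:           "sdk-example",
		ClusterSize:    "Small",
		AutoStopMins:   10, // smallest non-zero value allowed by the field docs
		MinNumClusters: 1,
		MaxNumClusters: 2,
		EnablePhoton:   true,
	})
	if err != nil {
		return err
	}
	fmt.Println("warehouse id:", info.Id, "state:", info.State)
	return nil
}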
type CreateWarehouseResponse ¶
type CreateWarehouseResponse struct {
// Id for the SQL warehouse. This value is unique across all SQL warehouses.
Id string `json:"id,omitempty"`
}
type Dashboard ¶
type Dashboard struct {
// Whether the authenticated user can edit the query definition.
CanEdit bool `json:"can_edit,omitempty"`
// Timestamp when this dashboard was created.
CreatedAt string `json:"created_at,omitempty"`
// In the web application, query filters that share a name are coupled to a
// single selection box if this value is `true`.
DashboardFiltersEnabled bool `json:"dashboard_filters_enabled,omitempty"`
// The ID for this dashboard.
Id string `json:"id,omitempty"`
// Indicates whether a dashboard is trashed. Trashed dashboards won't appear
// in list views. If this boolean is `true`, the `options` property for this
// dashboard includes a `moved_to_trash_at` timestamp. Items in trash are
// permanently deleted after 30 days.
IsArchived bool `json:"is_archived,omitempty"`
// Whether a dashboard is a draft. Draft dashboards only appear in list
// views for their owners.
IsDraft bool `json:"is_draft,omitempty"`
// Indicates whether this query object appears in the current user's
// favorites list. This flag determines whether the star icon for favorites
// is selected.
IsFavorite bool `json:"is_favorite,omitempty"`
// The title of the dashboard that appears in list views and at the top of
// the dashboard page.
Name string `json:"name,omitempty"`
Options *DashboardOptions `json:"options,omitempty"`
// This describes an enum
PermissionTier PermissionLevel `json:"permission_tier,omitempty"`
// URL slug. Usually mirrors the query name with dashes (`-`) instead of
// spaces. Appears in the URL for this query.
Slug string `json:"slug,omitempty"`
Tags []string `json:"tags,omitempty"`
// Timestamp when this dashboard was last updated.
UpdatedAt string `json:"updated_at,omitempty"`
User *User `json:"user,omitempty"`
// The ID of the user that created and owns this dashboard.
UserId int `json:"user_id,omitempty"`
Widgets []Widget `json:"widgets,omitempty"`
}
A JSON representing a dashboard containing widgets of visualizations and text boxes.
type DashboardOptions ¶
type DashboardOptions struct {
// The timestamp when this dashboard was moved to trash. Only present when
// the `is_archived` property is `true`. Trashed items are deleted after
// thirty days.
MovedToTrashAt string `json:"moved_to_trash_at,omitempty"`
}
type DashboardsAPI ¶
type DashboardsAPI struct {
// contains filtered or unexported fields
}
In general, there is little need to modify dashboards using the API. However, it can be useful to use dashboard objects to look up a collection of related query IDs. The API can also be used to duplicate multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it to create a new one.
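A sketch of the GET-then-POST duplication pattern described above, assuming a is a *DashboardsAPI (for example w.Dashboards on a workspace client) and dashboardId identifies an existing dashboard.

package example

import (
	"context"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

// duplicateDashboard fetches an existing dashboard definition and creates a copy.
func duplicateDashboard(ctx context.Context, a *sql.DashboardsAPI, dashboardId string) (*sql.Dashboard, error) {
	src, err := a.GetByDashboardId(ctx, dashboardId)
	if err != nil {
		return nil, err
	}
	// Re-create under a new name; widgets are carried over exactly as returned
	// by the GET, since creating brand-new widgets via this API is not recommended.
	return a.Create(ctx, sql.CreateDashboardRequest{
		Name:                    src.Name + " (copy)",
		Tags:                    src.Tags,
		DashboardFiltersEnabled: src.DashboardFiltersEnabled,
		Widgets:                 src.Widgets,
	})
}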
func NewDashboards ¶
func NewDashboards(client *client.DatabricksClient) *DashboardsAPI
func (*DashboardsAPI) Create ¶
func (a *DashboardsAPI) Create(ctx context.Context, request CreateDashboardRequest) (*Dashboard, error)
Create a dashboard object.
func (*DashboardsAPI) DashboardNameToIdMap ¶
func (a *DashboardsAPI) DashboardNameToIdMap(ctx context.Context, request ListDashboardsRequest) (map[string]string, error)
DashboardNameToIdMap calls DashboardsAPI.ListAll and creates a map of results with Dashboard.Name as key and Dashboard.Id as value.
Returns an error if there's more than one Dashboard with the same .Name.
Note: All Dashboard instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*DashboardsAPI) Delete ¶
func (a *DashboardsAPI) Delete(ctx context.Context, request DeleteDashboardRequest) error
Remove a dashboard.
Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared.
func (*DashboardsAPI) DeleteByDashboardId ¶
func (a *DashboardsAPI) DeleteByDashboardId(ctx context.Context, dashboardId string) error
Remove a dashboard.
Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared.
func (*DashboardsAPI) Get ¶
func (a *DashboardsAPI) Get(ctx context.Context, request GetDashboardRequest) (*Dashboard, error)
Retrieve a definition.
Returns a JSON representation of a dashboard object, including its visualization and query objects.
func (*DashboardsAPI) GetByDashboardId ¶
func (a *DashboardsAPI) GetByDashboardId(ctx context.Context, dashboardId string) (*Dashboard, error)
Retrieve a definition.
Returns a JSON representation of a dashboard object, including its visualization and query objects.
func (*DashboardsAPI) GetByName ¶
func (a *DashboardsAPI) GetByName(ctx context.Context, name string) (*Dashboard, error)
GetByName calls DashboardsAPI.DashboardNameToIdMap and returns a single Dashboard.
Returns an error if there's more than one Dashboard with the same .Name.
Note: All Dashboard instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*DashboardsAPI) Impl ¶
func (a *DashboardsAPI) Impl() DashboardsService
Impl returns low-level Dashboards API implementation
func (*DashboardsAPI) ListAll ¶
func (a *DashboardsAPI) ListAll(ctx context.Context, request ListDashboardsRequest) ([]Dashboard, error)
Get dashboard objects.
Fetch a paginated list of dashboard objects.
This method is generated by Databricks SDK Code Generator.
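A sketch of listing every dashboard; ListAll transparently walks all result pages of the underlying List call. Passing an empty ListDashboardsRequest is assumed to return results in the default order.

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func printDashboards(ctx context.Context, a *sql.DashboardsAPI) error {
	dashboards, err := a.ListAll(ctx, sql.ListDashboardsRequest{})
	if err != nil {
		return err
	}
	for _, d := range dashboards {
		fmt.Printf("%s\t%s\n", d.Id, d.Name)
	}
	return nil
}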
func (*DashboardsAPI) Restore ¶
func (a *DashboardsAPI) Restore(ctx context.Context, request RestoreDashboardRequest) error
Restore a dashboard.
A restored dashboard appears in list views and searches and can be shared.
func (*DashboardsAPI) WithImpl ¶
func (a *DashboardsAPI) WithImpl(impl DashboardsService) *DashboardsAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type DashboardsService ¶
type DashboardsService interface {
// Create a dashboard object.
Create(ctx context.Context, request CreateDashboardRequest) (*Dashboard, error)
// Remove a dashboard.
//
// Moves a dashboard to the trash. Trashed dashboards do not appear in list
// views or searches, and cannot be shared.
Delete(ctx context.Context, request DeleteDashboardRequest) error
// Retrieve a definition.
//
// Returns a JSON representation of a dashboard object, including its
// visualization and query objects.
Get(ctx context.Context, request GetDashboardRequest) (*Dashboard, error)
// Get dashboard objects.
//
// Fetch a paginated list of dashboard objects.
//
// Use ListAll() to get all Dashboard instances, which will iterate over every result page.
List(ctx context.Context, request ListDashboardsRequest) (*ListResponse, error)
// Restore a dashboard.
//
// A restored dashboard appears in list views and searches and can be
// shared.
Restore(ctx context.Context, request RestoreDashboardRequest) error
}
In general, there is little need to modify dashboards using the API. However, it can be useful to use dashboard objects to look up a collection of related query IDs. The API can also be used to duplicate multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it to create a new one.
type DataSource ¶
type DataSource struct {
// The unique identifier for this data source / SQL warehouse. Can be used
// when creating / modifying queries and dashboards.
Id string `json:"id,omitempty"`
// The string name of this data source / SQL warehouse as it appears in the
// Databricks SQL web application.
Name string `json:"name,omitempty"`
// <needs content>
PauseReason string `json:"pause_reason,omitempty"`
// <needs content>
Paused int `json:"paused,omitempty"`
// <needs content>
SupportsAutoLimit bool `json:"supports_auto_limit,omitempty"`
// <needs content>
Syntax string `json:"syntax,omitempty"`
// <needs content>
Type string `json:"type,omitempty"`
// <needs content>
ViewOnly bool `json:"view_only,omitempty"`
// <needs content>
WarehouseId string `json:"warehouse_id,omitempty"`
}
A JSON object representing a DBSQL data source / SQL warehouse.
type DataSourcesAPI ¶
type DataSourcesAPI struct {
// contains filtered or unexported fields
}
This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL.
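A sketch of resolving a warehouse name to the data_source_id needed when creating query objects, assuming a is a *DataSourcesAPI (for example w.DataSources on a workspace client); note that GetByName loads the entire data source list into memory.

package example

import (
	"context"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func dataSourceIdByName(ctx context.Context, a *sql.DataSourcesAPI, warehouseName string) (string, error) {
	ds, err := a.GetByName(ctx, warehouseName)
	if err != nil {
		return "", err
	}
	// ds.Id is the data_source_id to reference in new query objects.
	return ds.Id, nil
}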
func NewDataSources ¶
func NewDataSources(client *client.DatabricksClient) *DataSourcesAPI
func (*DataSourcesAPI) DataSourceNameToIdMap ¶
func (a *DataSourcesAPI) DataSourceNameToIdMap(ctx context.Context) (map[string]string, error)
DataSourceNameToIdMap calls DataSourcesAPI.List and creates a map of results with DataSource.Name as key and DataSource.Id as value.
Returns an error if there's more than one DataSource with the same .Name.
Note: All DataSource instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*DataSourcesAPI) GetByName ¶
func (a *DataSourcesAPI) GetByName(ctx context.Context, name string) (*DataSource, error)
GetByName calls DataSourcesAPI.DataSourceNameToIdMap and returns a single DataSource.
Returns an error if there's more than one DataSource with the same .Name.
Note: All DataSource instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*DataSourcesAPI) Impl ¶
func (a *DataSourcesAPI) Impl() DataSourcesService
Impl returns low-level DataSources API implementation
func (*DataSourcesAPI) List ¶
func (a *DataSourcesAPI) List(ctx context.Context) ([]DataSource, error)
Get a list of SQL warehouses.
Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it.
func (*DataSourcesAPI) WithImpl ¶
func (a *DataSourcesAPI) WithImpl(impl DataSourcesService) *DataSourcesAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type DataSourcesService ¶
type DataSourcesService interface {
// Get a list of SQL warehouses.
//
// Retrieves a full list of SQL warehouses available in this workspace. All
// fields that appear in this API response are enumerated for clarity.
// However, you need only a SQL warehouse's `id` to create new queries
// against it.
List(ctx context.Context) ([]DataSource, error)
}
This API is provided to assist you in making new query objects. When creating a query object, you may optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL.
type DbsqlPermissionsAPI ¶
type DbsqlPermissionsAPI struct {
// contains filtered or unexported fields
}
The SQL Permissions API is similar to the endpoints of the permissions/setobjectpermissions method. However, this API exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API.
There are three levels of permission:
- `CAN_VIEW`: Allows read-only access
- `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
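A sketch of reading the ACL of a query, assuming a is a *DbsqlPermissionsAPI (for example w.DbsqlPermissions on a workspace client) and that "queries" is a valid ObjectTypePlural value for the object type path segment.

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func printQueryACL(ctx context.Context, a *sql.DbsqlPermissionsAPI, queryId string) error {
	// "queries" is an assumed ObjectTypePlural value here.
	resp, err := a.GetByObjectTypeAndObjectId(ctx, sql.ObjectTypePlural("queries"), queryId)
	if err != nil {
		return err
	}
	for _, ac := range resp.AccessControlList {
		fmt.Printf("user=%s group=%s level=%s\n", ac.UserName, ac.GroupName, ac.PermissionLevel)
	}
	return nil
}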
func NewDbsqlPermissions ¶
func NewDbsqlPermissions(client *client.DatabricksClient) *DbsqlPermissionsAPI
func (*DbsqlPermissionsAPI) Get ¶
func (a *DbsqlPermissionsAPI) Get(ctx context.Context, request GetDbsqlPermissionRequest) (*GetResponse, error)
Get object ACL.
Gets a JSON representation of the access control list (ACL) for a specified object.
func (*DbsqlPermissionsAPI) GetByObjectTypeAndObjectId ¶
func (a *DbsqlPermissionsAPI) GetByObjectTypeAndObjectId(ctx context.Context, objectType ObjectTypePlural, objectId string) (*GetResponse, error)
Get object ACL.
Gets a JSON representation of the access control list (ACL) for a specified object.
func (*DbsqlPermissionsAPI) Impl ¶
func (a *DbsqlPermissionsAPI) Impl() DbsqlPermissionsService
Impl returns low-level DbsqlPermissions API implementation
func (*DbsqlPermissionsAPI) Set ¶
func (a *DbsqlPermissionsAPI) Set(ctx context.Context, request SetRequest) (*SetResponse, error)
Set object ACL.
Sets the access control list (ACL) for a specified object. This operation completely rewrites the ACL.
func (*DbsqlPermissionsAPI) TransferOwnership ¶
func (a *DbsqlPermissionsAPI) TransferOwnership(ctx context.Context, request TransferOwnershipRequest) (*Success, error)
Transfer object ownership.
Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
func (*DbsqlPermissionsAPI) WithImpl ¶
func (a *DbsqlPermissionsAPI) WithImpl(impl DbsqlPermissionsService) *DbsqlPermissionsAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type DbsqlPermissionsService ¶
type DbsqlPermissionsService interface {
// Get object ACL.
//
// Gets a JSON representation of the access control list (ACL) for a
// specified object.
Get(ctx context.Context, request GetDbsqlPermissionRequest) (*GetResponse, error)
// Set object ACL.
//
// Sets the access control list (ACL) for a specified object. This operation
// completely rewrites the ACL.
Set(ctx context.Context, request SetRequest) (*SetResponse, error)
// Transfer object ownership.
//
// Transfers ownership of a dashboard, query, or alert to an active user.
// Requires an admin API key.
TransferOwnership(ctx context.Context, request TransferOwnershipRequest) (*Success, error)
}
The SQL Permissions API is similar to the endpoints of the permissions/setobjectpermissions method. However, this API exposes only one endpoint, which gets the Access Control List for a given object. You cannot modify any permissions using this API.
There are three levels of permission:
- `CAN_VIEW`: Allows read-only access
- `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
type DeleteAlertRequest ¶
type DeleteAlertRequest struct {
AlertId string `json:"-" url:"-"`
}
Delete an alert
type DeleteDashboardRequest ¶
type DeleteDashboardRequest struct {
DashboardId string `json:"-" url:"-"`
}
Remove a dashboard
type DeleteQueryRequest ¶
type DeleteQueryRequest struct {
QueryId string `json:"-" url:"-"`
}
Delete a query
type DeleteScheduleRequest ¶
type DeleteScheduleRequest struct {
AlertId string `json:"-" url:"-"`
ScheduleId string `json:"-" url:"-"`
}
Delete a refresh schedule
type DeleteWarehouseRequest ¶
type DeleteWarehouseRequest struct {
// Required. Id of the SQL warehouse.
Id string `json:"-" url:"-"`
}
Delete a warehouse
type Destination ¶
type Destination struct {
// ID of the alert destination.
Id string `json:"id,omitempty"`
// Name of the alert destination.
Name string `json:"name,omitempty"`
// Type of the alert destination.
Type DestinationType `json:"type,omitempty"`
}
Alert destination subscribed to the alert, if it exists. Alert destinations can be configured by admins through the UI (see /sql/admin/alert-destinations.html).
type DestinationType ¶
type DestinationType string
Type of the alert destination.
const DestinationTypeEmail DestinationType = `email`
const DestinationTypeHangoutsChat DestinationType = `hangouts_chat`
const DestinationTypeMattermost DestinationType = `mattermost`
const DestinationTypeMicrosoftTeams DestinationType = `microsoft_teams`
const DestinationTypePagerduty DestinationType = `pagerduty`
const DestinationTypeSlack DestinationType = `slack`
const DestinationTypeWebhook DestinationType = `webhook`
func (*DestinationType) Set ¶
func (dt *DestinationType) Set(v string) error
Set raw string value and validate it against allowed values
func (*DestinationType) String ¶
func (dt *DestinationType) String() string
String representation for fmt.Print
func (*DestinationType) Type ¶
func (dt *DestinationType) Type() string
Type always returns DestinationType to satisfy [pflag.Value] interface
type EditAlert ¶
type EditAlert struct {
AlertId string `json:"-" url:"-"`
// Name of the alert.
Name string `json:"name"`
// Alert configuration options.
Options AlertOptions `json:"options"`
// ID of the query evaluated by the alert.
QueryId string `json:"query_id"`
// Number of seconds after being triggered before the alert rearms itself
// and can be triggered again. If `null`, alert will never be triggered
// again.
Rearm int `json:"rearm,omitempty"`
}
type EditWarehouseRequest ¶
type EditWarehouseRequest struct {
// The amount of time in minutes that a SQL Endpoint must be idle (i.e., no
// RUNNING queries) before it is automatically stopped.
//
// Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
//
// Defaults to 120 mins
AutoStopMins int `json:"auto_stop_mins,omitempty"`
// Channel Details
Channel *Channel `json:"channel,omitempty"`
// Size of the clusters allocated for this endpoint. Increasing the size of
// a spark cluster allows you to run larger queries on it. If you want to
// increase the number of concurrent queries, please tune max_num_clusters.
//
// Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large
// - 2X-Large - 3X-Large - 4X-Large
ClusterSize string `json:"cluster_size,omitempty"`
// endpoint creator name
CreatorName string `json:"creator_name,omitempty"`
// Configures whether the endpoint should use Databricks Compute (aka
// Nephos)
//
// Deprecated: Use enable_serverless_compute TODO(SC-79930): Remove the
// field once clients are updated
EnableDatabricksCompute bool `json:"enable_databricks_compute,omitempty"`
// Configures whether the endpoint should use Photon optimized clusters.
//
// Defaults to false.
EnablePhoton bool `json:"enable_photon,omitempty"`
// Configures whether the endpoint should use Serverless Compute (aka
// Nephos)
//
// Defaults to value in global endpoint settings
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// Required. Id of the warehouse to configure.
Id string `json:"-" url:"-"`
// Deprecated. Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// Maximum number of clusters that the autoscaler will create to handle
// concurrent queries.
//
// Supported values: - Must be >= min_num_clusters - Must be <= 30.
//
// Defaults to min_clusters if unset.
MaxNumClusters int `json:"max_num_clusters,omitempty"`
// Minimum number of available clusters that will be maintained for this SQL
// Endpoint. Increasing this will ensure that a larger number of clusters
// are always running and therefore may reduce the cold start time for new
// queries. This is similar to reserved vs. revocable cores in a resource
// manager.
//
// Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
//
// Defaults to 1
MinNumClusters int `json:"min_num_clusters,omitempty"`
// Logical name for the cluster.
//
// Supported values: - Must be unique within an org. - Must be less than 100
// characters.
Name string `json:"name,omitempty"`
// Configures whether the warehouse should use spot instances.
SpotInstancePolicy SpotInstancePolicy `json:"spot_instance_policy,omitempty"`
// A set of key-value pairs that will be tagged on all resources (e.g., AWS
// instances and EBS volumes) associated with this SQL endpoint.
//
// Supported values: - Number of tags < 45.
Tags *EndpointTags `json:"tags,omitempty"`
WarehouseType WarehouseType `json:"warehouse_type,omitempty"`
}
type EndpointConfPair ¶
type EndpointHealth ¶
type EndpointHealth struct {
// Details about errors that are causing current degraded/failed status.
Details string `json:"details,omitempty"`
// The reason for failure to bring up clusters for this endpoint. This is
// available when status is 'FAILED' and sometimes when it is DEGRADED.
FailureReason *TerminationReason `json:"failure_reason,omitempty"`
// Deprecated. split into summary and details for security
Message string `json:"message,omitempty"`
// Health status of the endpoint.
Status Status `json:"status,omitempty"`
// A short summary of the health status in case of degraded/failed
// endpoints.
Summary string `json:"summary,omitempty"`
}
type EndpointInfo ¶
type EndpointInfo struct {
// The amount of time in minutes that a SQL Endpoint must be idle (i.e., no
// RUNNING queries) before it is automatically stopped.
//
// Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
//
// Defaults to 120 mins
AutoStopMins int `json:"auto_stop_mins,omitempty"`
// Channel Details
Channel *Channel `json:"channel,omitempty"`
// Size of the clusters allocated for this endpoint. Increasing the size of
// a spark cluster allows you to run larger queries on it. If you want to
// increase the number of concurrent queries, please tune max_num_clusters.
//
// Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large
// - 2X-Large - 3X-Large - 4X-Large
ClusterSize string `json:"cluster_size,omitempty"`
// endpoint creator name
CreatorName string `json:"creator_name,omitempty"`
// Configures whether the endpoint should use Databricks Compute (aka
// Nephos)
//
// Deprecated: Use enable_serverless_compute TODO(SC-79930): Remove the
// field once clients are updated
EnableDatabricksCompute bool `json:"enable_databricks_compute,omitempty"`
// Configures whether the endpoint should use Photon optimized clusters.
//
// Defaults to false.
EnablePhoton bool `json:"enable_photon,omitempty"`
// Configures whether the endpoint should use Serverless Compute (aka
// Nephos)
//
// Defaults to value in global endpoint settings
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// Optional health status. Assume the endpoint is healthy if this field is
// not set.
Health *EndpointHealth `json:"health,omitempty"`
// unique identifier for endpoint
Id string `json:"id,omitempty"`
// Deprecated. Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// the jdbc connection string for this endpoint
JdbcUrl string `json:"jdbc_url,omitempty"`
// Maximum number of clusters that the autoscaler will create to handle
// concurrent queries.
//
// Supported values: - Must be >= min_num_clusters - Must be <= 30.
//
// Defaults to min_clusters if unset.
MaxNumClusters int `json:"max_num_clusters,omitempty"`
// Minimum number of available clusters that will be maintained for this SQL
// Endpoint. Increasing this will ensure that a larger number of clusters
// are always running and therefore may reduce the cold start time for new
// queries. This is similar to reserved vs. revocable cores in a resource
// manager.
//
// Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
//
// Defaults to 1
MinNumClusters int `json:"min_num_clusters,omitempty"`
// Logical name for the cluster.
//
// Supported values: - Must be unique within an org. - Must be less than 100
// characters.
Name string `json:"name,omitempty"`
// current number of active sessions for the endpoint
NumActiveSessions int64 `json:"num_active_sessions,omitempty"`
// current number of clusters running for the service
NumClusters int `json:"num_clusters,omitempty"`
// ODBC parameters for the sql endpoint
OdbcParams *OdbcParams `json:"odbc_params,omitempty"`
// Configures whether the warehouse should use spot instances.
SpotInstancePolicy SpotInstancePolicy `json:"spot_instance_policy,omitempty"`
// State of the warehouse
State State `json:"state,omitempty"`
// A set of key-value pairs that will be tagged on all resources (e.g., AWS
// instances and EBS volumes) associated with this SQL endpoint.
//
// Supported values: - Number of tags < 45.
Tags *EndpointTags `json:"tags,omitempty"`
WarehouseType WarehouseType `json:"warehouse_type,omitempty"`
}
type EndpointTagPair ¶
type EndpointTags ¶
type EndpointTags struct {
CustomTags []EndpointTagPair `json:"custom_tags,omitempty"`
}
type GetAlertRequest ¶
type GetAlertRequest struct {
AlertId string `json:"-" url:"-"`
}
Get an alert
type GetDashboardRequest ¶
type GetDashboardRequest struct {
DashboardId string `json:"-" url:"-"`
}
Retrieve a definition
type GetDbsqlPermissionRequest ¶
type GetDbsqlPermissionRequest struct {
// Object ID. An ACL is returned for the object with this UUID.
ObjectId string `json:"-" url:"-"`
// The type of object permissions to check.
ObjectType ObjectTypePlural `json:"-" url:"-"`
}
Get object ACL
type GetQueryRequest ¶
type GetQueryRequest struct {
QueryId string `json:"-" url:"-"`
}
Get a query definition.
type GetResponse ¶
type GetResponse struct {
AccessControlList []AccessControl `json:"access_control_list,omitempty"`
// An object's type and UUID, separated by a forward slash (/) character.
ObjectId string `json:"object_id,omitempty"`
// A singular noun object type.
ObjectType ObjectType `json:"object_type,omitempty"`
}
type GetSubscriptionsRequest ¶
type GetSubscriptionsRequest struct {
AlertId string `json:"-" url:"-"`
}
Get an alert's subscriptions
type GetWarehouseRequest ¶
type GetWarehouseRequest struct {
// Required. Id of the SQL warehouse.
Id string `json:"-" url:"-"`
}
Get warehouse info
type GetWarehouseResponse ¶
type GetWarehouseResponse struct {
// The amount of time in minutes that a SQL Endpoint must be idle (i.e., no
// RUNNING queries) before it is automatically stopped.
//
// Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
//
// Defaults to 120 mins
AutoStopMins int `json:"auto_stop_mins,omitempty"`
// Channel Details
Channel *Channel `json:"channel,omitempty"`
// Size of the clusters allocated for this endpoint. Increasing the size of
// a spark cluster allows you to run larger queries on it. If you want to
// increase the number of concurrent queries, please tune max_num_clusters.
//
// Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large
// - 2X-Large - 3X-Large - 4X-Large
ClusterSize string `json:"cluster_size,omitempty"`
// endpoint creator name
CreatorName string `json:"creator_name,omitempty"`
// Configures whether the endpoint should use Databricks Compute (aka
// Nephos)
//
// Deprecated: Use enable_serverless_compute TODO(SC-79930): Remove the
// field once clients are updated
EnableDatabricksCompute bool `json:"enable_databricks_compute,omitempty"`
// Configures whether the endpoint should use Photon optimized clusters.
//
// Defaults to false.
EnablePhoton bool `json:"enable_photon,omitempty"`
// Configures whether the endpoint should use Serverless Compute (aka
// Nephos)
//
// Defaults to value in global endpoint settings
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// Optional health status. Assume the endpoint is healthy if this field is
// not set.
Health *EndpointHealth `json:"health,omitempty"`
// unique identifier for endpoint
Id string `json:"id,omitempty"`
// Deprecated. Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// the jdbc connection string for this endpoint
JdbcUrl string `json:"jdbc_url,omitempty"`
// Maximum number of clusters that the autoscaler will create to handle
// concurrent queries.
//
// Supported values: - Must be >= min_num_clusters - Must be <= 30.
//
// Defaults to min_clusters if unset.
MaxNumClusters int `json:"max_num_clusters,omitempty"`
// Minimum number of available clusters that will be maintained for this SQL
// Endpoint. Increasing this will ensure that a larger number of clusters
// are always running and therefore may reduce the cold start time for new
// queries. This is similar to reserved vs. revocable cores in a resource
// manager.
//
// Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
//
// Defaults to 1
MinNumClusters int `json:"min_num_clusters,omitempty"`
// Logical name for the cluster.
//
// Supported values: - Must be unique within an org. - Must be less than 100
// characters.
Name string `json:"name,omitempty"`
// current number of active sessions for the endpoint
NumActiveSessions int64 `json:"num_active_sessions,omitempty"`
// current number of clusters running for the service
NumClusters int `json:"num_clusters,omitempty"`
// ODBC parameters for the sql endpoint
OdbcParams *OdbcParams `json:"odbc_params,omitempty"`
// Configures whether the warehouse should use spot instances.
SpotInstancePolicy SpotInstancePolicy `json:"spot_instance_policy,omitempty"`
// State of the warehouse
State State `json:"state,omitempty"`
// A set of key-value pairs that will be tagged on all resources (e.g., AWS
// instances and EBS volumes) associated with this SQL endpoint.
//
// Supported values: - Number of tags < 45.
Tags *EndpointTags `json:"tags,omitempty"`
WarehouseType WarehouseType `json:"warehouse_type,omitempty"`
}
type GetWorkspaceWarehouseConfigResponse ¶
type GetWorkspaceWarehouseConfigResponse struct {
// Optional: Channel selection details
Channel *Channel `json:"channel,omitempty"`
// Deprecated: Use sql_configuration_parameters
ConfigParam *RepeatedEndpointConfPairs `json:"config_param,omitempty"`
// Spark confs for external hive metastore configuration. JSON serialized
// size must be less than 512K.
DataAccessConfig []EndpointConfPair `json:"data_access_config,omitempty"`
// Enable Serverless compute for SQL Endpoints
//
// Deprecated: Use enable_serverless_compute TODO(SC-79930): Remove the
// field once clients are updated
EnableDatabricksCompute bool `json:"enable_databricks_compute,omitempty"`
// Enable Serverless compute for SQL Endpoints
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// List of Warehouse Types allowed in this workspace (limits the allowed
// values of the type field in CreateWarehouse and EditWarehouse). Note: Some
// types cannot be disabled; they don't need to be specified in
// SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing
// warehouses to be converted to another type. Used by the frontend to save
// specific type availability in the warehouse create and edit form UI.
EnabledWarehouseTypes []WarehouseTypePair `json:"enabled_warehouse_types,omitempty"`
// Deprecated: Use sql_configuration_parameters
GlobalParam *RepeatedEndpointConfPairs `json:"global_param,omitempty"`
// GCP only: Google Service Account passed to the cluster to access Google
// Cloud Storage
GoogleServiceAccount string `json:"google_service_account,omitempty"`
// AWS Only: Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// Security policy for endpoints
SecurityPolicy GetWorkspaceWarehouseConfigResponseSecurityPolicy `json:"security_policy,omitempty"`
// SQL configuration parameters
SqlConfigurationParameters *RepeatedEndpointConfPairs `json:"sql_configuration_parameters,omitempty"`
}
type GetWorkspaceWarehouseConfigResponseSecurityPolicy ¶
type GetWorkspaceWarehouseConfigResponseSecurityPolicy string
Security policy for endpoints
const GetWorkspaceWarehouseConfigResponseSecurityPolicyDataAccessControl GetWorkspaceWarehouseConfigResponseSecurityPolicy = `DATA_ACCESS_CONTROL`
const GetWorkspaceWarehouseConfigResponseSecurityPolicyNone GetWorkspaceWarehouseConfigResponseSecurityPolicy = `NONE`
const GetWorkspaceWarehouseConfigResponseSecurityPolicyPassthrough GetWorkspaceWarehouseConfigResponseSecurityPolicy = `PASSTHROUGH`
func (*GetWorkspaceWarehouseConfigResponseSecurityPolicy) Set ¶
func (gwwcrsp *GetWorkspaceWarehouseConfigResponseSecurityPolicy) Set(v string) error
Set raw string value and validate it against allowed values
func (*GetWorkspaceWarehouseConfigResponseSecurityPolicy) String ¶
func (gwwcrsp *GetWorkspaceWarehouseConfigResponseSecurityPolicy) String() string
String representation for fmt.Print
func (*GetWorkspaceWarehouseConfigResponseSecurityPolicy) Type ¶
func (gwwcrsp *GetWorkspaceWarehouseConfigResponseSecurityPolicy) Type() string
Type always returns GetWorkspaceWarehouseConfigResponseSecurityPolicy to satisfy [pflag.Value] interface
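These Set, String, and Type methods implement the standard flag-value pattern used for every enum in this package. A minimal sketch of using Set to validate raw input, assuming the usual import path for this package:

package example

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func parsePolicy(raw string) (sql.GetWorkspaceWarehouseConfigResponseSecurityPolicy, error) {
	var policy sql.GetWorkspaceWarehouseConfigResponseSecurityPolicy
	// Set validates the raw string against the allowed values and returns an
	// error for anything else (e.g. "NOT_A_POLICY").
	if err := policy.Set(raw); err != nil {
		return policy, err
	}
	fmt.Println("parsed policy:", policy.String())
	return policy, nil
}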
type ListDashboardsRequest ¶
type ListDashboardsRequest struct {
// Name of dashboard attribute to order by.
Order ListOrder `json:"-" url:"order,omitempty"`
// Page number to retrieve.
Page int `json:"-" url:"page,omitempty"`
// Number of dashboards to return per page.
PageSize int `json:"-" url:"page_size,omitempty"`
// Full text search term.
Q string `json:"-" url:"q,omitempty"`
}
Get dashboard objects
type ListOrder ¶
type ListOrder string
const ListOrderCreatedAt ListOrder = `created_at`
const ListOrderName ListOrder = `name`
type ListQueriesRequest ¶
type ListQueriesRequest struct {
// Name of query attribute to order by. Default sort order is ascending.
// Append a dash (`-`) to order descending instead.
//
// - `name`: The name of the query.
//
// - `created_at`: The timestamp the query was created.
//
// - `schedule`: The refresh interval for each query. For example: "Every 5
// Hours" or "Every 5 Minutes". "Never" is treated as the highest value for
// sorting.
//
// - `runtime`: The time it took to run this query. This is blank for
// parameterized queries. A blank value is treated as the highest value for
// sorting.
//
// - `executed_at`: The timestamp when the query was last run.
//
// - `created_by`: The user name of the user that created the query.
Order string `json:"-" url:"order,omitempty"`
// Page number to retrieve.
Page int `json:"-" url:"page,omitempty"`
// Number of queries to return per page.
PageSize int `json:"-" url:"page_size,omitempty"`
// Full text search term
Q string `json:"-" url:"q,omitempty"`
}
Get a list of queries
type ListQueriesResponse ¶
type ListQueryHistoryRequest ¶
type ListQueryHistoryRequest struct {
// A filter to limit query history results. This field is optional.
FilterBy *QueryFilter `json:"-" url:"filter_by,omitempty"`
// Whether to include metrics about the query.
IncludeMetrics bool `json:"-" url:"include_metrics,omitempty"`
// Limit the number of results returned in one page. The default is 100.
MaxResults int `json:"-" url:"max_results,omitempty"`
// A token that can be used to get the next page of results.
PageToken string `json:"-" url:"page_token,omitempty"`
}
List Queries
type ListResponse ¶
type ListResponse struct {
// The total number of dashboards.
Count int `json:"count,omitempty"`
// The current page being displayed.
Page int `json:"page,omitempty"`
// The number of dashboards per page.
PageSize int `json:"page_size,omitempty"`
// List of dashboards returned.
Results []Dashboard `json:"results,omitempty"`
}
type ListSchedulesRequest ¶
type ListSchedulesRequest struct {
AlertId string `json:"-" url:"-"`
}
Get refresh schedules
type ListWarehousesRequest ¶
type ListWarehousesRequest struct {
// Service Principal which will be used to fetch the list of endpoints. If
// not specified, the user from the session header is used.
RunAsUserId int `json:"-" url:"run_as_user_id,omitempty"`
}
List warehouses
type ListWarehousesResponse ¶
type ListWarehousesResponse struct {
// A list of warehouses and their configurations.
Warehouses []EndpointInfo `json:"warehouses,omitempty"`
}
type ObjectType ¶
type ObjectType string
A singular noun object type.
const ObjectTypeAlert ObjectType = `alert`
const ObjectTypeDashboard ObjectType = `dashboard`
const ObjectTypeDataSource ObjectType = `data_source`
const ObjectTypeQuery ObjectType = `query`
func (*ObjectType) Set ¶
func (ot *ObjectType) Set(v string) error
Set raw string value and validate it against allowed values
func (*ObjectType) String ¶
func (ot *ObjectType) String() string
String representation for fmt.Print
func (*ObjectType) Type ¶
func (ot *ObjectType) Type() string
Type always returns ObjectType to satisfy [pflag.Value] interface
type ObjectTypePlural ¶
type ObjectTypePlural string
Always a plural of the object type.
const ObjectTypePluralAlerts ObjectTypePlural = `alerts`
const ObjectTypePluralDashboards ObjectTypePlural = `dashboards`
const ObjectTypePluralDataSources ObjectTypePlural = `data_sources`
const ObjectTypePluralQueries ObjectTypePlural = `queries`
func (*ObjectTypePlural) Set ¶
func (otp *ObjectTypePlural) Set(v string) error
Set raw string value and validate it against allowed values
func (*ObjectTypePlural) String ¶
func (otp *ObjectTypePlural) String() string
String representation for fmt.Print
func (*ObjectTypePlural) Type ¶
func (otp *ObjectTypePlural) Type() string
Type always returns ObjectTypePlural to satisfy [pflag.Value] interface
type OdbcParams ¶
type OwnableObjectType ¶
type OwnableObjectType string
The singular form of the type of object which can be owned.
const OwnableObjectTypeAlert OwnableObjectType = `alert`
const OwnableObjectTypeDashboard OwnableObjectType = `dashboard`
const OwnableObjectTypeQuery OwnableObjectType = `query`
func (*OwnableObjectType) Set ¶
func (oot *OwnableObjectType) Set(v string) error
Set raw string value and validate it against allowed values
func (*OwnableObjectType) String ¶
func (oot *OwnableObjectType) String() string
String representation for fmt.Print
func (*OwnableObjectType) Type ¶
func (oot *OwnableObjectType) Type() string
Type always returns OwnableObjectType to satisfy [pflag.Value] interface
type Parameter ¶
type Parameter struct {
// The literal parameter marker that appears between double curly braces in
// the query text.
Name string `json:"name,omitempty"`
// The text displayed in a parameter picking widget.
Title string `json:"title,omitempty"`
// Parameters can have several different types.
Type ParameterType `json:"type,omitempty"`
// The default value for this parameter.
Value any `json:"value,omitempty"`
}
type ParameterType ¶
type ParameterType string
Parameters can have several different types.
const ParameterTypeDatetime ParameterType = `datetime`
const ParameterTypeNumber ParameterType = `number`
const ParameterTypeText ParameterType = `text`
func (*ParameterType) Set ¶
func (pt *ParameterType) Set(v string) error
Set raw string value and validate it against allowed values
func (*ParameterType) String ¶
func (pt *ParameterType) String() string
String representation for fmt.Print
func (*ParameterType) Type ¶
func (pt *ParameterType) Type() string
Type always returns ParameterType to satisfy [pflag.Value] interface
type PermissionLevel ¶
type PermissionLevel string
This describes an enum
const PermissionLevelCanManage PermissionLevel = `CAN_MANAGE`
Can manage the query
const PermissionLevelCanRun PermissionLevel = `CAN_RUN`
Can run the query
const PermissionLevelCanView PermissionLevel = `CAN_VIEW`
Can view the query
func (*PermissionLevel) Set ¶
func (pl *PermissionLevel) Set(v string) error
Set raw string value and validate it against allowed values
func (*PermissionLevel) String ¶
func (pl *PermissionLevel) String() string
String representation for fmt.Print
func (*PermissionLevel) Type ¶
func (pl *PermissionLevel) Type() string
Type always returns PermissionLevel to satisfy [pflag.Value] interface
type PlansState ¶
type PlansState string
Whether plans exist for the execution, or the reason why they are missing
const PlansStateEmpty PlansState = `EMPTY`
const PlansStateExists PlansState = `EXISTS`
const PlansStateIgnoredLargePlansSize PlansState = `IGNORED_LARGE_PLANS_SIZE`
const PlansStateIgnoredSmallDuration PlansState = `IGNORED_SMALL_DURATION`
const PlansStateIgnoredSparkPlanType PlansState = `IGNORED_SPARK_PLAN_TYPE`
const PlansStateUnknown PlansState = `UNKNOWN`
func (*PlansState) Set ¶
func (ps *PlansState) Set(v string) error
Set raw string value and validate it against allowed values
func (*PlansState) String ¶
func (ps *PlansState) String() string
String representation for fmt.Print
func (*PlansState) Type ¶
func (ps *PlansState) Type() string
Type always returns PlansState to satisfy [pflag.Value] interface
type QueriesAPI ¶
type QueriesAPI struct {
// contains filtered or unexported fields
}
These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, execution schedule, parameters, and visualizations.
func NewQueries ¶
func NewQueries(client *client.DatabricksClient) *QueriesAPI
func (*QueriesAPI) Create ¶
func (a *QueriesAPI) Create(ctx context.Context, request QueryPostContent) (*Query, error)
Create a new query definition.
Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request.
The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the `data_source_id` from an existing query.
**Note**: You cannot add a visualization until you create the query.
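A minimal sketch of creating a query definition, assuming an already-constructed *QueriesAPI (for example from NewQueries with a configured client) and a placeholder data_source_id that must be replaced with a real SQL warehouse ID:

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func createExampleQuery(ctx context.Context, queries *sql.QueriesAPI) error {
	q, err := queries.Create(ctx, sql.QueryPostContent{
		Name:         "Orders per day",     // title shown in list views
		DataSourceId: "<data-source-uuid>", // placeholder; see the Data Sources API
		Query:        "SELECT order_date, count(*) FROM orders GROUP BY order_date",
		Description:  "Example query created via the SDK",
	})
	if err != nil {
		return err
	}
	fmt.Println("created query", q.Id)
	return nil
}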
func (*QueriesAPI) Delete ¶
func (a *QueriesAPI) Delete(ctx context.Context, request DeleteQueryRequest) error
Delete a query.
Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days.
func (*QueriesAPI) DeleteByQueryId ¶
func (a *QueriesAPI) DeleteByQueryId(ctx context.Context, queryId string) error
Delete a query.
Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days.
func (*QueriesAPI) Get ¶
func (a *QueriesAPI) Get(ctx context.Context, request GetQueryRequest) (*Query, error)
Get a query definition.
Retrieve a query object definition along with contextual permissions information about the currently authenticated user.
func (*QueriesAPI) GetByName ¶
func (a *QueriesAPI) GetByName(ctx context.Context, name string) (*Query, error)
GetByName calls QueriesAPI.QueryNameToIdMap and returns a single Query.
Returns an error if there's more than one Query with the same .Name.
Note: All Query instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*QueriesAPI) GetByQueryId ¶
func (a *QueriesAPI) GetByQueryId(ctx context.Context, queryId string) (*Query, error)
Get a query definition.
Retrieve a query object definition along with contextual permissions information about the currently authenticated user.
func (*QueriesAPI) Impl ¶
func (a *QueriesAPI) Impl() QueriesService
Impl returns low-level Queries API implementation
func (*QueriesAPI) ListAll ¶
func (a *QueriesAPI) ListAll(ctx context.Context, request ListQueriesRequest) ([]Query, error)
Get a list of queries.
Gets a list of queries. Optionally, this list can be filtered by a search term.
This method is generated by Databricks SDK Code Generator.
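A sketch of listing queries with ListAll, newest first; the dash prefix on the order field and the full text search term follow the ListQueriesRequest documentation above:

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func listRecentQueries(ctx context.Context, queries *sql.QueriesAPI) error {
	results, err := queries.ListAll(ctx, sql.ListQueriesRequest{
		Order:    "-created_at", // descending by creation time
		PageSize: 25,
		Q:        "orders", // optional full text search term
	})
	if err != nil {
		return err
	}
	for _, q := range results {
		fmt.Println(q.Id, q.Name)
	}
	return nil
}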
func (*QueriesAPI) QueryNameToIdMap ¶
func (a *QueriesAPI) QueryNameToIdMap(ctx context.Context, request ListQueriesRequest) (map[string]string, error)
QueryNameToIdMap calls QueriesAPI.ListAll and creates a map of results with Query.Name as key and Query.Id as value.
Returns an error if there's more than one Query with the same .Name.
Note: All Query instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
func (*QueriesAPI) Restore ¶
func (a *QueriesAPI) Restore(ctx context.Context, request RestoreQueryRequest) error
Restore a query.
Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts.
func (*QueriesAPI) Update ¶
func (a *QueriesAPI) Update(ctx context.Context, request QueryPostContent) (*Query, error)
Change a query definition.
Modify this query definition.
**Note**: You cannot undo this operation.
func (*QueriesAPI) WithImpl ¶
func (a *QueriesAPI) WithImpl(impl QueriesService) *QueriesAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
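One way to use WithImpl in a test without a full mocking framework is to embed the QueriesService interface in a fake and override only the methods the test exercises; this is a sketch, not the SDK's prescribed testing approach:

package example

import (
	"context"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

// fakeQueries satisfies QueriesService by embedding it; any method that is not
// overridden here would panic if called, which keeps accidental calls visible.
type fakeQueries struct {
	sql.QueriesService
}

func (f fakeQueries) Get(ctx context.Context, request sql.GetQueryRequest) (*sql.Query, error) {
	return &sql.Query{Name: "stubbed"}, nil
}

// In a test: queries := sql.NewQueries(nil).WithImpl(fakeQueries{})
// (assuming the nil client is never touched once the implementation is replaced).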
type QueriesService ¶
type QueriesService interface {
// Create a new query definition.
//
// Creates a new query definition. Queries created with this endpoint belong
// to the authenticated user making the request.
//
// The `data_source_id` field specifies the ID of the SQL warehouse to run
// this query against. You can use the Data Sources API to see a complete
// list of available SQL warehouses. Or you can copy the `data_source_id`
// from an existing query.
//
// **Note**: You cannot add a visualization until you create the query.
Create(ctx context.Context, request QueryPostContent) (*Query, error)
// Delete a query.
//
// Moves a query to the trash. Trashed queries immediately disappear from
// searches and list views, and they cannot be used for alerts. The trash is
// deleted after 30 days.
Delete(ctx context.Context, request DeleteQueryRequest) error
// Get a query definition.
//
// Retrieve a query object definition along with contextual permissions
// information about the currently authenticated user.
Get(ctx context.Context, request GetQueryRequest) (*Query, error)
// Get a list of queries.
//
// Gets a list of queries. Optionally, this list can be filtered by a search
// term.
//
// Use ListAll() to get all Query instances, which will iterate over every result page.
List(ctx context.Context, request ListQueriesRequest) (*QueryList, error)
// Restore a query.
//
// Restore a query that has been moved to the trash. A restored query
// appears in list views and searches. You can use restored queries for
// alerts.
Restore(ctx context.Context, request RestoreQueryRequest) error
// Change a query definition.
//
// Modify this query definition.
//
// **Note**: You cannot undo this operation.
Update(ctx context.Context, request QueryPostContent) (*Query, error)
}
These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, execution schedule, parameters, and visualizations.
type Query ¶
type Query struct {
// Describes whether the authenticated user is allowed to edit the
// definition of this query.
CanEdit bool `json:"can_edit,omitempty"`
// The timestamp when this query was created.
CreatedAt string `json:"created_at,omitempty"`
// Data Source ID. The UUID that uniquely identifies this data source / SQL
// warehouse across the API.
DataSourceId string `json:"data_source_id,omitempty"`
// General description that conveys additional information about this query
// such as usage notes.
Description string `json:"description,omitempty"`
Id string `json:"id,omitempty"`
// Indicates whether the query is trashed. Trashed queries can't be used in
// dashboards, or appear in search results. If this boolean is `true`, the
// `options` property for this query includes a `moved_to_trash_at`
// timestamp. Trashed queries are permanently deleted after 30 days.
IsArchived bool `json:"is_archived,omitempty"`
// Whether the query is a draft. Draft queries only appear in list views for
// their owners. Visualizations from draft queries cannot appear on
// dashboards.
IsDraft bool `json:"is_draft,omitempty"`
// Whether this query object appears in the current user's favorites list.
// This flag determines whether the star icon for favorites is selected.
IsFavorite bool `json:"is_favorite,omitempty"`
// Text parameter types are not safe from SQL injection for all types of
// data source. Set this Boolean parameter to `true` if a query either does
// not use any text type parameters or uses a data source type where text
// type parameters are handled safely.
IsSafe bool `json:"is_safe,omitempty"`
LastModifiedBy *User `json:"last_modified_by,omitempty"`
// The ID of the user who last saved changes to this query.
LastModifiedById int `json:"last_modified_by_id,omitempty"`
// If there is a cached result for this query and user, this field includes
// the query result ID. If this query uses parameters, this field is always
// null.
LatestQueryDataId string `json:"latest_query_data_id,omitempty"`
// The title of this query that appears in list views, widget headings, and
// on the query page.
Name string `json:"name,omitempty"`
Options *QueryOptions `json:"options,omitempty"`
// This describes an enum
PermissionTier PermissionLevel `json:"permission_tier,omitempty"`
// The text of the query to be run.
Query string `json:"query,omitempty"`
// A SHA-256 hash of the query text along with the authenticated user ID.
QueryHash string `json:"query_hash,omitempty"`
Schedule *QueryInterval `json:"schedule,omitempty"`
Tags []string `json:"tags,omitempty"`
// The timestamp at which this query was last updated.
UpdatedAt string `json:"updated_at,omitempty"`
User *User `json:"user,omitempty"`
// The ID of the user who created this query.
UserId int `json:"user_id,omitempty"`
Visualizations []Visualization `json:"visualizations,omitempty"`
}
type QueryFilter ¶
type QueryFilter struct {
QueryStartTimeRange *TimeRange `json:"query_start_time_range,omitempty"`
Statuses []QueryStatus `json:"statuses,omitempty"`
// A list of user IDs who ran the queries.
UserIds []int `json:"user_ids,omitempty"`
// A list of warehouse IDs.
WarehouseIds []string `json:"warehouse_ids,omitempty"`
}
A filter to limit query history results. This field is optional.
type QueryHistoryAPI ¶
type QueryHistoryAPI struct {
// contains filtered or unexported fields
}
Access the history of queries through SQL warehouses.
func NewQueryHistory ¶
func NewQueryHistory(client *client.DatabricksClient) *QueryHistoryAPI
func (*QueryHistoryAPI) Impl ¶
func (a *QueryHistoryAPI) Impl() QueryHistoryService
Impl returns low-level QueryHistory API implementation
func (*QueryHistoryAPI) ListAll ¶
func (a *QueryHistoryAPI) ListAll(ctx context.Context, request ListQueryHistoryRequest) ([]QueryInfo, error)
List Queries.
List the history of queries through SQL warehouses.
You can filter by user ID, warehouse ID, status, and time range.
This method is generated by Databricks SDK Code Generator.
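A sketch of pulling query history with a filter, using only the fields documented above (statuses, warehouse IDs, metrics):

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func listFinishedQueries(ctx context.Context, history *sql.QueryHistoryAPI, warehouseId string) error {
	infos, err := history.ListAll(ctx, sql.ListQueryHistoryRequest{
		IncludeMetrics: true,
		MaxResults:     100,
		FilterBy: &sql.QueryFilter{
			Statuses:     []sql.QueryStatus{sql.QueryStatusFinished},
			WarehouseIds: []string{warehouseId},
		},
	})
	if err != nil {
		return err
	}
	for _, qi := range infos {
		fmt.Println(qi.QueryId, qi.Duration, "ms")
	}
	return nil
}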
func (*QueryHistoryAPI) WithImpl ¶
func (a *QueryHistoryAPI) WithImpl(impl QueryHistoryService) *QueryHistoryAPI
WithImpl could be used to override low-level API implementations for unit testing purposes with github.com/golang/mock or other mocking frameworks.
type QueryHistoryService ¶
type QueryHistoryService interface {
// List Queries.
//
// List the history of queries through SQL warehouses.
//
// You can filter by user ID, warehouse ID, status, and time range.
//
// Use ListAll() to get all QueryInfo instances, which will iterate over every result page.
List(ctx context.Context, request ListQueryHistoryRequest) (*ListQueriesResponse, error)
}
Access the history of queries through SQL warehouses.
type QueryInfo ¶
type QueryInfo struct {
// Channel information for the SQL warehouse at the time of query execution
ChannelUsed *ChannelInfo `json:"channel_used,omitempty"`
// Total execution time of the query from the client’s point of view, in
// milliseconds.
Duration int `json:"duration,omitempty"`
// Alias for `warehouse_id`.
EndpointId string `json:"endpoint_id,omitempty"`
// Message describing why the query could not complete.
ErrorMessage string `json:"error_message,omitempty"`
// The ID of the user whose credentials were used to run the query.
ExecutedAsUserId int `json:"executed_as_user_id,omitempty"`
// The email address or username of the user whose credentials were used to
// run the query.
ExecutedAsUserName string `json:"executed_as_user_name,omitempty"`
// The time execution of the query ended.
ExecutionEndTimeMs int `json:"execution_end_time_ms,omitempty"`
// Whether more updates for the query are expected.
IsFinal bool `json:"is_final,omitempty"`
// A key that can be used to look up query details.
LookupKey string `json:"lookup_key,omitempty"`
// Metrics about query execution.
Metrics *QueryMetrics `json:"metrics,omitempty"`
// Whether plans exist for the execution, or the reason why they are missing
PlansState PlansState `json:"plans_state,omitempty"`
// The time the query ended.
QueryEndTimeMs int `json:"query_end_time_ms,omitempty"`
// The query ID.
QueryId string `json:"query_id,omitempty"`
// The time the query started.
QueryStartTimeMs int `json:"query_start_time_ms,omitempty"`
// The text of the query.
QueryText string `json:"query_text,omitempty"`
// The number of results returned by the query.
RowsProduced int `json:"rows_produced,omitempty"`
// URL to the query plan.
SparkUiUrl string `json:"spark_ui_url,omitempty"`
// Type of statement for this query
StatementType QueryStatementType `json:"statement_type,omitempty"`
// This describes an enum
Status QueryStatus `json:"status,omitempty"`
// The ID of the user who ran the query.
UserId int `json:"user_id,omitempty"`
// The email address or username of the user who ran the query.
UserName string `json:"user_name,omitempty"`
// Warehouse ID.
WarehouseId string `json:"warehouse_id,omitempty"`
}
type QueryInterval ¶
type QueryInterval struct {
// For weekly runs, the day of the week to start the run.
DayOfWeek string `json:"day_of_week,omitempty"`
// Integer number of seconds between runs.
Interval int `json:"interval,omitempty"`
// For daily, weekly, and monthly runs, the time of day to start the run.
Time string `json:"time,omitempty"`
// A date after which this schedule no longer applies.
Until string `json:"until,omitempty"`
}
type QueryList ¶
type QueryList struct {
// The total number of queries.
Count int `json:"count,omitempty"`
// The page number that is currently displayed.
Page int `json:"page,omitempty"`
// The number of queries per page.
PageSize int `json:"page_size,omitempty"`
// List of queries returned.
Results []Query `json:"results,omitempty"`
}
type QueryMetrics ¶
type QueryMetrics struct {
// Time spent loading metadata and optimizing the query, in milliseconds.
CompilationTimeMs int `json:"compilation_time_ms,omitempty"`
// Time spent executing the query, in milliseconds.
ExecutionTimeMs int `json:"execution_time_ms,omitempty"`
// Total amount of data sent over the network, in bytes.
NetworkSentBytes int `json:"network_sent_bytes,omitempty"`
// Total execution time for all individual Photon query engine tasks in the
// query, in milliseconds.
PhotonTotalTimeMs int `json:"photon_total_time_ms,omitempty"`
// Time spent waiting to execute the query because the SQL warehouse is
// already running the maximum number of concurrent queries, in
// milliseconds.
QueuedOverloadTimeMs int `json:"queued_overload_time_ms,omitempty"`
// Time waiting for compute resources to be provisioned for the SQL
// warehouse, in milliseconds.
QueuedProvisioningTimeMs int `json:"queued_provisioning_time_ms,omitempty"`
// Total size of data read by the query, in bytes.
ReadBytes int `json:"read_bytes,omitempty"`
// Size of persistent data read from the cache, in bytes.
ReadCacheBytes int `json:"read_cache_bytes,omitempty"`
// Number of files read after pruning.
ReadFilesCount int `json:"read_files_count,omitempty"`
// Number of partitions read after pruning.
ReadPartitionsCount int `json:"read_partitions_count,omitempty"`
// Size of persistent data read from cloud object storage on your cloud
// tenant, in bytes.
ReadRemoteBytes int `json:"read_remote_bytes,omitempty"`
// Time spent fetching the query results after the execution finished, in
// milliseconds.
ResultFetchTimeMs int `json:"result_fetch_time_ms,omitempty"`
// true if the query result was fetched from cache, false otherwise.
ResultFromCache bool `json:"result_from_cache,omitempty"`
// Total number of rows returned by the query.
RowsProducedCount int `json:"rows_produced_count,omitempty"`
// Total number of rows read by the query.
RowsReadCount int `json:"rows_read_count,omitempty"`
// Size of data temporarily written to disk while executing the query, in
// bytes.
SpillToDiskBytes int `json:"spill_to_disk_bytes,omitempty"`
// Sum of execution time for all of the query’s tasks, in milliseconds.
TaskTotalTimeMs int `json:"task_total_time_ms,omitempty"`
// Number of files that would have been read without pruning.
TotalFilesCount int `json:"total_files_count,omitempty"`
// Number of partitions that would have been read without pruning.
TotalPartitionsCount int `json:"total_partitions_count,omitempty"`
// Total execution time of the query from the client’s point of view, in
// milliseconds.
TotalTimeMs int `json:"total_time_ms,omitempty"`
// Size of persistent data written to cloud object storage in your cloud
// tenant, in bytes.
WriteRemoteBytes int `json:"write_remote_bytes,omitempty"`
}
Metrics about query execution.
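All timing fields are reported in milliseconds, so the split between compilation, queueing, and execution can be read directly off the struct. A small sketch that summarizes where a query spent its time:

package example

import (
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func summarizeMetrics(m *sql.QueryMetrics) string {
	if m == nil {
		return "no metrics recorded"
	}
	// Queue time is split between overload (too many concurrent queries) and
	// provisioning (waiting for warehouse compute).
	queued := m.QueuedOverloadTimeMs + m.QueuedProvisioningTimeMs
	return fmt.Sprintf("total=%dms compile=%dms queued=%dms exec=%dms rows=%d",
		m.TotalTimeMs, m.CompilationTimeMs, queued, m.ExecutionTimeMs, m.RowsProducedCount)
}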
type QueryOptions ¶
type QueryOptions struct {
// The timestamp when this query was moved to trash. Only present when the
// `is_archived` property is `true`. Trashed items are deleted after thirty
// days.
MovedToTrashAt string `json:"moved_to_trash_at,omitempty"`
Parameters []Parameter `json:"parameters,omitempty"`
}
type QueryPostContent ¶
type QueryPostContent struct {
// The ID of the data source / SQL warehouse where this query will run.
DataSourceId string `json:"data_source_id,omitempty"`
// General description that can convey additional information about this
// query such as usage notes.
Description string `json:"description,omitempty"`
// The name or title of this query to display in list views.
Name string `json:"name,omitempty"`
// Exclusively used for storing a list of parameter definitions. A parameter is
// an object with `title`, `name`, `type`, and `value` properties. The
// `value` field here is the default value. It can be overridden at runtime.
Options any `json:"options,omitempty"`
// The text of the query.
Query string `json:"query,omitempty"`
QueryId string `json:"-" url:"-"`
// JSON object that describes the scheduled execution frequency. A schedule
// object includes `interval`, `time`, `day_of_week`, and `until` fields. If
// a schedule is supplied, then only `interval` is required. All other
// fields can be `null`.
Schedule *QueryInterval `json:"schedule,omitempty"`
}
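Since QueryPostContent is also the Update payload (QueryId identifies the target query), a refresh schedule can be attached to an existing query like this; a sketch that sets only the schedule and leaves the other optional fields empty:

package example

import (
	"context"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func scheduleHourlyRefresh(ctx context.Context, queries *sql.QueriesAPI, queryId string) error {
	_, err := queries.Update(ctx, sql.QueryPostContent{
		QueryId: queryId,
		Schedule: &sql.QueryInterval{
			Interval: 3600, // seconds between runs, i.e. hourly
		},
	})
	return err
}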
type QueryStatementType ¶
type QueryStatementType string
Type of statement for this query
const QueryStatementTypeAlter QueryStatementType = `ALTER`
const QueryStatementTypeAnalyze QueryStatementType = `ANALYZE`
const QueryStatementTypeCopy QueryStatementType = `COPY`
const QueryStatementTypeCreate QueryStatementType = `CREATE`
const QueryStatementTypeDelete QueryStatementType = `DELETE`
const QueryStatementTypeDescribe QueryStatementType = `DESCRIBE`
const QueryStatementTypeDrop QueryStatementType = `DROP`
const QueryStatementTypeExplain QueryStatementType = `EXPLAIN`
const QueryStatementTypeGrant QueryStatementType = `GRANT`
const QueryStatementTypeInsert QueryStatementType = `INSERT`
const QueryStatementTypeMerge QueryStatementType = `MERGE`
const QueryStatementTypeOptimize QueryStatementType = `OPTIMIZE`
const QueryStatementTypeOther QueryStatementType = `OTHER`
const QueryStatementTypeRefresh QueryStatementType = `REFRESH`
const QueryStatementTypeReplace QueryStatementType = `REPLACE`
const QueryStatementTypeRevoke QueryStatementType = `REVOKE`
const QueryStatementTypeSelect QueryStatementType = `SELECT`
const QueryStatementTypeSet QueryStatementType = `SET`
const QueryStatementTypeShow QueryStatementType = `SHOW`
const QueryStatementTypeTruncate QueryStatementType = `TRUNCATE`
const QueryStatementTypeUpdate QueryStatementType = `UPDATE`
const QueryStatementTypeUse QueryStatementType = `USE`
func (*QueryStatementType) Set ¶
func (qst *QueryStatementType) Set(v string) error
Set raw string value and validate it against allowed values
func (*QueryStatementType) String ¶
func (qst *QueryStatementType) String() string
String representation for fmt.Print
func (*QueryStatementType) Type ¶
func (qst *QueryStatementType) Type() string
Type always returns QueryStatementType to satisfy [pflag.Value] interface
type QueryStatus ¶
type QueryStatus string
This describes an enum
const QueryStatusCanceled QueryStatus = `CANCELED`
Query has been cancelled by the user.
const QueryStatusFailed QueryStatus = `FAILED`
Query has failed.
const QueryStatusFinished QueryStatus = `FINISHED`
Query has completed.
const QueryStatusQueued QueryStatus = `QUEUED`
Query has been received and queued.
const QueryStatusRunning QueryStatus = `RUNNING`
Query has started.
func (*QueryStatus) Set ¶
func (qs *QueryStatus) Set(v string) error
Set raw string value and validate it against allowed values
func (*QueryStatus) String ¶
func (qs *QueryStatus) String() string
String representation for fmt.Print
func (*QueryStatus) Type ¶
func (qs *QueryStatus) Type() string
Type always returns QueryStatus to satisfy [pflag.Value] interface
type RefreshSchedule ¶
type RefreshSchedule struct {
// Cron string representing the refresh schedule.
Cron string `json:"cron,omitempty"`
// ID of the SQL warehouse to refresh with. If `null`, the query's SQL
// warehouse will be used for the refresh.
DataSourceId string `json:"data_source_id,omitempty"`
// ID of the refresh schedule.
Id string `json:"id,omitempty"`
}
type RepeatedEndpointConfPairs ¶
type RepeatedEndpointConfPairs struct {
// Deprecated: Use configuration_pairs
ConfigPair []EndpointConfPair `json:"config_pair,omitempty"`
ConfigurationPairs []EndpointConfPair `json:"configuration_pairs,omitempty"`
}
type RestoreDashboardRequest ¶
type RestoreDashboardRequest struct {
DashboardId string `json:"-" url:"-"`
}
Restore a dashboard
type RestoreQueryRequest ¶
type RestoreQueryRequest struct {
QueryId string `json:"-" url:"-"`
}
Restore a query
type SetRequest ¶
type SetRequest struct {
AccessControlList []AccessControl `json:"access_control_list,omitempty"`
// Object ID. The ACL for the object with this UUID is overwritten by this
// request's POST content.
ObjectId string `json:"-" url:"-"`
// The type of object permission to set.
ObjectType ObjectTypePlural `json:"-" url:"-"`
}
Set object ACL
type SetResponse ¶
type SetResponse struct {
AccessControlList []AccessControl `json:"access_control_list,omitempty"`
// A singular noun object type.
ObjectId ObjectType `json:"object_id,omitempty"`
// An object's type and UUID, separated by a forward slash (/) character.
ObjectType string `json:"object_type,omitempty"`
}
type SetWorkspaceWarehouseConfigRequest ¶
type SetWorkspaceWarehouseConfigRequest struct {
// Optional: Channel selection details
Channel *Channel `json:"channel,omitempty"`
// Deprecated: Use sql_configuration_parameters
ConfigParam *RepeatedEndpointConfPairs `json:"config_param,omitempty"`
// Spark confs for external hive metastore configuration. JSON serialized
// size must be less than 512K.
DataAccessConfig []EndpointConfPair `json:"data_access_config,omitempty"`
// Enable Serverless compute for SQL Endpoints
//
// Deprecated: Use enable_serverless_compute TODO(SC-79930): Remove the
// field once clients are updated
EnableDatabricksCompute bool `json:"enable_databricks_compute,omitempty"`
// Enable Serverless compute for SQL Endpoints
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
// List of Warehouse Types allowed in this workspace (limits the allowed
// values of the type field in CreateWarehouse and EditWarehouse). Note: Some
// types cannot be disabled; they don't need to be specified in
// SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing
// warehouses to be converted to another type. Used by the frontend to save
// specific type availability in the warehouse create and edit form UI.
EnabledWarehouseTypes []WarehouseTypePair `json:"enabled_warehouse_types,omitempty"`
// Deprecated: Use sql_configuration_parameters
GlobalParam *RepeatedEndpointConfPairs `json:"global_param,omitempty"`
// GCP only: Google Service Account passed to the cluster to access Google
// Cloud Storage
GoogleServiceAccount string `json:"google_service_account,omitempty"`
// AWS Only: Instance profile used to pass IAM role to the cluster
InstanceProfileArn string `json:"instance_profile_arn,omitempty"`
// Security policy for endpoints
SecurityPolicy SetWorkspaceWarehouseConfigRequestSecurityPolicy `json:"security_policy,omitempty"`
// Internal. Used by frontend to save Serverless Compute agreement value.
ServerlessAgreement bool `json:"serverless_agreement,omitempty"`
// SQL configuration parameters
SqlConfigurationParameters *RepeatedEndpointConfPairs `json:"sql_configuration_parameters,omitempty"`
}
type SetWorkspaceWarehouseConfigRequestSecurityPolicy ¶
type SetWorkspaceWarehouseConfigRequestSecurityPolicy string
Security policy for endpoints
const SetWorkspaceWarehouseConfigRequestSecurityPolicyDataAccessControl SetWorkspaceWarehouseConfigRequestSecurityPolicy = `DATA_ACCESS_CONTROL`
const SetWorkspaceWarehouseConfigRequestSecurityPolicyNone SetWorkspaceWarehouseConfigRequestSecurityPolicy = `NONE`
const SetWorkspaceWarehouseConfigRequestSecurityPolicyPassthrough SetWorkspaceWarehouseConfigRequestSecurityPolicy = `PASSTHROUGH`
func (*SetWorkspaceWarehouseConfigRequestSecurityPolicy) Set ¶
func (swwcrsp *SetWorkspaceWarehouseConfigRequestSecurityPolicy) Set(v string) error
Set raw string value and validate it against allowed values
func (*SetWorkspaceWarehouseConfigRequestSecurityPolicy) String ¶
func (swwcrsp *SetWorkspaceWarehouseConfigRequestSecurityPolicy) String() string
String representation for fmt.Print
func (*SetWorkspaceWarehouseConfigRequestSecurityPolicy) Type ¶
func (swwcrsp *SetWorkspaceWarehouseConfigRequestSecurityPolicy) Type() string
Type always returns SetWorkspaceWarehouseConfigRequestSecurityPolicy to satisfy [pflag.Value] interface
type SpotInstancePolicy ¶
type SpotInstancePolicy string
Configures whether the warehouse should use spot instances.
const SpotInstancePolicyCostOptimized SpotInstancePolicy = `COST_OPTIMIZED`
const SpotInstancePolicyPolicyUnspecified SpotInstancePolicy = `POLICY_UNSPECIFIED`
const SpotInstancePolicyReliabilityOptimized SpotInstancePolicy = `RELIABILITY_OPTIMIZED`
func (*SpotInstancePolicy) Set ¶
func (sip *SpotInstancePolicy) Set(v string) error
Set raw string value and validate it against allowed values
func (*SpotInstancePolicy) String ¶
func (sip *SpotInstancePolicy) String() string
String representation for fmt.Print
func (*SpotInstancePolicy) Type ¶
func (sip *SpotInstancePolicy) Type() string
Type always returns SpotInstancePolicy to satisfy [pflag.Value] interface
type StartRequest ¶
type StartRequest struct {
// Required. Id of the SQL warehouse.
Id string `json:"-" url:"-"`
}
Start a warehouse
type State ¶
type State string
State of the warehouse
const StateDeleted State = `DELETED`
const StateDeleting State = `DELETING`
const StateRunning State = `RUNNING`
const StateStarting State = `STARTING`
const StateStopped State = `STOPPED`
const StateStopping State = `STOPPING`
type Status ¶
type Status string
Health status of the endpoint.
const StatusDegraded Status = `DEGRADED`
const StatusFailed Status = `FAILED`
const StatusHealthy Status = `HEALTHY`
const StatusStatusUnspecified Status = `STATUS_UNSPECIFIED`
type StopRequest ¶
type StopRequest struct {
// Required. Id of the SQL warehouse.
Id string `json:"-" url:"-"`
}
Stop a warehouse
type Subscription ¶
type Subscription struct {
// ID of the alert.
AlertId string `json:"alert_id,omitempty"`
// Alert destination subscribed to the alert, if it exists. Alert
// destinations can be configured by admins through the UI. See [here].
//
// [here]: https://docs.databricks.com/sql/admin/alert-destinations.html
Destination *Destination `json:"destination,omitempty"`
// ID of the alert subscription.
Id string `json:"id,omitempty"`
User *User `json:"user,omitempty"`
}
type Success ¶
type Success struct {
Message SuccessMessage `json:"message,omitempty"`
}
type SuccessMessage ¶
type SuccessMessage string
const SuccessMessageSuccess SuccessMessage = `Success`
func (*SuccessMessage) Set ¶
func (sm *SuccessMessage) Set(v string) error
Set raw string value and validate it against allowed values
func (*SuccessMessage) String ¶
func (sm *SuccessMessage) String() string
String representation for fmt.Print
func (*SuccessMessage) Type ¶
func (sm *SuccessMessage) Type() string
Type always returns SuccessMessage to satisfy [pflag.Value] interface
type TerminationReason ¶
type TerminationReason struct {
// status code indicating why the cluster was terminated
Code TerminationReasonCode `json:"code,omitempty"`
// list of parameters that provide additional information about why the
// cluster was terminated
Parameters map[string]string `json:"parameters,omitempty"`
// type of the termination
Type TerminationReasonType `json:"type,omitempty"`
}
type TerminationReasonCode ¶
type TerminationReasonCode string
status code indicating why the cluster was terminated
const TerminationReasonCodeAbuseDetected TerminationReasonCode = `ABUSE_DETECTED`
const TerminationReasonCodeAttachProjectFailure TerminationReasonCode = `ATTACH_PROJECT_FAILURE`
const TerminationReasonCodeAwsAuthorizationFailure TerminationReasonCode = `AWS_AUTHORIZATION_FAILURE`
const TerminationReasonCodeAwsInsufficientFreeAddressesInSubnetFailure TerminationReasonCode = `AWS_INSUFFICIENT_FREE_ADDRESSES_IN_SUBNET_FAILURE`
const TerminationReasonCodeAwsInsufficientInstanceCapacityFailure TerminationReasonCode = `AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE`
const TerminationReasonCodeAwsMaxSpotInstanceCountExceededFailure TerminationReasonCode = `AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE`
const TerminationReasonCodeAwsRequestLimitExceeded TerminationReasonCode = `AWS_REQUEST_LIMIT_EXCEEDED`
const TerminationReasonCodeAwsUnsupportedFailure TerminationReasonCode = `AWS_UNSUPPORTED_FAILURE`
const TerminationReasonCodeAzureByokKeyPermissionFailure TerminationReasonCode = `AZURE_BYOK_KEY_PERMISSION_FAILURE`
const TerminationReasonCodeAzureEphemeralDiskFailure TerminationReasonCode = `AZURE_EPHEMERAL_DISK_FAILURE`
const TerminationReasonCodeAzureInvalidDeploymentTemplate TerminationReasonCode = `AZURE_INVALID_DEPLOYMENT_TEMPLATE`
const TerminationReasonCodeAzureOperationNotAllowedException TerminationReasonCode = `AZURE_OPERATION_NOT_ALLOWED_EXCEPTION`
const TerminationReasonCodeAzureQuotaExceededException TerminationReasonCode = `AZURE_QUOTA_EXCEEDED_EXCEPTION`
const TerminationReasonCodeAzureResourceManagerThrottling TerminationReasonCode = `AZURE_RESOURCE_MANAGER_THROTTLING`
const TerminationReasonCodeAzureResourceProviderThrottling TerminationReasonCode = `AZURE_RESOURCE_PROVIDER_THROTTLING`
const TerminationReasonCodeAzureUnexpectedDeploymentTemplateFailure TerminationReasonCode = `AZURE_UNEXPECTED_DEPLOYMENT_TEMPLATE_FAILURE`
const TerminationReasonCodeAzureVmExtensionFailure TerminationReasonCode = `AZURE_VM_EXTENSION_FAILURE`
const TerminationReasonCodeAzureVnetConfigurationFailure TerminationReasonCode = `AZURE_VNET_CONFIGURATION_FAILURE`
const TerminationReasonCodeBootstrapTimeout TerminationReasonCode = `BOOTSTRAP_TIMEOUT`
const TerminationReasonCodeBootstrapTimeoutCloudProviderException TerminationReasonCode = `BOOTSTRAP_TIMEOUT_CLOUD_PROVIDER_EXCEPTION`
const TerminationReasonCodeCloudProviderDiskSetupFailure TerminationReasonCode = `CLOUD_PROVIDER_DISK_SETUP_FAILURE`
const TerminationReasonCodeCloudProviderLaunchFailure TerminationReasonCode = `CLOUD_PROVIDER_LAUNCH_FAILURE`
const TerminationReasonCodeCloudProviderResourceStockout TerminationReasonCode = `CLOUD_PROVIDER_RESOURCE_STOCKOUT`
const TerminationReasonCodeCloudProviderShutdown TerminationReasonCode = `CLOUD_PROVIDER_SHUTDOWN`
const TerminationReasonCodeCommunicationLost TerminationReasonCode = `COMMUNICATION_LOST`
const TerminationReasonCodeContainerLaunchFailure TerminationReasonCode = `CONTAINER_LAUNCH_FAILURE`
const TerminationReasonCodeControlPlaneRequestFailure TerminationReasonCode = `CONTROL_PLANE_REQUEST_FAILURE`
const TerminationReasonCodeDatabaseConnectionFailure TerminationReasonCode = `DATABASE_CONNECTION_FAILURE`
const TerminationReasonCodeDbfsComponentUnhealthy TerminationReasonCode = `DBFS_COMPONENT_UNHEALTHY`
const TerminationReasonCodeDockerImagePullFailure TerminationReasonCode = `DOCKER_IMAGE_PULL_FAILURE`
const TerminationReasonCodeDriverUnreachable TerminationReasonCode = `DRIVER_UNREACHABLE`
const TerminationReasonCodeDriverUnresponsive TerminationReasonCode = `DRIVER_UNRESPONSIVE`
const TerminationReasonCodeExecutionComponentUnhealthy TerminationReasonCode = `EXECUTION_COMPONENT_UNHEALTHY`
const TerminationReasonCodeGcpQuotaExceeded TerminationReasonCode = `GCP_QUOTA_EXCEEDED`
const TerminationReasonCodeGcpServiceAccountDeleted TerminationReasonCode = `GCP_SERVICE_ACCOUNT_DELETED`
const TerminationReasonCodeGlobalInitScriptFailure TerminationReasonCode = `GLOBAL_INIT_SCRIPT_FAILURE`
const TerminationReasonCodeHiveMetastoreProvisioningFailure TerminationReasonCode = `HIVE_METASTORE_PROVISIONING_FAILURE`
const TerminationReasonCodeImagePullPermissionDenied TerminationReasonCode = `IMAGE_PULL_PERMISSION_DENIED`
const TerminationReasonCodeInactivity TerminationReasonCode = `INACTIVITY`
const TerminationReasonCodeInitScriptFailure TerminationReasonCode = `INIT_SCRIPT_FAILURE`
const TerminationReasonCodeInstancePoolClusterFailure TerminationReasonCode = `INSTANCE_POOL_CLUSTER_FAILURE`
const TerminationReasonCodeInstanceUnreachable TerminationReasonCode = `INSTANCE_UNREACHABLE`
const TerminationReasonCodeInternalError TerminationReasonCode = `INTERNAL_ERROR`
const TerminationReasonCodeInvalidArgument TerminationReasonCode = `INVALID_ARGUMENT`
const TerminationReasonCodeInvalidSparkImage TerminationReasonCode = `INVALID_SPARK_IMAGE`
const TerminationReasonCodeIpExhaustionFailure TerminationReasonCode = `IP_EXHAUSTION_FAILURE`
const TerminationReasonCodeJobFinished TerminationReasonCode = `JOB_FINISHED`
const TerminationReasonCodeKsAutoscalingFailure TerminationReasonCode = `K8S_AUTOSCALING_FAILURE`
const TerminationReasonCodeKsDbrClusterLaunchTimeout TerminationReasonCode = `K8S_DBR_CLUSTER_LAUNCH_TIMEOUT`
const TerminationReasonCodeMetastoreComponentUnhealthy TerminationReasonCode = `METASTORE_COMPONENT_UNHEALTHY`
const TerminationReasonCodeNephosResourceManagement TerminationReasonCode = `NEPHOS_RESOURCE_MANAGEMENT`
const TerminationReasonCodeNetworkConfigurationFailure TerminationReasonCode = `NETWORK_CONFIGURATION_FAILURE`
const TerminationReasonCodeNfsMountFailure TerminationReasonCode = `NFS_MOUNT_FAILURE`
const TerminationReasonCodeNpipTunnelSetupFailure TerminationReasonCode = `NPIP_TUNNEL_SETUP_FAILURE`
const TerminationReasonCodeNpipTunnelTokenFailure TerminationReasonCode = `NPIP_TUNNEL_TOKEN_FAILURE`
const TerminationReasonCodeRequestRejected TerminationReasonCode = `REQUEST_REJECTED`
const TerminationReasonCodeRequestThrottled TerminationReasonCode = `REQUEST_THROTTLED`
const TerminationReasonCodeSecretResolutionError TerminationReasonCode = `SECRET_RESOLUTION_ERROR`
const TerminationReasonCodeSecurityDaemonRegistrationException TerminationReasonCode = `SECURITY_DAEMON_REGISTRATION_EXCEPTION`
const TerminationReasonCodeSelfBootstrapFailure TerminationReasonCode = `SELF_BOOTSTRAP_FAILURE`
const TerminationReasonCodeSkippedSlowNodes TerminationReasonCode = `SKIPPED_SLOW_NODES`
const TerminationReasonCodeSlowImageDownload TerminationReasonCode = `SLOW_IMAGE_DOWNLOAD`
const TerminationReasonCodeSparkError TerminationReasonCode = `SPARK_ERROR`
const TerminationReasonCodeSparkImageDownloadFailure TerminationReasonCode = `SPARK_IMAGE_DOWNLOAD_FAILURE`
const TerminationReasonCodeSparkStartupFailure TerminationReasonCode = `SPARK_STARTUP_FAILURE`
const TerminationReasonCodeSpotInstanceTermination TerminationReasonCode = `SPOT_INSTANCE_TERMINATION`
const TerminationReasonCodeStorageDownloadFailure TerminationReasonCode = `STORAGE_DOWNLOAD_FAILURE`
const TerminationReasonCodeStsClientSetupFailure TerminationReasonCode = `STS_CLIENT_SETUP_FAILURE`
const TerminationReasonCodeSubnetExhaustedFailure TerminationReasonCode = `SUBNET_EXHAUSTED_FAILURE`
const TerminationReasonCodeTrialExpired TerminationReasonCode = `TRIAL_EXPIRED`
const TerminationReasonCodeUnexpectedLaunchFailure TerminationReasonCode = `UNEXPECTED_LAUNCH_FAILURE`
const TerminationReasonCodeUnknown TerminationReasonCode = `UNKNOWN`
const TerminationReasonCodeUnsupportedInstanceType TerminationReasonCode = `UNSUPPORTED_INSTANCE_TYPE`
const TerminationReasonCodeUpdateInstanceProfileFailure TerminationReasonCode = `UPDATE_INSTANCE_PROFILE_FAILURE`
const TerminationReasonCodeUserRequest TerminationReasonCode = `USER_REQUEST`
const TerminationReasonCodeWorkerSetupFailure TerminationReasonCode = `WORKER_SETUP_FAILURE`
const TerminationReasonCodeWorkspaceCancelledError TerminationReasonCode = `WORKSPACE_CANCELLED_ERROR`
const TerminationReasonCodeWorkspaceConfigurationError TerminationReasonCode = `WORKSPACE_CONFIGURATION_ERROR`
func (*TerminationReasonCode) Set ¶
func (trc *TerminationReasonCode) Set(v string) error
Set raw string value and validate it against allowed values
func (*TerminationReasonCode) String ¶
func (trc *TerminationReasonCode) String() string
String representation for fmt.Print
func (*TerminationReasonCode) Type ¶
func (trc *TerminationReasonCode) Type() string
Type always returns TerminationReasonCode to satisfy [pflag.Value] interface
type TerminationReasonType ¶
type TerminationReasonType string
type of the termination
const TerminationReasonTypeClientError TerminationReasonType = `CLIENT_ERROR`
const TerminationReasonTypeCloudFailure TerminationReasonType = `CLOUD_FAILURE`
const TerminationReasonTypeServiceFault TerminationReasonType = `SERVICE_FAULT`
const TerminationReasonTypeSuccess TerminationReasonType = `SUCCESS`
func (*TerminationReasonType) Set ¶
func (trt *TerminationReasonType) Set(v string) error
Set raw string value and validate it against allowed values
func (*TerminationReasonType) String ¶
func (trt *TerminationReasonType) String() string
String representation for fmt.Print
func (*TerminationReasonType) Type ¶
func (trt *TerminationReasonType) Type() string
Type always returns TerminationReasonType to satisfy [pflag.Value] interface
type TransferOwnershipObjectId ¶
type TransferOwnershipObjectId struct {
// Email address for the new owner, who must exist in the workspace.
NewOwner string `json:"new_owner,omitempty"`
}
type TransferOwnershipRequest ¶
type TransferOwnershipRequest struct {
// Email address for the new owner, who must exist in the workspace.
NewOwner string `json:"new_owner,omitempty"`
// The ID of the object on which to change ownership.
ObjectId TransferOwnershipObjectId `json:"-" url:"-"`
// The type of object on which to change ownership.
ObjectType OwnableObjectType `json:"-" url:"-"`
}
Transfer object ownership
type UnsubscribeRequest ¶
type UnsubscribeRequest struct {
AlertId string `json:"-" url:"-"`
SubscriptionId string `json:"-" url:"-"`
}
Unsubscribe from an alert
type User ¶
type User struct {
Email string `json:"email,omitempty"`
Id int `json:"id,omitempty"`
// Whether this user is an admin in the Databricks workspace.
IsDbAdmin bool `json:"is_db_admin,omitempty"`
Name string `json:"name,omitempty"`
// The URL for the gravatar profile picture tied to this user's email
// address.
ProfileImageUrl string `json:"profile_image_url,omitempty"`
}
type Visualization ¶
type Visualization struct {
CreatedAt string `json:"created_at,omitempty"`
// A short description of this visualization. This is not displayed in the
// UI.
Description string `json:"description,omitempty"`
// The UUID for this visualization.
Id string `json:"id,omitempty"`
// The name of the visualization that appears on dashboards and the query
// screen.
Name string `json:"name,omitempty"`
// The options object varies widely from one visualization type to the next
// and is unsupported. Databricks does not recommend modifying visualization
// settings in JSON.
Options any `json:"options,omitempty"`
// The type of visualization: chart, table, pivot table, and so on.
Type string `json:"type,omitempty"`
UpdatedAt string `json:"updated_at,omitempty"`
}
The visualization description API changes frequently and is unsupported. You can duplicate a visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint. Databricks does not recommend constructing ad-hoc visualizations entirely in JSON.
type WarehouseType ¶
type WarehouseType string
const WarehouseTypeClassic WarehouseType = `CLASSIC`
const WarehouseTypePro WarehouseType = `PRO`
const WarehouseTypeTypeUnspecified WarehouseType = `TYPE_UNSPECIFIED`
func (*WarehouseType) Set ¶
func (wt *WarehouseType) Set(v string) error
Set raw string value and validate it against allowed values
func (*WarehouseType) String ¶
func (wt *WarehouseType) String() string
String representation for fmt.Print
func (*WarehouseType) Type ¶
func (wt *WarehouseType) Type() string
Type always returns WarehouseType to satisfy [pflag.Value] interface
type WarehouseTypePair ¶
type WarehouseTypePair struct {
// If set to false, the specific warehouse type will not be allowed as a
// value for warehouse_type in CreateWarehouse and EditWarehouse
Enabled bool `json:"enabled,omitempty"`
WarehouseType WarehouseType `json:"warehouse_type,omitempty"`
}
type WarehousesAPI ¶
type WarehousesAPI struct {
// contains filtered or unexported fields
}
A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
func NewWarehouses ¶
func NewWarehouses(client *client.DatabricksClient) *WarehousesAPI
func (*WarehousesAPI) Create ¶
func (a *WarehousesAPI) Create(ctx context.Context, request CreateWarehouseRequest) (*CreateWarehouseResponse, error)
Create a warehouse.
Creates a new SQL warehouse.
func (*WarehousesAPI) CreateAndWait ¶
func (a *WarehousesAPI) CreateAndWait(ctx context.Context, createWarehouseRequest CreateWarehouseRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Create and waits to reach RUNNING state
You can override the default timeout of 20 minutes by adding the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
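A sketch of creating a warehouse and waiting for it to reach RUNNING with a longer timeout. The request field names here (Name, ClusterSize, AutoStopMins) are assumed to mirror GetWarehouseResponse above, and the retries helper package is assumed to live at github.com/databricks/databricks-sdk-go/retries:

package example

import (
	"context"
	"time"

	"github.com/databricks/databricks-sdk-go/retries"
	"github.com/databricks/databricks-sdk-go/service/sql"
)

func createSmallWarehouse(ctx context.Context, warehouses *sql.WarehousesAPI) (*sql.GetWarehouseResponse, error) {
	return warehouses.CreateAndWait(ctx, sql.CreateWarehouseRequest{
		Name:         "sdk-example", // assumed field names, mirroring GetWarehouseResponse
		ClusterSize:  "2X-Small",
		AutoStopMins: 20,
	},
		// Wait up to an hour instead of the default 20 minutes.
		retries.Timeout[sql.GetWarehouseResponse](60*time.Minute),
	)
}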
func (*WarehousesAPI) Delete ¶
func (a *WarehousesAPI) Delete(ctx context.Context, request DeleteWarehouseRequest) error
Delete a warehouse.
Deletes a SQL warehouse.
func (*WarehousesAPI) DeleteAndWait ¶
func (a *WarehousesAPI) DeleteAndWait(ctx context.Context, deleteWarehouseRequest DeleteWarehouseRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Delete and waits to reach DELETED state
You can override the default timeout of 20 minutes by adding the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
func (*WarehousesAPI) DeleteById ¶
func (a *WarehousesAPI) DeleteById(ctx context.Context, id string) error
Delete a warehouse.
Deletes a SQL warehouse.
func (*WarehousesAPI) DeleteByIdAndWait ¶
func (a *WarehousesAPI) DeleteByIdAndWait(ctx context.Context, id string, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
func (*WarehousesAPI) Edit ¶
func (a *WarehousesAPI) Edit(ctx context.Context, request EditWarehouseRequest) error
Update a warehouse.
Updates the configuration for a SQL warehouse.
func (*WarehousesAPI) EditAndWait ¶
func (a *WarehousesAPI) EditAndWait(ctx context.Context, editWarehouseRequest EditWarehouseRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Edit and waits to reach RUNNING state
You can override the default timeout of 20 minutes by adding the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
func (*WarehousesAPI) EndpointInfoNameToIdMap ¶
func (a *WarehousesAPI) EndpointInfoNameToIdMap(ctx context.Context, request ListWarehousesRequest) (map[string]string, error)
EndpointInfoNameToIdMap calls WarehousesAPI.ListAll and creates a map of results with EndpointInfo.Name as key and EndpointInfo.Id as value.
Returns an error if there's more than one EndpointInfo with the same .Name.
Note: All EndpointInfo instances are loaded into memory before creating a map.
This method is generated by Databricks SDK Code Generator.
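A sketch that resolves a warehouse ID from its display name via the generated name-to-ID map; as noted above, every EndpointInfo is loaded into memory first:

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func warehouseIdByName(ctx context.Context, warehouses *sql.WarehousesAPI, name string) (string, error) {
	byName, err := warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{})
	if err != nil {
		return "", err
	}
	id, ok := byName[name]
	if !ok {
		return "", fmt.Errorf("no warehouse named %q", name)
	}
	return id, nil
}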
func (*WarehousesAPI) Get ¶
func (a *WarehousesAPI) Get(ctx context.Context, request GetWarehouseRequest) (*GetWarehouseResponse, error)
Get warehouse info.
Gets the information for a single SQL warehouse.
func (*WarehousesAPI) GetAndWait ¶
func (a *WarehousesAPI) GetAndWait(ctx context.Context, getWarehouseRequest GetWarehouseRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Get and waits to reach RUNNING state
You can override the default timeout of 20 minutes by adding the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
func (*WarehousesAPI) GetById ¶
func (a *WarehousesAPI) GetById(ctx context.Context, id string) (*GetWarehouseResponse, error)
Get warehouse info.
Gets the information for a single SQL warehouse.
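A sketch that fetches one warehouse by ID and prints a few of the GetWarehouseResponse fields documented above:

package example

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/sql"
)

func describeWarehouse(ctx context.Context, warehouses *sql.WarehousesAPI, id string) error {
	info, err := warehouses.GetById(ctx, id)
	if err != nil {
		return err
	}
	fmt.Printf("%s: state=%s size=%s clusters=%d jdbc=%s\n",
		info.Name, info.State, info.ClusterSize, info.NumClusters, info.JdbcUrl)
	return nil
}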
func (*WarehousesAPI) GetByIdAndWait ¶
func (a *WarehousesAPI) GetByIdAndWait(ctx context.Context, id string, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
func (*WarehousesAPI) GetByName ¶
func (a *WarehousesAPI) GetByName(ctx context.Context, name string) (*EndpointInfo, error)
GetByName calls WarehousesAPI.EndpointInfoNameToIdMap and returns a single EndpointInfo.
Returns an error if there's more than one EndpointInfo with the same .Name.
Note: All EndpointInfo instances are loaded into memory before returning matching by name.
This method is generated by Databricks SDK Code Generator.
func (*WarehousesAPI) GetWorkspaceWarehouseConfig ¶
func (a *WarehousesAPI) GetWorkspaceWarehouseConfig(ctx context.Context) (*GetWorkspaceWarehouseConfigResponse, error)
Get the workspace configuration.
Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
func (*WarehousesAPI) Impl ¶
func (a *WarehousesAPI) Impl() WarehousesService
Impl returns the low-level Warehouses API implementation.
func (*WarehousesAPI) ListAll ¶
func (a *WarehousesAPI) ListAll(ctx context.Context, request ListWarehousesRequest) ([]EndpointInfo, error)
List warehouses.
Lists all SQL warehouses that a user has manager permissions on.
This method is generated by Databricks SDK Code Generator.
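A short listing sketch (same assumptions as above, plus the fmt package):

    func printWarehouses(ctx context.Context, w *databricks.WorkspaceClient) error {
        all, err := w.Warehouses.ListAll(ctx, sql.ListWarehousesRequest{})
        if err != nil {
            return err
        }
        for _, wh := range all {
            fmt.Printf("%s\t%s\n", wh.Id, wh.Name)
        }
        return nil
    }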
func (*WarehousesAPI) SetWorkspaceWarehouseConfig ¶
func (a *WarehousesAPI) SetWorkspaceWarehouseConfig(ctx context.Context, request SetWorkspaceWarehouseConfigRequest) error
Set the workspace configuration.
Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
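A minimal read-only sketch (same assumptions as above; the response is simply dumped with fmt, since its fields are not shown here):

    func showWorkspaceWarehouseConfig(ctx context.Context, w *databricks.WorkspaceClient) error {
        // Read the workspace-level configuration shared by all SQL warehouses.
        conf, err := w.Warehouses.GetWorkspaceWarehouseConfig(ctx)
        if err != nil {
            return err
        }
        fmt.Printf("%+v\n", conf)
        return nil
    }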
func (*WarehousesAPI) Start ¶
func (a *WarehousesAPI) Start(ctx context.Context, request StartRequest) error
Start a warehouse.
Starts a SQL warehouse.
func (*WarehousesAPI) StartAndWait ¶
func (a *WarehousesAPI) StartAndWait(ctx context.Context, startRequest StartRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Start and waits until the warehouse reaches the RUNNING state.
You can override the default timeout of 20 minutes by passing the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
func (*WarehousesAPI) Stop ¶
func (a *WarehousesAPI) Stop(ctx context.Context, request StopRequest) error
Stop a warehouse.
Stops a SQL warehouse.
func (*WarehousesAPI) StopAndWait ¶
func (a *WarehousesAPI) StopAndWait(ctx context.Context, stopRequest StopRequest, options ...retries.Option[GetWarehouseResponse]) (*GetWarehouseResponse, error)
Calls WarehousesAPI.Stop and waits until the warehouse reaches the STOPPED state.
You can override the default timeout of 20 minutes by passing the retries.Timeout[GetWarehouseResponse](60*time.Minute) functional option.
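A lifecycle sketch combining the two waiters (same assumptions as above; it additionally assumes StartRequest and StopRequest carry the warehouse ID in an Id field):

    func restartWarehouse(ctx context.Context, w *databricks.WorkspaceClient, id string) error {
        // Stop the warehouse and wait for the STOPPED state.
        if _, err := w.Warehouses.StopAndWait(ctx, sql.StopRequest{Id: id}); err != nil {
            return err
        }
        // Start it again, allowing up to 60 minutes to reach RUNNING.
        _, err := w.Warehouses.StartAndWait(ctx, sql.StartRequest{Id: id},
            retries.Timeout[sql.GetWarehouseResponse](60*time.Minute))
        return err
    }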
func (*WarehousesAPI) WithImpl ¶
func (a *WarehousesAPI) WithImpl(impl WarehousesService) *WarehousesAPI
WithImpl can be used to override the low-level API implementation for unit testing purposes with github.com/golang/mock or other mocking frameworks.
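A minimal hand-rolled test-double sketch (only Get is stubbed; the Id and Name fields assumed on GetWarehouseRequest and GetWarehouseResponse are illustrative):

    // fakeWarehouses embeds the interface so only the methods a test needs are
    // overridden; calling any other method panics on the nil embedded value.
    type fakeWarehouses struct {
        sql.WarehousesService
    }

    func (f fakeWarehouses) Get(ctx context.Context, request sql.GetWarehouseRequest) (*sql.GetWarehouseResponse, error) {
        return &sql.GetWarehouseResponse{Id: request.Id, Name: "stub"}, nil
    }

    // stubbedWarehouses wires the fake into the high-level API wrapper.
    func stubbedWarehouses() *sql.WarehousesAPI {
        return new(sql.WarehousesAPI).WithImpl(fakeWarehouses{})
    }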
type WarehousesService ¶
type WarehousesService interface {
// Create a warehouse.
//
// Creates a new SQL warehouse.
Create(ctx context.Context, request CreateWarehouseRequest) (*CreateWarehouseResponse, error)
// Delete a warehouse.
//
// Deletes a SQL warehouse.
Delete(ctx context.Context, request DeleteWarehouseRequest) error
// Update a warehouse.
//
// Updates the configuration for a SQL warehouse.
Edit(ctx context.Context, request EditWarehouseRequest) error
// Get warehouse info.
//
// Gets the information for a single SQL warehouse.
Get(ctx context.Context, request GetWarehouseRequest) (*GetWarehouseResponse, error)
// Get the workspace configuration.
//
// Gets the workspace level configuration that is shared by all SQL
// warehouses in a workspace.
GetWorkspaceWarehouseConfig(ctx context.Context) (*GetWorkspaceWarehouseConfigResponse, error)
// List warehouses.
//
// Lists all SQL warehouses that a user has manager permissions on.
//
// Use ListAll() to get all EndpointInfo instances
List(ctx context.Context, request ListWarehousesRequest) (*ListWarehousesResponse, error)
// Set the workspace configuration.
//
// Sets the workspace level configuration that is shared by all SQL
// warehouses in a workspace.
SetWorkspaceWarehouseConfig(ctx context.Context, request SetWorkspaceWarehouseConfigRequest) error
// Start a warehouse.
//
// Starts a SQL warehouse.
Start(ctx context.Context, request StartRequest) error
// Stop a warehouse.
//
// Stops a SQL warehouse.
Stop(ctx context.Context, request StopRequest) error
}
A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
type Widget ¶
type Widget struct {
// The unique ID for this widget.
Id int `json:"id,omitempty"`
Options *WidgetOptions `json:"options,omitempty"`
// The visualization description API changes frequently and is unsupported.
// You can duplicate a visualization by copying description objects received
// _from the API_ and then using them to create a new one with a POST
// request to the same endpoint. Databricks does not recommend constructing
// ad-hoc visualizations entirely in JSON.
Visualization *Visualization `json:"visualization,omitempty"`
// Unused field.
Width int `json:"width,omitempty"`
}
type WidgetOptions ¶
type WidgetOptions struct {
// Timestamp when this object was created
CreatedAt string `json:"created_at,omitempty"`
// The dashboard ID to which this widget belongs. Each widget can belong to
// one dashboard.
DashboardId string `json:"dashboard_id,omitempty"`
// Whether this widget is hidden on the dashboard.
IsHidden bool `json:"isHidden,omitempty"`
// How parameters used by the visualization in this widget relate to other
// widgets on the dashboard. Databricks does not recommend modifying this
// definition in JSON.
ParameterMappings any `json:"parameterMappings,omitempty"`
// Coordinates of this widget on a dashboard. This portion of the API
// changes frequently and is unsupported.
Position any `json:"position,omitempty"`
// If this is a textbox widget, the application displays this text. This
// field is ignored if the widget contains a visualization in the
// `visualization` field.
Text string `json:"text,omitempty"`
// Timestamp of the last time this object was updated.
UpdatedAt string `json:"updated_at,omitempty"`
}
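A small serialization sketch (field values are illustrative; uses encoding/json in addition to the imports above):

    func textboxWidgetJSON() ([]byte, error) {
        wgt := sql.Widget{
            Id: 42,
            Options: &sql.WidgetOptions{
                DashboardId: "abc123",
                Text:        "Hello from a textbox widget",
            },
        }
        // Produces the JSON shape implied by the struct tags above.
        return json.MarshalIndent(wgt, "", "  ")
    }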