Documentation
¶
Index ¶
- Constants
- type CancelTaskRequest
- type CancelTaskRequestParams
- type CancelTaskResponse
- type CancelTaskResponseParams
- type Client
- func (c *Client) CancelTask(request *CancelTaskRequest) (response *CancelTaskResponse, err error)
- func (c *Client) CancelTaskWithContext(ctx context.Context, request *CancelTaskRequest) (response *CancelTaskResponse, err error)
- func (c *Client) CreateSparkApp(request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
- func (c *Client) CreateSparkAppTask(request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
- func (c *Client) CreateSparkAppTaskWithContext(ctx context.Context, request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
- func (c *Client) CreateSparkAppWithContext(ctx context.Context, request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
- func (c *Client) CreateTask(request *CreateTaskRequest) (response *CreateTaskResponse, err error)
- func (c *Client) CreateTaskWithContext(ctx context.Context, request *CreateTaskRequest) (response *CreateTaskResponse, err error)
- func (c *Client) CreateTasks(request *CreateTasksRequest) (response *CreateTasksResponse, err error)
- func (c *Client) CreateTasksWithContext(ctx context.Context, request *CreateTasksRequest) (response *CreateTasksResponse, err error)
- func (c *Client) DeleteSparkApp(request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
- func (c *Client) DeleteSparkAppWithContext(ctx context.Context, request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
- func (c *Client) DescribeSparkAppJob(request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
- func (c *Client) DescribeSparkAppJobWithContext(ctx context.Context, request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
- func (c *Client) DescribeSparkAppJobs(request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
- func (c *Client) DescribeSparkAppJobsWithContext(ctx context.Context, request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
- func (c *Client) DescribeSparkAppTasks(request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
- func (c *Client) DescribeSparkAppTasksWithContext(ctx context.Context, request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
- func (c *Client) DescribeTaskResult(request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
- func (c *Client) DescribeTaskResultWithContext(ctx context.Context, request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
- func (c *Client) DescribeTasks(request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
- func (c *Client) DescribeTasksWithContext(ctx context.Context, request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
- func (c *Client) ModifySparkApp(request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
- func (c *Client) ModifySparkAppWithContext(ctx context.Context, request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
- type Column
- type CreateSparkAppRequest
- type CreateSparkAppRequestParams
- type CreateSparkAppResponse
- type CreateSparkAppResponseParams
- type CreateSparkAppTaskRequest
- type CreateSparkAppTaskRequestParams
- type CreateSparkAppTaskResponse
- type CreateSparkAppTaskResponseParams
- type CreateTaskRequest
- type CreateTaskRequestParams
- type CreateTaskResponse
- type CreateTaskResponseParams
- type CreateTasksRequest
- type CreateTasksRequestParams
- type CreateTasksResponse
- type CreateTasksResponseParams
- type DeleteSparkAppRequest
- type DeleteSparkAppRequestParams
- type DeleteSparkAppResponse
- type DeleteSparkAppResponseParams
- type DescribeSparkAppJobRequest
- type DescribeSparkAppJobRequestParams
- type DescribeSparkAppJobResponse
- type DescribeSparkAppJobResponseParams
- type DescribeSparkAppJobsRequest
- type DescribeSparkAppJobsRequestParams
- type DescribeSparkAppJobsResponse
- type DescribeSparkAppJobsResponseParams
- type DescribeSparkAppTasksRequest
- type DescribeSparkAppTasksRequestParams
- type DescribeSparkAppTasksResponse
- type DescribeSparkAppTasksResponseParams
- type DescribeTaskResultRequest
- type DescribeTaskResultRequestParams
- type DescribeTaskResultResponse
- type DescribeTaskResultResponseParams
- type DescribeTasksRequest
- type DescribeTasksRequestParams
- type DescribeTasksResponse
- type DescribeTasksResponseParams
- type Filter
- type KVPair
- type ModifySparkAppRequest
- type ModifySparkAppRequestParams
- type ModifySparkAppResponse
- type ModifySparkAppResponseParams
- type SQLTask
- type SparkJobInfo
- type StreamingStatistics
- type Task
- type TaskResponseInfo
- type TaskResultInfo
- type TasksInfo
- type TasksOverview
Constants ¶
const ( // The operation failed. FAILEDOPERATION = "FailedOperation" // Another request is being processed. Try again later. FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" // The HTTP client request failed. FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" // An internal error occurred. INTERNALERROR = "InternalError" // A database error occurred. INTERNALERROR_DBERROR = "InternalError.DBError" // The parameter is incorrect. INVALIDPARAMETER = "InvalidParameter" // The data engine name is invalid. INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" // The fault tolerance policy is invalid. INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" // The CAM role arn is invalid. INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn" // SQL parsing failed. INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" // The number of SQL statements does not meet the specification. INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" // The `SparkAppParam` is invalid. INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" // The storage location is incorrect. INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" // The `taskid` is invalid. INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" // The task type is invalid. INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" // The task has ended and cannot be canceled. INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished" // The parameter value is incorrect. INVALIDPARAMETERVALUE = "InvalidParameterValue" // The resource does not exist. RESOURCENOTFOUND = "ResourceNotFound" // The result path was not found. RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" // The resource is unavailable. 
RESOURCEUNAVAILABLE = "ResourceUnavailable" // The account balance is insufficient to run the SQL task. RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" // The sub-user does not have permission to use the compute engine. UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine" )
const APIVersion = "2021-01-25"
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type CancelTaskRequest ¶
type CancelTaskRequest struct {
*tchttp.BaseRequest
// Globally unique task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
}
func NewCancelTaskRequest ¶
func NewCancelTaskRequest() (request *CancelTaskRequest)
func (*CancelTaskRequest) FromJsonString ¶
func (r *CancelTaskRequest) FromJsonString(s string) error
FromJsonString It is strongly recommended NOT to use this function, as it performs no parameter validation and no strict type checking.
func (*CancelTaskRequest) ToJsonString ¶
func (r *CancelTaskRequest) ToJsonString() string
type CancelTaskRequestParams ¶
type CancelTaskRequestParams struct {
// Globally unique task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
}
Predefined struct for user
type CancelTaskResponse ¶
type CancelTaskResponse struct {
*tchttp.BaseResponse
Response *CancelTaskResponseParams `json:"Response"`
}
func NewCancelTaskResponse ¶
func NewCancelTaskResponse() (response *CancelTaskResponse)
func (*CancelTaskResponse) FromJsonString ¶
func (r *CancelTaskResponse) FromJsonString(s string) error
FromJsonString It is strongly recommended NOT to use this function, as it performs no parameter validation and no strict type checking.
func (*CancelTaskResponse) ToJsonString ¶
func (r *CancelTaskResponse) ToJsonString() string
type CancelTaskResponseParams ¶
type CancelTaskResponseParams struct {
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type Client ¶
func NewClient ¶
func NewClient(credential common.CredentialIface, region string, clientProfile *profile.ClientProfile) (client *Client, err error)
func NewClientWithSecretId ¶
Deprecated
func (*Client) CancelTask ¶
func (c *Client) CancelTask(request *CancelTaskRequest) (response *CancelTaskResponse, err error)
CancelTask This API is used to cancel a task.
error code that may be returned:
FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished"
func (*Client) CancelTaskWithContext ¶
func (c *Client) CancelTaskWithContext(ctx context.Context, request *CancelTaskRequest) (response *CancelTaskResponse, err error)
CancelTask This API is used to cancel a task.
error code that may be returned:
FAILEDOPERATION_ANOTHERREQUESTPROCESSING = "FailedOperation.AnotherRequestProcessing" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId" INVALIDPARAMETER_TASKALREADYFINISHED = "InvalidParameter.TaskAlreadyFinished"
func (*Client) CreateSparkApp ¶
func (c *Client) CreateSparkApp(request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
CreateSparkApp This API is used to create a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn"
func (*Client) CreateSparkAppTask ¶
func (c *Client) CreateSparkAppTask(request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
CreateSparkAppTask This API is used to create a Spark task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateSparkAppTaskWithContext ¶
func (c *Client) CreateSparkAppTaskWithContext(ctx context.Context, request *CreateSparkAppTaskRequest) (response *CreateSparkAppTaskResponse, err error)
CreateSparkAppTask This API is used to create a Spark task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam" RESOURCEUNAVAILABLE = "ResourceUnavailable" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateSparkAppWithContext ¶
func (c *Client) CreateSparkAppWithContext(ctx context.Context, request *CreateSparkAppRequest) (response *CreateSparkAppResponse, err error)
CreateSparkApp This API is used to create a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDROLEARN = "InvalidParameter.InvalidRoleArn"
func (*Client) CreateTask ¶
func (c *Client) CreateTask(request *CreateTaskRequest) (response *CreateTaskResponse, err error)
CreateTask This API is used to create a SQL query task. (We recommend you use the `CreateTasks` API instead.)
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTaskWithContext ¶
func (c *Client) CreateTaskWithContext(ctx context.Context, request *CreateTaskRequest) (response *CreateTaskResponse, err error)
CreateTask This API is used to create a SQL query task. (We recommend you use the `CreateTasks` API instead.)
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTasks ¶
func (c *Client) CreateTasks(request *CreateTasksRequest) (response *CreateTasksResponse, err error)
CreateTasks This API is used to create tasks in batches.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) CreateTasksWithContext ¶
func (c *Client) CreateTasksWithContext(ctx context.Context, request *CreateTasksRequest) (response *CreateTasksResponse, err error)
CreateTasks This API is used to create tasks in batches.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DeleteSparkApp ¶
func (c *Client) DeleteSparkApp(request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
DeleteSparkApp This API is used to delete a Spark application.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DeleteSparkAppWithContext ¶
func (c *Client) DeleteSparkAppWithContext(ctx context.Context, request *DeleteSparkAppRequest) (response *DeleteSparkAppResponse, err error)
DeleteSparkApp This API is used to delete a Spark application.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName" INVALIDPARAMETER_INVALIDFAILURETOLERANCE = "InvalidParameter.InvalidFailureTolerance" INVALIDPARAMETER_INVALIDSQL = "InvalidParameter.InvalidSQL" INVALIDPARAMETER_INVALIDSQLNUM = "InvalidParameter.InvalidSQLNum" INVALIDPARAMETER_INVALIDSTORELOCATION = "InvalidParameter.InvalidStoreLocation" INVALIDPARAMETER_INVALIDTASKTYPE = "InvalidParameter.InvalidTaskType" RESOURCENOTFOUND = "ResourceNotFound" RESOURCENOTFOUND_RESULTOUTPUTPATHNOTFOUND = "ResourceNotFound.ResultOutputPathNotFound" RESOURCEUNAVAILABLE_BALANCEINSUFFICIENT = "ResourceUnavailable.BalanceInsufficient" UNAUTHORIZEDOPERATION_USECOMPUTINGENGINE = "UnauthorizedOperation.UseComputingEngine"
func (*Client) DescribeSparkAppJob ¶
func (c *Client) DescribeSparkAppJob(request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
DescribeSparkAppJob This API is used to query a specific Spark application.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam"
func (*Client) DescribeSparkAppJobWithContext ¶
func (c *Client) DescribeSparkAppJobWithContext(ctx context.Context, request *DescribeSparkAppJobRequest) (response *DescribeSparkAppJobResponse, err error)
DescribeSparkAppJob This API is used to query a specific Spark application.
error code that may be returned:
INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDSPARKAPPPARAM = "InvalidParameter.InvalidSparkAppParam"
func (*Client) DescribeSparkAppJobs ¶
func (c *Client) DescribeSparkAppJobs(request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
DescribeSparkAppJobs This API is used to get the list of Spark applications.
error code that may be returned:
RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeSparkAppJobsWithContext ¶
func (c *Client) DescribeSparkAppJobsWithContext(ctx context.Context, request *DescribeSparkAppJobsRequest) (response *DescribeSparkAppJobsResponse, err error)
DescribeSparkAppJobs This API is used to get the list of Spark applications.
error code that may be returned:
RESOURCENOTFOUND = "ResourceNotFound"
func (*Client) DescribeSparkAppTasks ¶
func (c *Client) DescribeSparkAppTasks(request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
DescribeSparkAppTasks This API is used to query the list of running task instances of a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeSparkAppTasksWithContext ¶
func (c *Client) DescribeSparkAppTasksWithContext(ctx context.Context, request *DescribeSparkAppTasksRequest) (response *DescribeSparkAppTasksResponse, err error)
DescribeSparkAppTasks This API is used to query the list of running task instances of a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation"
func (*Client) DescribeTaskResult ¶
func (c *Client) DescribeTaskResult(request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
DescribeTaskResult This API is used to query the result of a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId"
func (*Client) DescribeTaskResultWithContext ¶
func (c *Client) DescribeTaskResultWithContext(ctx context.Context, request *DescribeTaskResultRequest) (response *DescribeTaskResultResponse, err error)
DescribeTaskResult This API is used to query the result of a task.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" FAILEDOPERATION_HTTPCLIENTDOREQUESTFAILED = "FailedOperation.HttpClientDoRequestFailed" INTERNALERROR = "InternalError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETER_INVALIDTASKID = "InvalidParameter.InvalidTaskId"
func (*Client) DescribeTasks ¶
func (c *Client) DescribeTasks(request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
DescribeTasks This API is used to query the list of tasks.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue"
func (*Client) DescribeTasksWithContext ¶
func (c *Client) DescribeTasksWithContext(ctx context.Context, request *DescribeTasksRequest) (response *DescribeTasksResponse, err error)
DescribeTasks This API is used to query the list of tasks.
error code that may be returned:
INTERNALERROR = "InternalError" INTERNALERROR_DBERROR = "InternalError.DBError" INVALIDPARAMETER = "InvalidParameter" INVALIDPARAMETERVALUE = "InvalidParameterValue"
func (*Client) ModifySparkApp ¶
func (c *Client) ModifySparkApp(request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
ModifySparkApp This API is used to update a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName"
func (*Client) ModifySparkAppWithContext ¶
func (c *Client) ModifySparkAppWithContext(ctx context.Context, request *ModifySparkAppRequest) (response *ModifySparkAppResponse, err error)
ModifySparkApp This API is used to update a Spark application.
error code that may be returned:
FAILEDOPERATION = "FailedOperation" INVALIDPARAMETER_INVALIDDATAENGINENAME = "InvalidParameter.InvalidDataEngineName"
type Column ¶
type Column struct {
// Column name, which is case-insensitive and can contain up to 25 characters.
Name *string `json:"Name,omitempty" name:"Name"`
// Column type. Valid values:
// string|tinyint|smallint|int|bigint|boolean|float|double|decimal|timestamp|date|binary|array<data_type>|map<primitive_type, data_type>|struct<col_name : data_type [COMMENT col_comment], ...>|uniontype<data_type, data_type, ...>.
Type *string `json:"Type,omitempty" name:"Type"`
// Column comment.
// Note: This field may return null, indicating that no valid values can be obtained.
Comment *string `json:"Comment,omitempty" name:"Comment"`
// Length of the entire numeric value
// Note: This field may return null, indicating that no valid values can be obtained.
Precision *int64 `json:"Precision,omitempty" name:"Precision"`
// Length of the decimal part
// Note: This field may return null, indicating that no valid values can be obtained.
Scale *int64 `json:"Scale,omitempty" name:"Scale"`
// Whether the column is nullable.
// Note: This field may return null, indicating that no valid values can be obtained.
Nullable *string `json:"Nullable,omitempty" name:"Nullable"`
// Field position
// Note: This field may return null, indicating that no valid values can be obtained.
Position *int64 `json:"Position,omitempty" name:"Position"`
// Field creation time
// Note: This field may return null, indicating that no valid values can be obtained.
CreateTime *string `json:"CreateTime,omitempty" name:"CreateTime"`
// Field modification time
// Note: This field may return null, indicating that no valid values can be obtained.
ModifiedTime *string `json:"ModifiedTime,omitempty" name:"ModifiedTime"`
// Whether the column is the partition field.
// Note: This field may return null, indicating that no valid values can be obtained.
IsPartition *bool `json:"IsPartition,omitempty" name:"IsPartition"`
}
type CreateSparkAppRequest ¶
type CreateSparkAppRequest struct {
*tchttp.BaseRequest
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
// 1: Spark JAR application; 2: Spark streaming application
AppType *int64 `json:"AppType,omitempty" name:"AppType"`
// The data engine executing the Spark job
DataEngine *string `json:"DataEngine,omitempty" name:"DataEngine"`
// Execution entry of the Spark application
AppFile *string `json:"AppFile,omitempty" name:"AppFile"`
// Execution role ID of the Spark job
RoleArn *int64 `json:"RoleArn,omitempty" name:"RoleArn"`
// Driver resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppDriverSize *string `json:"AppDriverSize,omitempty" name:"AppDriverSize"`
// Executor resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppExecutorSize *string `json:"AppExecutorSize,omitempty" name:"AppExecutorSize"`
// Number of Spark job executors
AppExecutorNums *int64 `json:"AppExecutorNums,omitempty" name:"AppExecutorNums"`
// This field has been disused. Use the `Datasource` field instead.
Eni *string `json:"Eni,omitempty" name:"Eni"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocal *string `json:"IsLocal,omitempty" name:"IsLocal"`
// Main class of the Spark JAR job during execution
MainClass *string `json:"MainClass,omitempty" name:"MainClass"`
// Spark configurations separated by line break
AppConf *string `json:"AppConf,omitempty" name:"AppConf"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalJars *string `json:"IsLocalJars,omitempty" name:"IsLocalJars"`
// Dependency JAR packages of the Spark JAR job separated by comma
AppJars *string `json:"AppJars,omitempty" name:"AppJars"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalFiles *string `json:"IsLocalFiles,omitempty" name:"IsLocalFiles"`
// Dependency resources of the Spark job separated by comma
AppFiles *string `json:"AppFiles,omitempty" name:"AppFiles"`
// Command line parameters of the Spark job
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
// This parameter takes effect only for Spark flow tasks.
MaxRetries *int64 `json:"MaxRetries,omitempty" name:"MaxRetries"`
// Data source name
DataSource *string `json:"DataSource,omitempty" name:"DataSource"`
// PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitempty" name:"IsLocalPythonFiles"`
// PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma.
AppPythonFiles *string `json:"AppPythonFiles,omitempty" name:"AppPythonFiles"`
// Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalArchives *string `json:"IsLocalArchives,omitempty" name:"IsLocalArchives"`
// Archives: Dependency resources
AppArchives *string `json:"AppArchives,omitempty" name:"AppArchives"`
}
func NewCreateSparkAppRequest ¶
func NewCreateSparkAppRequest() (request *CreateSparkAppRequest)
func (*CreateSparkAppRequest) FromJsonString ¶
func (r *CreateSparkAppRequest) FromJsonString(s string) error
FromJsonString It is strongly recommended NOT to use this function, as it performs no parameter validation and no strict type checking.
func (*CreateSparkAppRequest) ToJsonString ¶
func (r *CreateSparkAppRequest) ToJsonString() string
type CreateSparkAppRequestParams ¶
type CreateSparkAppRequestParams struct {
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
// 1: Spark JAR application; 2: Spark streaming application
AppType *int64 `json:"AppType,omitempty" name:"AppType"`
// The data engine executing the Spark job
DataEngine *string `json:"DataEngine,omitempty" name:"DataEngine"`
// Execution entry of the Spark application
AppFile *string `json:"AppFile,omitempty" name:"AppFile"`
// Execution role ID of the Spark job
RoleArn *int64 `json:"RoleArn,omitempty" name:"RoleArn"`
// Driver resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppDriverSize *string `json:"AppDriverSize,omitempty" name:"AppDriverSize"`
// Executor resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppExecutorSize *string `json:"AppExecutorSize,omitempty" name:"AppExecutorSize"`
// Number of Spark job executors
AppExecutorNums *int64 `json:"AppExecutorNums,omitempty" name:"AppExecutorNums"`
// This field has been disused. Use the `Datasource` field instead.
Eni *string `json:"Eni,omitempty" name:"Eni"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocal *string `json:"IsLocal,omitempty" name:"IsLocal"`
// Main class of the Spark JAR job during execution
MainClass *string `json:"MainClass,omitempty" name:"MainClass"`
// Spark configurations separated by line break
AppConf *string `json:"AppConf,omitempty" name:"AppConf"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalJars *string `json:"IsLocalJars,omitempty" name:"IsLocalJars"`
// Dependency JAR packages of the Spark JAR job separated by comma
AppJars *string `json:"AppJars,omitempty" name:"AppJars"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalFiles *string `json:"IsLocalFiles,omitempty" name:"IsLocalFiles"`
// Dependency resources of the Spark job separated by comma
AppFiles *string `json:"AppFiles,omitempty" name:"AppFiles"`
// Command line parameters of the Spark job
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
// This parameter takes effect only for Spark flow tasks.
MaxRetries *int64 `json:"MaxRetries,omitempty" name:"MaxRetries"`
// Data source name
DataSource *string `json:"DataSource,omitempty" name:"DataSource"`
// PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitempty" name:"IsLocalPythonFiles"`
// PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma.
AppPythonFiles *string `json:"AppPythonFiles,omitempty" name:"AppPythonFiles"`
// Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalArchives *string `json:"IsLocalArchives,omitempty" name:"IsLocalArchives"`
// Archives: Dependency resources
AppArchives *string `json:"AppArchives,omitempty" name:"AppArchives"`
}
Predefined struct for user
type CreateSparkAppResponse ¶
type CreateSparkAppResponse struct {
*tchttp.BaseResponse
Response *CreateSparkAppResponseParams `json:"Response"`
}
func NewCreateSparkAppResponse ¶
func NewCreateSparkAppResponse() (response *CreateSparkAppResponse)
func (*CreateSparkAppResponse) FromJsonString ¶
func (r *CreateSparkAppResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateSparkAppResponse) ToJsonString ¶
func (r *CreateSparkAppResponse) ToJsonString() string
type CreateSparkAppResponseParams ¶
type CreateSparkAppResponseParams struct {
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type CreateSparkAppTaskRequest ¶
type CreateSparkAppTaskRequest struct {
*tchttp.BaseRequest
// Spark job name
JobName *string `json:"JobName,omitempty" name:"JobName"`
// Command line parameters of the Spark job separated by space. They are generally used for periodic calls.
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
}
func NewCreateSparkAppTaskRequest ¶
func NewCreateSparkAppTaskRequest() (request *CreateSparkAppTaskRequest)
func (*CreateSparkAppTaskRequest) FromJsonString ¶
func (r *CreateSparkAppTaskRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateSparkAppTaskRequest) ToJsonString ¶
func (r *CreateSparkAppTaskRequest) ToJsonString() string
type CreateSparkAppTaskRequestParams ¶
type CreateSparkAppTaskRequestParams struct {
// Spark job name
JobName *string `json:"JobName,omitempty" name:"JobName"`
// Command line parameters of the Spark job separated by space. They are generally used for periodic calls.
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
}
Predefined struct for user
type CreateSparkAppTaskResponse ¶
type CreateSparkAppTaskResponse struct {
*tchttp.BaseResponse
Response *CreateSparkAppTaskResponseParams `json:"Response"`
}
func NewCreateSparkAppTaskResponse ¶
func NewCreateSparkAppTaskResponse() (response *CreateSparkAppTaskResponse)
func (*CreateSparkAppTaskResponse) FromJsonString ¶
func (r *CreateSparkAppTaskResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateSparkAppTaskResponse) ToJsonString ¶
func (r *CreateSparkAppTaskResponse) ToJsonString() string
type CreateSparkAppTaskResponseParams ¶
type CreateSparkAppTaskResponseParams struct {
// Batch ID
BatchId *string `json:"BatchId,omitempty" name:"BatchId"`
// Task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type CreateTaskRequest ¶
type CreateTaskRequest struct {
*tchttp.BaseRequest
// Computing task. This parameter contains the task type and related configuration information.
Task *Task `json:"Task,omitempty" name:"Task"`
// Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field).
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// Name of the default data source
DatasourceConnectionName *string `json:"DatasourceConnectionName,omitempty" name:"DatasourceConnectionName"`
// Data engine name. If this parameter is not specified, the task will be submitted to the default engine.
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
func NewCreateTaskRequest ¶
func NewCreateTaskRequest() (request *CreateTaskRequest)
func (*CreateTaskRequest) FromJsonString ¶
func (r *CreateTaskRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateTaskRequest) ToJsonString ¶
func (r *CreateTaskRequest) ToJsonString() string
type CreateTaskRequestParams ¶
type CreateTaskRequestParams struct {
// Computing task. This parameter contains the task type and related configuration information.
Task *Task `json:"Task,omitempty" name:"Task"`
// Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field).
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// Name of the default data source
DatasourceConnectionName *string `json:"DatasourceConnectionName,omitempty" name:"DatasourceConnectionName"`
// Data engine name. If this parameter is not specified, the task will be submitted to the default engine.
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
Predefined struct for user
type CreateTaskResponse ¶
type CreateTaskResponse struct {
*tchttp.BaseResponse
Response *CreateTaskResponseParams `json:"Response"`
}
func NewCreateTaskResponse ¶
func NewCreateTaskResponse() (response *CreateTaskResponse)
func (*CreateTaskResponse) FromJsonString ¶
func (r *CreateTaskResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateTaskResponse) ToJsonString ¶
func (r *CreateTaskResponse) ToJsonString() string
type CreateTaskResponseParams ¶
type CreateTaskResponseParams struct {
// Task ID
// Note: This field may return null, indicating that no valid values can be obtained.
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type CreateTasksRequest ¶
type CreateTasksRequest struct {
*tchttp.BaseRequest
// Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field).
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// SQL task information
Tasks *TasksInfo `json:"Tasks,omitempty" name:"Tasks"`
// Data source name. Default value: DataLakeCatalog.
DatasourceConnectionName *string `json:"DatasourceConnectionName,omitempty" name:"DatasourceConnectionName"`
// Compute engine name. If this parameter is not specified, the task will be submitted to the default engine.
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
func NewCreateTasksRequest ¶
func NewCreateTasksRequest() (request *CreateTasksRequest)
func (*CreateTasksRequest) FromJsonString ¶
func (r *CreateTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateTasksRequest) ToJsonString ¶
func (r *CreateTasksRequest) ToJsonString() string
type CreateTasksRequestParams ¶
type CreateTasksRequestParams struct {
// Database name. If there is a database name in the SQL statement, the database in the SQL statement will be used first; otherwise, the database specified by this parameter will be used (note: when submitting the database creation SQL statement, pass in an empty string for this field).
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// SQL task information
Tasks *TasksInfo `json:"Tasks,omitempty" name:"Tasks"`
// Data source name. Default value: DataLakeCatalog.
DatasourceConnectionName *string `json:"DatasourceConnectionName,omitempty" name:"DatasourceConnectionName"`
// Compute engine name. If this parameter is not specified, the task will be submitted to the default engine.
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
Predefined struct for user
type CreateTasksResponse ¶
type CreateTasksResponse struct {
*tchttp.BaseResponse
Response *CreateTasksResponseParams `json:"Response"`
}
func NewCreateTasksResponse ¶
func NewCreateTasksResponse() (response *CreateTasksResponse)
func (*CreateTasksResponse) FromJsonString ¶
func (r *CreateTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*CreateTasksResponse) ToJsonString ¶
func (r *CreateTasksResponse) ToJsonString() string
type CreateTasksResponseParams ¶
type CreateTasksResponseParams struct {
// ID of the current batch of submitted tasks
BatchId *string `json:"BatchId,omitempty" name:"BatchId"`
// Collection of task IDs arranged in order of execution
TaskIdSet []*string `json:"TaskIdSet,omitempty" name:"TaskIdSet"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DeleteSparkAppRequest ¶
type DeleteSparkAppRequest struct {
*tchttp.BaseRequest
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
}
func NewDeleteSparkAppRequest ¶
func NewDeleteSparkAppRequest() (request *DeleteSparkAppRequest)
func (*DeleteSparkAppRequest) FromJsonString ¶
func (r *DeleteSparkAppRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DeleteSparkAppRequest) ToJsonString ¶
func (r *DeleteSparkAppRequest) ToJsonString() string
type DeleteSparkAppRequestParams ¶
type DeleteSparkAppRequestParams struct {
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
}
Predefined struct for user
type DeleteSparkAppResponse ¶
type DeleteSparkAppResponse struct {
*tchttp.BaseResponse
Response *DeleteSparkAppResponseParams `json:"Response"`
}
func NewDeleteSparkAppResponse ¶
func NewDeleteSparkAppResponse() (response *DeleteSparkAppResponse)
func (*DeleteSparkAppResponse) FromJsonString ¶
func (r *DeleteSparkAppResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DeleteSparkAppResponse) ToJsonString ¶
func (r *DeleteSparkAppResponse) ToJsonString() string
type DeleteSparkAppResponseParams ¶
type DeleteSparkAppResponseParams struct {
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DescribeSparkAppJobRequest ¶
type DescribeSparkAppJobRequest struct {
*tchttp.BaseRequest
// Spark job ID. If it co-exists with `JobName`, `JobName` will become invalid.
JobId *string `json:"JobId,omitempty" name:"JobId"`
// Spark job name
JobName *string `json:"JobName,omitempty" name:"JobName"`
}
func NewDescribeSparkAppJobRequest ¶
func NewDescribeSparkAppJobRequest() (request *DescribeSparkAppJobRequest)
func (*DescribeSparkAppJobRequest) FromJsonString ¶
func (r *DescribeSparkAppJobRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppJobRequest) ToJsonString ¶
func (r *DescribeSparkAppJobRequest) ToJsonString() string
type DescribeSparkAppJobRequestParams ¶
type DescribeSparkAppJobRequestParams struct {
// Spark job ID. If it co-exists with `JobName`, `JobName` will become invalid.
JobId *string `json:"JobId,omitempty" name:"JobId"`
// Spark job name
JobName *string `json:"JobName,omitempty" name:"JobName"`
}
Predefined struct for user
type DescribeSparkAppJobResponse ¶
type DescribeSparkAppJobResponse struct {
*tchttp.BaseResponse
Response *DescribeSparkAppJobResponseParams `json:"Response"`
}
func NewDescribeSparkAppJobResponse ¶
func NewDescribeSparkAppJobResponse() (response *DescribeSparkAppJobResponse)
func (*DescribeSparkAppJobResponse) FromJsonString ¶
func (r *DescribeSparkAppJobResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppJobResponse) ToJsonString ¶
func (r *DescribeSparkAppJobResponse) ToJsonString() string
type DescribeSparkAppJobResponseParams ¶
type DescribeSparkAppJobResponseParams struct {
// Spark job details
// Note: This field may return null, indicating that no valid values can be obtained.
Job *SparkJobInfo `json:"Job,omitempty" name:"Job"`
// Whether the queried Spark job exists
IsExists *bool `json:"IsExists,omitempty" name:"IsExists"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DescribeSparkAppJobsRequest ¶
type DescribeSparkAppJobsRequest struct {
*tchttp.BaseRequest
// The returned results are sorted by this field.
SortBy *string `json:"SortBy,omitempty" name:"SortBy"`
// Descending or ascending order, such as `desc`.
Sorting *string `json:"Sorting,omitempty" name:"Sorting"`
// Filter by this parameter, which can be `spark-job-name`.
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
// Update start time
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// Update end time
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Query list offset
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Query list limit
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
}
func NewDescribeSparkAppJobsRequest ¶
func NewDescribeSparkAppJobsRequest() (request *DescribeSparkAppJobsRequest)
func (*DescribeSparkAppJobsRequest) FromJsonString ¶
func (r *DescribeSparkAppJobsRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppJobsRequest) ToJsonString ¶
func (r *DescribeSparkAppJobsRequest) ToJsonString() string
type DescribeSparkAppJobsRequestParams ¶
type DescribeSparkAppJobsRequestParams struct {
// The returned results are sorted by this field.
SortBy *string `json:"SortBy,omitempty" name:"SortBy"`
// Descending or ascending order, such as `desc`.
Sorting *string `json:"Sorting,omitempty" name:"Sorting"`
// Filter by this parameter, which can be `spark-job-name`.
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
// Update start time
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// Update end time
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Query list offset
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Query list limit
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
}
Predefined struct for user
type DescribeSparkAppJobsResponse ¶
type DescribeSparkAppJobsResponse struct {
*tchttp.BaseResponse
Response *DescribeSparkAppJobsResponseParams `json:"Response"`
}
func NewDescribeSparkAppJobsResponse ¶
func NewDescribeSparkAppJobsResponse() (response *DescribeSparkAppJobsResponse)
func (*DescribeSparkAppJobsResponse) FromJsonString ¶
func (r *DescribeSparkAppJobsResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppJobsResponse) ToJsonString ¶
func (r *DescribeSparkAppJobsResponse) ToJsonString() string
type DescribeSparkAppJobsResponseParams ¶
type DescribeSparkAppJobsResponseParams struct {
// Detailed list of Spark jobs
SparkAppJobs []*SparkJobInfo `json:"SparkAppJobs,omitempty" name:"SparkAppJobs"`
// Total number of Spark jobs
TotalCount *int64 `json:"TotalCount,omitempty" name:"TotalCount"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DescribeSparkAppTasksRequest ¶
type DescribeSparkAppTasksRequest struct {
*tchttp.BaseRequest
// Spark job ID
JobId *string `json:"JobId,omitempty" name:"JobId"`
// Paginated query offset
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Paginated query limit
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
// Execution instance ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// Update start time
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// Update end time
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Filter by this parameter, which can be `task-state`.
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
}
func NewDescribeSparkAppTasksRequest ¶
func NewDescribeSparkAppTasksRequest() (request *DescribeSparkAppTasksRequest)
func (*DescribeSparkAppTasksRequest) FromJsonString ¶
func (r *DescribeSparkAppTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppTasksRequest) ToJsonString ¶
func (r *DescribeSparkAppTasksRequest) ToJsonString() string
type DescribeSparkAppTasksRequestParams ¶
type DescribeSparkAppTasksRequestParams struct {
// Spark job ID
JobId *string `json:"JobId,omitempty" name:"JobId"`
// Paginated query offset
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Paginated query limit
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
// Execution instance ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// Update start time
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// Update end time
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Filter by this parameter, which can be `task-state`.
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
}
Predefined struct for user
type DescribeSparkAppTasksResponse ¶
type DescribeSparkAppTasksResponse struct {
*tchttp.BaseResponse
Response *DescribeSparkAppTasksResponseParams `json:"Response"`
}
func NewDescribeSparkAppTasksResponse ¶
func NewDescribeSparkAppTasksResponse() (response *DescribeSparkAppTasksResponse)
func (*DescribeSparkAppTasksResponse) FromJsonString ¶
func (r *DescribeSparkAppTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeSparkAppTasksResponse) ToJsonString ¶
func (r *DescribeSparkAppTasksResponse) ToJsonString() string
type DescribeSparkAppTasksResponseParams ¶
type DescribeSparkAppTasksResponseParams struct {
// Task result (this field has been disused)
// Note: This field may return null, indicating that no valid values can be obtained.
Tasks *TaskResponseInfo `json:"Tasks,omitempty" name:"Tasks"`
// Total number of tasks
TotalCount *int64 `json:"TotalCount,omitempty" name:"TotalCount"`
// List of task results
// Note: This field may return null, indicating that no valid values can be obtained.
SparkAppTasks []*TaskResponseInfo `json:"SparkAppTasks,omitempty" name:"SparkAppTasks"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DescribeTaskResultRequest ¶
type DescribeTaskResultRequest struct {
*tchttp.BaseRequest
// Unique task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// The pagination information returned by the last response. This parameter can be omitted for the first response, where the data will be returned from the beginning. The data with a volume set by the `MaxResults` field is returned each time.
NextToken *string `json:"NextToken,omitempty" name:"NextToken"`
// Maximum number of returned rows. Value range: 0–1,000. Default value: 1,000.
MaxResults *int64 `json:"MaxResults,omitempty" name:"MaxResults"`
}
func NewDescribeTaskResultRequest ¶
func NewDescribeTaskResultRequest() (request *DescribeTaskResultRequest)
func (*DescribeTaskResultRequest) FromJsonString ¶
func (r *DescribeTaskResultRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeTaskResultRequest) ToJsonString ¶
func (r *DescribeTaskResultRequest) ToJsonString() string
type DescribeTaskResultRequestParams ¶
type DescribeTaskResultRequestParams struct {
// Unique task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// The pagination information returned by the last response. This parameter can be omitted for the first response, where the data will be returned from the beginning. The data with a volume set by the `MaxResults` field is returned each time.
NextToken *string `json:"NextToken,omitempty" name:"NextToken"`
// Maximum number of returned rows. Value range: 0–1,000. Default value: 1,000.
MaxResults *int64 `json:"MaxResults,omitempty" name:"MaxResults"`
}
Predefined struct for user
type DescribeTaskResultResponse ¶
type DescribeTaskResultResponse struct {
*tchttp.BaseResponse
Response *DescribeTaskResultResponseParams `json:"Response"`
}
func NewDescribeTaskResultResponse ¶
func NewDescribeTaskResultResponse() (response *DescribeTaskResultResponse)
func (*DescribeTaskResultResponse) FromJsonString ¶
func (r *DescribeTaskResultResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeTaskResultResponse) ToJsonString ¶
func (r *DescribeTaskResultResponse) ToJsonString() string
type DescribeTaskResultResponseParams ¶
type DescribeTaskResultResponseParams struct {
// The queried task information. If the returned value is empty, the task with the entered task ID does not exist. The task result will be returned only if the task status is `2` (succeeded).
// Note: This field may return null, indicating that no valid values can be obtained.
TaskInfo *TaskResultInfo `json:"TaskInfo,omitempty" name:"TaskInfo"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type DescribeTasksRequest ¶
type DescribeTasksRequest struct {
*tchttp.BaseRequest
// Number of returned results. Default value: 10. Maximum value: 100.
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
// Offset. Default value: 0.
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Filter. The following filters are supported, and the `Name` input parameter must be one of them. Up to 50 `task-id` values can be filtered, while up to 5 other parameters can be filtered in total.
// task-id - String - (filter by task ID). `task-id` format: e386471f-139a-4e59-877f-50ece8135b99.
// task-state - String - (filter exactly by task status). Valid values: `0` (initial), `1` (running), `2` (succeeded), `-1` (failed).
// task-sql-keyword - String - (filter fuzzily by SQL statement keyword, such as `DROP TABLE`).
// task-operator - String - (filter by sub-UIN)
// task-kind - String - (filter by task type)
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
// Sorting field. Valid values: `create-time` (default value), `update-time`.
SortBy *string `json:"SortBy,omitempty" name:"SortBy"`
// Sorting order. Valid values: `asc` (ascending order), `desc` (descending order). Default value: `asc`.
Sorting *string `json:"Sorting,omitempty" name:"Sorting"`
// Start time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time seven days ago by default.
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// End time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time by default. The time span is (0, 30] days. Data in the last 45 days can be queried.
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Filter by compute resource name
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
func NewDescribeTasksRequest ¶
func NewDescribeTasksRequest() (request *DescribeTasksRequest)
func (*DescribeTasksRequest) FromJsonString ¶
func (r *DescribeTasksRequest) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeTasksRequest) ToJsonString ¶
func (r *DescribeTasksRequest) ToJsonString() string
type DescribeTasksRequestParams ¶
type DescribeTasksRequestParams struct {
// Number of returned results. Default value: 10. Maximum value: 100.
Limit *int64 `json:"Limit,omitempty" name:"Limit"`
// Offset. Default value: 0.
Offset *int64 `json:"Offset,omitempty" name:"Offset"`
// Filter. The following filters are supported, and the `Name` input parameter must be one of them. Up to 50 `task-id` values can be filtered, while up to 5 other parameters can be filtered in total.
// task-id - String - (filter by task ID). `task-id` format: e386471f-139a-4e59-877f-50ece8135b99.
// task-state - String - (filter exactly by task status). Valid values: `0` (initial), `1` (running), `2` (succeeded), `-1` (failed).
// task-sql-keyword - String - (filter fuzzily by SQL statement keyword, such as `DROP TABLE`).
// task-operator - String - (filter by sub-UIN)
// task-kind - String - (filter by task type)
Filters []*Filter `json:"Filters,omitempty" name:"Filters"`
// Sorting field. Valid values: `create-time` (default value), `update-time`.
SortBy *string `json:"SortBy,omitempty" name:"SortBy"`
// Sorting order. Valid values: `asc` (ascending order), `desc` (descending order). Default value: `asc`.
Sorting *string `json:"Sorting,omitempty" name:"Sorting"`
// Start time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time seven days ago by default.
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// End time in the format of `yyyy-mm-dd HH:MM:SS`, which is the current time by default. The time span is (0, 30] days. Data in the last 45 days can be queried.
EndTime *string `json:"EndTime,omitempty" name:"EndTime"`
// Filter by compute resource name
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
}
Predefined struct for user
type DescribeTasksResponse ¶
type DescribeTasksResponse struct {
*tchttp.BaseResponse
Response *DescribeTasksResponseParams `json:"Response"`
}
func NewDescribeTasksResponse ¶
func NewDescribeTasksResponse() (response *DescribeTasksResponse)
func (*DescribeTasksResponse) FromJsonString ¶
func (r *DescribeTasksResponse) FromJsonString(s string) error
FromJsonString It is highly **NOT** recommended to use this function because it has no param check, nor strict type check
func (*DescribeTasksResponse) ToJsonString ¶
func (r *DescribeTasksResponse) ToJsonString() string
type DescribeTasksResponseParams ¶
type DescribeTasksResponseParams struct {
// List of task objects.
TaskList []*TaskResponseInfo `json:"TaskList,omitempty" name:"TaskList"`
// Total number of instances
TotalCount *uint64 `json:"TotalCount,omitempty" name:"TotalCount"`
// The task overview.
// Note: This field may return null, indicating that no valid values can be obtained.
TasksOverview *TasksOverview `json:"TasksOverview,omitempty" name:"TasksOverview"`
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type Filter ¶
type Filter struct {
// Attribute name. If more than one filter exists, the logical relationship between these filters is `OR`.
Name *string `json:"Name,omitempty" name:"Name"`
// Attribute value. If multiple values exist in one filter, the logical relationship between these values is `OR`.
Values []*string `json:"Values,omitempty" name:"Values"`
}
type KVPair ¶
type KVPair struct {
// Configured key
// Note: This field may return null, indicating that no valid values can be obtained.
Key *string `json:"Key,omitempty" name:"Key"`
// Configured value
// Note: This field may return null, indicating that no valid values can be obtained.
Value *string `json:"Value,omitempty" name:"Value"`
}
type ModifySparkAppRequest ¶
type ModifySparkAppRequest struct {
*tchttp.BaseRequest
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
// 1: Spark JAR application; 2: Spark streaming application
AppType *int64 `json:"AppType,omitempty" name:"AppType"`
// The data engine executing the Spark job
DataEngine *string `json:"DataEngine,omitempty" name:"DataEngine"`
// Execution entry of the Spark application
AppFile *string `json:"AppFile,omitempty" name:"AppFile"`
// Execution role ID of the Spark job
RoleArn *int64 `json:"RoleArn,omitempty" name:"RoleArn"`
// Driver resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppDriverSize *string `json:"AppDriverSize,omitempty" name:"AppDriverSize"`
// Executor resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppExecutorSize *string `json:"AppExecutorSize,omitempty" name:"AppExecutorSize"`
// Number of Spark job executors
AppExecutorNums *int64 `json:"AppExecutorNums,omitempty" name:"AppExecutorNums"`
// Spark application ID
SparkAppId *string `json:"SparkAppId,omitempty" name:"SparkAppId"`
// This field has been disused. Use the `Datasource` field instead.
Eni *string `json:"Eni,omitempty" name:"Eni"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocal *string `json:"IsLocal,omitempty" name:"IsLocal"`
// Main class of the Spark JAR job during execution
MainClass *string `json:"MainClass,omitempty" name:"MainClass"`
// Spark configurations separated by line break
AppConf *string `json:"AppConf,omitempty" name:"AppConf"`
// JAR resource dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalJars *string `json:"IsLocalJars,omitempty" name:"IsLocalJars"`
// Dependency JAR packages of the Spark JAR job separated by comma
AppJars *string `json:"AppJars,omitempty" name:"AppJars"`
// File resource dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalFiles *string `json:"IsLocalFiles,omitempty" name:"IsLocalFiles"`
// Dependency resources of the Spark job separated by comma
AppFiles *string `json:"AppFiles,omitempty" name:"AppFiles"`
// PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitempty" name:"IsLocalPythonFiles"`
// PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma.
AppPythonFiles *string `json:"AppPythonFiles,omitempty" name:"AppPythonFiles"`
// Command line parameters of the Spark job
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
// This parameter takes effect only for Spark flow tasks.
MaxRetries *int64 `json:"MaxRetries,omitempty" name:"MaxRetries"`
// Data source name
DataSource *string `json:"DataSource,omitempty" name:"DataSource"`
// Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalArchives *string `json:"IsLocalArchives,omitempty" name:"IsLocalArchives"`
// Archives: Dependency resources
AppArchives *string `json:"AppArchives,omitempty" name:"AppArchives"`
}
func NewModifySparkAppRequest ¶
func NewModifySparkAppRequest() (request *ModifySparkAppRequest)
func (*ModifySparkAppRequest) FromJsonString ¶
func (r *ModifySparkAppRequest) FromJsonString(s string) error
FromJsonString: it is strongly recommended NOT to use this function, because it performs no parameter checking and no strict type checking.
func (*ModifySparkAppRequest) ToJsonString ¶
func (r *ModifySparkAppRequest) ToJsonString() string
type ModifySparkAppRequestParams ¶
// ModifySparkAppRequestParams holds the request parameters of the
// ModifySparkApp API, which updates the configuration of an existing
// Spark application.
type ModifySparkAppRequestParams struct {
// Spark application name
AppName *string `json:"AppName,omitempty" name:"AppName"`
// 1: Spark JAR application; 2: Spark streaming application
AppType *int64 `json:"AppType,omitempty" name:"AppType"`
// The data engine executing the Spark job
DataEngine *string `json:"DataEngine,omitempty" name:"DataEngine"`
// Execution entry of the Spark application
AppFile *string `json:"AppFile,omitempty" name:"AppFile"`
// Execution role ID of the Spark job
RoleArn *int64 `json:"RoleArn,omitempty" name:"RoleArn"`
// Driver resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppDriverSize *string `json:"AppDriverSize,omitempty" name:"AppDriverSize"`
// Executor resource specification of the Spark job. Valid values: `small`, `medium`, `large`, `xlarge`.
AppExecutorSize *string `json:"AppExecutorSize,omitempty" name:"AppExecutorSize"`
// Number of Spark job executors
AppExecutorNums *int64 `json:"AppExecutorNums,omitempty" name:"AppExecutorNums"`
// Spark application ID
SparkAppId *string `json:"SparkAppId,omitempty" name:"SparkAppId"`
// This field has been disused. Use the `Datasource` field instead.
Eni *string `json:"Eni,omitempty" name:"Eni"`
// Whether it is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocal *string `json:"IsLocal,omitempty" name:"IsLocal"`
// Main class of the Spark JAR job during execution
MainClass *string `json:"MainClass,omitempty" name:"MainClass"`
// Spark configurations separated by line break
AppConf *string `json:"AppConf,omitempty" name:"AppConf"`
// JAR resource dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalJars *string `json:"IsLocalJars,omitempty" name:"IsLocalJars"`
// Dependency JAR packages of the Spark JAR job separated by comma
AppJars *string `json:"AppJars,omitempty" name:"AppJars"`
// File resource dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalFiles *string `json:"IsLocalFiles,omitempty" name:"IsLocalFiles"`
// Dependency resources of the Spark job separated by comma
AppFiles *string `json:"AppFiles,omitempty" name:"AppFiles"`
// PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitempty" name:"IsLocalPythonFiles"`
// PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma.
AppPythonFiles *string `json:"AppPythonFiles,omitempty" name:"AppPythonFiles"`
// Command line parameters of the Spark job
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
// Maximum number of retries. This parameter takes effect only for Spark flow tasks.
MaxRetries *int64 `json:"MaxRetries,omitempty" name:"MaxRetries"`
// Data source name
DataSource *string `json:"DataSource,omitempty" name:"DataSource"`
// Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
IsLocalArchives *string `json:"IsLocalArchives,omitempty" name:"IsLocalArchives"`
// Archives: Dependency resources
AppArchives *string `json:"AppArchives,omitempty" name:"AppArchives"`
}
Predefined struct for user
type ModifySparkAppResponse ¶
// ModifySparkAppResponse wraps the API response envelope for
// ModifySparkApp; the payload is carried in the Response field.
type ModifySparkAppResponse struct {
*tchttp.BaseResponse
// Response payload; see ModifySparkAppResponseParams.
Response *ModifySparkAppResponseParams `json:"Response"`
}
func NewModifySparkAppResponse ¶
func NewModifySparkAppResponse() (response *ModifySparkAppResponse)
func (*ModifySparkAppResponse) FromJsonString ¶
func (r *ModifySparkAppResponse) FromJsonString(s string) error
FromJsonString: it is strongly recommended NOT to use this function, because it performs no parameter checking and no strict type checking.
func (*ModifySparkAppResponse) ToJsonString ¶
func (r *ModifySparkAppResponse) ToJsonString() string
type ModifySparkAppResponseParams ¶
// ModifySparkAppResponseParams holds the response payload of the
// ModifySparkApp API.
type ModifySparkAppResponseParams struct {
// The unique request ID, which is returned for each request. RequestId is required for locating a problem.
RequestId *string `json:"RequestId,omitempty" name:"RequestId"`
}
Predefined struct for user
type SparkJobInfo ¶
// SparkJobInfo describes a Spark job: its identity, execution
// configuration (entry file, resources, dependencies), and latest
// run status.
type SparkJobInfo struct {
// Spark job ID
JobId *string `json:"JobId,omitempty" name:"JobId"`
// Spark job name
JobName *string `json:"JobName,omitempty" name:"JobName"`
// Spark job type. Valid values: `1` (batch job), `2` (streaming job).
JobType *int64 `json:"JobType,omitempty" name:"JobType"`
// Engine name
DataEngine *string `json:"DataEngine,omitempty" name:"DataEngine"`
// This field has been disused. Use the `Datasource` field instead.
Eni *string `json:"Eni,omitempty" name:"Eni"`
// Whether the program package is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocal *string `json:"IsLocal,omitempty" name:"IsLocal"`
// Program package path
JobFile *string `json:"JobFile,omitempty" name:"JobFile"`
// Role ID
RoleArn *int64 `json:"RoleArn,omitempty" name:"RoleArn"`
// Main class of Spark job execution
MainClass *string `json:"MainClass,omitempty" name:"MainClass"`
// Command line parameters of the Spark job separated by space
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
// Native Spark configurations separated by line break
JobConf *string `json:"JobConf,omitempty" name:"JobConf"`
// Whether the dependency JAR packages are uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalJars *string `json:"IsLocalJars,omitempty" name:"IsLocalJars"`
// Dependency JAR packages of the Spark job separated by comma
JobJars *string `json:"JobJars,omitempty" name:"JobJars"`
// Whether the dependency file is uploaded locally. Valid values: `cos`, `lakefs`.
IsLocalFiles *string `json:"IsLocalFiles,omitempty" name:"IsLocalFiles"`
// Dependency files of the Spark job separated by comma
JobFiles *string `json:"JobFiles,omitempty" name:"JobFiles"`
// Driver resource size of the Spark job
JobDriverSize *string `json:"JobDriverSize,omitempty" name:"JobDriverSize"`
// Executor resource size of the Spark job
JobExecutorSize *string `json:"JobExecutorSize,omitempty" name:"JobExecutorSize"`
// Number of Spark job executors
JobExecutorNums *int64 `json:"JobExecutorNums,omitempty" name:"JobExecutorNums"`
// Maximum number of retries of the Spark flow task
JobMaxAttempts *int64 `json:"JobMaxAttempts,omitempty" name:"JobMaxAttempts"`
// Spark job creator
JobCreator *string `json:"JobCreator,omitempty" name:"JobCreator"`
// Spark job creation time
JobCreateTime *int64 `json:"JobCreateTime,omitempty" name:"JobCreateTime"`
// Spark job update time
JobUpdateTime *uint64 `json:"JobUpdateTime,omitempty" name:"JobUpdateTime"`
// Last task ID of the Spark job
CurrentTaskId *string `json:"CurrentTaskId,omitempty" name:"CurrentTaskId"`
// Last status of the Spark job
JobStatus *int64 `json:"JobStatus,omitempty" name:"JobStatus"`
// Spark streaming job statistics
// Note: This field may return null, indicating that no valid values can be obtained.
StreamingStat *StreamingStatistics `json:"StreamingStat,omitempty" name:"StreamingStat"`
// Data source name
// Note: This field may return null, indicating that no valid values can be obtained.
DataSource *string `json:"DataSource,omitempty" name:"DataSource"`
// PySpark: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
// Note: This field may return null, indicating that no valid values can be obtained.
IsLocalPythonFiles *string `json:"IsLocalPythonFiles,omitempty" name:"IsLocalPythonFiles"`
// Note: This returned value has been disused.
// Note: This field may return null, indicating that no valid values can be obtained.
AppPythonFiles *string `json:"AppPythonFiles,omitempty" name:"AppPythonFiles"`
// Archives: Dependency upload method. 1: cos; 2: lakefs (this method needs to be used in the console but cannot be called through APIs).
// Note: This field may return null, indicating that no valid values can be obtained.
IsLocalArchives *string `json:"IsLocalArchives,omitempty" name:"IsLocalArchives"`
// Archives: Dependency resources
// Note: This field may return null, indicating that no valid values can be obtained.
JobArchives *string `json:"JobArchives,omitempty" name:"JobArchives"`
// PySpark: Python dependency, which can be in .py, .zip, or .egg format. Multiple files should be separated by comma.
// Note: This field may return null, indicating that no valid values can be obtained.
JobPythonFiles *string `json:"JobPythonFiles,omitempty" name:"JobPythonFiles"`
// Number of tasks running or ready to run under the current job
// Note: This field may return null, indicating that no valid values can be obtained.
TaskNum *int64 `json:"TaskNum,omitempty" name:"TaskNum"`
// Engine status. -100 (default value): unknown; -2–11: normal.
// Note: This field may return null, indicating that no valid values can be obtained.
DataEngineStatus *int64 `json:"DataEngineStatus,omitempty" name:"DataEngineStatus"`
}
type StreamingStatistics ¶
// StreamingStatistics carries runtime metrics of a Spark streaming
// job: receiver counts, batch counts, and average rates/latencies.
type StreamingStatistics struct {
// Task start time
StartTime *string `json:"StartTime,omitempty" name:"StartTime"`
// Number of data receivers
Receivers *int64 `json:"Receivers,omitempty" name:"Receivers"`
// Number of receivers in service
NumActiveReceivers *int64 `json:"NumActiveReceivers,omitempty" name:"NumActiveReceivers"`
// Number of inactive receivers
NumInactiveReceivers *int64 `json:"NumInactiveReceivers,omitempty" name:"NumInactiveReceivers"`
// Number of running batches
NumActiveBatches *int64 `json:"NumActiveBatches,omitempty" name:"NumActiveBatches"`
// Number of completed batches retained (per the field name; the
// original doc said "batches to be processed" — TODO confirm against the API).
NumRetainedCompletedBatches *int64 `json:"NumRetainedCompletedBatches,omitempty" name:"NumRetainedCompletedBatches"`
// Total number of completed batches
NumTotalCompletedBatches *int64 `json:"NumTotalCompletedBatches,omitempty" name:"NumTotalCompletedBatches"`
// Average input speed
AverageInputRate *float64 `json:"AverageInputRate,omitempty" name:"AverageInputRate"`
// Average queue time
AverageSchedulingDelay *float64 `json:"AverageSchedulingDelay,omitempty" name:"AverageSchedulingDelay"`
// Average processing time
AverageProcessingTime *float64 `json:"AverageProcessingTime,omitempty" name:"AverageProcessingTime"`
// Average latency
AverageTotalDelay *float64 `json:"AverageTotalDelay,omitempty" name:"AverageTotalDelay"`
}
type TaskResponseInfo ¶
// TaskResponseInfo describes a single task returned by task-query
// APIs: its SQL, execution status, timing, engine, and result metadata.
type TaskResponseInfo struct {
// Database name of the task
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// Data volume of the task
DataAmount *int64 `json:"DataAmount,omitempty" name:"DataAmount"`
// Task ID
Id *string `json:"Id,omitempty" name:"Id"`
// The compute time in ms.
UsedTime *int64 `json:"UsedTime,omitempty" name:"UsedTime"`
// Task output path
OutputPath *string `json:"OutputPath,omitempty" name:"OutputPath"`
// Task creation time
CreateTime *string `json:"CreateTime,omitempty" name:"CreateTime"`
// Task status. Valid values: `0` (initial), `1` (executing), `2` (executed successfully), `-1` (failed to execute), `-3` (canceled).
State *int64 `json:"State,omitempty" name:"State"`
// SQL statement type of the task, such as DDL and DML.
SQLType *string `json:"SQLType,omitempty" name:"SQLType"`
// SQL statement of the task
SQL *string `json:"SQL,omitempty" name:"SQL"`
// Whether the result has expired
ResultExpired *bool `json:"ResultExpired,omitempty" name:"ResultExpired"`
// Number of affected data rows
RowAffectInfo *string `json:"RowAffectInfo,omitempty" name:"RowAffectInfo"`
// Dataset of task results
// Note: This field may return null, indicating that no valid values can be obtained.
DataSet *string `json:"DataSet,omitempty" name:"DataSet"`
// Failure information, such as `errorMessage`. This field has been disused.
Error *string `json:"Error,omitempty" name:"Error"`
// Task progress (%)
Percentage *int64 `json:"Percentage,omitempty" name:"Percentage"`
// Output information of task execution
OutputMessage *string `json:"OutputMessage,omitempty" name:"OutputMessage"`
// Type of the engine executing the SQL statement
TaskType *string `json:"TaskType,omitempty" name:"TaskType"`
// Task progress details
// Note: This field may return null, indicating that no valid values can be obtained.
ProgressDetail *string `json:"ProgressDetail,omitempty" name:"ProgressDetail"`
// Task end time
// Note: This field may return null, indicating that no valid values can be obtained.
UpdateTime *string `json:"UpdateTime,omitempty" name:"UpdateTime"`
// Compute resource ID
// Note: This field may return null, indicating that no valid values can be obtained.
DataEngineId *string `json:"DataEngineId,omitempty" name:"DataEngineId"`
// Sub-UIN that executes the SQL statement
// Note: This field may return null, indicating that no valid values can be obtained.
OperateUin *string `json:"OperateUin,omitempty" name:"OperateUin"`
// Compute resource name
// Note: This field may return null, indicating that no valid values can be obtained.
DataEngineName *string `json:"DataEngineName,omitempty" name:"DataEngineName"`
// Whether the import type is local import or COS
// Note: This field may return null, indicating that no valid values can be obtained.
InputType *string `json:"InputType,omitempty" name:"InputType"`
// Import configuration
// Note: This field may return null, indicating that no valid values can be obtained.
InputConf *string `json:"InputConf,omitempty" name:"InputConf"`
// Number of data entries
// Note: This field may return null, indicating that no valid values can be obtained.
DataNumber *int64 `json:"DataNumber,omitempty" name:"DataNumber"`
// Whether the data can be downloaded
// Note: This field may return null, indicating that no valid values can be obtained.
CanDownload *bool `json:"CanDownload,omitempty" name:"CanDownload"`
// User alias
// Note: This field may return null, indicating that no valid values can be obtained.
UserAlias *string `json:"UserAlias,omitempty" name:"UserAlias"`
// Spark application job name
// Note: This field may return null, indicating that no valid values can be obtained.
SparkJobName *string `json:"SparkJobName,omitempty" name:"SparkJobName"`
// Spark application job ID
// Note: This field may return null, indicating that no valid values can be obtained.
SparkJobId *string `json:"SparkJobId,omitempty" name:"SparkJobId"`
// JAR file of the Spark application entry
// Note: This field may return null, indicating that no valid values can be obtained.
SparkJobFile *string `json:"SparkJobFile,omitempty" name:"SparkJobFile"`
// Spark UI URL
// Note: This field may return null, indicating that no valid values can be obtained.
UiUrl *string `json:"UiUrl,omitempty" name:"UiUrl"`
// The task time in ms.
// Note: This field may return null, indicating that no valid values can be obtained.
TotalTime *int64 `json:"TotalTime,omitempty" name:"TotalTime"`
// The program entry parameter for running a task under a Spark job.
// Note: This field may return null, indicating that no valid values can be obtained.
CmdArgs *string `json:"CmdArgs,omitempty" name:"CmdArgs"`
}
type TaskResultInfo ¶
// TaskResultInfo describes the execution result of a single task,
// including its SQL, status, result schema/rows, and pagination token.
type TaskResultInfo struct {
// Unique task ID
TaskId *string `json:"TaskId,omitempty" name:"TaskId"`
// Name of the default selected data source when the current job is executed
// Note: This field may return null, indicating that no valid values can be obtained.
DatasourceConnectionName *string `json:"DatasourceConnectionName,omitempty" name:"DatasourceConnectionName"`
// Name of the default selected database when the current job is executed
// Note: This field may return null, indicating that no valid values can be obtained.
DatabaseName *string `json:"DatabaseName,omitempty" name:"DatabaseName"`
// The currently executed SQL statement. Each task contains one SQL statement.
SQL *string `json:"SQL,omitempty" name:"SQL"`
// Type of the executed task. Valid values: `DDL`, `DML`, `DQL`.
SQLType *string `json:"SQLType,omitempty" name:"SQLType"`
// Current status of the task. `0`: initial; `1`: task running; `2`: task execution succeeded; `-1`: task execution failed; `-3`: task terminated manually by the user. The task execution result will be returned only if task execution succeeds.
State *int64 `json:"State,omitempty" name:"State"`
// Amount of the data scanned in bytes
DataAmount *int64 `json:"DataAmount,omitempty" name:"DataAmount"`
// The compute time in ms.
UsedTime *int64 `json:"UsedTime,omitempty" name:"UsedTime"`
// Address of the COS bucket for storing the task result
OutputPath *string `json:"OutputPath,omitempty" name:"OutputPath"`
// Task creation timestamp
CreateTime *string `json:"CreateTime,omitempty" name:"CreateTime"`
// Task execution information. `success` will be returned if the task succeeds; otherwise, the failure cause will be returned.
OutputMessage *string `json:"OutputMessage,omitempty" name:"OutputMessage"`
// Number of affected rows
RowAffectInfo *string `json:"RowAffectInfo,omitempty" name:"RowAffectInfo"`
// Schema information of the result
// Note: This field may return null, indicating that no valid values can be obtained.
ResultSchema []*Column `json:"ResultSchema,omitempty" name:"ResultSchema"`
// Result information. After it is unescaped, each element of the outer array is a data row.
// Note: This field may return null, indicating that no valid values can be obtained.
ResultSet *string `json:"ResultSet,omitempty" name:"ResultSet"`
// Pagination information. If there is no more result data, `nextToken` will be empty.
NextToken *string `json:"NextToken,omitempty" name:"NextToken"`
// Task progress (%)
Percentage *int64 `json:"Percentage,omitempty" name:"Percentage"`
// Task progress details
ProgressDetail *string `json:"ProgressDetail,omitempty" name:"ProgressDetail"`
// Console display format. Valid values: `table`, `text`.
DisplayFormat *string `json:"DisplayFormat,omitempty" name:"DisplayFormat"`
// The task time in ms.
TotalTime *int64 `json:"TotalTime,omitempty" name:"TotalTime"`
}
type TasksInfo ¶
// TasksInfo describes a batch of tasks to submit: their type,
// failure-tolerance policy, SQL payload, and configuration.
type TasksInfo struct {
// Task type. Valid values: `SQLTask` (SQL query task), `SparkSQLTask` (Spark SQL query task).
TaskType *string `json:"TaskType,omitempty" name:"TaskType"`
// Fault tolerance policy. `Proceed`: continues to execute subsequent tasks after the current task fails or is canceled. `Terminate`: terminates the execution of subsequent tasks after the current task fails or is canceled, and marks all subsequent tasks as canceled.
FailureTolerance *string `json:"FailureTolerance,omitempty" name:"FailureTolerance"`
// Base64-encrypted SQL statements separated by ";". Up to 50 tasks can be submitted at a time, and they will be executed strictly in sequence.
SQL *string `json:"SQL,omitempty" name:"SQL"`
// Configuration information of the task. Currently, only `SparkSQLTask` tasks are supported.
Config []*KVPair `json:"Config,omitempty" name:"Config"`
// User-defined parameters of the task
Params []*KVPair `json:"Params,omitempty" name:"Params"`
}
type TasksOverview ¶
// TasksOverview summarizes task counts by state within a time range.
type TasksOverview struct {
// The number of tasks in queue.
TaskQueuedCount *int64 `json:"TaskQueuedCount,omitempty" name:"TaskQueuedCount"`
// The number of initialized tasks.
TaskInitCount *int64 `json:"TaskInitCount,omitempty" name:"TaskInitCount"`
// The number of tasks in progress.
TaskRunningCount *int64 `json:"TaskRunningCount,omitempty" name:"TaskRunningCount"`
// The total number of tasks in this time range.
TotalTaskCount *int64 `json:"TotalTaskCount,omitempty" name:"TotalTaskCount"`
}