Documentation ¶
Overview ¶
Package v1 contains the API of ML services. It also provides a generated reverse proxy that translates gRPC into RESTful JSON APIs.
Index ¶
- Constants
- Variables
- func Equals(source, other []*ServiceStatus) bool
- func MLServicesURL(deplURL string) string
- func RegisterMLServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error
- func RegisterMLServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client MLServiceClient) error
- func RegisterMLServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, ...) (err error)
- func RegisterMLServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server MLServiceServer) error
- func RegisterMLServiceServer(s *grpc.Server, srv MLServiceServer)
- type ListMLServicesSizesRequest
- func (*ListMLServicesSizesRequest) Descriptor() ([]byte, []int) deprecated
- func (x *ListMLServicesSizesRequest) GetDeploymentId() string
- func (*ListMLServicesSizesRequest) ProtoMessage()
- func (x *ListMLServicesSizesRequest) ProtoReflect() protoreflect.Message
- func (x *ListMLServicesSizesRequest) Reset()
- func (x *ListMLServicesSizesRequest) String() string
- type MLServiceClient
- type MLServiceServer
- type MLServices
- func (*MLServices) Descriptor() ([]byte, []int) deprecated
- func (x *MLServices) GetCreatedAt() *timestamppb.Timestamp
- func (x *MLServices) GetDeploymentId() string
- func (x *MLServices) GetEnabled() bool
- func (x *MLServices) GetSize() string
- func (x *MLServices) GetStatus() *Status
- func (ml *MLServices) IsExpired() bool
- func (*MLServices) ProtoMessage()
- func (x *MLServices) ProtoReflect() protoreflect.Message
- func (x *MLServices) Reset()
- func (x *MLServices) String() string
- type MLServicesSize
- func (*MLServicesSize) Descriptor() ([]byte, []int) deprecated
- func (x *MLServicesSize) GetCpu() float32
- func (x *MLServicesSize) GetGpu() float32
- func (x *MLServicesSize) GetIsDefault() bool
- func (x *MLServicesSize) GetMemory() int32
- func (x *MLServicesSize) GetSizeId() string
- func (*MLServicesSize) ProtoMessage()
- func (x *MLServicesSize) ProtoReflect() protoreflect.Message
- func (x *MLServicesSize) Reset()
- func (x *MLServicesSize) String() string
- type MLServicesSizeList
- func (*MLServicesSizeList) Descriptor() ([]byte, []int) deprecated
- func (x *MLServicesSizeList) GetItems() []*MLServicesSize
- func (*MLServicesSizeList) ProtoMessage()
- func (x *MLServicesSizeList) ProtoReflect() protoreflect.Message
- func (x *MLServicesSizeList) Reset()
- func (x *MLServicesSizeList) String() string
- type ServiceStatus
- func (*ServiceStatus) Descriptor() ([]byte, []int) deprecated
- func (source *ServiceStatus) Equals(other *ServiceStatus) bool
- func (x *ServiceStatus) GetAvailable() bool
- func (x *ServiceStatus) GetFailed() bool
- func (x *ServiceStatus) GetReplicas() int32
- func (x *ServiceStatus) GetType() string
- func (x *ServiceStatus) GetUsage() *ServiceStatus_Usage
- func (*ServiceStatus) ProtoMessage()
- func (x *ServiceStatus) ProtoReflect() protoreflect.Message
- func (x *ServiceStatus) Reset()
- func (x *ServiceStatus) String() string
- type ServiceStatus_Usage
- func (*ServiceStatus_Usage) Descriptor() ([]byte, []int) deprecated
- func (source *ServiceStatus_Usage) Equals(other *ServiceStatus_Usage) bool
- func (x *ServiceStatus_Usage) GetLastCpuLimit() float32
- func (x *ServiceStatus_Usage) GetLastCpuUsage() float32
- func (x *ServiceStatus_Usage) GetLastMemoryLimit() int64
- func (x *ServiceStatus_Usage) GetLastMemoryUsage() int64
- func (*ServiceStatus_Usage) ProtoMessage()
- func (x *ServiceStatus_Usage) ProtoReflect() protoreflect.Message
- func (x *ServiceStatus_Usage) Reset()
- func (x *ServiceStatus_Usage) String() string
- type Status
- func (*Status) Descriptor() ([]byte, []int) deprecated
- func (status *Status) EnsureServiceStatus(svcStatus *ServiceStatus)
- func (source *Status) Equals(other *Status) bool
- func (x *Status) GetExpiresAt() *timestamppb.Timestamp
- func (x *Status) GetHoursAllowed() float32
- func (x *Status) GetHoursUsed() float32
- func (x *Status) GetLastEnabledAt() *timestamppb.Timestamp
- func (x *Status) GetLastUpdatedAt() *timestamppb.Timestamp
- func (x *Status) GetMessage() string
- func (x *Status) GetPhase() string
- func (x *Status) GetServices() []*ServiceStatus
- func (*Status) ProtoMessage()
- func (x *Status) ProtoReflect() protoreflect.Message
- func (x *Status) Reset()
- func (x *Status) String() string
- type UnimplementedMLServiceServer
- func (*UnimplementedMLServiceServer) GetAPIVersion(context.Context, *v1.Empty) (*v1.Version, error)
- func (*UnimplementedMLServiceServer) GetMLServices(context.Context, *v1.IDOptions) (*MLServices, error)
- func (*UnimplementedMLServiceServer) ListMLServicesSizes(context.Context, *ListMLServicesSizesRequest) (*MLServicesSizeList, error)
- func (*UnimplementedMLServiceServer) UpdateMLServices(context.Context, *MLServices) (*MLServices, error)
Constants ¶
const (
	// PermissionGetMLServices is needed for getting MLServices.
	PermissionGetMLServices = "ml.mlservices.get"
	// PermissionUpdateMLServices is needed for updating MLServices.
	PermissionUpdateMLServices = "ml.mlservices.update"
)
const (
	// QuotaKindMLTrialJobUsageHours limits the total number of hours ML jobs can be run for,
	// for free deployments.
	// This kind of quota must be requested on a project level.
	QuotaKindMLTrialJobUsageHours = "ml.trial-usage-hours"
	// QuotaKindMLTrialExpiryDays limits the number of days ML may be enabled for free deployments.
	// This kind of quota must be requested on a project level.
	QuotaKindMLTrialExpiryDays = "ml.trial-expiry-days"
)
const (
	// The services needed for ArangoGraphML are being installed.
	MLServicesPhaseInitialising = "Initialising"
	// ArangoDB Deployment is being bootstrapped with the required databases, schemas and data.
	MLServicesPhaseBootstrapping = "Bootstrapping"
	// ArangoGraphML is set up and running correctly.
	MLServicesPhaseRunning = "Running"
	// Indicates that there was an error with setting up ArangoGraphML. Check `message` field for additional info.
	MLServicesPhaseError = "Error"
	// Indicates that ArangoGraphML and all its associated services are hibernated.
	MLServicesPhaseHibernated = "Hibernated"
)
const (
	// ServiceTypePrediction indicates that the service is a prediction API service.
	ServiceTypePrediction = "prediction"
	// ServiceTypeTraining indicates that the service is a training API service.
	ServiceTypeTraining = "training"
	// ServiceTypeProjects indicates that the service is a projects API service.
	ServiceTypeProjects = "projects"
)
const (
	// UsageItemResourceKindMLServices is the kind used inside the UsageItem.Resource to refer to a MLServices resource.
	UsageItemResourceKindMLServices = "MLServices"
	// UsageItemResourceKindMLServicesJob is the kind used inside the UsageItem.Resource to refer to an ML Job.
	UsageItemResourceKindMLServicesJob = "MLServicesJob"
)
const (
	// APIID contains the identifier of this API.
	APIID = "ml/v1"
	// APIMajorVersion contains the major version of this API.
	APIMajorVersion = 2
	// APIMinorVersion contains the minor version of this API.
	APIMinorVersion = 3
	// APIPatchVersion contains the patch version of this API.
	APIPatchVersion = 0
)
const (
	// EventTypeMLServicesUpdated is fired after MLServices for a deployment has been updated.
	// SubjectID contains the Deployment ID.
	EventTypeMLServicesUpdated = "ml.mlservices.updated"
)
const (
// KindMLServices is a constant for MLServices resources.
KindMLServices = "MLServices"
)
const (
// PermissionListMLServicesSize is needed for listing MLServicesSize.
PermissionListMLServicesSize = "ml.mlservicessize.list"
)
Variables ¶
var File_ml_proto protoreflect.FileDescriptor
Functions ¶
func Equals ¶ added in v0.79.2
func Equals(source, other []*ServiceStatus) bool
Equals returns true when source and other have the same values.
func MLServicesURL ¶
func MLServicesURL(deplURL string) string
MLServicesURL creates a resource URL for the MLServices resources from the given deployment URL.
func RegisterMLServiceHandler ¶
func RegisterMLServiceHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error
RegisterMLServiceHandler registers the http handlers for service MLService to "mux". The handlers forward requests to the grpc endpoint over "conn".
func RegisterMLServiceHandlerClient ¶
func RegisterMLServiceHandlerClient(ctx context.Context, mux *runtime.ServeMux, client MLServiceClient) error
RegisterMLServiceHandlerClient registers the http handlers for service MLService to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "MLServiceClient". Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "MLServiceClient" doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in "MLServiceClient" to call the correct interceptors.
func RegisterMLServiceHandlerFromEndpoint ¶
func RegisterMLServiceHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error)
RegisterMLServiceHandlerFromEndpoint is same as RegisterMLServiceHandler but automatically dials to "endpoint" and closes the connection when "ctx" gets done.
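A minimal sketch of exposing the JSON gateway via this function. The grpc-gateway runtime import path, the insecure transport credentials, the listen addresses, and the import path of this package are illustrative assumptions, not values taken from this documentation.

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/v2/runtime" // import path depends on your grpc-gateway version
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Gateway mux that translates RESTful JSON calls into gRPC.
	mux := runtime.NewServeMux()
	// Insecure credentials for illustration only; a real endpoint needs TLS and authentication.
	opts := []grpc.DialOption{grpc.WithTransportCredentials(insecure.NewCredentials())}

	// Dial the gRPC endpoint (placeholder address) and register the MLService handlers.
	if err := ml.RegisterMLServiceHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		log.Fatal(err)
	}

	// Serve the JSON gateway.
	log.Fatal(http.ListenAndServe(":8080", mux))
}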
func RegisterMLServiceHandlerServer ¶
func RegisterMLServiceHandlerServer(ctx context.Context, mux *runtime.ServeMux, server MLServiceServer) error
RegisterMLServiceHandlerServer registers the http handlers for service MLService to "mux". UnaryRPC: call MLServiceServer directly. StreamingRPC: currently unsupported, pending https://github.com/grpc/grpc-go/issues/906. Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterMLServiceHandlerFromEndpoint instead.
func RegisterMLServiceServer ¶
func RegisterMLServiceServer(s *grpc.Server, srv MLServiceServer)
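A minimal sketch of registering and serving an MLService implementation over gRPC. The listen address, the import path, and the mlServer type are illustrative assumptions; see the MLServiceServer sketch further below for overriding an RPC method.

package main

import (
	"log"
	"net"

	"google.golang.org/grpc"

	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// mlServer is a hypothetical implementation; embedding
// UnimplementedMLServiceServer keeps it forward compatible.
type mlServer struct {
	ml.UnimplementedMLServiceServer
}

func main() {
	lis, err := net.Listen("tcp", ":9090") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	ml.RegisterMLServiceServer(s, &mlServer{})
	log.Fatal(s.Serve(lis))
}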
Types ¶
type ListMLServicesSizesRequest ¶ added in v0.87.0
type ListMLServicesSizesRequest struct {
	// Optional ID of the Deployment for which sizes are being requested.
	// If set, the response will exclude any sizes that are unavailable for the specified deployment model.
	DeploymentId string `protobuf:"bytes,1,opt,name=deployment_id,json=deploymentId,proto3" json:"deployment_id,omitempty"`
	// contains filtered or unexported fields
}
func (*ListMLServicesSizesRequest) Descriptor ¶ deprecated added in v0.87.0
func (*ListMLServicesSizesRequest) Descriptor() ([]byte, []int)
Deprecated: Use ListMLServicesSizesRequest.ProtoReflect.Descriptor instead.
func (*ListMLServicesSizesRequest) GetDeploymentId ¶ added in v0.87.0
func (x *ListMLServicesSizesRequest) GetDeploymentId() string
func (*ListMLServicesSizesRequest) ProtoMessage ¶ added in v0.87.0
func (*ListMLServicesSizesRequest) ProtoMessage()
func (*ListMLServicesSizesRequest) ProtoReflect ¶ added in v0.89.0
func (x *ListMLServicesSizesRequest) ProtoReflect() protoreflect.Message
func (*ListMLServicesSizesRequest) Reset ¶ added in v0.87.0
func (x *ListMLServicesSizesRequest) Reset()
func (*ListMLServicesSizesRequest) String ¶ added in v0.87.0
func (x *ListMLServicesSizesRequest) String() string
type MLServiceClient ¶
type MLServiceClient interface {
	// Get the current API version of this service.
	// Required permissions:
	// - None
	GetAPIVersion(ctx context.Context, in *v1.Empty, opts ...grpc.CallOption) (*v1.Version, error)
	// Get an existing MLServices resource for a given deployment (specified by the id).
	// Required permissions:
	// - ml.mlservices.get
	GetMLServices(ctx context.Context, in *v1.IDOptions, opts ...grpc.CallOption) (*MLServices, error)
	// Update an existing MLServices resource. If it does not exist, this will create a new one.
	// Pass the desired updated state of MLServices to this call.
	// Required permissions:
	// - ml.mlservices.update
	UpdateMLServices(ctx context.Context, in *MLServices, opts ...grpc.CallOption) (*MLServices, error)
	// List the available size configurations for MLServices.
	// Note that the returned size specifications are applied for ML Jobs.
	// Required permissions:
	// - ml.mlservicessize.list on the deployment (if deployment_id is provided)
	// - None, authenticated only
	ListMLServicesSizes(ctx context.Context, in *ListMLServicesSizesRequest, opts ...grpc.CallOption) (*MLServicesSizeList, error)
}
MLServiceClient is the client API for MLService service.
For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
func NewMLServiceClient ¶
func NewMLServiceClient(cc grpc.ClientConnInterface) MLServiceClient
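A minimal client sketch. The endpoint, the insecure transport credentials, the import paths, and the assumption that the shared IDOptions message carries the deployment ID in an Id field are all illustrative; a real ArangoGraph endpoint requires TLS and authentication.

package main

import (
	"context"
	"fmt"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	common "github.com/arangodb-managed/apis/common/v1" // assumed import path of the shared API package (v1.Empty, v1.IDOptions, ...)
	ml "github.com/arangodb-managed/apis/ml/v1"          // assumed import path of this package
)

func main() {
	// Placeholder endpoint and insecure credentials, for illustration only.
	conn, err := grpc.Dial("localhost:9090", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := ml.NewMLServiceClient(conn)

	// Fetch the MLServices resource for a deployment; the Id field name is assumed.
	svc, err := client.GetMLServices(context.Background(), &common.IDOptions{Id: "hypothetical-deployment-id"})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ML enabled:", svc.GetEnabled())
}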
type MLServiceServer ¶
type MLServiceServer interface {
	// Get the current API version of this service.
	// Required permissions:
	// - None
	GetAPIVersion(context.Context, *v1.Empty) (*v1.Version, error)
	// Get an existing MLServices resource for a given deployment (specified by the id).
	// Required permissions:
	// - ml.mlservices.get
	GetMLServices(context.Context, *v1.IDOptions) (*MLServices, error)
	// Update an existing MLServices resource. If it does not exist, this will create a new one.
	// Pass the desired updated state of MLServices to this call.
	// Required permissions:
	// - ml.mlservices.update
	UpdateMLServices(context.Context, *MLServices) (*MLServices, error)
	// List the available size configurations for MLServices.
	// Note that the returned size specifications are applied for ML Jobs.
	// Required permissions:
	// - ml.mlservicessize.list on the deployment (if deployment_id is provided)
	// - None, authenticated only
	ListMLServicesSizes(context.Context, *ListMLServicesSizesRequest) (*MLServicesSizeList, error)
}
MLServiceServer is the server API for MLService service.
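A hedged sketch of implementing the interface by embedding UnimplementedMLServiceServer and overriding one RPC. The apiServer type, the import paths, and the placeholder return value are assumptions; the IDOptions fields are not documented here, so the request is not inspected.

package mlexample // illustrative only

import (
	"context"

	common "github.com/arangodb-managed/apis/common/v1" // assumed import path of the shared API package (v1.Empty, v1.IDOptions, ...)
	ml "github.com/arangodb-managed/apis/ml/v1"          // assumed import path of this package
)

// apiServer is a hypothetical MLServiceServer implementation. Embedding
// UnimplementedMLServiceServer keeps it forward compatible: RPCs that are
// not overridden return an Unimplemented error.
type apiServer struct {
	ml.UnimplementedMLServiceServer
}

// GetMLServices overrides the embedded stub with a toy implementation.
// opts identifies the deployment; its field layout is not shown in this
// document, so a placeholder ID is returned instead.
func (s *apiServer) GetMLServices(ctx context.Context, opts *common.IDOptions) (*ml.MLServices, error) {
	return &ml.MLServices{
		DeploymentId: "placeholder-deployment-id",
		Enabled:      true,
	}, nil
}

// Compile-time check that apiServer satisfies the interface.
var _ ml.MLServiceServer = (*apiServer)(nil)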
type MLServices ¶
type MLServices struct {
	// Identifier of the deployment for this MLServices resource.
	// This is a read-only value.
	DeploymentId string `protobuf:"bytes,1,opt,name=deployment_id,json=deploymentId,proto3" json:"deployment_id,omitempty"`
	// Set to true if ML services are enabled for this deployment.
	Enabled bool `protobuf:"varint,2,opt,name=enabled,proto3" json:"enabled,omitempty"`
	// Size to use for the ML Jobs.
	// Use `ListMLServicesSizes` to get a list of available sizes.
	// If unspecified, the MLServicesSize marked as `is_default` is used.
	// This is an optional field.
	Size string `protobuf:"bytes,3,opt,name=size,proto3" json:"size,omitempty"`
	// The creation timestamp of the MLServices.
	// This also serves as a timestamp of when MLServices were first enabled.
	// This is a read-only value.
	CreatedAt *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"`
	// Status of the MLServices.
	// This is a read-only value.
	Status *Status `protobuf:"bytes,100,opt,name=status,proto3" json:"status,omitempty"`
	// contains filtered or unexported fields
}
MLServices is a single resource which represents the state and configuration of ML Services (ArangoGraphML) for a deployment specified by deployment_id.
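A minimal sketch of enabling ML services by submitting the desired state via UpdateMLServices; the EnableML helper, the import path, and the caller-supplied client and deployment ID are hypothetical.

package mlexample // illustrative only

import (
	"context"

	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// EnableML turns on ML services for the given deployment by submitting the
// desired state; leaving Size empty means the default MLServicesSize applies.
func EnableML(ctx context.Context, client ml.MLServiceClient, deploymentID string) (*ml.MLServices, error) {
	return client.UpdateMLServices(ctx, &ml.MLServices{
		DeploymentId: deploymentID,
		Enabled:      true,
	})
}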
func (*MLServices) Descriptor ¶ deprecated
func (*MLServices) Descriptor() ([]byte, []int)
Deprecated: Use MLServices.ProtoReflect.Descriptor instead.
func (*MLServices) GetCreatedAt ¶ added in v0.88.0
func (x *MLServices) GetCreatedAt() *timestamppb.Timestamp
func (*MLServices) GetDeploymentId ¶
func (x *MLServices) GetDeploymentId() string
func (*MLServices) GetEnabled ¶
func (x *MLServices) GetEnabled() bool
func (*MLServices) GetSize ¶ added in v0.89.0
func (x *MLServices) GetSize() string
func (*MLServices) GetStatus ¶ added in v0.79.0
func (x *MLServices) GetStatus() *Status
func (*MLServices) IsExpired ¶ added in v0.88.3
func (ml *MLServices) IsExpired() bool
IsExpired returns true if the MLServices has expired.
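A small usage sketch; the CheckUsable helper and the import path are hypothetical.

package mlexample // illustrative only

import (
	"fmt"

	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// CheckUsable is a hypothetical guard that rejects expired MLServices.
func CheckUsable(svc *ml.MLServices) error {
	if svc.IsExpired() {
		return fmt.Errorf("ML services for deployment %s have expired", svc.GetDeploymentId())
	}
	return nil
}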
func (*MLServices) ProtoMessage ¶
func (*MLServices) ProtoMessage()
func (*MLServices) ProtoReflect ¶ added in v0.89.0
func (x *MLServices) ProtoReflect() protoreflect.Message
func (*MLServices) Reset ¶
func (x *MLServices) Reset()
func (*MLServices) String ¶
func (x *MLServices) String() string
type MLServicesSize ¶ added in v0.87.0
type MLServicesSize struct {
	// Identifier of the size configuration.
	SizeId string `protobuf:"bytes,1,opt,name=size_id,json=sizeId,proto3" json:"size_id,omitempty"`
	// If set, this is the default size when unspecified in MLServices.
	IsDefault bool `protobuf:"varint,2,opt,name=is_default,json=isDefault,proto3" json:"is_default,omitempty"`
	// Amount of CPU allocated (in vCPU units)
	Cpu float32 `protobuf:"fixed32,3,opt,name=cpu,proto3" json:"cpu,omitempty"`
	// Amount of Memory allocated (in GB)
	Memory int32 `protobuf:"varint,4,opt,name=memory,proto3" json:"memory,omitempty"`
	// Amount of GPUs allocated
	Gpu float32 `protobuf:"fixed32,5,opt,name=gpu,proto3" json:"gpu,omitempty"`
	// contains filtered or unexported fields
}
MLServicesSize represents the resources allocated for MLServices. Note that the specified configuration is applied for the ML jobs.
func (*MLServicesSize) Descriptor ¶ deprecated added in v0.87.0
func (*MLServicesSize) Descriptor() ([]byte, []int)
Deprecated: Use MLServicesSize.ProtoReflect.Descriptor instead.
func (*MLServicesSize) GetCpu ¶ added in v0.87.0
func (x *MLServicesSize) GetCpu() float32
func (*MLServicesSize) GetGpu ¶ added in v0.87.0
func (x *MLServicesSize) GetGpu() float32
func (*MLServicesSize) GetIsDefault ¶ added in v0.87.0
func (x *MLServicesSize) GetIsDefault() bool
func (*MLServicesSize) GetMemory ¶ added in v0.87.0
func (x *MLServicesSize) GetMemory() int32
func (*MLServicesSize) GetSizeId ¶ added in v0.87.0
func (x *MLServicesSize) GetSizeId() string
func (*MLServicesSize) ProtoMessage ¶ added in v0.87.0
func (*MLServicesSize) ProtoMessage()
func (*MLServicesSize) ProtoReflect ¶ added in v0.89.0
func (x *MLServicesSize) ProtoReflect() protoreflect.Message
func (*MLServicesSize) Reset ¶ added in v0.87.0
func (x *MLServicesSize) Reset()
func (*MLServicesSize) String ¶ added in v0.87.0
func (x *MLServicesSize) String() string
type MLServicesSizeList ¶ added in v0.87.0
type MLServicesSizeList struct {
	// Items in this list.
	Items []*MLServicesSize `protobuf:"bytes,1,rep,name=items,proto3" json:"items,omitempty"`
	// contains filtered or unexported fields
}
List of MLServicesSize.
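A hedged sketch of selecting the default size from a list returned by ListMLServicesSizes; the DefaultSize helper and the import path are hypothetical.

package mlexample // illustrative only

import (
	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// DefaultSize returns the size marked as default in the given list,
// or nil if the list contains no default entry.
func DefaultSize(list *ml.MLServicesSizeList) *ml.MLServicesSize {
	for _, size := range list.GetItems() {
		if size.GetIsDefault() {
			return size
		}
	}
	return nil
}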
func (*MLServicesSizeList) Descriptor ¶ deprecated added in v0.87.0
func (*MLServicesSizeList) Descriptor() ([]byte, []int)
Deprecated: Use MLServicesSizeList.ProtoReflect.Descriptor instead.
func (*MLServicesSizeList) GetItems ¶ added in v0.87.0
func (x *MLServicesSizeList) GetItems() []*MLServicesSize
func (*MLServicesSizeList) ProtoMessage ¶ added in v0.87.0
func (*MLServicesSizeList) ProtoMessage()
func (*MLServicesSizeList) ProtoReflect ¶ added in v0.89.0
func (x *MLServicesSizeList) ProtoReflect() protoreflect.Message
func (*MLServicesSizeList) Reset ¶ added in v0.87.0
func (x *MLServicesSizeList) Reset()
func (*MLServicesSizeList) String ¶ added in v0.87.0
func (x *MLServicesSizeList) String() string
type ServiceStatus ¶ added in v0.79.1
type ServiceStatus struct {
	// Type of service.
	// Should be one of: [training|prediction|projects]
	Type string `protobuf:"bytes,1,opt,name=type,proto3" json:"type,omitempty"`
	// Set to true if the service is available.
	// Every service is always in ONLY ONE of the following states: (available|failed)
	Available bool `protobuf:"varint,2,opt,name=available,proto3" json:"available,omitempty"`
	// Set to true if the service is in a failed state.
	// Every service is always in ONLY ONE of the following states: (available|failed)
	Failed bool `protobuf:"varint,3,opt,name=failed,proto3" json:"failed,omitempty"`
	// Resource usage information for this service.
	Usage *ServiceStatus_Usage `protobuf:"bytes,4,opt,name=usage,proto3" json:"usage,omitempty"`
	// Number of replicas running for this service.
	Replicas int32 `protobuf:"varint,5,opt,name=replicas,proto3" json:"replicas,omitempty"`
	// contains filtered or unexported fields
}
Status of a single ArangoGraphML component.
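A hedged sketch of checking whether a particular service type is available within a Status.Services slice; the ServiceAvailable helper and the import path are hypothetical.

package mlexample // illustrative only

import (
	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// ServiceAvailable reports whether the service of the given type
// (e.g. ServiceTypePrediction) is currently available.
func ServiceAvailable(services []*ml.ServiceStatus, serviceType string) bool {
	for _, s := range services {
		if s.GetType() == serviceType && s.GetAvailable() {
			return true
		}
	}
	return false
}

For example, ServiceAvailable(status.GetServices(), ml.ServiceTypePrediction) reports whether the prediction API service is currently available.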
func (*ServiceStatus) Descriptor ¶ deprecated added in v0.79.1
func (*ServiceStatus) Descriptor() ([]byte, []int)
Deprecated: Use ServiceStatus.ProtoReflect.Descriptor instead.
func (*ServiceStatus) Equals ¶ added in v0.79.2
func (source *ServiceStatus) Equals(other *ServiceStatus) bool
Equals returns true when source and other have the same values.
func (*ServiceStatus) GetAvailable ¶ added in v0.79.1
func (x *ServiceStatus) GetAvailable() bool
func (*ServiceStatus) GetFailed ¶ added in v0.79.1
func (x *ServiceStatus) GetFailed() bool
func (*ServiceStatus) GetReplicas ¶ added in v0.79.5
func (x *ServiceStatus) GetReplicas() int32
func (*ServiceStatus) GetType ¶ added in v0.79.1
func (x *ServiceStatus) GetType() string
func (*ServiceStatus) GetUsage ¶ added in v0.79.1
func (x *ServiceStatus) GetUsage() *ServiceStatus_Usage
func (*ServiceStatus) ProtoMessage ¶ added in v0.79.1
func (*ServiceStatus) ProtoMessage()
func (*ServiceStatus) ProtoReflect ¶ added in v0.89.0
func (x *ServiceStatus) ProtoReflect() protoreflect.Message
func (*ServiceStatus) Reset ¶ added in v0.79.1
func (x *ServiceStatus) Reset()
func (*ServiceStatus) String ¶ added in v0.79.1
func (x *ServiceStatus) String() string
type ServiceStatus_Usage ¶ added in v0.79.1
type ServiceStatus_Usage struct {
	// Last known memory usage in bytes
	LastMemoryUsage int64 `protobuf:"varint,1,opt,name=last_memory_usage,json=lastMemoryUsage,proto3" json:"last_memory_usage,omitempty"`
	// Last known CPU usage in vCPU units
	LastCpuUsage float32 `protobuf:"fixed32,2,opt,name=last_cpu_usage,json=lastCpuUsage,proto3" json:"last_cpu_usage,omitempty"`
	// Last known memory limit in bytes
	LastMemoryLimit int64 `protobuf:"varint,3,opt,name=last_memory_limit,json=lastMemoryLimit,proto3" json:"last_memory_limit,omitempty"`
	// Last known CPU limit in vCPU units
	LastCpuLimit float32 `protobuf:"fixed32,4,opt,name=last_cpu_limit,json=lastCpuLimit,proto3" json:"last_cpu_limit,omitempty"`
	// contains filtered or unexported fields
}
Resource usage for this service.
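A hedged sketch of deriving a utilization ratio from the reported usage and limit; the MemoryUtilization helper and the import path are hypothetical.

package mlexample // illustrative only

import (
	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// MemoryUtilization returns the last memory usage as a fraction of the
// last memory limit (0 when no limit is reported).
func MemoryUtilization(u *ml.ServiceStatus_Usage) float64 {
	limit := u.GetLastMemoryLimit()
	if limit == 0 {
		return 0
	}
	return float64(u.GetLastMemoryUsage()) / float64(limit)
}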
func (*ServiceStatus_Usage) Descriptor ¶ deprecated added in v0.79.1
func (*ServiceStatus_Usage) Descriptor() ([]byte, []int)
Deprecated: Use ServiceStatus_Usage.ProtoReflect.Descriptor instead.
func (*ServiceStatus_Usage) Equals ¶ added in v0.79.2
func (source *ServiceStatus_Usage) Equals(other *ServiceStatus_Usage) bool
Equals returns true when source and other have the same values.
func (*ServiceStatus_Usage) GetLastCpuLimit ¶ added in v0.79.1
func (x *ServiceStatus_Usage) GetLastCpuLimit() float32
func (*ServiceStatus_Usage) GetLastCpuUsage ¶ added in v0.79.1
func (x *ServiceStatus_Usage) GetLastCpuUsage() float32
func (*ServiceStatus_Usage) GetLastMemoryLimit ¶ added in v0.79.1
func (x *ServiceStatus_Usage) GetLastMemoryLimit() int64
func (*ServiceStatus_Usage) GetLastMemoryUsage ¶ added in v0.79.1
func (x *ServiceStatus_Usage) GetLastMemoryUsage() int64
func (*ServiceStatus_Usage) ProtoMessage ¶ added in v0.79.1
func (*ServiceStatus_Usage) ProtoMessage()
func (*ServiceStatus_Usage) ProtoReflect ¶ added in v0.89.0
func (x *ServiceStatus_Usage) ProtoReflect() protoreflect.Message
func (*ServiceStatus_Usage) Reset ¶ added in v0.79.1
func (x *ServiceStatus_Usage) Reset()
func (*ServiceStatus_Usage) String ¶ added in v0.79.1
func (x *ServiceStatus_Usage) String() string
type Status ¶ added in v0.79.0
type Status struct {
	// Overall status of where the MLServices resource is in its lifecycle at a given time.
	// It will contain only one of the following values:
	// "Bootstrapping" - ArangoDB Deployment is being bootstrapped with the required databases, schemas and data.
	// "Initialising" - The services needed for ArangoGraphML are being installed.
	// "Running" - ArangoGraphML is set up and running correctly.
	// "Error" - Indicates that there was an error with setting up ArangoGraphML. Check `message` field for additional info.
	// "Hibernated" - Indicates that ArangoGraphML and all its associated services are hibernated.
	Phase string `protobuf:"bytes,1,opt,name=phase,proto3" json:"phase,omitempty"`
	// Supporting information about the phase of MLServices (such as error messages in case of failures).
	Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
	// The timestamp of when this status was last updated.
	LastUpdatedAt *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=last_updated_at,json=lastUpdatedAt,proto3" json:"last_updated_at,omitempty"`
	// Status of each ArangoGraphML component/service.
	Services []*ServiceStatus `protobuf:"bytes,4,rep,name=services,proto3" json:"services,omitempty"`
	// Total number of hours ML Jobs have run for this Deployment.
	HoursUsed float32 `protobuf:"fixed32,5,opt,name=hours_used,json=hoursUsed,proto3" json:"hours_used,omitempty"`
	// Total number of runtime hours allowed for ML Jobs for this Deployment.
	// Set to 0 if unlimited (i.e., no restriction).
	HoursAllowed float32 `protobuf:"fixed32,6,opt,name=hours_allowed,json=hoursAllowed,proto3" json:"hours_allowed,omitempty"`
	// Timestamp after which MLServices are no longer usable.
	// This is set during trial use.
	// If unset, no expiry.
	ExpiresAt *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=expires_at,json=expiresAt,proto3" json:"expires_at,omitempty"`
	// Timestamp of when MLServices were last enabled for this deployment.
	LastEnabledAt *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=last_enabled_at,json=lastEnabledAt,proto3" json:"last_enabled_at,omitempty"`
	// contains filtered or unexported fields
}
Status of the MLServices. Note: All fields are read-only.
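A hedged sketch of interpreting the Status fields (lifecycle phase and job-hour budget); the helpers and the import path are hypothetical.

package mlexample // illustrative only

import (
	"fmt"

	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// RemainingJobHours reports how many ML Job hours are left for the deployment;
// a HoursAllowed of 0 means unlimited, so unlimited is returned instead.
func RemainingJobHours(s *ml.Status) (remaining float32, unlimited bool) {
	if s.GetHoursAllowed() == 0 {
		return 0, true
	}
	return s.GetHoursAllowed() - s.GetHoursUsed(), false
}

// Describe is a hypothetical one-line summary of where MLServices are in their lifecycle.
func Describe(s *ml.Status) string {
	if s.GetPhase() == ml.MLServicesPhaseError {
		return fmt.Sprintf("error: %s", s.GetMessage())
	}
	return s.GetPhase()
}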
func (*Status) Descriptor ¶ deprecated added in v0.79.0
func (*Status) Descriptor() ([]byte, []int)
Deprecated: Use Status.ProtoReflect.Descriptor instead.
func (*Status) EnsureServiceStatus ¶ added in v0.79.2
func (status *Status) EnsureServiceStatus(svcStatus *ServiceStatus)
EnsureServiceStatus sets the status of an ML service.
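A small usage sketch based on the description above; the MarkTrainingAvailable helper and the import path are hypothetical.

package mlexample // illustrative only

import (
	ml "github.com/arangodb-managed/apis/ml/v1" // assumed import path of this package
)

// MarkTrainingAvailable records the training service as available on the given Status.
func MarkTrainingAvailable(status *ml.Status) {
	status.EnsureServiceStatus(&ml.ServiceStatus{
		Type:      ml.ServiceTypeTraining,
		Available: true,
	})
}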
func (*Status) Equals ¶ added in v0.79.2
func (source *Status) Equals(other *Status) bool
Equals returns true when source and other have the same values.
func (*Status) GetExpiresAt ¶ added in v0.88.0
func (x *Status) GetExpiresAt() *timestamppb.Timestamp
func (*Status) GetHoursAllowed ¶ added in v0.88.0
func (x *Status) GetHoursAllowed() float32
func (*Status) GetHoursUsed ¶ added in v0.88.0
func (x *Status) GetHoursUsed() float32
func (*Status) GetLastEnabledAt ¶ added in v0.88.0
func (x *Status) GetLastEnabledAt() *timestamppb.Timestamp
func (*Status) GetLastUpdatedAt ¶ added in v0.79.0
func (x *Status) GetLastUpdatedAt() *timestamppb.Timestamp
func (*Status) GetMessage ¶ added in v0.79.0
func (x *Status) GetMessage() string
func (*Status) GetServices ¶ added in v0.79.0
func (x *Status) GetServices() []*ServiceStatus
func (*Status) ProtoMessage ¶ added in v0.79.0
func (*Status) ProtoMessage()
func (*Status) ProtoReflect ¶ added in v0.89.0
func (x *Status) ProtoReflect() protoreflect.Message
func (*Status) Reset ¶
func (x *Status) Reset()
func (*Status) String ¶
func (x *Status) String() string
type UnimplementedMLServiceServer ¶
type UnimplementedMLServiceServer struct { }
UnimplementedMLServiceServer can be embedded to have forward compatible implementations.
func (*UnimplementedMLServiceServer) GetAPIVersion ¶
func (*UnimplementedMLServiceServer) GetAPIVersion(context.Context, *v1.Empty) (*v1.Version, error)
func (*UnimplementedMLServiceServer) GetMLServices ¶
func (*UnimplementedMLServiceServer) GetMLServices(context.Context, *v1.IDOptions) (*MLServices, error)
func (*UnimplementedMLServiceServer) ListMLServicesSizes ¶ added in v0.87.0
func (*UnimplementedMLServiceServer) ListMLServicesSizes(context.Context, *ListMLServicesSizesRequest) (*MLServicesSizeList, error)
func (*UnimplementedMLServiceServer) UpdateMLServices ¶
func (*UnimplementedMLServiceServer) UpdateMLServices(context.Context, *MLServices) (*MLServices, error)