storage

package
v0.21.0 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Nov 17, 2025 License: Apache-2.0 Imports: 14 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type Bucket

type Bucket struct {
	// Name of the S3 bucket
	Name string `json:"name,required"`
	// Lifecycle policy expiration days (zero if not set)
	Lifecycle int64 `json:"lifecycle"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Name        respjson.Field
		Lifecycle   respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

Bucket represents a bucket in API responses (API schema: BucketDtoV2).

func (Bucket) RawJSON

func (r Bucket) RawJSON() string

Returns the unmodified JSON received from the API

func (*Bucket) UnmarshalJSON

func (r *Bucket) UnmarshalJSON(data []byte) error

type BucketCorGetParams

type BucketCorGetParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketCorNewParams

type BucketCorNewParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// List of allowed origins for CORS requests
	AllowedOrigins []string `json:"allowedOrigins,omitzero"`
	// contains filtered or unexported fields
}

func (BucketCorNewParams) MarshalJSON

func (r BucketCorNewParams) MarshalJSON() (data []byte, err error)

func (*BucketCorNewParams) UnmarshalJSON

func (r *BucketCorNewParams) UnmarshalJSON(data []byte) error

type BucketCorService

type BucketCorService struct {
	Options []option.RequestOption
}

BucketCorService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewBucketCorService method instead.

func NewBucketCorService

func NewBucketCorService(opts ...option.RequestOption) (r BucketCorService)

NewBucketCorService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*BucketCorService) Get

func (r *BucketCorService) Get(ctx context.Context, bucketName string, query BucketCorGetParams, opts ...option.RequestOption) (res *BucketCors, err error)

Retrieves the current Cross-Origin Resource Sharing (CORS) configuration for an S3 bucket, showing which domains are allowed to access the bucket from web browsers.

func (*BucketCorService) New

func (r *BucketCorService) New(ctx context.Context, bucketName string, params BucketCorNewParams, opts ...option.RequestOption) (err error)

Configures Cross-Origin Resource Sharing (CORS) rules for an S3 bucket, allowing web applications from specified domains to access bucket resources directly from browsers.

type BucketCors

type BucketCors struct {
	// List of allowed origins for Cross-Origin Resource Sharing (CORS) requests.
	// Contains domains/URLs that are permitted to make cross-origin requests to this
	// bucket.
	AllowedOrigins []string `json:"allowedOrigins"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		AllowedOrigins respjson.Field
		ExtraFields    map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

BucketCors represents the CORS configuration returned for a bucket (API schema: StorageGetBucketCorsEndpointRes).

func (BucketCors) RawJSON

func (r BucketCors) RawJSON() string

Returns the unmodified JSON received from the API

func (*BucketCors) UnmarshalJSON

func (r *BucketCors) UnmarshalJSON(data []byte) error

type BucketDeleteParams

type BucketDeleteParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketLifecycleDeleteParams

type BucketLifecycleDeleteParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketLifecycleNewParams

type BucketLifecycleNewParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// Number of days after which objects will be automatically deleted from the
	// bucket. Must be a positive integer. Common values: 30 for monthly cleanup, 365
	// for yearly retention.
	ExpirationDays param.Opt[int64] `json:"expiration_days,omitzero"`
	// contains filtered or unexported fields
}

func (BucketLifecycleNewParams) MarshalJSON

func (r BucketLifecycleNewParams) MarshalJSON() (data []byte, err error)

func (*BucketLifecycleNewParams) UnmarshalJSON

func (r *BucketLifecycleNewParams) UnmarshalJSON(data []byte) error

type BucketLifecycleService

type BucketLifecycleService struct {
	Options []option.RequestOption
}

BucketLifecycleService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewBucketLifecycleService method instead.

func NewBucketLifecycleService

func NewBucketLifecycleService(opts ...option.RequestOption) (r BucketLifecycleService)

NewBucketLifecycleService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*BucketLifecycleService) Delete

func (r *BucketLifecycleService) Delete(ctx context.Context, bucketName string, body BucketLifecycleDeleteParams, opts ...option.RequestOption) (err error)

Removes all lifecycle rules from an S3 bucket, disabling automatic object expiration. Objects will no longer be automatically deleted based on age.

func (*BucketLifecycleService) New

func (r *BucketLifecycleService) New(ctx context.Context, bucketName string, params BucketLifecycleNewParams, opts ...option.RequestOption) (err error)

Sets up automatic object expiration for an S3 bucket. All objects in the bucket will be automatically deleted after the specified number of days to help manage storage costs and meet compliance requirements. This applies a global lifecycle rule to the entire bucket - all existing and future objects will be subject to the expiration policy.

type BucketListParams

type BucketListParams struct {
	// Max number of records in response
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	// Number of records to skip before beginning to write in response.
	Offset param.Opt[int64] `query:"offset,omitzero" json:"-"`
	// contains filtered or unexported fields
}

func (BucketListParams) URLQuery

func (r BucketListParams) URLQuery() (v url.Values, err error)

URLQuery serializes BucketListParams's query parameters as `url.Values`.

type BucketNewParams

type BucketNewParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketPolicyDeleteParams

type BucketPolicyDeleteParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketPolicyGetParams

type BucketPolicyGetParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketPolicyNewParams

type BucketPolicyNewParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type BucketPolicyService

type BucketPolicyService struct {
	Options []option.RequestOption
}

BucketPolicyService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewBucketPolicyService method instead.

func NewBucketPolicyService

func NewBucketPolicyService(opts ...option.RequestOption) (r BucketPolicyService)

NewBucketPolicyService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*BucketPolicyService) Delete

func (r *BucketPolicyService) Delete(ctx context.Context, bucketName string, body BucketPolicyDeleteParams, opts ...option.RequestOption) (err error)

Removes the public read policy from an S3 bucket, making all objects private and accessible only with proper authentication credentials. After this operation, anonymous users will no longer be able to access bucket contents via HTTP requests.

func (*BucketPolicyService) Get

func (r *BucketPolicyService) Get(ctx context.Context, bucketName string, query BucketPolicyGetParams, opts ...option.RequestOption) (res *bool, err error)

Returns whether the S3 bucket is currently configured for public read access. Shows if anonymous users can download objects from the bucket via HTTP requests.

func (*BucketPolicyService) New

func (r *BucketPolicyService) New(ctx context.Context, bucketName string, body BucketPolicyNewParams, opts ...option.RequestOption) (err error)

Applies a public read policy to the S3 bucket, allowing anonymous users to download/access all objects in the bucket via HTTP GET requests. This makes the bucket suitable for static website hosting, public file sharing, or CDN integration. Only grants read access - users cannot upload, modify, or delete objects without proper authentication.

type BucketService

type BucketService struct {
	Options   []option.RequestOption
	Cors      BucketCorService
	Lifecycle BucketLifecycleService
	Policy    BucketPolicyService
}

BucketService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewBucketService method instead.

func NewBucketService

func NewBucketService(opts ...option.RequestOption) (r BucketService)

NewBucketService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*BucketService) Delete

func (r *BucketService) Delete(ctx context.Context, bucketName string, body BucketDeleteParams, opts ...option.RequestOption) (err error)

Removes a bucket from an S3 storage. All objects in the bucket will be automatically deleted before the bucket is removed.

func (*BucketService) List

func (r *BucketService) List(ctx context.Context, storageID int64, query BucketListParams, opts ...option.RequestOption) (res *pagination.OffsetPage[Bucket], err error)

Returns the list of buckets for the storage in a wrapped response.

Response format: `count` — total number of buckets (independent of pagination); `results` — current page of buckets according to limit/offset.

func (*BucketService) ListAutoPaging

func (r *BucketService) ListAutoPaging(ctx context.Context, storageID int64, query BucketListParams, opts ...option.RequestOption) *pagination.OffsetPageAutoPager[Bucket]

Returns the list of buckets for the storage in a wrapped response.

Response format: `count` — total number of buckets (independent of pagination); `results` — current page of buckets according to limit/offset.

func (*BucketService) New

func (r *BucketService) New(ctx context.Context, bucketName string, body BucketNewParams, opts ...option.RequestOption) (err error)

Creates a new bucket within an S3 storage. Only applicable to S3-compatible storages.

type CredentialRecreateParams

type CredentialRecreateParams struct {
	DeleteSftpPassword   param.Opt[bool]   `json:"delete_sftp_password,omitzero"`
	GenerateS3Keys       param.Opt[bool]   `json:"generate_s3_keys,omitzero"`
	GenerateSftpPassword param.Opt[bool]   `json:"generate_sftp_password,omitzero"`
	ResetSftpKeys        param.Opt[bool]   `json:"reset_sftp_keys,omitzero"`
	SftpPassword         param.Opt[string] `json:"sftp_password,omitzero"`
	// contains filtered or unexported fields
}

func (CredentialRecreateParams) MarshalJSON

func (r CredentialRecreateParams) MarshalJSON() (data []byte, err error)

func (*CredentialRecreateParams) UnmarshalJSON

func (r *CredentialRecreateParams) UnmarshalJSON(data []byte) error

type CredentialService

type CredentialService struct {
	Options []option.RequestOption
}

CredentialService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewCredentialService method instead.

func NewCredentialService

func NewCredentialService(opts ...option.RequestOption) (r CredentialService)

NewCredentialService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*CredentialService) Recreate

func (r *CredentialService) Recreate(ctx context.Context, storageID int64, body CredentialRecreateParams, opts ...option.RequestOption) (res *Storage, err error)

Generates new access credentials for the storage (S3 keys for S3 storage, SFTP password for SFTP storage).

type Error

type Error = apierror.Error

type Location

type Location struct {
	// Full hostname/address for accessing the storage endpoint in this location
	Address string `json:"address,required"`
	// Indicates whether new storage can be created in this location
	//
	// Any of "deny", "allow".
	AllowForNewStorage LocationAllowForNewStorage `json:"allow_for_new_storage,required"`
	// Location code (region identifier)
	Name string `json:"name,required"`
	// Human-readable title for the location
	Title string `json:"title,required"`
	// Storage protocol type supported in this location
	//
	// Any of "s3", "sftp".
	Type LocationType `json:"type,required"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Address            respjson.Field
		AllowForNewStorage respjson.Field
		Name               respjson.Field
		Title              respjson.Field
		Type               respjson.Field
		ExtraFields        map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

Location represents location data for the v2 API, where the title is a plain string (API schema: LocationV2).

func (Location) RawJSON

func (r Location) RawJSON() string

Returns the unmodified JSON received from the API

func (*Location) UnmarshalJSON

func (r *Location) UnmarshalJSON(data []byte) error

type LocationAllowForNewStorage

type LocationAllowForNewStorage string

Indicates whether new storage can be created in this location

const (
	LocationAllowForNewStorageDeny  LocationAllowForNewStorage = "deny"
	LocationAllowForNewStorageAllow LocationAllowForNewStorage = "allow"
)

type LocationListParams

type LocationListParams struct {
	Limit  param.Opt[int64] `query:"limit,omitzero" json:"-"`
	Offset param.Opt[int64] `query:"offset,omitzero" json:"-"`
	// contains filtered or unexported fields
}

func (LocationListParams) URLQuery

func (r LocationListParams) URLQuery() (v url.Values, err error)

URLQuery serializes LocationListParams's query parameters as `url.Values`.

type LocationService

type LocationService struct {
	Options []option.RequestOption
}

LocationService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewLocationService method instead.

func NewLocationService

func NewLocationService(opts ...option.RequestOption) (r LocationService)

NewLocationService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*LocationService) List

Returns available storage locations where you can create storages. Each location represents a geographic region with specific data center facilities.

func (*LocationService) ListAutoPaging

Returns available storage locations where you can create storages. Each location represents a geographic region with specific data center facilities.

type LocationType

type LocationType string

Storage protocol type supported in this location

const (
	LocationTypeS3   LocationType = "s3"
	LocationTypeSftp LocationType = "sftp"
)

type StatisticGetUsageAggregatedParams

type StatisticGetUsageAggregatedParams struct {
	// a From date filter
	From param.Opt[string] `json:"from,omitzero"`
	// a To date filter
	To param.Opt[string] `json:"to,omitzero"`
	// a Locations list of filter
	Locations []string `json:"locations,omitzero"`
	// a Storages list of filter
	Storages []string `json:"storages,omitzero"`
	// contains filtered or unexported fields
}

func (StatisticGetUsageAggregatedParams) MarshalJSON

func (r StatisticGetUsageAggregatedParams) MarshalJSON() (data []byte, err error)

func (*StatisticGetUsageAggregatedParams) UnmarshalJSON

func (r *StatisticGetUsageAggregatedParams) UnmarshalJSON(data []byte) error

type StatisticGetUsageSeriesParams

type StatisticGetUsageSeriesParams struct {
	// a From date filter
	From param.Opt[string] `json:"from,omitzero"`
	// a Granularity is period of time for grouping data Valid values are: 1h, 12h, 24h
	Granularity param.Opt[string] `json:"granularity,omitzero"`
	// a Source is deprecated parameter
	Source param.Opt[int64] `json:"source,omitzero"`
	// a To date filter
	To param.Opt[string] `json:"to,omitzero"`
	// a TsString is configurator of response time format switch response from unix
	// time format to RFC3339 (2006-01-02T15:04:05Z07:00)
	TsString param.Opt[bool] `json:"ts_string,omitzero"`
	// a Locations list of filter
	Locations []string `json:"locations,omitzero"`
	// a Storages list of filter
	Storages []string `json:"storages,omitzero"`
	// contains filtered or unexported fields
}

func (StatisticGetUsageSeriesParams) MarshalJSON

func (r StatisticGetUsageSeriesParams) MarshalJSON() (data []byte, err error)

func (*StatisticGetUsageSeriesParams) UnmarshalJSON

func (r *StatisticGetUsageSeriesParams) UnmarshalJSON(data []byte) error

type StatisticGetUsageSeriesResponse

type StatisticGetUsageSeriesResponse struct {
	Data UsageSeries `json:"data"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Data        respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (StatisticGetUsageSeriesResponse) RawJSON

Returns the unmodified JSON received from the API

func (*StatisticGetUsageSeriesResponse) UnmarshalJSON

func (r *StatisticGetUsageSeriesResponse) UnmarshalJSON(data []byte) error

type StatisticService

type StatisticService struct {
	Options []option.RequestOption
}

StatisticService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewStatisticService method instead.

func NewStatisticService

func NewStatisticService(opts ...option.RequestOption) (r StatisticService)

NewStatisticService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*StatisticService) GetUsageAggregated

func (r *StatisticService) GetUsageAggregated(ctx context.Context, body StatisticGetUsageAggregatedParams, opts ...option.RequestOption) (res *UsageTotal, err error)

Consumption statistics are updated in near real-time as a standard practice. The frequency of updates can vary, but data is typically available within a 60-minute period. Exceptions, such as maintenance periods, may delay data beyond 60 minutes until servers resume and backfill missing statistics.

Shows total storage usage data filtered by storages, locations, and interval.

func (*StatisticService) GetUsageSeries

Consumption statistics are updated in near real-time as a standard practice. The frequency of updates can vary, but data is typically available within a 60-minute period. Exceptions, such as maintenance periods, may delay data beyond 60 minutes until servers resume and backfill missing statistics.

Shows storage usage data in series format, filtered by clients, storages, and interval.

type Storage

type Storage struct {
	// Unique identifier for the storage instance
	ID int64 `json:"id,required"`
	// Full hostname/address for accessing the storage endpoint
	Address string `json:"address,required"`
	// Client identifier who owns this storage
	ClientID int64 `json:"client_id,required"`
	// ISO 8601 timestamp when the storage was created
	CreatedAt string `json:"created_at,required"`
	// Geographic location code where the storage is provisioned
	Location string `json:"location,required"`
	// User-defined name for the storage instance
	Name string `json:"name,required"`
	// Current provisioning status of the storage instance
	//
	// Any of "creating", "ok", "updating", "deleting", "deleted".
	ProvisioningStatus StorageProvisioningStatus `json:"provisioning_status,required"`
	// Reseller technical client ID associated with the client
	ResellerID int64 `json:"reseller_id,required"`
	// Storage protocol type - either S3-compatible object storage or SFTP file
	// transfer
	//
	// Any of "sftp", "s3".
	Type StorageType `json:"type,required"`
	// Whether this storage can be restored if deleted (S3 storages only, within 2
	// weeks)
	CanRestore  bool               `json:"can_restore"`
	Credentials StorageCredentials `json:"credentials"`
	// Whether custom configuration file is used for this storage
	CustomConfigFile bool `json:"custom_config_file"`
	// ISO 8601 timestamp when the storage was deleted (only present for deleted
	// storages)
	DeletedAt string `json:"deleted_at"`
	// Whether HTTP access is disabled for this storage (HTTPS only)
	DisableHTTP bool `json:"disable_http"`
	// ISO 8601 timestamp when the storage will expire (if set)
	Expires string `json:"expires"`
	// Custom URL rewrite rules for the storage (admin-configurable)
	RewriteRules map[string]string `json:"rewrite_rules"`
	// Custom domain alias for accessing the storage
	ServerAlias string `json:"server_alias"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID                 respjson.Field
		Address            respjson.Field
		ClientID           respjson.Field
		CreatedAt          respjson.Field
		Location           respjson.Field
		Name               respjson.Field
		ProvisioningStatus respjson.Field
		ResellerID         respjson.Field
		Type               respjson.Field
		CanRestore         respjson.Field
		Credentials        respjson.Field
		CustomConfigFile   respjson.Field
		DeletedAt          respjson.Field
		DisableHTTP        respjson.Field
		Expires            respjson.Field
		RewriteRules       respjson.Field
		ServerAlias        respjson.Field
		ExtraFields        map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (Storage) RawJSON

func (r Storage) RawJSON() string

Returns the unmodified JSON received from the API

func (*Storage) UnmarshalJSON

func (r *Storage) UnmarshalJSON(data []byte) error

type StorageCredentials

type StorageCredentials struct {
	// SSH public keys associated with SFTP storage for passwordless authentication
	Keys []StorageCredentialsKey `json:"keys"`
	S3   StorageCredentialsS3    `json:"s3"`
	// Generated or user-provided password for SFTP access (only present for SFTP
	// storage type)
	SftpPassword string `json:"sftp_password"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Keys         respjson.Field
		S3           respjson.Field
		SftpPassword respjson.Field
		ExtraFields  map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (StorageCredentials) RawJSON

func (r StorageCredentials) RawJSON() string

Returns the unmodified JSON received from the API

func (*StorageCredentials) UnmarshalJSON

func (r *StorageCredentials) UnmarshalJSON(data []byte) error

type StorageCredentialsKey

type StorageCredentialsKey struct {
	// Unique identifier for the SSH key
	ID int64 `json:"id"`
	// ISO 8601 timestamp when the SSH key was created
	CreatedAt string `json:"created_at"`
	// User-defined name for the SSH key
	Name string `json:"name"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID          respjson.Field
		CreatedAt   respjson.Field
		Name        respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (StorageCredentialsKey) RawJSON

func (r StorageCredentialsKey) RawJSON() string

Returns the unmodified JSON received from the API

func (*StorageCredentialsKey) UnmarshalJSON

func (r *StorageCredentialsKey) UnmarshalJSON(data []byte) error

type StorageCredentialsS3

type StorageCredentialsS3 struct {
	// S3-compatible access key identifier for authentication
	AccessKey string `json:"access_key"`
	// S3-compatible secret key for authentication (keep secure)
	SecretKey string `json:"secret_key"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		AccessKey   respjson.Field
		SecretKey   respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (StorageCredentialsS3) RawJSON

func (r StorageCredentialsS3) RawJSON() string

Returns the unmodified JSON received from the API

func (*StorageCredentialsS3) UnmarshalJSON

func (r *StorageCredentialsS3) UnmarshalJSON(data []byte) error

type StorageLinkSSHKeyParams

type StorageLinkSSHKeyParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type StorageListParams

type StorageListParams struct {
	// Filter by storage ID
	ID param.Opt[string] `query:"id,omitzero" json:"-"`
	// Max number of records in response
	Limit param.Opt[int64] `query:"limit,omitzero" json:"-"`
	// Filter by storage location/region
	Location param.Opt[string] `query:"location,omitzero" json:"-"`
	// Filter by storage name (exact match)
	Name param.Opt[string] `query:"name,omitzero" json:"-"`
	// Number of records to skip before beginning to write in response.
	Offset param.Opt[int64] `query:"offset,omitzero" json:"-"`
	// Field name to sort by
	OrderBy param.Opt[string] `query:"order_by,omitzero" json:"-"`
	// Include deleted storages in the response
	ShowDeleted param.Opt[bool] `query:"show_deleted,omitzero" json:"-"`
	// Ascending or descending order
	//
	// Any of "asc", "desc".
	OrderDirection StorageListParamsOrderDirection `query:"order_direction,omitzero" json:"-"`
	// Filter by storage status
	//
	// Any of "active", "suspended", "deleted", "pending".
	Status StorageListParamsStatus `query:"status,omitzero" json:"-"`
	// Filter by storage type
	//
	// Any of "s3", "sftp".
	Type StorageListParamsType `query:"type,omitzero" json:"-"`
	// contains filtered or unexported fields
}

func (StorageListParams) URLQuery

func (r StorageListParams) URLQuery() (v url.Values, err error)

URLQuery serializes StorageListParams's query parameters as `url.Values`.

type StorageListParamsOrderDirection

type StorageListParamsOrderDirection string

Ascending or descending order

const (
	StorageListParamsOrderDirectionAsc  StorageListParamsOrderDirection = "asc"
	StorageListParamsOrderDirectionDesc StorageListParamsOrderDirection = "desc"
)

type StorageListParamsStatus

type StorageListParamsStatus string

Filter by storage status

const (
	StorageListParamsStatusActive    StorageListParamsStatus = "active"
	StorageListParamsStatusSuspended StorageListParamsStatus = "suspended"
	StorageListParamsStatusDeleted   StorageListParamsStatus = "deleted"
	StorageListParamsStatusPending   StorageListParamsStatus = "pending"
)

type StorageListParamsType

type StorageListParamsType string

Filter by storage type

const (
	StorageListParamsTypeS3   StorageListParamsType = "s3"
	StorageListParamsTypeSftp StorageListParamsType = "sftp"
)

type StorageNewParams

type StorageNewParams struct {
	// Geographic location where the storage will be provisioned. Each location
	// represents a specific data center region.
	Location string `json:"location,required"`
	// Unique storage name identifier. Must contain only letters, numbers, dashes, and
	// underscores. Cannot be empty and must be less than 256 characters.
	Name string `json:"name,required"`
	// Storage protocol type. Choose 's3' for S3-compatible object storage with API
	// access, or `sftp` for SFTP file transfer protocol.
	//
	// Any of "sftp", "s3".
	Type StorageNewParamsType `json:"type,omitzero,required"`
	// Automatically generate a secure password for SFTP storage access. Only
	// applicable when type is `sftp`. When `true`, a random password will be generated
	// and returned in the response.
	GenerateSftpPassword param.Opt[bool] `json:"generate_sftp_password,omitzero"`
	// Custom password for SFTP storage access. Only applicable when type is `sftp`. If
	// not provided and `generate_sftp_password` is `false`, no password authentication
	// will be available.
	SftpPassword param.Opt[string] `json:"sftp_password,omitzero"`
	// contains filtered or unexported fields
}

func (StorageNewParams) MarshalJSON

func (r StorageNewParams) MarshalJSON() (data []byte, err error)

func (*StorageNewParams) UnmarshalJSON

func (r *StorageNewParams) UnmarshalJSON(data []byte) error

type StorageNewParamsType

type StorageNewParamsType string

Storage protocol type. Choose 's3' for S3-compatible object storage with API access, or `sftp` for SFTP file transfer protocol.

const (
	StorageNewParamsTypeSftp StorageNewParamsType = "sftp"
	StorageNewParamsTypeS3   StorageNewParamsType = "s3"
)

type StorageProvisioningStatus

type StorageProvisioningStatus string

Current provisioning status of the storage instance

const (
	StorageProvisioningStatusCreating StorageProvisioningStatus = "creating"
	StorageProvisioningStatusOk       StorageProvisioningStatus = "ok"
	StorageProvisioningStatusUpdating StorageProvisioningStatus = "updating"
	StorageProvisioningStatusDeleting StorageProvisioningStatus = "deleting"
	StorageProvisioningStatusDeleted  StorageProvisioningStatus = "deleted"
)

type StorageRestoreParams

type StorageRestoreParams struct {
	ClientID param.Opt[int64] `query:"client_id,omitzero" json:"-"`
	// contains filtered or unexported fields
}

func (StorageRestoreParams) URLQuery

func (r StorageRestoreParams) URLQuery() (v url.Values, err error)

URLQuery serializes StorageRestoreParams's query parameters as `url.Values`.

type StorageService

type StorageService struct {
	Options     []option.RequestOption
	Locations   LocationService
	Statistics  StatisticService
	Credentials CredentialService
	Buckets     BucketService
}

StorageService contains methods and other services that help with interacting with the gcore API.

Note, unlike clients, this service does not read variables from the environment automatically. You should not instantiate this service directly; use the NewStorageService method instead.

func NewStorageService

func NewStorageService(opts ...option.RequestOption) (r StorageService)

NewStorageService generates a new service that applies the given options to each request. These options are applied after the parent client's options (if there is one), and before any request-specific options.

func (*StorageService) Delete

func (r *StorageService) Delete(ctx context.Context, storageID int64, opts ...option.RequestOption) (err error)

Permanently deletes a storage and all its data. This action cannot be undone.

func (*StorageService) Get

func (r *StorageService) Get(ctx context.Context, storageID int64, opts ...option.RequestOption) (res *Storage, err error)

Retrieves detailed information about a specific storage including its configuration, credentials, and current status.

func (*StorageService) LinkSSHKey

func (r *StorageService) LinkSSHKey(ctx context.Context, keyID int64, body StorageLinkSSHKeyParams, opts ...option.RequestOption) (err error)

Associates an SSH public key with an SFTP storage, enabling passwordless authentication. Only works with SFTP storage types - not applicable to S3-compatible storage.

func (*StorageService) List

Returns storages with the same filtering and pagination as v2, but in a simplified response shape for easier client consumption.

Response format: count: total number of storages matching the filter (independent of pagination) results: the current page of storages according to limit/offset

func (*StorageService) ListAutoPaging

Returns storages with the same filtering and pagination as v2, but in a simplified response shape for easier client consumption.

Response format: count: total number of storages matching the filter (independent of pagination) results: the current page of storages according to limit/offset

func (*StorageService) New

func (r *StorageService) New(ctx context.Context, body StorageNewParams, opts ...option.RequestOption) (res *Storage, err error)

Creates a new storage instance (S3 or SFTP) in the specified location and returns the storage details including credentials.

func (*StorageService) Restore

func (r *StorageService) Restore(ctx context.Context, storageID int64, body StorageRestoreParams, opts ...option.RequestOption) (err error)

Restores a previously deleted S3 storage if it was deleted within the last 2 weeks. SFTP storages cannot be restored.

func (*StorageService) UnlinkSSHKey

func (r *StorageService) UnlinkSSHKey(ctx context.Context, keyID int64, body StorageUnlinkSSHKeyParams, opts ...option.RequestOption) (err error)

Removes SSH key association from an SFTP storage, disabling passwordless authentication for that key. The key itself remains available for other storages.

func (*StorageService) Update

func (r *StorageService) Update(ctx context.Context, storageID int64, body StorageUpdateParams, opts ...option.RequestOption) (res *Storage, err error)

Updates storage configuration such as expiration date and server alias. Used for SFTP storages.

type StorageType

type StorageType string

Storage protocol type - either S3-compatible object storage or SFTP file transfer

const (
	StorageTypeSftp StorageType = "sftp"
	StorageTypeS3   StorageType = "s3"
)

type StorageUnlinkSSHKeyParams

type StorageUnlinkSSHKeyParams struct {
	StorageID int64 `path:"storage_id,required" json:"-"`
	// contains filtered or unexported fields
}

type StorageUpdateParams

type StorageUpdateParams struct {
	// Duration when the storage should expire in format like "1 years 6 months 2 weeks
	// 3 days 5 hours 10 minutes 15 seconds". Set empty to remove expiration.
	Expires param.Opt[string] `json:"expires,omitzero"`
	// Custom domain alias for accessing the storage. Set empty to remove alias.
	ServerAlias param.Opt[string] `json:"server_alias,omitzero"`
	// contains filtered or unexported fields
}

func (StorageUpdateParams) MarshalJSON

func (r StorageUpdateParams) MarshalJSON() (data []byte, err error)

func (*StorageUpdateParams) UnmarshalJSON

func (r *StorageUpdateParams) UnmarshalJSON(data []byte) error

type UsageSeries

type UsageSeries struct {
	// a Clients grouped data
	Clients map[string]UsageSeriesClient `json:"clients"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Clients     respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageSeries) RawJSON

func (r UsageSeries) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageSeries) UnmarshalJSON

func (r *UsageSeries) UnmarshalJSON(data []byte) error

type UsageSeriesClient

type UsageSeriesClient struct {
	// an ID of client
	ID int64 `json:"id"`
	// a FileQuantitySumMax is max sum of files quantity for grouped period
	FileQuantitySumMax int64 `json:"file_quantity_sum_max"`
	// a Locations grouped data
	Locations map[string]UsageSeriesClientLocation `json:"locations"`
	// a RequestsInSum is sum of incoming requests for grouped period
	RequestsInSum int64 `json:"requests_in_sum"`
	// a RequestsOutEdgesSum is sum of out edges requests for grouped period
	RequestsOutEdgesSum int64 `json:"requests_out_edges_sum"`
	// a RequestsOutWoEdgesSum is sum of out no edges requests for grouped period
	RequestsOutWoEdgesSum int64 `json:"requests_out_wo_edges_sum"`
	// a RequestsSum is sum of all requests for grouped period
	RequestsSum int64 `json:"requests_sum"`
	// a SizeSumBytesHour is sum of bytes hour for grouped period
	SizeSumBytesHour int64 `json:"size_sum_bytes_hour"`
	// a SizeSumMax is max sum of all files sizes for grouped period
	SizeSumMax int64 `json:"size_sum_max"`
	// a SizeSumMean is mean sum of all files sizes for grouped period
	SizeSumMean int64 `json:"size_sum_mean"`
	// a TrafficInSum is sum of incoming traffic for grouped period
	TrafficInSum int64 `json:"traffic_in_sum"`
	// a TrafficOutEdgesSum is sum of out edges traffic for grouped period
	TrafficOutEdgesSum int64 `json:"traffic_out_edges_sum"`
	// a TrafficOutWoEdgesSum is sum of out no edges traffic for grouped period
	TrafficOutWoEdgesSum int64 `json:"traffic_out_wo_edges_sum"`
	// a TrafficSum is sum of all traffic for grouped period
	TrafficSum int64 `json:"traffic_sum"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		ID                    respjson.Field
		FileQuantitySumMax    respjson.Field
		Locations             respjson.Field
		RequestsInSum         respjson.Field
		RequestsOutEdgesSum   respjson.Field
		RequestsOutWoEdgesSum respjson.Field
		RequestsSum           respjson.Field
		SizeSumBytesHour      respjson.Field
		SizeSumMax            respjson.Field
		SizeSumMean           respjson.Field
		TrafficInSum          respjson.Field
		TrafficOutEdgesSum    respjson.Field
		TrafficOutWoEdgesSum  respjson.Field
		TrafficSum            respjson.Field
		ExtraFields           map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageSeriesClient) RawJSON

func (r UsageSeriesClient) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageSeriesClient) UnmarshalJSON

func (r *UsageSeriesClient) UnmarshalJSON(data []byte) error

type UsageSeriesClientLocation

type UsageSeriesClientLocation struct {
	// a FileQuantitySumMax is max sum of files quantity for grouped period
	FileQuantitySumMax int64 `json:"file_quantity_sum_max"`
	// a Name of location
	Name string `json:"name"`
	// a RequestsInSum is sum of incoming requests for grouped period
	RequestsInSum int64 `json:"requests_in_sum"`
	// a RequestsOutEdgesSum is sum of out edges requests for grouped period
	RequestsOutEdgesSum int64 `json:"requests_out_edges_sum"`
	// a RequestsOutWoEdgesSum is sum of out no edges requests for grouped period
	RequestsOutWoEdgesSum int64 `json:"requests_out_wo_edges_sum"`
	// a RequestsSum is sum of all requests for grouped period
	RequestsSum int64 `json:"requests_sum"`
	// a SizeSumBytesHour is sum of bytes hour for grouped period
	SizeSumBytesHour int64 `json:"size_sum_bytes_hour"`
	// a SizeSumMax is max sum of all files sizes for grouped period
	SizeSumMax int64 `json:"size_sum_max"`
	// a SizeSumMean is mean sum of all files sizes for grouped period
	SizeSumMean int64 `json:"size_sum_mean"`
	// a Storages grouped data
	Storages map[string]UsageSeriesClientLocationStorage `json:"storages"`
	// a TrafficInSum is sum of incoming traffic for grouped period
	TrafficInSum int64 `json:"traffic_in_sum"`
	// a TrafficOutEdgesSum is sum of out edges traffic for grouped period
	TrafficOutEdgesSum int64 `json:"traffic_out_edges_sum"`
	// a TrafficOutWoEdgesSum is sum of out no edges traffic for grouped period
	TrafficOutWoEdgesSum int64 `json:"traffic_out_wo_edges_sum"`
	// a TrafficSum is sum of all traffic for grouped period
	TrafficSum int64 `json:"traffic_sum"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		FileQuantitySumMax    respjson.Field
		Name                  respjson.Field
		RequestsInSum         respjson.Field
		RequestsOutEdgesSum   respjson.Field
		RequestsOutWoEdgesSum respjson.Field
		RequestsSum           respjson.Field
		SizeSumBytesHour      respjson.Field
		SizeSumMax            respjson.Field
		SizeSumMean           respjson.Field
		Storages              respjson.Field
		TrafficInSum          respjson.Field
		TrafficOutEdgesSum    respjson.Field
		TrafficOutWoEdgesSum  respjson.Field
		TrafficSum            respjson.Field
		ExtraFields           map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageSeriesClientLocation) RawJSON

func (r UsageSeriesClientLocation) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageSeriesClientLocation) UnmarshalJSON

func (r *UsageSeriesClientLocation) UnmarshalJSON(data []byte) error

type UsageSeriesClientLocationStorage

type UsageSeriesClientLocationStorage struct {
	// a BucketsSeries is max bucket files count for grouped period
	// {name:[[timestamp, count]]}
	BucketsSeries map[string][][]any `json:"buckets_series"`
	// a FileQuantitySumMax is max sum of files quantity for grouped period
	FileQuantitySumMax int64 `json:"file_quantity_sum_max"`
	// a Name of storage
	Name string `json:"name"`
	// a RequestsInSeries is sum of incoming requests for grouped period
	// [[timestamp, count]]
	RequestsInSeries [][]any `json:"requests_in_series"`
	// a RequestsInSum is sum of incoming requests for grouped period
	RequestsInSum int64 `json:"requests_in_sum"`
	// a RequestsOutEdgesSeries is sum of out requests (only edges) for grouped
	// period [[timestamp, count]]
	RequestsOutEdgesSeries [][]any `json:"requests_out_edges_series"`
	// a RequestsOutEdgesSum is sum of out edges requests for grouped period
	RequestsOutEdgesSum int64 `json:"requests_out_edges_sum"`
	// a RequestsOutWoEdgesSeries is sum of out requests (without edges) for grouped
	// period [[timestamp, count]]
	RequestsOutWoEdgesSeries [][]any `json:"requests_out_wo_edges_series"`
	// a RequestsOutWoEdgesSum is sum of out no edges requests for grouped period
	RequestsOutWoEdgesSum int64 `json:"requests_out_wo_edges_sum"`
	// a RequestsSeries is sum of all requests for grouped period [[timestamp, count]]
	RequestsSeries [][]any `json:"requests_series"`
	// a RequestsSum is sum of all requests for grouped period
	RequestsSum int64 `json:"requests_sum"`
	// a SizeBytesHourSeries is value that displays how many bytes were stored per hour
	// [[timestamp, count]]
	SizeBytesHourSeries [][]any `json:"size_bytes_hour_series"`
	// a SizeMaxSeries is max of files size for grouped period [[timestamp, count]]
	SizeMaxSeries [][]any `json:"size_max_series"`
	// a SizeMeanSeries is mean of files size for grouped period [[timestamp, count]]
	SizeMeanSeries [][]any `json:"size_mean_series"`
	// a SizeSumBytesHour is sum of bytes hour for grouped period
	SizeSumBytesHour int64 `json:"size_sum_bytes_hour"`
	// a SizeSumMax is max sum of all files sizes for grouped period
	SizeSumMax int64 `json:"size_sum_max"`
	// a SizeSumMean is mean sum of all files sizes for grouped period
	SizeSumMean int64 `json:"size_sum_mean"`
	// a TrafficInSeries is sum of incoming traffic bytes for grouped period
	// [[timestamp, count]]
	TrafficInSeries [][]any `json:"traffic_in_series"`
	// a TrafficInSum is sum of incoming traffic for grouped period
	TrafficInSum int64 `json:"traffic_in_sum"`
	// a TrafficOutEdgesSeries is sum of out traffic bytes (only edges) for grouped
	// period [[timestamp, count]]
	TrafficOutEdgesSeries [][]any `json:"traffic_out_edges_series"`
	// a TrafficOutEdgesSum is sum of out edges traffic for grouped period
	TrafficOutEdgesSum int64 `json:"traffic_out_edges_sum"`
	// a TrafficOutWoEdgesSeries is sum of out traffic bytes (without edges) for
	// grouped period [[timestamp, count]]
	TrafficOutWoEdgesSeries [][]any `json:"traffic_out_wo_edges_series"`
	// a TrafficOutWoEdgesSum is sum of out no edges traffic for grouped period
	TrafficOutWoEdgesSum int64 `json:"traffic_out_wo_edges_sum"`
	// a TrafficSeries is sum of traffic bytes for grouped period [[timestamp, count]]
	TrafficSeries [][]any `json:"traffic_series"`
	// a TrafficSum is sum of all traffic for grouped period
	TrafficSum int64 `json:"traffic_sum"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		BucketsSeries            respjson.Field
		FileQuantitySumMax       respjson.Field
		Name                     respjson.Field
		RequestsInSeries         respjson.Field
		RequestsInSum            respjson.Field
		RequestsOutEdgesSeries   respjson.Field
		RequestsOutEdgesSum      respjson.Field
		RequestsOutWoEdgesSeries respjson.Field
		RequestsOutWoEdgesSum    respjson.Field
		RequestsSeries           respjson.Field
		RequestsSum              respjson.Field
		SizeBytesHourSeries      respjson.Field
		SizeMaxSeries            respjson.Field
		SizeMeanSeries           respjson.Field
		SizeSumBytesHour         respjson.Field
		SizeSumMax               respjson.Field
		SizeSumMean              respjson.Field
		TrafficInSeries          respjson.Field
		TrafficInSum             respjson.Field
		TrafficOutEdgesSeries    respjson.Field
		TrafficOutEdgesSum       respjson.Field
		TrafficOutWoEdgesSeries  respjson.Field
		TrafficOutWoEdgesSum     respjson.Field
		TrafficSeries            respjson.Field
		TrafficSum               respjson.Field
		ExtraFields              map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageSeriesClientLocationStorage) RawJSON

Returns the unmodified JSON received from the API

func (*UsageSeriesClientLocationStorage) UnmarshalJSON

func (r *UsageSeriesClientLocationStorage) UnmarshalJSON(data []byte) error

type UsageTotal

type UsageTotal struct {
	// StorageUsageTotalRes for response
	Data []UsageTotalData `json:"data"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Data        respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageTotal) RawJSON

func (r UsageTotal) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageTotal) UnmarshalJSON

func (r *UsageTotal) UnmarshalJSON(data []byte) error

type UsageTotalData

type UsageTotalData struct {
	Metrics UsageTotalDataMetrics `json:"metrics"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		Metrics     respjson.Field
		ExtraFields map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

StorageStatsTotalElement for response

func (UsageTotalData) RawJSON

func (r UsageTotalData) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageTotalData) UnmarshalJSON

func (r *UsageTotalData) UnmarshalJSON(data []byte) error

type UsageTotalDataMetrics

type UsageTotalDataMetrics struct {
	// a FileQuantitySumMax is max sum of files quantity for grouped period
	FileQuantitySumMax int64 `json:"file_quantity_sum_max"`
	// a RequestsInSum is sum of incoming requests for grouped period
	RequestsInSum int64 `json:"requests_in_sum"`
	// a RequestsOutEdgesSum is sum of out edges requests for grouped period
	RequestsOutEdgesSum int64 `json:"requests_out_edges_sum"`
	// a RequestsOutWoEdgesSum is sum of out no edges requests for grouped period
	RequestsOutWoEdgesSum int64 `json:"requests_out_wo_edges_sum"`
	// a RequestsSum is sum of all requests for grouped period
	RequestsSum int64 `json:"requests_sum"`
	// a SizeSumBytesHour is sum of bytes hour for grouped period
	SizeSumBytesHour int64 `json:"size_sum_bytes_hour"`
	// a SizeSumMax is max sum of all files sizes for grouped period
	SizeSumMax int64 `json:"size_sum_max"`
	// a SizeSumMean is mean sum of all files sizes for grouped period
	SizeSumMean int64 `json:"size_sum_mean"`
	// a TrafficInSum is sum of incoming traffic for grouped period
	TrafficInSum int64 `json:"traffic_in_sum"`
	// a TrafficOutEdgesSum is sum of out edges traffic for grouped period
	TrafficOutEdgesSum int64 `json:"traffic_out_edges_sum"`
	// a TrafficOutWoEdgesSum is sum of out no edges traffic for grouped period
	TrafficOutWoEdgesSum int64 `json:"traffic_out_wo_edges_sum"`
	// a TrafficSum is sum of all traffic for grouped period
	TrafficSum int64 `json:"traffic_sum"`
	// JSON contains metadata for fields, check presence with [respjson.Field.Valid].
	JSON struct {
		FileQuantitySumMax    respjson.Field
		RequestsInSum         respjson.Field
		RequestsOutEdgesSum   respjson.Field
		RequestsOutWoEdgesSum respjson.Field
		RequestsSum           respjson.Field
		SizeSumBytesHour      respjson.Field
		SizeSumMax            respjson.Field
		SizeSumMean           respjson.Field
		TrafficInSum          respjson.Field
		TrafficOutEdgesSum    respjson.Field
		TrafficOutWoEdgesSum  respjson.Field
		TrafficSum            respjson.Field
		ExtraFields           map[string]respjson.Field
		// contains filtered or unexported fields
	} `json:"-"`
}

func (UsageTotalDataMetrics) RawJSON

func (r UsageTotalDataMetrics) RawJSON() string

Returns the unmodified JSON received from the API

func (*UsageTotalDataMetrics) UnmarshalJSON

func (r *UsageTotalDataMetrics) UnmarshalJSON(data []byte) error

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL