package models

v0.3.2
Published: Sep 5, 2023 · License: Apache-2.0, MIT · Imports: 7 · Imported by: 1

Documentation

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type APIHTTPError

type APIHTTPError struct {

	// err
	Err string `json:"err,omitempty"`
}

APIHTTPError api HTTP error

swagger:model api.HTTPError

func (*APIHTTPError) ContextValidate

func (m *APIHTTPError) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this api HTTP error based on the context it is used in

func (*APIHTTPError) MarshalBinary

func (m *APIHTTPError) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*APIHTTPError) UnmarshalBinary

func (m *APIHTTPError) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*APIHTTPError) Validate

func (m *APIHTTPError) Validate(formats strfmt.Registry) error

Validate validates this api HTTP error
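
A minimal sketch of decoding a server error payload into this model and validating it. The import path below (example.com/yourmodule/models) and the response body are placeholder assumptions; strfmt.Default is the default registry from github.com/go-openapi/strfmt.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourmodule/models" // placeholder import path
)

func main() {
	// Hypothetical error body returned by the API.
	body := []byte(`{"err":"dataset not found"}`)

	var apiErr models.APIHTTPError
	// UnmarshalBinary is the generated JSON decoder for this model.
	if err := apiErr.UnmarshalBinary(body); err != nil {
		panic(err)
	}
	// Validate checks the payload against the swagger schema.
	if err := apiErr.Validate(strfmt.Default); err != nil {
		panic(err)
	}
	fmt.Println("server error:", apiErr.Err)
}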

type DatasetAddPieceRequest

type DatasetAddPieceRequest struct {

	// Path to the CAR file, used to determine the size of the file and root CID
	FilePath string `json:"filePath,omitempty"`

	// CID of the piece
	PieceCid string `json:"pieceCid,omitempty"`

	// Size of the piece
	PieceSize string `json:"pieceSize,omitempty"`

	// Root CID of the CAR file, if not provided, will be determined by the CAR file header. Used to populate the label field of storage deal
	RootCid string `json:"rootCid,omitempty"`
}

DatasetAddPieceRequest dataset add piece request

swagger:model dataset.AddPieceRequest

func (*DatasetAddPieceRequest) ContextValidate

func (m *DatasetAddPieceRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this dataset add piece request based on the context it is used in

func (*DatasetAddPieceRequest) MarshalBinary

func (m *DatasetAddPieceRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasetAddPieceRequest) UnmarshalBinary

func (m *DatasetAddPieceRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasetAddPieceRequest) Validate

func (m *DatasetAddPieceRequest) Validate(formats strfmt.Registry) error

Validate validates this dataset add piece request
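
A minimal sketch of building an add-piece request and serializing it with MarshalBinary. The import path and every field value below are placeholders, not taken from this package.

package main

import (
	"fmt"

	"example.com/yourmodule/models" // placeholder import path
)

func main() {
	req := models.DatasetAddPieceRequest{
		FilePath:  "/data/cars/piece-0001.car", // example path to the CAR file
		PieceCid:  "baga6ea4seaq...",           // example (truncated) piece CID
		PieceSize: "34359738368",               // 32 GiB, expressed as a string per the model
	}

	// MarshalBinary produces the JSON body expected by the API.
	body, err := req.MarshalBinary()
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}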

type DatasetCreateRequest

type DatasetCreateRequest struct {

	// Public key of the encryption recipient
	EncryptionRecipients []string `json:"encryptionRecipients"`

	// Maximum size of the CAR files to be created
	// Required: true
	MaxSize *string `json:"maxSize"`

	// Name must be a unique identifier for a dataset
	// Required: true
	Name *string `json:"name"`

	// Output directory for CAR files. Do not set if using inline preparation
	OutputDirs []string `json:"outputDirs"`

	// Target piece size of the CAR files used for piece commitment calculation
	PieceSize string `json:"pieceSize,omitempty"`
}

DatasetCreateRequest dataset create request

swagger:model dataset.CreateRequest

func (*DatasetCreateRequest) ContextValidate

func (m *DatasetCreateRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this dataset create request based on the context it is used in

func (*DatasetCreateRequest) MarshalBinary

func (m *DatasetCreateRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasetCreateRequest) UnmarshalBinary

func (m *DatasetCreateRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasetCreateRequest) Validate

func (m *DatasetCreateRequest) Validate(formats strfmt.Registry) error

Validate validates this dataset create request
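
Because MaxSize and Name are marked Required: true, they are *string fields and must be non-nil for Validate to pass. A minimal sketch, with an assumed import path and example values:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourmodule/models" // placeholder import path
)

func main() {
	maxSize := "31.5GiB" // example maximum CAR file size
	name := "my-dataset" // example unique dataset name

	req := models.DatasetCreateRequest{
		MaxSize:    &maxSize, // Required: true, hence *string
		Name:       &name,    // Required: true, hence *string
		OutputDirs: []string{"/mnt/car-output"},
	}

	// Validate reports an error if a required field is nil.
	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("invalid request:", err)
		return
	}
	fmt.Println("request ok")
}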

type DatasetUpdateRequest

type DatasetUpdateRequest struct {

	// Public key of the encryption recipient
	EncryptionRecipients []string `json:"encryptionRecipients"`

	// Maximum size of the CAR files to be created
	MaxSize *string `json:"maxSize,omitempty"`

	// Output directory for CAR files. Do not set if using inline preparation
	OutputDirs []string `json:"outputDirs"`

	// Target piece size of the CAR files used for piece commitment calculation
	PieceSize string `json:"pieceSize,omitempty"`
}

DatasetUpdateRequest dataset update request

swagger:model dataset.UpdateRequest

func (*DatasetUpdateRequest) ContextValidate

func (m *DatasetUpdateRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this dataset update request based on the context it is used in

func (*DatasetUpdateRequest) MarshalBinary

func (m *DatasetUpdateRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasetUpdateRequest) UnmarshalBinary

func (m *DatasetUpdateRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasetUpdateRequest) Validate

func (m *DatasetUpdateRequest) Validate(formats strfmt.Registry) error

Validate validates this dataset update request
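
All fields of the update request are optional, so a partial update only sets the fields being changed. A minimal sketch under the same placeholder import path and an example value:

package main

import (
	"fmt"

	"example.com/yourmodule/models" // placeholder import path
)

func main() {
	newMax := "17GiB" // example: only MaxSize is being changed

	req := models.DatasetUpdateRequest{
		MaxSize: &newMax, // nil would mean "leave unchanged"
	}

	body, err := req.MarshalBinary()
	if err != nil {
		panic(err)
	}
	// Pointer fields left nil are dropped via omitempty; slice fields
	// without omitempty serialize as null.
	fmt.Println(string(body))
}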

type DatasourceAcdRequest

type DatasourceAcdRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Checkpoint for internal polling (debug).
	Checkpoint string `json:"checkpoint,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Files >= this size will be downloaded via their tempLink.
	TemplinkThreshold *string `json:"templinkThreshold,omitempty"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`

	// Additional time per GiB to wait after a failed complete upload to see if it appears.
	UploadWaitPerGb *string `json:"uploadWaitPerGb,omitempty"`
}

DatasourceAcdRequest datasource acd request

swagger:model datasource.AcdRequest

func (*DatasourceAcdRequest) ContextValidate

func (m *DatasourceAcdRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource acd request based on the context it is used in

func (*DatasourceAcdRequest) MarshalBinary

func (m *DatasourceAcdRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceAcdRequest) UnmarshalBinary

func (m *DatasourceAcdRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceAcdRequest) Validate

func (m *DatasourceAcdRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource acd request
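
A minimal sketch of the usual generated-model validation flow (Validate, then ContextValidate) on this request. The import path and field values are assumptions, and the required ScanningState field, which embeds ModelWorkState, is left at its zero value here for brevity.

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourmodule/models" // placeholder import path
)

func main() {
	deleteAfterExport := false
	rescanInterval := "24h"
	sourcePath := "/" // example ACD path to scan

	req := models.DatasourceAcdRequest{
		DeleteAfterExport: &deleteAfterExport, // Required: true
		RescanInterval:    &rescanInterval,    // Required: true
		SourcePath:        &sourcePath,        // Required: true
		// ScanningState (also required) is left at its zero value here.
	}

	// Schema-level validation of the payload.
	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation failed:", err)
		return
	}
	// Context-based validation, as generated by go-swagger.
	if err := req.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validation failed:", err)
		return
	}
	fmt.Println("request ok")
}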

type DatasourceAllConfig

type DatasourceAllConfig struct {

	// Auth server URL.
	AcdAuthURL string `json:"acdAuthUrl,omitempty"`

	// Checkpoint for internal polling (debug).
	AcdCheckpoint string `json:"acdCheckpoint,omitempty"`

	// OAuth Client Id.
	AcdClientID string `json:"acdClientId,omitempty"`

	// OAuth Client Secret.
	AcdClientSecret string `json:"acdClientSecret,omitempty"`

	// The encoding for the backend.
	AcdEncoding *string `json:"acdEncoding,omitempty"`

	// Files >= this size will be downloaded via their tempLink.
	AcdTemplinkThreshold *string `json:"acdTemplinkThreshold,omitempty"`

	// OAuth Access Token as a JSON blob.
	AcdToken string `json:"acdToken,omitempty"`

	// Token server url.
	AcdTokenURL string `json:"acdTokenUrl,omitempty"`

	// Additional time per GiB to wait after a failed complete upload to see if it appears.
	AcdUploadWaitPerGb *string `json:"acdUploadWaitPerGb,omitempty"`

	// Access tier of blob: hot, cool or archive.
	AzureblobAccessTier string `json:"azureblobAccessTier,omitempty"`

	// Azure Storage Account Name.
	AzureblobAccount string `json:"azureblobAccount,omitempty"`

	// Delete archive tier blobs before overwriting.
	AzureblobArchiveTierDelete *string `json:"azureblobArchiveTierDelete,omitempty"`

	// Upload chunk size.
	AzureblobChunkSize *string `json:"azureblobChunkSize,omitempty"`

	// Password for the certificate file (optional).
	AzureblobClientCertificatePassword string `json:"azureblobClientCertificatePassword,omitempty"`

	// Path to a PEM or PKCS12 certificate file including the private key.
	AzureblobClientCertificatePath string `json:"azureblobClientCertificatePath,omitempty"`

	// The ID of the client in use.
	AzureblobClientID string `json:"azureblobClientId,omitempty"`

	// One of the service principal's client secrets
	AzureblobClientSecret string `json:"azureblobClientSecret,omitempty"`

	// Send the certificate chain when using certificate auth.
	AzureblobClientSendCertificateChain *string `json:"azureblobClientSendCertificateChain,omitempty"`

	// Don't store MD5 checksum with object metadata.
	AzureblobDisableChecksum *string `json:"azureblobDisableChecksum,omitempty"`

	// The encoding for the backend.
	AzureblobEncoding *string `json:"azureblobEncoding,omitempty"`

	// Endpoint for the service.
	AzureblobEndpoint string `json:"azureblobEndpoint,omitempty"`

	// Read credentials from runtime (environment variables, CLI or MSI).
	AzureblobEnvAuth *string `json:"azureblobEnvAuth,omitempty"`

	// Storage Account Shared Key.
	AzureblobKey string `json:"azureblobKey,omitempty"`

	// Size of blob list.
	AzureblobListChunk *string `json:"azureblobListChunk,omitempty"`

	// How often internal memory buffer pools will be flushed.
	AzureblobMemoryPoolFlushTime *string `json:"azureblobMemoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	AzureblobMemoryPoolUseMmap *string `json:"azureblobMemoryPoolUseMmap,omitempty"`

	// Object ID of the user-assigned MSI to use, if any.
	AzureblobMsiClientID string `json:"azureblobMsiClientId,omitempty"`

	// Azure resource ID of the user-assigned MSI to use, if any.
	AzureblobMsiMiResID string `json:"azureblobMsiMiResId,omitempty"`

	// Object ID of the user-assigned MSI to use, if any.
	AzureblobMsiObjectID string `json:"azureblobMsiObjectId,omitempty"`

	// If set, don't attempt to check the container exists or create it.
	AzureblobNoCheckContainer *string `json:"azureblobNoCheckContainer,omitempty"`

	// If set, do not do HEAD before GET when getting objects.
	AzureblobNoHeadObject *string `json:"azureblobNoHeadObject,omitempty"`

	// The user's password
	AzureblobPassword string `json:"azureblobPassword,omitempty"`

	// Public access level of a container: blob or container.
	AzureblobPublicAccess string `json:"azureblobPublicAccess,omitempty"`

	// SAS URL for container level access only.
	AzureblobSasURL string `json:"azureblobSasUrl,omitempty"`

	// Path to file containing credentials for use with a service principal.
	AzureblobServicePrincipalFile string `json:"azureblobServicePrincipalFile,omitempty"`

	// ID of the service principal's tenant. Also called its directory ID.
	AzureblobTenant string `json:"azureblobTenant,omitempty"`

	// Concurrency for multipart uploads.
	AzureblobUploadConcurrency *string `json:"azureblobUploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload (<= 256 MiB) (deprecated).
	AzureblobUploadCutoff string `json:"azureblobUploadCutoff,omitempty"`

	// Uses local storage emulator if provided as 'true'.
	AzureblobUseEmulator *string `json:"azureblobUseEmulator,omitempty"`

	// Use a managed service identity to authenticate (only works in Azure).
	AzureblobUseMsi *string `json:"azureblobUseMsi,omitempty"`

	// User name (usually an email address)
	AzureblobUsername string `json:"azureblobUsername,omitempty"`

	// Account ID or Application Key ID.
	B2Account string `json:"b2Account,omitempty"`

	// Upload chunk size.
	B2ChunkSize *string `json:"b2ChunkSize,omitempty"`

	// Cutoff for switching to multipart copy.
	B2CopyCutoff *string `json:"b2CopyCutoff,omitempty"`

	// Disable checksums for large (> upload cutoff) files.
	B2DisableChecksum *string `json:"b2DisableChecksum,omitempty"`

	// Time before the authorization token will expire in s or suffix ms|s|m|h|d.
	B2DownloadAuthDuration *string `json:"b2DownloadAuthDuration,omitempty"`

	// Custom endpoint for downloads.
	B2DownloadURL string `json:"b2DownloadUrl,omitempty"`

	// The encoding for the backend.
	B2Encoding *string `json:"b2Encoding,omitempty"`

	// Endpoint for the service.
	B2Endpoint string `json:"b2Endpoint,omitempty"`

	// Permanently delete files on remote removal, otherwise hide files.
	B2HardDelete *string `json:"b2HardDelete,omitempty"`

	// Application Key.
	B2Key string `json:"b2Key,omitempty"`

	// How often internal memory buffer pools will be flushed.
	B2MemoryPoolFlushTime *string `json:"b2MemoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	B2MemoryPoolUseMmap *string `json:"b2MemoryPoolUseMmap,omitempty"`

	// A flag string for X-Bz-Test-Mode header for debugging.
	B2TestMode string `json:"b2TestMode,omitempty"`

	// Cutoff for switching to chunked upload.
	B2UploadCutoff *string `json:"b2UploadCutoff,omitempty"`

	// Show file versions as they were at the specified time.
	B2VersionAt *string `json:"b2VersionAt,omitempty"`

	// Include old versions in directory listings.
	B2Versions *string `json:"b2Versions,omitempty"`

	// Box App Primary Access Token
	BoxAccessToken string `json:"boxAccessToken,omitempty"`

	// Auth server URL.
	BoxAuthURL string `json:"boxAuthUrl,omitempty"`

	// Box App config.json location
	BoxBoxConfigFile string `json:"boxBoxConfigFile,omitempty"`

	// box box sub type
	BoxBoxSubType *string `json:"boxBoxSubType,omitempty"`

	// OAuth Client Id.
	BoxClientID string `json:"boxClientId,omitempty"`

	// OAuth Client Secret.
	BoxClientSecret string `json:"boxClientSecret,omitempty"`

	// Max number of times to try committing a multipart file.
	BoxCommitRetries *string `json:"boxCommitRetries,omitempty"`

	// The encoding for the backend.
	BoxEncoding *string `json:"boxEncoding,omitempty"`

	// Size of listing chunk 1-1000.
	BoxListChunk *string `json:"boxListChunk,omitempty"`

	// Only show items owned by the login (email address) passed in.
	BoxOwnedBy string `json:"boxOwnedBy,omitempty"`

	// Fill in for rclone to use a non root folder as its starting point.
	BoxRootFolderID *string `json:"boxRootFolderId,omitempty"`

	// OAuth Access Token as a JSON blob.
	BoxToken string `json:"boxToken,omitempty"`

	// Token server url.
	BoxTokenURL string `json:"boxTokenUrl,omitempty"`

	// Cutoff for switching to multipart upload (>= 50 MiB).
	BoxUploadCutoff *string `json:"boxUploadCutoff,omitempty"`

	// Delete the source after exporting to CAR files
	DeleteAfterExport bool `json:"deleteAfterExport,omitempty"`

	// Set to allow files which return cannotDownloadAbusiveFile to be downloaded.
	DriveAcknowledgeAbuse *string `json:"driveAcknowledgeAbuse,omitempty"`

	// Allow the filetype to change when uploading Google docs.
	DriveAllowImportNameChange *string `json:"driveAllowImportNameChange,omitempty"`

	// Deprecated: No longer needed.
	DriveAlternateExport *string `json:"driveAlternateExport,omitempty"`

	// Only consider files owned by the authenticated user.
	DriveAuthOwnerOnly *string `json:"driveAuthOwnerOnly,omitempty"`

	// Auth server URL.
	DriveAuthURL string `json:"driveAuthUrl,omitempty"`

	// Upload chunk size.
	DriveChunkSize *string `json:"driveChunkSize,omitempty"`

	// Google Application Client Id
	DriveClientID string `json:"driveClientId,omitempty"`

	// OAuth Client Secret.
	DriveClientSecret string `json:"driveClientSecret,omitempty"`

	// Server side copy contents of shortcuts instead of the shortcut.
	DriveCopyShortcutContent *string `json:"driveCopyShortcutContent,omitempty"`

	// Disable drive using http2.
	DriveDisableHttp2 *string `json:"driveDisableHttp2,omitempty"`

	// The encoding for the backend.
	DriveEncoding *string `json:"driveEncoding,omitempty"`

	// Comma separated list of preferred formats for downloading Google docs.
	DriveExportFormats *string `json:"driveExportFormats,omitempty"`

	// Deprecated: See export_formats.
	DriveFormats string `json:"driveFormats,omitempty"`

	// Impersonate this user when using a service account.
	DriveImpersonate string `json:"driveImpersonate,omitempty"`

	// Comma separated list of preferred formats for uploading Google docs.
	DriveImportFormats string `json:"driveImportFormats,omitempty"`

	// Keep new head revision of each file forever.
	DriveKeepRevisionForever *string `json:"driveKeepRevisionForever,omitempty"`

	// Size of listing chunk 100-1000, 0 to disable.
	DriveListChunk *string `json:"driveListChunk,omitempty"`

	// Number of API calls to allow without sleeping.
	DrivePacerBurst *string `json:"drivePacerBurst,omitempty"`

	// Minimum time to sleep between API calls.
	DrivePacerMinSleep *string `json:"drivePacerMinSleep,omitempty"`

	// Resource key for accessing a link-shared file.
	DriveResourceKey string `json:"driveResourceKey,omitempty"`

	// ID of the root folder.
	DriveRootFolderID string `json:"driveRootFolderId,omitempty"`

	// Scope that rclone should use when requesting access from drive.
	DriveScope string `json:"driveScope,omitempty"`

	// Allow server-side operations (e.g. copy) to work across different drive configs.
	DriveServerSideAcrossConfigs *string `json:"driveServerSideAcrossConfigs,omitempty"`

	// Service Account Credentials JSON blob.
	DriveServiceAccountCredentials string `json:"driveServiceAccountCredentials,omitempty"`

	// Service Account Credentials JSON file path.
	DriveServiceAccountFile string `json:"driveServiceAccountFile,omitempty"`

	// Only show files that are shared with me.
	DriveSharedWithMe *string `json:"driveSharedWithMe,omitempty"`

	// Show sizes as storage quota usage, not actual size.
	DriveSizeAsQuota *string `json:"driveSizeAsQuota,omitempty"`

	// Skip MD5 checksum on Google photos and videos only.
	DriveSkipChecksumGphotos *string `json:"driveSkipChecksumGphotos,omitempty"`

	// If set skip dangling shortcut files.
	DriveSkipDanglingShortcuts *string `json:"driveSkipDanglingShortcuts,omitempty"`

	// Skip google documents in all listings.
	DriveSkipGdocs *string `json:"driveSkipGdocs,omitempty"`

	// If set skip shortcut files.
	DriveSkipShortcuts *string `json:"driveSkipShortcuts,omitempty"`

	// Only show files that are starred.
	DriveStarredOnly *string `json:"driveStarredOnly,omitempty"`

	// Make download limit errors be fatal.
	DriveStopOnDownloadLimit *string `json:"driveStopOnDownloadLimit,omitempty"`

	// Make upload limit errors be fatal.
	DriveStopOnUploadLimit *string `json:"driveStopOnUploadLimit,omitempty"`

	// ID of the Shared Drive (Team Drive).
	DriveTeamDrive string `json:"driveTeamDrive,omitempty"`

	// OAuth Access Token as a JSON blob.
	DriveToken string `json:"driveToken,omitempty"`

	// Token server url.
	DriveTokenURL string `json:"driveTokenUrl,omitempty"`

	// Only show files that are in the trash.
	DriveTrashedOnly *string `json:"driveTrashedOnly,omitempty"`

	// Cutoff for switching to chunked upload.
	DriveUploadCutoff *string `json:"driveUploadCutoff,omitempty"`

	// Use file created date instead of modified date.
	DriveUseCreatedDate *string `json:"driveUseCreatedDate,omitempty"`

	// Use date file was shared instead of modified date.
	DriveUseSharedDate *string `json:"driveUseSharedDate,omitempty"`

	// Send files to the trash instead of deleting permanently.
	DriveUseTrash *string `json:"driveUseTrash,omitempty"`

	// If objects are greater than this size, use the drive v2 API to download.
	DriveV2DownloadMinSize *string `json:"driveV2DownloadMinSize,omitempty"`

	// Auth server URL.
	DropboxAuthURL string `json:"dropboxAuthUrl,omitempty"`

	// Max time to wait for a batch to finish committing
	DropboxBatchCommitTimeout *string `json:"dropboxBatchCommitTimeout,omitempty"`

	// Upload file batching sync|async|off.
	DropboxBatchMode *string `json:"dropboxBatchMode,omitempty"`

	// Max number of files in upload batch.
	DropboxBatchSize *string `json:"dropboxBatchSize,omitempty"`

	// Max time to allow an idle upload batch before uploading.
	DropboxBatchTimeout *string `json:"dropboxBatchTimeout,omitempty"`

	// Upload chunk size (< 150Mi).
	DropboxChunkSize *string `json:"dropboxChunkSize,omitempty"`

	// OAuth Client Id.
	DropboxClientID string `json:"dropboxClientId,omitempty"`

	// OAuth Client Secret.
	DropboxClientSecret string `json:"dropboxClientSecret,omitempty"`

	// The encoding for the backend.
	DropboxEncoding *string `json:"dropboxEncoding,omitempty"`

	// Impersonate this user when using a business account.
	DropboxImpersonate string `json:"dropboxImpersonate,omitempty"`

	// Instructs rclone to work on individual shared files.
	DropboxSharedFiles *string `json:"dropboxSharedFiles,omitempty"`

	// Instructs rclone to work on shared folders.
	DropboxSharedFolders *string `json:"dropboxSharedFolders,omitempty"`

	// OAuth Access Token as a JSON blob.
	DropboxToken string `json:"dropboxToken,omitempty"`

	// Token server url.
	DropboxTokenURL string `json:"dropboxTokenUrl,omitempty"`

	// Your API Key, get it from https://1fichier.com/console/params.pl.
	FichierAPIKey string `json:"fichierApiKey,omitempty"`

	// The encoding for the backend.
	FichierEncoding *string `json:"fichierEncoding,omitempty"`

	// If you want to download a shared file that is password protected, add this parameter.
	FichierFilePassword string `json:"fichierFilePassword,omitempty"`

	// If you want to list the files in a shared folder that is password protected, add this parameter.
	FichierFolderPassword string `json:"fichierFolderPassword,omitempty"`

	// If you want to download a shared folder, add this parameter.
	FichierSharedFolder string `json:"fichierSharedFolder,omitempty"`

	// The encoding for the backend.
	FilefabricEncoding *string `json:"filefabricEncoding,omitempty"`

	// Permanent Authentication Token.
	FilefabricPermanentToken string `json:"filefabricPermanentToken,omitempty"`

	// ID of the root folder.
	FilefabricRootFolderID string `json:"filefabricRootFolderId,omitempty"`

	// Session Token.
	FilefabricToken string `json:"filefabricToken,omitempty"`

	// Token expiry time.
	FilefabricTokenExpiry string `json:"filefabricTokenExpiry,omitempty"`

	// URL of the Enterprise File Fabric to connect to.
	FilefabricURL string `json:"filefabricUrl,omitempty"`

	// Version read from the file fabric.
	FilefabricVersion string `json:"filefabricVersion,omitempty"`

	// Allow asking for FTP password when needed.
	FtpAskPassword *string `json:"ftpAskPassword,omitempty"`

	// Maximum time to wait for a response to close.
	FtpCloseTimeout *string `json:"ftpCloseTimeout,omitempty"`

	// Maximum number of FTP simultaneous connections, 0 for unlimited.
	FtpConcurrency *string `json:"ftpConcurrency,omitempty"`

	// Disable using EPSV even if server advertises support.
	FtpDisableEpsv *string `json:"ftpDisableEpsv,omitempty"`

	// Disable using MLSD even if server advertises support.
	FtpDisableMlsd *string `json:"ftpDisableMlsd,omitempty"`

	// Disable TLS 1.3 (workaround for FTP servers with buggy TLS)
	FtpDisableTls13 *string `json:"ftpDisableTls13,omitempty"`

	// Disable using UTF-8 even if server advertises support.
	FtpDisableUTF8 *string `json:"ftpDisableUtf8,omitempty"`

	// The encoding for the backend.
	FtpEncoding *string `json:"ftpEncoding,omitempty"`

	// Use Explicit FTPS (FTP over TLS).
	FtpExplicitTLS *string `json:"ftpExplicitTls,omitempty"`

	// Use LIST -a to force listing of hidden files and folders. This will disable the use of MLSD.
	FtpForceListHidden *string `json:"ftpForceListHidden,omitempty"`

	// FTP host to connect to.
	FtpHost string `json:"ftpHost,omitempty"`

	// Max time before closing idle connections.
	FtpIdleTimeout *string `json:"ftpIdleTimeout,omitempty"`

	// Do not verify the TLS certificate of the server.
	FtpNoCheckCertificate *string `json:"ftpNoCheckCertificate,omitempty"`

	// FTP password.
	FtpPass string `json:"ftpPass,omitempty"`

	// FTP port number.
	FtpPort *string `json:"ftpPort,omitempty"`

	// Maximum time to wait for data connection closing status.
	FtpShutTimeout *string `json:"ftpShutTimeout,omitempty"`

	// Use Implicit FTPS (FTP over TLS).
	FtpTLS *string `json:"ftpTls,omitempty"`

	// Size of TLS session cache for all control and data connections.
	FtpTLSCacheSize *string `json:"ftpTlsCacheSize,omitempty"`

	// FTP username.
	FtpUser *string `json:"ftpUser,omitempty"`

	// Use MDTM to set modification time (VsFtpd quirk)
	FtpWritingMdtm *string `json:"ftpWritingMdtm,omitempty"`

	// Access public buckets and objects without credentials.
	GcsAnonymous *string `json:"gcsAnonymous,omitempty"`

	// Auth server URL.
	GcsAuthURL string `json:"gcsAuthUrl,omitempty"`

	// Access Control List for new buckets.
	GcsBucketACL string `json:"gcsBucketAcl,omitempty"`

	// Access checks should use bucket-level IAM policies.
	GcsBucketPolicyOnly *string `json:"gcsBucketPolicyOnly,omitempty"`

	// OAuth Client Id.
	GcsClientID string `json:"gcsClientId,omitempty"`

	// OAuth Client Secret.
	GcsClientSecret string `json:"gcsClientSecret,omitempty"`

	// If set this will decompress gzip encoded objects.
	GcsDecompress *string `json:"gcsDecompress,omitempty"`

	// The encoding for the backend.
	GcsEncoding *string `json:"gcsEncoding,omitempty"`

	// Endpoint for the service.
	GcsEndpoint string `json:"gcsEndpoint,omitempty"`

	// Get GCP IAM credentials from runtime (environment variables or instance meta data if no env vars).
	GcsEnvAuth *string `json:"gcsEnvAuth,omitempty"`

	// Location for the newly created buckets.
	GcsLocation string `json:"gcsLocation,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	GcsNoCheckBucket *string `json:"gcsNoCheckBucket,omitempty"`

	// Access Control List for new objects.
	GcsObjectACL string `json:"gcsObjectAcl,omitempty"`

	// Project number.
	GcsProjectNumber string `json:"gcsProjectNumber,omitempty"`

	// Service Account Credentials JSON blob.
	GcsServiceAccountCredentials string `json:"gcsServiceAccountCredentials,omitempty"`

	// Service Account Credentials JSON file path.
	GcsServiceAccountFile string `json:"gcsServiceAccountFile,omitempty"`

	// The storage class to use when storing objects in Google Cloud Storage.
	GcsStorageClass string `json:"gcsStorageClass,omitempty"`

	// OAuth Access Token as a JSON blob.
	GcsToken string `json:"gcsToken,omitempty"`

	// Token server url.
	GcsTokenURL string `json:"gcsTokenUrl,omitempty"`

	// Auth server URL.
	GphotosAuthURL string `json:"gphotosAuthUrl,omitempty"`

	// OAuth Client Id.
	GphotosClientID string `json:"gphotosClientId,omitempty"`

	// OAuth Client Secret.
	GphotosClientSecret string `json:"gphotosClientSecret,omitempty"`

	// The encoding for the backend.
	GphotosEncoding *string `json:"gphotosEncoding,omitempty"`

	// Also view and download archived media.
	GphotosIncludeArchived *string `json:"gphotosIncludeArchived,omitempty"`

	// Set to make the Google Photos backend read only.
	GphotosReadOnly *string `json:"gphotosReadOnly,omitempty"`

	// Set to read the size of media items.
	GphotosReadSize *string `json:"gphotosReadSize,omitempty"`

	// Year limits the photos to be downloaded to those which are uploaded after the given year.
	GphotosStartYear *string `json:"gphotosStartYear,omitempty"`

	// OAuth Access Token as a JSON blob.
	GphotosToken string `json:"gphotosToken,omitempty"`

	// Token server url.
	GphotosTokenURL string `json:"gphotosTokenUrl,omitempty"`

	// Kerberos data transfer protection: authentication|integrity|privacy.
	HdfsDataTransferProtection string `json:"hdfsDataTransferProtection,omitempty"`

	// The encoding for the backend.
	HdfsEncoding *string `json:"hdfsEncoding,omitempty"`

	// Hadoop name node and port.
	HdfsNamenode string `json:"hdfsNamenode,omitempty"`

	// Kerberos service principal name for the namenode.
	HdfsServicePrincipalName string `json:"hdfsServicePrincipalName,omitempty"`

	// Hadoop user name.
	HdfsUsername string `json:"hdfsUsername,omitempty"`

	// Auth server URL.
	HidriveAuthURL string `json:"hidriveAuthUrl,omitempty"`

	// Chunksize for chunked uploads.
	HidriveChunkSize *string `json:"hidriveChunkSize,omitempty"`

	// OAuth Client Id.
	HidriveClientID string `json:"hidriveClientId,omitempty"`

	// OAuth Client Secret.
	HidriveClientSecret string `json:"hidriveClientSecret,omitempty"`

	// Do not fetch number of objects in directories unless it is absolutely necessary.
	HidriveDisableFetchingMemberCount *string `json:"hidriveDisableFetchingMemberCount,omitempty"`

	// The encoding for the backend.
	HidriveEncoding *string `json:"hidriveEncoding,omitempty"`

	// Endpoint for the service.
	HidriveEndpoint *string `json:"hidriveEndpoint,omitempty"`

	// The root/parent folder for all paths.
	HidriveRootPrefix *string `json:"hidriveRootPrefix,omitempty"`

	// Access permissions that rclone should use when requesting access from HiDrive.
	HidriveScopeAccess *string `json:"hidriveScopeAccess,omitempty"`

	// User-level that rclone should use when requesting access from HiDrive.
	HidriveScopeRole *string `json:"hidriveScopeRole,omitempty"`

	// OAuth Access Token as a JSON blob.
	HidriveToken string `json:"hidriveToken,omitempty"`

	// Token server url.
	HidriveTokenURL string `json:"hidriveTokenUrl,omitempty"`

	// Concurrency for chunked uploads.
	HidriveUploadConcurrency *string `json:"hidriveUploadConcurrency,omitempty"`

	// Cutoff/Threshold for chunked uploads.
	HidriveUploadCutoff *string `json:"hidriveUploadCutoff,omitempty"`

	// Set HTTP headers for all transactions.
	HTTPHeaders string `json:"httpHeaders,omitempty"`

	// Don't use HEAD requests.
	HTTPNoHead *string `json:"httpNoHead,omitempty"`

	// Set this if the site doesn't end directories with /.
	HTTPNoSlash *string `json:"httpNoSlash,omitempty"`

	// URL of HTTP host to connect to.
	HTTPURL string `json:"httpUrl,omitempty"`

	// IAS3 Access Key.
	InternetarchiveAccessKeyID string `json:"internetarchiveAccessKeyId,omitempty"`

	// Don't ask the server to test against MD5 checksum calculated by rclone.
	InternetarchiveDisableChecksum *string `json:"internetarchiveDisableChecksum,omitempty"`

	// The encoding for the backend.
	InternetarchiveEncoding *string `json:"internetarchiveEncoding,omitempty"`

	// IAS3 Endpoint.
	InternetarchiveEndpoint *string `json:"internetarchiveEndpoint,omitempty"`

	// Host of InternetArchive Frontend.
	InternetarchiveFrontEndpoint *string `json:"internetarchiveFrontEndpoint,omitempty"`

	// IAS3 Secret Key (password).
	InternetarchiveSecretAccessKey string `json:"internetarchiveSecretAccessKey,omitempty"`

	// Timeout for waiting the server's processing tasks (specifically archive and book_op) to finish.
	InternetarchiveWaitArchive *string `json:"internetarchiveWaitArchive,omitempty"`

	// The encoding for the backend.
	JottacloudEncoding *string `json:"jottacloudEncoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	JottacloudHardDelete *string `json:"jottacloudHardDelete,omitempty"`

	// Files bigger than this will be cached on disk to calculate the MD5 if required.
	JottacloudMd5MemoryLimit *string `json:"jottacloudMd5MemoryLimit,omitempty"`

	// Avoid server side versioning by deleting files and recreating files instead of overwriting them.
	JottacloudNoVersions *string `json:"jottacloudNoVersions,omitempty"`

	// Only show files that are in the trash.
	JottacloudTrashedOnly *string `json:"jottacloudTrashedOnly,omitempty"`

	// Files bigger than this can be resumed if the upload fails.
	JottacloudUploadResumeLimit *string `json:"jottacloudUploadResumeLimit,omitempty"`

	// The encoding for the backend.
	KoofrEncoding *string `json:"koofrEncoding,omitempty"`

	// The Koofr API endpoint to use.
	KoofrEndpoint string `json:"koofrEndpoint,omitempty"`

	// Mount ID of the mount to use.
	KoofrMountid string `json:"koofrMountid,omitempty"`

	// Your password for rclone (generate one at https://app.koofr.net/app/admin/preferences/password).
	KoofrPassword string `json:"koofrPassword,omitempty"`

	// Choose your storage provider.
	KoofrProvider string `json:"koofrProvider,omitempty"`

	// Does the backend support setting modification time.
	KoofrSetmtime *string `json:"koofrSetmtime,omitempty"`

	// Your user name.
	KoofrUser string `json:"koofrUser,omitempty"`

	// Force the filesystem to report itself as case insensitive.
	LocalCaseInsensitive *string `json:"localCaseInsensitive,omitempty"`

	// Force the filesystem to report itself as case sensitive.
	LocalCaseSensitive *string `json:"localCaseSensitive,omitempty"`

	// Follow symlinks and copy the pointed to item.
	LocalCopyLinks *string `json:"localCopyLinks,omitempty"`

	// The encoding for the backend.
	LocalEncoding *string `json:"localEncoding,omitempty"`

	// Translate symlinks to/from regular files with a '.rclonelink' extension.
	LocalLinks *string `json:"localLinks,omitempty"`

	// Don't check to see if the files change during upload.
	LocalNoCheckUpdated *string `json:"localNoCheckUpdated,omitempty"`

	// Disable preallocation of disk space for transferred files.
	LocalNoPreallocate *string `json:"localNoPreallocate,omitempty"`

	// Disable setting modtime.
	LocalNoSetModtime *string `json:"localNoSetModtime,omitempty"`

	// Disable sparse files for multi-thread downloads.
	LocalNoSparse *string `json:"localNoSparse,omitempty"`

	// Disable UNC (long path names) conversion on Windows.
	LocalNounc *string `json:"localNounc,omitempty"`

	// Don't cross filesystem boundaries (unix/macOS only).
	LocalOneFileSystem *string `json:"localOneFileSystem,omitempty"`

	// Don't warn about skipped symlinks.
	LocalSkipLinks *string `json:"localSkipLinks,omitempty"`

	// Apply unicode NFC normalization to paths and filenames.
	LocalUnicodeNormalization *string `json:"localUnicodeNormalization,omitempty"`

	// Assume the Stat size of links is zero (and read them instead) (deprecated).
	LocalZeroSizeLinks *string `json:"localZeroSizeLinks,omitempty"`

	// What should copy do if file checksum is mismatched or invalid.
	MailruCheckHash *string `json:"mailruCheckHash,omitempty"`

	// The encoding for the backend.
	MailruEncoding *string `json:"mailruEncoding,omitempty"`

	// Password.
	MailruPass string `json:"mailruPass,omitempty"`

	// Comma separated list of internal maintenance flags.
	MailruQuirks string `json:"mailruQuirks,omitempty"`

	// Skip full upload if there is another file with same data hash.
	MailruSpeedupEnable *string `json:"mailruSpeedupEnable,omitempty"`

	// Comma separated list of file name patterns eligible for speedup (put by hash).
	MailruSpeedupFilePatterns *string `json:"mailruSpeedupFilePatterns,omitempty"`

	// This option allows you to disable speedup (put by hash) for large files.
	MailruSpeedupMaxDisk *string `json:"mailruSpeedupMaxDisk,omitempty"`

	// Files larger than the size given below will always be hashed on disk.
	MailruSpeedupMaxMemory *string `json:"mailruSpeedupMaxMemory,omitempty"`

	// User name (usually email).
	MailruUser string `json:"mailruUser,omitempty"`

	// HTTP user agent used internally by client.
	MailruUserAgent string `json:"mailruUserAgent,omitempty"`

	// Output more debug from Mega.
	MegaDebug *string `json:"megaDebug,omitempty"`

	// The encoding for the backend.
	MegaEncoding *string `json:"megaEncoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	MegaHardDelete *string `json:"megaHardDelete,omitempty"`

	// Password.
	MegaPass string `json:"megaPass,omitempty"`

	// Use HTTPS for transfers.
	MegaUseHTTPS *string `json:"megaUseHttps,omitempty"`

	// User name.
	MegaUser string `json:"megaUser,omitempty"`

	// Set the NetStorage account name
	NetstorageAccount string `json:"netstorageAccount,omitempty"`

	// Domain+path of NetStorage host to connect to.
	NetstorageHost string `json:"netstorageHost,omitempty"`

	// Select between HTTP or HTTPS protocol.
	NetstorageProtocol *string `json:"netstorageProtocol,omitempty"`

	// Set the NetStorage account secret/G2O key for authentication.
	NetstorageSecret string `json:"netstorageSecret,omitempty"`

	// Set scopes to be requested by rclone.
	OnedriveAccessScopes *string `json:"onedriveAccessScopes,omitempty"`

	// Auth server URL.
	OnedriveAuthURL string `json:"onedriveAuthUrl,omitempty"`

	// Chunk size to upload files with - must be multiple of 320k (327,680 bytes).
	OnedriveChunkSize *string `json:"onedriveChunkSize,omitempty"`

	// OAuth Client Id.
	OnedriveClientID string `json:"onedriveClientId,omitempty"`

	// OAuth Client Secret.
	OnedriveClientSecret string `json:"onedriveClientSecret,omitempty"`

	// Disable the request for Sites.Read.All permission.
	OnedriveDisableSitePermission *string `json:"onedriveDisableSitePermission,omitempty"`

	// The ID of the drive to use.
	OnedriveDriveID string `json:"onedriveDriveId,omitempty"`

	// The type of the drive (personal | business | documentLibrary).
	OnedriveDriveType string `json:"onedriveDriveType,omitempty"`

	// The encoding for the backend.
	OnedriveEncoding *string `json:"onedriveEncoding,omitempty"`

	// Set to make OneNote files show up in directory listings.
	OnedriveExposeOnenoteFiles *string `json:"onedriveExposeOnenoteFiles,omitempty"`

	// Specify the hash in use for the backend.
	OnedriveHashType *string `json:"onedriveHashType,omitempty"`

	// Set the password for links created by the link command.
	OnedriveLinkPassword string `json:"onedriveLinkPassword,omitempty"`

	// Set the scope of the links created by the link command.
	OnedriveLinkScope *string `json:"onedriveLinkScope,omitempty"`

	// Set the type of the links created by the link command.
	OnedriveLinkType *string `json:"onedriveLinkType,omitempty"`

	// Size of listing chunk.
	OnedriveListChunk *string `json:"onedriveListChunk,omitempty"`

	// Remove all versions on modifying operations.
	OnedriveNoVersions *string `json:"onedriveNoVersions,omitempty"`

	// Choose national cloud region for OneDrive.
	OnedriveRegion *string `json:"onedriveRegion,omitempty"`

	// ID of the root folder.
	OnedriveRootFolderID string `json:"onedriveRootFolderId,omitempty"`

	// Allow server-side operations (e.g. copy) to work across different onedrive configs.
	OnedriveServerSideAcrossConfigs *string `json:"onedriveServerSideAcrossConfigs,omitempty"`

	// OAuth Access Token as a JSON blob.
	OnedriveToken string `json:"onedriveToken,omitempty"`

	// Token server url.
	OnedriveTokenURL string `json:"onedriveTokenUrl,omitempty"`

	// Chunk size to use for uploading.
	OosChunkSize *string `json:"oosChunkSize,omitempty"`

	// Object storage compartment OCID
	OosCompartment string `json:"oosCompartment,omitempty"`

	// Path to OCI config file
	OosConfigFile *string `json:"oosConfigFile,omitempty"`

	// Profile name inside the oci config file
	OosConfigProfile *string `json:"oosConfigProfile,omitempty"`

	// Cutoff for switching to multipart copy.
	OosCopyCutoff *string `json:"oosCopyCutoff,omitempty"`

	// Timeout for copy.
	OosCopyTimeout *string `json:"oosCopyTimeout,omitempty"`

	// Don't store MD5 checksum with object metadata.
	OosDisableChecksum *string `json:"oosDisableChecksum,omitempty"`

	// The encoding for the backend.
	OosEncoding *string `json:"oosEncoding,omitempty"`

	// Endpoint for Object storage API.
	OosEndpoint string `json:"oosEndpoint,omitempty"`

	// If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.
	OosLeavePartsOnError *string `json:"oosLeavePartsOnError,omitempty"`

	// Object storage namespace
	OosNamespace string `json:"oosNamespace,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	OosNoCheckBucket *string `json:"oosNoCheckBucket,omitempty"`

	// Choose your Auth Provider
	OosProvider *string `json:"oosProvider,omitempty"`

	// Object storage Region
	OosRegion string `json:"oosRegion,omitempty"`

	// If using SSE-C, the optional header that specifies "AES256" as the encryption algorithm.
	OosSseCustomerAlgorithm string `json:"oosSseCustomerAlgorithm,omitempty"`

	// To use SSE-C, the optional header that specifies the base64-encoded 256-bit encryption key to use to
	OosSseCustomerKey string `json:"oosSseCustomerKey,omitempty"`

	// To use SSE-C, a file containing the base64-encoded string of the AES-256 encryption key associated
	OosSseCustomerKeyFile string `json:"oosSseCustomerKeyFile,omitempty"`

	// If using SSE-C, the optional header that specifies the base64-encoded SHA256 hash of the encryption
	OosSseCustomerKeySha256 string `json:"oosSseCustomerKeySha256,omitempty"`

	// If using your own master key in vault, this header specifies the
	OosSseKmsKeyID string `json:"oosSseKmsKeyId,omitempty"`

	// The storage class to use when storing new objects in storage. https://docs.oracle.com/en-us/iaas/Content/Object/Concepts/understandingstoragetiers.htm
	OosStorageTier *string `json:"oosStorageTier,omitempty"`

	// Concurrency for multipart uploads.
	OosUploadConcurrency *string `json:"oosUploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	OosUploadCutoff *string `json:"oosUploadCutoff,omitempty"`

	// Files will be uploaded in chunks this size.
	OpendriveChunkSize *string `json:"opendriveChunkSize,omitempty"`

	// The encoding for the backend.
	OpendriveEncoding *string `json:"opendriveEncoding,omitempty"`

	// Password.
	OpendrivePassword string `json:"opendrivePassword,omitempty"`

	// Username.
	OpendriveUsername string `json:"opendriveUsername,omitempty"`

	// Auth server URL.
	PcloudAuthURL string `json:"pcloudAuthUrl,omitempty"`

	// OAuth Client Id.
	PcloudClientID string `json:"pcloudClientId,omitempty"`

	// OAuth Client Secret.
	PcloudClientSecret string `json:"pcloudClientSecret,omitempty"`

	// The encoding for the backend.
	PcloudEncoding *string `json:"pcloudEncoding,omitempty"`

	// Hostname to connect to.
	PcloudHostname *string `json:"pcloudHostname,omitempty"`

	// Your pcloud password.
	PcloudPassword string `json:"pcloudPassword,omitempty"`

	// Fill in for rclone to use a non root folder as its starting point.
	PcloudRootFolderID *string `json:"pcloudRootFolderId,omitempty"`

	// OAuth Access Token as a JSON blob.
	PcloudToken string `json:"pcloudToken,omitempty"`

	// Token server url.
	PcloudTokenURL string `json:"pcloudTokenUrl,omitempty"`

	// Your pcloud username.
	PcloudUsername string `json:"pcloudUsername,omitempty"`

	// API Key.
	PremiumizemeAPIKey string `json:"premiumizemeApiKey,omitempty"`

	// The encoding for the backend.
	PremiumizemeEncoding *string `json:"premiumizemeEncoding,omitempty"`

	// The encoding for the backend.
	PutioEncoding *string `json:"putioEncoding,omitempty"`

	// QingStor Access Key ID.
	QingstorAccessKeyID string `json:"qingstorAccessKeyId,omitempty"`

	// Chunk size to use for uploading.
	QingstorChunkSize *string `json:"qingstorChunkSize,omitempty"`

	// Number of connection retries.
	QingstorConnectionRetries *string `json:"qingstorConnectionRetries,omitempty"`

	// The encoding for the backend.
	QingstorEncoding *string `json:"qingstorEncoding,omitempty"`

	// Enter an endpoint URL to connect to the QingStor API.
	QingstorEndpoint string `json:"qingstorEndpoint,omitempty"`

	// Get QingStor credentials from runtime.
	QingstorEnvAuth *string `json:"qingstorEnvAuth,omitempty"`

	// QingStor Secret Access Key (password).
	QingstorSecretAccessKey string `json:"qingstorSecretAccessKey,omitempty"`

	// Concurrency for multipart uploads.
	QingstorUploadConcurrency *string `json:"qingstorUploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	QingstorUploadCutoff *string `json:"qingstorUploadCutoff,omitempty"`

	// Zone to connect to.
	QingstorZone string `json:"qingstorZone,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	RescanInterval string `json:"rescanInterval,omitempty"`

	// AWS Access Key ID.
	S3AccessKeyID string `json:"s3AccessKeyId,omitempty"`

	// Canned ACL used when creating buckets and storing or copying objects.
	S3ACL string `json:"s3Acl,omitempty"`

	// Canned ACL used when creating buckets.
	S3BucketACL string `json:"s3BucketAcl,omitempty"`

	// Chunk size to use for uploading.
	S3ChunkSize *string `json:"s3ChunkSize,omitempty"`

	// Cutoff for switching to multipart copy.
	S3CopyCutoff *string `json:"s3CopyCutoff,omitempty"`

	// If set this will decompress gzip encoded objects.
	S3Decompress *string `json:"s3Decompress,omitempty"`

	// Don't store MD5 checksum with object metadata.
	S3DisableChecksum *string `json:"s3DisableChecksum,omitempty"`

	// Disable usage of http2 for S3 backends.
	S3DisableHttp2 *string `json:"s3DisableHttp2,omitempty"`

	// Custom endpoint for downloads.
	S3DownloadURL string `json:"s3DownloadUrl,omitempty"`

	// The encoding for the backend.
	S3Encoding *string `json:"s3Encoding,omitempty"`

	// Endpoint for S3 API.
	S3Endpoint string `json:"s3Endpoint,omitempty"`

	// Get AWS credentials from runtime (environment variables or EC2/ECS meta data if no env vars).
	S3EnvAuth *string `json:"s3EnvAuth,omitempty"`

	// If true use path style access, if false use virtual hosted style.
	S3ForcePathStyle *string `json:"s3ForcePathStyle,omitempty"`

	// If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.
	S3LeavePartsOnError *string `json:"s3LeavePartsOnError,omitempty"`

	// Size of listing chunk (response list for each ListObject S3 request).
	S3ListChunk *string `json:"s3ListChunk,omitempty"`

	// Whether to url encode listings: true/false/unset
	S3ListURLEncode *string `json:"s3ListUrlEncode,omitempty"`

	// Version of ListObjects to use: 1,2 or 0 for auto.
	S3ListVersion *string `json:"s3ListVersion,omitempty"`

	// Location constraint - must be set to match the Region.
	S3LocationConstraint string `json:"s3LocationConstraint,omitempty"`

	// Maximum number of parts in a multipart upload.
	S3MaxUploadParts *string `json:"s3MaxUploadParts,omitempty"`

	// How often internal memory buffer pools will be flushed.
	S3MemoryPoolFlushTime *string `json:"s3MemoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	S3MemoryPoolUseMmap *string `json:"s3MemoryPoolUseMmap,omitempty"`

	// Set this if the backend might gzip objects.
	S3MightGzip *string `json:"s3MightGzip,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	S3NoCheckBucket *string `json:"s3NoCheckBucket,omitempty"`

	// If set, don't HEAD uploaded objects to check integrity.
	S3NoHead *string `json:"s3NoHead,omitempty"`

	// If set, do not do HEAD before GET when getting objects.
	S3NoHeadObject *string `json:"s3NoHeadObject,omitempty"`

	// Suppress setting and reading of system metadata
	S3NoSystemMetadata *string `json:"s3NoSystemMetadata,omitempty"`

	// Profile to use in the shared credentials file.
	S3Profile string `json:"s3Profile,omitempty"`

	// Choose your S3 provider.
	S3Provider string `json:"s3Provider,omitempty"`

	// Region to connect to.
	S3Region string `json:"s3Region,omitempty"`

	// Enables requester pays option when interacting with S3 bucket.
	S3RequesterPays *string `json:"s3RequesterPays,omitempty"`

	// AWS Secret Access Key (password).
	S3SecretAccessKey string `json:"s3SecretAccessKey,omitempty"`

	// The server-side encryption algorithm used when storing this object in S3.
	S3ServerSideEncryption string `json:"s3ServerSideEncryption,omitempty"`

	// An AWS session token.
	S3SessionToken string `json:"s3SessionToken,omitempty"`

	// Path to the shared credentials file.
	S3SharedCredentialsFile string `json:"s3SharedCredentialsFile,omitempty"`

	// If using SSE-C, the server-side encryption algorithm used when storing this object in S3.
	S3SseCustomerAlgorithm string `json:"s3SseCustomerAlgorithm,omitempty"`

	// To use SSE-C you may provide the secret encryption key used to encrypt/decrypt your data.
	S3SseCustomerKey string `json:"s3SseCustomerKey,omitempty"`

	// If using SSE-C you must provide the secret encryption key encoded in base64 format to encrypt/decrypt your data.
	S3SseCustomerKeyBase64 string `json:"s3SseCustomerKeyBase64,omitempty"`

	// If using SSE-C you may provide the secret encryption key MD5 checksum (optional).
	S3SseCustomerKeyMd5 string `json:"s3SseCustomerKeyMd5,omitempty"`

	// If using KMS ID you must provide the ARN of Key.
	S3SseKmsKeyID string `json:"s3SseKmsKeyId,omitempty"`

	// The storage class to use when storing new objects in S3.
	S3StorageClass string `json:"s3StorageClass,omitempty"`

	// Endpoint for STS.
	S3StsEndpoint string `json:"s3StsEndpoint,omitempty"`

	// Concurrency for multipart uploads.
	S3UploadConcurrency *string `json:"s3UploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	S3UploadCutoff *string `json:"s3UploadCutoff,omitempty"`

	// If true use the AWS S3 accelerated endpoint.
	S3UseAccelerateEndpoint *string `json:"s3UseAccelerateEndpoint,omitempty"`

	// Whether to use ETag in multipart uploads for verification
	S3UseMultipartEtag *string `json:"s3UseMultipartEtag,omitempty"`

	// Whether to use a presigned request or PutObject for single part uploads
	S3UsePresignedRequest *string `json:"s3UsePresignedRequest,omitempty"`

	// If true use v2 authentication.
	S3V2Auth *string `json:"s3V2Auth,omitempty"`

	// Show file versions as they were at the specified time.
	S3VersionAt *string `json:"s3VersionAt,omitempty"`

	// Include old versions in directory listings.
	S3Versions *string `json:"s3Versions,omitempty"`

	// Starting state for scanning
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState,omitempty"`

	// Two-factor authentication ('true' if the account has 2FA enabled).
	Seafile2fa *string `json:"seafile2fa,omitempty"`

	// Authentication token.
	SeafileAuthToken string `json:"seafileAuthToken,omitempty"`

	// Should rclone create a library if it doesn't exist.
	SeafileCreateLibrary *string `json:"seafileCreateLibrary,omitempty"`

	// The encoding for the backend.
	SeafileEncoding *string `json:"seafileEncoding,omitempty"`

	// Name of the library.
	SeafileLibrary string `json:"seafileLibrary,omitempty"`

	// Library password (for encrypted libraries only).
	SeafileLibraryKey string `json:"seafileLibraryKey,omitempty"`

	// Password.
	SeafilePass string `json:"seafilePass,omitempty"`

	// URL of seafile host to connect to.
	SeafileURL string `json:"seafileUrl,omitempty"`

	// User name (usually email address).
	SeafileUser string `json:"seafileUser,omitempty"`

	// Allow asking for SFTP password when needed.
	SftpAskPassword *string `json:"sftpAskPassword,omitempty"`

	// Upload and download chunk size.
	SftpChunkSize *string `json:"sftpChunkSize,omitempty"`

	// Space separated list of ciphers to be used for session encryption, ordered by preference.
	SftpCiphers string `json:"sftpCiphers,omitempty"`

	// The maximum number of outstanding requests for one file
	SftpConcurrency *string `json:"sftpConcurrency,omitempty"`

	// If set don't use concurrent reads.
	SftpDisableConcurrentReads *string `json:"sftpDisableConcurrentReads,omitempty"`

	// If set don't use concurrent writes.
	SftpDisableConcurrentWrites *string `json:"sftpDisableConcurrentWrites,omitempty"`

	// Disable the execution of SSH commands to determine if remote file hashing is available.
	SftpDisableHashcheck *string `json:"sftpDisableHashcheck,omitempty"`

	// SSH host to connect to.
	SftpHost string `json:"sftpHost,omitempty"`

	// Max time before closing idle connections.
	SftpIdleTimeout *string `json:"sftpIdleTimeout,omitempty"`

	// Space separated list of key exchange algorithms, ordered by preference.
	SftpKeyExchange string `json:"sftpKeyExchange,omitempty"`

	// Path to PEM-encoded private key file.
	SftpKeyFile string `json:"sftpKeyFile,omitempty"`

	// The passphrase to decrypt the PEM-encoded private key file.
	SftpKeyFilePass string `json:"sftpKeyFilePass,omitempty"`

	// Raw PEM-encoded private key.
	SftpKeyPem string `json:"sftpKeyPem,omitempty"`

	// When set forces the usage of the ssh-agent.
	SftpKeyUseAgent *string `json:"sftpKeyUseAgent,omitempty"`

	// Optional path to known_hosts file.
	SftpKnownHostsFile string `json:"sftpKnownHostsFile,omitempty"`

	// Space separated list of MACs (message authentication code) algorithms, ordered by preference.
	SftpMacs string `json:"sftpMacs,omitempty"`

	// The command used to read md5 hashes.
	SftpMd5sumCommand string `json:"sftpMd5sumCommand,omitempty"`

	// SSH password, leave blank to use ssh-agent.
	SftpPass string `json:"sftpPass,omitempty"`

	// Override path used by SSH shell commands.
	SftpPathOverride string `json:"sftpPathOverride,omitempty"`

	// SSH port number.
	SftpPort *string `json:"sftpPort,omitempty"`

	// Optional path to public key file.
	SftpPubkeyFile string `json:"sftpPubkeyFile,omitempty"`

	// Specifies the path or command to run an sftp server on the remote host.
	SftpServerCommand string `json:"sftpServerCommand,omitempty"`

	// Environment variables to pass to sftp and commands
	SftpSetEnv string `json:"sftpSetEnv,omitempty"`

	// Set the modified time on the remote if set.
	SftpSetModtime *string `json:"sftpSetModtime,omitempty"`

	// The command used to read sha1 hashes.
	SftpSha1sumCommand string `json:"sftpSha1sumCommand,omitempty"`

	// The type of SSH shell on remote server, if any.
	SftpShellType string `json:"sftpShellType,omitempty"`

	// Set to skip any symlinks and any other non regular files.
	SftpSkipLinks *string `json:"sftpSkipLinks,omitempty"`

	// Specifies the SSH2 subsystem on the remote host.
	SftpSubsystem *string `json:"sftpSubsystem,omitempty"`

	// If set use fstat instead of stat.
	SftpUseFstat *string `json:"sftpUseFstat,omitempty"`

	// Enable the use of insecure ciphers and key exchange methods.
	SftpUseInsecureCipher *string `json:"sftpUseInsecureCipher,omitempty"`

	// SSH username.
	SftpUser *string `json:"sftpUser,omitempty"`

	// Upload chunk size.
	SharefileChunkSize *string `json:"sharefileChunkSize,omitempty"`

	// The encoding for the backend.
	SharefileEncoding *string `json:"sharefileEncoding,omitempty"`

	// Endpoint for API calls.
	SharefileEndpoint string `json:"sharefileEndpoint,omitempty"`

	// ID of the root folder.
	SharefileRootFolderID string `json:"sharefileRootFolderId,omitempty"`

	// Cutoff for switching to multipart upload.
	SharefileUploadCutoff *string `json:"sharefileUploadCutoff,omitempty"`

	// Sia Daemon API Password.
	SiaAPIPassword string `json:"siaApiPassword,omitempty"`

	// Sia daemon API URL, like http://sia.daemon.host:9980.
	SiaAPIURL *string `json:"siaApiUrl,omitempty"`

	// The encoding for the backend.
	SiaEncoding *string `json:"siaEncoding,omitempty"`

	// Siad User Agent
	SiaUserAgent *string `json:"siaUserAgent,omitempty"`

	// Whether the server is configured to be case-insensitive.
	SmbCaseInsensitive *string `json:"smbCaseInsensitive,omitempty"`

	// Domain name for NTLM authentication.
	SmbDomain *string `json:"smbDomain,omitempty"`

	// The encoding for the backend.
	SmbEncoding *string `json:"smbEncoding,omitempty"`

	// Hide special shares (e.g. print$) which users aren't supposed to access.
	SmbHideSpecialShare *string `json:"smbHideSpecialShare,omitempty"`

	// SMB server hostname to connect to.
	SmbHost string `json:"smbHost,omitempty"`

	// Max time before closing idle connections.
	SmbIdleTimeout *string `json:"smbIdleTimeout,omitempty"`

	// SMB password.
	SmbPass string `json:"smbPass,omitempty"`

	// SMB port number.
	SmbPort *string `json:"smbPort,omitempty"`

	// Service principal name.
	SmbSpn string `json:"smbSpn,omitempty"`

	// SMB username.
	SmbUser *string `json:"smbUser,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Access grant.
	StorjAccessGrant string `json:"storjAccessGrant,omitempty"`

	// API key.
	StorjAPIKey string `json:"storjApiKey,omitempty"`

	// Encryption passphrase.
	StorjPassphrase string `json:"storjPassphrase,omitempty"`

	// Choose an authentication method.
	StorjProvider *string `json:"storjProvider,omitempty"`

	// Satellite address.
	StorjSatelliteAddress *string `json:"storjSatelliteAddress,omitempty"`

	// Sugarsync Access Key ID.
	SugarsyncAccessKeyID string `json:"sugarsyncAccessKeyId,omitempty"`

	// Sugarsync App ID.
	SugarsyncAppID string `json:"sugarsyncAppId,omitempty"`

	// Sugarsync authorization.
	SugarsyncAuthorization string `json:"sugarsyncAuthorization,omitempty"`

	// Sugarsync authorization expiry.
	SugarsyncAuthorizationExpiry string `json:"sugarsyncAuthorizationExpiry,omitempty"`

	// Sugarsync deleted folder id.
	SugarsyncDeletedID string `json:"sugarsyncDeletedId,omitempty"`

	// The encoding for the backend.
	SugarsyncEncoding *string `json:"sugarsyncEncoding,omitempty"`

	// Permanently delete files if true
	SugarsyncHardDelete *string `json:"sugarsyncHardDelete,omitempty"`

	// Sugarsync Private Access Key.
	SugarsyncPrivateAccessKey string `json:"sugarsyncPrivateAccessKey,omitempty"`

	// Sugarsync refresh token.
	SugarsyncRefreshToken string `json:"sugarsyncRefreshToken,omitempty"`

	// Sugarsync root id.
	SugarsyncRootID string `json:"sugarsyncRootId,omitempty"`

	// Sugarsync user.
	SugarsyncUser string `json:"sugarsyncUser,omitempty"`

	// Application Credential ID (OS_APPLICATION_CREDENTIAL_ID).
	SwiftApplicationCredentialID string `json:"swiftApplicationCredentialId,omitempty"`

	// Application Credential Name (OS_APPLICATION_CREDENTIAL_NAME).
	SwiftApplicationCredentialName string `json:"swiftApplicationCredentialName,omitempty"`

	// Application Credential Secret (OS_APPLICATION_CREDENTIAL_SECRET).
	SwiftApplicationCredentialSecret string `json:"swiftApplicationCredentialSecret,omitempty"`

	// Authentication URL for server (OS_AUTH_URL).
	SwiftAuth string `json:"swiftAuth,omitempty"`

	// Auth Token from alternate authentication - optional (OS_AUTH_TOKEN).
	SwiftAuthToken string `json:"swiftAuthToken,omitempty"`

	// AuthVersion - optional - set to (1,2,3) if your auth URL has no version (ST_AUTH_VERSION).
	SwiftAuthVersion *string `json:"swiftAuthVersion,omitempty"`

	// Above this size files will be chunked into a _segments container.
	SwiftChunkSize *string `json:"swiftChunkSize,omitempty"`

	// User domain - optional (v3 auth) (OS_USER_DOMAIN_NAME)
	SwiftDomain string `json:"swiftDomain,omitempty"`

	// The encoding for the backend.
	SwiftEncoding *string `json:"swiftEncoding,omitempty"`

	// Endpoint type to choose from the service catalogue (OS_ENDPOINT_TYPE).
	SwiftEndpointType *string `json:"swiftEndpointType,omitempty"`

	// Get swift credentials from environment variables in standard OpenStack form.
	SwiftEnvAuth *string `json:"swiftEnvAuth,omitempty"`

	// API key or password (OS_PASSWORD).
	SwiftKey string `json:"swiftKey,omitempty"`

	// If true avoid calling abort upload on a failure.
	SwiftLeavePartsOnError *string `json:"swiftLeavePartsOnError,omitempty"`

	// Don't chunk files during streaming upload.
	SwiftNoChunk *string `json:"swiftNoChunk,omitempty"`

	// Disable support for static and dynamic large objects
	SwiftNoLargeObjects *string `json:"swiftNoLargeObjects,omitempty"`

	// Region name - optional (OS_REGION_NAME).
	SwiftRegion string `json:"swiftRegion,omitempty"`

	// The storage policy to use when creating a new container.
	SwiftStoragePolicy string `json:"swiftStoragePolicy,omitempty"`

	// Storage URL - optional (OS_STORAGE_URL).
	SwiftStorageURL string `json:"swiftStorageUrl,omitempty"`

	// Tenant name - optional for v1 auth, this or tenant_id required otherwise (OS_TENANT_NAME or OS_PROJECT_NAME).
	SwiftTenant string `json:"swiftTenant,omitempty"`

	// Tenant domain - optional (v3 auth) (OS_PROJECT_DOMAIN_NAME).
	SwiftTenantDomain string `json:"swiftTenantDomain,omitempty"`

	// Tenant ID - optional for v1 auth, this or tenant required otherwise (OS_TENANT_ID).
	SwiftTenantID string `json:"swiftTenantId,omitempty"`

	// User name to log in (OS_USERNAME).
	SwiftUser string `json:"swiftUser,omitempty"`

	// User ID to log in - optional - most swift systems use user and leave this blank (v3 auth) (OS_USER_ID).
	SwiftUserID string `json:"swiftUserId,omitempty"`

	// Your access token.
	UptoboxAccessToken string `json:"uptoboxAccessToken,omitempty"`

	// The encoding for the backend.
	UptoboxEncoding *string `json:"uptoboxEncoding,omitempty"`

	// Bearer token instead of user/pass (e.g. a Macaroon).
	WebdavBearerToken string `json:"webdavBearerToken,omitempty"`

	// Command to run to get a bearer token.
	WebdavBearerTokenCommand string `json:"webdavBearerTokenCommand,omitempty"`

	// The encoding for the backend.
	WebdavEncoding string `json:"webdavEncoding,omitempty"`

	// Set HTTP headers for all transactions.
	WebdavHeaders string `json:"webdavHeaders,omitempty"`

	// Password.
	WebdavPass string `json:"webdavPass,omitempty"`

	// URL of http host to connect to.
	WebdavURL string `json:"webdavUrl,omitempty"`

	// User name.
	WebdavUser string `json:"webdavUser,omitempty"`

	// Name of the WebDAV site/service/software you are using.
	WebdavVendor string `json:"webdavVendor,omitempty"`

	// Auth server URL.
	YandexAuthURL string `json:"yandexAuthUrl,omitempty"`

	// OAuth Client Id.
	YandexClientID string `json:"yandexClientId,omitempty"`

	// OAuth Client Secret.
	YandexClientSecret string `json:"yandexClientSecret,omitempty"`

	// The encoding for the backend.
	YandexEncoding *string `json:"yandexEncoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	YandexHardDelete *string `json:"yandexHardDelete,omitempty"`

	// OAuth Access Token as a JSON blob.
	YandexToken string `json:"yandexToken,omitempty"`

	// Token server url.
	YandexTokenURL string `json:"yandexTokenUrl,omitempty"`

	// Auth server URL.
	ZohoAuthURL string `json:"zohoAuthUrl,omitempty"`

	// OAuth Client Id.
	ZohoClientID string `json:"zohoClientId,omitempty"`

	// OAuth Client Secret.
	ZohoClientSecret string `json:"zohoClientSecret,omitempty"`

	// The encoding for the backend.
	ZohoEncoding *string `json:"zohoEncoding,omitempty"`

	// Zoho region to connect to.
	ZohoRegion string `json:"zohoRegion,omitempty"`

	// OAuth Access Token as a JSON blob.
	ZohoToken string `json:"zohoToken,omitempty"`

	// Token server url.
	ZohoTokenURL string `json:"zohoTokenUrl,omitempty"`
}

DatasourceAllConfig datasource all config

swagger:model datasource.AllConfig

func (*DatasourceAllConfig) ContextValidate

func (m *DatasourceAllConfig) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource all config based on the context it is used

func (*DatasourceAllConfig) MarshalBinary

func (m *DatasourceAllConfig) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceAllConfig) UnmarshalBinary

func (m *DatasourceAllConfig) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceAllConfig) Validate

func (m *DatasourceAllConfig) Validate(formats strfmt.Registry) error

Validate validates this datasource all config
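
As an illustrative sketch only (not part of the generated package), a DatasourceAllConfig can be decoded from a JSON body and checked with Validate. The import path example.com/yourapp/models is a placeholder for wherever this package lives in your module, strfmt.Default is the standard registry from github.com/go-openapi/strfmt, and the field values are made up; the JSON keys come straight from the struct tags listed above.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	// Placeholder import path; substitute the real module path of this models package.
	"example.com/yourapp/models"
)

func main() {
	// Keys follow the json tags listed above (webdavUrl, webdavUser, sourcePath, ...).
	raw := []byte(`{"webdavUrl":"https://dav.example.com","webdavUser":"alice","sourcePath":"/data"}`)

	var cfg models.DatasourceAllConfig
	if err := cfg.UnmarshalBinary(raw); err != nil {
		log.Fatal(err)
	}

	// strfmt.Default supplies the standard go-openapi formats for validation.
	if err := cfg.Validate(strfmt.Default); err != nil {
		fmt.Println("validation error:", err)
		return
	}
	fmt.Println(cfg.WebdavURL, cfg.WebdavUser)
}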

type DatasourceAzureblobRequest

type DatasourceAzureblobRequest struct {

	// Access tier of blob: hot, cool or archive.
	AccessTier string `json:"accessTier,omitempty"`

	// Azure Storage Account Name.
	Account string `json:"account,omitempty"`

	// Delete archive tier blobs before overwriting.
	ArchiveTierDelete *string `json:"archiveTierDelete,omitempty"`

	// Upload chunk size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Password for the certificate file (optional).
	ClientCertificatePassword string `json:"clientCertificatePassword,omitempty"`

	// Path to a PEM or PKCS12 certificate file including the private key.
	ClientCertificatePath string `json:"clientCertificatePath,omitempty"`

	// The ID of the client in use.
	ClientID string `json:"clientId,omitempty"`

	// One of the service principal's client secrets
	ClientSecret string `json:"clientSecret,omitempty"`

	// Send the certificate chain when using certificate auth.
	ClientSendCertificateChain *string `json:"clientSendCertificateChain,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Don't store MD5 checksum with object metadata.
	DisableChecksum *string `json:"disableChecksum,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for the service.
	Endpoint string `json:"endpoint,omitempty"`

	// Read credentials from runtime (environment variables, CLI or MSI).
	EnvAuth *string `json:"envAuth,omitempty"`

	// Storage Account Shared Key.
	Key string `json:"key,omitempty"`

	// Size of blob list.
	ListChunk *string `json:"listChunk,omitempty"`

	// How often internal memory buffer pools will be flushed.
	MemoryPoolFlushTime *string `json:"memoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	MemoryPoolUseMmap *string `json:"memoryPoolUseMmap,omitempty"`

	// Object ID of the user-assigned MSI to use, if any.
	MsiClientID string `json:"msiClientId,omitempty"`

	// Azure resource ID of the user-assigned MSI to use, if any.
	MsiMiResID string `json:"msiMiResId,omitempty"`

	// Object ID of the user-assigned MSI to use, if any.
	MsiObjectID string `json:"msiObjectId,omitempty"`

	// If set, don't attempt to check the container exists or create it.
	NoCheckContainer *string `json:"noCheckContainer,omitempty"`

	// If set, do not do HEAD before GET when getting objects.
	NoHeadObject *string `json:"noHeadObject,omitempty"`

	// The user's password
	Password string `json:"password,omitempty"`

	// Public access level of a container: blob or container.
	PublicAccess string `json:"publicAccess,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// SAS URL for container level access only.
	SasURL string `json:"sasUrl,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Path to file containing credentials for use with a service principal.
	ServicePrincipalFile string `json:"servicePrincipalFile,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// ID of the service principal's tenant. Also called its directory ID.
	Tenant string `json:"tenant,omitempty"`

	// Concurrency for multipart uploads.
	UploadConcurrency *string `json:"uploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload (<= 256 MiB) (deprecated).
	UploadCutoff string `json:"uploadCutoff,omitempty"`

	// Uses local storage emulator if provided as 'true'.
	UseEmulator *string `json:"useEmulator,omitempty"`

	// Use a managed service identity to authenticate (only works in Azure).
	UseMsi *string `json:"useMsi,omitempty"`

	// User name (usually an email address)
	Username string `json:"username,omitempty"`
}

DatasourceAzureblobRequest datasource azureblob request

swagger:model datasource.AzureblobRequest

func (*DatasourceAzureblobRequest) ContextValidate

func (m *DatasourceAzureblobRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource azureblob request based on the context it is used

func (*DatasourceAzureblobRequest) MarshalBinary

func (m *DatasourceAzureblobRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceAzureblobRequest) UnmarshalBinary

func (m *DatasourceAzureblobRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceAzureblobRequest) Validate

func (m *DatasourceAzureblobRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource azureblob request
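
A minimal construction sketch, assuming the placeholder import path example.com/yourapp/models and a local ptr helper defined in the snippet; the account values are illustrative, and required fields are set through pointers so that "unset" remains distinguishable from a zero value.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourapp/models" // placeholder import path for this package
)

// ptr is a small local helper for the pointer-typed fields of the request structs.
func ptr[T any](v T) *T { return &v }

func main() {
	req := models.DatasourceAzureblobRequest{
		// Required fields documented above.
		DeleteAfterExport: ptr(false),
		RescanInterval:    ptr("24h"),
		SourcePath:        ptr("container/prefix"),
		// Optional backend settings; values are illustrative only.
		Account: "mystorageaccount",
		Key:     "base64-shared-key",
	}
	// ScanningState (also required) embeds ModelWorkState and is left at its zero value here.

	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation error:", err)
		return
	}
	fmt.Println("azureblob request ok")
}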

type DatasourceB2Request

type DatasourceB2Request struct {

	// Account ID or Application Key ID.
	Account string `json:"account,omitempty"`

	// Upload chunk size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Cutoff for switching to multipart copy.
	CopyCutoff *string `json:"copyCutoff,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Disable checksums for large (> upload cutoff) files.
	DisableChecksum *string `json:"disableChecksum,omitempty"`

	// Time before the authorization token will expire in s or suffix ms|s|m|h|d.
	DownloadAuthDuration *string `json:"downloadAuthDuration,omitempty"`

	// Custom endpoint for downloads.
	DownloadURL string `json:"downloadUrl,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for the service.
	Endpoint string `json:"endpoint,omitempty"`

	// Permanently delete files on remote removal, otherwise hide files.
	HardDelete *string `json:"hardDelete,omitempty"`

	// Application Key.
	Key string `json:"key,omitempty"`

	// How often internal memory buffer pools will be flushed.
	MemoryPoolFlushTime *string `json:"memoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	MemoryPoolUseMmap *string `json:"memoryPoolUseMmap,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// A flag string for X-Bz-Test-Mode header for debugging.
	TestMode string `json:"testMode,omitempty"`

	// Cutoff for switching to chunked upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`

	// Show file versions as they were at the specified time.
	VersionAt *string `json:"versionAt,omitempty"`

	// Include old versions in directory listings.
	Versions *string `json:"versions,omitempty"`
}

DatasourceB2Request datasource b2 request

swagger:model datasource.B2Request

func (*DatasourceB2Request) ContextValidate

func (m *DatasourceB2Request) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource b2 request based on the context it is used

func (*DatasourceB2Request) MarshalBinary

func (m *DatasourceB2Request) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceB2Request) UnmarshalBinary

func (m *DatasourceB2Request) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceB2Request) Validate

func (m *DatasourceB2Request) Validate(formats strfmt.Registry) error

Validate validates this datasource b2 request
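
The MarshalBinary/UnmarshalBinary pair round-trips a model through its JSON form. A short sketch, with a placeholder import path and made-up credentials:

package main

import (
	"fmt"
	"log"

	"example.com/yourapp/models" // placeholder import path for this package
)

func main() {
	in := models.DatasourceB2Request{
		Account: "000123456789abc0000000001", // illustrative credentials
		Key:     "applicationKey",
	}

	// The encoded bytes use the json tags shown above (account, key, ...).
	b, err := in.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}

	var out models.DatasourceB2Request
	if err := out.UnmarshalBinary(b); err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b))
	fmt.Println(out.Account == in.Account)
}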

type DatasourceBoxRequest

type DatasourceBoxRequest struct {

	// Box App Primary Access Token
	AccessToken string `json:"accessToken,omitempty"`

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Box App config.json location
	BoxConfigFile string `json:"boxConfigFile,omitempty"`

	// box sub type
	BoxSubType *string `json:"boxSubType,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Max number of times to try committing a multipart file.
	CommitRetries *string `json:"commitRetries,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Size of listing chunk 1-1000.
	ListChunk *string `json:"listChunk,omitempty"`

	// Only show items owned by the login (email address) passed in.
	OwnedBy string `json:"ownedBy,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Fill in for rclone to use a non root folder as its starting point.
	RootFolderID *string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`

	// Cutoff for switching to multipart upload (>= 50 MiB).
	UploadCutoff *string `json:"uploadCutoff,omitempty"`
}

DatasourceBoxRequest datasource box request

swagger:model datasource.BoxRequest

func (*DatasourceBoxRequest) ContextValidate

func (m *DatasourceBoxRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource box request based on the context it is used

func (*DatasourceBoxRequest) MarshalBinary

func (m *DatasourceBoxRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceBoxRequest) UnmarshalBinary

func (m *DatasourceBoxRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceBoxRequest) Validate

func (m *DatasourceBoxRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource box request

type DatasourceCheckSourceRequest

type DatasourceCheckSourceRequest struct {

	// Path relative to the data source root
	Path string `json:"path,omitempty"`
}

DatasourceCheckSourceRequest datasource check source request

swagger:model datasource.CheckSourceRequest

func (*DatasourceCheckSourceRequest) ContextValidate

func (m *DatasourceCheckSourceRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource check source request based on context it is used

func (*DatasourceCheckSourceRequest) MarshalBinary

func (m *DatasourceCheckSourceRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceCheckSourceRequest) UnmarshalBinary

func (m *DatasourceCheckSourceRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceCheckSourceRequest) Validate

func (m *DatasourceCheckSourceRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource check source request
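
For the single-field request types, usage is correspondingly small. A sketch under the same placeholder-import assumption; the path value is illustrative:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	"example.com/yourapp/models" // placeholder import path for this package
)

func main() {
	req := models.DatasourceCheckSourceRequest{Path: "subdir/file.bin"}

	b, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // typically {"path":"subdir/file.bin"}

	// ContextValidate applies any context-dependent validation rules.
	if err := req.ContextValidate(context.Background(), strfmt.Default); err != nil {
		log.Fatal(err)
	}
}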

type DatasourceCreatePackJobRequest

type DatasourceCreatePackJobRequest struct {

	// File range IDs
	FileRangeIDs []int64 `json:"fileRangeIDs"`
}

DatasourceCreatePackJobRequest datasource create pack job request

swagger:model datasource.CreatePackJobRequest

func (*DatasourceCreatePackJobRequest) ContextValidate

func (m *DatasourceCreatePackJobRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource create pack job request based on context it is used

func (*DatasourceCreatePackJobRequest) MarshalBinary

func (m *DatasourceCreatePackJobRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceCreatePackJobRequest) UnmarshalBinary

func (m *DatasourceCreatePackJobRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceCreatePackJobRequest) Validate

func (m *DatasourceCreatePackJobRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource create pack job request
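
A sketch of building a pack-job request from a list of file range IDs; the IDs and import path are placeholders:

package main

import (
	"fmt"
	"log"

	"example.com/yourapp/models" // placeholder import path for this package
)

func main() {
	req := models.DatasourceCreatePackJobRequest{
		FileRangeIDs: []int64{101, 102, 103}, // illustrative file range IDs
	}

	b, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b)) // typically {"fileRangeIDs":[101,102,103]}
}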

type DatasourceDriveRequest

type DatasourceDriveRequest struct {

	// Set to allow files which return cannotDownloadAbusiveFile to be downloaded.
	AcknowledgeAbuse *string `json:"acknowledgeAbuse,omitempty"`

	// Allow the filetype to change when uploading Google docs.
	AllowImportNameChange *string `json:"allowImportNameChange,omitempty"`

	// Deprecated: No longer needed.
	AlternateExport *string `json:"alternateExport,omitempty"`

	// Only consider files owned by the authenticated user.
	AuthOwnerOnly *string `json:"authOwnerOnly,omitempty"`

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Upload chunk size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Google Application Client Id
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Server side copy contents of shortcuts instead of the shortcut.
	CopyShortcutContent *string `json:"copyShortcutContent,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Disable drive using http2.
	DisableHttp2 *string `json:"disableHttp2,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Comma separated list of preferred formats for downloading Google docs.
	ExportFormats *string `json:"exportFormats,omitempty"`

	// Deprecated: See export_formats.
	Formats string `json:"formats,omitempty"`

	// Impersonate this user when using a service account.
	Impersonate string `json:"impersonate,omitempty"`

	// Comma separated list of preferred formats for uploading Google docs.
	ImportFormats string `json:"importFormats,omitempty"`

	// Keep new head revision of each file forever.
	KeepRevisionForever *string `json:"keepRevisionForever,omitempty"`

	// Size of listing chunk 100-1000, 0 to disable.
	ListChunk *string `json:"listChunk,omitempty"`

	// Number of API calls to allow without sleeping.
	PacerBurst *string `json:"pacerBurst,omitempty"`

	// Minimum time to sleep between API calls.
	PacerMinSleep *string `json:"pacerMinSleep,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Resource key for accessing a link-shared file.
	ResourceKey string `json:"resourceKey,omitempty"`

	// ID of the root folder.
	RootFolderID string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Scope that rclone should use when requesting access from drive.
	Scope string `json:"scope,omitempty"`

	// Allow server-side operations (e.g. copy) to work across different drive configs.
	ServerSideAcrossConfigs *string `json:"serverSideAcrossConfigs,omitempty"`

	// Service Account Credentials JSON blob.
	ServiceAccountCredentials string `json:"serviceAccountCredentials,omitempty"`

	// Service Account Credentials JSON file path.
	ServiceAccountFile string `json:"serviceAccountFile,omitempty"`

	// Only show files that are shared with me.
	SharedWithMe *string `json:"sharedWithMe,omitempty"`

	// Show sizes as storage quota usage, not actual size.
	SizeAsQuota *string `json:"sizeAsQuota,omitempty"`

	// Skip MD5 checksum on Google photos and videos only.
	SkipChecksumGphotos *string `json:"skipChecksumGphotos,omitempty"`

	// If set skip dangling shortcut files.
	SkipDanglingShortcuts *string `json:"skipDanglingShortcuts,omitempty"`

	// Skip google documents in all listings.
	SkipGdocs *string `json:"skipGdocs,omitempty"`

	// If set skip shortcut files.
	SkipShortcuts *string `json:"skipShortcuts,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Only show files that are starred.
	StarredOnly *string `json:"starredOnly,omitempty"`

	// Make download limit errors be fatal.
	StopOnDownloadLimit *string `json:"stopOnDownloadLimit,omitempty"`

	// Make upload limit errors be fatal.
	StopOnUploadLimit *string `json:"stopOnUploadLimit,omitempty"`

	// ID of the Shared Drive (Team Drive).
	TeamDrive string `json:"teamDrive,omitempty"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`

	// Only show files that are in the trash.
	TrashedOnly *string `json:"trashedOnly,omitempty"`

	// Cutoff for switching to chunked upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`

	// Use file created date instead of modified date.
	UseCreatedDate *string `json:"useCreatedDate,omitempty"`

	// Use date file was shared instead of modified date.
	UseSharedDate *string `json:"useSharedDate,omitempty"`

	// Send files to the trash instead of deleting permanently.
	UseTrash *string `json:"useTrash,omitempty"`

	// If objects are larger than this, use the drive v2 API to download.
	V2DownloadMinSize *string `json:"v2DownloadMinSize,omitempty"`
}

DatasourceDriveRequest datasource drive request

swagger:model datasource.DriveRequest

func (*DatasourceDriveRequest) ContextValidate

func (m *DatasourceDriveRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource drive request based on the context it is used

func (*DatasourceDriveRequest) MarshalBinary

func (m *DatasourceDriveRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceDriveRequest) UnmarshalBinary

func (m *DatasourceDriveRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceDriveRequest) Validate

func (m *DatasourceDriveRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource drive request
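
A construction sketch for a Drive request combining the required fields with a couple of the Drive-specific options above; the import path, ptr helper, scope, and file paths are assumptions for illustration only.

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourapp/models" // placeholder import path for this package
)

func ptr[T any](v T) *T { return &v }

func main() {
	req := models.DatasourceDriveRequest{
		DeleteAfterExport: ptr(false),
		RescanInterval:    ptr("168h"),
		SourcePath:        ptr("shared-folder"),
		// Drive-specific optional settings; values are illustrative.
		Scope:              "drive.readonly",
		ServiceAccountFile: "/etc/creds/service-account.json",
	}

	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation error:", err)
	}
	if err := req.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validation error:", err)
	}
}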

type DatasourceDropboxRequest

type DatasourceDropboxRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Max time to wait for a batch to finish committing
	BatchCommitTimeout *string `json:"batchCommitTimeout,omitempty"`

	// Upload file batching sync|async|off.
	BatchMode *string `json:"batchMode,omitempty"`

	// Max number of files in upload batch.
	BatchSize *string `json:"batchSize,omitempty"`

	// Max time to allow an idle upload batch before uploading.
	BatchTimeout *string `json:"batchTimeout,omitempty"`

	// Upload chunk size (< 150Mi).
	ChunkSize *string `json:"chunkSize,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Impersonate this user when using a business account.
	Impersonate string `json:"impersonate,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Instructs rclone to work on individual shared files.
	SharedFiles *string `json:"sharedFiles,omitempty"`

	// Instructs rclone to work on shared folders.
	SharedFolders *string `json:"sharedFolders,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceDropboxRequest datasource dropbox request

swagger:model datasource.DropboxRequest

func (*DatasourceDropboxRequest) ContextValidate

func (m *DatasourceDropboxRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource dropbox request based on the context it is used

func (*DatasourceDropboxRequest) MarshalBinary

func (m *DatasourceDropboxRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceDropboxRequest) UnmarshalBinary

func (m *DatasourceDropboxRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceDropboxRequest) Validate

func (m *DatasourceDropboxRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource dropbox request

type DatasourceFichierRequest

type DatasourceFichierRequest struct {

	// Your API Key, get it from https://1fichier.com/console/params.pl.
	APIKey string `json:"apiKey,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// If you want to download a shared file that is password protected, add this parameter.
	FilePassword string `json:"filePassword,omitempty"`

	// If you want to list the files in a shared folder that is password protected, add this parameter.
	FolderPassword string `json:"folderPassword,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// If you want to download a shared folder, add this parameter.
	SharedFolder string `json:"sharedFolder,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourceFichierRequest datasource fichier request

swagger:model datasource.FichierRequest

func (*DatasourceFichierRequest) ContextValidate

func (m *DatasourceFichierRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource fichier request based on the context it is used

func (*DatasourceFichierRequest) MarshalBinary

func (m *DatasourceFichierRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceFichierRequest) UnmarshalBinary

func (m *DatasourceFichierRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceFichierRequest) Validate

func (m *DatasourceFichierRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource fichier request

type DatasourceFileInfo

type DatasourceFileInfo struct {

	// Path to the new file, relative to the source
	Path string `json:"path,omitempty"`
}

DatasourceFileInfo datasource file info

swagger:model datasource.FileInfo

func (*DatasourceFileInfo) ContextValidate

func (m *DatasourceFileInfo) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource file info based on context it is used

func (*DatasourceFileInfo) MarshalBinary

func (m *DatasourceFileInfo) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceFileInfo) UnmarshalBinary

func (m *DatasourceFileInfo) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceFileInfo) Validate

func (m *DatasourceFileInfo) Validate(formats strfmt.Registry) error

Validate validates this datasource file info
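
DatasourceFileInfo typically arrives in a response body; a short decoding sketch with a placeholder import path and an illustrative payload:

package main

import (
	"fmt"
	"log"

	"example.com/yourapp/models" // placeholder import path for this package
)

func main() {
	// A FileInfo as it might appear in a response body.
	body := []byte(`{"path":"photos/2023/img_0001.jpg"}`)

	var info models.DatasourceFileInfo
	if err := info.UnmarshalBinary(body); err != nil {
		log.Fatal(err)
	}
	fmt.Println(info.Path)
}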

type DatasourceFilefabricRequest

type DatasourceFilefabricRequest struct {

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Permanent Authentication Token.
	PermanentToken string `json:"permanentToken,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// ID of the root folder.
	RootFolderID string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Session Token.
	Token string `json:"token,omitempty"`

	// Token expiry time.
	TokenExpiry string `json:"tokenExpiry,omitempty"`

	// URL of the Enterprise File Fabric to connect to.
	URL string `json:"url,omitempty"`

	// Version read from the file fabric.
	Version string `json:"version,omitempty"`
}

DatasourceFilefabricRequest datasource filefabric request

swagger:model datasource.FilefabricRequest

func (*DatasourceFilefabricRequest) ContextValidate

func (m *DatasourceFilefabricRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource filefabric request based on the context it is used

func (*DatasourceFilefabricRequest) MarshalBinary

func (m *DatasourceFilefabricRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceFilefabricRequest) UnmarshalBinary

func (m *DatasourceFilefabricRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceFilefabricRequest) Validate

func (m *DatasourceFilefabricRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource filefabric request

type DatasourceFtpRequest

type DatasourceFtpRequest struct {

	// Allow asking for FTP password when needed.
	AskPassword *string `json:"askPassword,omitempty"`

	// Maximum time to wait for a response to close.
	CloseTimeout *string `json:"closeTimeout,omitempty"`

	// Maximum number of simultaneous FTP connections, 0 for unlimited.
	Concurrency *string `json:"concurrency,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Disable using EPSV even if server advertises support.
	DisableEpsv *string `json:"disableEpsv,omitempty"`

	// Disable using MLSD even if server advertises support.
	DisableMlsd *string `json:"disableMlsd,omitempty"`

	// Disable TLS 1.3 (workaround for FTP servers with buggy TLS)
	DisableTls13 *string `json:"disableTls13,omitempty"`

	// Disable using UTF-8 even if server advertises support.
	DisableUTF8 *string `json:"disableUtf8,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Use Explicit FTPS (FTP over TLS).
	ExplicitTLS *string `json:"explicitTls,omitempty"`

	// Use LIST -a to force listing of hidden files and folders. This will disable the use of MLSD.
	ForceListHidden *string `json:"forceListHidden,omitempty"`

	// FTP host to connect to.
	Host string `json:"host,omitempty"`

	// Max time before closing idle connections.
	IdleTimeout *string `json:"idleTimeout,omitempty"`

	// Do not verify the TLS certificate of the server.
	NoCheckCertificate *string `json:"noCheckCertificate,omitempty"`

	// FTP password.
	Pass string `json:"pass,omitempty"`

	// FTP port number.
	Port *string `json:"port,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Maximum time to wait for data connection closing status.
	ShutTimeout *string `json:"shutTimeout,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Use Implicit FTPS (FTP over TLS).
	TLS *string `json:"tls,omitempty"`

	// Size of TLS session cache for all control and data connections.
	TLSCacheSize *string `json:"tlsCacheSize,omitempty"`

	// FTP username.
	User *string `json:"user,omitempty"`

	// Use MDTM to set modification time (VsFtpd quirk)
	WritingMdtm *string `json:"writingMdtm,omitempty"`
}

DatasourceFtpRequest datasource ftp request

swagger:model datasource.FtpRequest

func (*DatasourceFtpRequest) ContextValidate

func (m *DatasourceFtpRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource ftp request based on the context it is used

func (*DatasourceFtpRequest) MarshalBinary

func (m *DatasourceFtpRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceFtpRequest) UnmarshalBinary

func (m *DatasourceFtpRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceFtpRequest) Validate

func (m *DatasourceFtpRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource ftp request
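
A sketch of an FTP request, under the same placeholder-import and ptr-helper assumptions. Note that boolean and numeric backend options are carried as strings in this model (Port, ExplicitTLS, ...), so they are set as string literals; the host and credentials are illustrative.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourapp/models" // placeholder import path for this package
)

func ptr[T any](v T) *T { return &v }

func main() {
	req := models.DatasourceFtpRequest{
		DeleteAfterExport: ptr(false),
		RescanInterval:    ptr("12h"),
		SourcePath:        ptr("/pub/dataset"),
		// Connection settings; boolean/numeric options are string-typed in this model.
		Host:        "ftp.example.com",
		Port:        ptr("21"),
		User:        ptr("anonymous"),
		ExplicitTLS: ptr("true"),
	}

	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation error:", err)
		return
	}
	fmt.Println("ftp request ok")
}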

type DatasourceGcsRequest

type DatasourceGcsRequest struct {

	// Access public buckets and objects without credentials.
	Anonymous *string `json:"anonymous,omitempty"`

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Access Control List for new buckets.
	BucketACL string `json:"bucketAcl,omitempty"`

	// Access checks should use bucket-level IAM policies.
	BucketPolicyOnly *string `json:"bucketPolicyOnly,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// If set this will decompress gzip encoded objects.
	Decompress *string `json:"decompress,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for the service.
	Endpoint string `json:"endpoint,omitempty"`

	// Get GCP IAM credentials from runtime (environment variables or instance metadata if no env vars).
	EnvAuth *string `json:"envAuth,omitempty"`

	// Location for the newly created buckets.
	Location string `json:"location,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	NoCheckBucket *string `json:"noCheckBucket,omitempty"`

	// Access Control List for new objects.
	ObjectACL string `json:"objectAcl,omitempty"`

	// Project number.
	ProjectNumber string `json:"projectNumber,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Service Account Credentials JSON blob.
	ServiceAccountCredentials string `json:"serviceAccountCredentials,omitempty"`

	// Service Account Credentials JSON file path.
	ServiceAccountFile string `json:"serviceAccountFile,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// The storage class to use when storing objects in Google Cloud Storage.
	StorageClass string `json:"storageClass,omitempty"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceGcsRequest datasource gcs request

swagger:model datasource.GcsRequest

func (*DatasourceGcsRequest) ContextValidate

func (m *DatasourceGcsRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource gcs request based on the context it is used

func (*DatasourceGcsRequest) MarshalBinary

func (m *DatasourceGcsRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceGcsRequest) UnmarshalBinary

func (m *DatasourceGcsRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceGcsRequest) Validate

func (m *DatasourceGcsRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource gcs request

type DatasourceGphotosRequest

type DatasourceGphotosRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Also view and download archived media.
	IncludeArchived *string `json:"includeArchived,omitempty"`

	// Set to make the Google Photos backend read only.
	ReadOnly *string `json:"readOnly,omitempty"`

	// Set to read the size of media items.
	ReadSize *string `json:"readSize,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Limit the photos downloaded to those uploaded after the given year.
	StartYear *string `json:"startYear,omitempty"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceGphotosRequest datasource gphotos request

swagger:model datasource.GphotosRequest

func (*DatasourceGphotosRequest) ContextValidate

func (m *DatasourceGphotosRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource gphotos request based on the context it is used

func (*DatasourceGphotosRequest) MarshalBinary

func (m *DatasourceGphotosRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceGphotosRequest) UnmarshalBinary

func (m *DatasourceGphotosRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceGphotosRequest) Validate

func (m *DatasourceGphotosRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource gphotos request

type DatasourceHTTPRequest

type DatasourceHTTPRequest struct {

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Set HTTP headers for all transactions.
	Headers string `json:"headers,omitempty"`

	// Don't use HEAD requests.
	NoHead *string `json:"noHead,omitempty"`

	// Set this if the site doesn't end directories with /.
	NoSlash *string `json:"noSlash,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// URL of HTTP host to connect to.
	URL string `json:"url,omitempty"`
}

DatasourceHTTPRequest datasource Http request

swagger:model datasource.HttpRequest

func (*DatasourceHTTPRequest) ContextValidate

func (m *DatasourceHTTPRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource Http request based on the context it is used

func (*DatasourceHTTPRequest) MarshalBinary

func (m *DatasourceHTTPRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceHTTPRequest) UnmarshalBinary

func (m *DatasourceHTTPRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceHTTPRequest) Validate

func (m *DatasourceHTTPRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource Http request
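
A sketch of serializing an HTTP datasource request; the URL, import path, and ptr helper are illustrative assumptions, not values from this package.

package main

import (
	"fmt"
	"log"

	"example.com/yourapp/models" // placeholder import path for this package
)

func ptr[T any](v T) *T { return &v }

func main() {
	req := models.DatasourceHTTPRequest{
		DeleteAfterExport: ptr(false),
		RescanInterval:    ptr("6h"),
		SourcePath:        ptr("/"),
		URL:               "https://files.example.com/exports/", // illustrative listing URL
		NoSlash:           ptr("false"),
	}

	b, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(b))
}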

type DatasourceHdfsRequest

type DatasourceHdfsRequest struct {

	// Kerberos data transfer protection: authentication|integrity|privacy.
	DataTransferProtection string `json:"dataTransferProtection,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Hadoop name node and port.
	Namenode string `json:"namenode,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Kerberos service principal name for the namenode.
	ServicePrincipalName string `json:"servicePrincipalName,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Hadoop user name.
	Username string `json:"username,omitempty"`
}

DatasourceHdfsRequest datasource hdfs request

swagger:model datasource.HdfsRequest

func (*DatasourceHdfsRequest) ContextValidate

func (m *DatasourceHdfsRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource hdfs request based on the context it is used

func (*DatasourceHdfsRequest) MarshalBinary

func (m *DatasourceHdfsRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceHdfsRequest) UnmarshalBinary

func (m *DatasourceHdfsRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceHdfsRequest) Validate

func (m *DatasourceHdfsRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource hdfs request

type DatasourceHidriveRequest

type DatasourceHidriveRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Chunksize for chunked uploads.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Do not fetch number of objects in directories unless it is absolutely necessary.
	DisableFetchingMemberCount *string `json:"disableFetchingMemberCount,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for the service.
	Endpoint *string `json:"endpoint,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// The root/parent folder for all paths.
	RootPrefix *string `json:"rootPrefix,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Access permissions that rclone should use when requesting access from HiDrive.
	ScopeAccess *string `json:"scopeAccess,omitempty"`

	// User-level that rclone should use when requesting access from HiDrive.
	ScopeRole *string `json:"scopeRole,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`

	// Concurrency for chunked uploads.
	UploadConcurrency *string `json:"uploadConcurrency,omitempty"`

	// Cutoff/Threshold for chunked uploads.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`
}

DatasourceHidriveRequest datasource hidrive request

swagger:model datasource.HidriveRequest

func (*DatasourceHidriveRequest) ContextValidate

func (m *DatasourceHidriveRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource hidrive request based on the context it is used

func (*DatasourceHidriveRequest) MarshalBinary

func (m *DatasourceHidriveRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceHidriveRequest) UnmarshalBinary

func (m *DatasourceHidriveRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceHidriveRequest) Validate

func (m *DatasourceHidriveRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource hidrive request

type DatasourceInternetarchiveRequest

type DatasourceInternetarchiveRequest struct {

	// IAS3 Access Key.
	AccessKeyID string `json:"accessKeyId,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Don't ask the server to test against MD5 checksum calculated by rclone.
	DisableChecksum *string `json:"disableChecksum,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// IAS3 Endpoint.
	Endpoint *string `json:"endpoint,omitempty"`

	// Host of InternetArchive Frontend.
	FrontEndpoint *string `json:"frontEndpoint,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// IAS3 Secret Key (password).
	SecretAccessKey string `json:"secretAccessKey,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Timeout for waiting for the server's processing tasks (specifically archive and book_op) to finish.
	WaitArchive *string `json:"waitArchive,omitempty"`
}

DatasourceInternetarchiveRequest datasource internetarchive request

swagger:model datasource.InternetarchiveRequest

func (*DatasourceInternetarchiveRequest) ContextValidate

func (m *DatasourceInternetarchiveRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource internetarchive request based on the context it is used

func (*DatasourceInternetarchiveRequest) MarshalBinary

func (m *DatasourceInternetarchiveRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceInternetarchiveRequest) UnmarshalBinary

func (m *DatasourceInternetarchiveRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceInternetarchiveRequest) Validate

func (m *DatasourceInternetarchiveRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource internetarchive request

type DatasourceJottacloudRequest

type DatasourceJottacloudRequest struct {

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	HardDelete *string `json:"hardDelete,omitempty"`

	// Files bigger than this will be cached on disk to calculate the MD5 if required.
	Md5MemoryLimit *string `json:"md5MemoryLimit,omitempty"`

	// Avoid server-side versioning by deleting and recreating files instead of overwriting them.
	NoVersions *string `json:"noVersions,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Only show files that are in the trash.
	TrashedOnly *string `json:"trashedOnly,omitempty"`

	// Files bigger than this can be resumed if the upload fails.
	UploadResumeLimit *string `json:"uploadResumeLimit,omitempty"`
}

DatasourceJottacloudRequest datasource jottacloud request

swagger:model datasource.JottacloudRequest

func (*DatasourceJottacloudRequest) ContextValidate

func (m *DatasourceJottacloudRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource jottacloud request based on the context it is used

func (*DatasourceJottacloudRequest) MarshalBinary

func (m *DatasourceJottacloudRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceJottacloudRequest) UnmarshalBinary

func (m *DatasourceJottacloudRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceJottacloudRequest) Validate

func (m *DatasourceJottacloudRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource jottacloud request

type DatasourceKoofrRequest

type DatasourceKoofrRequest struct {

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// The Koofr API endpoint to use.
	Endpoint string `json:"endpoint,omitempty"`

	// Mount ID of the mount to use.
	Mountid string `json:"mountid,omitempty"`

	// Your password for rclone (generate one at https://app.koofr.net/app/admin/preferences/password).
	Password string `json:"password,omitempty"`

	// Choose your storage provider.
	Provider string `json:"provider,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Whether the backend supports setting modification time.
	Setmtime *string `json:"setmtime,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Your user name.
	User string `json:"user,omitempty"`
}

DatasourceKoofrRequest datasource koofr request

swagger:model datasource.KoofrRequest

func (*DatasourceKoofrRequest) ContextValidate

func (m *DatasourceKoofrRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource koofr request based on the context it is used

func (*DatasourceKoofrRequest) MarshalBinary

func (m *DatasourceKoofrRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceKoofrRequest) UnmarshalBinary

func (m *DatasourceKoofrRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceKoofrRequest) Validate

func (m *DatasourceKoofrRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource koofr request

type DatasourceLocalRequest

type DatasourceLocalRequest struct {

	// Force the filesystem to report itself as case insensitive.
	CaseInsensitive *string `json:"caseInsensitive,omitempty"`

	// Force the filesystem to report itself as case sensitive.
	CaseSensitive *string `json:"caseSensitive,omitempty"`

	// Follow symlinks and copy the pointed to item.
	CopyLinks *string `json:"copyLinks,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Translate symlinks to/from regular files with a '.rclonelink' extension.
	Links *string `json:"links,omitempty"`

	// Don't check to see if the files change during upload.
	NoCheckUpdated *string `json:"noCheckUpdated,omitempty"`

	// Disable preallocation of disk space for transferred files.
	NoPreallocate *string `json:"noPreallocate,omitempty"`

	// Disable setting modtime.
	NoSetModtime *string `json:"noSetModtime,omitempty"`

	// Disable sparse files for multi-thread downloads.
	NoSparse *string `json:"noSparse,omitempty"`

	// Disable UNC (long path names) conversion on Windows.
	Nounc *string `json:"nounc,omitempty"`

	// Don't cross filesystem boundaries (unix/macOS only).
	OneFileSystem *string `json:"oneFileSystem,omitempty"`

	// Automatically rescan the source directory when this interval has passed from last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Don't warn about skipped symlinks.
	SkipLinks *string `json:"skipLinks,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Apply unicode NFC normalization to paths and filenames.
	UnicodeNormalization *string `json:"unicodeNormalization,omitempty"`

	// Assume the Stat size of links is zero (and read them instead) (deprecated).
	ZeroSizeLinks *string `json:"zeroSizeLinks,omitempty"`
}

DatasourceLocalRequest datasource local request

swagger:model datasource.LocalRequest

func (*DatasourceLocalRequest) ContextValidate

func (m *DatasourceLocalRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource local request based on the context it is used

func (*DatasourceLocalRequest) MarshalBinary

func (m *DatasourceLocalRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceLocalRequest) UnmarshalBinary

func (m *DatasourceLocalRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceLocalRequest) Validate

func (m *DatasourceLocalRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource local request
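
Because the required properties of these generated models are pointer-typed, it can help to see how a request is built and checked in practice. The following is a minimal sketch, not taken from this package's own examples: the import path is hypothetical, the swag pointer helpers and the "1h" interval value are assumptions, and ScanningState (also required, embedding ModelWorkState defined elsewhere in this package) is left at its zero value for brevity.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	"example.com/yourapp/client/models" // hypothetical import path: use this package's real module path
)

func main() {
	// Required properties are pointers so that "unset" and "zero" can be told apart;
	// swag.Bool and swag.String build those pointers inline.
	req := &models.DatasourceLocalRequest{
		DeleteAfterExport: swag.Bool(false),
		RescanInterval:    swag.String("1h"), // assumed interval format, not specified in this doc
		SourcePath:        swag.String("/data/source"),
		// ScanningState (also required) embeds ModelWorkState, which is defined
		// elsewhere in this package and is left at its zero value in this sketch.
	}

	// Optional backend flags are plain *string values and stay omitted unless set.
	req.OneFileSystem = swag.String("true")

	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation failed:", err)
		return
	}
	fmt.Println("request validated")
}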

type DatasourceMailruRequest

type DatasourceMailruRequest struct {

	// What should copy do if file checksum is mismatched or invalid.
	CheckHash *string `json:"checkHash,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Password.
	Pass string `json:"pass,omitempty"`

	// Comma separated list of internal maintenance flags.
	Quirks string `json:"quirks,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Skip full upload if there is another file with same data hash.
	SpeedupEnable *string `json:"speedupEnable,omitempty"`

	// Comma separated list of file name patterns eligible for speedup (put by hash).
	SpeedupFilePatterns *string `json:"speedupFilePatterns,omitempty"`

	// This option allows you to disable speedup (put by hash) for large files.
	SpeedupMaxDisk *string `json:"speedupMaxDisk,omitempty"`

	// Files larger than this size will always be hashed on disk.
	SpeedupMaxMemory *string `json:"speedupMaxMemory,omitempty"`

	// User name (usually email).
	User string `json:"user,omitempty"`

	// HTTP user agent used internally by client.
	UserAgent string `json:"userAgent,omitempty"`
}

DatasourceMailruRequest datasource mailru request

swagger:model datasource.MailruRequest

func (*DatasourceMailruRequest) ContextValidate

func (m *DatasourceMailruRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource mailru request based on the context it is used in

func (*DatasourceMailruRequest) MarshalBinary

func (m *DatasourceMailruRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceMailruRequest) UnmarshalBinary

func (m *DatasourceMailruRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceMailruRequest) Validate

func (m *DatasourceMailruRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource mailru request

type DatasourceMegaRequest

type DatasourceMegaRequest struct {

	// Output more debug from Mega.
	Debug *string `json:"debug,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	HardDelete *string `json:"hardDelete,omitempty"`

	// Password.
	Pass string `json:"pass,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Use HTTPS for transfers.
	UseHTTPS *string `json:"useHttps,omitempty"`

	// User name.
	User string `json:"user,omitempty"`
}

DatasourceMegaRequest datasource mega request

swagger:model datasource.MegaRequest

func (*DatasourceMegaRequest) ContextValidate

func (m *DatasourceMegaRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource mega request based on the context it is used in

func (*DatasourceMegaRequest) MarshalBinary

func (m *DatasourceMegaRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceMegaRequest) UnmarshalBinary

func (m *DatasourceMegaRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceMegaRequest) Validate

func (m *DatasourceMegaRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource mega request

type DatasourceNetstorageRequest

type DatasourceNetstorageRequest struct {

	// Set the NetStorage account name
	Account string `json:"account,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Domain+path of NetStorage host to connect to.
	Host string `json:"host,omitempty"`

	// Select between HTTP or HTTPS protocol.
	Protocol *string `json:"protocol,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Set the NetStorage account secret/G2O key for authentication.
	Secret string `json:"secret,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourceNetstorageRequest datasource netstorage request

swagger:model datasource.NetstorageRequest

func (*DatasourceNetstorageRequest) ContextValidate

func (m *DatasourceNetstorageRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource netstorage request based on the context it is used in

func (*DatasourceNetstorageRequest) MarshalBinary

func (m *DatasourceNetstorageRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceNetstorageRequest) UnmarshalBinary

func (m *DatasourceNetstorageRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceNetstorageRequest) Validate

func (m *DatasourceNetstorageRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource netstorage request

type DatasourceOnedriveRequest

type DatasourceOnedriveRequest struct {

	// Set scopes to be requested by rclone.
	AccessScopes *string `json:"accessScopes,omitempty"`

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// Chunk size to upload files with - must be multiple of 320k (327,680 bytes).
	ChunkSize *string `json:"chunkSize,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Disable the request for Sites.Read.All permission.
	DisableSitePermission *string `json:"disableSitePermission,omitempty"`

	// The ID of the drive to use.
	DriveID string `json:"driveId,omitempty"`

	// The type of the drive (personal | business | documentLibrary).
	DriveType string `json:"driveType,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Set to make OneNote files show up in directory listings.
	ExposeOnenoteFiles *string `json:"exposeOnenoteFiles,omitempty"`

	// Specify the hash in use for the backend.
	HashType *string `json:"hashType,omitempty"`

	// Set the password for links created by the link command.
	LinkPassword string `json:"linkPassword,omitempty"`

	// Set the scope of the links created by the link command.
	LinkScope *string `json:"linkScope,omitempty"`

	// Set the type of the links created by the link command.
	LinkType *string `json:"linkType,omitempty"`

	// Size of listing chunk.
	ListChunk *string `json:"listChunk,omitempty"`

	// Remove all versions on modifying operations.
	NoVersions *string `json:"noVersions,omitempty"`

	// Choose national cloud region for OneDrive.
	Region *string `json:"region,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// ID of the root folder.
	RootFolderID string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Allow server-side operations (e.g. copy) to work across different onedrive configs.
	ServerSideAcrossConfigs *string `json:"serverSideAcrossConfigs,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceOnedriveRequest datasource onedrive request

swagger:model datasource.OnedriveRequest

func (*DatasourceOnedriveRequest) ContextValidate

func (m *DatasourceOnedriveRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource onedrive request based on the context it is used in

func (*DatasourceOnedriveRequest) MarshalBinary

func (m *DatasourceOnedriveRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceOnedriveRequest) UnmarshalBinary

func (m *DatasourceOnedriveRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceOnedriveRequest) Validate

func (m *DatasourceOnedriveRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource onedrive request

type DatasourceOosRequest

type DatasourceOosRequest struct {

	// Chunk size to use for uploading.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Object storage compartment OCID
	Compartment string `json:"compartment,omitempty"`

	// Path to OCI config file
	ConfigFile *string `json:"configFile,omitempty"`

	// Profile name inside the oci config file
	ConfigProfile *string `json:"configProfile,omitempty"`

	// Cutoff for switching to multipart copy.
	CopyCutoff *string `json:"copyCutoff,omitempty"`

	// Timeout for copy.
	CopyTimeout *string `json:"copyTimeout,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Don't store MD5 checksum with object metadata.
	DisableChecksum *string `json:"disableChecksum,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for Object storage API.
	Endpoint string `json:"endpoint,omitempty"`

	// If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.
	LeavePartsOnError *string `json:"leavePartsOnError,omitempty"`

	// Object storage namespace
	Namespace string `json:"namespace,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	NoCheckBucket *string `json:"noCheckBucket,omitempty"`

	// Choose your Auth Provider
	Provider *string `json:"provider,omitempty"`

	// Object storage Region
	Region string `json:"region,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// If using SSE-C, the optional header that specifies "AES256" as the encryption algorithm.
	SseCustomerAlgorithm string `json:"sseCustomerAlgorithm,omitempty"`

	// To use SSE-C, the optional header that specifies the base64-encoded 256-bit encryption key to use to encrypt or decrypt the data.
	SseCustomerKey string `json:"sseCustomerKey,omitempty"`

	// To use SSE-C, a file containing the base64-encoded string of the AES-256 encryption key associated with the object.
	SseCustomerKeyFile string `json:"sseCustomerKeyFile,omitempty"`

	// If using SSE-C, the optional header that specifies the base64-encoded SHA256 hash of the encryption key.
	SseCustomerKeySha256 string `json:"sseCustomerKeySha256,omitempty"`

	// If using your own master key in vault, this header specifies the OCID of that master encryption key.
	SseKmsKeyID string `json:"sseKmsKeyId,omitempty"`

	// The storage class to use when storing new objects in storage. https://docs.oracle.com/en-us/iaas/Content/Object/Concepts/understandingstoragetiers.htm
	StorageTier *string `json:"storageTier,omitempty"`

	// Concurrency for multipart uploads.
	UploadConcurrency *string `json:"uploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`
}

DatasourceOosRequest datasource oos request

swagger:model datasource.OosRequest

func (*DatasourceOosRequest) ContextValidate

func (m *DatasourceOosRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource oos request based on the context it is used in

func (*DatasourceOosRequest) MarshalBinary

func (m *DatasourceOosRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceOosRequest) UnmarshalBinary

func (m *DatasourceOosRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceOosRequest) Validate

func (m *DatasourceOosRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource oos request

type DatasourceOpendriveRequest

type DatasourceOpendriveRequest struct {

	// Files will be uploaded in chunks this size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Password.
	Password string `json:"password,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Username.
	Username string `json:"username,omitempty"`
}

DatasourceOpendriveRequest datasource opendrive request

swagger:model datasource.OpendriveRequest

func (*DatasourceOpendriveRequest) ContextValidate

func (m *DatasourceOpendriveRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource opendrive request based on the context it is used in

func (*DatasourceOpendriveRequest) MarshalBinary

func (m *DatasourceOpendriveRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceOpendriveRequest) UnmarshalBinary

func (m *DatasourceOpendriveRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceOpendriveRequest) Validate

func (m *DatasourceOpendriveRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource opendrive request

type DatasourcePackJobsByState

type DatasourcePackJobsByState struct {

	// number of pack jobs in this state
	Count int64 `json:"count,omitempty"`

	// the state of the pack jobs
	State struct {
		ModelWorkState
	} `json:"state,omitempty"`
}

DatasourcePackJobsByState datasource pack jobs by state

swagger:model datasource.PackJobsByState

func (*DatasourcePackJobsByState) ContextValidate

func (m *DatasourcePackJobsByState) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource pack jobs by state based on the context it is used in

func (*DatasourcePackJobsByState) MarshalBinary

func (m *DatasourcePackJobsByState) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourcePackJobsByState) UnmarshalBinary

func (m *DatasourcePackJobsByState) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourcePackJobsByState) Validate

func (m *DatasourcePackJobsByState) Validate(formats strfmt.Registry) error

Validate validates this datasource pack jobs by state
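
ContextValidate complements Validate by running the context-aware rules, which for this model are expected to mostly concern the embedded ModelWorkState carried in the State field. A hedged sketch follows; the import path is hypothetical, and ModelWorkState is defined elsewhere in this package.

package main

import (
	"context"
	"fmt"

	"github.com/go-openapi/strfmt"

	"example.com/yourapp/client/models" // hypothetical import path: use this package's real module path
)

func main() {
	byState := &models.DatasourcePackJobsByState{Count: 3}

	// ContextValidate applies the context-dependent rules; for this model that is
	// expected to include the nested ModelWorkState inside the State field.
	if err := byState.ContextValidate(context.Background(), strfmt.Default); err != nil {
		fmt.Println("context validation failed:", err)
		return
	}
	fmt.Println("pack jobs in this state:", byState.Count)
}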

type DatasourcePcloudRequest

type DatasourcePcloudRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Hostname to connect to.
	Hostname *string `json:"hostname,omitempty"`

	// Your pcloud password.
	Password string `json:"password,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Fill in for rclone to use a non root folder as its starting point.
	RootFolderID *string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`

	// Your pcloud username.
	Username string `json:"username,omitempty"`
}

DatasourcePcloudRequest datasource pcloud request

swagger:model datasource.PcloudRequest

func (*DatasourcePcloudRequest) ContextValidate

func (m *DatasourcePcloudRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource pcloud request based on the context it is used in

func (*DatasourcePcloudRequest) MarshalBinary

func (m *DatasourcePcloudRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourcePcloudRequest) UnmarshalBinary

func (m *DatasourcePcloudRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourcePcloudRequest) Validate

func (m *DatasourcePcloudRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource pcloud request

type DatasourcePremiumizemeRequest

type DatasourcePremiumizemeRequest struct {

	// API Key.
	APIKey string `json:"apiKey,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourcePremiumizemeRequest datasource premiumizeme request

swagger:model datasource.PremiumizemeRequest

func (*DatasourcePremiumizemeRequest) ContextValidate

func (m *DatasourcePremiumizemeRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource premiumizeme request based on the context it is used in

func (*DatasourcePremiumizemeRequest) MarshalBinary

func (m *DatasourcePremiumizemeRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourcePremiumizemeRequest) UnmarshalBinary

func (m *DatasourcePremiumizemeRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourcePremiumizemeRequest) Validate

func (m *DatasourcePremiumizemeRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource premiumizeme request

type DatasourcePutioRequest

type DatasourcePutioRequest struct {

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourcePutioRequest datasource putio request

swagger:model datasource.PutioRequest

func (*DatasourcePutioRequest) ContextValidate

func (m *DatasourcePutioRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource putio request based on the context it is used in

func (*DatasourcePutioRequest) MarshalBinary

func (m *DatasourcePutioRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourcePutioRequest) UnmarshalBinary

func (m *DatasourcePutioRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourcePutioRequest) Validate

func (m *DatasourcePutioRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource putio request

type DatasourceQingstorRequest

type DatasourceQingstorRequest struct {

	// QingStor Access Key ID.
	AccessKeyID string `json:"accessKeyId,omitempty"`

	// Chunk size to use for uploading.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Number of connection retries.
	ConnectionRetries *string `json:"connectionRetries,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Enter an endpoint URL to connect to the QingStor API.
	Endpoint string `json:"endpoint,omitempty"`

	// Get QingStor credentials from runtime.
	EnvAuth *string `json:"envAuth,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// QingStor Secret Access Key (password).
	SecretAccessKey string `json:"secretAccessKey,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Concurrency for multipart uploads.
	UploadConcurrency *string `json:"uploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`

	// Zone to connect to.
	Zone string `json:"zone,omitempty"`
}

DatasourceQingstorRequest datasource qingstor request

swagger:model datasource.QingstorRequest

func (*DatasourceQingstorRequest) ContextValidate

func (m *DatasourceQingstorRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource qingstor request based on the context it is used in

func (*DatasourceQingstorRequest) MarshalBinary

func (m *DatasourceQingstorRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceQingstorRequest) UnmarshalBinary

func (m *DatasourceQingstorRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceQingstorRequest) Validate

func (m *DatasourceQingstorRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource qingstor request

type DatasourceRepackRequest

type DatasourceRepackRequest struct {

	// pack job Id
	PackJobID int64 `json:"packJobId,omitempty"`
}

DatasourceRepackRequest datasource repack request

swagger:model datasource.RepackRequest

func (*DatasourceRepackRequest) ContextValidate

func (m *DatasourceRepackRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource repack request based on the context it is used in

func (*DatasourceRepackRequest) MarshalBinary

func (m *DatasourceRepackRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceRepackRequest) UnmarshalBinary

func (m *DatasourceRepackRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceRepackRequest) Validate

func (m *DatasourceRepackRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource repack request
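
Since the struct tags use camelCase keys, an incoming JSON body maps directly onto the model via UnmarshalBinary, which in these generated types wraps JSON decoding. A small illustrative sketch with an assumed import path:

package main

import (
	"fmt"

	"example.com/yourapp/client/models" // hypothetical import path: use this package's real module path
)

func main() {
	// Keys are the camelCase names declared in the struct's json tags.
	body := []byte(`{"packJobId": 42}`)

	var req models.DatasourceRepackRequest
	if err := req.UnmarshalBinary(body); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println("repacking pack job:", req.PackJobID)
}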

type DatasourceS3Request

type DatasourceS3Request struct {

	// AWS Access Key ID.
	AccessKeyID string `json:"accessKeyId,omitempty"`

	// Canned ACL used when creating buckets and storing or copying objects.
	ACL string `json:"acl,omitempty"`

	// Canned ACL used when creating buckets.
	BucketACL string `json:"bucketAcl,omitempty"`

	// Chunk size to use for uploading.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Cutoff for switching to multipart copy.
	CopyCutoff *string `json:"copyCutoff,omitempty"`

	// If set this will decompress gzip encoded objects.
	Decompress *string `json:"decompress,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Don't store MD5 checksum with object metadata.
	DisableChecksum *string `json:"disableChecksum,omitempty"`

	// Disable usage of http2 for S3 backends.
	DisableHttp2 *string `json:"disableHttp2,omitempty"`

	// Custom endpoint for downloads.
	DownloadURL string `json:"downloadUrl,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for S3 API.
	Endpoint string `json:"endpoint,omitempty"`

	// Get AWS credentials from runtime (environment variables or EC2/ECS meta data if no env vars).
	EnvAuth *string `json:"envAuth,omitempty"`

	// If true use path style access; if false use virtual hosted style.
	ForcePathStyle *string `json:"forcePathStyle,omitempty"`

	// If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.
	LeavePartsOnError *string `json:"leavePartsOnError,omitempty"`

	// Size of listing chunk (response list for each ListObject S3 request).
	ListChunk *string `json:"listChunk,omitempty"`

	// Whether to url encode listings: true/false/unset
	ListURLEncode *string `json:"listUrlEncode,omitempty"`

	// Version of ListObjects to use: 1, 2, or 0 for auto.
	ListVersion *string `json:"listVersion,omitempty"`

	// Location constraint - must be set to match the Region.
	LocationConstraint string `json:"locationConstraint,omitempty"`

	// Maximum number of parts in a multipart upload.
	MaxUploadParts *string `json:"maxUploadParts,omitempty"`

	// How often internal memory buffer pools will be flushed.
	MemoryPoolFlushTime *string `json:"memoryPoolFlushTime,omitempty"`

	// Whether to use mmap buffers in internal memory pool.
	MemoryPoolUseMmap *string `json:"memoryPoolUseMmap,omitempty"`

	// Set this if the backend might gzip objects.
	MightGzip *string `json:"mightGzip,omitempty"`

	// If set, don't attempt to check the bucket exists or create it.
	NoCheckBucket *string `json:"noCheckBucket,omitempty"`

	// If set, don't HEAD uploaded objects to check integrity.
	NoHead *string `json:"noHead,omitempty"`

	// If set, do not do HEAD before GET when getting objects.
	NoHeadObject *string `json:"noHeadObject,omitempty"`

	// Suppress setting and reading of system metadata
	NoSystemMetadata *string `json:"noSystemMetadata,omitempty"`

	// Profile to use in the shared credentials file.
	Profile string `json:"profile,omitempty"`

	// Choose your S3 provider.
	Provider string `json:"provider,omitempty"`

	// Region to connect to.
	Region string `json:"region,omitempty"`

	// Enables requester pays option when interacting with S3 bucket.
	RequesterPays *string `json:"requesterPays,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// AWS Secret Access Key (password).
	SecretAccessKey string `json:"secretAccessKey,omitempty"`

	// The server-side encryption algorithm used when storing this object in S3.
	ServerSideEncryption string `json:"serverSideEncryption,omitempty"`

	// An AWS session token.
	SessionToken string `json:"sessionToken,omitempty"`

	// Path to the shared credentials file.
	SharedCredentialsFile string `json:"sharedCredentialsFile,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// If using SSE-C, the server-side encryption algorithm used when storing this object in S3.
	SseCustomerAlgorithm string `json:"sseCustomerAlgorithm,omitempty"`

	// To use SSE-C you may provide the secret encryption key used to encrypt/decrypt your data.
	SseCustomerKey string `json:"sseCustomerKey,omitempty"`

	// If using SSE-C you must provide the secret encryption key encoded in base64 format to encrypt/decrypt your data.
	SseCustomerKeyBase64 string `json:"sseCustomerKeyBase64,omitempty"`

	// If using SSE-C you may provide the secret encryption key MD5 checksum (optional).
	SseCustomerKeyMd5 string `json:"sseCustomerKeyMd5,omitempty"`

	// If using KMS ID you must provide the ARN of Key.
	SseKmsKeyID string `json:"sseKmsKeyId,omitempty"`

	// The storage class to use when storing new objects in S3.
	StorageClass string `json:"storageClass,omitempty"`

	// Endpoint for STS.
	StsEndpoint string `json:"stsEndpoint,omitempty"`

	// Concurrency for multipart uploads.
	UploadConcurrency *string `json:"uploadConcurrency,omitempty"`

	// Cutoff for switching to chunked upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`

	// If true use the AWS S3 accelerated endpoint.
	UseAccelerateEndpoint *string `json:"useAccelerateEndpoint,omitempty"`

	// Whether to use ETag in multipart uploads for verification
	UseMultipartEtag *string `json:"useMultipartEtag,omitempty"`

	// Whether to use a presigned request or PutObject for single part uploads
	UsePresignedRequest *string `json:"usePresignedRequest,omitempty"`

	// If true use v2 authentication.
	V2Auth *string `json:"v2Auth,omitempty"`

	// Show file versions as they were at the specified time.
	VersionAt *string `json:"versionAt,omitempty"`

	// Include old versions in directory listings.
	Versions *string `json:"versions,omitempty"`
}

DatasourceS3Request datasource s3 request

swagger:model datasource.S3Request

func (*DatasourceS3Request) ContextValidate

func (m *DatasourceS3Request) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource s3 request based on the context it is used in

func (*DatasourceS3Request) MarshalBinary

func (m *DatasourceS3Request) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceS3Request) UnmarshalBinary

func (m *DatasourceS3Request) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceS3Request) Validate

func (m *DatasourceS3Request) Validate(formats strfmt.Registry) error

Validate validates this datasource s3 request
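
Most of the S3 options above are optional and tagged omitempty, so only the fields that are actually set end up in the encoded payload. The round trip below is a sketch under the same assumptions as the earlier examples: a hypothetical import path, an assumed interval format, and placeholder credentials.

package main

import (
	"fmt"

	"github.com/go-openapi/swag"

	"example.com/yourapp/client/models" // hypothetical import path: use this package's real module path
)

func main() {
	req := &models.DatasourceS3Request{
		AccessKeyID:       "EXAMPLEKEY", // placeholder credential
		Region:            "us-east-1",
		DeleteAfterExport: swag.Bool(false),
		RescanInterval:    swag.String("24h"), // assumed interval format
		SourcePath:        swag.String("my-bucket/prefix"),
	}

	// MarshalBinary produces the JSON wire form; unset omitempty fields are left out.
	raw, err := req.MarshalBinary()
	if err != nil {
		fmt.Println("encode failed:", err)
		return
	}

	var decoded models.DatasourceS3Request
	if err := decoded.UnmarshalBinary(raw); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	fmt.Println(string(raw))
}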

type DatasourceSeafileRequest

type DatasourceSeafileRequest struct {

	// Two-factor authentication ('true' if the account has 2FA enabled).
	Nr2fa *string `json:"2fa,omitempty"`

	// Authentication token.
	AuthToken string `json:"authToken,omitempty"`

	// Should rclone create a library if it doesn't exist.
	CreateLibrary *string `json:"createLibrary,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Name of the library.
	Library string `json:"library,omitempty"`

	// Library password (for encrypted libraries only).
	LibraryKey string `json:"libraryKey,omitempty"`

	// Password.
	Pass string `json:"pass,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// URL of seafile host to connect to.
	URL string `json:"url,omitempty"`

	// User name (usually email address).
	User string `json:"user,omitempty"`
}

DatasourceSeafileRequest datasource seafile request

swagger:model datasource.SeafileRequest

func (*DatasourceSeafileRequest) ContextValidate

func (m *DatasourceSeafileRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource seafile request based on the context it is used in

func (*DatasourceSeafileRequest) MarshalBinary

func (m *DatasourceSeafileRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSeafileRequest) UnmarshalBinary

func (m *DatasourceSeafileRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSeafileRequest) Validate

func (m *DatasourceSeafileRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource seafile request

type DatasourceSftpRequest

type DatasourceSftpRequest struct {

	// Allow asking for SFTP password when needed.
	AskPassword *string `json:"askPassword,omitempty"`

	// Upload and download chunk size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Space separated list of ciphers to be used for session encryption, ordered by preference.
	Ciphers string `json:"ciphers,omitempty"`

	// The maximum number of outstanding requests for one file
	Concurrency *string `json:"concurrency,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// If set don't use concurrent reads.
	DisableConcurrentReads *string `json:"disableConcurrentReads,omitempty"`

	// If set don't use concurrent writes.
	DisableConcurrentWrites *string `json:"disableConcurrentWrites,omitempty"`

	// Disable the execution of SSH commands to determine if remote file hashing is available.
	DisableHashcheck *string `json:"disableHashcheck,omitempty"`

	// SSH host to connect to.
	Host string `json:"host,omitempty"`

	// Max time before closing idle connections.
	IdleTimeout *string `json:"idleTimeout,omitempty"`

	// Space separated list of key exchange algorithms, ordered by preference.
	KeyExchange string `json:"keyExchange,omitempty"`

	// Path to PEM-encoded private key file.
	KeyFile string `json:"keyFile,omitempty"`

	// The passphrase to decrypt the PEM-encoded private key file.
	KeyFilePass string `json:"keyFilePass,omitempty"`

	// Raw PEM-encoded private key.
	KeyPem string `json:"keyPem,omitempty"`

	// When set forces the usage of the ssh-agent.
	KeyUseAgent *string `json:"keyUseAgent,omitempty"`

	// Optional path to known_hosts file.
	KnownHostsFile string `json:"knownHostsFile,omitempty"`

	// Space separated list of MACs (message authentication code) algorithms, ordered by preference.
	Macs string `json:"macs,omitempty"`

	// The command used to read md5 hashes.
	Md5sumCommand string `json:"md5sumCommand,omitempty"`

	// SSH password, leave blank to use ssh-agent.
	Pass string `json:"pass,omitempty"`

	// Override path used by SSH shell commands.
	PathOverride string `json:"pathOverride,omitempty"`

	// SSH port number.
	Port *string `json:"port,omitempty"`

	// Optional path to public key file.
	PubkeyFile string `json:"pubkeyFile,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// Specifies the path or command to run a sftp server on the remote host.
	ServerCommand string `json:"serverCommand,omitempty"`

	// Environment variables to pass to sftp and commands
	SetEnv string `json:"setEnv,omitempty"`

	// Set the modified time on the remote if set.
	SetModtime *string `json:"setModtime,omitempty"`

	// The command used to read sha1 hashes.
	Sha1sumCommand string `json:"sha1sumCommand,omitempty"`

	// The type of SSH shell on remote server, if any.
	ShellType string `json:"shellType,omitempty"`

	// Set to skip any symlinks and any other non regular files.
	SkipLinks *string `json:"skipLinks,omitempty"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Specifies the SSH2 subsystem on the remote host.
	Subsystem *string `json:"subsystem,omitempty"`

	// If set use fstat instead of stat.
	UseFstat *string `json:"useFstat,omitempty"`

	// Enable the use of insecure ciphers and key exchange methods.
	UseInsecureCipher *string `json:"useInsecureCipher,omitempty"`

	// SSH username.
	User *string `json:"user,omitempty"`
}

DatasourceSftpRequest datasource sftp request

swagger:model datasource.SftpRequest

func (*DatasourceSftpRequest) ContextValidate

func (m *DatasourceSftpRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource sftp request based on the context it is used in

func (*DatasourceSftpRequest) MarshalBinary

func (m *DatasourceSftpRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSftpRequest) UnmarshalBinary

func (m *DatasourceSftpRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSftpRequest) Validate

func (m *DatasourceSftpRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource sftp request
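
The Required: true annotations are enforced by Validate at runtime: a required pointer left nil should come back as a validation error naming the missing field. A hedged sketch, again with a hypothetical import path:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/swag"

	"example.com/yourapp/client/models" // hypothetical import path: use this package's real module path
)

func main() {
	// RescanInterval and SourcePath are required but deliberately left nil here,
	// so Validate is expected to report them as missing.
	req := &models.DatasourceSftpRequest{
		Host:              "sftp.example.com",
		DeleteAfterExport: swag.Bool(false),
	}

	if err := req.Validate(strfmt.Default); err != nil {
		fmt.Println("validation error:", err)
		return
	}
	fmt.Println("unexpectedly valid")
}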

type DatasourceSharefileRequest

type DatasourceSharefileRequest struct {

	// Upload chunk size.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint for API calls.
	Endpoint string `json:"endpoint,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// ID of the root folder.
	RootFolderID string `json:"rootFolderId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Cutoff for switching to multipart upload.
	UploadCutoff *string `json:"uploadCutoff,omitempty"`
}

DatasourceSharefileRequest datasource sharefile request

swagger:model datasource.SharefileRequest

func (*DatasourceSharefileRequest) ContextValidate

func (m *DatasourceSharefileRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource sharefile request based on the context it is used in

func (*DatasourceSharefileRequest) MarshalBinary

func (m *DatasourceSharefileRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSharefileRequest) UnmarshalBinary

func (m *DatasourceSharefileRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSharefileRequest) Validate

func (m *DatasourceSharefileRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource sharefile request

type DatasourceSiaRequest

type DatasourceSiaRequest struct {

	// Sia Daemon API Password.
	APIPassword string `json:"apiPassword,omitempty"`

	// Sia daemon API URL, like http://sia.daemon.host:9980.
	APIURL *string `json:"apiUrl,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Siad User Agent
	UserAgent *string `json:"userAgent,omitempty"`
}

DatasourceSiaRequest datasource sia request

swagger:model datasource.SiaRequest

func (*DatasourceSiaRequest) ContextValidate

func (m *DatasourceSiaRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource sia request based on the context it is used in

func (*DatasourceSiaRequest) MarshalBinary

func (m *DatasourceSiaRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSiaRequest) UnmarshalBinary

func (m *DatasourceSiaRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSiaRequest) Validate

func (m *DatasourceSiaRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource sia request

type DatasourceSmbRequest

type DatasourceSmbRequest struct {

	// Whether the server is configured to be case-insensitive.
	CaseInsensitive *string `json:"caseInsensitive,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Domain name for NTLM authentication.
	Domain *string `json:"domain,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Hide special shares (e.g. print$) which users aren't supposed to access.
	HideSpecialShare *string `json:"hideSpecialShare,omitempty"`

	// SMB server hostname to connect to.
	Host string `json:"host,omitempty"`

	// Max time before closing idle connections.
	IdleTimeout *string `json:"idleTimeout,omitempty"`

	// SMB password.
	Pass string `json:"pass,omitempty"`

	// SMB port number.
	Port *string `json:"port,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Service principal name.
	Spn string `json:"spn,omitempty"`

	// SMB username.
	User *string `json:"user,omitempty"`
}

DatasourceSmbRequest datasource smb request

swagger:model datasource.SmbRequest

func (*DatasourceSmbRequest) ContextValidate

func (m *DatasourceSmbRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource smb request based on the context it is used in

func (*DatasourceSmbRequest) MarshalBinary

func (m *DatasourceSmbRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSmbRequest) UnmarshalBinary

func (m *DatasourceSmbRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSmbRequest) Validate

func (m *DatasourceSmbRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource smb request

type DatasourceStorjRequest

type DatasourceStorjRequest struct {

	// Access grant.
	AccessGrant string `json:"accessGrant,omitempty"`

	// API key.
	APIKey string `json:"apiKey,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Encryption passphrase.
	Passphrase string `json:"passphrase,omitempty"`

	// Choose an authentication method.
	Provider *string `json:"provider,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Satellite address.
	SatelliteAddress *string `json:"satelliteAddress,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourceStorjRequest datasource storj request

swagger:model datasource.StorjRequest

func (*DatasourceStorjRequest) ContextValidate

func (m *DatasourceStorjRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource storj request based on the context it is used in

func (*DatasourceStorjRequest) MarshalBinary

func (m *DatasourceStorjRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceStorjRequest) UnmarshalBinary

func (m *DatasourceStorjRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceStorjRequest) Validate

func (m *DatasourceStorjRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource storj request

type DatasourceSugarsyncRequest

type DatasourceSugarsyncRequest struct {

	// Sugarsync Access Key ID.
	AccessKeyID string `json:"accessKeyId,omitempty"`

	// Sugarsync App ID.
	AppID string `json:"appId,omitempty"`

	// Sugarsync authorization.
	Authorization string `json:"authorization,omitempty"`

	// Sugarsync authorization expiry.
	AuthorizationExpiry string `json:"authorizationExpiry,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// Sugarsync deleted folder id.
	DeletedID string `json:"deletedId,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Permanently delete files if true
	HardDelete *string `json:"hardDelete,omitempty"`

	// Sugarsync Private Access Key.
	PrivateAccessKey string `json:"privateAccessKey,omitempty"`

	// Sugarsync refresh token.
	RefreshToken string `json:"refreshToken,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Sugarsync root id.
	RootID string `json:"rootId,omitempty"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// Sugarsync user.
	User string `json:"user,omitempty"`
}

DatasourceSugarsyncRequest datasource sugarsync request

swagger:model datasource.SugarsyncRequest

func (*DatasourceSugarsyncRequest) ContextValidate

func (m *DatasourceSugarsyncRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource sugarsync request based on the context it is used in

func (*DatasourceSugarsyncRequest) MarshalBinary

func (m *DatasourceSugarsyncRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSugarsyncRequest) UnmarshalBinary

func (m *DatasourceSugarsyncRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSugarsyncRequest) Validate

func (m *DatasourceSugarsyncRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource sugarsync request

type DatasourceSwiftRequest

type DatasourceSwiftRequest struct {

	// Application Credential ID (OS_APPLICATION_CREDENTIAL_ID).
	ApplicationCredentialID string `json:"applicationCredentialId,omitempty"`

	// Application Credential Name (OS_APPLICATION_CREDENTIAL_NAME).
	ApplicationCredentialName string `json:"applicationCredentialName,omitempty"`

	// Application Credential Secret (OS_APPLICATION_CREDENTIAL_SECRET).
	ApplicationCredentialSecret string `json:"applicationCredentialSecret,omitempty"`

	// Authentication URL for server (OS_AUTH_URL).
	Auth string `json:"auth,omitempty"`

	// Auth Token from alternate authentication - optional (OS_AUTH_TOKEN).
	AuthToken string `json:"authToken,omitempty"`

	// AuthVersion - optional - set to (1,2,3) if your auth URL has no version (ST_AUTH_VERSION).
	AuthVersion *string `json:"authVersion,omitempty"`

	// Above this size files will be chunked into a _segments container.
	ChunkSize *string `json:"chunkSize,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// User domain - optional (v3 auth) (OS_USER_DOMAIN_NAME)
	Domain string `json:"domain,omitempty"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Endpoint type to choose from the service catalogue (OS_ENDPOINT_TYPE).
	EndpointType *string `json:"endpointType,omitempty"`

	// Get swift credentials from environment variables in standard OpenStack form.
	EnvAuth *string `json:"envAuth,omitempty"`

	// API key or password (OS_PASSWORD).
	Key string `json:"key,omitempty"`

	// If true avoid calling abort upload on a failure.
	LeavePartsOnError *string `json:"leavePartsOnError,omitempty"`

	// Don't chunk files during streaming upload.
	NoChunk *string `json:"noChunk,omitempty"`

	// Disable support for static and dynamic large objects
	NoLargeObjects *string `json:"noLargeObjects,omitempty"`

	// Region name - optional (OS_REGION_NAME).
	Region string `json:"region,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// The storage policy to use when creating a new container.
	StoragePolicy string `json:"storagePolicy,omitempty"`

	// Storage URL - optional (OS_STORAGE_URL).
	StorageURL string `json:"storageUrl,omitempty"`

	// Tenant name - optional for v1 auth, this or tenant_id required otherwise (OS_TENANT_NAME or OS_PROJECT_NAME).
	Tenant string `json:"tenant,omitempty"`

	// Tenant domain - optional (v3 auth) (OS_PROJECT_DOMAIN_NAME).
	TenantDomain string `json:"tenantDomain,omitempty"`

	// Tenant ID - optional for v1 auth, this or tenant required otherwise (OS_TENANT_ID).
	TenantID string `json:"tenantId,omitempty"`

	// User name to log in (OS_USERNAME).
	User string `json:"user,omitempty"`

	// User ID to log in - optional - most swift systems use user and leave this blank (v3 auth) (OS_USER_ID).
	UserID string `json:"userId,omitempty"`
}

DatasourceSwiftRequest datasource swift request

swagger:model datasource.SwiftRequest

func (*DatasourceSwiftRequest) ContextValidate

func (m *DatasourceSwiftRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource swift request based on the context it is used in

func (*DatasourceSwiftRequest) MarshalBinary

func (m *DatasourceSwiftRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceSwiftRequest) UnmarshalBinary

func (m *DatasourceSwiftRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceSwiftRequest) Validate

func (m *DatasourceSwiftRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource swift request

type DatasourceUptoboxRequest

type DatasourceUptoboxRequest struct {

	// Your access token.
	AccessToken string `json:"accessToken,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`
}

DatasourceUptoboxRequest datasource uptobox request

swagger:model datasource.UptoboxRequest

func (*DatasourceUptoboxRequest) ContextValidate

func (m *DatasourceUptoboxRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource uptobox request based on the context it is used in

func (*DatasourceUptoboxRequest) MarshalBinary

func (m *DatasourceUptoboxRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceUptoboxRequest) UnmarshalBinary

func (m *DatasourceUptoboxRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceUptoboxRequest) Validate

func (m *DatasourceUptoboxRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource uptobox request

type DatasourceWebdavRequest

type DatasourceWebdavRequest struct {

	// Bearer token instead of user/pass (e.g. a Macaroon).
	BearerToken string `json:"bearerToken,omitempty"`

	// Command to run to get a bearer token.
	BearerTokenCommand string `json:"bearerTokenCommand,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding string `json:"encoding,omitempty"`

	// Set HTTP headers for all transactions.
	Headers string `json:"headers,omitempty"`

	// Password.
	Pass string `json:"pass,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// URL of http host to connect to.
	URL string `json:"url,omitempty"`

	// User name.
	User string `json:"user,omitempty"`

	// Name of the WebDAV site/service/software you are using.
	Vendor string `json:"vendor,omitempty"`
}

DatasourceWebdavRequest datasource webdav request

swagger:model datasource.WebdavRequest

func (*DatasourceWebdavRequest) ContextValidate

func (m *DatasourceWebdavRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource webdav request based on the context it is used in

func (*DatasourceWebdavRequest) MarshalBinary

func (m *DatasourceWebdavRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceWebdavRequest) UnmarshalBinary

func (m *DatasourceWebdavRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceWebdavRequest) Validate

func (m *DatasourceWebdavRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource webdav request

type DatasourceYandexRequest

type DatasourceYandexRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Delete files permanently rather than putting them into the trash.
	HardDelete *string `json:"hardDelete,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceYandexRequest datasource yandex request

swagger:model datasource.YandexRequest

func (*DatasourceYandexRequest) ContextValidate

func (m *DatasourceYandexRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource yandex request based on the context it is used

func (*DatasourceYandexRequest) MarshalBinary

func (m *DatasourceYandexRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceYandexRequest) UnmarshalBinary

func (m *DatasourceYandexRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceYandexRequest) Validate

func (m *DatasourceYandexRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource yandex request

type DatasourceZohoRequest

type DatasourceZohoRequest struct {

	// Auth server URL.
	AuthURL string `json:"authUrl,omitempty"`

	// OAuth Client Id.
	ClientID string `json:"clientId,omitempty"`

	// OAuth Client Secret.
	ClientSecret string `json:"clientSecret,omitempty"`

	// Delete the source after exporting to CAR files
	// Required: true
	DeleteAfterExport *bool `json:"deleteAfterExport"`

	// The encoding for the backend.
	Encoding *string `json:"encoding,omitempty"`

	// Zoho region to connect to.
	Region string `json:"region,omitempty"`

	// Automatically rescan the source directory when this interval has passed since the last successful scan
	// Required: true
	RescanInterval *string `json:"rescanInterval"`

	// Starting state for scanning
	// Required: true
	ScanningState struct {
		ModelWorkState
	} `json:"scanningState"`

	// The path of the source to scan for files
	// Required: true
	SourcePath *string `json:"sourcePath"`

	// OAuth Access Token as a JSON blob.
	Token string `json:"token,omitempty"`

	// Token server url.
	TokenURL string `json:"tokenUrl,omitempty"`
}

DatasourceZohoRequest datasource zoho request

swagger:model datasource.ZohoRequest

func (*DatasourceZohoRequest) ContextValidate

func (m *DatasourceZohoRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this datasource zoho request based on the context it is used

func (*DatasourceZohoRequest) MarshalBinary

func (m *DatasourceZohoRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DatasourceZohoRequest) UnmarshalBinary

func (m *DatasourceZohoRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DatasourceZohoRequest) Validate

func (m *DatasourceZohoRequest) Validate(formats strfmt.Registry) error

Validate validates this datasource zoho request

type DealListDealRequest

type DealListDealRequest struct {

	// dataset name filter
	Datasets []string `json:"datasets"`

	// provider filter
	Providers []string `json:"providers"`

	// schedule id filter
	Schedules []int64 `json:"schedules"`

	// state filter
	States []string `json:"states"`
}

DealListDealRequest deal list deal request

swagger:model deal.ListDealRequest

func (*DealListDealRequest) ContextValidate

func (m *DealListDealRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this deal list deal request based on context it is used

func (*DealListDealRequest) MarshalBinary

func (m *DealListDealRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DealListDealRequest) UnmarshalBinary

func (m *DealListDealRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DealListDealRequest) Validate

func (m *DealListDealRequest) Validate(formats strfmt.Registry) error

Validate validates this deal list deal request
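
For example, listing only active deals for one dataset and provider uses the slice filters above. A minimal sketch follows (the import path is assumed, and the state strings mirror ModelDealState).

package main

import (
	"fmt"
	"log"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	req := models.DealListDealRequest{
		Datasets:  []string{"my-dataset"},
		Providers: []string{"f01234"},
		States:    []string{string(models.ModelDealStateActive)},
	}

	body, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body)) // JSON body for the deal listing endpoint
}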

type DealProposal

type DealProposal struct {

	// Client address
	ClientAddress string `json:"clientAddress,omitempty"`

	// Duration in epoch or in duration format, i.e. 1500000, 2400h
	Duration *string `json:"duration,omitempty"`

	// File size in bytes for boost to fetch the CAR file
	FileSize int64 `json:"fileSize,omitempty"`

	// http headers to be passed with the request (i.e. key=value)
	HTTPHeaders []string `json:"httpHeaders"`

	// Whether the deal should be announced to IPNI
	Ipni *bool `json:"ipni,omitempty"`

	// Whether the deal should be kept unsealed
	KeepUnsealed *bool `json:"keepUnsealed,omitempty"`

	// Piece CID
	PieceCid string `json:"pieceCid,omitempty"`

	// Piece size
	PieceSize string `json:"pieceSize,omitempty"`

	// Price in FIL per deal
	PricePerDeal float64 `json:"pricePerDeal,omitempty"`

	// Price in FIL per GiB
	PricePerGb float64 `json:"pricePerGb,omitempty"`

	// Price in FIL per GiB per epoch
	PricePerGbEpoch float64 `json:"pricePerGbEpoch,omitempty"`

	// Provider ID
	ProviderID string `json:"providerId,omitempty"`

	// Root CID that is required as part of the deal proposal; if empty, it will be set to the empty CID
	RootCid *string `json:"rootCid,omitempty"`

	// Deal start delay in epoch or in duration format, i.e. 1000, 72h
	StartDelay *string `json:"startDelay,omitempty"`

	// URL template with PIECE_CID placeholder for boost to fetch the CAR file, i.e. http://127.0.0.1/piece/{PIECE_CID}.car
	URLTemplate string `json:"urlTemplate,omitempty"`

	// Whether the deal should be verified
	Verified *bool `json:"verified,omitempty"`
}

DealProposal deal proposal

swagger:model deal.Proposal

func (*DealProposal) ContextValidate

func (m *DealProposal) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this deal proposal based on context it is used

func (*DealProposal) MarshalBinary

func (m *DealProposal) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*DealProposal) UnmarshalBinary

func (m *DealProposal) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*DealProposal) Validate

func (m *DealProposal) Validate(formats strfmt.Registry) error

Validate validates this deal proposal
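
To show how the optional pointer fields and validation fit together, here is a minimal sketch that fills in a DealProposal and runs Validate against the default go-openapi format registry. The import path and the field values are placeholders.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	duration := "2400h"
	startDelay := "72h"
	verified := true

	proposal := models.DealProposal{
		ProviderID:    "f01234",              // placeholder provider
		ClientAddress: "f1exampleclient",     // placeholder wallet address
		PieceCid:      "bagaexamplepiececid", // placeholder piece CID
		PieceSize:     "32GiB",
		Duration:      &duration,
		StartDelay:    &startDelay,
		Verified:      &verified,
		URLTemplate:   "http://127.0.0.1/piece/{PIECE_CID}.car",
	}

	// Validate checks the struct against its swagger constraints.
	if err := proposal.Validate(strfmt.Default); err != nil {
		log.Fatal(err)
	}

	body, err := proposal.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body))
}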

type GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry

type GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry struct {

	// is dir
	IsDir bool `json:"isDir,omitempty"`

	// last modified
	LastModified string `json:"lastModified,omitempty"`

	// path
	Path string `json:"path,omitempty"`

	// size
	Size int64 `json:"size,omitempty"`
}

GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry github com data preservation programs singularity handler datasource entry

swagger:model github_com_data-preservation-programs_singularity_handler_datasource.Entry

func (*GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) ContextValidate

func (m *GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this github com data preservation programs singularity handler datasource entry based on context it is used

func (*GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) MarshalBinary

func (m *GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) UnmarshalBinary

func (m *GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) Validate

func (m *GithubComDataPreservationProgramsSingularityHandlerDatasourceEntry) Validate(formats strfmt.Registry) error

Validate validates this github com data preservation programs singularity handler datasource entry

type InspectDirDetail

type InspectDirDetail struct {

	// current
	Current *ModelDirectory `json:"current,omitempty"`

	// dirs
	Dirs []*ModelDirectory `json:"dirs"`

	// files
	Files []*ModelFile `json:"files"`
}

InspectDirDetail inspect dir detail

swagger:model inspect.DirDetail

func (*InspectDirDetail) ContextValidate

func (m *InspectDirDetail) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this inspect dir detail based on the context it is used

func (*InspectDirDetail) MarshalBinary

func (m *InspectDirDetail) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*InspectDirDetail) UnmarshalBinary

func (m *InspectDirDetail) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*InspectDirDetail) Validate

func (m *InspectDirDetail) Validate(formats strfmt.Registry) error

Validate validates this inspect dir detail

type InspectGetPathRequest

type InspectGetPathRequest struct {

	// path
	Path string `json:"path,omitempty"`
}

InspectGetPathRequest inspect get path request

swagger:model inspect.GetPathRequest

func (*InspectGetPathRequest) ContextValidate

func (m *InspectGetPathRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this inspect get path request based on context it is used

func (*InspectGetPathRequest) MarshalBinary

func (m *InspectGetPathRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*InspectGetPathRequest) UnmarshalBinary

func (m *InspectGetPathRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*InspectGetPathRequest) Validate

func (m *InspectGetPathRequest) Validate(formats strfmt.Registry) error

Validate validates this inspect get path request

type InspectGetSourcePackJobsRequest

type InspectGetSourcePackJobsRequest struct {

	// state
	State ModelWorkState `json:"state,omitempty"`
}

InspectGetSourcePackJobsRequest inspect get source pack jobs request

swagger:model inspect.GetSourcePackJobsRequest

func (*InspectGetSourcePackJobsRequest) ContextValidate

func (m *InspectGetSourcePackJobsRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this inspect get source pack jobs request based on the context it is used

func (*InspectGetSourcePackJobsRequest) MarshalBinary

func (m *InspectGetSourcePackJobsRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*InspectGetSourcePackJobsRequest) UnmarshalBinary

func (m *InspectGetSourcePackJobsRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*InspectGetSourcePackJobsRequest) Validate

func (m *InspectGetSourcePackJobsRequest) Validate(formats strfmt.Registry) error

Validate validates this inspect get source pack jobs request

type ModelCID

type ModelCID interface{}

ModelCID model CID

swagger:model model.CID

type ModelCar

type ModelCar struct {

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// dataset Id
	DatasetID int64 `json:"datasetId,omitempty"`

	// file path
	FilePath string `json:"filePath,omitempty"`

	// file size
	FileSize int64 `json:"fileSize,omitempty"`

	// header
	Header []int64 `json:"header"`

	// id
	ID int64 `json:"id,omitempty"`

	// pack job Id
	PackJobID int64 `json:"packJobId,omitempty"`

	// piece cid
	PieceCid ModelCID `json:"pieceCid,omitempty"`

	// piece size
	PieceSize int64 `json:"pieceSize,omitempty"`

	// root cid
	RootCid ModelCID `json:"rootCid,omitempty"`

	// source Id
	SourceID int64 `json:"sourceId,omitempty"`
}

ModelCar model car

swagger:model model.Car

func (*ModelCar) ContextValidate

func (m *ModelCar) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model car based on context it is used

func (*ModelCar) MarshalBinary

func (m *ModelCar) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelCar) UnmarshalBinary

func (m *ModelCar) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelCar) Validate

func (m *ModelCar) Validate(formats strfmt.Registry) error

Validate validates this model car

type ModelDataset

type ModelDataset struct {

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// encryption recipients
	EncryptionRecipients []string `json:"encryptionRecipients"`

	// id
	ID int64 `json:"id,omitempty"`

	// max size
	MaxSize int64 `json:"maxSize,omitempty"`

	// metadata
	Metadata ModelMetadata `json:"metadata,omitempty"`

	// name
	Name string `json:"name,omitempty"`

	// output dirs
	OutputDirs []string `json:"outputDirs"`

	// piece size
	PieceSize int64 `json:"pieceSize,omitempty"`

	// updated at
	UpdatedAt string `json:"updatedAt,omitempty"`
}

ModelDataset model dataset

swagger:model model.Dataset

func (*ModelDataset) ContextValidate

func (m *ModelDataset) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model dataset based on the context it is used

func (*ModelDataset) MarshalBinary

func (m *ModelDataset) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelDataset) UnmarshalBinary

func (m *ModelDataset) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelDataset) Validate

func (m *ModelDataset) Validate(formats strfmt.Registry) error

Validate validates this model dataset

type ModelDeal

type ModelDeal struct {

	// client Id
	ClientID string `json:"clientId,omitempty"`

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// dataset Id
	DatasetID int64 `json:"datasetId,omitempty"`

	// deal Id
	DealID int64 `json:"dealId,omitempty"`

	// end epoch
	EndEpoch int64 `json:"endEpoch,omitempty"`

	// error message
	ErrorMessage string `json:"errorMessage,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// label
	Label string `json:"label,omitempty"`

	// piece cid
	PieceCid ModelCID `json:"pieceCid,omitempty"`

	// piece size
	PieceSize int64 `json:"pieceSize,omitempty"`

	// price
	Price string `json:"price,omitempty"`

	// proposal Id
	ProposalID string `json:"proposalId,omitempty"`

	// provider
	Provider string `json:"provider,omitempty"`

	// schedule Id
	ScheduleID int64 `json:"scheduleId,omitempty"`

	// sector start epoch
	SectorStartEpoch int64 `json:"sectorStartEpoch,omitempty"`

	// start epoch
	StartEpoch int64 `json:"startEpoch,omitempty"`

	// state
	State ModelDealState `json:"state,omitempty"`

	// updated at
	UpdatedAt string `json:"updatedAt,omitempty"`

	// verified
	Verified bool `json:"verified,omitempty"`
}

ModelDeal model deal

swagger:model model.Deal

func (*ModelDeal) ContextValidate

func (m *ModelDeal) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model deal based on the context it is used

func (*ModelDeal) MarshalBinary

func (m *ModelDeal) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelDeal) UnmarshalBinary

func (m *ModelDeal) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelDeal) Validate

func (m *ModelDeal) Validate(formats strfmt.Registry) error

Validate validates this model deal

type ModelDealState

type ModelDealState string

ModelDealState model deal state

swagger:model model.DealState

const (

	// ModelDealStateProposed captures enum value "proposed"
	ModelDealStateProposed ModelDealState = "proposed"

	// ModelDealStatePublished captures enum value "published"
	ModelDealStatePublished ModelDealState = "published"

	// ModelDealStateActive captures enum value "active"
	ModelDealStateActive ModelDealState = "active"

	// ModelDealStateExpired captures enum value "expired"
	ModelDealStateExpired ModelDealState = "expired"

	// ModelDealStateProposalExpired captures enum value "proposal_expired"
	ModelDealStateProposalExpired ModelDealState = "proposal_expired"

	// ModelDealStateRejected captures enum value "rejected"
	ModelDealStateRejected ModelDealState = "rejected"

	// ModelDealStateSlashed captures enum value "slashed"
	ModelDealStateSlashed ModelDealState = "slashed"

	// ModelDealStateError captures enum value "error"
	ModelDealStateError ModelDealState = "error"
)

func NewModelDealState

func NewModelDealState(value ModelDealState) *ModelDealState

func (ModelDealState) ContextValidate

func (m ModelDealState) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model deal state based on context it is used

func (ModelDealState) Pointer

func (m ModelDealState) Pointer() *ModelDealState

Pointer returns a pointer to a freshly-allocated ModelDealState.

func (ModelDealState) Validate

func (m ModelDealState) Validate(formats strfmt.Registry) error

Validate validates this model deal state
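
As a short sketch of how the enum helpers are typically used (import path assumed): NewModelDealState yields a pointer for optional fields, and Validate rejects values outside the enum.

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	// Pointer to a known enum value, handy for optional fields.
	state := models.NewModelDealState(models.ModelDealStateActive)
	fmt.Println(*state, state.Validate(strfmt.Default)) // active <nil>

	// A value outside the enum fails validation.
	bogus := models.ModelDealState("not-a-state")
	fmt.Println(bogus.Validate(strfmt.Default) != nil) // true
}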

type ModelDirectory

type ModelDirectory struct {

	// cid
	Cid ModelCID `json:"cid,omitempty"`

	// exported
	Exported bool `json:"exported,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// name
	Name string `json:"name,omitempty"`

	// parent Id
	ParentID int64 `json:"parentId,omitempty"`

	// source Id
	SourceID int64 `json:"sourceId,omitempty"`

	// updated at
	UpdatedAt string `json:"updatedAt,omitempty"`
}

ModelDirectory model directory

swagger:model model.Directory

func (*ModelDirectory) ContextValidate

func (m *ModelDirectory) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model directory based on context it is used

func (*ModelDirectory) MarshalBinary

func (m *ModelDirectory) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelDirectory) UnmarshalBinary

func (m *ModelDirectory) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelDirectory) Validate

func (m *ModelDirectory) Validate(formats strfmt.Registry) error

Validate validates this model directory

type ModelFile

type ModelFile struct {

	// cid
	Cid ModelCID `json:"cid,omitempty"`

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// directory Id
	DirectoryID int64 `json:"directoryId,omitempty"`

	// file ranges
	FileRanges []*ModelFileRange `json:"fileRanges"`

	// hash
	Hash string `json:"hash,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// last modified
	LastModified int64 `json:"lastModified,omitempty"`

	// path
	Path string `json:"path,omitempty"`

	// size
	Size int64 `json:"size,omitempty"`

	// source Id
	SourceID int64 `json:"sourceId,omitempty"`
}

ModelFile model file

swagger:model model.File

func (*ModelFile) ContextValidate

func (m *ModelFile) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model file based on the context it is used

func (*ModelFile) MarshalBinary

func (m *ModelFile) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelFile) UnmarshalBinary

func (m *ModelFile) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelFile) Validate

func (m *ModelFile) Validate(formats strfmt.Registry) error

Validate validates this model file

type ModelFileRange

type ModelFileRange struct {

	// cid
	Cid ModelCID `json:"cid,omitempty"`

	// file
	File *ModelFile `json:"file,omitempty"`

	// file Id
	FileID int64 `json:"fileId,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// length
	Length int64 `json:"length,omitempty"`

	// offset
	Offset int64 `json:"offset,omitempty"`

	// pack job Id
	PackJobID int64 `json:"packJobId,omitempty"`
}

ModelFileRange model file range

swagger:model model.FileRange

func (*ModelFileRange) ContextValidate

func (m *ModelFileRange) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model file range based on the context it is used

func (*ModelFileRange) MarshalBinary

func (m *ModelFileRange) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelFileRange) UnmarshalBinary

func (m *ModelFileRange) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelFileRange) Validate

func (m *ModelFileRange) Validate(formats strfmt.Registry) error

Validate validates this model file range

type ModelMetadata

type ModelMetadata map[string]string

ModelMetadata model metadata

swagger:model model.Metadata

func (ModelMetadata) ContextValidate

func (m ModelMetadata) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model metadata based on context it is used

func (ModelMetadata) Validate

func (m ModelMetadata) Validate(formats strfmt.Registry) error

Validate validates this model metadata

type ModelPackJob

type ModelPackJob struct {

	// cars
	Cars []*ModelCar `json:"cars"`

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// error message
	ErrorMessage string `json:"errorMessage,omitempty"`

	// file ranges
	FileRanges []*ModelFileRange `json:"fileRanges"`

	// id
	ID int64 `json:"id,omitempty"`

	// packing state
	PackingState ModelWorkState `json:"packingState,omitempty"`

	// packing worker Id
	PackingWorkerID string `json:"packingWorkerId,omitempty"`

	// source Id
	SourceID int64 `json:"sourceId,omitempty"`
}

ModelPackJob model pack job

swagger:model model.PackJob

func (*ModelPackJob) ContextValidate

func (m *ModelPackJob) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model pack job based on the context it is used

func (*ModelPackJob) MarshalBinary

func (m *ModelPackJob) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelPackJob) UnmarshalBinary

func (m *ModelPackJob) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelPackJob) Validate

func (m *ModelPackJob) Validate(formats strfmt.Registry) error

Validate validates this model pack job

type ModelSchedule

type ModelSchedule struct {

	// allowed piece cids
	AllowedPieceCids []string `json:"allowedPieceCids"`

	// announce to ipni
	AnnounceToIpni bool `json:"announceToIpni,omitempty"`

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// dataset Id
	DatasetID int64 `json:"datasetId,omitempty"`

	// duration
	Duration int64 `json:"duration,omitempty"`

	// error message
	ErrorMessage string `json:"errorMessage,omitempty"`

	// http headers
	HTTPHeaders []string `json:"httpHeaders"`

	// id
	ID int64 `json:"id,omitempty"`

	// keep unsealed
	KeepUnsealed bool `json:"keepUnsealed,omitempty"`

	// max pending deal number
	MaxPendingDealNumber int64 `json:"maxPendingDealNumber,omitempty"`

	// max pending deal size
	MaxPendingDealSize int64 `json:"maxPendingDealSize,omitempty"`

	// notes
	Notes string `json:"notes,omitempty"`

	// price per deal
	PricePerDeal float64 `json:"pricePerDeal,omitempty"`

	// price per gb
	PricePerGb float64 `json:"pricePerGb,omitempty"`

	// price per gb epoch
	PricePerGbEpoch float64 `json:"pricePerGbEpoch,omitempty"`

	// provider
	Provider string `json:"provider,omitempty"`

	// schedule cron
	ScheduleCron string `json:"scheduleCron,omitempty"`

	// schedule cron perpetual
	ScheduleCronPerpetual bool `json:"scheduleCronPerpetual,omitempty"`

	// schedule deal number
	ScheduleDealNumber int64 `json:"scheduleDealNumber,omitempty"`

	// schedule deal size
	ScheduleDealSize int64 `json:"scheduleDealSize,omitempty"`

	// start delay
	StartDelay int64 `json:"startDelay,omitempty"`

	// state
	State ModelScheduleState `json:"state,omitempty"`

	// total deal number
	TotalDealNumber int64 `json:"totalDealNumber,omitempty"`

	// total deal size
	TotalDealSize int64 `json:"totalDealSize,omitempty"`

	// updated at
	UpdatedAt string `json:"updatedAt,omitempty"`

	// url template
	URLTemplate string `json:"urlTemplate,omitempty"`

	// verified
	Verified bool `json:"verified,omitempty"`
}

ModelSchedule model schedule

swagger:model model.Schedule

func (*ModelSchedule) ContextValidate

func (m *ModelSchedule) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model schedule based on the context it is used

func (*ModelSchedule) MarshalBinary

func (m *ModelSchedule) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelSchedule) UnmarshalBinary

func (m *ModelSchedule) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelSchedule) Validate

func (m *ModelSchedule) Validate(formats strfmt.Registry) error

Validate validates this model schedule

type ModelScheduleState

type ModelScheduleState string

ModelScheduleState model schedule state

swagger:model model.ScheduleState

const (

	// ModelScheduleStateActive captures enum value "active"
	ModelScheduleStateActive ModelScheduleState = "active"

	// ModelScheduleStatePaused captures enum value "paused"
	ModelScheduleStatePaused ModelScheduleState = "paused"

	// ModelScheduleStateError captures enum value "error"
	ModelScheduleStateError ModelScheduleState = "error"

	// ModelScheduleStateCompleted captures enum value "completed"
	ModelScheduleStateCompleted ModelScheduleState = "completed"
)

func NewModelScheduleState

func NewModelScheduleState(value ModelScheduleState) *ModelScheduleState

func (ModelScheduleState) ContextValidate

func (m ModelScheduleState) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model schedule state based on context it is used

func (ModelScheduleState) Pointer

func (m ModelScheduleState) Pointer() *ModelScheduleState

Pointer returns a pointer to a freshly-allocated ModelScheduleState.

func (ModelScheduleState) Validate

func (m ModelScheduleState) Validate(formats strfmt.Registry) error

Validate validates this model schedule state

type ModelSource

type ModelSource struct {

	// created at
	CreatedAt string `json:"createdAt,omitempty"`

	// dag gen error message
	DagGenErrorMessage string `json:"dagGenErrorMessage,omitempty"`

	// dag gen state
	DagGenState ModelWorkState `json:"dagGenState,omitempty"`

	// dag gen worker Id
	DagGenWorkerID string `json:"dagGenWorkerId,omitempty"`

	// dataset Id
	DatasetID int64 `json:"datasetId,omitempty"`

	// delete after export
	DeleteAfterExport bool `json:"deleteAfterExport,omitempty"`

	// error message
	ErrorMessage string `json:"errorMessage,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// last scanned path
	LastScannedPath string `json:"lastScannedPath,omitempty"`

	// last scanned timestamp
	LastScannedTimestamp int64 `json:"lastScannedTimestamp,omitempty"`

	// metadata
	Metadata ModelMetadata `json:"metadata,omitempty"`

	// path
	Path string `json:"path,omitempty"`

	// scan interval seconds
	ScanIntervalSeconds int64 `json:"scanIntervalSeconds,omitempty"`

	// scanning state
	ScanningState ModelWorkState `json:"scanningState,omitempty"`

	// scanning worker Id
	ScanningWorkerID string `json:"scanningWorkerId,omitempty"`

	// type
	Type ModelSourceType `json:"type,omitempty"`

	// updated at
	UpdatedAt string `json:"updatedAt,omitempty"`
}

ModelSource model source

swagger:model model.Source

func (*ModelSource) ContextValidate

func (m *ModelSource) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model source based on the context it is used

func (*ModelSource) MarshalBinary

func (m *ModelSource) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelSource) UnmarshalBinary

func (m *ModelSource) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelSource) Validate

func (m *ModelSource) Validate(formats strfmt.Registry) error

Validate validates this model source

type ModelSourceType

type ModelSourceType string

ModelSourceType model source type

swagger:model model.SourceType

const (

	// ModelSourceTypeLocal captures enum value "local"
	ModelSourceTypeLocal ModelSourceType = "local"

	// ModelSourceTypeUpload captures enum value "upload"
	ModelSourceTypeUpload ModelSourceType = "upload"
)

func NewModelSourceType

func NewModelSourceType(value ModelSourceType) *ModelSourceType

func (ModelSourceType) ContextValidate

func (m ModelSourceType) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model source type based on context it is used

func (ModelSourceType) Pointer

func (m ModelSourceType) Pointer() *ModelSourceType

Pointer returns a pointer to a freshly-allocated ModelSourceType.

func (ModelSourceType) Validate

func (m ModelSourceType) Validate(formats strfmt.Registry) error

Validate validates this model source type

type ModelWallet

type ModelWallet struct {

	// Address is the Filecoin full address of the wallet
	Address string `json:"address,omitempty"`

	// ID is the short ID of the wallet
	ID string `json:"id,omitempty"`

	// PrivateKey is the private key of the wallet
	PrivateKey string `json:"privateKey,omitempty"`

	// RemotePeer is the remote peer ID of the wallet, for remote signing purposes
	RemotePeer string `json:"remotePeer,omitempty"`
}

ModelWallet model wallet

swagger:model model.Wallet

func (*ModelWallet) ContextValidate

func (m *ModelWallet) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model wallet based on context it is used

func (*ModelWallet) MarshalBinary

func (m *ModelWallet) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelWallet) UnmarshalBinary

func (m *ModelWallet) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelWallet) Validate

func (m *ModelWallet) Validate(formats strfmt.Registry) error

Validate validates this model wallet

type ModelWalletAssignment

type ModelWalletAssignment struct {

	// dataset Id
	DatasetID int64 `json:"datasetId,omitempty"`

	// id
	ID int64 `json:"id,omitempty"`

	// wallet Id
	WalletID string `json:"walletId,omitempty"`
}

ModelWalletAssignment model wallet assignment

swagger:model model.WalletAssignment

func (*ModelWalletAssignment) ContextValidate

func (m *ModelWalletAssignment) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model wallet assignment based on context it is used

func (*ModelWalletAssignment) MarshalBinary

func (m *ModelWalletAssignment) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ModelWalletAssignment) UnmarshalBinary

func (m *ModelWalletAssignment) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ModelWalletAssignment) Validate

func (m *ModelWalletAssignment) Validate(formats strfmt.Registry) error

Validate validates this model wallet assignment

type ModelWorkState

type ModelWorkState string

ModelWorkState model work state

swagger:model model.WorkState

const (

	// ModelWorkStateCreated captures enum value "created"
	ModelWorkStateCreated ModelWorkState = "created"

	// ModelWorkStateReady captures enum value "ready"
	ModelWorkStateReady ModelWorkState = "ready"

	// ModelWorkStateProcessing captures enum value "processing"
	ModelWorkStateProcessing ModelWorkState = "processing"

	// ModelWorkStateComplete captures enum value "complete"
	ModelWorkStateComplete ModelWorkState = "complete"

	// ModelWorkStateError captures enum value "error"
	ModelWorkStateError ModelWorkState = "error"
)

func NewModelWorkState

func NewModelWorkState(value ModelWorkState) *ModelWorkState

func (ModelWorkState) ContextValidate

func (m ModelWorkState) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this model work state based on context it is used

func (ModelWorkState) Pointer

func (m ModelWorkState) Pointer() *ModelWorkState

Pointer returns a pointer to a freshly-allocated ModelWorkState.

func (ModelWorkState) Validate

func (m ModelWorkState) Validate(formats strfmt.Registry) error

Validate validates this model work state
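
These work-state constants are what request types such as InspectGetSourcePackJobsRequest expect in their state fields; a minimal sketch (import path assumed):

package main

import (
	"fmt"
	"log"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	// Ask only for pack jobs that have finished successfully.
	req := models.InspectGetSourcePackJobsRequest{
		State: models.ModelWorkStateComplete,
	}

	body, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body)) // {"state":"complete"}
}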

type ScheduleCreateRequest

type ScheduleCreateRequest struct {

	// Allowed piece CIDs in this schedule
	AllowedPieceCids []string `json:"allowedPieceCids"`

	// Dataset name
	DatasetName string `json:"datasetName,omitempty"`

	// Duration in epoch or in duration format, i.e. 1500000, 2400h
	Duration *string `json:"duration,omitempty"`

	// http headers to be passed with the request (i.e. key=value)
	HTTPHeaders []string `json:"httpHeaders"`

	// Whether the deal should be announced to IPNI
	Ipni *bool `json:"ipni,omitempty"`

	// Whether the deal should be kept unsealed
	KeepUnsealed *bool `json:"keepUnsealed,omitempty"`

	// Max pending deal number
	MaxPendingDealNumber int64 `json:"maxPendingDealNumber,omitempty"`

	// Max pending deal size in human readable format, i.e. 100 TiB
	MaxPendingDealSize string `json:"maxPendingDealSize,omitempty"`

	// Notes
	Notes string `json:"notes,omitempty"`

	// Price in FIL per deal
	PricePerDeal float64 `json:"pricePerDeal,omitempty"`

	// Price in FIL per GiB
	PricePerGb float64 `json:"pricePerGb,omitempty"`

	// Price in FIL per GiB per epoch
	PricePerGbEpoch float64 `json:"pricePerGbEpoch,omitempty"`

	// Provider
	Provider string `json:"provider,omitempty"`

	// Schedule cron pattern
	ScheduleCron string `json:"scheduleCron,omitempty"`

	// Whether a cron schedule should run indefinitely
	ScheduleCronPerpetual bool `json:"scheduleCronPerpetual,omitempty"`

	// Number of deals per scheduled time
	ScheduleDealNumber int64 `json:"scheduleDealNumber,omitempty"`

	// Size of deals per schedule trigger in human readable format, i.e. 100 TiB
	ScheduleDealSize string `json:"scheduleDealSize,omitempty"`

	// Deal start delay in epoch or in duration format, i.e. 1000, 72h
	StartDelay *string `json:"startDelay,omitempty"`

	// Total number of deals
	TotalDealNumber int64 `json:"totalDealNumber,omitempty"`

	// Total size of deals in human readable format, i.e. 100 TiB
	TotalDealSize string `json:"totalDealSize,omitempty"`

	// URL template with PIECE_CID placeholder for boost to fetch the CAR file, i.e. http://127.0.0.1/piece/{PIECE_CID}.car
	URLTemplate string `json:"urlTemplate,omitempty"`

	// Whether the deal should be verified
	Verified *bool `json:"verified,omitempty"`
}

ScheduleCreateRequest schedule create request

swagger:model schedule.CreateRequest

func (*ScheduleCreateRequest) ContextValidate

func (m *ScheduleCreateRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this schedule create request based on context it is used

func (*ScheduleCreateRequest) MarshalBinary

func (m *ScheduleCreateRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*ScheduleCreateRequest) UnmarshalBinary

func (m *ScheduleCreateRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*ScheduleCreateRequest) Validate

func (m *ScheduleCreateRequest) Validate(formats strfmt.Registry) error

Validate validates this schedule create request
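
As an illustration of how the human-readable sizes, cron pattern, and epoch/duration fields combine, the sketch below assembles a ScheduleCreateRequest and validates it before use. The import path and all values are placeholders.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	duration := "2400h"
	startDelay := "72h"
	verified := true

	req := models.ScheduleCreateRequest{
		DatasetName:        "my-dataset",
		Provider:           "f01234",
		Duration:           &duration,
		StartDelay:         &startDelay,
		Verified:           &verified,
		ScheduleCron:       "0 0 * * *", // once a day
		ScheduleDealNumber: 10,
		ScheduleDealSize:   "10 TiB",
		MaxPendingDealSize: "20 TiB",
		TotalDealSize:      "100 TiB",
		URLTemplate:        "http://127.0.0.1/piece/{PIECE_CID}.car",
	}

	if err := req.Validate(strfmt.Default); err != nil {
		log.Fatal(err)
	}

	body, err := req.MarshalBinary()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body)) // JSON body for the schedule creation endpoint
}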

type StorePieceReader

type StorePieceReader interface{}

StorePieceReader store piece reader

swagger:model store.PieceReader

type WalletAddRemoteRequest

type WalletAddRemoteRequest struct {

	// Address is the Filecoin full address of the wallet
	Address string `json:"address,omitempty"`

	// RemotePeer is the remote peer ID of the wallet, for remote signing purposes
	RemotePeer string `json:"remotePeer,omitempty"`
}

WalletAddRemoteRequest wallet add remote request

swagger:model wallet.AddRemoteRequest

func (*WalletAddRemoteRequest) ContextValidate

func (m *WalletAddRemoteRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this wallet add remote request based on context it is used

func (*WalletAddRemoteRequest) MarshalBinary

func (m *WalletAddRemoteRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*WalletAddRemoteRequest) UnmarshalBinary

func (m *WalletAddRemoteRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*WalletAddRemoteRequest) Validate

func (m *WalletAddRemoteRequest) Validate(formats strfmt.Registry) error

Validate validates this wallet add remote request

type WalletImportRequest

type WalletImportRequest struct {

	// This is the exported private key from lotus wallet export
	PrivateKey string `json:"privateKey,omitempty"`
}

WalletImportRequest wallet import request

swagger:model wallet.ImportRequest

func (*WalletImportRequest) ContextValidate

func (m *WalletImportRequest) ContextValidate(ctx context.Context, formats strfmt.Registry) error

ContextValidate validates this wallet import request based on context it is used

func (*WalletImportRequest) MarshalBinary

func (m *WalletImportRequest) MarshalBinary() ([]byte, error)

MarshalBinary interface implementation

func (*WalletImportRequest) UnmarshalBinary

func (m *WalletImportRequest) UnmarshalBinary(b []byte) error

UnmarshalBinary interface implementation

func (*WalletImportRequest) Validate

func (m *WalletImportRequest) Validate(formats strfmt.Registry) error

Validate validates this wallet import request
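
Going the other way, UnmarshalBinary decodes a JSON body into the model; here is a minimal sketch for the wallet import payload (import path assumed, and the key value is a placeholder, not a real exported key).

package main

import (
	"fmt"
	"log"

	// Illustrative import path; adjust to your module layout.
	"github.com/data-preservation-programs/singularity/client/swagger/models"
)

func main() {
	// A request body as it might arrive over HTTP; the value stands in for
	// the hex output of `lotus wallet export`.
	body := []byte(`{"privateKey":"<hex output of lotus wallet export>"}`)

	var req models.WalletImportRequest
	if err := req.UnmarshalBinary(body); err != nil {
		log.Fatal(err)
	}
	fmt.Println(req.PrivateKey != "") // true
}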
