Documentation ¶
Overview ¶
Package v1alpha1 contains API Schema definitions for the pipelines v1alpha1 API group.
+kubebuilder:object:generate=true
+groupName=filter.plainsight.ai
Index ¶
- Variables
- type BucketSource
- type ConfigVar
- type ExecutionConfig
- type FileCounts
- type Filter
- type Pipeline
- type PipelineList
- type PipelineMode
- type PipelineReference
- type PipelineRun
- type PipelineRunList
- type PipelineRunSpec
- type PipelineRunStatus
- type PipelineSpec
- type PipelineStatus
- type RTSPSource
- type SecretReference
- type ServicePort
- type Source
- type StreamingStatus
Constants ¶
This section is empty.
Variables ¶
var (
	// GroupVersion is group version used to register these objects.
	GroupVersion = schema.GroupVersion{Group: "filter.plainsight.ai", Version: "v1alpha1"}

	// SchemeBuilder is used to add go types to the GroupVersionKind scheme.
	SchemeBuilder = &scheme.Builder{GroupVersion: GroupVersion}

	// AddToScheme adds the types in this group-version to the given scheme.
	AddToScheme = SchemeBuilder.AddToScheme
)
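These variables are typically consumed by a controller or client binary to make the Pipeline and PipelineRun types decodable. A minimal registration sketch follows, assuming this package is imported under the alias filterv1alpha1; the module path shown is hypothetical and not part of this documentation.

import (
	"k8s.io/apimachinery/pkg/runtime"
	utilruntime "k8s.io/apimachinery/pkg/util/runtime"

	// Hypothetical module path; substitute the actual import path of this package.
	filterv1alpha1 "example.com/pipelines/api/v1alpha1"
)

// runtimeScheme is the scheme handed to clients and controllers.
var runtimeScheme = runtime.NewScheme()

func init() {
	// Register filter.plainsight.ai/v1alpha1 types with the scheme.
	utilruntime.Must(filterv1alpha1.AddToScheme(runtimeScheme))
}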
Functions ¶
This section is empty.
Types ¶
type BucketSource ¶
type BucketSource struct {
// name is the name of the S3-compatible bucket
// +kubebuilder:validation:Required
// +kubebuilder:validation:MinLength=1
Name string `json:"name"`
// prefix is an optional path prefix within the bucket (e.g., "input-data/")
// +optional
Prefix string `json:"prefix,omitempty"`
// endpoint is the S3-compatible endpoint URL (required for non-AWS S3)
// Examples:
// - MinIO: "http://minio.example.com:9000"
// - GCS: "https://storage.googleapis.com"
// - Custom S3: "https://s3.custom.example.com"
// Leave empty for AWS S3 (will use default AWS endpoints)
// +optional
Endpoint string `json:"endpoint,omitempty"`
// region is the bucket region (e.g., "us-east-1")
// Required for AWS S3, optional for other providers
// +optional
Region string `json:"region,omitempty"`
// credentialsSecret references a Secret containing access credentials
// Expected keys: "accessKeyId" and "secretAccessKey"
// +optional
CredentialsSecret *SecretReference `json:"credentialsSecret,omitempty"`
// insecureSkipTLSVerify skips TLS certificate verification (useful for dev/test)
// +optional
// +kubebuilder:default=false
InsecureSkipTLSVerify bool `json:"insecureSkipTLSVerify,omitempty"`
// usePathStyle forces path-style addressing (endpoint.com/bucket vs bucket.endpoint.com)
// Required for MinIO and some S3-compatible services
// +optional
// +kubebuilder:default=false
UsePathStyle bool `json:"usePathStyle,omitempty"`
}
BucketSource defines an S3-compatible object storage bucket source
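As an illustration, a MinIO-backed source might be declared as follows; the bucket, endpoint, region, and Secret names are hypothetical.

// Example values only: a MinIO bucket reached over a custom endpoint.
var exampleBucket = BucketSource{
	Name:     "raw-video",
	Prefix:   "input-data/",
	Endpoint: "http://minio.example.com:9000",
	Region:   "us-east-1",
	CredentialsSecret: &SecretReference{
		// Secret expected to carry "accessKeyId" and "secretAccessKey" keys.
		Name: "minio-credentials",
	},
	// MinIO generally requires path-style addressing.
	UsePathStyle: true,
}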
func (*BucketSource) DeepCopy ¶
func (in *BucketSource) DeepCopy() *BucketSource
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new BucketSource.
func (*BucketSource) DeepCopyInto ¶
func (in *BucketSource) DeepCopyInto(out *BucketSource)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ConfigVar ¶
type ConfigVar struct {
// name is the configuration key (will be prefixed with FILTER_ and uppercased)
// +kubebuilder:validation:Required
// +kubebuilder:validation:MinLength=1
Name string `json:"name"`
// value is the configuration value
// +kubebuilder:validation:Required
Value string `json:"value"`
}
ConfigVar defines a configuration key-value pair that will be injected as an environment variable with the FILTER_ prefix
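A short sketch of the mapping described above: per the field comments, the entry below should surface in the filter container as FILTER_SOURCES=mysource.

// The controller prefixes the name with FILTER_ and upper-cases it.
var exampleConfig = []ConfigVar{
	{Name: "sources", Value: "mysource"}, // -> FILTER_SOURCES=mysource
}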
func (*ConfigVar) DeepCopy ¶
func (in *ConfigVar) DeepCopy() *ConfigVar
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ConfigVar.
func (*ConfigVar) DeepCopyInto ¶
func (in *ConfigVar) DeepCopyInto(out *ConfigVar)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ExecutionConfig ¶
type ExecutionConfig struct {
// parallelism defines the maximum number of parallel executions (max concurrent pods)
// +optional
// +kubebuilder:validation:Minimum=1
// +kubebuilder:default=10
Parallelism *int32 `json:"parallelism,omitempty"`
// maxAttempts defines the maximum number of retry attempts per file
// +optional
// +kubebuilder:validation:Minimum=1
// +kubebuilder:default=3
MaxAttempts *int32 `json:"maxAttempts,omitempty"`
// pendingTimeout defines the time after which pending messages are reclaimed
// Supports Kubernetes duration format (e.g., "15m", "1h", "30s")
// +optional
// +kubebuilder:default="15m"
PendingTimeout *metav1.Duration `json:"pendingTimeout,omitempty"`
}
ExecutionConfig defines execution parameters for pipeline runs
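For illustration, the configuration below caps concurrency at 5 pods, allows 2 attempts per file, and reclaims messages pending for more than 30 minutes; all figures are example values, and the k8s.io/utils/ptr helper is used only to take addresses of the int32 literals.

import (
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"
)

// Example values only; omitted fields fall back to the kubebuilder defaults above.
var exampleExecution = ExecutionConfig{
	Parallelism:    ptr.To[int32](5),
	MaxAttempts:    ptr.To[int32](2),
	PendingTimeout: &metav1.Duration{Duration: 30 * time.Minute},
}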
func (*ExecutionConfig) DeepCopy ¶
func (in *ExecutionConfig) DeepCopy() *ExecutionConfig
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ExecutionConfig.
func (*ExecutionConfig) DeepCopyInto ¶
func (in *ExecutionConfig) DeepCopyInto(out *ExecutionConfig)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type FileCounts ¶
type FileCounts struct {
// totalFiles is the total number of files to process
// +optional
TotalFiles int64 `json:"totalFiles,omitempty"`
// queued is the number of files waiting to be processed
// +optional
Queued int64 `json:"queued,omitempty"`
// running is the number of files currently being processed
// +optional
Running int64 `json:"running,omitempty"`
// succeeded is the number of files successfully processed
// +optional
Succeeded int64 `json:"succeeded,omitempty"`
// failed is the number of files that failed processing
// +optional
Failed int64 `json:"failed,omitempty"`
}
FileCounts tracks the number of files in each processing state
func (*FileCounts) DeepCopy ¶
func (in *FileCounts) DeepCopy() *FileCounts
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new FileCounts.
func (*FileCounts) DeepCopyInto ¶
func (in *FileCounts) DeepCopyInto(out *FileCounts)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Filter ¶
type Filter struct {
// name is a unique identifier for this filter within the pipeline
// +kubebuilder:validation:Required
// +kubebuilder:validation:MinLength=1
// +kubebuilder:validation:Pattern=`^[a-z0-9]([-a-z0-9]*[a-z0-9])?$`
Name string `json:"name"`
// image is the container image to run (e.g., "myregistry/filter:v1.0")
// +kubebuilder:validation:Required
Image string `json:"image"`
// config is a list of configuration key-value pairs that will be injected
// as environment variables with the FILTER_ prefix.
// For example, a config with name "sources" and value "mysource" will result
// in the environment variable FILTER_SOURCES=mysource
// +optional
Config []ConfigVar `json:"config,omitempty"`
// env is a list of environment variables to set in the container
// Uses the standard Kubernetes EnvVar type for full compatibility
// +optional
Env []corev1.EnvVar `json:"env,omitempty"`
// args are the command arguments to pass to the container
// +optional
Args []string `json:"args,omitempty"`
// command overrides the default entrypoint of the container
// +optional
Command []string `json:"command,omitempty"`
// resources defines compute resource requirements for this filter
// +optional
Resources *corev1.ResourceRequirements `json:"resources,omitempty"`
// imagePullPolicy determines when to pull the container image
// +optional
// +kubebuilder:validation:Enum=Always;Never;IfNotPresent
// +kubebuilder:default=IfNotPresent
ImagePullPolicy corev1.PullPolicy `json:"imagePullPolicy,omitempty"`
}
Filter defines a containerized processing step in the pipeline
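A sketch of a single filter step follows; the image name, registry, and resource figures are hypothetical.

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/api/resource"
)

// One containerized processing step with config, env, and resource requests.
var exampleFilter = Filter{
	Name:  "object-detector",
	Image: "myregistry/filter:v1.0",
	Config: []ConfigVar{
		{Name: "sources", Value: "mysource"}, // injected as FILTER_SOURCES=mysource
	},
	Env: []corev1.EnvVar{
		{Name: "LOG_LEVEL", Value: "info"},
	},
	Resources: &corev1.ResourceRequirements{
		Requests: corev1.ResourceList{
			corev1.ResourceCPU:    resource.MustParse("500m"),
			corev1.ResourceMemory: resource.MustParse("512Mi"),
		},
	},
	ImagePullPolicy: corev1.PullIfNotPresent,
}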
func (*Filter) DeepCopy ¶
func (in *Filter) DeepCopy() *Filter
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Filter.
func (*Filter) DeepCopyInto ¶
func (in *Filter) DeepCopyInto(out *Filter)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Pipeline ¶
type Pipeline struct {
metav1.TypeMeta `json:",inline"`
// metadata is the standard object metadata
// +optional
metav1.ObjectMeta `json:"metadata,omitempty"`
// spec defines the desired state of Pipeline
// +required
Spec PipelineSpec `json:"spec"`
// status defines the observed state of Pipeline
// +optional
Status PipelineStatus `json:"status,omitempty"`
}
Pipeline is the Schema for the pipelines API
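Putting the pieces together, a minimal batch-mode Pipeline might look like the sketch below; names, bucket, and image are illustrative, and APIVersion follows the GroupVersion declared above.

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// A batch-mode pipeline that reads files from an S3 bucket and runs one filter.
var examplePipeline = Pipeline{
	TypeMeta: metav1.TypeMeta{
		APIVersion: "filter.plainsight.ai/v1alpha1",
		Kind:       "Pipeline",
	},
	ObjectMeta: metav1.ObjectMeta{Name: "detect-objects", Namespace: "default"},
	Spec: PipelineSpec{
		Mode: PipelineModeBatch,
		Source: Source{
			// Batch mode requires source.bucket and forbids source.rtsp.
			Bucket: &BucketSource{Name: "raw-video", Region: "us-east-1"},
		},
		Filters: []Filter{
			{Name: "object-detector", Image: "myregistry/filter:v1.0"},
		},
	},
}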
func (*Pipeline) DeepCopy ¶
func (in *Pipeline) DeepCopy() *Pipeline
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Pipeline.
func (*Pipeline) DeepCopyInto ¶
func (in *Pipeline) DeepCopyInto(out *Pipeline)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*Pipeline) DeepCopyObject ¶
func (in *Pipeline) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type PipelineList ¶
type PipelineList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []Pipeline `json:"items"`
}
PipelineList contains a list of Pipeline
func (*PipelineList) DeepCopy ¶
func (in *PipelineList) DeepCopy() *PipelineList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineList.
func (*PipelineList) DeepCopyInto ¶
func (in *PipelineList) DeepCopyInto(out *PipelineList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*PipelineList) DeepCopyObject ¶
func (in *PipelineList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type PipelineMode ¶
type PipelineMode string
PipelineMode defines the execution mode for a Pipeline
+kubebuilder:validation:Enum=batch;stream
const (
	// PipelineModeBatch runs the pipeline as a Kubernetes Job processing files from S3
	PipelineModeBatch PipelineMode = "batch"

	// PipelineModeStream runs the pipeline as a Kubernetes Deployment processing an RTSP stream
	PipelineModeStream PipelineMode = "stream"
)
type PipelineReference ¶
type PipelineReference struct {
// name is the name of the Pipeline resource
// +required
Name string `json:"name"`
// namespace is the namespace of the Pipeline resource
// If not specified, the PipelineRun's namespace is used
// +optional
Namespace *string `json:"namespace,omitempty"`
}
PipelineReference defines a reference to a Pipeline resource
func (*PipelineReference) DeepCopy ¶
func (in *PipelineReference) DeepCopy() *PipelineReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineReference.
func (*PipelineReference) DeepCopyInto ¶
func (in *PipelineReference) DeepCopyInto(out *PipelineReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PipelineRun ¶
type PipelineRun struct {
metav1.TypeMeta `json:",inline"`
// metadata is the standard object metadata
// +optional
metav1.ObjectMeta `json:"metadata,omitempty"`
// spec defines the desired state of PipelineRun
// +required
Spec PipelineRunSpec `json:"spec"`
// status defines the observed state of PipelineRun
// +optional
Status PipelineRunStatus `json:"status,omitempty"`
}
PipelineRun is the Schema for the pipelineruns API
func (*PipelineRun) DeepCopy ¶
func (in *PipelineRun) DeepCopy() *PipelineRun
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineRun.
func (*PipelineRun) DeepCopyInto ¶
func (in *PipelineRun) DeepCopyInto(out *PipelineRun)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*PipelineRun) DeepCopyObject ¶
func (in *PipelineRun) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (*PipelineRun) GetQueueGroup ¶
func (pr *PipelineRun) GetQueueGroup() string
GetQueueGroup returns the Valkey consumer group name for this PipelineRun. Format: cg:<uid>
func (*PipelineRun) GetQueueStream ¶
func (pr *PipelineRun) GetQueueStream() string
GetQueueStream returns the Valkey stream key for this PipelineRun. Format: pr:<uid>:work
func (*PipelineRun) GetRunID ¶
func (pr *PipelineRun) GetRunID() string
GetRunID returns the run ID (UID) for this PipelineRun
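The sketch below only restates the documented key formats; the UID is a made-up example.

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

func exampleQueueKeys() {
	pr := &PipelineRun{
		ObjectMeta: metav1.ObjectMeta{UID: "b1946ac9-2d14-4e6f-9c3a-8f0d4c1e2a3b"},
	}
	_ = pr.GetRunID()       // "b1946ac9-2d14-4e6f-9c3a-8f0d4c1e2a3b"
	_ = pr.GetQueueStream() // "pr:b1946ac9-2d14-4e6f-9c3a-8f0d4c1e2a3b:work"
	_ = pr.GetQueueGroup()  // "cg:b1946ac9-2d14-4e6f-9c3a-8f0d4c1e2a3b"
}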
type PipelineRunList ¶
type PipelineRunList struct {
metav1.TypeMeta `json:",inline"`
metav1.ListMeta `json:"metadata,omitempty"`
Items []PipelineRun `json:"items"`
}
PipelineRunList contains a list of PipelineRun
func (*PipelineRunList) DeepCopy ¶
func (in *PipelineRunList) DeepCopy() *PipelineRunList
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineRunList.
func (*PipelineRunList) DeepCopyInto ¶
func (in *PipelineRunList) DeepCopyInto(out *PipelineRunList)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (*PipelineRunList) DeepCopyObject ¶
func (in *PipelineRunList) DeepCopyObject() runtime.Object
DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
type PipelineRunSpec ¶
type PipelineRunSpec struct {
// pipelineRef references the Pipeline resource to execute
// +required
PipelineRef PipelineReference `json:"pipelineRef"`
// execution defines how the pipeline should be executed
// +optional
Execution *ExecutionConfig `json:"execution,omitempty"`
}
PipelineRunSpec defines the desired state of PipelineRun
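A PipelineRun that executes a Pipeline named detect-objects might look like this sketch; names are illustrative, and the ptr helper only takes the address of the literal.

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/utils/ptr"
)

// Executes the referenced Pipeline with a reduced parallelism.
var exampleRun = PipelineRun{
	TypeMeta: metav1.TypeMeta{
		APIVersion: "filter.plainsight.ai/v1alpha1",
		Kind:       "PipelineRun",
	},
	ObjectMeta: metav1.ObjectMeta{Name: "detect-objects-run-1", Namespace: "default"},
	Spec: PipelineRunSpec{
		PipelineRef: PipelineReference{Name: "detect-objects"},
		Execution:   &ExecutionConfig{Parallelism: ptr.To[int32](5)},
	},
}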
func (*PipelineRunSpec) DeepCopy ¶
func (in *PipelineRunSpec) DeepCopy() *PipelineRunSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineRunSpec.
func (*PipelineRunSpec) DeepCopyInto ¶
func (in *PipelineRunSpec) DeepCopyInto(out *PipelineRunSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PipelineRunStatus ¶
type PipelineRunStatus struct {
// counts tracks the number of files in each state (Batch mode only)
// +optional
Counts *FileCounts `json:"counts,omitempty"`
// jobName is the name of the Kubernetes Job created for this run (Batch mode only)
// +optional
JobName string `json:"jobName,omitempty"`
// streaming provides observability into streaming pipeline runs (Stream mode only)
// +optional
Streaming *StreamingStatus `json:"streaming,omitempty"`
// startTime is when the pipeline run was started
// +optional
StartTime *metav1.Time `json:"startTime,omitempty"`
// completionTime is when the pipeline run completed
// +optional
CompletionTime *metav1.Time `json:"completionTime,omitempty"`
// conditions represent the current state of the PipelineRun resource.
// Each condition has a unique type and reflects the status of a specific aspect of the resource.
//
// Standard condition types include:
// - "Available": the resource is fully functional
// - "Progressing": the resource is being created or updated
// - "Degraded": the resource failed to reach or maintain its desired state
//
// The status of each condition is one of True, False, or Unknown.
// +listType=map
// +listMapKey=type
// +optional
Conditions []metav1.Condition `json:"conditions,omitempty"`
}
PipelineRunStatus defines the observed state of PipelineRun.
func (*PipelineRunStatus) DeepCopy ¶
func (in *PipelineRunStatus) DeepCopy() *PipelineRunStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineRunStatus.
func (*PipelineRunStatus) DeepCopyInto ¶
func (in *PipelineRunStatus) DeepCopyInto(out *PipelineRunStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PipelineSpec ¶
type PipelineSpec struct {
// mode defines the execution mode for this pipeline (batch or stream)
// batch mode processes files from S3 using Kubernetes Jobs
// stream mode processes RTSP streams using Kubernetes Deployments
// +optional
// +kubebuilder:default=batch
Mode PipelineMode `json:"mode,omitempty"`
// source defines the input source for the pipeline
// For Batch mode: source.bucket is required, source.rtsp is forbidden
// For Stream mode: source.rtsp is required, source.bucket is forbidden
// +kubebuilder:validation:Required
Source Source `json:"source"`
// filters is an ordered list of processing steps to apply to the input data
// Filters are executed sequentially in the order they are defined
// +optional
// +kubebuilder:validation:MinItems=1
Filters []Filter `json:"filters,omitempty"`
// services defines Kubernetes Services to expose filter ports
// Only applies to Stream mode. Multiple services can expose different ports for the same filter.
// Service naming: <pipelinerun-name>-<filter-name>-<index>
// +optional
Services []ServicePort `json:"services,omitempty"`
// videoInputPath defines where the controller stores downloaded source files.
// Downstream filters can reference this path to read the input artifact.
// Defaults to /ws/input.mp4.
// Only applies to Batch mode.
// +kubebuilder:validation:Optional
// +kubebuilder:validation:MinLength=1
// +kubebuilder:default=/ws/input.mp4
VideoInputPath string `json:"videoInputPath,omitempty"`
}
PipelineSpec defines the desired state of Pipeline
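For contrast with the batch example above, a stream-mode spec pairs an rtsp source with an exposed service port; hostnames, images, and ports below are hypothetical.

// Stream mode requires source.rtsp and forbids source.bucket.
var exampleStreamSpec = PipelineSpec{
	Mode: PipelineModeStream,
	Source: Source{
		RTSP: &RTSPSource{Host: "camera.example.com", Path: "/live/camera1"},
	},
	Filters: []Filter{
		{Name: "frame-annotator", Image: "myregistry/annotator:v1.0"},
	},
	Services: []ServicePort{
		// Exposes port 8080 of the "frame-annotator" filter.
		{Name: "frame-annotator", Port: 8080},
	},
}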
func (*PipelineSpec) DeepCopy ¶
func (in *PipelineSpec) DeepCopy() *PipelineSpec
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineSpec.
func (*PipelineSpec) DeepCopyInto ¶
func (in *PipelineSpec) DeepCopyInto(out *PipelineSpec)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type PipelineStatus ¶
type PipelineStatus struct {
// conditions represent the current state of the Pipeline resource.
// Each condition has a unique type and reflects the status of a specific aspect of the resource.
//
// Standard condition types include:
// - "Available": the resource is fully functional
// - "Progressing": the resource is being created or updated
// - "Degraded": the resource failed to reach or maintain its desired state
//
// The status of each condition is one of True, False, or Unknown.
// +listType=map
// +listMapKey=type
// +optional
Conditions []metav1.Condition `json:"conditions,omitempty"`
}
PipelineStatus defines the observed state of Pipeline.
func (*PipelineStatus) DeepCopy ¶
func (in *PipelineStatus) DeepCopy() *PipelineStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineStatus.
func (*PipelineStatus) DeepCopyInto ¶
func (in *PipelineStatus) DeepCopyInto(out *PipelineStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type RTSPSource ¶
type RTSPSource struct {
// host is the RTSP server hostname or IP address
// +kubebuilder:validation:Required
// +kubebuilder:validation:MinLength=1
Host string `json:"host"`
// port is the RTSP server port
// +optional
// +kubebuilder:default=554
// +kubebuilder:validation:Minimum=1
// +kubebuilder:validation:Maximum=65535
Port int32 `json:"port,omitempty"`
// path is the RTSP stream path (e.g., "/stream1", "/live/camera1")
// +optional
Path string `json:"path,omitempty"`
// credentialsSecret references a Secret containing RTSP credentials
// Expected keys: "username" and "password"
// +optional
CredentialsSecret *SecretReference `json:"credentialsSecret,omitempty"`
// idleTimeout defines the duration after which a continuously unready stream
// will cause the PipelineRun to complete and the Deployment to be deleted.
// If not set, the stream will run indefinitely.
// +optional
IdleTimeout *metav1.Duration `json:"idleTimeout,omitempty"`
}
RTSPSource defines an RTSP stream source
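An authenticated RTSP source with an idle timeout might be declared as below; the host, Secret name, and duration are example values.

import (
	"time"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

var exampleRTSP = RTSPSource{
	Host: "camera.example.com",
	Port: 8554,
	Path: "/live/camera1",
	// Secret expected to carry "username" and "password" keys.
	CredentialsSecret: &SecretReference{Name: "camera-credentials"},
	// Complete the run if the stream stays unready for 10 minutes.
	IdleTimeout: &metav1.Duration{Duration: 10 * time.Minute},
}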
func (*RTSPSource) DeepCopy ¶
func (in *RTSPSource) DeepCopy() *RTSPSource
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RTSPSource.
func (*RTSPSource) DeepCopyInto ¶
func (in *RTSPSource) DeepCopyInto(out *RTSPSource)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type SecretReference ¶
type SecretReference struct {
// name is the name of the secret
// +kubebuilder:validation:Required
Name string `json:"name"`
// namespace is the namespace of the secret
// If empty, uses the same namespace as the Pipeline resource
// +optional
Namespace string `json:"namespace,omitempty"`
}
SecretReference contains information to locate a Kubernetes Secret
func (*SecretReference) DeepCopy ¶
func (in *SecretReference) DeepCopy() *SecretReference
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new SecretReference.
func (*SecretReference) DeepCopyInto ¶
func (in *SecretReference) DeepCopyInto(out *SecretReference)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type ServicePort ¶
type ServicePort struct {
// name is the name of the filter to expose
// Must match one of the filter names in the pipeline
// +kubebuilder:validation:Required
// +kubebuilder:validation:MinLength=1
Name string `json:"name"`
// port is the port number to expose
// +kubebuilder:validation:Required
// +kubebuilder:validation:Minimum=1
// +kubebuilder:validation:Maximum=65535
Port int32 `json:"port"`
// targetPort is the port on the container to forward to
// If not specified, defaults to the same value as port
// +optional
// +kubebuilder:validation:Minimum=1
// +kubebuilder:validation:Maximum=65535
TargetPort *int32 `json:"targetPort,omitempty"`
// protocol is the network protocol for this port (TCP or UDP)
// +optional
// +kubebuilder:default=TCP
// +kubebuilder:validation:Enum=TCP;UDP
Protocol corev1.Protocol `json:"protocol,omitempty"`
}
ServicePort defines a port to expose as a Kubernetes Service for a filter
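A sketch of a port mapping: the Service listens on port 80 and forwards to container port 8080 of the filter named frame-annotator; all values are hypothetical.

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/utils/ptr"
)

var exampleServicePort = ServicePort{
	Name:       "frame-annotator", // must match a filter name in the pipeline
	Port:       80,
	TargetPort: ptr.To[int32](8080),
	Protocol:   corev1.ProtocolTCP,
}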
func (*ServicePort) DeepCopy ¶
func (in *ServicePort) DeepCopy() *ServicePort
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ServicePort.
func (*ServicePort) DeepCopyInto ¶
func (in *ServicePort) DeepCopyInto(out *ServicePort)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type Source ¶
type Source struct {
// bucket defines an S3-compatible object storage source
// Required when mode is Batch, forbidden when mode is Stream
// +optional
Bucket *BucketSource `json:"bucket,omitempty"`
// rtsp defines an RTSP stream source
// Required when mode is Stream, forbidden when mode is Batch
// +optional
RTSP *RTSPSource `json:"rtsp,omitempty"`
}
Source defines the input source for the pipeline. Supports bucket (S3-compatible object storage) or rtsp (streaming video). Exactly one source type must be specified based on the pipeline mode.
func (*Source) DeepCopy ¶
func (in *Source) DeepCopy() *Source
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Source.
func (*Source) DeepCopyInto ¶
func (in *Source) DeepCopyInto(out *Source)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
type StreamingStatus ¶
type StreamingStatus struct {
// readyReplicas is the number of ready replicas in the streaming deployment
// +optional
ReadyReplicas int32 `json:"readyReplicas,omitempty"`
// updatedReplicas is the number of updated replicas in the streaming deployment
// +optional
UpdatedReplicas int32 `json:"updatedReplicas,omitempty"`
// availableReplicas is the number of available replicas in the streaming deployment
// +optional
AvailableReplicas int32 `json:"availableReplicas,omitempty"`
// containerRestarts is the aggregate count of container restarts across all pods
// +optional
ContainerRestarts int32 `json:"containerRestarts,omitempty"`
// lastReadyTime is the last time the deployment became ready
// +optional
LastReadyTime *metav1.Time `json:"lastReadyTime,omitempty"`
// lastFrameAt is the timestamp of the last processed frame (best-effort)
// +optional
LastFrameAt *metav1.Time `json:"lastFrameAt,omitempty"`
// deploymentName is the name of the Kubernetes Deployment created for this streaming run
// +optional
DeploymentName string `json:"deploymentName,omitempty"`
}
StreamingStatus provides observability into streaming pipeline runs
func (*StreamingStatus) DeepCopy ¶
func (in *StreamingStatus) DeepCopy() *StreamingStatus
DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StreamingStatus.
func (*StreamingStatus) DeepCopyInto ¶
func (in *StreamingStatus) DeepCopyInto(out *StreamingStatus)
DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.