Documentation ¶
Overview ¶
Package azureblob provides a blob implementation that uses Azure Storage’s BlockBlob. Use OpenBucket to construct a *blob.Bucket.
Open URLs ¶
For blob.Open URLs, azureblob registers for the scheme "azblob"; URLs start with "azblob://".
The URL's Host is used as the bucket name.
By default, credentials are retrieved from the environment variables AZURE_STORAGE_ACCOUNT, AZURE_STORAGE_KEY, and AZURE_STORAGE_SAS_TOKEN. AZURE_STORAGE_ACCOUNT is required, along with one of the other two. See https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1#what-is-a-shared-access-signature for more on SAS tokens. Alternatively, credentials can be loaded from a file; see the cred_path query parameter below.
The following query options are supported:
- cred_path: Sets the path to a credentials file in JSON format. The AccountName field must be specified, along with either AccountKey or SASToken.
Example credentials file using AccountKey:
{
  "AccountName": "STORAGE ACCOUNT NAME",
  "AccountKey": "PRIMARY OR SECONDARY ACCOUNT KEY"
}
Example credentials file using SASToken:
{
  "AccountName": "STORAGE ACCOUNT NAME",
  "SASToken": "ENTER YOUR AZURE STORAGE SAS TOKEN"
}
Example URL:
azblob://mybucket?cred_path=pathToCredentials
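For instance, here is a minimal, hedged sketch of opening this URL with blob.Open (the same pattern shown in Example (Open) further below); the cred_path value is a placeholder that you must replace with a real file path:

package main

import (
	"context"
	"log"

	"gocloud.dev/blob"
	// Blank-import the driver so it registers the "azblob" scheme.
	_ "gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// Opens the container "mybucket", loading credentials from the JSON file
	// referenced by cred_path (placeholder; replace with a real path).
	b, err := blob.Open(ctx, "azblob://mybucket?cred_path=pathToCredentials")
	if err != nil {
		log.Fatal(err)
	}
	_ = b // Use b as shown in the examples below.
}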
As ¶
azureblob exposes the following types for As:
- Bucket: *azblob.ContainerURL
- Error: azblob.StorageError (see the sketch after this list)
- ListObject: azblob.BlobItem for objects, azblob.BlobPrefix for "directories"
- ListOptions.BeforeList: *azblob.ListBlobsSegmentOptions
- Reader: azblob.DownloadResponse
- Attributes: azblob.BlobGetPropertiesResponse
- WriterOptions.BeforeWrite: *azblob.UploadStreamToBlockBlobOptions
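The Error escape hatch is not exercised in Example (As) below; a brief, hedged sketch of using it via Bucket.ErrorAs might look like the following (it assumes the AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY environment variables are set and that the key being read does not exist):

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/Azure/azure-storage-blob-go/azblob"
	"gocloud.dev/blob"
	_ "gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// Open the container using the environment-variable credentials described above.
	b, err := blob.Open(ctx, "azblob://my-container")
	if err != nil {
		log.Fatal(err)
	}

	// Reading a missing key returns an error; ErrorAs surfaces the underlying
	// azblob.StorageError so its service code can be inspected.
	if _, err := b.ReadAll(ctx, "no-such-key"); err != nil {
		var serr azblob.StorageError
		if b.ErrorAs(err, &serr) {
			fmt.Println(serr.ServiceCode())
		}
	}
}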
Example ¶
package main

import (
	"context"
	"encoding/base64"
	"fmt"
	"log"
	"time"

	"github.com/Azure/azure-storage-blob-go/azblob"
	"gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// A fake account name and key. The key must be base64 encoded;
	// real Azure Storage Access Keys are already base64 encoded.
	accountName := azureblob.AccountName("myaccount")
	accountKey := azureblob.AccountKey(base64.StdEncoding.EncodeToString([]byte("FAKECREDS")))
	bucketName := "my-bucket"

	// Create a credentials object.
	credential, err := azureblob.NewCredential(accountName, accountKey)
	if err != nil {
		log.Fatal(err)
	}

	// Create a Pipeline, using whatever PipelineOptions you need.
	// This example overrides the default retry policy so calls can return promptly
	// for this test. Please review the timeout guidelines and set accordingly.
	// See https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations for more information.
	popts := azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyFixed,
			TryTimeout:    5 * time.Second,
			MaxTries:      1,
			RetryDelay:    0 * time.Second,
			MaxRetryDelay: 0 * time.Second,
		},
	}
	pipeline := azureblob.NewPipeline(credential, popts)

	// Create a *blob.Bucket.
	opts := &azureblob.Options{
		// This credential is required only if you're going to use the SignedURL
		// function.
		Credential: credential,
	}
	b, err := azureblob.OpenBucket(ctx, pipeline, accountName, bucketName, opts)
	if err != nil {
		log.Fatal(err)
		return
	}

	// Now we can use b!
	_, err = b.ReadAll(ctx, "my-key")
	if err != nil {
		// This is expected due to the fake credentials we used above.
		fmt.Println("ReadAll failed due to invalid credentials")
	}
}
Output: ReadAll failed due to invalid credentials
Example (As) ¶
package main

import (
	"context"
	"encoding/base64"
	"fmt"
	"io"
	"log"
	"time"

	"github.com/Azure/azure-storage-blob-go/azblob"
	"gocloud.dev/blob"
	"gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// A fake account name and key. The key must be base64 encoded;
	// real Azure Storage Access Keys are already base64 encoded.
	accountName := azureblob.AccountName("myaccount")
	accountKey := azureblob.AccountKey(base64.StdEncoding.EncodeToString([]byte("FAKECREDS")))
	bucketName := "my-bucket"

	// Create a credentials object.
	credential, err := azureblob.NewCredential(accountName, accountKey)
	if err != nil {
		log.Fatal(err)
	}

	// Create a Pipeline, using whatever PipelineOptions you need.
	// This example overrides the default retry policy so calls can return promptly
	// for this test. Please review the timeout guidelines and set accordingly.
	// See https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations for more information.
	popts := azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyFixed,
			TryTimeout:    5 * time.Second,
			MaxTries:      1,
			RetryDelay:    0 * time.Second,
			MaxRetryDelay: 0 * time.Second,
		},
	}
	pipeline := azureblob.NewPipeline(credential, popts)

	// Create a *blob.Bucket.
	opts := &azureblob.Options{
		// This credential is required only if you're going to use the SignedURL
		// function.
		Credential: credential,
	}
	b, err := azureblob.OpenBucket(ctx, pipeline, accountName, bucketName, opts)
	if err != nil {
		log.Fatal(err)
		return
	}

	// Create a *blob.Reader.
	r, err := b.NewReader(ctx, "key", &blob.ReaderOptions{})
	if err != nil {
		fmt.Println("ReadAll failed due to invalid credentials")
		// Due to the fake credentials used above, this test terminates here.
		return
	}

	// IMPORTANT: The calls below are intended to show how to use As() to obtain the Azure Blob Storage SDK types.
	// Due to the fake credentials used above, the calls below are not executed.

	// Use Reader.As to obtain SDK type azblob.DownloadResponse.
	// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#DownloadResponse for more information.
	var nativeReader azblob.DownloadResponse
	if r.As(&nativeReader) {
	}

	// Use Attributes.As to obtain SDK type azblob.BlobGetPropertiesResponse.
	// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#BlobGetPropertiesResponse for more information.
	var nativeAttrs azblob.BlobGetPropertiesResponse
	attr, _ := b.Attributes(ctx, "key")
	if attr.As(&nativeAttrs) {
	}

	// Use Bucket.As to obtain SDK type azblob.ContainerURL.
	// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob for more information.
	var nativeBucket *azblob.ContainerURL
	if b.As(&nativeBucket) {
	}

	// Use WriterOptions.BeforeWrite to obtain SDK type azblob.UploadStreamToBlockBlobOptions.
	// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#UploadStreamToBlockBlobOptions for more information.
	beforeWrite := func(as func(i interface{}) bool) error {
		var nativeWriteOptions *azblob.UploadStreamToBlockBlobOptions
		if as(&nativeWriteOptions) {
		}
		return nil
	}
	wopts := &blob.WriterOptions{
		ContentType: "application/json",
		BeforeWrite: beforeWrite,
	}

	// Create a *blob.Writer.
	w, _ := b.NewWriter(ctx, "key", wopts)
	w.Write([]byte("{"))
	w.Write([]byte(" message: 'hello' "))
	w.Write([]byte("}"))
	w.Close()

	// Use ListOptions.BeforeList to obtain SDK type azblob.ListBlobsSegmentOptions.
	// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#ListBlobsSegmentOptions for more information.
	beforeList := func(as func(i interface{}) bool) error {
		var nativeListOptions *azblob.ListBlobsSegmentOptions
		if as(&nativeListOptions) {
		}
		return nil
	}

	// Iterate through a virtual directory.
	iter := b.List(&blob.ListOptions{Prefix: "blob-for-delimiters", Delimiter: "/", BeforeList: beforeList})
	for {
		if p, err := iter.Next(ctx); err == io.EOF {
			break
		} else if err == nil {
			if p.IsDir {
				// Use ListObject.As to obtain SDK type azblob.BlobPrefix.
				// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#BlobPrefix for more information.
				var nativeDirObj azblob.BlobPrefix
				if p.As(&nativeDirObj) {
				}
			} else {
				// Use ListObject.As to obtain SDK type azblob.BlobItem.
				// See https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob#BlobItem for more information.
				var nativeBlobObj azblob.BlobItem
				if p.As(&nativeBlobObj) {
				}
			}
		}
	}
}
Output: ReadAll failed due to invalid credentials
Example (Open) ¶
package main

import (
	"context"
	"fmt"

	"gocloud.dev/blob"
)

func main() {
	ctx := context.Background()

	// Open creates a *Bucket from a URL.
	// This URL will open the container "mycontainer" using default
	// credentials found in the environment variables
	// AZURE_STORAGE_ACCOUNT plus at least one of AZURE_STORAGE_KEY
	// and AZURE_STORAGE_SAS_TOKEN.
	_, err := blob.Open(ctx, "azblob://mycontainer")

	// Alternatively, you can use the query parameter "cred_path" to load
	// credentials from a file in JSON format.
	// See the package documentation for the credentials file schema.
	_, err = blob.Open(ctx, "azblob://mycontainer?cred_path=replace-with-path-to-credentials-file")
	if err != nil {
		// This is expected due to the invalid cred_path argument used above.
		fmt.Println("blob.Open failed due to invalid cred_path argument")
	}
}
Output: blob.Open failed due to invalid cred_path argument
Example (SasToken) ¶
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/Azure/azure-storage-blob-go/azblob"
	"gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// A fake account name and SASToken.
	accountName := azureblob.AccountName("myaccount")
	sasToken := azureblob.SASToken("https://myaccount.blob.core.windows.net/sascontainer/sasblob.txt?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D")
	bucketName := "my-bucket"

	// Since we're using a SASToken, we can use anonymous credentials.
	credential := azblob.NewAnonymousCredential()

	// Create a Pipeline, using whatever PipelineOptions you need.
	// This example overrides the default retry policy so calls can return promptly
	// for this test. Please review the timeout guidelines and set accordingly.
	// See https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations for more information.
	popts := azblob.PipelineOptions{
		Retry: azblob.RetryOptions{
			Policy:        azblob.RetryPolicyFixed,
			TryTimeout:    5 * time.Second,
			MaxTries:      1,
			RetryDelay:    0 * time.Second,
			MaxRetryDelay: 0 * time.Second,
		},
	}
	pipeline := azureblob.NewPipeline(credential, popts)

	// Create a blob.Bucket.
	// Note that we're not supplying azureblob.Options.Credential, so SignedURL
	// won't work. To use SignedURL, you need a real credential (see the other
	// example).
	b, err := azureblob.OpenBucket(ctx, pipeline, accountName, bucketName, &azureblob.Options{SASToken: sasToken})
	if err != nil {
		log.Fatal(err)
		return
	}

	// Now we can use b!
	_, err = b.ReadAll(ctx, "my-key")
	if err != nil {
		// This is expected due to the fake SAS token we used above.
		fmt.Println("ReadAll failed due to invalid SAS token")
	}
}
Output: ReadAll failed due to invalid SAS token
Index ¶
- Variables
- func NewCredential(accountName AccountName, accountKey AccountKey) (*azblob.SharedKeyCredential, error)
- func NewPipeline(credential azblob.Credential, opts azblob.PipelineOptions) pipeline.Pipeline
- func OpenBucket(ctx context.Context, pipeline pipeline.Pipeline, accountName AccountName, ...) (*blob.Bucket, error)
- type AccountKey
- type AccountName
- type Options
- type SASToken
Examples ¶

- Package
- Package (As)
- Package (Open)
- Package (SasToken)
Constants ¶
This section is empty.
Variables ¶
var DefaultIdentity = wire.NewSet(
	DefaultAccountName,
	DefaultAccountKey,
	NewCredential,
	wire.Bind(new(azblob.Credential), new(azblob.SharedKeyCredential)),
	wire.Value(azblob.PipelineOptions{}),
)
DefaultIdentity is a Wire provider set that provides an Azure storage account name, key, and SharedKeyCredential from environment variables.
var SASTokenIdentity = wire.NewSet(
	DefaultAccountName,
	DefaultSASToken,
	azblob.NewAnonymousCredential,
	wire.Value(azblob.PipelineOptions{}),
)
SASTokenIdentity is a Wire provider set that provides an Azure storage account name, SASToken, and anonymous credential from environment variables.
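Below is a hedged sketch of how DefaultIdentity might be combined with NewPipeline and OpenBucket in a Wire injector file; the injector function name setupBucket is an illustrative assumption, not part of this package, and Wire generates the actual implementation:

// +build wireinject

package main

import (
	"context"

	"github.com/google/wire"
	"gocloud.dev/blob"
	"gocloud.dev/blob/azureblob"
)

// setupBucket reads the account name and key from the environment (via
// DefaultIdentity), builds a pipeline, and opens the named container.
func setupBucket(ctx context.Context, containerName string) (*blob.Bucket, error) {
	panic(wire.Build(
		azureblob.DefaultIdentity,
		azureblob.NewPipeline,
		azureblob.OpenBucket,
		wire.Value(&azureblob.Options{}),
	))
}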
Functions ¶
func NewCredential ¶
func NewCredential(accountName AccountName, accountKey AccountKey) (*azblob.SharedKeyCredential, error)
NewCredential creates a SharedKeyCredential.
func NewPipeline ¶
func NewPipeline(credential azblob.Credential, opts azblob.PipelineOptions) pipeline.Pipeline
NewPipeline creates a Pipeline for making HTTP requests to Azure.
func OpenBucket ¶
func OpenBucket(ctx context.Context, pipeline pipeline.Pipeline, accountName AccountName, containerName string, opts *Options) (*blob.Bucket, error)
OpenBucket returns a *blob.Bucket backed by an Azure Storage account. See the package documentation for an example and https://godoc.org/github.com/Azure/azure-storage-blob-go/azblob for more details.
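For reference, here is a compact sketch (assuming the AZURE_STORAGE_ACCOUNT and AZURE_STORAGE_KEY environment variables are set and that a container named "my-container" exists) that wires NewCredential, NewPipeline, and OpenBucket together using the Default* helpers documented below:

package main

import (
	"context"
	"log"

	"github.com/Azure/azure-storage-blob-go/azblob"
	"gocloud.dev/blob/azureblob"
)

func main() {
	ctx := context.Background()

	// Load the account name and key from the environment.
	accountName, err := azureblob.DefaultAccountName()
	if err != nil {
		log.Fatal(err)
	}
	accountKey, err := azureblob.DefaultAccountKey()
	if err != nil {
		log.Fatal(err)
	}

	// Shared-key credential -> pipeline -> bucket.
	credential, err := azureblob.NewCredential(accountName, accountKey)
	if err != nil {
		log.Fatal(err)
	}
	pipeline := azureblob.NewPipeline(credential, azblob.PipelineOptions{})

	b, err := azureblob.OpenBucket(ctx, pipeline, accountName, "my-container", &azureblob.Options{})
	if err != nil {
		log.Fatal(err)
	}
	_ = b // Use b as shown in the package examples.
}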
Types ¶
type AccountKey ¶
type AccountKey string
AccountKey is an Azure storage account key (primary or secondary).
func DefaultAccountKey ¶
func DefaultAccountKey() (AccountKey, error)
DefaultAccountKey loads the Azure storage account key (primary or secondary) from the AZURE_STORAGE_KEY environment variable.
type AccountName ¶
type AccountName string
AccountName is an Azure storage account name.
func DefaultAccountName ¶
func DefaultAccountName() (AccountName, error)
DefaultAccountName loads the Azure storage account name from the AZURE_STORAGE_ACCOUNT environment variable.
type Options ¶
type Options struct {
	// Credential represents the authorizer for SignedURL.
	// Required to use SignedURL.
	Credential *azblob.SharedKeyCredential

	// SASToken can be provided along with anonymous credentials to use
	// delegated privileges.
	// See https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1#shared-access-signature-parameters.
	SASToken SASToken
}
Options sets options for constructing a *blob.Bucket backed by Azure Block Blob.
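Since Credential is required for SignedURL, here is a small hedged sketch of generating a time-limited URL; the package name azuredemo and helper name signedGetURL are illustrative, and it assumes b was opened with Options.Credential set, as in the package example:

package azuredemo

import (
	"context"
	"time"

	"gocloud.dev/blob"
)

// signedGetURL returns a URL for key that expires after 15 minutes
// (an arbitrary choice; adjust Expiry as needed). Without
// azureblob.Options.Credential set on the bucket, SignedURL returns an error.
func signedGetURL(ctx context.Context, b *blob.Bucket, key string) (string, error) {
	return b.SignedURL(ctx, key, &blob.SignedURLOptions{Expiry: 15 * time.Minute})
}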
type SASToken ¶
type SASToken string
SASToken is an Azure shared access signature. https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1
func DefaultSASToken ¶
func DefaultSASToken() (SASToken, error)
DefaultSASToken loads an Azure SAS token from the AZURE_STORAGE_SAS_TOKEN environment variable.