huggingface

package
v0.1.13 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Jul 15, 2025 License: MIT Imports: 9 Imported by: 0

Documentation

Index

Examples

Constants

This section is empty.

Variables

View Source
var (
	// ErrEmptyResponse is the sentinel error for an API response that
	// contained no data.
	ErrEmptyResponse            = errors.New("empty response")
	// ErrMissingToken is the sentinel error for a missing Hugging Face API
	// token; the message lists every supported source (HF_TOKEN,
	// HUGGINGFACEHUB_API_TOKEN, or the cached token file).
	ErrMissingToken             = errors.New("missing the Hugging Face API token. Set it in the HF_TOKEN or HUGGINGFACEHUB_API_TOKEN environment variable, or save it to ~/.cache/huggingface/token") //nolint:lll
	// ErrUnexpectedResponseLength is the sentinel error for a response whose
	// element count did not match what was requested.
	ErrUnexpectedResponseLength = errors.New("unexpected length of response")
)

Functions

This section is empty.

Types

type LLM

type LLM struct {
	CallbacksHandler callbacks.Handler
	// contains filtered or unexported fields
}

func New

func New(opts ...Option) (*LLM, error)
Example (StandardInference)
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/huggingface"
)

func main() {
	ctx := context.Background()

	// Build a HuggingFace client that talks to the standard inference API.
	// The API token is picked up from the HF_TOKEN or
	// HUGGINGFACEHUB_API_TOKEN environment variable.
	llm, err := huggingface.New(
		huggingface.WithModel("HuggingFaceH4/zephyr-7b-beta"),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Run a single completion against the configured model.
	completion, err := llm.Call(ctx, "Hello, how are you?",
		llms.WithTemperature(0.5),
		llms.WithMaxLength(50),
	)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(completion)
}
Example (WithInferenceProvider)
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/tmc/langchaingo/llms"
	"github.com/tmc/langchaingo/llms/huggingface"
)

func main() {
	ctx := context.Background()

	// Build a HuggingFace client routed through a specific inference
	// provider. The API token is picked up from the HF_TOKEN or
	// HUGGINGFACEHUB_API_TOKEN environment variable.
	llm, err := huggingface.New(
		huggingface.WithModel("deepseek-ai/DeepSeek-R1-0528"),
		huggingface.WithInferenceProvider("hyperbolic"),
	)
	if err != nil {
		log.Fatal(err)
	}

	// Run a single completion against the configured model.
	completion, err := llm.Call(ctx, "What is the capital of France?",
		llms.WithTemperature(0.5),
		llms.WithMaxLength(50),
	)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(completion)
}

func (*LLM) Call

func (o *LLM) Call(ctx context.Context, prompt string, options ...llms.CallOption) (string, error)

Call implements the LLM interface.

func (*LLM) CreateEmbedding

func (o *LLM) CreateEmbedding(
	ctx context.Context,
	inputTexts []string,
	model string,
	task string,
) ([][]float32, error)

CreateEmbedding creates embeddings for the given input texts.

func (*LLM) GenerateContent

func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageContent, options ...llms.CallOption) (*llms.ContentResponse, error)

GenerateContent implements the Model interface.

type Option

type Option func(*options)

func WithHTTPClient

func WithHTTPClient(httpClient *http.Client) Option

WithHTTPClient passes a custom HTTP client to the HuggingFace client.

func WithInferenceProvider

func WithInferenceProvider(provider string) Option

WithInferenceProvider passes the inference provider to use with HuggingFace's router. When set, the client will use the router URL (https://router.huggingface.co/{provider}/v1/...) instead of the default inference API. Common providers include "hyperbolic", "nebius", etc.

func WithModel

func WithModel(model string) Option

WithModel passes the HuggingFace model to the client. If not set, the default model will be used.

func WithToken

func WithToken(token string) Option

WithToken passes the HuggingFace API token to the client. If not set, the token is read from the HF_TOKEN or HUGGINGFACEHUB_API_TOKEN environment variable.

func WithURL

func WithURL(url string) Option

WithURL passes the HuggingFace URL to the client. If not set, the default URL will be used.

Directories

Path Synopsis
internal

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL