gemini

package
v0.1.7 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Feb 25, 2026 License: Apache-2.0 Imports: 18 Imported by: 0

Documentation

Overview

Package gemini provides AI integration for embeddings and LLM, supporting both Gemini and OpenAI providers.

TODO(2026-02-16): This package is named "gemini" for historical reasons, but it now supports multiple providers (Gemini and OpenAI). Recommend renaming directory/package to provider-neutral naming (for example `internal/integrations/ai`).

Index

Constants

This section is empty.

Variables

This section is empty.

Functions

This section is empty.

Types

type DuplicateCheckInput added in v0.1.0

type DuplicateCheckInput struct {
	CurrentIssue  *IssueInput
	SimilarIssues []SimilarIssueInput
}

DuplicateCheckInput represents input for duplicate detection.

type DuplicateResult added in v0.1.0

type DuplicateResult struct {
	IsDuplicate   bool            `json:"is_duplicate"`
	DuplicateOf   int             `json:"duplicate_of"` // Issue number
	Confidence    float64         `json:"confidence"`   // 0.0-1.0
	Reasoning     string          `json:"reasoning"`
	SimilarIssues json.RawMessage `json:"similar_issues"` // Flexible: can be []int or []object
}

DuplicateResult holds duplicate detection analysis.

type Embedder

type Embedder struct {
	// contains filtered or unexported fields
}

Embedder generates embeddings using Gemini or OpenAI.

func NewEmbedder

func NewEmbedder(apiKey, model string) (*Embedder, error)

NewEmbedder creates a new embedder.

func (*Embedder) Close

func (e *Embedder) Close() error

Close closes underlying provider clients.

func (*Embedder) Dimensions

func (e *Embedder) Dimensions() int

Dimensions returns the dimensionality of the embeddings.

func (*Embedder) Embed

func (e *Embedder) Embed(ctx context.Context, text string) ([]float32, error)

Embed generates an embedding for a single text. It retries on transient errors (429/5xx) with exponential backoff.

func (*Embedder) EmbedBatch

func (e *Embedder) EmbedBatch(ctx context.Context, texts []string) ([][]float32, error)

EmbedBatch generates embeddings for multiple texts.

func (*Embedder) Model added in v0.1.7

func (e *Embedder) Model() string

Model returns the resolved model.

func (*Embedder) Provider added in v0.1.7

func (e *Embedder) Provider() string

Provider returns the resolved provider.

type IssueInput

type IssueInput struct {
	Title  string
	Body   string
	Author string
	Labels []string
}

IssueInput represents the issue data needed for analysis.

type LLMClient

type LLMClient struct {
	// contains filtered or unexported fields
}

LLMClient provides LLM-based analysis using Gemini or OpenAI.

func NewLLMClient

func NewLLMClient(apiKey string, model ...string) (*LLMClient, error)

NewLLMClient creates a new LLM client.

func (*LLMClient) AnalyzeIssue

func (l *LLMClient) AnalyzeIssue(ctx context.Context, issue *IssueInput) (*TriageResult, error)

AnalyzeIssue performs triage analysis on an issue. It retries on transient errors (429/5xx) with exponential backoff.

func (*LLMClient) AssessQuality added in v0.1.0

func (l *LLMClient) AssessQuality(ctx context.Context, issue *IssueInput) (*QualityResult, error)

AssessQuality evaluates issue completeness and clarity. It retries on transient errors (429/5xx) with exponential backoff.

func (*LLMClient) Close

func (l *LLMClient) Close() error

Close closes underlying provider clients.

func (*LLMClient) DetectDuplicate added in v0.1.0

func (l *LLMClient) DetectDuplicate(ctx context.Context, input *DuplicateCheckInput) (*DuplicateResult, error)

DetectDuplicate analyzes semantic similarity for duplicate detection. It retries on transient errors (429/5xx) with exponential backoff.

func (*LLMClient) GenerateResponse

func (l *LLMClient) GenerateResponse(ctx context.Context, similar []SimilarIssueInput) (string, error)

GenerateResponse creates a comment for similar issues. It retries on transient errors (429/5xx) with exponential backoff.

func (*LLMClient) Model added in v0.1.7

func (l *LLMClient) Model() string

Model returns the resolved model.

func (*LLMClient) Provider added in v0.1.7

func (l *LLMClient) Provider() string

Provider returns the resolved provider.

func (*LLMClient) RouteIssue added in v0.1.0

func (l *LLMClient) RouteIssue(ctx context.Context, input *RouteIssueInput) (*RouterResult, error)

RouteIssue analyzes issue intent and ranks repositories by relevance. It retries on transient errors (429/5xx) with exponential backoff.

type Provider added in v0.1.7

type Provider string

Provider identifies the active AI provider.

const (
	ProviderGemini Provider = "gemini"
	ProviderOpenAI Provider = "openai"
)

func ResolveProvider added in v0.1.7

func ResolveProvider(apiKey string) (Provider, string, error)

ResolveProvider selects provider/key using environment variables and config key.

Selection order:

 1. If both GEMINI_API_KEY and OPENAI_API_KEY are set, Gemini wins.
 2. If only one env key is set, that provider is selected.
 3. If no env keys are set, fall back to the config API key.

type QualityResult added in v0.1.0

type QualityResult struct {
	Score       float64  `json:"score"`       // 0.0 (poor) to 1.0 (excellent)
	Assessment  string   `json:"assessment"`  // "excellent"|"good"|"needs-improvement"|"poor"
	Issues      []string `json:"issues"`      // Missing elements
	Suggestions []string `json:"suggestions"` // How to improve
	Reasoning   string   `json:"reasoning"`
}

QualityResult holds issue quality assessment.

type RepositoryCandidate added in v0.1.0

type RepositoryCandidate struct {
	Org         string
	Repo        string
	Description string
	Definition  string // Full repository documentation (README, etc.)
}

RepositoryCandidate represents a repository option for routing.

type RepositoryRanking added in v0.1.0

type RepositoryRanking struct {
	Org        string  `json:"org"`
	Repo       string  `json:"repo"`
	Confidence float64 `json:"confidence"` // 0.0-1.0
	Reasoning  string  `json:"reasoning"`
}

RepositoryRanking represents a repository match with confidence.

type RetryConfig added in v0.1.7

type RetryConfig struct {
	MaxRetries  int           // Maximum number of retry attempts (default: 5)
	BaseDelay   time.Duration // Initial delay before first retry (default: 1s)
	MaxDelay    time.Duration // Maximum delay cap (default: 60s)
	JitterRatio float64       // Jitter as fraction of delay, 0.0-1.0 (default: 0.25)
}

RetryConfig holds configuration for exponential backoff retry.

func DefaultRetryConfig added in v0.1.7

func DefaultRetryConfig() RetryConfig

DefaultRetryConfig returns sensible defaults for Gemini API retries. Defaults: 5 retries, 1s base delay, 60s max delay, 25% jitter.

type RouteIssueInput added in v0.1.0

type RouteIssueInput struct {
	Issue        *IssueInput
	Repositories []RepositoryCandidate
	CurrentRepo  string // Current repository (org/repo) where issue was created
}

RouteIssueInput represents input for repository routing.

type RouterResult added in v0.1.0

type RouterResult struct {
	Rankings  []RepositoryRanking
	BestMatch *RepositoryRanking
}

RouterResult holds repository routing analysis.

type SimilarIssueInput

type SimilarIssueInput struct {
	Number     int
	Title      string
	Body       string // Full text content from vector DB
	URL        string
	Similarity float64
	State      string
}

SimilarIssueInput represents a similar issue found.

type TriageResult

type TriageResult struct {
	Quality         string   `json:"quality"` // "good", "needs-improvement", "poor"
	SuggestedLabels []string `json:"suggested_labels"`
	Reasoning       string   `json:"reasoning"`
	IsDuplicate     bool     `json:"is_duplicate"`
	DuplicateReason string   `json:"duplicate_reason"`
}

TriageResult holds the result of issue triage analysis.

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL