Documentation
¶
Index ¶
- Constants
- func BuildQueryExpansionUserPrompt(text string, maxTerms int) string
- func BuildQueryRewriteUserPrompt(userQuery string, extraInstruction string) string
- func ExpandedQueryFromModelAnswer(original, modelOut string, maxTerms int, separator string) (expanded string, terms []string)
- func ExtractJSONFromLLMOutput(s string, maxChars int) string
- func GenerateLingRequestID() string
- func NormalizeRewrittenQuery(s string) string
- type AlibabaHandler
- func (h *AlibabaHandler) GetMaxMemoryMessages() int
- func (h *AlibabaHandler) Interrupt()
- func (h *AlibabaHandler) Provider() string
- func (h *AlibabaHandler) Query(text, model string) (string, error)
- func (h *AlibabaHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (h *AlibabaHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (h *AlibabaHandler) ResetMemory()
- func (h *AlibabaHandler) SetMaxMemoryMessages(n int)
- func (h *AlibabaHandler) SummarizeMemory(model string) (string, error)
- type AnthropicHandler
- func (h *AnthropicHandler) GetMaxMemoryMessages() int
- func (h *AnthropicHandler) Interrupt()
- func (h *AnthropicHandler) Provider() string
- func (h *AnthropicHandler) Query(text, model string) (string, error)
- func (h *AnthropicHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (h *AnthropicHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (h *AnthropicHandler) ResetMemory()
- func (h *AnthropicHandler) SetMaxMemoryMessages(n int)
- func (h *AnthropicHandler) SummarizeMemory(model string) (string, error)
- type CompletionTokensDetails
- type CozeHandler
- func (h *CozeHandler) GetMaxMemoryMessages() int
- func (h *CozeHandler) Interrupt()
- func (h *CozeHandler) Provider() string
- func (h *CozeHandler) Query(text, model string) (string, error)
- func (h *CozeHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (h *CozeHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (h *CozeHandler) ResetMemory()
- func (h *CozeHandler) SetMaxMemoryMessages(n int)
- func (h *CozeHandler) SummarizeMemory(model string) (string, error)
- type FewShotExample
- type LLMDetails
- type LLMHandler
- type LLMOptions
- type LLMProvider
- type LMStudioHandler
- func (h *LMStudioHandler) GetMaxMemoryMessages() int
- func (h *LMStudioHandler) Interrupt()
- func (h *LMStudioHandler) Provider() string
- func (h *LMStudioHandler) Query(text, model string) (string, error)
- func (h *LMStudioHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (h *LMStudioHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (h *LMStudioHandler) ResetMemory()
- func (h *LMStudioHandler) SetMaxMemoryMessages(n int)
- func (h *LMStudioHandler) SummarizeMemory(model string) (string, error)
- type OllamaHandler
- func (h *OllamaHandler) GetMaxMemoryMessages() int
- func (h *OllamaHandler) Interrupt()
- func (h *OllamaHandler) Provider() string
- func (h *OllamaHandler) Query(text, model string) (string, error)
- func (h *OllamaHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (h *OllamaHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (h *OllamaHandler) ResetMemory()
- func (h *OllamaHandler) SetMaxMemoryMessages(n int)
- func (h *OllamaHandler) SummarizeMemory(model string) (string, error)
- type OpenaiHandler
- func (oh *OpenaiHandler) GetMaxMemoryMessages() int
- func (oh *OpenaiHandler) Interrupt()
- func (oh *OpenaiHandler) Provider() string
- func (oh *OpenaiHandler) Query(text, model string) (string, error)
- func (oh *OpenaiHandler) QueryStream(text string, options *QueryOptions, ...) (*QueryResponse, error)
- func (oh *OpenaiHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
- func (oh *OpenaiHandler) ResetMemory()
- func (oh *OpenaiHandler) SetMaxMemoryMessages(n int)
- func (oh *OpenaiHandler) SummarizeMemory(model string) (string, error)
- type PromptTokensDetails
- type QueryChoice
- type QueryExpansion
- type QueryOptions
- type QueryResponse
- type QueryRewrite
- type SelfQueryExtractor
- type SelfQueryFilterSpec
- type SelfQueryOptions
- type SelfQueryResult
- type TokenUsage
Constants ¶
const ( ProviderOpenAI = "openai" ProviderOllama = "ollama" ProviderAlibaba = "alibaba" ProviderAnthropic = "anthropic" ProviderLMStudio = "lmstudio" ProviderCoze = "coze" )
const ( LLM_OPENAI = "llm.openai" LLM_ANTHROPIC = "llm.anthropic" LLM_COZE = "llm.coze" LLM_OLLAMA = "llm.ollama" LLM_LMSTUDIO = "llm.lmstudio" LLM_ALIBABA = "llm.alibaba" )
const ( OutputFormatText = "text" OutputFormatJSON = "json" OutputFormatJSONObject = "json_object" OutputFormatJSONSchema = "json_schema" OutputFormatXML = "xml" OutputFormatHTML = "html" OutputFormatSQL = "sql" )
Variables ¶
This section is empty.
Functions ¶
func BuildQueryExpansionUserPrompt ¶
BuildQueryExpansionUserPrompt builds the user message for one-shot query expansion.
func BuildQueryRewriteUserPrompt ¶
BuildQueryRewriteUserPrompt builds a one-shot user message for LLM-based query rewrite.
func ExpandedQueryFromModelAnswer ¶
func ExpandedQueryFromModelAnswer(original, modelOut string, maxTerms int, separator string) (expanded string, terms []string)
ExpandedQueryFromModelAnswer parses expansion model output and joins terms with the original query.
func ExtractJSONFromLLMOutput ¶
ExtractJSONFromLLMOutput pulls a JSON object/array substring from common LLM wrappers (fences, prose).
func GenerateLingRequestID ¶
func GenerateLingRequestID() string
func NormalizeRewrittenQuery ¶
NormalizeRewrittenQuery trims fences/quotes and keeps the first line of model output.
Types ¶
type AlibabaHandler ¶
type AlibabaHandler struct {
// contains filtered or unexported fields
}
func NewAlibabaHandler ¶
func NewAlibabaHandler(ctx context.Context, llmOptions *LLMOptions) (*AlibabaHandler, error)
func (*AlibabaHandler) GetMaxMemoryMessages ¶
func (h *AlibabaHandler) GetMaxMemoryMessages() int
func (*AlibabaHandler) Interrupt ¶
func (h *AlibabaHandler) Interrupt()
func (*AlibabaHandler) Provider ¶
func (h *AlibabaHandler) Provider() string
func (*AlibabaHandler) QueryStream ¶
func (h *AlibabaHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*AlibabaHandler) QueryWithOptions ¶
func (h *AlibabaHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*AlibabaHandler) ResetMemory ¶
func (h *AlibabaHandler) ResetMemory()
func (*AlibabaHandler) SetMaxMemoryMessages ¶
func (h *AlibabaHandler) SetMaxMemoryMessages(n int)
func (*AlibabaHandler) SummarizeMemory ¶
func (h *AlibabaHandler) SummarizeMemory(model string) (string, error)
type AnthropicHandler ¶
type AnthropicHandler struct {
// contains filtered or unexported fields
}
func NewAnthropicHandler ¶
func NewAnthropicHandler(ctx context.Context, llmOptions *LLMOptions) (*AnthropicHandler, error)
func (*AnthropicHandler) GetMaxMemoryMessages ¶
func (h *AnthropicHandler) GetMaxMemoryMessages() int
func (*AnthropicHandler) Interrupt ¶
func (h *AnthropicHandler) Interrupt()
func (*AnthropicHandler) Provider ¶
func (h *AnthropicHandler) Provider() string
func (*AnthropicHandler) Query ¶
func (h *AnthropicHandler) Query(text, model string) (string, error)
func (*AnthropicHandler) QueryStream ¶
func (h *AnthropicHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*AnthropicHandler) QueryWithOptions ¶
func (h *AnthropicHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*AnthropicHandler) ResetMemory ¶
func (h *AnthropicHandler) ResetMemory()
func (*AnthropicHandler) SetMaxMemoryMessages ¶
func (h *AnthropicHandler) SetMaxMemoryMessages(n int)
func (*AnthropicHandler) SummarizeMemory ¶
func (h *AnthropicHandler) SummarizeMemory(model string) (string, error)
type CompletionTokensDetails ¶
type CozeHandler ¶
type CozeHandler struct {
// contains filtered or unexported fields
}
func NewCozeHandler ¶
func NewCozeHandler(ctx context.Context, llmOptions *LLMOptions) (*CozeHandler, error)
func (*CozeHandler) GetMaxMemoryMessages ¶
func (h *CozeHandler) GetMaxMemoryMessages() int
func (*CozeHandler) Interrupt ¶
func (h *CozeHandler) Interrupt()
func (*CozeHandler) Provider ¶
func (h *CozeHandler) Provider() string
func (*CozeHandler) QueryStream ¶
func (h *CozeHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*CozeHandler) QueryWithOptions ¶
func (h *CozeHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*CozeHandler) ResetMemory ¶
func (h *CozeHandler) ResetMemory()
func (*CozeHandler) SetMaxMemoryMessages ¶
func (h *CozeHandler) SetMaxMemoryMessages(n int)
func (*CozeHandler) SummarizeMemory ¶
func (h *CozeHandler) SummarizeMemory(model string) (string, error)
type FewShotExample ¶
type LLMDetails ¶
type LLMDetails struct {
RequestID string
Provider string
BaseURL string
Model string
Input string
SystemPrompt string
N int
MaxTokens int
EstimatedMaxOutputChars int
FilterEmoji bool
RequestedOutputFormat string
AppliedResponseFormat string
ResponseFormatApplied bool
ResponseID string
Object string
Created int64
SystemFingerprint string
PromptFilterResultsJSON string
ServiceTierJSON string
ChoicesCount int
Choices []QueryChoice
Usage *TokenUsage
UsageRawJSON string
ChoicesRawJSON string
RawResponseJSON string
}
type LLMHandler ¶
type LLMHandler interface {
Query(text, model string) (string, error)
QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
Provider() string
Interrupt()
ResetMemory()
SummarizeMemory(model string) (string, error)
SetMaxMemoryMessages(n int)
GetMaxMemoryMessages() int
}
LLMHandler is the common LLM handler interface.
func NewLLMProvider ¶
func NewLLMProvider(ctx context.Context, provider, apiKey, apiURL, systemPrompt string) (LLMHandler, error)
NewLLMProvider provides a SoulNexus-like factory signature for Ling.
func NewProviderHandler ¶
func NewProviderHandler(ctx context.Context, provider string, llmOptions *LLMOptions) (LLMHandler, error)
NewProviderHandler creates an LLM handler by provider type. Note: in Ling, non-OpenAI providers currently use OpenAI-compatible chat API shape.
type LLMOptions ¶
type LMStudioHandler ¶
type LMStudioHandler struct {
// contains filtered or unexported fields
}
func NewLMStudioHandler ¶
func NewLMStudioHandler(ctx context.Context, llmOptions *LLMOptions) (*LMStudioHandler, error)
func (*LMStudioHandler) GetMaxMemoryMessages ¶
func (h *LMStudioHandler) GetMaxMemoryMessages() int
func (*LMStudioHandler) Interrupt ¶
func (h *LMStudioHandler) Interrupt()
func (*LMStudioHandler) Provider ¶
func (h *LMStudioHandler) Provider() string
func (*LMStudioHandler) QueryStream ¶
func (h *LMStudioHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*LMStudioHandler) QueryWithOptions ¶
func (h *LMStudioHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*LMStudioHandler) ResetMemory ¶
func (h *LMStudioHandler) ResetMemory()
func (*LMStudioHandler) SetMaxMemoryMessages ¶
func (h *LMStudioHandler) SetMaxMemoryMessages(n int)
func (*LMStudioHandler) SummarizeMemory ¶
func (h *LMStudioHandler) SummarizeMemory(model string) (string, error)
type OllamaHandler ¶
type OllamaHandler struct {
// contains filtered or unexported fields
}
func NewOllamaHandler ¶
func NewOllamaHandler(ctx context.Context, llmOptions *LLMOptions) (*OllamaHandler, error)
func (*OllamaHandler) GetMaxMemoryMessages ¶
func (h *OllamaHandler) GetMaxMemoryMessages() int
func (*OllamaHandler) Interrupt ¶
func (h *OllamaHandler) Interrupt()
func (*OllamaHandler) Provider ¶
func (h *OllamaHandler) Provider() string
func (*OllamaHandler) QueryStream ¶
func (h *OllamaHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*OllamaHandler) QueryWithOptions ¶
func (h *OllamaHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*OllamaHandler) ResetMemory ¶
func (h *OllamaHandler) ResetMemory()
func (*OllamaHandler) SetMaxMemoryMessages ¶
func (h *OllamaHandler) SetMaxMemoryMessages(n int)
func (*OllamaHandler) SummarizeMemory ¶
func (h *OllamaHandler) SummarizeMemory(model string) (string, error)
type OpenaiHandler ¶
type OpenaiHandler struct {
// contains filtered or unexported fields
}
func NewOpenaiHandler ¶
func NewOpenaiHandler(ctx context.Context, llmOptions *LLMOptions) (*OpenaiHandler, error)
func (*OpenaiHandler) GetMaxMemoryMessages ¶
func (oh *OpenaiHandler) GetMaxMemoryMessages() int
func (*OpenaiHandler) Interrupt ¶
func (oh *OpenaiHandler) Interrupt()
func (*OpenaiHandler) Provider ¶
func (oh *OpenaiHandler) Provider() string
func (*OpenaiHandler) QueryStream ¶
func (oh *OpenaiHandler) QueryStream(text string, options *QueryOptions, callback func(segment string, isComplete bool) error) (*QueryResponse, error)
func (*OpenaiHandler) QueryWithOptions ¶
func (oh *OpenaiHandler) QueryWithOptions(text string, options *QueryOptions) (*QueryResponse, error)
func (*OpenaiHandler) ResetMemory ¶
func (oh *OpenaiHandler) ResetMemory()
func (*OpenaiHandler) SetMaxMemoryMessages ¶
func (oh *OpenaiHandler) SetMaxMemoryMessages(n int)
func (*OpenaiHandler) SummarizeMemory ¶
func (oh *OpenaiHandler) SummarizeMemory(model string) (string, error)
type PromptTokensDetails ¶
type QueryChoice ¶
type QueryExpansion ¶
QueryExpansion contains the results of query expansion
type QueryOptions ¶
type QueryOptions struct {
Model string
N int
MaxTokens int
Temperature float32
TopP float32
LogitBias map[string]int
FilterEmoji bool
EnableJSONOutput bool
OutputFormat string
EmotionalTone bool // EmotionalTone, when true, appends a short instruction so replies read warmer and more human (still factual).
EnableQueryExpansion bool // EnableQueryExpansion enables automatic query expansion using LLM
ExpansionMaxTerms int // ExpansionMaxTerms maximum number of expansion terms
ExpansionSeparator string // ExpansionSeparator separator for expanded terms
// EnableQueryRewrite rewrites the user message with a stateless LLM call before expansion/main query.
EnableQueryRewrite bool
// QueryRewriteModel overrides the model for the rewrite call only (empty = use Model, then handler default).
QueryRewriteModel string
// QueryRewriteInstruction is appended to the rewrite prompt as extra constraints.
QueryRewriteInstruction string
// EnableSelfQueryJSONOutput requests strict JSON object replies (response_format json_object on OpenaiHandler).
// SelfQueryExtractor sets this by default; other handlers may ignore it and still return parseable text.
EnableSelfQueryJSONOutput bool
}
type QueryResponse ¶
type QueryResponse struct {
Provider string
Model string
Choices []QueryChoice
Usage *TokenUsage
// Expansion contains query expansion results if enabled
Expansion *QueryExpansion
// Rewrite contains query rewrite results if enabled
Rewrite *QueryRewrite
}
type QueryRewrite ¶
QueryRewrite records the optional LLM rewrite step applied before expansion / main completion.
type SelfQueryExtractor ¶
type SelfQueryExtractor struct {
LLM LLMHandler
AllowedFields []string
}
SelfQueryExtractor turns a natural-language question into a search query + structured filters for the retrieval step of RAG. It does not execute search or produce grounded answers by itself. Use NewSelfQueryExtractor and call Extract only when you need that decomposition (on demand).
func NewSelfQueryExtractor ¶
func NewSelfQueryExtractor(h LLMHandler, allowedFields []string) *SelfQueryExtractor
NewSelfQueryExtractor returns an extractor. allowedFields may be empty (no restriction hint).
func (*SelfQueryExtractor) Extract ¶
func (e *SelfQueryExtractor) Extract(ctx context.Context, question string, opt *SelfQueryOptions) (*SelfQueryResult, error)
Extract runs the self-query prompt and parses JSON from the model output.
type SelfQueryFilterSpec ¶
type SelfQueryFilterSpec struct {
Namespace string `json:"namespace,omitempty"`
Source string `json:"source,omitempty"`
DocType string `json:"doc_type,omitempty"`
Location string `json:"location,omitempty"`
Years []string `json:"years,omitempty"`
Dates []string `json:"dates,omitempty"`
TagsAny []string `json:"tags_any,omitempty"`
}
SelfQueryFilterSpec is the structured filter subset produced by the model.
type SelfQueryOptions ¶
type SelfQueryOptions struct {
Model string
// AllowedFields restricts which filter keys the model may use (prompt hint).
AllowedFields []string
// MaxJSONChars caps input length before JSON extraction (0 = default 16000).
MaxJSONChars int
// UsePlainQuery uses Query instead of QueryWithOptions with JSON object mode.
// Set true for backends that handle JSON poorly in structured-output mode.
UsePlainQuery bool
}
SelfQueryOptions configures a single SelfQuery extraction call.
type SelfQueryResult ¶
type SelfQueryResult struct {
Query string
Filters map[string]any
Spec SelfQueryFilterSpec
Raw string
}
SelfQueryResult is the parsed self-query output plus a Qdrant-oriented filter map. It is an intermediate retrieval plan (rewritten query + metadata constraints), not retrieved documents and not the final user-facing answer; wire it to your search/vector layer, then run generation over returned chunks.
func SelfQueryExtract ¶
func SelfQueryExtract(ctx context.Context, h LLMHandler, question string, allowedFields []string, opt *SelfQueryOptions) (*SelfQueryResult, error)
SelfQueryExtract is a convenience wrapper around NewSelfQueryExtractor(...).Extract(...).
type TokenUsage ¶
type TokenUsage struct {
PromptTokens int
CompletionTokens int
TotalTokens int
PromptTokensDetails *PromptTokensDetails
CompletionTokensDetails *CompletionTokensDetails
}