Documentation ¶
Index ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func BuildPRReviewPrompt ¶
func BuildPRReviewPrompt(payload *queue.PRReviewTaskPayload, files []*github.CommitFile) string
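A brief usage sketch; the surrounding helper function and the fmt import are illustrative only, and the payload and files are assumed to come from the task queue and the GitHub API respectively.

func examplePRReviewPrompt(payload *queue.PRReviewTaskPayload, files []*github.CommitFile) {
	// payload comes from the queued review task; files are the changed
	// files fetched from the GitHub API for the pull request.
	prompt := BuildPRReviewPrompt(payload, files)

	// The prompt is then handed to an AIClient implementation.
	fmt.Println("prompt length:", len(prompt))
}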
Types ¶
type AIClient ¶
type AIClient interface {
	GenerateReviewForPR(ctx context.Context, prompt string) (string, error)
	GenerateSampleReviewForPR() (string, error)
}
AIClient is the main interface for AI-powered code review (OpenAI, REST LLM, etc.)
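A sketch of driving any AIClient implementation end to end; the helper name, the 60-second timeout, and the context, time, and fmt imports are assumptions rather than part of this package.

func reviewPullRequest(client AIClient, prompt string) {
	ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
	defer cancel()

	raw, err := client.GenerateReviewForPR(ctx, prompt)
	if err != nil {
		fmt.Println("review generation failed:", err)
		return
	}

	review, err := ParseReviewResponse(raw)
	if err != nil {
		fmt.Println("could not parse review JSON:", err)
		return
	}
	fmt.Println("review action:", review.Action)
}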
type FileGroup ¶
type FileGroup struct {
	Path     string        `json:"path"`
	Comments []LineComment `json:"comments"`
}
type LLMClient ¶
type LLMClient interface {
	// AnalyzeCode takes code and returns a review or suggestions.
	AnalyzeCode(code string) (string, error)
}
LLMClient defines the interface for interacting with an LLM service (e.g., HuggingFace, Ollama, etc.)
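A hypothetical implementation sketch showing how a custom backend could satisfy LLMClient; EchoLLMClient is not part of this package, and fmt is assumed to be imported.

// EchoLLMClient is a stub backend used here purely for illustration.
type EchoLLMClient struct{}

// AnalyzeCode satisfies LLMClient by returning a canned summary string.
func (EchoLLMClient) AnalyzeCode(code string) (string, error) {
	return fmt.Sprintf("reviewed %d bytes of code", len(code)), nil
}

// Compile-time check that the stub satisfies the interface.
var _ LLMClient = EchoLLMClient{}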
type LineComment ¶
type OpenAIClient ¶
type OpenAIClient struct {
	// contains filtered or unexported fields
}
func NewOpenAIClient ¶
func NewOpenAIClient(cfg *config.Config) *OpenAIClient
NewOpenAIClient initializes the OpenAIClient singleton and returns it.
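A minimal wiring sketch, assuming cfg is the application's loaded config.Config and that context and fmt are imported; the helper name is illustrative.

func exampleNewOpenAIClient(cfg *config.Config) {
	client := NewOpenAIClient(cfg) // repeated calls return the same singleton

	review, err := client.GenerateReviewForPR(context.Background(), "Review the following diff: ...")
	if err != nil {
		fmt.Println("OpenAI error:", err)
		return
	}
	fmt.Println(review)
}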
func (*OpenAIClient) GenerateReviewForPR ¶
func (c *OpenAIClient) GenerateReviewForPR(ctx context.Context, prompt string) (string, error)
func (*OpenAIClient) GenerateSampleReviewForPR ¶
func (c *OpenAIClient) GenerateSampleReviewForPR() (string, error)
Use this during development, so there is no need to call the AI API to get the review JSON.
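A sketch of relying on the canned sample during local development; the helper name is illustrative and fmt is assumed to be imported.

func exampleSampleReview(client *OpenAIClient) {
	// No API key or network call is needed for the canned sample.
	sample, err := client.GenerateSampleReviewForPR()
	if err != nil {
		fmt.Println("sample review error:", err)
		return
	}
	// The sample JSON can be fed straight into ParseReviewResponse.
	fmt.Println(sample)
}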
type PRReviewResponse ¶
type PRReviewResponse struct {
	Summary  string      `json:"summary,omitempty"`
	Action   string      `json:"action"` // COMMENT, APPROVE, REQUEST_CHANGES
	Comments []FileGroup `json:"comments"`
}
func ParseReviewResponse ¶
func ParseReviewResponse(jsonStr string) (*PRReviewResponse, error)
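A sketch of parsing raw model output into a PRReviewResponse and walking the grouped comments; the JSON literal is illustrative only, and since LineComment's fields are not documented here the loop only counts entries.

func exampleParseReviewResponse() {
	raw := `{"summary":"Looks good overall","action":"COMMENT","comments":[{"path":"main.go","comments":[]}]}`

	review, err := ParseReviewResponse(raw)
	if err != nil {
		fmt.Println("invalid review JSON:", err)
		return
	}

	fmt.Println("action:", review.Action)
	for _, group := range review.Comments {
		fmt.Printf("%s: %d comment(s)\n", group.Path, len(group.Comments))
	}
}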
type RESTLLMClient ¶
RESTLLMClient is a client that calls an external LLM REST API (Python, HuggingFace, Ollama, etc.)
func (*RESTLLMClient) AnalyzeCode ¶
func (c *RESTLLMClient) AnalyzeCode(code string) (string, error)
AnalyzeCode sends code to the LLM REST API and returns the response.
func (*RESTLLMClient) GenerateReviewForPR ¶
func (c *RESTLLMClient) GenerateReviewForPR(ctx context.Context, prompt string) (string, error)
Example usage of LLM integration in Go
func ExampleLLMUsage() {
	// Configure your LLM endpoint (e.g., HuggingFace Inference API, Ollama, etc.)
	llmClient := &RESTLLMClient{Endpoint: "http://localhost:11434/api/generate"}
	code := "func add(a int, b int) int { return a + b }"
	result, err := llmClient.AnalyzeCode(code)
	if err != nil {
		fmt.Println("LLM error:", err)
		return
	}
	fmt.Println("LLM review result:", result)
}
Ensure RESTLLMClient implements AIClient for code review integration
func (*RESTLLMClient) GenerateSampleReviewForPR ¶
func (c *RESTLLMClient) GenerateSampleReviewForPR() (string, error)
type SonarQubeClient ¶
--- SonarQube Integration (Static Analysis) --- See: server/go-static-analyzers-research.md for research and recommendations.
SonarQubeClient connects to a real SonarQube server via REST API.
func (*SonarQubeClient) AnalyzeCode ¶
func (c *SonarQubeClient) AnalyzeCode(projectKey string) (string, error)
AnalyzeCode triggers SonarQube analysis and fetches results.
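A minimal sketch of triggering an analysis for an existing SonarQube project; how the client is constructed (server URL, auth token) is not shown in this documentation, the helper name is illustrative, and the project key below is a placeholder.

func exampleSonarQubeAnalysis(client *SonarQubeClient) {
	// "my-service" must already exist as a project key on the SonarQube server.
	report, err := client.AnalyzeCode("my-service")
	if err != nil {
		fmt.Println("SonarQube analysis failed:", err)
		return
	}
	fmt.Println("SonarQube findings:", report)
}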
func (*SonarQubeClient) GenerateReviewForPR ¶
func (c *SonarQubeClient) GenerateReviewForPR(ctx context.Context, prompt string) (string, error)
Ensure SonarQubeClient implements AIClient for code review integration
func (*SonarQubeClient) GenerateSampleReviewForPR ¶
func (c *SonarQubeClient) GenerateSampleReviewForPR() (string, error)