Documentation
¶
Overview ¶
Package complexity analyzes code complexity, computing per-function cyclomatic complexity, cognitive complexity, and nesting depth, and aggregating them into reports with distributions, thresholds, and high-risk function listings.
Index ¶
- Constants
- Variables
- func RegisterPlotSections()
- type AggregateData
- type AggregateMetric
- type Aggregator
- type Analyzer
- func (c *Analyzer) Analyze(root *node.Node) (analyze.Report, error)
- func (c *Analyzer) Configure(_ map[string]any) error
- func (c *Analyzer) CreateAggregator() analyze.ResultAggregator
- func (c *Analyzer) CreateReportSection(report analyze.Report) analyze.ReportSection
- func (c *Analyzer) CreateVisitor() analyze.AnalysisVisitor
- func (c *Analyzer) DefaultConfig() Config
- func (c *Analyzer) Description() string
- func (c *Analyzer) Descriptor() analyze.Descriptor
- func (c *Analyzer) Flag() string
- func (c *Analyzer) FormatReport(report analyze.Report, w io.Writer) error
- func (c *Analyzer) FormatReportBinary(report analyze.Report, w io.Writer) error
- func (c *Analyzer) FormatReportJSON(report analyze.Report, w io.Writer) error
- func (c *Analyzer) FormatReportPlot(report analyze.Report, w io.Writer) error
- func (c *Analyzer) FormatReportYAML(report analyze.Report, w io.Writer) error
- func (c *Analyzer) ListConfigurationOptions() []pipeline.ConfigurationOption
- func (c *Analyzer) Name() string
- func (c *Analyzer) Thresholds() analyze.Thresholds
- type CognitiveComplexityCalculator
- type ComputedMetrics
- type Config
- type DistributionMetric
- type FunctionComplexityData
- type FunctionComplexityMetric
- type FunctionData
- type FunctionMetrics
- type FunctionReportItem
- type HighRiskFunctionData
- type HighRiskFunctionMetric
- type Metrics
- type ReportData
- type ReportSection
- type Visitor
Constants ¶
const (
	// MaxDepthValue is the default maximum UAST traversal depth for complexity analysis.
	MaxDepthValue = 10
	// MaxNestingDepthValue is the default maximum nesting depth tracked during complexity analysis.
	MaxNestingDepthValue = 10
)
Configuration constants for complexity analysis.
const (
	MetricDistSimple   = "simple"
	MetricDistModerate = "moderate"
	MetricDistComplex  = "complex"
)
MetricDist* constants are JSON-compatible distribution keys for metrics output.
const (
	CyclomaticThresholdHigh     = 10
	CyclomaticThresholdModerate = 5
	CognitiveThresholdHigh      = 15
	CognitiveThresholdModerate  = 7
	NestingThresholdHigh        = 5
	NestingThresholdModerate    = 3
)
Complexity thresholds.
const (
	SectionTitle = "COMPLEXITY"

	// ScoreExcellentThreshold is the upper bound of average complexity for an excellent score.
	ScoreExcellentThreshold = 1.0
	ScoreGoodThreshold      = 3.0
	ScoreFairThreshold      = 5.0
	ScoreModerateThreshold  = 7.0
	ScorePoorThreshold      = 10.0

	ScoreExcellent = 1.0
	ScoreGood      = 0.8
	ScoreFair      = 0.6
	ScoreModerate  = 0.4
	ScorePoor      = 0.2
	ScoreCritical  = 0.1

	// DistSimpleMax is the maximum cyclomatic complexity for the "simple" distribution bucket.
	DistSimpleMax   = 5
	DistModerateMax = 10
	DistComplexMax  = 20

	DistLabelSimple  = "Simple (1-5)"
	DistLabelMod     = "Moderate (6-10)"
	DistLabelComplex = "Complex (11-20)"
	DistLabelVeryC   = "Very Complex (>20)"

	// IssueSeverityFairMin is the minimum cyclomatic complexity for fair severity.
	IssueSeverityFairMin = 6
	IssueSeverityPoorMin = 11
	IssueValuePrefix     = "CC="

	// MetricTotalFunctions is the label for the total functions metric.
	MetricTotalFunctions  = "Total Functions"
	MetricAvgComplexity   = "Avg Complexity"
	MetricMaxComplexity   = "Max Complexity"
	MetricTotalComplexity = "Total Complexity"
	MetricCognitiveTotal  = "Cognitive Total"
	MetricDecisionPoints  = "Decision Points"

	// DefaultStatusMessage is the fallback message when no complexity data is available.
	DefaultStatusMessage = "No complexity data available"

	// KeyAvgComplexity is the report key for average complexity.
	KeyAvgComplexity       = "average_complexity"
	KeyTotalFunctions      = "total_functions"
	KeyMaxComplexity       = "max_complexity"
	KeyTotalComplexity     = "total_complexity"
	KeyCognitiveComplexity = "cognitive_complexity"
	KeyDecisionPoints      = "decision_points"
	KeyMessage             = "message"
	KeyFunctions           = "functions"
	KeyFuncName            = "name"
	KeyFuncCyclomatic      = "cyclomatic_complexity"
	KeyFuncCognitive       = "cognitive_complexity"
	KeyFuncNesting         = "nesting_depth"
)
Section rendering constants.
Variables ¶
var ErrInvalidFunctionsData = errors.New("invalid complexity report: expected []map[string]any for functions")
ErrInvalidFunctionsData indicates the report doesn't contain expected functions data.
Functions ¶
func RegisterPlotSections ¶
func RegisterPlotSections()
RegisterPlotSections registers the complexity plot section renderer with the analyze package.
Types ¶
type AggregateData ¶
type AggregateData struct {
TotalFunctions int `json:"total_functions" yaml:"total_functions"`
AverageComplexity float64 `json:"average_complexity" yaml:"average_complexity"`
MaxComplexity int `json:"max_complexity" yaml:"max_complexity"`
TotalComplexity int `json:"total_complexity" yaml:"total_complexity"`
CognitiveComplexity int `json:"cognitive_complexity" yaml:"cognitive_complexity"`
NestingDepth int `json:"nesting_depth" yaml:"nesting_depth"`
DecisionPoints int `json:"decision_points" yaml:"decision_points"`
HealthScore float64 `json:"health_score" yaml:"health_score"`
Message string `json:"message" yaml:"message"`
}
AggregateData contains summary statistics.
type AggregateMetric ¶
type AggregateMetric struct {
metrics.MetricMeta
}
AggregateMetric computes summary statistics.
func NewAggregateMetric ¶
func NewAggregateMetric() *AggregateMetric
NewAggregateMetric creates the aggregate metric.
func (*AggregateMetric) Compute ¶
func (m *AggregateMetric) Compute(input *ReportData) AggregateData
Compute calculates aggregate statistics.
type Aggregator ¶
type Aggregator struct {
*common.Aggregator
common.PerFileRetainer
// contains filtered or unexported fields
}
Aggregator aggregates results from multiple complexity analyses.
func (*Aggregator) Aggregate ¶
func (ca *Aggregator) Aggregate(results map[string]analyze.Report)
Aggregate overrides the base Aggregate method to collect detailed functions and track the true maximum complexity across all files.
func (*Aggregator) GetResult ¶
func (ca *Aggregator) GetResult() analyze.Report
GetResult overrides the base GetResult method to include detailed functions and compute derived metrics (average_complexity, max_complexity, message).
func (*Aggregator) SetAggregationMode ¶
func (ca *Aggregator) SetAggregationMode(mode analyze.AggregationMode)
SetAggregationMode propagates the mode to both the base aggregator and the detailed data collector.
type Analyzer ¶
type Analyzer struct {
// contains filtered or unexported fields
}
Analyzer provides comprehensive complexity analysis.
func (*Analyzer) CreateAggregator ¶
func (c *Analyzer) CreateAggregator() analyze.ResultAggregator
CreateAggregator returns a new aggregator for complexity analysis.
func (*Analyzer) CreateReportSection ¶
func (c *Analyzer) CreateReportSection(report analyze.Report) analyze.ReportSection
CreateReportSection creates a ReportSection from report data.
func (*Analyzer) CreateVisitor ¶
func (c *Analyzer) CreateVisitor() analyze.AnalysisVisitor
CreateVisitor creates a new visitor for complexity analysis.
func (*Analyzer) DefaultConfig ¶
DefaultConfig returns default complexity analysis configuration.
func (*Analyzer) Description ¶
Description returns the analyzer description.
func (*Analyzer) Descriptor ¶
func (c *Analyzer) Descriptor() analyze.Descriptor
Descriptor returns stable analyzer metadata.
func (*Analyzer) FormatReport ¶
FormatReport formats complexity analysis results as human-readable text.
func (*Analyzer) FormatReportBinary ¶
FormatReportBinary formats complexity analysis results as binary envelope.
func (*Analyzer) FormatReportJSON ¶
FormatReportJSON formats complexity analysis results as JSON.
func (*Analyzer) FormatReportPlot ¶
FormatReportPlot generates an HTML plot visualization for complexity analysis.
func (*Analyzer) FormatReportYAML ¶
FormatReportYAML formats complexity analysis results as YAML.
func (*Analyzer) ListConfigurationOptions ¶
func (c *Analyzer) ListConfigurationOptions() []pipeline.ConfigurationOption
ListConfigurationOptions returns the configuration options for the analyzer.
func (*Analyzer) Thresholds ¶
func (c *Analyzer) Thresholds() analyze.Thresholds
Thresholds returns the color-coded thresholds for complexity metrics.
type CognitiveComplexityCalculator ¶
type CognitiveComplexityCalculator struct {
// contains filtered or unexported fields
}
CognitiveComplexityCalculator implements the SonarSource cognitive complexity algorithm.
func NewCognitiveComplexityCalculator ¶
func NewCognitiveComplexityCalculator() *CognitiveComplexityCalculator
NewCognitiveComplexityCalculator creates a new cognitive complexity calculator.
func (*CognitiveComplexityCalculator) CalculateCognitiveComplexity ¶
func (c *CognitiveComplexityCalculator) CalculateCognitiveComplexity(fn *node.Node) int
CalculateCognitiveComplexity calculates cognitive complexity according to SonarSource specification.
type ComputedMetrics ¶
type ComputedMetrics struct {
FunctionComplexity []FunctionComplexityData `json:"function_complexity" yaml:"function_complexity"`
Distribution map[string]int `json:"distribution" yaml:"distribution"`
HighRiskFunctions []HighRiskFunctionData `json:"high_risk_functions" yaml:"high_risk_functions"`
Aggregate AggregateData `json:"aggregate" yaml:"aggregate"`
}
ComputedMetrics holds all computed metric results for the complexity analyzer.
func ComputeAllMetrics ¶
func ComputeAllMetrics(report analyze.Report) (*ComputedMetrics, error)
ComputeAllMetrics runs all complexity metrics and returns the results.
func (*ComputedMetrics) AnalyzerName ¶
func (m *ComputedMetrics) AnalyzerName() string
AnalyzerName returns the name of the analyzer that produced these metrics.
func (*ComputedMetrics) ToJSON ¶
func (m *ComputedMetrics) ToJSON() any
ToJSON returns the metrics in a format suitable for JSON marshaling.
func (*ComputedMetrics) ToYAML ¶
func (m *ComputedMetrics) ToYAML() any
ToYAML returns the metrics in a format suitable for YAML marshaling.
type Config ¶
type Config struct {
ComplexityThresholds map[string]int
MaxNestingDepth int
IncludeCognitiveComplexity bool
IncludeNestingDepth bool
IncludeDecisionPoints bool
IncludeLOCMetrics bool
}
Config holds configuration for complexity analysis.
type DistributionMetric ¶
type DistributionMetric struct {
metrics.MetricMeta
}
DistributionMetric computes complexity distribution.
func NewDistributionMetric ¶
func NewDistributionMetric() *DistributionMetric
NewDistributionMetric creates the distribution metric.
func (*DistributionMetric) Compute ¶
func (m *DistributionMetric) Compute(input *ReportData) map[string]int
Compute calculates complexity distribution.
type FunctionComplexityData ¶
type FunctionComplexityData struct {
Name string `json:"name" yaml:"name"`
SourceFile string `json:"source_file,omitempty" yaml:"source_file,omitempty"`
Language string `json:"language,omitempty" yaml:"language,omitempty"`
Directory string `json:"directory,omitempty" yaml:"directory,omitempty"`
CyclomaticComplexity int `json:"cyclomatic_complexity" yaml:"cyclomatic_complexity"`
CognitiveComplexity int `json:"cognitive_complexity" yaml:"cognitive_complexity"`
NestingDepth int `json:"nesting_depth" yaml:"nesting_depth"`
LinesOfCode int `json:"lines_of_code" yaml:"lines_of_code"`
ComplexityDensity float64 `json:"complexity_density" yaml:"complexity_density"`
RiskLevel string `json:"risk_level" yaml:"risk_level"`
}
FunctionComplexityData contains detailed complexity for a function.
type FunctionComplexityMetric ¶
type FunctionComplexityMetric struct {
metrics.MetricMeta
}
FunctionComplexityMetric computes per-function complexity data.
func NewFunctionComplexityMetric ¶
func NewFunctionComplexityMetric() *FunctionComplexityMetric
NewFunctionComplexityMetric creates the function complexity metric.
func (*FunctionComplexityMetric) Compute ¶
func (m *FunctionComplexityMetric) Compute(input *ReportData) []FunctionComplexityData
Compute calculates function complexity data.
type FunctionData ¶
type FunctionData struct {
Name string
SourceFile string
Language string
Directory string
CyclomaticComplexity int
CognitiveComplexity int
NestingDepth int
LinesOfCode int
ComplexityAssessment string
CognitiveAssessment string
NestingAssessment string
}
FunctionData holds complexity data for a single function.
type FunctionMetrics ¶
type FunctionMetrics struct {
Name string `json:"name"`
CyclomaticComplexity int `json:"cyclomatic_complexity"`
CognitiveComplexity int `json:"cognitive_complexity"`
NestingDepth int `json:"nesting_depth"`
DecisionPoints int `json:"decision_points"`
LinesOfCode int `json:"lines_of_code"`
Parameters int `json:"parameters"`
ReturnStatements int `json:"return_statements"`
}
FunctionMetrics holds complexity metrics for individual functions.
type FunctionReportItem ¶
type FunctionReportItem struct {
Name string
CyclomaticComplexity int
CognitiveComplexity int
NestingDepth int
LinesOfCode int
ComplexityAssessment string
CognitiveAssessment string
NestingAssessment string
}
FunctionReportItem is a typed representation of a per-function complexity report item. It includes assessment strings computed from thresholds, avoiding map[string]any allocation.
type HighRiskFunctionData ¶
type HighRiskFunctionData struct {
Name string `json:"name" yaml:"name"`
SourceFile string `json:"source_file,omitempty" yaml:"source_file,omitempty"`
Language string `json:"language,omitempty" yaml:"language,omitempty"`
Directory string `json:"directory,omitempty" yaml:"directory,omitempty"`
CyclomaticComplexity int `json:"cyclomatic_complexity" yaml:"cyclomatic_complexity"`
CognitiveComplexity int `json:"cognitive_complexity" yaml:"cognitive_complexity"`
RiskLevel string `json:"risk_level" yaml:"risk_level"`
Issues []string `json:"issues" yaml:"issues"`
}
HighRiskFunctionData identifies functions needing refactoring attention.
type HighRiskFunctionMetric ¶
type HighRiskFunctionMetric struct {
metrics.MetricMeta
}
HighRiskFunctionMetric identifies functions needing attention.
func NewHighRiskFunctionMetric ¶
func NewHighRiskFunctionMetric() *HighRiskFunctionMetric
NewHighRiskFunctionMetric creates the high risk function metric.
func (*HighRiskFunctionMetric) Compute ¶
func (m *HighRiskFunctionMetric) Compute(input *ReportData) []HighRiskFunctionData
Compute identifies high risk functions.
type Metrics ¶
type Metrics struct {
FunctionMetrics map[string]FunctionMetrics `json:"function_metrics"`
ComplexityDistribution map[string]int `json:"complexity_distribution"`
CyclomaticComplexity int `json:"cyclomatic_complexity"`
CognitiveComplexity int `json:"cognitive_complexity"`
NestingDepth int `json:"nesting_depth"`
DecisionPoints int `json:"decision_points"`
TotalFunctions int `json:"total_functions"`
AverageComplexity float64 `json:"average_complexity"`
MaxComplexity int `json:"max_complexity"`
}
Metrics holds different types of complexity measurements.
type ReportData ¶
type ReportData struct {
TotalFunctions int
AverageComplexity float64
MaxComplexity int
TotalComplexity int
CognitiveComplexity int
NestingDepth int
DecisionPoints int
Functions []FunctionData
Message string
}
ReportData is the parsed input data for complexity metrics computation.
func ParseReportData ¶
func ParseReportData(report analyze.Report) (*ReportData, error)
ParseReportData extracts ReportData from an analyzer report.
type ReportSection ¶
type ReportSection struct {
analyze.BaseReportSection
// contains filtered or unexported fields
}
ReportSection implements analyze.ReportSection for complexity analysis.
func NewReportSection ¶
func NewReportSection(report analyze.Report) *ReportSection
NewReportSection creates a ReportSection from a complexity report.
func (*ReportSection) AllIssues ¶
func (s *ReportSection) AllIssues() []analyze.Issue
AllIssues returns all functions as issues sorted by complexity descending.
func (*ReportSection) Distribution ¶
func (s *ReportSection) Distribution() []analyze.DistributionItem
Distribution returns complexity distribution categories.
func (*ReportSection) KeyMetrics ¶
func (s *ReportSection) KeyMetrics() []analyze.Metric
KeyMetrics returns the 6 key metrics for the complexity section.
type Visitor ¶
type Visitor struct {
// contains filtered or unexported fields
}
Visitor implements NodeVisitor for complexity analysis.