Documentation ¶
Index ¶
- Constants
- func CalculateProbability(m FileMetrics, w DefectWeights) float32
- func CalculateTDG(c TDGComponents, w TDGWeights) float64
- type ChurnAnalysis
- type ChurnSummary
- func NewChurnSummary() ChurnSummary
- func (s *ChurnSummary) CalculateStatistics(files []FileChurnMetrics)
- func (s *ChurnSummary) IdentifyHotspotAndStableFiles(files []FileChurnMetrics)
- type CloneAnalysis
- type CloneSummary
- func NewCloneSummary() CloneSummary
- func (s *CloneSummary) AddClone(c CodeClone)
- type CloneType
- type CodeClone
- type ComplexityAnalysis
- type ComplexityHotspot
- type ComplexityMetrics
- func (m *ComplexityMetrics) ComplexityScore() float64
- func (m *ComplexityMetrics) IsSimple(t ComplexityThresholds) bool
- func (m *ComplexityMetrics) IsSimpleDefault() bool
- func (m *ComplexityMetrics) NeedsRefactoring(t ComplexityThresholds) bool
- func (m *ComplexityMetrics) NeedsRefactoringDefault() bool
- type ComplexityReport
- func AggregateResults(files []FileComplexity) *ComplexityReport
- func AggregateResultsWithThresholds(files []FileComplexity, maxCyclomatic, maxCognitive *uint32) *ComplexityReport
- func (r *ComplexityReport) ErrorCount() int
- func (r *ComplexityReport) WarningCount() int
- type ComplexitySummary
- type ComplexityThresholds
- func DefaultComplexityThresholds() ComplexityThresholds
- type DeadCodeAnalysis
- type DeadCodeSummary
- func NewDeadCodeSummary() DeadCodeSummary
- func (s *DeadCodeSummary) AddDeadFunction(f DeadFunction)
- func (s *DeadCodeSummary) AddDeadVariable(v DeadVariable)
- func (s *DeadCodeSummary) AddUnreachableBlock(b UnreachableBlock)
- func (s *DeadCodeSummary) CalculatePercentage()
- type DeadFunction
- type DeadVariable
- type DebtCategory
- type DefectAnalysis
- type DefectScore
- type DefectSummary
- type DefectWeights
- func DefaultDefectWeights() DefectWeights
- type DependencyGraph
- func NewDependencyGraph() *DependencyGraph
- func (g *DependencyGraph) AddEdge(edge GraphEdge)
- func (g *DependencyGraph) AddNode(node GraphNode)
- func (g *DependencyGraph) ToMermaid() string
- type EdgeType
- type ExtendedComplexitySummary
- type ExtendedComplexityThresholds
- func DefaultExtendedThresholds() ExtendedComplexityThresholds
- type FileChurnMetrics
- func (f *FileChurnMetrics) CalculateChurnScore() float64
- func (f *FileChurnMetrics) CalculateChurnScoreWithMax(maxCommits, maxChanges int) float64
- func (f *FileChurnMetrics) IsHotspot(threshold float64) bool
- type FileComplexity
- type FileMetrics
- type FunctionComplexity
- type GraphEdge
- type GraphMetrics
- type GraphNode
- type GraphSummary
- type HalsteadMetrics
- func NewHalsteadMetrics(operatorsUnique, operandsUnique, operatorsTotal, operandsTotal uint32) *HalsteadMetrics
- type MinHashSignature
- func (s *MinHashSignature) JaccardSimilarity(other *MinHashSignature) float64
- type NodeMetric
- type NodeType
- type RiskLevel
- func CalculateRiskLevel(probability float32) RiskLevel
- type SATDAnalysis
- type SATDSummary
- func NewSATDSummary() SATDSummary
- func (s *SATDSummary) AddItem(item TechnicalDebt)
- type Severity
- type TDGAnalysis
- type TDGComponents
- type TDGScore
- type TDGSeverity
- func CalculateTDGSeverity(score float64) TDGSeverity
- func (s TDGSeverity) Color() string
- type TDGSummary
- func NewTDGSummary() TDGSummary
- type TDGWeights
- func DefaultTDGWeights() TDGWeights
- type TechnicalDebt
- type UnreachableBlock
- type Violation
- type ViolationSeverity
Constants ¶
const (
    HotspotThreshold = 0.5
    StableThreshold  = 0.1
)
Thresholds for hotspot and stable file detection.
Variables ¶
This section is empty.
Functions ¶
func CalculateProbability ¶
func CalculateProbability(m FileMetrics, w DefectWeights) float32
CalculateProbability computes defect probability from metrics.
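A minimal usage sketch (the sample values are illustrative, not from the package):

    m := FileMetrics{
        FilePath:             "internal/parser/parser.go",
        ChurnScore:           0.8,
        Complexity:           24,
        DuplicateRatio:       0.15,
        AfferentCoupling:     3,
        EfferentCoupling:     9,
        LinesOfCode:          410,
        CyclomaticComplexity: 24,
        CognitiveComplexity:  31,
    }
    p := CalculateProbability(m, DefaultDefectWeights())
    fmt.Printf("defect probability: %.2f\n", p)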
func CalculateTDG ¶ added in v1.0.0
func CalculateTDG(c TDGComponents, w TDGWeights) float64
CalculateTDG computes the TDG score from components. Returns a score from 0 to 100 where higher is better (less debt). Components are normalized penalties (0-1), so the weighted sum is subtracted from 100.
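For example, a file whose components are all zero scores 100, and weighted penalties pull the score down (a sketch with illustrative values):

    c := TDGComponents{
        Complexity:  0.40, // normalized penalties, 0-1
        Churn:       0.20,
        Coupling:    0.10,
        Duplication: 0.05,
        DomainRisk:  0.00,
    }
    score := CalculateTDG(c, DefaultTDGWeights())
    fmt.Printf("TDG: %.1f (%s)\n", score, CalculateTDGSeverity(score))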
Types ¶
type ChurnAnalysis ¶
type ChurnAnalysis struct {
Files []FileChurnMetrics `json:"files"`
Summary ChurnSummary `json:"summary"`
Days int `json:"days"`
RepoPath string `json:"repo_path"`
}
ChurnAnalysis represents the full churn analysis result.
type ChurnSummary ¶
type ChurnSummary struct {
TotalFiles int `json:"total_files"`
TotalCommits int `json:"total_commits"`
TotalLinesAdded int `json:"total_lines_added"`
TotalLinesDeleted int `json:"total_lines_deleted"`
UniqueAuthors int `json:"unique_authors"`
AvgCommitsPerFile float64 `json:"avg_commits_per_file"`
MaxChurnScore float64 `json:"max_churn_score"`
TopChurnedFiles []string `json:"top_churned_files"`
HotspotFiles []string `json:"hotspot_files"`
StableFiles []string `json:"stable_files"`
MeanChurnScore float64 `json:"mean_churn_score"`
VarianceChurn float64 `json:"variance_churn_score"`
StdDevChurn float64 `json:"stddev_churn_score"`
P50ChurnScore float64 `json:"p50_churn_score"`
P95ChurnScore float64 `json:"p95_churn_score"`
}
ChurnSummary provides aggregate statistics.
func NewChurnSummary ¶
func NewChurnSummary() ChurnSummary
NewChurnSummary creates an initialized summary.
func (*ChurnSummary) CalculateStatistics ¶
func (s *ChurnSummary) CalculateStatistics(files []FileChurnMetrics)
CalculateStatistics computes mean, variance, standard deviation, and percentiles of churn scores.
func (*ChurnSummary) IdentifyHotspotAndStableFiles ¶
func (s *ChurnSummary) IdentifyHotspotAndStableFiles(files []FileChurnMetrics)
IdentifyHotspotAndStableFiles populates HotspotFiles and StableFiles. Files must be sorted by ChurnScore descending before calling.
Hotspots: the top 10 files with churn_score > 0.5.
Stable: the bottom 10 files with churn_score < 0.1 and commit_count > 0.
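Because the method expects pre-sorted input, a typical call site sorts first (a sketch; files is assumed to be a []FileChurnMetrics from a prior churn scan):

    sort.Slice(files, func(i, j int) bool {
        return files[i].ChurnScore > files[j].ChurnScore
    })
    summary := NewChurnSummary()
    summary.CalculateStatistics(files)
    summary.IdentifyHotspotAndStableFiles(files)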
type CloneAnalysis ¶
type CloneAnalysis struct {
Clones []CodeClone `json:"clones"`
Summary CloneSummary `json:"summary"`
TotalFilesScanned int `json:"total_files_scanned"`
MinLines int `json:"min_lines"`
Threshold float64 `json:"threshold"`
}
CloneAnalysis represents the full duplicate detection result.
type CloneSummary ¶
type CloneSummary struct {
TotalClones int `json:"total_clones"`
Type1Count int `json:"type1_count"`
Type2Count int `json:"type2_count"`
Type3Count int `json:"type3_count"`
DuplicatedLines int `json:"duplicated_lines"`
FileOccurrences map[string]int `json:"file_occurrences"`
AvgSimilarity float64 `json:"avg_similarity"`
P50Similarity float64 `json:"p50_similarity"`
P95Similarity float64 `json:"p95_similarity"`
}
CloneSummary provides aggregate statistics.
func NewCloneSummary ¶
func NewCloneSummary() CloneSummary
NewCloneSummary creates an initialized summary.
func (*CloneSummary) AddClone ¶
func (s *CloneSummary) AddClone(c CodeClone)
AddClone updates the summary with a new clone.
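A sketch of the intended accumulation loop (clones is assumed to come from a clone detector):

    summary := NewCloneSummary()
    for _, c := range clones {
        summary.AddClone(c)
    }
    fmt.Printf("%d clones, %d duplicated lines\n", summary.TotalClones, summary.DuplicatedLines)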
type CodeClone ¶
type CodeClone struct {
Type CloneType `json:"type"`
Similarity float64 `json:"similarity"`
FileA string `json:"file_a"`
FileB string `json:"file_b"`
StartLineA uint32 `json:"start_line_a"`
EndLineA uint32 `json:"end_line_a"`
StartLineB uint32 `json:"start_line_b"`
EndLineB uint32 `json:"end_line_b"`
LinesA int `json:"lines_a"`
LinesB int `json:"lines_b"`
TokenCount int `json:"token_count,omitempty"`
}
CodeClone represents a detected duplicate code fragment.
type ComplexityAnalysis ¶
type ComplexityAnalysis struct {
Files []FileComplexity `json:"files"`
Summary ComplexitySummary `json:"summary"`
}
ComplexityAnalysis represents the full analysis result.
type ComplexityHotspot ¶
type ComplexityHotspot struct {
File string `json:"file"`
Function string `json:"function,omitempty"`
Line uint32 `json:"line"`
Complexity uint32 `json:"complexity"`
ComplexityType string `json:"complexity_type"`
}
ComplexityHotspot identifies a high-complexity location in the codebase.
type ComplexityMetrics ¶
type ComplexityMetrics struct {
Cyclomatic uint32 `json:"cyclomatic"`
Cognitive uint32 `json:"cognitive"`
MaxNesting int `json:"max_nesting"`
Lines int `json:"lines"`
Halstead *HalsteadMetrics `json:"halstead,omitempty"`
}
ComplexityMetrics represents code complexity measurements for a function or file.
func (*ComplexityMetrics) ComplexityScore ¶
func (m *ComplexityMetrics) ComplexityScore() float64
ComplexityScore calculates a composite complexity score for ranking. Combines cyclomatic, cognitive, nesting, and lines with weighted factors.
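Since the score is meant for ranking rather than absolute judgment, a typical use is ordering functions from worst to best (a sketch; funcs is assumed to be a []FunctionComplexity):

    sort.Slice(funcs, func(i, j int) bool {
        return funcs[i].Metrics.ComplexityScore() > funcs[j].Metrics.ComplexityScore()
    })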
func (*ComplexityMetrics) IsSimple ¶
func (m *ComplexityMetrics) IsSimple(t ComplexityThresholds) bool
IsSimple returns true if complexity is within acceptable limits.
func (*ComplexityMetrics) IsSimpleDefault ¶
func (m *ComplexityMetrics) IsSimpleDefault() bool
IsSimpleDefault checks if complexity is low using fixed thresholds (pmat compatible). Returns true if cyclomatic <= 5 and cognitive <= 7.
func (*ComplexityMetrics) NeedsRefactoring ¶
func (m *ComplexityMetrics) NeedsRefactoring(t ComplexityThresholds) bool
NeedsRefactoring returns true if any metric significantly exceeds thresholds.
func (*ComplexityMetrics) NeedsRefactoringDefault ¶
func (m *ComplexityMetrics) NeedsRefactoringDefault() bool
NeedsRefactoringDefault checks if complexity exceeds fixed thresholds (pmat compatible). Returns true if cyclomatic > 10 or cognitive > 15.
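The two default predicates leave a middle band (roughly cyclomatic 6-10 or cognitive 8-15) that is neither simple nor flagged for refactoring, so a three-way classification looks like this (a sketch; m is a ComplexityMetrics):

    switch {
    case m.NeedsRefactoringDefault():
        fmt.Println("refactor: cyclomatic > 10 or cognitive > 15")
    case m.IsSimpleDefault():
        fmt.Println("simple: cyclomatic <= 5 and cognitive <= 7")
    default:
        fmt.Println("moderate: worth watching")
    }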
type ComplexityReport ¶
type ComplexityReport struct {
Summary ExtendedComplexitySummary `json:"summary"`
Violations []Violation `json:"violations"`
Hotspots []ComplexityHotspot `json:"hotspots"`
Files []FileComplexity `json:"files"`
TechnicalDebtHours float32 `json:"technical_debt_hours"`
}
ComplexityReport is the full analysis report with violations and hotspots.
func AggregateResults ¶
func AggregateResults(files []FileComplexity) *ComplexityReport
AggregateResults creates a ComplexityReport from file metrics using default thresholds.
func AggregateResultsWithThresholds ¶
func AggregateResultsWithThresholds(files []FileComplexity, maxCyclomatic, maxCognitive *uint32) *ComplexityReport
AggregateResultsWithThresholds creates a ComplexityReport with custom thresholds. If maxCyclomatic or maxCognitive is nil, defaults are used.
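A sketch of passing a custom cyclomatic limit while keeping the default cognitive limit (files is assumed to be a []FileComplexity):

    maxCyclomatic := uint32(15)
    report := AggregateResultsWithThresholds(files, &maxCyclomatic, nil)
    fmt.Printf("%d errors, %d warnings, %.1f debt hours\n",
        report.ErrorCount(), report.WarningCount(), report.TechnicalDebtHours)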
func (*ComplexityReport) ErrorCount ¶
func (r *ComplexityReport) ErrorCount() int
ErrorCount returns the number of error-severity violations.
func (*ComplexityReport) WarningCount ¶
func (r *ComplexityReport) WarningCount() int
WarningCount returns the number of warning-severity violations.
type ComplexitySummary ¶
type ComplexitySummary struct {
TotalFiles int `json:"total_files"`
TotalFunctions int `json:"total_functions"`
AvgCyclomatic float64 `json:"avg_cyclomatic"`
AvgCognitive float64 `json:"avg_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
P50Cyclomatic uint32 `json:"p50_cyclomatic"`
P95Cyclomatic uint32 `json:"p95_cyclomatic"`
P50Cognitive uint32 `json:"p50_cognitive"`
P95Cognitive uint32 `json:"p95_cognitive"`
ViolationCount int `json:"violation_count"`
}
ComplexitySummary provides aggregate statistics.
type ComplexityThresholds ¶
type ComplexityThresholds struct {
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
MaxNesting int `json:"max_nesting"`
}
ComplexityThresholds defines the limits for complexity violations.
func DefaultComplexityThresholds ¶
func DefaultComplexityThresholds() ComplexityThresholds
DefaultComplexityThresholds returns sensible defaults.
type DeadCodeAnalysis ¶
type DeadCodeAnalysis struct {
DeadFunctions []DeadFunction `json:"dead_functions"`
DeadVariables []DeadVariable `json:"dead_variables"`
UnreachableCode []UnreachableBlock `json:"unreachable_code"`
Summary DeadCodeSummary `json:"summary"`
}
DeadCodeAnalysis represents the full dead code detection result.
type DeadCodeSummary ¶
type DeadCodeSummary struct {
TotalDeadFunctions int `json:"total_dead_functions"`
TotalDeadVariables int `json:"total_dead_variables"`
TotalUnreachableLines int `json:"total_unreachable_lines"`
DeadCodePercentage float64 `json:"dead_code_percentage"`
ByFile map[string]int `json:"by_file"`
TotalFilesAnalyzed int `json:"total_files_analyzed"`
TotalLinesAnalyzed int `json:"total_lines_analyzed"`
}
DeadCodeSummary provides aggregate statistics.
func NewDeadCodeSummary ¶
func NewDeadCodeSummary() DeadCodeSummary
NewDeadCodeSummary creates an initialized summary.
func (*DeadCodeSummary) AddDeadFunction ¶
func (s *DeadCodeSummary) AddDeadFunction(f DeadFunction)
AddDeadFunction updates the summary with a dead function.
func (*DeadCodeSummary) AddDeadVariable ¶
func (s *DeadCodeSummary) AddDeadVariable(v DeadVariable)
AddDeadVariable updates the summary with a dead variable.
func (*DeadCodeSummary) AddUnreachableBlock ¶
func (s *DeadCodeSummary) AddUnreachableBlock(b UnreachableBlock)
AddUnreachableBlock updates the summary with unreachable code.
func (*DeadCodeSummary) CalculatePercentage ¶
func (s *DeadCodeSummary) CalculatePercentage()
CalculatePercentage computes dead code percentage.
type DeadFunction ¶
type DeadFunction struct {
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
EndLine uint32 `json:"end_line"`
Visibility string `json:"visibility"` // public, private, internal
Confidence float64 `json:"confidence"` // 0.0-1.0, how certain we are it's dead
Reason string `json:"reason"` // Why it's considered dead
}
DeadFunction represents an unused function detected in the codebase.
type DeadVariable ¶
type DeadVariable struct {
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
Confidence float64 `json:"confidence"`
}
DeadVariable represents an unused variable.
type DebtCategory ¶
type DebtCategory string
DebtCategory represents the type of technical debt.
const (
    DebtDesign      DebtCategory = "design"      // HACK, KLUDGE, SMELL
    DebtDefect      DebtCategory = "defect"      // BUG, FIXME, BROKEN
    DebtRequirement DebtCategory = "requirement" // TODO, FEAT, ENHANCEMENT
    DebtTest        DebtCategory = "test"        // FAILING, SKIP, DISABLED
    DebtPerformance DebtCategory = "performance" // SLOW, OPTIMIZE, PERF
    DebtSecurity    DebtCategory = "security"    // SECURITY, VULN, UNSAFE
)
type DefectAnalysis ¶
type DefectAnalysis struct {
Files []DefectScore `json:"files"`
Summary DefectSummary `json:"summary"`
Weights DefectWeights `json:"weights"`
}
DefectAnalysis represents the full defect prediction result.
type DefectScore ¶
type DefectScore struct {
FilePath string `json:"file_path"`
Probability float32 `json:"probability"` // 0.0 to 1.0
RiskLevel RiskLevel `json:"risk_level"`
ContributingFactors map[string]float32 `json:"contributing_factors"`
Recommendations []string `json:"recommendations"`
}
DefectScore represents the prediction result for a file.
type DefectSummary ¶
type DefectSummary struct {
TotalFiles int `json:"total_files"`
HighRiskCount int `json:"high_risk_count"`
MediumRiskCount int `json:"medium_risk_count"`
LowRiskCount int `json:"low_risk_count"`
AvgProbability float32 `json:"avg_probability"`
P50Probability float32 `json:"p50_probability"`
P95Probability float32 `json:"p95_probability"`
}
DefectSummary provides aggregate statistics.
type DefectWeights ¶
type DefectWeights struct {
Churn float32 `json:"churn"` // 0.35
Complexity float32 `json:"complexity"` // 0.30
Duplication float32 `json:"duplication"` // 0.25
Coupling float32 `json:"coupling"` // 0.10
}
DefectWeights defines the weights for defect prediction factors. Based on empirical research (PMAT approach).
func DefaultDefectWeights ¶
func DefaultDefectWeights() DefectWeights
DefaultDefectWeights returns the standard weights.
type DependencyGraph ¶
DependencyGraph represents the full graph structure.
func NewDependencyGraph ¶
func NewDependencyGraph() *DependencyGraph
NewDependencyGraph creates an empty graph.
func (*DependencyGraph) AddEdge ¶
func (g *DependencyGraph) AddEdge(edge GraphEdge)
AddEdge adds an edge to the graph.
func (*DependencyGraph) AddNode ¶
func (g *DependencyGraph) AddNode(node GraphNode)
AddNode adds a node to the graph.
func (*DependencyGraph) ToMermaid ¶
func (g *DependencyGraph) ToMermaid() string
ToMermaid generates Mermaid diagram syntax from the graph.
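A sketch of building a small graph and rendering it. The NodeType and EdgeType constants are not listed on this page, so the string values below assume those types are string-based enums like the other enum types here:

    g := NewDependencyGraph()
    g.AddNode(GraphNode{ID: "a", Name: "parser", Type: "file", File: "internal/parser/parser.go"})
    g.AddNode(GraphNode{ID: "b", Name: "lexer", Type: "file", File: "internal/lexer/lexer.go"})
    g.AddEdge(GraphEdge{From: "a", To: "b", Type: "import", Weight: 1})
    fmt.Println(g.ToMermaid())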
type ExtendedComplexitySummary ¶
type ExtendedComplexitySummary struct {
TotalFiles int `json:"total_files"`
TotalFunctions int `json:"total_functions"`
MedianCyclomatic float32 `json:"median_cyclomatic"`
MedianCognitive float32 `json:"median_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
P90Cyclomatic uint32 `json:"p90_cyclomatic"`
P90Cognitive uint32 `json:"p90_cognitive"`
TechnicalDebtHours float32 `json:"technical_debt_hours"`
}
ExtendedComplexitySummary provides enhanced statistics (pmat compatible).
type ExtendedComplexityThresholds ¶
type ExtendedComplexityThresholds struct {
CyclomaticWarn uint32 `json:"cyclomatic_warn"`
CyclomaticError uint32 `json:"cyclomatic_error"`
CognitiveWarn uint32 `json:"cognitive_warn"`
CognitiveError uint32 `json:"cognitive_error"`
NestingMax uint8 `json:"nesting_max"`
MethodLength uint16 `json:"method_length"`
}
ExtendedComplexityThresholds provides warn and error levels (pmat compatible).
func DefaultExtendedThresholds ¶
func DefaultExtendedThresholds() ExtendedComplexityThresholds
DefaultExtendedThresholds returns pmat-compatible default thresholds.
type FileChurnMetrics ¶
type FileChurnMetrics struct {
Path string `json:"path"`
Commits int `json:"commits"`
UniqueAuthors int `json:"unique_authors"`
Authors map[string]int `json:"authors"` // author email -> commit count
LinesAdded int `json:"lines_added"`
LinesDeleted int `json:"lines_deleted"`
ChurnScore float64 `json:"churn_score"` // 0.0-1.0 normalized
FirstCommit time.Time `json:"first_commit"`
LastCommit time.Time `json:"last_commit"`
}
FileChurnMetrics represents git churn data for a single file.
func (*FileChurnMetrics) CalculateChurnScore ¶
func (f *FileChurnMetrics) CalculateChurnScore() float64
CalculateChurnScore computes a normalized churn score. Uses the same formula as the reference implementation: churn_score = (commit_factor * 0.6 + change_factor * 0.4)
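A sketch of that formula, assuming commit_factor and change_factor are the file's commit count and total line changes normalized against repository maxima and clamped to 1.0:

    commitFactor := math.Min(float64(f.Commits)/float64(maxCommits), 1.0)
    changeFactor := math.Min(float64(f.LinesAdded+f.LinesDeleted)/float64(maxChanges), 1.0)
    score := commitFactor*0.6 + changeFactor*0.4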
func (*FileChurnMetrics) CalculateChurnScoreWithMax ¶
func (f *FileChurnMetrics) CalculateChurnScoreWithMax(maxCommits, maxChanges int) float64
CalculateChurnScoreWithMax computes churn score with explicit max values.
func (*FileChurnMetrics) IsHotspot ¶
func (f *FileChurnMetrics) IsHotspot(threshold float64) bool
IsHotspot returns true if the file has high churn.
type FileComplexity ¶
type FileComplexity struct {
Path string `json:"path"`
Language string `json:"language"`
Functions []FunctionComplexity `json:"functions"`
TotalCyclomatic uint32 `json:"total_cyclomatic"`
TotalCognitive uint32 `json:"total_cognitive"`
AvgCyclomatic float64 `json:"avg_cyclomatic"`
AvgCognitive float64 `json:"avg_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
ViolationCount int `json:"violation_count"`
}
FileComplexity represents aggregated complexity for a file.
type FileMetrics ¶
type FileMetrics struct {
FilePath string `json:"file_path"`
ChurnScore float32 `json:"churn_score"` // 0.0 to 1.0
Complexity float32 `json:"complexity"` // Raw complexity
DuplicateRatio float32 `json:"duplicate_ratio"` // 0.0 to 1.0
AfferentCoupling float32 `json:"afferent_coupling"` // Incoming deps
EfferentCoupling float32 `json:"efferent_coupling"` // Outgoing deps
LinesOfCode int `json:"lines_of_code"`
CyclomaticComplexity uint32 `json:"cyclomatic_complexity"`
CognitiveComplexity uint32 `json:"cognitive_complexity"`
}
FileMetrics contains input metrics for defect prediction.
type FunctionComplexity ¶
type FunctionComplexity struct {
Name string `json:"name"`
File string `json:"file"`
StartLine uint32 `json:"start_line"`
EndLine uint32 `json:"end_line"`
Metrics ComplexityMetrics `json:"metrics"`
Violations []string `json:"violations,omitempty"`
}
FunctionComplexity represents complexity metrics for a single function.
type GraphEdge ¶
type GraphEdge struct {
From string `json:"from"`
To string `json:"to"`
Type EdgeType `json:"type"`
Weight float64 `json:"weight,omitempty"`
}
GraphEdge represents a dependency between nodes.
type GraphMetrics ¶
type GraphMetrics struct {
NodeMetrics []NodeMetric `json:"node_metrics"`
Summary GraphSummary `json:"summary"`
}
GraphMetrics represents centrality and other graph metrics.
type GraphNode ¶
type GraphNode struct {
ID string `json:"id"`
Name string `json:"name"`
Type NodeType `json:"type"` // file, function, class, module
File string `json:"file"`
Line uint32 `json:"line,omitempty"`
Attributes map[string]string `json:"attributes,omitempty"`
}
GraphNode represents a node in the dependency graph.
type GraphSummary ¶
type GraphSummary struct {
TotalNodes int `json:"total_nodes"`
TotalEdges int `json:"total_edges"`
AvgDegree float64 `json:"avg_degree"`
Density float64 `json:"density"`
Components int `json:"components"`
LargestComponent int `json:"largest_component"`
}
GraphSummary provides aggregate graph statistics.
type HalsteadMetrics ¶ added in v1.0.0
type HalsteadMetrics struct {
OperatorsUnique uint32 `json:"operators_unique"` // n1: distinct operators
OperandsUnique uint32 `json:"operands_unique"` // n2: distinct operands
OperatorsTotal uint32 `json:"operators_total"` // N1: total operators
OperandsTotal uint32 `json:"operands_total"` // N2: total operands
Vocabulary uint32 `json:"vocabulary"` // n = n1 + n2
Length uint32 `json:"length"` // N = N1 + N2
Volume float64 `json:"volume"` // V = N * log2(n)
Difficulty float64 `json:"difficulty"` // D = (n1/2) * (N2/n2)
Effort float64 `json:"effort"` // E = D * V
Time float64 `json:"time"` // T = E / 18 (seconds)
Bugs float64 `json:"bugs"` // B = E^(2/3) / 3000
}
HalsteadMetrics represents Halstead software science metrics.
func NewHalsteadMetrics ¶ added in v1.2.0
func NewHalsteadMetrics(operatorsUnique, operandsUnique, operatorsTotal, operandsTotal uint32) *HalsteadMetrics
NewHalsteadMetrics creates Halstead metrics from base counts and calculates derived values.
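The derived fields follow the standard Halstead formulas given in the field comments above; a sketch of the arithmetic (assuming nonzero operand counts):

    vocab := float64(operatorsUnique + operandsUnique) // n = n1 + n2
    length := float64(operatorsTotal + operandsTotal)  // N = N1 + N2
    volume := length * math.Log2(vocab)                // V = N * log2(n)
    difficulty := float64(operatorsUnique) / 2 *       // D = (n1/2) * (N2/n2)
        (float64(operandsTotal) / float64(operandsUnique))
    effort := difficulty * volume            // E = D * V
    timeSec := effort / 18                   // T = E / 18 (seconds)
    bugs := math.Pow(effort, 2.0/3.0) / 3000 // B = E^(2/3) / 3000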
type MinHashSignature ¶
type MinHashSignature struct {
Values []uint64 `json:"values"`
}
MinHashSignature represents a MinHash signature for similarity estimation.
func (*MinHashSignature) JaccardSimilarity ¶
func (s *MinHashSignature) JaccardSimilarity(other *MinHashSignature) float64
JaccardSimilarity computes similarity between two MinHash signatures.
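The standard MinHash estimate of Jaccard similarity is the fraction of signature positions that agree. A hypothetical reimplementation for illustration (the package's exact handling of length mismatches is not documented here):

    func estimateJaccard(a, b MinHashSignature) float64 {
        if len(a.Values) == 0 || len(a.Values) != len(b.Values) {
            return 0
        }
        matches := 0
        for i := range a.Values {
            if a.Values[i] == b.Values[i] {
                matches++
            }
        }
        return float64(matches) / float64(len(a.Values))
    }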
type NodeMetric ¶
type NodeMetric struct {
NodeID string `json:"node_id"`
Name string `json:"name"`
PageRank float64 `json:"pagerank"`
BetweennessCentrality float64 `json:"betweenness_centrality"`
InDegree int `json:"in_degree"`
OutDegree int `json:"out_degree"`
ClusteringCoef float64 `json:"clustering_coefficient"`
}
NodeMetric represents computed metrics for a single node.
type RiskLevel ¶
type RiskLevel string
RiskLevel represents the defect probability risk category.
func CalculateRiskLevel ¶
func CalculateRiskLevel(probability float32) RiskLevel
CalculateRiskLevel determines risk level from probability.
type SATDAnalysis ¶
type SATDAnalysis struct {
Items []TechnicalDebt `json:"items"`
Summary SATDSummary `json:"summary"`
TotalFilesAnalyzed int `json:"total_files_analyzed"`
FilesWithDebt int `json:"files_with_debt"`
AnalyzedAt time.Time `json:"analyzed_at"`
}
SATDAnalysis represents the full SATD analysis result.
type SATDSummary ¶
type SATDSummary struct {
TotalItems int `json:"total_items"`
BySeverity map[string]int `json:"by_severity"`
ByCategory map[string]int `json:"by_category"`
ByFile map[string]int `json:"by_file"`
}
SATDSummary provides aggregate statistics.
func NewSATDSummary ¶
func NewSATDSummary() SATDSummary
NewSATDSummary creates an initialized summary.
func (*SATDSummary) AddItem ¶
func (s *SATDSummary) AddItem(item TechnicalDebt)
AddItem updates the summary with a new debt item.
type Severity ¶
type Severity string
Severity represents the urgency of addressing the debt.
type TDGAnalysis ¶ added in v1.0.0
type TDGAnalysis struct {
Files []TDGScore `json:"files"`
Summary TDGSummary `json:"summary"`
}
TDGAnalysis represents the full TDG analysis result.
type TDGComponents ¶ added in v1.0.0
type TDGComponents struct {
Complexity float64 `json:"complexity"`
Churn float64 `json:"churn"`
Coupling float64 `json:"coupling"`
Duplication float64 `json:"duplication"`
DomainRisk float64 `json:"domain_risk"`
}
TDGComponents represents the individual factors in TDG calculation.
type TDGScore ¶ added in v1.0.0
type TDGScore struct {
FilePath string `json:"file_path"`
Value float64 `json:"value"` // 0-100 scale (higher is better)
Severity TDGSeverity `json:"severity"`
Components TDGComponents `json:"components"`
}
TDGScore represents the Technical Debt Gradient for a file.
type TDGSeverity ¶
type TDGSeverity string
TDGSeverity represents the Technical Debt Gradient severity level.
const (
    TDGExcellent TDGSeverity = "excellent" // 90-100
    TDGGood      TDGSeverity = "good"      // 70-89
    TDGModerate  TDGSeverity = "moderate"  // 50-69
    TDGHighRisk  TDGSeverity = "high_risk" // 0-49
)
func CalculateTDGSeverity ¶ added in v1.0.0
func CalculateTDGSeverity(score float64) TDGSeverity
CalculateTDGSeverity determines severity from score (0-100 scale, higher is better).
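Given the severity bands in the const block above, the mapping is a simple threshold check; a sketch (severityFor is a hypothetical name, not the package function):

    func severityFor(score float64) TDGSeverity {
        switch {
        case score >= 90:
            return TDGExcellent
        case score >= 70:
            return TDGGood
        case score >= 50:
            return TDGModerate
        default:
            return TDGHighRisk
        }
    }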
func (TDGSeverity) Color ¶ added in v1.0.0
func (s TDGSeverity) Color() string
Color returns an ANSI color code for the severity.
type TDGSummary ¶
type TDGSummary struct {
TotalFiles int `json:"total_files"`
AvgScore float64 `json:"avg_score"`
MinScore float64 `json:"min_score"` // Worst (lowest) score in the codebase
P50Score float64 `json:"p50_score"`
P95Score float64 `json:"p95_score"`
BySeverity map[string]int `json:"by_severity"`
Hotspots []TDGScore `json:"hotspots"` // Top N worst scoring files
}
TDGSummary provides aggregate statistics.
func NewTDGSummary ¶ added in v1.0.0
func NewTDGSummary() TDGSummary
NewTDGSummary creates an initialized summary.
type TDGWeights ¶ added in v1.0.0
type TDGWeights struct {
Complexity float64 `json:"complexity"`
Churn float64 `json:"churn"`
Coupling float64 `json:"coupling"`
Duplication float64 `json:"duplication"`
DomainRisk float64 `json:"domain_risk"`
}
TDGWeights defines the weights for TDG calculation.
func DefaultTDGWeights ¶ added in v1.0.0
func DefaultTDGWeights() TDGWeights
DefaultTDGWeights returns the standard weights.
type TechnicalDebt ¶
type TechnicalDebt struct {
Category DebtCategory `json:"category"`
Severity Severity `json:"severity"`
File string `json:"file"`
Line uint32 `json:"line"`
Description string `json:"description"`
Marker string `json:"marker"` // TODO, FIXME, HACK, etc.
Text string `json:"text,omitempty"`
Column uint32 `json:"column,omitempty"`
ContextHash string `json:"context_hash,omitempty"` // BLAKE3 hash for identity tracking
Author string `json:"author,omitempty"`
Date *time.Time `json:"date,omitempty"`
}
TechnicalDebt represents a single SATD item found in code.
type UnreachableBlock ¶
type UnreachableBlock struct {
File string `json:"file"`
StartLine uint32 `json:"start_line"`
EndLine uint32 `json:"end_line"`
Reason string `json:"reason"` // e.g., "after return", "dead branch"
}
UnreachableBlock represents code that can never execute.
type Violation ¶
type Violation struct {
Severity ViolationSeverity `json:"severity"`
Rule string `json:"rule"`
Message string `json:"message"`
Value uint32 `json:"value"`
Threshold uint32 `json:"threshold"`
File string `json:"file"`
Line uint32 `json:"line"`
Function string `json:"function,omitempty"`
}
Violation represents a complexity threshold violation.
type ViolationSeverity ¶
type ViolationSeverity string
ViolationSeverity indicates the severity of a complexity violation.
const (
    SeverityWarning ViolationSeverity = "warning"
    SeverityError   ViolationSeverity = "error"
)