Documentation
¶
Index ¶
- Constants
- func CalculateConfidence(m FileMetrics) float32
- func CalculateProbability(m FileMetrics, w DefectWeights) float32
- func EscapeMermaidLabel(s string) string
- func SanitizeMermaidID(id string) string
- type CallGraph
- type ChurnAnalysis
- type ChurnSummary
- type CloneAnalysis
- type CloneGroup
- type CloneInstance
- type CloneSummary
- type CloneType
- type CodeClone
- type ComplexityAnalysis
- type ComplexityHotspot
- type ComplexityMetrics
- func (m *ComplexityMetrics) ComplexityScore() float64
- func (m *ComplexityMetrics) IsSimple(t ComplexityThresholds) bool
- func (m *ComplexityMetrics) IsSimpleDefault() bool
- func (m *ComplexityMetrics) NeedsRefactoring(t ComplexityThresholds) bool
- func (m *ComplexityMetrics) NeedsRefactoringDefault() bool
- type ComplexityReport
- type ComplexitySummary
- type ComplexityThresholds
- type DeadClass
- type DeadCodeAnalysis
- type DeadCodeKind
- type DeadCodeSummary
- type DeadFunction
- type DeadVariable
- type DebtCategory
- type DefectAnalysis
- type DefectScore
- type DefectSummary
- type DefectWeights
- type DependencyGraph
- type DuplicationHotspot
- type EdgeType
- type ExtendedComplexitySummary
- type ExtendedComplexityThresholds
- type FileChurnMetrics
- type FileComplexity
- type FileMetrics
- type FunctionComplexity
- type Grade
- type GraphEdge
- type GraphMetrics
- type GraphNode
- type GraphSummary
- type HalsteadMetrics
- type Language
- type LanguageOverride
- type MermaidDirection
- type MermaidOptions
- type MetricCategory
- type MinHashSignature
- type NodeMetric
- type NodeType
- type PenaltyAttribution
- type PenaltyConfig
- type PenaltyCurve
- type PenaltyTracker
- type ProjectScore
- type ReferenceEdge
- type ReferenceNode
- type ReferenceType
- type RiskLevel
- type SATDAnalysis
- type SATDSummary
- type Severity
- type TdgComparison
- type TdgConfig
- type TdgScore
- type TechnicalDebt
- type ThresholdConfig
- type UnreachableBlock
- type Violation
- type ViolationSeverity
- type WeightConfig
Constants ¶
const ( HotspotThreshold = 0.5 StableThreshold = 0.1 )
Thresholds for hotspot and stable file detection.
Variables ¶
This section is empty.
Functions ¶
func CalculateConfidence ¶
func CalculateConfidence(m FileMetrics) float32
CalculateConfidence computes confidence based on data availability.
func CalculateProbability ¶
func CalculateProbability(m FileMetrics, w DefectWeights) float32
CalculateProbability computes defect probability from metrics. PMAT-compatible: uses CDF normalization and sigmoid transformation.
func EscapeMermaidLabel ¶
EscapeMermaidLabel escapes special characters in labels for Mermaid.
func SanitizeMermaidID ¶
SanitizeMermaidID makes an ID safe for Mermaid diagrams.
Types ¶
type CallGraph ¶
type CallGraph struct {
Nodes map[uint32]*ReferenceNode `json:"nodes"`
Edges []ReferenceEdge `json:"edges"`
EntryPoints []uint32 `json:"entry_points"`
EdgeIndex map[uint32][]int `json:"-"` // node -> edge indices (outgoing)
}
CallGraph represents the reference graph for reachability analysis.
func (*CallGraph) AddEdge ¶
func (g *CallGraph) AddEdge(edge ReferenceEdge)
AddEdge adds an edge to the call graph with indexing.
func (*CallGraph) AddNode ¶
func (g *CallGraph) AddNode(node *ReferenceNode)
AddNode adds a node to the call graph.
func (*CallGraph) GetOutgoingEdges ¶
func (g *CallGraph) GetOutgoingEdges(nodeID uint32) []ReferenceEdge
GetOutgoingEdges returns all edges originating from a node.
type ChurnAnalysis ¶
type ChurnAnalysis struct {
Files []FileChurnMetrics `json:"files"`
Summary ChurnSummary `json:"summary"`
Days int `json:"days"`
RepoPath string `json:"repo_path"`
}
ChurnAnalysis represents the full churn analysis result.
type ChurnSummary ¶
type ChurnSummary struct {
TotalFiles int `json:"total_files"`
TotalCommits int `json:"total_commits"`
TotalLinesAdded int `json:"total_lines_added"`
TotalLinesDeleted int `json:"total_lines_deleted"`
UniqueAuthors int `json:"unique_authors"`
AvgCommitsPerFile float64 `json:"avg_commits_per_file"`
MaxChurnScore float64 `json:"max_churn_score"`
TopChurnedFiles []string `json:"top_churned_files"`
HotspotFiles []string `json:"hotspot_files"`
StableFiles []string `json:"stable_files"`
MeanChurnScore float64 `json:"mean_churn_score"`
VarianceChurn float64 `json:"variance_churn_score"`
StdDevChurn float64 `json:"stddev_churn_score"`
P50ChurnScore float64 `json:"p50_churn_score"`
P95ChurnScore float64 `json:"p95_churn_score"`
}
ChurnSummary provides aggregate statistics.
func NewChurnSummary ¶
func NewChurnSummary() ChurnSummary
NewChurnSummary creates an initialized summary.
func (*ChurnSummary) CalculateStatistics ¶
func (s *ChurnSummary) CalculateStatistics(files []FileChurnMetrics)
CalculateStatistics computes mean, variance, standard deviation, and percentiles of churn scores.
func (*ChurnSummary) IdentifyHotspotAndStableFiles ¶
func (s *ChurnSummary) IdentifyHotspotAndStableFiles(files []FileChurnMetrics)
IdentifyHotspotAndStableFiles populates HotspotFiles and StableFiles. Files must be sorted by ChurnScore descending before calling. Hotspots: the top 10 files filtered by churn_score > 0.5. Stable: the bottom 10 files filtered by churn_score < 0.1 and commit_count > 0.
type CloneAnalysis ¶
type CloneAnalysis struct {
Clones []CodeClone `json:"clones"`
Groups []CloneGroup `json:"groups,omitempty"`
Summary CloneSummary `json:"summary"`
TotalFilesScanned int `json:"total_files_scanned"`
MinLines int `json:"min_lines"`
Threshold float64 `json:"threshold"`
}
CloneAnalysis represents the full duplicate detection result.
type CloneGroup ¶
type CloneGroup struct {
ID uint64 `json:"id"`
Type CloneType `json:"type"`
Instances []CloneInstance `json:"instances"`
TotalLines int `json:"total_lines"`
TotalTokens int `json:"total_tokens"`
AverageSimilarity float64 `json:"average_similarity"`
}
CloneGroup represents a group of similar code fragments.
type CloneInstance ¶
type CloneInstance struct {
File string `json:"file"`
StartLine uint32 `json:"start_line"`
EndLine uint32 `json:"end_line"`
Lines int `json:"lines"`
NormalizedHash uint64 `json:"normalized_hash"`
Similarity float64 `json:"similarity"`
}
CloneInstance represents a single occurrence within a clone group.
type CloneSummary ¶
type CloneSummary struct {
TotalClones int `json:"total_clones"`
TotalGroups int `json:"total_groups"`
Type1Count int `json:"type1_count"`
Type2Count int `json:"type2_count"`
Type3Count int `json:"type3_count"`
DuplicatedLines int `json:"duplicated_lines"`
TotalLines int `json:"total_lines"`
DuplicationRatio float64 `json:"duplication_ratio"`
FileOccurrences map[string]int `json:"file_occurrences"`
AvgSimilarity float64 `json:"avg_similarity"`
P50Similarity float64 `json:"p50_similarity"`
P95Similarity float64 `json:"p95_similarity"`
Hotspots []DuplicationHotspot `json:"hotspots,omitempty"`
}
CloneSummary provides aggregate statistics.
func NewCloneSummary ¶
func NewCloneSummary() CloneSummary
NewCloneSummary creates an initialized summary.
func (*CloneSummary) AddClone ¶
func (s *CloneSummary) AddClone(c CodeClone)
AddClone updates the summary with a new clone.
type CodeClone ¶
type CodeClone struct {
Type CloneType `json:"type"`
Similarity float64 `json:"similarity"`
FileA string `json:"file_a"`
FileB string `json:"file_b"`
StartLineA uint32 `json:"start_line_a"`
EndLineA uint32 `json:"end_line_a"`
StartLineB uint32 `json:"start_line_b"`
EndLineB uint32 `json:"end_line_b"`
LinesA int `json:"lines_a"`
LinesB int `json:"lines_b"`
TokenCount int `json:"token_count,omitempty"`
GroupID uint64 `json:"group_id,omitempty"`
}
CodeClone represents a detected duplicate code fragment.
type ComplexityAnalysis ¶
type ComplexityAnalysis struct {
Files []FileComplexity `json:"files"`
Summary ComplexitySummary `json:"summary"`
}
ComplexityAnalysis represents the full analysis result.
type ComplexityHotspot ¶
type ComplexityHotspot struct {
File string `json:"file"`
Function string `json:"function,omitempty"`
Line uint32 `json:"line"`
Complexity uint32 `json:"complexity"`
ComplexityType string `json:"complexity_type"`
}
ComplexityHotspot identifies a high-complexity location in the codebase.
type ComplexityMetrics ¶
type ComplexityMetrics struct {
Cyclomatic uint32 `json:"cyclomatic"`
Cognitive uint32 `json:"cognitive"`
MaxNesting int `json:"max_nesting"`
Lines int `json:"lines"`
Halstead *HalsteadMetrics `json:"halstead,omitempty"`
}
ComplexityMetrics represents code complexity measurements for a function or file.
func (*ComplexityMetrics) ComplexityScore ¶
func (m *ComplexityMetrics) ComplexityScore() float64
ComplexityScore calculates a composite complexity score for ranking. Combines cyclomatic, cognitive, nesting, and lines with weighted factors.
func (*ComplexityMetrics) IsSimple ¶
func (m *ComplexityMetrics) IsSimple(t ComplexityThresholds) bool
IsSimple returns true if complexity is within acceptable limits.
func (*ComplexityMetrics) IsSimpleDefault ¶
func (m *ComplexityMetrics) IsSimpleDefault() bool
IsSimpleDefault checks if complexity is low using fixed thresholds (pmat compatible). Returns true if cyclomatic <= 5 and cognitive <= 7.
func (*ComplexityMetrics) NeedsRefactoring ¶
func (m *ComplexityMetrics) NeedsRefactoring(t ComplexityThresholds) bool
NeedsRefactoring returns true if any metric significantly exceeds thresholds.
func (*ComplexityMetrics) NeedsRefactoringDefault ¶
func (m *ComplexityMetrics) NeedsRefactoringDefault() bool
NeedsRefactoringDefault checks if complexity exceeds fixed thresholds (pmat compatible). Returns true if cyclomatic > 10 or cognitive > 15.
type ComplexityReport ¶
type ComplexityReport struct {
Summary ExtendedComplexitySummary `json:"summary"`
Violations []Violation `json:"violations"`
Hotspots []ComplexityHotspot `json:"hotspots"`
Files []FileComplexity `json:"files"`
TechnicalDebtHours float32 `json:"technical_debt_hours"`
}
ComplexityReport is the full analysis report with violations and hotspots.
func AggregateResults ¶
func AggregateResults(files []FileComplexity) *ComplexityReport
AggregateResults creates a ComplexityReport from file metrics using default thresholds.
func AggregateResultsWithThresholds ¶
func AggregateResultsWithThresholds(files []FileComplexity, maxCyclomatic, maxCognitive *uint32) *ComplexityReport
AggregateResultsWithThresholds creates a ComplexityReport with custom thresholds. If maxCyclomatic or maxCognitive is nil, defaults are used.
func (*ComplexityReport) ErrorCount ¶
func (r *ComplexityReport) ErrorCount() int
ErrorCount returns the number of error-severity violations.
func (*ComplexityReport) WarningCount ¶
func (r *ComplexityReport) WarningCount() int
WarningCount returns the number of warning-severity violations.
type ComplexitySummary ¶
type ComplexitySummary struct {
TotalFiles int `json:"total_files"`
TotalFunctions int `json:"total_functions"`
AvgCyclomatic float64 `json:"avg_cyclomatic"`
AvgCognitive float64 `json:"avg_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
P50Cyclomatic uint32 `json:"p50_cyclomatic"`
P95Cyclomatic uint32 `json:"p95_cyclomatic"`
P50Cognitive uint32 `json:"p50_cognitive"`
P95Cognitive uint32 `json:"p95_cognitive"`
ViolationCount int `json:"violation_count"`
}
ComplexitySummary provides aggregate statistics.
type ComplexityThresholds ¶
type ComplexityThresholds struct {
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
MaxNesting int `json:"max_nesting"`
}
ComplexityThresholds defines the limits for complexity violations.
func DefaultComplexityThresholds ¶
func DefaultComplexityThresholds() ComplexityThresholds
DefaultComplexityThresholds returns sensible defaults.
type DeadClass ¶
type DeadClass struct {
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
EndLine uint32 `json:"end_line"`
Confidence float64 `json:"confidence"`
Reason string `json:"reason"`
Kind DeadCodeKind `json:"kind,omitempty"`
NodeID uint32 `json:"node_id,omitempty"`
}
DeadClass represents an unused class/struct/type.
type DeadCodeAnalysis ¶
type DeadCodeAnalysis struct {
DeadFunctions []DeadFunction `json:"dead_functions"`
DeadVariables []DeadVariable `json:"dead_variables"`
DeadClasses []DeadClass `json:"dead_classes,omitempty"`
UnreachableCode []UnreachableBlock `json:"unreachable_code"`
Summary DeadCodeSummary `json:"summary"`
CallGraph *CallGraph `json:"call_graph,omitempty"`
}
DeadCodeAnalysis represents the full dead code detection result.
type DeadCodeKind ¶
type DeadCodeKind string
DeadCodeKind classifies the type of dead code detected.
const ( DeadKindFunction DeadCodeKind = "unused_function" DeadKindClass DeadCodeKind = "unused_class" DeadKindVariable DeadCodeKind = "unused_variable" DeadKindUnreachable DeadCodeKind = "unreachable_code" DeadKindDeadBranch DeadCodeKind = "dead_branch" )
type DeadCodeSummary ¶
type DeadCodeSummary struct {
TotalDeadFunctions int `json:"total_dead_functions"`
TotalDeadVariables int `json:"total_dead_variables"`
TotalDeadClasses int `json:"total_dead_classes"`
TotalUnreachableBlocks int `json:"total_unreachable_blocks"`
TotalUnreachableLines int `json:"total_unreachable_lines"`
DeadCodePercentage float64 `json:"dead_code_percentage"`
ByFile map[string]int `json:"by_file"`
ByKind map[DeadCodeKind]int `json:"by_kind,omitempty"`
TotalFilesAnalyzed int `json:"total_files_analyzed"`
TotalLinesAnalyzed int `json:"total_lines_analyzed"`
TotalNodesInGraph int `json:"total_nodes_in_graph,omitempty"`
ReachableNodes int `json:"reachable_nodes,omitempty"`
UnreachableNodes int `json:"unreachable_nodes,omitempty"`
ConfidenceLevel float64 `json:"confidence_level,omitempty"`
}
DeadCodeSummary provides aggregate statistics.
func NewDeadCodeSummary ¶
func NewDeadCodeSummary() DeadCodeSummary
NewDeadCodeSummary creates an initialized summary.
func (*DeadCodeSummary) AddDeadClass ¶
func (s *DeadCodeSummary) AddDeadClass(c DeadClass)
AddDeadClass updates the summary with a dead class.
func (*DeadCodeSummary) AddDeadFunction ¶
func (s *DeadCodeSummary) AddDeadFunction(f DeadFunction)
AddDeadFunction updates the summary with a dead function.
func (*DeadCodeSummary) AddDeadVariable ¶
func (s *DeadCodeSummary) AddDeadVariable(v DeadVariable)
AddDeadVariable updates the summary with a dead variable.
func (*DeadCodeSummary) AddUnreachableBlock ¶
func (s *DeadCodeSummary) AddUnreachableBlock(b UnreachableBlock)
AddUnreachableBlock updates the summary with unreachable code.
func (*DeadCodeSummary) CalculatePercentage ¶
func (s *DeadCodeSummary) CalculatePercentage()
CalculatePercentage computes dead code percentage.
type DeadFunction ¶
type DeadFunction struct {
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
EndLine uint32 `json:"end_line"`
Visibility string `json:"visibility"` // public, private, internal
Confidence float64 `json:"confidence"` // 0.0-1.0, how certain we are it's dead
Reason string `json:"reason"` // Why it's considered dead
Kind DeadCodeKind `json:"kind,omitempty"`
NodeID uint32 `json:"node_id,omitempty"`
}
DeadFunction represents an unused function detected in the codebase.
type DeadVariable ¶
type DeadVariable struct {
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
Confidence float64 `json:"confidence"`
Reason string `json:"reason,omitempty"`
Kind DeadCodeKind `json:"kind,omitempty"`
NodeID uint32 `json:"node_id,omitempty"`
}
DeadVariable represents an unused variable.
type DebtCategory ¶
type DebtCategory string
DebtCategory represents the type of technical debt.
const ( DebtDesign DebtCategory = "design" // HACK, KLUDGE, SMELL DebtDefect DebtCategory = "defect" // BUG, FIXME, BROKEN DebtRequirement DebtCategory = "requirement" // TODO, FEAT, ENHANCEMENT DebtTest DebtCategory = "test" // FAILING, SKIP, DISABLED DebtPerformance DebtCategory = "performance" // SLOW, OPTIMIZE, PERF DebtSecurity DebtCategory = "security" // SECURITY, VULN, UNSAFE )
type DefectAnalysis ¶
type DefectAnalysis struct {
Files []DefectScore `json:"files"`
Summary DefectSummary `json:"summary"`
Weights DefectWeights `json:"weights"`
}
DefectAnalysis represents the full defect prediction result.
type DefectScore ¶
type DefectScore struct {
FilePath string `json:"file_path"`
Probability float32 `json:"probability"` // 0.0 to 1.0
Confidence float32 `json:"confidence"` // 0.0 to 1.0
RiskLevel RiskLevel `json:"risk_level"`
ContributingFactors map[string]float32 `json:"contributing_factors"`
Recommendations []string `json:"recommendations"`
}
DefectScore represents the prediction result for a file.
type DefectSummary ¶
type DefectSummary struct {
TotalFiles int `json:"total_files"`
HighRiskCount int `json:"high_risk_count"`
MediumRiskCount int `json:"medium_risk_count"`
LowRiskCount int `json:"low_risk_count"`
AvgProbability float32 `json:"avg_probability"`
P50Probability float32 `json:"p50_probability"`
P95Probability float32 `json:"p95_probability"`
}
DefectSummary provides aggregate statistics.
type DefectWeights ¶
type DefectWeights struct {
Churn float32 `json:"churn"` // 0.35
Complexity float32 `json:"complexity"` // 0.30
Duplication float32 `json:"duplication"` // 0.25
Coupling float32 `json:"coupling"` // 0.10
}
DefectWeights defines the weights for defect prediction factors. Based on empirical research (PMAT approach).
func DefaultDefectWeights ¶
func DefaultDefectWeights() DefectWeights
DefaultDefectWeights returns the standard weights.
type DependencyGraph ¶
DependencyGraph represents the full graph structure.
func NewDependencyGraph ¶
func NewDependencyGraph() *DependencyGraph
NewDependencyGraph creates an empty graph.
func (*DependencyGraph) AddEdge ¶
func (g *DependencyGraph) AddEdge(edge GraphEdge)
AddEdge adds an edge to the graph.
func (*DependencyGraph) AddNode ¶
func (g *DependencyGraph) AddNode(node GraphNode)
AddNode adds a node to the graph.
func (*DependencyGraph) ToMermaid ¶
func (g *DependencyGraph) ToMermaid() string
ToMermaid generates Mermaid diagram syntax from the graph using default options.
func (*DependencyGraph) ToMermaidWithOptions ¶
func (g *DependencyGraph) ToMermaidWithOptions(opts MermaidOptions) string
ToMermaidWithOptions generates Mermaid diagram syntax with custom options.
type DuplicationHotspot ¶
type DuplicationHotspot struct {
File string `json:"file"`
DuplicateLines int `json:"duplicate_lines"`
CloneGroupCount int `json:"clone_group_count"`
Severity float64 `json:"severity"`
}
DuplicationHotspot represents a file with high duplication.
type ExtendedComplexitySummary ¶
type ExtendedComplexitySummary struct {
TotalFiles int `json:"total_files"`
TotalFunctions int `json:"total_functions"`
MedianCyclomatic float32 `json:"median_cyclomatic"`
MedianCognitive float32 `json:"median_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
P90Cyclomatic uint32 `json:"p90_cyclomatic"`
P90Cognitive uint32 `json:"p90_cognitive"`
TechnicalDebtHours float32 `json:"technical_debt_hours"`
}
ExtendedComplexitySummary provides enhanced statistics (pmat compatible).
type ExtendedComplexityThresholds ¶
type ExtendedComplexityThresholds struct {
CyclomaticWarn uint32 `json:"cyclomatic_warn"`
CyclomaticError uint32 `json:"cyclomatic_error"`
CognitiveWarn uint32 `json:"cognitive_warn"`
CognitiveError uint32 `json:"cognitive_error"`
NestingMax uint8 `json:"nesting_max"`
MethodLength uint16 `json:"method_length"`
}
ExtendedComplexityThresholds provides warn and error levels (pmat compatible).
func DefaultExtendedThresholds ¶
func DefaultExtendedThresholds() ExtendedComplexityThresholds
DefaultExtendedThresholds returns pmat-compatible default thresholds.
type FileChurnMetrics ¶
type FileChurnMetrics struct {
Path string `json:"path"`
Commits int `json:"commits"`
UniqueAuthors int `json:"unique_authors"`
Authors map[string]int `json:"authors"` // author email -> commit count
LinesAdded int `json:"lines_added"`
LinesDeleted int `json:"lines_deleted"`
ChurnScore float64 `json:"churn_score"` // 0.0-1.0 normalized
FirstCommit time.Time `json:"first_commit"`
LastCommit time.Time `json:"last_commit"`
}
FileChurnMetrics represents git churn data for a single file.
func (*FileChurnMetrics) CalculateChurnScore ¶
func (f *FileChurnMetrics) CalculateChurnScore() float64
CalculateChurnScore computes a normalized churn score. Uses the same formula as the reference implementation: churn_score = (commit_factor * 0.6 + change_factor * 0.4).
func (*FileChurnMetrics) CalculateChurnScoreWithMax ¶
func (f *FileChurnMetrics) CalculateChurnScoreWithMax(maxCommits, maxChanges int) float64
CalculateChurnScoreWithMax computes churn score with explicit max values.
func (*FileChurnMetrics) IsHotspot ¶
func (f *FileChurnMetrics) IsHotspot(threshold float64) bool
IsHotspot returns true if the file has high churn.
type FileComplexity ¶
type FileComplexity struct {
Path string `json:"path"`
Language string `json:"language"`
Functions []FunctionComplexity `json:"functions"`
TotalCyclomatic uint32 `json:"total_cyclomatic"`
TotalCognitive uint32 `json:"total_cognitive"`
AvgCyclomatic float64 `json:"avg_cyclomatic"`
AvgCognitive float64 `json:"avg_cognitive"`
MaxCyclomatic uint32 `json:"max_cyclomatic"`
MaxCognitive uint32 `json:"max_cognitive"`
ViolationCount int `json:"violation_count"`
}
FileComplexity represents aggregated complexity for a file.
type FileMetrics ¶
type FileMetrics struct {
FilePath string `json:"file_path"`
ChurnScore float32 `json:"churn_score"` // 0.0 to 1.0
Complexity float32 `json:"complexity"` // Raw complexity
DuplicateRatio float32 `json:"duplicate_ratio"` // 0.0 to 1.0
AfferentCoupling float32 `json:"afferent_coupling"` // Incoming deps
EfferentCoupling float32 `json:"efferent_coupling"` // Outgoing deps
LinesOfCode int `json:"lines_of_code"`
CyclomaticComplexity uint32 `json:"cyclomatic_complexity"`
CognitiveComplexity uint32 `json:"cognitive_complexity"`
}
FileMetrics contains input metrics for defect prediction.
type FunctionComplexity ¶
type FunctionComplexity struct {
Name string `json:"name"`
File string `json:"file"`
StartLine uint32 `json:"start_line"`
EndLine uint32 `json:"end_line"`
Metrics ComplexityMetrics `json:"metrics"`
Violations []string `json:"violations,omitempty"`
}
FunctionComplexity represents complexity metrics for a single function.
type Grade ¶
type Grade string
Grade represents a letter grade from A+ to F (PMAT-compatible). Higher grades indicate better code quality.
func GradeFromScore ¶
GradeFromScore converts a 0-100 score to a letter grade.
type GraphEdge ¶
type GraphEdge struct {
From string `json:"from"`
To string `json:"to"`
Type EdgeType `json:"type"`
Weight float64 `json:"weight,omitempty"`
}
GraphEdge represents a dependency between nodes.
type GraphMetrics ¶
type GraphMetrics struct {
NodeMetrics []NodeMetric `json:"node_metrics"`
Summary GraphSummary `json:"summary"`
}
GraphMetrics represents centrality and other graph metrics.
type GraphNode ¶
type GraphNode struct {
ID string `json:"id"`
Name string `json:"name"`
Type NodeType `json:"type"` // file, function, class, module
File string `json:"file"`
Line uint32 `json:"line,omitempty"`
Attributes map[string]string `json:"attributes,omitempty"`
}
GraphNode represents a node in the dependency graph.
type GraphSummary ¶
type GraphSummary struct {
TotalNodes int `json:"total_nodes"`
TotalEdges int `json:"total_edges"`
AvgDegree float64 `json:"avg_degree"`
Density float64 `json:"density"`
Components int `json:"components"`
LargestComponent int `json:"largest_component"`
StronglyConnectedComponents int `json:"strongly_connected_components"`
CycleCount int `json:"cycle_count"`
CycleNodes []string `json:"cycle_nodes,omitempty"`
IsCyclic bool `json:"is_cyclic"`
Diameter int `json:"diameter,omitempty"`
Radius int `json:"radius,omitempty"`
ClusteringCoefficient float64 `json:"clustering_coefficient"`
Assortativity float64 `json:"assortativity"`
Transitivity float64 `json:"transitivity"`
Reciprocity float64 `json:"reciprocity,omitempty"`
Modularity float64 `json:"modularity,omitempty"`
CommunityCount int `json:"community_count,omitempty"`
}
GraphSummary provides aggregate graph statistics.
type HalsteadMetrics ¶ added in v1.0.0
type HalsteadMetrics struct {
OperatorsUnique uint32 `json:"operators_unique"` // n1: distinct operators
OperandsUnique uint32 `json:"operands_unique"` // n2: distinct operands
OperatorsTotal uint32 `json:"operators_total"` // N1: total operators
OperandsTotal uint32 `json:"operands_total"` // N2: total operands
Vocabulary uint32 `json:"vocabulary"` // n = n1 + n2
Length uint32 `json:"length"` // N = N1 + N2
Volume float64 `json:"volume"` // V = N * log2(n)
Difficulty float64 `json:"difficulty"` // D = (n1/2) * (N2/n2)
Effort float64 `json:"effort"` // E = D * V
Time float64 `json:"time"` // T = E / 18 (seconds)
Bugs float64 `json:"bugs"` // B = E^(2/3) / 3000
}
HalsteadMetrics represents Halstead software science metrics.
func NewHalsteadMetrics ¶ added in v1.2.0
func NewHalsteadMetrics(operatorsUnique, operandsUnique, operatorsTotal, operandsTotal uint32) *HalsteadMetrics
NewHalsteadMetrics creates Halstead metrics from base counts and calculates derived values.
type Language ¶
type Language string
Language represents the detected programming language.
const ( LanguageUnknown Language = "unknown" LanguageRust Language = "rust" LanguageGo Language = "go" LanguagePython Language = "python" LanguageJavaScript Language = "javascript" LanguageTypeScript Language = "typescript" LanguageJava Language = "java" LanguageC Language = "c" LanguageCpp Language = "cpp" LanguageCSharp Language = "csharp" LanguageRuby Language = "ruby" LanguagePHP Language = "php" LanguageSwift Language = "swift" LanguageKotlin Language = "kotlin" )
func LanguageFromExtension ¶
LanguageFromExtension detects the language from a file extension.
func (Language) Confidence ¶
Confidence returns the detection confidence for the language.
type LanguageOverride ¶
type LanguageOverride struct {
MaxCognitiveComplexity *uint32 `json:"max_cognitive_complexity,omitempty" toml:"max_cognitive_complexity,omitempty"`
MinDocCoverage *float32 `json:"min_doc_coverage,omitempty" toml:"min_doc_coverage,omitempty"`
EnforceErrorCheck *bool `json:"enforce_error_check,omitempty" toml:"enforce_error_check,omitempty"`
MaxFunctionLength *uint32 `json:"max_function_length,omitempty" toml:"max_function_length,omitempty"`
}
LanguageOverride defines language-specific overrides.
type MermaidDirection ¶
type MermaidDirection string
MermaidDirection specifies the graph direction.
const ( DirectionTD MermaidDirection = "TD" // Top-down DirectionLR MermaidDirection = "LR" // Left-right DirectionBT MermaidDirection = "BT" // Bottom-top DirectionRL MermaidDirection = "RL" // Right-left )
type MermaidOptions ¶
type MermaidOptions struct {
MaxNodes int `json:"max_nodes"`
MaxEdges int `json:"max_edges"`
ShowComplexity bool `json:"show_complexity"`
GroupByModule bool `json:"group_by_module"`
NodeComplexity map[string]int `json:"node_complexity,omitempty"`
Direction MermaidDirection `json:"direction"`
}
MermaidOptions configures Mermaid diagram generation.
func DefaultMermaidOptions ¶
func DefaultMermaidOptions() MermaidOptions
DefaultMermaidOptions returns sensible defaults.
type MetricCategory ¶
type MetricCategory string
MetricCategory represents a category of TDG metrics.
const ( MetricStructuralComplexity MetricCategory = "structural_complexity" MetricSemanticComplexity MetricCategory = "semantic_complexity" MetricDuplication MetricCategory = "duplication" MetricCoupling MetricCategory = "coupling" MetricDocumentation MetricCategory = "documentation" MetricConsistency MetricCategory = "consistency" )
type MinHashSignature ¶
type MinHashSignature struct {
Values []uint64 `json:"values"`
}
MinHashSignature represents a MinHash signature for similarity estimation.
func (*MinHashSignature) JaccardSimilarity ¶
func (s *MinHashSignature) JaccardSimilarity(other *MinHashSignature) float64
JaccardSimilarity computes similarity between two MinHash signatures.
type NodeMetric ¶
type NodeMetric struct {
NodeID string `json:"node_id"`
Name string `json:"name"`
PageRank float64 `json:"pagerank"`
BetweennessCentrality float64 `json:"betweenness_centrality"`
ClosenessCentrality float64 `json:"closeness_centrality"`
EigenvectorCentrality float64 `json:"eigenvector_centrality"`
HarmonicCentrality float64 `json:"harmonic_centrality"`
InDegree int `json:"in_degree"`
OutDegree int `json:"out_degree"`
ClusteringCoef float64 `json:"clustering_coefficient"`
CommunityID int `json:"community_id,omitempty"`
}
NodeMetric represents computed metrics for a single node.
type PenaltyAttribution ¶
type PenaltyAttribution struct {
SourceMetric MetricCategory `json:"source_metric"`
Amount float32 `json:"amount"`
AppliedTo []MetricCategory `json:"applied_to"`
Issue string `json:"issue"`
}
PenaltyAttribution tracks where a penalty was applied.
type PenaltyConfig ¶
type PenaltyConfig struct {
ComplexityPenaltyBase PenaltyCurve `json:"complexity_penalty_base" toml:"complexity_penalty_base"`
DuplicationPenaltyCurve PenaltyCurve `json:"duplication_penalty_curve" toml:"duplication_penalty_curve"`
CouplingPenaltyCurve PenaltyCurve `json:"coupling_penalty_curve" toml:"coupling_penalty_curve"`
}
PenaltyConfig defines penalty curves for each metric.
func DefaultPenaltyConfig ¶
func DefaultPenaltyConfig() PenaltyConfig
DefaultPenaltyConfig returns the default penalty configuration.
type PenaltyCurve ¶
type PenaltyCurve string
PenaltyCurve defines how penalties are applied.
const ( PenaltyCurveLinear PenaltyCurve = "linear" PenaltyCurveLogarithmic PenaltyCurve = "logarithmic" PenaltyCurveQuadratic PenaltyCurve = "quadratic" PenaltyCurveExponential PenaltyCurve = "exponential" )
func (PenaltyCurve) Apply ¶
func (pc PenaltyCurve) Apply(value, base float32) float32
Apply applies the penalty curve to a value.
type PenaltyTracker ¶
type PenaltyTracker struct {
// contains filtered or unexported fields
}
PenaltyTracker tracks penalties applied during analysis.
func NewPenaltyTracker ¶
func NewPenaltyTracker() *PenaltyTracker
NewPenaltyTracker creates a new penalty tracker.
func (*PenaltyTracker) Apply ¶
func (pt *PenaltyTracker) Apply(issueID string, category MetricCategory, amount float32, issue string) float32
Apply attempts to apply a penalty, returning the amount if applied or 0 if already applied.
func (*PenaltyTracker) GetAttributions ¶
func (pt *PenaltyTracker) GetAttributions() []PenaltyAttribution
GetAttributions returns all applied penalty attributions.
type ProjectScore ¶
type ProjectScore struct {
Files []TdgScore `json:"files"`
AverageScore float32 `json:"average_score"`
AverageGrade Grade `json:"average_grade"`
TotalFiles int `json:"total_files"`
LanguageDistribution map[Language]int `json:"language_distribution"`
}
ProjectScore represents aggregated TDG scores for a project.
func AggregateProjectScore ¶
func AggregateProjectScore(scores []TdgScore) ProjectScore
AggregateProjectScore creates a ProjectScore from individual file scores.
func (*ProjectScore) Average ¶
func (p *ProjectScore) Average() TdgScore
Average returns the average TDG score across all files.
type ReferenceEdge ¶
type ReferenceEdge struct {
From uint32 `json:"from"`
To uint32 `json:"to"`
Type ReferenceType `json:"type"`
Confidence float64 `json:"confidence"`
}
ReferenceEdge represents a relationship between two code elements.
type ReferenceNode ¶
type ReferenceNode struct {
ID uint32 `json:"id"`
Name string `json:"name"`
File string `json:"file"`
Line uint32 `json:"line"`
EndLine uint32 `json:"end_line"`
Kind string `json:"kind"` // function, class, variable
Language string `json:"language"`
IsExported bool `json:"is_exported"`
IsEntry bool `json:"is_entry"`
}
ReferenceNode represents a code element in the reference graph.
type ReferenceType ¶
type ReferenceType string
ReferenceType classifies the relationship between code elements.
const ( RefDirectCall ReferenceType = "direct_call" RefIndirectCall ReferenceType = "indirect_call" RefImport ReferenceType = "import" RefInheritance ReferenceType = "inheritance" RefTypeReference ReferenceType = "type_reference" RefDynamicDispatch ReferenceType = "dynamic_dispatch" )
type RiskLevel ¶
type RiskLevel string
RiskLevel represents the defect probability risk category. PMAT-compatible: 3 levels with thresholds at 0.3 and 0.7
func CalculateRiskLevel ¶
CalculateRiskLevel determines risk level from probability. PMAT-compatible: Low (<0.3), Medium (>=0.3 and <0.7), High (>=0.7).
type SATDAnalysis ¶
type SATDAnalysis struct {
Items []TechnicalDebt `json:"items"`
Summary SATDSummary `json:"summary"`
TotalFilesAnalyzed int `json:"total_files_analyzed"`
FilesWithDebt int `json:"files_with_debt"`
AnalyzedAt time.Time `json:"analyzed_at"`
}
SATDAnalysis represents the full SATD analysis result.
type SATDSummary ¶
type SATDSummary struct {
TotalItems int `json:"total_items"`
BySeverity map[string]int `json:"by_severity"`
ByCategory map[string]int `json:"by_category"`
ByFile map[string]int `json:"by_file"`
}
SATDSummary provides aggregate statistics.
func NewSATDSummary ¶
func NewSATDSummary() SATDSummary
NewSATDSummary creates an initialized summary.
func (*SATDSummary) AddItem ¶
func (s *SATDSummary) AddItem(item TechnicalDebt)
AddItem updates the summary with a new debt item.
type Severity ¶
type Severity string
Severity represents the urgency of addressing the debt.
type TdgComparison ¶
type TdgComparison struct {
Source1 TdgScore `json:"source1"`
Source2 TdgScore `json:"source2"`
Delta float32 `json:"delta"`
ImprovementPercentage float32 `json:"improvement_percentage"`
Winner string `json:"winner"`
Improvements []string `json:"improvements"`
Regressions []string `json:"regressions"`
}
TdgComparison represents a comparison between two TDG scores.
func NewTdgComparison ¶
func NewTdgComparison(source1, source2 TdgScore) TdgComparison
NewTdgComparison creates a comparison between two scores.
type TdgConfig ¶
type TdgConfig struct {
Weights WeightConfig `json:"weights" toml:"weights"`
Thresholds ThresholdConfig `json:"thresholds" toml:"thresholds"`
Penalties PenaltyConfig `json:"penalties" toml:"penalties"`
LanguageOverrides map[string]LanguageOverride `json:"language_overrides,omitempty" toml:"language_overrides,omitempty"`
}
TdgConfig is the TDG configuration.
func DefaultTdgConfig ¶
func DefaultTdgConfig() TdgConfig
DefaultTdgConfig returns the default TDG configuration.
func LoadTdgConfig ¶
LoadTdgConfig loads configuration from a JSON file.
type TdgScore ¶
type TdgScore struct {
// Component scores (each contributes to the 100-point total)
StructuralComplexity float32 `json:"structural_complexity"` // Max 25 points
SemanticComplexity float32 `json:"semantic_complexity"` // Max 20 points
DuplicationRatio float32 `json:"duplication_ratio"` // Max 20 points
CouplingScore float32 `json:"coupling_score"` // Max 15 points
DocCoverage float32 `json:"doc_coverage"` // Max 10 points
ConsistencyScore float32 `json:"consistency_score"` // Max 10 points
EntropyScore float32 `json:"entropy_score"` // Max 10 points (pattern entropy)
// Aggregated score and grade
Total float32 `json:"total"` // 0-100 (higher is better)
Grade Grade `json:"grade"` // A+ to F
// Metadata
Confidence float32 `json:"confidence"` // 0-1 confidence in the score
Language Language `json:"language"` // Detected language
FilePath string `json:"file_path,omitempty"` // Source file path
CriticalDefectsCount int `json:"critical_defects_count"` // Count of critical defects
HasCriticalDefects bool `json:"has_critical_defects"` // Auto-fail flag
// Penalty tracking for transparency
PenaltiesApplied []PenaltyAttribution `json:"penalties_applied,omitempty"`
}
TdgScore represents a TDG score (0-100, higher is better).
func NewTdgScore ¶
func NewTdgScore() TdgScore
NewTdgScore creates a new TDG score with default values.
func (*TdgScore) CalculateTotal ¶
func (s *TdgScore) CalculateTotal()
CalculateTotal computes the total score and grade from components.
func (*TdgScore) SetMetric ¶
func (s *TdgScore) SetMetric(category MetricCategory, value float32)
SetMetric sets a metric value by category.
type TechnicalDebt ¶
type TechnicalDebt struct {
Category DebtCategory `json:"category"`
Severity Severity `json:"severity"`
File string `json:"file"`
Line uint32 `json:"line"`
Description string `json:"description"`
Marker string `json:"marker"` // TODO, FIXME, HACK, etc.
Text string `json:"text,omitempty"`
Column uint32 `json:"column,omitempty"`
ContextHash string `json:"context_hash,omitempty"` // BLAKE3 hash for identity tracking
Author string `json:"author,omitempty"`
Date *time.Time `json:"date,omitempty"`
}
TechnicalDebt represents a single SATD item found in code.
type ThresholdConfig ¶
type ThresholdConfig struct {
MaxCyclomaticComplexity uint32 `json:"max_cyclomatic_complexity" toml:"max_cyclomatic_complexity"`
MaxCognitiveComplexity uint32 `json:"max_cognitive_complexity" toml:"max_cognitive_complexity"`
MaxNestingDepth uint32 `json:"max_nesting_depth" toml:"max_nesting_depth"`
MinTokenSequence uint32 `json:"min_token_sequence" toml:"min_token_sequence"`
SimilarityThreshold float32 `json:"similarity_threshold" toml:"similarity_threshold"`
MaxCoupling uint32 `json:"max_coupling" toml:"max_coupling"`
MinDocCoverage float32 `json:"min_doc_coverage" toml:"min_doc_coverage"`
}
ThresholdConfig defines thresholds for TDG analysis.
func DefaultThresholdConfig ¶
func DefaultThresholdConfig() ThresholdConfig
DefaultThresholdConfig returns enterprise-standard thresholds.
type UnreachableBlock ¶
type UnreachableBlock struct {
File string `json:"file"`
StartLine uint32 `json:"start_line"`
EndLine uint32 `json:"end_line"`
Reason string `json:"reason"` // e.g., "after return", "dead branch"
}
UnreachableBlock represents code that can never execute.
type Violation ¶
type Violation struct {
Severity ViolationSeverity `json:"severity"`
Rule string `json:"rule"`
Message string `json:"message"`
Value uint32 `json:"value"`
Threshold uint32 `json:"threshold"`
File string `json:"file"`
Line uint32 `json:"line"`
Function string `json:"function,omitempty"`
}
Violation represents a complexity threshold violation.
type ViolationSeverity ¶
type ViolationSeverity string
ViolationSeverity indicates the severity of a complexity violation.
const (
	SeverityWarning ViolationSeverity = "warning"
	SeverityError   ViolationSeverity = "error"
)
type WeightConfig ¶
type WeightConfig struct {
StructuralComplexity float32 `json:"structural_complexity" toml:"structural_complexity"`
SemanticComplexity float32 `json:"semantic_complexity" toml:"semantic_complexity"`
Duplication float32 `json:"duplication" toml:"duplication"`
Coupling float32 `json:"coupling" toml:"coupling"`
Documentation float32 `json:"documentation" toml:"documentation"`
Consistency float32 `json:"consistency" toml:"consistency"`
}
WeightConfig defines the weight for each TDG metric component.
func DefaultWeightConfig ¶
func DefaultWeightConfig() WeightConfig
DefaultWeightConfig returns the default weight configuration.