Files
bzzz/pkg/slurp/intelligence/directory_analyzer.go
anthonyrawlins d96c931a29 Resolve import cycles and migrate to chorus.services module path
This comprehensive refactoring addresses critical architectural issues:

IMPORT CYCLE RESOLUTION:
• pkg/crypto ↔ pkg/slurp/roles: Created pkg/security/access_levels.go
• pkg/ucxl → pkg/dht: Created pkg/storage/interfaces.go
• pkg/slurp/leader → pkg/election → pkg/slurp/storage: Moved types to pkg/election/interfaces.go

MODULE PATH MIGRATION:
• Changed from github.com/anthonyrawlins/bzzz to chorus.services/bzzz
• Updated all import statements across 115+ files
• Maintains compatibility while removing personal GitHub account dependency

TYPE SYSTEM IMPROVEMENTS:
• Resolved duplicate type declarations in crypto package
• Added missing type definitions (RoleStatus, TimeRestrictions, KeyStatus, KeyRotationResult)
• Proper interface segregation to prevent future cycles

ARCHITECTURAL BENEFITS:
• Build now progresses past structural issues to normal dependency resolution
• Cleaner separation of concerns between packages
• Eliminates circular dependencies that prevented compilation
• Establishes foundation for scalable codebase growth

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-17 10:04:25 +10:00

1505 lines
42 KiB
Go

package intelligence
import (
"context"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"time"
"chorus.services/bzzz/pkg/ucxl"
slurpContext "chorus.services/bzzz/pkg/slurp/context"
)
// DefaultDirectoryAnalyzer provides comprehensive directory structure analysis
type DefaultDirectoryAnalyzer struct {
	config               *EngineConfig         // engine-wide analysis configuration
	organizationDetector *OrganizationDetector // matches known architectural/organizational patterns
	conventionAnalyzer   *ConventionAnalyzer   // detects naming conventions and coding standards
	relationshipAnalyzer *RelationshipAnalyzer // detects cross-directory dependencies via imports
}

// OrganizationDetector detects organizational patterns in directory structures
type OrganizationDetector struct {
	commonPatterns map[string]*OrganizationalPattern // keyed by pattern ID (e.g. "mvc")
}

// ConventionAnalyzer analyzes naming and organizational conventions
type ConventionAnalyzer struct {
	namingRegexes map[string]*regexp.Regexp // keyed by convention name (e.g. "camelCase")
	standards     map[string]*CodingStandard // keyed by language name (e.g. "go")
}

// RelationshipAnalyzer analyzes relationships between directories and files
type RelationshipAnalyzer struct {
	dependencyDetectors map[string]*DependencyDetector // keyed by language name
}

// DependencyDetector detects dependencies for specific languages/frameworks
type DependencyDetector struct {
	importPatterns []*regexp.Regexp // regexes whose first capture group is the import path
	configFiles    []string         // dependency manifest files for the language (e.g. go.mod)
}

// CodingStandard represents a coding standard or convention
type CodingStandard struct {
	Name        string             // human-readable standard name (e.g. "PEP 8")
	Rules       []*ConventionRule  // individual rules making up the standard
	FileTypes   []string           // file extensions the standard applies to
	Description string             // short description of the standard
}

// ConventionRule represents a single convention rule
type ConventionRule struct {
	Type        string // naming, structure, organization
	Pattern     string // regex or literal pattern the rule checks
	Description string // what the rule enforces
	Severity    string // error, warning, info
}
// NewDefaultDirectoryAnalyzer constructs a directory analyzer wired with the
// default organization, convention, and relationship sub-analyzers.
func NewDefaultDirectoryAnalyzer(config *EngineConfig) *DefaultDirectoryAnalyzer {
	da := &DefaultDirectoryAnalyzer{config: config}
	da.organizationDetector = NewOrganizationDetector()
	da.conventionAnalyzer = NewConventionAnalyzer()
	da.relationshipAnalyzer = NewRelationshipAnalyzer()
	return da
}
// NewOrganizationDetector creates an organization pattern detector pre-loaded
// with well-known architectural patterns (MVC, Clean Architecture, DDD,
// feature-based, microservices). The Examples directory names are what gets
// matched against observed subdirectories later.
func NewOrganizationDetector() *OrganizationDetector {
	detector := &OrganizationDetector{
		commonPatterns: make(map[string]*OrganizationalPattern),
	}
	// Define common organizational patterns
	patterns := []*OrganizationalPattern{
		{
			Pattern: Pattern{
				ID:          "mvc",
				Name:        "Model-View-Controller (MVC)",
				Type:        "architectural",
				Description: "Separates concerns into models, views, and controllers",
				Confidence:  0.9,
				Examples:    []string{"models/", "views/", "controllers/"},
				Benefits:    []string{"Clear separation of concerns", "Maintainable code structure"},
			},
			Structure:   "layered",
			Depth:       2,
			FanOut:      3,
			Modularity:  0.8,
			Scalability: "good",
		},
		{
			Pattern: Pattern{
				ID:          "clean_architecture",
				Name:        "Clean Architecture",
				Type:        "architectural",
				Description: "Dependency inversion with clear boundaries",
				Confidence:  0.85,
				Examples:    []string{"entities/", "usecases/", "adapters/", "frameworks/"},
				Benefits:    []string{"Testable", "Independent of frameworks", "Independent of UI"},
			},
			Structure:   "onion",
			Depth:       3,
			FanOut:      4,
			Modularity:  0.95,
			Scalability: "excellent",
		},
		{
			Pattern: Pattern{
				ID:          "domain_driven",
				Name:        "Domain-Driven Design (DDD)",
				Type:        "architectural",
				Description: "Organized around business domains",
				Confidence:  0.8,
				Examples:    []string{"domain/", "application/", "infrastructure/"},
				Benefits:    []string{"Business-focused", "Clear domain boundaries"},
			},
			Structure:   "domain-based",
			Depth:       3,
			FanOut:      5,
			Modularity:  0.9,
			Scalability: "excellent",
		},
		{
			Pattern: Pattern{
				ID:          "feature_based",
				Name:        "Feature-Based Organization",
				Type:        "organizational",
				Description: "Organized by features rather than technical layers",
				Confidence:  0.75,
				Examples:    []string{"user-management/", "payment/", "notifications/"},
				Benefits:    []string{"Feature-focused development", "Team autonomy"},
			},
			Structure:   "feature-vertical",
			Depth:       2,
			FanOut:      6,
			Modularity:  0.85,
			Scalability: "good",
		},
		{
			Pattern: Pattern{
				ID:          "microservices",
				Name:        "Microservices Pattern",
				Type:        "architectural",
				Description: "Independent services with their own data",
				Confidence:  0.8,
				Examples:    []string{"services/", "api-gateway/", "shared/"},
				Benefits:    []string{"Independent deployment", "Technology diversity", "Fault isolation"},
			},
			Structure:   "service-oriented",
			Depth:       2,
			FanOut:      8,
			Modularity:  0.95,
			Scalability: "excellent",
		},
	}
	// Index every pattern by its ID for O(1) lookup during detection.
	for _, pattern := range patterns {
		detector.commonPatterns[pattern.ID] = pattern
	}
	return detector
}
// NewConventionAnalyzer creates a convention analyzer pre-loaded with regexes
// for the common naming styles and with per-language coding standards for
// Go, Python, and JavaScript/TypeScript.
func NewConventionAnalyzer() *ConventionAnalyzer {
	analyzer := &ConventionAnalyzer{
		namingRegexes: make(map[string]*regexp.Regexp),
		standards:     make(map[string]*CodingStandard),
	}
	// Define naming convention regexes (ASCII-only; these intentionally
	// overlap — e.g. an all-lowercase name matches several styles).
	analyzer.namingRegexes["camelCase"] = regexp.MustCompile(`^[a-z][a-zA-Z0-9]*$`)
	analyzer.namingRegexes["PascalCase"] = regexp.MustCompile(`^[A-Z][a-zA-Z0-9]*$`)
	analyzer.namingRegexes["snake_case"] = regexp.MustCompile(`^[a-z][a-z0-9_]*$`)
	analyzer.namingRegexes["kebab-case"] = regexp.MustCompile(`^[a-z][a-z0-9-]*$`)
	analyzer.namingRegexes["SCREAMING_SNAKE"] = regexp.MustCompile(`^[A-Z][A-Z0-9_]*$`)
	// Define coding standards
	goStandard := &CodingStandard{
		Name:        "Go Standard",
		FileTypes:   []string{".go"},
		Description: "Go language conventions",
		Rules: []*ConventionRule{
			{Type: "naming", Pattern: "^[A-Z][a-zA-Z0-9]*$", Description: "Exported functions/types use PascalCase"},
			{Type: "naming", Pattern: "^[a-z][a-zA-Z0-9]*$", Description: "Private functions/variables use camelCase"},
			{Type: "structure", Pattern: "package main", Description: "Executable packages use 'main'"},
		},
	}
	pythonStandard := &CodingStandard{
		Name:        "PEP 8",
		FileTypes:   []string{".py"},
		Description: "Python enhancement proposal 8 style guide",
		Rules: []*ConventionRule{
			{Type: "naming", Pattern: "^[a-z][a-z0-9_]*$", Description: "Functions and variables use snake_case"},
			{Type: "naming", Pattern: "^[A-Z][a-zA-Z0-9]*$", Description: "Classes use PascalCase"},
			{Type: "naming", Pattern: "^[A-Z][A-Z0-9_]*$", Description: "Constants use SCREAMING_SNAKE_CASE"},
		},
	}
	jsStandard := &CodingStandard{
		Name:        "JavaScript Standard",
		FileTypes:   []string{".js", ".jsx", ".ts", ".tsx"},
		Description: "JavaScript/TypeScript conventions",
		Rules: []*ConventionRule{
			{Type: "naming", Pattern: "^[a-z][a-zA-Z0-9]*$", Description: "Variables and functions use camelCase"},
			{Type: "naming", Pattern: "^[A-Z][a-zA-Z0-9]*$", Description: "Classes and components use PascalCase"},
			{Type: "naming", Pattern: "^[A-Z][A-Z0-9_]*$", Description: "Constants use SCREAMING_SNAKE_CASE"},
		},
	}
	analyzer.standards["go"] = goStandard
	analyzer.standards["python"] = pythonStandard
	// JavaScript and TypeScript intentionally share the same standard.
	analyzer.standards["javascript"] = jsStandard
	analyzer.standards["typescript"] = jsStandard
	return analyzer
}
// NewRelationshipAnalyzer creates a relationship analyzer with per-language
// import-statement detectors for Go, Python, and JavaScript/TypeScript. Each
// detector's regexes capture the imported path in group 1.
func NewRelationshipAnalyzer() *RelationshipAnalyzer {
	analyzer := &RelationshipAnalyzer{
		dependencyDetectors: make(map[string]*DependencyDetector),
	}
	// Go dependency detector
	goDetector := &DependencyDetector{
		importPatterns: []*regexp.Regexp{
			regexp.MustCompile(`import\s+"([^"]+)"`),
			// Aliased import form: import alias "path"
			regexp.MustCompile(`import\s+\w+\s+"([^"]+)"`),
		},
		configFiles: []string{"go.mod", "go.sum"},
	}
	// Python dependency detector
	pythonDetector := &DependencyDetector{
		importPatterns: []*regexp.Regexp{
			regexp.MustCompile(`from\s+([^\s]+)\s+import`),
			regexp.MustCompile(`import\s+([^\s]+)`),
		},
		configFiles: []string{"requirements.txt", "Pipfile", "pyproject.toml", "setup.py"},
	}
	// JavaScript dependency detector (covers ESM import and CommonJS require)
	jsDetector := &DependencyDetector{
		importPatterns: []*regexp.Regexp{
			regexp.MustCompile(`import\s+.*from\s+['"]([^'"]+)['"]`),
			regexp.MustCompile(`require\s*\(\s*['"]([^'"]+)['"]`),
		},
		configFiles: []string{"package.json", "yarn.lock", "package-lock.json"},
	}
	analyzer.dependencyDetectors["go"] = goDetector
	analyzer.dependencyDetectors["python"] = pythonDetector
	// JavaScript and TypeScript intentionally share the same detector.
	analyzer.dependencyDetectors["javascript"] = jsDetector
	analyzer.dependencyDetectors["typescript"] = jsDetector
	return analyzer
}
// AnalyzeStructure analyzes directory organization patterns. It walks the
// whole tree under dirPath gathering file/size/type/language counts, then
// attaches organization and convention info (falling back to neutral
// defaults when those sub-analyses fail) plus inferred purpose and
// architecture.
func (da *DefaultDirectoryAnalyzer) AnalyzeStructure(ctx context.Context, dirPath string) (*DirectoryStructure, error) {
	result := &DirectoryStructure{
		Path:         dirPath,
		FileTypes:    make(map[string]int),
		Languages:    make(map[string]int),
		Dependencies: []string{},
		AnalyzedAt:   time.Now(),
	}
	// Walk the directory tree, tallying directories, files, sizes, and
	// extension/language histograms.
	walkErr := filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			result.DirectoryCount++
			return nil
		}
		result.FileCount++
		result.TotalSize += info.Size()
		if ext := strings.ToLower(filepath.Ext(path)); ext != "" {
			result.FileTypes[ext]++
			if lang := da.mapExtensionToLanguage(ext); lang != "" {
				result.Languages[lang]++
			}
		}
		return nil
	})
	if walkErr != nil {
		return nil, fmt.Errorf("failed to walk directory: %w", walkErr)
	}
	// Organization analysis is best-effort; fall back to an unknown pattern.
	orgInfo, orgErr := da.analyzeOrganization(dirPath)
	if orgErr != nil {
		orgInfo = &OrganizationInfo{
			Pattern:     "unknown",
			Consistency: 0.5,
		}
	}
	result.Organization = orgInfo
	// Convention analysis is likewise best-effort.
	convInfo, convErr := da.analyzeConventions(ctx, dirPath)
	if convErr != nil {
		convInfo = &ConventionInfo{
			NamingStyle: "mixed",
			Consistency: 0.5,
		}
	}
	result.Conventions = convInfo
	// Derive purpose and architecture from the collected statistics.
	result.Purpose = da.determinePurpose(result)
	result.Architecture = da.determineArchitecture(result, orgInfo)
	return result, nil
}
// DetectConventions identifies naming and organizational conventions under
// dirPath, returning the detected patterns, an overall consistency score,
// observed violations, and improvement recommendations.
func (da *DefaultDirectoryAnalyzer) DetectConventions(ctx context.Context, dirPath string) (*ConventionAnalysis, error) {
	// Gather every file and directory path first; everything else derives
	// from these two lists.
	files, dirs, err := da.collectFilesAndDirs(dirPath)
	if err != nil {
		return nil, fmt.Errorf("failed to collect files and directories: %w", err)
	}
	result := &ConventionAnalysis{
		NamingPatterns:         da.detectNamingPatterns(files, dirs),
		OrganizationalPatterns: da.detectOrganizationalPatterns(ctx, dirPath, dirs),
		Consistency:            0.0,
		Violations:             []*Violation{},
		Recommendations:        []*Recommendation{},
		AppliedStandards:       []string{},
		AnalyzedAt:             time.Now(),
	}
	result.Consistency = da.calculateConventionConsistency(files, dirs, result.NamingPatterns)
	result.Violations = da.findConventionViolations(files, dirs, result.NamingPatterns)
	result.Recommendations = da.generateConventionRecommendations(result)
	return result, nil
}
// IdentifyPurpose determines the primary purpose of a directory. It first
// consults a table of well-known directory names; if the name is not
// recognized it falls back to content-based inference from the language and
// file-type histograms. Returns the purpose, a confidence in [0,1], and a
// nil error (the error slot is kept for interface compatibility).
func (da *DefaultDirectoryAnalyzer) IdentifyPurpose(ctx context.Context, structure *DirectoryStructure) (string, float64, error) {
	type guess struct {
		purpose    string
		confidence float64
	}
	// Conventional directory names and their usual purposes.
	wellKnown := map[string]guess{
		"src":          {"Source code repository", 0.9},
		"source":       {"Source code repository", 0.9},
		"lib":          {"Library code", 0.8},
		"libs":         {"Library code", 0.8},
		"vendor":       {"Third-party dependencies", 0.9},
		"node_modules": {"Node.js dependencies", 0.95},
		"build":        {"Build artifacts", 0.9},
		"dist":         {"Distribution files", 0.9},
		"bin":          {"Binary executables", 0.9},
		"test":         {"Test code", 0.9},
		"tests":        {"Test code", 0.9},
		"docs":         {"Documentation", 0.9},
		"doc":          {"Documentation", 0.9},
		"config":       {"Configuration files", 0.9},
		"configs":      {"Configuration files", 0.9},
		"scripts":      {"Utility scripts", 0.8},
		"tools":        {"Development tools", 0.8},
		"assets":       {"Static assets", 0.8},
		"public":       {"Public web assets", 0.8},
		"static":       {"Static files", 0.8},
		"templates":    {"Template files", 0.8},
		"migrations":   {"Database migrations", 0.9},
		"models":       {"Data models", 0.8},
		"views":        {"View layer", 0.8},
		"controllers":  {"Controller layer", 0.8},
		"services":     {"Service layer", 0.8},
		"components":   {"Reusable components", 0.8},
		"modules":      {"Modular components", 0.8},
		"packages":     {"Package organization", 0.7},
		"internal":     {"Internal implementation", 0.8},
		"cmd":          {"Command-line applications", 0.9},
		"api":          {"API implementation", 0.8},
		"pkg":          {"Go package directory", 0.8},
	}
	dirName := strings.ToLower(filepath.Base(structure.Path))
	if g, ok := wellKnown[dirName]; ok {
		return g.purpose, g.confidence, nil
	}
	// Unrecognized name: infer from content when language stats exist.
	if structure.Languages != nil {
		total := 0
		for _, count := range structure.Languages {
			total += count
		}
		if total > 0 {
			switch {
			case structure.Languages["javascript"] > total/2 || structure.Languages["typescript"] > total/2:
				return "Frontend application code", 0.7, nil
			case structure.Languages["go"] > total/2:
				return "Go application or service", 0.7, nil
			case structure.Languages["python"] > total/2:
				return "Python application or library", 0.7, nil
			case structure.FileTypes[".html"] > 0 || structure.FileTypes[".css"] > 0:
				return "Web frontend resources", 0.7, nil
			case structure.FileTypes[".sql"] > 0:
				return "Database schema and queries", 0.8, nil
			}
		}
	}
	return "General purpose directory", 0.5, nil
}
// AnalyzeRelationships analyzes relationships between the immediate
// subdirectories of dirPath: import-based dependencies, derived directory
// relations, coupling metrics, a modularity score, and an inferred
// architectural style.
func (da *DefaultDirectoryAnalyzer) AnalyzeRelationships(ctx context.Context, dirPath string) (*RelationshipAnalysis, error) {
	subdirs, err := da.findSubdirectories(dirPath)
	if err != nil {
		return nil, fmt.Errorf("failed to find subdirectories: %w", err)
	}
	deps, err := da.analyzeDependencies(ctx, subdirs)
	if err != nil {
		return nil, fmt.Errorf("failed to analyze dependencies: %w", err)
	}
	relations := da.analyzeDirectoryRelationships(subdirs, deps)
	coupling := da.calculateCouplingMetrics(subdirs, deps)
	return &RelationshipAnalysis{
		Dependencies:       deps,
		Relationships:      relations,
		CouplingMetrics:    coupling,
		ModularityScore:    da.calculateModularityScore(relations, coupling),
		ArchitecturalStyle: da.determineArchitecturalStyle(subdirs, deps),
		AnalyzedAt:         time.Now(),
	}, nil
}
// GenerateHierarchy generates context hierarchy for a directory tree. It
// walks rootPath down to maxDepth, analyzing each directory and producing a
// ContextNode with UCXL address, purpose, tags, technologies, and the raw
// structure stashed in Metadata.
func (da *DefaultDirectoryAnalyzer) GenerateHierarchy(ctx context.Context, rootPath string, maxDepth int) ([]*slurpContext.ContextNode, error) {
	nodes := []*slurpContext.ContextNode{}
	walkErr := da.walkDirectoryHierarchy(rootPath, 0, maxDepth, func(path string, depth int) error {
		structure, err := da.AnalyzeStructure(ctx, path)
		if err != nil {
			return err
		}
		addr, err := da.generateUCXLAddress(path)
		if err != nil {
			return fmt.Errorf("failed to generate UCXL address: %w", err)
		}
		// Purpose identification is best-effort; use a generic fallback.
		purpose, confidence, err := da.IdentifyPurpose(ctx, structure)
		if err != nil {
			purpose, confidence = "Directory", 0.5
		}
		node := &slurpContext.ContextNode{
			Path:               path,
			UCXLAddress:        *addr,
			Summary:            da.generateDirectorySummary(structure),
			Purpose:            purpose,
			Technologies:       da.extractTechnologiesFromStructure(structure),
			Tags:               da.generateDirectoryTags(structure, path),
			Insights:           []string{},
			OverridesParent:    false,
			ContextSpecificity: da.calculateDirectorySpecificity(structure),
			AppliesToChildren:  depth < maxDepth-1, // leaf level does not cascade
			GeneratedAt:        time.Now(),
			RAGConfidence:      confidence,
			EncryptedFor:       []string{"*"}, // Default access
			AccessLevel:        slurpContext.AccessLow,
			Metadata:           make(map[string]interface{}),
		}
		// Preserve the raw analysis for downstream consumers.
		node.Metadata["structure"] = structure
		node.Metadata["depth"] = depth
		nodes = append(nodes, node)
		return nil
	})
	if walkErr != nil {
		return nil, fmt.Errorf("failed to walk directory hierarchy: %w", walkErr)
	}
	return nodes, nil
}
// Helper methods
// extensionLanguages maps known source-file extensions (lowercase, with the
// leading dot) to canonical language names. Hoisted to package scope so the
// map is built once instead of being reallocated on every call.
var extensionLanguages = map[string]string{
	".go":    "go",
	".py":    "python",
	".js":    "javascript",
	".jsx":   "javascript",
	".ts":    "typescript",
	".tsx":   "typescript",
	".java":  "java",
	".c":     "c",
	".cpp":   "cpp",
	".cs":    "csharp",
	".php":   "php",
	".rb":    "ruby",
	".rs":    "rust",
	".kt":    "kotlin",
	".swift": "swift",
}

// mapExtensionToLanguage returns the language name for a file extension, or
// the empty string when the extension is not recognized.
func (da *DefaultDirectoryAnalyzer) mapExtensionToLanguage(ext string) string {
	return extensionLanguages[ext]
}
// analyzeOrganization inspects the immediate subdirectories of dirPath and
// derives organizational metrics: detected pattern, naming consistency,
// depth, fan-out, and a modularity estimate. Cohesion and coupling are
// fixed defaults pending a real analysis.
func (da *DefaultDirectoryAnalyzer) analyzeOrganization(dirPath string) (*OrganizationInfo, error) {
	entries, err := ioutil.ReadDir(dirPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read directory: %w", err)
	}
	subdirs := []string{}
	for _, entry := range entries {
		if entry.IsDir() {
			subdirs = append(subdirs, entry.Name())
		}
	}
	info := &OrganizationInfo{
		Pattern:     da.detectOrganizationalPattern(subdirs),
		Consistency: da.calculateOrganizationalConsistency(subdirs),
		Depth:       da.calculateMaxDepth(dirPath),
		FanOut:      len(subdirs),
		Modularity:  da.calculateModularity(subdirs),
		Cohesion:    0.7, // Default cohesion score
		Coupling:    0.3, // Default coupling score
		Metadata:    make(map[string]interface{}),
	}
	return info, nil
}
// detectOrganizationalPattern names the organizational pattern suggested by
// a set of immediate subdirectory names, checking the well-known layouts
// (MVC, Clean Architecture, DDD, layered, feature-based, package-by-layer)
// before falling back to "Custom".
func (da *DefaultDirectoryAnalyzer) detectOrganizationalPattern(subdirs []string) string {
	present := make(map[string]bool, len(subdirs))
	for _, name := range subdirs {
		present[strings.ToLower(name)] = true
	}
	has := func(names ...string) bool {
		for _, n := range names {
			if !present[n] {
				return false
			}
		}
		return true
	}
	switch {
	case has("models", "views", "controllers"):
		return "MVC"
	case has("entities", "usecases", "adapters"):
		return "Clean Architecture"
	case has("domain", "application", "infrastructure"):
		return "Domain-Driven Design"
	case has("presentation", "business", "data"):
		return "Layered Architecture"
	case len(subdirs) > 3 && da.allAreDomainLike(subdirs):
		return "Feature-Based"
	}
	// Package-by-layer: enough purely technical directory names present.
	technical := 0
	for _, name := range []string{"api", "service", "repository", "model", "dto", "util"} {
		if present[name] {
			technical++
		}
	}
	if technical >= 3 {
		return "Package by Layer"
	}
	return "Custom"
}
// allAreDomainLike reports whether none of the directory names contain a
// technical-layer term; such directories are likely domain/feature based.
func (da *DefaultDirectoryAnalyzer) allAreDomainLike(subdirs []string) bool {
	technicalTerms := []string{"api", "service", "repository", "model", "dto", "util", "config", "test", "lib"}
	for _, name := range subdirs {
		lowered := strings.ToLower(name)
		for _, term := range technicalTerms {
			if strings.Contains(lowered, term) {
				// A single technical-looking name disqualifies the set.
				return false
			}
		}
	}
	return true
}
// calculateOrganizationalConsistency scores how uniformly the subdirectory
// names follow a single naming convention: the size of the largest
// same-convention bucket divided by the total count. One or zero names are
// trivially consistent.
func (da *DefaultDirectoryAnalyzer) calculateOrganizationalConsistency(subdirs []string) float64 {
	total := len(subdirs)
	if total < 2 {
		return 1.0
	}
	// Classify each name into the first convention it matches (camel before
	// kebab before snake, mirroring the detection order used elsewhere).
	var camel, kebab, snake int
	for _, name := range subdirs {
		switch {
		case da.isCamelCase(name):
			camel++
		case da.isKebabCase(name):
			kebab++
		case da.isSnakeCase(name):
			snake++
		}
	}
	best := camel
	for _, count := range []int{kebab, snake} {
		if count > best {
			best = count
		}
	}
	return float64(best) / float64(total)
}
// Pre-compiled naming-convention patterns. regexp.MatchString recompiles its
// pattern (and silently discards the compile error) on every invocation;
// compiling once at package scope with MustCompile avoids that repeated work
// on what is a hot path during convention analysis.
var (
	camelCasePattern = regexp.MustCompile(`^[a-z][a-zA-Z0-9]*$`)
	kebabCasePattern = regexp.MustCompile(`^[a-z][a-z0-9-]*$`)
	snakeCasePattern = regexp.MustCompile(`^[a-z][a-z0-9_]*$`)
)

// isCamelCase reports whether s is lowerCamelCase (starts with a lowercase
// ASCII letter, followed only by ASCII letters and digits).
func (da *DefaultDirectoryAnalyzer) isCamelCase(s string) bool {
	return camelCasePattern.MatchString(s)
}

// isKebabCase reports whether s is kebab-case (lowercase ASCII letters,
// digits, and hyphens, starting with a letter).
func (da *DefaultDirectoryAnalyzer) isKebabCase(s string) bool {
	return kebabCasePattern.MatchString(s)
}

// isSnakeCase reports whether s is snake_case (lowercase ASCII letters,
// digits, and underscores, starting with a letter).
func (da *DefaultDirectoryAnalyzer) isSnakeCase(s string) bool {
	return snakeCasePattern.MatchString(s)
}
// calculateMaxDepth returns the depth (in path separators) of the deepest
// directory under dirPath, relative to dirPath itself. The walk is
// best-effort: unreadable entries are skipped rather than aborting.
func (da *DefaultDirectoryAnalyzer) calculateMaxDepth(dirPath string) int {
	deepest := 0
	filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
		if err != nil || !info.IsDir() {
			return nil
		}
		rel, _ := filepath.Rel(dirPath, path)
		if depth := strings.Count(rel, string(os.PathSeparator)); depth > deepest {
			deepest = depth
		}
		return nil
	})
	return deepest
}
// calculateModularity gives a rough modularity estimate from the number of
// immediate subdirectories: more subdirectories with clear separation
// suggests higher modularity.
func (da *DefaultDirectoryAnalyzer) calculateModularity(subdirs []string) float64 {
	switch n := len(subdirs); {
	case n == 0:
		return 0.0
	case n > 5:
		return 0.8
	case n > 2:
		return 0.6
	default:
		return 0.4
	}
}
// analyzeConventions inspects the immediate children of dirPath and reports
// the dominant naming style (across files and directories combined),
// separate file/directory naming patterns, and a consistency score.
func (da *DefaultDirectoryAnalyzer) analyzeConventions(ctx context.Context, dirPath string) (*ConventionInfo, error) {
	entries, err := ioutil.ReadDir(dirPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read directory: %w", err)
	}
	fileNames := []string{}
	dirNames := []string{}
	for _, entry := range entries {
		if entry.IsDir() {
			dirNames = append(dirNames, entry.Name())
		} else {
			fileNames = append(fileNames, entry.Name())
		}
	}
	// Combined list drives the overall style and its consistency score.
	combined := append(append([]string{}, fileNames...), dirNames...)
	style := da.detectDominantNamingStyle(combined)
	return &ConventionInfo{
		NamingStyle:     style,
		FileNaming:      da.detectFileNamingPattern(fileNames),
		DirectoryNaming: da.detectDirectoryNamingPattern(dirNames),
		Consistency:     da.calculateNamingConsistency(combined, style),
		Violations:      []*Violation{},
		Standards:       []string{},
	}, nil
}
// detectDominantNamingStyle returns the naming convention matched by the
// most names, or "mixed" when no name matches any convention. Each name is
// classified into the first convention it matches (camelCase before
// kebab-case before snake_case before PascalCase).
func (da *DefaultDirectoryAnalyzer) detectDominantNamingStyle(names []string) string {
	counts := map[string]int{
		"camelCase":  0,
		"kebab-case": 0,
		"snake_case": 0,
		"PascalCase": 0,
	}
	for _, name := range names {
		switch {
		case da.isCamelCase(name):
			counts["camelCase"]++
		case da.isKebabCase(name):
			counts["kebab-case"]++
		case da.isSnakeCase(name):
			counts["snake_case"]++
		case da.isPascalCase(name):
			counts["PascalCase"]++
		}
	}
	// Iterate in a fixed order: the original ranged over the map, so ties
	// were broken by Go's randomized map iteration order, making the result
	// nondeterministic for the same input. A fixed slice makes ties resolve
	// deterministically (earlier style wins).
	dominant := "mixed"
	best := 0
	for _, style := range []string{"camelCase", "kebab-case", "snake_case", "PascalCase"} {
		if counts[style] > best {
			best = counts[style]
			dominant = style
		}
	}
	return dominant
}
// pascalCasePattern matches PascalCase names. Compiled once at package scope
// because regexp.MatchString recompiles the pattern (and discards the
// compile error) on every call.
var pascalCasePattern = regexp.MustCompile(`^[A-Z][a-zA-Z0-9]*$`)

// isPascalCase reports whether s is PascalCase (starts with an uppercase
// ASCII letter, followed only by ASCII letters and digits).
func (da *DefaultDirectoryAnalyzer) isPascalCase(s string) bool {
	return pascalCasePattern.MatchString(s)
}
// detectFileNamingPattern returns the dominant naming style among file
// names, or "none" when there are no files.
func (da *DefaultDirectoryAnalyzer) detectFileNamingPattern(fileNames []string) string {
	if len(fileNames) > 0 {
		return da.detectDominantNamingStyle(fileNames)
	}
	return "none"
}

// detectDirectoryNamingPattern returns the dominant naming style among
// directory names, or "none" when there are no directories.
func (da *DefaultDirectoryAnalyzer) detectDirectoryNamingPattern(dirNames []string) string {
	if len(dirNames) > 0 {
		return da.detectDominantNamingStyle(dirNames)
	}
	return "none"
}
// calculateNamingConsistency returns the fraction of names that match the
// expected naming style. An empty input is treated as fully consistent.
func (da *DefaultDirectoryAnalyzer) calculateNamingConsistency(names []string, expectedStyle string) float64 {
	if len(names) == 0 {
		return 1.0
	}
	matching := 0
	for _, candidate := range names {
		if da.matchesNamingStyle(candidate, expectedStyle) {
			matching++
		}
	}
	return float64(matching) / float64(len(names))
}
// matchesNamingStyle reports whether name conforms to the given naming
// style. Unknown styles (including "mixed") match everything.
func (da *DefaultDirectoryAnalyzer) matchesNamingStyle(name, style string) bool {
	checks := map[string]func(string) bool{
		"camelCase":  da.isCamelCase,
		"kebab-case": da.isKebabCase,
		"snake_case": da.isSnakeCase,
		"PascalCase": da.isPascalCase,
	}
	if check, ok := checks[style]; ok {
		return check(name)
	}
	return true // Mixed/unknown style always matches
}
// determinePurpose infers a human-readable purpose from the language and
// file-type histograms, checking languages in priority order (JS/TS, Go,
// Python, Java) before falling back to documentation or a generic label.
func (da *DefaultDirectoryAnalyzer) determinePurpose(structure *DirectoryStructure) string {
	langs := structure.Languages
	switch {
	case langs["javascript"] > 0 || langs["typescript"] > 0:
		// Distinguish browser-facing projects by presence of markup/styles.
		if structure.FileTypes[".html"] > 0 || structure.FileTypes[".css"] > 0 {
			return "Frontend web application"
		}
		return "JavaScript/TypeScript application"
	case langs["go"] > 0:
		return "Go application or service"
	case langs["python"] > 0:
		return "Python application or library"
	case langs["java"] > 0:
		return "Java application"
	case structure.FileTypes[".md"] > 0:
		return "Documentation repository"
	default:
		return "General purpose directory"
	}
}
// determineArchitecture names the architecture: a recognized organizational
// pattern wins outright; otherwise the architecture is inferred from the
// dominant language and file types.
func (da *DefaultDirectoryAnalyzer) determineArchitecture(structure *DirectoryStructure, orgInfo *OrganizationInfo) string {
	if orgInfo.Pattern != "Custom" && orgInfo.Pattern != "unknown" {
		return orgInfo.Pattern
	}
	switch {
	case structure.Languages["go"] > 0:
		return "Go service architecture"
	case structure.Languages["javascript"] > 0 || structure.Languages["typescript"] > 0:
		// JSON files (e.g. package.json) hint at a Node.js project.
		if structure.FileTypes[".json"] > 0 {
			return "Node.js application"
		}
		return "Frontend application"
	default:
		return "Unknown architecture"
	}
}
// Additional helper methods for comprehensive analysis
// collectFilesAndDirs walks rootPath and returns all file paths and all
// directory paths (including rootPath itself) found under it.
func (da *DefaultDirectoryAnalyzer) collectFilesAndDirs(rootPath string) ([]string, []string, error) {
	filePaths := []string{}
	dirPaths := []string{}
	walkErr := filepath.Walk(rootPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			dirPaths = append(dirPaths, path)
			return nil
		}
		filePaths = append(filePaths, path)
		return nil
	})
	return filePaths, dirPaths, walkErr
}
// detectNamingPatterns derives one naming pattern for files and one for
// directories, skipping scopes that yielded no pattern.
func (da *DefaultDirectoryAnalyzer) detectNamingPatterns(files, dirs []string) []*NamingPattern {
	patterns := []*NamingPattern{}
	for _, candidate := range []*NamingPattern{
		da.analyzeNamingPattern(files, "file"),
		da.analyzeNamingPattern(dirs, "directory"),
	} {
		if candidate != nil {
			patterns = append(patterns, candidate)
		}
	}
	return patterns
}
// analyzeNamingPattern inspects the base names of the given paths and builds
// a NamingPattern describing the dominant convention for the given scope
// ("file" or "directory"). Returns nil when there are no paths to analyze.
func (da *DefaultDirectoryAnalyzer) analyzeNamingPattern(paths []string, scope string) *NamingPattern {
	if len(paths) == 0 {
		return nil
	}
	// Work on base names only; the directory portion is irrelevant here.
	names := make([]string, len(paths))
	for i, path := range paths {
		names[i] = filepath.Base(path)
	}
	convention := da.detectDominantNamingStyle(names)
	// Capitalize the scope for display. strings.Title is deprecated; plain
	// ASCII first-letter capitalization behaves identically for the known
	// scope values ("file", "directory").
	title := scope
	if title != "" {
		title = strings.ToUpper(title[:1]) + title[1:]
	}
	return &NamingPattern{
		Pattern: Pattern{
			ID:          fmt.Sprintf("%s_naming", scope),
			Name:        fmt.Sprintf("%s Naming Convention", title),
			Type:        "naming",
			Description: fmt.Sprintf("Naming convention for %ss", scope),
			Confidence:  da.calculateNamingConsistency(names, convention),
			Examples:    names[:min(5, len(names))],
		},
		Convention: convention,
		Scope:      scope,
		CaseStyle:  convention,
	}
}
// detectOrganizationalPatterns returns every registered organizational
// pattern whose example directories are sufficiently covered by dirs.
func (da *DefaultDirectoryAnalyzer) detectOrganizationalPatterns(ctx context.Context, rootPath string, dirs []string) []*OrganizationalPattern {
	matched := []*OrganizationalPattern{}
	for _, candidate := range da.organizationDetector.commonPatterns {
		if da.matchesOrganizationalPattern(dirs, candidate) {
			matched = append(matched, candidate)
		}
	}
	return matched
}
// matchesOrganizationalPattern reports whether the base names in dirs cover
// at least half of the pattern's example directories (examples are compared
// case-insensitively with their trailing slash stripped).
func (da *DefaultDirectoryAnalyzer) matchesOrganizationalPattern(dirs []string, pattern *OrganizationalPattern) bool {
	dirSet := make(map[string]bool, len(dirs))
	for _, dir := range dirs {
		dirSet[strings.ToLower(filepath.Base(dir))] = true
	}
	matchCount := 0
	for _, example := range pattern.Examples {
		exampleName := strings.TrimSuffix(strings.ToLower(example), "/")
		if dirSet[exampleName] {
			matchCount++
		}
	}
	// Require at least half of the examples to match. Compare with a
	// multiplication so odd example counts round up: the previous
	// `matchCount >= len/2` used floor division and accepted e.g. 1 of 3.
	return matchCount*2 >= len(pattern.Examples)
}
// calculateConventionConsistency averages the confidence of the detected
// naming patterns; with no patterns it returns a neutral 0.5.
func (da *DefaultDirectoryAnalyzer) calculateConventionConsistency(files, dirs []string, patterns []*NamingPattern) float64 {
	if len(patterns) == 0 {
		return 0.5
	}
	sum := 0.0
	for _, p := range patterns {
		sum += p.Confidence
	}
	return sum / float64(len(patterns))
}
// findConventionViolations reports every file or directory whose base name
// does not match the naming pattern detected for its scope. File violations
// are warnings; directory violations are informational.
func (da *DefaultDirectoryAnalyzer) findConventionViolations(files, dirs []string, patterns []*NamingPattern) []*Violation {
	violations := []*Violation{}
	for _, pattern := range patterns {
		switch pattern.Scope {
		case "file":
			violations = append(violations, da.namingViolations(files, pattern, "warning")...)
		case "directory":
			violations = append(violations, da.namingViolations(dirs, pattern, "info")...)
		}
	}
	return violations
}

// namingViolations returns one violation per path whose base name does not
// match the pattern's convention, tagged with the given severity. Extracted
// so the file and directory scans share one implementation instead of two
// near-identical loops.
func (da *DefaultDirectoryAnalyzer) namingViolations(paths []string, pattern *NamingPattern, severity string) []*Violation {
	out := []*Violation{}
	for _, p := range paths {
		name := filepath.Base(p)
		if da.matchesNamingStyle(name, pattern.Convention) {
			continue
		}
		out = append(out, &Violation{
			Type:       "naming",
			Path:       p,
			Expected:   pattern.Convention,
			Actual:     da.detectNamingStyle(name),
			Severity:   severity,
			Suggestion: fmt.Sprintf("Rename to follow %s convention", pattern.Convention),
		})
	}
	return out
}
// detectNamingStyle classifies name into the first convention it matches
// (camelCase, kebab-case, snake_case, PascalCase) or "unknown".
func (da *DefaultDirectoryAnalyzer) detectNamingStyle(name string) string {
	switch {
	case da.isCamelCase(name):
		return "camelCase"
	case da.isKebabCase(name):
		return "kebab-case"
	case da.isSnakeCase(name):
		return "snake_case"
	case da.isPascalCase(name):
		return "PascalCase"
	default:
		return "unknown"
	}
}
// generateConventionRecommendations suggests improvements based on the
// analysis: a consistency recommendation when naming consistency is below
// 0.8, and an architecture recommendation when no organizational pattern
// was detected.
func (da *DefaultDirectoryAnalyzer) generateConventionRecommendations(analysis *ConventionAnalysis) []*Recommendation {
	out := []*Recommendation{}
	if analysis.Consistency < 0.8 {
		out = append(out, &Recommendation{
			Type:        "consistency",
			Title:       "Improve naming consistency",
			Description: "Consider standardizing naming conventions across the project",
			Priority:    2,
			Effort:      "medium",
			Impact:      "high",
			Steps:       []string{"Choose a consistent naming style", "Rename files/directories", "Update style guide"},
		})
	}
	if len(analysis.OrganizationalPatterns) == 0 {
		out = append(out, &Recommendation{
			Type:        "architecture",
			Title:       "Consider architectural patterns",
			Description: "Project structure could benefit from established architectural patterns",
			Priority:    3,
			Effort:      "high",
			Impact:      "high",
			Steps:       []string{"Evaluate current structure", "Choose appropriate pattern", "Refactor gradually"},
		})
	}
	return out
}
// More helper methods for relationship analysis
// findSubdirectories returns the full paths of the immediate subdirectories
// of dirPath (non-recursive).
func (da *DefaultDirectoryAnalyzer) findSubdirectories(dirPath string) ([]string, error) {
	entries, err := ioutil.ReadDir(dirPath)
	if err != nil {
		return nil, err
	}
	subdirs := []string{}
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		subdirs = append(subdirs, filepath.Join(dirPath, entry.Name()))
	}
	return subdirs, nil
}
// analyzeDependencies collects cross-directory dependencies for every
// subdirectory in subdirs. Analysis is best-effort: a directory that
// fails to scan is skipped so one unreadable tree does not abort the run.
func (da *DefaultDirectoryAnalyzer) analyzeDependencies(ctx context.Context, subdirs []string) ([]*DirectoryDependency, error) {
	all := []*DirectoryDependency{}
	for _, subdir := range subdirs {
		found, err := da.findDirectoryDependencies(ctx, subdir, subdirs)
		if err != nil {
			// Skip directories we can't analyze.
			continue
		}
		all = append(all, found...)
	}
	return all, nil
}
// findDirectoryDependencies walks every file under dir, extracts import
// statements using the language-specific dependency detector, and records
// a dependency edge for each import that appears to reference one of the
// sibling directories in allDirs.
//
// The walk honours ctx cancellation so scans of large trees can be
// aborted; in that case the edges collected so far are returned together
// with ctx.Err(). Unreadable files and languages without a registered
// detector are skipped silently (best-effort analysis).
func (da *DefaultDirectoryAnalyzer) findDirectoryDependencies(ctx context.Context, dir string, allDirs []string) ([]*DirectoryDependency, error) {
	dependencies := []*DirectoryDependency{}
	err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error {
		// Stop promptly if the caller cancelled the context.
		if ctxErr := ctx.Err(); ctxErr != nil {
			return ctxErr
		}
		if err != nil || info.IsDir() {
			return nil
		}
		// Read file content to find imports; skip files we can't read.
		content, readErr := os.ReadFile(path)
		if readErr != nil {
			return nil
		}
		// Detect the language from the extension and look up its detector.
		ext := strings.ToLower(filepath.Ext(path))
		language := da.mapExtensionToLanguage(ext)
		detector, exists := da.relationshipAnalyzer.dependencyDetectors[language]
		if !exists {
			return nil
		}
		imports := da.extractImports(string(content), detector.importPatterns)
		// Record an edge for every import that points at a sibling directory.
		for _, imp := range imports {
			for _, otherDir := range allDirs {
				if otherDir != dir && da.isLocalDependency(imp, dir, otherDir) {
					dependencies = append(dependencies, &DirectoryDependency{
						From:     dir,
						To:       otherDir,
						Type:     "import",
						Strength: 1.0,
						Reason:   fmt.Sprintf("Import: %s", imp),
					})
				}
			}
		}
		return nil
	})
	return dependencies, err
}
// extractImports runs each compiled import pattern over content and
// collects the first capture group of every match. Patterns without a
// capture group contribute nothing.
func (da *DefaultDirectoryAnalyzer) extractImports(content string, patterns []*regexp.Regexp) []string {
	found := []string{}
	for _, re := range patterns {
		for _, groups := range re.FindAllStringSubmatch(content, -1) {
			if len(groups) < 2 {
				continue
			}
			found = append(found, groups[1])
		}
	}
	return found
}
// isLocalDependency reports whether importPath appears to reference the
// target directory toDir. This is a purely name-based heuristic — it
// checks whether the import string mentions the target's base name — so
// false positives are possible for directories with common names.
//
// fromDir is retained for call-site symmetry but is not needed by the
// current heuristic. (The original body assigned filepath.Base(fromDir)
// to an unused local, which is a compile error in Go; the redundant
// "../name" and "./name" checks were also strict subsets of the bare
// base-name check and have been folded into it.)
func (da *DefaultDirectoryAnalyzer) isLocalDependency(importPath, fromDir, toDir string) bool {
	return strings.Contains(importPath, filepath.Base(toDir))
}
// analyzeDirectoryRelationships builds pairwise relationships between
// subdirectories from the flat dependency list. Each unordered pair with
// at least one dependency in either direction yields a single relation;
// pairs with traffic both ways are labelled "mutual", otherwise "depends".
func (da *DefaultDirectoryAnalyzer) analyzeDirectoryRelationships(subdirs []string, dependencies []*DirectoryDependency) []*DirectoryRelation {
	relationships := []*DirectoryRelation{}

	// Aggregate the edge list into a from -> to -> count map.
	counts := make(map[string]map[string]int)
	for _, dep := range dependencies {
		inner, ok := counts[dep.From]
		if !ok {
			inner = make(map[string]int)
			counts[dep.From] = inner
		}
		inner[dep.To]++
	}

	// Visit each unordered pair once (dir1 < dir2); the seen map also
	// guards against duplicate entries in subdirs producing duplicates.
	seen := make(map[string]bool)
	for _, dir1 := range subdirs {
		for _, dir2 := range subdirs {
			if dir1 >= dir2 {
				continue
			}
			pairKey := dir1 + ":" + dir2
			if seen[pairKey] {
				continue
			}
			seen[pairKey] = true

			forward := counts[dir1][dir2]
			backward := counts[dir2][dir1]
			if forward == 0 && backward == 0 {
				continue
			}

			mutual := forward > 0 && backward > 0
			relType := "depends"
			if mutual {
				relType = "mutual"
			}
			relationships = append(relationships, &DirectoryRelation{
				Directory1:    dir1,
				Directory2:    dir2,
				Type:          relType,
				Strength:      float64(forward + backward),
				Description:   fmt.Sprintf("%d dependencies between directories", forward+backward),
				Bidirectional: mutual,
			})
		}
	}
	return relationships
}
// calculateCouplingMetrics computes average afferent (incoming) and
// efferent (outgoing) coupling per directory, plus the derived
// instability metric I = Ce / (Ca + Ce). Abstractness and the distance
// from the main sequence are placeholders pending deeper analysis.
func (da *DefaultDirectoryAnalyzer) calculateCouplingMetrics(subdirs []string, dependencies []*DirectoryDependency) *CouplingMetrics {
	n := len(subdirs)
	if n == 0 {
		return &CouplingMetrics{}
	}

	inbound := make(map[string]int)  // afferent: edges arriving at a directory
	outbound := make(map[string]int) // efferent: edges leaving a directory
	for _, dep := range dependencies {
		outbound[dep.From]++
		inbound[dep.To]++
	}

	var sumIn, sumOut int
	for _, dir := range subdirs {
		sumIn += inbound[dir]
		sumOut += outbound[dir]
	}
	avgIn := float64(sumIn) / float64(n)
	avgOut := float64(sumOut) / float64(n)

	// Instability: 0 = maximally stable, 1 = maximally unstable.
	instability := 0.0
	if total := avgIn + avgOut; total > 0 {
		instability = avgOut / total
	}

	return &CouplingMetrics{
		AfferentCoupling: avgIn,
		EfferentCoupling: avgOut,
		Instability:      instability,
		Abstractness:     0.5, // placeholder: would need type-level analysis
		DistanceFromMain: 0.0, // placeholder: distance from the main sequence
	}
}
// calculateModularityScore maps the instability coupling metric onto a
// coarse three-level modularity score. The relationships parameter is
// currently unused but kept for interface stability.
func (da *DefaultDirectoryAnalyzer) calculateModularityScore(relationships []*DirectoryRelation, coupling *CouplingMetrics) float64 {
	switch {
	case coupling.Instability < 0.3:
		return 0.8 // stable dependencies => high modularity
	case coupling.Instability < 0.7:
		return 0.6 // medium modularity
	default:
		return 0.4 // highly unstable => low modularity
	}
}
// determineArchitecturalStyle classifies the dependency graph of the
// subdirectories: "unknown" when there are no directories, "independent"
// when there are no edges, "layered" when fewer than 20% of edges have a
// matching reverse edge, and "interconnected" otherwise.
func (da *DefaultDirectoryAnalyzer) determineArchitecturalStyle(subdirs []string, dependencies []*DirectoryDependency) string {
	if len(subdirs) == 0 {
		return "unknown"
	}
	if len(dependencies) == 0 {
		return "independent"
	}

	// Index every edge so the reverse-edge lookup is O(1) per dependency
	// instead of the original O(n^2) nested scan over the edge list.
	// NUL is used as the key separator since it cannot appear in paths.
	edges := make(map[string]bool, len(dependencies))
	for _, dep := range dependencies {
		edges[dep.From+"\x00"+dep.To] = true
	}

	bidirectional := 0
	for _, dep := range dependencies {
		if edges[dep.To+"\x00"+dep.From] {
			bidirectional++
		}
	}

	// Mostly one-way edges suggest a layered architecture.
	if float64(bidirectional)/float64(len(dependencies)) < 0.2 {
		return "layered"
	}
	return "interconnected"
}
// Additional utility methods

// walkDirectoryHierarchy recursively visits rootPath and its
// subdirectories down to maxDepth, invoking fn with each directory path
// and its depth. Hidden (dot-prefixed) directories are skipped. The
// first error from fn or from reading a directory aborts the walk.
func (da *DefaultDirectoryAnalyzer) walkDirectoryHierarchy(rootPath string, currentDepth, maxDepth int, fn func(string, int) error) error {
	if currentDepth > maxDepth {
		return nil
	}
	// Visit the current directory before descending.
	if err := fn(rootPath, currentDepth); err != nil {
		return err
	}
	entries, err := ioutil.ReadDir(rootPath)
	if err != nil {
		return err
	}
	for _, entry := range entries {
		if !entry.IsDir() || strings.HasPrefix(entry.Name(), ".") {
			continue
		}
		child := filepath.Join(rootPath, entry.Name())
		if err := da.walkDirectoryHierarchy(child, currentDepth+1, maxDepth, fn); err != nil {
			return err
		}
	}
	return nil
}
// generateUCXLAddress derives a UCXL address for a directory by parsing
// a "dir://" URI built from the cleaned path.
func (da *DefaultDirectoryAnalyzer) generateUCXLAddress(path string) (*ucxl.Address, error) {
	uri := fmt.Sprintf("dir://%s", filepath.Clean(path))
	addr, err := ucxl.ParseAddress(uri)
	if err != nil {
		return nil, fmt.Errorf("failed to generate UCXL address: %w", err)
	}
	return addr, nil
}
// generateDirectorySummary produces a one-line human-readable summary of
// a directory: file/subdirectory counts plus up to three languages
// (alphabetically first) with their file counts.
func (da *DefaultDirectoryAnalyzer) generateDirectorySummary(structure *DirectoryStructure) string {
	var b strings.Builder
	fmt.Fprintf(&b, "Directory with %d files and %d subdirectories",
		structure.FileCount, structure.DirectoryCount)

	if len(structure.Languages) > 0 {
		langs := make([]string, 0, len(structure.Languages))
		for lang, count := range structure.Languages {
			langs = append(langs, fmt.Sprintf("%s (%d)", lang, count))
		}
		// Sort for deterministic output, then cap the list at three.
		sort.Strings(langs)
		if len(langs) > 3 {
			langs = langs[:3]
		}
		fmt.Fprintf(&b, ", containing: %s", strings.Join(langs, ", "))
	}
	return b.String()
}
// generateDirectoryTags assembles classification tags for a directory:
// its base name, the languages present, a size bucket based on file
// count, and a slugified architecture label when one was detected.
func (da *DefaultDirectoryAnalyzer) generateDirectoryTags(structure *DirectoryStructure, path string) []string {
	tags := []string{}

	// Directory name tag (skip uninformative roots).
	if base := filepath.Base(path); base != "." && base != "/" {
		tags = append(tags, "dir:"+base)
	}

	// One tag per detected language (map order, hence unordered).
	for lang := range structure.Languages {
		tags = append(tags, lang)
	}

	// Size bucket by file count.
	switch {
	case structure.FileCount > 100:
		tags = append(tags, "large-project")
	case structure.FileCount > 20:
		tags = append(tags, "medium-project")
	default:
		tags = append(tags, "small-project")
	}

	// Architecture slug, e.g. "layered architecture" -> "layered-architecture".
	if structure.Architecture != "" && structure.Architecture != "unknown" {
		slug := strings.ToLower(strings.ReplaceAll(structure.Architecture, " ", "-"))
		tags = append(tags, slug)
	}
	return tags
}
// extractTechnologiesFromStructure lists the technologies visible in a
// directory structure: every detected language, plus framework guesses
// from file-type/language combinations.
func (da *DefaultDirectoryAnalyzer) extractTechnologiesFromStructure(structure *DirectoryStructure) []string {
	technologies := []string{}
	for lang := range structure.Languages {
		technologies = append(technologies, lang)
	}

	// Heuristic: JSON files alongside JS/TS usually means a Node.js project.
	usesJS := structure.Languages["javascript"] > 0 || structure.Languages["typescript"] > 0
	if structure.FileTypes[".json"] > 0 && usesJS {
		technologies = append(technologies, "Node.js")
	}

	// NOTE(review): .py together with .txt presumably targets
	// requirements.txt as a Python-project signal — confirm the intent.
	if structure.FileTypes[".py"] > 0 && structure.FileTypes[".txt"] > 0 {
		technologies = append(technologies, "Python")
	}
	return technologies
}
// calculateDirectorySpecificity scores how specialized a directory is,
// starting from a base of 1 and adding points for size (many files),
// language diversity, and a non-generic detected purpose.
func (da *DefaultDirectoryAnalyzer) calculateDirectorySpecificity(structure *DirectoryStructure) int {
	score := 1

	// Larger directories are considered more specific.
	switch {
	case structure.FileCount > 50:
		score += 2
	case structure.FileCount > 10:
		score++
	}

	// Multiple languages in one directory suggest a specific role.
	if len(structure.Languages) > 2 {
		score++
	}

	// A clearly identified purpose adds specificity.
	if structure.Purpose != "General purpose directory" {
		score++
	}
	return score
}
// min returns the smaller of two ints. (Retained for pre-1.21 toolchain
// compatibility; it shadows the built-in min on newer Go versions.)
func min(a, b int) int {
	if a > b {
		return b
	}
	return a
}