chore: align slurp config and scaffolding
@@ -11,8 +11,8 @@ import (
	"strings"
	"time"

-	"chorus/pkg/ucxl"
	slurpContext "chorus/pkg/slurp/context"
+	"chorus/pkg/ucxl"
)

// DefaultDirectoryAnalyzer provides comprehensive directory structure analysis
@@ -268,11 +268,11 @@ func NewRelationshipAnalyzer() *RelationshipAnalyzer {
// AnalyzeStructure analyzes directory organization patterns
func (da *DefaultDirectoryAnalyzer) AnalyzeStructure(ctx context.Context, dirPath string) (*DirectoryStructure, error) {
	structure := &DirectoryStructure{
		Path: dirPath,
		FileTypes: make(map[string]int),
		Languages: make(map[string]int),
		Dependencies: []string{},
		AnalyzedAt: time.Now(),
	}

	// Walk the directory tree
@@ -340,9 +340,9 @@ func (da *DefaultDirectoryAnalyzer) DetectConventions(ctx context.Context, dirPa
		OrganizationalPatterns: []*OrganizationalPattern{},
		Consistency: 0.0,
		Violations: []*Violation{},
-		Recommendations: []*Recommendation{},
+		Recommendations: []*BasicRecommendation{},
		AppliedStandards: []string{},
		AnalyzedAt: time.Now(),
	}

	// Collect all files and directories
@@ -385,39 +385,39 @@ func (da *DefaultDirectoryAnalyzer) IdentifyPurpose(ctx context.Context, structu
		purpose string
		confidence float64
	}{
		"src": {"Source code repository", 0.9},
		"source": {"Source code repository", 0.9},
		"lib": {"Library code", 0.8},
		"libs": {"Library code", 0.8},
		"vendor": {"Third-party dependencies", 0.9},
		"node_modules": {"Node.js dependencies", 0.95},
		"build": {"Build artifacts", 0.9},
		"dist": {"Distribution files", 0.9},
		"bin": {"Binary executables", 0.9},
		"test": {"Test code", 0.9},
		"tests": {"Test code", 0.9},
		"docs": {"Documentation", 0.9},
		"doc": {"Documentation", 0.9},
		"config": {"Configuration files", 0.9},
		"configs": {"Configuration files", 0.9},
		"scripts": {"Utility scripts", 0.8},
		"tools": {"Development tools", 0.8},
		"assets": {"Static assets", 0.8},
		"public": {"Public web assets", 0.8},
		"static": {"Static files", 0.8},
		"templates": {"Template files", 0.8},
		"migrations": {"Database migrations", 0.9},
		"models": {"Data models", 0.8},
		"views": {"View layer", 0.8},
		"controllers": {"Controller layer", 0.8},
		"services": {"Service layer", 0.8},
		"components": {"Reusable components", 0.8},
		"modules": {"Modular components", 0.8},
		"packages": {"Package organization", 0.7},
		"internal": {"Internal implementation", 0.8},
		"cmd": {"Command-line applications", 0.9},
		"api": {"API implementation", 0.8},
		"pkg": {"Go package directory", 0.8},
	}

	if p, exists := purposes[dirName]; exists {
@@ -459,12 +459,12 @@ func (da *DefaultDirectoryAnalyzer) IdentifyPurpose(ctx context.Context, structu
// AnalyzeRelationships analyzes relationships between subdirectories
func (da *DefaultDirectoryAnalyzer) AnalyzeRelationships(ctx context.Context, dirPath string) (*RelationshipAnalysis, error) {
	analysis := &RelationshipAnalysis{
		Dependencies: []*DirectoryDependency{},
		Relationships: []*DirectoryRelation{},
		CouplingMetrics: &CouplingMetrics{},
		ModularityScore: 0.0,
		ArchitecturalStyle: "unknown",
		AnalyzedAt: time.Now(),
	}

	// Find subdirectories
@@ -568,20 +568,20 @@ func (da *DefaultDirectoryAnalyzer) GenerateHierarchy(ctx context.Context, rootP

func (da *DefaultDirectoryAnalyzer) mapExtensionToLanguage(ext string) string {
	langMap := map[string]string{
		".go": "go",
		".py": "python",
		".js": "javascript",
		".jsx": "javascript",
		".ts": "typescript",
		".tsx": "typescript",
		".java": "java",
		".c": "c",
		".cpp": "cpp",
		".cs": "csharp",
		".php": "php",
		".rb": "ruby",
		".rs": "rust",
		".kt": "kotlin",
		".swift": "swift",
	}

@@ -604,7 +604,7 @@ func (da *DefaultDirectoryAnalyzer) analyzeOrganization(dirPath string) (*Organi

	// Detect organizational pattern
	pattern := da.detectOrganizationalPattern(subdirs)

	// Calculate metrics
	fanOut := len(subdirs)
	consistency := da.calculateOrganizationalConsistency(subdirs)
@@ -672,7 +672,7 @@ func (da *DefaultDirectoryAnalyzer) allAreDomainLike(subdirs []string) bool {
	// Simple heuristic: if directories don't look like technical layers,
	// they might be domain/feature based
	technicalTerms := []string{"api", "service", "repository", "model", "dto", "util", "config", "test", "lib"}

	for _, subdir := range subdirs {
		lowerDir := strings.ToLower(subdir)
		for _, term := range technicalTerms {
@@ -733,7 +733,7 @@ func (da *DefaultDirectoryAnalyzer) isSnakeCase(s string) bool {

func (da *DefaultDirectoryAnalyzer) calculateMaxDepth(dirPath string) int {
	maxDepth := 0

	filepath.Walk(dirPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return nil
@@ -747,7 +747,7 @@ func (da *DefaultDirectoryAnalyzer) calculateMaxDepth(dirPath string) int {
		}
		return nil
	})

	return maxDepth
}

@@ -756,7 +756,7 @@ func (da *DefaultDirectoryAnalyzer) calculateModularity(subdirs []string) float6
	if len(subdirs) == 0 {
		return 0.0
	}

	// More subdirectories with clear separation indicates higher modularity
	if len(subdirs) > 5 {
		return 0.8
@@ -786,7 +786,7 @@ func (da *DefaultDirectoryAnalyzer) analyzeConventions(ctx context.Context, dirP

	// Detect dominant naming style
	namingStyle := da.detectDominantNamingStyle(append(fileNames, dirNames...))

	// Calculate consistency
	consistency := da.calculateNamingConsistency(append(fileNames, dirNames...), namingStyle)

@@ -988,7 +988,7 @@ func (da *DefaultDirectoryAnalyzer) analyzeNamingPattern(paths []string, scope s

	// Detect the dominant convention
	convention := da.detectDominantNamingStyle(names)

	return &NamingPattern{
		Pattern: Pattern{
			ID: fmt.Sprintf("%s_naming", scope),
@@ -996,7 +996,7 @@ func (da *DefaultDirectoryAnalyzer) analyzeNamingPattern(paths []string, scope s
			Type: "naming",
			Description: fmt.Sprintf("Naming convention for %ss", scope),
			Confidence: da.calculateNamingConsistency(names, convention),
-			Examples: names[:min(5, len(names))],
+			Examples: names[:minInt(5, len(names))],
		},
		Convention: convention,
		Scope: scope,
@@ -1100,12 +1100,12 @@ func (da *DefaultDirectoryAnalyzer) detectNamingStyle(name string) string {
	return "unknown"
}

-func (da *DefaultDirectoryAnalyzer) generateConventionRecommendations(analysis *ConventionAnalysis) []*Recommendation {
-	recommendations := []*Recommendation{}
+func (da *DefaultDirectoryAnalyzer) generateConventionRecommendations(analysis *ConventionAnalysis) []*BasicRecommendation {
+	recommendations := []*BasicRecommendation{}

	// Recommend consistency improvements
	if analysis.Consistency < 0.8 {
-		recommendations = append(recommendations, &Recommendation{
+		recommendations = append(recommendations, &BasicRecommendation{
			Type: "consistency",
			Title: "Improve naming consistency",
			Description: "Consider standardizing naming conventions across the project",
@@ -1118,7 +1118,7 @@ func (da *DefaultDirectoryAnalyzer) generateConventionRecommendations(analysis *

	// Recommend architectural improvements
	if len(analysis.OrganizationalPatterns) == 0 {
-		recommendations = append(recommendations, &Recommendation{
+		recommendations = append(recommendations, &BasicRecommendation{
			Type: "architecture",
			Title: "Consider architectural patterns",
			Description: "Project structure could benefit from established architectural patterns",
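Review note: the switch from Recommendation to BasicRecommendation across these hunks suggests the plain Recommendation name collided with another type in the package. A minimal sketch of the shape these call sites rely on, inferred only from the literals above; any further fields or tags are assumptions:

```go
// BasicRecommendation, as constructed by generateConventionRecommendations.
// Only these three fields appear in this diff; anything else is assumed.
type BasicRecommendation struct {
	Type        string // e.g. "consistency", "architecture"
	Title       string // short headline
	Description string // human-readable guidance
}
```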
@@ -1185,7 +1185,7 @@ func (da *DefaultDirectoryAnalyzer) findDirectoryDependencies(ctx context.Contex

	if detector, exists := da.relationshipAnalyzer.dependencyDetectors[language]; exists {
		imports := da.extractImports(string(content), detector.importPatterns)

		// Check which imports refer to other directories
		for _, imp := range imports {
			for _, otherDir := range allDirs {
@@ -1210,7 +1210,7 @@

func (da *DefaultDirectoryAnalyzer) extractImports(content string, patterns []*regexp.Regexp) []string {
	imports := []string{}

	for _, pattern := range patterns {
		matches := pattern.FindAllStringSubmatch(content, -1)
		for _, match := range matches {
@@ -1225,12 +1225,11 @@ func (da *DefaultDirectoryAnalyzer) extractImports(content string, patterns []*r

func (da *DefaultDirectoryAnalyzer) isLocalDependency(importPath, fromDir, toDir string) bool {
	// Simple heuristic: check if import path references the target directory
	fromBase := filepath.Base(fromDir)
	toBase := filepath.Base(toDir)
-
	return strings.Contains(importPath, toBase) ||
		strings.Contains(importPath, "../"+toBase) ||
		strings.Contains(importPath, "./"+toBase)
}

func (da *DefaultDirectoryAnalyzer) analyzeDirectoryRelationships(subdirs []string, dependencies []*DirectoryDependency) []*DirectoryRelation {
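For illustration, a self-contained mirror of the isLocalDependency heuristic above; the import paths are hypothetical:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// localDep mirrors isLocalDependency: an import is treated as local when
// it mentions the target directory's base name, bare or relative.
func localDep(importPath, toDir string) bool {
	toBase := filepath.Base(toDir)
	return strings.Contains(importPath, toBase) ||
		strings.Contains(importPath, "../"+toBase) ||
		strings.Contains(importPath, "./"+toBase)
}

func main() {
	fmt.Println(localDep("chorus/pkg/slurp/context", "/repo/pkg/slurp/context")) // true
	fmt.Println(localDep("strings", "/repo/pkg/slurp/context"))                  // false
}
```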
@@ -1399,7 +1398,7 @@ func (da *DefaultDirectoryAnalyzer) walkDirectoryHierarchy(rootPath string, curr

func (da *DefaultDirectoryAnalyzer) generateUCXLAddress(path string) (*ucxl.Address, error) {
	cleanPath := filepath.Clean(path)
-	addr, err := ucxl.ParseAddress(fmt.Sprintf("dir://%s", cleanPath))
+	addr, err := ucxl.Parse(fmt.Sprintf("dir://%s", cleanPath))
	if err != nil {
		return nil, fmt.Errorf("failed to generate UCXL address: %w", err)
	}
@@ -1407,7 +1406,7 @@ func (da *DefaultDirectoryAnalyzer) generateUCXLAddress(path string) (*ucxl.Addr
}

func (da *DefaultDirectoryAnalyzer) generateDirectorySummary(structure *DirectoryStructure) string {
	summary := fmt.Sprintf("Directory with %d files and %d subdirectories",
		structure.FileCount, structure.DirectoryCount)

	// Add language information
@@ -1417,7 +1416,7 @@ func (da *DefaultDirectoryAnalyzer) generateDirectorySummary(structure *Director
		langs = append(langs, fmt.Sprintf("%s (%d)", lang, count))
	}
	sort.Strings(langs)
-		summary += fmt.Sprintf(", containing: %s", strings.Join(langs[:min(3, len(langs))], ", "))
+		summary += fmt.Sprintf(", containing: %s", strings.Join(langs[:minInt(3, len(langs))], ", "))
	}

	return summary
@@ -1497,9 +1496,9 @@ func (da *DefaultDirectoryAnalyzer) calculateDirectorySpecificity(structure *Dir
	return specificity
}

-func min(a, b int) int {
+func minInt(a, b int) int {
	if a < b {
		return a
	}
	return b
}

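Review note: renaming min to minInt avoids shadowing the generic min builtin that Go 1.21 introduced, and sidesteps any duplicate min helper elsewhere in the package. A runnable comparison, assuming a Go 1.21+ toolchain:

```go
package main

import "fmt"

// minInt is the renamed helper: a plain int-only function, unlike the
// generic builtin min available since Go 1.21.
func minInt(a, b int) int {
	if a < b {
		return a
	}
	return b
}

func main() {
	fmt.Println(minInt(3, 5)) // 3, via the helper used in this diff
	fmt.Println(min(3, 5))    // 3, via the Go 1.21+ builtin
}
```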
@@ -2,9 +2,9 @@ package intelligence

import (
	"context"
	"sync"
	"time"

	"chorus/pkg/ucxl"
	slurpContext "chorus/pkg/slurp/context"
)

@@ -17,38 +17,38 @@ type IntelligenceEngine interface {
	// AnalyzeFile analyzes a single file and generates context
	// Performs content analysis, language detection, and pattern recognition
	AnalyzeFile(ctx context.Context, filePath string, role string) (*slurpContext.ContextNode, error)

	// AnalyzeDirectory analyzes directory structure for hierarchical patterns
	// Identifies organizational patterns, naming conventions, and structure insights
	AnalyzeDirectory(ctx context.Context, dirPath string) ([]*slurpContext.ContextNode, error)

	// GenerateRoleInsights generates role-specific insights for existing context
	// Provides specialized analysis based on role requirements and perspectives
	GenerateRoleInsights(ctx context.Context, baseContext *slurpContext.ContextNode, role string) ([]string, error)

	// AssessGoalAlignment assesses how well context aligns with project goals
	// Returns alignment score and specific alignment metrics
	AssessGoalAlignment(ctx context.Context, node *slurpContext.ContextNode) (float64, error)

	// AnalyzeBatch processes multiple files efficiently in parallel
	// Optimized for bulk analysis operations with resource management
	AnalyzeBatch(ctx context.Context, filePaths []string, role string) (map[string]*slurpContext.ContextNode, error)

	// DetectPatterns identifies recurring patterns across multiple contexts
	// Useful for template creation and standardization
	DetectPatterns(ctx context.Context, contexts []*slurpContext.ContextNode) ([]*Pattern, error)

	// EnhanceWithRAG enhances context using RAG system knowledge
	// Integrates external knowledge for richer context understanding
	EnhanceWithRAG(ctx context.Context, node *slurpContext.ContextNode) (*slurpContext.ContextNode, error)

	// ValidateContext validates generated context quality and consistency
	// Ensures context meets quality thresholds and consistency requirements
	ValidateContext(ctx context.Context, node *slurpContext.ContextNode) (*ValidationResult, error)

	// GetEngineStats returns engine performance and operational statistics
	GetEngineStats() (*EngineStatistics, error)

	// SetConfiguration updates engine configuration
	SetConfiguration(config *EngineConfig) error
}
@@ -57,22 +57,22 @@ type IntelligenceEngine interface {
type FileAnalyzer interface {
	// AnalyzeContent analyzes file content for context extraction
	AnalyzeContent(ctx context.Context, filePath string, content []byte) (*FileAnalysis, error)

	// DetectLanguage detects programming language from content
	DetectLanguage(ctx context.Context, filePath string, content []byte) (string, float64, error)

	// ExtractMetadata extracts file metadata and statistics
	ExtractMetadata(ctx context.Context, filePath string) (*FileMetadata, error)

	// AnalyzeStructure analyzes code structure and organization
	AnalyzeStructure(ctx context.Context, filePath string, content []byte) (*StructureAnalysis, error)

	// IdentifyPurpose identifies the primary purpose of the file
	IdentifyPurpose(ctx context.Context, analysis *FileAnalysis) (string, float64, error)

	// GenerateSummary generates a concise summary of file content
	GenerateSummary(ctx context.Context, analysis *FileAnalysis) (string, error)

	// ExtractTechnologies identifies technologies used in the file
	ExtractTechnologies(ctx context.Context, analysis *FileAnalysis) ([]string, error)
}
@@ -81,16 +81,16 @@ type FileAnalyzer interface {
type DirectoryAnalyzer interface {
	// AnalyzeStructure analyzes directory organization patterns
	AnalyzeStructure(ctx context.Context, dirPath string) (*DirectoryStructure, error)

	// DetectConventions identifies naming and organizational conventions
	DetectConventions(ctx context.Context, dirPath string) (*ConventionAnalysis, error)

	// IdentifyPurpose determines the primary purpose of a directory
	IdentifyPurpose(ctx context.Context, structure *DirectoryStructure) (string, float64, error)

	// AnalyzeRelationships analyzes relationships between subdirectories
	AnalyzeRelationships(ctx context.Context, dirPath string) (*RelationshipAnalysis, error)

	// GenerateHierarchy generates context hierarchy for directory tree
	GenerateHierarchy(ctx context.Context, rootPath string, maxDepth int) ([]*slurpContext.ContextNode, error)
}
@@ -99,16 +99,16 @@ type DirectoryAnalyzer interface {
type PatternDetector interface {
	// DetectCodePatterns identifies code patterns and architectural styles
	DetectCodePatterns(ctx context.Context, filePath string, content []byte) ([]*CodePattern, error)

	// DetectNamingPatterns identifies naming conventions and patterns
	DetectNamingPatterns(ctx context.Context, contexts []*slurpContext.ContextNode) ([]*NamingPattern, error)

	// DetectOrganizationalPatterns identifies organizational patterns
	DetectOrganizationalPatterns(ctx context.Context, rootPath string) ([]*OrganizationalPattern, error)

	// MatchPatterns matches context against known patterns
	MatchPatterns(ctx context.Context, node *slurpContext.ContextNode, patterns []*Pattern) ([]*PatternMatch, error)

	// LearnPatterns learns new patterns from context examples
	LearnPatterns(ctx context.Context, examples []*slurpContext.ContextNode) ([]*Pattern, error)
}
@@ -117,19 +117,19 @@ type PatternDetector interface {
type RAGIntegration interface {
	// Query queries the RAG system for relevant information
	Query(ctx context.Context, query string, context map[string]interface{}) (*RAGResponse, error)

	// EnhanceContext enhances context using RAG knowledge
	EnhanceContext(ctx context.Context, node *slurpContext.ContextNode) (*slurpContext.ContextNode, error)

	// IndexContent indexes content for RAG retrieval
	IndexContent(ctx context.Context, content string, metadata map[string]interface{}) error

	// SearchSimilar searches for similar content in RAG system
	SearchSimilar(ctx context.Context, content string, limit int) ([]*RAGResult, error)

	// UpdateIndex updates RAG index with new content
	UpdateIndex(ctx context.Context, updates []*RAGUpdate) error

	// GetRAGStats returns RAG system statistics
	GetRAGStats(ctx context.Context) (*RAGStatistics, error)
}
@@ -138,26 +138,26 @@ type RAGIntegration interface {

// ProjectGoal represents a high-level project objective
type ProjectGoal struct {
	ID string `json:"id"` // Unique identifier
	Name string `json:"name"` // Goal name
	Description string `json:"description"` // Detailed description
	Keywords []string `json:"keywords"` // Associated keywords
	Priority int `json:"priority"` // Priority level (1=highest)
	Phase string `json:"phase"` // Project phase
	Metrics []string `json:"metrics"` // Success metrics
	Owner string `json:"owner"` // Goal owner
	Deadline *time.Time `json:"deadline,omitempty"` // Target deadline
}

// RoleProfile defines context requirements for different roles
type RoleProfile struct {
	Role string `json:"role"` // Role identifier
	AccessLevel slurpContext.RoleAccessLevel `json:"access_level"` // Required access level
	RelevantTags []string `json:"relevant_tags"` // Relevant context tags
	ContextScope []string `json:"context_scope"` // Scope of interest
	InsightTypes []string `json:"insight_types"` // Types of insights needed
	QualityThreshold float64 `json:"quality_threshold"` // Minimum quality threshold
	Preferences map[string]interface{} `json:"preferences"` // Role-specific preferences
}

// EngineConfig represents configuration for the intelligence engine
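As a usage sketch, a ProjectGoal literal built from the fields above; every value here is illustrative, not taken from the codebase:

```go
goal := &ProjectGoal{
	ID:          "goal-001",
	Name:        "Secure Authentication",
	Description: "Implement secure user authentication",
	Keywords:    []string{"auth", "security"},
	Priority:    1, // 1 = highest
	Phase:       "development",
	Metrics:     []string{"login_success_rate"},
	Owner:       "platform-team",
	Deadline:    nil, // optional; omitted from JSON when nil
}
```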
@@ -166,61 +166,66 @@ type EngineConfig struct {
	MaxConcurrentAnalysis int `json:"max_concurrent_analysis"` // Maximum concurrent analyses
	AnalysisTimeout time.Duration `json:"analysis_timeout"` // Analysis timeout
	MaxFileSize int64 `json:"max_file_size"` // Maximum file size to analyze

	// RAG integration settings
	RAGEndpoint string `json:"rag_endpoint"` // RAG system endpoint
	RAGTimeout time.Duration `json:"rag_timeout"` // RAG query timeout
	RAGEnabled bool `json:"rag_enabled"` // Whether RAG is enabled
+	EnableRAG bool `json:"enable_rag"` // Legacy toggle for RAG enablement

+	// Feature toggles
+	EnableGoalAlignment bool `json:"enable_goal_alignment"`
+	EnablePatternDetection bool `json:"enable_pattern_detection"`
+	EnableRoleAware bool `json:"enable_role_aware"`

	// Quality settings
	MinConfidenceThreshold float64 `json:"min_confidence_threshold"` // Minimum confidence for results
	RequireValidation bool `json:"require_validation"` // Whether validation is required

	// Performance settings
	CacheEnabled bool `json:"cache_enabled"` // Whether caching is enabled
	CacheTTL time.Duration `json:"cache_ttl"` // Cache TTL

	// Role profiles
	RoleProfiles map[string]*RoleProfile `json:"role_profiles"` // Role-specific profiles

	// Project goals
	ProjectGoals []*ProjectGoal `json:"project_goals"` // Active project goals
}

// EngineStatistics represents performance statistics for the engine
type EngineStatistics struct {
	TotalAnalyses int64 `json:"total_analyses"` // Total analyses performed
	SuccessfulAnalyses int64 `json:"successful_analyses"` // Successful analyses
	FailedAnalyses int64 `json:"failed_analyses"` // Failed analyses
	AverageAnalysisTime time.Duration `json:"average_analysis_time"` // Average analysis time
	CacheHitRate float64 `json:"cache_hit_rate"` // Cache hit rate
	RAGQueriesPerformed int64 `json:"rag_queries_performed"` // RAG queries made
	AverageConfidence float64 `json:"average_confidence"` // Average confidence score
	FilesAnalyzed int64 `json:"files_analyzed"` // Total files analyzed
	DirectoriesAnalyzed int64 `json:"directories_analyzed"` // Total directories analyzed
	PatternsDetected int64 `json:"patterns_detected"` // Patterns detected
	LastResetAt time.Time `json:"last_reset_at"` // When stats were last reset
}

// FileAnalysis represents the result of file analysis
type FileAnalysis struct {
	FilePath string `json:"file_path"` // Path to analyzed file
	Language string `json:"language"` // Detected language
	LanguageConf float64 `json:"language_conf"` // Language detection confidence
	FileType string `json:"file_type"` // File type classification
	Size int64 `json:"size"` // File size in bytes
	LineCount int `json:"line_count"` // Number of lines
	Complexity float64 `json:"complexity"` // Code complexity score
	Dependencies []string `json:"dependencies"` // Identified dependencies
	Exports []string `json:"exports"` // Exported symbols/functions
	Imports []string `json:"imports"` // Import statements
	Functions []string `json:"functions"` // Function/method names
	Classes []string `json:"classes"` // Class names
	Variables []string `json:"variables"` // Variable names
	Comments []string `json:"comments"` // Extracted comments
	TODOs []string `json:"todos"` // TODO comments
	Metadata map[string]interface{} `json:"metadata"` // Additional metadata
	AnalyzedAt time.Time `json:"analyzed_at"` // When analysis was performed
}

// DefaultIntelligenceEngine provides a complete implementation of the IntelligenceEngine interface
@@ -250,6 +255,10 @@ func NewDefaultIntelligenceEngine(config *EngineConfig) (*DefaultIntelligenceEng
		config = DefaultEngineConfig()
	}

+	if config.EnableRAG {
+		config.RAGEnabled = true
+	}
+
	// Initialize file analyzer
	fileAnalyzer := NewDefaultFileAnalyzer(config)

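The new block folds the legacy EnableRAG flag into RAGEnabled at construction time, so older configs keep working. A sketch of the observable effect:

```go
cfg := DefaultEngineConfig()
cfg.EnableRAG = true // legacy field, still set by older callers

engine, err := NewDefaultIntelligenceEngine(cfg)
if err != nil {
	// handle construction failure
}
_ = engine
// cfg.RAGEnabled is now true as well, so downstream checks can rely
// on RAGEnabled alone.
```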
@@ -273,13 +282,22 @@ func NewDefaultIntelligenceEngine(config *EngineConfig) (*DefaultIntelligenceEng
		directoryAnalyzer: dirAnalyzer,
		patternDetector: patternDetector,
		ragIntegration: ragIntegration,
		stats: &EngineStatistics{
			LastResetAt: time.Now(),
		},
		cache: &sync.Map{},
		projectGoals: config.ProjectGoals,
		roleProfiles: config.RoleProfiles,
	}

	return engine, nil
}

+// NewIntelligenceEngine is a convenience wrapper expected by legacy callers.
+func NewIntelligenceEngine(config *EngineConfig) *DefaultIntelligenceEngine {
+	engine, err := NewDefaultIntelligenceEngine(config)
+	if err != nil {
+		panic(err)
+	}
+	return engine
+}

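The wrapper trades the error return for a panic, which suits wiring code that treats a bad engine configuration as fatal; callers that can recover should prefer the error-returning constructor:

```go
// Preferred: handle the error explicitly.
engine, err := NewDefaultIntelligenceEngine(DefaultEngineConfig())
if err != nil {
	// report and fall back
}
_ = engine

// Legacy call sites: panics if construction fails.
legacy := NewIntelligenceEngine(DefaultEngineConfig())
_ = legacy
```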
@@ -4,14 +4,13 @@ import (
	"context"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

-	"chorus/pkg/ucxl"
	slurpContext "chorus/pkg/slurp/context"
+	"chorus/pkg/ucxl"
)

// AnalyzeFile analyzes a single file and generates contextual understanding
@@ -136,8 +135,7 @@ func (e *DefaultIntelligenceEngine) AnalyzeDirectory(ctx context.Context, dirPat
	}()

	// Analyze directory structure
-	structure, err := e.directoryAnalyzer.AnalyzeStructure(ctx, dirPath)
-	if err != nil {
+	if _, err := e.directoryAnalyzer.AnalyzeStructure(ctx, dirPath); err != nil {
		e.updateStats("directory_analysis", time.Since(start), false)
		return nil, fmt.Errorf("failed to analyze directory structure: %w", err)
	}
@@ -232,7 +230,7 @@ func (e *DefaultIntelligenceEngine) AnalyzeBatch(ctx context.Context, filePaths
		wg.Add(1)
		go func(path string) {
			defer wg.Done()
			semaphore <- struct{}{} // Acquire semaphore
			defer func() { <-semaphore }() // Release semaphore

			ctxNode, err := e.AnalyzeFile(ctx, path, role)
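The acquire/release pair above is the standard buffered-channel semaphore for bounding goroutine concurrency. A self-contained sketch of the same pattern; the worker body is a stand-in for e.AnalyzeFile:

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	paths := []string{"a.go", "b.go", "c.go", "d.go"}
	maxConcurrent := 2
	semaphore := make(chan struct{}, maxConcurrent) // capacity bounds parallelism
	var wg sync.WaitGroup

	for _, p := range paths {
		wg.Add(1)
		go func(path string) {
			defer wg.Done()
			semaphore <- struct{}{}        // acquire: blocks once maxConcurrent workers run
			defer func() { <-semaphore }() // release on return
			fmt.Println("analyzing", path) // stand-in for the real analysis call
		}(p)
	}
	wg.Wait()
}
```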
@@ -317,7 +315,7 @@ func (e *DefaultIntelligenceEngine) EnhanceWithRAG(ctx context.Context, node *sl
	if ragResponse.Confidence >= e.config.MinConfidenceThreshold {
		enhanced.Insights = append(enhanced.Insights, fmt.Sprintf("RAG: %s", ragResponse.Answer))
		enhanced.RAGConfidence = ragResponse.Confidence

		// Add source information to metadata
		if len(ragResponse.Sources) > 0 {
			sources := make([]string, len(ragResponse.Sources))
@@ -430,7 +428,7 @@ func (e *DefaultIntelligenceEngine) readFileContent(filePath string) ([]byte, er
func (e *DefaultIntelligenceEngine) generateUCXLAddress(filePath string) (*ucxl.Address, error) {
	// Simple implementation - in reality this would be more sophisticated
	cleanPath := filepath.Clean(filePath)
-	addr, err := ucxl.ParseAddress(fmt.Sprintf("file://%s", cleanPath))
+	addr, err := ucxl.Parse(fmt.Sprintf("file://%s", cleanPath))
	if err != nil {
		return nil, fmt.Errorf("failed to generate UCXL address: %w", err)
	}
@@ -640,6 +638,10 @@ func DefaultEngineConfig() *EngineConfig {
		RAGEndpoint: "",
		RAGTimeout: 10 * time.Second,
		RAGEnabled: false,
+		EnableRAG: false,
+		EnableGoalAlignment: false,
+		EnablePatternDetection: false,
+		EnableRoleAware: false,
		MinConfidenceThreshold: 0.6,
		RequireValidation: true,
		CacheEnabled: true,
@@ -647,4 +649,4 @@ func DefaultEngineConfig() *EngineConfig {
		RoleProfiles: make(map[string]*RoleProfile),
		ProjectGoals: []*ProjectGoal{},
	}
}

@@ -1,3 +1,6 @@
+//go:build integration
+// +build integration
+
package intelligence

import (
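Carrying both the //go:build line and the legacy // +build line keeps the file excluded from ordinary builds on old and new toolchains alike; these tests now compile only when the tag is supplied:

```go
//go:build integration
// +build integration

package intelligence

// Compiled only when the tag is set, e.g.:
//
//	go test -tags integration ./...
```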
@@ -13,12 +16,12 @@ import (
func TestIntelligenceEngine_Integration(t *testing.T) {
	// Create test configuration
	config := &EngineConfig{
		EnableRAG: false, // Disable RAG for testing
		EnableGoalAlignment: true,
		EnablePatternDetection: true,
		EnableRoleAware: true,
		MaxConcurrentAnalysis: 2,
		AnalysisTimeout: 30 * time.Second,
		CacheTTL: 5 * time.Minute,
		MinConfidenceThreshold: 0.5,
	}
@@ -29,13 +32,13 @@ func TestIntelligenceEngine_Integration(t *testing.T) {

	// Create test context node
	testNode := &slurpContext.ContextNode{
		Path: "/test/example.go",
		Summary: "A Go service implementing user authentication",
		Purpose: "Handles user login and authentication for the web application",
		Technologies: []string{"go", "jwt", "bcrypt"},
		Tags: []string{"authentication", "security", "web"},
-		CreatedAt: time.Now(),
+		GeneratedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
|
||||
// Create test project goal
|
||||
@@ -47,7 +50,7 @@ func TestIntelligenceEngine_Integration(t *testing.T) {
|
||||
Priority: 1,
|
||||
Phase: "development",
|
||||
Deadline: nil,
|
||||
CreatedAt: time.Now(),
|
||||
GeneratedAt: time.Now(),
|
||||
}
|
||||
|
||||
t.Run("AnalyzeFile", func(t *testing.T) {
|
||||
@@ -220,9 +223,9 @@ func TestPatternDetector_DetectDesignPatterns(t *testing.T) {
	ctx := context.Background()

	tests := []struct {
		name string
		filename string
		content []byte
		expectedPattern string
	}{
		{
@@ -244,7 +247,7 @@ func TestPatternDetector_DetectDesignPatterns(t *testing.T) {
		},
		{
			name: "Go Factory Pattern",
			filename: "factory.go",
			content: []byte(`
package main
func NewUser(name string) *User {
@@ -312,7 +315,7 @@ func TestGoalAlignment_DimensionCalculators(t *testing.T) {
	testNode := &slurpContext.ContextNode{
		Path: "/test/auth.go",
		Summary: "User authentication service with JWT tokens",
		Purpose: "Handles user login and token generation",
		Technologies: []string{"go", "jwt", "bcrypt"},
		Tags: []string{"authentication", "security"},
	}
@@ -470,7 +473,7 @@ func TestRoleAwareProcessor_AccessControl(t *testing.T) {
	hasAccess := err == nil

	if hasAccess != tc.expected {
		t.Errorf("Expected access %v for role %s, action %s, resource %s, got %v",
			tc.expected, tc.roleID, tc.action, tc.resource, hasAccess)
	}
})
@@ -491,7 +494,7 @@ func TestDirectoryAnalyzer_StructureAnalysis(t *testing.T) {
	// Create test structure
	testDirs := []string{
		"src/main",
		"src/lib",
		"test/unit",
		"test/integration",
		"docs/api",
@@ -504,7 +507,7 @@ func TestDirectoryAnalyzer_StructureAnalysis(t *testing.T) {
		if err := os.MkdirAll(fullPath, 0755); err != nil {
			t.Fatalf("Failed to create directory %s: %v", fullPath, err)
		}

		// Create a dummy file in each directory
		testFile := filepath.Join(fullPath, "test.txt")
		if err := os.WriteFile(testFile, []byte("test content"), 0644); err != nil {
@@ -652,7 +655,7 @@ func createTestContextNode(path, summary, purpose string, technologies, tags []s
		Purpose: purpose,
		Technologies: technologies,
		Tags: tags,
-		CreatedAt: time.Now(),
+		GeneratedAt: time.Now(),
		UpdatedAt: time.Now(),
	}
}
@@ -665,7 +668,7 @@ func createTestProjectGoal(id, name, description string, keywords []string, prio
		Keywords: keywords,
		Priority: priority,
		Phase: phase,
-		CreatedAt: time.Now(),
+		GeneratedAt: time.Now(),
	}
}

@@ -697,4 +700,4 @@ func assertValidDimensionScore(t *testing.T, score *DimensionScore) {
	if score.Confidence <= 0 || score.Confidence > 1 {
		t.Errorf("Invalid confidence: %f", score.Confidence)
	}
}

@@ -1,7 +1,6 @@
package intelligence

import (
	"bufio"
	"bytes"
	"context"
	"fmt"
@@ -33,12 +32,12 @@ type CodeStructureAnalyzer struct {

// LanguagePatterns contains regex patterns for different language constructs
type LanguagePatterns struct {
	Functions []*regexp.Regexp
	Classes []*regexp.Regexp
	Variables []*regexp.Regexp
	Imports []*regexp.Regexp
	Comments []*regexp.Regexp
	TODOs []*regexp.Regexp
}

// MetadataExtractor extracts file system metadata
@@ -65,66 +64,66 @@ func NewLanguageDetector() *LanguageDetector {

	// Map file extensions to languages
	extensions := map[string]string{
		".go": "go",
		".py": "python",
		".js": "javascript",
		".jsx": "javascript",
		".ts": "typescript",
		".tsx": "typescript",
		".java": "java",
		".c": "c",
		".cpp": "cpp",
		".cc": "cpp",
		".cxx": "cpp",
		".h": "c",
		".hpp": "cpp",
		".cs": "csharp",
		".php": "php",
		".rb": "ruby",
		".rs": "rust",
		".kt": "kotlin",
		".swift": "swift",
		".m": "objective-c",
		".mm": "objective-c",
		".scala": "scala",
		".clj": "clojure",
		".hs": "haskell",
		".ex": "elixir",
		".exs": "elixir",
		".erl": "erlang",
		".lua": "lua",
		".pl": "perl",
		".r": "r",
		".sh": "shell",
		".bash": "shell",
		".zsh": "shell",
		".fish": "shell",
		".sql": "sql",
		".html": "html",
		".htm": "html",
		".css": "css",
		".scss": "scss",
		".sass": "sass",
		".less": "less",
		".xml": "xml",
		".json": "json",
		".yaml": "yaml",
		".yml": "yaml",
		".toml": "toml",
		".ini": "ini",
		".cfg": "ini",
		".conf": "config",
		".md": "markdown",
		".rst": "rst",
		".tex": "latex",
		".proto": "protobuf",
		".tf": "terraform",
		".hcl": "hcl",
		".dockerfile": "dockerfile",
		".dockerignore": "dockerignore",
		".gitignore": "gitignore",
		".vim": "vim",
		".emacs": "emacs",
	}

	for ext, lang := range extensions {
@@ -383,11 +382,11 @@ func (fa *DefaultFileAnalyzer) AnalyzeContent(ctx context.Context, filePath stri
// DetectLanguage detects programming language from content and file extension
func (fa *DefaultFileAnalyzer) DetectLanguage(ctx context.Context, filePath string, content []byte) (string, float64, error) {
	ext := strings.ToLower(filepath.Ext(filePath))

	// First try extension-based detection
	if lang, exists := fa.languageDetector.extensionMap[ext]; exists {
		confidence := 0.8 // High confidence for extension-based detection

		// Verify with content signatures
		if signatures, hasSignatures := fa.languageDetector.signatureRegexs[lang]; hasSignatures {
			matches := 0
@@ -396,7 +395,7 @@ func (fa *DefaultFileAnalyzer) DetectLanguage(ctx context.Context, filePath stri
				matches++
			}
		}

		// Adjust confidence based on signature matches
		if matches > 0 {
			confidence = 0.9 + float64(matches)/float64(len(signatures))*0.1
@@ -404,14 +403,14 @@ func (fa *DefaultFileAnalyzer) DetectLanguage(ctx context.Context, filePath stri
			confidence = 0.6 // Lower confidence if no signatures match
		}
	}

	return lang, confidence, nil
	}

	// Fall back to content-based detection
	bestLang := "unknown"
	bestScore := 0

	for lang, signatures := range fa.languageDetector.signatureRegexs {
		score := 0
		for _, regex := range signatures {
@@ -419,7 +418,7 @@ func (fa *DefaultFileAnalyzer) DetectLanguage(ctx context.Context, filePath stri
			score++
		}
	}

	if score > bestScore {
		bestScore = score
		bestLang = lang
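Condensed, the strategy across these hunks is: trust the extension map first (confidence 0.8, raised past 0.9 when content signatures also match, lowered to 0.6 when none do), and fall back to scoring signature regexes across all languages only when the extension is unknown. A compact, runnable sketch with a toy signature set; the 0.5 fallback confidence is an assumption, since the real method's value is not shown in this hunk:

```go
package main

import (
	"fmt"
	"regexp"
)

var extensionMap = map[string]string{".go": "go", ".py": "python"}

var signatureRegexs = map[string][]*regexp.Regexp{
	"go":     {regexp.MustCompile(`\bpackage\s+\w+`), regexp.MustCompile(`\bfunc\s+\w+\(`)},
	"python": {regexp.MustCompile(`\bdef\s+\w+\(`), regexp.MustCompile(`\bimport\s+\w+`)},
}

// detect mirrors the two-stage flow: extension lookup first, then a
// content-signature fallback when the extension is unknown.
func detect(ext string, content []byte) (string, float64) {
	if lang, ok := extensionMap[ext]; ok {
		matches := 0
		for _, re := range signatureRegexs[lang] {
			if re.Match(content) {
				matches++
			}
		}
		confidence := 0.8
		if matches > 0 {
			confidence = 0.9 + float64(matches)/float64(len(signatureRegexs[lang]))*0.1
		} else if len(signatureRegexs[lang]) > 0 {
			confidence = 0.6
		}
		return lang, confidence
	}
	bestLang, bestScore := "unknown", 0
	for lang, sigs := range signatureRegexs {
		score := 0
		for _, re := range sigs {
			if re.Match(content) {
				score++
			}
		}
		if score > bestScore {
			bestLang, bestScore = lang, score
		}
	}
	return bestLang, 0.5 // fallback confidence: an assumption, not from this hunk
}

func main() {
	lang, conf := detect(".go", []byte("package main\nfunc main() {}"))
	fmt.Println(lang, conf) // go 1
}
```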
@@ -499,9 +498,9 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi
	filenameUpper := strings.ToUpper(filename)

	// Configuration files
	if strings.Contains(filenameUpper, "CONFIG") ||
		strings.Contains(filenameUpper, "CONF") ||
		analysis.FileType == ".ini" || analysis.FileType == ".toml" {
		purpose = "Configuration management"
		confidence = 0.9
		return purpose, confidence, nil
@@ -509,9 +508,9 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// Test files
	if strings.Contains(filenameUpper, "TEST") ||
		strings.Contains(filenameUpper, "SPEC") ||
		strings.HasSuffix(filenameUpper, "_TEST.GO") ||
		strings.HasSuffix(filenameUpper, "_TEST.PY") {
		purpose = "Testing and quality assurance"
		confidence = 0.9
		return purpose, confidence, nil
@@ -519,8 +518,8 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// Documentation files
	if analysis.FileType == ".md" || analysis.FileType == ".rst" ||
		strings.Contains(filenameUpper, "README") ||
		strings.Contains(filenameUpper, "DOC") {
		purpose = "Documentation and guidance"
		confidence = 0.9
		return purpose, confidence, nil
@@ -528,8 +527,8 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// API files
	if strings.Contains(filenameUpper, "API") ||
		strings.Contains(filenameUpper, "ROUTER") ||
		strings.Contains(filenameUpper, "HANDLER") {
		purpose = "API endpoint management"
		confidence = 0.8
		return purpose, confidence, nil
@@ -537,9 +536,9 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// Database files
	if strings.Contains(filenameUpper, "DB") ||
		strings.Contains(filenameUpper, "DATABASE") ||
		strings.Contains(filenameUpper, "MODEL") ||
		strings.Contains(filenameUpper, "SCHEMA") {
		purpose = "Data storage and management"
		confidence = 0.8
		return purpose, confidence, nil
@@ -547,9 +546,9 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// UI/Frontend files
	if analysis.Language == "javascript" || analysis.Language == "typescript" ||
		strings.Contains(filenameUpper, "COMPONENT") ||
		strings.Contains(filenameUpper, "VIEW") ||
		strings.Contains(filenameUpper, "UI") {
		purpose = "User interface component"
		confidence = 0.7
		return purpose, confidence, nil
@@ -557,8 +556,8 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// Service/Business logic
	if strings.Contains(filenameUpper, "SERVICE") ||
		strings.Contains(filenameUpper, "BUSINESS") ||
		strings.Contains(filenameUpper, "LOGIC") {
		purpose = "Business logic implementation"
		confidence = 0.7
		return purpose, confidence, nil
@@ -566,8 +565,8 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi

	// Utility files
	if strings.Contains(filenameUpper, "UTIL") ||
		strings.Contains(filenameUpper, "HELPER") ||
		strings.Contains(filenameUpper, "COMMON") {
		purpose = "Utility and helper functions"
		confidence = 0.7
		return purpose, confidence, nil
@@ -591,7 +590,7 @@ func (fa *DefaultFileAnalyzer) IdentifyPurpose(ctx context.Context, analysis *Fi
// GenerateSummary generates a concise summary of file content
func (fa *DefaultFileAnalyzer) GenerateSummary(ctx context.Context, analysis *FileAnalysis) (string, error) {
	summary := strings.Builder{}

	// Language and type
	if analysis.Language != "unknown" {
		summary.WriteString(fmt.Sprintf("%s", strings.Title(analysis.Language)))
@@ -643,23 +642,23 @@ func (fa *DefaultFileAnalyzer) ExtractTechnologies(ctx context.Context, analysis

	// Extract from file patterns
	filename := strings.ToLower(filepath.Base(analysis.FilePath))

	// Framework detection
	frameworks := map[string]string{
		"react": "React",
		"vue": "Vue.js",
		"angular": "Angular",
		"express": "Express.js",
		"django": "Django",
		"flask": "Flask",
		"spring": "Spring",
		"gin": "Gin",
		"echo": "Echo",
		"fastapi": "FastAPI",
		"bootstrap": "Bootstrap",
		"tailwind": "Tailwind CSS",
		"material": "Material UI",
		"antd": "Ant Design",
	}

	for pattern, tech := range frameworks {
@@ -778,7 +777,7 @@ func (fa *DefaultFileAnalyzer) analyzeCodeStructure(analysis *FileAnalysis, cont

func (fa *DefaultFileAnalyzer) calculateComplexity(analysis *FileAnalysis) float64 {
	complexity := 0.0

	// Base complexity from structure
	complexity += float64(len(analysis.Functions)) * 1.5
	complexity += float64(len(analysis.Classes)) * 2.0
@@ -799,7 +798,7 @@ func (fa *DefaultFileAnalyzer) calculateComplexity(analysis *FileAnalysis) float

func (fa *DefaultFileAnalyzer) analyzeArchitecturalPatterns(analysis *StructureAnalysis, content []byte, patterns *LanguagePatterns, language string) {
	contentStr := string(content)

	// Detect common architectural patterns
	if strings.Contains(contentStr, "interface") && language == "go" {
		analysis.Patterns = append(analysis.Patterns, "Interface Segregation")
@@ -813,7 +812,7 @@ func (fa *DefaultFileAnalyzer) analyzeArchitecturalPatterns(analysis *StructureA
	if strings.Contains(contentStr, "Observer") {
		analysis.Patterns = append(analysis.Patterns, "Observer Pattern")
	}

	// Architectural style detection
	if strings.Contains(contentStr, "http.") || strings.Contains(contentStr, "router") {
		analysis.Architecture = "REST API"
@@ -832,13 +831,13 @@ func (fa *DefaultFileAnalyzer) mapImportToTechnology(importPath, language string
	// Technology mapping based on common imports
	techMap := map[string]string{
		// Go
		"gin-gonic/gin": "Gin",
		"labstack/echo": "Echo",
		"gorilla/mux": "Gorilla Mux",
		"gorm.io/gorm": "GORM",
		"github.com/redis": "Redis",
		"go.mongodb.org": "MongoDB",

		// Python
		"django": "Django",
		"flask": "Flask",
@@ -849,15 +848,15 @@ func (fa *DefaultFileAnalyzer) mapImportToTechnology(importPath, language string
		"numpy": "NumPy",
		"tensorflow": "TensorFlow",
		"torch": "PyTorch",

		// JavaScript/TypeScript
		"react": "React",
		"vue": "Vue.js",
		"angular": "Angular",
		"express": "Express.js",
		"axios": "Axios",
		"lodash": "Lodash",
		"moment": "Moment.js",
		"socket.io": "Socket.IO",
	}

@@ -868,4 +867,4 @@ func (fa *DefaultFileAnalyzer) mapImportToTechnology(importPath, language string
	}

	return ""
}

@@ -8,80 +8,79 @@ import (
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"chorus/pkg/crypto"
|
||||
slurpContext "chorus/pkg/slurp/context"
|
||||
)
|
||||
|
||||
// RoleAwareProcessor provides role-based context processing and insight generation
|
||||
type RoleAwareProcessor struct {
|
||||
mu sync.RWMutex
|
||||
config *EngineConfig
|
||||
roleManager *RoleManager
|
||||
securityFilter *SecurityFilter
|
||||
insightGenerator *InsightGenerator
|
||||
accessController *AccessController
|
||||
auditLogger *AuditLogger
|
||||
permissions *PermissionMatrix
|
||||
roleProfiles map[string]*RoleProfile
|
||||
mu sync.RWMutex
|
||||
config *EngineConfig
|
||||
roleManager *RoleManager
|
||||
securityFilter *SecurityFilter
|
||||
insightGenerator *InsightGenerator
|
||||
accessController *AccessController
|
||||
auditLogger *AuditLogger
|
||||
permissions *PermissionMatrix
|
||||
roleProfiles map[string]*RoleBlueprint
|
||||
}
|
||||
|
||||
// RoleManager manages role definitions and hierarchies
|
||||
type RoleManager struct {
|
||||
roles map[string]*Role
|
||||
hierarchies map[string]*RoleHierarchy
|
||||
capabilities map[string]*RoleCapabilities
|
||||
restrictions map[string]*RoleRestrictions
|
||||
roles map[string]*Role
|
||||
hierarchies map[string]*RoleHierarchy
|
||||
capabilities map[string]*RoleCapabilities
|
||||
restrictions map[string]*RoleRestrictions
|
||||
}

// Role represents an AI agent role with specific permissions and capabilities
type Role struct {
	ID             string                 `json:"id"`
	Name           string                 `json:"name"`
	Description    string                 `json:"description"`
	SecurityLevel  int                    `json:"security_level"`
	Capabilities   []string               `json:"capabilities"`
	Restrictions   []string               `json:"restrictions"`
	AccessPatterns []string               `json:"access_patterns"`
	ContextFilters []string               `json:"context_filters"`
	Priority       int                    `json:"priority"`
	ParentRoles    []string               `json:"parent_roles"`
	ChildRoles     []string               `json:"child_roles"`
	Metadata       map[string]interface{} `json:"metadata"`
	CreatedAt      time.Time              `json:"created_at"`
	UpdatedAt      time.Time              `json:"updated_at"`
	IsActive       bool                   `json:"is_active"`
}
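
As a usage sketch against the struct above, a custom role might be declared like this (the role and its values are hypothetical, not shipped defaults):

	reviewer := &Role{
		ID:             "doc_reviewer",
		Name:           "Documentation Reviewer",
		Description:    "Reviews documentation quality and coverage",
		SecurityLevel:  4,
		Capabilities:   []string{"doc_analysis"},
		AccessPatterns: []string{"docs/**"},
		Priority:       3,
		IsActive:       true,
		CreatedAt:      time.Now(),
	}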

// RoleHierarchy defines role inheritance and relationships
type RoleHierarchy struct {
	ParentRole    string   `json:"parent_role"`
	ChildRoles    []string `json:"child_roles"`
	InheritLevel  int      `json:"inherit_level"`
	OverrideRules []string `json:"override_rules"`
}

// RoleCapabilities defines what a role can do
type RoleCapabilities struct {
	RoleID              string   `json:"role_id"`
	ReadAccess          []string `json:"read_access"`
	WriteAccess         []string `json:"write_access"`
	ExecuteAccess       []string `json:"execute_access"`
	AnalysisTypes       []string `json:"analysis_types"`
	InsightLevels       []string `json:"insight_levels"`
	SecurityScopes      []string `json:"security_scopes"`
	DataClassifications []string `json:"data_classifications"`
}

// RoleRestrictions defines what a role cannot do or access
type RoleRestrictions struct {
	RoleID            string     `json:"role_id"`
	ForbiddenPaths    []string   `json:"forbidden_paths"`
	ForbiddenTypes    []string   `json:"forbidden_types"`
	ForbiddenKeywords []string   `json:"forbidden_keywords"`
	TimeRestrictions  []string   `json:"time_restrictions"`
	RateLimit         *RateLimit `json:"rate_limit"`
	MaxContextSize    int        `json:"max_context_size"`
	MaxInsights       int        `json:"max_insights"`
}

// RateLimit defines rate limiting for role operations
@@ -111,9 +110,9 @@ type ContentFilter struct {

// AccessMatrix defines access control rules
type AccessMatrix struct {
	Rules       map[string]*AccessRule `json:"rules"`
	DefaultDeny bool                   `json:"default_deny"`
	LastUpdated time.Time              `json:"last_updated"`
}
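
A default-deny matrix like this is normally consulted with a lookup that falls back to DefaultDeny when no rule matches. A sketch, assuming AccessRule exposes an Allow flag (its fields are not shown in this hunk):

	func (am *AccessMatrix) allows(key string) bool {
		if rule, ok := am.Rules[key]; ok {
			return rule.Allow // hypothetical field on AccessRule
		}
		return !am.DefaultDeny // no matching rule: permit only if default-deny is off
	}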

// AccessRule defines a specific access control rule
@@ -144,14 +143,14 @@ type RoleInsightGenerator interface {

// InsightTemplate defines templates for generating insights
type InsightTemplate struct {
	TemplateID string                 `json:"template_id"`
	Name       string                 `json:"name"`
	Template   string                 `json:"template"`
	Variables  []string               `json:"variables"`
	Roles      []string               `json:"roles"`
	Category   string                 `json:"category"`
	Priority   int                    `json:"priority"`
	Metadata   map[string]interface{} `json:"metadata"`
}

// InsightFilter filters insights based on role permissions
@@ -179,39 +178,39 @@ type PermissionMatrix struct {

// RolePermissions defines permissions for a specific role
type RolePermissions struct {
	RoleID         string                 `json:"role_id"`
	ContextAccess  *ContextAccessRights   `json:"context_access"`
	AnalysisAccess *AnalysisAccessRights  `json:"analysis_access"`
	InsightAccess  *InsightAccessRights   `json:"insight_access"`
	SystemAccess   *SystemAccessRights    `json:"system_access"`
	CustomAccess   map[string]interface{} `json:"custom_access"`
}

// ContextAccessRights defines context-related access rights
type ContextAccessRights struct {
	ReadLevel        int      `json:"read_level"`
	WriteLevel       int      `json:"write_level"`
	AllowedTypes     []string `json:"allowed_types"`
	ForbiddenTypes   []string `json:"forbidden_types"`
	PathRestrictions []string `json:"path_restrictions"`
	SizeLimit        int      `json:"size_limit"`
}

// AnalysisAccessRights defines analysis-related access rights
type AnalysisAccessRights struct {
	AllowedAnalysisTypes []string      `json:"allowed_analysis_types"`
	MaxComplexity        int           `json:"max_complexity"`
	TimeoutLimit         time.Duration `json:"timeout_limit"`
	ResourceLimit        int           `json:"resource_limit"`
}

// InsightAccessRights defines insight-related access rights
type InsightAccessRights struct {
	GenerationLevel     int      `json:"generation_level"`
	AccessLevel         int      `json:"access_level"`
	CategoryFilters     []string `json:"category_filters"`
	ConfidenceThreshold float64  `json:"confidence_threshold"`
	MaxInsights         int      `json:"max_insights"`
}

// SystemAccessRights defines system-level access rights
@@ -254,15 +253,15 @@ type AuditLogger struct {

// AuditEntry represents an audit log entry
type AuditEntry struct {
	ID            string                 `json:"id"`
	Timestamp     time.Time              `json:"timestamp"`
	RoleID        string                 `json:"role_id"`
	Action        string                 `json:"action"`
	Resource      string                 `json:"resource"`
	Result        string                 `json:"result"` // success, denied, error
	Details       string                 `json:"details"`
	Context       map[string]interface{} `json:"context"`
	SecurityLevel int                    `json:"security_level"`
}
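
For reference, a denied operation would be recorded along these lines (values are illustrative):

	entry := &AuditEntry{
		ID:            "audit-0001",
		Timestamp:     time.Now(),
		RoleID:        "developer",
		Action:        "context:process",
		Resource:      "security/policy.go",
		Result:        "denied",
		Details:       "path matches a forbidden pattern for this role",
		SecurityLevel: 6,
	}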

// AuditConfig defines audit logging configuration
@@ -276,49 +275,49 @@ type AuditConfig struct {
}

// RoleProfile contains comprehensive role configuration
-type RoleProfile struct {
+type RoleBlueprint struct {
	Role           *Role               `json:"role"`
	Capabilities   *RoleCapabilities   `json:"capabilities"`
	Restrictions   *RoleRestrictions   `json:"restrictions"`
	Permissions    *RolePermissions    `json:"permissions"`
	InsightConfig  *RoleInsightConfig  `json:"insight_config"`
	SecurityConfig *RoleSecurityConfig `json:"security_config"`
}

// RoleInsightConfig defines insight generation configuration for a role
type RoleInsightConfig struct {
	EnabledGenerators   []string           `json:"enabled_generators"`
	MaxInsights         int                `json:"max_insights"`
	ConfidenceThreshold float64            `json:"confidence_threshold"`
	CategoryWeights     map[string]float64 `json:"category_weights"`
	CustomFilters       []string           `json:"custom_filters"`
}

// RoleSecurityConfig defines security configuration for a role
type RoleSecurityConfig struct {
	EncryptionRequired bool       `json:"encryption_required"`
	AccessLogging      bool       `json:"access_logging"`
	RateLimit          *RateLimit `json:"rate_limit"`
	IPWhitelist        []string   `json:"ip_whitelist"`
	RequiredClaims     []string   `json:"required_claims"`
}

// RoleSpecificInsight represents an insight tailored to a specific role
type RoleSpecificInsight struct {
	ID            string                 `json:"id"`
	RoleID        string                 `json:"role_id"`
	Category      string                 `json:"category"`
	Title         string                 `json:"title"`
	Content       string                 `json:"content"`
	Confidence    float64                `json:"confidence"`
	Priority      int                    `json:"priority"`
	SecurityLevel int                    `json:"security_level"`
	Tags          []string               `json:"tags"`
	ActionItems   []string               `json:"action_items"`
	References    []string               `json:"references"`
	Metadata      map[string]interface{} `json:"metadata"`
	GeneratedAt   time.Time              `json:"generated_at"`
	ExpiresAt     *time.Time             `json:"expires_at,omitempty"`
}

// NewRoleAwareProcessor creates a new role-aware processor
@@ -331,7 +330,7 @@ func NewRoleAwareProcessor(config *EngineConfig) *RoleAwareProcessor {
		accessController: NewAccessController(),
		auditLogger:      NewAuditLogger(),
		permissions:      NewPermissionMatrix(),
-		roleProfiles:     make(map[string]*RoleProfile),
+		roleProfiles:     make(map[string]*RoleBlueprint),
	}

	// Initialize default roles

@@ -342,10 +341,10 @@ func NewRoleAwareProcessor(config *EngineConfig) *RoleAwareProcessor {
// NewRoleManager creates a role manager with default roles
func NewRoleManager() *RoleManager {
	rm := &RoleManager{
		roles:        make(map[string]*Role),
		hierarchies:  make(map[string]*RoleHierarchy),
		capabilities: make(map[string]*RoleCapabilities),
		restrictions: make(map[string]*RoleRestrictions),
	}

	// Initialize with default roles
@@ -383,12 +382,15 @@ func (rap *RoleAwareProcessor) ProcessContextForRole(ctx context.Context, node *

	// Apply insights to node
	if len(insights) > 0 {
-		filteredNode.RoleSpecificInsights = insights
-		filteredNode.ProcessedForRole = roleID
+		if filteredNode.Metadata == nil {
+			filteredNode.Metadata = make(map[string]interface{})
+		}
+		filteredNode.Metadata["role_specific_insights"] = insights
+		filteredNode.Metadata["processed_for_role"] = roleID
	}

	// Log successful processing
	rap.auditLogger.logAccess(roleID, "context:process", node.Path, "success",
		fmt.Sprintf("processed with %d insights", len(insights)))

	return filteredNode, nil
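
An illustrative call site for the flow above (ctx and node assumed in scope; error handling is a sketch):

	processed, err := rap.ProcessContextForRole(ctx, node, "developer")
	if err != nil {
		return fmt.Errorf("role processing failed: %w", err) // denials are also audited via logAccess
	}
	insights := processed.Metadata["role_specific_insights"] // populated only when insights were generated
	_ = insights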
@@ -413,7 +415,7 @@ func (rap *RoleAwareProcessor) GenerateRoleSpecificInsights(ctx context.Context,
		return nil, err
	}

	rap.auditLogger.logAccess(roleID, "insight:generate", node.Path, "success",
		fmt.Sprintf("generated %d insights", len(insights)))

	return insights, nil
@@ -448,69 +450,69 @@ func (rap *RoleAwareProcessor) GetRoleCapabilities(roleID string) (*RoleCapabili
func (rap *RoleAwareProcessor) initializeDefaultRoles() {
	defaultRoles := []*Role{
		{
			ID:             "architect",
			Name:           "System Architect",
			Description:    "High-level system design and architecture decisions",
			SecurityLevel:  8,
			Capabilities:   []string{"architecture_design", "high_level_analysis", "strategic_planning"},
			Restrictions:   []string{"no_implementation_details", "no_low_level_code"},
			AccessPatterns: []string{"architecture/**", "design/**", "docs/**"},
			Priority:       1,
			IsActive:       true,
			CreatedAt:      time.Now(),
		},
		{
			ID:             "developer",
			Name:           "Software Developer",
			Description:    "Code implementation and development tasks",
			SecurityLevel:  6,
			Capabilities:   []string{"code_analysis", "implementation", "debugging", "testing"},
			Restrictions:   []string{"no_architecture_changes", "no_security_config"},
			AccessPatterns: []string{"src/**", "lib/**", "test/**"},
			Priority:       2,
			IsActive:       true,
			CreatedAt:      time.Now(),
		},
		{
			ID:             "security_analyst",
			Name:           "Security Analyst",
			Description:    "Security analysis and vulnerability assessment",
			SecurityLevel:  9,
			Capabilities:   []string{"security_analysis", "vulnerability_assessment", "compliance_check"},
			Restrictions:   []string{"no_code_modification"},
			AccessPatterns: []string{"**/*"},
			Priority:       1,
			IsActive:       true,
			CreatedAt:      time.Now(),
		},
		{
			ID:             "devops_engineer",
			Name:           "DevOps Engineer",
			Description:    "Infrastructure and deployment operations",
			SecurityLevel:  7,
			Capabilities:   []string{"infrastructure_analysis", "deployment", "monitoring", "ci_cd"},
			Restrictions:   []string{"no_business_logic"},
			AccessPatterns: []string{"infra/**", "deploy/**", "config/**", "docker/**"},
			Priority:       2,
			IsActive:       true,
			CreatedAt:      time.Now(),
		},
		{
			ID:             "qa_engineer",
			Name:           "Quality Assurance Engineer",
			Description:    "Quality assurance and testing",
			SecurityLevel:  5,
			Capabilities:   []string{"quality_analysis", "testing", "test_planning"},
			Restrictions:   []string{"no_production_access", "no_code_modification"},
			AccessPatterns: []string{"test/**", "spec/**", "qa/**"},
			Priority:       3,
			IsActive:       true,
			CreatedAt:      time.Now(),
		},
	}

	for _, role := range defaultRoles {
-		rap.roleProfiles[role.ID] = &RoleProfile{
+		rap.roleProfiles[role.ID] = &RoleBlueprint{
			Role:         role,
			Capabilities: rap.createDefaultCapabilities(role),
			Restrictions: rap.createDefaultRestrictions(role),
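
Note that the AccessPatterns above use ** segments, which the standard library's path.Match does not support, so a doublestar-capable matcher is implied. A conservative sketch that treats "dir/**" as a directory-prefix rule (the helper is hypothetical):

	func matchesAccessPattern(pattern, filePath string) bool {
		if pattern == "**/*" {
			return true // the security_analyst wildcard
		}
		if prefix, ok := strings.CutSuffix(pattern, "/**"); ok {
			return strings.HasPrefix(filePath, prefix+"/")
		}
		return pattern == filePath
	}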
@@ -540,23 +542,23 @@ func (rap *RoleAwareProcessor) createDefaultCapabilities(role *Role) *RoleCapabi
		baseCapabilities.ExecuteAccess = []string{"design_tools", "modeling"}
		baseCapabilities.InsightLevels = []string{"strategic", "architectural", "high_level"}
		baseCapabilities.SecurityScopes = []string{"public", "internal", "confidential"}

	case "developer":
		baseCapabilities.WriteAccess = []string{"src/**", "test/**"}
		baseCapabilities.ExecuteAccess = []string{"compile", "test", "debug"}
		baseCapabilities.InsightLevels = []string{"implementation", "code_quality", "performance"}

	case "security_analyst":
		baseCapabilities.ReadAccess = []string{"**/*"}
		baseCapabilities.InsightLevels = []string{"security", "vulnerability", "compliance"}
		baseCapabilities.SecurityScopes = []string{"public", "internal", "confidential", "secret"}
		baseCapabilities.DataClassifications = []string{"public", "internal", "confidential", "restricted"}

	case "devops_engineer":
		baseCapabilities.WriteAccess = []string{"infra/**", "deploy/**", "config/**"}
		baseCapabilities.ExecuteAccess = []string{"deploy", "configure", "monitor"}
		baseCapabilities.InsightLevels = []string{"infrastructure", "deployment", "monitoring"}

	case "qa_engineer":
		baseCapabilities.WriteAccess = []string{"test/**", "qa/**"}
		baseCapabilities.ExecuteAccess = []string{"test", "validate"}
@@ -587,21 +589,21 @@ func (rap *RoleAwareProcessor) createDefaultRestrictions(role *Role) *RoleRestri
		// Architects have fewer restrictions
		baseRestrictions.MaxContextSize = 50000
		baseRestrictions.MaxInsights = 100

	case "developer":
		baseRestrictions.ForbiddenPaths = append(baseRestrictions.ForbiddenPaths, "architecture/**", "security/**")
		baseRestrictions.ForbiddenTypes = []string{"security_config", "deployment_config"}

	case "security_analyst":
		// Security analysts have minimal path restrictions but keyword restrictions
		baseRestrictions.ForbiddenPaths = []string{"temp/**"}
		baseRestrictions.ForbiddenKeywords = []string{"password", "secret", "key"}
		baseRestrictions.MaxContextSize = 100000

	case "devops_engineer":
		baseRestrictions.ForbiddenPaths = append(baseRestrictions.ForbiddenPaths, "src/**")
		baseRestrictions.ForbiddenTypes = []string{"business_logic", "user_data"}

	case "qa_engineer":
		baseRestrictions.ForbiddenPaths = append(baseRestrictions.ForbiddenPaths, "src/**", "infra/**")
		baseRestrictions.ForbiddenTypes = []string{"production_config", "security_config"}
@@ -615,10 +617,10 @@ func (rap *RoleAwareProcessor) createDefaultPermissions(role *Role) *RolePermiss
	return &RolePermissions{
		RoleID: role.ID,
		ContextAccess: &ContextAccessRights{
			ReadLevel:    role.SecurityLevel,
			WriteLevel:   role.SecurityLevel - 2,
			AllowedTypes: []string{"code", "documentation", "configuration"},
			SizeLimit:    1000000,
		},
		AnalysisAccess: &AnalysisAccessRights{
			AllowedAnalysisTypes: role.Capabilities,
@@ -627,10 +629,10 @@ func (rap *RoleAwareProcessor) createDefaultPermissions(role *Role) *RolePermiss
			ResourceLimit: 100,
		},
		InsightAccess: &InsightAccessRights{
			GenerationLevel:     role.SecurityLevel,
			AccessLevel:         role.SecurityLevel,
			ConfidenceThreshold: 0.5,
			MaxInsights:         50,
		},
		SystemAccess: &SystemAccessRights{
			AdminAccess: role.SecurityLevel >= 8,
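
Worked through for the defaults above: the architect role (SecurityLevel 8) resolves to ReadLevel 8, WriteLevel 6, and AdminAccess true, while qa_engineer (SecurityLevel 5) gets ReadLevel 5, WriteLevel 3, and no admin access.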
@@ -660,26 +662,26 @@ func (rap *RoleAwareProcessor) createDefaultInsightConfig(role *Role) *RoleInsig
			"scalability":  0.9,
		}
		config.MaxInsights = 100

	case "developer":
		config.EnabledGenerators = []string{"code_insights", "implementation_suggestions", "bug_detection"}
		config.CategoryWeights = map[string]float64{
			"code_quality":   1.0,
			"implementation": 0.9,
			"bugs":           0.8,
			"performance":    0.6,
		}

case "security_analyst":
|
||||
config.EnabledGenerators = []string{"security_insights", "vulnerability_analysis", "compliance_check"}
|
||||
config.CategoryWeights = map[string]float64{
|
||||
"security": 1.0,
|
||||
"security": 1.0,
|
||||
"vulnerabilities": 1.0,
|
||||
"compliance": 0.9,
|
||||
"privacy": 0.8,
|
||||
"compliance": 0.9,
|
||||
"privacy": 0.8,
|
||||
}
|
||||
config.MaxInsights = 200
|
||||
|
||||
|
||||
case "devops_engineer":
|
||||
config.EnabledGenerators = []string{"infrastructure_insights", "deployment_analysis", "monitoring_suggestions"}
|
||||
config.CategoryWeights = map[string]float64{
|
||||
@@ -688,7 +690,7 @@ func (rap *RoleAwareProcessor) createDefaultInsightConfig(role *Role) *RoleInsig
|
||||
"monitoring": 0.8,
|
||||
"automation": 0.7,
|
||||
}
|
||||
|
||||
|
||||
case "qa_engineer":
|
||||
config.EnabledGenerators = []string{"quality_insights", "test_suggestions", "validation_analysis"}
|
||||
config.CategoryWeights = map[string]float64{
|
||||
@@ -751,7 +753,7 @@ func NewSecurityFilter() *SecurityFilter {
|
||||
"top_secret": 10,
|
||||
},
|
||||
contentFilters: make(map[string]*ContentFilter),
|
||||
accessMatrix: &AccessMatrix{
|
||||
accessMatrix: &AccessMatrix{
|
||||
Rules: make(map[string]*AccessRule),
|
||||
DefaultDeny: true,
|
||||
LastUpdated: time.Now(),
|
||||
@@ -765,7 +767,7 @@ func (sf *SecurityFilter) filterForRole(node *slurpContext.ContextNode, role *Ro
	// Apply content filtering based on role security level
	filtered.Summary = sf.filterContent(node.Summary, role)
	filtered.Purpose = sf.filterContent(node.Purpose, role)

	// Filter insights based on role access level
	filteredInsights := []string{}
	for _, insight := range node.Insights {
@@ -816,7 +818,7 @@ func (sf *SecurityFilter) filterContent(content string, role *Role) string {
func (sf *SecurityFilter) canAccessInsight(insight string, role *Role) bool {
	// Check if role can access this type of insight
	lowerInsight := strings.ToLower(insight)

	// Security analysts can see all insights
	if role.ID == "security_analyst" {
		return true
@@ -849,20 +851,20 @@ func (sf *SecurityFilter) canAccessInsight(insight string, role *Role) bool {

func (sf *SecurityFilter) filterTechnologies(technologies []string, role *Role) []string {
	filtered := []string{}

	for _, tech := range technologies {
		if sf.canAccessTechnology(tech, role) {
			filtered = append(filtered, tech)
		}
	}

	return filtered
}

func (sf *SecurityFilter) canAccessTechnology(technology string, role *Role) bool {
	// Role-specific technology access rules
	lowerTech := strings.ToLower(technology)

	switch role.ID {
	case "qa_engineer":
		// QA engineers shouldn't see infrastructure technologies
@@ -881,26 +883,26 @@ func (sf *SecurityFilter) canAccessTechnology(technology string, role *Role) boo
			}
		}
	}

	return true
}

func (sf *SecurityFilter) filterTags(tags []string, role *Role) []string {
	filtered := []string{}

	for _, tag := range tags {
		if sf.canAccessTag(tag, role) {
			filtered = append(filtered, tag)
		}
	}

	return filtered
}

func (sf *SecurityFilter) canAccessTag(tag string, role *Role) bool {
	// Simple tag filtering based on role
	lowerTag := strings.ToLower(tag)

	// Security-related tags only for security analysts and architects
	securityTags := []string{"security", "vulnerability", "encryption", "authentication"}
	for _, secTag := range securityTags {
@@ -908,7 +910,7 @@ func (sf *SecurityFilter) canAccessTag(tag string, role *Role) bool {
			return false
		}
	}

	return true
}

@@ -968,7 +970,7 @@ func (ig *InsightGenerator) generateForRole(ctx context.Context, node *slurpCont

func (ig *InsightGenerator) applyRoleFilters(insights []*RoleSpecificInsight, role *Role) []*RoleSpecificInsight {
	filtered := []*RoleSpecificInsight{}

	for _, insight := range insights {
		// Check security level
		if insight.SecurityLevel > role.SecurityLevel {
@@ -1174,6 +1176,7 @@ func (al *AuditLogger) GetAuditLog(limit int) []*AuditEntry {
// These would be fully implemented with sophisticated logic in production

type ArchitectInsightGenerator struct{}

func NewArchitectInsightGenerator() *ArchitectInsightGenerator { return &ArchitectInsightGenerator{} }
func (aig *ArchitectInsightGenerator) GenerateInsights(ctx context.Context, node *slurpContext.ContextNode, role *Role) ([]*RoleSpecificInsight, error) {
	return []*RoleSpecificInsight{
@@ -1191,10 +1194,15 @@ func (aig *ArchitectInsightGenerator) GenerateInsights(ctx context.Context, node
	}, nil
}
func (aig *ArchitectInsightGenerator) GetSupportedRoles() []string { return []string{"architect"} }
-func (aig *ArchitectInsightGenerator) GetInsightTypes() []string { return []string{"architecture", "design", "patterns"} }
-func (aig *ArchitectInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error { return nil }
+func (aig *ArchitectInsightGenerator) GetInsightTypes() []string {
+	return []string{"architecture", "design", "patterns"}
+}
+func (aig *ArchitectInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error {
+	return nil
+}

type DeveloperInsightGenerator struct{}

func NewDeveloperInsightGenerator() *DeveloperInsightGenerator { return &DeveloperInsightGenerator{} }
func (dig *DeveloperInsightGenerator) GenerateInsights(ctx context.Context, node *slurpContext.ContextNode, role *Role) ([]*RoleSpecificInsight, error) {
	return []*RoleSpecificInsight{
@@ -1212,10 +1220,15 @@ func (dig *DeveloperInsightGenerator) GenerateInsights(ctx context.Context, node
	}, nil
}
func (dig *DeveloperInsightGenerator) GetSupportedRoles() []string { return []string{"developer"} }
-func (dig *DeveloperInsightGenerator) GetInsightTypes() []string { return []string{"code_quality", "implementation", "bugs"} }
-func (dig *DeveloperInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error { return nil }
+func (dig *DeveloperInsightGenerator) GetInsightTypes() []string {
+	return []string{"code_quality", "implementation", "bugs"}
+}
+func (dig *DeveloperInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error {
+	return nil
+}

type SecurityInsightGenerator struct{}

func NewSecurityInsightGenerator() *SecurityInsightGenerator { return &SecurityInsightGenerator{} }
func (sig *SecurityInsightGenerator) GenerateInsights(ctx context.Context, node *slurpContext.ContextNode, role *Role) ([]*RoleSpecificInsight, error) {
	return []*RoleSpecificInsight{
@@ -1232,11 +1245,18 @@ func (sig *SecurityInsightGenerator) GenerateInsights(ctx context.Context, node
		},
	}, nil
}
-func (sig *SecurityInsightGenerator) GetSupportedRoles() []string { return []string{"security_analyst"} }
-func (sig *SecurityInsightGenerator) GetInsightTypes() []string { return []string{"security", "vulnerability", "compliance"} }
-func (sig *SecurityInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error { return nil }
+func (sig *SecurityInsightGenerator) GetSupportedRoles() []string {
+	return []string{"security_analyst"}
+}
+func (sig *SecurityInsightGenerator) GetInsightTypes() []string {
+	return []string{"security", "vulnerability", "compliance"}
+}
+func (sig *SecurityInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error {
+	return nil
+}

type DevOpsInsightGenerator struct{}

func NewDevOpsInsightGenerator() *DevOpsInsightGenerator { return &DevOpsInsightGenerator{} }
func (doig *DevOpsInsightGenerator) GenerateInsights(ctx context.Context, node *slurpContext.ContextNode, role *Role) ([]*RoleSpecificInsight, error) {
	return []*RoleSpecificInsight{
@@ -1254,10 +1274,15 @@ func (doig *DevOpsInsightGenerator) GenerateInsights(ctx context.Context, node *
	}, nil
}
func (doig *DevOpsInsightGenerator) GetSupportedRoles() []string { return []string{"devops_engineer"} }
-func (doig *DevOpsInsightGenerator) GetInsightTypes() []string { return []string{"infrastructure", "deployment", "monitoring"} }
-func (doig *DevOpsInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error { return nil }
+func (doig *DevOpsInsightGenerator) GetInsightTypes() []string {
+	return []string{"infrastructure", "deployment", "monitoring"}
+}
+func (doig *DevOpsInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error {
+	return nil
+}

type QAInsightGenerator struct{}

func NewQAInsightGenerator() *QAInsightGenerator { return &QAInsightGenerator{} }
func (qaig *QAInsightGenerator) GenerateInsights(ctx context.Context, node *slurpContext.ContextNode, role *Role) ([]*RoleSpecificInsight, error) {
	return []*RoleSpecificInsight{
@@ -1275,5 +1300,9 @@ func (qaig *QAInsightGenerator) GenerateInsights(ctx context.Context, node *slur
	}, nil
}
func (qaig *QAInsightGenerator) GetSupportedRoles() []string { return []string{"qa_engineer"} }
-func (qaig *QAInsightGenerator) GetInsightTypes() []string { return []string{"quality", "testing", "validation"} }
-func (qaig *QAInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error { return nil }
+func (qaig *QAInsightGenerator) GetInsightTypes() []string {
+	return []string{"quality", "testing", "validation"}
+}
+func (qaig *QAInsightGenerator) ValidateContext(node *slurpContext.ContextNode, role *Role) error {
+	return nil
+}
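
Taken together, the five stub generators would presumably be registered against the RoleInsightGenerator interface keyed by the role IDs they report; a wiring sketch (the map shape is assumed, not shown in the diff):

	generators := map[string]RoleInsightGenerator{
		"architect":        NewArchitectInsightGenerator(),
		"developer":        NewDeveloperInsightGenerator(),
		"security_analyst": NewSecurityInsightGenerator(),
		"devops_engineer":  NewDevOpsInsightGenerator(),
		"qa_engineer":      NewQAInsightGenerator(),
	}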

@@ -6,236 +6,236 @@ import (

// FileMetadata represents metadata extracted from file system
type FileMetadata struct {
	Path        string    `json:"path"`        // File path
	Size        int64     `json:"size"`        // File size in bytes
	ModTime     time.Time `json:"mod_time"`    // Last modification time
	Mode        uint32    `json:"mode"`        // File mode
	IsDir       bool      `json:"is_dir"`      // Whether it's a directory
	Extension   string    `json:"extension"`   // File extension
	MimeType    string    `json:"mime_type"`   // MIME type
	Hash        string    `json:"hash"`        // Content hash
	Permissions string    `json:"permissions"` // File permissions
}

// StructureAnalysis represents analysis of code structure
type StructureAnalysis struct {
	Architecture   string             `json:"architecture"`    // Architectural pattern
	Patterns       []string           `json:"patterns"`        // Design patterns used
	Components     []*Component       `json:"components"`      // Code components
	Relationships  []*Relationship    `json:"relationships"`   // Component relationships
	Complexity     *ComplexityMetrics `json:"complexity"`      // Complexity metrics
	QualityMetrics *QualityMetrics    `json:"quality_metrics"` // Code quality metrics
	TestCoverage   float64            `json:"test_coverage"`   // Test coverage percentage
	Documentation  *DocMetrics        `json:"documentation"`   // Documentation metrics
	AnalyzedAt     time.Time          `json:"analyzed_at"`     // When analysis was performed
}

// Component represents a code component
type Component struct {
	Name         string                 `json:"name"`         // Component name
	Type         string                 `json:"type"`         // Component type (class, function, etc.)
	Purpose      string                 `json:"purpose"`      // Component purpose
	Visibility   string                 `json:"visibility"`   // Visibility (public, private, etc.)
	Lines        int                    `json:"lines"`        // Lines of code
	Complexity   int                    `json:"complexity"`   // Cyclomatic complexity
	Dependencies []string               `json:"dependencies"` // Dependencies
	Metadata     map[string]interface{} `json:"metadata"`     // Additional metadata
}

// Relationship represents a relationship between components
type Relationship struct {
	From        string  `json:"from"`        // Source component
	To          string  `json:"to"`          // Target component
	Type        string  `json:"type"`        // Relationship type
	Strength    float64 `json:"strength"`    // Relationship strength (0-1)
	Direction   string  `json:"direction"`   // Direction (unidirectional, bidirectional)
	Description string  `json:"description"` // Relationship description
}

// ComplexityMetrics represents code complexity metrics
type ComplexityMetrics struct {
	Cyclomatic      float64 `json:"cyclomatic"`      // Cyclomatic complexity
	Cognitive       float64 `json:"cognitive"`       // Cognitive complexity
	Halstead        float64 `json:"halstead"`        // Halstead complexity
	Maintainability float64 `json:"maintainability"` // Maintainability index
	TechnicalDebt   float64 `json:"technical_debt"`  // Technical debt estimate
}

// QualityMetrics represents code quality metrics
type QualityMetrics struct {
	Readability float64 `json:"readability"` // Readability score
	Testability float64 `json:"testability"` // Testability score
	Reusability float64 `json:"reusability"` // Reusability score
	Reliability float64 `json:"reliability"` // Reliability score
	Security    float64 `json:"security"`    // Security score
	Performance float64 `json:"performance"` // Performance score
	Duplication float64 `json:"duplication"` // Code duplication percentage
	Consistency float64 `json:"consistency"` // Code consistency score
}

// DocMetrics represents documentation metrics
type DocMetrics struct {
	Coverage       float64 `json:"coverage"`         // Documentation coverage
	Quality        float64 `json:"quality"`          // Documentation quality
	CommentRatio   float64 `json:"comment_ratio"`    // Comment to code ratio
	APIDocCoverage float64 `json:"api_doc_coverage"` // API documentation coverage
	ExampleCount   int     `json:"example_count"`    // Number of examples
	TODOCount      int     `json:"todo_count"`       // Number of TODO comments
	FIXMECount     int     `json:"fixme_count"`      // Number of FIXME comments
}

// DirectoryStructure represents analysis of directory organization
type DirectoryStructure struct {
	Path           string            `json:"path"`            // Directory path
	FileCount      int               `json:"file_count"`      // Number of files
	DirectoryCount int               `json:"directory_count"` // Number of subdirectories
	TotalSize      int64             `json:"total_size"`      // Total size in bytes
	FileTypes      map[string]int    `json:"file_types"`      // File type distribution
	Languages      map[string]int    `json:"languages"`       // Language distribution
	Organization   *OrganizationInfo `json:"organization"`    // Organization information
	Conventions    *ConventionInfo   `json:"conventions"`     // Convention information
	Dependencies   []string          `json:"dependencies"`    // Directory dependencies
	Purpose        string            `json:"purpose"`         // Directory purpose
	Architecture   string            `json:"architecture"`    // Architectural pattern
	AnalyzedAt     time.Time         `json:"analyzed_at"`     // When analysis was performed
}

// OrganizationInfo represents directory organization information
type OrganizationInfo struct {
	Pattern     string                 `json:"pattern"`     // Organization pattern
	Consistency float64                `json:"consistency"` // Organization consistency
	Depth       int                    `json:"depth"`       // Directory depth
	FanOut      int                    `json:"fan_out"`     // Average fan-out
	Modularity  float64                `json:"modularity"`  // Modularity score
	Cohesion    float64                `json:"cohesion"`    // Cohesion score
	Coupling    float64                `json:"coupling"`    // Coupling score
	Metadata    map[string]interface{} `json:"metadata"`    // Additional metadata
}

// ConventionInfo represents naming and organizational conventions
type ConventionInfo struct {
	NamingStyle     string       `json:"naming_style"`     // Naming convention style
	FileNaming      string       `json:"file_naming"`      // File naming pattern
	DirectoryNaming string       `json:"directory_naming"` // Directory naming pattern
	Consistency     float64      `json:"consistency"`      // Convention consistency
	Violations      []*Violation `json:"violations"`       // Convention violations
	Standards       []string     `json:"standards"`        // Applied standards
}

// Violation represents a convention violation
type Violation struct {
	Type       string `json:"type"`       // Violation type
	Path       string `json:"path"`       // Violating path
	Expected   string `json:"expected"`   // Expected format
	Actual     string `json:"actual"`     // Actual format
	Severity   string `json:"severity"`   // Violation severity
	Suggestion string `json:"suggestion"` // Suggested fix
}

// ConventionAnalysis represents analysis of naming and organizational conventions
type ConventionAnalysis struct {
	NamingPatterns         []*NamingPattern          `json:"naming_patterns"`         // Detected naming patterns
	OrganizationalPatterns []*OrganizationalPattern  `json:"organizational_patterns"` // Organizational patterns
	Consistency            float64                   `json:"consistency"`             // Overall consistency score
	Violations             []*Violation              `json:"violations"`              // Convention violations
-	Recommendations        []*Recommendation         `json:"recommendations"`         // Improvement recommendations
+	Recommendations        []*BasicRecommendation    `json:"recommendations"`         // Improvement recommendations
	AppliedStandards       []string                  `json:"applied_standards"`       // Applied coding standards
	AnalyzedAt             time.Time                 `json:"analyzed_at"`             // When analysis was performed
}

// RelationshipAnalysis represents analysis of directory relationships
type RelationshipAnalysis struct {
	Dependencies       []*DirectoryDependency `json:"dependencies"`        // Directory dependencies
	Relationships      []*DirectoryRelation   `json:"relationships"`       // Directory relationships
	CouplingMetrics    *CouplingMetrics       `json:"coupling_metrics"`    // Coupling metrics
	ModularityScore    float64                `json:"modularity_score"`    // Modularity score
	ArchitecturalStyle string                 `json:"architectural_style"` // Architectural style
	AnalyzedAt         time.Time              `json:"analyzed_at"`         // When analysis was performed
}

// DirectoryDependency represents a dependency between directories
type DirectoryDependency struct {
	From      string  `json:"from"`       // Source directory
	To        string  `json:"to"`         // Target directory
	Type      string  `json:"type"`       // Dependency type
	Strength  float64 `json:"strength"`   // Dependency strength
	Reason    string  `json:"reason"`     // Reason for dependency
	FileCount int     `json:"file_count"` // Number of files involved
}

// DirectoryRelation represents a relationship between directories
type DirectoryRelation struct {
	Directory1    string  `json:"directory1"`    // First directory
	Directory2    string  `json:"directory2"`    // Second directory
	Type          string  `json:"type"`          // Relation type
	Strength      float64 `json:"strength"`      // Relation strength
	Description   string  `json:"description"`   // Relation description
	Bidirectional bool    `json:"bidirectional"` // Whether relation is bidirectional
}

// CouplingMetrics represents coupling metrics between directories
type CouplingMetrics struct {
	AfferentCoupling float64 `json:"afferent_coupling"`  // Afferent coupling
	EfferentCoupling float64 `json:"efferent_coupling"`  // Efferent coupling
	Instability      float64 `json:"instability"`        // Instability metric
	Abstractness     float64 `json:"abstractness"`       // Abstractness metric
	DistanceFromMain float64 `json:"distance_from_main"` // Distance from main sequence
}
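
These fields follow Robert Martin's package metrics, so the derived values are conventionally computed as I = Ce / (Ca + Ce) and D = |A + I - 1|. A sketch of that derivation (the helper is hypothetical; assumes "math" is imported):

	func deriveCoupling(afferent, efferent, abstractness float64) CouplingMetrics {
		m := CouplingMetrics{
			AfferentCoupling: afferent,
			EfferentCoupling: efferent,
			Abstractness:     abstractness,
		}
		if total := afferent + efferent; total > 0 {
			m.Instability = efferent / total // I = Ce / (Ca + Ce)
		}
		m.DistanceFromMain = math.Abs(abstractness + m.Instability - 1) // D = |A + I - 1|
		return m
	}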

// Pattern represents a detected pattern in code or organization
type Pattern struct {
	ID              string                 `json:"id"`               // Pattern identifier
	Name            string                 `json:"name"`             // Pattern name
	Type            string                 `json:"type"`             // Pattern type
	Description     string                 `json:"description"`      // Pattern description
	Confidence      float64                `json:"confidence"`       // Detection confidence
	Frequency       int                    `json:"frequency"`        // Pattern frequency
	Examples        []string               `json:"examples"`         // Example instances
	Criteria        map[string]interface{} `json:"criteria"`         // Pattern criteria
	Benefits        []string               `json:"benefits"`         // Pattern benefits
	Drawbacks       []string               `json:"drawbacks"`        // Pattern drawbacks
	ApplicableRoles []string               `json:"applicable_roles"` // Roles that benefit from this pattern
	DetectedAt      time.Time              `json:"detected_at"`      // When pattern was detected
}

// CodePattern represents a code-specific pattern
type CodePattern struct {
	Pattern                      // Embedded base pattern
	Language    string           `json:"language"`    // Programming language
	Framework   string           `json:"framework"`   // Framework context
	Complexity  float64          `json:"complexity"`  // Pattern complexity
	Usage       *UsagePattern    `json:"usage"`       // Usage pattern
	Performance *PerformanceInfo `json:"performance"` // Performance characteristics
}

// NamingPattern represents a naming convention pattern
type NamingPattern struct {
	Pattern           // Embedded base pattern
	Convention string `json:"convention"` // Naming convention
	Scope      string `json:"scope"`      // Pattern scope
	Regex      string `json:"regex"`      // Regex pattern
	CaseStyle  string `json:"case_style"` // Case style (camelCase, snake_case, etc.)
	Prefix     string `json:"prefix"`     // Common prefix
	Suffix     string `json:"suffix"`     // Common suffix
}

// OrganizationalPattern represents an organizational pattern
type OrganizationalPattern struct {
	Pattern             // Embedded base pattern
	Structure   string  `json:"structure"`   // Organizational structure
	Depth       int     `json:"depth"`       // Typical depth
	FanOut      int     `json:"fan_out"`     // Typical fan-out
	Modularity  float64 `json:"modularity"`  // Modularity characteristics
	Scalability string  `json:"scalability"` // Scalability characteristics
}

// UsagePattern represents how a pattern is typically used
type UsagePattern struct {
	Frequency     string            `json:"frequency"`     // Usage frequency
	Context       []string          `json:"context"`       // Usage contexts
	Prerequisites []string          `json:"prerequisites"` // Prerequisites
	Alternatives  []string          `json:"alternatives"`  // Alternative patterns
	Compatibility map[string]string `json:"compatibility"` // Compatibility with other patterns
}
|
||||
|
||||
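A minimal sketch of how these pattern types compose, assuming only the fields visible in this diff; every value here is illustrative, and omitted Pattern fields simply take their zero values:

cp := &CodePattern{
    Pattern: Pattern{
        Confidence: 0.85,
        Frequency:  12,
        Examples:   []string{"internal/worker/pool.go"},
        DetectedAt: time.Now(),
    },
    Language:   "go",
    Complexity: 3.2,
    Usage: &UsagePattern{
        Frequency: "common",
        Context:   []string{"concurrency"},
    },
}
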
// PerformanceInfo represents performance characteristics of a pattern
@@ -249,12 +249,12 @@ type PerformanceInfo struct {

// PatternMatch represents a match between context and a pattern
type PatternMatch struct {
    PatternID string `json:"pattern_id"` // Pattern identifier
    MatchScore float64 `json:"match_score"` // Match score (0-1)
    Confidence float64 `json:"confidence"` // Match confidence
    PatternID string `json:"pattern_id"` // Pattern identifier
    MatchScore float64 `json:"match_score"` // Match score (0-1)
    Confidence float64 `json:"confidence"` // Match confidence
    MatchedFields []string `json:"matched_fields"` // Fields that matched
    Explanation string `json:"explanation"` // Match explanation
    Suggestions []string `json:"suggestions"` // Improvement suggestions
    Explanation string `json:"explanation"` // Match explanation
    Suggestions []string `json:"suggestions"` // Improvement suggestions
}

// ValidationResult represents context validation results
@@ -269,12 +269,12 @@ type ValidationResult struct {

// ValidationIssue represents a validation issue
type ValidationIssue struct {
    Type string `json:"type"` // Issue type
    Severity string `json:"severity"` // Issue severity
    Message string `json:"message"` // Issue message
    Field string `json:"field"` // Affected field
    Suggestion string `json:"suggestion"` // Suggested fix
    Impact float64 `json:"impact"` // Impact score
    Type string `json:"type"` // Issue type
    Severity string `json:"severity"` // Issue severity
    Message string `json:"message"` // Issue message
    Field string `json:"field"` // Affected field
    Suggestion string `json:"suggestion"` // Suggested fix
    Impact float64 `json:"impact"` // Impact score
}

// Suggestion represents an improvement suggestion
@@ -289,61 +289,61 @@ type Suggestion struct {
}

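For reference, a hypothetical ValidationIssue built from the fields above; the values are illustrative only:

issue := &ValidationIssue{
    Type:       "missing_field",
    Severity:   "warning",
    Message:    "Summary is empty",
    Field:      "summary",
    Suggestion: "Generate a summary before indexing",
    Impact:     0.3,
}
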
// Recommendation represents an improvement recommendation
type Recommendation struct {
    Type string `json:"type"` // Recommendation type
    Title string `json:"title"` // Recommendation title
    Description string `json:"description"` // Detailed description
    Priority int `json:"priority"` // Priority level
    Effort string `json:"effort"` // Effort required
    Impact string `json:"impact"` // Expected impact
    Steps []string `json:"steps"` // Implementation steps
    Resources []string `json:"resources"` // Required resources
    Metadata map[string]interface{} `json:"metadata"` // Additional metadata
type BasicRecommendation struct {
    Type string `json:"type"` // Recommendation type
    Title string `json:"title"` // Recommendation title
    Description string `json:"description"` // Detailed description
    Priority int `json:"priority"` // Priority level
    Effort string `json:"effort"` // Effort required
    Impact string `json:"impact"` // Expected impact
    Steps []string `json:"steps"` // Implementation steps
    Resources []string `json:"resources"` // Required resources
    Metadata map[string]interface{} `json:"metadata"` // Additional metadata
}

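A minimal sketch of the renamed type in use; it has the same shape as the old Recommendation, so call sites only need the new name. Values are illustrative:

rec := &BasicRecommendation{
    Type:        "naming",
    Title:       "Normalize handler file names",
    Description: "Rename mixed-case handler files to snake_case for consistency.",
    Priority:    2,
    Effort:      "low",
    Impact:      "medium",
    Steps:       []string{"rename files", "update imports"},
}
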
// RAGResponse represents a response from the RAG system
type RAGResponse struct {
    Query string `json:"query"` // Original query
    Answer string `json:"answer"` // Generated answer
    Sources []*RAGSource `json:"sources"` // Source documents
    Confidence float64 `json:"confidence"` // Response confidence
    Context map[string]interface{} `json:"context"` // Additional context
    ProcessedAt time.Time `json:"processed_at"` // When processed
    Query string `json:"query"` // Original query
    Answer string `json:"answer"` // Generated answer
    Sources []*RAGSource `json:"sources"` // Source documents
    Confidence float64 `json:"confidence"` // Response confidence
    Context map[string]interface{} `json:"context"` // Additional context
    ProcessedAt time.Time `json:"processed_at"` // When processed
}

// RAGSource represents a source document from RAG system
type RAGSource struct {
    ID string `json:"id"` // Source identifier
    Title string `json:"title"` // Source title
    Content string `json:"content"` // Source content excerpt
    Score float64 `json:"score"` // Relevance score
    Metadata map[string]interface{} `json:"metadata"` // Source metadata
    URL string `json:"url"` // Source URL if available
    ID string `json:"id"` // Source identifier
    Title string `json:"title"` // Source title
    Content string `json:"content"` // Source content excerpt
    Score float64 `json:"score"` // Relevance score
    Metadata map[string]interface{} `json:"metadata"` // Source metadata
    URL string `json:"url"` // Source URL if available
}

// RAGResult represents a result from RAG similarity search
type RAGResult struct {
    ID string `json:"id"` // Result identifier
    Content string `json:"content"` // Content
    Score float64 `json:"score"` // Similarity score
    Metadata map[string]interface{} `json:"metadata"` // Result metadata
    Highlights []string `json:"highlights"` // Content highlights
    ID string `json:"id"` // Result identifier
    Content string `json:"content"` // Content
    Score float64 `json:"score"` // Similarity score
    Metadata map[string]interface{} `json:"metadata"` // Result metadata
    Highlights []string `json:"highlights"` // Content highlights
}

// RAGUpdate represents an update to the RAG index
type RAGUpdate struct {
    ID string `json:"id"` // Document identifier
    Content string `json:"content"` // Document content
    Metadata map[string]interface{} `json:"metadata"` // Document metadata
    Operation string `json:"operation"` // Operation type (add, update, delete)
    ID string `json:"id"` // Document identifier
    Content string `json:"content"` // Document content
    Metadata map[string]interface{} `json:"metadata"` // Document metadata
    Operation string `json:"operation"` // Operation type (add, update, delete)
}

// RAGStatistics represents RAG system statistics
type RAGStatistics struct {
    TotalDocuments int64 `json:"total_documents"` // Total indexed documents
    TotalQueries int64 `json:"total_queries"` // Total queries processed
    TotalDocuments int64 `json:"total_documents"` // Total indexed documents
    TotalQueries int64 `json:"total_queries"` // Total queries processed
    AverageQueryTime time.Duration `json:"average_query_time"` // Average query time
    IndexSize int64 `json:"index_size"` // Index size in bytes
    LastIndexUpdate time.Time `json:"last_index_update"` // When index was last updated
    ErrorRate float64 `json:"error_rate"` // Error rate
}
    IndexSize int64 `json:"index_size"` // Index size in bytes
    LastIndexUpdate time.Time `json:"last_index_update"` // When index was last updated
    ErrorRate float64 `json:"error_rate"` // Error rate
}

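A minimal sketch tying the RAG types together, assuming only the structs above; the identifiers and values are illustrative, and no client that consumes them appears in this diff:

update := &RAGUpdate{
    ID:        "doc-42",
    Content:   "Directory analyzer conventions",
    Metadata:  map[string]interface{}{"source": "slurp"},
    Operation: "add", // per the field comment: add, update, or delete
}

resp := &RAGResponse{
    Query:       "where are analyzer conventions documented?",
    Answer:      "See the directory analyzer package.",
    Sources:     []*RAGSource{{ID: update.ID, Score: 0.92}},
    Confidence:  0.9,
    ProcessedAt: time.Now(),
}
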
@@ -227,7 +227,7 @@ func (cau *ContentAnalysisUtils) extractGenericIdentifiers(content string) (func
// CalculateComplexity calculates code complexity based on various metrics
func (cau *ContentAnalysisUtils) CalculateComplexity(content, language string) float64 {
    complexity := 0.0

    // Lines of code (basic metric)
    lines := strings.Split(content, "\n")
    nonEmptyLines := 0
@@ -236,26 +236,26 @@ func (cau *ContentAnalysisUtils) CalculateComplexity(content, language string) f
            nonEmptyLines++
        }
    }

    // Base complexity from lines of code
    complexity += float64(nonEmptyLines) * 0.1

    // Control flow complexity (if, for, while, switch, etc.)
    controlFlowPatterns := []*regexp.Regexp{
        regexp.MustCompile(`\b(?:if|for|while|switch|case)\b`),
        regexp.MustCompile(`\b(?:try|catch|finally)\b`),
        regexp.MustCompile(`\?\s*.*\s*:`), // ternary operator
    }

    for _, pattern := range controlFlowPatterns {
        matches := pattern.FindAllString(content, -1)
        complexity += float64(len(matches)) * 0.5
    }

    // Function complexity
    functions, _, _ := cau.ExtractIdentifiers(content, language)
    complexity += float64(len(functions)) * 0.3

    // Nesting level (simple approximation)
    maxNesting := 0
    currentNesting := 0
@@ -269,7 +269,7 @@ func (cau *ContentAnalysisUtils) CalculateComplexity(content, language string) f
        }
    }
    complexity += float64(maxNesting) * 0.2

    // Normalize to 0-10 scale
    return math.Min(10.0, complexity/10.0)
}
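
A quick sanity check of the weights above: a file with 40 non-empty lines, 6 control-flow matches, 3 extracted functions, and a maximum nesting depth of 2 accumulates 40*0.1 + 6*0.5 + 3*0.3 + 2*0.2 = 8.3 raw points, which the final step normalizes to min(10.0, 8.3/10.0) = 0.83 on the 0-10 scale; a file needs 100+ raw points to saturate the scale.
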
@@ -279,66 +279,66 @@ func (cau *ContentAnalysisUtils) DetectTechnologies(content, filename string) []
    technologies := []string{}
    lowerContent := strings.ToLower(content)
    ext := strings.ToLower(filepath.Ext(filename))

    // Language detection
    languageMap := map[string][]string{
        ".go": {"go", "golang"},
        ".py": {"python"},
        ".js": {"javascript", "node.js"},
        ".jsx": {"javascript", "react", "jsx"},
        ".ts": {"typescript"},
        ".tsx": {"typescript", "react", "jsx"},
        ".java": {"java"},
        ".kt": {"kotlin"},
        ".rs": {"rust"},
        ".cpp": {"c++"},
        ".c": {"c"},
        ".cs": {"c#", ".net"},
        ".php": {"php"},
        ".rb": {"ruby"},
        ".go": {"go", "golang"},
        ".py": {"python"},
        ".js": {"javascript", "node.js"},
        ".jsx": {"javascript", "react", "jsx"},
        ".ts": {"typescript"},
        ".tsx": {"typescript", "react", "jsx"},
        ".java": {"java"},
        ".kt": {"kotlin"},
        ".rs": {"rust"},
        ".cpp": {"c++"},
        ".c": {"c"},
        ".cs": {"c#", ".net"},
        ".php": {"php"},
        ".rb": {"ruby"},
        ".swift": {"swift"},
        ".scala": {"scala"},
        ".clj": {"clojure"},
        ".hs": {"haskell"},
        ".ml": {"ocaml"},
        ".clj": {"clojure"},
        ".hs": {"haskell"},
        ".ml": {"ocaml"},
    }

    if langs, exists := languageMap[ext]; exists {
        technologies = append(technologies, langs...)
    }

    // Framework and library detection
    frameworkPatterns := map[string][]string{
        "react": {"import.*react", "from [\"']react[\"']", "<.*/>", "jsx"},
        "vue": {"import.*vue", "from [\"']vue[\"']", "<template>", "vue"},
        "angular": {"import.*@angular", "from [\"']@angular", "ngmodule", "component"},
        "express": {"import.*express", "require.*express", "app.get", "app.post"},
        "django": {"from django", "import django", "django.db", "models.model"},
        "flask": {"from flask", "import flask", "@app.route", "flask.request"},
        "spring": {"@springboot", "@controller", "@service", "@repository"},
        "hibernate": {"@entity", "@table", "@column", "hibernate"},
        "jquery": {"\\$\\(", "jquery"},
        "bootstrap": {"bootstrap", "btn-", "col-", "row"},
        "docker": {"dockerfile", "docker-compose", "from.*:", "run.*"},
        "kubernetes": {"apiversion:", "kind:", "metadata:", "spec:"},
        "terraform": {"\\.tf$", "resource \"", "provider \"", "terraform"},
        "ansible": {"\\.yml$", "hosts:", "tasks:", "playbook"},
        "jenkins": {"jenkinsfile", "pipeline", "stage", "steps"},
        "git": {"\\.git", "git add", "git commit", "git push"},
        "mysql": {"mysql", "select.*from", "insert into", "create table"},
        "postgresql": {"postgresql", "postgres", "psql"},
        "mongodb": {"mongodb", "mongo", "find\\(", "insert\\("},
        "redis": {"redis", "set.*", "get.*", "rpush"},
        "elasticsearch": {"elasticsearch", "elastic", "query.*", "search.*"},
        "graphql": {"graphql", "query.*{", "mutation.*{", "subscription.*{"},
        "grpc": {"grpc", "proto", "service.*rpc", "\\.proto$"},
        "websocket": {"websocket", "ws://", "wss://", "socket.io"},
        "jwt": {"jwt", "jsonwebtoken", "bearer.*token"},
        "oauth": {"oauth", "oauth2", "client_id", "client_secret"},
        "ssl": {"ssl", "tls", "https", "certificate"},
        "encryption": {"encrypt", "decrypt", "bcrypt", "sha256"},
        "react": {"import.*react", "from [\"']react[\"']", "<.*/>", "jsx"},
        "vue": {"import.*vue", "from [\"']vue[\"']", "<template>", "vue"},
        "angular": {"import.*@angular", "from [\"']@angular", "ngmodule", "component"},
        "express": {"import.*express", "require.*express", "app.get", "app.post"},
        "django": {"from django", "import django", "django.db", "models.model"},
        "flask": {"from flask", "import flask", "@app.route", "flask.request"},
        "spring": {"@springboot", "@controller", "@service", "@repository"},
        "hibernate": {"@entity", "@table", "@column", "hibernate"},
        "jquery": {"\\$\\(", "jquery"},
        "bootstrap": {"bootstrap", "btn-", "col-", "row"},
        "docker": {"dockerfile", "docker-compose", "from.*:", "run.*"},
        "kubernetes": {"apiversion:", "kind:", "metadata:", "spec:"},
        "terraform": {"\\.tf$", "resource \"", "provider \"", "terraform"},
        "ansible": {"\\.yml$", "hosts:", "tasks:", "playbook"},
        "jenkins": {"jenkinsfile", "pipeline", "stage", "steps"},
        "git": {"\\.git", "git add", "git commit", "git push"},
        "mysql": {"mysql", "select.*from", "insert into", "create table"},
        "postgresql": {"postgresql", "postgres", "psql"},
        "mongodb": {"mongodb", "mongo", "find\\(", "insert\\("},
        "redis": {"redis", "set.*", "get.*", "rpush"},
        "elasticsearch": {"elasticsearch", "elastic", "query.*", "search.*"},
        "graphql": {"graphql", "query.*{", "mutation.*{", "subscription.*{"},
        "grpc": {"grpc", "proto", "service.*rpc", "\\.proto$"},
        "websocket": {"websocket", "ws://", "wss://", "socket.io"},
        "jwt": {"jwt", "jsonwebtoken", "bearer.*token"},
        "oauth": {"oauth", "oauth2", "client_id", "client_secret"},
        "ssl": {"ssl", "tls", "https", "certificate"},
        "encryption": {"encrypt", "decrypt", "bcrypt", "sha256"},
    }

    for tech, patterns := range frameworkPatterns {
        for _, pattern := range patterns {
            if matched, _ := regexp.MatchString(pattern, lowerContent); matched {
@@ -347,7 +347,7 @@ func (cau *ContentAnalysisUtils) DetectTechnologies(content, filename string) []
            }
        }
    }

    return removeDuplicates(technologies)
}

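Hypothetical usage, assuming a ContentAnalysisUtils value can be obtained as below (its constructor is not part of this diff):

cau := &ContentAnalysisUtils{}
techs := cau.DetectTechnologies(src, "handlers/user.go") // src: file contents
// The ".go" extension contributes "go" and "golang"; each framework whose
// regex matches the lower-cased content is appended, and removeDuplicates
// collapses repeats before returning.
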
@@ -371,7 +371,7 @@ func (su *ScoreUtils) NormalizeScore(score, min, max float64) float64 {
func (su *ScoreUtils) CalculateWeightedScore(scores map[string]float64, weights map[string]float64) float64 {
    totalWeight := 0.0
    weightedSum := 0.0

    for dimension, score := range scores {
        weight := weights[dimension]
        if weight == 0 {
@@ -380,11 +380,11 @@ func (su *ScoreUtils) CalculateWeightedScore(scores map[string]float64, weights
        weightedSum += score * weight
        totalWeight += weight
    }

    if totalWeight == 0 {
        return 0.0
    }

    return weightedSum / totalWeight
}

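A worked example, assuming ScoreUtils can be zero-constructed; dimensions without a weight are skipped by the weight == 0 guard:

su := &ScoreUtils{}
score := su.CalculateWeightedScore(
    map[string]float64{"accuracy": 0.8, "completeness": 0.5, "style": 0.9},
    map[string]float64{"accuracy": 2.0, "completeness": 1.0}, // "style" unweighted
)
// (0.8*2.0 + 0.5*1.0) / (2.0 + 1.0) = 2.1 / 3.0 = 0.7
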
@@ -393,31 +393,31 @@ func (su *ScoreUtils) CalculatePercentile(values []float64, percentile int) floa
    if len(values) == 0 {
        return 0.0
    }

    sorted := make([]float64, len(values))
    copy(sorted, values)
    sort.Float64s(sorted)

    if percentile <= 0 {
        return sorted[0]
    }
    if percentile >= 100 {
        return sorted[len(sorted)-1]
    }

    index := float64(percentile) / 100.0 * float64(len(sorted)-1)
    lower := int(math.Floor(index))
    upper := int(math.Ceil(index))

    if lower == upper {
        return sorted[lower]
    }

    // Linear interpolation
    lowerValue := sorted[lower]
    upperValue := sorted[upper]
    weight := index - float64(lower)

    return lowerValue + weight*(upperValue-lowerValue)
}

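A worked example of the interpolation: for values {1, 2, 3, 4} and the 90th percentile, index = 0.90 * 3 = 2.7, so lower = 2, upper = 3, and weight = 0.7, giving sorted[2] + 0.7*(sorted[3]-sorted[2]) = 3 + 0.7 = 3.7.
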
@@ -426,14 +426,14 @@ func (su *ScoreUtils) CalculateStandardDeviation(values []float64) float64 {
    if len(values) <= 1 {
        return 0.0
    }

    // Calculate mean
    sum := 0.0
    for _, value := range values {
        sum += value
    }
    mean := sum / float64(len(values))

    // Calculate variance
    variance := 0.0
    for _, value := range values {
@@ -441,7 +441,7 @@ func (su *ScoreUtils) CalculateStandardDeviation(values []float64) float64 {
        variance += diff * diff
    }
    variance /= float64(len(values) - 1)

    return math.Sqrt(variance)
}

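Because the variance divides by len(values)-1, this is the sample (Bessel-corrected) standard deviation: for {2, 4, 4, 4, 5, 5, 7, 9} the squared deviations from the mean of 5 sum to 32, so it returns sqrt(32/7) ≈ 2.14 rather than the population value sqrt(32/8) = 2.0.
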
@@ -510,41 +510,41 @@ func (su *StringUtils) Similarity(s1, s2 string) float64 {
    if s1 == s2 {
        return 1.0
    }

    words1 := strings.Fields(strings.ToLower(s1))
    words2 := strings.Fields(strings.ToLower(s2))

    if len(words1) == 0 && len(words2) == 0 {
        return 1.0
    }

    if len(words1) == 0 || len(words2) == 0 {
        return 0.0
    }

    set1 := make(map[string]bool)
    set2 := make(map[string]bool)

    for _, word := range words1 {
        set1[word] = true
    }
    for _, word := range words2 {
        set2[word] = true
    }

    intersection := 0
    for word := range set1 {
        if set2[word] {
            intersection++
        }
    }

    union := len(set1) + len(set2) - intersection

    if union == 0 {
        return 1.0
    }

    return float64(intersection) / float64(union)
}

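This is a Jaccard similarity over lower-cased word sets. For example, "The quick brown fox" versus "the lazy brown dog" yields two 4-word sets sharing {the, brown}: intersection = 2, union = 4 + 4 - 2 = 6, score = 2/6 ≈ 0.33.
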
@@ -565,35 +565,35 @@ func (su *StringUtils) ExtractKeywords(text string, minLength int) []string {
        "so": true, "than": true, "too": true, "very": true, "can": true, "could": true,
        "should": true, "would": true, "use": true, "used": true, "using": true,
    }

    // Extract words
    wordRegex := regexp.MustCompile(`\b[a-zA-Z]+\b`)
    words := wordRegex.FindAllString(strings.ToLower(text), -1)

    keywords := []string{}
    wordFreq := make(map[string]int)

    for _, word := range words {
        if len(word) >= minLength && !stopWords[word] {
            wordFreq[word]++
        }
    }

    // Sort by frequency and return top keywords
    type wordCount struct {
        word string
        count int
    }

    var sortedWords []wordCount
    for word, count := range wordFreq {
        sortedWords = append(sortedWords, wordCount{word, count})
    }

    sort.Slice(sortedWords, func(i, j int) bool {
        return sortedWords[i].count > sortedWords[j].count
    })

    maxKeywords := 20
    for i, wc := range sortedWords {
        if i >= maxKeywords {
@@ -601,7 +601,7 @@ func (su *StringUtils) ExtractKeywords(text string, minLength int) []string {
        }
        keywords = append(keywords, wc.word)
    }

    return keywords
}

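Hypothetical usage, assuming StringUtils can be zero-constructed:

su := &StringUtils{}
kw := su.ExtractKeywords("Parse the config file, then validate the config schema", 4)
// Words shorter than 4 letters and stop words are dropped; the survivors
// are ranked by frequency, so "config" (2 occurrences) sorts ahead of the
// single-occurrence words, and at most 20 keywords are returned.
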
@@ -741,30 +741,58 @@ func CloneContextNode(node *slurpContext.ContextNode) *slurpContext.ContextNode
    }

    clone := &slurpContext.ContextNode{
        Path: node.Path,
        Summary: node.Summary,
        Purpose: node.Purpose,
        Technologies: make([]string, len(node.Technologies)),
        Tags: make([]string, len(node.Tags)),
        Insights: make([]string, len(node.Insights)),
        CreatedAt: node.CreatedAt,
        UpdatedAt: node.UpdatedAt,
        ContextSpecificity: node.ContextSpecificity,
        RAGConfidence: node.RAGConfidence,
        ProcessedForRole: node.ProcessedForRole,
        Path: node.Path,
        UCXLAddress: node.UCXLAddress,
        Summary: node.Summary,
        Purpose: node.Purpose,
        Technologies: make([]string, len(node.Technologies)),
        Tags: make([]string, len(node.Tags)),
        Insights: make([]string, len(node.Insights)),
        OverridesParent: node.OverridesParent,
        ContextSpecificity: node.ContextSpecificity,
        AppliesToChildren: node.AppliesToChildren,
        AppliesTo: node.AppliesTo,
        GeneratedAt: node.GeneratedAt,
        UpdatedAt: node.UpdatedAt,
        CreatedBy: node.CreatedBy,
        WhoUpdated: node.WhoUpdated,
        RAGConfidence: node.RAGConfidence,
        EncryptedFor: make([]string, len(node.EncryptedFor)),
        AccessLevel: node.AccessLevel,
    }

    copy(clone.Technologies, node.Technologies)
    copy(clone.Tags, node.Tags)
    copy(clone.Insights, node.Insights)
    copy(clone.EncryptedFor, node.EncryptedFor)

    if node.RoleSpecificInsights != nil {
        clone.RoleSpecificInsights = make([]*RoleSpecificInsight, len(node.RoleSpecificInsights))
        copy(clone.RoleSpecificInsights, node.RoleSpecificInsights)
    if node.Parent != nil {
        parent := *node.Parent
        clone.Parent = &parent
    }
    if len(node.Children) > 0 {
        clone.Children = make([]string, len(node.Children))
        copy(clone.Children, node.Children)
    }
    if node.Language != nil {
        language := *node.Language
        clone.Language = &language
    }
    if node.Size != nil {
        sz := *node.Size
        clone.Size = &sz
    }
    if node.LastModified != nil {
        lm := *node.LastModified
        clone.LastModified = &lm
    }
    if node.ContentHash != nil {
        hash := *node.ContentHash
        clone.ContentHash = &hash
    }

    if node.Metadata != nil {
        clone.Metadata = make(map[string]interface{})
        clone.Metadata = make(map[string]interface{}, len(node.Metadata))
        for k, v := range node.Metadata {
            clone.Metadata[k] = v
        }
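
The clone deep-copies everything the node owns directly: slices are re-allocated and copied, and pointer fields (Parent, Language, Size, LastModified, ContentHash) are dereferenced into fresh values. A hedged usage sketch:

clone := CloneContextNode(node)
clone.Tags = append(clone.Tags, "derived") // node.Tags is unaffected
// Caveat: Metadata values are copied shallowly, so a nested map or slice
// stored under a Metadata key is still shared between node and clone.
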
@@ -783,7 +811,7 @@ func MergeContextNodes(nodes ...*slurpContext.ContextNode) *slurpContext.Context
    }

    merged := CloneContextNode(nodes[0])

    for i := 1; i < len(nodes); i++ {
        node := nodes[i]
        if node == nil {
@@ -792,27 +820,29 @@ func MergeContextNodes(nodes ...*slurpContext.ContextNode) *slurpContext.Context

        // Merge technologies
        merged.Technologies = mergeStringSlices(merged.Technologies, node.Technologies)

        // Merge tags
        merged.Tags = mergeStringSlices(merged.Tags, node.Tags)

        // Merge insights
        merged.Insights = mergeStringSlices(merged.Insights, node.Insights)

        // Use most recent timestamps
        if node.CreatedAt.Before(merged.CreatedAt) {
            merged.CreatedAt = node.CreatedAt

        // Use most relevant timestamps
        if merged.GeneratedAt.IsZero() {
            merged.GeneratedAt = node.GeneratedAt
        } else if !node.GeneratedAt.IsZero() && node.GeneratedAt.Before(merged.GeneratedAt) {
            merged.GeneratedAt = node.GeneratedAt
        }
        if node.UpdatedAt.After(merged.UpdatedAt) {
            merged.UpdatedAt = node.UpdatedAt
        }

        // Average context specificity
        merged.ContextSpecificity = (merged.ContextSpecificity + node.ContextSpecificity) / 2

        // Average RAG confidence
        merged.RAGConfidence = (merged.RAGConfidence + node.RAGConfidence) / 2

        // Merge metadata
        if node.Metadata != nil {
            if merged.Metadata == nil {
@@ -844,7 +874,7 @@ func removeDuplicates(slice []string) []string {
func mergeStringSlices(slice1, slice2 []string) []string {
    merged := make([]string, len(slice1))
    copy(merged, slice1)

    for _, item := range slice2 {
        found := false
        for _, existing := range merged {
@@ -857,7 +887,7 @@ func mergeStringSlices(slice1, slice2 []string) []string {
            merged = append(merged, item)
        }
    }

    return merged
}

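Hypothetical usage of the merge; later arguments fold into a clone of the first:

merged := MergeContextNodes(primary, override)
// Technologies, Tags, and Insights are unioned; GeneratedAt keeps the
// earliest non-zero value and UpdatedAt the latest; specificity and RAG
// confidence are averaged pairwise, so argument order matters once three
// or more nodes are merged.
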
@@ -1034,4 +1064,4 @@ func (bu *ByteUtils) ReadFileWithLimit(filename string, maxSize int64) ([]byte,
    }

    return io.ReadAll(file)
}
}