bzzz/pkg/slurp/temporal/influence_analyzer.go
anthonyrawlins d96c931a29 Resolve import cycles and migrate to chorus.services module path
This comprehensive refactoring addresses critical architectural issues:

IMPORT CYCLE RESOLUTION:
• pkg/crypto ↔ pkg/slurp/roles: Created pkg/security/access_levels.go
• pkg/ucxl → pkg/dht: Created pkg/storage/interfaces.go
• pkg/slurp/leader → pkg/election → pkg/slurp/storage: Moved types to pkg/election/interfaces.go
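The cycle-breaking pattern is the same in each case: shared definitions move into a package that sits below both sides of the former cycle, and both sides import that package instead of each other. A minimal sketch, with hypothetical type names (the actual contents of pkg/security/access_levels.go may differ):

// pkg/security/access_levels.go (illustrative only)
package security

// AccessLevel stands in for the shared definitions that previously forced
// pkg/crypto and pkg/slurp/roles to import one another.
type AccessLevel int

const (
	AccessPublic AccessLevel = iota
	AccessRestricted
	AccessSecret
)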

MODULE PATH MIGRATION:
• Changed from github.com/anthonyrawlins/bzzz to chorus.services/bzzz
• Updated all import statements across 115+ files
• Maintains compatibility while removing the dependency on a personal GitHub account
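The rewrite itself is mechanical; for example (the pkg/ucxl path appears in the file below, other affected paths follow the same pattern):

// Before
import "github.com/anthonyrawlins/bzzz/pkg/ucxl"

// After
import "chorus.services/bzzz/pkg/ucxl"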

TYPE SYSTEM IMPROVEMENTS:
• Resolved duplicate type declarations in crypto package
• Added missing type definitions (RoleStatus, TimeRestrictions, KeyStatus, KeyRotationResult)
• Proper interface segregation to prevent future cycles

ARCHITECTURAL BENEFITS:
• Build now progresses past structural issues to normal dependency resolution
• Cleaner separation of concerns between packages
• Eliminates circular dependencies that prevented compilation
• Establishes foundation for scalable codebase growth

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-17 10:04:25 +10:00

1139 lines · 30 KiB · Go

package temporal

import (
"context"
"fmt"
"math"
"sort"
"sync"
"time"

"chorus.services/bzzz/pkg/ucxl"
)
// influenceAnalyzerImpl implements the InfluenceAnalyzer interface
type influenceAnalyzerImpl struct {
mu sync.RWMutex
// Reference to the temporal graph
graph *temporalGraphImpl
// Cached analysis results
networkAnalysisCache *InfluenceNetworkAnalysis
centralityCache *CentralityMetrics
lastAnalysisTime time.Time
cacheValidDuration time.Duration
// Analysis parameters
dampingFactor float64 // For PageRank calculation
maxIterations int // For iterative algorithms
convergenceThreshold float64 // For convergence checking
}
// NewInfluenceAnalyzer creates a new influence analyzer
func NewInfluenceAnalyzer(graph *temporalGraphImpl) InfluenceAnalyzer {
return &influenceAnalyzerImpl{
graph: graph,
cacheValidDuration: time.Minute * 30,
dampingFactor: 0.85, // Standard PageRank damping factor
maxIterations: 100,
convergenceThreshold: 1e-6,
}
}
// AnalyzeInfluenceNetwork analyzes the structure of decision influence relationships
func (ia *influenceAnalyzerImpl) AnalyzeInfluenceNetwork(ctx context.Context) (*InfluenceNetworkAnalysis, error) {
ia.mu.Lock()
defer ia.mu.Unlock()
// Check if cached analysis is still valid
if ia.networkAnalysisCache != nil && time.Since(ia.lastAnalysisTime) < ia.cacheValidDuration {
return ia.networkAnalysisCache, nil
}
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
totalNodes := len(ia.graph.nodes)
totalEdges := 0
// Count total edges
for _, influences := range ia.graph.influences {
totalEdges += len(influences)
}
// Calculate network density
maxPossibleEdges := totalNodes * (totalNodes - 1)
networkDensity := 0.0
if maxPossibleEdges > 0 {
networkDensity = float64(totalEdges) / float64(maxPossibleEdges)
}
// Calculate clustering coefficient
clusteringCoeff := ia.calculateClusteringCoefficient()
// Calculate average path length
avgPathLength := ia.calculateAveragePathLength()
// Find central nodes
centralNodes := ia.findCentralNodes()
// Detect communities
communities := ia.detectCommunities()
analysis := &InfluenceNetworkAnalysis{
TotalNodes: totalNodes,
TotalEdges: totalEdges,
NetworkDensity: networkDensity,
ClusteringCoeff: clusteringCoeff,
AveragePathLength: avgPathLength,
CentralNodes: centralNodes,
Communities: communities,
AnalyzedAt: time.Now(),
}
// Cache the results
ia.networkAnalysisCache = analysis
ia.lastAnalysisTime = time.Now()
return analysis, nil
}
// GetInfluenceStrength calculates influence strength between contexts
func (ia *influenceAnalyzerImpl) GetInfluenceStrength(ctx context.Context, influencer, influenced ucxl.Address) (float64, error) {
ia.mu.RLock()
defer ia.mu.RUnlock()
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
// Get the latest nodes for both addresses
influencerNode, err := ia.graph.getLatestNodeUnsafe(influencer)
if err != nil {
return 0, fmt.Errorf("influencer node not found: %w", err)
}
influencedNode, err := ia.graph.getLatestNodeUnsafe(influenced)
if err != nil {
return 0, fmt.Errorf("influenced node not found: %w", err)
}
// Check if direct influence exists
influences, exists := ia.graph.influences[influencerNode.ID]
if !exists {
return 0, nil
}
hasDirectInfluence := false
for _, influencedID := range influences {
if influencedID == influencedNode.ID {
hasDirectInfluence = true
break
}
}
if !hasDirectInfluence {
return 0, nil
}
// Calculate influence strength based on multiple factors
strength := 0.0
// Factor 1: Decision recency (more recent = stronger influence)
timeDiff := time.Since(influencerNode.Timestamp)
recencyFactor := math.Max(0, 1.0-timeDiff.Hours()/(7*24)) // Decay over a week
strength += recencyFactor * 0.3
// Factor 2: Confidence level of the influencer
confidenceFactor := influencerNode.Confidence
strength += confidenceFactor * 0.3
// Factor 3: Impact scope (broader scope = stronger influence)
scopeFactor := 0.0
switch influencerNode.ImpactScope {
case ImpactSystem:
scopeFactor = 1.0
case ImpactProject:
scopeFactor = 0.8
case ImpactModule:
scopeFactor = 0.6
case ImpactLocal:
scopeFactor = 0.4
}
strength += scopeFactor * 0.2
// Factor 4: Network position (central nodes have stronger influence)
centralityFactor := ia.getNodeCentrality(influencerNode.ID)
strength += centralityFactor * 0.2
return math.Min(strength, 1.0), nil
}
// FindInfluentialDecisions finds the most influential decisions in the system
func (ia *influenceAnalyzerImpl) FindInfluentialDecisions(ctx context.Context, limit int) ([]*InfluentialDecision, error) {
ia.mu.RLock()
defer ia.mu.RUnlock()
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
type nodeScore struct {
node *TemporalNode
score float64
}
scores := make([]*nodeScore, 0, len(ia.graph.nodes))
// Calculate influence score for each node
for _, node := range ia.graph.nodes {
score := ia.calculateInfluenceScore(node)
scores = append(scores, &nodeScore{node: node, score: score})
}
// Sort by influence score (highest first)
sort.Slice(scores, func(i, j int) bool {
return scores[i].score > scores[j].score
})
// Convert to InfluentialDecision structs
influential := make([]*InfluentialDecision, 0)
maxResults := limit
if maxResults <= 0 || maxResults > len(scores) {
maxResults = len(scores)
}
for i := 0; i < maxResults; i++ {
nodeScore := scores[i]
node := nodeScore.node
// Analyze impact of this decision
impact := ia.analyzeDecisionImpactInternal(node)
decision := &InfluentialDecision{
Address: node.UCXLAddress,
DecisionHop: node.Version,
InfluenceScore: nodeScore.score,
AffectedContexts: node.Influences,
DecisionMetadata: ia.graph.decisions[node.DecisionID],
ImpactAnalysis: impact,
InfluenceReasons: ia.getInfluenceReasons(node, nodeScore.score),
}
influential = append(influential, decision)
}
return influential, nil
}
// AnalyzeDecisionImpact analyzes the impact of a specific decision
func (ia *influenceAnalyzerImpl) AnalyzeDecisionImpact(ctx context.Context, address ucxl.Address, decisionHop int) (*DecisionImpact, error) {
ia.mu.RLock()
defer ia.mu.RUnlock()
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
// Get the specific decision version
node, err := ia.graph.GetVersionAtDecision(ctx, address, decisionHop)
if err != nil {
return nil, fmt.Errorf("decision not found: %w", err)
}
return ia.analyzeDecisionImpactInternal(node), nil
}
// PredictInfluence predicts likely influence relationships
func (ia *influenceAnalyzerImpl) PredictInfluence(ctx context.Context, address ucxl.Address) ([]*PredictedInfluence, error) {
ia.mu.RLock()
defer ia.mu.RUnlock()
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
node, err := ia.graph.getLatestNodeUnsafe(address)
if err != nil {
return nil, fmt.Errorf("node not found: %w", err)
}
predictions := make([]*PredictedInfluence, 0)
// Analyze patterns to predict new influences
for targetAddress, targetNodes := range ia.graph.addressToNodes {
if targetAddress == address.String() {
continue // Skip self
}
if len(targetNodes) == 0 {
continue // No recorded versions for this address
}
targetNode := targetNodes[len(targetNodes)-1] // Latest version
// Skip if influence already exists
if ia.hasDirectInfluence(node, targetNode) {
continue
}
// Calculate prediction probability based on various factors
probability := ia.calculateInfluenceProbability(node, targetNode)
if probability > 0.3 { // Threshold for meaningful predictions
prediction := &PredictedInfluence{
From: address,
To: targetNode.UCXLAddress,
Probability: probability,
Strength: probability * 0.8, // Predicted strength slightly lower than probability
Reasons: ia.getPredictionReasons(node, targetNode),
Confidence: probability * 0.9,
EstimatedDelay: time.Duration(float64(time.Hour*24) * (1.0 - probability)),
}
predictions = append(predictions, prediction)
}
}
// Sort by probability (highest first)
sort.Slice(predictions, func(i, j int) bool {
return predictions[i].Probability > predictions[j].Probability
})
// Limit to top 10 predictions
if len(predictions) > 10 {
predictions = predictions[:10]
}
return predictions, nil
}
// GetCentralityMetrics calculates centrality metrics for contexts
func (ia *influenceAnalyzerImpl) GetCentralityMetrics(ctx context.Context) (*CentralityMetrics, error) {
ia.mu.Lock()
defer ia.mu.Unlock()
// Check cache
if ia.centralityCache != nil && time.Since(ia.lastAnalysisTime) < ia.cacheValidDuration {
return ia.centralityCache, nil
}
ia.graph.mu.RLock()
defer ia.graph.mu.RUnlock()
metrics := &CentralityMetrics{
BetweennessCentrality: make(map[string]float64),
ClosenessCentrality: make(map[string]float64),
DegreeCentrality: make(map[string]float64),
EigenvectorCentrality: make(map[string]float64),
PageRank: make(map[string]float64),
CalculatedAt: time.Now(),
}
// Calculate degree centrality
ia.calculateDegreeCentrality(metrics)
// Calculate betweenness centrality
ia.calculateBetweennessCentrality(metrics)
// Calculate closeness centrality
ia.calculateClosenessCentrality(metrics)
// Calculate eigenvector centrality
ia.calculateEigenvectorCentrality(metrics)
// Calculate PageRank
ia.calculatePageRank(metrics)
// Cache the results
ia.centralityCache = metrics
ia.lastAnalysisTime = time.Now()
return metrics, nil
}
// Helper methods
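// calculateClusteringCoefficient returns the average clustering coefficient across all nodes in the influence graph.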
func (ia *influenceAnalyzerImpl) calculateClusteringCoefficient() float64 {
totalCoeff := 0.0
nodeCount := 0
for nodeID := range ia.graph.nodes {
coeff := ia.calculateNodeClusteringCoefficient(nodeID)
totalCoeff += coeff
nodeCount++
}
if nodeCount == 0 {
return 0
}
return totalCoeff / float64(nodeCount)
}
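// calculateNodeClusteringCoefficient measures how densely a node's neighbors are connected to each other (0 = no links between neighbors, 1 = fully connected).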
func (ia *influenceAnalyzerImpl) calculateNodeClusteringCoefficient(nodeID string) float64 {
neighbors := ia.getNeighbors(nodeID)
if len(neighbors) < 2 {
return 0
}
// Count connections between neighbors
connections := 0
for i, neighbor1 := range neighbors {
for j := i + 1; j < len(neighbors); j++ {
neighbor2 := neighbors[j]
if ia.areConnected(neighbor1, neighbor2) {
connections++
}
}
}
possibleConnections := len(neighbors) * (len(neighbors) - 1) / 2
if possibleConnections == 0 {
return 0
}
return float64(connections) / float64(possibleConnections)
}
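// calculateAveragePathLength estimates the mean shortest-path length by sampling up to 100 nodes instead of computing all pairs.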
func (ia *influenceAnalyzerImpl) calculateAveragePathLength() float64 {
totalPaths := 0
totalLength := 0.0
// Sample a subset of node pairs to avoid O(n³) complexity
nodeIDs := make([]string, 0, len(ia.graph.nodes))
for nodeID := range ia.graph.nodes {
nodeIDs = append(nodeIDs, nodeID)
}
sampleSize := min(100, len(nodeIDs)) // Sample up to 100 nodes
for i := 0; i < sampleSize; i++ {
for j := i + 1; j < sampleSize; j++ {
pathLength := ia.findShortestPathLength(nodeIDs[i], nodeIDs[j])
if pathLength > 0 {
totalLength += float64(pathLength)
totalPaths++
}
}
}
if totalPaths == 0 {
return 0
}
return totalLength / float64(totalPaths)
}
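// findCentralNodes returns up to 20 nodes whose averaged centrality measures exceed 0.5, sorted by influence score.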
func (ia *influenceAnalyzerImpl) findCentralNodes() []CentralNode {
centralNodes := make([]CentralNode, 0)
// Calculate various centrality measures for each node
for nodeID, node := range ia.graph.nodes {
degreeCentrality := ia.calculateNodeDegreeCentrality(nodeID)
betweennessCentrality := ia.calculateNodeBetweennessCentrality(nodeID)
closenessCentrality := ia.calculateNodeClosenessCentrality(nodeID)
pageRank := ia.calculateNodePageRank(nodeID)
// Calculate overall influence score
influenceScore := (degreeCentrality + betweennessCentrality + closenessCentrality + pageRank) / 4.0
if influenceScore > 0.5 { // Threshold for "central"
centralNode := CentralNode{
Address: node.UCXLAddress,
BetweennessCentrality: betweennessCentrality,
ClosenessCentrality: closenessCentrality,
DegreeCentrality: degreeCentrality,
PageRank: pageRank,
InfluenceScore: influenceScore,
}
centralNodes = append(centralNodes, centralNode)
}
}
// Sort by influence score
sort.Slice(centralNodes, func(i, j int) bool {
return centralNodes[i].InfluenceScore > centralNodes[j].InfluenceScore
})
// Limit to top 20
if len(centralNodes) > 20 {
centralNodes = centralNodes[:20]
}
return centralNodes
}
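// detectCommunities groups strongly interconnected nodes into communities of at least three members using a breadth-first expansion heuristic.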
func (ia *influenceAnalyzerImpl) detectCommunities() []Community {
// Simple community detection based on modularity
communities := make([]Community, 0)
// Use a simple approach: group nodes with high interconnectivity
visited := make(map[string]bool)
communityID := 0
for nodeID := range ia.graph.nodes {
if visited[nodeID] {
continue
}
// Start a new community
community := ia.expandCommunity(nodeID, visited)
if len(community) >= 3 { // Minimum community size
communityStruct := Community{
ID: fmt.Sprintf("community-%d", communityID),
Nodes: community,
Modularity: ia.calculateCommunityModularity(community),
Density: ia.calculateCommunityDensity(community),
Description: fmt.Sprintf("Community of %d related decisions", len(community)),
Tags: []string{"auto-detected"},
}
communities = append(communities, communityStruct)
communityID++
}
}
return communities
}
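// calculateInfluenceScore combines direct influence count (30%), network centrality (25%), decision confidence (20%), impact scope (15%), and recency (10%) into a single score.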
func (ia *influenceAnalyzerImpl) calculateInfluenceScore(node *TemporalNode) float64 {
score := 0.0
// Factor 1: Number of direct influences (30%)
directInfluences := len(ia.graph.influences[node.ID])
score += float64(directInfluences) * 0.3
// Factor 2: Network centrality (25%)
centrality := ia.getNodeCentrality(node.ID)
score += centrality * 0.25
// Factor 3: Decision confidence (20%)
score += node.Confidence * 0.2
// Factor 4: Impact scope (15%)
scopeWeight := 0.0
switch node.ImpactScope {
case ImpactSystem:
scopeWeight = 1.0
case ImpactProject:
scopeWeight = 0.8
case ImpactModule:
scopeWeight = 0.6
case ImpactLocal:
scopeWeight = 0.4
}
score += scopeWeight * 0.15
// Factor 5: Recency (10%)
timeSinceDecision := time.Since(node.Timestamp)
recencyScore := math.Max(0, 1.0-timeSinceDecision.Hours()/(30*24)) // Decay over 30 days
score += recencyScore * 0.1
return score
}
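// analyzeDecisionImpactInternal walks the influence graph up to three hops from a decision to collect its direct and indirect impact.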
func (ia *influenceAnalyzerImpl) analyzeDecisionImpactInternal(node *TemporalNode) *DecisionImpact {
// Find direct and indirect impact
directImpact := make([]ucxl.Address, len(node.Influences))
copy(directImpact, node.Influences)
// Find indirect impact (influenced nodes that are influenced by direct impact)
indirectImpact := make([]ucxl.Address, 0)
impactRadius := 1
visited := make(map[string]bool)
queue := []string{node.ID}
visited[node.ID] = true
for len(queue) > 0 && impactRadius <= 3 { // Limit to 3 hops
levelSize := len(queue)
for i := 0; i < levelSize; i++ {
currentID := queue[0]
queue = queue[1:]
if influences, exists := ia.graph.influences[currentID]; exists {
for _, influencedID := range influences {
if !visited[influencedID] {
visited[influencedID] = true
queue = append(queue, influencedID)
if influencedNode, exists := ia.graph.nodes[influencedID]; exists {
if impactRadius > 1 { // Indirect impact
indirectImpact = append(indirectImpact, influencedNode.UCXLAddress)
}
}
}
}
}
}
impactRadius++
}
// Calculate impact strength
impactStrength := float64(len(directImpact))*1.0 + float64(len(indirectImpact))*0.5
impactStrength = math.Min(impactStrength/10.0, 1.0) // Normalize to 0-1
// Estimate propagation time
propagationTime := time.Duration(impactRadius) * time.Hour * 24
return &DecisionImpact{
Address: node.UCXLAddress,
DecisionHop: node.Version,
DirectImpact: directImpact,
IndirectImpact: indirectImpact,
ImpactRadius: impactRadius - 1,
ImpactStrength: impactStrength,
PropagationTime: propagationTime,
ImpactCategories: []string{"influence_network", "decision_cascade"},
MitigationActions: []string{"review_affected_contexts", "validate_assumptions"},
}
}
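// getInfluenceReasons produces human-readable explanations for why a decision scored as influential.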
func (ia *influenceAnalyzerImpl) getInfluenceReasons(node *TemporalNode, score float64) []string {
reasons := make([]string, 0)
if len(node.Influences) > 3 {
reasons = append(reasons, "influences many contexts")
}
if node.Confidence > 0.8 {
reasons = append(reasons, "high confidence decision")
}
if node.ImpactScope == ImpactSystem || node.ImpactScope == ImpactProject {
reasons = append(reasons, "broad impact scope")
}
if score > 0.8 {
reasons = append(reasons, "high network centrality")
}
return reasons
}
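// hasDirectInfluence reports whether a direct influence edge exists from one temporal node to another.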
func (ia *influenceAnalyzerImpl) hasDirectInfluence(from, to *TemporalNode) bool {
influences, exists := ia.graph.influences[from.ID]
if !exists {
return false
}
for _, influencedID := range influences {
if influencedID == to.ID {
return true
}
}
return false
}
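// calculateInfluenceProbability estimates the likelihood of a new influence edge from technology similarity, temporal proximity, shared influencers, and network distance.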
func (ia *influenceAnalyzerImpl) calculateInfluenceProbability(from, to *TemporalNode) float64 {
probability := 0.0
// Factor 1: Technology similarity
techSimilarity := ia.calculateTechnologySimilarity(from, to)
probability += techSimilarity * 0.3
// Factor 2: Temporal proximity
timeDiff := math.Abs(from.Timestamp.Sub(to.Timestamp).Hours())
temporalProximity := math.Max(0, 1.0-timeDiff/(7*24)) // Closer in time = higher probability
probability += temporalProximity * 0.2
// Factor 3: Common influencers
commonInfluencers := ia.countCommonInfluencers(from, to)
probability += math.Min(float64(commonInfluencers)/3.0, 1.0) * 0.3
// Factor 4: Network distance
distance := ia.findShortestPathLength(from.ID, to.ID)
if distance > 0 && distance <= 3 {
networkProximity := 1.0 - float64(distance-1)/2.0
probability += networkProximity * 0.2
}
return math.Min(probability, 1.0)
}
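// getPredictionReasons lists the factors that support a predicted influence relationship.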
func (ia *influenceAnalyzerImpl) getPredictionReasons(from, to *TemporalNode) []string {
reasons := make([]string, 0)
if ia.calculateTechnologySimilarity(from, to) > 0.7 {
reasons = append(reasons, "similar technologies")
}
if ia.countCommonInfluencers(from, to) > 2 {
reasons = append(reasons, "common influencers")
}
distance := ia.findShortestPathLength(from.ID, to.ID)
if distance > 0 && distance <= 2 {
reasons = append(reasons, "close in network")
}
timeDiff := math.Abs(from.Timestamp.Sub(to.Timestamp).Hours())
if timeDiff < 24 {
reasons = append(reasons, "recent decisions")
}
return reasons
}
// Additional helper methods for centrality calculations
func (ia *influenceAnalyzerImpl) calculateDegreeCentrality(metrics *CentralityMetrics) {
totalNodes := float64(len(ia.graph.nodes))
if totalNodes <= 1 {
return // Avoid division by zero for empty or single-node graphs
}
for nodeID := range ia.graph.nodes {
degree := float64(len(ia.graph.influences[nodeID]) + len(ia.graph.influencedBy[nodeID]))
metrics.DegreeCentrality[nodeID] = degree / (totalNodes - 1) // Normalized degree centrality
}
}
func (ia *influenceAnalyzerImpl) calculateBetweennessCentrality(metrics *CentralityMetrics) {
// Simplified betweenness centrality calculation
for nodeID := range ia.graph.nodes {
betweenness := ia.calculateNodeBetweennessCentrality(nodeID)
metrics.BetweennessCentrality[nodeID] = betweenness
}
}
func (ia *influenceAnalyzerImpl) calculateClosenessCentrality(metrics *CentralityMetrics) {
for nodeID := range ia.graph.nodes {
closeness := ia.calculateNodeClosenessCentrality(nodeID)
metrics.ClosenessCentrality[nodeID] = closeness
}
}
func (ia *influenceAnalyzerImpl) calculateEigenvectorCentrality(metrics *CentralityMetrics) {
// Simplified eigenvector centrality using power iteration
nodeIDs := make([]string, 0, len(ia.graph.nodes))
for nodeID := range ia.graph.nodes {
nodeIDs = append(nodeIDs, nodeID)
}
n := len(nodeIDs)
if n == 0 {
return
}
// Initialize eigenvector
eigenvector := make([]float64, n)
for i := range eigenvector {
eigenvector[i] = 1.0 / float64(n)
}
// Power iteration
for iter := 0; iter < ia.maxIterations; iter++ {
newEigenvector := make([]float64, n)
for i, nodeID := range nodeIDs {
sum := 0.0
if influencedBy, exists := ia.graph.influencedBy[nodeID]; exists {
for _, influencerID := range influencedBy {
for j, otherNodeID := range nodeIDs {
if otherNodeID == influencerID {
sum += eigenvector[j]
break
}
}
}
}
newEigenvector[i] = sum
}
// Normalize
norm := 0.0
for _, val := range newEigenvector {
norm += val * val
}
norm = math.Sqrt(norm)
if norm > 0 {
for i := range newEigenvector {
newEigenvector[i] /= norm
}
}
// Check convergence
diff := 0.0
for i := range eigenvector {
diff += math.Abs(eigenvector[i] - newEigenvector[i])
}
eigenvector = newEigenvector
if diff < ia.convergenceThreshold {
break
}
}
// Store results
for i, nodeID := range nodeIDs {
metrics.EigenvectorCentrality[nodeID] = eigenvector[i]
}
}
func (ia *influenceAnalyzerImpl) calculatePageRank(metrics *CentralityMetrics) {
nodeIDs := make([]string, 0, len(ia.graph.nodes))
for nodeID := range ia.graph.nodes {
nodeIDs = append(nodeIDs, nodeID)
}
n := len(nodeIDs)
if n == 0 {
return
}
// Initialize PageRank values
pageRank := make([]float64, n)
newPageRank := make([]float64, n)
for i := range pageRank {
pageRank[i] = 1.0 / float64(n)
}
// PageRank iteration
for iter := 0; iter < ia.maxIterations; iter++ {
for i := range newPageRank {
newPageRank[i] = (1.0 - ia.dampingFactor) / float64(n)
}
for i, nodeID := range nodeIDs {
if influences, exists := ia.graph.influences[nodeID]; exists && len(influences) > 0 {
contribution := ia.dampingFactor * pageRank[i] / float64(len(influences))
for _, influencedID := range influences {
for j, otherNodeID := range nodeIDs {
if otherNodeID == influencedID {
newPageRank[j] += contribution
break
}
}
}
}
}
// Check convergence
diff := 0.0
for i := range pageRank {
diff += math.Abs(pageRank[i] - newPageRank[i])
}
copy(pageRank, newPageRank)
if diff < ia.convergenceThreshold {
break
}
}
// Store results
for i, nodeID := range nodeIDs {
metrics.PageRank[nodeID] = pageRank[i]
}
}
// More helper methods
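// getNeighbors returns every node connected to nodeID by an incoming or outgoing influence edge.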
func (ia *influenceAnalyzerImpl) getNeighbors(nodeID string) []string {
neighbors := make([]string, 0)
if influences, exists := ia.graph.influences[nodeID]; exists {
neighbors = append(neighbors, influences...)
}
if influencedBy, exists := ia.graph.influencedBy[nodeID]; exists {
neighbors = append(neighbors, influencedBy...)
}
return neighbors
}
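// areConnected reports whether an influence edge exists between two nodes in either direction.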
func (ia *influenceAnalyzerImpl) areConnected(nodeID1, nodeID2 string) bool {
if influences, exists := ia.graph.influences[nodeID1]; exists {
for _, influenced := range influences {
if influenced == nodeID2 {
return true
}
}
}
if influences, exists := ia.graph.influences[nodeID2]; exists {
for _, influenced := range influences {
if influenced == nodeID1 {
return true
}
}
}
return false
}
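// findShortestPathLength runs a breadth-first search and returns the hop count between two nodes, or -1 if no path exists.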
func (ia *influenceAnalyzerImpl) findShortestPathLength(fromID, toID string) int {
if fromID == toID {
return 0
}
visited := make(map[string]bool)
queue := []struct {
nodeID string
depth int
}{{fromID, 0}}
for len(queue) > 0 {
current := queue[0]
queue = queue[1:]
if visited[current.nodeID] {
continue
}
visited[current.nodeID] = true
if current.nodeID == toID {
return current.depth
}
// Add neighbors
neighbors := ia.getNeighbors(current.nodeID)
for _, neighbor := range neighbors {
if !visited[neighbor] {
queue = append(queue, struct {
nodeID string
depth int
}{neighbor, current.depth + 1})
}
}
}
return -1 // No path found
}
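// getNodeCentrality approximates a node's centrality as its normalized degree (incoming plus outgoing edges divided by n-1).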
func (ia *influenceAnalyzerImpl) getNodeCentrality(nodeID string) float64 {
// Simple centrality based on degree
influences := len(ia.graph.influences[nodeID])
influencedBy := len(ia.graph.influencedBy[nodeID])
totalNodes := len(ia.graph.nodes)
if totalNodes <= 1 {
return 0
}
return float64(influences+influencedBy) / float64(totalNodes-1)
}
func (ia *influenceAnalyzerImpl) calculateNodeDegreeCentrality(nodeID string) float64 {
return ia.getNodeCentrality(nodeID)
}
func (ia *influenceAnalyzerImpl) calculateNodeBetweennessCentrality(nodeID string) float64 {
// Simplified betweenness: count how many shortest paths pass through this node
// This is computationally expensive, so we use an approximation
throughCount := 0
totalPaths := 0
// Sample a subset of node pairs
nodeIDs := make([]string, 0, len(ia.graph.nodes))
for id := range ia.graph.nodes {
if id != nodeID {
nodeIDs = append(nodeIDs, id)
}
}
sampleSize := min(20, len(nodeIDs))
for i := 0; i < sampleSize; i++ {
for j := i + 1; j < sampleSize; j++ {
if ia.isOnShortestPath(nodeIDs[i], nodeIDs[j], nodeID) {
throughCount++
}
totalPaths++
}
}
if totalPaths == 0 {
return 0
}
return float64(throughCount) / float64(totalPaths)
}
func (ia *influenceAnalyzerImpl) calculateNodeClosenessCentrality(nodeID string) float64 {
totalDistance := 0
reachableNodes := 0
// Calculate distances to all other nodes
for otherNodeID := range ia.graph.nodes {
if otherNodeID != nodeID {
distance := ia.findShortestPathLength(nodeID, otherNodeID)
if distance > 0 {
totalDistance += distance
reachableNodes++
}
}
}
if reachableNodes == 0 || totalDistance == 0 {
return 0
}
return float64(reachableNodes) / float64(totalDistance)
}
func (ia *influenceAnalyzerImpl) calculateNodePageRank(nodeID string) float64 {
// Full PageRank is computed in calculatePageRank; this is a cheap per-node
// approximation based on in-degree with the standard damping term.
influencedBy := len(ia.graph.influencedBy[nodeID])
totalNodes := float64(len(ia.graph.nodes))
if totalNodes == 0 {
return 0
}
return (1.0-ia.dampingFactor)/totalNodes + ia.dampingFactor*float64(influencedBy)/totalNodes
}
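// expandCommunity grows a community from a seed node by breadth-first traversal over strongly connected neighbors.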
func (ia *influenceAnalyzerImpl) expandCommunity(startNodeID string, visited map[string]bool) []ucxl.Address {
community := make([]ucxl.Address, 0)
queue := []string{startNodeID}
visited[startNodeID] = true
for len(queue) > 0 {
nodeID := queue[0]
queue = queue[1:]
if node, exists := ia.graph.nodes[nodeID]; exists {
community = append(community, node.UCXLAddress)
}
// Add strongly connected neighbors
neighbors := ia.getNeighbors(nodeID)
for _, neighborID := range neighbors {
if !visited[neighborID] && ia.isStronglyConnected(nodeID, neighborID) {
visited[neighborID] = true
queue = append(queue, neighborID)
}
}
}
return community
}
func (ia *influenceAnalyzerImpl) isStronglyConnected(nodeID1, nodeID2 string) bool {
// Check if nodes have bidirectional influence or share many common neighbors
commonNeighbors := 0
neighbors1 := ia.getNeighbors(nodeID1)
neighbors2 := ia.getNeighbors(nodeID2)
for _, n1 := range neighbors1 {
for _, n2 := range neighbors2 {
if n1 == n2 {
commonNeighbors++
}
}
}
return commonNeighbors >= 2 || (ia.areConnected(nodeID1, nodeID2) && commonNeighbors >= 1)
}
func (ia *influenceAnalyzerImpl) calculateCommunityModularity(community []ucxl.Address) float64 {
// Simplified modularity calculation
if len(community) < 2 {
return 0
}
// Count internal edges
internalEdges := 0
for _, addr1 := range community {
for _, addr2 := range community {
if addr1.String() != addr2.String() {
// Find nodes for these addresses
node1 := ia.findNodeByAddress(addr1)
node2 := ia.findNodeByAddress(addr2)
if node1 != nil && node2 != nil && ia.areConnected(node1.ID, node2.ID) {
internalEdges++
}
}
}
}
// Simple modularity approximation
maxPossibleEdges := len(community) * (len(community) - 1)
if maxPossibleEdges == 0 {
return 0
}
return float64(internalEdges) / float64(maxPossibleEdges)
}
func (ia *influenceAnalyzerImpl) calculateCommunityDensity(community []ucxl.Address) float64 {
return ia.calculateCommunityModularity(community) // Same calculation for simplicity
}
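// findNodeByAddress returns the latest temporal node recorded for a UCXL address, or nil if none exists.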
func (ia *influenceAnalyzerImpl) findNodeByAddress(address ucxl.Address) *TemporalNode {
addressKey := address.String()
if nodes, exists := ia.graph.addressToNodes[addressKey]; exists && len(nodes) > 0 {
return nodes[len(nodes)-1] // Return latest version
}
return nil
}
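// calculateTechnologySimilarity computes the Jaccard similarity between the technology sets of two nodes' contexts.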
func (ia *influenceAnalyzerImpl) calculateTechnologySimilarity(node1, node2 *TemporalNode) float64 {
if node1.Context == nil || node2.Context == nil {
return 0
}
tech1 := make(map[string]bool)
for _, tech := range node1.Context.Technologies {
tech1[tech] = true
}
tech2 := make(map[string]bool)
for _, tech := range node2.Context.Technologies {
tech2[tech] = true
}
intersection := 0
union := len(tech1)
for tech := range tech2 {
if tech1[tech] {
intersection++
} else {
union++
}
}
if union == 0 {
return 0
}
return float64(intersection) / float64(union) // Jaccard similarity
}
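// countCommonInfluencers counts the nodes that directly influence both of the given nodes.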
func (ia *influenceAnalyzerImpl) countCommonInfluencers(node1, node2 *TemporalNode) int {
influencers1 := make(map[string]bool)
if influencedBy1, exists := ia.graph.influencedBy[node1.ID]; exists {
for _, influencer := range influencedBy1 {
influencers1[influencer] = true
}
}
common := 0
if influencedBy2, exists := ia.graph.influencedBy[node2.ID]; exists {
for _, influencer := range influencedBy2 {
if influencers1[influencer] {
common++
}
}
}
return common
}
func (ia *influenceAnalyzerImpl) isOnShortestPath(fromID, toID, throughID string) bool {
// Check if throughID lies on a shortest path from fromID to toID
directDistance := ia.findShortestPathLength(fromID, toID)
if directDistance <= 0 {
return false
}
toThrough := ia.findShortestPathLength(fromID, throughID)
fromThrough := ia.findShortestPathLength(throughID, toID)
if toThrough < 0 || fromThrough < 0 {
return false // throughID cannot reach both endpoints
}
return toThrough+fromThrough == directDistance
}
func min(a, b int) int {
if a < b {
return a
}
return b
}