Initial commit - BUBBLE decision tracking system

- Added core BUBBLE architecture with decision envelopes and policy store
- Implemented bundle API with FastAPI skeleton and OpenAPI specification
- Added Go-based storage implementation with SQLite and RocksDB support
- Created integrations for peer sync, vector search, and N8N workflows
- Added comprehensive testing framework and documentation
- Implemented provenance walking and decision checking algorithms

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
anthonyrawlins
2025-08-27 09:35:03 +10:00
commit 013e0dc3f0
50 changed files with 3053 additions and 0 deletions

View File

@@ -0,0 +1,77 @@
import hashlib
import heapq
import json
from collections import deque
def walk_back(start_id, n, query):
    """Breadth-first walk up the ancestor graph from *start_id*, scoring each
    decision record against *query* and returning metadata dicts best-first.

    Args:
        start_id: id of the decision record to start the walk from.
        n: maximum number of hops (depth limit) to traverse.
        query: free-text query used for semantic similarity scoring.

    Returns:
        List of metadata dicts sorted by descending combined score. The
        result is also written through to the walk cache as a side effect.
    """
    visited = set()
    # (neg_score, insertion_idx, metadata): the unique integer idx breaks
    # score ties so tuple comparison never reaches the metadata dicts —
    # comparing two dicts raises TypeError, which the original code hit
    # whenever two records tied on total_score.
    scored = []
    queue = deque([(start_id, 0)])  # (dr_id, hops)
    query_vec = embed(query)  # precompute the query embedding once

    while queue:
        dr_id, depth = queue.popleft()
        if dr_id in visited or depth > n:
            continue
        visited.add(dr_id)

        # Get metadata from RocksDB first, fallback to SQLite
        metadata = get_metadata(dr_id)
        if not metadata:
            continue

        # Score: semantic similarity + proximity bonus + constraint penalty
        sim_score = cosine_similarity(query_vec, embed(metadata['statement']))
        # Closer ancestors score higher; guard n == 0 (walk of only the
        # start node) against ZeroDivisionError.
        dist_score = max(0, (n - depth) / n) if n else 0.0
        constraint_penalty = -0.2 if metadata.get("blocked") else 0
        total_score = (0.6 * sim_score) + (0.3 * dist_score) + constraint_penalty
        scored.append((-total_score, len(scored), metadata))

        # Traverse ancestors
        for anc_id in get_ancestors(dr_id):
            queue.append((anc_id, depth + 1))

    # Ascending sort on negated score == descending by score; the idx field
    # keeps ties in insertion (BFS) order. A plain sort replaces the original
    # heap, which was pushed to but never popped as a heap.
    sorted_results = [md for _, _, md in sorted(scored)]
    cache_walk_results(start_id, n, query, sorted_results)
    return sorted_results
def get_metadata(dr_id):
    """Return the metadata dict for decision record *dr_id*, or None.

    RocksDB is the primary store (key ``meta:<id>``); when the key is
    absent, fall back to the SQLite ``decisions`` table and rebuild the
    dict from the row. Returns None when neither store knows the id.
    """
    cached = rocks.get(f"meta:{dr_id}")
    if cached:
        return deserialize(cached)

    # Cache miss — query the relational store instead.
    row = sqlite_conn.execute("""
        SELECT statement, lifecycle_state, role_exposure, tags, timestamp
        FROM decisions WHERE id=?
    """, (dr_id,)).fetchone()
    if not row:
        return None

    statement, lifecycle_state, role_exposure, tags, timestamp = row
    return {
        "id": dr_id,
        "statement": statement,
        "lifecycle_state": lifecycle_state,
        # role_exposure and tags are stored as JSON text in SQLite.
        "role_exposure": json.loads(role_exposure),
        "tags": json.loads(tags),
        "timestamp": timestamp,
    }
def get_ancestors(dr_id):
    """Return the ids of decision records that influence *dr_id*.

    Reverse-edge lists are cached in RocksDB under ``rev:<id>``; when no
    cache entry exists, scan the SQLite ``edges`` table for incoming
    'influences' edges instead.
    """
    cached = rocks.get(f"rev:{dr_id}")
    if cached:
        return deserialize(cached)

    # Cache miss — pull the incoming edges from the relational store.
    edge_rows = sqlite_conn.execute("""
        SELECT source_id FROM edges
        WHERE target_id=? AND relation='influences'
    """, (dr_id,)).fetchall()
    return [source_id for (source_id,) in edge_rows]
def cache_walk_results(start_id, n, query, results):
    """Persist walk results in RocksDB, keyed by (start_id, n, query).

    The query component of the key is a SHA-256 digest rather than
    ``hash(query)``: Python salts str hashes per process (PYTHONHASHSEED),
    so the built-in hash yields a different key on every run — the cache
    could never be hit across restarts and stale entries would orphan.
    The digest is stable across processes and runs.
    """
    query_digest = hashlib.sha256(query.encode("utf-8")).hexdigest()
    cache_key = f"walkcache:{start_id}:{n}:{query_digest}"
    rocks.put(cache_key, serialize(results))