Implement initial scan logic and council formation for WHOOSH project kickoffs
- Replace incremental sync with full scan for new repositories
- Add initial_scan status to bypass Since parameter filtering
- Implement council formation detection for Design Brief issues
- Add version display to WHOOSH UI header for debugging
- Fix Docker token authentication with trailing newline removal
- Add comprehensive council orchestration with Docker Swarm integration
- Include BACKBEAT prototype integration for distributed timing
- Support council-specific agent roles and deployment strategies
- Transition repositories to active status after content discovery

Key architectural improvements:
- Full scan approach for new project detection vs incremental sync
- Council formation triggered by chorus-entrypoint labeled Design Briefs
- Proper token handling and authentication for Gitea API calls
- Support for both initial discovery and ongoing task monitoring

This enables autonomous project kickoff workflows where Design Brief issues
automatically trigger formation of specialized agent councils for new projects.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
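Two of the changes above are easy to picture in code: trimming the trailing newline that Docker secrets carry before the value is used as a Gitea API token, and skipping the Since filter while a repository is still in initial_scan. The following is a minimal sketch of both ideas; the secret path, the status value, and the function names are illustrative assumptions, not the actual WHOOSH implementation.

package main

import (
	"fmt"
	"net/url"
	"os"
	"strings"
	"time"
)

// loadGiteaToken reads a Docker secret and trims the trailing newline that
// `docker secret create` keeps; without the trim the Authorization header
// becomes "token abc123\n" and the Gitea API rejects the request.
func loadGiteaToken(path string) (string, error) {
	raw, err := os.ReadFile(path)
	if err != nil {
		return "", fmt.Errorf("read token secret: %w", err)
	}
	return strings.TrimSpace(string(raw)), nil
}

// issueQuery builds the Gitea issue-list query; an initial scan omits the
// "since" filter so the whole repository is visited once, while later polls
// stay incremental. Status value and parameter names are illustrative.
func issueQuery(repoStatus string, lastSync time.Time) url.Values {
	q := url.Values{}
	q.Set("state", "open")
	if repoStatus != "initial_scan" {
		q.Set("since", lastSync.Format(time.RFC3339))
	}
	return q
}

func main() {
	token, err := loadGiteaToken("/run/secrets/gitea_token") // illustrative secret path
	if err != nil {
		fmt.Println("token unavailable:", err)
	} else {
		fmt.Println("Authorization: token " + token)
	}
	fmt.Println("initial scan query:", issueQuery("initial_scan", time.Now()).Encode())
	fmt.Println("incremental query: ", issueQuery("active", time.Now().Add(-time.Hour)).Encode())
}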
BACKBEAT-prototype/contracts/tests/conformance_test.go (new file, 533 lines)
@@ -0,0 +1,533 @@
package tests

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"testing"
	"time"

	"github.com/xeipuuv/gojsonschema"
)

// MessageTypes defines the three core BACKBEAT interfaces
const (
	BeatFrameType   = "backbeat.beatframe.v1"
	StatusClaimType = "backbeat.statusclaim.v1"
	BarReportType   = "backbeat.barreport.v1"
)

// BeatFrame represents INT-A: Pulse → All Services
type BeatFrame struct {
	Type       string                 `json:"type"`
	ClusterID  string                 `json:"cluster_id"`
	BeatIndex  int64                  `json:"beat_index"`
	Downbeat   bool                   `json:"downbeat"`
	Phase      string                 `json:"phase"`
	HLC        string                 `json:"hlc"`
	DeadlineAt time.Time              `json:"deadline_at"`
	TempoBPM   float64                `json:"tempo_bpm"`
	WindowID   string                 `json:"window_id"`
	Metadata   map[string]interface{} `json:"metadata,omitempty"`
}

// StatusClaim represents INT-B: Agents → Reverb
type StatusClaim struct {
	Type         string                 `json:"type"`
	AgentID      string                 `json:"agent_id"`
	TaskID       string                 `json:"task_id,omitempty"`
	BeatIndex    int64                  `json:"beat_index"`
	State        string                 `json:"state"`
	BeatsLeft    int                    `json:"beats_left,omitempty"`
	Progress     float64                `json:"progress,omitempty"`
	Notes        string                 `json:"notes,omitempty"`
	HLC          string                 `json:"hlc"`
	Resources    map[string]interface{} `json:"resources,omitempty"`
	Dependencies []string               `json:"dependencies,omitempty"`
	Metadata     map[string]interface{} `json:"metadata,omitempty"`
}

// BarReport represents INT-C: Reverb → All Services
type BarReport struct {
	Type                  string                   `json:"type"`
	WindowID              string                   `json:"window_id"`
	FromBeat              int64                    `json:"from_beat"`
	ToBeat                int64                    `json:"to_beat"`
	AgentsReporting       int                      `json:"agents_reporting"`
	OnTimeReviews         int                      `json:"on_time_reviews"`
	HelpPromisesFulfilled int                      `json:"help_promises_fulfilled"`
	SecretRotationsOK     bool                     `json:"secret_rotations_ok"`
	TempoDriftMS          float64                  `json:"tempo_drift_ms"`
	Issues                []map[string]interface{} `json:"issues,omitempty"`
	Performance           map[string]interface{}   `json:"performance,omitempty"`
	HealthIndicators      map[string]interface{}   `json:"health_indicators,omitempty"`
	Metadata              map[string]interface{}   `json:"metadata,omitempty"`
}

// TestSchemaValidation tests that all JSON schemas are valid and messages conform
func TestSchemaValidation(t *testing.T) {
	schemaDir := "../schemas"

	tests := []struct {
		name        string
		schemaFile  string
		validMsgs   []interface{}
		invalidMsgs []map[string]interface{}
	}{
		{
			name:       "BeatFrame Schema Validation",
			schemaFile: "beatframe-v1.schema.json",
			validMsgs: []interface{}{
				BeatFrame{
					Type:       BeatFrameType,
					ClusterID:  "test-cluster",
					BeatIndex:  100,
					Downbeat:   false,
					Phase:      "execute",
					HLC:        "7ffd:0001:abcd",
					DeadlineAt: time.Now().Add(30 * time.Second),
					TempoBPM:   2.0,
					WindowID:   "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
				},
				BeatFrame{
					Type:       BeatFrameType,
					ClusterID:  "prod",
					BeatIndex:  0,
					Downbeat:   true,
					Phase:      "plan",
					HLC:        "0001:0000:cafe",
					DeadlineAt: time.Now().Add(15 * time.Second),
					TempoBPM:   4.0,
					WindowID:   "a1b2c3d4e5f6789012345678901234ab",
					Metadata: map[string]interface{}{
						"pulse_version":  "1.0.0",
						"cluster_health": "healthy",
					},
				},
			},
			invalidMsgs: []map[string]interface{}{
				// Missing required fields
				{
					"type":       BeatFrameType,
					"cluster_id": "test",
					// missing beat_index, downbeat, phase, etc.
				},
				// Invalid phase
				{
					"type":        BeatFrameType,
					"cluster_id":  "test",
					"beat_index":  0,
					"downbeat":    false,
					"phase":       "invalid_phase",
					"hlc":         "7ffd:0001:abcd",
					"deadline_at": "2025-09-05T12:00:00Z",
					"tempo_bpm":   2.0,
					"window_id":   "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
				},
				// Invalid HLC format
				{
					"type":        BeatFrameType,
					"cluster_id":  "test",
					"beat_index":  0,
					"downbeat":    false,
					"phase":       "plan",
					"hlc":         "invalid-hlc-format",
					"deadline_at": "2025-09-05T12:00:00Z",
					"tempo_bpm":   2.0,
					"window_id":   "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
				},
			},
		},
		{
			name:       "StatusClaim Schema Validation",
			schemaFile: "statusclaim-v1.schema.json",
			validMsgs: []interface{}{
				StatusClaim{
					Type:      StatusClaimType,
					AgentID:   "worker:test-01",
					TaskID:    "task:123",
					BeatIndex: 100,
					State:     "executing",
					BeatsLeft: 3,
					Progress:  0.5,
					Notes:     "processing batch",
					HLC:       "7ffd:0001:beef",
				},
				StatusClaim{
					Type:      StatusClaimType,
					AgentID:   "agent:backup",
					BeatIndex: 101,
					State:     "idle",
					HLC:       "7ffe:0002:dead",
					Resources: map[string]interface{}{
						"cpu_percent": 25.0,
						"memory_mb":   512,
					},
				},
			},
			invalidMsgs: []map[string]interface{}{
				// Missing required fields
				{
					"type":     StatusClaimType,
					"agent_id": "test",
					// missing beat_index, state, hlc
				},
				// Invalid state
				{
					"type":       StatusClaimType,
					"agent_id":   "test",
					"beat_index": 0,
					"state":      "invalid_state",
					"hlc":        "7ffd:0001:abcd",
				},
				// Negative progress
				{
					"type":       StatusClaimType,
					"agent_id":   "test",
					"beat_index": 0,
					"state":      "executing",
					"progress":   -0.1,
					"hlc":        "7ffd:0001:abcd",
				},
			},
		},
		{
			name:       "BarReport Schema Validation",
			schemaFile: "barreport-v1.schema.json",
			validMsgs: []interface{}{
				BarReport{
					Type:                  BarReportType,
					WindowID:              "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
					FromBeat:              0,
					ToBeat:                119,
					AgentsReporting:       150,
					OnTimeReviews:         147,
					HelpPromisesFulfilled: 12,
					SecretRotationsOK:     true,
					TempoDriftMS:          -2.1,
				},
				BarReport{
					Type:                  BarReportType,
					WindowID:              "a1b2c3d4e5f6789012345678901234ab",
					FromBeat:              120,
					ToBeat:                239,
					AgentsReporting:       200,
					OnTimeReviews:         195,
					HelpPromisesFulfilled: 25,
					SecretRotationsOK:     false,
					TempoDriftMS:          15.7,
					Issues: []map[string]interface{}{
						{
							"severity":    "warning",
							"category":    "timing",
							"count":       5,
							"description": "Some agents running late",
						},
					},
				},
			},
			invalidMsgs: []map[string]interface{}{
				// Missing required fields
				{
					"type":      BarReportType,
					"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
					// missing from_beat, to_beat, etc.
				},
				// Invalid window_id format
				{
					"type":                    BarReportType,
					"window_id":               "invalid-window-id",
					"from_beat":               0,
					"to_beat":                 119,
					"agents_reporting":        150,
					"on_time_reviews":         147,
					"help_promises_fulfilled": 12,
					"secret_rotations_ok":     true,
					"tempo_drift_ms":          0.0,
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Load schema
			schemaPath := filepath.Join(schemaDir, tt.schemaFile)
			schemaLoader := gojsonschema.NewReferenceLoader("file://" + schemaPath)

			// Test valid messages
			for i, validMsg := range tt.validMsgs {
				t.Run(fmt.Sprintf("Valid_%d", i), func(t *testing.T) {
					msgBytes, err := json.Marshal(validMsg)
					if err != nil {
						t.Fatalf("Failed to marshal valid message: %v", err)
					}

					docLoader := gojsonschema.NewBytesLoader(msgBytes)
					result, err := gojsonschema.Validate(schemaLoader, docLoader)
					if err != nil {
						t.Fatalf("Schema validation failed: %v", err)
					}

					if !result.Valid() {
						t.Errorf("Valid message failed validation: %v", result.Errors())
					}
				})
			}

			// Test invalid messages
			for i, invalidMsg := range tt.invalidMsgs {
				t.Run(fmt.Sprintf("Invalid_%d", i), func(t *testing.T) {
					msgBytes, err := json.Marshal(invalidMsg)
					if err != nil {
						t.Fatalf("Failed to marshal invalid message: %v", err)
					}

					docLoader := gojsonschema.NewBytesLoader(msgBytes)
					result, err := gojsonschema.Validate(schemaLoader, docLoader)
					if err != nil {
						t.Fatalf("Schema validation failed: %v", err)
					}

					if result.Valid() {
						t.Errorf("Invalid message passed validation when it should have failed")
					}
				})
			}
		})
	}
}

// TestMessageParsing tests that messages can be correctly parsed from JSON
func TestMessageParsing(t *testing.T) {
	tests := []struct {
		name     string
		jsonStr  string
		expected interface{}
	}{
		{
			name: "Parse BeatFrame",
			jsonStr: `{
				"type": "backbeat.beatframe.v1",
				"cluster_id": "test",
				"beat_index": 123,
				"downbeat": true,
				"phase": "review",
				"hlc": "7ffd:0001:abcd",
				"deadline_at": "2025-09-05T12:00:00Z",
				"tempo_bpm": 2.5,
				"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
			}`,
			expected: BeatFrame{
				Type:      BeatFrameType,
				ClusterID: "test",
				BeatIndex: 123,
				Downbeat:  true,
				Phase:     "review",
				HLC:       "7ffd:0001:abcd",
				TempoBPM:  2.5,
				WindowID:  "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
			},
		},
		{
			name: "Parse StatusClaim",
			jsonStr: `{
				"type": "backbeat.statusclaim.v1",
				"agent_id": "worker:01",
				"beat_index": 456,
				"state": "completed",
				"progress": 1.0,
				"hlc": "7ffe:0002:beef"
			}`,
			expected: StatusClaim{
				Type:      StatusClaimType,
				AgentID:   "worker:01",
				BeatIndex: 456,
				State:     "completed",
				Progress:  1.0,
				HLC:       "7ffe:0002:beef",
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			switch expected := tt.expected.(type) {
			case BeatFrame:
				var parsed BeatFrame
				err := json.Unmarshal([]byte(tt.jsonStr), &parsed)
				if err != nil {
					t.Fatalf("Failed to parse BeatFrame: %v", err)
				}

				if parsed.Type != expected.Type ||
					parsed.ClusterID != expected.ClusterID ||
					parsed.BeatIndex != expected.BeatIndex {
					t.Errorf("Parsed BeatFrame doesn't match expected")
				}

			case StatusClaim:
				var parsed StatusClaim
				err := json.Unmarshal([]byte(tt.jsonStr), &parsed)
				if err != nil {
					t.Fatalf("Failed to parse StatusClaim: %v", err)
				}

				if parsed.Type != expected.Type ||
					parsed.AgentID != expected.AgentID ||
					parsed.State != expected.State {
					t.Errorf("Parsed StatusClaim doesn't match expected")
				}
			}
		})
	}
}

// TestHLCValidation tests Hybrid Logical Clock format validation
func TestHLCValidation(t *testing.T) {
	validHLCs := []string{
		"0000:0000:0000",
		"7ffd:0001:abcd",
		"FFFF:FFFF:FFFF",
		"1234:5678:90ab",
	}

	invalidHLCs := []string{
		"invalid",
		"7ffd:0001",         // too short
		"7ffd:0001:abcd:ef", // too long
		"gggg:0001:abcd",    // invalid hex
		"7ffd:0001:abcdz",   // invalid hex
	}

	for _, hlc := range validHLCs {
		t.Run(fmt.Sprintf("Valid_%s", hlc), func(t *testing.T) {
			if !isValidHLC(hlc) {
				t.Errorf("Valid HLC %s was rejected", hlc)
			}
		})
	}

	for _, hlc := range invalidHLCs {
		t.Run(fmt.Sprintf("Invalid_%s", hlc), func(t *testing.T) {
			if isValidHLC(hlc) {
				t.Errorf("Invalid HLC %s was accepted", hlc)
			}
		})
	}
}

// TestWindowIDValidation tests window ID format validation
func TestWindowIDValidation(t *testing.T) {
	validWindowIDs := []string{
		"7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
		"a1b2c3d4e5f6789012345678901234ab",
		"00000000000000000000000000000000",
		"FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF",
	}

	invalidWindowIDs := []string{
		"invalid",
		"7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d",   // too short
		"7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d55", // too long
		"7e9b0e6c4c9a4e59b7f2d9a3c1b2e4g5",  // invalid hex
	}

	for _, windowID := range validWindowIDs {
		t.Run(fmt.Sprintf("Valid_%s", windowID), func(t *testing.T) {
			if !isValidWindowID(windowID) {
				t.Errorf("Valid window ID %s was rejected", windowID)
			}
		})
	}

	for _, windowID := range invalidWindowIDs {
		t.Run(fmt.Sprintf("Invalid_%s", windowID), func(t *testing.T) {
			if isValidWindowID(windowID) {
				t.Errorf("Invalid window ID %s was accepted", windowID)
			}
		})
	}
}

// Helper functions for validation
func isValidHLC(hlc string) bool {
	parts := strings.Split(hlc, ":")
	if len(parts) != 3 {
		return false
	}

	for _, part := range parts {
		if len(part) != 4 {
			return false
		}
		for _, char := range part {
			if !((char >= '0' && char <= '9') || (char >= 'a' && char <= 'f') || (char >= 'A' && char <= 'F')) {
				return false
			}
		}
	}
	return true
}

func isValidWindowID(windowID string) bool {
	if len(windowID) != 32 {
		return false
	}

	for _, char := range windowID {
		if !((char >= '0' && char <= '9') || (char >= 'a' && char <= 'f') || (char >= 'A' && char <= 'F')) {
			return false
		}
	}
	return true
}

// BenchmarkSchemaValidation benchmarks schema validation performance
func BenchmarkSchemaValidation(b *testing.B) {
	schemaDir := "../schemas"
	schemaPath := filepath.Join(schemaDir, "beatframe-v1.schema.json")
	schemaLoader := gojsonschema.NewReferenceLoader("file://" + schemaPath)

	beatFrame := BeatFrame{
		Type:       BeatFrameType,
		ClusterID:  "benchmark",
		BeatIndex:  1000,
		Downbeat:   false,
		Phase:      "execute",
		HLC:        "7ffd:0001:abcd",
		DeadlineAt: time.Now().Add(30 * time.Second),
		TempoBPM:   2.0,
		WindowID:   "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
	}

	msgBytes, _ := json.Marshal(beatFrame)
	docLoader := gojsonschema.NewBytesLoader(msgBytes)

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		result, err := gojsonschema.Validate(schemaLoader, docLoader)
		if err != nil || !result.Valid() {
			b.Fatal("Validation failed")
		}
	}
}

// Helper function to check if schema files exist
func TestSchemaFilesExist(t *testing.T) {
	schemaDir := "../schemas"
	requiredSchemas := []string{
		"beatframe-v1.schema.json",
		"statusclaim-v1.schema.json",
		"barreport-v1.schema.json",
	}

	for _, schema := range requiredSchemas {
		schemaPath := filepath.Join(schemaDir, schema)
		if _, err := os.Stat(schemaPath); os.IsNotExist(err) {
			t.Errorf("Required schema file %s does not exist", schemaPath)
		}
	}
}
@@ -0,0 +1,275 @@
|
||||
[
|
||||
{
|
||||
"description": "Missing required field 'from_beat'",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["from_beat is required"]
|
||||
},
|
||||
{
|
||||
"description": "Missing required field 'agents_reporting'",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["agents_reporting is required"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid window_id format (too short)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["window_id must be exactly 32 hex characters"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid window_id format (non-hex characters)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4g5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["window_id must match pattern ^[0-9a-fA-F]{32}$"]
|
||||
},
|
||||
{
|
||||
"description": "Negative from_beat",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": -1,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["from_beat must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Negative agents_reporting",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": -1,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["agents_reporting must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Negative on_time_reviews",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": -1,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["on_time_reviews must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Too many issues (over 100)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": []
|
||||
},
|
||||
"note": "This would need 101 issues to properly test, generating dynamically in actual test"
|
||||
},
|
||||
{
|
||||
"description": "Issue with invalid severity",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "invalid_severity",
|
||||
"category": "timing",
|
||||
"count": 1,
|
||||
"description": "Some issue"
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_errors": ["issue.severity must be one of: info, warning, error, critical"]
|
||||
},
|
||||
{
|
||||
"description": "Issue with invalid category",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "invalid_category",
|
||||
"count": 1,
|
||||
"description": "Some issue"
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_errors": ["issue.category must be one of: timing, failed_tasks, missing_agents, resource_exhaustion, network_partition, credential_failure, data_corruption, unknown"]
|
||||
},
|
||||
{
|
||||
"description": "Issue with zero count",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "timing",
|
||||
"count": 0,
|
||||
"description": "Some issue"
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_errors": ["issue.count must be >= 1"]
|
||||
},
|
||||
{
|
||||
"description": "Issue with description too long (over 512 chars)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "timing",
|
||||
"count": 1,
|
||||
"description": "This is a very long description that exceeds the maximum allowed length of 512 characters for issue descriptions in BACKBEAT BarReport messages. This constraint is in place to prevent excessively large messages and ensure that issue descriptions remain concise and actionable. The system should reject this message because the description field contains more than 512 characters and violates the schema validation rules that have been carefully designed to maintain message size limits and system performance characteristics."
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_errors": ["issue.description must be at most 512 characters"]
|
||||
},
|
||||
{
|
||||
"description": "Issue with too many affected agents (over 50)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "timing",
|
||||
"count": 1,
|
||||
"description": "Too many affected agents",
|
||||
"affected_agents": [
|
||||
"agent1", "agent2", "agent3", "agent4", "agent5", "agent6", "agent7", "agent8", "agent9", "agent10",
|
||||
"agent11", "agent12", "agent13", "agent14", "agent15", "agent16", "agent17", "agent18", "agent19", "agent20",
|
||||
"agent21", "agent22", "agent23", "agent24", "agent25", "agent26", "agent27", "agent28", "agent29", "agent30",
|
||||
"agent31", "agent32", "agent33", "agent34", "agent35", "agent36", "agent37", "agent38", "agent39", "agent40",
|
||||
"agent41", "agent42", "agent43", "agent44", "agent45", "agent46", "agent47", "agent48", "agent49", "agent50",
|
||||
"agent51"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"expected_errors": ["issue.affected_agents must have at most 50 items"]
|
||||
},
|
||||
{
|
||||
"description": "Wrong message type",
|
||||
"message": {
|
||||
"type": "backbeat.wrongtype.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1
|
||||
},
|
||||
"expected_errors": ["type must be 'backbeat.barreport.v1'"]
|
||||
},
|
||||
{
|
||||
"description": "Extra unknown properties (should fail with additionalProperties: false)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 150,
|
||||
"on_time_reviews": 147,
|
||||
"help_promises_fulfilled": 12,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -2.1,
|
||||
"unknown_field": "should not be allowed"
|
||||
},
|
||||
"expected_errors": ["Additional property unknown_field is not allowed"]
|
||||
}
|
||||
]
|
||||
BACKBEAT-prototype/contracts/tests/examples/barreport-valid.json (new file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
[
|
||||
{
|
||||
"description": "Healthy cluster with good performance",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"from_beat": 240,
|
||||
"to_beat": 359,
|
||||
"agents_reporting": 978,
|
||||
"on_time_reviews": 942,
|
||||
"help_promises_fulfilled": 87,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": 7.3,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "timing",
|
||||
"count": 12,
|
||||
"description": "Some agents consistently reporting 50ms+ late",
|
||||
"affected_agents": ["worker:batch-03", "indexer:shard-7"],
|
||||
"first_seen_beat": 245,
|
||||
"last_seen_beat": 358
|
||||
}
|
||||
],
|
||||
"performance": {
|
||||
"avg_response_time_ms": 45.2,
|
||||
"p95_response_time_ms": 125.7,
|
||||
"total_tasks_completed": 15678,
|
||||
"total_tasks_failed": 23,
|
||||
"peak_concurrent_agents": 1203,
|
||||
"network_bytes_transferred": 67890123
|
||||
},
|
||||
"health_indicators": {
|
||||
"cluster_sync_score": 0.94,
|
||||
"resource_utilization": 0.67,
|
||||
"collaboration_efficiency": 0.89,
|
||||
"error_rate": 0.001
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Small development cluster with perfect sync",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "a1b2c3d4e5f6789012345678901234ab",
|
||||
"from_beat": 0,
|
||||
"to_beat": 119,
|
||||
"agents_reporting": 5,
|
||||
"on_time_reviews": 5,
|
||||
"help_promises_fulfilled": 2,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -0.1,
|
||||
"issues": []
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Cluster with multiple serious issues",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "fedcba9876543210fedcba9876543210",
|
||||
"from_beat": 1200,
|
||||
"to_beat": 1319,
|
||||
"agents_reporting": 450,
|
||||
"on_time_reviews": 380,
|
||||
"help_promises_fulfilled": 15,
|
||||
"secret_rotations_ok": false,
|
||||
"tempo_drift_ms": 125.7,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "critical",
|
||||
"category": "credential_failure",
|
||||
"count": 3,
|
||||
"description": "Failed to rotate database credentials",
|
||||
"affected_agents": ["db-manager:primary", "backup:secondary"],
|
||||
"first_seen_beat": 1205,
|
||||
"last_seen_beat": 1318
|
||||
},
|
||||
{
|
||||
"severity": "error",
|
||||
"category": "network_partition",
|
||||
"count": 1,
|
||||
"description": "Lost connection to east coast data center",
|
||||
"affected_agents": ["worker:east-01", "worker:east-02", "worker:east-03"],
|
||||
"first_seen_beat": 1210,
|
||||
"last_seen_beat": 1319
|
||||
},
|
||||
{
|
||||
"severity": "warning",
|
||||
"category": "resource_exhaustion",
|
||||
"count": 45,
|
||||
"description": "High memory usage detected",
|
||||
"affected_agents": ["ml-trainer:gpu-01"],
|
||||
"first_seen_beat": 1200,
|
||||
"last_seen_beat": 1315
|
||||
}
|
||||
],
|
||||
"performance": {
|
||||
"avg_response_time_ms": 180.5,
|
||||
"p95_response_time_ms": 450.0,
|
||||
"total_tasks_completed": 5432,
|
||||
"total_tasks_failed": 123,
|
||||
"peak_concurrent_agents": 487,
|
||||
"network_bytes_transferred": 23456789
|
||||
},
|
||||
"health_indicators": {
|
||||
"cluster_sync_score": 0.72,
|
||||
"resource_utilization": 0.95,
|
||||
"collaboration_efficiency": 0.45,
|
||||
"error_rate": 0.022
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "High-frequency cluster report (8 BPM tempo)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "0123456789abcdef0123456789abcdef",
|
||||
"from_beat": 960,
|
||||
"to_beat": 1079,
|
||||
"agents_reporting": 2000,
|
||||
"on_time_reviews": 1985,
|
||||
"help_promises_fulfilled": 156,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": 3.2,
|
||||
"issues": [
|
||||
{
|
||||
"severity": "info",
|
||||
"category": "timing",
|
||||
"count": 15,
|
||||
"description": "Minor timing variations detected",
|
||||
"first_seen_beat": 965,
|
||||
"last_seen_beat": 1078
|
||||
}
|
||||
],
|
||||
"performance": {
|
||||
"avg_response_time_ms": 25.1,
|
||||
"p95_response_time_ms": 67.3,
|
||||
"total_tasks_completed": 45678,
|
||||
"total_tasks_failed": 12,
|
||||
"peak_concurrent_agents": 2100,
|
||||
"network_bytes_transferred": 123456789
|
||||
},
|
||||
"health_indicators": {
|
||||
"cluster_sync_score": 0.98,
|
||||
"resource_utilization": 0.78,
|
||||
"collaboration_efficiency": 0.92,
|
||||
"error_rate": 0.0003
|
||||
},
|
||||
"metadata": {
|
||||
"reverb_version": "1.3.0",
|
||||
"report_generation_time_ms": 45.7,
|
||||
"next_window_id": "fedcba0987654321fedcba0987654321"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Minimal valid bar report (only required fields)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "1111222233334444555566667777888",
|
||||
"from_beat": 600,
|
||||
"to_beat": 719,
|
||||
"agents_reporting": 1,
|
||||
"on_time_reviews": 1,
|
||||
"help_promises_fulfilled": 0,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": 0.0
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Empty issues array (valid)",
|
||||
"message": {
|
||||
"type": "backbeat.barreport.v1",
|
||||
"window_id": "9999aaaa0000bbbb1111cccc2222dddd",
|
||||
"from_beat": 480,
|
||||
"to_beat": 599,
|
||||
"agents_reporting": 100,
|
||||
"on_time_reviews": 98,
|
||||
"help_promises_fulfilled": 25,
|
||||
"secret_rotations_ok": true,
|
||||
"tempo_drift_ms": -1.5,
|
||||
"issues": [],
|
||||
"performance": {
|
||||
"avg_response_time_ms": 50.0,
|
||||
"total_tasks_completed": 1000,
|
||||
"total_tasks_failed": 2
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,152 @@
|
||||
[
|
||||
{
|
||||
"description": "Missing required field 'beat_index'",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"downbeat": false,
|
||||
"phase": "execute",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["beat_index is required"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid phase value",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "invalid_phase",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["phase must be one of: plan, execute, review"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid HLC format (wrong number of segments)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["hlc must match pattern ^[0-9a-fA-F]{4}:[0-9a-fA-F]{4}:[0-9a-fA-F]{4}$"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid HLC format (non-hex characters)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "gggg:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["hlc must match pattern ^[0-9a-fA-F]{4}:[0-9a-fA-F]{4}:[0-9a-fA-F]{4}$"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid window_id format (too short)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d"
|
||||
},
|
||||
"expected_errors": ["window_id must be exactly 32 hex characters"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid tempo_bpm (too low)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 0.05,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["tempo_bpm must be at least 0.1"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid tempo_bpm (too high)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 1001.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["tempo_bpm must be at most 1000"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid beat_index (negative)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": -1,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["beat_index must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Wrong message type",
|
||||
"message": {
|
||||
"type": "backbeat.wrongtype.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
|
||||
},
|
||||
"expected_errors": ["type must be 'backbeat.beatframe.v1'"]
|
||||
},
|
||||
{
|
||||
"description": "Extra unknown properties (should fail with additionalProperties: false)",
|
||||
"message": {
|
||||
"type": "backbeat.beatframe.v1",
|
||||
"cluster_id": "test",
|
||||
"beat_index": 0,
|
||||
"downbeat": false,
|
||||
"phase": "plan",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"deadline_at": "2025-09-05T12:00:00Z",
|
||||
"tempo_bpm": 2.0,
|
||||
"window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
|
||||
"unknown_field": "should not be allowed"
|
||||
},
|
||||
"expected_errors": ["Additional property unknown_field is not allowed"]
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,82 @@
[
  {
    "description": "Standard beat frame during execute phase",
    "message": {
      "type": "backbeat.beatframe.v1",
      "cluster_id": "chorus-prod",
      "beat_index": 1337,
      "downbeat": false,
      "phase": "execute",
      "hlc": "7ffd:0001:abcd",
      "deadline_at": "2025-09-05T12:30:00Z",
      "tempo_bpm": 2.0,
      "window_id": "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5"
    }
  },
  {
    "description": "Downbeat starting new bar in plan phase",
    "message": {
      "type": "backbeat.beatframe.v1",
      "cluster_id": "dev-cluster",
      "beat_index": 0,
      "downbeat": true,
      "phase": "plan",
      "hlc": "0001:0000:cafe",
      "deadline_at": "2025-09-05T12:00:30Z",
      "tempo_bpm": 4.0,
      "window_id": "a1b2c3d4e5f6789012345678901234ab"
    }
  },
  {
    "description": "High-frequency beat with metadata",
    "message": {
      "type": "backbeat.beatframe.v1",
      "cluster_id": "fast-cluster",
      "beat_index": 999999,
      "downbeat": false,
      "phase": "review",
      "hlc": "abcd:ef01:2345",
      "deadline_at": "2025-09-05T12:00:07.5Z",
      "tempo_bpm": 8.0,
      "window_id": "fedcba9876543210fedcba9876543210",
      "metadata": {
        "pulse_version": "1.2.3",
        "cluster_health": "healthy",
        "expected_agents": 150
      }
    }
  },
  {
    "description": "Low-frequency beat (1 BPM = 60 second beats)",
    "message": {
      "type": "backbeat.beatframe.v1",
      "cluster_id": "slow-batch",
      "beat_index": 42,
      "downbeat": true,
      "phase": "plan",
      "hlc": "FFFF:FFFF:FFFF",
      "deadline_at": "2025-09-05T13:00:00Z",
      "tempo_bpm": 1.0,
      "window_id": "0123456789abcdef0123456789abcdef",
      "metadata": {
        "pulse_version": "2.0.0",
        "cluster_health": "degraded",
        "expected_agents": 5
      }
    }
  },
  {
    "description": "Minimal valid beat frame (no optional fields)",
    "message": {
      "type": "backbeat.beatframe.v1",
      "cluster_id": "minimal",
      "beat_index": 1,
      "downbeat": false,
      "phase": "execute",
      "hlc": "0000:0001:0002",
      "deadline_at": "2025-09-05T12:01:00Z",
      "tempo_bpm": 2.0,
      "window_id": "1234567890abcdef1234567890abcdef"
    }
  }
]
@@ -0,0 +1,189 @@
|
||||
[
|
||||
{
|
||||
"description": "Missing required field 'beat_index'",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["beat_index is required"]
|
||||
},
|
||||
{
|
||||
"description": "Missing required field 'state'",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["state is required"]
|
||||
},
|
||||
{
|
||||
"description": "Missing required field 'hlc'",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing"
|
||||
},
|
||||
"expected_errors": ["hlc is required"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid state value",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "invalid_state",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["state must be one of: idle, planning, executing, reviewing, completed, failed, blocked, helping"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid progress value (negative)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"progress": -0.1,
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["progress must be between 0.0 and 1.0"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid progress value (greater than 1.0)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"progress": 1.1,
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["progress must be between 0.0 and 1.0"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid beats_left (negative)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"beats_left": -1,
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["beats_left must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid beats_left (too high)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"beats_left": 1001,
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["beats_left must be <= 1000"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid beat_index (negative)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": -1,
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["beat_index must be >= 0"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid HLC format",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"hlc": "invalid-hlc"
|
||||
},
|
||||
"expected_errors": ["hlc must match pattern ^[0-9a-fA-F]{4}:[0-9a-fA-F]{4}:[0-9a-fA-F]{4}$"]
|
||||
},
|
||||
{
|
||||
"description": "Notes too long (over 256 characters)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"notes": "This is a very long notes field that exceeds the maximum allowed length of 256 characters. This should fail validation because it contains too much text and violates the maxLength constraint that was set to keep status messages concise and prevent excessive message sizes in the BACKBEAT system.",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["notes must be at most 256 characters"]
|
||||
},
|
||||
{
|
||||
"description": "Too many dependencies (over 50)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "blocked",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"dependencies": [
|
||||
"dep1", "dep2", "dep3", "dep4", "dep5", "dep6", "dep7", "dep8", "dep9", "dep10",
|
||||
"dep11", "dep12", "dep13", "dep14", "dep15", "dep16", "dep17", "dep18", "dep19", "dep20",
|
||||
"dep21", "dep22", "dep23", "dep24", "dep25", "dep26", "dep27", "dep28", "dep29", "dep30",
|
||||
"dep31", "dep32", "dep33", "dep34", "dep35", "dep36", "dep37", "dep38", "dep39", "dep40",
|
||||
"dep41", "dep42", "dep43", "dep44", "dep45", "dep46", "dep47", "dep48", "dep49", "dep50",
|
||||
"dep51"
|
||||
]
|
||||
},
|
||||
"expected_errors": ["dependencies must have at most 50 items"]
|
||||
},
|
||||
{
|
||||
"description": "Invalid agent_id format (empty)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["agent_id must be at least 1 character"]
|
||||
},
|
||||
{
|
||||
"description": "Agent_id too long (over 128 characters)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "this_is_a_very_long_agent_id_that_exceeds_the_maximum_allowed_length_of_128_characters_and_should_fail_validation_because_it_is_too_long_for_the_system_to_handle_properly",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["agent_id must be at most 128 characters"]
|
||||
},
|
||||
{
|
||||
"description": "Wrong message type",
|
||||
"message": {
|
||||
"type": "backbeat.wrongtype.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd"
|
||||
},
|
||||
"expected_errors": ["type must be 'backbeat.statusclaim.v1'"]
|
||||
},
|
||||
{
|
||||
"description": "Extra unknown properties (should fail with additionalProperties: false)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "test:agent",
|
||||
"beat_index": 100,
|
||||
"state": "executing",
|
||||
"hlc": "7ffd:0001:abcd",
|
||||
"unknown_field": "should not be allowed"
|
||||
},
|
||||
"expected_errors": ["Additional property unknown_field is not allowed"]
|
||||
}
|
||||
]
|
||||
@@ -0,0 +1,135 @@
|
||||
[
|
||||
{
|
||||
"description": "Worker executing a batch processing task",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "search-indexer:worker-03",
|
||||
"task_id": "index-batch:20250905-120",
|
||||
"beat_index": 1337,
|
||||
"state": "executing",
|
||||
"beats_left": 3,
|
||||
"progress": 0.65,
|
||||
"notes": "processing batch 120/200",
|
||||
"hlc": "7ffd:0001:beef",
|
||||
"resources": {
|
||||
"cpu_percent": 85.0,
|
||||
"memory_mb": 2048,
|
||||
"disk_io_ops": 1250,
|
||||
"network_kb": 512
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Failed backup agent with error details",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "agent:backup-runner",
|
||||
"beat_index": 1338,
|
||||
"state": "failed",
|
||||
"beats_left": 0,
|
||||
"progress": 0.0,
|
||||
"notes": "connection timeout to storage backend",
|
||||
"hlc": "7ffe:0002:dead",
|
||||
"metadata": {
|
||||
"agent_version": "2.1.0",
|
||||
"error_code": "STORAGE_TIMEOUT",
|
||||
"retry_count": 3
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "ML trainer helping another node",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "ml-trainer:gpu-node-1",
|
||||
"beat_index": 1336,
|
||||
"state": "helping",
|
||||
"progress": 1.0,
|
||||
"notes": "completed own work, assisting node-2 with large model",
|
||||
"hlc": "7ffc:0005:cafe",
|
||||
"dependencies": ["ml-trainer:gpu-node-2"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Idle agent waiting for work",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "worker:standby-01",
|
||||
"beat_index": 1339,
|
||||
"state": "idle",
|
||||
"progress": 0.0,
|
||||
"hlc": "8000:0000:1111"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Agent in planning phase",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "coordinator:main",
|
||||
"task_id": "deploy:v2.1.0",
|
||||
"beat_index": 1340,
|
||||
"state": "planning",
|
||||
"beats_left": 5,
|
||||
"progress": 0.2,
|
||||
"notes": "analyzing dependency graph",
|
||||
"hlc": "8001:0001:2222",
|
||||
"resources": {
|
||||
"cpu_percent": 15.0,
|
||||
"memory_mb": 512
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Reviewing agent with completed task",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "quality-checker:auto",
|
||||
"task_id": "validate:batch-45",
|
||||
"beat_index": 1341,
|
||||
"state": "reviewing",
|
||||
"beats_left": 1,
|
||||
"progress": 0.9,
|
||||
"notes": "final verification of output quality",
|
||||
"hlc": "8002:0002:3333"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Completed agent ready for next task",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "processor:fast-01",
|
||||
"task_id": "process:item-567",
|
||||
"beat_index": 1342,
|
||||
"state": "completed",
|
||||
"beats_left": 0,
|
||||
"progress": 1.0,
|
||||
"notes": "item processed successfully",
|
||||
"hlc": "8003:0003:4444"
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Blocked agent waiting for external dependency",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "data-loader:external",
|
||||
"task_id": "load:dataset-789",
|
||||
"beat_index": 1343,
|
||||
"state": "blocked",
|
||||
"beats_left": 10,
|
||||
"progress": 0.1,
|
||||
"notes": "waiting for external API rate limit reset",
|
||||
"hlc": "8004:0004:5555",
|
||||
"dependencies": ["external-api:rate-limiter"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"description": "Minimal valid status claim (only required fields)",
|
||||
"message": {
|
||||
"type": "backbeat.statusclaim.v1",
|
||||
"agent_id": "simple:agent",
|
||||
"beat_index": 1344,
|
||||
"state": "idle",
|
||||
"hlc": "8005:0005:6666"
|
||||
}
|
||||
}
|
||||
]
|
||||
BACKBEAT-prototype/contracts/tests/integration/Makefile (new file, 206 lines)
@@ -0,0 +1,206 @@
|
||||
# BACKBEAT Contracts CI Integration Makefile
|
||||
|
||||
# Variables
|
||||
SCHEMA_DIR = ../../schemas
|
||||
EXAMPLES_DIR = ../examples
|
||||
CLI_TOOL = ./cmd/backbeat-validate
|
||||
BINARY_NAME = backbeat-validate
|
||||
|
||||
# Default target
|
||||
.PHONY: all
|
||||
all: build test
|
||||
|
||||
# Build the CLI validation tool
|
||||
.PHONY: build
|
||||
build:
|
||||
@echo "Building BACKBEAT validation CLI tool..."
|
||||
go build -o $(BINARY_NAME) $(CLI_TOOL)
|
||||
|
||||
# Run all tests
|
||||
.PHONY: test
|
||||
test: test-schemas test-examples test-integration
|
||||
|
||||
# Test schema files are valid
|
||||
.PHONY: test-schemas
|
||||
test-schemas:
|
||||
@echo "Testing JSON schema files..."
|
||||
@for schema in $(SCHEMA_DIR)/*.schema.json; do \
|
||||
echo "Validating schema: $$schema"; \
|
||||
python3 -c "import json; json.load(open('$$schema'))" || exit 1; \
|
||||
done
|
||||
|
||||
# Test all example files
|
||||
.PHONY: test-examples
|
||||
test-examples: build
|
||||
@echo "Testing example messages..."
|
||||
./$(BINARY_NAME) --schemas $(SCHEMA_DIR) --dir $(EXAMPLES_DIR)
|
||||
|
||||
# Run Go integration tests
|
||||
.PHONY: test-integration
|
||||
test-integration:
|
||||
@echo "Running Go integration tests..."
|
||||
go test -v ./...
|
||||
|
||||
# Validate built-in examples
|
||||
.PHONY: validate-examples
|
||||
validate-examples: build
|
||||
@echo "Validating built-in examples..."
|
||||
./$(BINARY_NAME) --schemas $(SCHEMA_DIR) --examples
|
||||
|
||||
# Validate a specific directory (for CI use)
|
||||
.PHONY: validate-dir
|
||||
validate-dir: build
|
||||
@if [ -z "$(DIR)" ]; then \
|
||||
echo "Usage: make validate-dir DIR=/path/to/messages"; \
|
||||
exit 1; \
|
||||
fi
|
||||
./$(BINARY_NAME) --schemas $(SCHEMA_DIR) --dir $(DIR) --exit-code
|
||||
|
||||
# Validate a specific file (for CI use)
|
||||
.PHONY: validate-file
|
||||
validate-file: build
|
||||
@if [ -z "$(FILE)" ]; then \
|
||||
echo "Usage: make validate-file FILE=/path/to/message.json"; \
|
||||
exit 1; \
|
||||
fi
|
||||
./$(BINARY_NAME) --schemas $(SCHEMA_DIR) --file $(FILE) --exit-code
|
||||
|
||||
# Clean build artifacts
|
||||
.PHONY: clean
|
||||
clean:
|
||||
rm -f $(BINARY_NAME)
|
||||
|
||||
# Install dependencies
|
||||
.PHONY: deps
|
||||
deps:
|
||||
go mod tidy
|
||||
go mod download
|
||||
|
||||
# Format Go code
|
||||
.PHONY: fmt
|
||||
fmt:
|
||||
go fmt ./...
|
||||
|
||||
# Run static analysis
|
||||
.PHONY: lint
|
||||
lint:
|
||||
go vet ./...
|
||||
|
||||
# Generate CI configuration examples
|
||||
.PHONY: examples
|
||||
examples: generate-github-actions generate-gitlab-ci generate-makefile-example
|
||||
|
||||
# Generate GitHub Actions workflow
|
||||
.PHONY: generate-github-actions
|
||||
generate-github-actions:
|
||||
@echo "Generating GitHub Actions workflow..."
|
||||
@mkdir -p ci-examples
|
||||
@cat > ci-examples/github-actions.yml << 'EOF'\
|
||||
name: BACKBEAT Contract Validation\
|
||||
\
|
||||
on:\
|
||||
push:\
|
||||
branches: [ main, develop ]\
|
||||
pull_request:\
|
||||
branches: [ main ]\
|
||||
\
|
||||
jobs:\
|
||||
validate-backbeat-messages:\
|
||||
runs-on: ubuntu-latest\
|
||||
\
|
||||
steps:\
|
||||
- uses: actions/checkout@v4\
|
||||
with:\
|
||||
repository: 'chorus-services/backbeat'\
|
||||
path: 'backbeat-contracts'\
|
||||
\
|
||||
- uses: actions/checkout@v4\
|
||||
with:\
|
||||
path: 'current-repo'\
|
||||
\
|
||||
- name: Set up Go\
|
||||
uses: actions/setup-go@v4\
|
||||
with:\
|
||||
go-version: '1.22'\
|
||||
\
|
||||
- name: Build BACKBEAT validator\
|
||||
run: |\
|
||||
cd backbeat-contracts/contracts/tests/integration\
|
||||
make build\
|
||||
\
|
||||
- name: Validate BACKBEAT messages\
|
||||
run: |\
|
||||
cd backbeat-contracts/contracts/tests/integration\
|
||||
./backbeat-validate \\\
|
||||
--schemas ../../schemas \\\
|
||||
--dir ../../../current-repo/path/to/messages \\\
|
||||
--exit-code\
|
||||
EOF
|
||||
|
||||
# Generate GitLab CI configuration
|
||||
.PHONY: generate-gitlab-ci
|
||||
generate-gitlab-ci:
|
||||
@echo "Generating GitLab CI configuration..."
|
||||
@mkdir -p ci-examples
|
||||
@cat > ci-examples/gitlab-ci.yml << 'EOF'\
|
||||
validate-backbeat-contracts:\
|
||||
stage: test\
|
||||
image: golang:1.22\
|
||||
\
|
||||
before_script:\
|
||||
- git clone https://github.com/chorus-services/backbeat.git /tmp/backbeat\
|
||||
- cd /tmp/backbeat/contracts/tests/integration\
|
||||
- make deps build\
|
||||
\
|
||||
script:\
|
||||
- /tmp/backbeat/contracts/tests/integration/backbeat-validate \\\
|
||||
--schemas /tmp/backbeat/contracts/schemas \\\
|
||||
--dir $$CI_PROJECT_DIR/path/to/messages \\\
|
||||
--exit-code\
|
||||
\
|
||||
only:\
|
||||
- merge_requests\
|
||||
- main\
|
||||
- develop\
|
||||
EOF
|
||||
|
||||
# Generate example Makefile for downstream projects
|
||||
.PHONY: generate-makefile-example
|
||||
generate-makefile-example:
|
||||
@echo "Generating example Makefile for downstream projects..."
|
||||
@mkdir -p ci-examples
|
||||
@echo "# Example Makefile for BACKBEAT contract validation" > ci-examples/downstream-makefile
|
||||
@echo "" >> ci-examples/downstream-makefile
|
||||
@echo "BACKBEAT_REPO = https://github.com/chorus-services/backbeat.git" >> ci-examples/downstream-makefile
|
||||
@echo "BACKBEAT_DIR = .backbeat-contracts" >> ci-examples/downstream-makefile
|
||||
@echo "" >> ci-examples/downstream-makefile
|
||||
@echo "validate-backbeat:" >> ci-examples/downstream-makefile
|
||||
@echo " git clone \$$(BACKBEAT_REPO) \$$(BACKBEAT_DIR) 2>/dev/null || true" >> ci-examples/downstream-makefile
|
||||
@echo " cd \$$(BACKBEAT_DIR)/contracts/tests/integration && make build" >> ci-examples/downstream-makefile
|
||||
@echo " \$$(BACKBEAT_DIR)/contracts/tests/integration/backbeat-validate --schemas \$$(BACKBEAT_DIR)/contracts/schemas --dir messages --exit-code" >> ci-examples/downstream-makefile
|
||||

# Help target
.PHONY: help
help:
	@echo "BACKBEAT Contracts CI Integration Makefile"
	@echo ""
	@echo "Available targets:"
	@echo "  all                      - Build and test everything"
	@echo "  build                    - Build the CLI validation tool"
	@echo "  test                     - Run all tests"
	@echo "  test-schemas             - Validate JSON schema files"
	@echo "  test-examples            - Test example message files"
	@echo "  test-integration         - Run Go integration tests"
	@echo "  validate-examples        - Validate built-in examples"
	@echo "  validate-dir DIR=path    - Validate messages in directory"
	@echo "  validate-file FILE=path  - Validate single message file"
	@echo "  clean                    - Clean build artifacts"
	@echo "  deps                     - Install Go dependencies"
	@echo "  fmt                      - Format Go code"
	@echo "  lint                     - Run static analysis"
	@echo "  examples                 - Generate CI configuration examples"
	@echo "  help                     - Show this help message"
	@echo ""
	@echo "Examples:"
	@echo "  make validate-dir DIR=../../../examples"
	@echo "  make validate-file FILE=../../../examples/beatframe-valid.json"
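
The `validate-dir` and `validate-file` targets above, like the generated GitHub Actions and GitLab jobs, point the validator at a directory of JSON files containing BACKBEAT messages. As a rough sketch of what such a fixture could look like, the snippet below writes a minimal `backbeat.statusclaim.v1` message (mirroring the `statusclaim_minimal` entry in `ExampleMessages` further down) to `messages/statusclaim.json`; the directory name and file layout are illustrative assumptions, not part of the contract.

// write_fixture.go — illustrative only; produces a file for "make validate-dir DIR=messages".
package main

import (
	"encoding/json"
	"log"
	"os"
	"path/filepath"
)

func main() {
	// Minimal backbeat.statusclaim.v1 message with only the required fields.
	msg := map[string]interface{}{
		"type":       "backbeat.statusclaim.v1",
		"agent_id":   "test:agent",
		"beat_index": 100,
		"state":      "idle",
		"hlc":        "7ffd:0001:abcd",
	}

	if err := os.MkdirAll("messages", 0o755); err != nil {
		log.Fatal(err)
	}

	data, err := json.MarshalIndent(msg, "", "  ")
	if err != nil {
		log.Fatal(err)
	}

	if err := os.WriteFile(filepath.Join("messages", "statusclaim.json"), data, 0o644); err != nil {
		log.Fatal(err)
	}
}
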
279
BACKBEAT-prototype/contracts/tests/integration/ci_helper.go
Normal file
@@ -0,0 +1,279 @@
// Package integration provides CI helper functions for BACKBEAT contract testing
package integration

import (
	"encoding/json"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
)

// CIHelper provides utilities for continuous integration testing
type CIHelper struct {
	validator *MessageValidator
}

// NewCIHelper creates a new CI helper with a message validator
func NewCIHelper(schemaDir string) (*CIHelper, error) {
	validator, err := NewMessageValidator(schemaDir)
	if err != nil {
		return nil, fmt.Errorf("failed to create validator: %w", err)
	}

	return &CIHelper{
		validator: validator,
	}, nil
}

// ValidateDirectory validates all JSON files in a directory against BACKBEAT schemas
func (ci *CIHelper) ValidateDirectory(dir string) (*DirectoryValidationResult, error) {
	result := &DirectoryValidationResult{
		Directory: dir,
		Files:     make(map[string]*FileValidationResult),
	}

	err := filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		// Skip non-JSON files
		if d.IsDir() || !strings.HasSuffix(strings.ToLower(path), ".json") {
			return nil
		}

		fileResult, validateErr := ci.validateFile(path)
		if validateErr != nil {
			result.Errors = append(result.Errors, fmt.Sprintf("Failed to validate %s: %v", path, validateErr))
		} else {
			relPath, _ := filepath.Rel(dir, path)
			result.Files[relPath] = fileResult
			result.TotalFiles++
			if fileResult.AllValid {
				result.ValidFiles++
			} else {
				result.InvalidFiles++
			}
		}

		return nil
	})

	if err != nil {
		return nil, fmt.Errorf("failed to walk directory: %w", err)
	}

	// Guard against division by zero when no BACKBEAT message files were found;
	// a NaN rate would also break JSON encoding of the result.
	if result.TotalFiles > 0 {
		result.ValidationRate = float64(result.ValidFiles) / float64(result.TotalFiles)
	}
	return result, nil
}

// validateFile validates a single JSON file
func (ci *CIHelper) validateFile(filePath string) (*FileValidationResult, error) {
	data, err := os.ReadFile(filePath)
	if err != nil {
		return nil, fmt.Errorf("failed to read file: %w", err)
	}

	result := &FileValidationResult{
		FilePath: filePath,
		AllValid: true,
	}

	// Try to parse as single message first
	var singleMessage map[string]interface{}
	if err := json.Unmarshal(data, &singleMessage); err == nil {
		if msgType, hasType := singleMessage["type"].(string); hasType && ci.validator.IsMessageTypeSupported(msgType) {
			// Single BACKBEAT message
			validationResult, validateErr := ci.validator.ValidateMessage(data)
			if validateErr != nil {
				return nil, validateErr
			}
			result.Messages = []*ValidationResult{validationResult}
			result.AllValid = validationResult.Valid
			return result, nil
		}
	}

	// Try to parse as examples format (array with description and message fields).
	// This check must run before the plain message array check: example wrappers are
	// ordinary JSON objects and would otherwise be misread as (invalid) bare messages.
	var examples []ExampleMessage
	if err := json.Unmarshal(data, &examples); err == nil && len(examples) > 0 {
		isExampleFormat := true
		for _, example := range examples {
			if example.Message == nil {
				isExampleFormat = false
				break
			}
		}
		if isExampleFormat {
			for i, example := range examples {
				msgBytes, marshalErr := json.Marshal(example.Message)
				if marshalErr != nil {
					result.Errors = append(result.Errors, fmt.Sprintf("Example %d (%s): failed to marshal: %v", i, example.Description, marshalErr))
					result.AllValid = false
					continue
				}

				validationResult, validateErr := ci.validator.ValidateMessage(msgBytes)
				if validateErr != nil {
					result.Errors = append(result.Errors, fmt.Sprintf("Example %d (%s): validation error: %v", i, example.Description, validateErr))
					result.AllValid = false
					continue
				}

				result.Messages = append(result.Messages, validationResult)
				if !validationResult.Valid {
					result.AllValid = false
				}
			}
			return result, nil
		}
	}

	// Try to parse as array of messages
	var messageArray []map[string]interface{}
	if err := json.Unmarshal(data, &messageArray); err == nil {
		for i, msg := range messageArray {
			msgBytes, marshalErr := json.Marshal(msg)
			if marshalErr != nil {
				result.Errors = append(result.Errors, fmt.Sprintf("Message %d: failed to marshal: %v", i, marshalErr))
				result.AllValid = false
				continue
			}

			validationResult, validateErr := ci.validator.ValidateMessage(msgBytes)
			if validateErr != nil {
				result.Errors = append(result.Errors, fmt.Sprintf("Message %d: validation error: %v", i, validateErr))
				result.AllValid = false
				continue
			}

			result.Messages = append(result.Messages, validationResult)
			if !validationResult.Valid {
				result.AllValid = false
			}
		}
		return result, nil
	}

	return nil, fmt.Errorf("file does not contain valid JSON message format")
}

// ExampleMessage represents a message example with description
type ExampleMessage struct {
	Description string                 `json:"description"`
	Message     map[string]interface{} `json:"message"`
}

// DirectoryValidationResult contains results for validating a directory
type DirectoryValidationResult struct {
	Directory      string                           `json:"directory"`
	TotalFiles     int                              `json:"total_files"`
	ValidFiles     int                              `json:"valid_files"`
	InvalidFiles   int                              `json:"invalid_files"`
	ValidationRate float64                          `json:"validation_rate"`
	Files          map[string]*FileValidationResult `json:"files"`
	Errors         []string                         `json:"errors,omitempty"`
}

// FileValidationResult contains results for validating a single file
type FileValidationResult struct {
	FilePath string              `json:"file_path"`
	AllValid bool                `json:"all_valid"`
	Messages []*ValidationResult `json:"messages"`
	Errors   []string            `json:"errors,omitempty"`
}

// GenerateCIReport generates a formatted report suitable for CI systems
func (ci *CIHelper) GenerateCIReport(result *DirectoryValidationResult) string {
	var sb strings.Builder

	sb.WriteString("BACKBEAT Contract Validation Report\n")
	sb.WriteString("===================================\n\n")

	sb.WriteString(fmt.Sprintf("Directory: %s\n", result.Directory))
	sb.WriteString(fmt.Sprintf("Total Files: %d\n", result.TotalFiles))
	sb.WriteString(fmt.Sprintf("Valid Files: %d\n", result.ValidFiles))
	sb.WriteString(fmt.Sprintf("Invalid Files: %d\n", result.InvalidFiles))
	sb.WriteString(fmt.Sprintf("Validation Rate: %.2f%%\n\n", result.ValidationRate*100))

	if len(result.Errors) > 0 {
		sb.WriteString("Directory-level Errors:\n")
		for _, err := range result.Errors {
			sb.WriteString(fmt.Sprintf("  - %s\n", err))
		}
		sb.WriteString("\n")
	}

	// Group files by validation status
	validFiles := make([]string, 0)
	invalidFiles := make([]string, 0)

	for filePath, fileResult := range result.Files {
		if fileResult.AllValid {
			validFiles = append(validFiles, filePath)
		} else {
			invalidFiles = append(invalidFiles, filePath)
		}
	}

	if len(validFiles) > 0 {
		sb.WriteString("Valid Files:\n")
		for _, file := range validFiles {
			sb.WriteString(fmt.Sprintf("  ✓ %s\n", file))
		}
		sb.WriteString("\n")
	}

	if len(invalidFiles) > 0 {
		sb.WriteString("Invalid Files:\n")
		for _, file := range invalidFiles {
			fileResult := result.Files[file]
			sb.WriteString(fmt.Sprintf("  ✗ %s\n", file))

			for _, err := range fileResult.Errors {
				sb.WriteString(fmt.Sprintf("    - %s\n", err))
			}

			for i, msg := range fileResult.Messages {
				if !msg.Valid {
					sb.WriteString(fmt.Sprintf("    Message %d (%s):\n", i+1, msg.MessageType))
					for _, valErr := range msg.Errors {
						sb.WriteString(fmt.Sprintf("      - %s: %s\n", valErr.Field, valErr.Message))
					}
				}
			}
			sb.WriteString("\n")
		}
	}

	return sb.String()
}

// ExitWithStatus exits the program with appropriate status code for CI
func (ci *CIHelper) ExitWithStatus(result *DirectoryValidationResult) {
	if result.InvalidFiles > 0 || len(result.Errors) > 0 {
		fmt.Fprint(os.Stderr, ci.GenerateCIReport(result))
		os.Exit(1)
	} else {
		fmt.Print(ci.GenerateCIReport(result))
		os.Exit(0)
	}
}

// ValidateExamples validates the built-in example messages
func (ci *CIHelper) ValidateExamples() ([]*ValidationResult, error) {
	examples := ExampleMessages()
	results := make([]*ValidationResult, 0, len(examples))

	for name, example := range examples {
		result, err := ci.validator.ValidateStruct(example)
		if err != nil {
			return nil, fmt.Errorf("failed to validate example %s: %w", name, err)
		}
		results = append(results, result)
	}

	return results, nil
}

// GetSchemaInfo returns information about loaded schemas
func (ci *CIHelper) GetSchemaInfo() map[string]string {
	info := make(map[string]string)
	for _, msgType := range ci.validator.GetSupportedMessageTypes() {
		info[msgType] = getSchemaVersion(msgType)
	}
	return info
}
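
For downstream projects that vendor these contracts, the same checks can run in-process instead of through the CLI. The following is a minimal sketch of a `go test` wrapper around `CIHelper`; the schema path, the `testdata/messages` fixture directory, and the test package name are assumptions about the consuming repository.

package contracts_test

import (
	"testing"

	"github.com/chorus-services/backbeat/contracts/tests/integration"
)

// TestBackbeatMessages fails the build if any fixture violates a BACKBEAT schema.
func TestBackbeatMessages(t *testing.T) {
	// Path to the checked-out schema directory (assumed layout).
	helper, err := integration.NewCIHelper("../../schemas")
	if err != nil {
		t.Fatalf("creating CI helper: %v", err)
	}

	result, err := helper.ValidateDirectory("testdata/messages")
	if err != nil {
		t.Fatalf("validating directory: %v", err)
	}

	if result.InvalidFiles > 0 || len(result.Errors) > 0 {
		// GenerateCIReport renders the same summary the CLI prints on failure.
		t.Fatalf("BACKBEAT contract validation failed:\n%s", helper.GenerateCIReport(result))
	}
}
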
@@ -0,0 +1,184 @@
// Command backbeat-validate provides CLI validation of BACKBEAT messages for CI integration
package main

import (
	"encoding/json"
	"flag"
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/chorus-services/backbeat/contracts/tests/integration"
)

func main() {
	var (
		schemaDir    = flag.String("schemas", "", "Path to BACKBEAT schema directory (required)")
		validateDir  = flag.String("dir", "", "Directory to validate (optional)")
		validateFile = flag.String("file", "", "Single file to validate (optional)")
		messageJSON  = flag.String("message", "", "JSON message to validate (optional)")
		examples     = flag.Bool("examples", false, "Validate built-in examples")
		quiet        = flag.Bool("quiet", false, "Only output errors")
		jsonOutput   = flag.Bool("json", false, "Output results as JSON")
		exitCode     = flag.Bool("exit-code", true, "Exit with non-zero code on validation failures")
	)
	flag.Parse()

	if *schemaDir == "" {
		fmt.Fprintf(os.Stderr, "Error: --schemas parameter is required\n")
		flag.Usage()
		os.Exit(1)
	}

	// Create CI helper
	helper, err := integration.NewCIHelper(*schemaDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error creating validator: %v\n", err)
		os.Exit(1)
	}

	// Determine what to validate
	switch {
	case *examples:
		validateExamples(helper, *quiet, *jsonOutput, *exitCode)
	case *validateDir != "":
		validateDirectory(helper, *validateDir, *quiet, *jsonOutput, *exitCode)
	case *validateFile != "":
		validateSingleFile(helper, *validateFile, *quiet, *jsonOutput, *exitCode)
	case *messageJSON != "":
		validateMessage(helper, *messageJSON, *quiet, *jsonOutput, *exitCode)
	default:
		fmt.Fprintf(os.Stderr, "Error: must specify one of --dir, --file, --message, or --examples\n")
		flag.Usage()
		os.Exit(1)
	}
}

func validateExamples(helper *integration.CIHelper, quiet, jsonOutput, exitOnError bool) {
	results, err := helper.ValidateExamples()
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error validating examples: %v\n", err)
		os.Exit(1)
	}

	invalidCount := 0
	for _, result := range results {
		if !result.Valid {
			invalidCount++
		}

		if !quiet || !result.Valid {
			if jsonOutput {
				jsonBytes, _ := json.MarshalIndent(result, "", "  ")
				fmt.Println(string(jsonBytes))
			} else {
				fmt.Print(integration.PrettyPrintValidationResult(result))
				fmt.Println(strings.Repeat("-", 50))
			}
		}
	}

	if !quiet {
		fmt.Printf("\nSummary: %d total, %d valid, %d invalid\n", len(results), len(results)-invalidCount, invalidCount)
	}

	if exitOnError && invalidCount > 0 {
		os.Exit(1)
	}
}

func validateDirectory(helper *integration.CIHelper, dir string, quiet, jsonOutput, exitOnError bool) {
	result, err := helper.ValidateDirectory(dir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error validating directory: %v\n", err)
		os.Exit(1)
	}

	if jsonOutput {
		jsonBytes, _ := json.MarshalIndent(result, "", "  ")
		fmt.Println(string(jsonBytes))
	} else if !quiet {
		fmt.Print(helper.GenerateCIReport(result))
	}

	if exitOnError && (result.InvalidFiles > 0 || len(result.Errors) > 0) {
		if quiet {
			fmt.Fprintf(os.Stderr, "Validation failed: %d invalid files, %d errors\n", result.InvalidFiles, len(result.Errors))
		}
		os.Exit(1)
	}
}

func validateSingleFile(helper *integration.CIHelper, filePath string, quiet, jsonOutput, exitOnError bool) {
	// Validate the file's parent directory, then pick out this file's result
	tmpDir := filepath.Dir(filePath)
	result, err := helper.ValidateDirectory(tmpDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error validating file: %v\n", err)
		os.Exit(1)
	}

	// Filter results to just this file
	fileName := filepath.Base(filePath)
	fileResult, exists := result.Files[fileName]
	if !exists {
		fmt.Fprintf(os.Stderr, "File was not validated (may not contain BACKBEAT messages)\n")
		os.Exit(1)
	}

	if jsonOutput {
		jsonBytes, _ := json.MarshalIndent(fileResult, "", "  ")
		fmt.Println(string(jsonBytes))
	} else if !quiet {
		fmt.Printf("File: %s\n", fileName)
		fmt.Printf("Valid: %t\n", fileResult.AllValid)

		if len(fileResult.Errors) > 0 {
			fmt.Println("Errors:")
			for _, err := range fileResult.Errors {
				fmt.Printf("  - %s\n", err)
			}
		}

		for i, msg := range fileResult.Messages {
			fmt.Printf("\nMessage %d:\n", i+1)
			fmt.Print(integration.PrettyPrintValidationResult(msg))
		}
	}

	if exitOnError && !fileResult.AllValid {
		if quiet {
			fmt.Fprintf(os.Stderr, "Validation failed\n")
		}
		os.Exit(1)
	}
}

func validateMessage(helper *integration.CIHelper, messageJSON string, quiet, jsonOutput, exitOnError bool) {
	validator, err := integration.NewMessageValidator(flag.Lookup("schemas").Value.String())
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error creating validator: %v\n", err)
		os.Exit(1)
	}

	result, err := validator.ValidateMessageString(messageJSON)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error validating message: %v\n", err)
		os.Exit(1)
	}

	if jsonOutput {
		jsonBytes, _ := json.MarshalIndent(result, "", "  ")
		fmt.Println(string(jsonBytes))
	} else if !quiet {
		fmt.Print(integration.PrettyPrintValidationResult(result))
	}

	if exitOnError && !result.Valid {
		if quiet {
			fmt.Fprintf(os.Stderr, "Validation failed\n")
		}
		os.Exit(1)
	}
}
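
Outside of make, the same CLI can be shelled out to from a test or a small helper. A minimal sketch, assuming the binary has already been built with `make build` and that the schema and message paths follow the layout used in the CI examples above:

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Invoke the validator exactly as the generated CI jobs do; --exit-code makes
	// the process exit non-zero when any message fails validation.
	cmd := exec.Command("./backbeat-validate",
		"--schemas", "../../schemas",
		"--dir", "path/to/messages",
		"--exit-code",
	)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr

	if err := cmd.Run(); err != nil {
		fmt.Fprintf(os.Stderr, "BACKBEAT validation failed: %v\n", err)
		os.Exit(1)
	}
}
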
283
BACKBEAT-prototype/contracts/tests/integration/validator.go
Normal file
@@ -0,0 +1,283 @@
// Package integration provides CI validation helpers for BACKBEAT conformance testing
package integration

import (
	"encoding/json"
	"fmt"
	"path/filepath"
	"strings"

	"github.com/xeipuuv/gojsonschema"
)

// MessageValidator provides validation for BACKBEAT messages against JSON schemas
type MessageValidator struct {
	schemaLoaders map[string]gojsonschema.JSONLoader
}

// MessageType constants for the three core BACKBEAT interfaces
const (
	BeatFrameType   = "backbeat.beatframe.v1"
	StatusClaimType = "backbeat.statusclaim.v1"
	BarReportType   = "backbeat.barreport.v1"
)

// ValidationError represents a validation failure with context
type ValidationError struct {
	MessageType string   `json:"message_type"`
	Field       string   `json:"field"`
	Value       string   `json:"value"`
	Message     string   `json:"message"`
	Errors      []string `json:"errors"`
}

func (ve ValidationError) Error() string {
	return fmt.Sprintf("validation failed for %s: %s", ve.MessageType, strings.Join(ve.Errors, "; "))
}

// ValidationResult contains the outcome of message validation
type ValidationResult struct {
	Valid         bool              `json:"valid"`
	MessageType   string            `json:"message_type"`
	Errors        []ValidationError `json:"errors,omitempty"`
	SchemaVersion string            `json:"schema_version"`
}

// NewMessageValidator creates a new validator with schema loaders
func NewMessageValidator(schemaDir string) (*MessageValidator, error) {
	validator := &MessageValidator{
		schemaLoaders: make(map[string]gojsonschema.JSONLoader),
	}

	// Load all schema files
	schemas := map[string]string{
		BeatFrameType:   "beatframe-v1.schema.json",
		StatusClaimType: "statusclaim-v1.schema.json",
		BarReportType:   "barreport-v1.schema.json",
	}

	for msgType, schemaFile := range schemas {
		schemaPath := filepath.Join(schemaDir, schemaFile)
		loader := gojsonschema.NewReferenceLoader("file://" + schemaPath)
		validator.schemaLoaders[msgType] = loader
	}

	return validator, nil
}

// ValidateMessage validates a JSON message against the appropriate BACKBEAT schema
func (v *MessageValidator) ValidateMessage(messageJSON []byte) (*ValidationResult, error) {
	// Parse message to determine type
	var msgMap map[string]interface{}
	if err := json.Unmarshal(messageJSON, &msgMap); err != nil {
		return nil, fmt.Errorf("failed to parse JSON: %w", err)
	}

	msgType, ok := msgMap["type"].(string)
	if !ok {
		return &ValidationResult{
			Valid:       false,
			MessageType: "unknown",
			Errors: []ValidationError{
				{
					Field:   "type",
					Message: "message type field is missing or not a string",
					Errors:  []string{"type field is required and must be a string"},
				},
			},
		}, nil
	}

	// Get appropriate schema loader
	schemaLoader, exists := v.schemaLoaders[msgType]
	if !exists {
		return &ValidationResult{
			Valid:       false,
			MessageType: msgType,
			Errors: []ValidationError{
				{
					Field:   "type",
					Value:   msgType,
					Message: fmt.Sprintf("unsupported message type: %s", msgType),
					Errors:  []string{fmt.Sprintf("message type %s is not supported by BACKBEAT contracts", msgType)},
				},
			},
		}, nil
	}

	// Validate against schema
	docLoader := gojsonschema.NewBytesLoader(messageJSON)
	result, err := gojsonschema.Validate(schemaLoader, docLoader)
	if err != nil {
		return nil, fmt.Errorf("schema validation failed: %w", err)
	}

	validationResult := &ValidationResult{
		Valid:         result.Valid(),
		MessageType:   msgType,
		SchemaVersion: getSchemaVersion(msgType),
	}

	if !result.Valid() {
		for _, desc := range result.Errors() {
			validationResult.Errors = append(validationResult.Errors, ValidationError{
				MessageType: msgType,
				Field:       desc.Field(),
				Value:       fmt.Sprintf("%v", desc.Value()),
				Message:     desc.Description(),
				Errors:      []string{desc.String()},
			})
		}
	}

	return validationResult, nil
}

// ValidateMessageString validates a JSON message string
func (v *MessageValidator) ValidateMessageString(messageJSON string) (*ValidationResult, error) {
	return v.ValidateMessage([]byte(messageJSON))
}

// ValidateStruct validates a Go struct by marshaling to JSON first
func (v *MessageValidator) ValidateStruct(message interface{}) (*ValidationResult, error) {
	jsonBytes, err := json.Marshal(message)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal struct to JSON: %w", err)
	}
	return v.ValidateMessage(jsonBytes)
}

// BatchValidate validates multiple messages and returns aggregated results
func (v *MessageValidator) BatchValidate(messages [][]byte) ([]*ValidationResult, error) {
	results := make([]*ValidationResult, len(messages))
	for i, msg := range messages {
		result, err := v.ValidateMessage(msg)
		if err != nil {
			return nil, fmt.Errorf("failed to validate message %d: %w", i, err)
		}
		results[i] = result
	}
	return results, nil
}

// GetSupportedMessageTypes returns the list of supported BACKBEAT message types
func (v *MessageValidator) GetSupportedMessageTypes() []string {
	types := make([]string, 0, len(v.schemaLoaders))
	for msgType := range v.schemaLoaders {
		types = append(types, msgType)
	}
	return types
}

// IsMessageTypeSupported checks if a message type is supported
func (v *MessageValidator) IsMessageTypeSupported(msgType string) bool {
	_, exists := v.schemaLoaders[msgType]
	return exists
}

// getSchemaVersion returns the version for a given message type
func getSchemaVersion(msgType string) string {
	versions := map[string]string{
		BeatFrameType:   "1.0.0",
		StatusClaimType: "1.0.0",
		BarReportType:   "1.0.0",
	}
	return versions[msgType]
}

// ValidationStats provides summary statistics for batch validation
type ValidationStats struct {
	TotalMessages   int            `json:"total_messages"`
	ValidMessages   int            `json:"valid_messages"`
	InvalidMessages int            `json:"invalid_messages"`
	MessageTypes    map[string]int `json:"message_types"`
	ErrorSummary    map[string]int `json:"error_summary"`
	ValidationRate  float64        `json:"validation_rate"`
}

// GetValidationStats computes statistics from validation results
func GetValidationStats(results []*ValidationResult) *ValidationStats {
	stats := &ValidationStats{
		TotalMessages: len(results),
		MessageTypes:  make(map[string]int),
		ErrorSummary:  make(map[string]int),
	}

	for _, result := range results {
		// Count message types
		stats.MessageTypes[result.MessageType]++

		if result.Valid {
			stats.ValidMessages++
		} else {
			stats.InvalidMessages++
			// Aggregate error types
			for _, err := range result.Errors {
				stats.ErrorSummary[err.Field]++
			}
		}
	}

	if stats.TotalMessages > 0 {
		stats.ValidationRate = float64(stats.ValidMessages) / float64(stats.TotalMessages)
	}

	return stats
}

// ExampleMessages provides sample messages for testing and documentation
func ExampleMessages() map[string]interface{} {
	return map[string]interface{}{
		"beatframe_minimal": map[string]interface{}{
			"type":        BeatFrameType,
			"cluster_id":  "test-cluster",
			"beat_index":  0,
			"downbeat":    true,
			"phase":       "plan",
			"hlc":         "0001:0000:cafe",
			"deadline_at": "2025-09-05T12:00:30Z",
			"tempo_bpm":   2.0,
			"window_id":   "a1b2c3d4e5f6789012345678901234ab",
		},
		"statusclaim_minimal": map[string]interface{}{
			"type":       StatusClaimType,
			"agent_id":   "test:agent",
			"beat_index": 100,
			"state":      "idle",
			"hlc":        "7ffd:0001:abcd",
		},
		"barreport_minimal": map[string]interface{}{
			"type":                    BarReportType,
			"window_id":               "7e9b0e6c4c9a4e59b7f2d9a3c1b2e4d5",
			"from_beat":               0,
			"to_beat":                 119,
			"agents_reporting":        1,
			"on_time_reviews":         1,
			"help_promises_fulfilled": 0,
			"secret_rotations_ok":     true,
			"tempo_drift_ms":          0.0,
		},
	}
}

// PrettyPrintValidationResult formats validation results for human reading
func PrettyPrintValidationResult(result *ValidationResult) string {
	var sb strings.Builder

	sb.WriteString(fmt.Sprintf("Message Type: %s\n", result.MessageType))
	sb.WriteString(fmt.Sprintf("Schema Version: %s\n", result.SchemaVersion))
	sb.WriteString(fmt.Sprintf("Valid: %t\n", result.Valid))

	if !result.Valid && len(result.Errors) > 0 {
		sb.WriteString("\nValidation Errors:\n")
		for i, err := range result.Errors {
			sb.WriteString(fmt.Sprintf("  %d. Field: %s\n", i+1, err.Field))
			if err.Value != "" {
				sb.WriteString(fmt.Sprintf("     Value: %s\n", err.Value))
			}
			sb.WriteString(fmt.Sprintf("     Error: %s\n", err.Message))
		}
	}

	return sb.String()
}
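
To validate messages programmatically rather than per-directory, `MessageValidator` can also be used on its own. A minimal sketch, assuming a local `schemas` directory containing the three schema files named in `NewMessageValidator` and the same import path the CLI uses above:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/chorus-services/backbeat/contracts/tests/integration"
)

func main() {
	validator, err := integration.NewMessageValidator("schemas")
	if err != nil {
		log.Fatalf("loading schemas: %v", err)
	}

	// Validate one of the built-in minimal examples as a Go value...
	structResult, err := validator.ValidateStruct(integration.ExampleMessages()["beatframe_minimal"])
	if err != nil {
		log.Fatalf("validating struct: %v", err)
	}
	fmt.Print(integration.PrettyPrintValidationResult(structResult))

	// ...and a raw JSON payload, then summarise everything with GetValidationStats.
	raw, _ := json.Marshal(integration.ExampleMessages()["statusclaim_minimal"])
	batch, err := validator.BatchValidate([][]byte{raw})
	if err != nil {
		log.Fatalf("batch validation: %v", err)
	}

	stats := integration.GetValidationStats(append(batch, structResult))
	fmt.Printf("valid %d of %d (rate %.2f)\n", stats.ValidMessages, stats.TotalMessages, stats.ValidationRate)
}
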