Harden CHORUS security and messaging stack

anthonyrawlins
2025-09-20 23:21:35 +10:00
parent 57751f277a
commit 1bb736c09a
25 changed files with 2793 additions and 2474 deletions

pkg/shhh/doc.go (Normal file, 11 lines added)

@@ -0,0 +1,11 @@
// Package shhh provides the CHORUS secrets sentinel responsible for detecting
// and redacting sensitive values before they leave the runtime. The sentinel
// focuses on predictable failure modes (log emission, telemetry fan-out,
// request forwarding) and offers a composable API for registering additional
// redaction rules, emitting audit events, and tracking operational metrics.
//
// The initial implementation covers high-signal secrets (API keys,
// bearer/OAuth tokens, private keys) so the runtime can start integrating
// SHHH into COOEE and WHOOSH logging immediately while the broader roadmap
// items (automated redaction replay, policy-driven rules) continue landing.
package shhh
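
For orientation, here is a minimal usage sketch (illustrative only, not part of this commit). The chorus/pkg/shhh import path and the sample log line are assumptions; NewSentinel, Config, and RedactText are the APIs introduced in the files below.

package main

import (
	"context"
	"fmt"

	"chorus/pkg/shhh" // assumed import path; adjust to the real module root
)

func main() {
	// Default config: built-in rules enabled, placeholder "[REDACTED]".
	sentinel, err := shhh.NewSentinel(shhh.Config{})
	if err != nil {
		panic(err)
	}

	line := "Authorization: Bearer abc123def456ghi789"
	clean, findings := sentinel.RedactText(context.Background(), line,
		map[string]string{"source": "cooee.logger"})

	fmt.Println(clean)         // Authorization: Bearer [REDACTED]
	fmt.Println(len(findings)) // 1 (bearer-token)
}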

pkg/shhh/rule.go (Normal file, 130 lines added)

@@ -0,0 +1,130 @@
package shhh
import (
"crypto/sha256"
"encoding/base64"
"regexp"
"sort"
"strings"
)
type compiledRule struct {
name string
regex *regexp.Regexp
replacement string
severity Severity
tags []string
}
type matchRecord struct {
value string
}
func (r *compiledRule) apply(in string) (string, []matchRecord) {
indices := r.regex.FindAllStringSubmatchIndex(in, -1)
if len(indices) == 0 {
return in, nil
}
var builder strings.Builder
builder.Grow(len(in))
matches := make([]matchRecord, 0, len(indices))
last := 0
for _, loc := range indices {
start, end := loc[0], loc[1]
builder.WriteString(in[last:start])
replaced := r.regex.ExpandString(nil, r.replacement, in, loc)
builder.Write(replaced)
matches = append(matches, matchRecord{value: in[start:end]})
last = end
}
builder.WriteString(in[last:])
return builder.String(), matches
}
func buildDefaultRuleConfigs(placeholder string) []RuleConfig {
if placeholder == "" {
placeholder = "[REDACTED]"
}
return []RuleConfig{
{
Name: "bearer-token",
Pattern: `(?i)(authorization\s*:\s*bearer\s+)([A-Za-z0-9\-._~+/]+=*)`,
ReplacementTemplate: "$1" + placeholder,
Severity: SeverityMedium,
Tags: []string{"token", "http"},
},
{
Name: "api-key",
Pattern: `(?i)((?:api[_-]?key|token|secret|password)\s*[:=]\s*["']?)([A-Za-z0-9\-._~+/]{8,})(["']?)`,
ReplacementTemplate: "$1" + placeholder + "$3",
Severity: SeverityHigh,
Tags: []string{"credentials"},
},
{
Name: "openai-secret",
Pattern: `(sk-[A-Za-z0-9]{20,})`,
ReplacementTemplate: placeholder,
Severity: SeverityHigh,
Tags: []string{"llm", "api"},
},
{
Name: "oauth-refresh-token",
Pattern: `(?i)(refresh_token"?\s*[:=]\s*["']?)([A-Za-z0-9\-._~+/]{8,})(["']?)`,
ReplacementTemplate: "$1" + placeholder + "$3",
Severity: SeverityMedium,
Tags: []string{"oauth"},
},
{
Name: "private-key-block",
Pattern: `(?s)(-----BEGIN [^-]+ PRIVATE KEY-----)[^-]+(-----END [^-]+ PRIVATE KEY-----)`,
ReplacementTemplate: "$1\n" + placeholder + "\n$2",
Severity: SeverityHigh,
Tags: []string{"pem", "key"},
},
}
}
func compileRules(cfg Config, placeholder string) ([]*compiledRule, error) {
configs := make([]RuleConfig, 0)
if !cfg.DisableDefaultRules {
configs = append(configs, buildDefaultRuleConfigs(placeholder)...)
}
configs = append(configs, cfg.CustomRules...)
rules := make([]*compiledRule, 0, len(configs))
for _, rc := range configs {
if rc.Name == "" || rc.Pattern == "" {
continue
}
replacement := rc.ReplacementTemplate
if replacement == "" {
replacement = placeholder
}
re, err := regexp.Compile(rc.Pattern)
if err != nil {
return nil, err
}
compiled := &compiledRule{
name: rc.Name,
replacement: replacement,
regex: re,
severity: rc.Severity,
tags: append([]string(nil), rc.Tags...),
}
rules = append(rules, compiled)
}
sort.SliceStable(rules, func(i, j int) bool {
return rules[i].name < rules[j].name
})
return rules, nil
}
func hashSecret(value string) string {
sum := sha256.Sum256([]byte(value))
return base64.RawStdEncoding.EncodeToString(sum[:])
}

pkg/shhh/sentinel.go (Normal file, 407 lines added)

@@ -0,0 +1,407 @@
package shhh
import (
"context"
"errors"
"fmt"
"sort"
"sync"
)
// Option configures the sentinel during construction.
type Option func(*Sentinel)
// FindingObserver receives aggregated findings for each redaction operation.
type FindingObserver func(context.Context, []Finding)
// WithAuditSink attaches an audit sink for per-redaction events.
func WithAuditSink(sink AuditSink) Option {
return func(s *Sentinel) {
s.audit = sink
}
}
// WithStats allows callers to supply a shared stats collector.
func WithStats(stats *Stats) Option {
return func(s *Sentinel) {
s.stats = stats
}
}
// WithFindingObserver registers an observer that is invoked whenever redaction
// produces findings.
func WithFindingObserver(observer FindingObserver) Option {
return func(s *Sentinel) {
if observer == nil {
return
}
s.observers = append(s.observers, observer)
}
}
// Sentinel performs secret detection/redaction across text payloads.
type Sentinel struct {
mu sync.RWMutex
enabled bool
placeholder string
rules []*compiledRule
audit AuditSink
stats *Stats
observers []FindingObserver
}
// NewSentinel creates a new secrets sentinel using the provided configuration.
func NewSentinel(cfg Config, opts ...Option) (*Sentinel, error) {
placeholder := cfg.RedactionPlaceholder
if placeholder == "" {
placeholder = "[REDACTED]"
}
s := &Sentinel{
enabled: !cfg.Disabled,
placeholder: placeholder,
stats: NewStats(),
}
for _, opt := range opts {
opt(s)
}
if s.stats == nil {
s.stats = NewStats()
}
rules, err := compileRules(cfg, placeholder)
if err != nil {
return nil, fmt.Errorf("compile SHHH rules: %w", err)
}
if len(rules) == 0 {
return nil, errors.New("no SHHH rules configured")
}
s.rules = rules
return s, nil
}
// Enabled reports whether the sentinel is actively redacting.
func (s *Sentinel) Enabled() bool {
s.mu.RLock()
defer s.mu.RUnlock()
return s.enabled
}
// Toggle enables or disables the sentinel at runtime.
func (s *Sentinel) Toggle(enabled bool) {
s.mu.Lock()
defer s.mu.Unlock()
s.enabled = enabled
}
// SetAuditSink updates the audit sink at runtime.
func (s *Sentinel) SetAuditSink(sink AuditSink) {
s.mu.Lock()
defer s.mu.Unlock()
s.audit = sink
}
// AddFindingObserver registers an observer after construction.
func (s *Sentinel) AddFindingObserver(observer FindingObserver) {
if observer == nil {
return
}
s.mu.Lock()
defer s.mu.Unlock()
s.observers = append(s.observers, observer)
}
// StatsSnapshot returns a snapshot of the current counters.
func (s *Sentinel) StatsSnapshot() StatsSnapshot {
s.mu.RLock()
stats := s.stats
s.mu.RUnlock()
if stats == nil {
return StatsSnapshot{}
}
return stats.Snapshot()
}
// RedactText scans the provided text and redacts any findings.
func (s *Sentinel) RedactText(ctx context.Context, text string, labels map[string]string) (string, []Finding) {
s.mu.RLock()
enabled := s.enabled
rules := s.rules
stats := s.stats
audit := s.audit
s.mu.RUnlock()
if !enabled || len(rules) == 0 {
return text, nil
}
if stats != nil {
stats.IncScan()
}
aggregates := make(map[string]*findingAggregate)
current := text
path := derivePath(labels)
for _, rule := range rules {
redacted, matches := rule.apply(current)
if len(matches) == 0 {
continue
}
current = redacted
if stats != nil {
stats.AddFindings(rule.name, len(matches))
}
recordAggregate(aggregates, rule, path, len(matches))
if audit != nil {
metadata := cloneLabels(labels)
for _, match := range matches {
event := AuditEvent{
Rule: rule.name,
Severity: rule.severity,
Tags: append([]string(nil), rule.tags...),
Path: path,
Hash: hashSecret(match.value),
Metadata: metadata,
}
audit.RecordRedaction(ctx, event)
}
}
}
findings := flattenAggregates(aggregates)
s.notifyObservers(ctx, findings)
return current, findings
}
// RedactMap walks the map and redacts in-place. It returns the collected findings.
func (s *Sentinel) RedactMap(ctx context.Context, payload map[string]any) []Finding {
return s.RedactMapWithLabels(ctx, payload, nil)
}
// RedactMapWithLabels allows callers to specify base labels that will be merged
// into metadata for nested structures.
func (s *Sentinel) RedactMapWithLabels(ctx context.Context, payload map[string]any, baseLabels map[string]string) []Finding {
if payload == nil {
return nil
}
aggregates := make(map[string]*findingAggregate)
s.redactValue(ctx, payload, "", baseLabels, aggregates)
findings := flattenAggregates(aggregates)
s.notifyObservers(ctx, findings)
return findings
}
func (s *Sentinel) redactValue(ctx context.Context, value any, path string, baseLabels map[string]string, agg map[string]*findingAggregate) {
switch v := value.(type) {
case map[string]interface{}:
for key, val := range v {
childPath := joinPath(path, key)
switch typed := val.(type) {
case string:
labels := mergeLabels(baseLabels, childPath)
redacted, findings := s.RedactText(ctx, typed, labels)
if redacted != typed {
v[key] = redacted
}
mergeAggregates(agg, findings)
case fmt.Stringer:
labels := mergeLabels(baseLabels, childPath)
text := typed.String()
redacted, findings := s.RedactText(ctx, text, labels)
if redacted != text {
v[key] = redacted
}
mergeAggregates(agg, findings)
default:
s.redactValue(ctx, typed, childPath, baseLabels, agg)
}
}
case []interface{}:
for idx, item := range v {
childPath := indexPath(path, idx)
switch typed := item.(type) {
case string:
labels := mergeLabels(baseLabels, childPath)
redacted, findings := s.RedactText(ctx, typed, labels)
if redacted != typed {
v[idx] = redacted
}
mergeAggregates(agg, findings)
case fmt.Stringer:
labels := mergeLabels(baseLabels, childPath)
text := typed.String()
redacted, findings := s.RedactText(ctx, text, labels)
if redacted != text {
v[idx] = redacted
}
mergeAggregates(agg, findings)
default:
s.redactValue(ctx, typed, childPath, baseLabels, agg)
}
}
case []string:
for idx, item := range v {
childPath := indexPath(path, idx)
labels := mergeLabels(baseLabels, childPath)
redacted, findings := s.RedactText(ctx, item, labels)
if redacted != item {
v[idx] = redacted
}
mergeAggregates(agg, findings)
}
}
}
func (s *Sentinel) notifyObservers(ctx context.Context, findings []Finding) {
if len(findings) == 0 {
return
}
findingsCopy := append([]Finding(nil), findings...)
s.mu.RLock()
observers := append([]FindingObserver(nil), s.observers...)
s.mu.RUnlock()
for _, observer := range observers {
observer(ctx, findingsCopy)
}
}
func mergeAggregates(dest map[string]*findingAggregate, findings []Finding) {
for i := range findings {
f := findings[i]
agg := dest[f.Rule]
if agg == nil {
agg = &findingAggregate{
rule: f.Rule,
severity: f.Severity,
tags: append([]string(nil), f.Tags...),
locations: make(map[string]int),
}
dest[f.Rule] = agg
}
agg.count += f.Count
for _, loc := range f.Locations {
agg.locations[loc.Path] += loc.Count
}
}
}
func recordAggregate(dest map[string]*findingAggregate, rule *compiledRule, path string, count int) {
agg := dest[rule.name]
if agg == nil {
agg = &findingAggregate{
rule: rule.name,
severity: rule.severity,
tags: append([]string(nil), rule.tags...),
locations: make(map[string]int),
}
dest[rule.name] = agg
}
agg.count += count
if path != "" {
agg.locations[path] += count
}
}
func flattenAggregates(agg map[string]*findingAggregate) []Finding {
if len(agg) == 0 {
return nil
}
keys := make([]string, 0, len(agg))
for key := range agg {
keys = append(keys, key)
}
sort.Strings(keys)
findings := make([]Finding, 0, len(agg))
for _, key := range keys {
entry := agg[key]
locations := make([]Location, 0, len(entry.locations))
if len(entry.locations) > 0 {
paths := make([]string, 0, len(entry.locations))
for path := range entry.locations {
paths = append(paths, path)
}
sort.Strings(paths)
for _, path := range paths {
locations = append(locations, Location{Path: path, Count: entry.locations[path]})
}
}
findings = append(findings, Finding{
Rule: entry.rule,
Severity: entry.severity,
Tags: append([]string(nil), entry.tags...),
Count: entry.count,
Locations: locations,
})
}
return findings
}
func derivePath(labels map[string]string) string {
if labels == nil {
return ""
}
if path := labels["path"]; path != "" {
return path
}
if path := labels["source"]; path != "" {
return path
}
if path := labels["field"]; path != "" {
return path
}
return ""
}
func cloneLabels(labels map[string]string) map[string]string {
if len(labels) == 0 {
return nil
}
clone := make(map[string]string, len(labels))
for k, v := range labels {
clone[k] = v
}
return clone
}
func joinPath(prefix, key string) string {
if prefix == "" {
return key
}
if key == "" {
return prefix
}
return prefix + "." + key
}
func indexPath(prefix string, idx int) string {
if prefix == "" {
return fmt.Sprintf("[%d]", idx)
}
return fmt.Sprintf("%s[%d]", prefix, idx)
}
func mergeLabels(base map[string]string, path string) map[string]string {
if base == nil && path == "" {
return nil
}
labels := cloneLabels(base)
if labels == nil {
labels = make(map[string]string, 1)
}
if path != "" {
labels["path"] = path
}
return labels
}
type findingAggregate struct {
rule string
severity Severity
tags []string
count int
locations map[string]int
}

pkg/shhh/sentinel_test.go (Normal file, 95 lines added)

@@ -0,0 +1,95 @@
package shhh
import (
"context"
"testing"
"github.com/stretchr/testify/require"
)
type recordingSink struct {
events []AuditEvent
}
func (r *recordingSink) RecordRedaction(_ context.Context, event AuditEvent) {
r.events = append(r.events, event)
}
func TestRedactText_DefaultRules(t *testing.T) {
sentinel, err := NewSentinel(Config{})
require.NoError(t, err)
input := "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.secret"
redacted, findings := sentinel.RedactText(context.Background(), input, map[string]string{"source": "http.request.headers.authorization"})
require.Equal(t, "Authorization: Bearer [REDACTED]", redacted)
require.Len(t, findings, 1)
require.Equal(t, "bearer-token", findings[0].Rule)
require.Equal(t, 1, findings[0].Count)
require.NotEmpty(t, findings[0].Locations)
snapshot := sentinel.StatsSnapshot()
require.Equal(t, uint64(1), snapshot.TotalScans)
require.Equal(t, uint64(1), snapshot.TotalFindings)
require.Equal(t, uint64(1), snapshot.PerRuleFindings["bearer-token"])
}
func TestRedactMap_NestedStructures(t *testing.T) {
sentinel, err := NewSentinel(Config{})
require.NoError(t, err)
payload := map[string]any{
"config": map[string]any{
"api_key": "API_KEY=1234567890ABCDEFG",
},
"tokens": []any{
"sk-test1234567890ABCDEF",
map[string]any{"refresh": "refresh_token=abcdef12345"},
},
}
findings := sentinel.RedactMap(context.Background(), payload)
require.NotEmpty(t, findings)
config := payload["config"].(map[string]any)
require.Equal(t, "API_KEY=[REDACTED]", config["api_key"])
tokens := payload["tokens"].([]any)
require.Equal(t, "[REDACTED]", tokens[0])
inner := tokens[1].(map[string]any)
require.Equal(t, "refresh_token=[REDACTED]", inner["refresh"])
total := 0
for _, finding := range findings {
total += finding.Count
}
require.Equal(t, 3, total)
}
func TestAuditSinkReceivesEvents(t *testing.T) {
sink := &recordingSink{}
cfg := Config{
DisableDefaultRules: true,
CustomRules: []RuleConfig{
{
Name: "custom-secret",
Pattern: `(secret\s*=\s*)([A-Za-z0-9]{6,})`,
ReplacementTemplate: "$1[REDACTED]",
Severity: SeverityHigh,
},
},
}
sentinel, err := NewSentinel(cfg, WithAuditSink(sink))
require.NoError(t, err)
_, findings := sentinel.RedactText(context.Background(), "secret=mysecretvalue", map[string]string{"source": "test"})
require.Len(t, findings, 1)
require.Equal(t, 1, findings[0].Count)
require.Len(t, sink.events, 1)
require.Equal(t, "custom-secret", sink.events[0].Rule)
require.NotEmpty(t, sink.events[0].Hash)
require.Equal(t, "test", sink.events[0].Path)
}

pkg/shhh/stats.go (Normal file, 60 lines added)

@@ -0,0 +1,60 @@
package shhh
import (
"sync"
"sync/atomic"
)
// Stats tracks aggregate counts for the sentinel.
type Stats struct {
totalScans atomic.Uint64
totalFindings atomic.Uint64
perRule sync.Map // string -> *atomic.Uint64
}
// NewStats constructs a Stats collector.
func NewStats() *Stats {
return &Stats{}
}
// IncScan increments the total scan counter.
func (s *Stats) IncScan() {
if s == nil {
return
}
s.totalScans.Add(1)
}
// AddFindings records findings for a rule.
func (s *Stats) AddFindings(rule string, count int) {
if s == nil || count <= 0 {
return
}
s.totalFindings.Add(uint64(count))
counterAny, _ := s.perRule.LoadOrStore(rule, new(atomic.Uint64))
counter := counterAny.(*atomic.Uint64)
counter.Add(uint64(count))
}
// Snapshot returns a point-in-time view of the counters.
func (s *Stats) Snapshot() StatsSnapshot {
if s == nil {
return StatsSnapshot{}
}
snapshot := StatsSnapshot{
TotalScans: s.totalScans.Load(),
TotalFindings: s.totalFindings.Load(),
PerRuleFindings: make(map[string]uint64),
}
s.perRule.Range(func(key, value any) bool {
name, ok := key.(string)
if !ok {
return true
}
if counter, ok := value.(*atomic.Uint64); ok {
snapshot.PerRuleFindings[name] = counter.Load()
}
return true
})
return snapshot
}

pkg/shhh/types.go (Normal file, 73 lines added)

@@ -0,0 +1,73 @@
package shhh
import "context"
// Severity represents the criticality associated with a redaction finding.
type Severity string
const (
// SeverityLow indicates low-impact findings (e.g. non-production credentials).
SeverityLow Severity = "low"
// SeverityMedium indicates medium impact findings (e.g. access tokens).
SeverityMedium Severity = "medium"
// SeverityHigh indicates high-impact findings (e.g. private keys).
SeverityHigh Severity = "high"
)
// RuleConfig defines a redaction rule that SHHH should enforce.
type RuleConfig struct {
Name string `json:"name"`
Pattern string `json:"pattern"`
ReplacementTemplate string `json:"replacement_template"`
Severity Severity `json:"severity"`
Tags []string `json:"tags"`
}
// Config controls sentinel behaviour.
type Config struct {
// Disabled toggles redaction off entirely.
Disabled bool `json:"disabled"`
// RedactionPlaceholder overrides the default placeholder value.
RedactionPlaceholder string `json:"redaction_placeholder"`
// DisableDefaultRules disables the built-in curated rule set.
DisableDefaultRules bool `json:"disable_default_rules"`
// CustomRules allows callers to append bespoke redaction patterns.
CustomRules []RuleConfig `json:"custom_rules"`
}
// Finding represents a single rule firing during redaction.
type Finding struct {
Rule string `json:"rule"`
Severity Severity `json:"severity"`
Tags []string `json:"tags,omitempty"`
Count int `json:"count"`
Locations []Location `json:"locations,omitempty"`
}
// Location describes where a secret was found.
type Location struct {
Path string `json:"path"`
Count int `json:"count"`
}
// StatsSnapshot exposes aggregate counters for observability.
type StatsSnapshot struct {
TotalScans uint64 `json:"total_scans"`
TotalFindings uint64 `json:"total_findings"`
PerRuleFindings map[string]uint64 `json:"per_rule_findings"`
}
// AuditEvent captures a single redaction occurrence for downstream sinks.
type AuditEvent struct {
Rule string `json:"rule"`
Severity Severity `json:"severity"`
Tags []string `json:"tags,omitempty"`
Path string `json:"path,omitempty"`
Hash string `json:"hash"`
Metadata map[string]string `json:"metadata,omitempty"`
}
// AuditSink receives redaction events for long term storage / replay.
type AuditSink interface {
RecordRedaction(ctx context.Context, event AuditEvent)
}
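
As an illustration of the AuditSink contract above (not part of this commit), here is a minimal sink that forwards each redaction event to the standard library logger. The shhhaudit package name and chorus/pkg/shhh import path are assumptions.

package shhhaudit // hypothetical package name, for illustration only

import (
	"context"
	"log"

	"chorus/pkg/shhh" // assumed import path
)

// logSink satisfies shhh.AuditSink by emitting one log line per redaction.
// Only the hash of the secret is logged, never the raw value.
type logSink struct{}

func (logSink) RecordRedaction(_ context.Context, event shhh.AuditEvent) {
	log.Printf("shhh redaction rule=%s severity=%s path=%s hash=%s",
		event.Rule, event.Severity, event.Path, event.Hash)
}

// Wire it in at construction time:
//   sentinel, err := shhh.NewSentinel(cfg, shhh.WithAuditSink(logSink{}))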