Release prep: 54 engines, self-hosted signatures, i18n, dashboard updates

This commit is contained in:
DmitrL-dev 2026-03-23 16:45:40 +10:00
parent 694e32be26
commit 41cbfd6e0a
178 changed files with 36008 additions and 399 deletions

View file

@ -0,0 +1,527 @@
package soc
import (
"bytes"
"fmt"
"net/http"
"net/http/httptest"
"os"
"sync"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
domsoc "github.com/syntrex/gomcp/internal/domain/soc"
"github.com/syntrex/gomcp/internal/infrastructure/audit"
"github.com/syntrex/gomcp/internal/infrastructure/sqlite"
)
// newTestServiceWithLogger creates a SOC service backed by in-memory SQLite
// WITH a decision logger, registering cleanup with the test lifecycle.
func newTestServiceWithLogger(t *testing.T) *Service {
	t.Helper()
	conn, err := sqlite.OpenMemory()
	require.NoError(t, err)
	socRepo, err := sqlite.NewSOCRepo(conn)
	require.NoError(t, err)
	decLog, err := audit.NewDecisionLogger(t.TempDir())
	require.NoError(t, err)
	// Close the logger BEFORE TempDir cleanup runs (Windows file locking).
	t.Cleanup(func() {
		decLog.Close()
		conn.Close()
	})
	return NewService(socRepo, decLog)
}
// --- E2E: Full Pipeline (Ingest → Correlation → Incident → Playbook) ---

// TestE2E_FullPipeline_IngestToIncident drives the full ingest path:
// a jailbreak event followed by a tool_abuse event from the same sensor must
// match correlation rule SOC-CR-001 and produce a persisted CRITICAL incident,
// and the decision chain must remain valid afterwards.
func TestE2E_FullPipeline_IngestToIncident(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Step 1: Ingest a jailbreak event. A single event must NOT correlate.
	evt1 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityHigh, "jailbreak", "detected jailbreak attempt")
	evt1.SensorID = "sensor-e2e-1"
	id1, inc1, err := svc.IngestEvent(evt1)
	require.NoError(t, err)
	assert.NotEmpty(t, id1)
	assert.Nil(t, inc1, "single event should not trigger correlation")
	// Step 2: Ingest a tool_abuse event from same source — triggers SOC-CR-001.
	evt2 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityCritical, "tool_abuse", "tool abuse detected")
	evt2.SensorID = "sensor-e2e-1"
	id2, inc2, err := svc.IngestEvent(evt2)
	require.NoError(t, err)
	assert.NotEmpty(t, id2)
	// Correlation rule SOC-CR-001 (jailbreak + tool_abuse) should trigger an incident.
	require.NotNil(t, inc2, "jailbreak + tool_abuse should create an incident")
	assert.Equal(t, domsoc.SeverityCritical, inc2.Severity)
	assert.Equal(t, "Multi-stage Jailbreak", inc2.Title)
	assert.NotEmpty(t, inc2.ID)
	assert.NotEmpty(t, inc2.Events, "incident should reference triggering events")
	// Step 3: Verify incident is persisted (readable back by ID).
	gotInc, err := svc.GetIncident(inc2.ID)
	require.NoError(t, err)
	assert.Equal(t, inc2.ID, gotInc.ID)
	// Step 4: Verify decision chain integrity via the dashboard snapshot.
	dash, err := svc.Dashboard()
	require.NoError(t, err)
	assert.True(t, dash.ChainValid, "decision chain should be valid")
	assert.Greater(t, dash.TotalEvents, 0)
}
// TestE2E_TemporalSequenceCorrelation ingests an ordered auth_bypass →
// tool_abuse pair from one Shield sensor. Whether an incident fires depends
// on the active rule set, so incident fields are only checked when one is
// returned.
func TestE2E_TemporalSequenceCorrelation(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Sequence rule SOC-CR-010: auth_bypass → tool_abuse (ordered).
	evt1 := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityHigh, "auth_bypass", "brute force detected")
	evt1.SensorID = "sensor-seq-1"
	_, _, err := svc.IngestEvent(evt1)
	require.NoError(t, err)
	evt2 := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityHigh, "tool_abuse", "tool escalation")
	evt2.SensorID = "sensor-seq-1"
	_, inc, err := svc.IngestEvent(evt2)
	require.NoError(t, err)
	// Should trigger either SOC-CR-010 (sequence) or another matching rule.
	if inc != nil {
		assert.NotEmpty(t, inc.KillChainPhase)
		assert.NotEmpty(t, inc.MITREMapping)
	}
}
// --- E2E: Sensor Authentication Flow ---

// TestE2E_SensorAuth_FullFlow exercises sensor authentication end to end:
// a valid key succeeds; a wrong key, a missing SensorID, and an unknown
// sensor ID are all rejected with descriptive errors.
func TestE2E_SensorAuth_FullFlow(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Configure sensor keys (enables auth enforcement for these sensors).
	svc.SetSensorKeys(map[string]string{
		"sensor-auth-1": "secret-key-1",
		"sensor-auth-2": "secret-key-2",
	})
	// Valid auth — should succeed.
	evt := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "auth test")
	evt.SensorID = "sensor-auth-1"
	evt.SensorKey = "secret-key-1"
	id, _, err := svc.IngestEvent(evt)
	require.NoError(t, err)
	assert.NotEmpty(t, id)
	// Invalid key — should fail.
	evt2 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "bad key")
	evt2.SensorID = "sensor-auth-1"
	evt2.SensorKey = "wrong-key"
	_, _, err = svc.IngestEvent(evt2)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "auth")
	// Missing SensorID — should fail (S-1 fix).
	evt3 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "no sensor id")
	_, _, err = svc.IngestEvent(evt3)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "sensor_id required")
	// Unknown sensor — should fail regardless of the key supplied.
	evt4 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "unknown sensor")
	evt4.SensorID = "sensor-unknown"
	evt4.SensorKey = "whatever"
	_, _, err = svc.IngestEvent(evt4)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "auth")
}
// --- E2E: Drain Mode ---

// TestE2E_DrainMode_RejectsNewEvents verifies the drain/resume lifecycle:
// ingest succeeds before draining, is rejected while draining, and succeeds
// again after Resume.
func TestE2E_DrainMode_RejectsNewEvents(t *testing.T) {
	svc := newTestServiceWithLogger(t)

	// Before drain: ingest must succeed.
	pre := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "pre-drain")
	pre.SensorID = "sensor-drain"
	_, _, err := svc.IngestEvent(pre)
	require.NoError(t, err)

	// While draining: new events are refused with a "draining" error.
	svc.Drain()
	assert.True(t, svc.IsDraining())
	during := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "during-drain")
	during.SensorID = "sensor-drain"
	_, _, err = svc.IngestEvent(during)
	require.Error(t, err)
	assert.Contains(t, err.Error(), "draining")

	// After resume: ingest works again.
	svc.Resume()
	assert.False(t, svc.IsDraining())
	post := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityLow, "test", "post-drain")
	post.SensorID = "sensor-drain"
	_, _, err = svc.IngestEvent(post)
	require.NoError(t, err)
}
// --- E2E: Webhook Delivery ---

// TestE2E_WebhookFiredOnIncident verifies that when correlation creates an
// incident, the configured webhook endpoint receives at least one delivery.
func TestE2E_WebhookFiredOnIncident(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Set up a test webhook server that records every request path.
	var mu sync.Mutex
	var received []string
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		mu.Lock()
		received = append(received, r.URL.Path)
		mu.Unlock()
		w.WriteHeader(http.StatusOK)
	}))
	defer ts.Close()
	svc.SetWebhookConfig(WebhookConfig{
		Endpoints:  []string{ts.URL + "/webhook"},
		MaxRetries: 1,
		TimeoutSec: 5,
	})
	// Trigger an incident via correlation (jailbreak + tool_abuse from the
	// same sensor, as in TestE2E_FullPipeline_IngestToIncident).
	evt1 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityHigh, "jailbreak", "jailbreak e2e")
	evt1.SensorID = "sensor-wh"
	_, _, err := svc.IngestEvent(evt1)
	// Error was previously ignored; a failed first ingest would silently
	// invalidate the correlation setup.
	require.NoError(t, err)
	evt2 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityCritical, "tool_abuse", "tool abuse e2e")
	evt2.SensorID = "sensor-wh"
	_, inc, err := svc.IngestEvent(evt2)
	require.NoError(t, err)
	if inc != nil {
		// Give the async webhook goroutine time to fire.
		time.Sleep(200 * time.Millisecond)
		mu.Lock()
		assert.GreaterOrEqual(t, len(received), 1, "webhook should have been called")
		mu.Unlock()
	}
}
// --- E2E: Verdict Flow ---

// TestE2E_VerdictFlow walks an incident through its status lifecycle:
// OPEN → INVESTIGATING → RESOLVED, verifying persistence after each update.
func TestE2E_VerdictFlow(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Create an incident via correlation (jailbreak + tool_abuse, same sensor).
	evt1 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityHigh, "jailbreak", "verdict test 1")
	evt1.SensorID = "sensor-vd"
	_, _, err := svc.IngestEvent(evt1)
	require.NoError(t, err) // previously ignored; failure here would mask the real cause
	evt2 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityCritical, "tool_abuse", "verdict test 2")
	evt2.SensorID = "sensor-vd"
	_, inc, err := svc.IngestEvent(evt2)
	require.NoError(t, err) // previously discarded with `_`
	if inc == nil {
		t.Skip("no incident created — correlation rules may not match with current sliding window state")
	}
	// Verify initial status is OPEN.
	got, err := svc.GetIncident(inc.ID)
	require.NoError(t, err)
	assert.Equal(t, domsoc.StatusOpen, got.Status)
	// Update to INVESTIGATING and confirm the change persisted.
	err = svc.UpdateVerdict(inc.ID, domsoc.StatusInvestigating)
	require.NoError(t, err)
	got, err = svc.GetIncident(inc.ID)
	require.NoError(t, err)
	assert.Equal(t, domsoc.StatusInvestigating, got.Status)
	// Update to RESOLVED and confirm the change persisted.
	err = svc.UpdateVerdict(inc.ID, domsoc.StatusResolved)
	require.NoError(t, err)
	got, err = svc.GetIncident(inc.ID)
	require.NoError(t, err)
	assert.Equal(t, domsoc.StatusResolved, got.Status)
}
// --- E2E: Analytics Report ---

// TestE2E_AnalyticsReport ingests one high-severity event per category and
// checks that the 24-hour analytics report aggregates categories, sources,
// and the events-per-hour rate.
func TestE2E_AnalyticsReport(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Ingest several events, one per category.
	categories := []string{"jailbreak", "injection", "exfiltration", "auth_bypass", "tool_abuse"}
	for i, cat := range categories {
		evt := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityHigh, cat, fmt.Sprintf("analytics test %d", i))
		evt.SensorID = "sensor-analytics"
		_, _, err := svc.IngestEvent(evt)
		require.NoError(t, err) // previously ignored; a failed ingest would skew the report
	}
	report, err := svc.Analytics(24)
	require.NoError(t, err)
	assert.NotNil(t, report)
	assert.Greater(t, len(report.TopCategories), 0)
	assert.Greater(t, len(report.TopSources), 0)
	assert.GreaterOrEqual(t, report.EventsPerHour, float64(0))
}
// --- E2E: Multi-Sensor Concurrent Ingest ---

// TestE2E_ConcurrentIngest hammers the service from 10 goroutines (one per
// simulated sensor, 10 events each) and verifies the pipeline survives the
// concurrency: no panics, and the dashboard reflects ingested events.
func TestE2E_ConcurrentIngest(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	var (
		wg sync.WaitGroup
		mu sync.Mutex
		// errs (not "errors") avoids shadowing the stdlib package name.
		errs []error
	)
	// 10 sensors × 10 events each = 100 concurrent ingests.
	for s := 0; s < 10; s++ {
		wg.Add(1)
		go func(sensorNum int) {
			defer wg.Done()
			for i := 0; i < 10; i++ {
				evt := domsoc.NewSOCEvent(
					domsoc.SourceSentinelCore,
					domsoc.SeverityLow,
					"test",
					fmt.Sprintf("concurrent sensor-%d event-%d", sensorNum, i),
				)
				evt.SensorID = fmt.Sprintf("sensor-conc-%d", sensorNum)
				_, _, err := svc.IngestEvent(evt)
				if err != nil {
					mu.Lock()
					errs = append(errs, err)
					mu.Unlock()
				}
			}
		}(s)
	}
	wg.Wait()
	// Some events may be rate-limited (100 events/sec per sensor), but there
	// should be no panics or data corruption. Surface the rejection count —
	// the slice was previously collected but never read.
	if len(errs) > 0 {
		t.Logf("concurrent ingest: %d rejections (expected under rate limiting); first: %v", len(errs), errs[0])
	}
	dash, err := svc.Dashboard()
	require.NoError(t, err)
	assert.Greater(t, dash.TotalEvents, 0, "at least some events should have been ingested")
}
// --- E2E: Lattice TSA Chain Violation (SOC-CR-012) ---

// TestE2E_TSAChainViolation ingests the three-stage chain
// auth_bypass → tool_abuse → exfiltration from a single sensor and requires
// that at least one correlation rule (SOC-CR-012 or another match) produced
// a CRITICAL incident with MITRE mapping, and that the incident persisted.
func TestE2E_TSAChainViolation(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// SOC-CR-012 requires: auth_bypass → tool_abuse → exfiltration within 15 min.
	events := []struct {
		category string
		severity domsoc.EventSeverity
	}{
		{"auth_bypass", domsoc.SeverityHigh},
		{"tool_abuse", domsoc.SeverityHigh},
		{"exfiltration", domsoc.SeverityCritical},
	}
	// Track the most recent incident; earlier stages may also fire rules.
	var lastInc *domsoc.Incident
	for _, e := range events {
		evt := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, e.severity, e.category, "TSA chain test: "+e.category)
		evt.SensorID = "sensor-tsa"
		_, inc, err := svc.IngestEvent(evt)
		require.NoError(t, err)
		if inc != nil {
			lastInc = inc
		}
	}
	// The TSA chain (auth_bypass + tool_abuse + exfiltration) should trigger
	// SOC-CR-012 or another matching rule.
	require.NotNil(t, lastInc, "TSA chain (auth_bypass → tool_abuse → exfiltration) should create an incident")
	assert.Equal(t, domsoc.SeverityCritical, lastInc.Severity)
	assert.NotEmpty(t, lastInc.MITREMapping)
	// Verify incident is persisted (readable back by ID).
	got, err := svc.GetIncident(lastInc.ID)
	require.NoError(t, err)
	assert.Equal(t, lastInc.ID, got.ID)
}
// --- E2E: Zero-G Mode Excludes Playbook Auto-Response ---

// TestE2E_ZeroGExcludedFromAutoResponse verifies that events flagged
// ZeroGMode still flow through correlation (an incident may be created) but
// do NOT trigger the playbook webhook, and that the decision chain stays
// valid.
func TestE2E_ZeroGExcludedFromAutoResponse(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Set up a test webhook server to track playbook webhook notifications.
	var mu sync.Mutex
	var webhookCalls int
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		mu.Lock()
		webhookCalls++
		mu.Unlock()
		w.WriteHeader(http.StatusOK)
	}))
	defer ts.Close()
	svc.SetWebhookConfig(WebhookConfig{
		Endpoints:  []string{ts.URL + "/webhook"},
		MaxRetries: 1,
		TimeoutSec: 5,
	})
	// Ingest jailbreak + tool_abuse with ZeroGMode=true.
	// This should trigger correlation (incident created) but NOT playbooks.
	evt1 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityHigh, "jailbreak", "zero-g jailbreak test")
	evt1.SensorID = "sensor-zg"
	evt1.ZeroGMode = true
	_, _, err := svc.IngestEvent(evt1)
	require.NoError(t, err)
	evt2 := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, domsoc.SeverityCritical, "tool_abuse", "zero-g tool abuse test")
	evt2.SensorID = "sensor-zg"
	evt2.ZeroGMode = true
	_, inc, err := svc.IngestEvent(evt2)
	require.NoError(t, err)
	// Correlation should still run — incident should be created.
	if inc != nil {
		assert.Equal(t, domsoc.SeverityCritical, inc.Severity)
		// Wait for any async webhook goroutines.
		time.Sleep(200 * time.Millisecond)
		// Webhook should NOT have been called (playbook skipped for Zero-G).
		mu.Lock()
		assert.Equal(t, 0, webhookCalls, "webhooks should NOT fire for Zero-G events — playbook must be skipped")
		mu.Unlock()
	}
	// Verify decision log records the PLAYBOOK_SKIPPED:ZERO_G entry.
	// NOTE(review): only chain validity and entry count are asserted here —
	// the specific PLAYBOOK_SKIPPED entry is not matched; confirm coverage.
	logPath := svc.DecisionLogPath()
	if logPath != "" {
		valid, broken, err := audit.VerifyChainFromFile(logPath)
		require.NoError(t, err)
		assert.Equal(t, 0, broken, "decision chain should be intact")
		assert.Greater(t, valid, 0, "should have decision entries")
	}
}
// --- E2E: Decision Logger Tamper Detection ---

// TestE2E_DecisionLoggerTampering builds a decision chain of 10+ entries,
// confirms it verifies clean, then corrupts one mid-chain line on disk and
// confirms VerifyChainFromFile reports the break.
func TestE2E_DecisionLoggerTampering(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// Ingest several events to build up a decision chain.
	for i := 0; i < 10; i++ {
		evt := domsoc.NewSOCEvent(
			domsoc.SourceSentinelCore,
			domsoc.SeverityLow,
			"test",
			fmt.Sprintf("tamper test event %d", i),
		)
		evt.SensorID = "sensor-tamper"
		_, _, err := svc.IngestEvent(evt)
		require.NoError(t, err)
	}
	// Step 1: Verify chain is valid (brokenLine 0 means no break found).
	logPath := svc.DecisionLogPath()
	require.NotEmpty(t, logPath, "decision log path should be set")
	validCount, brokenLine, err := audit.VerifyChainFromFile(logPath)
	require.NoError(t, err)
	assert.Equal(t, 0, brokenLine, "chain should be intact before tampering")
	assert.GreaterOrEqual(t, validCount, 10, "should have at least 10 decision entries")
	// Step 2: Tamper with the log file — modify a line mid-chain.
	data, err := os.ReadFile(logPath)
	require.NoError(t, err)
	lines := bytes.Split(data, []byte("\n"))
	if len(lines) > 5 {
		// Corrupt line 5 by altering content (lines[4] is the 5th line, 0-based).
		lines[4] = []byte("TAMPERED|2026-01-01T00:00:00Z|SOC|FAKE|fake_reason|0000000000")
		err = os.WriteFile(logPath, bytes.Join(lines, []byte("\n")), 0644)
		require.NoError(t, err)
		// Step 3: Verify chain detects the tamper (non-zero broken line).
		_, brokenLine2, err2 := audit.VerifyChainFromFile(logPath)
		require.NoError(t, err2)
		assert.Greater(t, brokenLine2, 0, "chain should detect tampering — broken line reported")
	}
}
// --- E2E: Cross-Sensor Session Correlation (SOC-CR-011) ---

// TestE2E_CrossSensorSessionCorrelation ingests three high-severity events
// from three different sources/sensors sharing one session_id and requires
// that a correlation rule (SOC-CR-011 or another match) created an incident.
func TestE2E_CrossSensorSessionCorrelation(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// SOC-CR-011 requires 3+ events from different sensors with same session_id.
	sessionID := "session-xsensor-e2e-001"
	sources := []struct {
		source   domsoc.EventSource
		sensor   string
		category string
	}{
		{domsoc.SourceShield, "sensor-shield-1", "auth_bypass"},
		{domsoc.SourceSentinelCore, "sensor-core-1", "jailbreak"},
		{domsoc.SourceImmune, "sensor-immune-1", "exfiltration"},
	}
	// Track the most recent incident; earlier events may also fire rules.
	var lastInc *domsoc.Incident
	for _, s := range sources {
		evt := domsoc.NewSOCEvent(s.source, domsoc.SeverityHigh, s.category, "cross-sensor test: "+s.category)
		evt.SensorID = s.sensor
		evt.SessionID = sessionID
		_, inc, err := svc.IngestEvent(evt)
		require.NoError(t, err)
		if inc != nil {
			lastInc = inc
		}
	}
	// After 3 events from different sensors/sources with same session_id,
	// at least one correlation rule should have matched.
	require.NotNil(t, lastInc, "cross-sensor session attack (3 sources, same session_id) should create incident")
	assert.NotEmpty(t, lastInc.ID)
	assert.NotEmpty(t, lastInc.Events, "incident should reference triggering events")
}
// --- E2E: Crescendo Escalation (SOC-CR-015) ---

// TestE2E_CrescendoEscalation ingests three jailbreak events with ascending
// severity (LOW → MEDIUM → HIGH) from one sensor and requires that the
// crescendo rule produced a CRITICAL incident mapped to MITRE T1059.
func TestE2E_CrescendoEscalation(t *testing.T) {
	svc := newTestServiceWithLogger(t)
	// SOC-CR-015: 3+ jailbreak events with ascending severity within 15 min.
	severities := []domsoc.EventSeverity{
		domsoc.SeverityLow,
		domsoc.SeverityMedium,
		domsoc.SeverityHigh,
	}
	// Track the most recent incident; earlier events may also fire rules.
	var lastInc *domsoc.Incident
	for i, sev := range severities {
		evt := domsoc.NewSOCEvent(domsoc.SourceSentinelCore, sev, "jailbreak",
			fmt.Sprintf("crescendo jailbreak attempt %d", i+1))
		evt.SensorID = "sensor-crescendo"
		_, inc, err := svc.IngestEvent(evt)
		require.NoError(t, err)
		if inc != nil {
			lastInc = inc
		}
	}
	// The ascending severity pattern (LOW→MEDIUM→HIGH) should trigger SOC-CR-015.
	require.NotNil(t, lastInc, "crescendo pattern (LOW→MEDIUM→HIGH jailbreaks) should create incident")
	assert.Equal(t, domsoc.SeverityCritical, lastInc.Severity)
	assert.Contains(t, lastInc.MITREMapping, "T1059")
}

View file

@ -0,0 +1,100 @@
package soc
import (
"fmt"
"sync/atomic"
"testing"
"time"
"github.com/stretchr/testify/require"
domsoc "github.com/syntrex/gomcp/internal/domain/soc"
"github.com/syntrex/gomcp/internal/infrastructure/audit"
"github.com/syntrex/gomcp/internal/infrastructure/sqlite"
)
// newBenchService creates a minimal SOC service for benchmarking.
// Rate limiting is disabled so benchmarks measure raw pipeline throughput.
func newBenchService(b *testing.B) *Service {
	b.Helper()
	dir := b.TempDir()
	conn, err := sqlite.Open(dir + "/bench.db")
	require.NoError(b, err)
	b.Cleanup(func() { conn.Close() })
	socRepo, err := sqlite.NewSOCRepo(conn)
	require.NoError(b, err)
	decLog, err := audit.NewDecisionLogger(dir)
	require.NoError(b, err)
	b.Cleanup(func() { decLog.Close() })
	svc := NewService(socRepo, decLog)
	svc.DisableRateLimit() // throughput, not rate limiting, is under test
	return svc
}
// BenchmarkIngestEvent measures single-event pipeline throughput.
func BenchmarkIngestEvent(b *testing.B) {
	svc := newBenchService(b)
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		evt := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityMedium, "injection",
			fmt.Sprintf("Bench event #%d", i))
		evt.ID = fmt.Sprintf("bench-evt-%d", i)
		if _, _, err := svc.IngestEvent(evt); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkIngestEvent_WithCorrelation measures pipeline throughput with
// correlation rules active. It pre-loads 50 jailbreak events so correlation
// matching has real state to work against during the timed loop.
func BenchmarkIngestEvent_WithCorrelation(b *testing.B) {
	svc := newBenchService(b)
	// Pre-load events to make correlation rules meaningful.
	// Errors are deliberately ignored here: the pre-load is best-effort setup,
	// and failures would surface in the timed loop below anyway.
	for i := 0; i < 50; i++ {
		event := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityHigh, "jailbreak",
			fmt.Sprintf("Pre-load jailbreak #%d", i))
		event.ID = fmt.Sprintf("preload-%d", i)
		svc.IngestEvent(event)
		// NOTE(review): presumably the sleep gives events distinct timestamps
		// for temporal correlation rules — confirm it is still required.
		time.Sleep(time.Microsecond)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		event := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityHigh, "jailbreak",
			fmt.Sprintf("Corr bench event #%d", i))
		event.ID = fmt.Sprintf("bench-corr-%d", i)
		_, _, err := svc.IngestEvent(event)
		if err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkIngestEvent_Parallel measures concurrent ingest throughput using
// b.RunParallel, with an atomic counter producing unique event IDs across
// worker goroutines.
func BenchmarkIngestEvent_Parallel(b *testing.B) {
	svc := newBenchService(b)
	var counter atomic.Int64
	b.ResetTimer()
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			n := counter.Add(1)
			event := domsoc.NewSOCEvent(domsoc.SourceShield, domsoc.SeverityLow, "jailbreak",
				fmt.Sprintf("Parallel bench #%d", n))
			event.ID = fmt.Sprintf("bench-par-%d", n)
			if _, _, err := svc.IngestEvent(event); err != nil {
				// b.Fatal (FailNow) must not be called from goroutines other
				// than the one running the benchmark function — RunParallel
				// bodies run on worker goroutines. Record the failure and
				// stop this worker instead.
				b.Error(err)
				return
			}
		}
	})
}

View file

@ -0,0 +1,153 @@
package soc
import (
"fmt"
"math"
"sort"
"sync"
"sync/atomic"
"testing"
"time"
domsoc "github.com/syntrex/gomcp/internal/domain/soc"
"github.com/syntrex/gomcp/internal/infrastructure/audit"
"github.com/syntrex/gomcp/internal/infrastructure/sqlite"
"github.com/stretchr/testify/require"
)
// TestLoadTest_SustainedThroughput measures SOC pipeline throughput and latency
// under sustained concurrent load (16 workers × 200 events). Reports p50/p95/p99
// latencies and events/sec, then asserts an error rate below 5% and a sustained
// throughput above 100 events/sec. Skipped under -short.
func TestLoadTest_SustainedThroughput(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping load test in short mode")
	}
	// Setup service with file-based SQLite for concurrency safety.
	tmpDir := t.TempDir()
	db, err := sqlite.Open(tmpDir + "/loadtest.db")
	require.NoError(t, err)
	repo, err := sqlite.NewSOCRepo(db)
	require.NoError(t, err)
	logger, err := audit.NewDecisionLogger(tmpDir)
	require.NoError(t, err)
	// Close logger before TempDir cleanup removes its directory.
	t.Cleanup(func() {
		logger.Close()
		db.Close()
	})
	svc := NewService(repo, logger)
	svc.DisableRateLimit() // bypass rate limiter for raw throughput
	// Load test parameters.
	const (
		numWorkers   = 16
		eventsPerWkr = 200
		totalEvents  = numWorkers * eventsPerWkr
	)
	categories := []string{"jailbreak", "injection", "exfiltration", "auth_bypass", "tool_abuse"}
	sources := []domsoc.EventSource{domsoc.SourceSentinelCore, domsoc.SourceShield, domsoc.SourceGoMCP}
	var (
		wg        sync.WaitGroup
		latencies = make([]time.Duration, totalEvents) // workers write disjoint indices; no lock needed
		errors    int64                                // updated atomically by workers
		incidents int64                                // updated atomically by workers
	)
	start := time.Now()
	for w := 0; w < numWorkers; w++ {
		wg.Add(1)
		go func(workerID int) {
			defer wg.Done()
			for i := 0; i < eventsPerWkr; i++ {
				idx := workerID*eventsPerWkr + i
				evt := domsoc.NewSOCEvent(
					sources[idx%len(sources)],
					domsoc.SeverityHigh,
					categories[idx%len(categories)],
					fmt.Sprintf("load-test w%d-e%d", workerID, i),
				)
				evt.SensorID = fmt.Sprintf("load-sensor-%d", workerID)
				t0 := time.Now()
				_, inc, err := svc.IngestEvent(evt)
				latencies[idx] = time.Since(t0)
				if err != nil {
					atomic.AddInt64(&errors, 1)
				}
				if inc != nil {
					atomic.AddInt64(&incidents, 1)
				}
			}
		}(w)
	}
	wg.Wait()
	totalDuration := time.Since(start)
	// Compute latency percentiles (safe: all workers have finished writing).
	sort.Slice(latencies, func(i, j int) bool { return latencies[i] < latencies[j] })
	p50 := percentile(latencies, 50)
	p95 := percentile(latencies, 95)
	p99 := percentile(latencies, 99)
	mean := meanDuration(latencies)
	eventsPerSec := float64(totalEvents) / totalDuration.Seconds()
	// Report results.
	t.Logf("═══════════════════════════════════════════════")
	t.Logf(" SENTINEL SOC Load Test Results")
	t.Logf("═══════════════════════════════════════════════")
	t.Logf(" Workers: %d", numWorkers)
	t.Logf(" Events/worker: %d", eventsPerWkr)
	t.Logf(" Total events: %d", totalEvents)
	t.Logf(" Duration: %s", totalDuration.Round(time.Millisecond))
	t.Logf(" Throughput: %.0f events/sec", eventsPerSec)
	t.Logf("───────────────────────────────────────────────")
	t.Logf(" Mean: %s", mean.Round(time.Microsecond))
	t.Logf(" P50: %s", p50.Round(time.Microsecond))
	t.Logf(" P95: %s", p95.Round(time.Microsecond))
	t.Logf(" P99: %s", p99.Round(time.Microsecond))
	t.Logf(" Min: %s", latencies[0].Round(time.Microsecond))
	t.Logf(" Max: %s", latencies[len(latencies)-1].Round(time.Microsecond))
	t.Logf("───────────────────────────────────────────────")
	t.Logf(" Errors: %d (%.1f%%)", errors, float64(errors)/float64(totalEvents)*100)
	t.Logf(" Incidents: %d", incidents)
	t.Logf("═══════════════════════════════════════════════")
	// Assertions: basic sanity checks.
	// Fix: testify prints a lone string message verbatim (no fmt expansion),
	// so "%" must not be doubled here — "5%%" would display literally.
	require.Less(t, float64(errors)/float64(totalEvents), 0.05, "error rate should be < 5%")
	require.Greater(t, eventsPerSec, float64(100), "should sustain > 100 events/sec")
}
func percentile(sorted []time.Duration, p int) time.Duration {
if len(sorted) == 0 {
return 0
}
idx := int(math.Ceil(float64(p)/100.0*float64(len(sorted)))) - 1
if idx < 0 {
idx = 0
}
if idx >= len(sorted) {
idx = len(sorted) - 1
}
return sorted[idx]
}
func meanDuration(ds []time.Duration) time.Duration {
if len(ds) == 0 {
return 0
}
var total time.Duration
for _, d := range ds {
total += d
}
return total / time.Duration(len(ds))
}

File diff suppressed because it is too large Load diff

View file

@ -143,7 +143,7 @@ func TestRunPlaybook_IncidentNotFound(t *testing.T) {
svc := newTestService(t)
// Use a valid playbook ID from defaults.
_, err := svc.RunPlaybook("pb-auto-block-jailbreak", "nonexistent-inc")
_, err := svc.RunPlaybook("pb-block-jailbreak", "nonexistent-inc")
require.Error(t, err)
assert.Contains(t, err.Error(), "incident not found")
}

View file

@ -0,0 +1,255 @@
package soc
import (
"encoding/json"
"log/slog"
"net/http"
"strings"
"time"
)
// STIXBundle represents a STIX 2.1 bundle (simplified): a typed container
// holding a flat list of STIX objects as delivered by a feed.
type STIXBundle struct {
	Type    string       `json:"type"` // always "bundle" per STIX 2.1
	ID      string       `json:"id"`   // bundle identifier, e.g. "bundle--<uuid>"
	Objects []STIXObject `json:"objects"`
}
// STIXObject represents a generic STIX 2.1 object. Only the fields this
// package reads (Type, Pattern, Created, Modified, Labels) are relied upon;
// the rest are carried for completeness.
type STIXObject struct {
	Type        string    `json:"type"` // indicator, malware, attack-pattern, etc.
	ID          string    `json:"id"`
	Created     time.Time `json:"created"`
	Modified    time.Time `json:"modified"`
	Name        string    `json:"name,omitempty"`
	Description string    `json:"description,omitempty"`
	Pattern     string    `json:"pattern,omitempty"`      // STIX pattern (indicators)
	PatternType string    `json:"pattern_type,omitempty"` // stix, pcre, sigma
	ValidFrom   time.Time `json:"valid_from,omitempty"`
	Labels      []string  `json:"labels,omitempty"` // drives IOC severity in stixPatternToIOC
	// Kill chain phases for attack-pattern objects.
	KillChainPhases []struct {
		KillChainName string `json:"kill_chain_name"`
		PhaseName     string `json:"phase_name"`
	} `json:"kill_chain_phases,omitempty"`
	// External references (CVE, etc.)
	ExternalReferences []struct {
		SourceName  string `json:"source_name"`
		ExternalID  string `json:"external_id,omitempty"`
		URL         string `json:"url,omitempty"`
		Description string `json:"description,omitempty"`
	} `json:"external_references,omitempty"`
}
// STIXFeedConfig configures automatic STIX feed polling for one feed.
// A zero Interval defaults to one hour (applied in pollFeed).
type STIXFeedConfig struct {
	Name     string            `json:"name"`     // Feed name (e.g., "OTX", "MISP")
	URL      string            `json:"url"`      // TAXII or HTTP feed URL
	APIKey   string            `json:"api_key"`  // Authentication key
	Headers  map[string]string `json:"headers"`  // Additional headers (set after APIKey, so they may override it)
	Interval time.Duration     `json:"interval"` // Poll interval (default: 1h)
	Enabled  bool              `json:"enabled"`  // Disabled feeds are skipped by Start
}
// FeedSync syncs IOCs from STIX/TAXII feeds into the ThreatIntelStore.
// One shared HTTP client (with timeout) is reused across all feed polls.
type FeedSync struct {
	feeds  []STIXFeedConfig  // feeds to poll; only Enabled ones are started
	store  *ThreatIntelStore // destination for imported IOCs
	client *http.Client      // shared client, 30s timeout set in NewFeedSync
}
// NewFeedSync creates a feed synchronizer over the given IOC store and feed
// list, with a shared 30-second-timeout HTTP client.
func NewFeedSync(store *ThreatIntelStore, feeds []STIXFeedConfig) *FeedSync {
	httpClient := &http.Client{Timeout: 30 * time.Second}
	return &FeedSync{
		store:  store,
		feeds:  feeds,
		client: httpClient,
	}
}
// Start launches one background polling goroutine per enabled feed.
// Each goroutine runs until the done channel is closed.
func (f *FeedSync) Start(done <-chan struct{}) {
	for _, feed := range f.feeds {
		if feed.Enabled {
			go f.pollFeed(feed, done)
		}
	}
}
// pollFeed periodically fetches and processes a single STIX feed.
// It performs one fetch immediately, then one per tick; closing done is the
// goroutine's only exit path.
func (f *FeedSync) pollFeed(feed STIXFeedConfig, done <-chan struct{}) {
	interval := feed.Interval
	if interval == 0 {
		interval = time.Hour // documented default poll interval
	}
	slog.Info("stix feed started", "feed", feed.Name, "url", feed.URL, "interval", interval)
	// Initial fetch.
	f.fetchFeed(feed)
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for {
		select {
		case <-done:
			slog.Info("stix feed stopped", "feed", feed.Name)
			return
		case <-ticker.C:
			f.fetchFeed(feed)
		}
	}
}
// fetchFeed performs a single HTTP GET against the feed URL, decodes the
// response as a STIX bundle, and imports its indicators into the store.
// All failures are logged and swallowed: polling is best-effort and the
// next tick retries.
func (f *FeedSync) fetchFeed(feed STIXFeedConfig) {
	req, err := http.NewRequest(http.MethodGet, feed.URL, nil)
	if err != nil {
		slog.Error("stix feed: request error", "feed", feed.Name, "error", err)
		return
	}
	req.Header.Set("Accept", "application/stix+json;version=2.1")
	// NOTE(review): this OTX-specific header is set for ANY feed that carries
	// an APIKey, not only OTX — confirm non-OTX feeds tolerate it. Custom
	// Headers below are applied afterwards and can override it.
	if feed.APIKey != "" {
		req.Header.Set("X-OTX-API-KEY", feed.APIKey)
	}
	for k, v := range feed.Headers {
		req.Header.Set(k, v)
	}
	resp, err := f.client.Do(req)
	if err != nil {
		slog.Error("stix feed: fetch error", "feed", feed.Name, "error", err)
		return
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		slog.Warn("stix feed: non-200 response", "feed", feed.Name, "status", resp.StatusCode)
		return
	}
	var bundle STIXBundle
	if err := json.NewDecoder(resp.Body).Decode(&bundle); err != nil {
		slog.Error("stix feed: decode error", "feed", feed.Name, "error", err)
		return
	}
	imported := f.processBundle(feed.Name, bundle)
	slog.Info("stix feed synced",
		"feed", feed.Name,
		"objects", len(bundle.Objects),
		"iocs_imported", imported,
	)
}
// processBundle walks a STIX bundle, converts every indicator that has a
// non-empty, parseable pattern into an IOC, tags it with the feed name and
// the indicator's labels, and adds it to the store. Returns the number of
// IOCs imported.
func (f *FeedSync) processBundle(feedName string, bundle STIXBundle) int {
	count := 0
	for _, obj := range bundle.Objects {
		// Only indicator objects with a pattern can yield an IOC.
		if obj.Type != "indicator" {
			continue
		}
		if obj.Pattern == "" {
			continue
		}
		parsed := stixPatternToIOC(obj)
		if parsed == nil {
			continue // unsupported or malformed pattern
		}
		parsed.Source = feedName
		parsed.Tags = obj.Labels
		f.store.AddIOC(*parsed)
		count++
	}
	return count
}
// stixPatternToIOC converts a STIX indicator pattern to our IOC format.
// Supports: [file:hashes.'SHA-256' = '...'], [ipv4-addr:value = '...'],
// [domain-name:value = '...'], [url:value = '...']
// Returns nil for unsupported pattern types or when no value can be
// extracted. The IOC timestamp prefers Modified, falling back to Created.
func stixPatternToIOC(obj STIXObject) *IOC {
	pattern := obj.Pattern
	// "now" here is the indicator's own timestamp, not wall-clock time.
	now := obj.Modified
	if now.IsZero() {
		now = obj.Created
	}
	// Defaults: medium severity, 0.7 confidence, until labels say otherwise.
	ioc := &IOC{
		Value:      "",
		Severity:   "medium",
		FirstSeen:  now,
		LastSeen:   now,
		Confidence: 0.7,
	}
	switch {
	case strings.Contains(pattern, "file:hashes"):
		ioc.Type = IOCTypeHash
		ioc.Value = extractSTIXValue(pattern)
	case strings.Contains(pattern, "ipv4-addr:value"):
		ioc.Type = IOCTypeIP
		ioc.Value = extractSTIXValue(pattern)
	case strings.Contains(pattern, "domain-name:value"):
		ioc.Type = IOCTypeDomain
		ioc.Value = extractSTIXValue(pattern)
	case strings.Contains(pattern, "url:value"):
		ioc.Type = IOCTypeURL
		ioc.Value = extractSTIXValue(pattern)
	default:
		return nil
	}
	if ioc.Value == "" {
		return nil
	}
	// Derive severity from STIX labels.
	// NOTE(review): when several labels match, the LAST one in the slice
	// wins (each iteration overwrites Severity) — confirm this order
	// dependence is intended rather than a severity-priority ranking.
	for _, label := range obj.Labels {
		switch {
		case strings.Contains(label, "anomalous-activity"):
			ioc.Severity = "low"
		case strings.Contains(label, "malicious-activity"):
			ioc.Severity = "critical"
		case strings.Contains(label, "attribution"):
			ioc.Severity = "high"
		}
	}
	return ioc
}
// extractSTIXValue pulls the quoted comparison value out of a STIX pattern:
// [ipv4-addr:value = '192.168.1.1']        → "192.168.1.1"
// [file:hashes.'SHA-256' = 'e3b0c44...']   → "e3b0c44..."
// It anchors on "= '" so quotes appearing earlier in the pattern (as in
// hashes.'SHA-256') are skipped. Returns "" when no value can be found.
func extractSTIXValue(pattern string) string {
	_, rest, found := strings.Cut(pattern, "= '")
	if !found {
		return ""
	}
	value, _, closed := strings.Cut(rest, "'")
	if !closed {
		return ""
	}
	return value
}
// DefaultOTXFeed returns a pre-configured AlienVault OTX feed config.
// The feed is enabled only when an API key is supplied. The key is set both
// as APIKey and in Headers; fetchFeed also writes X-OTX-API-KEY from APIKey,
// so the two are redundant but consistent.
func DefaultOTXFeed(apiKey string) STIXFeedConfig {
	return STIXFeedConfig{
		Name:     "AlienVault OTX",
		URL:      "https://otx.alienvault.com/api/v1/pulses/subscribed",
		APIKey:   apiKey,
		Interval: time.Hour,
		Enabled:  apiKey != "",
		Headers: map[string]string{
			"X-OTX-API-KEY": apiKey,
		},
	}
}
// IOC type is defined in threat_intel.go — this file uses it directly.

View file

@ -0,0 +1,137 @@
package soc
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// --- stixPatternToIOC ---

// An ipv4-addr pattern must yield an IP-typed IOC with the default
// "medium" severity and a non-zero FirstSeen taken from Modified.
func TestSTIXPatternToIOC_IPv4(t *testing.T) {
	obj := STIXObject{
		Type:     "indicator",
		Pattern:  "[ipv4-addr:value = '192.168.1.1']",
		Modified: time.Now(),
	}
	ioc := stixPatternToIOC(obj)
	require.NotNil(t, ioc, "should parse IPv4 pattern")
	assert.Equal(t, IOCTypeIP, ioc.Type)
	assert.Equal(t, "192.168.1.1", ioc.Value)
	assert.Equal(t, "medium", ioc.Severity)
	assert.False(t, ioc.FirstSeen.IsZero(), "FirstSeen must be set")
}
// A file-hash pattern must yield a hash-typed IOC, and the
// "malicious-activity" label must raise severity to critical.
func TestSTIXPatternToIOC_Hash(t *testing.T) {
	hash := "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
	obj := STIXObject{
		Type:     "indicator",
		Pattern:  "[file:hashes.'SHA-256' = '" + hash + "']",
		Modified: time.Now(),
		Labels:   []string{"malicious-activity"},
	}
	ioc := stixPatternToIOC(obj)
	require.NotNil(t, ioc, "should parse hash pattern")
	assert.Equal(t, IOCTypeHash, ioc.Type)
	assert.Equal(t, hash, ioc.Value)
	assert.Equal(t, "critical", ioc.Severity, "malicious-activity label → critical")
}
// A domain-name pattern must yield a domain-typed IOC, and the
// "attribution" label must raise severity to high.
func TestSTIXPatternToIOC_Domain(t *testing.T) {
	obj := STIXObject{
		Type:     "indicator",
		Pattern:  "[domain-name:value = 'evil.example.com']",
		Modified: time.Now(),
		Labels:   []string{"attribution"},
	}
	ioc := stixPatternToIOC(obj)
	require.NotNil(t, ioc)
	assert.Equal(t, IOCTypeDomain, ioc.Type)
	assert.Equal(t, "evil.example.com", ioc.Value)
	assert.Equal(t, "high", ioc.Severity, "attribution label → high")
}
// An unsupported pattern type (email-addr) must be rejected with nil.
func TestSTIXPatternToIOC_Unsupported(t *testing.T) {
	indicator := STIXObject{
		Type:     "indicator",
		Pattern:  "[email-addr:value = 'attacker@evil.com']",
		Modified: time.Now(),
	}
	assert.Nil(t, stixPatternToIOC(indicator), "unsupported pattern type should return nil")
}
// When Modified is the zero time, the IOC timestamp must fall back to
// the indicator's Created time.
func TestSTIXPatternToIOC_FallbackToCreated(t *testing.T) {
	created := time.Date(2026, 1, 15, 0, 0, 0, 0, time.UTC)
	obj := STIXObject{
		Type:    "indicator",
		Pattern: "[ipv4-addr:value = '10.0.0.1']",
		Created: created,
		// Modified is zero → should fall back to Created
	}
	ioc := stixPatternToIOC(obj)
	require.NotNil(t, ioc)
	assert.Equal(t, created, ioc.FirstSeen, "should fall back to Created when Modified is zero")
}
// --- extractSTIXValue ---
func TestExtractSTIXValue(t *testing.T) {
tests := []struct {
name string
pattern string
want string
}{
{"ipv4", "[ipv4-addr:value = '1.2.3.4']", "1.2.3.4"},
{"domain", "[domain-name:value = 'evil.com']", "evil.com"},
{"hash", "[file:hashes.'SHA-256' = 'abc123']", "abc123"},
{"empty_no_quotes", "[ipv4-addr:value = ]", ""},
{"single_quote_only", "'", ""},
{"empty_string", "", ""},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := extractSTIXValue(tt.pattern)
assert.Equal(t, tt.want, got)
})
}
}
// --- processBundle ---
// TestProcessBundle_FiltersNonIndicators checks that processBundle imports
// only indicator objects that carry a non-empty pattern, skipping every
// other STIX object type.
func TestProcessBundle_FiltersNonIndicators(t *testing.T) {
	store := NewThreatIntelStore()
	syncer := NewFeedSync(store, nil)
	now := time.Now()
	bundle := STIXBundle{
		Type: "bundle",
		ID:   "bundle--test",
		Objects: []STIXObject{
			{Type: "indicator", Pattern: "[ipv4-addr:value = '10.0.0.1']", Modified: now},
			{Type: "malware", Name: "BadMalware"},   // not an indicator → skipped
			{Type: "indicator", Pattern: ""},        // empty pattern → skipped
			{Type: "attack-pattern", Name: "Phish"}, // not an indicator → skipped
			{Type: "indicator", Pattern: "[domain-name:value = 'bad.com']", Modified: now},
		},
	}
	assert.Equal(t, 2, syncer.processBundle("test-feed", bundle), "should import only 2 valid indicators")
	assert.Equal(t, 2, store.TotalIOCs, "store should have 2 IOCs")
}
// --- DefaultOTXFeed ---
// TestDefaultOTXFeed verifies the AlienVault OTX feed defaults (name, URL,
// interval, API-key header) and that an empty key disables the feed.
func TestDefaultOTXFeed(t *testing.T) {
	withKey := DefaultOTXFeed("test-key-123")
	assert.Equal(t, "AlienVault OTX", withKey.Name)
	assert.True(t, withKey.Enabled, "should be enabled when key provided")
	assert.Contains(t, withKey.URL, "otx.alienvault.com")
	assert.Equal(t, time.Hour, withKey.Interval)
	assert.Equal(t, "test-key-123", withKey.Headers["X-OTX-API-KEY"])

	withoutKey := DefaultOTXFeed("")
	assert.False(t, withoutKey.Enabled, "should be disabled when key is empty")
}

View file

@ -6,8 +6,8 @@ import (
"bytes"
"encoding/json"
"fmt"
"log"
"math/rand"
"log/slog"
"math/rand/v2"
"net/http"
"sync"
"time"
@ -58,9 +58,9 @@ type WebhookNotifier struct {
client *http.Client
enabled bool
// Stats
Sent int64 `json:"sent"`
Failed int64 `json:"failed"`
// Stats (unexported — access via Stats() method)
sent int64
failed int64
}
// NewWebhookNotifier creates a notifier with the given config.
@ -80,23 +80,7 @@ func NewWebhookNotifier(config WebhookConfig) *WebhookNotifier {
}
}
// severityRank returns numeric rank for severity comparison.
func severityRank(s domsoc.EventSeverity) int {
switch s {
case domsoc.SeverityCritical:
return 5
case domsoc.SeverityHigh:
return 4
case domsoc.SeverityMedium:
return 3
case domsoc.SeverityLow:
return 2
case domsoc.SeverityInfo:
return 1
default:
return 0
}
}
// NotifyIncident sends an incident webhook to all configured endpoints.
// Non-blocking: fires goroutines for each endpoint.
@ -105,9 +89,9 @@ func (w *WebhookNotifier) NotifyIncident(eventType string, incident *domsoc.Inci
return nil
}
// Severity filter
// Severity filter — use domain Rank() method (Q-1 FIX: removed duplicate severityRank).
if w.config.MinSeverity != "" {
if severityRank(incident.Severity) < severityRank(w.config.MinSeverity) {
if incident.Severity.Rank() < w.config.MinSeverity.Rank() {
return nil
}
}
@ -146,9 +130,9 @@ func (w *WebhookNotifier) NotifyIncident(eventType string, incident *domsoc.Inci
w.mu.Lock()
for _, r := range results {
if r.Success {
w.Sent++
w.sent++
} else {
w.Failed++
w.failed++
}
}
w.mu.Unlock()
@ -213,7 +197,7 @@ func (w *WebhookNotifier) sendWithRetry(url string, body []byte) WebhookResult {
result.Error = err.Error()
if attempt < w.config.MaxRetries {
backoff := time.Duration(1<<uint(attempt)) * 500 * time.Millisecond
jitter := time.Duration(rand.Intn(500)) * time.Millisecond
jitter := time.Duration(rand.IntN(500)) * time.Millisecond
time.Sleep(backoff + jitter)
continue
}
@ -230,12 +214,12 @@ func (w *WebhookNotifier) sendWithRetry(url string, body []byte) WebhookResult {
result.Error = fmt.Sprintf("HTTP %d", resp.StatusCode)
if attempt < w.config.MaxRetries {
backoff := time.Duration(1<<uint(attempt)) * 500 * time.Millisecond
jitter := time.Duration(rand.Intn(500)) * time.Millisecond
jitter := time.Duration(rand.IntN(500)) * time.Millisecond
time.Sleep(backoff + jitter)
}
}
log.Printf("[SOC] webhook failed after %d retries: %s → %s", w.config.MaxRetries, url, result.Error)
slog.Error("webhook failed", "retries", w.config.MaxRetries, "url", url, "error", result.Error)
return result
}
@ -243,5 +227,5 @@ func (w *WebhookNotifier) sendWithRetry(url string, body []byte) WebhookResult {
func (w *WebhookNotifier) Stats() (sent, failed int64) {
w.mu.RLock()
defer w.mu.RUnlock()
return w.Sent, w.Failed
return w.sent, w.failed
}