Testing Guide
Test aperture configurations using the aperture/testing package.
Installation
import apertesting "github.com/zoobz-io/aperture/testing"
Mock Logger Provider
Capture logs without external collectors:
func TestLogging(t *testing.T) {
	ctx := context.Background()

	// Event bus under test; deferred Shutdown is a safety net.
	bus := capitan.New()
	defer bus.Shutdown()

	evt := capitan.NewSignal("test.event", "Test event")

	// Swap in the mock logger provider so records stay in-process.
	logProv := apertesting.NewMockLoggerProvider()
	ap, err := aperture.New(bus, logProv, noop.NewMeterProvider(), tracenoop.NewTracerProvider())
	if err != nil {
		t.Fatal(err)
	}
	defer ap.Close()

	// Two emissions should produce exactly two log records.
	bus.Emit(ctx, evt)
	bus.Emit(ctx, evt)
	bus.Shutdown() // flush pending records before asserting

	records := logProv.Capture().Records()
	if len(records) != 2 {
		t.Errorf("expected 2 records, got %d", len(records))
	}
}
Log Capture
Access captured log records:
mockLog := apertesting.NewMockLoggerProvider()
// After emissions
cap.Shutdown()
capture := mockLog.Capture()
// Count records
if capture.Count() != 3 {
t.Error("expected 3 records")
}
// Get all records
records := capture.Records()
for _, r := range records {
// Inspect log.Record
}
// Reset for next test
capture.Reset()
Wait for Async Operations
Wait for expected record count:
capture := mockLog.Capture()
// Emit asynchronously
go func() {
for i := 0; i < 10; i++ {
cap.Emit(ctx, sig)
}
}()
// Wait up to 1 second for 10 records
if !capture.WaitForCount(10, time.Second) {
t.Error("timeout waiting for records")
}
Event Capture
Capture capitan events directly:
func TestEventCapture(t *testing.T) {
	ctx := context.Background()

	bus := capitan.New()
	defer bus.Shutdown()

	evt := capitan.NewSignal("test.event", "Test event")
	keyField := capitan.NewStringKey("key")

	// Register a capture handler so every emission of evt is recorded.
	rec := apertesting.NewEventCapture()
	bus.Hook(evt, rec.Handler())

	// Emit two events carrying distinct field values.
	bus.Emit(ctx, evt, keyField.Field("value1"))
	bus.Emit(ctx, evt, keyField.Field("value2"))
	bus.Shutdown() // flush before inspecting

	// Both emissions must have been captured.
	events := rec.Events()
	if len(events) != 2 {
		t.Errorf("expected 2 events, got %d", len(events))
	}

	// The captured event should reference the signal that was emitted.
	if events[0].Signal != evt {
		t.Error("wrong signal")
	}
}
Testing with Real Providers
For integration tests against actual OTEL collectors:
func TestWithCollector(t *testing.T) {
	ctx := context.Background()

	// Real OTLP providers; skip gracefully when no collector is running.
	providers, err := apertesting.TestProviders(ctx, "test-service", "v1.0.0", "localhost:4318")
	if err != nil {
		t.Skipf("OTLP collector not available: %v", err)
	}
	defer providers.Shutdown(ctx)

	bus := capitan.New()
	defer bus.Shutdown()

	ap, err := aperture.New(bus, providers.Log, providers.Meter, providers.Trace)
	if err != nil {
		t.Fatal(err)
	}
	defer ap.Close()

	// Exercise the full pipeline against the live collector.
	evt := capitan.NewSignal("integration.test", "Integration test")
	bus.Emit(ctx, evt)
}
Testing Whitelist Filtering
Verify log filtering works:
// TestWhitelistFiltering verifies that only whitelisted signals reach the log
// provider: two signals are emitted, but the schema whitelists just one.
func TestWhitelistFiltering(t *testing.T) {
	ctx := context.Background()
	cap := capitan.New()
	defer cap.Shutdown()

	sigAllowed := capitan.NewSignal("allowed", "Allowed signal")
	sigBlocked := capitan.NewSignal("blocked", "Blocked signal")

	// Only the "allowed" signal name passes the log whitelist.
	schema := aperture.Schema{
		Logs: &aperture.LogSchema{
			Whitelist: []string{"allowed"},
		},
	}

	mockLog := apertesting.NewMockLoggerProvider()
	// Check the constructor error: the original ignored it and then
	// dereferenced ap, which would nil-panic on failure.
	ap, err := aperture.New(cap, mockLog, noop.NewMeterProvider(), tracenoop.NewTracerProvider())
	if err != nil {
		t.Fatal(err)
	}
	ap.Apply(schema)
	defer ap.Close()

	cap.Emit(ctx, sigAllowed)
	cap.Emit(ctx, sigBlocked)
	cap.Shutdown() // flush pending records before asserting

	// Only the whitelisted emission should have been logged.
	records := mockLog.Capture().Records()
	if len(records) != 1 {
		t.Errorf("expected 1 record (whitelist filtering), got %d", len(records))
	}
}
Testing Context Extraction
Verify context values are extracted:
// TestContextExtraction verifies that a registered context key is extracted
// from the emission context and attached to log records.
func TestContextExtraction(t *testing.T) {
	ctx := context.Background()

	type ctxKey string
	const userKey ctxKey = "user_id"

	cap := capitan.New()
	defer cap.Shutdown()
	sig := capitan.NewSignal("test", "Test signal")

	mockLog := apertesting.NewMockLoggerProvider()
	// Check the constructor error: the original discarded it and then
	// called methods on ap, which would nil-panic on failure.
	ap, err := aperture.New(cap, mockLog, noop.NewMeterProvider(), tracenoop.NewTracerProvider())
	if err != nil {
		t.Fatal(err)
	}
	// Map the schema name "user_id" onto the typed context key.
	ap.RegisterContextKey("user_id", userKey)
	schema := aperture.Schema{
		Context: &aperture.ContextSchema{
			Logs: []string{"user_id"},
		},
	}
	ap.Apply(schema)
	defer ap.Close()

	ctx = context.WithValue(ctx, userKey, "user-123")
	cap.Emit(ctx, sig)
	cap.Shutdown() // flush pending records before asserting

	// The original declared records without using it — a compile error in Go.
	// Assert something minimal, then inspect records for the user_id attribute.
	records := mockLog.Capture().Records()
	if len(records) == 0 {
		t.Fatal("expected at least one record")
	}
	// Inspect records for user_id attribute
}
Testing Trace Correlation
Verify span creation and correlation:
// TestTraceCorrelation verifies that a start/end signal pair sharing a
// correlation key can be applied and emitted without error. Actual span
// contents require a span exporter and are out of scope here.
func TestTraceCorrelation(t *testing.T) {
	ctx := context.Background()
	cap := capitan.New()
	defer cap.Shutdown()

	reqStarted := capitan.NewSignal("req.started", "Started")
	reqDone := capitan.NewSignal("req.done", "Done")
	requestID := capitan.NewStringKey("request_id")

	// A span opens on "req.started" and closes on the "req.done" event
	// carrying the same request_id value.
	schema := aperture.Schema{
		Traces: []aperture.TraceSchema{
			{
				Start:          "req.started",
				End:            "req.done",
				CorrelationKey: "request_id",
				SpanName:       "test_span",
			},
		},
	}

	mockLog := apertesting.NewMockLoggerProvider()
	// Check the constructor error: the original ignored it and then
	// dereferenced ap, which would nil-panic on failure.
	ap, err := aperture.New(cap, mockLog, noop.NewMeterProvider(), tracenoop.NewTracerProvider())
	if err != nil {
		t.Fatal(err)
	}
	ap.Apply(schema)
	defer ap.Close()

	// Emit a correlated start/end pair sharing the same request_id.
	cap.Emit(ctx, reqStarted, requestID.Field("REQ-001"))
	cap.Emit(ctx, reqDone, requestID.Field("REQ-001"))
	cap.Shutdown()

	// Verify no panics, logs captured
	// For actual span verification, use a span exporter
}
Noop Providers
Use OTEL noop providers when testing components that don't need full providers:
import (
"go.opentelemetry.io/otel/metric/noop"
tracenoop "go.opentelemetry.io/otel/trace/noop"
)
// Only test logging behavior
mockLog := apertesting.NewMockLoggerProvider()
ap, _ := aperture.New(cap, mockLog, noop.NewMeterProvider(), tracenoop.NewTracerProvider())
Benchmarking
Run benchmarks:
go test -bench=. ./testing/benchmarks/
Example benchmark output:
BenchmarkEmit_NoConfig-8 5000000 234 ns/op 48 B/op 1 allocs/op
BenchmarkEmit_WithMetricsCounter-8 3000000 456 ns/op 96 B/op 2 allocs/op
BenchmarkEmit_WithLogs-8 2000000 612 ns/op 144 B/op 3 allocs/op