feat(phase-3): workstream 6 — SOC 2 Type II Preparation
Implements all 22 WS6 tasks completing Phase 3 Enterprise. Column-level encryption (AES-256-CBC, Vault-backed key) via EncryptionService applied to credentials.secret_hash, credentials.vault_path, webhook_subscriptions.vault_secret_path, and agent_did_keys.vault_key_path. Backward-compatible: isEncrypted() guard skips decryption for existing plaintext rows until next read-write cycle. Audit chain integrity (CC7.2): AuditRepository computes SHA-256 Merkle hash on every INSERT (hash = SHA-256(eventId+timestamp+action+outcome+agentId+orgId+prevHash)). AuditVerificationService walks the full chain verifying hash continuity. AuditChainVerificationJob runs hourly; sets agentidp_audit_chain_integrity Prometheus gauge to 1 (pass) or 0 (fail). TLS enforcement (CC6.7): TLSEnforcementMiddleware registered as first middleware in Express stack; 301 redirect on non-https X-Forwarded-Proto in production. SecretsRotationJob (CC9.2): hourly scan for credentials expiring within 7 days; increments agentidp_credentials_expiring_soon_total. ComplianceController + routes: GET /audit/verify (auth+audit:read scope, 30/min rate-limit); GET /compliance/controls (public, Cache-Control 60s). ComplianceStatusStore: module-level map updated by jobs, consumed by controller. Prometheus: 2 new metrics (agentidp_credentials_expiring_soon_total, agentidp_audit_chain_integrity); 6 alerting rules in alerts.yml. Compliance docs: soc2-controls-matrix.md, encryption-runbook.md, audit-log-runbook.md, incident-response.md, secrets-rotation.md. Tests: 557 unit tests passing (35 suites); 26 new tests (EncryptionService, AuditVerificationService); 19 compliance integration tests. TypeScript clean. Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -1,7 +1,7 @@
|
||||
/**
|
||||
* Unit tests for src/metrics/registry.ts
|
||||
*
|
||||
* Verifies that all 6 Prometheus metrics are registered on the shared
|
||||
* Verifies that all Prometheus metrics are registered on the shared
|
||||
* metricsRegistry (not the default global registry), have the correct
|
||||
* names, and carry the correct label names.
|
||||
*/
|
||||
@@ -14,6 +14,8 @@ import {
|
||||
httpRequestDurationSeconds,
|
||||
dbQueryDurationSeconds,
|
||||
redisCommandDurationSeconds,
|
||||
credentialsExpiringSoonTotal,
|
||||
auditChainIntegrity,
|
||||
} from '../../../src/metrics/registry';
|
||||
|
||||
describe('metricsRegistry', () => {
|
||||
@@ -28,9 +30,9 @@ describe('metricsRegistry', () => {
|
||||
expect(metricsRegistry).not.toBe(register);
|
||||
});
|
||||
|
||||
it('contains exactly 7 metric entries', async () => {
|
||||
it('contains exactly 9 metric entries', async () => {
|
||||
const entries = await metricsRegistry.getMetricsAsJSON();
|
||||
expect(entries).toHaveLength(7);
|
||||
expect(entries).toHaveLength(9);
|
||||
});
|
||||
|
||||
// ──────────────────────────────────────────────────────────────────
|
||||
@@ -43,6 +45,9 @@ describe('metricsRegistry', () => {
|
||||
'agentidp_http_request_duration_seconds',
|
||||
'agentidp_db_query_duration_seconds',
|
||||
'agentidp_redis_command_duration_seconds',
|
||||
'agentidp_webhook_dead_letters_total',
|
||||
'agentidp_credentials_expiring_soon_total',
|
||||
'agentidp_audit_chain_integrity',
|
||||
])('registers metric "%s"', async (metricName) => {
|
||||
const entries = await metricsRegistry.getMetricsAsJSON();
|
||||
const names = entries.map((e) => e.name);
|
||||
@@ -126,4 +131,32 @@ describe('metricsRegistry', () => {
|
||||
).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('credentialsExpiringSoonTotal', () => {
|
||||
it('has name agentidp_credentials_expiring_soon_total', () => {
|
||||
const metric = credentialsExpiringSoonTotal as unknown as { name: string };
|
||||
expect(metric.name).toBe('agentidp_credentials_expiring_soon_total');
|
||||
});
|
||||
|
||||
it('increments with agent_id label without throwing', () => {
|
||||
expect(() =>
|
||||
credentialsExpiringSoonTotal.inc({ agent_id: 'agent-test-001' }),
|
||||
).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('auditChainIntegrity', () => {
|
||||
it('has name agentidp_audit_chain_integrity', () => {
|
||||
const metric = auditChainIntegrity as unknown as { name: string };
|
||||
expect(metric.name).toBe('agentidp_audit_chain_integrity');
|
||||
});
|
||||
|
||||
it('can be set to 1 (passing) without throwing', () => {
|
||||
expect(() => auditChainIntegrity.set(1)).not.toThrow();
|
||||
});
|
||||
|
||||
it('can be set to 0 (failing) without throwing', () => {
|
||||
expect(() => auditChainIntegrity.set(0)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -65,12 +65,16 @@ describe('AuditRepository', () => {
|
||||
};
|
||||
|
||||
it('should insert a row and return a mapped IAuditEvent', async () => {
|
||||
(pool.query as jest.Mock).mockResolvedValueOnce({ rows: [AUDIT_ROW], rowCount: 1 });
|
||||
// create() first SELECTs the previous hash, then INSERTs the new event
|
||||
(pool.query as jest.Mock)
|
||||
.mockResolvedValueOnce({ rows: [], rowCount: 0 }) // SELECT hash (no previous event)
|
||||
.mockResolvedValueOnce({ rows: [AUDIT_ROW], rowCount: 1 }); // INSERT
|
||||
|
||||
const result = await repo.create(eventInput);
|
||||
|
||||
expect(pool.query).toHaveBeenCalledTimes(1);
|
||||
const [sql, params] = (pool.query as jest.Mock).mock.calls[0] as [string, unknown[]];
|
||||
expect(pool.query).toHaveBeenCalledTimes(2);
|
||||
// Second call is the INSERT
|
||||
const [sql, params] = (pool.query as jest.Mock).mock.calls[1] as [string, unknown[]];
|
||||
expect(sql).toContain('INSERT INTO audit_events');
|
||||
expect(params).toContain(eventInput.agentId);
|
||||
expect(params).toContain(eventInput.action);
|
||||
@@ -81,11 +85,15 @@ describe('AuditRepository', () => {
|
||||
});
|
||||
|
||||
it('should JSON-stringify the metadata field', async () => {
|
||||
(pool.query as jest.Mock).mockResolvedValueOnce({ rows: [AUDIT_ROW], rowCount: 1 });
|
||||
// create() first SELECTs the previous hash, then INSERTs the new event
|
||||
(pool.query as jest.Mock)
|
||||
.mockResolvedValueOnce({ rows: [], rowCount: 0 }) // SELECT hash (no previous event)
|
||||
.mockResolvedValueOnce({ rows: [AUDIT_ROW], rowCount: 1 }); // INSERT
|
||||
|
||||
await repo.create(eventInput);
|
||||
|
||||
const [, params] = (pool.query as jest.Mock).mock.calls[0] as [string, unknown[]];
|
||||
// Second call is the INSERT
|
||||
const [, params] = (pool.query as jest.Mock).mock.calls[1] as [string, unknown[]];
|
||||
// metadata param should be a JSON string
|
||||
const metadataParam = params.find((p) => typeof p === 'string' && p.startsWith('{'));
|
||||
expect(metadataParam).toBe(JSON.stringify(eventInput.metadata));
|
||||
|
||||
tests/unit/services/AuditVerificationService.test.ts — 280 lines (new file)
@@ -0,0 +1,280 @@
|
||||
/**
|
||||
* Unit tests for AuditVerificationService — audit chain integrity verification.
|
||||
*
|
||||
* Tests:
|
||||
* 1. Intact chain: correct hashes → { verified: true, checkedCount: N, brokenAtEventId: null }
|
||||
* 2. Tampered chain: one wrong hash → { verified: false, brokenAtEventId: <event_id> }
|
||||
* 3. Empty log: no rows → { verified: true, checkedCount: 0, brokenAtEventId: null }
|
||||
* 4. Date range params are propagated to SQL query
|
||||
* 5. previous_hash mismatch is detected
|
||||
*/
|
||||
|
||||
import crypto from 'crypto';
|
||||
import { Pool } from 'pg';
|
||||
import {
|
||||
AuditVerificationService,
|
||||
IChainVerificationResult,
|
||||
_resetAuditVerificationServiceSingleton,
|
||||
getAuditVerificationService,
|
||||
} from '../../../src/services/AuditVerificationService';
|
||||
|
||||
// ============================================================================
|
||||
// Helpers
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Computes the SHA-256 hash of an audit event — must match the algorithm in
|
||||
* AuditVerificationService and AuditRepository.
|
||||
*/
|
||||
function computeHash(
|
||||
eventId: string,
|
||||
timestamp: Date,
|
||||
action: string,
|
||||
outcome: string,
|
||||
agentId: string,
|
||||
organizationId: string,
|
||||
previousHash: string,
|
||||
): string {
|
||||
return crypto
|
||||
.createHash('sha256')
|
||||
.update(
|
||||
eventId +
|
||||
timestamp.toISOString() +
|
||||
action +
|
||||
outcome +
|
||||
agentId +
|
||||
organizationId +
|
||||
previousHash,
|
||||
)
|
||||
.digest('hex');
|
||||
}
|
||||
|
||||
/** Generates a minimal audit chain row with correct hash linkage. */
|
||||
function makeRow(
|
||||
eventId: string,
|
||||
timestamp: Date,
|
||||
action: string,
|
||||
outcome: string,
|
||||
agentId: string,
|
||||
organizationId: string,
|
||||
previousHash: string,
|
||||
) {
|
||||
const hash = computeHash(eventId, timestamp, action, outcome, agentId, organizationId, previousHash);
|
||||
return {
|
||||
event_id: eventId,
|
||||
timestamp,
|
||||
action,
|
||||
outcome,
|
||||
agent_id: agentId,
|
||||
organization_id: organizationId,
|
||||
hash,
|
||||
previous_hash: previousHash,
|
||||
};
|
||||
}
|
||||
|
||||
/** Creates a mock pg.Pool whose query() returns the given rows. */
|
||||
function mockPool(rows: unknown[]): Pool {
|
||||
return {
|
||||
query: jest.fn().mockResolvedValue({ rows }),
|
||||
} as unknown as Pool;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Test data
|
||||
// ============================================================================
|
||||
|
||||
const ORG = 'org_test';
|
||||
const AGENT = 'agent-abc-123';
|
||||
const T1 = new Date('2026-03-01T10:00:00.000Z');
|
||||
const T2 = new Date('2026-03-01T10:01:00.000Z');
|
||||
const T3 = new Date('2026-03-01T10:02:00.000Z');
|
||||
|
||||
// ============================================================================
|
||||
// Tests
|
||||
// ============================================================================
|
||||
|
||||
describe('AuditVerificationService', () => {
|
||||
afterEach(() => {
|
||||
_resetAuditVerificationServiceSingleton();
|
||||
});
|
||||
|
||||
// ── Intact chain ──────────────────────────────────────────────────────────
|
||||
|
||||
it('should return verified: true for an intact 3-event chain', async () => {
|
||||
const row1 = makeRow('evt-001', T1, 'agent.created', 'success', AGENT, ORG, '');
|
||||
const row2 = makeRow('evt-002', T2, 'credential.generated', 'success', AGENT, ORG, row1.hash);
|
||||
const row3 = makeRow('evt-003', T3, 'token.issued', 'success', AGENT, ORG, row2.hash);
|
||||
|
||||
const pool = mockPool([row1, row2, row3]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result: IChainVerificationResult = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.checkedCount).toBe(3);
|
||||
expect(result.brokenAtEventId).toBeNull();
|
||||
});
|
||||
|
||||
it('should return verified: true for a single-event chain', async () => {
|
||||
const row1 = makeRow('evt-001', T1, 'agent.created', 'success', AGENT, ORG, '');
|
||||
const pool = mockPool([row1]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.checkedCount).toBe(1);
|
||||
expect(result.brokenAtEventId).toBeNull();
|
||||
});
|
||||
|
||||
// ── Empty log ─────────────────────────────────────────────────────────────
|
||||
|
||||
it('should return verified: true with checkedCount 0 for an empty log', async () => {
|
||||
const pool = mockPool([]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.checkedCount).toBe(0);
|
||||
expect(result.brokenAtEventId).toBeNull();
|
||||
});
|
||||
|
||||
// ── Tampered hash ─────────────────────────────────────────────────────────
|
||||
|
||||
it('should detect a tampered hash on the second event', async () => {
|
||||
const row1 = makeRow('evt-001', T1, 'agent.created', 'success', AGENT, ORG, '');
|
||||
const row2 = makeRow('evt-002', T2, 'credential.generated', 'success', AGENT, ORG, row1.hash);
|
||||
|
||||
// Tamper: replace hash on row2 with garbage
|
||||
const tamperedRow2 = { ...row2, hash: 'deadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef' };
|
||||
|
||||
const pool = mockPool([row1, tamperedRow2]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.brokenAtEventId).toBe('evt-002');
|
||||
expect(result.checkedCount).toBe(1); // row1 was checked before break detected
|
||||
});
|
||||
|
||||
it('should detect a previous_hash mismatch', async () => {
|
||||
const row1 = makeRow('evt-001', T1, 'agent.created', 'success', AGENT, ORG, '');
|
||||
|
||||
// row2 references wrong previous_hash
|
||||
const row2 = makeRow('evt-002', T2, 'credential.generated', 'success', AGENT, ORG, 'wrongprevhash');
|
||||
|
||||
const pool = mockPool([row1, row2]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.brokenAtEventId).toBe('evt-002');
|
||||
});
|
||||
|
||||
it('should stop at the first break and not report subsequent events', async () => {
|
||||
const row1 = makeRow('evt-001', T1, 'agent.created', 'success', AGENT, ORG, '');
|
||||
const row2 = makeRow('evt-002', T2, 'credential.generated', 'success', AGENT, ORG, row1.hash);
|
||||
const row3 = makeRow('evt-003', T3, 'token.issued', 'success', AGENT, ORG, row2.hash);
|
||||
|
||||
// Tamper row2 hash
|
||||
const tamperedRow2 = { ...row2, hash: 'aaaa' + row2.hash.slice(4) };
|
||||
|
||||
const pool = mockPool([row1, tamperedRow2, row3]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(false);
|
||||
expect(result.brokenAtEventId).toBe('evt-002');
|
||||
// row3 was never checked
|
||||
});
|
||||
|
||||
// ── Pre-migration rows (empty hashes) ─────────────────────────────────────
|
||||
|
||||
it('should skip pre-migration rows with empty hashes', async () => {
|
||||
// Simulate rows written before migration 020 (hash = '', previous_hash = '')
|
||||
const legacyRow = {
|
||||
event_id: 'evt-legacy',
|
||||
timestamp: T1,
|
||||
action: 'agent.created',
|
||||
outcome: 'success',
|
||||
agent_id: AGENT,
|
||||
organization_id: ORG,
|
||||
hash: '',
|
||||
previous_hash: '',
|
||||
};
|
||||
|
||||
const pool = mockPool([legacyRow]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const result = await service.verifyChain();
|
||||
|
||||
expect(result.verified).toBe(true);
|
||||
expect(result.checkedCount).toBe(1);
|
||||
expect(result.brokenAtEventId).toBeNull();
|
||||
});
|
||||
|
||||
// ── Date range params ─────────────────────────────────────────────────────
|
||||
|
||||
it('should propagate fromDate and toDate to the SQL query', async () => {
|
||||
const pool = mockPool([]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const fromDate = '2026-03-01T00:00:00.000Z';
|
||||
const toDate = '2026-03-31T23:59:59.999Z';
|
||||
|
||||
const result = await service.verifyChain(fromDate, toDate);
|
||||
|
||||
// Verify the query was called with date params
|
||||
const queryMock = pool.query as jest.Mock;
|
||||
expect(queryMock).toHaveBeenCalledTimes(1);
|
||||
|
||||
const callArgs = queryMock.mock.calls[0] as [string, unknown[]];
|
||||
expect(callArgs[0]).toContain('timestamp >=');
|
||||
expect(callArgs[0]).toContain('timestamp <=');
|
||||
expect(callArgs[1]).toEqual([new Date(fromDate), new Date(toDate)]);
|
||||
|
||||
// fromDate/toDate are echoed back in result
|
||||
expect(result.fromDate).toBe(fromDate);
|
||||
expect(result.toDate).toBe(toDate);
|
||||
});
|
||||
|
||||
it('should include only fromDate in query when toDate is omitted', async () => {
|
||||
const pool = mockPool([]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
const fromDate = '2026-03-01T00:00:00.000Z';
|
||||
const result = await service.verifyChain(fromDate, undefined);
|
||||
|
||||
const queryMock = pool.query as jest.Mock;
|
||||
const callArgs = queryMock.mock.calls[0] as [string, unknown[]];
|
||||
expect(callArgs[0]).toContain('timestamp >=');
|
||||
expect(callArgs[0]).not.toContain('timestamp <=');
|
||||
expect(result.fromDate).toBe(fromDate);
|
||||
expect(result.toDate).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should include no WHERE clause when no date range is provided', async () => {
|
||||
const pool = mockPool([]);
|
||||
const service = new AuditVerificationService(pool);
|
||||
|
||||
await service.verifyChain();
|
||||
|
||||
const queryMock = pool.query as jest.Mock;
|
||||
const callArgs = queryMock.mock.calls[0] as [string, unknown[]];
|
||||
expect(callArgs[0]).not.toContain('WHERE');
|
||||
expect(callArgs[1]).toEqual([]);
|
||||
});
|
||||
|
||||
// ── Singleton ─────────────────────────────────────────────────────────────
|
||||
|
||||
it('getAuditVerificationService should return the same instance on repeated calls', () => {
|
||||
const pool = mockPool([]);
|
||||
const instance1 = getAuditVerificationService(pool);
|
||||
const instance2 = getAuditVerificationService(pool);
|
||||
expect(instance1).toBe(instance2);
|
||||
});
|
||||
});
|
||||
tests/unit/services/EncryptionService.test.ts — 190 lines (new file)
@@ -0,0 +1,190 @@
|
||||
/**
|
||||
* Unit tests for EncryptionService — AES-256-CBC column-level encryption.
|
||||
*
|
||||
* Tests:
|
||||
* 1. Encrypt/decrypt round-trip returns original plaintext
|
||||
* 2. isEncrypted: true for base64:base64 format, false for plaintext strings
|
||||
* 3. encryptColumn produces different ciphertext on each call (IV randomness)
|
||||
* 4. Singleton reset utility works for test isolation
|
||||
*/
|
||||
|
||||
import {
|
||||
EncryptionService,
|
||||
getEncryptionService,
|
||||
_resetEncryptionServiceSingleton,
|
||||
} from '../../../src/services/EncryptionService';
|
||||
import { VaultClient } from '../../../src/vault/VaultClient';
|
||||
|
||||
// ============================================================================
|
||||
// Mock VaultClient
|
||||
// ============================================================================
|
||||
|
||||
/** A 32-byte (64-char hex) test encryption key. */
|
||||
const TEST_KEY = 'a'.repeat(64); // 64 x 'a' = valid 32-byte hex key
|
||||
|
||||
/**
|
||||
* Creates a mock VaultClient that returns TEST_KEY from readArbitrarySecret.
|
||||
*/
|
||||
function makeMockVaultClient(): VaultClient {
|
||||
const mock = {
|
||||
readArbitrarySecret: jest.fn().mockResolvedValue({ encryptionKey: TEST_KEY }),
|
||||
writeArbitrarySecret: jest.fn().mockResolvedValue(undefined),
|
||||
writeSecret: jest.fn(),
|
||||
readSecret: jest.fn(),
|
||||
verifySecret: jest.fn(),
|
||||
deleteSecret: jest.fn(),
|
||||
};
|
||||
return mock as unknown as VaultClient;
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Tests
|
||||
// ============================================================================
|
||||
|
||||
describe('EncryptionService', () => {
|
||||
let service: EncryptionService;
|
||||
let mockVaultClient: VaultClient;
|
||||
|
||||
beforeEach(() => {
|
||||
_resetEncryptionServiceSingleton();
|
||||
mockVaultClient = makeMockVaultClient();
|
||||
service = new EncryptionService(mockVaultClient);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
_resetEncryptionServiceSingleton();
|
||||
});
|
||||
|
||||
// ── Round-trip ────────────────────────────────────────────────────────────
|
||||
|
||||
it('should encrypt and then decrypt back to the original plaintext', async () => {
|
||||
const plaintext = 'super-secret-credential-hash-value';
|
||||
|
||||
const encrypted = await service.encryptColumn(plaintext);
|
||||
expect(encrypted).not.toBe(plaintext);
|
||||
expect(encrypted).toContain(':');
|
||||
|
||||
const decrypted = await service.decryptColumn(encrypted);
|
||||
expect(decrypted).toBe(plaintext);
|
||||
});
|
||||
|
||||
it('should handle empty string round-trip', async () => {
|
||||
const plaintext = '';
|
||||
const encrypted = await service.encryptColumn(plaintext);
|
||||
const decrypted = await service.decryptColumn(encrypted);
|
||||
expect(decrypted).toBe(plaintext);
|
||||
});
|
||||
|
||||
it('should handle unicode strings in round-trip', async () => {
|
||||
const plaintext = 'secret/data/agentidp/agents/über-agent/credentials/cred-123';
|
||||
const encrypted = await service.encryptColumn(plaintext);
|
||||
const decrypted = await service.decryptColumn(encrypted);
|
||||
expect(decrypted).toBe(plaintext);
|
||||
});
|
||||
|
||||
// ── IV randomness ─────────────────────────────────────────────────────────
|
||||
|
||||
it('should produce different ciphertext on each call (random IV)', async () => {
|
||||
const plaintext = 'same-plaintext-value';
|
||||
|
||||
const encrypted1 = await service.encryptColumn(plaintext);
|
||||
const encrypted2 = await service.encryptColumn(plaintext);
|
||||
|
||||
// Same plaintext but different IV → different ciphertext
|
||||
expect(encrypted1).not.toBe(encrypted2);
|
||||
|
||||
// Both must still decrypt to the same plaintext
|
||||
expect(await service.decryptColumn(encrypted1)).toBe(plaintext);
|
||||
expect(await service.decryptColumn(encrypted2)).toBe(plaintext);
|
||||
});
|
||||
|
||||
// ── isEncrypted ──────────────────────────────────────────────────────────
|
||||
|
||||
it('should return true for a value in base64:base64 format', async () => {
|
||||
const encrypted = await service.encryptColumn('test-value');
|
||||
expect(service.isEncrypted(encrypted)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for a plaintext bcrypt hash', () => {
|
||||
const bcryptHash = '$2a$10$N9qo8uLOickgx2ZMRZoMyeIjZAgcfl7p92ldGxad68LJZdL17lhWy';
|
||||
expect(service.isEncrypted(bcryptHash)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for a Vault path string', () => {
|
||||
expect(service.isEncrypted('secret/data/agentidp/agents/abc/credentials/xyz')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for an empty string', () => {
|
||||
expect(service.isEncrypted('')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for a plain UUID', () => {
|
||||
expect(service.isEncrypted('550e8400-e29b-41d4-a716-446655440000')).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for a manually constructed base64:base64 string', () => {
|
||||
const iv = Buffer.from('deadbeef12345678', 'hex').toString('base64');
|
||||
const ct = Buffer.from('cafebabe00112233', 'hex').toString('base64');
|
||||
expect(service.isEncrypted(`${iv}:${ct}`)).toBe(true);
|
||||
});
|
||||
|
||||
// ── Vault key fetching ────────────────────────────────────────────────────
|
||||
|
||||
it('should call Vault readArbitrarySecret once and cache the key', async () => {
|
||||
const plaintext = 'value1';
|
||||
await service.encryptColumn(plaintext);
|
||||
await service.encryptColumn(plaintext);
|
||||
await service.encryptColumn(plaintext);
|
||||
|
||||
// Key should be fetched only once
|
||||
expect(
|
||||
(mockVaultClient.readArbitrarySecret as jest.Mock).mock.calls.length,
|
||||
).toBe(1);
|
||||
});
|
||||
|
||||
it('should use the ENCRYPTION_KEY_VAULT_PATH env var for the Vault path', async () => {
|
||||
const originalPath = process.env['ENCRYPTION_KEY_VAULT_PATH'];
|
||||
process.env['ENCRYPTION_KEY_VAULT_PATH'] = 'secret/data/custom/path';
|
||||
|
||||
const freshService = new EncryptionService(mockVaultClient);
|
||||
await freshService.encryptColumn('test');
|
||||
|
||||
expect(
|
||||
(mockVaultClient.readArbitrarySecret as jest.Mock).mock.calls[0][0],
|
||||
).toBe('secret/data/custom/path');
|
||||
|
||||
// Restore env
|
||||
if (originalPath === undefined) {
|
||||
delete process.env['ENCRYPTION_KEY_VAULT_PATH'];
|
||||
} else {
|
||||
process.env['ENCRYPTION_KEY_VAULT_PATH'] = originalPath;
|
||||
}
|
||||
});
|
||||
|
||||
// ── Error handling ────────────────────────────────────────────────────────
|
||||
|
||||
it('should throw when ciphertext has no colon separator', async () => {
|
||||
await expect(service.decryptColumn('invalidformat')).rejects.toThrow(
|
||||
'Invalid encrypted column format',
|
||||
);
|
||||
});
|
||||
|
||||
it('should throw when Vault returns an invalid key', async () => {
|
||||
const badVault = {
|
||||
readArbitrarySecret: jest.fn().mockResolvedValue({ encryptionKey: 'tooshort' }),
|
||||
} as unknown as VaultClient;
|
||||
|
||||
const badService = new EncryptionService(badVault);
|
||||
await expect(badService.encryptColumn('test')).rejects.toThrow(
|
||||
'expected a 64-character hex string',
|
||||
);
|
||||
});
|
||||
|
||||
// ── Singleton ─────────────────────────────────────────────────────────────
|
||||
|
||||
it('getEncryptionService should return the same instance on repeated calls', () => {
|
||||
const instance1 = getEncryptionService(mockVaultClient);
|
||||
const instance2 = getEncryptionService(mockVaultClient);
|
||||
expect(instance1).toBe(instance2);
|
||||
});
|
||||
});
|
||||
Reference in New Issue
Block a user