- Add @timesafari/polling-contracts package with comprehensive type definitions
- Implement GenericPollingRequest, PollingResult, and PollingScheduleConfig interfaces
- Add Zod schemas for StarredProjectsRequest/Response and DeepLinkParams validation
- Include calculateBackoffDelay utility with unified retry policy (exponential, linear, fixed)
- Add OutboxPressureManager for storage pressure controls and back-pressure signals
- Implement TelemetryManager with cardinality budgets and PII redaction
- Add ClockSyncManager for JWT timestamp validation and skew tolerance
- Include comprehensive unit tests with Jest snapshots and race condition testing
- Add JWT_ID_PATTERN regex for canonical JWT ID format validation
- Support idempotency with X-Idempotency-Key enforcement
- Implement watermark CAS (Compare-and-Swap) for race condition prevention

This establishes the foundation for the new generic polling system where host apps define request/response schemas and the plugin provides robust polling logic.
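As a rough illustration of that intent (not code from this commit), a host app might validate a polled payload against the published schema and let the unified retry policy pick the next delay. The import names below appear in the diffs that follow; the endpoint URL and the surrounding polling loop are hypothetical:

import {
  StarredProjectsResponseSchema,
  calculateBackoffDelay,
  createDefaultBackoffPolicy
} from '@timesafari/polling-contracts';

async function pollOnce(attempt: number): Promise<void> {
  const policy = createDefaultBackoffPolicy();
  // Hypothetical endpoint; the real host app supplies its own request definition
  const response = await fetch('https://api.example.com/api/v2/starred-projects');
  const parsed = StarredProjectsResponseSchema.safeParse(await response.json());

  if (!parsed.success) {
    // Invalid or failed payloads are retried with jittered exponential backoff
    const delayMs = calculateBackoffDelay(attempt, policy);
    setTimeout(() => pollOnce(attempt + 1), delayMs);
    return;
  }

  // parsed.data is now typed according to the contract envelope
  console.log(`received ${parsed.data.data.length} starred projects`);
}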
18 changed files with 2599 additions and 0 deletions
@@ -0,0 +1,14 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  roots: ['<rootDir>/src'],
  testMatch: ['**/__tests__/**/*.test.ts'],
  collectCoverageFrom: [
    'src/**/*.ts',
    '!src/**/*.d.ts',
    '!src/__tests__/**'
  ],
  coverageDirectory: 'coverage',
  coverageReporters: ['text', 'lcov', 'html'],
  setupFilesAfterEnv: ['<rootDir>/src/__tests__/setup.ts']
};
@@ -0,0 +1,29 @@
{
  "name": "@timesafari/polling-contracts",
  "version": "1.0.0",
  "description": "TypeScript contracts and Zod schemas for TimeSafari polling system",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "build": "tsc",
    "test": "jest",
    "test:snapshots": "jest --updateSnapshot",
    "lint": "eslint src --ext .ts",
    "lint-fix": "eslint src --ext .ts --fix"
  },
  "dependencies": {
    "zod": "^3.22.4"
  },
  "devDependencies": {
    "@types/jest": "^29.5.5",
    "@typescript-eslint/eslint-plugin": "^5.57.0",
    "@typescript-eslint/parser": "^5.57.0",
    "eslint": "^8.37.0",
    "jest": "^29.7.0",
    "ts-jest": "^29.1.0",
    "typescript": "^5.2.2"
  },
  "files": [
    "dist/**/*"
  ]
}
@@ -0,0 +1,167 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Schema Validation AcknowledgmentRequestSchema should validate acknowledgment request: acknowledgment-request 1`] = `
{
  "acknowledgedAt": "2025-01-01T12:00:00Z",
  "acknowledgedJwtIds": [
    "1704067200_abc123_12345678",
    "1704153600_mno345_87654321",
  ],
  "clientVersion": "TimeSafari-DailyNotificationPlugin/1.0.0",
}
`;

exports[`Schema Validation AcknowledgmentResponseSchema should validate acknowledgment response: acknowledgment-response 1`] = `
{
  "acknowledged": 2,
  "acknowledgmentId": "ack_xyz789",
  "alreadyAcknowledged": 0,
  "failed": 0,
  "timestamp": "2025-01-01T12:00:00Z",
}
`;

exports[`Schema Validation ClockSyncResponseSchema should validate clock sync response: clock-sync-response 1`] = `
{
  "clientTime": 1704067195000,
  "ntpServers": [
    "pool.ntp.org",
    "time.google.com",
  ],
  "offset": 5000,
  "serverTime": 1704067200000,
}
`;

exports[`Schema Validation DeepLinkParamsSchema should reject invalid params: invalid-params-error 1`] = `
[ZodError: [
  {
    "validation": "regex",
    "code": "invalid_string",
    "message": "Invalid",
    "path": [
      "jwtIds",
      0
    ]
  },
  {
    "validation": "regex",
    "code": "invalid_string",
    "message": "Invalid",
    "path": [
      "projectId"
    ]
  }
]]
`;

exports[`Schema Validation DeepLinkParamsSchema should validate multiple JWT IDs params: multiple-jwt-ids-params 1`] = `
{
  "jwtIds": [
    "1704067200_abc123_12345678",
    "1704153600_mno345_87654321",
    "1704240000_new123_abcdef01",
  ],
}
`;

exports[`Schema Validation DeepLinkParamsSchema should validate project ID params: project-id-params 1`] = `
{
  "projectId": "test_project_123",
}
`;

exports[`Schema Validation DeepLinkParamsSchema should validate shortlink params: shortlink-params 1`] = `
{
  "shortlink": "abc123def456789",
}
`;

exports[`Schema Validation ErrorResponseSchema should validate generic error: generic-error 1`] = `
{
  "details": {
    "component": "database",
    "operation": "query_starred_projects",
  },
  "error": "internal_server_error",
  "message": "Database connection timeout",
  "requestId": "req_mno345",
}
`;

exports[`Schema Validation ErrorResponseSchema should validate rate limit error: rate-limit-error 1`] = `
{
  "code": "RATE_LIMIT_EXCEEDED",
  "details": {
    "limit": 100,
    "resetAt": "2025-01-01T12:01:00Z",
    "window": "1m",
  },
  "error": "Rate limit exceeded",
  "message": "Rate limit exceeded for DID",
  "requestId": "req_jkl012",
  "retryAfter": 60,
}
`;

exports[`Schema Validation StarredProjectsResponseSchema should validate canonical response envelope: canonical-response-envelope 1`] = `
{
  "data": [
    {
      "planSummary": {
        "agentDid": "did:key:test_agent_1",
        "description": "First test project",
        "endTime": "2025-01-31T23:59:59Z",
        "handleId": "test_project_1",
        "issuerDid": "did:key:test_issuer_1",
        "jwtId": "1704067200_abc123_def45678",
        "locLat": 40.7128,
        "locLon": -74.006,
        "name": "Test Project 1",
        "startTime": "2025-01-01T00:00:00Z",
        "url": "https://project-url.com",
        "version": "1.0.0",
      },
      "previousClaim": {
        "claimData": {
          "progress": 0.75,
          "status": "in_progress",
        },
        "claimType": "project_update",
        "jwtId": "1703980800_xyz789_12345678",
        "metadata": {
          "createdAt": "2025-01-01T10:00:00Z",
          "updatedAt": "2025-01-01T12:00:00Z",
        },
      },
    },
  ],
  "hitLimit": false,
  "pagination": {
    "hasMore": false,
    "nextAfterId": null,
  },
}
`;

exports[`Schema Validation StarredProjectsResponseSchema should validate empty response: empty-response 1`] = `
{
  "data": [],
  "hitLimit": false,
  "pagination": {
    "hasMore": false,
    "nextAfterId": null,
  },
}
`;

exports[`Schema Validation StarredProjectsResponseSchema should validate paginated response: paginated-response 1`] = `
{
  "data": [],
  "hitLimit": true,
  "pagination": {
    "hasMore": true,
    "nextAfterId": "1704153600_mno345_87654321",
  },
}
`;
@@ -0,0 +1,238 @@
/**
 * Unit tests for backoff policy
 */

import {
  calculateBackoffDelay,
  createDefaultBackoffPolicy,
  createRateLimitBackoffPolicy,
  createNetworkErrorBackoffPolicy,
  createServerErrorBackoffPolicy
} from '../backoff';
import { BackoffPolicy } from '../types';

describe('Backoff Policy', () => {
  describe('calculateBackoffDelay', () => {
    const defaultPolicy: BackoffPolicy = {
      maxAttempts: 3,
      baseDelayMs: 1000,
      maxDelayMs: 30000,
      strategy: 'exponential',
      jitterEnabled: false, // Disable for predictable tests
      jitterFactor: 0.25,
      respectRetryAfter: true,
      retryAfterMaxMs: 300000
    };

    it('should calculate exponential backoff correctly', () => {
      const policy = { ...defaultPolicy, strategy: 'exponential' as const };

      expect(calculateBackoffDelay(1, policy)).toBe(1000); // 1s
      expect(calculateBackoffDelay(2, policy)).toBe(2000); // 2s
      expect(calculateBackoffDelay(3, policy)).toBe(4000); // 4s
      expect(calculateBackoffDelay(4, policy)).toBe(8000); // 8s
    });

    it('should calculate linear backoff correctly', () => {
      const policy = { ...defaultPolicy, strategy: 'linear' as const };

      expect(calculateBackoffDelay(1, policy)).toBe(1000); // 1s
      expect(calculateBackoffDelay(2, policy)).toBe(2000); // 2s
      expect(calculateBackoffDelay(3, policy)).toBe(3000); // 3s
      expect(calculateBackoffDelay(4, policy)).toBe(4000); // 4s
    });

    it('should calculate fixed backoff correctly', () => {
      const policy = { ...defaultPolicy, strategy: 'fixed' as const };

      expect(calculateBackoffDelay(1, policy)).toBe(1000); // 1s
      expect(calculateBackoffDelay(2, policy)).toBe(1000); // 1s
      expect(calculateBackoffDelay(3, policy)).toBe(1000); // 1s
      expect(calculateBackoffDelay(4, policy)).toBe(1000); // 1s
    });

    it('should respect Retry-After header', () => {
      const policy = { ...defaultPolicy, respectRetryAfter: true };
      const retryAfterMs = 5000; // 5 seconds

      expect(calculateBackoffDelay(1, policy, retryAfterMs)).toBe(5000);
      expect(calculateBackoffDelay(2, policy, retryAfterMs)).toBe(5000);
      expect(calculateBackoffDelay(3, policy, retryAfterMs)).toBe(5000);
    });

    it('should cap Retry-After at maxDelayMs', () => {
      const policy = { ...defaultPolicy, maxDelayMs: 10000 };
      const retryAfterMs = 60000; // 60 seconds (exceeds max)

      expect(calculateBackoffDelay(1, policy, retryAfterMs)).toBe(10000);
    });

    it('should cap Retry-After at retryAfterMaxMs', () => {
      const policy = { ...defaultPolicy, retryAfterMaxMs: 15000 };
      const retryAfterMs = 30000; // 30 seconds (exceeds retryAfterMaxMs)

      expect(calculateBackoffDelay(1, policy, retryAfterMs)).toBe(15000);
    });

    it('should apply jitter when enabled', () => {
      const policy = { ...defaultPolicy, jitterEnabled: true, jitterFactor: 0.5 };

      // Run multiple times to test jitter range
      const delays = Array.from({ length: 100 }, () =>
        calculateBackoffDelay(1, policy)
      );

      // All delays should be within jitter range (500ms to 1500ms)
      delays.forEach(delay => {
        expect(delay).toBeGreaterThanOrEqual(500);
        expect(delay).toBeLessThanOrEqual(1500);
      });

      // Should have some variation due to jitter
      const uniqueDelays = new Set(delays);
      expect(uniqueDelays.size).toBeGreaterThan(1);
    });

    it('should cap delay at maxDelayMs', () => {
      const policy = { ...defaultPolicy, maxDelayMs: 5000 };

      expect(calculateBackoffDelay(10, policy)).toBe(5000); // Capped at 5s
    });

    it('should handle zero jitter factor', () => {
      const policy = { ...defaultPolicy, jitterEnabled: true, jitterFactor: 0 };

      expect(calculateBackoffDelay(1, policy)).toBe(1000); // No jitter applied
    });
  });

  describe('Policy Creators', () => {
    it('should create default backoff policy', () => {
      const policy = createDefaultBackoffPolicy();

      expect(policy.maxAttempts).toBe(3);
      expect(policy.baseDelayMs).toBe(1000);
      expect(policy.maxDelayMs).toBe(30000);
      expect(policy.strategy).toBe('exponential');
      expect(policy.jitterEnabled).toBe(true);
      expect(policy.jitterFactor).toBe(0.25);
      expect(policy.respectRetryAfter).toBe(true);
    });

    it('should create rate limit backoff policy', () => {
      const retryAfterMs = 60000; // 1 minute
      const policy = createRateLimitBackoffPolicy(retryAfterMs);

      expect(policy.maxAttempts).toBe(5);
      expect(policy.baseDelayMs).toBe(60000);
      expect(policy.strategy).toBe('fixed');
      expect(policy.jitterEnabled).toBe(true);
      expect(policy.jitterFactor).toBe(0.1);
      expect(policy.respectRetryAfter).toBe(true);
    });

    it('should create network error backoff policy', () => {
      const policy = createNetworkErrorBackoffPolicy();

      expect(policy.maxAttempts).toBe(3);
      expect(policy.baseDelayMs).toBe(1000);
      expect(policy.strategy).toBe('exponential');
      expect(policy.jitterEnabled).toBe(true);
      expect(policy.respectRetryAfter).toBe(false);
    });

    it('should create server error backoff policy', () => {
      const policy = createServerErrorBackoffPolicy();

      expect(policy.maxAttempts).toBe(3);
      expect(policy.baseDelayMs).toBe(2000);
      expect(policy.strategy).toBe('exponential');
      expect(policy.jitterEnabled).toBe(true);
      expect(policy.jitterFactor).toBe(0.5);
      expect(policy.respectRetryAfter).toBe(false);
    });
  });

  describe('429 + Retry-After Branch', () => {
    it('should handle 429 with Retry-After header', () => {
      const policy = createRateLimitBackoffPolicy(30000); // 30 seconds
      policy.jitterEnabled = false; // Disable jitter for predictable tests

      // First attempt should use Retry-After
      expect(calculateBackoffDelay(1, policy, 30000)).toBe(30000);

      // Subsequent attempts should also use Retry-After
      expect(calculateBackoffDelay(2, policy, 30000)).toBe(30000);
      expect(calculateBackoffDelay(3, policy, 30000)).toBe(30000);
    });

    it('should handle 429 without Retry-After header', () => {
      const policy = createRateLimitBackoffPolicy(30000);
      policy.jitterEnabled = false; // Disable jitter for predictable tests

      // Should fall back to fixed strategy
      expect(calculateBackoffDelay(1, policy)).toBe(30000);
      expect(calculateBackoffDelay(2, policy)).toBe(30000);
    });

    it('should cap 429 Retry-After at reasonable limits', () => {
      const policy = createRateLimitBackoffPolicy(300000); // 5 minutes
      policy.jitterEnabled = false; // Disable jitter for predictable tests

      // Should be capped at retryAfterMaxMs (600000ms = 10 minutes)
      expect(calculateBackoffDelay(1, policy, 300000)).toBe(300000);
      expect(calculateBackoffDelay(1, policy, 900000)).toBe(600000); // Capped
    });
  });

  describe('Jitter Bounds', () => {
    it('should respect jitter bounds for exponential backoff', () => {
      const policy: BackoffPolicy = {
        maxAttempts: 3,
        baseDelayMs: 1000,
        maxDelayMs: 30000,
        strategy: 'exponential',
        jitterEnabled: true,
        jitterFactor: 0.25,
        respectRetryAfter: false
      };

      // Test multiple attempts to ensure jitter is within bounds
      for (let attempt = 1; attempt <= 5; attempt++) {
        const baseDelay = 1000 * Math.pow(2, attempt - 1);
        const minDelay = baseDelay * (1 - 0.25);
        const maxDelay = baseDelay * (1 + 0.25);

        const delays = Array.from({ length: 50 }, () =>
          calculateBackoffDelay(attempt, policy)
        );

        delays.forEach(delay => {
          expect(delay).toBeGreaterThanOrEqual(minDelay);
          expect(delay).toBeLessThanOrEqual(maxDelay);
        });
      }
    });

    it('should not apply negative jitter', () => {
      const policy: BackoffPolicy = {
        maxAttempts: 3,
        baseDelayMs: 1000,
        maxDelayMs: 30000,
        strategy: 'fixed',
        jitterEnabled: true,
        jitterFactor: 1.0, // 100% jitter
        respectRetryAfter: false
      };

      const delays = Array.from({ length: 100 }, () =>
        calculateBackoffDelay(1, policy)
      );

      // All delays should be non-negative
      delays.forEach(delay => {
        expect(delay).toBeGreaterThanOrEqual(0);
      });
    });
  });
});
@@ -0,0 +1,268 @@
/**
 * Unit tests for clock synchronization and skew handling
 */

import { ClockSyncManager, createDefaultClockSyncManager } from '../clock-sync';

// Mock fetch for testing
global.fetch = jest.fn();

describe('Clock Sync Manager', () => {
  let clockSync: ClockSyncManager;

  beforeEach(() => {
    clockSync = createDefaultClockSyncManager();
    jest.clearAllMocks();
  });

  describe('Server Time Sync', () => {
    it('should sync with server time successfully', async () => {
      const mockServerTime = 1704067200000; // 2024-01-01 00:00:00
      const mockClientTime = 1704067195000; // 2024-01-01 00:00:00 - 5s

      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue(mockServerTime.toString())
        }
      });

      // Mock Date.now to return consistent client time
      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime);

      await clockSync.syncWithServer('https://api.example.com');

      expect(clockSync.getServerOffset()).toBe(5000); // 5 second offset
      expect(clockSync.getLastSyncTime()).toBe(mockClientTime);
    });

    it('should handle server sync failure gracefully', async () => {
      (fetch as jest.Mock).mockRejectedValueOnce(new Error('Network error'));

      const initialOffset = clockSync.getServerOffset();

      await clockSync.syncWithServer('https://api.example.com');

      // Offset should remain unchanged on failure
      expect(clockSync.getServerOffset()).toBe(initialOffset);
    });

    it('should handle invalid server time response', async () => {
      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue('0')
        }
      });

      const initialOffset = clockSync.getServerOffset();

      await clockSync.syncWithServer('https://api.example.com');

      // Offset should remain unchanged on invalid response
      expect(clockSync.getServerOffset()).toBe(initialOffset);
    });

    it('should handle HTTP error responses', async () => {
      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: false,
        status: 500
      });

      const initialOffset = clockSync.getServerOffset();

      await clockSync.syncWithServer('https://api.example.com');

      // Offset should remain unchanged on HTTP error
      expect(clockSync.getServerOffset()).toBe(initialOffset);
    });
  });

  describe('Clock Skew Detection', () => {
    it('should detect excessive clock skew', async () => {
      const mockServerTime = 1704067200000; // 2024-01-01 00:00:00
      const mockClientTime = 1704067200000 - (35 * 1000); // 35 seconds behind

      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue(mockServerTime.toString())
        }
      });

      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime);

      await clockSync.syncWithServer('https://api.example.com');

      expect(clockSync.isClockSkewExcessive()).toBe(true);
    });

    it('should not detect excessive clock skew within tolerance', async () => {
      const mockServerTime = 1704067200000; // 2024-01-01 00:00:00
      const mockClientTime = 1704067200000 - (25 * 1000); // 25 seconds behind

      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue(mockServerTime.toString())
        }
      });

      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime);

      await clockSync.syncWithServer('https://api.example.com');

      expect(clockSync.isClockSkewExcessive()).toBe(false);
    });
  });

  describe('JWT Timestamp Validation', () => {
    beforeEach(() => {
      // Set up clock sync with known offset
      const mockServerTime = 1704067200000; // 2024-01-01 00:00:00
      const mockClientTime = 1704067195000; // 5 seconds behind

      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue(mockServerTime.toString())
        }
      });

      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime);
    });

    it('should validate JWT within acceptable time window', async () => {
      await clockSync.syncWithServer('https://api.example.com');

      const jwt = {
        iat: Math.floor((1704067200000 - 1000) / 1000), // 1 second ago
        exp: Math.floor((1704067200000 + 3600000) / 1000) // 1 hour from now
      };

      expect(clockSync.validateJwtTimestamp(jwt)).toBe(true);
    });

    it('should reject JWT with excessive clock skew', async () => {
      await clockSync.syncWithServer('https://api.example.com');

      const jwt = {
        iat: Math.floor((1704067200000 - 45000) / 1000), // 45 seconds ago (exceeds 30s tolerance)
        exp: Math.floor((1704067200000 + 3600000) / 1000) // 1 hour from now
      };

      expect(clockSync.validateJwtTimestamp(jwt)).toBe(false);
    });

    it('should reject expired JWT', async () => {
      await clockSync.syncWithServer('https://api.example.com');

      const jwt = {
        iat: Math.floor((1704067200000 - 3600000) / 1000), // 1 hour ago
        exp: Math.floor((1704067200000 - 35000) / 1000) // 35 seconds ago (expired, exceeds tolerance)
      };

      expect(clockSync.validateJwtTimestamp(jwt)).toBe(false);
    });

    it('should reject JWT that is too old', async () => {
      await clockSync.syncWithServer('https://api.example.com');

      const jwt = {
        iat: Math.floor((1704067200000 - 7200000) / 1000), // 2 hours ago (exceeds max age)
        exp: Math.floor((1704067200000 + 3600000) / 1000) // 1 hour from now
      };

      expect(clockSync.validateJwtTimestamp(jwt)).toBe(false);
    });

    it('should handle JWT with clock skew tolerance', async () => {
      await clockSync.syncWithServer('https://api.example.com');

      const jwt = {
        iat: Math.floor((1704067200000 - 25000) / 1000), // 25 seconds ago (within tolerance)
        exp: Math.floor((1704067200000 + 3600000) / 1000) // 1 hour from now
      };

      expect(clockSync.validateJwtTimestamp(jwt)).toBe(true);
    });
  });

  describe('Periodic Sync', () => {
    it('should start and stop periodic sync', () => {
      const syncSpy = jest.spyOn(clockSync, 'syncWithServer');

      clockSync.startPeriodicSync('https://api.example.com');
      expect(syncSpy).not.toHaveBeenCalled(); // Not called immediately

      clockSync.stopPeriodicSync();
      // Should not throw error
    });

    it('should detect when sync is needed', () => {
      // Initially needs sync (no previous sync)
      expect(clockSync.needsSync()).toBe(true);

      // Mock a recent sync
      jest.spyOn(Date, 'now').mockReturnValue(1000);
      clockSync.syncWithServer('https://api.example.com');

      // Should not need sync immediately after
      expect(clockSync.needsSync()).toBe(false);
    });
  });

  describe('Time Calculations', () => {
    it('should calculate server time correctly', async () => {
      const mockServerTime = 1704067200000;
      const mockClientTime = 1704067195000;

      (fetch as jest.Mock).mockResolvedValueOnce({
        ok: true,
        headers: {
          get: jest.fn().mockReturnValue(mockServerTime.toString())
        }
      });

      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime);

      await clockSync.syncWithServer('https://api.example.com');

      // Mock current time to be 1 second later
      jest.spyOn(Date, 'now').mockReturnValue(mockClientTime + 1000);

      const serverTime = clockSync.getServerTime();
      expect(serverTime).toBe(mockServerTime + 1000);
    });

    it('should return client time correctly', () => {
      const mockTime = 1704067200000;
      jest.spyOn(Date, 'now').mockReturnValue(mockTime);

      expect(clockSync.getClientTime()).toBe(mockTime);
    });
  });

  describe('Configuration', () => {
    it('should use default configuration', () => {
      const config = clockSync.getConfig();

      expect(config.maxClockSkewSeconds).toBe(30);
      expect(config.skewCheckIntervalMs).toBe(300000);
      expect(config.jwtClockSkewTolerance).toBe(30);
      expect(config.jwtMaxAge).toBe(3600000);
    });

    it('should use custom configuration', () => {
      const customClockSync = new ClockSyncManager({
        maxClockSkewSeconds: 60,
        skewCheckIntervalMs: 600000
      });

      const config = customClockSync.getConfig();

      expect(config.maxClockSkewSeconds).toBe(60);
      expect(config.skewCheckIntervalMs).toBe(600000);
    });
  });
});
@@ -0,0 +1,236 @@
/**
 * Jest tests for schema validation with snapshots
 */

import {
  StarredProjectsResponseSchema,
  DeepLinkParamsSchema,
  ErrorResponseSchema,
  RateLimitResponseSchema,
  AcknowledgmentRequestSchema,
  AcknowledgmentResponseSchema,
  ClockSyncResponseSchema
} from '../schemas';

describe('Schema Validation', () => {
  describe('StarredProjectsResponseSchema', () => {
    it('should validate canonical response envelope', () => {
      const canonicalResponse = {
        data: [
          {
            planSummary: {
              jwtId: '1704067200_abc123_def45678',
              handleId: 'test_project_1',
              name: 'Test Project 1',
              description: 'First test project',
              issuerDid: 'did:key:test_issuer_1',
              agentDid: 'did:key:test_agent_1',
              startTime: '2025-01-01T00:00:00Z',
              endTime: '2025-01-31T23:59:59Z',
              locLat: 40.7128,
              locLon: -74.0060,
              url: 'https://project-url.com',
              version: '1.0.0'
            },
            previousClaim: {
              jwtId: '1703980800_xyz789_12345678',
              claimType: 'project_update',
              claimData: {
                status: 'in_progress',
                progress: 0.75
              },
              metadata: {
                createdAt: '2025-01-01T10:00:00Z',
                updatedAt: '2025-01-01T12:00:00Z'
              }
            }
          }
        ],
        hitLimit: false,
        pagination: {
          hasMore: false,
          nextAfterId: null
        }
      };

      const result = StarredProjectsResponseSchema.safeParse(canonicalResponse);
      if (!result.success) {
        console.log('Schema validation errors:', result.error.errors);
      }
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('canonical-response-envelope');
    });

    it('should validate empty response', () => {
      const emptyResponse = {
        data: [],
        hitLimit: false,
        pagination: {
          hasMore: false,
          nextAfterId: null
        }
      };

      const result = StarredProjectsResponseSchema.safeParse(emptyResponse);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('empty-response');
    });

    it('should validate paginated response', () => {
      const paginatedResponse = {
        data: [], // Would contain 100 items in real scenario
        hitLimit: true,
        pagination: {
          hasMore: true,
          nextAfterId: '1704153600_mno345_87654321'
        }
      };

      const result = StarredProjectsResponseSchema.safeParse(paginatedResponse);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('paginated-response');
    });
  });

  describe('DeepLinkParamsSchema', () => {
    it('should validate single JWT ID params', () => {
      const params = {
        jwtId: '1704067200_abc123_def45678'
      };

      const result = DeepLinkParamsSchema.safeParse(params);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('single-jwt-id-params');
    });

    it('should validate multiple JWT IDs params', () => {
      const params = {
        jwtIds: [
          '1704067200_abc123_12345678',
          '1704153600_mno345_87654321',
          '1704240000_new123_abcdef01'
        ]
      };

      const result = DeepLinkParamsSchema.safeParse(params);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('multiple-jwt-ids-params');
    });

    it('should validate project ID params', () => {
      const params = {
        projectId: 'test_project_123'
      };

      const result = DeepLinkParamsSchema.safeParse(params);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('project-id-params');
    });

    it('should validate shortlink params', () => {
      const params = {
        shortlink: 'abc123def456789'
      };

      const result = DeepLinkParamsSchema.safeParse(params);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('shortlink-params');
    });

    it('should reject invalid params', () => {
      const invalidParams = {
        jwtIds: ['invalid_jwt_id'],
        projectId: 'invalid@project#id'
      };

      const result = DeepLinkParamsSchema.safeParse(invalidParams);
      expect(result.success).toBe(false);
      expect(result.success ? null : result.error).toMatchSnapshot('invalid-params-error');
    });
  });

  describe('ErrorResponseSchema', () => {
    it('should validate rate limit error', () => {
      const rateLimitError = {
        error: 'Rate limit exceeded',
        code: 'RATE_LIMIT_EXCEEDED',
        message: 'Rate limit exceeded for DID',
        details: {
          limit: 100,
          window: '1m',
          resetAt: '2025-01-01T12:01:00Z'
        },
        retryAfter: 60,
        requestId: 'req_jkl012'
      };

      const result = RateLimitResponseSchema.safeParse(rateLimitError);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('rate-limit-error');
    });

    it('should validate generic error', () => {
      const genericError = {
        error: 'internal_server_error',
        message: 'Database connection timeout',
        details: {
          component: 'database',
          operation: 'query_starred_projects'
        },
        requestId: 'req_mno345'
      };

      const result = ErrorResponseSchema.safeParse(genericError);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('generic-error');
    });
  });

  describe('AcknowledgmentRequestSchema', () => {
    it('should validate acknowledgment request', () => {
      const ackRequest = {
        acknowledgedJwtIds: [
          '1704067200_abc123_12345678',
          '1704153600_mno345_87654321'
        ],
        acknowledgedAt: '2025-01-01T12:00:00Z',
        clientVersion: 'TimeSafari-DailyNotificationPlugin/1.0.0'
      };

      const result = AcknowledgmentRequestSchema.safeParse(ackRequest);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('acknowledgment-request');
    });
  });

  describe('AcknowledgmentResponseSchema', () => {
    it('should validate acknowledgment response', () => {
      const ackResponse = {
        acknowledged: 2,
        failed: 0,
        alreadyAcknowledged: 0,
        acknowledgmentId: 'ack_xyz789',
        timestamp: '2025-01-01T12:00:00Z'
      };

      const result = AcknowledgmentResponseSchema.safeParse(ackResponse);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('acknowledgment-response');
    });
  });

  describe('ClockSyncResponseSchema', () => {
    it('should validate clock sync response', () => {
      const clockSyncResponse = {
        serverTime: 1704067200000,
        clientTime: 1704067195000,
        offset: 5000,
        ntpServers: ['pool.ntp.org', 'time.google.com']
      };

      const result = ClockSyncResponseSchema.safeParse(clockSyncResponse);
      expect(result.success).toBe(true);
      expect(result.success ? result.data : null).toMatchSnapshot('clock-sync-response');
    });
  });
});
@@ -0,0 +1,22 @@
/**
 * Jest setup file for polling contracts tests
 */

// Mock console methods to reduce noise in tests
const originalConsoleLog = console.log;
const originalConsoleWarn = console.warn;
const originalConsoleError = console.error;

beforeAll(() => {
  // Allow console.log for debugging, but suppress other console methods
  // console.log = jest.fn();
  console.warn = jest.fn();
  console.error = jest.fn();
});

afterAll(() => {
  // Restore console methods
  console.log = originalConsoleLog;
  console.warn = originalConsoleWarn;
  console.error = originalConsoleError;
});
@@ -0,0 +1,265 @@
/**
 * Watermark CAS race condition tests
 */

import { compareJwtIds } from '../validation';

describe('Watermark CAS Race Conditions', () => {
  const testJwtIds = [
    '1704067200_abc123_12345678', // 2024-01-01 00:00:00
    '1704153600_mno345_87654321', // 2024-01-02 00:00:00
    '1704240000_new123_abcdef01', // 2024-01-03 00:00:00
    '1704326400_xyz789_23456789', // 2024-01-04 00:00:00
    '1704412800_stu901_34567890'  // 2024-01-05 00:00:00
  ];

  describe('Concurrent Bootstrap Race', () => {
    it('should handle two clients bootstrapping concurrently', async () => {
      // Simulate two clients fetching the same data concurrently
      const client1Bootstrap = testJwtIds[2]; // 2024-01-03
      const client2Bootstrap = testJwtIds[3]; // 2024-01-04 (newer)

      // Client 1 attempts to set watermark
      const client1Result = await simulateWatermarkUpdate(null, client1Bootstrap);
      expect(client1Result.success).toBe(true);
      expect(client1Result.watermark).toBe(client1Bootstrap);

      // Client 2 attempts to set watermark (should succeed due to CAS)
      const client2Result = await simulateWatermarkUpdate(null, client2Bootstrap);
      expect(client2Result.success).toBe(true);
      expect(client2Result.watermark).toBe(client2Bootstrap);

      // Final watermark should be the maximum JWT ID
      const finalWatermark = await getCurrentWatermark();
      expect(finalWatermark).toBe(client2Bootstrap);
      if (finalWatermark && client1Bootstrap) {
        expect(compareJwtIds(finalWatermark, client1Bootstrap)).toBeGreaterThan(0);
      }
    });

    it('should reject older watermark updates', async () => {
      // Set initial watermark
      await simulateWatermarkUpdate(null, testJwtIds[3]); // 2024-01-04

      // Attempt to set older watermark (should fail)
      const result = await simulateWatermarkUpdate(testJwtIds[3], testJwtIds[1]); // 2024-01-02
      expect(result.success).toBe(false);
      expect(result.watermark).toBe(testJwtIds[3]); // Unchanged
    });

    it('should handle null watermark bootstrap', async () => {
      // First client sets watermark from null
      const result1 = await simulateWatermarkUpdate(null, testJwtIds[2]);
      expect(result1.success).toBe(true);

      // Second client attempts to set watermark from null (should fail)
      const result2 = await simulateWatermarkUpdate(null, testJwtIds[1]);
      expect(result2.success).toBe(false);
      expect(result2.watermark).toBe(testJwtIds[2]); // First client's watermark
    });
  });

  describe('Overlapping Polls Race', () => {
    it('should handle overlapping polls with different results', async () => {
      // Set initial watermark
      await simulateWatermarkUpdate(null, testJwtIds[1]); // 2024-01-02

      // Client 1 polls and finds changes up to 2024-01-03
      const client1Changes = [testJwtIds[2]]; // 2024-01-03
      const client1Result = await simulatePollAndUpdate(testJwtIds[1], client1Changes);
      expect(client1Result.success).toBe(true);
      expect(client1Result.newWatermark).toBe(testJwtIds[2]);

      // Client 2 polls concurrently and finds changes up to 2024-01-04
      const client2Changes = [testJwtIds[2], testJwtIds[3]]; // 2024-01-03, 2024-01-04
      const client2Result = await simulatePollAndUpdate(testJwtIds[1], client2Changes);
      expect(client2Result.success).toBe(true);
      expect(client2Result.newWatermark).toBe(testJwtIds[3]);

      // Final watermark should be the maximum
      const finalWatermark = await getCurrentWatermark();
      expect(finalWatermark).toBe(testJwtIds[3]);
    });

    it('should prevent duplicate notifications from overlapping polls', async () => {
      // Set initial watermark
      await simulateWatermarkUpdate(null, testJwtIds[1]);

      // Both clients find the same change
      const sharedChange = testJwtIds[2];

      // Client 1 processes change
      const client1Result = await simulatePollAndUpdate(testJwtIds[1], [sharedChange]);
      expect(client1Result.success).toBe(true);
      expect(client1Result.notificationsGenerated).toBe(1);

      // Client 2 attempts to process same change (should be no-op)
      const client2Result = await simulatePollAndUpdate(testJwtIds[1], [sharedChange]);
      expect(client2Result.success).toBe(false); // No new changes
      expect(client2Result.notificationsGenerated).toBe(0);
    });
  });

  describe('Watermark Monotonicity', () => {
    it('should maintain monotonic watermark advancement', async () => {
      let currentWatermark = null;

      // Process changes in order
      for (let i = 0; i < testJwtIds.length; i++) {
        const newWatermark = testJwtIds[i];
        const result = await simulateWatermarkUpdate(currentWatermark, newWatermark);

        expect(result.success).toBe(true);
        expect(result.watermark).toBe(newWatermark);

        // Verify monotonicity
        if (currentWatermark) {
          expect(compareJwtIds(newWatermark, currentWatermark)).toBeGreaterThan(0);
        }

        currentWatermark = newWatermark;
      }
    });

    it('should reject non-monotonic watermark updates', async () => {
      // Set watermark to middle value
      await simulateWatermarkUpdate(null, testJwtIds[2]);

      // Attempt to set older watermark
      const result = await simulateWatermarkUpdate(testJwtIds[2], testJwtIds[0]);
      expect(result.success).toBe(false);

      // Attempt to set same watermark
      const result2 = await simulateWatermarkUpdate(testJwtIds[2], testJwtIds[2]);
      expect(result2.success).toBe(false);
    });
  });

  describe('Platform-Specific CAS Implementation', () => {
    it('should verify SQL CAS returns row update count', async () => {
      // Simulate SQL UPDATE ... WHERE condition
      const sqlResult = await simulateSqlWatermarkUpdate(null, testJwtIds[0]);
      expect(sqlResult.rowsAffected).toBe(1);

      // Attempt to update with same condition (should affect 0 rows)
      const sqlResult2 = await simulateSqlWatermarkUpdate(null, testJwtIds[0]);
      expect(sqlResult2.rowsAffected).toBe(0);
    });

    it('should verify Room CAS returns update count', async () => {
      // Simulate Room @Query with return type Int
      const roomResult = await simulateRoomWatermarkUpdate(null, testJwtIds[0]);
      expect(roomResult).toBe(1);

      // Attempt to update with same condition
      const roomResult2 = await simulateRoomWatermarkUpdate(null, testJwtIds[0]);
      expect(roomResult2).toBe(0);
    });

    it('should verify Core Data CAS returns success boolean', async () => {
      // Simulate Core Data compare-and-swap
      const coreDataResult = await simulateCoreDataWatermarkUpdate(null, testJwtIds[0]);
      expect(coreDataResult).toBe(true);

      // Attempt to update with same condition
      const coreDataResult2 = await simulateCoreDataWatermarkUpdate(null, testJwtIds[0]);
      expect(coreDataResult2).toBe(false);
    });

    it('should verify IndexedDB CAS returns success boolean', async () => {
      // Simulate IndexedDB transaction
      const idbResult = await simulateIndexedDBWatermarkUpdate(null, testJwtIds[0]);
      expect(idbResult).toBe(true);

      // Attempt to update with same condition
      const idbResult2 = await simulateIndexedDBWatermarkUpdate(null, testJwtIds[0]);
      expect(idbResult2).toBe(false);
    });
  });
});

// Mock implementations for testing
let mockWatermark: string | null = null;

async function simulateWatermarkUpdate(
  expectedWatermark: string | null,
  newWatermark: string
): Promise<{ success: boolean; watermark: string | null }> {
  // Simulate CAS logic
  if (mockWatermark === expectedWatermark) {
    mockWatermark = newWatermark;
    return { success: true, watermark: newWatermark };
  }
  return { success: false, watermark: mockWatermark };
}

async function simulatePollAndUpdate(
  currentWatermark: string | null,
  changes: string[]
): Promise<{ success: boolean; newWatermark: string | null; notificationsGenerated: number }> {
  if (changes.length === 0) {
    return { success: false, newWatermark: currentWatermark, notificationsGenerated: 0 };
  }

  const latestChange = changes[changes.length - 1];
  const result = await simulateWatermarkUpdate(currentWatermark, latestChange);

  return {
    success: result.success,
    newWatermark: result.watermark,
    notificationsGenerated: result.success ? changes.length : 0
  };
}

async function getCurrentWatermark(): Promise<string | null> {
  return mockWatermark;
}

async function simulateSqlWatermarkUpdate(
  expectedWatermark: string | null,
  newWatermark: string
): Promise<{ rowsAffected: number }> {
  if (mockWatermark === expectedWatermark) {
    mockWatermark = newWatermark;
    return { rowsAffected: 1 };
  }
  return { rowsAffected: 0 };
}

async function simulateRoomWatermarkUpdate(
  expectedWatermark: string | null,
  newWatermark: string
): Promise<number> {
  if (mockWatermark === expectedWatermark) {
    mockWatermark = newWatermark;
    return 1;
  }
  return 0;
}

async function simulateCoreDataWatermarkUpdate(
  expectedWatermark: string | null,
  newWatermark: string
): Promise<boolean> {
  if (mockWatermark === expectedWatermark) {
    mockWatermark = newWatermark;
    return true;
  }
  return false;
}

async function simulateIndexedDBWatermarkUpdate(
  expectedWatermark: string | null,
  newWatermark: string
): Promise<boolean> {
  if (mockWatermark === expectedWatermark) {
    mockWatermark = newWatermark;
    return true;
  }
  return false;
}

// Reset mock state before each test
beforeEach(() => {
  mockWatermark = null;
});
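The platform-specific simulations above all reduce to the same compare-and-swap shape: write only if the stored watermark still matches what this client last read. A minimal SQL-backed sketch of that idea follows; the table, column, and driver shape are hypothetical and not part of this commit.

// Returns true only if this writer observed the expected watermark and advanced it.
async function casAdvanceWatermark(
  db: { run(sql: string, params: unknown[]): Promise<{ rowsAffected: number }> }, // hypothetical driver shape
  expected: string | null,
  next: string
): Promise<boolean> {
  const result = expected === null
    ? await db.run('UPDATE polling_state SET watermark = ? WHERE watermark IS NULL', [next])
    : await db.run('UPDATE polling_state SET watermark = ? WHERE watermark = ?', [next, expected]);
  // rowsAffected === 0 means another client won the race; callers re-read and retry.
  return result.rowsAffected === 1;
}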
@@ -0,0 +1,109 @@
/**
 * Unified backoff policy implementation
 */

import { BackoffPolicy } from './types';
import { DEFAULT_CONFIG } from './constants';

/**
 * Calculate backoff delay with Retry-After + jittered exponential caps
 */
export function calculateBackoffDelay(
  attempt: number,
  policy: BackoffPolicy,
  retryAfterMs?: number
): number {
  let delay: number;

  // Respect Retry-After header if present and enabled
  if (policy.respectRetryAfter && retryAfterMs !== undefined) {
    delay = Math.min(retryAfterMs, policy.retryAfterMaxMs || policy.maxDelayMs);
  } else {
    // Calculate base delay based on strategy
    switch (policy.strategy) {
      case 'exponential':
        delay = policy.baseDelayMs * Math.pow(2, attempt - 1);
        break;
      case 'linear':
        delay = policy.baseDelayMs * attempt;
        break;
      case 'fixed':
        delay = policy.baseDelayMs;
        break;
      default:
        delay = policy.baseDelayMs;
    }
  }

  // Apply jitter if enabled
  if (policy.jitterEnabled) {
    const jitterRange = delay * policy.jitterFactor;
    const jitter = (Math.random() - 0.5) * 2 * jitterRange;
    delay = Math.max(0, delay + jitter);
  }

  // Cap at maximum delay
  return Math.min(delay, policy.maxDelayMs);
}

/**
 * Create default backoff policy
 */
export function createDefaultBackoffPolicy(): BackoffPolicy {
  return {
    maxAttempts: 3,
    baseDelayMs: DEFAULT_CONFIG.baseDelayMs,
    maxDelayMs: DEFAULT_CONFIG.maxDelayMs,
    strategy: 'exponential',
    jitterEnabled: true,
    jitterFactor: DEFAULT_CONFIG.jitterFactor,
    respectRetryAfter: DEFAULT_CONFIG.respectRetryAfter,
    retryAfterMaxMs: DEFAULT_CONFIG.retryAfterMaxMs
  };
}

/**
 * Create backoff policy for rate limiting
 */
export function createRateLimitBackoffPolicy(retryAfterMs: number): BackoffPolicy {
  return {
    maxAttempts: 5,
    baseDelayMs: retryAfterMs,
    maxDelayMs: Math.max(retryAfterMs * 2, DEFAULT_CONFIG.maxDelayMs),
    strategy: 'fixed',
    jitterEnabled: true,
    jitterFactor: 0.1, // ±10% jitter for rate limits
    respectRetryAfter: true,
    retryAfterMaxMs: retryAfterMs * 2
  };
}

/**
 * Create backoff policy for network errors
 */
export function createNetworkErrorBackoffPolicy(): BackoffPolicy {
  return {
    maxAttempts: 3,
    baseDelayMs: DEFAULT_CONFIG.baseDelayMs,
    maxDelayMs: DEFAULT_CONFIG.maxDelayMs,
    strategy: 'exponential',
    jitterEnabled: true,
    jitterFactor: DEFAULT_CONFIG.jitterFactor,
    respectRetryAfter: false
  };
}

/**
 * Create backoff policy for server errors (5xx)
 */
export function createServerErrorBackoffPolicy(): BackoffPolicy {
  return {
    maxAttempts: 3,
    baseDelayMs: 2000, // Start with 2s for server errors
    maxDelayMs: DEFAULT_CONFIG.maxDelayMs,
    strategy: 'exponential',
    jitterEnabled: true,
    jitterFactor: 0.5, // ±50% jitter for server errors
    respectRetryAfter: false
  };
}
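A brief usage sketch for the helpers above: a 429 response would typically feed the server's Retry-After hint back in via the retryAfterMs argument, while 5xx responses fall through to jittered exponential growth. The surrounding status handling is illustrative, not part of this file.

import {
  calculateBackoffDelay,
  createRateLimitBackoffPolicy,
  createServerErrorBackoffPolicy
} from '@timesafari/polling-contracts';

function nextDelayMs(attempt: number, status: number, retryAfterSeconds?: number): number {
  if (status === 429) {
    const retryAfterMs = (retryAfterSeconds ?? 60) * 1000;
    // Fixed delay anchored on Retry-After, lightly jittered and capped
    return calculateBackoffDelay(attempt, createRateLimitBackoffPolicy(retryAfterMs), retryAfterMs);
  }
  // 5xx: exponential growth from 2s with ±50% jitter
  return calculateBackoffDelay(attempt, createServerErrorBackoffPolicy());
}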
@@ -0,0 +1,176 @@
/**
 * Clock synchronization and skew handling
 */

import { ClockSyncConfig } from './types';
import { DEFAULT_CONFIG } from './constants';

export class ClockSyncManager {
  private config: ClockSyncConfig;
  private lastSyncTime = 0;
  private serverOffset = 0; // Server time - client time
  private syncInterval?: NodeJS.Timeout | undefined;

  constructor(config: Partial<ClockSyncConfig> = {}) {
    this.config = {
      serverTimeSource: config.serverTimeSource ?? 'ntp',
      ntpServers: config.ntpServers ?? ['pool.ntp.org', 'time.google.com'],
      maxClockSkewSeconds: config.maxClockSkewSeconds ?? DEFAULT_CONFIG.maxClockSkewSeconds,
      skewCheckIntervalMs: config.skewCheckIntervalMs ?? DEFAULT_CONFIG.skewCheckIntervalMs,
      jwtClockSkewTolerance: config.jwtClockSkewTolerance ?? DEFAULT_CONFIG.jwtClockSkewTolerance,
      jwtMaxAge: config.jwtMaxAge ?? DEFAULT_CONFIG.jwtMaxAge
    };
  }

  async syncWithServer(apiServer: string, jwtToken?: string): Promise<void> {
    try {
      // Get server time from API
      const response = await fetch(`${apiServer}/api/v2/time`, {
        method: 'GET',
        headers: jwtToken ? { 'Authorization': `Bearer ${jwtToken}` } : {}
      });

      if (!response.ok) {
        throw new Error(`Clock sync failed: HTTP ${response.status}`);
      }

      const serverTime = parseInt(response.headers.get('X-Server-Time') || '0');
      const clientTime = Date.now();

      if (serverTime === 0) {
        throw new Error('Invalid server time response');
      }

      this.serverOffset = serverTime - clientTime;
      this.lastSyncTime = clientTime;

      // Validate skew is within tolerance
      if (Math.abs(this.serverOffset) > this.config.maxClockSkewSeconds * 1000) {
        console.warn(`Large clock skew detected: ${this.serverOffset}ms`);
      }

      console.log(`Clock sync successful: offset=${this.serverOffset}ms`);

    } catch (error) {
      console.error('Clock sync failed:', error);
      // Continue with client time, but log the issue
    }
  }

  getServerTime(): number {
    return Date.now() + this.serverOffset;
  }

  getClientTime(): number {
    return Date.now();
  }

  getServerOffset(): number {
    return this.serverOffset;
  }

  getLastSyncTime(): number {
    return this.lastSyncTime;
  }

  validateJwtTimestamp(jwt: any): boolean {
    const now = this.getServerTime();
    const iat = jwt.iat * 1000; // Convert to milliseconds
    const exp = jwt.exp * 1000;

    // Check if JWT is within valid time window
    const skewTolerance = this.config.jwtClockSkewTolerance * 1000;
    const maxAge = this.config.jwtMaxAge;

    const isValid = (now >= iat - skewTolerance) &&
                    (now <= exp + skewTolerance) &&
                    (now - iat <= maxAge);

    if (!isValid) {
      console.warn('JWT timestamp validation failed:', {
        now,
        iat,
        exp,
        skewTolerance,
        maxAge,
        serverOffset: this.serverOffset
      });
    }

    return isValid;
  }

  isClockSkewExcessive(): boolean {
    return Math.abs(this.serverOffset) > this.config.maxClockSkewSeconds * 1000;
  }

  needsSync(): boolean {
    const timeSinceLastSync = Date.now() - this.lastSyncTime;
    return timeSinceLastSync > this.config.skewCheckIntervalMs;
  }

  // Periodic sync
  startPeriodicSync(apiServer: string, jwtToken?: string): void {
    if (this.syncInterval) {
      clearInterval(this.syncInterval);
    }

    this.syncInterval = setInterval(() => {
      this.syncWithServer(apiServer, jwtToken);
    }, this.config.skewCheckIntervalMs);
  }

  stopPeriodicSync(): void {
    if (this.syncInterval) {
      clearInterval(this.syncInterval);
      this.syncInterval = undefined;
    }
  }

  getConfig(): ClockSyncConfig {
    return { ...this.config };
  }
}

/**
 * Create default clock sync manager
 */
export function createDefaultClockSyncManager(): ClockSyncManager {
  return new ClockSyncManager();
}

/**
 * Create clock sync manager with custom config
 */
export function createClockSyncManager(config: Partial<ClockSyncConfig>): ClockSyncManager {
  return new ClockSyncManager(config);
}

/**
 * Clock sync configuration presets
 */
export const CLOCK_SYNC_PRESETS = {
  // Conservative: Frequent sync, strict tolerance
  conservative: {
    maxClockSkewSeconds: 15,
    skewCheckIntervalMs: 180000, // 3 minutes
    jwtClockSkewTolerance: 15,
    jwtMaxAge: 1800000 // 30 minutes
  },

  // Balanced: Good balance of sync frequency and tolerance
  balanced: {
    maxClockSkewSeconds: 30,
    skewCheckIntervalMs: 300000, // 5 minutes
    jwtClockSkewTolerance: 30,
    jwtMaxAge: 3600000 // 1 hour
  },

  // Relaxed: Less frequent sync, more tolerance
  relaxed: {
    maxClockSkewSeconds: 60,
    skewCheckIntervalMs: 600000, // 10 minutes
    jwtClockSkewTolerance: 60,
    jwtMaxAge: 7200000 // 2 hours
  }
} as const;
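A short usage sketch for ClockSyncManager: the API server URL and the JWT claim values are placeholders, while the calls match the class above.

import { createClockSyncManager, CLOCK_SYNC_PRESETS } from '@timesafari/polling-contracts';

const clockSync = createClockSyncManager(CLOCK_SYNC_PRESETS.balanced);
await clockSync.syncWithServer('https://api.example.com');

if (clockSync.isClockSkewExcessive()) {
  console.warn(`device clock is off by ${clockSync.getServerOffset()}ms`);
}

// Validate a decoded JWT payload against server time, skew tolerance, and max age
const accepted = clockSync.validateJwtTimestamp({ iat: 1704067199, exp: 1704070800 });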
@ -0,0 +1,53 @@
/**
 * Canonical constants for polling system
 */

// JWT ID regex pattern with named capture groups
export const JWT_ID_PATTERN = /^(?<ts>\d{10})_(?<rnd>[A-Za-z0-9]{6})_(?<hash>[a-f0-9]{8})$/;

// Default configuration values
export const DEFAULT_CONFIG = {
  // Outbox pressure controls
  maxUndelivered: 1000,
  backpressureThreshold: 0.8,
  maxRetries: 3,
  cleanupIntervalMs: 3600000, // 1 hour

  // Backoff policy
  baseDelayMs: 1000,
  maxDelayMs: 30000,
  jitterFactor: 0.25,
  respectRetryAfter: true,
  retryAfterMaxMs: 300000, // 5 minutes

  // Clock sync
  maxClockSkewSeconds: 30,
  skewCheckIntervalMs: 300000, // 5 minutes
  jwtClockSkewTolerance: 30,
  jwtMaxAge: 3600000, // 1 hour

  // Telemetry
  metricsPrefix: 'starred_projects',
  logLevel: 'INFO'
} as const;

// Error codes
export const ERROR_CODES = {
  INVALID_REQUEST: 'INVALID_REQUEST',
  VALIDATION_ERROR: 'VALIDATION_ERROR',
  RATE_LIMIT_EXCEEDED: 'RATE_LIMIT_EXCEEDED',
  EXECUTION_ERROR: 'EXECUTION_ERROR',
  CLOCK_SKEW_ERROR: 'CLOCK_SKEW_ERROR',
  STORAGE_PRESSURE: 'STORAGE_PRESSURE'
} as const;

// HTTP status codes
export const HTTP_STATUS = {
  OK: 200,
  BAD_REQUEST: 400,
  UNAUTHORIZED: 401,
  FORBIDDEN: 403,
  TOO_MANY_REQUESTS: 429,
  INTERNAL_SERVER_ERROR: 500,
  SERVICE_UNAVAILABLE: 503
} as const;
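A worked example of the JWT ID pattern above (the sample ID is made up): the three named capture groups split an ID into its creation timestamp, random suffix, and hash fragment.

const match = JWT_ID_PATTERN.exec('1704067200_abc123_9f3a2b1c');
if (match?.groups) {
  console.log(Number(match.groups.ts)); // 1704067200 (seconds since epoch)
  console.log(match.groups.rnd);        // 'abc123'
  console.log(match.groups.hash);       // '9f3a2b1c'
}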
@ -0,0 +1,17 @@
/**
 * @timesafari/polling-contracts
 *
 * TypeScript contracts and Zod schemas for TimeSafari polling system
 *
 * @author Matthew Raymer
 * @version 1.0.0
 */

export * from './types';
export * from './schemas';
export * from './validation';
export * from './constants';
export * from './backoff';
export * from './outbox-pressure';
export * from './telemetry';
export * from './clock-sync';
@ -0,0 +1,144 @@
/**
 * Outbox pressure management with telemetry
 */

import { OutboxPressureConfig, TelemetryMetrics } from './types';
import { DEFAULT_CONFIG } from './constants';

export class OutboxPressureManager {
  private config: OutboxPressureConfig;
  private metrics: TelemetryMetrics;

  constructor(config: Partial<OutboxPressureConfig> = {}) {
    this.config = {
      maxUndelivered: config.maxUndelivered ?? DEFAULT_CONFIG.maxUndelivered,
      cleanupIntervalMs: config.cleanupIntervalMs ?? DEFAULT_CONFIG.cleanupIntervalMs,
      backpressureThreshold: config.backpressureThreshold ?? DEFAULT_CONFIG.backpressureThreshold,
      evictionPolicy: config.evictionPolicy ?? 'fifo'
    };

    this.metrics = {
      'starred_projects_outbox_size': 0,
      'starred_projects_outbox_backpressure_active': 0
    } as TelemetryMetrics;
  }

  async checkStoragePressure(undeliveredCount: number): Promise<boolean> {
    // Update metrics
    this.metrics['starred_projects_outbox_size'] = undeliveredCount;

    const pressureRatio = undeliveredCount / this.config.maxUndelivered;
    const backpressureActive = pressureRatio >= this.config.backpressureThreshold;

    // Update backpressure metric
    this.metrics['starred_projects_outbox_backpressure_active'] = backpressureActive ? 1 : 0;

    if (pressureRatio >= 1.0) {
      // Critical: Drop oldest notifications to make room
      const evictCount = undeliveredCount - this.config.maxUndelivered;
      await this.evictNotifications(evictCount);
      return true; // Backpressure active
    }

    return backpressureActive;
  }

  async evictNotifications(count: number): Promise<void> {
    if (count <= 0) return;

    // Simulate eviction based on policy
    switch (this.config.evictionPolicy) {
      case 'fifo':
        await this.evictFIFO(count);
        break;
      case 'lifo':
        await this.evictLIFO(count);
        break;
      case 'priority':
        await this.evictByPriority(count);
        break;
    }
  }

  private async evictFIFO(count: number): Promise<void> {
    // Simulate: DELETE FROM notification_outbox
    // WHERE delivered_at IS NULL
    // ORDER BY created_at ASC
    // LIMIT count
    console.log(`Evicting ${count} oldest notifications (FIFO)`);
  }

  private async evictLIFO(count: number): Promise<void> {
    // Simulate: DELETE FROM notification_outbox
    // WHERE delivered_at IS NULL
    // ORDER BY created_at DESC
    // LIMIT count
    console.log(`Evicting ${count} newest notifications (LIFO)`);
  }

  private async evictByPriority(count: number): Promise<void> {
    // Simulate: DELETE FROM notification_outbox
    // WHERE delivered_at IS NULL
    // ORDER BY priority ASC, created_at ASC
    // LIMIT count
    console.log(`Evicting ${count} lowest priority notifications`);
  }

  async cleanupDeliveredNotifications(): Promise<void> {
    // Simulate: DELETE FROM notification_outbox
    // WHERE delivered_at IS NOT NULL
    // AND delivered_at < datetime('now', '-${cleanupIntervalMs / 1000} seconds')
    console.log(`Cleaning up delivered notifications older than ${this.config.cleanupIntervalMs}ms`);
  }

  getMetrics(): TelemetryMetrics {
    return { ...this.metrics };
  }

  getConfig(): OutboxPressureConfig {
    return { ...this.config };
  }
}

/**
 * Create default outbox pressure manager
 */
export function createDefaultOutboxPressureManager(): OutboxPressureManager {
  return new OutboxPressureManager();
}

/**
 * Create outbox pressure manager with custom config
 */
export function createOutboxPressureManager(config: Partial<OutboxPressureConfig>): OutboxPressureManager {
  return new OutboxPressureManager(config);
}

/**
 * Outbox pressure configuration presets
 */
export const OUTBOX_PRESSURE_PRESETS = {
  // Conservative: Low memory usage, frequent cleanup
  conservative: {
    maxUndelivered: 500,
    backpressureThreshold: 0.7,
    cleanupIntervalMs: 1800000, // 30 minutes
    evictionPolicy: 'fifo' as const
  },

  // Balanced: Good performance, reasonable memory usage
  balanced: {
    maxUndelivered: 1000,
    backpressureThreshold: 0.8,
    cleanupIntervalMs: 3600000, // 1 hour
    evictionPolicy: 'fifo' as const
  },

  // Aggressive: High throughput, more memory usage
  aggressive: {
    maxUndelivered: 2000,
    backpressureThreshold: 0.9,
    cleanupIntervalMs: 7200000, // 2 hours
    evictionPolicy: 'priority' as const
  }
} as const;
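Usage sketch for the pressure manager above (the counts are illustrative). With the default limits (maxUndelivered 1000, backpressureThreshold 0.8), 850 undelivered rows crosses the back-pressure ratio without evicting, while 1200 also evicts the 200 oldest entries under the FIFO policy.

async function demoOutboxPressure(): Promise<void> {
  const pressure = createDefaultOutboxPressureManager();

  await pressure.checkStoragePressure(850);  // true: ratio 0.85 >= 0.8, no eviction
  await pressure.checkStoragePressure(1200); // true: ratio >= 1.0, evicts 200 oldest (FIFO)

  console.log(pressure.getMetrics()); // outbox size and backpressure flag for export
}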
@ -0,0 +1,126 @@
/**
 * Zod schemas for strong structural validation
 */

import { z } from 'zod';
import { JWT_ID_PATTERN } from './constants';

// Core schemas
export const PlanSummarySchema = z.object({
  jwtId: z.string().regex(JWT_ID_PATTERN, 'Invalid JWT ID format'),
  handleId: z.string().min(1),
  name: z.string().min(1),
  description: z.string(),
  issuerDid: z.string().startsWith('did:key:'),
  agentDid: z.string().startsWith('did:key:'),
  startTime: z.string().datetime(),
  endTime: z.string().datetime(),
  locLat: z.number().nullable().optional(),
  locLon: z.number().nullable().optional(),
  url: z.string().url().nullable().optional(),
  version: z.string()
});

export const PreviousClaimSchema = z.object({
  jwtId: z.string().regex(JWT_ID_PATTERN),
  claimType: z.string(),
  claimData: z.record(z.any()),
  metadata: z.object({
    createdAt: z.string().datetime(),
    updatedAt: z.string().datetime()
  })
});

export const PlanSummaryAndPreviousClaimSchema = z.object({
  planSummary: PlanSummarySchema,
  previousClaim: PreviousClaimSchema.optional()
});

export const StarredProjectsResponseSchema = z.object({
  data: z.array(PlanSummaryAndPreviousClaimSchema),
  hitLimit: z.boolean(),
  pagination: z.object({
    hasMore: z.boolean(),
    nextAfterId: z.string().regex(JWT_ID_PATTERN).nullable()
  })
});

export const StarredProjectsRequestSchema = z.object({
  planIds: z.array(z.string()).min(1),
  afterId: z.string().regex(JWT_ID_PATTERN).optional(),
  beforeId: z.string().regex(JWT_ID_PATTERN).optional(),
  limit: z.number().min(1).max(100).default(100)
});

// Deep link parameter validation
export const DeepLinkParamsSchema = z.object({
  jwtIds: z.array(z.string().regex(JWT_ID_PATTERN)).max(10).optional(),
  projectId: z.string().regex(/^[a-zA-Z0-9_-]+$/).optional(),
  jwtId: z.string().regex(JWT_ID_PATTERN).optional(),
  shortlink: z.string().min(1).optional()
}).refine(
  (data) => data.jwtIds || data.projectId || data.shortlink,
  'At least one of jwtIds, projectId, or shortlink must be provided'
);

// Error response schema
export const ErrorResponseSchema = z.object({
  error: z.string(),
  code: z.string().optional(),
  message: z.string(),
  details: z.record(z.any()).optional(),
  retryAfter: z.number().optional(),
  requestId: z.string().optional()
});

// Rate limit response schema
export const RateLimitResponseSchema = z.object({
  error: z.literal('Rate limit exceeded'),
  code: z.literal('RATE_LIMIT_EXCEEDED'),
  message: z.string(),
  details: z.object({
    limit: z.number(),
    window: z.string(),
    resetAt: z.string().datetime(),
    remaining: z.number().optional()
  }),
  retryAfter: z.number(),
  requestId: z.string()
});

// Acknowledgment request schema
export const AcknowledgmentRequestSchema = z.object({
  acknowledgedJwtIds: z.array(z.string().regex(JWT_ID_PATTERN)).min(1),
  acknowledgedAt: z.string().datetime(),
  clientVersion: z.string()
});

// Acknowledgment response schema
export const AcknowledgmentResponseSchema = z.object({
  acknowledged: z.number(),
  failed: z.number(),
  alreadyAcknowledged: z.number(),
  acknowledgmentId: z.string(),
  timestamp: z.string().datetime()
});

// Clock sync response schema
export const ClockSyncResponseSchema = z.object({
  serverTime: z.number(),
  clientTime: z.number(),
  offset: z.number(),
  ntpServers: z.array(z.string()).optional()
});

// Type exports for use in host apps
export type StarredProjectsRequest = z.infer<typeof StarredProjectsRequestSchema>;
export type StarredProjectsResponse = z.infer<typeof StarredProjectsResponseSchema>;
export type PlanSummary = z.infer<typeof PlanSummarySchema>;
export type PreviousClaim = z.infer<typeof PreviousClaimSchema>;
export type PlanSummaryAndPreviousClaim = z.infer<typeof PlanSummaryAndPreviousClaimSchema>;
export type DeepLinkParams = z.infer<typeof DeepLinkParamsSchema>;
export type ErrorResponse = z.infer<typeof ErrorResponseSchema>;
export type RateLimitResponse = z.infer<typeof RateLimitResponseSchema>;
export type AcknowledgmentRequest = z.infer<typeof AcknowledgmentRequestSchema>;
export type AcknowledgmentResponse = z.infer<typeof AcknowledgmentResponseSchema>;
export type ClockSyncResponse = z.infer<typeof ClockSyncResponseSchema>;
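Usage sketch for the request schema above (field values are illustrative). safeParse applies the declared default for limit and rejects malformed cursors before any network call is made.

const parsed = StarredProjectsRequestSchema.safeParse({
  planIds: ['plan-abc'],
  afterId: '1704067200_abc123_9f3a2b1c'
});

if (parsed.success) {
  console.log(parsed.data.limit); // 100 — default applied by the schema
} else {
  console.log(parsed.error.errors.map(e => `${e.path.join('.')}: ${e.message}`));
}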
@ -0,0 +1,313 @@
/**
 * Telemetry management with cardinality budgets
 */

import { TelemetryMetrics, TelemetryLogs } from './types';
import { hashDid, redactPii } from './validation';

export class TelemetryManager {
  private metrics: Map<string, any> = new Map();
  private logLevel: 'DEBUG' | 'INFO' | 'WARN' | 'ERROR';

  constructor(logLevel: 'DEBUG' | 'INFO' | 'WARN' | 'ERROR' = 'INFO') {
    this.logLevel = logLevel;
    this.registerMetrics();
  }

  private registerMetrics(): void {
    // Counter metrics (low cardinality)
    this.metrics.set('starred_projects_poll_attempts_total',
      this.createCounter('starred_projects_poll_attempts_total', 'Total number of polling attempts'));

    this.metrics.set('starred_projects_poll_success_total',
      this.createCounter('starred_projects_poll_success_total', 'Total number of successful polls'));

    this.metrics.set('starred_projects_poll_failure_total',
      this.createCounter('starred_projects_poll_failure_total', 'Total number of failed polls'));

    this.metrics.set('starred_projects_changes_found_total',
      this.createCounter('starred_projects_changes_found_total', 'Total number of changes found'));

    this.metrics.set('starred_projects_notifications_generated_total',
      this.createCounter('starred_projects_notifications_generated_total', 'Total notifications generated'));

    this.metrics.set('starred_projects_error_total',
      this.createCounter('starred_projects_error_total', 'Total number of errors'));

    this.metrics.set('starred_projects_rate_limit_total',
      this.createCounter('starred_projects_rate_limit_total', 'Total number of rate limit hits'));

    // Histogram metrics (low cardinality)
    this.metrics.set('starred_projects_poll_duration_seconds',
      this.createHistogram('starred_projects_poll_duration_seconds', 'Polling duration in seconds',
        [0.1, 0.5, 1, 2, 5, 10, 30]));

    this.metrics.set('starred_projects_api_latency_seconds',
      this.createHistogram('starred_projects_api_latency_seconds', 'API latency in seconds',
        [0.05, 0.1, 0.25, 0.5, 1, 2, 5]));

    // Gauge metrics (low cardinality)
    this.metrics.set('starred_projects_outbox_size',
      this.createGauge('starred_projects_outbox_size', 'Current number of undelivered notifications'));

    this.metrics.set('starred_projects_outbox_backpressure_active',
      this.createGauge('starred_projects_outbox_backpressure_active', 'Backpressure active (0/1)'));

    this.metrics.set('starred_projects_api_throughput_rps',
      this.createGauge('starred_projects_api_throughput_rps', 'API throughput in requests per second'));
  }

  private createCounter(name: string, help: string): any {
    // Mock counter implementation
    return {
      name,
      help,
      type: 'counter',
      value: 0,
      inc: () => { this.metrics.get(name)!.value++; }
    };
  }

  private createHistogram(name: string, help: string, buckets: number[]): any {
    // Mock histogram implementation
    return {
      name,
      help,
      type: 'histogram',
      buckets,
      values: new Array(buckets.length + 1).fill(0),
      observe: (value: number) => {
        const metric = this.metrics.get(name)!;
        // Find bucket and increment
        for (let i = 0; i < buckets.length; i++) {
          if (value <= buckets[i]) {
            metric.values[i]++;
            return;
          }
        }
        metric.values[buckets.length]++; // +Inf bucket
      }
    };
  }

  private createGauge(name: string, help: string): any {
    // Mock gauge implementation
    return {
      name,
      help,
      type: 'gauge',
      value: 0,
      set: (value: number) => { this.metrics.get(name)!.value = value; }
    };
  }

  // Low-cardinality metric recording
  recordPollAttempt(): void {
    this.metrics.get('starred_projects_poll_attempts_total')?.inc();
  }

  recordPollSuccess(durationSeconds: number): void {
    this.metrics.get('starred_projects_poll_success_total')?.inc();
    this.metrics.get('starred_projects_poll_duration_seconds')?.observe(durationSeconds);
  }

  recordPollFailure(): void {
    this.metrics.get('starred_projects_poll_failure_total')?.inc();
  }

  recordChangesFound(count: number): void {
    for (let i = 0; i < count; i++) {
      this.metrics.get('starred_projects_changes_found_total')?.inc();
    }
  }

  recordNotificationsGenerated(count: number): void {
    for (let i = 0; i < count; i++) {
      this.metrics.get('starred_projects_notifications_generated_total')?.inc();
    }
  }

  recordError(): void {
    this.metrics.get('starred_projects_error_total')?.inc();
  }

  recordRateLimit(): void {
    this.metrics.get('starred_projects_rate_limit_total')?.inc();
  }

  recordApiLatency(latencySeconds: number): void {
    this.metrics.get('starred_projects_api_latency_seconds')?.observe(latencySeconds);
  }

  recordOutboxSize(size: number): void {
    this.metrics.get('starred_projects_outbox_size')?.set(size);
  }

  recordBackpressureActive(active: boolean): void {
    this.metrics.get('starred_projects_outbox_backpressure_active')?.set(active ? 1 : 0);
  }

  recordApiThroughput(rps: number): void {
    this.metrics.get('starred_projects_api_throughput_rps')?.set(rps);
  }

  // High-cardinality data (logs only, not metrics)
  logPollingEvent(event: TelemetryLogs): void {
    if (this.shouldLog('INFO')) {
      const redactedEvent = redactPii({
        ...event,
        activeDid: hashDid(event.activeDid) // Hash for privacy
      });

      console.log('Polling event:', redactedEvent);
    }
  }

  logError(error: Error, context?: Record<string, any>): void {
    if (this.shouldLog('ERROR')) {
      const redactedContext = context ? redactPii(context) : undefined;
      console.error('Polling error:', {
        message: error.message,
        stack: error.stack,
        context: redactedContext
      });
    }
  }

  logWarning(message: string, context?: Record<string, any>): void {
    if (this.shouldLog('WARN')) {
      const redactedContext = context ? redactPii(context) : undefined;
      console.warn('Polling warning:', { message, context: redactedContext });
    }
  }

  logDebug(message: string, context?: Record<string, any>): void {
    if (this.shouldLog('DEBUG')) {
      const redactedContext = context ? redactPii(context) : undefined;
      console.debug('Polling debug:', { message, context: redactedContext });
    }
  }

  private shouldLog(level: 'DEBUG' | 'INFO' | 'WARN' | 'ERROR'): boolean {
    const levels = { DEBUG: 0, INFO: 1, WARN: 2, ERROR: 3 };
    return levels[level] >= levels[this.logLevel];
  }

  // Get all metrics for export
  getMetrics(): TelemetryMetrics {
    const metrics: any = {};
    for (const [name, metric] of this.metrics) {
      metrics[name] = metric.value;
    }
    return metrics as TelemetryMetrics;
  }

  // Get metrics in Prometheus format
  getPrometheusMetrics(): string {
    let output = '';
    for (const [name, metric] of this.metrics) {
      output += `# HELP ${name} ${metric.help}\n`;
      output += `# TYPE ${name} ${metric.type}\n`;

      if (metric.type === 'histogram') {
        // Export histogram buckets
        for (let i = 0; i < metric.buckets.length; i++) {
          output += `${name}_bucket{le="${metric.buckets[i]}"} ${metric.values[i]}\n`;
        }
        output += `${name}_bucket{le="+Inf"} ${metric.values[metric.buckets.length]}\n`;
        output += `${name}_count ${metric.values.reduce((a: number, b: number) => a + b, 0)}\n`;
      } else {
        output += `${name} ${metric.value}\n`;
      }
    }
    return output;
  }
}

/**
 * Lint rule to prevent high-cardinality labels in metrics
 */
export function validateMetricLabels(labels: Record<string, string>): void {
  const highCardinalityPatterns = [
    /requestId/i,
    /activeDid/i,
    /jwtId/i,
    /userId/i,
    /sessionId/i,
    /traceId/i,
    /spanId/i
  ];

  for (const [key, value] of Object.entries(labels)) {
    for (const pattern of highCardinalityPatterns) {
      if (pattern.test(key)) {
        throw new Error(
          `High-cardinality label detected: ${key}. ` +
          `Use logs for request-level data, not metrics. ` +
          `Consider using a hash or removing the label.`
        );
      }
    }

    // Check for high-cardinality values
    if (value.length > 50 || /^[a-f0-9]{32,}$/.test(value)) {
      throw new Error(
        `High-cardinality value detected for label ${key}: ${value}. ` +
        `Consider using a hash or removing the label.`
      );
    }
  }
}

/**
 * Safe metric recording with validation
 */
export function recordMetricWithValidation(
  telemetry: TelemetryManager,
  metricName: string,
  value: number,
  labels?: Record<string, string>
): void {
  if (labels) {
    validateMetricLabels(labels);
  }

  // Record metric based on type
  switch (metricName) {
    case 'starred_projects_poll_attempts_total':
      telemetry.recordPollAttempt();
      break;
    case 'starred_projects_poll_success_total':
      telemetry.recordPollSuccess(value);
      break;
    case 'starred_projects_poll_failure_total':
      telemetry.recordPollFailure();
      break;
    case 'starred_projects_changes_found_total':
      telemetry.recordChangesFound(value);
      break;
    case 'starred_projects_notifications_generated_total':
      telemetry.recordNotificationsGenerated(value);
      break;
    case 'starred_projects_error_total':
      telemetry.recordError();
      break;
    case 'starred_projects_rate_limit_total':
      telemetry.recordRateLimit();
      break;
    case 'starred_projects_api_latency_seconds':
      telemetry.recordApiLatency(value);
      break;
    case 'starred_projects_outbox_size':
      telemetry.recordOutboxSize(value);
      break;
    case 'starred_projects_outbox_backpressure_active':
      telemetry.recordBackpressureActive(value > 0);
      break;
    case 'starred_projects_api_throughput_rps':
      telemetry.recordApiThroughput(value);
      break;
    default:
      throw new Error(`Unknown metric: ${metricName}`);
  }
}
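Usage sketch for the cardinality guard above (label names and values are illustrative): a label with a bounded value set passes, while a request-level identifier is rejected before it can inflate metric cardinality.

const telemetry = new TelemetryManager('INFO');

recordMetricWithValidation(telemetry, 'starred_projects_api_latency_seconds', 0.42, {
  platform: 'android' // bounded value set: allowed
});

try {
  recordMetricWithValidation(telemetry, 'starred_projects_error_total', 1, {
    requestId: 'req-1234' // matches a high-cardinality pattern: throws
  });
} catch (err) {
  telemetry.logWarning('Dropped metric with high-cardinality label', { reason: String(err) });
}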
@ -0,0 +1,234 @@
/**
 * Core TypeScript interfaces for polling system
 */

import { z } from 'zod';

// Core polling interfaces
export interface GenericPollingRequest<TRequest, TResponse> {
  // Request configuration
  endpoint: string;
  method: 'GET' | 'POST' | 'PUT' | 'DELETE';
  headers?: Record<string, string>;
  body?: TRequest;

  // Idempotency (required for POST requests)
  idempotencyKey?: string; // Auto-generated if not provided

  // Response handling
  responseSchema: ResponseSchema<TResponse>;
  transformResponse?: (rawResponse: any) => TResponse;

  // Error handling
  retryConfig?: RetryConfiguration;
  timeoutMs?: number;

  // Authentication
  authConfig?: AuthenticationConfig;
}

export interface ResponseSchema<T> {
  // Schema validation
  validate: (data: any) => data is T;
  // Error transformation
  transformError?: (error: any) => PollingError;
}

export interface PollingResult<T> {
  success: boolean;
  data?: T;
  error?: PollingError;
  metadata: {
    requestId: string;
    timestamp: string;
    duration: number;
    retryCount: number;
  };
}

export interface PollingError {
  code: string;
  message: string;
  details?: any;
  retryable: boolean;
  retryAfter?: number;
}

// Backoff policy
export interface BackoffPolicy {
  // Base configuration
  maxAttempts: number;
  baseDelayMs: number;
  maxDelayMs: number;

  // Strategy selection
  strategy: 'exponential' | 'linear' | 'fixed';

  // Jitter configuration
  jitterEnabled: boolean;
  jitterFactor: number; // 0.0 to 1.0 (e.g., 0.25 = ±25% jitter)

  // Retry-After integration
  respectRetryAfter: boolean;
  retryAfterMaxMs?: number; // Cap Retry-After values
}

export interface RetryConfiguration {
  maxAttempts: number;
  backoffStrategy: 'exponential' | 'linear' | 'fixed';
  baseDelayMs: number;
}

// Authentication
export interface AuthenticationConfig {
  type: 'jwt' | 'bearer' | 'api_key';
  token?: string;
  refreshToken?: string;
  expiresAt?: number;
}

// Scheduling
export interface PollingScheduleConfig<TRequest, TResponse> {
  request: GenericPollingRequest<TRequest, TResponse>;
  schedule: {
    cronExpression: string;
    timezone: string;
    maxConcurrentPolls: number;
  };
  notificationConfig?: NotificationConfig;
  stateConfig: {
    watermarkKey: string;
    storageAdapter: StorageAdapter;
  };
}

export interface NotificationConfig {
  enabled: boolean;
  templates: NotificationTemplates;
  groupingRules: NotificationGroupingRules;
}

export interface NotificationTemplates {
  singleUpdate: string;
  multipleUpdates: string;
}

export interface NotificationGroupingRules {
  maxGroupSize: number;
  timeWindowMinutes: number;
}

// Storage
export interface StorageAdapter {
  get(key: string): Promise<any>;
  set(key: string, value: any): Promise<void>;
  delete(key: string): Promise<void>;
  exists(key: string): Promise<boolean>;
}

// Context
export interface PollingContext {
  activeDid: string;
  apiServer: string;
  storageAdapter: StorageAdapter;
  authManager: AuthenticationManager;
}

export interface AuthenticationManager {
  getCurrentToken(): Promise<string | null>;
  refreshToken(): Promise<string>;
  validateToken(token: string): Promise<boolean>;
}

// Starred Projects specific types
export interface StarredProjectsRequest {
  planIds: string[];
  afterId?: string;
  beforeId?: string;
  limit?: number;
}

export interface StarredProjectsResponse {
  data: PlanSummaryAndPreviousClaim[];
  hitLimit: boolean;
  pagination: {
    hasMore: boolean;
    nextAfterId: string | null;
  };
}

export interface PlanSummaryAndPreviousClaim {
  planSummary: PlanSummary;
  previousClaim?: PreviousClaim;
}

export interface PlanSummary {
  jwtId: string;
  handleId: string;
  name: string;
  description: string;
  issuerDid: string;
  agentDid: string;
  startTime: string;
  endTime: string;
  locLat?: number;
  locLon?: number;
  url?: string;
  version: string;
}

export interface PreviousClaim {
  jwtId: string;
  claimType: string;
  claimData: Record<string, any>;
  metadata: {
    createdAt: string;
    updatedAt: string;
  };
}

// Telemetry
export interface TelemetryMetrics {
  // Low-cardinality metrics (Prometheus counters/gauges)
  'starred_projects_poll_attempts_total': number;
  'starred_projects_poll_success_total': number;
  'starred_projects_poll_failure_total': number;
  'starred_projects_poll_duration_seconds': number;
  'starred_projects_changes_found_total': number;
  'starred_projects_notifications_generated_total': number;
  'starred_projects_error_total': number;
  'starred_projects_rate_limit_total': number;
  'starred_projects_api_latency_seconds': number;
  'starred_projects_api_throughput_rps': number;
  'starred_projects_outbox_size': number;
  'starred_projects_outbox_backpressure_active': number;
}

export interface TelemetryLogs {
  // Request-level details (logs only)
  requestId: string;
  activeDid: string;
  projectCount: number;
  changeCount: number;
  duration: number;
  error?: string;
  metadata?: Record<string, any>;
}

// Clock sync
export interface ClockSyncConfig {
  serverTimeSource: 'ntp' | 'system' | 'atomic';
  ntpServers: string[];
  maxClockSkewSeconds: number;
  skewCheckIntervalMs: number;
  jwtClockSkewTolerance: number;
  jwtMaxAge: number;
}

// Storage pressure
export interface OutboxPressureConfig {
  maxUndelivered: number;
  cleanupIntervalMs: number;
  backpressureThreshold: number;
  evictionPolicy: 'fifo' | 'lifo' | 'priority';
}
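A minimal in-memory StorageAdapter sketch for tests or prototyping against the interface above; a production host app would back this with SQLite or platform preferences rather than the Map used here.

class InMemoryStorageAdapter implements StorageAdapter {
  private store = new Map<string, any>();

  async get(key: string): Promise<any> { return this.store.get(key); }
  async set(key: string, value: any): Promise<void> { this.store.set(key, value); }
  async delete(key: string): Promise<void> { this.store.delete(key); }
  async exists(key: string): Promise<boolean> { return this.store.has(key); }
}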
@ -0,0 +1,157 @@
/**
 * Validation utilities and helpers
 */

import { z } from 'zod';
import { JWT_ID_PATTERN, ERROR_CODES } from './constants';
import {
  StarredProjectsResponseSchema,
  DeepLinkParamsSchema,
  ErrorResponseSchema,
  RateLimitResponseSchema
} from './schemas';

/**
 * Validate JWT ID format
 */
export function validateJwtId(jwtId: string): boolean {
  return JWT_ID_PATTERN.test(jwtId);
}

/**
 * Compare JWT IDs lexicographically
 */
export function compareJwtIds(a: string, b: string): number {
  if (!validateJwtId(a) || !validateJwtId(b)) {
    throw new Error('Invalid JWT ID format');
  }
  return a.localeCompare(b);
}

/**
 * Extract timestamp from JWT ID
 */
export function extractJwtTimestamp(jwtId: string): number {
  const match = jwtId.match(JWT_ID_PATTERN);
  if (!match || !match.groups?.ts) {
    throw new Error('Invalid JWT ID format');
  }
  return parseInt(match.groups.ts, 10);
}

/**
 * Validate starred projects response
 */
export function validateStarredProjectsResponse(data: any): boolean {
  return StarredProjectsResponseSchema.safeParse(data).success;
}

/**
 * Validate deep link parameters
 */
export function validateDeepLinkParams(params: any): boolean {
  return DeepLinkParamsSchema.safeParse(params).success;
}

/**
 * Validate error response
 */
export function validateErrorResponse(data: any): boolean {
  return ErrorResponseSchema.safeParse(data).success;
}

/**
 * Validate rate limit response
 */
export function validateRateLimitResponse(data: any): boolean {
  return RateLimitResponseSchema.safeParse(data).success;
}

/**
 * Create response schema validator
 */
export function createResponseValidator<T>(schema: z.ZodSchema<T>) {
  return {
    validate: (data: any): data is T => schema.safeParse(data).success,
    transformError: (error: any) => ({
      code: ERROR_CODES.VALIDATION_ERROR,
      message: error.message || 'Validation failed',
      retryable: false
    })
  };
}

/**
 * Safe parse with error transformation
 */
export function safeParseWithError<T>(
  schema: z.ZodSchema<T>,
  data: any
): { success: true; data: T } | { success: false; error: string } {
  const result = schema.safeParse(data);

  if (result.success) {
    return { success: true, data: result.data };
  }

  return {
    success: false,
    error: result.error.errors.map(e => `${e.path.join('.')}: ${e.message}`).join(', ')
  };
}

/**
 * Validate idempotency key format
 */
export function validateIdempotencyKey(key: string): boolean {
  // UUID v4 format
  const uuidV4Pattern = /^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i;
  return uuidV4Pattern.test(key);
}

/**
 * Generate idempotency key
 */
export function generateIdempotencyKey(): string {
  return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
    const r = Math.random() * 16 | 0;
    const v = c === 'x' ? r : (r & 0x3 | 0x8);
    return v.toString(16);
  });
}

/**
 * Hash DID for privacy in logs
 */
export function hashDid(did: string): string {
  // Simple hash for privacy (use crypto in production)
  const hash = did.split('').reduce((a, b) => {
    a = ((a << 5) - a) + b.charCodeAt(0);
    return a & a;
  }, 0);
  return `did:hash:${Math.abs(hash).toString(16)}`;
}

/**
 * Redact PII from logs
 */
export function redactPii(data: any): any {
  const redacted = JSON.parse(JSON.stringify(data));

  // Redact DID patterns
  if (typeof redacted === 'string') {
    return redacted.replace(/did:key:[a-zA-Z0-9]+/g, (match) => hashDid(match));
  }

  if (typeof redacted === 'object' && redacted !== null) {
    for (const key in redacted) {
      if (typeof redacted[key] === 'string') {
        redacted[key] = redacted[key].replace(/did:key:[a-zA-Z0-9]+/g, (match: string) => hashDid(match));
      } else if (typeof redacted[key] === 'object') {
        redacted[key] = redactPii(redacted[key]);
      }
    }
  }

  return redacted;
}
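A sketch of how a host app might combine these helpers with the contracts above to describe a poll. The object fills the GenericPollingRequest<TRequest, TResponse> shape; the endpoint path and plan IDs are illustrative choices made by the host app, not values defined by this package, and package imports are omitted for brevity.

const starredProjectsPoll = {
  endpoint: '/api/v2/report/starredProjects', // hypothetical path chosen by the host app
  method: 'POST' as const,
  body: { planIds: ['plan-abc'], limit: 50 },
  idempotencyKey: generateIdempotencyKey(),
  responseSchema: createResponseValidator(StarredProjectsResponseSchema),
  timeoutMs: 10000,
  retryConfig: { maxAttempts: 3, backoffStrategy: 'exponential' as const, baseDelayMs: 1000 }
};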
@ -0,0 +1,31 @@
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "commonjs",
    "lib": ["ES2020"],
    "outDir": "./dist",
    "rootDir": "./src",
    "strict": true,
    "esModuleInterop": true,
    "skipLibCheck": true,
    "forceConsistentCasingInFileNames": true,
    "declaration": true,
    "declarationMap": true,
    "sourceMap": true,
    "removeComments": false,
    "noImplicitAny": true,
    "noImplicitReturns": true,
    "noImplicitThis": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "exactOptionalPropertyTypes": true
  },
  "include": [
    "src/**/*"
  ],
  "exclude": [
    "node_modules",
    "dist",
    "**/__tests__/**"
  ]
}