- Revert ProfileService from broken /api/partner/userProfile endpoint to working /api/partner/userProfileForIssuer/${did}
- Fix location data display by restoring single profile object response parsing
- Remove complex array handling logic that was unnecessary for current user profiles
- Restore original working functionality that was broken by recent refactoring
Problem: The recent creation of ProfileService changed the endpoint from the working userProfileForIssuer/${did}
to the broken userProfile list endpoint, which prevented location data from displaying properly.
Solution: Revert to the original working endpoint and response-parsing logic, which returns a
single profile object with location data instead of an array of all profiles.
Files changed:
- src/services/ProfileService.ts: Restore working endpoint and simplify response parsing
Testing: Profile loading now works correctly for both existing and new profiles;
location data is properly extracted and displayed, and maps render correctly.
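
Illustrative sketch of the restored call shape (a minimal sketch, assuming an axios client and a simple { data: profile } response envelope; every name except the endpoint path is hypothetical and not part of this change):

// Sketch only — the axios instance, partnerApiServer value, and UserProfile
// fields are assumptions for illustration.
import { AxiosInstance } from "axios";

interface UserProfile {
  issuerDid: string;
  locLat?: number;
  locLon?: number;
  description?: string;
}

async function fetchProfileForIssuer(
  axios: AxiosInstance,
  partnerApiServer: string,
  did: string,
): Promise<UserProfile | null> {
  // Reverted call: the per-issuer endpoint returns a single profile object,
  // so the array handling needed for the list endpoint is unnecessary here.
  const response = await axios.get(
    `${partnerApiServer}/api/partner/userProfileForIssuer/${did}`,
  );
  return (response.data?.data as UserProfile) ?? null;
}
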
483 lines, 11 KiB, TypeScript

/**
 * Performance Optimizer
 *
 * Provides utilities for optimizing API calls, database queries, and component
 * rendering to improve TimeSafari application performance.
 *
 * @author Matthew Raymer
 * @since 2025-08-25
 */

import { logger } from "./logger";

/**
 * Batch operation configuration
 */
export interface BatchConfig {
  maxBatchSize: number;
  maxWaitTime: number;
  retryAttempts: number;
  retryDelay: number;
}

/**
 * Default batch configuration
 */
export const DEFAULT_BATCH_CONFIG: BatchConfig = {
  maxBatchSize: 10,
  maxWaitTime: 100, // milliseconds
  retryAttempts: 3,
  retryDelay: 1000, // milliseconds
};

/**
 * Batched operation item
 */
export interface BatchItem<T, R> {
  id: string;
  data: T;
  resolve: (value: R) => void;
  reject: (error: Error) => void;
  timestamp: number;
}

/**
 * Batch processor for API operations
 *
 * Groups multiple similar operations into batches to reduce
 * the number of API calls and improve performance.
 */
export class BatchProcessor<T, R> {
  private items: BatchItem<T, R>[] = [];
  private timer: NodeJS.Timeout | null = null;
  private processing = false;
  private config: BatchConfig;

  constructor(
    private batchHandler: (items: T[]) => Promise<R[]>,
    private itemIdExtractor: (item: T) => string,
    config: Partial<BatchConfig> = {},
  ) {
    this.config = { ...DEFAULT_BATCH_CONFIG, ...config };
  }

  /**
   * Add an item to the batch
   *
   * @param data - Data to process
   * @returns Promise that resolves when the item is processed
   */
  async add(data: T): Promise<R> {
    return new Promise((resolve, reject) => {
      const item: BatchItem<T, R> = {
        id: this.itemIdExtractor(data),
        data,
        resolve,
        reject,
        timestamp: Date.now(),
      };

      this.items.push(item);

      // Start timer if this is the first item
      if (this.items.length === 1) {
        this.startTimer();
      }

      // Process immediately if batch is full
      if (this.items.length >= this.config.maxBatchSize) {
        this.processBatch();
      }
    });
  }

  /**
   * Start the batch timer
   */
  private startTimer(): void {
    if (this.timer) {
      clearTimeout(this.timer);
    }

    this.timer = setTimeout(() => {
      this.processBatch();
    }, this.config.maxWaitTime);
  }

  /**
   * Process the current batch
   */
  private async processBatch(): Promise<void> {
    if (this.processing || this.items.length === 0) {
      return;
    }

    this.processing = true;

    // Clear timer
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    // Get current batch
    const currentItems = [...this.items];
    this.items = [];

    try {
      logger.debug("[BatchProcessor] 🔄 Processing batch:", {
        batchSize: currentItems.length,
        itemIds: currentItems.map((item) => item.id),
        timestamp: new Date().toISOString(),
      });

      // Process batch
      const results = await this.batchHandler(
        currentItems.map((item) => item.data),
      );

      // Map results back to items
      const resultMap = new Map<string, R>();
      results.forEach((result, index) => {
        const item = currentItems[index];
        if (item) {
          resultMap.set(item.id, result);
        }
      });

      // Resolve promises
      currentItems.forEach((item) => {
        const result = resultMap.get(item.id);
        if (result !== undefined) {
          item.resolve(result);
        } else {
          item.reject(new Error(`No result found for item ${item.id}`));
        }
      });

      logger.debug("[BatchProcessor] ✅ Batch processed successfully:", {
        batchSize: currentItems.length,
        resultsCount: results.length,
        timestamp: new Date().toISOString(),
      });
    } catch (error) {
      logger.error("[BatchProcessor] ❌ Batch processing failed:", {
        batchSize: currentItems.length,
        error: error instanceof Error ? error.message : String(error),
        timestamp: new Date().toISOString(),
      });

      // Reject all items in the batch
      currentItems.forEach((item) => {
        item.reject(error instanceof Error ? error : new Error(String(error)));
      });
    } finally {
      this.processing = false;

      // Start timer for remaining items if any
      if (this.items.length > 0) {
        this.startTimer();
      }
    }
  }

  /**
   * Get current batch status
   */
  getStatus(): {
    pendingItems: number;
    isProcessing: boolean;
    hasTimer: boolean;
  } {
    return {
      pendingItems: this.items.length,
      isProcessing: this.processing,
      hasTimer: this.timer !== null,
    };
  }

  /**
   * Clear all pending items
   */
  clear(): void {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    // Reject all pending items
    this.items.forEach((item) => {
      item.reject(new Error("Batch processor cleared"));
    });

    this.items = [];
    this.processing = false;
  }
}
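
// Illustrative usage (not part of the original module): coalesce individual
// lookups into one bulk call. `fetchRecordsByIds` is a hypothetical bulk-fetch
// helper that returns results index-aligned with its input ids.
//
//   const recordBatcher = new BatchProcessor<string, Record<string, unknown>>(
//     (ids) => fetchRecordsByIds(ids),
//     (id) => id,
//     { maxBatchSize: 25, maxWaitTime: 50 },
//   );
//   // Inside an async function:
//   const record = await recordBatcher.add("record-123");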

/**
 * Database query optimizer
 *
 * Provides utilities for optimizing database queries and reducing
 * the number of database operations.
 */
export class DatabaseOptimizer {
  /**
   * Batch multiple SELECT queries into a single query
   *
   * @param baseQuery - Base SELECT query
   * @param ids - Array of IDs to query
   * @param idColumn - Name of the ID column
   * @returns Optimized query string
   */
  static batchSelectQuery(
    baseQuery: string,
    ids: (string | number)[],
    idColumn: string,
  ): string {
    if (ids.length === 0) {
      return baseQuery;
    }

    if (ids.length === 1) {
      return `${baseQuery} WHERE ${idColumn} = ?`;
    }

    const placeholders = ids.map(() => "?").join(", ");
    return `${baseQuery} WHERE ${idColumn} IN (${placeholders})`;
  }

  /**
   * Create a query plan for multiple operations
   *
   * @param operations - Array of database operations
   * @returns Optimized query plan
   */
  static createQueryPlan(
    operations: Array<{
      type: "SELECT" | "INSERT" | "UPDATE" | "DELETE";
      table: string;
      priority: number;
    }>,
  ): Array<{
    type: "SELECT" | "INSERT" | "UPDATE" | "DELETE";
    table: string;
    priority: number;
    batchable: boolean;
  }> {
    return operations
      .map((op) => ({
        ...op,
        batchable: op.type === "SELECT" || op.type === "INSERT",
      }))
      .sort((a, b) => {
        // Sort by priority first, then by type
        if (a.priority !== b.priority) {
          return b.priority - a.priority;
        }

        // SELECT operations first, then INSERT, UPDATE, DELETE
        const typeOrder = { SELECT: 0, INSERT: 1, UPDATE: 2, DELETE: 3 };
        return typeOrder[a.type] - typeOrder[b.type];
      });
  }
}
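
// Illustrative usage (not part of the original module): build one IN (...)
// query instead of N single-row SELECTs. Table and column names are
// hypothetical.
//
//   const sql = DatabaseOptimizer.batchSelectQuery(
//     "SELECT * FROM contacts",
//     ["did:1", "did:2", "did:3"],
//     "did",
//   );
//   // => "SELECT * FROM contacts WHERE did IN (?, ?, ?)"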

/**
 * Component rendering optimizer
 *
 * Provides utilities for optimizing Vue component rendering
 * and reducing unnecessary re-renders.
 */
export class ComponentOptimizer {
  /**
   * Debounce function calls to prevent excessive execution
   *
   * @param func - Function to debounce
   * @param wait - Wait time in milliseconds
   * @returns Debounced function
   */
  static debounce<T extends (...args: unknown[]) => unknown>(
    func: T,
    wait: number,
  ): (...args: Parameters<T>) => void {
    let timeout: NodeJS.Timeout | null = null;

    return (...args: Parameters<T>) => {
      if (timeout) {
        clearTimeout(timeout);
      }

      timeout = setTimeout(() => {
        func(...args);
      }, wait);
    };
  }

  /**
   * Throttle function calls to limit execution frequency
   *
   * @param func - Function to throttle
   * @param limit - Time limit in milliseconds
   * @returns Throttled function
   */
  static throttle<T extends (...args: unknown[]) => unknown>(
    func: T,
    limit: number,
  ): (...args: Parameters<T>) => void {
    let inThrottle = false;

    return (...args: Parameters<T>) => {
      if (!inThrottle) {
        func(...args);
        inThrottle = true;
        setTimeout(() => {
          inThrottle = false;
        }, limit);
      }
    };
  }

  /**
   * Memoize function results to avoid redundant computation
   *
   * @param func - Function to memoize
   * @param keyGenerator - Function to generate cache keys
   * @returns Memoized function
   */
  static memoize<T extends (...args: unknown[]) => unknown, K>(
    func: T,
    keyGenerator: (...args: Parameters<T>) => K,
  ): T {
    const cache = new Map<K, unknown>();

    return ((...args: Parameters<T>) => {
      const key = keyGenerator(...args);

      if (cache.has(key)) {
        return cache.get(key);
      }

      const result = func(...args);
      cache.set(key, result);
      return result;
    }) as T;
  }
}
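
// Illustrative usage (not part of the original module): wrap a search handler
// so rapid keystrokes trigger at most one call per 300 ms. `runSearch` is a
// hypothetical component method.
//
//   const debouncedSearch = ComponentOptimizer.debounce(
//     (...args: unknown[]) => runSearch(String(args[0])),
//     300,
//   );
//   debouncedSearch("garden");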

/**
 * Performance monitoring utility
 *
 * Tracks and reports performance metrics for optimization analysis.
 */
export class PerformanceMonitor {
  private static instance: PerformanceMonitor;
  private metrics = new Map<
    string,
    Array<{ timestamp: number; duration: number }>
  >();

  private constructor() {}

  /**
   * Get singleton instance
   */
  static getInstance(): PerformanceMonitor {
    if (!PerformanceMonitor.instance) {
      PerformanceMonitor.instance = new PerformanceMonitor();
    }
    return PerformanceMonitor.instance;
  }

  /**
   * Start timing an operation
   *
   * @param operationName - Name of the operation
   * @returns Function to call when operation completes
   */
  startTiming(operationName: string): () => void {
    const startTime = performance.now();

    return () => {
      const duration = performance.now() - startTime;
      this.recordMetric(operationName, duration);
    };
  }

  /**
   * Record a performance metric
   *
   * @param operationName - Name of the operation
   * @param duration - Duration in milliseconds
   */
  private recordMetric(operationName: string, duration: number): void {
    if (!this.metrics.has(operationName)) {
      this.metrics.set(operationName, []);
    }

    const operationMetrics = this.metrics.get(operationName)!;
    operationMetrics.push({
      timestamp: Date.now(),
      duration,
    });

    // Keep only last 100 metrics per operation
    if (operationMetrics.length > 100) {
      operationMetrics.splice(0, operationMetrics.length - 100);
    }
  }

  /**
   * Get performance summary for an operation
   *
   * @param operationName - Name of the operation
   * @returns Performance statistics
   */
  getPerformanceSummary(operationName: string): {
    count: number;
    average: number;
    min: number;
    max: number;
    recentAverage: number;
  } | null {
    const metrics = this.metrics.get(operationName);
    if (!metrics || metrics.length === 0) {
      return null;
    }

    const durations = metrics.map((m) => m.duration);
    const recentMetrics = metrics.slice(-10); // Last 10 metrics

    return {
      count: metrics.length,
      average: durations.reduce((a, b) => a + b, 0) / durations.length,
      min: Math.min(...durations),
      max: Math.max(...durations),
      recentAverage:
        recentMetrics.reduce((a, b) => a + b.duration, 0) /
        recentMetrics.length,
    };
  }

  /**
   * Get all performance metrics
   */
  getAllMetrics(): Map<string, Array<{ timestamp: number; duration: number }>> {
    return new Map(this.metrics);
  }

  /**
   * Clear all performance metrics
   */
  clearMetrics(): void {
    this.metrics.clear();
  }
}

/**
 * Convenience function to get the performance monitor
 */
export const getPerformanceMonitor = (): PerformanceMonitor => {
  return PerformanceMonitor.getInstance();
};
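
// Illustrative usage (not part of the original module): time an operation and
// inspect its rolling statistics. The operation name is arbitrary.
//
//   const stopTiming = getPerformanceMonitor().startTiming("profile-load");
//   // ... perform the work being measured ...
//   stopTiming();
//   const summary = getPerformanceMonitor().getPerformanceSummary("profile-load");
//   // => { count, average, min, max, recentAverage }, or null if never recorded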