/**
 * SQLite Migration System for TimeSafari
 *
 * A robust migration system for managing database schema changes in the TimeSafari
 * application. Provides versioned migrations with transaction safety, rollback
 * support, and detailed logging.
 *
 * Core Features:
 * - Versioned migrations with tracking
 * - Atomic transactions per migration
 * - Comprehensive error handling
 * - SQL parsing and validation
 * - State verification and recovery
 * - Detailed logging and debugging
 *
 * Migration Process:
 * 1. Version tracking via schema_version table
 * 2. Transaction-based execution
 * 3. Automatic rollback on failure
 * 4. State verification before/after
 * 5. Detailed error logging
 *
 * SQL Processing:
 * - Handles single-line (--) and multi-line comments
 * - Validates SQL statements
 * - Proper statement separation
 * - SQL injection prevention
 * - Parameter binding safety
 *
 * Transaction Management:
 * - Single transaction per migration
 * - Automatic rollback on failure
 * - State verification
 * - Deadlock prevention
 * - Connection isolation
 *
 * Error Handling:
 * - Detailed error reporting
 * - SQL validation
 * - Transaction state tracking
 * - Recovery mechanisms
 * - Debug logging
 *
 * Security:
 * - SQL injection prevention
 * - Parameter validation
 * - Transaction isolation
 * - State verification
 * - Error sanitization
 *
 * Performance:
 * - Efficient SQL parsing
 * - Optimized transactions
 * - Minimal locking
 * - Connection pooling
 * - Statement reuse
 *
 * @author Matthew Raymer <matthew.raymer@anomalistdesign.com>
 * @version 1.0.0
 * @since 2025-06-01
 */

import { CapacitorSQLite } from '@capacitor-community/sqlite/electron/dist/plugin.js';
import { logger } from './logger';
import * as crypto from 'crypto';

// Constants
const DEFAULT_ENDORSER_API_SERVER = 'https://api.timesafari.app';

// Utility function to delay execution
const delay = (ms: number): Promise<void> => {
  return new Promise(resolve => setTimeout(resolve, ms));
};

// Utility function to convert Buffer to base64
const bufferToBase64 = (buffer: Buffer): string => {
  return buffer.toString('base64');
};

// Generate a random secret for the secret table
// Note: This is a temporary solution until better secure storage is implemented
const generateSecret = (): string => {
  const randomBytes = crypto.randomBytes(32);
  return bufferToBase64(randomBytes);
};

// Constants for initial data
const INITIAL_SECRET = generateSecret();

// Types for migration system
interface Migration {
  version: number;
  name: string;
  description: string;
  sql: string;
  rollback?: string;
}

interface MigrationResult {
  success: boolean;
  version: number;
  name: string;
  error?: Error;
  state?: {
    plugin: {
      isAvailable: boolean;
      lastChecked: Date;
    };
    transaction: {
      isActive: boolean;
      lastVerified: Date;
    };
  };
}

interface MigrationState {
  currentVersion: number;
  lastMigration: string;
  lastApplied: Date;
  isDirty: boolean;
}

// Constants
const MIGRATIONS_TABLE = `
  CREATE TABLE IF NOT EXISTS schema_version (
    version INTEGER NOT NULL,
    name TEXT NOT NULL,
    description TEXT,
    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    checksum TEXT,
    is_dirty BOOLEAN DEFAULT FALSE,
    error_message TEXT,
    error_stack TEXT,
    error_context TEXT,
    PRIMARY KEY (version)
  );`;

// Constants for retry logic
const MAX_RETRY_ATTEMPTS = 3;
const RETRY_DELAY_MS = 1000;
const LOCK_TIMEOUT_MS = 10000; // 10 seconds total timeout for locks

// SQL Parsing Utilities
interface ParsedSQL {
  statements: string[];
  parameters: any[][];
  errors: string[];
  warnings: string[];
}

/**
 * Removes SQL comments from a string while preserving statement structure
 * @param sql The SQL string to process
 * @returns SQL with comments removed
 */
const removeSQLComments = (sql: string): string => {
  let result = '';
  let inSingleLineComment = false;
  let inMultiLineComment = false;
  let inString = false;
  let stringChar = '';
  let i = 0;

  while (i < sql.length) {
    const char = sql[i];
    const nextChar = sql[i + 1] || '';

    // Handle string literals
    if ((char === "'" || char === '"') && !inSingleLineComment && !inMultiLineComment) {
      if (!inString) {
        inString = true;
        stringChar = char;
      } else if (char === stringChar) {
        inString = false;
      }
      result += char;
      i++;
      continue;
    }

    // Handle single-line comments
    if (char === '-' && nextChar === '-' && !inString && !inMultiLineComment) {
      inSingleLineComment = true;
      i += 2;
      continue;
    }

    // Handle multi-line comments
    if (char === '/' && nextChar === '*' && !inString && !inSingleLineComment) {
      inMultiLineComment = true;
      i += 2;
      continue;
    }

    if (char === '*' && nextChar === '/' && inMultiLineComment) {
      inMultiLineComment = false;
      i += 2;
      continue;
    }

    // Handle newlines in single-line comments
    if (char === '\n' && inSingleLineComment) {
      inSingleLineComment = false;
      result += '\n';
      i++;
      continue;
    }

    // Add character if not in any comment
    if (!inSingleLineComment && !inMultiLineComment) {
      result += char;
    }

    i++;
  }

  return result;
};
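
// Illustrative behavior of removeSQLComments (not executed; the results shown
// are what the character scanner above produces for these hypothetical inputs):
//
//   removeSQLComments("SELECT 1; -- trailing note\nSELECT 2;")
//     // => "SELECT 1; \nSELECT 2;"   (single-line comment dropped, newline kept)
//   removeSQLComments("SELECT '-- not a comment' /* block */ FROM t;")
//     // => "SELECT '-- not a comment'  FROM t;"   (string literal preserved)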

/**
 * Formats a SQL statement for consistent processing
 * @param sql The SQL statement to format
 * @returns Formatted SQL statement
 */
const formatSQLStatement = (sql: string): string => {
  return sql
    .trim()
    .replace(/\s+/g, ' ') // Collapse runs of whitespace into a single space
    .replace(/\s*;\s*$/, ';') // Normalize whitespace around a trailing semicolon
    .replace(/^\s*;\s*/, ''); // Remove a leading semicolon
};
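
// Illustrative example (not executed): formatSQLStatement flattens a statement
// onto one line, e.g.
//   formatSQLStatement("  CREATE TABLE t (\n    id INTEGER\n  );  ")
//     // => "CREATE TABLE t ( id INTEGER );"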

/**
 * Validates a SQL statement for common issues
 * @param statement The SQL statement to validate
 * @returns Array of validation errors, empty if valid
 */
const validateSQLStatement = (statement: string): string[] => {
  const errors: string[] = [];
  const trimmed = statement.trim().toLowerCase();

  // Check for empty statements
  if (!trimmed) {
    errors.push('Empty SQL statement');
    return errors;
  }

  // Check for valid statement types
  const validStarts = [
    'create', 'alter', 'drop', 'insert', 'update', 'delete',
    'select', 'pragma', 'begin', 'commit', 'rollback'
  ];

  const startsWithValid = validStarts.some(start => trimmed.startsWith(start));
  if (!startsWithValid) {
    errors.push(`Invalid SQL statement type: ${trimmed.split(' ')[0]}`);
  }

  // Check for balanced parentheses
  let parenCount = 0;
  let inString = false;
  let stringChar = '';

  for (let i = 0; i < statement.length; i++) {
    const char = statement[i];

    if ((char === "'" || char === '"') && !inString) {
      inString = true;
      stringChar = char;
    } else if (char === stringChar && inString) {
      inString = false;
    }

    if (!inString) {
      if (char === '(') parenCount++;
      if (char === ')') parenCount--;
    }
  }

  if (parenCount !== 0) {
    errors.push('Unbalanced parentheses in SQL statement');
  }

  return errors;
};
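
// Illustrative examples (not executed): validateSQLStatement returns an empty
// array for statements it accepts and one message per problem otherwise, e.g.
//   validateSQLStatement("CREATE TABLE t (id INTEGER)")   // => []
//   validateSQLStatement("GRANT ALL ON t TO someRole")    // => ['Invalid SQL statement type: grant']
//   validateSQLStatement("CREATE TABLE t (id INTEGER")    // => ['Unbalanced parentheses in SQL statement']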

/**
 * Parses SQL into individual statements with validation
 * @param sql The SQL to parse
 * @returns ParsedSQL object containing statements and any errors/warnings
 */
const parseSQL = (sql: string): ParsedSQL => {
  const result: ParsedSQL = {
    statements: [],
    parameters: [],
    errors: [],
    warnings: []
  };

  try {
    // Remove comments first
    const cleanSQL = removeSQLComments(sql);

    // Split on semicolons and process each statement
    const rawStatements = cleanSQL
      .split(';')
      .map(s => formatSQLStatement(s))
      .filter(s => s.length > 0);

    // Process each statement
    for (const statement of rawStatements) {
      const errors = validateSQLStatement(statement);
      if (errors.length > 0) {
        result.errors.push(...errors.map(e => `${e} in statement: ${statement.substring(0, 50)}...`));
      } else {
        // Extract any parameterized values (e.g., from INSERT statements)
        const { processedStatement, params } = extractParameters(statement);
        result.statements.push(processedStatement);
        result.parameters.push(params);
      }
    }

    // Add warnings for potential issues
    if (rawStatements.length === 0) {
      result.warnings.push('No SQL statements found after parsing');
    }

    // Log parsing results
    // logger.debug('SQL parsing results:', {
    //   statementCount: result.statements.length,
    //   errorCount: result.errors.length,
    //   warningCount: result.warnings.length,
    //   statements: result.statements.map(s => s.substring(0, 50) + '...'),
    //   errors: result.errors,
    //   warnings: result.warnings
    // });

  } catch (error) {
    result.errors.push(`SQL parsing failed: ${error instanceof Error ? error.message : String(error)}`);
    // logger.error('SQL parsing error:', error);
  }

  return result;
};
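
// Illustrative example (not executed): parseSQL strips comments, splits on
// semicolons, validates each piece, and collects per-statement parameters.
// Parameter extraction applies because the second statement uses the plain
// `INSERT INTO ... VALUES` form handled by extractParameters below.
//
//   parseSQL("-- setup\nCREATE TABLE a (id INTEGER);\nINSERT INTO a VALUES (1, 'x');")
//   // => {
//   //      statements: ['CREATE TABLE a (id INTEGER)', 'INSERT INTO a VALUES (?, ?)'],
//   //      parameters: [[], [1, 'x']],
//   //      errors: [], warnings: []
//   //    }
// Note that the trailing semicolons are consumed by the split.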

// Helper to extract parameters from SQL statements
const extractParameters = (statement: string): { processedStatement: string; params: any[] } => {
  const params: any[] = [];
  let processedStatement = statement;

  // Handle INSERT statements with VALUES (case-insensitive, matching the regex below)
  if (statement.toLowerCase().includes('insert into') && statement.toLowerCase().includes('values')) {
    const valuesMatch = statement.match(/values\s*\((.*?)\)/i);
    if (valuesMatch) {
      const values = valuesMatch[1].split(',').map(v => v.trim());
      const placeholders = values.map(() => '?').join(', ');
      processedStatement = statement.replace(/values\s*\(.*?\)/i, `VALUES (${placeholders})`);

      // Extract actual values
      values.forEach(v => {
        if (v.startsWith("'") && v.endsWith("'")) {
          params.push(v.slice(1, -1)); // Remove quotes
        } else if (!isNaN(Number(v))) {
          params.push(Number(v));
        } else {
          params.push(v);
        }
      });
    }
  }

  return { processedStatement, params };
};
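
// Illustrative example (not executed; the DID and name are made-up values):
// extractParameters rewrites simple single-row INSERT ... VALUES statements
// into placeholder form.
//
//   extractParameters("INSERT INTO contacts (did, name) VALUES ('did:example:123', 'Alice')")
//   // => { processedStatement: "INSERT INTO contacts (did, name) VALUES (?, ?)",
//   //      params: ['did:example:123', 'Alice'] }
//
// Quoted values containing commas are not handled by this simple comma split,
// so such statements should keep explicit literals.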

// Add version conflict detection
const validateMigrationVersions = (migrations: Migration[]): void => {
  const versions = new Set<number>();
  const duplicates = new Set<number>();

  migrations.forEach(migration => {
    if (versions.has(migration.version)) {
      duplicates.add(migration.version);
    }
    versions.add(migration.version);
  });

  if (duplicates.size > 0) {
    throw new Error(`Duplicate migration versions found: ${Array.from(duplicates).join(', ')}`);
  }

  // Verify versions are sequential
  const sortedVersions = Array.from(versions).sort((a, b) => a - b);
  for (let i = 0; i < sortedVersions.length; i++) {
    if (sortedVersions[i] !== i + 1) {
      throw new Error(`Migration versions must be sequential. Found gap at version ${i + 1}`);
    }
  }
};
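
// Illustrative behavior (not executed): versions must start at 1 and increase
// by exactly one, with no duplicates.
//   validateMigrationVersions([{ version: 1, ... }, { version: 2, ... }])  // passes
//   validateMigrationVersions([{ version: 1, ... }, { version: 3, ... }])
//     // throws 'Migration versions must be sequential. Found gap at version 2'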

// Initial migration for accounts table
const INITIAL_MIGRATION: Migration = {
  version: 1,
  name: '001_initial_accounts',
  description: 'Initial schema with accounts table',
  sql: `
    /* Create accounts table with required fields */
    CREATE TABLE IF NOT EXISTS accounts (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      dateCreated TEXT NOT NULL,
      derivationPath TEXT,
      did TEXT NOT NULL,
      identityEncrBase64 TEXT, -- encrypted & base64-encoded
      mnemonicEncrBase64 TEXT, -- encrypted & base64-encoded
      passkeyCredIdHex TEXT,
      publicKeyHex TEXT NOT NULL
    );

    /* Create index on did for faster lookups */
    CREATE INDEX IF NOT EXISTS idx_accounts_did ON accounts(did);
  `,
  rollback: `
    /* Drop index first to avoid foreign key issues */
    DROP INDEX IF EXISTS idx_accounts_did;

    /* Drop the accounts table */
    DROP TABLE IF EXISTS accounts;
  `
};

// Migration definitions
const MIGRATIONS: Migration[] = [
  INITIAL_MIGRATION,
  {
    version: 2,
    name: '002_secret_and_settings',
    description: 'Add secret, settings, contacts, logs, and temp tables with initial data',
    sql: `
      -- Secret table for storing encryption keys
      -- Note: This is a temporary solution until better secure storage is implemented
      CREATE TABLE IF NOT EXISTS secret (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        secretBase64 TEXT NOT NULL
      );

      -- Insert initial secret only if no secret exists
      INSERT OR IGNORE INTO secret (id, secretBase64) VALUES (1, '${INITIAL_SECRET}');

      -- Settings table for user preferences and app state
      CREATE TABLE IF NOT EXISTS settings (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        accountDid TEXT,
        activeDid TEXT,
        apiServer TEXT,
        filterFeedByNearby BOOLEAN,
        filterFeedByVisible BOOLEAN,
        finishedOnboarding BOOLEAN,
        firstName TEXT,
        hideRegisterPromptOnNewContact BOOLEAN,
        isRegistered BOOLEAN,
        lastName TEXT,
        lastAckedOfferToUserJwtId TEXT,
        lastAckedOfferToUserProjectsJwtId TEXT,
        lastNotifiedClaimId TEXT,
        lastViewedClaimId TEXT,
        notifyingNewActivityTime TEXT,
        notifyingReminderMessage TEXT,
        notifyingReminderTime TEXT,
        partnerApiServer TEXT,
        passkeyExpirationMinutes INTEGER,
        profileImageUrl TEXT,
        searchBoxes TEXT, -- Stored as JSON string
        showContactGivesInline BOOLEAN,
        showGeneralAdvanced BOOLEAN,
        showShortcutBvc BOOLEAN,
        vapid TEXT,
        warnIfProdServer BOOLEAN,
        warnIfTestServer BOOLEAN,
        webPushServer TEXT
      );

      -- Insert default API server setting only if no settings exist
      INSERT OR IGNORE INTO settings (id, apiServer) VALUES (1, '${DEFAULT_ENDORSER_API_SERVER}');

      CREATE INDEX IF NOT EXISTS idx_settings_accountDid ON settings(accountDid);

      -- Contacts table for user connections
      CREATE TABLE IF NOT EXISTS contacts (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        did TEXT NOT NULL,
        name TEXT,
        contactMethods TEXT, -- Stored as JSON string
        nextPubKeyHashB64 TEXT,
        notes TEXT,
        profileImageUrl TEXT,
        publicKeyBase64 TEXT,
        seesMe BOOLEAN,
        registered BOOLEAN
      );

      CREATE INDEX IF NOT EXISTS idx_contacts_did ON contacts(did);
      CREATE INDEX IF NOT EXISTS idx_contacts_name ON contacts(name);

      -- Logs table for application logging
      CREATE TABLE IF NOT EXISTS logs (
        date TEXT NOT NULL,
        message TEXT NOT NULL
      );

      -- Temp table for temporary data storage
      CREATE TABLE IF NOT EXISTS temp (
        id TEXT PRIMARY KEY,
        blobB64 TEXT
      );
    `,
    rollback: `
      -- Drop tables in reverse order to avoid foreign key issues
      DROP TABLE IF EXISTS temp;
      DROP TABLE IF EXISTS logs;
      DROP INDEX IF EXISTS idx_contacts_name;
      DROP INDEX IF EXISTS idx_contacts_did;
      DROP TABLE IF EXISTS contacts;
      DROP INDEX IF EXISTS idx_settings_accountDid;
      DROP TABLE IF EXISTS settings;
      DROP TABLE IF EXISTS secret;
    `
  }
];
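
// Sketch of how a future migration could be appended to MIGRATIONS (purely
// illustrative; the table and columns below are hypothetical). The version
// must continue the sequence or validateMigrationVersions() will throw when
// this module is loaded.
//
//   {
//     version: 3,
//     name: '003_example_feature',                 // hypothetical name
//     description: 'Example of a follow-on migration',
//     sql: `
//       CREATE TABLE IF NOT EXISTS example_feature (
//         id INTEGER PRIMARY KEY AUTOINCREMENT,
//         createdAt TEXT NOT NULL
//       );
//     `,
//     rollback: `
//       DROP TABLE IF EXISTS example_feature;
//     `
//   }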

// Validate migrations before export
validateMigrationVersions(MIGRATIONS);

// Helper functions
const verifyPluginState = async (plugin: any): Promise<boolean> => {
  try {
    const result = await plugin.echo({ value: 'test' });
    return result?.value === 'test';
  } catch (error) {
    // logger.error('Plugin state verification failed:', error);
    return false;
  }
};

// Helper function to verify transaction state without starting a transaction
const verifyTransactionState = async (
  plugin: any,
  database: string
): Promise<boolean> => {
  try {
    // Query SQLite's internal transaction state
    const result = await plugin.query({
      database,
      statement: "SELECT * FROM sqlite_master WHERE type='table' AND name='schema_version';"
    });

    // If we can query, we're not in a transaction
    return false;
  } catch (error) {
    // If error contains "transaction", we're probably in a transaction
    const errorMsg = error instanceof Error ? error.message : String(error);
    const inTransaction = errorMsg.toLowerCase().includes('transaction');

    // logger.debug('Transaction state check:', {
    //   inTransaction,
    //   error: error instanceof Error ? {
    //     message: error.message,
    //     name: error.name
    //   } : error
    // });

    return inTransaction;
  }
};

const getCurrentVersion = async (
  plugin: any,
  database: string
): Promise<number> => {
  try {
    const result = await plugin.query({
      database,
      statement: 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1;'
    });
    return result?.values?.[0]?.version || 0;
  } catch (error) {
    // logger.error('Error getting current version:', error);
    return 0;
  }
};

/**
 * Helper function to execute SQL with retry logic for locked database
 * @param plugin SQLite plugin instance
 * @param database Database name
 * @param operation Function to execute
 * @param context Operation context for logging
 */
const executeWithRetry = async <T>(
  plugin: any,
  database: string,
  operation: () => Promise<T>,
  context: string
): Promise<T> => {
  let lastError: Error | null = null;
  let startTime = Date.now();

  for (let attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; attempt++) {
    try {
      // Check if we've exceeded the total timeout
      if (Date.now() - startTime > LOCK_TIMEOUT_MS) {
        throw new Error(`Operation timed out after ${LOCK_TIMEOUT_MS}ms`);
      }

      // Try the operation
      return await operation();
    } catch (error) {
      lastError = error instanceof Error ? error : new Error(String(error));
      const errorMsg = lastError.message.toLowerCase();
      const isLockError = errorMsg.includes('database is locked') ||
        errorMsg.includes('database is busy') ||
        errorMsg.includes('database is locked (5)');

      if (!isLockError || attempt === MAX_RETRY_ATTEMPTS) {
        throw lastError;
      }

      // logger.warn(`Database operation failed, retrying (${attempt}/${MAX_RETRY_ATTEMPTS}):`, {
      //   context,
      //   error: lastError.message,
      //   attempt,
      //   elapsedMs: Date.now() - startTime
      // });

      // Exponential backoff
      const backoffDelay = RETRY_DELAY_MS * Math.pow(2, attempt - 1);
      await delay(Math.min(backoffDelay, LOCK_TIMEOUT_MS - (Date.now() - startTime)));
    }
  }

  throw lastError || new Error(`Operation failed after ${MAX_RETRY_ATTEMPTS} attempts`);
};
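
// Illustrative usage (not executed): executeWithRetry only retries when the
// error message looks like a lock/busy condition; anything else is rethrown
// immediately. A typical call wraps a single plugin operation:
//
//   const rows = await executeWithRetry(
//     plugin,
//     database,
//     () => plugin.query({ database, statement: 'SELECT * FROM schema_version;' }),
//     'readSchemaVersion' // free-form context label for logging
//   );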

// Helper function to execute a single SQL statement with retry logic
const executeSingleStatement = async (
  plugin: any,
  database: string,
  statement: string,
  values: any[] = []
): Promise<any> => {
  // logger.debug('Executing SQL statement:', {
  //   statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
  //   values: values.map(v => ({
  //     value: v,
  //     type: typeof v,
  //     isNull: v === null || v === undefined
  //   }))
  // });

  return executeWithRetry(
    plugin,
    database,
    async () => {
      // Validate values before execution
      if (statement.includes('schema_version') && statement.includes('INSERT')) {
        // Find the name parameter index in the SQL statement
        const paramIndex = statement.toLowerCase().split(',').findIndex(p =>
          p.trim().startsWith('name')
        );

        if (paramIndex !== -1 && values[paramIndex] !== undefined) {
          const nameValue = values[paramIndex];
          if (!nameValue || typeof nameValue !== 'string') {
            throw new Error(`Invalid migration name type: ${typeof nameValue}`);
          }
          if (nameValue.trim().length === 0) {
            throw new Error('Migration name cannot be empty');
          }
          // Ensure we're using the actual migration name, not the version
          if (nameValue === values[0]?.toString()) {
            throw new Error('Migration name cannot be the same as version number');
          }
          // logger.debug('Validated migration name:', {
          //   name: nameValue,
          //   type: typeof nameValue,
          //   length: nameValue.length
          // });
        }
      }

      const result = await plugin.execute({
        database,
        statements: statement,
        values,
        transaction: false
      });

      // logger.debug('SQL execution result:', {
      //   statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
      //   result
      // });

      return result;
    },
    'executeSingleStatement'
  );
};
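
// Illustrative usage (not executed; the values are examples only):
// executeSingleStatement runs one statement outside of any transaction,
// binding optional values as parameters.
//
//   await executeSingleStatement(
//     plugin,
//     database,
//     'INSERT INTO logs (date, message) VALUES (?, ?);',
//     ['2025-06-01', 'migration check']
//   );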

// Helper function to create migrations table if it doesn't exist
const ensureMigrationsTable = async (
  plugin: any,
  database: string
): Promise<void> => {
  // logger.debug('Ensuring migrations table exists');

  try {
    // Drop and recreate the table to ensure proper structure
    await plugin.execute({
      database,
      statements: 'DROP TABLE IF EXISTS schema_version;',
      transaction: false
    });

    // Create the table with proper constraints
    await plugin.execute({
      database,
      statements: MIGRATIONS_TABLE,
      transaction: false
    });

    // Verify table creation and structure
    const tableInfo = await plugin.query({
      database,
      statement: "PRAGMA table_info(schema_version);"
    });

    // logger.debug('Schema version table structure:', {
    //   columns: tableInfo?.values?.map((row: any) => ({
    //     name: row.name,
    //     type: row.type,
    //     notnull: row.notnull,
    //     dflt_value: row.dflt_value
    //   }))
    // });

    // Verify table was created
    const verifyCheck = await plugin.query({
      database,
      statement: "SELECT name FROM sqlite_master WHERE type='table' AND name='schema_version';"
    });

    if (!verifyCheck?.values?.length) {
      throw new Error('Failed to create migrations table');
    }

    // logger.debug('Migrations table created successfully');
  } catch (error) {
    // logger.error('Error ensuring migrations table:', {
    //   error: error instanceof Error ? {
    //     message: error.message,
    //     stack: error.stack,
    //     name: error.name
    //   } : error
    // });
    throw error;
  }
};

// Update parseMigrationStatements to return ParsedSQL type
const parseMigrationStatements = (sql: string): ParsedSQL => {
  const parsed = parseSQL(sql);

  if (parsed.errors.length > 0) {
    throw new Error(`SQL validation failed:\n${parsed.errors.join('\n')}`);
  }

  if (parsed.warnings.length > 0) {
    // logger.warn('SQL parsing warnings:', parsed.warnings);
  }

  return parsed;
};

// Add debug helper function
const debugTableState = async (
  plugin: any,
  database: string,
  context: string
): Promise<void> => {
  try {
    const tableInfo = await plugin.query({
      database,
      statement: "PRAGMA table_info(schema_version);"
    });

    const tableData = await plugin.query({
      database,
      statement: "SELECT * FROM schema_version;"
    });

    // logger.debug(`Table state (${context}):`, {
    //   tableInfo: tableInfo?.values?.map((row: any) => ({
    //     name: row.name,
    //     type: row.type,
    //     notnull: row.notnull,
    //     dflt_value: row.dflt_value
    //   })),
    //   tableData: tableData?.values,
    //   rowCount: tableData?.values?.length || 0
    // });
  } catch (error) {
    // logger.error(`Error getting table state (${context}):`, error);
  }
};

// Update logMigrationProgress to use migration logger
const logMigrationProgress = (
  migration: Migration,
  stage: string,
  details: Record<string, any> = {}
): void => {
  // logger.migration.info(`Migration ${migration.version} (${migration.name}): ${stage}`, {
  //   migration: {
  //     version: migration.version,
  //     name: migration.name,
  //     stage,
  //     timestamp: new Date().toISOString(),
  //     ...details
  //   }
  // });
};

// Add new error tracking
interface MigrationError extends Error {
  migrationVersion?: number;
  migrationName?: string;
  stage?: string;
  context?: Record<string, any>;
  isRecoverable?: boolean;
}

const createMigrationError = (
  message: string,
  migration: Migration,
  stage: string,
  originalError?: Error,
  context: Record<string, any> = {}
): MigrationError => {
  const error = new Error(message) as MigrationError;
  error.migrationVersion = migration.version;
  error.migrationName = migration.name;
  error.stage = stage;
  error.context = context;
  error.isRecoverable = false;

  if (originalError) {
    error.stack = `${error.stack}\nCaused by: ${originalError.stack}`;
  }

  return error;
};

// Enhance executeMigration with better logging and error handling
const executeMigration = async (
  plugin: any,
  database: string,
  migration: Migration
): Promise<MigrationResult> => {
  const startTime = Date.now();
  const migrationId = `${migration.version}_${migration.name}`;

  logMigrationProgress(migration, 'Starting', {
    database,
    startTime: new Date(startTime).toISOString()
  });

  // Parse SQL and extract any parameterized values
  const { statements, parameters, errors: parseErrors, warnings: parseWarnings } = parseMigrationStatements(migration.sql);

  if (parseErrors.length > 0) {
    const error = createMigrationError(
      'SQL parsing failed',
      migration,
      'parse',
      new Error(parseErrors.join('\n')),
      { errors: parseErrors }
    );
    error.isRecoverable = false;
    throw error;
  }

  if (parseWarnings.length > 0) {
    logMigrationProgress(migration, 'Parse warnings', { warnings: parseWarnings });
  }

  let transactionStarted = false;
  let rollbackAttempted = false;

  try {
    // Debug table state before migration
    await debugTableState(plugin, database, 'before_migration');
    logMigrationProgress(migration, 'Table state verified');

    // Ensure migrations table exists with retry
    await executeWithRetry(
      plugin,
      database,
      () => ensureMigrationsTable(plugin, database),
      'ensureMigrationsTable'
    );
    logMigrationProgress(migration, 'Migrations table verified');

    // Verify plugin state with enhanced logging
    const pluginState = await verifyPluginState(plugin);
    if (!pluginState) {
      throw createMigrationError(
        'Plugin not available',
        migration,
        'plugin_verification',
        null,
        { database }
      );
    }
    logMigrationProgress(migration, 'Plugin state verified');

    // Start transaction with retry and enhanced logging
    await executeWithRetry(
      plugin,
      database,
      async () => {
        logMigrationProgress(migration, 'Starting transaction');
        await plugin.beginTransaction({ database });
        transactionStarted = true;
        logMigrationProgress(migration, 'Transaction started');
      },
      'beginTransaction'
    );

    try {
      // Execute each statement with retry and parameters
      for (let i = 0; i < statements.length; i++) {
        const statement = statements[i];
        const statementParams = parameters[i] || [];

        logMigrationProgress(migration, 'Executing statement', {
          statementNumber: i + 1,
          totalStatements: statements.length,
          statementPreview: statement.substring(0, 100) + (statement.length > 100 ? '...' : '')
        });

        await executeWithRetry(
          plugin,
          database,
          () => executeSingleStatement(plugin, database, statement, statementParams),
          `executeStatement_${i + 1}`
        );

        logMigrationProgress(migration, 'Statement executed', {
          statementNumber: i + 1,
          success: true
        });
      }

      // Commit transaction with enhanced logging
      logMigrationProgress(migration, 'Committing transaction');
      await executeWithRetry(
        plugin,
        database,
        async () => {
          await plugin.commitTransaction({ database });
          transactionStarted = false;
          logMigrationProgress(migration, 'Transaction committed');
        },
        'commitTransaction'
      );

      // Update schema version with enhanced logging
      logMigrationProgress(migration, 'Updating schema version');
      await executeWithRetry(
        plugin,
        database,
        async () => {
          const escapedName = migration.name.trim().replace(/'/g, "''");
          const escapedDesc = (migration.description || '').replace(/'/g, "''");
          const insertSql = `INSERT INTO schema_version (version, name, description) VALUES (${migration.version}, '${escapedName}', '${escapedDesc}')`;

          logMigrationProgress(migration, 'Executing schema version update', {
            sql: insertSql,
            values: {
              version: migration.version,
              name: escapedName,
              description: escapedDesc
            }
          });

          await debugTableState(plugin, database, 'before_insert');
          const result = await plugin.execute({
            database,
            statements: insertSql,
            transaction: false
          });
          await debugTableState(plugin, database, 'after_insert');

          logMigrationProgress(migration, 'Schema version updated', {
            result,
            changes: result.changes
          });

          // Verify the insert with detailed logging
          const verifyQuery = await plugin.query({
            database,
            statement: `SELECT * FROM schema_version WHERE version = ${migration.version} AND name = '${escapedName}'`
          });

          if (!verifyQuery?.values?.length) {
            throw createMigrationError(
              'Schema version update verification failed',
              migration,
              'verify_schema_update',
              null,
              { verifyQuery }
            );
          }

          logMigrationProgress(migration, 'Schema version verified', {
            rowCount: verifyQuery.values.length,
            data: verifyQuery.values
          });
        },
        'updateSchemaVersion'
      );

      const duration = Date.now() - startTime;
      logMigrationProgress(migration, 'Completed', {
        duration,
        success: true,
        endTime: new Date().toISOString()
      });

      return {
        success: true,
        version: migration.version,
        name: migration.name,
        state: {
          plugin: { isAvailable: true, lastChecked: new Date() },
          transaction: { isActive: false, lastVerified: new Date() }
        }
      };
    } catch (error) {
      // Enhanced rollback handling with logging
      if (transactionStarted && !rollbackAttempted) {
        rollbackAttempted = true;
        logMigrationProgress(migration, 'Rollback initiated', {
          error: error instanceof Error ? error.message : String(error)
        });

        try {
          await executeWithRetry(
            plugin,
            database,
            async () => {
              // Record error in schema_version before rollback
              const errorMessage = error instanceof Error ? error.message : String(error);
              const errorStack = error instanceof Error ? error.stack : null;

              logMigrationProgress(migration, 'Recording error state', {
                errorMessage,
                hasStack: !!errorStack
              });

              await executeSingleStatement(
                plugin,
                database,
                `INSERT INTO schema_version (
                  version, name, description, applied_at,
                  error_message, error_stack, error_context,
                  is_dirty
                ) VALUES (?, ?, ?, CURRENT_TIMESTAMP, ?, ?, ?, TRUE);`,
                [
                  migration.version,
                  migration.name,
                  migration.description,
                  errorMessage,
                  errorStack,
                  'migration_execution'
                ]
              );

              logMigrationProgress(migration, 'Rolling back transaction');
              await plugin.rollbackTransaction({ database });
              logMigrationProgress(migration, 'Transaction rolled back');
            },
            'rollbackTransaction'
          );
        } catch (rollbackError) {
          const rollbackFailure = createMigrationError(
            'Rollback failed',
            migration,
            'rollback',
            rollbackError,
            { originalError: error }
          );
          rollbackFailure.isRecoverable = false;
          throw rollbackFailure;
        }
      }

      throw error;
    }
  } catch (error) {
    // Enhanced error handling with detailed logging
    await debugTableState(plugin, database, 'on_error');

    const migrationError = createMigrationError(
      'Migration execution failed',
      migration,
      'execution',
      error instanceof Error ? error : new Error(String(error)),
      {
        transactionStarted,
        rollbackAttempted,
        duration: Date.now() - startTime
      }
    );

    logMigrationProgress(migration, 'Failed', {
      error: migrationError,
      stage: migrationError.stage,
      isRecoverable: migrationError.isRecoverable,
      duration: Date.now() - startTime
    });

    return {
      success: false,
      version: migration.version,
      name: migration.name,
      error: migrationError,
      state: {
        plugin: { isAvailable: true, lastChecked: new Date() },
        transaction: { isActive: transactionStarted, lastVerified: new Date() }
      }
    };
  }
};

// Enhance runMigrations with better logging and error handling
export async function runMigrations(
  plugin: any,
  database: string
): Promise<MigrationResult[]> {
  const startTime = Date.now();
  // logger.migration.info('Starting migration process', {
  //   database,
  //   startTime: new Date(startTime).toISOString(),
  //   totalMigrations: MIGRATIONS.length
  // });

  // Validate migrations with enhanced error handling
  try {
    validateMigrationVersions(MIGRATIONS);
    // logger.migration.info('Migration versions validated', {
    //   totalMigrations: MIGRATIONS.length,
    //   versions: MIGRATIONS.map(m => m.version)
    // });
  } catch (error) {
    const validationError = new Error('Migration validation failed') as MigrationError;
    validationError.isRecoverable = false;
    validationError.context = { error };
    throw validationError;
  }

  // Verify plugin with enhanced logging
  const pluginState = await verifyPluginState(plugin);
  if (!pluginState) {
    const error = new Error('SQLite plugin not available') as MigrationError;
    error.isRecoverable = false;
    error.context = { database };
    throw error;
  }
  // logger.migration.info('Plugin state verified');

  // Ensure migrations table with enhanced error handling
  try {
    await ensureMigrationsTable(plugin, database);
    // logger.migration.info('Migrations table ensured');
  } catch (error) {
    const initError = new Error('Failed to initialize migrations system') as MigrationError;
    initError.isRecoverable = false;
    initError.context = { error, database };
    throw initError;
  }

  // Get current version with enhanced logging
  const currentVersion = await getCurrentVersion(plugin, database);
  // logger.migration.info('Current database version determined', {
  //   currentVersion,
  //   totalMigrations: MIGRATIONS.length
  // });

  // Find pending migrations with logging
  const pendingMigrations = MIGRATIONS.filter(m => m.version > currentVersion);
  // logger.migration.info('Pending migrations identified', {
  //   pendingCount: pendingMigrations.length,
  //   currentVersion,
  //   pendingVersions: pendingMigrations.map(m => m.version)
  // });

  if (pendingMigrations.length === 0) {
    // logger.migration.info('No pending migrations');
    return [];
  }

  // Execute migrations with enhanced error tracking
  const results: MigrationResult[] = [];
  const failures: MigrationResult[] = [];

  for (const migration of pendingMigrations) {
    // logger.migration.info(`Processing migration ${migration.version} of ${pendingMigrations.length}`, {
    //   migration: {
    //     version: migration.version,
    //     name: migration.name,
    //     progress: `${results.length + 1}/${pendingMigrations.length}`
    //   }
    // });

    const result = await executeMigration(plugin, database, migration);
    results.push(result);

    if (!result.success) {
      failures.push(result);
      // logger.migration.error('Migration failed', {
      //   migration: {
      //     version: migration.version,
      //     name: migration.name
      //   },
      //   error: result.error,
      //   totalFailures: failures.length,
      //   remainingMigrations: pendingMigrations.length - results.length
      // });

      // Check if we should continue
      const migrationError = result.error as MigrationError;
      if (migrationError?.isRecoverable === false) {
        // logger.migration.error('Unrecoverable migration failure, stopping process', {
        //   migration: {
        //     version: migration.version,
        //     name: migration.name
        //   },
        //   totalFailures: failures.length,
        //   completedMigrations: results.length - failures.length
        // });
        break;
      }
    }
  }

  if (failures.length > 0) {
    // logger.migration.error('Migration failures:', {
    //   totalMigrations: pendingMigrations.length,
    //   failedCount: failures.length,
    //   failures: failures.map(f => ({
    //     version: f.version,
    //     name: f.name,
    //     error: f.error,
    //     state: f.state
    //   }))
    // });
  }

  return results;
}
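
// Illustrative usage (not executed): the Electron main process would call
// runMigrations once the SQLite plugin and database are ready. The plugin
// instance and database name below are placeholders for whatever the caller
// already has; this module only assumes the plugin exposes the execute/query
// and transaction methods used above.
//
//   const results = await runMigrations(sqlitePlugin, databaseName);
//   const failed = results.filter(r => !r.success);
//   if (failed.length > 0) {
//     // surface failed.map(f => f.error) to the caller or logs
//   }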

// Export types for use in other modules
export type { Migration, MigrationResult, MigrationState };