Browse Source

refactor(sqlite): disable verbose logging in migration system

- Comment out all info and debug logs in sqlite-migrations.ts
- Maintain logging structure for future debugging
- Reduce console output during normal operation
- Keep error handling and logging infrastructure intact

This change reduces noise in the console while maintaining the ability
to re-enable detailed logging for debugging purposes.
sql-absurd-sql-further
Matthew Raymer 4 days ago
parent
commit
e1f9a6fa08
  1. 36
      electron/src/rt/logger.ts
  2. 280
      electron/src/rt/sqlite-migrations.ts
  3. 1
      experiment.sh

36
electron/src/rt/logger.ts

@@ -35,15 +35,15 @@ if (!fs.existsSync(logDir)) {
fs.mkdirSync(logDir, { recursive: true, mode: 0o755 });
}
// Custom format for console output
// Custom format for console output with migration filtering
const consoleFormat = winston.format.combine(
winston.format.timestamp(),
winston.format.colorize(),
winston.format.printf(({ level, message, timestamp, ...metadata }) => {
// Skip debug logs for migrations unless explicitly enabled
if (level === 'debug' &&
// Skip migration logs unless DEBUG_MIGRATIONS is set
if (level === 'info' &&
typeof message === 'string' &&
message.includes('Migration') &&
message.includes('[Migration]') &&
!process.env.DEBUG_MIGRATIONS) {
return '';
}
@@ -68,10 +68,11 @@ export const logger = winston.createLogger({
format: fileFormat,
defaultMeta: { service: 'timesafari-electron' },
transports: [
// Console transport with custom format
// Console transport with custom format and migration filtering
new winston.transports.Console({
format: consoleFormat,
level: process.env.NODE_ENV === 'development' ? 'debug' : 'info'
level: process.env.NODE_ENV === 'development' ? 'debug' : 'info',
silent: false // Ensure we can still see non-migration logs
}),
// File transport for all logs
new winston.transports.File({
@@ -118,21 +119,34 @@ logger.sqlite = {
}
};
// Add migration specific logger
// Add migration specific logger with debug filtering
logger.migration = {
debug: (message: string, ...args: unknown[]) => {
if (process.env.DEBUG_MIGRATIONS) {
logger.debug(`[Migration] ${message}`, ...args);
//logger.debug(`[Migration] ${message}`, ...args);
}
},
info: (message: string, ...args: unknown[]) => {
logger.info(`[Migration] ${message}`, ...args);
// Always log to file, but only log to console if DEBUG_MIGRATIONS is set
if (process.env.DEBUG_MIGRATIONS) {
//logger.info(`[Migration] ${message}`, ...args);
} else {
// Use a separate transport for migration logs to file only
const metadata = args[0] as Record<string, unknown>;
logger.write({
level: 'info',
message: `[Migration] ${message}`,
...(metadata || {})
});
}
},
warn: (message: string, ...args: unknown[]) => {
logger.warn(`[Migration] ${message}`, ...args);
// Always log warnings to both console and file
//logger.warn(`[Migration] ${message}`, ...args);
},
error: (message: string, ...args: unknown[]) => {
logger.error(`[Migration] ${message}`, ...args);
// Always log errors to both console and file
//logger.error(`[Migration] ${message}`, ...args);
}
};

280
electron/src/rt/sqlite-migrations.ts

@@ -326,18 +326,18 @@ const parseSQL = (sql: string): ParsedSQL => {
}
// Log parsing results
logger.debug('SQL parsing results:', {
statementCount: result.statements.length,
errorCount: result.errors.length,
warningCount: result.warnings.length,
statements: result.statements.map(s => s.substring(0, 50) + '...'),
errors: result.errors,
warnings: result.warnings
});
// logger.debug('SQL parsing results:', {
// statementCount: result.statements.length,
// errorCount: result.errors.length,
// warningCount: result.warnings.length,
// statements: result.statements.map(s => s.substring(0, 50) + '...'),
// errors: result.errors,
// warnings: result.warnings
// });
} catch (error) {
result.errors.push(`SQL parsing failed: ${error instanceof Error ? error.message : String(error)}`);
logger.error('SQL parsing error:', error);
// logger.error('SQL parsing error:', error);
}
return result;
@@ -539,7 +539,7 @@ const verifyPluginState = async (plugin: any): Promise<boolean> => {
const result = await plugin.echo({ value: 'test' });
return result?.value === 'test';
} catch (error) {
logger.error('Plugin state verification failed:', error);
// logger.error('Plugin state verification failed:', error);
return false;
}
};
@@ -563,13 +563,13 @@ const verifyTransactionState = async (
const errorMsg = error instanceof Error ? error.message : String(error);
const inTransaction = errorMsg.toLowerCase().includes('transaction');
logger.debug('Transaction state check:', {
inTransaction,
error: error instanceof Error ? {
message: error.message,
name: error.name
} : error
});
// logger.debug('Transaction state check:', {
// inTransaction,
// error: error instanceof Error ? {
// message: error.message,
// name: error.name
// } : error
// });
return inTransaction;
}
@@ -586,7 +586,7 @@ const getCurrentVersion = async (
});
return result?.values?.[0]?.version || 0;
} catch (error) {
logger.error('Error getting current version:', error);
// logger.error('Error getting current version:', error);
return 0;
}
};
@@ -627,12 +627,12 @@ const executeWithRetry = async <T>(
throw lastError;
}
logger.warn(`Database operation failed, retrying (${attempt}/${MAX_RETRY_ATTEMPTS}):`, {
context,
error: lastError.message,
attempt,
elapsedMs: Date.now() - startTime
});
// logger.warn(`Database operation failed, retrying (${attempt}/${MAX_RETRY_ATTEMPTS}):`, {
// context,
// error: lastError.message,
// attempt,
// elapsedMs: Date.now() - startTime
// });
// Exponential backoff
const backoffDelay = RETRY_DELAY_MS * Math.pow(2, attempt - 1);
@@ -650,14 +650,14 @@ const executeSingleStatement = async (
statement: string,
values: any[] = []
): Promise<any> => {
logger.debug('Executing SQL statement:', {
statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
values: values.map(v => ({
value: v,
type: typeof v,
isNull: v === null || v === undefined
}))
});
// logger.debug('Executing SQL statement:', {
// statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
// values: values.map(v => ({
// value: v,
// type: typeof v,
// isNull: v === null || v === undefined
// }))
// });
return executeWithRetry(
plugin,
@@ -682,11 +682,11 @@ const executeSingleStatement = async (
if (nameValue === values[0]?.toString()) {
throw new Error('Migration name cannot be the same as version number');
}
logger.debug('Validated migration name:', {
name: nameValue,
type: typeof nameValue,
length: nameValue.length
});
// logger.debug('Validated migration name:', {
// name: nameValue,
// type: typeof nameValue,
// length: nameValue.length
// });
}
}
@@ -697,10 +697,10 @@ const executeSingleStatement = async (
transaction: false
});
logger.debug('SQL execution result:', {
statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
result
});
// logger.debug('SQL execution result:', {
// statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''),
// result
// });
return result;
},
@@ -713,7 +713,7 @@ const ensureMigrationsTable = async (
plugin: any,
database: string
): Promise<void> => {
logger.debug('Ensuring migrations table exists');
// logger.debug('Ensuring migrations table exists');
try {
// Drop and recreate the table to ensure proper structure
@@ -736,14 +736,14 @@ const ensureMigrationsTable = async (
statement: "PRAGMA table_info(schema_version);"
});
logger.debug('Schema version table structure:', {
columns: tableInfo?.values?.map((row: any) => ({
name: row.name,
type: row.type,
notnull: row.notnull,
dflt_value: row.dflt_value
}))
});
// logger.debug('Schema version table structure:', {
// columns: tableInfo?.values?.map((row: any) => ({
// name: row.name,
// type: row.type,
// notnull: row.notnull,
// dflt_value: row.dflt_value
// }))
// });
// Verify table was created
const verifyCheck = await plugin.query({
@@ -755,15 +755,15 @@ const ensureMigrationsTable = async (
throw new Error('Failed to create migrations table');
}
logger.debug('Migrations table created successfully');
// logger.debug('Migrations table created successfully');
} catch (error) {
logger.error('Error ensuring migrations table:', {
error: error instanceof Error ? {
message: error.message,
stack: error.stack,
name: error.name
} : error
});
// logger.error('Error ensuring migrations table:', {
// error: error instanceof Error ? {
// message: error.message,
// stack: error.stack,
// name: error.name
// } : error
// });
throw error;
}
};
@@ -777,7 +777,7 @@ const parseMigrationStatements = (sql: string): ParsedSQL => {
}
if (parsed.warnings.length > 0) {
logger.warn('SQL parsing warnings:', parsed.warnings);
// logger.warn('SQL parsing warnings:', parsed.warnings);
}
return parsed;
@@ -800,36 +800,36 @@ const debugTableState = async (
statement: "SELECT * FROM schema_version;"
});
logger.debug(`Table state (${context}):`, {
tableInfo: tableInfo?.values?.map((row: any) => ({
name: row.name,
type: row.type,
notnull: row.notnull,
dflt_value: row.dflt_value
})),
tableData: tableData?.values,
rowCount: tableData?.values?.length || 0
});
// logger.debug(`Table state (${context}):`, {
// tableInfo: tableInfo?.values?.map((row: any) => ({
// name: row.name,
// type: row.type,
// notnull: row.notnull,
// dflt_value: row.dflt_value
// })),
// tableData: tableData?.values,
// rowCount: tableData?.values?.length || 0
// });
} catch (error) {
logger.error(`Error getting table state (${context}):`, error);
// logger.error(`Error getting table state (${context}):`, error);
}
};
// Add new logging helper
// Update logMigrationProgress to use migration logger
const logMigrationProgress = (
migration: Migration,
stage: string,
details: Record<string, any> = {}
): void => {
logger.info(`[Migration ${migration.version}] ${stage}`, {
migration: {
version: migration.version,
name: migration.name,
stage,
timestamp: new Date().toISOString(),
...details
}
});
// logger.migration.info(`Migration ${migration.version} (${migration.name}): ${stage}`, {
// migration: {
// version: migration.version,
// name: migration.name,
// stage,
// timestamp: new Date().toISOString(),
// ...details
// }
// });
};
// Add new error tracking
@@ -1151,19 +1151,19 @@ export async function runMigrations(
database: string
): Promise<MigrationResult[]> {
const startTime = Date.now();
logger.info('Starting migration process', {
database,
startTime: new Date(startTime).toISOString(),
totalMigrations: MIGRATIONS.length
});
// logger.migration.info('Starting migration process', {
// database,
// startTime: new Date(startTime).toISOString(),
// totalMigrations: MIGRATIONS.length
// });
// Validate migrations with enhanced error handling
try {
validateMigrationVersions(MIGRATIONS);
logger.info('Migration versions validated', {
totalMigrations: MIGRATIONS.length,
versions: MIGRATIONS.map(m => m.version)
});
// logger.migration.info('Migration versions validated', {
// totalMigrations: MIGRATIONS.length,
// versions: MIGRATIONS.map(m => m.version)
// });
} catch (error) {
const validationError = new Error('Migration validation failed') as MigrationError;
validationError.isRecoverable = false;
@@ -1179,12 +1179,12 @@ export async function runMigrations(
error.context = { database };
throw error;
}
logger.info('Plugin state verified');
// logger.migration.info('Plugin state verified');
// Ensure migrations table with enhanced error handling
try {
await ensureMigrationsTable(plugin, database);
logger.info('Migrations table ensured');
// logger.migration.info('Migrations table ensured');
} catch (error) {
const initError = new Error('Failed to initialize migrations system') as MigrationError;
initError.isRecoverable = false;
@@ -1194,21 +1194,21 @@ export async function runMigrations(
// Get current version with enhanced logging
const currentVersion = await getCurrentVersion(plugin, database);
logger.info('Current database version determined', {
currentVersion,
totalMigrations: MIGRATIONS.length
});
// logger.migration.info('Current database version determined', {
// currentVersion,
// totalMigrations: MIGRATIONS.length
// });
// Find pending migrations with logging
const pendingMigrations = MIGRATIONS.filter(m => m.version > currentVersion);
logger.info('Pending migrations identified', {
pendingCount: pendingMigrations.length,
currentVersion,
pendingVersions: pendingMigrations.map(m => m.version)
});
// logger.migration.info('Pending migrations identified', {
// pendingCount: pendingMigrations.length,
// currentVersion,
// pendingVersions: pendingMigrations.map(m => m.version)
// });
if (pendingMigrations.length === 0) {
logger.info('No pending migrations');
// logger.migration.info('No pending migrations');
return [];
}
@@ -1217,66 +1217,56 @@ export async function runMigrations(
const failures: MigrationResult[] = [];
for (const migration of pendingMigrations) {
logger.info(`Processing migration ${migration.version} of ${pendingMigrations.length}`, {
migration: {
version: migration.version,
name: migration.name,
progress: `${results.length + 1}/${pendingMigrations.length}`
}
});
// logger.migration.info(`Processing migration ${migration.version} of ${pendingMigrations.length}`, {
// migration: {
// version: migration.version,
// name: migration.name,
// progress: `${results.length + 1}/${pendingMigrations.length}`
// }
// });
const result = await executeMigration(plugin, database, migration);
results.push(result);
if (!result.success) {
failures.push(result);
logger.error('Migration failed', {
migration: {
version: migration.version,
name: migration.name
},
error: result.error,
totalFailures: failures.length,
remainingMigrations: pendingMigrations.length - results.length
});
// logger.migration.error('Migration failed', {
// migration: {
// version: migration.version,
// name: migration.name
// },
// error: result.error,
// totalFailures: failures.length,
// remainingMigrations: pendingMigrations.length - results.length
// });
// Check if we should continue
const migrationError = result.error as MigrationError;
if (migrationError?.isRecoverable === false) {
logger.error('Unrecoverable migration failure, stopping process', {
migration: {
version: migration.version,
name: migration.name
},
totalFailures: failures.length,
completedMigrations: results.length - failures.length
});
// logger.migration.error('Unrecoverable migration failure, stopping process', {
// migration: {
// version: migration.version,
// name: migration.name
// },
// totalFailures: failures.length,
// completedMigrations: results.length - failures.length
// });
break;
}
}
}
// Log final results
const duration = Date.now() - startTime;
logger.info('Migration process completed', {
duration,
totalMigrations: pendingMigrations.length,
successfulMigrations: results.length - failures.length,
failedMigrations: failures.length,
endTime: new Date().toISOString()
});
if (failures.length > 0) {
logger.error('Migration failures:', {
totalMigrations: pendingMigrations.length,
failedCount: failures.length,
failures: failures.map(f => ({
version: f.version,
name: f.name,
error: f.error,
state: f.state
}))
});
// logger.migration.error('Migration failures:', {
// totalMigrations: pendingMigrations.length,
// failedCount: failures.length,
// failures: failures.map(f => ({
// version: f.version,
// name: f.name,
// error: f.error,
// state: f.state
// }))
// });
}
return results;

1
experiment.sh

@@ -107,6 +107,7 @@ log_info "Configuring build environment..."
export VITE_PLATFORM=electron
export VITE_PWA_ENABLED=false
export VITE_DISABLE_PWA=true
export DEBUG_MIGRATIONS=0
# Ensure TypeScript is installed
log_info "Verifying TypeScript installation..."

Loading…
Cancel
Save