Compare commits
107 Commits (master...elec-tweak)
| Author | SHA1 | Date |
|---|---|---|
| | 59d711bd90 | 6 days ago |
| | c355de6e33 | 6 days ago |
| | 28c114a2c7 | 6 days ago |
| | dabfe33fbe | 6 days ago |
| | d8f2587d1c | 6 days ago |
| | 3946a8a27a | 6 days ago |
| | 4c40b80718 | 7 days ago |
| | 74989c2b64 | 7 days ago |
| | 7e17b41444 | 7 days ago |
| | 83acb028c7 | 7 days ago |
| | 786f07e067 | 7 days ago |
| | 710cc1683c | 7 days ago |
| | ebef5d6c8d | 7 days ago |
| | 43ea7ee610 | 1 week ago |
| | 57191df416 | 1 week ago |
| | 644593a5f4 | 1 week ago |
| | 900c2521c7 | 1 week ago |
| | 182cff2b16 | 1 week ago |
| | 3b4ef908f3 | 1 week ago |
| | a5a9e15ece | 1 week ago |
| | a6d8f0eb8a | 1 week ago |
| | 3997a88b44 | 1 week ago |
| | 5eeeae32c6 | 1 week ago |
| | d9895086e6 | 1 week ago |
| | fb8d1cb8b2 | 1 week ago |
| | 70c0edbed0 | 1 week ago |
| | 55cc08d675 | 1 week ago |
| | 688a5be76e | 1 week ago |
| | 014341f320 | 1 week ago |
| | 1d5e062c76 | 1 week ago |
| | 2c5c15108a | 1 week ago |
| | 26df0fb671 | 1 week ago |
| | ef3bfcdbd2 | 1 week ago |
| | ec1f27bab1 | 1 week ago |
| | 01c33069c4 | 1 week ago |
| | c637d39dc9 | 1 week ago |
| | 3e90bafbd1 | 1 week ago |
| | d2c3e5db05 | 1 week ago |
| | e824fcce2e | 1 week ago |
| | f2c49872a6 | 1 week ago |
| | 229d9184b2 | 1 week ago |
| | 29908b77e3 | 1 week ago |
| | 16cad04e5c | 1 week ago |
| | e4f859a116 | 1 week ago |
| | 7f17a3d9c7 | 1 week ago |
| | 2d4d9691ca | 1 week ago |
| | 63575b36ed | 1 week ago |
| | 2eb46367bc | 1 week ago |
| | cea0456148 | 1 week ago |
| | 6f5db13a49 | 1 week ago |
| | 068662625d | 1 week ago |
| | 23627835f9 | 1 week ago |
| | f1ba6f9231 | 1 week ago |
| | 137fce3e30 | 1 week ago |
| | 7166dadbc0 | 1 week ago |
| | bc274bdf7f | 1 week ago |
| | 082f8c0126 | 1 week ago |
| | fd09c7e426 | 1 week ago |
| | be40643379 | 1 week ago |
| | 835a270e65 | 1 week ago |
| | 13682a1930 | 1 week ago |
| | 669a66c24c | 1 week ago |
| | 13505b539e | 1 week ago |
| | 07ac340733 | 1 week ago |
| | ba2b2fc543 | 1 week ago |
| | 21184e7625 | 1 week ago |
| | 8d1511e38f | 1 week ago |
| | b18112b869 | 2 weeks ago |
| | a228a9b1c0 | 2 weeks ago |
| | 1560ff0829 | 2 weeks ago |
| | 7de4125eb7 | 2 weeks ago |
| | 81d4f0c762 | 2 weeks ago |
| | 4c1b4fe651 | 2 weeks ago |
| | e63541ef53 | 2 weeks ago |
| | 0bfc18c385 | 2 weeks ago |
| | 35f5df6b6b | 2 weeks ago |
| | 0f1ac2b230 | 2 weeks ago |
| | 3c0bdeaed3 | 2 weeks ago |
| | 11f2527b04 | 2 weeks ago |
| | 5d8175aeeb | 2 weeks ago |
| | b6b95cb0d0 | 2 weeks ago |
| | 655c5188a4 | 2 weeks ago |
| | 8b7451330f | 2 weeks ago |
| | b8fbc3f7a6 | 2 weeks ago |
| | 92dadba1cb | 2 weeks ago |
| | 3a6f585de0 | 2 weeks ago |
| | 2647c5a77d | 2 weeks ago |
| | 682fceb1c6 | 2 weeks ago |
| | e0013008b4 | 2 weeks ago |
| | 0674d98670 | 2 weeks ago |
| | ee441d1aea | 2 weeks ago |
| | 75f6e99200 | 2 weeks ago |
| | 52c9e57ef4 | 2 weeks ago |
| | 603823d808 | 2 weeks ago |
| | 5f24f4975d | 2 weeks ago |
| | 5057d7d07f | 2 weeks ago |
| | 946e88d903 | 2 weeks ago |
| | cbfb1ebf57 | 2 weeks ago |
| | a38934e38d | 2 weeks ago |
| | a3bdcfd168 | 2 weeks ago |
| | 83771caee1 | 2 weeks ago |
| | da35b225cd | 2 weeks ago |
| | 8c3920e108 | 2 weeks ago |
| | 54f269054f | 2 weeks ago |
| | 574520d9b3 | 2 weeks ago |
| | 6556eb55a3 | 2 weeks ago |
| | 634e2bb2fb | 2 weeks ago |
173 changed files with 19362 additions and 3856 deletions
@@ -0,0 +1,153 @@
|
--- |
||||
|
description: |
||||
|
globs: |
||||
|
alwaysApply: true |
||||
|
--- |
||||
|
# Absurd SQL - Cursor Development Guide |
||||
|
|
||||
|
## Project Overview |
||||
|
Absurd SQL is a backend implementation for sql.js that enables persistent SQLite databases in the browser by using IndexedDB as a block storage system. This guide provides rules and best practices for developing with this project in Cursor. |
||||
|
|
||||
|
## Project Structure |
||||
|
``` |
||||
|
absurd-sql/ |
||||
|
├── src/ # Source code |
||||
|
├── dist/ # Built files |
||||
|
├── package.json # Dependencies and scripts |
||||
|
├── rollup.config.js # Build configuration |
||||
|
└── jest.config.js # Test configuration |
||||
|
``` |
||||
|
|
||||
|
## Development Rules |
||||
|
|
||||
|
### 1. Worker Thread Requirements |
||||
|
- All SQL operations MUST be performed in a worker thread |
||||
|
- Main thread should only handle worker initialization and communication (see the sketch below)
||||
|
- Never block the main thread with database operations |
||||
|
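As a rough illustration of that split, the main thread can forward SQL requests to the worker over `postMessage` and await the reply. This is only a sketch — the `run-query` message name and payload shape are app-level assumptions, not part of absurd-sql's API:

```typescript
// Main thread: forward a query to the worker and await its reply (sketch).
// The message protocol ({ id, type: 'run-query', sql, params }) is an assumption.
const worker = new Worker(new URL('./index.worker.js', import.meta.url));

function runQuery(sql: string, params: unknown[] = []): Promise<unknown> {
  return new Promise((resolve, reject) => {
    const id = crypto.randomUUID();
    const onMessage = (event: MessageEvent) => {
      if (event.data?.id !== id) return;
      worker.removeEventListener('message', onMessage);
      if (event.data.error) {
        reject(new Error(event.data.error));
      } else {
        resolve(event.data.result);
      }
    };
    worker.addEventListener('message', onMessage);
    worker.postMessage({ id, type: 'run-query', sql, params });
  });
}
```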
|
||||
|
### 2. Code Organization |
||||
|
- Keep worker code in separate files (e.g., `*.worker.js`) |
||||
|
- Use ES modules for imports/exports |
||||
|
- Follow the project's existing module structure |
||||
|
|
||||
|
### 3. Required Headers |
||||
|
When developing locally or deploying, ensure these headers are set: |
||||
|
``` |
||||
|
Cross-Origin-Opener-Policy: same-origin |
||||
|
Cross-Origin-Embedder-Policy: require-corp |
||||
|
``` |
||||
|
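If the development server is Vite (the build checklist elsewhere in this change modifies `vite.config.ts`), the headers can be added there; a minimal sketch, assuming no other server options are needed:

```typescript
// vite.config.ts — add COOP/COEP headers so SharedArrayBuffer is available in dev (sketch)
import { defineConfig } from 'vite';

export default defineConfig({
  server: {
    headers: {
      'Cross-Origin-Opener-Policy': 'same-origin',
      'Cross-Origin-Embedder-Policy': 'require-corp',
    },
  },
});
```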
|
||||
|
### 4. Browser Compatibility |
||||
|
- Primary target: Modern browsers with SharedArrayBuffer support |
||||
|
- Fallback mode: Safari (with limitations) |
||||
|
- Always test in both modes |
||||
|
|
||||
|
### 5. Database Configuration |
||||
|
Recommended database settings: |
||||
|
```sql |
||||
|
PRAGMA journal_mode=MEMORY; |
||||
|
PRAGMA page_size=8192; -- Optional, but recommended |
||||
|
``` |
||||
|
|
||||
|
### 6. Development Workflow |
||||
|
1. Install dependencies: |
||||
|
```bash |
||||
|
yarn add @jlongster/sql.js absurd-sql |
||||
|
``` |
||||
|
|
||||
|
2. Development commands: |
||||
|
- `yarn build` - Build the project |
||||
|
- `yarn jest` - Run tests |
||||
|
- `yarn serve` - Start development server |
||||
|
|
||||
|
### 7. Testing Guidelines |
||||
|
- Write tests for both SharedArrayBuffer and fallback modes |
||||
|
- Use Jest for testing |
||||
|
- Include performance benchmarks for critical operations |
||||
|
|
||||
|
### 8. Performance Considerations |
||||
|
- Use bulk operations when possible |
||||
|
- Monitor read/write performance |
||||
|
- Consider using transactions for multiple operations |
||||
|
- Avoid unnecessary database connections |
||||
|
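For example, a batch of inserts can be wrapped in a single transaction inside the worker, using the sql.js `exec`/`run` API. This is a sketch; the `items` table and its columns are made-up examples:

```typescript
// Worker thread: batch insert inside one transaction (sketch; "items" is a hypothetical table)
function insertMany(db: any, rows: Array<{ id: string; name: string }>): void {
  db.exec('BEGIN TRANSACTION;');
  try {
    for (const row of rows) {
      db.run('INSERT INTO items (id, name) VALUES (?, ?)', [row.id, row.name]);
    }
    db.exec('COMMIT;');
  } catch (error) {
    db.exec('ROLLBACK;');
    throw error;
  }
}
```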
|
||||
|
### 9. Error Handling |
||||
|
- Implement proper error handling for: |
||||
|
- Worker initialization failures |
||||
|
- Database connection issues |
||||
|
- Concurrent access conflicts (in fallback mode) |
||||
|
- Storage quota exceeded scenarios (see the sketch below)
||||
|
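For the quota case, the browser's Storage API can be consulted before large writes. A sketch; the 90% threshold is an arbitrary assumption:

```typescript
// Check whether IndexedDB storage still has headroom before a large write (sketch)
async function hasStorageHeadroom(threshold = 0.9): Promise<boolean> {
  if (!('storage' in navigator) || !navigator.storage.estimate) {
    return true; // API unavailable — assume there is room
  }
  const { usage = 0, quota = 0 } = await navigator.storage.estimate();
  return quota === 0 || usage / quota < threshold;
}
```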
|
||||
|
### 10. Security Best Practices |
||||
|
- Never expose database operations directly to the client |
||||
|
- Validate all SQL queries |
||||
|
- Implement proper access controls |
||||
|
- Handle sensitive data appropriately |
||||
|
|
||||
|
### 11. Code Style |
||||
|
- Follow ESLint configuration |
||||
|
- Use async/await for asynchronous operations |
||||
|
- Document complex database operations |
||||
|
- Include comments for non-obvious optimizations |
||||
|
|
||||
|
### 12. Debugging |
||||
|
- Use `jest-debug` for debugging tests |
||||
|
- Monitor IndexedDB usage in browser dev tools |
||||
|
- Check worker communication in console |
||||
|
- Use performance monitoring tools |
||||
|
|
||||
|
## Common Patterns |
||||
|
|
||||
|
### Worker Initialization |
||||
|
```javascript |
||||
|
// Main thread |
||||
|
import { initBackend } from 'absurd-sql/dist/indexeddb-main-thread'; |
||||
|
|
||||
|
function init() { |
||||
|
let worker = new Worker(new URL('./index.worker.js', import.meta.url)); |
||||
|
initBackend(worker); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
### Database Setup |
||||
|
```javascript |
||||
|
// Worker thread |
||||
|
import initSqlJs from '@jlongster/sql.js'; |
||||
|
import { SQLiteFS } from 'absurd-sql'; |
||||
|
import IndexedDBBackend from 'absurd-sql/dist/indexeddb-backend'; |
||||
|
|
||||
|
async function setupDatabase() { |
||||
|
let SQL = await initSqlJs({ locateFile: file => file }); |
||||
|
let sqlFS = new SQLiteFS(SQL.FS, new IndexedDBBackend()); |
||||
|
SQL.register_for_idb(sqlFS); |
||||
|
|
||||
|
SQL.FS.mkdir('/sql'); |
||||
|
SQL.FS.mount(sqlFS, {}, '/sql'); |
||||
|
|
||||
|
return new SQL.Database('/sql/db.sqlite', { filename: true }); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
## Troubleshooting |
||||
|
|
||||
|
### Common Issues |
||||
|
1. SharedArrayBuffer not available |
||||
|
- Check COOP/COEP headers |
||||
|
- Verify browser support |
||||
|
- Test fallback mode |
||||
|
|
||||
|
2. Worker initialization failures |
||||
|
- Check file paths |
||||
|
- Verify module imports |
||||
|
- Check browser console for errors |
||||
|
|
||||
|
3. Performance issues |
||||
|
- Monitor IndexedDB usage |
||||
|
- Check for unnecessary operations |
||||
|
- Verify transaction usage |
||||
|
|
||||
|
## Resources |
||||
|
- [Project Demo](https://priceless-keller-d097e5.netlify.app/) |
||||
|
- [Example Project](https://github.com/jlongster/absurd-example-project) |
||||
|
- [Blog Post](https://jlongster.com/future-sql-web) |
||||
|
- [SQL.js Documentation](https://github.com/sql-js/sql.js/) |
@@ -1,6 +0,0 @@
# Admin DID credentials
ADMIN_DID=did:ethr:0x0000694B58C2cC69658993A90D3840C560f2F51F
ADMIN_PRIVATE_KEY=2b6472c026ec2aa2c4235c994a63868fc9212d18b58f6cbfe861b52e71330f5b

# API Configuration
ENDORSER_API_URL=https://test-api.endorser.ch/api/v2/claim
@@ -1,7 +1,15 @@
package app.timesafari;

import android.os.Bundle;
import com.getcapacitor.BridgeActivity;
import com.getcapacitor.community.sqlite.SQLite;

public class MainActivity extends BridgeActivity {
    // ... existing code ...
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Initialize SQLite
        registerPlugin(SQLite.class);
    }
}
@@ -0,0 +1,399 @@
|
# Dexie to absurd-sql Mapping Guide |
||||
|
|
||||
|
## Schema Mapping |
||||
|
|
||||
|
### Current Dexie Schema |
||||
|
```typescript |
||||
|
// Current Dexie schema |
||||
|
const db = new Dexie('TimeSafariDB'); |
||||
|
|
||||
|
db.version(1).stores({ |
||||
|
accounts: 'did, publicKeyHex, createdAt, updatedAt', |
||||
|
settings: 'key, value, updatedAt', |
||||
|
contacts: 'id, did, name, createdAt, updatedAt' |
||||
|
}); |
||||
|
``` |
||||
|
|
||||
|
### New SQLite Schema |
||||
|
```sql |
||||
|
-- New SQLite schema |
||||
|
CREATE TABLE accounts ( |
||||
|
did TEXT PRIMARY KEY, |
||||
|
public_key_hex TEXT NOT NULL, |
||||
|
created_at INTEGER NOT NULL, |
||||
|
updated_at INTEGER NOT NULL |
||||
|
); |
||||
|
|
||||
|
CREATE TABLE settings ( |
||||
|
key TEXT PRIMARY KEY, |
||||
|
value TEXT NOT NULL, |
||||
|
updated_at INTEGER NOT NULL |
||||
|
); |
||||
|
|
||||
|
CREATE TABLE contacts ( |
||||
|
id TEXT PRIMARY KEY, |
||||
|
did TEXT NOT NULL, |
||||
|
name TEXT, |
||||
|
created_at INTEGER NOT NULL, |
||||
|
updated_at INTEGER NOT NULL, |
||||
|
FOREIGN KEY (did) REFERENCES accounts(did) |
||||
|
); |
||||
|
|
||||
|
-- Indexes for performance |
||||
|
CREATE INDEX idx_accounts_created_at ON accounts(created_at); |
||||
|
CREATE INDEX idx_contacts_did ON contacts(did); |
||||
|
CREATE INDEX idx_settings_updated_at ON settings(updated_at); |
||||
|
``` |
||||
|
|
||||
|
## Query Mapping |
||||
|
|
||||
|
### 1. Account Operations |
||||
|
|
||||
|
#### Get Account by DID |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
const account = await db.accounts.get(did); |
||||
|
|
||||
|
// absurd-sql |
||||
|
const result = await db.exec(` |
||||
|
SELECT * FROM accounts WHERE did = ? |
||||
|
`, [did]); |
||||
|
const account = result[0]?.values[0]; |
||||
|
``` |
||||
|
|
||||
|
#### Get All Accounts |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
const accounts = await db.accounts.toArray(); |
||||
|
|
||||
|
// absurd-sql |
||||
|
const result = await db.exec(` |
||||
|
SELECT * FROM accounts ORDER BY created_at DESC |
||||
|
`); |
||||
|
const accounts = result[0]?.values || []; |
||||
|
``` |
||||
|
|
||||
|
#### Add Account |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
await db.accounts.add({ |
||||
|
did, |
||||
|
publicKeyHex, |
||||
|
createdAt: Date.now(), |
||||
|
updatedAt: Date.now() |
||||
|
}); |
||||
|
|
||||
|
// absurd-sql |
||||
|
await db.run(` |
||||
|
INSERT INTO accounts (did, public_key_hex, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?) |
||||
|
`, [did, publicKeyHex, Date.now(), Date.now()]); |
||||
|
``` |
||||
|
|
||||
|
#### Update Account |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
await db.accounts.update(did, { |
||||
|
publicKeyHex, |
||||
|
updatedAt: Date.now() |
||||
|
}); |
||||
|
|
||||
|
// absurd-sql |
||||
|
await db.run(` |
||||
|
UPDATE accounts |
||||
|
SET public_key_hex = ?, updated_at = ? |
||||
|
WHERE did = ? |
||||
|
`, [publicKeyHex, Date.now(), did]); |
||||
|
``` |
||||
|
|
||||
|
### 2. Settings Operations |
||||
|
|
||||
|
#### Get Setting |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
const setting = await db.settings.get(key); |
||||
|
|
||||
|
// absurd-sql |
||||
|
const result = await db.exec(` |
||||
|
SELECT * FROM settings WHERE key = ? |
||||
|
`, [key]); |
||||
|
const setting = result[0]?.values[0]; |
||||
|
``` |
||||
|
|
||||
|
#### Set Setting |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
await db.settings.put({ |
||||
|
key, |
||||
|
value, |
||||
|
updatedAt: Date.now() |
||||
|
}); |
||||
|
|
||||
|
// absurd-sql |
||||
|
await db.run(` |
||||
|
INSERT INTO settings (key, value, updated_at) |
||||
|
VALUES (?, ?, ?) |
||||
|
ON CONFLICT(key) DO UPDATE SET |
||||
|
value = excluded.value, |
||||
|
updated_at = excluded.updated_at |
||||
|
`, [key, value, Date.now()]); |
||||
|
``` |
||||
|
|
||||
|
### 3. Contact Operations |
||||
|
|
||||
|
#### Get Contacts by Account |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
const contacts = await db.contacts |
||||
|
.where('did') |
||||
|
.equals(accountDid) |
||||
|
.toArray(); |
||||
|
|
||||
|
// absurd-sql |
||||
|
const result = await db.exec(` |
||||
|
SELECT * FROM contacts |
||||
|
WHERE did = ? |
||||
|
ORDER BY created_at DESC |
||||
|
`, [accountDid]); |
||||
|
const contacts = result[0]?.values || []; |
||||
|
``` |
||||
|
|
||||
|
#### Add Contact |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
await db.contacts.add({ |
||||
|
id: generateId(), |
||||
|
did: accountDid, |
||||
|
name, |
||||
|
createdAt: Date.now(), |
||||
|
updatedAt: Date.now() |
||||
|
}); |
||||
|
|
||||
|
// absurd-sql |
||||
|
await db.run(` |
||||
|
INSERT INTO contacts (id, did, name, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?, ?) |
||||
|
`, [generateId(), accountDid, name, Date.now(), Date.now()]); |
||||
|
``` |
||||
|
|
||||
|
## Transaction Mapping |
||||
|
|
||||
|
### Batch Operations |
||||
|
```typescript |
||||
|
// Dexie |
||||
|
await db.transaction('rw', [db.accounts, db.contacts], async () => { |
||||
|
await db.accounts.add(account); |
||||
|
await db.contacts.bulkAdd(contacts); |
||||
|
}); |
||||
|
|
||||
|
// absurd-sql |
||||
|
await db.exec('BEGIN TRANSACTION;'); |
||||
|
try { |
||||
|
await db.run(` |
||||
|
INSERT INTO accounts (did, public_key_hex, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?) |
||||
|
`, [account.did, account.publicKeyHex, account.createdAt, account.updatedAt]); |
||||
|
|
||||
|
for (const contact of contacts) { |
||||
|
await db.run(` |
||||
|
INSERT INTO contacts (id, did, name, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?, ?) |
||||
|
`, [contact.id, contact.did, contact.name, contact.createdAt, contact.updatedAt]); |
||||
|
} |
||||
|
await db.exec('COMMIT;'); |
||||
|
} catch (error) { |
||||
|
await db.exec('ROLLBACK;'); |
||||
|
throw error; |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
## Migration Helper Functions |
||||
|
|
||||
|
### 1. Data Export (Dexie to JSON) |
||||
|
```typescript |
||||
|
async function exportDexieData(): Promise<MigrationData> {
  const db = new Dexie('TimeSafariDB');
  // Declare the schema before reading tables; otherwise db.accounts is undefined
  db.version(1).stores({
    accounts: 'did, publicKeyHex, createdAt, updatedAt',
    settings: 'key, value, updatedAt',
    contacts: 'id, did, name, createdAt, updatedAt'
  });

  return {
    accounts: await db.accounts.toArray(),
    settings: await db.settings.toArray(),
    contacts: await db.contacts.toArray(),
    metadata: {
      version: '1.0.0',
      timestamp: Date.now(),
      dexieVersion: Dexie.version
    }
  };
}
||||
|
``` |
||||
|
|
||||
|
### 2. Data Import (JSON to absurd-sql) |
||||
|
```typescript |
||||
|
async function importToAbsurdSql(data: MigrationData): Promise<void> { |
||||
|
await db.exec('BEGIN TRANSACTION;'); |
||||
|
try { |
||||
|
// Import accounts |
||||
|
for (const account of data.accounts) { |
||||
|
await db.run(` |
||||
|
INSERT INTO accounts (did, public_key_hex, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?) |
||||
|
`, [account.did, account.publicKeyHex, account.createdAt, account.updatedAt]); |
||||
|
} |
||||
|
|
||||
|
// Import settings |
||||
|
for (const setting of data.settings) { |
||||
|
await db.run(` |
||||
|
INSERT INTO settings (key, value, updated_at) |
||||
|
VALUES (?, ?, ?) |
||||
|
`, [setting.key, setting.value, setting.updatedAt]); |
||||
|
} |
||||
|
|
||||
|
// Import contacts |
||||
|
for (const contact of data.contacts) { |
||||
|
await db.run(` |
||||
|
INSERT INTO contacts (id, did, name, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?, ?) |
||||
|
`, [contact.id, contact.did, contact.name, contact.createdAt, contact.updatedAt]); |
||||
|
} |
||||
|
await db.exec('COMMIT;'); |
||||
|
} catch (error) { |
||||
|
await db.exec('ROLLBACK;'); |
||||
|
throw error; |
||||
|
} |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
### 3. Verification |
||||
|
```typescript |
||||
|
async function verifyMigration(dexieData: MigrationData): Promise<boolean> { |
||||
|
// Verify account count |
||||
|
const accountResult = await db.exec('SELECT COUNT(*) as count FROM accounts'); |
||||
|
const accountCount = accountResult[0].values[0][0]; |
||||
|
if (accountCount !== dexieData.accounts.length) { |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
// Verify settings count |
||||
|
const settingsResult = await db.exec('SELECT COUNT(*) as count FROM settings'); |
||||
|
const settingsCount = settingsResult[0].values[0][0]; |
||||
|
if (settingsCount !== dexieData.settings.length) { |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
// Verify contacts count |
||||
|
const contactsResult = await db.exec('SELECT COUNT(*) as count FROM contacts'); |
||||
|
const contactsCount = contactsResult[0].values[0][0]; |
||||
|
if (contactsCount !== dexieData.contacts.length) { |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
// Verify data integrity |
||||
|
for (const account of dexieData.accounts) { |
||||
|
const result = await db.exec( |
||||
|
'SELECT * FROM accounts WHERE did = ?', |
||||
|
[account.did] |
||||
|
); |
||||
|
const migratedAccount = result[0]?.values[0]; |
||||
|
if (!migratedAccount || |
||||
|
migratedAccount[1] !== account.publicKeyHex) { // public_key_hex is second column |
||||
|
return false; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return true; |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
## Performance Considerations |
||||
|
|
||||
|
### 1. Indexing |
||||
|
- Dexie automatically creates indexes based on the schema |
||||
|
- absurd-sql requires explicit index creation |
||||
|
- Added indexes for frequently queried fields |
||||
|
- Use `PRAGMA journal_mode=MEMORY;` for better performance |
||||
|
|
||||
|
### 2. Batch Operations |
||||
|
- Dexie has built-in bulk operations |
||||
|
- absurd-sql uses transactions for batch operations |
||||
|
- Consider chunking large datasets |
||||
|
- Use prepared statements for repeated queries (see the sketch below)
||||
|
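A sketch of a chunked bulk insert that reuses one prepared statement via the sql.js statement API (`prepare`/`run`/`free`). The chunk size of 500 is an arbitrary assumption; the columns mirror the `contacts` schema above:

```typescript
// Insert contacts in chunks, reusing a single prepared statement (sketch)
type ContactRow = { id: string; did: string; name?: string; createdAt: number; updatedAt: number };

function bulkInsertContacts(db: any, contacts: ContactRow[], chunkSize = 500): void {
  const stmt = db.prepare(
    'INSERT INTO contacts (id, did, name, created_at, updated_at) VALUES (?, ?, ?, ?, ?)'
  );
  try {
    for (let i = 0; i < contacts.length; i += chunkSize) {
      db.exec('BEGIN TRANSACTION;');
      for (const c of contacts.slice(i, i + chunkSize)) {
        stmt.run([c.id, c.did, c.name ?? null, c.createdAt, c.updatedAt]);
      }
      db.exec('COMMIT;');
    }
  } finally {
    stmt.free();
  }
}
```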
|
||||
|
### 3. Query Optimization |
||||
|
- Dexie uses IndexedDB's native indexing |
||||
|
- absurd-sql requires explicit query optimization |
||||
|
- Use prepared statements for repeated queries |
||||
|
- Consider using `PRAGMA synchronous=NORMAL;` for better performance |
||||
|
|
||||
|
## Error Handling |
||||
|
|
||||
|
### 1. Common Errors |
||||
|
```typescript |
||||
|
// Dexie errors |
||||
|
try { |
||||
|
await db.accounts.add(account); |
||||
|
} catch (error) { |
||||
|
if (error instanceof Dexie.ConstraintError) { |
||||
|
// Handle duplicate key |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// absurd-sql errors |
||||
|
try { |
||||
|
await db.run(` |
||||
|
INSERT INTO accounts (did, public_key_hex, created_at, updated_at) |
||||
|
VALUES (?, ?, ?, ?) |
||||
|
`, [account.did, account.publicKeyHex, account.createdAt, account.updatedAt]); |
||||
|
} catch (error) { |
||||
|
if (error.message.includes('UNIQUE constraint failed')) { |
||||
|
// Handle duplicate key |
||||
|
} |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
### 2. Transaction Recovery |
||||
|
```typescript |
||||
|
// Dexie transaction |
||||
|
try { |
||||
|
await db.transaction('rw', db.accounts, async () => { |
||||
|
// Operations |
||||
|
}); |
||||
|
} catch (error) { |
||||
|
// Dexie automatically rolls back |
||||
|
} |
||||
|
|
||||
|
// absurd-sql transaction |
||||
|
try { |
||||
|
await db.exec('BEGIN TRANSACTION;'); |
||||
|
// Operations |
||||
|
await db.exec('COMMIT;'); |
||||
|
} catch (error) { |
||||
|
await db.exec('ROLLBACK;'); |
||||
|
throw error; |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
## Migration Strategy |
||||
|
|
||||
|
1. **Preparation** |
||||
|
- Export all Dexie data |
||||
|
- Verify data integrity |
||||
|
- Create SQLite schema |
||||
|
- Setup indexes |
||||
|
|
||||
|
2. **Migration** |
||||
|
- Import data in transactions |
||||
|
- Verify each batch |
||||
|
- Handle errors gracefully |
||||
|
- Maintain backup |
||||
|
|
||||
|
3. **Verification** |
||||
|
- Compare record counts |
||||
|
- Verify data integrity |
||||
|
- Test common queries |
||||
|
- Validate relationships |
||||
|
|
||||
|
4. **Cleanup** |
||||
|
- Remove Dexie database |
||||
|
- Clear IndexedDB storage |
||||
|
- Update application code |
||||
|
- Remove old dependencies |
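Putting the helpers above together, the overall flow could look like the following sketch. It assumes `exportDexieData`, `importToAbsurdSql`, `verifyMigration`, and the `Dexie` global are in scope, and it deletes the Dexie database only after verification succeeds:

```typescript
// End-to-end migration flow built from the helpers defined above (sketch)
async function migrateDexieToAbsurdSql(): Promise<void> {
  const data = await exportDexieData();      // 1. Preparation
  await importToAbsurdSql(data);             // 2. Migration (transactional)
  const ok = await verifyMigration(data);    // 3. Verification
  if (!ok) {
    throw new Error('Migration verification failed; keeping Dexie data as the backup');
  }
  await Dexie.delete('TimeSafariDB');        // 4. Cleanup, only after verification passes
}
```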
@@ -0,0 +1,339 @@
|
# Secure Storage Implementation Guide for TimeSafari App |
||||
|
|
||||
|
## Overview |
||||
|
|
||||
|
This document outlines the implementation of secure storage for the TimeSafari app. The implementation focuses on: |
||||
|
|
||||
|
1. **Platform-Specific Storage Solutions**: |
||||
|
- Web: SQLite with IndexedDB backend (absurd-sql) |
||||
|
- Electron: SQLite with Node.js backend |
||||
|
- Native: (Planned) SQLCipher with platform-specific secure storage |
||||
|
|
||||
|
2. **Key Features**: |
||||
|
- SQLite-based storage using absurd-sql for web |
||||
|
- Platform-specific service factory pattern |
||||
|
- Consistent API across platforms |
||||
|
- Migration support from Dexie.js |
||||
|
|
||||
|
## Quick Start |
||||
|
|
||||
|
### 1. Installation |
||||
|
|
||||
|
```bash |
||||
|
# Core dependencies |
||||
|
npm install @jlongster/sql.js |
||||
|
npm install absurd-sql |
||||
|
|
||||
|
# Platform-specific dependencies (for future native support) |
||||
|
npm install @capacitor/preferences |
||||
|
npm install @capacitor-community/biometric-auth |
||||
|
``` |
||||
|
|
||||
|
### 2. Basic Usage |
||||
|
|
||||
|
```typescript |
||||
|
// Using the platform service |
||||
|
import { PlatformServiceFactory } from '../services/PlatformServiceFactory'; |
||||
|
|
||||
|
// Get platform-specific service instance |
||||
|
const platformService = PlatformServiceFactory.getInstance(); |
||||
|
|
||||
|
// Example database operations |
||||
|
async function example() { |
||||
|
try { |
||||
|
// Query example |
||||
|
const result = await platformService.dbQuery( |
||||
|
"SELECT * FROM accounts WHERE did = ?", |
||||
|
[did] |
||||
|
); |
||||
|
|
||||
|
// Execute example |
||||
|
await platformService.dbExec( |
||||
|
"INSERT INTO accounts (did, public_key_hex) VALUES (?, ?)", |
||||
|
[did, publicKeyHex] |
||||
|
); |
||||
|
|
||||
|
} catch (error) { |
||||
|
console.error('Database operation failed:', error); |
||||
|
} |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
### 3. Platform Detection |
||||
|
|
||||
|
```typescript |
||||
|
// src/services/PlatformServiceFactory.ts |
||||
|
export class PlatformServiceFactory { |
||||
|
static getInstance(): PlatformService { |
||||
|
if (process.env.ELECTRON) { |
||||
|
// Electron platform |
||||
|
return new ElectronPlatformService(); |
||||
|
} else { |
||||
|
// Web platform (default) |
||||
|
return new AbsurdSqlDatabaseService(); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
### 4. Current Implementation Details |
||||
|
|
||||
|
#### Web Platform (AbsurdSqlDatabaseService) |
||||
|
|
||||
|
The web platform uses absurd-sql with IndexedDB backend: |
||||
|
|
||||
|
```typescript |
||||
|
// src/services/AbsurdSqlDatabaseService.ts |
||||
|
export class AbsurdSqlDatabaseService implements PlatformService { |
||||
|
private static instance: AbsurdSqlDatabaseService | null = null; |
||||
|
private db: AbsurdSqlDatabase | null = null; |
||||
|
private initialized: boolean = false; |
||||
|
|
||||
|
// Singleton pattern |
||||
|
static getInstance(): AbsurdSqlDatabaseService { |
||||
|
if (!AbsurdSqlDatabaseService.instance) { |
||||
|
AbsurdSqlDatabaseService.instance = new AbsurdSqlDatabaseService(); |
||||
|
} |
||||
|
return AbsurdSqlDatabaseService.instance; |
||||
|
} |
||||
|
|
||||
|
// Database operations |
||||
|
async dbQuery(sql: string, params: unknown[] = []): Promise<QueryExecResult[]> { |
||||
|
await this.waitForInitialization(); |
||||
|
return this.queueOperation<QueryExecResult[]>("query", sql, params); |
||||
|
} |
||||
|
|
||||
|
async dbExec(sql: string, params: unknown[] = []): Promise<void> { |
||||
|
await this.waitForInitialization(); |
||||
|
await this.queueOperation<void>("run", sql, params); |
||||
|
} |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
Key features: |
||||
|
- Uses absurd-sql for SQLite in the browser |
||||
|
- Implements operation queuing for thread safety (see the sketch below)
||||
|
- Handles initialization and connection management |
||||
|
- Provides consistent API across platforms |
||||
|
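The operation queuing mentioned above can be as simple as chaining promises so that each database call starts only after the previous one settles. This is a simplified sketch of the idea, not the actual `AbsurdSqlDatabaseService` internals:

```typescript
// Serialize database operations so they never interleave (sketch)
class OperationQueue {
  private tail: Promise<unknown> = Promise.resolve();

  enqueue<T>(operation: () => Promise<T>): Promise<T> {
    // Start the new operation once the previous one has settled, even if it failed
    const next = this.tail.then(operation, operation);
    this.tail = next.catch(() => undefined); // keep the chain alive after rejections
    return next;
  }
}
```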
|
||||
|
### 5. Migration from Dexie.js |
||||
|
|
||||
|
The current implementation supports gradual migration from Dexie.js: |
||||
|
|
||||
|
```typescript |
||||
|
// Example of dual-storage pattern |
||||
|
async function getAccount(did: string): Promise<Account | undefined> { |
||||
|
// Try SQLite first |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
let account = await platform.dbQuery( |
||||
|
"SELECT * FROM accounts WHERE did = ?", |
||||
|
[did] |
||||
|
); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
account = await db.accounts.get(did); |
||||
|
} |
||||
|
|
||||
|
return account; |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
#### A. Modifying Code |
||||
|
|
||||
|
When converting from Dexie.js to the SQL-based implementation, follow these patterns:
||||
|
|
||||
|
1. **Database Access Pattern** |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
const result = await db.table.where("field").equals(value).first(); |
||||
|
|
||||
|
// After (SQL) |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
let result = await platform.dbQuery( |
||||
|
"SELECT * FROM table WHERE field = ?", |
||||
|
[value] |
||||
|
); |
||||
|
result = databaseUtil.mapQueryResultToValues(result); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
result = await db.table.where("field").equals(value).first(); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
2. **Update Operations** |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
await db.table.where("id").equals(id).modify(changes); |
||||
|
|
||||
|
// After (SQL) |
||||
|
// For settings updates, use the utility methods: |
||||
|
await databaseUtil.updateDefaultSettings(changes); |
||||
|
// OR |
||||
|
await databaseUtil.updateAccountSettings(did, changes); |
||||
|
|
||||
|
// For other tables, use direct SQL: |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
await platform.dbExec( |
||||
|
"UPDATE table SET field1 = ?, field2 = ? WHERE id = ?", |
||||
|
[changes.field1, changes.field2, id] |
||||
|
); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
await db.table.where("id").equals(id).modify(changes); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
3. **Insert Operations** |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
await db.table.add(item); |
||||
|
|
||||
|
// After (SQL) |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
const columns = Object.keys(item); |
||||
|
const values = Object.values(item); |
||||
|
const placeholders = values.map(() => '?').join(', '); |
||||
|
const sql = `INSERT INTO table (${columns.join(', ')}) VALUES (${placeholders})`; |
||||
|
await platform.dbExec(sql, values); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
await db.table.add(item); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
4. **Delete Operations** |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
await db.table.where("id").equals(id).delete(); |
||||
|
|
||||
|
// After (SQL) |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
await platform.dbExec("DELETE FROM table WHERE id = ?", [id]); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
await db.table.where("id").equals(id).delete(); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
5. **Result Processing** |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
const items = await db.table.toArray(); |
||||
|
|
||||
|
// After (SQL) |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
let items = await platform.dbQuery("SELECT * FROM table"); |
||||
|
items = databaseUtil.mapQueryResultToValues(items); |
||||
|
|
||||
|
// Fallback to Dexie if needed |
||||
|
if (USE_DEXIE_DB) { |
||||
|
items = await db.table.toArray(); |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
6. **Using Utility Methods** |
||||
|
|
||||
|
When working with settings or other common operations, use the utility methods in `db/index.ts`: |
||||
|
|
||||
|
```typescript |
||||
|
// Settings operations |
||||
|
await databaseUtil.updateDefaultSettings(settings); |
||||
|
await databaseUtil.updateAccountSettings(did, settings); |
||||
|
const settings = await databaseUtil.retrieveSettingsForDefaultAccount(); |
||||
|
const settings = await databaseUtil.retrieveSettingsForActiveAccount(); |
||||
|
|
||||
|
// Logging operations |
||||
|
await databaseUtil.logToDb(message); |
||||
|
await databaseUtil.logConsoleAndDb(message, showInConsole); |
||||
|
``` |
||||
|
|
||||
|
Key Considerations: |
||||
|
- Always use `databaseUtil.mapQueryResultToValues()` to process SQL query results (see the sketch below)
||||
|
- Use utility methods from `db/index.ts` when available instead of direct SQL |
||||
|
- Keep Dexie fallbacks wrapped in `if (USE_DEXIE_DB)` checks |
||||
|
- For queries that return results, use `let` variables to allow Dexie fallback to override |
||||
|
- For updates/inserts/deletes, execute both SQL and Dexie operations when `USE_DEXIE_DB` is true |
||||
|
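For orientation, the mapping step referenced above turns the raw `{ columns, values }` result shape into plain row objects. The real helper lives in `db/index.ts`; this is only a sketch of what it does:

```typescript
// Convert sql.js-style query results into an array of row objects (sketch)
type RawResult = { columns: string[]; values: unknown[][] };

function mapQueryResultToValues(results: RawResult[]): Record<string, unknown>[] {
  if (!results.length) return [];
  const { columns, values } = results[0];
  return values.map((row) =>
    Object.fromEntries(columns.map((col, i) => [col, row[i]]))
  );
}
```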
|
||||
|
Example Migration: |
||||
|
```typescript |
||||
|
// Before (Dexie) |
||||
|
export async function updateSettings(settings: Settings): Promise<void> { |
||||
|
await db.settings.put(settings); |
||||
|
} |
||||
|
|
||||
|
// After (SQL) |
||||
|
export async function updateSettings(settings: Settings): Promise<void> { |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
const { sql, params } = generateUpdateStatement( |
||||
|
settings, |
||||
|
"settings", |
||||
|
"id = ?", |
||||
|
[settings.id] |
||||
|
); |
||||
|
await platform.dbExec(sql, params); |
||||
|
} |
||||
|
``` |
||||
|
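`generateUpdateStatement` is one of the app's utilities; the following is a minimal sketch of what such a helper could look like, matching the call shape used above (the real signature may differ):

```typescript
// Build "UPDATE <table> SET col = ?, ... WHERE <condition>" plus its parameter list (sketch)
function generateUpdateStatement(
  changes: Record<string, unknown>,
  table: string,
  whereClause: string,
  whereParams: unknown[]
): { sql: string; params: unknown[] } {
  const columns = Object.keys(changes);
  const setClause = columns.map((column) => `${column} = ?`).join(', ');
  return {
    sql: `UPDATE ${table} SET ${setClause} WHERE ${whereClause}`,
    params: [...Object.values(changes), ...whereParams],
  };
}
```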
|
||||
|
Remember to:

- Create database access code that uses the platform service, placed ahead of the existing Dexie version.
- Keep the Dexie-specific code rather than removing it.
- For creates, updates, and deletes, duplicating the operation in both stores is fine.
- For queries whose results are used, assign the SQL result to a `let` variable, then wrap the Dexie code in a check of `USE_DEXIE_DB` (from `app.ts`); if it is true, use the Dexie result instead of the SQL result.
- Consider data migration needs, and warn about any potential migration problems.
||||
|
|
||||
|
## Success Criteria |
||||
|
|
||||
|
1. **Functionality** |
||||
|
- [x] Basic CRUD operations work correctly |
||||
|
- [x] Platform service factory pattern implemented |
||||
|
- [x] Error handling in place |
||||
|
- [ ] Native platform support (planned) |
||||
|
|
||||
|
2. **Performance** |
||||
|
- [x] Database operations complete within acceptable time |
||||
|
- [x] Operation queuing for thread safety |
||||
|
- [x] Proper initialization handling |
||||
|
- [ ] Performance monitoring (planned) |
||||
|
|
||||
|
3. **Security** |
||||
|
- [x] Basic data integrity |
||||
|
- [ ] Encryption (planned for native platforms) |
||||
|
- [ ] Secure key storage (planned) |
||||
|
- [ ] Platform-specific security features (planned) |
||||
|
|
||||
|
4. **Testing** |
||||
|
- [x] Basic unit tests |
||||
|
- [ ] Comprehensive integration tests (planned) |
||||
|
- [ ] Platform-specific tests (planned) |
||||
|
- [ ] Migration tests (planned) |
||||
|
|
||||
|
## Next Steps |
||||
|
|
||||
|
1. **Native Platform Support** |
||||
|
- Implement SQLCipher for iOS/Android |
||||
|
- Add platform-specific secure storage |
||||
|
- Implement biometric authentication |
||||
|
|
||||
|
2. **Enhanced Security** |
||||
|
- Add encryption for sensitive data |
||||
|
- Implement secure key storage |
||||
|
- Add platform-specific security features |
||||
|
|
||||
|
3. **Testing and Monitoring** |
||||
|
- Add comprehensive test coverage |
||||
|
- Implement performance monitoring |
||||
|
- Add error tracking and analytics |
||||
|
|
||||
|
4. **Documentation** |
||||
|
- Add API documentation |
||||
|
- Create migration guides |
||||
|
- Document security measures |
@@ -0,0 +1,329 @@
|
# Storage Implementation Checklist |
||||
|
|
||||
|
## Core Services |
||||
|
|
||||
|
### 1. Storage Service Layer |
||||
|
- [x] Create base `PlatformService` interface |
||||
|
- [x] Define common methods for all platforms |
||||
|
- [x] Add platform-specific method signatures |
||||
|
- [x] Include error handling types |
||||
|
- [x] Add migration support methods |
||||
|
|
||||
|
- [x] Implement platform-specific services |
||||
|
- [x] `AbsurdSqlDatabaseService` (web) |
||||
|
- [x] Database initialization |
||||
|
- [x] VFS setup with IndexedDB backend |
||||
|
- [x] Connection management |
||||
|
- [x] Operation queuing |
||||
|
- [ ] `NativeSQLiteService` (iOS/Android) (planned) |
||||
|
- [ ] SQLCipher integration |
||||
|
- [ ] Native bridge setup |
||||
|
- [ ] File system access |
||||
|
- [ ] `ElectronSQLiteService` (planned) |
||||
|
- [ ] Node SQLite integration |
||||
|
- [ ] IPC communication |
||||
|
- [ ] File system access |
||||
|
|
||||
|
### 2. Migration Services |
||||
|
- [x] Implement basic migration support |
||||
|
- [x] Dual-storage pattern (SQLite + Dexie) |
||||
|
- [x] Basic data verification |
||||
|
- [ ] Rollback procedures (planned) |
||||
|
- [ ] Progress tracking (planned) |
||||
|
- [ ] Create `MigrationUI` components (planned) |
||||
|
- [ ] Progress indicators |
||||
|
- [ ] Error handling |
||||
|
- [ ] User notifications |
||||
|
- [ ] Manual triggers |
||||
|
|
||||
|
### 3. Security Layer |
||||
|
- [x] Basic data integrity |
||||
|
- [ ] Implement `EncryptionService` (planned) |
||||
|
- [ ] Key management |
||||
|
- [ ] Encryption/decryption |
||||
|
- [ ] Secure storage |
||||
|
- [ ] Add `BiometricService` (planned) |
||||
|
- [ ] Platform detection |
||||
|
- [ ] Authentication flow |
||||
|
- [ ] Fallback mechanisms |
||||
|
|
||||
|
## Platform-Specific Implementation |
||||
|
|
||||
|
### Web Platform |
||||
|
- [x] Setup absurd-sql |
||||
|
- [x] Install dependencies |
||||
|
```json |
||||
|
{ |
||||
|
"@jlongster/sql.js": "^1.8.0", |
||||
|
"absurd-sql": "^1.8.0" |
||||
|
} |
||||
|
``` |
||||
|
- [x] Configure VFS with IndexedDB backend |
||||
|
- [x] Setup worker threads |
||||
|
- [x] Implement operation queuing |
||||
|
- [x] Configure database pragmas |
||||
|
|
||||
|
```sql |
||||
|
PRAGMA journal_mode=MEMORY; |
||||
|
PRAGMA synchronous=NORMAL; |
||||
|
PRAGMA foreign_keys=ON; |
||||
|
PRAGMA busy_timeout=5000; |
||||
|
``` |
||||
|
|
||||
|
- [x] Update build configuration |
||||
|
- [x] Modify `vite.config.ts` |
||||
|
- [x] Add worker configuration |
||||
|
- [x] Update chunk splitting |
||||
|
- [x] Configure asset handling |
||||
|
|
||||
|
- [x] Implement IndexedDB backend |
||||
|
- [x] Create database service |
||||
|
- [x] Add operation queuing |
||||
|
- [x] Handle initialization |
||||
|
- [x] Implement atomic operations |
||||
|
|
||||
|
### iOS Platform (Planned) |
||||
|
- [ ] Setup SQLCipher |
||||
|
- [ ] Install pod dependencies |
||||
|
- [ ] Configure encryption |
||||
|
- [ ] Setup keychain access |
||||
|
- [ ] Implement secure storage |
||||
|
|
||||
|
- [ ] Update Capacitor config |
||||
|
- [ ] Modify `capacitor.config.ts` |
||||
|
- [ ] Add iOS permissions |
||||
|
- [ ] Configure backup |
||||
|
- [ ] Setup app groups |
||||
|
|
||||
|
### Android Platform (Planned) |
||||
|
- [ ] Setup SQLCipher |
||||
|
- [ ] Add Gradle dependencies |
||||
|
- [ ] Configure encryption |
||||
|
- [ ] Setup keystore |
||||
|
- [ ] Implement secure storage |
||||
|
|
||||
|
- [ ] Update Capacitor config |
||||
|
- [ ] Modify `capacitor.config.ts` |
||||
|
- [ ] Add Android permissions |
||||
|
- [ ] Configure backup |
||||
|
- [ ] Setup file provider |
||||
|
|
||||
|
### Electron Platform (Planned) |
||||
|
- [ ] Setup Node SQLite |
||||
|
- [ ] Install dependencies |
||||
|
- [ ] Configure IPC |
||||
|
- [ ] Setup file system access |
||||
|
- [ ] Implement secure storage |
||||
|
|
||||
|
- [ ] Update Electron config |
||||
|
- [ ] Modify `electron.config.ts` |
||||
|
- [ ] Add security policies |
||||
|
- [ ] Configure file access |
||||
|
- [ ] Setup auto-updates |
||||
|
|
||||
|
## Data Models and Types |
||||
|
|
||||
|
### 1. Database Schema |
||||
|
- [x] Define tables |
||||
|
|
||||
|
```sql |
||||
|
-- Accounts table |
||||
|
CREATE TABLE accounts ( |
||||
|
did TEXT PRIMARY KEY, |
||||
|
public_key_hex TEXT NOT NULL, |
||||
|
created_at INTEGER NOT NULL, |
||||
|
updated_at INTEGER NOT NULL |
||||
|
); |
||||
|
|
||||
|
-- Settings table |
||||
|
CREATE TABLE settings ( |
||||
|
key TEXT PRIMARY KEY, |
||||
|
value TEXT NOT NULL, |
||||
|
updated_at INTEGER NOT NULL |
||||
|
); |
||||
|
|
||||
|
-- Contacts table |
||||
|
CREATE TABLE contacts ( |
||||
|
id TEXT PRIMARY KEY, |
||||
|
did TEXT NOT NULL, |
||||
|
name TEXT, |
||||
|
created_at INTEGER NOT NULL, |
||||
|
updated_at INTEGER NOT NULL, |
||||
|
FOREIGN KEY (did) REFERENCES accounts(did) |
||||
|
); |
||||
|
|
||||
|
-- Indexes for performance |
||||
|
CREATE INDEX idx_accounts_created_at ON accounts(created_at); |
||||
|
CREATE INDEX idx_contacts_did ON contacts(did); |
||||
|
CREATE INDEX idx_settings_updated_at ON settings(updated_at); |
||||
|
``` |
||||
|
|
||||
|
- [x] Create indexes |
||||
|
- [x] Define constraints |
||||
|
- [ ] Add triggers (planned) |
||||
|
- [ ] Setup migrations (planned) |
||||
|
|
||||
|
### 2. Type Definitions |
||||
|
|
||||
|
- [x] Create interfaces |
||||
|
```typescript |
||||
|
interface Account { |
||||
|
did: string; |
||||
|
publicKeyHex: string; |
||||
|
createdAt: number; |
||||
|
updatedAt: number; |
||||
|
} |
||||
|
|
||||
|
interface Setting { |
||||
|
key: string; |
||||
|
value: string; |
||||
|
updatedAt: number; |
||||
|
} |
||||
|
|
||||
|
interface Contact { |
||||
|
id: string; |
||||
|
did: string; |
||||
|
name?: string; |
||||
|
createdAt: number; |
||||
|
updatedAt: number; |
||||
|
} |
||||
|
``` |
||||
|
|
||||
|
- [x] Add validation |
||||
|
- [x] Create DTOs |
||||
|
- [x] Define enums |
||||
|
- [x] Add type guards (see the sketch below)
||||
|
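A sketch of one such type guard, matching the `Account` interface above (the concrete guards in the codebase may differ):

```typescript
// Runtime check that an unknown value matches the Account interface (sketch)
function isAccount(value: unknown): value is Account {
  if (typeof value !== 'object' || value === null) return false;
  const v = value as Record<string, unknown>;
  return (
    typeof v.did === 'string' &&
    typeof v.publicKeyHex === 'string' &&
    typeof v.createdAt === 'number' &&
    typeof v.updatedAt === 'number'
  );
}
```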
|
||||
|
## UI Components |
||||
|
|
||||
|
### 1. Migration UI (Planned) |
||||
|
- [ ] Create components |
||||
|
- [ ] `MigrationProgress.vue` |
||||
|
- [ ] `MigrationError.vue` |
||||
|
- [ ] `MigrationSettings.vue` |
||||
|
- [ ] `MigrationStatus.vue` |
||||
|
|
||||
|
### 2. Settings UI (Planned) |
||||
|
- [ ] Update components |
||||
|
- [ ] Add storage settings |
||||
|
- [ ] Add migration controls |
||||
|
- [ ] Add backup options |
||||
|
- [ ] Add security settings |
||||
|
|
||||
|
### 3. Error Handling UI (Planned) |
||||
|
- [ ] Create components |
||||
|
- [ ] `StorageError.vue` |
||||
|
- [ ] `QuotaExceeded.vue` |
||||
|
- [ ] `MigrationFailed.vue` |
||||
|
- [ ] `RecoveryOptions.vue` |
||||
|
|
||||
|
## Testing |
||||
|
|
||||
|
### 1. Unit Tests |
||||
|
- [x] Basic service tests |
||||
|
- [x] Platform service tests |
||||
|
- [x] Database operation tests |
||||
|
- [ ] Security service tests (planned) |
||||
|
- [ ] Platform detection tests (planned) |
||||
|
|
||||
|
### 2. Integration Tests (Planned) |
||||
|
- [ ] Test migrations |
||||
|
- [ ] Web platform tests |
||||
|
- [ ] iOS platform tests |
||||
|
- [ ] Android platform tests |
||||
|
- [ ] Electron platform tests |
||||
|
|
||||
|
### 3. E2E Tests (Planned) |
||||
|
- [ ] Test workflows |
||||
|
- [ ] Account management |
||||
|
- [ ] Settings management |
||||
|
- [ ] Contact management |
||||
|
- [ ] Migration process |
||||
|
|
||||
|
## Documentation |
||||
|
|
||||
|
### 1. Technical Documentation |
||||
|
- [x] Update architecture docs |
||||
|
- [x] Add API documentation |
||||
|
- [ ] Create migration guides (planned) |
||||
|
- [ ] Document security measures (planned) |
||||
|
|
||||
|
### 2. User Documentation (Planned) |
||||
|
- [ ] Update user guides |
||||
|
- [ ] Add troubleshooting guides |
||||
|
- [ ] Create FAQ |
||||
|
- [ ] Document new features |
||||
|
|
||||
|
## Deployment |
||||
|
|
||||
|
### 1. Build Process |
||||
|
- [x] Update build scripts |
||||
|
- [x] Add platform-specific builds |
||||
|
- [ ] Configure CI/CD (planned) |
||||
|
- [ ] Setup automated testing (planned) |
||||
|
|
||||
|
### 2. Release Process (Planned) |
||||
|
- [ ] Create release checklist |
||||
|
- [ ] Add version management |
||||
|
- [ ] Setup rollback procedures |
||||
|
- [ ] Configure monitoring |
||||
|
|
||||
|
## Monitoring and Analytics (Planned) |
||||
|
|
||||
|
### 1. Error Tracking |
||||
|
- [ ] Setup error logging |
||||
|
- [ ] Add performance monitoring |
||||
|
- [ ] Configure alerts |
||||
|
- [ ] Create dashboards |
||||
|
|
||||
|
### 2. Usage Analytics |
||||
|
- [ ] Add storage metrics |
||||
|
- [ ] Track migration success |
||||
|
- [ ] Monitor performance |
||||
|
- [ ] Collect user feedback |
||||
|
|
||||
|
## Security Audit (Planned) |
||||
|
|
||||
|
### 1. Code Review |
||||
|
- [ ] Review encryption |
||||
|
- [ ] Check access controls |
||||
|
- [ ] Verify data handling |
||||
|
- [ ] Audit dependencies |
||||
|
|
||||
|
### 2. Penetration Testing |
||||
|
- [ ] Test data access |
||||
|
- [ ] Verify encryption |
||||
|
- [ ] Check authentication |
||||
|
- [ ] Review permissions |
||||
|
|
||||
|
## Success Criteria |
||||
|
|
||||
|
### 1. Performance |
||||
|
- [x] Query response time < 100ms |
||||
|
- [x] Operation queuing for thread safety |
||||
|
- [x] Proper initialization handling |
||||
|
- [ ] Migration time < 5s per 1000 records (planned) |
||||
|
- [ ] Storage overhead < 10% (planned) |
||||
|
- [ ] Memory usage < 50MB (planned) |
||||
|
|
||||
|
### 2. Reliability |
||||
|
- [x] Basic data integrity |
||||
|
- [x] Operation queuing |
||||
|
- [ ] Automatic recovery (planned) |
||||
|
- [ ] Backup verification (planned) |
||||
|
- [ ] Transaction atomicity (planned) |
||||
|
- [ ] Data consistency (planned) |
||||
|
|
||||
|
### 3. Security |
||||
|
- [x] Basic data integrity |
||||
|
- [ ] AES-256 encryption (planned) |
||||
|
- [ ] Secure key storage (planned) |
||||
|
- [ ] Access control (planned) |
||||
|
- [ ] Audit logging (planned) |
||||
|
|
||||
|
### 4. User Experience |
||||
|
- [x] Basic database operations |
||||
|
- [ ] Smooth migration (planned) |
||||
|
- [ ] Clear error messages (planned) |
||||
|
- [ ] Progress indicators (planned) |
||||
|
- [ ] Recovery options (planned) |
File diff suppressed because it is too large
@@ -0,0 +1,55 @@
|
# NPM renames .gitignore to .npmignore |
||||
|
# In order to prevent that, we remove the initial "." |
||||
|
# And the CLI then renames it |
||||
|
app |
||||
|
node_modules |
||||
|
build |
||||
|
dist |
||||
|
logs |
||||
|
# Node.js dependencies |
||||
|
node_modules/ |
||||
|
npm-debug.log* |
||||
|
yarn-debug.log* |
||||
|
yarn-error.log* |
||||
|
.pnpm-debug.log* |
||||
|
|
||||
|
# Capacitor build outputs |
||||
|
web/ |
||||
|
ios/ |
||||
|
android/ |
||||
|
electron/app/ |
||||
|
|
||||
|
# Capacitor SQLite plugin data (important!) |
||||
|
capacitor-sqlite/ |
||||
|
|
||||
|
# TypeScript / build output |
||||
|
dist/ |
||||
|
build/ |
||||
|
*.log |
||||
|
|
||||
|
# Development / IDE files |
||||
|
.env.local |
||||
|
.env.development.local |
||||
|
.env.test.local |
||||
|
.env.production.local |
||||
|
|
||||
|
# VS Code |
||||
|
.vscode/ |
||||
|
!.vscode/extensions.json |
||||
|
|
||||
|
# JetBrains IDEs (IntelliJ, WebStorm, etc.) |
||||
|
.idea/ |
||||
|
*.iml |
||||
|
*.iws |
||||
|
|
||||
|
# macOS specific |
||||
|
.DS_Store |
||||
|
*.swp |
||||
|
*~ |
||||
|
*.tmp |
||||
|
|
||||
|
# Windows specific |
||||
|
Thumbs.db |
||||
|
ehthumbs.db |
||||
|
Desktop.ini |
||||
|
$RECYCLE.BIN/ |
After Width: | Height: | Size: 142 KiB |
After Width: | Height: | Size: 121 KiB |
After Width: | Height: | Size: 159 KiB |
After Width: | Height: | Size: 12 KiB |
@@ -0,0 +1,62 @@
|
{ |
||||
|
"appId": "com.timesafari.app", |
||||
|
"appName": "TimeSafari", |
||||
|
"webDir": "dist", |
||||
|
"bundledWebRuntime": false, |
||||
|
"server": { |
||||
|
"cleartext": true, |
||||
|
"androidScheme": "https" |
||||
|
}, |
||||
|
"plugins": { |
||||
|
"App": { |
||||
|
"appUrlOpen": { |
||||
|
"handlers": [ |
||||
|
{ |
||||
|
"url": "timesafari://*", |
||||
|
"autoVerify": true |
||||
|
} |
||||
|
] |
||||
|
} |
||||
|
}, |
||||
|
"SQLite": { |
||||
|
"iosDatabaseLocation": "Library/CapacitorDatabase", |
||||
|
"iosIsEncryption": true, |
||||
|
"iosBiometric": { |
||||
|
"biometricAuth": true, |
||||
|
"biometricTitle": "Biometric login for TimeSafari" |
||||
|
}, |
||||
|
"androidIsEncryption": true, |
||||
|
"androidBiometric": { |
||||
|
"biometricAuth": true, |
||||
|
"biometricTitle": "Biometric login for TimeSafari" |
||||
|
} |
||||
|
}, |
||||
|
"CapacitorSQLite": { |
||||
|
"electronIsEncryption": false, |
||||
|
"electronMacLocation": "~/Library/Application Support/TimeSafari", |
||||
|
"electronWindowsLocation": "C:\\ProgramData\\TimeSafari" |
||||
|
} |
||||
|
}, |
||||
|
"ios": { |
||||
|
"contentInset": "always", |
||||
|
"allowsLinkPreview": true, |
||||
|
"scrollEnabled": true, |
||||
|
"limitsNavigationsToAppBoundDomains": true, |
||||
|
"backgroundColor": "#ffffff", |
||||
|
"allowNavigation": [ |
||||
|
"*.timesafari.app", |
||||
|
"*.jsdelivr.net", |
||||
|
"api.endorser.ch" |
||||
|
] |
||||
|
}, |
||||
|
"android": { |
||||
|
"allowMixedContent": false, |
||||
|
"captureInput": true, |
||||
|
"webContentsDebuggingEnabled": false, |
||||
|
"allowNavigation": [ |
||||
|
"*.timesafari.app", |
||||
|
"*.jsdelivr.net", |
||||
|
"api.endorser.ch" |
||||
|
] |
||||
|
} |
||||
|
} |
@@ -0,0 +1,28 @@
|
{ |
||||
|
"appId": "com.yourdoamnin.yourapp", |
||||
|
"directories": { |
||||
|
"buildResources": "resources" |
||||
|
}, |
||||
|
"files": [ |
||||
|
"assets/**/*", |
||||
|
"build/**/*", |
||||
|
"capacitor.config.*", |
||||
|
"app/**/*" |
||||
|
], |
||||
|
"publish": { |
||||
|
"provider": "github" |
||||
|
}, |
||||
|
"nsis": { |
||||
|
"allowElevation": true, |
||||
|
"oneClick": false, |
||||
|
"allowToChangeInstallationDirectory": true |
||||
|
}, |
||||
|
"win": { |
||||
|
"target": "nsis", |
||||
|
"icon": "assets/appIcon.ico" |
||||
|
}, |
||||
|
"mac": { |
||||
|
"category": "your.app.category.type", |
||||
|
"target": "dmg" |
||||
|
} |
||||
|
} |
@@ -0,0 +1,75 @@
|
/* eslint-disable no-undef */ |
||||
|
/* eslint-disable @typescript-eslint/no-var-requires */ |
||||
|
const cp = require('child_process'); |
||||
|
const chokidar = require('chokidar'); |
||||
|
const electron = require('electron'); |
||||
|
|
||||
|
let child = null; |
||||
|
const npmCmd = process.platform === 'win32' ? 'npm.cmd' : 'npm'; |
||||
|
const reloadWatcher = { |
||||
|
debouncer: null, |
||||
|
ready: false, |
||||
|
watcher: null, |
||||
|
restarting: false, |
||||
|
}; |
||||
|
|
||||
|
///*
|
||||
|
function runBuild() { |
||||
|
return new Promise((resolve, _reject) => { |
||||
|
let tempChild = cp.spawn(npmCmd, ['run', 'build']); |
||||
|
tempChild.once('exit', () => { |
||||
|
resolve(); |
||||
|
}); |
||||
|
tempChild.stdout.pipe(process.stdout); |
||||
|
}); |
||||
|
} |
||||
|
//*/
|
||||
|
|
||||
|
async function spawnElectron() { |
||||
|
if (child !== null) { |
||||
|
child.stdin.pause(); |
||||
|
child.kill(); |
||||
|
child = null; |
||||
|
await runBuild(); |
||||
|
} |
||||
|
child = cp.spawn(electron, ['--inspect=5858', './']); |
||||
|
child.on('exit', () => { |
||||
|
if (!reloadWatcher.restarting) { |
||||
|
process.exit(0); |
||||
|
} |
||||
|
}); |
||||
|
child.stdout.pipe(process.stdout); |
||||
|
} |
||||
|
|
||||
|
function setupReloadWatcher() { |
||||
|
reloadWatcher.watcher = chokidar |
||||
|
.watch('./src/**/*', { |
||||
|
ignored: /[/\\]\./, |
||||
|
persistent: true, |
||||
|
}) |
||||
|
.on('ready', () => { |
||||
|
reloadWatcher.ready = true; |
||||
|
}) |
||||
|
.on('all', (_event, _path) => { |
||||
|
if (reloadWatcher.ready) { |
||||
|
clearTimeout(reloadWatcher.debouncer); |
||||
|
reloadWatcher.debouncer = setTimeout(async () => { |
||||
|
console.log('Restarting'); |
||||
|
reloadWatcher.restarting = true; |
||||
|
await spawnElectron(); |
||||
|
reloadWatcher.restarting = false; |
||||
|
reloadWatcher.ready = false; |
||||
|
clearTimeout(reloadWatcher.debouncer); |
||||
|
reloadWatcher.debouncer = null; |
||||
|
reloadWatcher.watcher = null; |
||||
|
setupReloadWatcher(); |
||||
|
}, 500); |
||||
|
} |
||||
|
}); |
||||
|
} |
||||
|
|
||||
|
(async () => { |
||||
|
await runBuild(); |
||||
|
await spawnElectron(); |
||||
|
setupReloadWatcher(); |
||||
|
})(); |
File diff suppressed because it is too large
@@ -0,0 +1,51 @@
|
{ |
||||
|
"name": "TimeSafari", |
||||
|
"version": "1.0.0", |
||||
|
"description": "TimeSafari Electron App", |
||||
|
"author": { |
||||
|
"name": "", |
||||
|
"email": "" |
||||
|
}, |
||||
|
"repository": { |
||||
|
"type": "git", |
||||
|
"url": "" |
||||
|
}, |
||||
|
"license": "MIT", |
||||
|
"main": "build/src/index.js", |
||||
|
"scripts": { |
||||
|
"build": "tsc && electron-rebuild", |
||||
|
"electron:start-live": "node ./live-runner.js", |
||||
|
"electron:start": "npm run build && electron --inspect=5858 ./", |
||||
|
"electron:pack": "npm run build && electron-builder build --dir -c ./electron-builder.config.json", |
||||
|
"electron:make": "npm run build && electron-builder build -c ./electron-builder.config.json -p always" |
||||
|
}, |
||||
|
"dependencies": { |
||||
|
"@capacitor-community/electron": "^5.0.0", |
||||
|
"@capacitor-community/sqlite": "^6.0.2", |
||||
|
"better-sqlite3-multiple-ciphers": "^11.10.0", |
||||
|
"chokidar": "~3.5.3", |
||||
|
"crypto": "^1.0.1", |
||||
|
"crypto-js": "^4.2.0", |
||||
|
"electron-is-dev": "~2.0.0", |
||||
|
"electron-json-storage": "^4.6.0", |
||||
|
"electron-serve": "~1.1.0", |
||||
|
"electron-unhandled": "~4.0.1", |
||||
|
"electron-updater": "^5.3.0", |
||||
|
"electron-window-state": "^5.0.3", |
||||
|
"jszip": "^3.10.1", |
||||
|
"node-fetch": "^2.6.7" |
||||
|
}, |
||||
|
"devDependencies": { |
||||
|
"@types/better-sqlite3": "^7.6.13", |
||||
|
"@types/crypto-js": "^4.2.2", |
||||
|
"@types/electron-json-storage": "^4.5.4", |
||||
|
"electron": "^26.2.2", |
||||
|
"electron-builder": "~23.6.0", |
||||
|
"source-map-support": "^0.5.21", |
||||
|
"typescript": "^5.0.4" |
||||
|
}, |
||||
|
"keywords": [ |
||||
|
"capacitor", |
||||
|
"electron" |
||||
|
] |
||||
|
} |
@@ -0,0 +1,10 @@
|
/* eslint-disable no-undef */ |
||||
|
/* eslint-disable @typescript-eslint/no-var-requires */ |
||||
|
const electronPublish = require('electron-publish'); |
||||
|
|
||||
|
class Publisher extends electronPublish.Publisher { |
||||
|
async upload(task) { |
||||
|
console.log('electron-publisher-custom', task.file); |
||||
|
} |
||||
|
} |
||||
|
module.exports = Publisher; |
@@ -0,0 +1,110 @@
|
import type { CapacitorElectronConfig } from '@capacitor-community/electron'; |
||||
|
import { getCapacitorElectronConfig, setupElectronDeepLinking } from '@capacitor-community/electron'; |
||||
|
import type { MenuItemConstructorOptions } from 'electron'; |
||||
|
import { app, MenuItem } from 'electron'; |
||||
|
import electronIsDev from 'electron-is-dev'; |
||||
|
import unhandled from 'electron-unhandled'; |
||||
|
import { autoUpdater } from 'electron-updater'; |
||||
|
|
||||
|
import { ElectronCapacitorApp, setupContentSecurityPolicy, setupReloadWatcher } from './setup'; |
||||
|
import { initializeSQLite, setupSQLiteHandlers } from './rt/sqlite-init'; |
||||
|
|
||||
|
// Graceful handling of unhandled errors.
|
||||
|
unhandled(); |
||||
|
|
||||
|
// Define our menu templates (these are optional)
|
||||
|
const trayMenuTemplate: (MenuItemConstructorOptions | MenuItem)[] = [new MenuItem({ label: 'Quit App', role: 'quit' })]; |
||||
|
const appMenuBarMenuTemplate: (MenuItemConstructorOptions | MenuItem)[] = [ |
||||
|
{ role: process.platform === 'darwin' ? 'appMenu' : 'fileMenu' }, |
||||
|
{ role: 'viewMenu' }, |
||||
|
]; |
||||
|
|
||||
|
// Get Config options from capacitor.config
|
||||
|
const capacitorFileConfig: CapacitorElectronConfig = getCapacitorElectronConfig(); |
||||
|
|
||||
|
// Initialize our app. You can pass menu templates into the app here.
|
||||
|
const myCapacitorApp = new ElectronCapacitorApp(capacitorFileConfig, trayMenuTemplate, appMenuBarMenuTemplate); |
||||
|
|
||||
|
// If deeplinking is enabled then we will set it up here.
|
||||
|
if (capacitorFileConfig.electron?.deepLinkingEnabled) { |
||||
|
setupElectronDeepLinking(myCapacitorApp, { |
||||
|
customProtocol: capacitorFileConfig.electron.deepLinkingCustomProtocol ?? 'mycapacitorapp', |
||||
|
}); |
||||
|
} |
||||
|
|
||||
|
// If we are in Dev mode, use the file watcher components.
|
||||
|
if (electronIsDev) { |
||||
|
setupReloadWatcher(myCapacitorApp); |
||||
|
} |
||||
|
|
||||
|
// Run Application
|
||||
|
(async () => { |
||||
|
try { |
||||
|
// Wait for electron app to be ready.
|
||||
|
await app.whenReady(); |
||||
|
|
||||
|
// Security - Set Content-Security-Policy based on whether or not we are in dev mode.
|
||||
|
setupContentSecurityPolicy(myCapacitorApp.getCustomURLScheme()); |
||||
|
|
||||
|
// Initialize SQLite and register handlers BEFORE app initialization
|
||||
|
console.log('[Main] Starting SQLite initialization...'); |
||||
|
try { |
||||
|
// Register handlers first to prevent "no handler" errors
|
||||
|
setupSQLiteHandlers(); |
||||
|
console.log('[Main] SQLite handlers registered'); |
||||
|
|
||||
|
// Then initialize the plugin
|
||||
|
await initializeSQLite(); |
||||
|
console.log('[Main] SQLite plugin initialized successfully'); |
||||
|
} catch (error) { |
||||
|
console.error('[Main] Failed to initialize SQLite:', error); |
||||
|
// Don't proceed with app initialization if SQLite fails
|
||||
|
throw new Error(`SQLite initialization failed: ${error instanceof Error ? error.message : 'Unknown error'}`); |
||||
|
} |
||||
|
|
||||
|
// Initialize our app, build windows, and load content.
|
||||
|
console.log('[Main] Starting app initialization...'); |
||||
|
await myCapacitorApp.init(); |
||||
|
console.log('[Main] App initialization complete'); |
||||
|
|
||||
|
// Check for updates if we are in a packaged app.
|
||||
|
if (!electronIsDev) { |
||||
|
console.log('[Main] Checking for updates...'); |
||||
|
autoUpdater.checkForUpdatesAndNotify(); |
||||
|
} |
||||
|
} catch (error) { |
||||
|
console.error('[Main] Fatal error during app initialization:', error); |
||||
|
// Ensure we notify the user before quitting
|
||||
|
const mainWindow = myCapacitorApp.getMainWindow(); |
||||
|
if (mainWindow && !mainWindow.isDestroyed()) { |
||||
|
mainWindow.webContents.send('app-error', { |
||||
|
type: 'initialization', |
||||
|
error: error instanceof Error ? error.message : 'Unknown error' |
||||
|
}); |
||||
|
// Give the window time to show the error
|
||||
|
setTimeout(() => app.quit(), 5000); |
||||
|
} else { |
||||
|
app.quit(); |
||||
|
} |
||||
|
} |
||||
|
})(); |
||||
|
|
||||
|
// Handle when all of our windows are closed (platforms have their own expectations).
|
||||
|
app.on('window-all-closed', function () { |
||||
|
// On OS X it is common for applications and their menu bar
|
||||
|
// to stay active until the user quits explicitly with Cmd + Q
|
||||
|
if (process.platform !== 'darwin') { |
||||
|
app.quit(); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// When the dock icon is clicked.
|
||||
|
app.on('activate', async function () { |
||||
|
// On OS X it's common to re-create a window in the app when the
|
||||
|
// dock icon is clicked and there are no other windows open.
|
||||
|
if (myCapacitorApp.getMainWindow().isDestroyed()) { |
||||
|
await myCapacitorApp.init(); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Place all ipc or other electron api calls and custom functionality under this line
|
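The main process above reports fatal initialization failures to the renderer over the `app-error` IPC channel, but the receiving side is not part of this file. A minimal sketch of what that listener could look like, assuming it runs where `ipcRenderer` is available (for example the preload script); the channel name and payload shape come from the code above, everything else is illustrative:

```typescript
import { ipcRenderer } from 'electron';

// Hypothetical listener for the 'app-error' messages sent during failed startup.
// The payload mirrors the object sent above: { type: 'initialization', error: string }.
ipcRenderer.on('app-error', (_event, payload: { type: string; error: string }) => {
  // Surfacing the error (dialog, toast, etc.) is application-specific.
  console.error(`[Renderer] ${payload.type} error:`, payload.error);
});
```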
@ -0,0 +1,97 @@ |
/**
 * Preload script for Electron
 * Sets up secure IPC communication between renderer and main process
 *
 * @author Matthew Raymer
 */

import { contextBridge, ipcRenderer } from 'electron';

// Simple logger for preload script
const logger = {
  log: (...args: unknown[]) => console.log('[Preload]', ...args),
  error: (...args: unknown[]) => console.error('[Preload]', ...args),
  info: (...args: unknown[]) => console.info('[Preload]', ...args),
  warn: (...args: unknown[]) => console.warn('[Preload]', ...args),
  debug: (...args: unknown[]) => console.debug('[Preload]', ...args),
};

// Types for SQLite connection options
interface SQLiteConnectionOptions {
  database: string;
  version?: number;
  readOnly?: boolean;
  readonly?: boolean; // Handle both cases
  encryption?: string;
  mode?: string;
  useNative?: boolean;
  [key: string]: unknown; // Allow other properties
}

// Create a proxy for the CapacitorSQLite plugin
const createSQLiteProxy = () => {
  const MAX_RETRIES = 3;
  const RETRY_DELAY = 1000; // 1 second

  const withRetry = async <T>(operation: (...args: unknown[]) => Promise<T>, ...args: unknown[]): Promise<T> => {
    let lastError: Error | null = null;
    for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
      try {
        return await operation(...args);
      } catch (error) {
        lastError = error instanceof Error ? error : new Error(String(error));
        if (attempt < MAX_RETRIES) {
          logger.warn(`SQLite operation failed (attempt ${attempt}/${MAX_RETRIES}), retrying...`, error);
          await new Promise(resolve => setTimeout(resolve, RETRY_DELAY));
        }
      }
    }
    throw new Error(`SQLite operation failed after ${MAX_RETRIES} attempts: ${lastError?.message || 'Unknown error'}`);
  };

  const wrapOperation = (method: string) => {
    return async (...args: unknown[]): Promise<unknown> => {
      try {
        // For createConnection, ensure readOnly is false
        if (method === 'create-connection') {
          const options = args[0] as SQLiteConnectionOptions;
          if (options && typeof options === 'object') {
            // Set readOnly to false and ensure mode is rwc
            options.readOnly = false;
            options.mode = 'rwc';
            // Remove any lowercase readonly property if it exists
            delete options.readonly;
          }
        }
        return await withRetry(ipcRenderer.invoke, 'sqlite-' + method, ...args);
      } catch (error) {
        logger.error(`SQLite ${method} failed:`, error);
        throw new Error(`Database operation failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
      }
    };
  };

  // Create a proxy that matches the CapacitorSQLite interface
  return {
    echo: wrapOperation('echo'),
    createConnection: wrapOperation('create-connection'),
    closeConnection: wrapOperation('close-connection'),
    execute: wrapOperation('execute'),
    query: wrapOperation('query'),
    run: wrapOperation('run'),
    isAvailable: wrapOperation('is-available'),
    getPlatform: () => Promise.resolve('electron'),
    // Add other methods as needed
  };
};

// Expose only the CapacitorSQLite proxy
contextBridge.exposeInMainWorld('CapacitorSQLite', createSQLiteProxy());

// Log startup
logger.log('Script starting...');

// Handle window load
window.addEventListener('load', () => {
  logger.log('Script complete');
});
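For context, this is roughly how renderer code could use the proxy exposed above. An illustrative sketch, not part of this changeset: the `timesafari` database name and `accounts` table come from the initialization and migration modules later in this diff, while the helper itself is hypothetical.

```typescript
// Hypothetical renderer-side helper built on the proxy exposed by the preload script.
// Each method forwards over IPC to the main-process handlers registered in sqlite-init.
const sqlite = (window as any).CapacitorSQLite;

export async function listAccountDids(): Promise<string[]> {
  if (!(await sqlite.isAvailable())) {
    throw new Error('SQLite is not available in this environment');
  }
  // readOnly/mode are normalized by wrapOperation() above, so they can be omitted here.
  await sqlite.createConnection({ database: 'timesafari', version: 1 });
  const result = await sqlite.query({
    database: 'timesafari',
    statement: 'SELECT did FROM accounts ORDER BY dateCreated DESC;',
  });
  return (result?.values ?? []).map((row: { did: string }) => row.did);
}
```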
@ -0,0 +1,6 @@ |
/* eslint-disable @typescript-eslint/no-var-requires */
const CapacitorCommunitySqlite = require('../../../node_modules/@capacitor-community/sqlite/electron/dist/plugin.js');

module.exports = {
  CapacitorCommunitySqlite,
}
@ -0,0 +1,88 @@ |
|||||
|
import { randomBytes } from 'crypto'; |
||||
|
import { ipcRenderer, contextBridge } from 'electron'; |
||||
|
import { EventEmitter } from 'events'; |
||||
|
|
||||
|
////////////////////////////////////////////////////////
|
||||
|
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
|
const plugins = require('./electron-plugins'); |
||||
|
|
||||
|
const randomId = (length = 5) => randomBytes(length).toString('hex'); |
||||
|
|
||||
|
const contextApi: { |
||||
|
[plugin: string]: { [functionName: string]: () => Promise<any> }; |
||||
|
} = {}; |
||||
|
|
||||
|
Object.keys(plugins).forEach((pluginKey) => { |
||||
|
Object.keys(plugins[pluginKey]) |
||||
|
.filter((className) => className !== 'default') |
||||
|
.forEach((classKey) => { |
||||
|
const functionList = Object.getOwnPropertyNames(plugins[pluginKey][classKey].prototype).filter( |
||||
|
(v) => v !== 'constructor' |
||||
|
); |
||||
|
|
||||
|
if (!contextApi[classKey]) { |
||||
|
contextApi[classKey] = {}; |
||||
|
} |
||||
|
|
||||
|
functionList.forEach((functionName) => { |
||||
|
if (!contextApi[classKey][functionName]) { |
||||
|
contextApi[classKey][functionName] = (...args) => ipcRenderer.invoke(`${classKey}-${functionName}`, ...args); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Events
|
||||
|
if (plugins[pluginKey][classKey].prototype instanceof EventEmitter) { |
||||
|
const listeners: { [key: string]: { type: string; listener: (...args: any[]) => void } } = {}; |
||||
|
const listenersOfTypeExist = (type) => |
||||
|
!!Object.values(listeners).find((listenerObj) => listenerObj.type === type); |
||||
|
|
||||
|
Object.assign(contextApi[classKey], { |
||||
|
addListener(type: string, callback: (...args) => void) { |
||||
|
const id = randomId(); |
||||
|
|
||||
|
// Deduplicate events
|
||||
|
if (!listenersOfTypeExist(type)) { |
||||
|
ipcRenderer.send(`event-add-${classKey}`, type); |
||||
|
} |
||||
|
|
||||
|
const eventHandler = (_, ...args) => callback(...args); |
||||
|
|
||||
|
ipcRenderer.addListener(`event-${classKey}-${type}`, eventHandler); |
||||
|
listeners[id] = { type, listener: eventHandler }; |
||||
|
|
||||
|
return id; |
||||
|
}, |
||||
|
removeListener(id: string) { |
||||
|
if (!listeners[id]) { |
||||
|
throw new Error('Invalid id'); |
||||
|
} |
||||
|
|
||||
|
const { type, listener } = listeners[id]; |
||||
|
|
||||
|
ipcRenderer.removeListener(`event-${classKey}-${type}`, listener); |
||||
|
|
||||
|
delete listeners[id]; |
||||
|
|
||||
|
if (!listenersOfTypeExist(type)) { |
||||
|
ipcRenderer.send(`event-remove-${classKey}-${type}`); |
||||
|
} |
||||
|
}, |
||||
|
removeAllListeners(type: string) { |
||||
|
Object.entries(listeners).forEach(([id, listenerObj]) => { |
||||
|
if (!type || listenerObj.type === type) { |
||||
|
ipcRenderer.removeListener(`event-${classKey}-${listenerObj.type}`, listenerObj.listener); |
||||
|
ipcRenderer.send(`event-remove-${classKey}-${listenerObj.type}`); |
||||
|
delete listeners[id]; |
||||
|
} |
||||
|
}); |
||||
|
}, |
||||
|
}); |
||||
|
} |
||||
|
}); |
||||
|
}); |
||||
|
|
||||
|
contextBridge.exposeInMainWorld('CapacitorCustomPlatform', { |
||||
|
name: 'electron', |
||||
|
plugins: contextApi, |
||||
|
}); |
||||
|
////////////////////////////////////////////////////////
|
@ -0,0 +1,77 @@ |
/**
 * Structured logging system for TimeSafari
 *
 * Provides consistent logging across the application with:
 * - Timestamp tracking
 * - Log levels (debug, info, warn, error)
 * - Structured data support
 * - Component tagging
 *
 * @author Matthew Raymer <matthew.raymer@anomalistdesign.com>
 * @version 1.0.0
 * @since 2025-06-01
 */

// Log levels
export enum LogLevel {
  DEBUG = 'DEBUG',
  INFO = 'INFO',
  WARN = 'WARN',
  ERROR = 'ERROR'
}

// Log entry interface
interface LogEntry {
  timestamp: string;
  level: LogLevel;
  component: string;
  message: string;
  data?: unknown;
}

// Format log entry
const formatLogEntry = (entry: LogEntry): string => {
  const { timestamp, level, component, message, data } = entry;
  const dataStr = data ? ` ${JSON.stringify(data, null, 2)}` : '';
  return `[${timestamp}] [${level}] [${component}] ${message}${dataStr}`;
};

// Create logger for a specific component
export const createLogger = (component: string) => {
  const log = (level: LogLevel, message: string, data?: unknown) => {
    const entry: LogEntry = {
      timestamp: new Date().toISOString(),
      level,
      component,
      message,
      data
    };

    const formatted = formatLogEntry(entry);

    switch (level) {
      case LogLevel.DEBUG:
        console.debug(formatted);
        break;
      case LogLevel.INFO:
        console.info(formatted);
        break;
      case LogLevel.WARN:
        console.warn(formatted);
        break;
      case LogLevel.ERROR:
        console.error(formatted);
        break;
    }
  };

  return {
    debug: (message: string, data?: unknown) => log(LogLevel.DEBUG, message, data),
    info: (message: string, data?: unknown) => log(LogLevel.INFO, message, data),
    warn: (message: string, data?: unknown) => log(LogLevel.WARN, message, data),
    error: (message: string, data?: unknown) => log(LogLevel.ERROR, message, data)
  };
};

// Create default logger for SQLite operations
export const logger = createLogger('SQLite');
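For illustration, the resulting API and output format look like this (the log line is an example of what `formatLogEntry` produces, not captured output):

```typescript
const migrationLogger = createLogger('Migrations');
migrationLogger.info('Applying migration', { version: 1, name: '001_initial_accounts' });
// Emits something like:
// [2025-06-01T12:00:00.000Z] [INFO] [Migrations] Applying migration {
//   "version": 1,
//   "name": "001_initial_accounts"
// }
```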
@ -0,0 +1,584 @@ |
|||||
|
/** |
||||
|
* SQLite Initialization and Management for TimeSafari Electron |
||||
|
* |
||||
|
* This module handles the complete lifecycle of SQLite database initialization, |
||||
|
* connection management, and IPC communication in the TimeSafari Electron app. |
||||
|
* |
||||
|
* Key Features: |
||||
|
* - Database path management with proper permissions |
||||
|
* - Plugin initialization and state verification |
||||
|
* - Connection lifecycle management |
||||
|
* - PRAGMA configuration for optimal performance |
||||
|
* - Migration system integration |
||||
|
* - Error handling and recovery |
||||
|
* - IPC communication layer |
||||
|
* |
||||
|
* Database Configuration: |
||||
|
* - Uses WAL journal mode for better concurrency |
||||
|
* - Configures optimal PRAGMA settings |
||||
|
* - Implements connection pooling |
||||
|
* - Handles encryption (when enabled) |
||||
|
* |
||||
|
* State Management: |
||||
|
* - Tracks plugin initialization state |
||||
|
* - Monitors connection health |
||||
|
* - Manages transaction state |
||||
|
* - Implements recovery mechanisms |
||||
|
* |
||||
|
* Error Handling: |
||||
|
* - Custom SQLiteError class for detailed error tracking |
||||
|
* - Comprehensive error logging |
||||
|
* - Automatic recovery attempts |
||||
|
* - State verification before operations |
||||
|
* |
||||
|
* Security: |
||||
|
* - Proper file permissions (0o755) |
||||
|
* - Write access verification |
||||
|
* - Connection state validation |
||||
|
* - Transaction safety |
||||
|
* |
||||
|
* Performance: |
||||
|
* - WAL mode for better concurrency |
||||
|
* - Optimized PRAGMA settings |
||||
|
* - Connection pooling |
||||
|
* - Efficient state management |
||||
|
* |
||||
|
* @author Matthew Raymer <matthew.raymer@anomalistdesign.com> |
||||
|
* @version 1.0.0 |
||||
|
* @since 2025-06-01 |
||||
|
*/ |
||||
|
|
||||
|
import { app, ipcMain } from 'electron'; |
||||
|
import { CapacitorSQLite } from '@capacitor-community/sqlite/electron/dist/plugin.js'; |
||||
|
import * as SQLiteModule from '@capacitor-community/sqlite/electron/dist/plugin.js'; |
||||
|
import fs from 'fs'; |
||||
|
import path from 'path'; |
||||
|
import os from 'os'; |
||||
|
import { runMigrations } from './sqlite-migrations'; |
||||
|
import { logger } from './logger'; |
||||
|
|
||||
|
// Types for state management
|
||||
|
interface PluginState { |
||||
|
isInitialized: boolean; |
||||
|
isAvailable: boolean; |
||||
|
lastVerified: Date | null; |
||||
|
lastError: Error | null; |
||||
|
instance: any | null; |
||||
|
} |
||||
|
|
||||
|
interface TransactionState { |
||||
|
isActive: boolean; |
||||
|
lastVerified: Date | null; |
||||
|
database: string | null; |
||||
|
} |
||||
|
|
||||
|
// State tracking
|
||||
|
let pluginState: PluginState = { |
||||
|
isInitialized: false, |
||||
|
isAvailable: false, |
||||
|
lastVerified: null, |
||||
|
lastError: null, |
||||
|
instance: null |
||||
|
}; |
||||
|
|
||||
|
let transactionState: TransactionState = { |
||||
|
isActive: false, |
||||
|
lastVerified: null, |
||||
|
database: null |
||||
|
}; |
||||
|
|
||||
|
// Constants
|
||||
|
const MAX_RECOVERY_ATTEMPTS = 3; |
||||
|
const RECOVERY_DELAY_MS = 1000; |
||||
|
const VERIFICATION_TIMEOUT_MS = 5000; |
||||
|
|
||||
|
// Error handling
|
||||
|
class SQLiteError extends Error { |
||||
|
constructor( |
||||
|
message: string, |
||||
|
public context: string, |
||||
|
public originalError?: unknown |
||||
|
) { |
||||
|
super(message); |
||||
|
this.name = 'SQLiteError'; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
const handleError = (error: unknown, context: string): SQLiteError => { |
||||
|
const errorMessage = error instanceof Error |
||||
|
? error.message |
||||
|
: 'Unknown error occurred'; |
||||
|
const errorStack = error instanceof Error |
||||
|
? error.stack |
||||
|
: undefined; |
||||
|
|
||||
|
logger.error(`Error in ${context}:`, { |
||||
|
message: errorMessage, |
||||
|
stack: errorStack, |
||||
|
context, |
||||
|
timestamp: new Date().toISOString() |
||||
|
}); |
||||
|
|
||||
|
return new SQLiteError(`${context} failed: ${errorMessage}`, context, error); |
||||
|
}; |
||||
|
|
||||
|
// Add delay utility with timeout
|
||||
|
const delay = (ms: number, timeoutMs: number = VERIFICATION_TIMEOUT_MS): Promise<void> => { |
||||
|
return new Promise((resolve, reject) => { |
||||
|
const timeout = setTimeout(() => { |
||||
|
reject(new SQLiteError('Operation timed out', 'delay')); |
||||
|
}, timeoutMs); |
||||
|
|
||||
|
setTimeout(() => { |
||||
|
clearTimeout(timeout); |
||||
|
resolve(); |
||||
|
}, ms); |
||||
|
}); |
||||
|
}; |
||||
|
|
||||
|
// Plugin state verification
|
||||
|
const verifyPluginState = async (): Promise<boolean> => { |
||||
|
if (!pluginState.instance || !pluginState.isInitialized) { |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
try { |
||||
|
// Test plugin responsiveness
|
||||
|
const echoResult = await pluginState.instance.echo({ value: 'test' }); |
||||
|
if (!echoResult || echoResult.value !== 'test') { |
||||
|
throw new SQLiteError('Plugin echo test failed', 'verifyPluginState'); |
||||
|
} |
||||
|
|
||||
|
pluginState.isAvailable = true; |
||||
|
pluginState.lastVerified = new Date(); |
||||
|
pluginState.lastError = null; |
||||
|
|
||||
|
return true; |
||||
|
} catch (error) { |
||||
|
pluginState.isAvailable = false; |
||||
|
pluginState.lastError = handleError(error, 'verifyPluginState'); |
||||
|
return false; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Transaction state verification
|
||||
|
const verifyTransactionState = async (database: string): Promise<boolean> => { |
||||
|
if (!pluginState.instance || !pluginState.isAvailable) { |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
try { |
||||
|
// Check if we're in a transaction
|
||||
|
const isActive = await pluginState.instance.isTransactionActive({ database }); |
||||
|
|
||||
|
transactionState.isActive = isActive; |
||||
|
transactionState.lastVerified = new Date(); |
||||
|
transactionState.database = database; |
||||
|
|
||||
|
return true; |
||||
|
} catch (error) { |
||||
|
transactionState.isActive = false; |
||||
|
transactionState.lastVerified = new Date(); |
||||
|
transactionState.database = null; |
||||
|
|
||||
|
logger.error('Transaction state verification failed:', error); |
||||
|
return false; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Plugin initialization
|
||||
|
const initializePlugin = async (): Promise<boolean> => { |
||||
|
logger.info('Starting plugin initialization'); |
||||
|
|
||||
|
try { |
||||
|
// Create plugin instance
|
||||
|
let rawPlugin; |
||||
|
if (SQLiteModule.default?.CapacitorSQLite) { |
||||
|
logger.debug('Using default export CapacitorSQLite'); |
||||
|
rawPlugin = new SQLiteModule.default.CapacitorSQLite(); |
||||
|
} else { |
||||
|
logger.debug('Using direct CapacitorSQLite class'); |
||||
|
rawPlugin = new CapacitorSQLite(); |
||||
|
} |
||||
|
|
||||
|
// Verify instance
|
||||
|
if (!rawPlugin || typeof rawPlugin !== 'object') { |
||||
|
throw new SQLiteError('Invalid plugin instance created', 'initializePlugin'); |
||||
|
} |
||||
|
|
||||
|
// Test plugin functionality
|
||||
|
const echoResult = await rawPlugin.echo({ value: 'test' }); |
||||
|
if (!echoResult || echoResult.value !== 'test') { |
||||
|
throw new SQLiteError('Plugin echo test failed', 'initializePlugin'); |
||||
|
} |
||||
|
|
||||
|
// Update state only after successful verification
|
||||
|
pluginState = { |
||||
|
isInitialized: true, |
||||
|
isAvailable: true, |
||||
|
lastVerified: new Date(), |
||||
|
lastError: null, |
||||
|
instance: rawPlugin |
||||
|
}; |
||||
|
|
||||
|
logger.info('Plugin initialized successfully'); |
||||
|
return true; |
||||
|
} catch (error) { |
||||
|
pluginState = { |
||||
|
isInitialized: false, |
||||
|
isAvailable: false, |
||||
|
lastVerified: new Date(), |
||||
|
lastError: handleError(error, 'initializePlugin'), |
||||
|
instance: null |
||||
|
}; |
||||
|
|
||||
|
logger.error('Plugin initialization failed:', { |
||||
|
error: pluginState.lastError, |
||||
|
timestamp: new Date().toISOString() |
||||
|
}); |
||||
|
|
||||
|
return false; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Recovery mechanism
|
||||
|
const recoverPluginState = async (attempt: number = 1): Promise<boolean> => { |
||||
|
logger.info(`Attempting plugin state recovery (attempt ${attempt}/${MAX_RECOVERY_ATTEMPTS})`); |
||||
|
|
||||
|
if (attempt > MAX_RECOVERY_ATTEMPTS) { |
||||
|
logger.error('Max recovery attempts reached'); |
||||
|
return false; |
||||
|
} |
||||
|
|
||||
|
try { |
||||
|
// Cleanup existing connection if any
|
||||
|
if (pluginState.instance) { |
||||
|
try { |
||||
|
await pluginState.instance.closeConnection({ database: 'timesafari' }); |
||||
|
logger.debug('Closed existing database connection during recovery'); |
||||
|
} catch (error) { |
||||
|
logger.warn('Error closing connection during recovery:', error); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Reset state
|
||||
|
pluginState = { |
||||
|
isInitialized: false, |
||||
|
isAvailable: false, |
||||
|
lastVerified: new Date(), |
||||
|
lastError: null, |
||||
|
instance: null |
||||
|
}; |
||||
|
|
||||
|
// Wait before retry with exponential backoff
|
||||
|
const backoffDelay = RECOVERY_DELAY_MS * Math.pow(2, attempt - 1); |
||||
|
await delay(backoffDelay); |
||||
|
|
||||
|
// Reinitialize
|
||||
|
const success = await initializePlugin(); |
||||
|
if (!success && attempt < MAX_RECOVERY_ATTEMPTS) { |
||||
|
return recoverPluginState(attempt + 1); |
||||
|
} |
||||
|
|
||||
|
return success; |
||||
|
} catch (error) { |
||||
|
logger.error('Plugin recovery failed:', error); |
||||
|
if (attempt < MAX_RECOVERY_ATTEMPTS) { |
||||
|
return recoverPluginState(attempt + 1); |
||||
|
} |
||||
|
return false; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Initializes database paths and ensures proper permissions |
||||
|
* |
||||
|
* This function: |
||||
|
* 1. Creates the database directory if it doesn't exist |
||||
|
* 2. Sets proper permissions (0o755) |
||||
|
* 3. Verifies write access |
||||
|
* 4. Returns the absolute path to the database directory |
||||
|
* |
||||
|
* @returns {Promise<string>} Absolute path to database directory |
||||
|
* @throws {SQLiteError} If directory creation or permission setting fails |
||||
|
*/ |
||||
|
const initializeDatabasePaths = async (): Promise<string> => { |
||||
|
try { |
||||
|
// Get the absolute app data directory
|
||||
|
const appDataDir = path.join(os.homedir(), 'Databases', 'TimeSafari'); |
||||
|
logger.info('App data directory:', appDataDir); |
||||
|
|
||||
|
// Ensure directory exists with proper permissions
|
||||
|
if (!fs.existsSync(appDataDir)) { |
||||
|
await fs.promises.mkdir(appDataDir, { |
||||
|
recursive: true, |
||||
|
mode: 0o755 |
||||
|
}); |
||||
|
} else { |
||||
|
await fs.promises.chmod(appDataDir, 0o755); |
||||
|
} |
||||
|
|
||||
|
// Verify directory permissions
|
||||
|
const stats = await fs.promises.stat(appDataDir); |
||||
|
logger.info('Directory permissions:', { |
||||
|
mode: stats.mode.toString(8), |
||||
|
uid: stats.uid, |
||||
|
gid: stats.gid, |
||||
|
isDirectory: stats.isDirectory(), |
||||
|
isWritable: !!(stats.mode & 0o200) |
||||
|
}); |
||||
|
|
||||
|
// Test write access
|
||||
|
const testFile = path.join(appDataDir, '.write-test'); |
||||
|
await fs.promises.writeFile(testFile, 'test'); |
||||
|
await fs.promises.unlink(testFile); |
||||
|
|
||||
|
return appDataDir; |
||||
|
} catch (error) { |
||||
|
throw handleError(error, 'initializeDatabasePaths'); |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Main SQLite initialization function |
||||
|
* |
||||
|
* Orchestrates the complete database initialization process: |
||||
|
* 1. Sets up database paths |
||||
|
* 2. Initializes the SQLite plugin |
||||
|
* 3. Creates and verifies database connection |
||||
|
* 4. Configures database PRAGMAs |
||||
|
* 5. Runs database migrations |
||||
|
* 6. Handles errors and recovery |
||||
|
* |
||||
|
* Database Configuration: |
||||
|
* - Uses WAL journal mode |
||||
|
* - Enables foreign keys |
||||
|
* - Sets optimal page size and cache |
||||
|
* - Configures busy timeout |
||||
|
* |
||||
|
* Error Recovery: |
||||
|
* - Implements exponential backoff |
||||
|
* - Verifies plugin state |
||||
|
* - Attempts connection recovery |
||||
|
* - Maintains detailed error logs |
||||
|
* |
||||
|
* @throws {SQLiteError} If initialization fails and recovery is unsuccessful |
||||
|
*/ |
||||
|
export async function initializeSQLite(): Promise<void> { |
||||
|
logger.info('Starting SQLite initialization'); |
||||
|
|
||||
|
try { |
||||
|
// Initialize database paths
|
||||
|
const dbDir = await initializeDatabasePaths(); |
||||
|
const dbPath = path.join(dbDir, 'timesafariSQLite.db'); |
||||
|
|
||||
|
// Initialize plugin
|
||||
|
if (!await initializePlugin()) { |
||||
|
throw new SQLiteError('Plugin initialization failed', 'initializeSQLite'); |
||||
|
} |
||||
|
|
||||
|
// Verify plugin state
|
||||
|
if (!await verifyPluginState()) { |
||||
|
throw new SQLiteError('Plugin state verification failed', 'initializeSQLite'); |
||||
|
} |
||||
|
|
||||
|
// Set up database connection
|
||||
|
const connectionOptions = { |
||||
|
database: 'timesafari', |
||||
|
version: 1, |
||||
|
readOnly: false, |
||||
|
encryption: 'no-encryption', |
||||
|
useNative: true, |
||||
|
mode: 'rwc' |
||||
|
}; |
||||
|
|
||||
|
// Create and verify connection
|
||||
|
logger.debug('Creating database connection:', connectionOptions); |
||||
|
await pluginState.instance.createConnection(connectionOptions); |
||||
|
await delay(500); // Wait for connection registration
|
||||
|
|
||||
|
const isRegistered = await pluginState.instance.isDatabase({ |
||||
|
database: connectionOptions.database |
||||
|
}); |
||||
|
|
||||
|
if (!isRegistered) { |
||||
|
throw new SQLiteError('Database not registered', 'initializeSQLite'); |
||||
|
} |
||||
|
|
||||
|
// Open database
|
||||
|
logger.debug('Opening database with options:', connectionOptions); |
||||
|
await pluginState.instance.open({ |
||||
|
...connectionOptions, |
||||
|
mode: 'rwc' |
||||
|
}); |
||||
|
|
||||
|
// Set PRAGMAs with detailed logging
|
||||
|
const pragmaStatements = [ |
||||
|
'PRAGMA foreign_keys = ON;', |
||||
|
'PRAGMA journal_mode = WAL;', // Changed to WAL for better concurrency
|
||||
|
'PRAGMA synchronous = NORMAL;', |
||||
|
'PRAGMA temp_store = MEMORY;', |
||||
|
'PRAGMA page_size = 4096;', |
||||
|
'PRAGMA cache_size = 2000;', |
||||
|
'PRAGMA busy_timeout = 15000;', // Increased to 15 seconds
|
||||
|
'PRAGMA wal_autocheckpoint = 1000;' // Added WAL checkpoint setting
|
||||
|
]; |
||||
|
|
||||
|
logger.debug('Setting database PRAGMAs'); |
||||
|
for (const statement of pragmaStatements) { |
||||
|
try { |
||||
|
logger.debug('Executing PRAGMA:', statement); |
||||
|
const result = await pluginState.instance.execute({ |
||||
|
database: connectionOptions.database, |
||||
|
statements: statement, |
||||
|
transaction: false |
||||
|
}); |
||||
|
logger.debug('PRAGMA result:', { statement, result }); |
||||
|
} catch (error) { |
||||
|
logger.error('PRAGMA execution failed:', { |
||||
|
statement, |
||||
|
error: error instanceof Error ? { |
||||
|
message: error.message, |
||||
|
stack: error.stack, |
||||
|
name: error.name |
||||
|
} : error |
||||
|
}); |
||||
|
throw error; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Run migrations with enhanced error logging
|
||||
|
logger.info('Starting database migrations'); |
||||
|
const migrationResults = await runMigrations( |
||||
|
pluginState.instance, |
||||
|
connectionOptions.database |
||||
|
); |
||||
|
|
||||
|
// Check migration results with detailed logging
|
||||
|
const failedMigrations = migrationResults.filter(r => !r.success); |
||||
|
if (failedMigrations.length > 0) { |
||||
|
logger.error('Migration failures:', { |
||||
|
totalMigrations: migrationResults.length, |
||||
|
failedCount: failedMigrations.length, |
||||
|
failures: failedMigrations.map(f => ({ |
||||
|
version: f.version, |
||||
|
name: f.name, |
||||
|
error: f.error instanceof Error ? { |
||||
|
message: f.error.message, |
||||
|
stack: f.error.stack, |
||||
|
name: f.error.name |
||||
|
} : f.error, |
||||
|
state: f.state |
||||
|
})) |
||||
|
}); |
||||
|
throw new SQLiteError( |
||||
|
'Database migrations failed', |
||||
|
'initializeSQLite', |
||||
|
failedMigrations |
||||
|
); |
||||
|
} |
||||
|
|
||||
|
logger.info('SQLite initialization completed successfully'); |
||||
|
} catch (error) { |
||||
|
const sqliteError = handleError(error, 'initializeSQLite'); |
||||
|
logger.error('SQLite initialization failed:', { |
||||
|
error: sqliteError, |
||||
|
pluginState: { |
||||
|
isInitialized: pluginState.isInitialized, |
||||
|
isAvailable: pluginState.isAvailable, |
||||
|
lastVerified: pluginState.lastVerified, |
||||
|
lastError: pluginState.lastError |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Attempt recovery
|
||||
|
if (await recoverPluginState()) { |
||||
|
logger.info('Recovery successful, retrying initialization'); |
||||
|
return initializeSQLite(); |
||||
|
} |
||||
|
|
||||
|
throw sqliteError; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Sets up IPC handlers for SQLite operations |
||||
|
* |
||||
|
* Registers handlers for: |
||||
|
* - Plugin availability checks |
||||
|
* - Connection management |
||||
|
* - Query execution |
||||
|
* - Error retrieval |
||||
|
* |
||||
|
* Each handler includes: |
||||
|
* - State verification |
||||
|
* - Error handling |
||||
|
* - Detailed logging |
||||
|
* - Transaction safety |
||||
|
* |
||||
|
* Security: |
||||
|
* - Validates all incoming requests |
||||
|
* - Verifies plugin state |
||||
|
* - Maintains connection isolation |
||||
|
* |
||||
|
* @throws {Error} If handler registration fails |
||||
|
*/ |
||||
|
export function setupSQLiteHandlers(): void { |
||||
|
// Remove existing handlers
|
||||
|
const handlers = [ |
||||
|
'sqlite-is-available', |
||||
|
'sqlite-echo', |
||||
|
'sqlite-create-connection', |
||||
|
'sqlite-execute', |
||||
|
'sqlite-query', |
||||
|
'sqlite-close-connection', |
||||
|
'sqlite-get-error' |
||||
|
]; |
||||
|
|
||||
|
handlers.forEach(handler => { |
||||
|
try { |
||||
|
ipcMain.removeHandler(handler); |
||||
|
} catch (error) { |
||||
|
logger.warn(`Error removing handler ${handler}:`, error); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Register handlers
|
||||
|
ipcMain.handle('sqlite-is-available', async () => { |
||||
|
try { |
||||
|
const isAvailable = await verifyPluginState(); |
||||
|
logger.debug('Plugin availability check:', { isAvailable }); |
||||
|
return isAvailable; |
||||
|
} catch (error) { |
||||
|
logger.error('Error checking plugin availability:', error); |
||||
|
return false; |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
ipcMain.handle('sqlite-get-error', async () => { |
||||
|
return pluginState.lastError ? { |
||||
|
message: pluginState.lastError.message, |
||||
|
stack: pluginState.lastError.stack, |
||||
|
name: pluginState.lastError.name, |
||||
|
context: (pluginState.lastError as SQLiteError).context |
||||
|
} : null; |
||||
|
}); |
||||
|
|
||||
|
// Add other handlers with proper state verification
|
||||
|
ipcMain.handle('sqlite-create-connection', async (_event, options) => { |
||||
|
try { |
||||
|
if (!await verifyPluginState()) { |
||||
|
throw new SQLiteError('Plugin not available', 'sqlite-create-connection'); |
||||
|
} |
||||
|
|
||||
|
// ... rest of connection creation logic ...
|
||||
|
|
||||
|
} catch (error) { |
||||
|
throw handleError(error, 'sqlite-create-connection'); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// ... other handlers ...
|
||||
|
|
||||
|
logger.info('SQLite IPC handlers registered successfully'); |
||||
|
} |
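The `sqlite-execute`, `sqlite-query`, and remaining connection handlers are elided above. As a rough sketch only (not the actual implementation in this changeset), a query handler following the same pattern as `sqlite-create-connection` might look like this, reusing `verifyPluginState`, `pluginState`, `SQLiteError`, and `handleError` from this module:

```typescript
ipcMain.handle('sqlite-query', async (_event, options: { database: string; statement: string; values?: unknown[] }) => {
  try {
    if (!await verifyPluginState()) {
      throw new SQLiteError('Plugin not available', 'sqlite-query');
    }
    // Delegate to the underlying @capacitor-community/sqlite Electron plugin instance.
    return await pluginState.instance.query(options);
  } catch (error) {
    throw handleError(error, 'sqlite-query');
  }
});
```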
@ -0,0 +1,950 @@ |
|||||
|
/** |
||||
|
* SQLite Migration System for TimeSafari |
||||
|
* |
||||
|
* A robust migration system for managing database schema changes in the TimeSafari |
||||
|
* application. Provides versioned migrations with transaction safety, rollback |
||||
|
* support, and detailed logging. |
||||
|
* |
||||
|
* Core Features: |
||||
|
* - Versioned migrations with tracking |
||||
|
* - Atomic transactions per migration |
||||
|
* - Comprehensive error handling |
||||
|
* - SQL parsing and validation |
||||
|
* - State verification and recovery |
||||
|
* - Detailed logging and debugging |
||||
|
* |
||||
|
* Migration Process: |
||||
|
* 1. Version tracking via schema_version table |
||||
|
* 2. Transaction-based execution |
||||
|
* 3. Automatic rollback on failure |
||||
|
* 4. State verification before/after |
||||
|
* 5. Detailed error logging |
||||
|
* |
||||
|
* SQL Processing: |
||||
|
* - Handles single-line (--) and multi-line comments |
||||
|
* - Validates SQL statements |
||||
|
* - Proper statement separation |
||||
|
* - SQL injection prevention |
||||
|
* - Parameter binding safety |
||||
|
* |
||||
|
* Transaction Management: |
||||
|
* - Single transaction per migration |
||||
|
* - Automatic rollback on failure |
||||
|
* - State verification |
||||
|
* - Deadlock prevention |
||||
|
* - Connection isolation |
||||
|
* |
||||
|
* Error Handling: |
||||
|
* - Detailed error reporting |
||||
|
* - SQL validation |
||||
|
* - Transaction state tracking |
||||
|
* - Recovery mechanisms |
||||
|
* - Debug logging |
||||
|
* |
||||
|
* Security: |
||||
|
* - SQL injection prevention |
||||
|
* - Parameter validation |
||||
|
* - Transaction isolation |
||||
|
* - State verification |
||||
|
* - Error sanitization |
||||
|
* |
||||
|
* Performance: |
||||
|
* - Efficient SQL parsing |
||||
|
* - Optimized transactions |
||||
|
* - Minimal locking |
||||
|
* - Connection pooling |
||||
|
* - Statement reuse |
||||
|
* |
||||
|
* @author Matthew Raymer <matthew.raymer@anomalistdesign.com> |
||||
|
* @version 1.0.0 |
||||
|
* @since 2025-06-01 |
||||
|
*/ |
||||
|
|
||||
|
import { CapacitorSQLite } from '@capacitor-community/sqlite/electron/dist/plugin.js'; |
||||
|
import { logger } from './logger'; |
||||
|
|
||||
|
// Types for migration system
|
||||
|
interface Migration { |
||||
|
version: number; |
||||
|
name: string; |
||||
|
description: string; |
||||
|
sql: string; |
||||
|
rollback?: string; |
||||
|
} |
||||
|
|
||||
|
interface MigrationResult { |
||||
|
success: boolean; |
||||
|
version: number; |
||||
|
name: string; |
||||
|
error?: Error; |
||||
|
state?: { |
||||
|
plugin: { |
||||
|
isAvailable: boolean; |
||||
|
lastChecked: Date; |
||||
|
}; |
||||
|
transaction: { |
||||
|
isActive: boolean; |
||||
|
lastVerified: Date; |
||||
|
}; |
||||
|
}; |
||||
|
} |
||||
|
|
||||
|
interface MigrationState { |
||||
|
currentVersion: number; |
||||
|
lastMigration: string; |
||||
|
lastApplied: Date; |
||||
|
isDirty: boolean; |
||||
|
} |
||||
|
|
||||
|
// Constants
|
||||
|
const MIGRATIONS_TABLE = ` |
||||
|
CREATE TABLE IF NOT EXISTS schema_version ( |
||||
|
version INTEGER NOT NULL, |
||||
|
name TEXT NOT NULL, |
||||
|
description TEXT, |
||||
|
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, |
||||
|
checksum TEXT, |
||||
|
is_dirty BOOLEAN DEFAULT FALSE, |
||||
|
error_message TEXT, |
||||
|
error_stack TEXT, |
||||
|
error_context TEXT, |
||||
|
PRIMARY KEY (version) |
||||
|
);`;
|
||||
|
|
||||
|
// Constants for retry logic
|
||||
|
const MAX_RETRY_ATTEMPTS = 3; |
||||
|
const RETRY_DELAY_MS = 1000; |
||||
|
const LOCK_TIMEOUT_MS = 10000; // 10 seconds total timeout for locks
|
||||
|
|
||||
|
/** |
||||
|
* Utility function to delay execution |
||||
|
* @param ms Milliseconds to delay |
||||
|
* @returns Promise that resolves after the delay |
||||
|
*/ |
||||
|
const delay = (ms: number): Promise<void> => { |
||||
|
return new Promise(resolve => setTimeout(resolve, ms)); |
||||
|
}; |
||||
|
|
||||
|
// SQL Parsing Utilities
|
||||
|
interface ParsedSQL { |
||||
|
statements: string[]; |
||||
|
errors: string[]; |
||||
|
warnings: string[]; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Removes SQL comments from a string while preserving statement structure |
||||
|
* @param sql The SQL string to process |
||||
|
* @returns SQL with comments removed |
||||
|
*/ |
||||
|
const removeSQLComments = (sql: string): string => { |
||||
|
let result = ''; |
||||
|
let inSingleLineComment = false; |
||||
|
let inMultiLineComment = false; |
||||
|
let inString = false; |
||||
|
let stringChar = ''; |
||||
|
let i = 0; |
||||
|
|
||||
|
while (i < sql.length) { |
||||
|
const char = sql[i]; |
||||
|
const nextChar = sql[i + 1] || ''; |
||||
|
|
||||
|
// Handle string literals
|
||||
|
if ((char === "'" || char === '"') && !inSingleLineComment && !inMultiLineComment) { |
||||
|
if (!inString) { |
||||
|
inString = true; |
||||
|
stringChar = char; |
||||
|
} else if (char === stringChar) { |
||||
|
inString = false; |
||||
|
} |
||||
|
result += char; |
||||
|
i++; |
||||
|
continue; |
||||
|
} |
||||
|
|
||||
|
// Handle single-line comments
|
||||
|
if (char === '-' && nextChar === '-' && !inString && !inMultiLineComment) { |
||||
|
inSingleLineComment = true; |
||||
|
i += 2; |
||||
|
continue; |
||||
|
} |
||||
|
|
||||
|
// Handle multi-line comments
|
||||
|
if (char === '/' && nextChar === '*' && !inString && !inSingleLineComment) { |
||||
|
inMultiLineComment = true; |
||||
|
i += 2; |
||||
|
continue; |
||||
|
} |
||||
|
|
||||
|
if (char === '*' && nextChar === '/' && inMultiLineComment) { |
||||
|
inMultiLineComment = false; |
||||
|
i += 2; |
||||
|
continue; |
||||
|
} |
||||
|
|
||||
|
// Handle newlines in single-line comments
|
||||
|
if (char === '\n' && inSingleLineComment) { |
||||
|
inSingleLineComment = false; |
||||
|
result += '\n'; |
||||
|
i++; |
||||
|
continue; |
||||
|
} |
||||
|
|
||||
|
// Add character if not in any comment
|
||||
|
if (!inSingleLineComment && !inMultiLineComment) { |
||||
|
result += char; |
||||
|
} |
||||
|
|
||||
|
i++; |
||||
|
} |
||||
|
|
||||
|
return result; |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Formats a SQL statement for consistent processing |
||||
|
* @param sql The SQL statement to format |
||||
|
* @returns Formatted SQL statement |
||||
|
*/ |
||||
|
const formatSQLStatement = (sql: string): string => { |
||||
|
return sql |
||||
|
.trim() |
||||
|
.replace(/\s+/g, ' ') // Replace multiple spaces with single space
|
||||
|
.replace(/\s*;\s*$/, ';') // Ensure semicolon at end
|
||||
|
.replace(/^\s*;\s*/, ''); // Remove leading semicolon
|
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Validates a SQL statement for common issues |
||||
|
* @param statement The SQL statement to validate |
||||
|
* @returns Array of validation errors, empty if valid |
||||
|
*/ |
||||
|
const validateSQLStatement = (statement: string): string[] => { |
||||
|
const errors: string[] = []; |
||||
|
const trimmed = statement.trim().toLowerCase(); |
||||
|
|
||||
|
// Check for empty statements
|
||||
|
if (!trimmed) { |
||||
|
errors.push('Empty SQL statement'); |
||||
|
return errors; |
||||
|
} |
||||
|
|
||||
|
// Check for valid statement types
|
||||
|
const validStarts = [ |
||||
|
'create', 'alter', 'drop', 'insert', 'update', 'delete', |
||||
|
'select', 'pragma', 'begin', 'commit', 'rollback' |
||||
|
]; |
||||
|
|
||||
|
const startsWithValid = validStarts.some(start => trimmed.startsWith(start)); |
||||
|
if (!startsWithValid) { |
||||
|
errors.push(`Invalid SQL statement type: ${trimmed.split(' ')[0]}`); |
||||
|
} |
||||
|
|
||||
|
// Check for balanced parentheses
|
||||
|
let parenCount = 0; |
||||
|
let inString = false; |
||||
|
let stringChar = ''; |
||||
|
|
||||
|
for (let i = 0; i < statement.length; i++) { |
||||
|
const char = statement[i]; |
||||
|
|
||||
|
if ((char === "'" || char === '"') && !inString) { |
||||
|
inString = true; |
||||
|
stringChar = char; |
||||
|
} else if (char === stringChar && inString) { |
||||
|
inString = false; |
||||
|
} |
||||
|
|
||||
|
if (!inString) { |
||||
|
if (char === '(') parenCount++; |
||||
|
if (char === ')') parenCount--; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
if (parenCount !== 0) { |
||||
|
errors.push('Unbalanced parentheses in SQL statement'); |
||||
|
} |
||||
|
|
||||
|
return errors; |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Parses SQL into individual statements with validation |
||||
|
* @param sql The SQL to parse |
||||
|
* @returns ParsedSQL object containing statements and any errors/warnings |
||||
|
*/ |
||||
|
const parseSQL = (sql: string): ParsedSQL => { |
||||
|
const result: ParsedSQL = { |
||||
|
statements: [], |
||||
|
errors: [], |
||||
|
warnings: [] |
||||
|
}; |
||||
|
|
||||
|
try { |
||||
|
// Remove comments first
|
||||
|
const cleanSQL = removeSQLComments(sql); |
||||
|
|
||||
|
// Split on semicolons and process each statement
|
||||
|
const rawStatements = cleanSQL |
||||
|
.split(';') |
||||
|
.map(s => formatSQLStatement(s)) |
||||
|
.filter(s => s.length > 0); |
||||
|
|
||||
|
// Validate each statement
|
||||
|
for (const statement of rawStatements) { |
||||
|
const errors = validateSQLStatement(statement); |
||||
|
if (errors.length > 0) { |
||||
|
result.errors.push(...errors.map(e => `${e} in statement: ${statement.substring(0, 50)}...`)); |
||||
|
} else { |
||||
|
result.statements.push(statement); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Add warnings for potential issues
|
||||
|
if (rawStatements.length === 0) { |
||||
|
result.warnings.push('No SQL statements found after parsing'); |
||||
|
} |
||||
|
|
||||
|
// Log parsing results
|
||||
|
logger.debug('SQL parsing results:', { |
||||
|
statementCount: result.statements.length, |
||||
|
errorCount: result.errors.length, |
||||
|
warningCount: result.warnings.length, |
||||
|
statements: result.statements.map(s => s.substring(0, 50) + '...'), |
||||
|
errors: result.errors, |
||||
|
warnings: result.warnings |
||||
|
}); |
||||
|
|
||||
|
} catch (error) { |
||||
|
result.errors.push(`SQL parsing failed: ${error instanceof Error ? error.message : String(error)}`); |
||||
|
logger.error('SQL parsing error:', error); |
||||
|
} |
||||
|
|
||||
|
return result; |
||||
|
}; |
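To make the parsing behaviour concrete, here is an illustrative input/output pair (an assumed example, not a test from this changeset). Note that splitting on `;` means the returned statements carry no trailing semicolons:

```typescript
const parsed = parseSQL(`
  -- add a column
  ALTER TABLE accounts ADD COLUMN nickname TEXT; /* optional display name */
  SELECT 1;
`);
// parsed.statements → ['ALTER TABLE accounts ADD COLUMN nickname TEXT', 'SELECT 1']
// parsed.errors     → []  (both start with an allowed keyword and have balanced parentheses)
// parsed.warnings   → []
```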
||||
|
|
||||
|
// Initial migration for accounts table
|
||||
|
const INITIAL_MIGRATION: Migration = { |
||||
|
version: 1, |
||||
|
name: '001_initial_accounts', |
||||
|
description: 'Initial schema with accounts table', |
||||
|
sql: ` |
||||
|
/* Create accounts table with required fields */ |
||||
|
CREATE TABLE IF NOT EXISTS accounts ( |
||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT, |
||||
|
dateCreated TEXT NOT NULL, |
||||
|
derivationPath TEXT, |
||||
|
did TEXT NOT NULL, |
||||
|
identityEncrBase64 TEXT, -- encrypted & base64-encoded |
||||
|
mnemonicEncrBase64 TEXT, -- encrypted & base64-encoded |
||||
|
passkeyCredIdHex TEXT, |
||||
|
publicKeyHex TEXT NOT NULL |
||||
|
); |
||||
|
|
||||
|
/* Create index on did for faster lookups */ |
||||
|
CREATE INDEX IF NOT EXISTS idx_accounts_did ON accounts(did); |
||||
|
`,
|
||||
|
rollback: ` |
||||
|
/* Drop index first to avoid foreign key issues */ |
||||
|
DROP INDEX IF EXISTS idx_accounts_did; |
||||
|
|
||||
|
/* Drop the accounts table */ |
||||
|
DROP TABLE IF EXISTS accounts; |
||||
|
` |
||||
|
}; |
||||
|
|
||||
|
// Migration registry
|
||||
|
const MIGRATIONS: Migration[] = [ |
||||
|
INITIAL_MIGRATION |
||||
|
]; |
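New migrations are appended to this registry in version order. A purely hypothetical example (not part of this changeset) showing the expected shape of a follow-up entry, using the `Migration` interface above:

```typescript
// Hypothetical follow-up migration, shown only to illustrate how the registry grows.
const EXAMPLE_SECOND_MIGRATION: Migration = {
  version: 2,
  name: '002_add_account_nickname',
  description: 'Add an optional nickname column to accounts',
  sql: `
    ALTER TABLE accounts ADD COLUMN nickname TEXT;
  `,
  rollback: `
    /* Requires SQLite 3.35+; otherwise recreate the table without the column */
    ALTER TABLE accounts DROP COLUMN nickname;
  `
};
// Registered as: const MIGRATIONS: Migration[] = [INITIAL_MIGRATION, EXAMPLE_SECOND_MIGRATION];
```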
||||
|
|
||||
|
// Helper functions
|
||||
|
const verifyPluginState = async (plugin: any): Promise<boolean> => { |
||||
|
try { |
||||
|
const result = await plugin.echo({ value: 'test' }); |
||||
|
return result?.value === 'test'; |
||||
|
} catch (error) { |
||||
|
logger.error('Plugin state verification failed:', error); |
||||
|
return false; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Helper function to verify transaction state without starting a transaction
|
||||
|
const verifyTransactionState = async ( |
||||
|
plugin: any, |
||||
|
database: string |
||||
|
): Promise<boolean> => { |
||||
|
try { |
||||
|
// Query SQLite's internal transaction state
|
||||
|
const result = await plugin.query({ |
||||
|
database, |
||||
|
statement: "SELECT * FROM sqlite_master WHERE type='table' AND name='schema_version';" |
||||
|
}); |
||||
|
|
||||
|
// If we can query, we're not in a transaction
|
||||
|
return false; |
||||
|
} catch (error) { |
||||
|
// If error contains "transaction", we're probably in a transaction
|
||||
|
const errorMsg = error instanceof Error ? error.message : String(error); |
||||
|
const inTransaction = errorMsg.toLowerCase().includes('transaction'); |
||||
|
|
||||
|
logger.debug('Transaction state check:', { |
||||
|
inTransaction, |
||||
|
error: error instanceof Error ? { |
||||
|
message: error.message, |
||||
|
name: error.name |
||||
|
} : error |
||||
|
}); |
||||
|
|
||||
|
return inTransaction; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
const getCurrentVersion = async ( |
||||
|
plugin: any, |
||||
|
database: string |
||||
|
): Promise<number> => { |
||||
|
try { |
||||
|
const result = await plugin.query({ |
||||
|
database, |
||||
|
statement: 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1;' |
||||
|
}); |
||||
|
return result?.values?.[0]?.version || 0; |
||||
|
} catch (error) { |
||||
|
logger.error('Error getting current version:', error); |
||||
|
return 0; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Helper function to execute SQL with retry logic for locked database |
||||
|
* @param plugin SQLite plugin instance |
||||
|
* @param database Database name |
||||
|
* @param operation Function to execute |
||||
|
* @param context Operation context for logging |
||||
|
*/ |
||||
|
const executeWithRetry = async <T>( |
||||
|
plugin: any, |
||||
|
database: string, |
||||
|
operation: () => Promise<T>, |
||||
|
context: string |
||||
|
): Promise<T> => { |
||||
|
let lastError: Error | null = null; |
||||
|
let startTime = Date.now(); |
||||
|
|
||||
|
for (let attempt = 1; attempt <= MAX_RETRY_ATTEMPTS; attempt++) { |
||||
|
try { |
||||
|
// Check if we've exceeded the total timeout
|
||||
|
if (Date.now() - startTime > LOCK_TIMEOUT_MS) { |
||||
|
throw new Error(`Operation timed out after ${LOCK_TIMEOUT_MS}ms`); |
||||
|
} |
||||
|
|
||||
|
// Try the operation
|
||||
|
return await operation(); |
||||
|
} catch (error) { |
||||
|
lastError = error instanceof Error ? error : new Error(String(error)); |
||||
|
const errorMsg = lastError.message.toLowerCase(); |
||||
|
const isLockError = errorMsg.includes('database is locked') || |
||||
|
errorMsg.includes('database is busy') || |
||||
|
errorMsg.includes('database is locked (5)'); |
||||
|
|
||||
|
if (!isLockError || attempt === MAX_RETRY_ATTEMPTS) { |
||||
|
throw lastError; |
||||
|
} |
||||
|
|
||||
|
logger.warn(`Database operation failed, retrying (${attempt}/${MAX_RETRY_ATTEMPTS}):`, { |
||||
|
context, |
||||
|
error: lastError.message, |
||||
|
attempt, |
||||
|
elapsedMs: Date.now() - startTime |
||||
|
}); |
||||
|
|
||||
|
// Exponential backoff
|
||||
|
const backoffDelay = RETRY_DELAY_MS * Math.pow(2, attempt - 1); |
||||
|
await delay(Math.min(backoffDelay, LOCK_TIMEOUT_MS - (Date.now() - startTime))); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
throw lastError || new Error(`Operation failed after ${MAX_RETRY_ATTEMPTS} attempts`); |
||||
|
}; |
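Concretely, with the constants defined earlier (`RETRY_DELAY_MS = 1000`, `MAX_RETRY_ATTEMPTS = 3`, `LOCK_TIMEOUT_MS = 10000`), the schedule for a persistently locked database works out as follows (illustrative arithmetic only):

```typescript
const backoffDelayMs = (attempt: number) => RETRY_DELAY_MS * Math.pow(2, attempt - 1);
// backoffDelayMs(1) === 1000, backoffDelayMs(2) === 2000; after the third failed
// attempt the lock error is rethrown with no further wait. Each wait is also clipped
// to LOCK_TIMEOUT_MS minus elapsed time, so the sequence gives up within roughly 10 s.
```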
||||
|
|
||||
|
// Helper function to execute a single SQL statement with retry logic
|
||||
|
const executeSingleStatement = async ( |
||||
|
plugin: any, |
||||
|
database: string, |
||||
|
statement: string, |
||||
|
values: any[] = [] |
||||
|
): Promise<any> => { |
||||
|
logger.debug('Executing SQL statement:', { |
||||
|
statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''), |
||||
|
values: values.map(v => ({ |
||||
|
value: v, |
||||
|
type: typeof v, |
||||
|
isNull: v === null || v === undefined |
||||
|
})) |
||||
|
}); |
||||
|
|
||||
|
return executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
async () => { |
||||
|
// Validate values before execution
|
||||
|
if (statement.includes('schema_version') && statement.includes('INSERT')) { |
||||
|
// Find the name parameter index in the SQL statement
|
||||
|
const paramIndex = statement.toLowerCase().split(',').findIndex(p => |
||||
|
p.trim().startsWith('name') |
||||
|
); |
||||
|
|
||||
|
if (paramIndex !== -1 && values[paramIndex] !== undefined) { |
||||
|
const nameValue = values[paramIndex]; |
||||
|
if (!nameValue || typeof nameValue !== 'string') { |
||||
|
throw new Error(`Invalid migration name type: ${typeof nameValue}`); |
||||
|
} |
||||
|
if (nameValue.trim().length === 0) { |
||||
|
throw new Error('Migration name cannot be empty'); |
||||
|
} |
||||
|
// Ensure we're using the actual migration name, not the version
|
||||
|
if (nameValue === values[0]?.toString()) { |
||||
|
throw new Error('Migration name cannot be the same as version number'); |
||||
|
} |
||||
|
logger.debug('Validated migration name:', { |
||||
|
name: nameValue, |
||||
|
type: typeof nameValue, |
||||
|
length: nameValue.length |
||||
|
}); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
const result = await plugin.execute({ |
||||
|
database, |
||||
|
statements: statement, |
||||
|
values, |
||||
|
transaction: false |
||||
|
}); |
||||
|
|
||||
|
logger.debug('SQL execution result:', { |
||||
|
statement: statement.substring(0, 100) + (statement.length > 100 ? '...' : ''), |
||||
|
result |
||||
|
}); |
||||
|
|
||||
|
return result; |
||||
|
}, |
||||
|
'executeSingleStatement' |
||||
|
); |
||||
|
}; |
||||
|
|
||||
|
// Helper function to create migrations table if it doesn't exist
|
||||
|
const ensureMigrationsTable = async ( |
||||
|
plugin: any, |
||||
|
database: string |
||||
|
): Promise<void> => { |
||||
|
logger.debug('Ensuring migrations table exists'); |
||||
|
|
||||
|
try { |
||||
|
// Drop and recreate the table to ensure proper structure
|
||||
|
await plugin.execute({ |
||||
|
database, |
||||
|
statements: 'DROP TABLE IF EXISTS schema_version;', |
||||
|
transaction: false |
||||
|
}); |
||||
|
|
||||
|
// Create the table with proper constraints
|
||||
|
await plugin.execute({ |
||||
|
database, |
||||
|
statements: MIGRATIONS_TABLE, |
||||
|
transaction: false |
||||
|
}); |
||||
|
|
||||
|
// Verify table creation and structure
|
||||
|
const tableInfo = await plugin.query({ |
||||
|
database, |
||||
|
statement: "PRAGMA table_info(schema_version);" |
||||
|
}); |
||||
|
|
||||
|
logger.debug('Schema version table structure:', { |
||||
|
columns: tableInfo?.values?.map((row: any) => ({ |
||||
|
name: row.name, |
||||
|
type: row.type, |
||||
|
notnull: row.notnull, |
||||
|
dflt_value: row.dflt_value |
||||
|
})) |
||||
|
}); |
||||
|
|
||||
|
// Verify table was created
|
||||
|
const verifyCheck = await plugin.query({ |
||||
|
database, |
||||
|
statement: "SELECT name FROM sqlite_master WHERE type='table' AND name='schema_version';" |
||||
|
}); |
||||
|
|
||||
|
if (!verifyCheck?.values?.length) { |
||||
|
throw new Error('Failed to create migrations table'); |
||||
|
} |
||||
|
|
||||
|
logger.debug('Migrations table created successfully'); |
||||
|
} catch (error) { |
||||
|
logger.error('Error ensuring migrations table:', { |
||||
|
error: error instanceof Error ? { |
||||
|
message: error.message, |
||||
|
stack: error.stack, |
||||
|
name: error.name |
||||
|
} : error |
||||
|
}); |
||||
|
throw error; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
// Update the parseMigrationStatements function to use the new parser
|
||||
|
const parseMigrationStatements = (sql: string): string[] => { |
||||
|
const parsed = parseSQL(sql); |
||||
|
|
||||
|
if (parsed.errors.length > 0) { |
||||
|
throw new Error(`SQL validation failed:\n${parsed.errors.join('\n')}`); |
||||
|
} |
||||
|
|
||||
|
if (parsed.warnings.length > 0) { |
||||
|
logger.warn('SQL parsing warnings:', parsed.warnings); |
||||
|
} |
||||
|
|
||||
|
return parsed.statements; |
||||
|
}; |
||||
|
|
||||
|
// Add debug helper function
|
||||
|
const debugTableState = async ( |
||||
|
plugin: any, |
||||
|
database: string, |
||||
|
context: string |
||||
|
): Promise<void> => { |
||||
|
try { |
||||
|
const tableInfo = await plugin.query({ |
||||
|
database, |
||||
|
statement: "PRAGMA table_info(schema_version);" |
||||
|
}); |
||||
|
|
||||
|
const tableData = await plugin.query({ |
||||
|
database, |
||||
|
statement: "SELECT * FROM schema_version;" |
||||
|
}); |
||||
|
|
||||
|
logger.debug(`Table state (${context}):`, { |
||||
|
tableInfo: tableInfo?.values?.map((row: any) => ({ |
||||
|
name: row.name, |
||||
|
type: row.type, |
||||
|
notnull: row.notnull, |
||||
|
dflt_value: row.dflt_value |
||||
|
})), |
||||
|
tableData: tableData?.values, |
||||
|
rowCount: tableData?.values?.length || 0 |
||||
|
}); |
||||
|
} catch (error) { |
||||
|
logger.error(`Error getting table state (${context}):`, error); |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Executes a single migration with full transaction safety |
||||
|
* |
||||
|
* Process: |
||||
|
* 1. Verifies plugin and transaction state |
||||
|
* 2. Parses and validates SQL |
||||
|
* 3. Executes in transaction |
||||
|
* 4. Updates schema version |
||||
|
* 5. Verifies success |
||||
|
* |
||||
|
* Error Handling: |
||||
|
* - Automatic rollback on failure |
||||
|
* - Detailed error logging |
||||
|
* - State verification |
||||
|
* - Recovery attempts |
||||
|
* |
||||
|
* @param plugin SQLite plugin instance |
||||
|
* @param database Database name |
||||
|
* @param migration Migration to execute |
||||
|
* @returns {Promise<MigrationResult>} Result of migration execution |
||||
|
* @throws {Error} If migration fails and cannot be recovered |
||||
|
*/ |
||||
|
const executeMigration = async ( |
||||
|
plugin: any, |
||||
|
database: string, |
||||
|
migration: Migration |
||||
|
): Promise<MigrationResult> => { |
||||
|
const startTime = Date.now(); |
||||
|
const statements = parseMigrationStatements(migration.sql); |
||||
|
let transactionStarted = false; |
||||
|
|
||||
|
logger.info(`Starting migration ${migration.version}: ${migration.name}`, { |
||||
|
migration: { |
||||
|
version: migration.version, |
||||
|
name: migration.name, |
||||
|
description: migration.description, |
||||
|
statementCount: statements.length |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
try { |
||||
|
// Debug table state before migration
|
||||
|
await debugTableState(plugin, database, 'before_migration'); |
||||
|
|
||||
|
// Ensure migrations table exists with retry
|
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
() => ensureMigrationsTable(plugin, database), |
||||
|
'ensureMigrationsTable' |
||||
|
); |
||||
|
|
||||
|
// Verify plugin state
|
||||
|
const pluginState = await verifyPluginState(plugin); |
||||
|
if (!pluginState) { |
||||
|
throw new Error('Plugin not available'); |
||||
|
} |
||||
|
|
||||
|
// Start transaction with retry
|
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
async () => { |
||||
|
await plugin.beginTransaction({ database }); |
||||
|
transactionStarted = true; |
||||
|
}, |
||||
|
'beginTransaction' |
||||
|
); |
||||
|
|
||||
|
try { |
||||
|
// Execute each statement with retry
|
||||
|
for (let i = 0; i < statements.length; i++) { |
||||
|
const statement = statements[i]; |
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
() => executeSingleStatement(plugin, database, statement), |
||||
|
`executeStatement_${i + 1}` |
||||
|
); |
||||
|
} |
||||
|
|
||||
|
// Commit transaction before updating schema version
|
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
async () => { |
||||
|
await plugin.commitTransaction({ database }); |
||||
|
transactionStarted = false; |
||||
|
}, |
||||
|
'commitTransaction' |
||||
|
); |
||||
|
|
||||
|
// Update schema version outside of transaction with enhanced debugging
|
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
async () => { |
||||
|
logger.debug('Preparing schema version update:', { |
||||
|
version: migration.version, |
||||
|
name: migration.name.trim(), |
||||
|
description: migration.description, |
||||
|
nameType: typeof migration.name, |
||||
|
nameLength: migration.name.length, |
||||
|
nameTrimmedLength: migration.name.trim().length, |
||||
|
nameIsEmpty: migration.name.trim().length === 0 |
||||
|
}); |
||||
|
|
||||
|
// Use direct SQL with properly escaped values
|
||||
|
const escapedName = migration.name.trim().replace(/'/g, "''"); |
||||
|
const escapedDesc = (migration.description || '').replace(/'/g, "''"); |
||||
|
const insertSql = `INSERT INTO schema_version (version, name, description) VALUES (${migration.version}, '${escapedName}', '${escapedDesc}')`; |
||||
|
|
||||
|
logger.debug('Executing schema version update:', { |
||||
|
sql: insertSql, |
||||
|
originalValues: { |
||||
|
version: migration.version, |
||||
|
name: migration.name.trim(), |
||||
|
description: migration.description |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Debug table state before insert
|
||||
|
await debugTableState(plugin, database, 'before_insert'); |
||||
|
|
||||
|
const result = await plugin.execute({ |
||||
|
database, |
||||
|
statements: insertSql, |
||||
|
transaction: false |
||||
|
}); |
||||
|
|
||||
|
logger.debug('Schema version update result:', { |
||||
|
result, |
||||
|
sql: insertSql |
||||
|
}); |
||||
|
|
||||
|
// Debug table state after insert
|
||||
|
await debugTableState(plugin, database, 'after_insert'); |
||||
|
|
||||
|
// Verify the insert
|
||||
|
const verifyQuery = await plugin.query({ |
||||
|
database, |
||||
|
statement: `SELECT * FROM schema_version WHERE version = ${migration.version} AND name = '${escapedName}'` |
||||
|
}); |
||||
|
|
||||
|
logger.debug('Schema version verification:', { |
||||
|
found: verifyQuery?.values?.length > 0, |
||||
|
rowCount: verifyQuery?.values?.length || 0, |
||||
|
data: verifyQuery?.values |
||||
|
}); |
||||
|
}, |
||||
|
'updateSchemaVersion' |
||||
|
); |
||||
|
|
||||
|
const duration = Date.now() - startTime; |
||||
|
logger.info(`Migration ${migration.version} completed in ${duration}ms`); |
||||
|
|
||||
|
return { |
||||
|
success: true, |
||||
|
version: migration.version, |
||||
|
name: migration.name, |
||||
|
state: { |
||||
|
plugin: { isAvailable: true, lastChecked: new Date() }, |
||||
|
transaction: { isActive: false, lastVerified: new Date() } |
||||
|
} |
||||
|
}; |
||||
|
} catch (error) { |
||||
|
// Rollback with retry
|
||||
|
if (transactionStarted) { |
||||
|
try { |
||||
|
await executeWithRetry( |
||||
|
plugin, |
||||
|
database, |
||||
|
async () => { |
||||
|
// Record error in schema_version before rollback
|
||||
|
await executeSingleStatement( |
||||
|
plugin, |
||||
|
database, |
||||
|
`INSERT INTO schema_version (
|
||||
|
version, name, description, applied_at, |
||||
|
error_message, error_stack, error_context |
||||
|
) VALUES (?, ?, ?, CURRENT_TIMESTAMP, ?, ?, ?);`,
|
||||
|
[ |
||||
|
migration.version, |
||||
|
migration.name, |
||||
|
migration.description, |
||||
|
error instanceof Error ? error.message : String(error), |
||||
|
error instanceof Error ? error.stack : null, |
||||
|
'migration_execution' |
||||
|
] |
||||
|
); |
||||
|
|
||||
|
await plugin.rollbackTransaction({ database }); |
||||
|
}, |
||||
|
'rollbackTransaction' |
||||
|
); |
||||
|
} catch (rollbackError) { |
||||
|
logger.error('Error during rollback:', { |
||||
|
originalError: error, |
||||
|
rollbackError |
||||
|
}); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
throw error; |
||||
|
} |
||||
|
} catch (error) { |
||||
|
// Debug table state on error
|
||||
|
await debugTableState(plugin, database, 'on_error'); |
||||
|
|
||||
|
logger.error('Migration execution failed:', { |
||||
|
error: error instanceof Error ? { |
||||
|
message: error.message, |
||||
|
stack: error.stack, |
||||
|
name: error.name |
||||
|
} : error, |
||||
|
migration: { |
||||
|
version: migration.version, |
||||
|
name: migration.name, |
||||
|
nameType: typeof migration.name, |
||||
|
nameLength: migration.name.length, |
||||
|
nameTrimmedLength: migration.name.trim().length |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
return { |
||||
|
success: false, |
||||
|
version: migration.version, |
||||
|
name: migration.name, |
||||
|
error: error instanceof Error ? error : new Error(String(error)), |
||||
|
state: { |
||||
|
plugin: { isAvailable: true, lastChecked: new Date() }, |
||||
|
transaction: { isActive: false, lastVerified: new Date() } |
||||
|
} |
||||
|
}; |
||||
|
} |
||||
|
}; |
||||
|
|
||||
|
/** |
||||
|
* Main migration runner |
||||
|
* |
||||
|
* Orchestrates the complete migration process: |
||||
|
* 1. Verifies plugin state |
||||
|
* 2. Ensures migrations table |
||||
|
* 3. Determines pending migrations |
||||
|
* 4. Executes migrations in order |
||||
|
* 5. Verifies results |
||||
|
* |
||||
|
* Features: |
||||
|
* - Version-based ordering |
||||
|
* - Transaction safety |
||||
|
* - Error recovery |
||||
|
* - State verification |
||||
|
* - Detailed logging |
||||
|
* |
||||
|
* @param plugin SQLite plugin instance |
||||
|
* @param database Database name |
||||
|
* @returns {Promise<MigrationResult[]>} Results of all migrations |
||||
|
* @throws {Error} If migration process fails |
||||
|
*/ |
||||
|
export async function runMigrations( |
||||
|
plugin: any, |
||||
|
database: string |
||||
|
): Promise<MigrationResult[]> { |
||||
|
logger.info('Starting migration process'); |
||||
|
|
||||
|
// Verify plugin is available
|
||||
|
if (!await verifyPluginState(plugin)) { |
||||
|
throw new Error('SQLite plugin not available'); |
||||
|
} |
||||
|
|
||||
|
// Ensure migrations table exists before any migrations
|
||||
|
try { |
||||
|
await ensureMigrationsTable(plugin, database); |
||||
|
} catch (error) { |
||||
|
logger.error('Failed to ensure migrations table:', error); |
||||
|
throw new Error('Failed to initialize migrations system'); |
||||
|
} |
||||
|
|
||||
|
// Get current version
|
||||
|
const currentVersion = await getCurrentVersion(plugin, database); |
||||
|
logger.info(`Current database version: ${currentVersion}`); |
||||
|
|
||||
|
// Find pending migrations
|
||||
|
const pendingMigrations = MIGRATIONS.filter(m => m.version > currentVersion); |
||||
|
if (pendingMigrations.length === 0) { |
||||
|
logger.info('No pending migrations'); |
||||
|
return []; |
||||
|
} |
||||
|
|
||||
|
logger.info(`Found ${pendingMigrations.length} pending migrations`); |
||||
|
|
||||
|
// Execute each migration
|
||||
|
const results: MigrationResult[] = []; |
||||
|
for (const migration of pendingMigrations) { |
||||
|
const result = await executeMigration(plugin, database, migration); |
||||
|
results.push(result); |
||||
|
|
||||
|
if (!result.success) { |
||||
|
logger.error(`Migration failed at version ${migration.version}`); |
||||
|
break; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
return results; |
||||
|
} |
||||
|
|
||||
|
// Export types for use in other modules
|
||||
|
export type { Migration, MigrationResult, MigrationState }; |
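For orientation, a hedged sketch of how the runner above might be called at startup. The import path, function name, and database name here are assumptions for illustration only; they do not appear in this changeset.

```typescript
// Hypothetical caller (import path and names are illustrative only).
import { runMigrations } from "./migrations";

async function migrateOnStartup(plugin: any, database: string): Promise<void> {
  const results = await runMigrations(plugin, database);
  const failed = results.find((r) => !r.success);
  if (failed) {
    // The runner stops at the first failure, so surface it to the caller.
    throw failed.error ?? new Error(`Migration ${failed.version} failed`);
  }
}
```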
@ -0,0 +1,244 @@ |
|||||
|
import type { CapacitorElectronConfig } from '@capacitor-community/electron'; |
||||
|
import { |
||||
|
CapElectronEventEmitter, |
||||
|
CapacitorSplashScreen, |
||||
|
setupCapacitorElectronPlugins, |
||||
|
} from '@capacitor-community/electron'; |
||||
|
import chokidar from 'chokidar'; |
||||
|
import type { MenuItemConstructorOptions } from 'electron'; |
||||
|
import { app, BrowserWindow, Menu, MenuItem, nativeImage, Tray, session } from 'electron'; |
||||
|
import electronIsDev from 'electron-is-dev'; |
||||
|
import electronServe from 'electron-serve'; |
||||
|
import windowStateKeeper from 'electron-window-state'; |
||||
|
import { join } from 'path'; |
||||
|
|
||||
|
// Define components for a watcher to detect when the webapp is changed so we can reload in Dev mode.
|
||||
|
const reloadWatcher = { |
||||
|
debouncer: null, |
||||
|
ready: false, |
||||
|
watcher: null, |
||||
|
}; |
||||
|
export function setupReloadWatcher(electronCapacitorApp: ElectronCapacitorApp): void { |
||||
|
reloadWatcher.watcher = chokidar |
||||
|
.watch(join(app.getAppPath(), 'app'), { |
||||
|
ignored: /[/\\]\./, |
||||
|
persistent: true, |
||||
|
}) |
||||
|
.on('ready', () => { |
||||
|
reloadWatcher.ready = true; |
||||
|
}) |
||||
|
.on('all', (_event, _path) => { |
||||
|
if (reloadWatcher.ready) { |
||||
|
clearTimeout(reloadWatcher.debouncer); |
||||
|
reloadWatcher.debouncer = setTimeout(async () => { |
||||
|
electronCapacitorApp.getMainWindow().webContents.reload(); |
||||
|
reloadWatcher.ready = false; |
||||
|
clearTimeout(reloadWatcher.debouncer); |
||||
|
reloadWatcher.debouncer = null; |
||||
|
reloadWatcher.watcher = null; |
||||
|
setupReloadWatcher(electronCapacitorApp); |
||||
|
}, 1500); |
||||
|
} |
||||
|
}); |
||||
|
} |
||||
|
|
||||
|
// Define our class to manage our app.
|
||||
|
export class ElectronCapacitorApp { |
||||
|
private MainWindow: BrowserWindow | null = null; |
||||
|
private SplashScreen: CapacitorSplashScreen | null = null; |
||||
|
private TrayIcon: Tray | null = null; |
||||
|
private CapacitorFileConfig: CapacitorElectronConfig; |
||||
|
private TrayMenuTemplate: (MenuItem | MenuItemConstructorOptions)[] = [ |
||||
|
new MenuItem({ label: 'Quit App', role: 'quit' }), |
||||
|
]; |
||||
|
private AppMenuBarMenuTemplate: (MenuItem | MenuItemConstructorOptions)[] = [ |
||||
|
{ role: process.platform === 'darwin' ? 'appMenu' : 'fileMenu' }, |
||||
|
{ role: 'viewMenu' }, |
||||
|
]; |
||||
|
private mainWindowState; |
||||
|
private loadWebApp; |
||||
|
private customScheme: string; |
||||
|
|
||||
|
constructor( |
||||
|
capacitorFileConfig: CapacitorElectronConfig, |
||||
|
trayMenuTemplate?: (MenuItemConstructorOptions | MenuItem)[], |
||||
|
appMenuBarMenuTemplate?: (MenuItemConstructorOptions | MenuItem)[] |
||||
|
) { |
||||
|
this.CapacitorFileConfig = capacitorFileConfig; |
||||
|
|
||||
|
this.customScheme = this.CapacitorFileConfig.electron?.customUrlScheme ?? 'capacitor-electron'; |
||||
|
|
||||
|
if (trayMenuTemplate) { |
||||
|
this.TrayMenuTemplate = trayMenuTemplate; |
||||
|
} |
||||
|
|
||||
|
if (appMenuBarMenuTemplate) { |
||||
|
this.AppMenuBarMenuTemplate = appMenuBarMenuTemplate; |
||||
|
} |
||||
|
|
||||
|
// Set up our web app loader; this lets us load apps like React, Vue, and Angular without changing their build chains.
|
||||
|
this.loadWebApp = electronServe({ |
||||
|
directory: join(app.getAppPath(), 'app'), |
||||
|
scheme: this.customScheme, |
||||
|
}); |
||||
|
} |
||||
|
|
||||
|
// Helper function to load in the app.
|
||||
|
private async loadMainWindow(thisRef: any) { |
||||
|
await thisRef.loadWebApp(thisRef.MainWindow); |
||||
|
} |
||||
|
|
||||
|
// Expose the mainWindow ref for use outside of the class.
|
||||
|
getMainWindow(): BrowserWindow { |
||||
|
return this.MainWindow; |
||||
|
} |
||||
|
|
||||
|
getCustomURLScheme(): string { |
||||
|
return this.customScheme; |
||||
|
} |
||||
|
|
||||
|
async init(): Promise<void> { |
||||
|
const icon = nativeImage.createFromPath( |
||||
|
join(app.getAppPath(), 'assets', process.platform === 'win32' ? 'appIcon.ico' : 'appIcon.png') |
||||
|
); |
||||
|
this.mainWindowState = windowStateKeeper({ |
||||
|
defaultWidth: 1000, |
||||
|
defaultHeight: 800, |
||||
|
}); |
||||
|
// Set up the preload script path and construct our main window.
|
||||
|
const preloadPath = join(app.getAppPath(), 'build', 'src', 'preload.js'); |
||||
|
this.MainWindow = new BrowserWindow({ |
||||
|
icon, |
||||
|
show: false, |
||||
|
x: this.mainWindowState.x, |
||||
|
y: this.mainWindowState.y, |
||||
|
width: this.mainWindowState.width, |
||||
|
height: this.mainWindowState.height, |
||||
|
webPreferences: { |
||||
|
nodeIntegration: true, |
||||
|
contextIsolation: true, |
||||
|
// Use preload to inject the Electron variant overrides for Capacitor plugins.
|
||||
|
// preload: join(app.getAppPath(), "node_modules", "@capacitor-community", "electron", "dist", "runtime", "electron-rt.js"),
|
||||
|
preload: preloadPath, |
||||
|
}, |
||||
|
}); |
||||
|
this.mainWindowState.manage(this.MainWindow); |
||||
|
|
||||
|
if (this.CapacitorFileConfig.electron?.backgroundColor) { |
||||
|
this.MainWindow.setBackgroundColor(this.CapacitorFileConfig.electron.backgroundColor); |
||||
|
} |
||||
|
|
||||
|
// If we close the main window with the splash screen enabled, we need to destroy the ref.
|
||||
|
this.MainWindow.on('closed', () => { |
||||
|
if (this.SplashScreen?.getSplashWindow() && !this.SplashScreen.getSplashWindow().isDestroyed()) { |
||||
|
this.SplashScreen.getSplashWindow().close(); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// When the tray icon is enabled, set up the options.
|
||||
|
if (this.CapacitorFileConfig.electron?.trayIconAndMenuEnabled) { |
||||
|
this.TrayIcon = new Tray(icon); |
||||
|
this.TrayIcon.on('double-click', () => { |
||||
|
if (this.MainWindow) { |
||||
|
if (this.MainWindow.isVisible()) { |
||||
|
this.MainWindow.hide(); |
||||
|
} else { |
||||
|
this.MainWindow.show(); |
||||
|
this.MainWindow.focus(); |
||||
|
} |
||||
|
} |
||||
|
}); |
||||
|
this.TrayIcon.on('click', () => { |
||||
|
if (this.MainWindow) { |
||||
|
if (this.MainWindow.isVisible()) { |
||||
|
this.MainWindow.hide(); |
||||
|
} else { |
||||
|
this.MainWindow.show(); |
||||
|
this.MainWindow.focus(); |
||||
|
} |
||||
|
} |
||||
|
}); |
||||
|
this.TrayIcon.setToolTip(app.getName()); |
||||
|
this.TrayIcon.setContextMenu(Menu.buildFromTemplate(this.TrayMenuTemplate)); |
||||
|
} |
||||
|
|
||||
|
// Set up the main menu bar at the top of our window.
|
||||
|
Menu.setApplicationMenu(Menu.buildFromTemplate(this.AppMenuBarMenuTemplate)); |
||||
|
|
||||
|
// If the splash screen is enabled, show it first while the main window loads, then switch it out for the main window; otherwise just load the main window from the start.
|
||||
|
if (this.CapacitorFileConfig.electron?.splashScreenEnabled) { |
||||
|
this.SplashScreen = new CapacitorSplashScreen({ |
||||
|
imageFilePath: join( |
||||
|
app.getAppPath(), |
||||
|
'assets', |
||||
|
this.CapacitorFileConfig.electron?.splashScreenImageName ?? 'splash.png' |
||||
|
), |
||||
|
windowWidth: 400, |
||||
|
windowHeight: 400, |
||||
|
}); |
||||
|
this.SplashScreen.init(this.loadMainWindow, this); |
||||
|
} else { |
||||
|
this.loadMainWindow(this); |
||||
|
} |
||||
|
|
||||
|
// Security
|
||||
|
this.MainWindow.webContents.setWindowOpenHandler((details) => { |
||||
|
if (!details.url.includes(this.customScheme)) { |
||||
|
return { action: 'deny' }; |
||||
|
} else { |
||||
|
return { action: 'allow' }; |
||||
|
} |
||||
|
}); |
||||
|
this.MainWindow.webContents.on('will-navigate', (event, _newURL) => { |
||||
|
if (!this.MainWindow.webContents.getURL().includes(this.customScheme)) { |
||||
|
event.preventDefault(); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
// Link electron plugins into the system.
|
||||
|
setupCapacitorElectronPlugins(); |
||||
|
|
||||
|
// When the web app is loaded, we hide the splash screen if needed and show the main window.
|
||||
|
this.MainWindow.webContents.on('dom-ready', () => { |
||||
|
if (this.CapacitorFileConfig.electron?.splashScreenEnabled) { |
||||
|
this.SplashScreen.getSplashWindow().hide(); |
||||
|
} |
||||
|
if (!this.CapacitorFileConfig.electron?.hideMainWindowOnLaunch) { |
||||
|
this.MainWindow.show(); |
||||
|
} |
||||
|
setTimeout(() => { |
||||
|
if (electronIsDev) { |
||||
|
this.MainWindow.webContents.openDevTools(); |
||||
|
} |
||||
|
CapElectronEventEmitter.emit('CAPELECTRON_DeeplinkListenerInitialized', ''); |
||||
|
}, 400); |
||||
|
}); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// Set a CSP up for our application based on the custom scheme
|
||||
|
export function setupContentSecurityPolicy(customScheme: string): void { |
||||
|
session.defaultSession.webRequest.onHeadersReceived((details, callback) => { |
||||
|
callback({ |
||||
|
responseHeaders: { |
||||
|
...details.responseHeaders, |
||||
|
'Content-Security-Policy': [ |
||||
|
// Base CSP for both dev and prod
|
||||
|
`default-src ${customScheme}://* 'unsafe-inline' data:;`, |
||||
|
// Allow Google Fonts
|
||||
|
`style-src ${customScheme}://* 'unsafe-inline' https://fonts.googleapis.com;`, |
||||
|
`font-src ${customScheme}://* https://fonts.gstatic.com;`, |
||||
|
// Allow images and media
|
||||
|
`img-src ${customScheme}://* data: https:;`, |
||||
|
// Allow connections to HTTPS resources
|
||||
|
`connect-src ${customScheme}://* https:;`, |
||||
|
// Add dev-specific policies
|
||||
|
...(electronIsDev ? [ |
||||
|
`script-src ${customScheme}://* 'unsafe-inline' 'unsafe-eval' devtools://*;`, |
||||
|
`default-src ${customScheme}://* 'unsafe-inline' devtools://* 'unsafe-eval' data:;` |
||||
|
] : []) |
||||
|
].join(' ') |
||||
|
}, |
||||
|
}); |
||||
|
}); |
||||
|
} |
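The class and the two helpers above are normally wired together in the Electron entrypoint. A minimal sketch follows, assuming the standard @capacitor-community/electron index.ts layout; the `getCapacitorElectronConfig` call and the variable names are assumptions not shown in this diff.

```typescript
// Hypothetical entrypoint wiring (index.ts); assumes the same imports used above.
import { getCapacitorElectronConfig } from '@capacitor-community/electron';

const capacitorFileConfig = getCapacitorElectronConfig();
const myCapacitorApp = new ElectronCapacitorApp(capacitorFileConfig);

if (electronIsDev) {
  setupReloadWatcher(myCapacitorApp); // reload the renderer when web assets change
}

app.whenReady().then(async () => {
  setupContentSecurityPolicy(myCapacitorApp.getCustomURLScheme());
  await myCapacitorApp.init();
});
```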
@ -0,0 +1,18 @@ |
|||||
|
{ |
||||
|
"compileOnSave": true, |
||||
|
"include": ["./src/**/*", "./capacitor.config.ts", "./capacitor.config.js"], |
||||
|
"compilerOptions": { |
||||
|
"outDir": "./build", |
||||
|
"importHelpers": true, |
||||
|
"target": "ES2020", |
||||
|
"module": "CommonJS", |
||||
|
"moduleResolution": "node", |
||||
|
"esModuleInterop": true, |
||||
|
"typeRoots": ["./node_modules/@types"], |
||||
|
"allowJs": true, |
||||
|
"rootDir": ".", |
||||
|
"skipLibCheck": true, |
||||
|
"resolveJsonModule": true |
||||
|
} |
||||
|
} |
||||
|
|
@ -0,0 +1,186 @@ |
|||||
|
#!/bin/bash |
||||
|
# experiment.sh |
||||
|
# Author: Matthew Raymer |
||||
|
# Description: Build script for TimeSafari Electron application |
||||
|
# This script handles the complete build process for the TimeSafari Electron app, |
||||
|
# including web asset compilation, TypeScript compilation, and AppImage packaging. |
||||
|
# It ensures all dependencies are available and provides detailed build feedback. |
||||
|
# |
||||
|
# Build Process: |
||||
|
# 1. Environment setup and dependency checks |
||||
|
# 2. Web asset compilation (Vite) |
||||
|
# 3. TypeScript compilation |
||||
|
# 4. Electron main process build |
||||
|
# 5. AppImage packaging |
||||
|
# |
||||
|
# Dependencies: |
||||
|
# - Node.js and npm |
||||
|
# - TypeScript |
||||
|
# - Vite |
||||
|
# - electron-builder |
||||
|
# |
||||
|
# Usage: ./experiment.sh |
||||
|
# |
||||
|
# Exit Codes: |
||||
|
# 1 - Required command not found |
||||
|
# 2 - TypeScript installation failed |
||||
|
# 3 - TypeScript compilation failed |
||||
|
# 4 - Build process failed |
||||
|
# 5 - AppImage build failed |
||||
|
|
||||
|
# Exit on any error |
||||
|
set -e |
||||
|
|
||||
|
# ANSI color codes for better output formatting |
||||
|
readonly RED='\033[0;31m' |
||||
|
readonly GREEN='\033[0;32m' |
||||
|
readonly YELLOW='\033[1;33m' |
||||
|
readonly BLUE='\033[0;34m' |
||||
|
readonly NC='\033[0m' # No Color |
||||
|
|
||||
|
# Logging functions |
||||
|
log_info() { |
||||
|
echo -e "${BLUE}[$(date '+%Y-%m-%d %H:%M:%S')] [INFO]${NC} $1" |
||||
|
} |
||||
|
|
||||
|
log_success() { |
||||
|
echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] [SUCCESS]${NC} $1" |
||||
|
} |
||||
|
|
||||
|
log_warn() { |
||||
|
echo -e "${YELLOW}[$(date '+%Y-%m-%d %H:%M:%S')] [WARN]${NC} $1" |
||||
|
} |
||||
|
|
||||
|
log_error() { |
||||
|
echo -e "${RED}[$(date '+%Y-%m-%d %H:%M:%S')] [ERROR]${NC} $1" |
||||
|
} |
||||
|
|
||||
|
# Function to check if a command exists |
||||
|
check_command() { |
||||
|
if ! command -v "$1" &> /dev/null; then |
||||
|
log_error "$1 is required but not installed." |
||||
|
exit 1 |
||||
|
fi |
||||
|
log_info "Found $1: $(command -v "$1")" |
||||
|
} |
||||
|
|
||||
|
# Function to measure and log execution time |
||||
|
measure_time() { |
||||
|
local start_time=$(date +%s) |
||||
|
"$@" |
||||
|
local end_time=$(date +%s) |
||||
|
local duration=$((end_time - start_time)) |
||||
|
log_success "Completed in ${duration} seconds" |
||||
|
} |
||||
|
|
||||
|
# Function to find the AppImage |
||||
|
find_appimage() { |
||||
|
local appimage_path |
||||
|
appimage_path=$(find dist-electron-packages -name "*.AppImage" -type f -print -quit) |
||||
|
if [ -n "$appimage_path" ]; then |
||||
|
echo "$appimage_path" |
||||
|
else |
||||
|
log_warn "AppImage not found in expected location" |
||||
|
echo "dist-electron-packages/*.AppImage" |
||||
|
fi |
||||
|
} |
||||
|
|
||||
|
# Print build header |
||||
|
echo -e "\n${BLUE}=== TimeSafari Electron Build Process ===${NC}\n" |
||||
|
log_info "Starting build process at $(date)" |
||||
|
|
||||
|
# Check required commands |
||||
|
log_info "Checking required dependencies..." |
||||
|
check_command node |
||||
|
check_command npm |
||||
|
|
||||
|
# Create application data directory |
||||
|
log_info "Setting up application directories..." |
||||
|
mkdir -p ~/.local/share/TimeSafari/timesafari |
||||
|
|
||||
|
# Clean up previous builds |
||||
|
log_info "Cleaning previous builds..." |
||||
|
rm -rf dist* || log_warn "No previous builds to clean" |
||||
|
|
||||
|
# Set environment variables for the build |
||||
|
log_info "Configuring build environment..." |
||||
|
export VITE_PLATFORM=electron |
||||
|
export VITE_PWA_ENABLED=false |
||||
|
export VITE_DISABLE_PWA=true |
||||
|
|
||||
|
# Ensure TypeScript is installed |
||||
|
log_info "Verifying TypeScript installation..." |
||||
|
if [ ! -f "./node_modules/.bin/tsc" ]; then |
||||
|
log_info "Installing TypeScript..." |
||||
|
if ! npm install --save-dev typescript@~5.2.2; then |
||||
|
log_error "TypeScript installation failed!" |
||||
|
exit 2 |
||||
|
fi |
||||
|
# Verify installation |
||||
|
if [ ! -f "./node_modules/.bin/tsc" ]; then |
||||
|
log_error "TypeScript installation verification failed!" |
||||
|
exit 2 |
||||
|
fi |
||||
|
log_success "TypeScript installed successfully" |
||||
|
else |
||||
|
log_info "TypeScript already installed" |
||||
|
fi |
||||
|
|
||||
|
# Get git hash for versioning |
||||
|
GIT_HASH=$(git log -1 --pretty=format:%h) |
||||
|
log_info "Using git hash: ${GIT_HASH}" |
||||
|
|
||||
|
# Build web assets |
||||
|
log_info "Building web assets with Vite..." |
||||
|
if ! measure_time env VITE_GIT_HASH="$GIT_HASH" npx vite build --config vite.config.app.electron.mts --mode electron; then |
||||
|
log_error "Web asset build failed!" |
||||
|
exit 4 |
||||
|
fi |
||||
|
|
||||
|
# TypeScript compilation |
||||
|
log_info "Compiling TypeScript..." |
||||
|
if ! measure_time ./node_modules/.bin/tsc -p tsconfig.electron.json; then |
||||
|
log_error "TypeScript compilation failed!" |
||||
|
exit 3 |
||||
|
fi |
||||
|
|
||||
|
# Build electron main process |
||||
|
log_info "Building electron main process..." |
||||
|
if ! measure_time env VITE_GIT_HASH="$GIT_HASH" npx vite build --config vite.config.electron.mts --mode electron; then |
||||
|
log_error "Electron main process build failed!" |
||||
|
exit 4 |
||||
|
fi |
||||
|
|
||||
|
# Organize files |
||||
|
log_info "Organizing build artifacts..." |
||||
|
mkdir -p dist-electron/www |
||||
|
cp -r dist/* dist-electron/www/ || log_error "Failed to copy web assets" |
||||
|
mkdir -p dist-electron/resources |
||||
|
cp src/electron/preload.js dist-electron/resources/preload.js || log_error "Failed to copy preload script" |
||||
|
|
||||
|
# Build the AppImage |
||||
|
log_info "Building AppImage package..." |
||||
|
if ! measure_time npx electron-builder --linux AppImage; then |
||||
|
log_error "AppImage build failed!" |
||||
|
exit 5 |
||||
|
fi |
||||
|
|
||||
|
# Print build summary |
||||
|
echo -e "\n${GREEN}=== Build Summary ===${NC}" |
||||
|
log_success "Build completed successfully!" |
||||
|
log_info "Build artifacts location: $(pwd)/dist-electron" |
||||
|
log_info "AppImage location: $(find_appimage)" |
||||
|
|
||||
|
# Check for build warnings |
||||
|
if grep -q "default Electron icon is used" dist-electron-packages/builder-effective-config.yaml; then |
||||
|
log_warn "Using default Electron icon - consider adding a custom icon" |
||||
|
fi |
||||
|
|
||||
|
if grep -q "chunks are larger than 1000 kB" dist-electron-packages/builder-effective-config.yaml; then |
||||
|
log_warn "Large chunks detected - consider implementing code splitting" |
||||
|
fi |
||||
|
|
||||
|
echo -e "\n${GREEN}=== End of Build Process ===${NC}\n" |
||||
|
|
||||
|
# Exit with success |
||||
|
exit 0 |
@ -1,29 +0,0 @@ |
|||||
const { app, BrowserWindow } = require('electron'); |
|
||||
const path = require('path'); |
|
||||
|
|
||||
function createWindow() { |
|
||||
const win = new BrowserWindow({ |
|
||||
width: 1200, |
|
||||
height: 800, |
|
||||
webPreferences: { |
|
||||
nodeIntegration: true, |
|
||||
contextIsolation: false |
|
||||
} |
|
||||
}); |
|
||||
|
|
||||
win.loadFile(path.join(__dirname, 'dist-electron/www/index.html')); |
|
||||
} |
|
||||
|
|
||||
app.whenReady().then(createWindow); |
|
||||
|
|
||||
app.on('window-all-closed', () => { |
|
||||
if (process.platform !== 'darwin') { |
|
||||
app.quit(); |
|
||||
} |
|
||||
}); |
|
||||
|
|
||||
app.on('activate', () => { |
|
||||
if (BrowserWindow.getAllWindows().length === 0) { |
|
||||
createWindow(); |
|
||||
} |
|
||||
}); |
|
File diff suppressed because it is too large
@ -1,5 +1,6 @@ |
|||||
eth_keys
pywebview
pyinstaller>=6.12.0
setuptools>=69.0.0 # Required for distutils for electron-builder on macOS
# For development
watchdog>=3.0.0 # For file watching support
@ -0,0 +1,85 @@ |
|||||
|
const fs = require("fs"); |
||||
|
const fse = require("fs-extra"); |
||||
|
const path = require("path"); |
||||
|
const { execSync } = require('child_process'); |
||||
|
|
||||
|
console.log("Starting Electron build finalization..."); |
||||
|
|
||||
|
// Define paths |
||||
|
const distPath = path.join(__dirname, "..", "dist"); |
||||
|
const electronDistPath = path.join(__dirname, "..", "dist-electron"); |
||||
|
const wwwPath = path.join(electronDistPath, "www"); |
||||
|
const builtIndexPath = path.join(distPath, "index.html"); |
||||
|
const finalIndexPath = path.join(wwwPath, "index.html"); |
||||
|
|
||||
|
// Ensure target directory exists |
||||
|
if (!fs.existsSync(wwwPath)) { |
||||
|
fs.mkdirSync(wwwPath, { recursive: true }); |
||||
|
} |
||||
|
|
||||
|
// Copy assets directory |
||||
|
const assetsSrc = path.join(distPath, "assets"); |
||||
|
const assetsDest = path.join(wwwPath, "assets"); |
||||
|
if (fs.existsSync(assetsSrc)) { |
||||
|
fse.copySync(assetsSrc, assetsDest, { overwrite: true }); |
||||
|
} |
||||
|
|
||||
|
// Copy favicon.ico |
||||
|
const faviconSrc = path.join(distPath, "favicon.ico"); |
||||
|
if (fs.existsSync(faviconSrc)) { |
||||
|
fs.copyFileSync(faviconSrc, path.join(wwwPath, "favicon.ico")); |
||||
|
} |
||||
|
|
||||
|
// Copy manifest.webmanifest |
||||
|
const manifestSrc = path.join(distPath, "manifest.webmanifest"); |
||||
|
if (fs.existsSync(manifestSrc)) { |
||||
|
fs.copyFileSync(manifestSrc, path.join(wwwPath, "manifest.webmanifest")); |
||||
|
} |
||||
|
|
||||
|
// Load and modify index.html from Vite output |
||||
|
let indexContent = fs.readFileSync(builtIndexPath, "utf-8"); |
||||
|
|
||||
|
// Inject the window.process shim after the first <script> block |
||||
|
indexContent = indexContent.replace( |
||||
|
/<script[^>]*type="module"[^>]*>/, |
||||
|
match => `${match}\n window.process = { env: { VITE_PLATFORM: 'electron' } };` |
||||
|
); |
||||
|
|
||||
|
// Write the modified index.html to dist-electron/www |
||||
|
fs.writeFileSync(finalIndexPath, indexContent); |
||||
|
|
||||
|
// Copy preload script to resources |
||||
|
const preloadSrc = path.join(electronDistPath, "preload.js"); |
||||
|
const preloadDest = path.join(electronDistPath, "resources", "preload.js"); |
||||
|
|
||||
|
// Ensure resources directory exists |
||||
|
const resourcesDir = path.join(electronDistPath, "resources"); |
||||
|
if (!fs.existsSync(resourcesDir)) { |
||||
|
fs.mkdirSync(resourcesDir, { recursive: true }); |
||||
|
} |
||||
|
|
||||
|
if (fs.existsSync(preloadSrc)) { |
||||
|
fs.copyFileSync(preloadSrc, preloadDest); |
||||
|
console.log("Preload script copied to resources directory"); |
||||
|
} else { |
||||
|
console.error("Preload script not found at:", preloadSrc); |
||||
|
} |
||||
|
|
||||
|
// Copy capacitor.config.json to dist-electron |
||||
|
try { |
||||
|
console.log("Copying capacitor.config.json to dist-electron..."); |
||||
|
const configPath = path.join(process.cwd(), 'capacitor.config.json'); |
||||
|
const targetPath = path.join(process.cwd(), 'dist-electron', 'capacitor.config.json'); |
||||
|
|
||||
|
if (!fs.existsSync(configPath)) { |
||||
|
throw new Error('capacitor.config.json not found in project root'); |
||||
|
} |
||||
|
|
||||
|
fs.copyFileSync(configPath, targetPath); |
||||
|
console.log("Successfully copied capacitor.config.json"); |
||||
|
} catch (error) { |
||||
|
console.error("Failed to copy capacitor.config.json:", error); |
||||
|
throw error; |
||||
|
} |
||||
|
|
||||
|
console.log("Electron index.html copied and patched for Electron context."); |
@ -1,243 +0,0 @@ |
|||||
const fs = require('fs'); |
|
||||
const path = require('path'); |
|
||||
|
|
||||
console.log('Starting electron build process...'); |
|
||||
|
|
||||
// Copy web files
|
|
||||
const webDistPath = path.join(__dirname, '..', 'dist'); |
|
||||
const electronDistPath = path.join(__dirname, '..', 'dist-electron'); |
|
||||
const wwwPath = path.join(electronDistPath, 'www'); |
|
||||
|
|
||||
// Create www directory if it doesn't exist
|
|
||||
if (!fs.existsSync(wwwPath)) { |
|
||||
fs.mkdirSync(wwwPath, { recursive: true }); |
|
||||
} |
|
||||
|
|
||||
// Copy web files to www directory
|
|
||||
fs.cpSync(webDistPath, wwwPath, { recursive: true }); |
|
||||
|
|
||||
// Fix asset paths in index.html
|
|
||||
const indexPath = path.join(wwwPath, 'index.html'); |
|
||||
let indexContent = fs.readFileSync(indexPath, 'utf8'); |
|
||||
|
|
||||
// Fix asset paths
|
|
||||
indexContent = indexContent |
|
||||
.replace(/\/assets\//g, './assets/') |
|
||||
.replace(/href="\//g, 'href="./') |
|
||||
.replace(/src="\//g, 'src="./'); |
|
||||
|
|
||||
fs.writeFileSync(indexPath, indexContent); |
|
||||
|
|
||||
// Check for remaining /assets/ paths
|
|
||||
console.log('After path fixing, checking for remaining /assets/ paths:', indexContent.includes('/assets/')); |
|
||||
console.log('Sample of fixed content:', indexContent.substring(0, 500)); |
|
||||
|
|
||||
console.log('Copied and fixed web files in:', wwwPath); |
|
||||
|
|
||||
// Copy main process files
|
|
||||
console.log('Copying main process files...'); |
|
||||
|
|
||||
// Create the main process file with inlined logger
|
|
||||
const mainContent = `const { app, BrowserWindow } = require("electron");
|
|
||||
const path = require("path"); |
|
||||
const fs = require("fs"); |
|
||||
|
|
||||
// Inline logger implementation
|
|
||||
const logger = { |
|
||||
log: (...args) => console.log(...args), |
|
||||
error: (...args) => console.error(...args), |
|
||||
info: (...args) => console.info(...args), |
|
||||
warn: (...args) => console.warn(...args), |
|
||||
debug: (...args) => console.debug(...args), |
|
||||
}; |
|
||||
|
|
||||
// Check if running in dev mode
|
|
||||
const isDev = process.argv.includes("--inspect"); |
|
||||
|
|
||||
function createWindow() { |
|
||||
// Add before createWindow function
|
|
||||
const preloadPath = path.join(__dirname, "preload.js"); |
|
||||
logger.log("Checking preload path:", preloadPath); |
|
||||
logger.log("Preload exists:", fs.existsSync(preloadPath)); |
|
||||
|
|
||||
// Create the browser window.
|
|
||||
const mainWindow = new BrowserWindow({ |
|
||||
width: 1200, |
|
||||
height: 800, |
|
||||
webPreferences: { |
|
||||
nodeIntegration: false, |
|
||||
contextIsolation: true, |
|
||||
webSecurity: true, |
|
||||
allowRunningInsecureContent: false, |
|
||||
preload: path.join(__dirname, "preload.js"), |
|
||||
}, |
|
||||
}); |
|
||||
|
|
||||
// Always open DevTools for now
|
|
||||
mainWindow.webContents.openDevTools(); |
|
||||
|
|
||||
// Intercept requests to fix asset paths
|
|
||||
mainWindow.webContents.session.webRequest.onBeforeRequest( |
|
||||
{ |
|
||||
urls: [ |
|
||||
"file://*/*/assets/*", |
|
||||
"file://*/assets/*", |
|
||||
"file:///assets/*", // Catch absolute paths
|
|
||||
"<all_urls>", // Catch all URLs as a fallback
|
|
||||
], |
|
||||
}, |
|
||||
(details, callback) => { |
|
||||
let url = details.url; |
|
||||
|
|
||||
// Handle paths that don't start with file://
|
|
||||
if (!url.startsWith("file://") && url.includes("/assets/")) { |
|
||||
url = \`file://\${path.join(__dirname, "www", url)}\`;
|
|
||||
} |
|
||||
|
|
||||
// Handle absolute paths starting with /assets/
|
|
||||
if (url.includes("/assets/") && !url.includes("/www/assets/")) { |
|
||||
const baseDir = url.includes("dist-electron") |
|
||||
? url.substring( |
|
||||
0, |
|
||||
url.indexOf("/dist-electron") + "/dist-electron".length, |
|
||||
) |
|
||||
: \`file://\${__dirname}\`;
|
|
||||
const assetPath = url.split("/assets/")[1]; |
|
||||
const newUrl = \`\${baseDir}/www/assets/\${assetPath}\`;
|
|
||||
callback({ redirectURL: newUrl }); |
|
||||
return; |
|
||||
} |
|
||||
|
|
||||
callback({}); // No redirect for other URLs
|
|
||||
}, |
|
||||
); |
|
||||
|
|
||||
if (isDev) { |
|
||||
// Debug info
|
|
||||
logger.log("Debug Info:"); |
|
||||
logger.log("Running in dev mode:", isDev); |
|
||||
logger.log("App is packaged:", app.isPackaged); |
|
||||
logger.log("Process resource path:", process.resourcesPath); |
|
||||
logger.log("App path:", app.getAppPath()); |
|
||||
logger.log("__dirname:", __dirname); |
|
||||
logger.log("process.cwd():", process.cwd()); |
|
||||
} |
|
||||
|
|
||||
const indexPath = path.join(__dirname, "www", "index.html"); |
|
||||
|
|
||||
if (isDev) { |
|
||||
logger.log("Loading index from:", indexPath); |
|
||||
logger.log("www path:", path.join(__dirname, "www")); |
|
||||
logger.log("www assets path:", path.join(__dirname, "www", "assets")); |
|
||||
} |
|
||||
|
|
||||
if (!fs.existsSync(indexPath)) { |
|
||||
logger.error(\`Index file not found at: \${indexPath}\`);
|
|
||||
throw new Error("Index file not found"); |
|
||||
} |
|
||||
|
|
||||
// Add CSP headers to allow API connections, Google Fonts, and zxing-wasm
|
|
||||
mainWindow.webContents.session.webRequest.onHeadersReceived( |
|
||||
(details, callback) => { |
|
||||
callback({ |
|
||||
responseHeaders: { |
|
||||
...details.responseHeaders, |
|
||||
"Content-Security-Policy": [ |
|
||||
"default-src 'self';" + |
|
||||
"connect-src 'self' https://api.endorser.ch https://*.timesafari.app https://*.jsdelivr.net;" + |
|
||||
"img-src 'self' data: https: blob:;" + |
|
||||
"script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.jsdelivr.net;" + |
|
||||
"style-src 'self' 'unsafe-inline' https://fonts.googleapis.com;" + |
|
||||
"font-src 'self' data: https://fonts.gstatic.com;" + |
|
||||
"style-src-elem 'self' 'unsafe-inline' https://fonts.googleapis.com;" + |
|
||||
"worker-src 'self' blob:;", |
|
||||
], |
|
||||
}, |
|
||||
}); |
|
||||
}, |
|
||||
); |
|
||||
|
|
||||
// Load the index.html
|
|
||||
mainWindow |
|
||||
.loadFile(indexPath) |
|
||||
.then(() => { |
|
||||
logger.log("Successfully loaded index.html"); |
|
||||
if (isDev) { |
|
||||
mainWindow.webContents.openDevTools(); |
|
||||
logger.log("DevTools opened - running in dev mode"); |
|
||||
} |
|
||||
}) |
|
||||
.catch((err) => { |
|
||||
logger.error("Failed to load index.html:", err); |
|
||||
logger.error("Attempted path:", indexPath); |
|
||||
}); |
|
||||
|
|
||||
// Listen for console messages from the renderer
|
|
||||
mainWindow.webContents.on("console-message", (_event, _level, message) => { |
|
||||
logger.log("Renderer Console:", message); |
|
||||
}); |
|
||||
|
|
||||
// Add right after creating the BrowserWindow
|
|
||||
mainWindow.webContents.on( |
|
||||
"did-fail-load", |
|
||||
(_event, errorCode, errorDescription) => { |
|
||||
logger.error("Page failed to load:", errorCode, errorDescription); |
|
||||
}, |
|
||||
); |
|
||||
|
|
||||
mainWindow.webContents.on("preload-error", (_event, preloadPath, error) => { |
|
||||
logger.error("Preload script error:", preloadPath, error); |
|
||||
}); |
|
||||
|
|
||||
mainWindow.webContents.on( |
|
||||
"console-message", |
|
||||
(_event, _level, message, line, sourceId) => { |
|
||||
logger.log("Renderer Console:", line, sourceId, message); |
|
||||
}, |
|
||||
); |
|
||||
|
|
||||
// Enable remote debugging when in dev mode
|
|
||||
if (isDev) { |
|
||||
mainWindow.webContents.openDevTools(); |
|
||||
} |
|
||||
} |
|
||||
|
|
||||
// Handle app ready
|
|
||||
app.whenReady().then(createWindow); |
|
||||
|
|
||||
// Handle all windows closed
|
|
||||
app.on("window-all-closed", () => { |
|
||||
if (process.platform !== "darwin") { |
|
||||
app.quit(); |
|
||||
} |
|
||||
}); |
|
||||
|
|
||||
app.on("activate", () => { |
|
||||
if (BrowserWindow.getAllWindows().length === 0) { |
|
||||
createWindow(); |
|
||||
} |
|
||||
}); |
|
||||
|
|
||||
// Handle any errors
|
|
||||
process.on("uncaughtException", (error) => { |
|
||||
logger.error("Uncaught Exception:", error); |
|
||||
}); |
|
||||
`;
|
|
||||
|
|
||||
// Write the main process file
|
|
||||
const mainDest = path.join(electronDistPath, 'main.js'); |
|
||||
fs.writeFileSync(mainDest, mainContent); |
|
||||
|
|
||||
// Copy preload script if it exists
|
|
||||
const preloadSrc = path.join(__dirname, '..', 'src', 'electron', 'preload.js'); |
|
||||
const preloadDest = path.join(electronDistPath, 'preload.js'); |
|
||||
if (fs.existsSync(preloadSrc)) { |
|
||||
console.log(`Copying ${preloadSrc} to ${preloadDest}`); |
|
||||
fs.copyFileSync(preloadSrc, preloadDest); |
|
||||
} |
|
||||
|
|
||||
// Verify build structure
|
|
||||
console.log('\nVerifying build structure:'); |
|
||||
console.log('Files in dist-electron:', fs.readdirSync(electronDistPath)); |
|
||||
|
|
||||
console.log('Build completed successfully!'); |
|
@ -0,0 +1,15 @@ |
|||||
|
const fs = require('fs'); |
||||
|
const path = require('path'); |
||||
|
|
||||
|
// Create public/wasm directory if it doesn't exist |
||||
|
const wasmDir = path.join(__dirname, '../public/wasm'); |
||||
|
if (!fs.existsSync(wasmDir)) { |
||||
|
fs.mkdirSync(wasmDir, { recursive: true }); |
||||
|
} |
||||
|
|
||||
|
// Copy the WASM file from node_modules to public/wasm |
||||
|
const sourceFile = path.join(__dirname, '../node_modules/@jlongster/sql.js/dist/sql-wasm.wasm'); |
||||
|
const targetFile = path.join(wasmDir, 'sql-wasm.wasm'); |
||||
|
|
||||
|
fs.copyFileSync(sourceFile, targetFile); |
||||
|
console.log('WASM file copied successfully!'); |
@ -0,0 +1,134 @@ |
|||||
|
import migrationService from "../services/migrationService"; |
||||
|
import type { QueryExecResult } from "../interfaces/database"; |
||||
|
import { DEFAULT_ENDORSER_API_SERVER } from "@/constants/app"; |
||||
|
import { arrayBufferToBase64 } from "@/libs/crypto"; |
||||
|
|
||||
|
// Generate a random secret for the secret table
|
||||
|
|
||||
|
// It's not really secure to maintain the secret next to the user's data.
|
||||
|
// However, until we have better hooks into a real wallet or reliable secure
|
||||
|
// storage, we'll do this for user convenience. As they sign more records
|
||||
|
// and integrate with more people, they'll value it more and want to be more
|
||||
|
// secure, so we'll prompt them to take steps to back it up, properly encrypt,
|
||||
|
// etc. At the beginning, we'll prompt for a password, then we'll prompt for a
|
||||
|
// PWA so it's not in a browser... and then we hope to be integrated with a
|
||||
|
// real wallet or something else more secure.
|
||||
|
|
||||
|
// One might ask: why encrypt at all? We figure a basic encryption is better
|
||||
|
// than none. Plus, we expect to support their own password or keystore or
|
||||
|
// external wallet as better signing options in the future, so it's gonna be
|
||||
|
// important to have the structure where each account access might require
|
||||
|
// user action.
|
||||
|
|
||||
|
// (Once upon a time we stored the secret in localStorage, but it frequently
|
||||
|
// got erased, even though the IndexedDB still had the identity data. This
|
||||
|
// ended up throwing lots of errors to the user... and they'd end up in a state
|
||||
|
// where they couldn't take action because they couldn't unlock that identity.)
|
||||
|
|
||||
|
const randomBytes = crypto.getRandomValues(new Uint8Array(32)); |
||||
|
const secretBase64 = arrayBufferToBase64(randomBytes); |
||||
|
|
||||
|
// Each migration can include multiple SQL statements (with semicolons)
|
||||
|
const MIGRATIONS = [ |
||||
|
{ |
||||
|
name: "001_initial", |
||||
|
// see ../db/tables files for explanations of the fields
|
||||
|
sql: ` |
||||
|
CREATE TABLE IF NOT EXISTS accounts ( |
||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT, |
||||
|
dateCreated TEXT NOT NULL, |
||||
|
derivationPath TEXT, |
||||
|
did TEXT NOT NULL, |
||||
|
identityEncrBase64 TEXT, -- encrypted & base64-encoded |
||||
|
mnemonicEncrBase64 TEXT, -- encrypted & base64-encoded |
||||
|
passkeyCredIdHex TEXT, |
||||
|
publicKeyHex TEXT NOT NULL |
||||
|
); |
||||
|
|
||||
|
CREATE INDEX IF NOT EXISTS idx_accounts_did ON accounts(did); |
||||
|
|
||||
|
CREATE TABLE IF NOT EXISTS secret ( |
||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT, |
||||
|
secretBase64 TEXT NOT NULL |
||||
|
); |
||||
|
|
||||
|
INSERT INTO secret (id, secretBase64) VALUES (1, '${secretBase64}'); |
||||
|
|
||||
|
CREATE TABLE IF NOT EXISTS settings ( |
||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT, |
||||
|
accountDid TEXT, |
||||
|
activeDid TEXT, |
||||
|
apiServer TEXT, |
||||
|
filterFeedByNearby BOOLEAN, |
||||
|
filterFeedByVisible BOOLEAN, |
||||
|
finishedOnboarding BOOLEAN, |
||||
|
firstName TEXT, |
||||
|
hideRegisterPromptOnNewContact BOOLEAN, |
||||
|
isRegistered BOOLEAN, |
||||
|
lastName TEXT, |
||||
|
lastAckedOfferToUserJwtId TEXT, |
||||
|
lastAckedOfferToUserProjectsJwtId TEXT, |
||||
|
lastNotifiedClaimId TEXT, |
||||
|
lastViewedClaimId TEXT, |
||||
|
notifyingNewActivityTime TEXT, |
||||
|
notifyingReminderMessage TEXT, |
||||
|
notifyingReminderTime TEXT, |
||||
|
partnerApiServer TEXT, |
||||
|
passkeyExpirationMinutes INTEGER, |
||||
|
profileImageUrl TEXT, |
||||
|
searchBoxes TEXT, -- Stored as JSON string |
||||
|
showContactGivesInline BOOLEAN, |
||||
|
showGeneralAdvanced BOOLEAN, |
||||
|
showShortcutBvc BOOLEAN, |
||||
|
vapid TEXT, |
||||
|
warnIfProdServer BOOLEAN, |
||||
|
warnIfTestServer BOOLEAN, |
||||
|
webPushServer TEXT |
||||
|
); |
||||
|
|
||||
|
CREATE INDEX IF NOT EXISTS idx_settings_accountDid ON settings(accountDid); |
||||
|
|
||||
|
INSERT INTO settings (id, apiServer) VALUES (1, '${DEFAULT_ENDORSER_API_SERVER}'); |
||||
|
|
||||
|
CREATE TABLE IF NOT EXISTS contacts ( |
||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT, |
||||
|
did TEXT NOT NULL, |
||||
|
name TEXT, |
||||
|
contactMethods TEXT, -- Stored as JSON string |
||||
|
nextPubKeyHashB64 TEXT, |
||||
|
notes TEXT, |
||||
|
profileImageUrl TEXT, |
||||
|
publicKeyBase64 TEXT, |
||||
|
seesMe BOOLEAN, |
||||
|
registered BOOLEAN |
||||
|
); |
||||
|
|
||||
|
CREATE INDEX IF NOT EXISTS idx_contacts_did ON contacts(did); |
||||
|
CREATE INDEX IF NOT EXISTS idx_contacts_name ON contacts(name); |
||||
|
|
||||
|
CREATE TABLE IF NOT EXISTS logs ( |
||||
|
date TEXT NOT NULL, |
||||
|
message TEXT NOT NULL |
||||
|
); |
||||
|
|
||||
|
CREATE TABLE IF NOT EXISTS temp ( |
||||
|
id TEXT PRIMARY KEY, |
||||
|
blobB64 TEXT |
||||
|
); |
||||
|
`,
|
||||
|
}, |
||||
|
]; |
||||
|
|
||||
|
export async function registerMigrations(): Promise<void> { |
||||
|
// Register all migrations
|
||||
|
for (const migration of MIGRATIONS) { |
||||
|
await migrationService.registerMigration(migration); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
export async function runMigrations( |
||||
|
sqlExec: (sql: string, params?: unknown[]) => Promise<Array<QueryExecResult>>, |
||||
|
): Promise<void> { |
||||
|
await registerMigrations(); |
||||
|
await migrationService.runMigrations(sqlExec); |
||||
|
} |
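A hedged sketch of how this module might be driven once a database handle exists. The wrapper below is hypothetical; the `exec` shape matches the sql.js type declarations later in this changeset.

```typescript
// Hypothetical wiring: forward each migration's SQL to a database whose
// exec() resolves to QueryExecResult[] (see the sql.js declarations below).
import type { QueryExecResult } from "../interfaces/database";
import { runMigrations } from "./migration";

export async function applyMigrations(db: {
  exec: (sql: string, params?: unknown[]) => Promise<QueryExecResult[]>;
}): Promise<void> {
  await runMigrations((sql, params) => db.exec(sql, params));
}
```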
@ -0,0 +1,267 @@ |
|||||
|
/** |
||||
|
* This file is the SQL replacement of the index.ts file in the db directory. |
||||
|
* That file will eventually be deleted. |
||||
|
*/ |
||||
|
|
||||
|
import { PlatformServiceFactory } from "@/services/PlatformServiceFactory"; |
||||
|
import { MASTER_SETTINGS_KEY, Settings } from "./tables/settings"; |
||||
|
import { logger } from "@/utils/logger"; |
||||
|
import { DEFAULT_ENDORSER_API_SERVER } from "@/constants/app"; |
||||
|
import { QueryExecResult } from "@/interfaces/database"; |
||||
|
|
||||
|
export async function updateDefaultSettings( |
||||
|
settingsChanges: Settings, |
||||
|
): Promise<boolean> { |
||||
|
delete settingsChanges.accountDid; // just in case
|
||||
|
// ensure there is no "id" that would override the key
|
||||
|
delete settingsChanges.id; |
||||
|
try { |
||||
|
const platformService = PlatformServiceFactory.getInstance(); |
||||
|
const { sql, params } = generateUpdateStatement( |
||||
|
settingsChanges, |
||||
|
"settings", |
||||
|
"id = ?", |
||||
|
[MASTER_SETTINGS_KEY], |
||||
|
); |
||||
|
const result = await platformService.dbExec(sql, params); |
||||
|
return result.changes === 1; |
||||
|
} catch (error) { |
||||
|
logger.error("Error updating default settings:", error); |
||||
|
if (error instanceof Error) { |
||||
|
throw error; // Re-throw if it's already an Error with a message
|
||||
|
} else { |
||||
|
throw new Error( |
||||
|
`Failed to update settings. We recommend you try again or restart the app.`, |
||||
|
); |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
export async function updateAccountSettings( |
||||
|
accountDid: string, |
||||
|
settingsChanges: Settings, |
||||
|
): Promise<boolean> { |
||||
|
settingsChanges.accountDid = accountDid; |
||||
|
delete settingsChanges.id; // key off account, not ID
|
||||
|
|
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
|
||||
|
// First try to update existing record
|
||||
|
const { sql: updateSql, params: updateParams } = generateUpdateStatement( |
||||
|
settingsChanges, |
||||
|
"settings", |
||||
|
"accountDid = ?", |
||||
|
[accountDid], |
||||
|
); |
||||
|
|
||||
|
const updateResult = await platform.dbExec(updateSql, updateParams); |
||||
|
|
||||
|
// If no record was updated, insert a new one
|
||||
|
if (updateResult.changes === 1) { |
||||
|
return true; |
||||
|
} else { |
||||
|
const columns = Object.keys(settingsChanges); |
||||
|
const values = Object.values(settingsChanges); |
||||
|
const placeholders = values.map(() => "?").join(", "); |
||||
|
|
||||
|
const insertSql = `INSERT INTO settings (${columns.join(", ")}) VALUES (${placeholders})`; |
||||
|
const result = await platform.dbExec(insertSql, values); |
||||
|
|
||||
|
return result.changes === 1; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
const DEFAULT_SETTINGS: Settings = { |
||||
|
id: MASTER_SETTINGS_KEY, |
||||
|
activeDid: undefined, |
||||
|
apiServer: DEFAULT_ENDORSER_API_SERVER, |
||||
|
}; |
||||
|
|
||||
|
// retrieves default settings
|
||||
|
export async function retrieveSettingsForDefaultAccount(): Promise<Settings> { |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
const result = await platform.dbQuery("SELECT * FROM settings WHERE id = ?", [ |
||||
|
MASTER_SETTINGS_KEY, |
||||
|
]); |
||||
|
if (!result) { |
||||
|
return DEFAULT_SETTINGS; |
||||
|
} else { |
||||
|
const settings = mapColumnsToValues( |
||||
|
result.columns, |
||||
|
result.values, |
||||
|
)[0] as Settings; |
||||
|
if (settings.searchBoxes) { |
||||
|
// @ts-expect-error - the searchBoxes field is a string in the DB
|
||||
|
settings.searchBoxes = JSON.parse(settings.searchBoxes); |
||||
|
} |
||||
|
return settings; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
export async function retrieveSettingsForActiveAccount(): Promise<Settings> { |
||||
|
const defaultSettings = await retrieveSettingsForDefaultAccount(); |
||||
|
if (!defaultSettings.activeDid) { |
||||
|
return defaultSettings; |
||||
|
} else { |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
const result = await platform.dbQuery( |
||||
|
"SELECT * FROM settings WHERE accountDid = ?", |
||||
|
[defaultSettings.activeDid], |
||||
|
); |
||||
|
const overrideSettings = result |
||||
|
? (mapColumnsToValues(result.columns, result.values)[0] as Settings) |
||||
|
: {}; |
||||
|
const overrideSettingsFiltered = Object.fromEntries( |
||||
|
Object.entries(overrideSettings).filter(([_, v]) => v !== null), |
||||
|
); |
||||
|
const settings = { ...defaultSettings, ...overrideSettingsFiltered }; |
||||
|
if (settings.searchBoxes) { |
||||
|
// @ts-expect-error - the searchBoxes field is a string in the DB
|
||||
|
settings.searchBoxes = JSON.parse(settings.searchBoxes); |
||||
|
} |
||||
|
return settings; |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
let lastCleanupDate: string | null = null; |
||||
|
|
||||
|
/** |
||||
|
* Logs a message to the database with proper handling of concurrent writes |
||||
|
* @param message - The message to log |
||||
|
* @author Matthew Raymer |
||||
|
*/ |
||||
|
export async function logToDb(message: string): Promise<void> { |
||||
|
const platform = PlatformServiceFactory.getInstance(); |
||||
|
const todayKey = new Date().toDateString(); |
||||
|
const nowKey = new Date().toISOString(); |
||||
|
|
||||
|
try { |
||||
|
// Insert the log entry; if the write fails, fall back to the console logging below
|
||||
|
await platform.dbExec("INSERT INTO logs (date, message) VALUES (?, ?)", [ |
||||
|
nowKey, |
||||
|
message, |
||||
|
]); |
||||
|
|
||||
|
// Clean up old logs (keep only last 7 days) - do this less frequently
|
||||
|
// Only clean up if the date is different from the last cleanup
|
||||
|
if (!lastCleanupDate || lastCleanupDate !== todayKey) { |
||||
|
const sevenDaysAgo = new Date( |
||||
|
new Date().getTime() - 7 * 24 * 60 * 60 * 1000, |
||||
|
); |
||||
|
await platform.dbExec("DELETE FROM logs WHERE date < ?", [ |
||||
|
sevenDaysAgo.toDateString(), |
||||
|
]); |
||||
|
lastCleanupDate = todayKey; |
||||
|
} |
||||
|
} catch (error) { |
||||
|
// Log to console as fallback
|
||||
|
// eslint-disable-next-line no-console
|
||||
|
console.error( |
||||
|
"Error logging to database:", |
||||
|
error, |
||||
|
" ... for original message:", |
||||
|
message, |
||||
|
); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// similar method is in the sw_scripts/additional-scripts.js file
|
||||
|
export async function logConsoleAndDb( |
||||
|
message: string, |
||||
|
isError = false, |
||||
|
): Promise<void> { |
||||
|
if (isError) { |
||||
|
logger.error(`${new Date().toISOString()} ${message}`); |
||||
|
} else { |
||||
|
logger.log(`${new Date().toISOString()} ${message}`); |
||||
|
} |
||||
|
await logToDb(message); |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Generates an SQL INSERT statement and parameters from a model object. |
||||
|
* @param model The model object containing fields to update |
||||
|
* @param tableName The name of the table to update |
||||
|
* @returns Object containing the SQL statement and parameters array |
||||
|
*/ |
||||
|
export function generateInsertStatement( |
||||
|
model: Record<string, unknown>, |
||||
|
tableName: string, |
||||
|
): { sql: string; params: unknown[] } { |
||||
|
const columns = Object.keys(model).filter((key) => model[key] !== undefined); |
||||
|
const values = Object.values(model).filter((value) => value !== undefined); |
||||
|
const placeholders = values.map(() => "?").join(", "); |
||||
|
const insertSql = `INSERT INTO ${tableName} (${columns.join(", ")}) VALUES (${placeholders})`; |
||||
|
return { |
||||
|
sql: insertSql, |
||||
|
params: values, |
||||
|
}; |
||||
|
} |
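A brief usage sketch for the helper above; the field values are illustrative.

```typescript
// Undefined fields are skipped, so only `did` and `name` reach the statement.
const { sql, params } = generateInsertStatement(
  { did: "did:example:123", name: "Alice", notes: undefined },
  "contacts",
);
// sql    => "INSERT INTO contacts (did, name) VALUES (?, ?)"
// params => ["did:example:123", "Alice"]
```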
||||
|
|
||||
|
/** |
||||
|
* Generates an SQL UPDATE statement and parameters from a model object. |
||||
|
* @param model The model object containing fields to update |
||||
|
* @param tableName The name of the table to update |
||||
|
* @param whereClause The WHERE clause for the update (e.g. "id = ?") |
||||
|
* @param whereParams Parameters for the WHERE clause |
||||
|
* @returns Object containing the SQL statement and parameters array |
||||
|
*/ |
||||
|
export function generateUpdateStatement( |
||||
|
model: Record<string, unknown>, |
||||
|
tableName: string, |
||||
|
whereClause: string, |
||||
|
whereParams: unknown[] = [], |
||||
|
): { sql: string; params: unknown[] } { |
||||
|
// Filter out undefined values and build the SET clause
|
||||
|
const setClauses: string[] = []; |
||||
|
const params: unknown[] = []; |
||||
|
|
||||
|
Object.entries(model).forEach(([key, value]) => { |
||||
|
if (value !== undefined) { |
||||
|
setClauses.push(`${key} = ?`); |
||||
|
params.push(value); |
||||
|
} |
||||
|
}); |
||||
|
|
||||
|
if (setClauses.length === 0) { |
||||
|
throw new Error("No valid fields to update"); |
||||
|
} |
||||
|
|
||||
|
const sql = `UPDATE ${tableName} SET ${setClauses.join(", ")} WHERE ${whereClause}`; |
||||
|
|
||||
|
return { |
||||
|
sql, |
||||
|
params: [...params, ...whereParams], |
||||
|
}; |
||||
|
} |
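The matching sketch for the update helper; values are illustrative.

```typescript
// WHERE parameters are appended after the SET parameters.
const { sql, params } = generateUpdateStatement(
  { firstName: "Alice", isRegistered: true },
  "settings",
  "accountDid = ?",
  ["did:example:123"],
);
// sql    => "UPDATE settings SET firstName = ?, isRegistered = ? WHERE accountDid = ?"
// params => ["Alice", true, "did:example:123"]
```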
||||
|
|
||||
|
export function mapQueryResultToValues( |
||||
|
record: QueryExecResult | undefined, |
||||
|
): Array<Record<string, unknown>> { |
||||
|
if (!record) { |
||||
|
return []; |
||||
|
} |
||||
|
return mapColumnsToValues(record.columns, record.values) as Array< |
||||
|
Record<string, unknown> |
||||
|
>; |
||||
|
} |
||||
|
|
||||
|
/** |
||||
|
* Maps an array of column names to an array of value arrays, creating objects where each column name |
||||
|
* is mapped to its corresponding value. |
||||
|
* @param columns Array of column names to use as object keys |
||||
|
* @param values Array of value arrays, where each inner array corresponds to one row of data |
||||
|
* @returns Array of objects where each object maps column names to their corresponding values |
||||
|
*/ |
||||
|
export function mapColumnsToValues( |
||||
|
columns: string[], |
||||
|
values: unknown[][], |
||||
|
): Array<Record<string, unknown>> { |
||||
|
return values.map((row) => { |
||||
|
const obj: Record<string, unknown> = {}; |
||||
|
columns.forEach((column, index) => { |
||||
|
obj[column] = row[index]; |
||||
|
}); |
||||
|
return obj; |
||||
|
}); |
||||
|
} |
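For completeness, a small illustrative call:

```typescript
// Each row array is zipped with the column names into one object per row.
const rows = mapColumnsToValues(
  ["id", "did"],
  [
    [1, "did:example:abc"],
    [2, "did:example:def"],
  ],
);
// rows => [{ id: 1, did: "did:example:abc" }, { id: 2, did: "did:example:def" }]
```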
@ -0,0 +1,59 @@ |
|||||
|
import type { QueryExecResult, SqlValue } from "./database"; |
||||
|
|
||||
|
declare module "@jlongster/sql.js" { |
||||
|
interface SQL { |
||||
|
Database: new (path: string, options?: { filename: boolean }) => AbsurdSqlDatabase; |
||||
|
FS: { |
||||
|
mkdir: (path: string) => void; |
||||
|
mount: (fs: any, options: any, path: string) => void; |
||||
|
open: (path: string, flags: string) => any; |
||||
|
close: (stream: any) => void; |
||||
|
}; |
||||
|
register_for_idb: (fs: any) => void; |
||||
|
} |
||||
|
|
||||
|
interface AbsurdSqlDatabase { |
||||
|
exec: (sql: string, params?: unknown[]) => Promise<QueryExecResult[]>; |
||||
|
run: ( |
||||
|
sql: string, |
||||
|
params?: unknown[], |
||||
|
) => Promise<{ changes: number; lastId?: number }>; |
||||
|
} |
||||
|
|
||||
|
const initSqlJs: (options?: { |
||||
|
locateFile?: (file: string) => string; |
||||
|
}) => Promise<SQL>; |
||||
|
|
||||
|
export default initSqlJs; |
||||
|
} |
||||
|
|
||||
|
declare module "absurd-sql" { |
||||
|
import type { SQL } from "@jlongster/sql.js"; |
||||
|
|
||||
|
export class SQLiteFS { |
||||
|
constructor(fs: any, backend: any); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
declare module "absurd-sql/dist/indexeddb-backend" { |
||||
|
export default class IndexedDBBackend { |
||||
|
constructor(); |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
declare module "absurd-sql/dist/indexeddb-main-thread" { |
||||
|
export interface SQLiteOptions { |
||||
|
filename?: string; |
||||
|
autoLoad?: boolean; |
||||
|
debug?: boolean; |
||||
|
} |
||||
|
|
||||
|
export interface SQLiteDatabase { |
||||
|
exec: (sql: string, params?: unknown[]) => Promise<QueryExecResult[]>; |
||||
|
close: () => Promise<void>; |
||||
|
} |
||||
|
|
||||
|
export function initSqlJs(options?: any): Promise<any>; |
||||
|
export function createDatabase(options?: SQLiteOptions): Promise<SQLiteDatabase>; |
||||
|
export function openDatabase(options?: SQLiteOptions): Promise<SQLiteDatabase>; |
||||
|
} |
@ -0,0 +1,17 @@ |
|||||
|
export type SqlValue = string | number | null | Uint8Array; |
||||
|
|
||||
|
export interface QueryExecResult { |
||||
|
columns: Array<string>; |
||||
|
values: Array<Array<SqlValue>>; |
||||
|
} |
||||
|
|
||||
|
export interface DatabaseService { |
||||
|
initialize(): Promise<void>; |
||||
|
query(sql: string, params?: unknown[]): Promise<QueryExecResult[]>; |
||||
|
run( |
||||
|
sql: string, |
||||
|
params?: unknown[], |
||||
|
): Promise<{ changes: number; lastId?: number }>; |
||||
|
getOneRow(sql: string, params?: unknown[]): Promise<unknown[] | undefined>; |
||||
|
getAll(sql: string, params?: unknown[]): Promise<unknown[][]>; |
||||
|
} |
@@ -1,7 +1,37 @@
-export * from "./claims";
-export * from "./claims-result";
-export * from "./common";
+export type {
+  // From common.ts
+  GenericCredWrapper,
+  GenericVerifiableCredential,
+  KeyMeta,
+  // Exclude types that are also exported from other files
+  // GiveVerifiableCredential,
+  // OfferVerifiableCredential,
+  // RegisterVerifiableCredential,
+  // PlanSummaryRecord,
+  // UserInfo,
+} from "./common";
+
+export type {
+  // From claims.ts
+  GiveActionClaim,
+  OfferClaim,
+  RegisterActionClaim,
+} from "./claims";
+
+export type {
+  // From claims-result.ts
+  CreateAndSubmitClaimResult,
+} from "./claims-result";
+
+export type {
+  // From records.ts
+  PlanSummaryRecord,
+} from "./records";
+
+export type {
+  // From user.ts
+  UserInfo,
+} from "./user";
+
 export * from "./limits";
-export * from "./records";
-export * from "./user";
 export * from "./deepLinks";
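Read as a whole, this hunk swaps broad `export *` statements for explicit named type re-exports, so a type name defined in more than one module (for example PlanSummaryRecord or UserInfo) is exported from exactly one place and no longer collides in the barrel. Consumers keep importing from the barrel; the path alias below is an assumption used only for illustration.

// Hypothetical consumer; the barrel's import path is assumed.
import type { GiveActionClaim, PlanSummaryRecord, UserInfo } from "@/interfaces";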
@@ -1,4 +1,75 @@
 import { initializeApp } from "./main.common";
+import { logger } from "./utils/logger";
+
+async function initializeSQLite() {
+  try {
+    // Wait for SQLite to be available in the main process
+    let retries = 0;
+    const maxRetries = 5;
+    const retryDelay = 1000; // 1 second
+
+    while (retries < maxRetries) {
+      try {
+        const isAvailable = await window.CapacitorSQLite.isAvailable();
+        if (isAvailable) {
+          logger.info(
+            "[Electron] SQLite plugin bridge initialized successfully",
+          );
+          return true;
+        }
+      } catch (error) {
+        logger.warn(
+          `[Electron] SQLite not available yet (attempt ${retries + 1}/${maxRetries}):`,
+          error,
+        );
+      }
+
+      retries++;
+      if (retries < maxRetries) {
+        await new Promise((resolve) => setTimeout(resolve, retryDelay));
+      }
+    }
+
+    throw new Error("SQLite plugin not available after maximum retries");
+  } catch (error) {
+    logger.error(
+      "[Electron] Failed to initialize SQLite plugin bridge:",
+      error,
+    );
+    throw error;
+  }
+}
+
+const platform = process.env.VITE_PLATFORM;
+const pwa_enabled = process.env.VITE_PWA_ENABLED === "true";
+
+logger.info("[Electron] Initializing app");
+logger.info("[Electron] Platform:", { platform });
+logger.info("[Electron] PWA enabled:", { pwa_enabled });
+
+if (pwa_enabled) {
+  logger.warn("[Electron] PWA is enabled, but not supported in electron");
+}
+
+// Initialize app and SQLite
 const app = initializeApp();
-app.mount("#app");
+
+// Initialize SQLite first, then mount the app
+initializeSQLite()
+  .then(() => {
+    logger.info("[Electron] SQLite initialized, mounting app...");
+    app.mount("#app");
+  })
+  .catch((error) => {
+    logger.error("[Electron] Failed to initialize app:", error);
+    // Show error to user
+    const errorDiv = document.createElement("div");
+    errorDiv.style.cssText =
+      "position: fixed; top: 50%; left: 50%; transform: translate(-50%, -50%); background: #ffebee; color: #c62828; padding: 20px; border-radius: 4px; text-align: center; max-width: 80%;";
+    errorDiv.innerHTML = `
+      <h2>Failed to Initialize Database</h2>
+      <p>There was an error initializing the database. Please try restarting the application.</p>
+      <p>Error details: ${error.message}</p>
+    `;
+    document.body.appendChild(errorDiv);
+  });
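The wait-and-retry loop above is written inline for the Capacitor SQLite bridge. The same pattern can be expressed as a small generic helper; this is only a sketch, and withRetries is a hypothetical name that does not appear in this changeset.

// Hypothetical generic retry helper mirroring the inline loop above.
async function withRetries<T>(
  attempt: () => Promise<T>,
  maxRetries = 5,
  retryDelayMs = 1000,
): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < maxRetries; i++) {
    try {
      return await attempt();
    } catch (error) {
      lastError = error;
      if (i < maxRetries - 1) {
        await new Promise((resolve) => setTimeout(resolve, retryDelayMs));
      }
    }
  }
  throw lastError instanceof Error
    ? lastError
    : new Error("Operation failed after maximum retries");
}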
@@ -1,215 +0,0 @@
import { createPinia } from "pinia";
import { App as VueApp, ComponentPublicInstance, createApp } from "vue";
import App from "./App.vue";
import "./registerServiceWorker";
import router from "./router";
import axios from "axios";
import VueAxios from "vue-axios";
import Notifications from "notiwind";
import "./assets/styles/tailwind.css";

import { library } from "@fortawesome/fontawesome-svg-core";
import {
  faArrowDown, faArrowLeft, faArrowRight, faArrowRotateBackward,
  faArrowUpRightFromSquare, faArrowUp, faBan, faBitcoinSign, faBurst,
  faCalendar, faCamera, faCameraRotate, faCaretDown, faChair, faCheck,
  faChevronDown, faChevronLeft, faChevronRight, faChevronUp, faCircle,
  faCircleCheck, faCircleInfo, faCircleQuestion, faCircleUser, faClock,
  faCoins, faComment, faCopy, faDollar, faEllipsis, faEllipsisVertical,
  faEnvelopeOpenText, faEraser, faEye, faEyeSlash, faFileContract,
  faFileLines, faFilter, faFloppyDisk, faFolderOpen, faForward, faGift,
  faGlobe, faHammer, faHand, faHandHoldingDollar, faHandHoldingHeart,
  faHouseChimney, faImage, faImagePortrait, faLeftRight, faLightbulb,
  faLink, faLocationDot, faLongArrowAltLeft, faLongArrowAltRight,
  faMagnifyingGlass, faMessage, faMinus, faPen, faPersonCircleCheck,
  faPersonCircleQuestion, faPlus, faQuestion, faQrcode, faRightFromBracket,
  faRotate, faShareNodes, faSpinner, faSquare, faSquareCaretDown,
  faSquareCaretUp, faSquarePlus, faTrashCan, faTriangleExclamation, faUser,
  faUsers, faXmark,
} from "@fortawesome/free-solid-svg-icons";

library.add(
  faArrowDown, faArrowLeft, faArrowRight, faArrowRotateBackward,
  faArrowUpRightFromSquare, faArrowUp, faBan, faBitcoinSign, faBurst,
  faCalendar, faCamera, faCameraRotate, faCaretDown, faChair, faCheck,
  faChevronDown, faChevronLeft, faChevronRight, faChevronUp, faCircle,
  faCircleCheck, faCircleInfo, faCircleQuestion, faCircleUser, faClock,
  faCoins, faComment, faCopy, faDollar, faEllipsis, faEllipsisVertical,
  faEnvelopeOpenText, faEraser, faEye, faEyeSlash, faFileContract,
  faFileLines, faFilter, faFloppyDisk, faFolderOpen, faForward, faGift,
  faGlobe, faHammer, faHand, faHandHoldingDollar, faHandHoldingHeart,
  faHouseChimney, faImage, faImagePortrait, faLeftRight, faLightbulb,
  faLink, faLocationDot, faLongArrowAltLeft, faLongArrowAltRight,
  faMagnifyingGlass, faMessage, faMinus, faPen, faPersonCircleCheck,
  faPersonCircleQuestion, faPlus, faQrcode, faQuestion, faRotate,
  faRightFromBracket, faShareNodes, faSpinner, faSquare, faSquareCaretDown,
  faSquareCaretUp, faSquarePlus, faTrashCan, faTriangleExclamation, faUser,
  faUsers, faXmark,
);

import { FontAwesomeIcon } from "@fortawesome/vue-fontawesome";
import Camera from "simple-vue-camera";
import { logger } from "./utils/logger";

// Can trigger this with a 'throw' inside some top-level function, eg. on the HomeView
function setupGlobalErrorHandler(app: VueApp) {
  // @ts-expect-error 'cause we cannot see why config is not defined
  app.config.errorHandler = (
    err: Error,
    instance: ComponentPublicInstance | null,
    info: string,
  ) => {
    logger.error(
      "Ouch! Global Error Handler.",
      "Error:",
      err,
      "- Error toString:",
      err.toString(),
      "- Info:",
      info,
      "- Instance:",
      instance,
    );
    // Want to show a nice notiwind notification but can't figure out how.
    alert(
      (err.message || "Something bad happened") +
        " - Try reloading or restarting the app.",
    );
  };
}

const app = createApp(App)
  .component("fa", FontAwesomeIcon)
  .component("camera", Camera)
  .use(createPinia())
  .use(VueAxios, axios)
  .use(router)
  .use(Notifications);

setupGlobalErrorHandler(app);

app.mount("#app");
@@ -1,5 +1,37 @@
+import { initBackend } from "absurd-sql/dist/indexeddb-main-thread";
 import { initializeApp } from "./main.common";
-import "./registerServiceWorker"; // Web PWA support
+import { logger } from "./utils/logger";
+
+const platform = process.env.VITE_PLATFORM;
+const pwa_enabled = process.env.VITE_PWA_ENABLED === "true";
+
+logger.error("[Web] PWA enabled", { pwa_enabled });
+logger.error("[Web] Platform", { platform });
+
+// Only import service worker for web builds
+if (platform !== "electron" && pwa_enabled) {
+  import("./registerServiceWorker"); // Web PWA support
+}

 const app = initializeApp();
+
+function sqlInit() {
+  // see https://github.com/jlongster/absurd-sql
+  const worker = new Worker(
+    new URL("./registerSQLWorker.js", import.meta.url),
+    {
+      type: "module",
+    },
+  );
+  // This is only required because Safari doesn't support nested
+  // workers. This installs a handler that will proxy creating web
+  // workers through the main thread
+  initBackend(worker);
+}
+if (platform === "web" || platform === "development") {
+  sqlInit();
+} else {
+  logger.info("[Web] SQL not initialized for platform", { platform });
+}
+
 app.mount("#app");
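absurd-sql's fast path relies on SharedArrayBuffer and Atomics, which browsers only expose on cross-origin isolated pages; when they are missing it drops to a slower fallback (the readIfFallback call in the database service later in this diff handles that case). Cross-origin isolation is enabled by serving the app with COOP/COEP headers. The Vite snippet below is a sketch of one way to do that during development; it is an assumption, not part of this changeset, and the project may already configure it elsewhere.

// vite.config.ts sketch (assumed, not taken from this diff)
import { defineConfig } from "vite";

export default defineConfig({
  server: {
    headers: {
      "Cross-Origin-Opener-Policy": "same-origin",
      "Cross-Origin-Embedder-Policy": "require-corp",
    },
  },
});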
@@ -0,0 +1,6 @@
import databaseService from "./services/AbsurdSqlDatabaseService";

async function run() {
  await databaseService.initialize();
}
run();
@@ -0,0 +1,29 @@
import { DatabaseService } from "../interfaces/database";

declare module "@jlongster/sql.js" {
  interface SQL {
    Database: unknown;
    FS: unknown;
    register_for_idb: (fs: unknown) => void;
  }

  function initSqlJs(config: {
    locateFile: (file: string) => string;
  }): Promise<SQL>;
  export default initSqlJs;
}

declare module "absurd-sql" {
  export class SQLiteFS {
    constructor(fs: unknown, backend: unknown);
  }
}

declare module "absurd-sql/dist/indexeddb-backend" {
  export default class IndexedDBBackend {
    constructor();
  }
}

declare const databaseService: DatabaseService;
export default databaseService;
@@ -0,0 +1,243 @@
import initSqlJs from "@jlongster/sql.js";
import { SQLiteFS } from "absurd-sql";
import IndexedDBBackend from "absurd-sql/dist/indexeddb-backend";

import { runMigrations } from "../db-sql/migration";
import type { DatabaseService, QueryExecResult } from "../interfaces/database";
import { logger } from "@/utils/logger";

interface QueuedOperation {
  type: "run" | "query" | "getOneRow" | "getAll";
  sql: string;
  params: unknown[];
  resolve: (value: unknown) => void;
  reject: (reason: unknown) => void;
}

interface AbsurdSqlDatabase {
  exec: (sql: string, params?: unknown[]) => Promise<QueryExecResult[]>;
  run: (
    sql: string,
    params?: unknown[],
  ) => Promise<{ changes: number; lastId?: number }>;
}

class AbsurdSqlDatabaseService implements DatabaseService {
  private static instance: AbsurdSqlDatabaseService | null = null;
  private db: AbsurdSqlDatabase | null;
  private initialized: boolean;
  private initializationPromise: Promise<void> | null = null;
  private operationQueue: Array<QueuedOperation> = [];
  private isProcessingQueue: boolean = false;

  private constructor() {
    this.db = null;
    this.initialized = false;
  }

  static getInstance(): AbsurdSqlDatabaseService {
    if (!AbsurdSqlDatabaseService.instance) {
      AbsurdSqlDatabaseService.instance = new AbsurdSqlDatabaseService();
    }
    return AbsurdSqlDatabaseService.instance;
  }

  async initialize(): Promise<void> {
    // If already initialized, return immediately
    if (this.initialized) {
      return;
    }

    // If initialization is in progress, wait for it
    if (this.initializationPromise) {
      return this.initializationPromise;
    }

    // Start initialization
    this.initializationPromise = this._initialize();
    try {
      await this.initializationPromise;
    } catch (error) {
      logger.error(`AbsurdSqlDatabaseService initialize method failed:`, error);
      this.initializationPromise = null; // Reset on failure
      throw error;
    }
  }

  private async _initialize(): Promise<void> {
    if (this.initialized) {
      return;
    }

    const SQL = await initSqlJs({
      locateFile: (file: string) => {
        return new URL(
          `/node_modules/@jlongster/sql.js/dist/${file}`,
          import.meta.url,
        ).href;
      },
    });

    const sqlFS = new SQLiteFS(SQL.FS, new IndexedDBBackend());
    SQL.register_for_idb(sqlFS);

    SQL.FS.mkdir("/sql");
    SQL.FS.mount(sqlFS, {}, "/sql");

    const path = "/sql/timesafari.absurd-sql";
    if (typeof SharedArrayBuffer === "undefined") {
      const stream = SQL.FS.open(path, "a+");
      await stream.node.contents.readIfFallback();
      SQL.FS.close(stream);
    }

    this.db = new SQL.Database(path, { filename: true });
    if (!this.db) {
      throw new Error(
        "The database initialization failed. We recommend you restart or reinstall.",
      );
    }

    // An error is thrown without this pragma: "File has invalid page size. (the first block of a new file must be written first)"
    await this.db.exec(`PRAGMA journal_mode=MEMORY;`);
    const sqlExec = this.db.exec.bind(this.db);

    // Run migrations
    await runMigrations(sqlExec);

    this.initialized = true;

    // Start processing the queue after initialization
    this.processQueue();
  }

  private async processQueue(): Promise<void> {
    if (this.isProcessingQueue || !this.initialized || !this.db) {
      return;
    }

    this.isProcessingQueue = true;

    while (this.operationQueue.length > 0) {
      const operation = this.operationQueue.shift();
      if (!operation) continue;

      try {
        let queryResult: QueryExecResult[] = [];
        let result: unknown;
        switch (operation.type) {
          case "run":
            result = await this.db.run(operation.sql, operation.params);
            break;
          case "query":
            result = await this.db.exec(operation.sql, operation.params);
            break;
          case "getOneRow":
            queryResult = await this.db.exec(operation.sql, operation.params);
            result = queryResult[0]?.values[0];
            break;
          case "getAll":
            queryResult = await this.db.exec(operation.sql, operation.params);
            result = queryResult[0]?.values || [];
            break;
        }
        operation.resolve(result);
      } catch (error) {
        logger.error(
          "Error while processing SQL queue:",
          error,
          " ... for sql:",
          operation.sql,
          " ... with params:",
          operation.params,
        );
        operation.reject(error);
      }
    }

    this.isProcessingQueue = false;
  }

  private async queueOperation<R>(
    type: QueuedOperation["type"],
    sql: string,
    params: unknown[] = [],
  ): Promise<R> {
    return new Promise<R>((resolve, reject) => {
      const operation: QueuedOperation = {
        type,
        sql,
        params,
        resolve: (value: unknown) => resolve(value as R),
        reject,
      };
      this.operationQueue.push(operation);

      // If we're already initialized, start processing the queue
      if (this.initialized && this.db) {
        this.processQueue();
      }
    });
  }

  private async waitForInitialization(): Promise<void> {
    // If we have an initialization promise, wait for it
    if (this.initializationPromise) {
      await this.initializationPromise;
      return;
    }

    // If not initialized and no promise, start initialization
    if (!this.initialized) {
      await this.initialize();
      return;
    }

    // If initialized but no db, something went wrong
    if (!this.db) {
      logger.error(
        `Database not properly initialized after await waitForInitialization() - initialized flag is true but db is null`,
      );
      throw new Error(
        `The database could not be initialized. We recommend you restart or reinstall.`,
      );
    }
  }

  // Used for inserts, updates, and deletes
  async run(
    sql: string,
    params: unknown[] = [],
  ): Promise<{ changes: number; lastId?: number }> {
    await this.waitForInitialization();
    return this.queueOperation<{ changes: number; lastId?: number }>(
      "run",
      sql,
      params,
    );
  }

  // Note that the resulting array may be empty if there are no results from the query
  async query(sql: string, params: unknown[] = []): Promise<QueryExecResult[]> {
    await this.waitForInitialization();
    return this.queueOperation<QueryExecResult[]>("query", sql, params);
  }

  async getOneRow(
    sql: string,
    params: unknown[] = [],
  ): Promise<unknown[] | undefined> {
    await this.waitForInitialization();
    return this.queueOperation<unknown[] | undefined>("getOneRow", sql, params);
  }

  async getAll(sql: string, params: unknown[] = []): Promise<unknown[][]> {
    await this.waitForInitialization();
    return this.queueOperation<unknown[][]>("getAll", sql, params);
  }
}

// Create a singleton instance
const databaseService = AbsurdSqlDatabaseService.getInstance();

export default databaseService;
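Because the service is a lazily initialized singleton with an internal operation queue, callers never sequence initialization themselves: every public method first awaits waitForInitialization() and then queues its statement. A usage sketch follows; the "logs" table and its rows are hypothetical and only illustrate the call shapes (the import path matches the one used by registerSQLWorker.js above).

import databaseService from "./services/AbsurdSqlDatabaseService";

// Illustrative only: the "logs" table is not part of this changeset.
async function demo(): Promise<void> {
  await databaseService.run(
    "CREATE TABLE IF NOT EXISTS logs (id INTEGER PRIMARY KEY AUTOINCREMENT, message TEXT);",
  );
  const { changes } = await databaseService.run(
    "INSERT INTO logs (message) VALUES (?);",
    ["hello from absurd-sql"],
  );
  console.log("rows inserted:", changes);

  const results = await databaseService.query("SELECT id, message FROM logs;");
  console.log(results[0]?.values ?? []); // may be empty if nothing matched
}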
@@ -0,0 +1,116 @@
import { logger } from "../../utils/logger";
import { SQLiteDBConnection } from "@capacitor-community/sqlite";

interface ConnectionState {
  connection: SQLiteDBConnection;
  lastUsed: number;
  inUse: boolean;
}

export class DatabaseConnectionPool {
  private static instance: DatabaseConnectionPool | null = null;
  private connections: Map<string, ConnectionState> = new Map();
  private readonly MAX_CONNECTIONS = 1; // We only need one connection for SQLite
  private readonly MAX_IDLE_TIME = 5 * 60 * 1000; // 5 minutes
  private readonly CLEANUP_INTERVAL = 60 * 1000; // 1 minute
  private cleanupInterval: NodeJS.Timeout | null = null;

  private constructor() {
    // Start cleanup interval
    this.cleanupInterval = setInterval(() => this.cleanup(), this.CLEANUP_INTERVAL);
  }

  public static getInstance(): DatabaseConnectionPool {
    if (!DatabaseConnectionPool.instance) {
      DatabaseConnectionPool.instance = new DatabaseConnectionPool();
    }
    return DatabaseConnectionPool.instance;
  }

  public async getConnection(
    dbName: string,
    createConnection: () => Promise<SQLiteDBConnection>
  ): Promise<SQLiteDBConnection> {
    // Check if we have an existing connection
    const existing = this.connections.get(dbName);
    if (existing && !existing.inUse) {
      existing.inUse = true;
      existing.lastUsed = Date.now();
      logger.debug(`[ConnectionPool] Reusing existing connection for ${dbName}`);
      return existing.connection;
    }

    // If we have too many connections, wait for one to be released
    if (this.connections.size >= this.MAX_CONNECTIONS) {
      logger.debug(`[ConnectionPool] Waiting for connection to be released...`);
      await this.waitForConnection();
    }

    // Create new connection
    try {
      const connection = await createConnection();
      this.connections.set(dbName, {
        connection,
        lastUsed: Date.now(),
        inUse: true
      });
      logger.debug(`[ConnectionPool] Created new connection for ${dbName}`);
      return connection;
    } catch (error) {
      logger.error(`[ConnectionPool] Failed to create connection for ${dbName}:`, error);
      throw error;
    }
  }

  public async releaseConnection(dbName: string): Promise<void> {
    const connection = this.connections.get(dbName);
    if (connection) {
      connection.inUse = false;
      connection.lastUsed = Date.now();
      logger.debug(`[ConnectionPool] Released connection for ${dbName}`);
    }
  }

  private async waitForConnection(): Promise<void> {
    return new Promise((resolve) => {
      const checkInterval = setInterval(() => {
        if (this.connections.size < this.MAX_CONNECTIONS) {
          clearInterval(checkInterval);
          resolve();
        }
      }, 100);
    });
  }

  private async cleanup(): Promise<void> {
    const now = Date.now();
    for (const [dbName, state] of this.connections.entries()) {
      if (!state.inUse && now - state.lastUsed > this.MAX_IDLE_TIME) {
        try {
          await state.connection.close();
          this.connections.delete(dbName);
          logger.debug(`[ConnectionPool] Cleaned up idle connection for ${dbName}`);
        } catch (error) {
          logger.warn(`[ConnectionPool] Error closing idle connection for ${dbName}:`, error);
        }
      }
    }
  }

  public async closeAll(): Promise<void> {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }

    for (const [dbName, state] of this.connections.entries()) {
      try {
        await state.connection.close();
        logger.debug(`[ConnectionPool] Closed connection for ${dbName}`);
      } catch (error) {
        logger.warn(`[ConnectionPool] Error closing connection for ${dbName}:`, error);
      }
    }
    this.connections.clear();
  }
}
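The pool hands out at most one live SQLite connection per database name and closes idle ones in the background, so callers must pair every getConnection with a releaseConnection. A sketch of that call pattern follows; the import path and the createConnection factory are assumptions about how the Capacitor connection is created elsewhere in the app.

import type { SQLiteDBConnection } from "@capacitor-community/sqlite";
import { DatabaseConnectionPool } from "./services/database/ConnectionPool"; // path assumed

// Hypothetical wrapper that guarantees the pooled connection is released.
async function withDb<T>(
  dbName: string,
  createConnection: () => Promise<SQLiteDBConnection>,
  work: (db: SQLiteDBConnection) => Promise<T>,
): Promise<T> {
  const pool = DatabaseConnectionPool.getInstance();
  const db = await pool.getConnection(dbName, createConnection);
  try {
    return await work(db);
  } finally {
    await pool.releaseConnection(dbName);
  }
}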
@@ -0,0 +1,67 @@
import { logger } from "@/utils/logger";
import { QueryExecResult } from "../interfaces/database";

interface Migration {
  name: string;
  sql: string;
}

export class MigrationService {
  private static instance: MigrationService;
  private migrations: Migration[] = [];

  private constructor() {}

  static getInstance(): MigrationService {
    if (!MigrationService.instance) {
      MigrationService.instance = new MigrationService();
    }
    return MigrationService.instance;
  }

  async registerMigration(migration: Migration): Promise<void> {
    this.migrations.push(migration);
  }

  async runMigrations(
    sqlExec: (
      sql: string,
      params?: unknown[],
    ) => Promise<Array<QueryExecResult>>,
  ): Promise<void> {
    // Create migrations table if it doesn't exist
    await sqlExec(`
      CREATE TABLE IF NOT EXISTS migrations (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        name TEXT NOT NULL UNIQUE,
        executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
      );
    `);

    // Get list of executed migrations
    const result: QueryExecResult[] = await sqlExec(
      "SELECT name FROM migrations;",
    );
    let executedMigrations: Set<unknown> = new Set();
    // Even with that query, the QueryExecResult may be [] (which doesn't make sense to me).
    if (result.length > 0) {
      const singleResult = result[0];
      executedMigrations = new Set(
        singleResult.values.map((row: unknown[]) => row[0]),
      );
    }

    // Run pending migrations in order
    for (const migration of this.migrations) {
      if (!executedMigrations.has(migration.name)) {
        await sqlExec(migration.sql);
        await sqlExec("INSERT INTO migrations (name) VALUES (?)", [
          migration.name,
        ]);
        logger.log(`Migration ${migration.name} executed successfully`);
      }
    }
  }
}

export default MigrationService.getInstance();
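Migrations are applied in registration order and recorded by name in a `migrations` table, so re-running the service is idempotent. The sketch below registers a hypothetical migration and applies it with a bound exec, mirroring the runMigrations(sqlExec) call made by the absurd-sql service earlier in this diff; the import path and the "contacts" table are assumptions.

import migrationService from "./migrationService"; // path assumed
import type { QueryExecResult } from "../interfaces/database"; // path assumed

// Illustrative only: registers a hypothetical migration, then applies pending ones.
async function applyMigrations(
  sqlExec: (sql: string, params?: unknown[]) => Promise<QueryExecResult[]>,
): Promise<void> {
  await migrationService.registerMigration({
    name: "001_create_contacts",
    sql: "CREATE TABLE IF NOT EXISTS contacts (id INTEGER PRIMARY KEY AUTOINCREMENT, did TEXT);",
  });
  await migrationService.runMigrations(sqlExec);
}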
Some files were not shown because too many files changed in this diff