35 changed files with 3599 additions and 599 deletions
@@ -0,0 +1,192 @@
# Meta-Rule: Core Always-On Rules

**Author**: Matthew Raymer
**Date**: 2025-08-21
**Status**: 🎯 **ACTIVE** - Core rules for every prompt

## Purpose

This meta-rule bundles the core rules that should be applied to **every single
prompt** because they define fundamental behaviors, principles, and context
that are essential for all AI interactions.

## When to Use

**ALWAYS** - These rules apply to every single prompt, regardless of the task
or context. They form the foundation for all AI assistant behavior.

## Bundled Rules

### **Core Human Competence Principles**

- **`core/base_context.mdc`** - Human competence first principles, interaction
  guidelines, and output contract requirements
- **`core/less_complex.mdc`** - Minimalist solution principle and complexity
  guidelines

### **Time & Context Standards**

- **`development/time.mdc`** - Time handling principles and UTC standards
- **`development/time_examples.mdc`** - Practical time implementation examples
- **`development/time_implementation.mdc`** - Detailed time implementation
  guidelines

### **Version Control & Process**

- **`workflow/version_control.mdc`** - Version control principles and commit
  guidelines
- **`workflow/commit_messages.mdc`** - Commit message format and conventions

### **Application Context**

- **`app/timesafari.mdc`** - Core TimeSafari application context and
  development principles
- **`app/timesafari_development.mdc`** - TimeSafari-specific development
  workflow and quality standards

## Why These Rules Are Always-On

### **Base Context**

- **Human Competence First**: Every interaction must increase human competence
- **Output Contract**: All responses must follow the required structure
- **Competence Hooks**: Learning and collaboration must be built into every response

### **Time Standards**

- **UTC Consistency**: All timestamps must use UTC for system operations
- **Evidence Collection**: Time context is essential for debugging and investigation
- **Cross-Platform**: Time handling affects all platforms and features

### **Version Control**

- **Commit Standards**: Every code change must follow commit message conventions
- **Process Consistency**: Version control affects all development work
- **Team Collaboration**: Commit standards enable effective team communication

### **Application Context**

- **Platform Awareness**: Every task must consider web/mobile/desktop platforms
- **Architecture Principles**: All work must follow TimeSafari patterns
- **Development Standards**: Quality and testing requirements apply to all work

## Application Priority

### **Primary (Apply First)**

1. **Base Context** - Human competence and output contract
2. **Time Standards** - UTC and timestamp requirements
3. **Application Context** - TimeSafari principles and platforms

### **Secondary (Apply as Needed)**

1. **Version Control** - When making code changes
2. **Complexity Guidelines** - When evaluating solution approaches

## Integration with Other Meta-Rules

### **Feature Planning**

- Base context ensures human competence focus
- Time standards inform planning and estimation
- Application context drives platform considerations

### **Bug Diagnosis**

- Base context ensures systematic investigation
- Time standards enable proper evidence collection
- Application context provides system understanding

### **Bug Fixing**

- Base context ensures quality implementation
- Time standards maintain logging consistency
- Application context guides testing strategy

### **Feature Implementation**

- Base context ensures proper development approach
- Time standards maintain system consistency
- Application context drives architecture decisions

## Success Criteria

- [ ] **Base context applied** to every single prompt
- [ ] **Time standards followed** for all timestamps and logging
- [ ] **Version control standards** applied to all code changes
- [ ] **Application context considered** for all platform work
- [ ] **Human competence focus** maintained in all interactions
- [ ] **Output contract structure** followed in all responses

## Common Pitfalls

- **Don't skip base context** - loses human competence focus
- **Don't ignore time standards** - creates inconsistent timestamps
- **Don't forget application context** - misses platform considerations
- **Don't skip version control** - creates inconsistent commit history
- **Don't lose competence focus** - reduces learning value

## Feedback & Improvement

### **Rule Effectiveness Ratings (1-5 scale)**

- **Base Context**: ___/5 - Comments: _______________
- **Time Standards**: ___/5 - Comments: _______________
- **Version Control**: ___/5 - Comments: _______________
- **Application Context**: ___/5 - Comments: _______________

### **Always-On Effectiveness**

- **Consistency**: Are these rules applied consistently across all prompts?
- **Value**: Do these rules add value to every interaction?
- **Overhead**: Are these rules too burdensome for simple tasks?

### **Integration Feedback**

- **With Other Meta-Rules**: How well do these integrate with workflow rules?
- **Context Switching**: Do these rules help or hinder context switching?
- **Learning Curve**: Are these rules easy for new users to understand?

### **Overall Experience**

- **Quality Improvement**: Do these rules improve response quality?
- **Efficiency**: Do these rules make interactions more efficient?
- **Recommendation**: Would you recommend keeping these always-on?

## Model Implementation Checklist

### Before Every Prompt

- [ ] **Base Context**: Ensure human competence principles are active
- [ ] **Time Standards**: Verify UTC and timestamp requirements are clear
- [ ] **Application Context**: Confirm TimeSafari context is loaded
- [ ] **Version Control**: Prepare commit standards if code changes are needed

### During Response Creation

- [ ] **Output Contract**: Follow required response structure
- [ ] **Competence Hooks**: Include learning and collaboration elements
- [ ] **Time Consistency**: Apply UTC standards for all time references
- [ ] **Platform Awareness**: Consider all target platforms

### After Response Creation

- [ ] **Validation**: Verify all always-on rules were applied
- [ ] **Quality Check**: Ensure response meets competence standards
- [ ] **Context Review**: Confirm application context was properly considered
- [ ] **Feedback Collection**: Note any issues with always-on application

---

**See also**:

- `.cursor/rules/meta_feature_planning.mdc` for workflow-specific rules
- `.cursor/rules/meta_bug_diagnosis.mdc` for investigation workflows
- `.cursor/rules/meta_bug_fixing.mdc` for fix implementation
- `.cursor/rules/meta_feature_implementation.mdc` for feature development

**Status**: Active core always-on meta-rule
**Priority**: Critical (applies to every prompt)
**Estimated Effort**: Ongoing reference
**Dependencies**: All bundled sub-rules
**Stakeholders**: All AI interactions, Development team
@@ -0,0 +1,383 @@
# Meta-Rule: Change Evaluation and Breaking Change Detection

**Author**: Matthew Raymer
**Date**: 2025-08-25
**Status**: 🎯 **ACTIVE** - Manually activated change evaluation rule

## Purpose

This meta-rule provides a systematic approach to evaluate changes between
branches and detect potential breaking changes. It's designed to catch
problematic model behavior by analyzing the nature, scope, and impact of
code changes before they cause issues.

## When to Use

**Manual Activation Only** - This rule should be invoked when:

- Reviewing changes before merging branches
- Investigating unexpected behavior after updates
- Validating that model-generated changes are safe
- Analyzing the impact of recent commits
- Debugging issues that may be caused by recent changes

## Workflow State Enforcement

**This meta-rule enforces current workflow mode constraints:**

### **Current Workflow State**

```json
{
  "workflowState": {
    "currentMode": "diagnosis|fixing|planning|research|documentation",
    "constraints": {
      "mode": "read_only|implementation|design_only|investigation|writing_only",
      "allowed": ["array", "of", "allowed", "actions"],
      "forbidden": ["array", "of", "forbidden", "actions"]
    }
  }
}
```

### **Mode-Specific Enforcement**

**Diagnosis Mode (read_only):**

- ❌ **Forbidden**: File modification, code creation, build commands, git
  commits
- ✅ **Allowed**: File reading, code analysis, investigation, documentation
- **Response**: Focus on analysis and documentation, not implementation

**Fixing Mode (implementation):**

- ✅ **Allowed**: File modification, code creation, build commands, testing,
  git commits
- ❌ **Forbidden**: None (full implementation mode)
- **Response**: Proceed with implementation and testing

**Planning Mode (design_only):**

- ❌ **Forbidden**: Implementation, coding, building, deployment
- ✅ **Allowed**: Analysis, design, estimation, documentation, architecture
- **Response**: Focus on planning and design, not implementation

**Research Mode (investigation):**

- ❌ **Forbidden**: File modification, implementation, deployment
- ✅ **Allowed**: Investigation, analysis, research, documentation
- **Response**: Focus on investigation and analysis

**Documentation Mode (writing_only):**

- ❌ **Forbidden**: Implementation, coding, building, deployment
- ✅ **Allowed**: Writing, editing, formatting, structuring, reviewing
- **Response**: Focus on documentation creation and improvement
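
To make the enforcement concrete, here is a minimal sketch of a guard that
checks a proposed action against the workflow state shown above. The
`WorkflowState` type mirrors the JSON structure; the `isActionAllowed` helper
and the action names are illustrative assumptions, not an existing API.

```typescript
// Illustrative sketch only: checks a proposed action against the current
// workflow mode constraints. Type and function names are assumptions.
type WorkflowMode =
  | "diagnosis"
  | "fixing"
  | "planning"
  | "research"
  | "documentation";

interface WorkflowState {
  currentMode: WorkflowMode;
  constraints: {
    mode: string;
    allowed: string[];
    forbidden: string[];
  };
}

function isActionAllowed(state: WorkflowState, action: string): boolean {
  // Explicit forbids win over explicit allows.
  if (state.constraints.forbidden.includes(action)) {
    return false;
  }
  return state.constraints.allowed.includes(action);
}

// Example: file modification is rejected while in read-only diagnosis mode.
const diagnosisState: WorkflowState = {
  currentMode: "diagnosis",
  constraints: {
    mode: "read_only",
    allowed: ["file_reading", "code_analysis", "investigation"],
    forbidden: ["file_modification", "git_commit", "build_command"],
  },
};
isActionAllowed(diagnosisState, "file_modification"); // => false
```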

## Change Evaluation Process

### **Phase 1: Change Discovery and Analysis**

1. **Branch Comparison Analysis**

   - Compare working branch with master/main branch
   - Identify all changed files and their modification types
   - Categorize changes by scope and impact

2. **Change Pattern Recognition**

   - Identify common change patterns (refactoring, feature addition, bug
     fixes)
   - Detect unusual or suspicious change patterns
   - Flag changes that deviate from established patterns

3. **Dependency Impact Assessment**

   - Analyze changes to imports, exports, and interfaces
   - Identify potential breaking changes to public APIs
   - Assess impact on dependent components and services

### **Phase 2: Breaking Change Detection**

1. **API Contract Analysis**

   - Check for changes to function signatures, method names, class
     interfaces
   - Identify removed or renamed public methods/properties
   - Detect changes to configuration options and constants

2. **Data Structure Changes**

   - Analyze database schema modifications
   - Check for changes to data models and interfaces
   - Identify modifications to serialization/deserialization logic

3. **Behavioral Changes**

   - Detect changes to business logic and algorithms
   - Identify modifications to error handling and validation
   - Check for changes to user experience and workflows

### **Phase 3: Risk Assessment and Recommendations**

1. **Risk Level Classification**

   - **LOW**: Cosmetic changes, documentation updates, minor refactoring
   - **MEDIUM**: Internal API changes, configuration modifications,
     performance improvements
   - **HIGH**: Public API changes, breaking interface modifications, major
     architectural changes
   - **CRITICAL**: Database schema changes, authentication modifications,
     security-related changes

2. **Impact Analysis**

   - Identify affected user groups and use cases
   - Assess potential for data loss or corruption
   - Evaluate impact on system performance and reliability

3. **Mitigation Strategies**

   - Recommend testing approaches for affected areas
   - Suggest rollback strategies if needed
   - Identify areas requiring additional validation

## Implementation Guidelines

### **Change Analysis Tools**

1. **Git Diff Analysis**

   ```bash
   # Compare working branch with master
   git diff master..HEAD --name-only
   git diff master..HEAD --stat
   git log master..HEAD --oneline
   ```

2. **File Change Categorization**

   - **Core Files**: Application entry points, main services, critical
     utilities
   - **Interface Files**: Public APIs, component interfaces, data models
   - **Configuration Files**: Environment settings, build configurations,
     deployment scripts
   - **Test Files**: Unit tests, integration tests, test utilities

3. **Change Impact Mapping**

   - Map changed files to affected functionality
   - Identify cross-dependencies and ripple effects
   - Document potential side effects and unintended consequences
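
As a rough illustration of the categorization step above, the sketch below
buckets paths from `git diff --name-only` into the listed categories. The
directory patterns are assumptions about a typical project layout, not an
existing script in this repository.

```typescript
// Illustrative sketch: bucket changed file paths into the categories above.
// The path patterns are assumptions, adjust them to the real layout.
type ChangeCategory = "core" | "interface" | "configuration" | "test" | "other";

function categorizeChangedFile(path: string): ChangeCategory {
  if (/\.(test|spec)\.[jt]s$/.test(path) || path.startsWith("test/")) {
    return "test";
  }
  if (/(^|\/)([^/]*\.config\.[jt]s|\.env[^/]*)$/.test(path)) {
    return "configuration";
  }
  if (path.startsWith("src/interfaces/") || path.startsWith("src/constants/")) {
    return "interface";
  }
  if (path.startsWith("src/services/") || path.startsWith("src/main")) {
    return "core";
  }
  return "other";
}

// Summarize a list of changed paths by category for the evaluation report.
function summarizeChanges(paths: string[]): Record<ChangeCategory, number> {
  const summary: Record<ChangeCategory, number> = {
    core: 0,
    interface: 0,
    configuration: 0,
    test: 0,
    other: 0,
  };
  for (const p of paths) summary[categorizeChangedFile(p)] += 1;
  return summary;
}
```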

### **Breaking Change Detection Patterns**

1. **Function Signature Changes**

   ```typescript
   // BEFORE
   function processData(data: string, options?: Options): Result

   // AFTER - BREAKING CHANGE
   function processData(data: string, options: Required<Options>): Result
   ```

2. **Interface Modifications**

   ```typescript
   // BEFORE
   interface UserProfile {
     name: string;
     email: string;
   }

   // AFTER - BREAKING CHANGE
   interface UserProfile {
     name: string;
     email: string;
     phone: string; // Required new field
   }
   ```

3. **Configuration Changes**

   ```typescript
   // BEFORE
   const config = {
     apiUrl: 'https://api.example.com',
     timeout: 5000
   };

   // AFTER - BREAKING CHANGE
   const config = {
     apiUrl: 'https://api.example.com',
     timeout: 5000,
     retries: 3 // New required configuration
   };
   ```
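
For contrast, here is a hedged sketch of how the same additions could be
introduced without breaking existing callers: make the new members optional
and supply defaults. This is an illustrative pattern, not code from the
changeset.

```typescript
// Non-breaking counterparts to the examples above (illustrative only).

// The new interface field is optional, so existing objects remain valid.
interface UserProfile {
  name: string;
  email: string;
  phone?: string; // Optional new field
}

// The new configuration key gets a default, so existing call sites keep working.
interface AppConfig {
  apiUrl: string;
  timeout: number;
  retries?: number;
}

function withDefaults(config: AppConfig): Required<AppConfig> {
  return {
    apiUrl: config.apiUrl,
    timeout: config.timeout,
    retries: config.retries ?? 3, // default preserves old behavior
  };
}
```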

## Output Format

### **Change Evaluation Report**

```markdown
# Change Evaluation Report

## Executive Summary

- **Risk Level**: [LOW|MEDIUM|HIGH|CRITICAL]
- **Overall Assessment**: [SAFE|CAUTION|DANGEROUS|CRITICAL]
- **Recommendation**: [PROCEED|REVIEW|HALT|IMMEDIATE_ROLLBACK]

## Change Analysis

### Files Modified

- **Total Changes**: [X] files
- **Core Files**: [X] files
- **Interface Files**: [X] files
- **Configuration Files**: [X] files
- **Test Files**: [X] files

### Change Categories

- **Refactoring**: [X] changes
- **Feature Addition**: [X] changes
- **Bug Fixes**: [X] changes
- **Configuration**: [X] changes
- **Documentation**: [X] changes

## Breaking Change Detection

### API Contract Changes

- **Function Signatures**: [X] modified
- **Interface Definitions**: [X] modified
- **Public Methods**: [X] added/removed/modified

### Data Structure Changes

- **Database Schema**: [X] modifications
- **Data Models**: [X] changes
- **Serialization**: [X] changes

### Behavioral Changes

- **Business Logic**: [X] modifications
- **Error Handling**: [X] changes
- **User Experience**: [X] changes

## Risk Assessment

### Impact Analysis

- **User Groups Affected**: [Description]
- **Use Cases Impacted**: [Description]
- **Performance Impact**: [Description]
- **Reliability Impact**: [Description]

### Dependencies

- **Internal Dependencies**: [List]
- **External Dependencies**: [List]
- **Configuration Dependencies**: [List]

## Recommendations

### Testing Requirements

- [ ] Unit tests for modified components
- [ ] Integration tests for affected workflows
- [ ] Performance tests for changed algorithms
- [ ] User acceptance tests for UI changes

### Validation Steps

- [ ] Code review by domain experts
- [ ] API compatibility testing
- [ ] Database migration testing
- [ ] End-to-end workflow testing

### Rollback Strategy

- **Rollback Complexity**: [LOW|MEDIUM|HIGH]
- **Rollback Time**: [Estimated time]
- **Data Preservation**: [Strategy description]

## Conclusion

[Summary of findings and final recommendation]
```

## Usage Examples

### **Example 1: Safe Refactoring**

```bash
@meta_change_evaluation.mdc analyze changes between feature-branch and master
```

### **Example 2: Breaking Change Investigation**

```bash
@meta_change_evaluation.mdc evaluate potential breaking changes in recent commits
```

### **Example 3: Pre-Merge Validation**

```bash
@meta_change_evaluation.mdc validate changes before merging feature-branch to master
```

## Success Criteria

- [ ] **Change Discovery**: All modified files are identified and categorized
- [ ] **Pattern Recognition**: Unusual change patterns are detected and flagged
- [ ] **Breaking Change Detection**: All potential breaking changes are identified
- [ ] **Risk Assessment**: Accurate risk levels are assigned with justification
- [ ] **Recommendations**: Actionable recommendations are provided
- [ ] **Documentation**: Complete change evaluation report is generated

## Common Pitfalls

- **Missing Dependencies**: Failing to identify all affected components
- **Underestimating Impact**: Not considering ripple effects of changes
- **Incomplete Testing**: Missing critical test scenarios for changes
- **Configuration Blindness**: Overlooking configuration file changes
- **Interface Assumptions**: Assuming internal changes won't affect external
  users

## Integration with Other Meta-Rules

### **With Bug Diagnosis**

- Use change evaluation to identify recent changes that may have caused
  bugs
- Correlate change patterns with reported issues

### **With Feature Planning**

- Evaluate the impact of planned changes before implementation
- Identify potential breaking changes early in the planning process

### **With Bug Fixing**

- Validate that fixes don't introduce new breaking changes
- Ensure fixes maintain backward compatibility

---

**See also**:

- `.cursor/rules/meta_core_always_on.mdc` for core always-on rules
- `.cursor/rules/meta_feature_planning.mdc` for feature development
  workflows
- `.cursor/rules/meta_bug_diagnosis.mdc` for bug investigation workflows
- `.cursor/rules/meta_bug_fixing.mdc` for fix implementation workflows

**Status**: Active change evaluation meta-rule
**Priority**: High (applies to all change evaluation tasks)
**Estimated Effort**: Ongoing reference
**Dependencies**: All bundled sub-rules
**Stakeholders**: Development team, Quality Assurance team, Release
Management team
@@ -0,0 +1,116 @@
import { CapacitorConfig } from '@capacitor/cli';

const config: CapacitorConfig = {
  appId: 'app.timesafari',
  appName: 'TimeSafari',
  webDir: 'dist',
  server: {
    cleartext: true
  },
  plugins: {
    App: {
      appUrlOpen: {
        handlers: [
          {
            url: 'timesafari://*',
            autoVerify: true
          }
        ]
      }
    },
    SplashScreen: {
      launchShowDuration: 3000,
      launchAutoHide: true,
      backgroundColor: '#ffffff',
      androidSplashResourceName: 'splash',
      androidScaleType: 'CENTER_CROP',
      showSpinner: false,
      androidSpinnerStyle: 'large',
      iosSpinnerStyle: 'small',
      spinnerColor: '#999999',
      splashFullScreen: true,
      splashImmersive: true
    },
    CapSQLite: {
      iosDatabaseLocation: 'Library/CapacitorDatabase',
      iosIsEncryption: false,
      iosBiometric: {
        biometricAuth: false,
        biometricTitle: 'Biometric login for TimeSafari'
      },
      androidIsEncryption: false,
      androidBiometric: {
        biometricAuth: false,
        biometricTitle: 'Biometric login for TimeSafari'
      },
      electronIsEncryption: false
    }
  },
  ios: {
    contentInset: 'never',
    allowsLinkPreview: true,
    scrollEnabled: true,
    limitsNavigationsToAppBoundDomains: true,
    backgroundColor: '#ffffff',
    allowNavigation: [
      '*.timesafari.app',
      '*.jsdelivr.net',
      'api.endorser.ch'
    ]
  },
  android: {
    allowMixedContent: true,
    captureInput: true,
    webContentsDebuggingEnabled: false,
    allowNavigation: [
      '*.timesafari.app',
      '*.jsdelivr.net',
      'api.endorser.ch',
      '10.0.2.2:3000'
    ]
  },
  electron: {
    deepLinking: {
      schemes: ['timesafari']
    },
    buildOptions: {
      appId: 'app.timesafari',
      productName: 'TimeSafari',
      directories: {
        output: 'dist-electron-packages'
      },
      files: [
        'dist/**/*',
        'electron/**/*'
      ],
      mac: {
        category: 'public.app-category.productivity',
        target: [
          {
            target: 'dmg',
            arch: ['x64', 'arm64']
          }
        ]
      },
      win: {
        target: [
          {
            target: 'nsis',
            arch: ['x64']
          }
        ]
      },
      linux: {
        target: [
          {
            target: 'AppImage',
            arch: ['x64']
          }
        ],
        category: 'Utility'
      }
    }
  }
};

export default config;
@@ -1,305 +0,0 @@
/**
 * ProfileService - Handles user profile operations and API calls
 * Extracted from AccountViewView.vue to improve separation of concerns
 */

import { AxiosInstance, AxiosError } from "axios";
import { UserProfile } from "@/libs/partnerServer";
import { UserProfileResponse } from "@/interfaces/accountView";
import { getHeaders, errorStringForLog } from "@/libs/endorserServer";
import { handleApiError } from "./api";
import { logger } from "@/utils/logger";
import { ACCOUNT_VIEW_CONSTANTS } from "@/constants/accountView";

/**
 * Profile data interface
 */
export interface ProfileData {
  description: string;
  latitude: number;
  longitude: number;
  includeLocation: boolean;
}

/**
 * Profile service class
 */
export class ProfileService {
  private axios: AxiosInstance;
  private partnerApiServer: string;

  constructor(axios: AxiosInstance, partnerApiServer: string) {
    this.axios = axios;
    this.partnerApiServer = partnerApiServer;
  }

  /**
   * Load user profile from the server
   * @param activeDid - The user's DID
   * @returns ProfileData or null if profile doesn't exist
   */
  async loadProfile(activeDid: string): Promise<ProfileData | null> {
    try {
      const headers = await getHeaders(activeDid);
      const response = await this.axios.get<UserProfileResponse>(
        `${this.partnerApiServer}/api/partner/userProfileForIssuer/${activeDid}`,
        { headers },
      );

      if (response.status === 200) {
        const data = response.data.data;
        const profileData: ProfileData = {
          description: data.description || "",
          latitude: data.locLat || 0,
          longitude: data.locLon || 0,
          includeLocation: !!(data.locLat && data.locLon),
        };
        return profileData;
      } else {
        throw new Error(ACCOUNT_VIEW_CONSTANTS.ERRORS.UNABLE_TO_LOAD_PROFILE);
      }
    } catch (error) {
      if (this.isApiError(error) && error.response?.status === 404) {
        // Profile doesn't exist yet - this is normal
        return null;
      }

      logger.error("Error loading profile:", errorStringForLog(error));
      handleApiError(error as AxiosError, "/api/partner/userProfileForIssuer");
      return null;
    }
  }

  /**
   * Save user profile to the server
   * @param activeDid - The user's DID
   * @param profileData - The profile data to save
   * @returns true if successful, false otherwise
   */
  async saveProfile(
    activeDid: string,
    profileData: ProfileData,
  ): Promise<boolean> {
    try {
      const headers = await getHeaders(activeDid);
      const payload: UserProfile = {
        description: profileData.description,
        issuerDid: activeDid,
      };

      // Add location data if location is included
      if (
        profileData.includeLocation &&
        profileData.latitude &&
        profileData.longitude
      ) {
        payload.locLat = profileData.latitude;
        payload.locLon = profileData.longitude;
      }

      const response = await this.axios.post(
        `${this.partnerApiServer}/api/partner/userProfile`,
        payload,
        { headers },
      );

      if (response.status === 201) {
        return true;
      } else {
        logger.error("Error saving profile:", response);
        throw new Error(ACCOUNT_VIEW_CONSTANTS.ERRORS.PROFILE_NOT_SAVED);
      }
    } catch (error) {
      logger.error("Error saving profile:", errorStringForLog(error));
      handleApiError(error as AxiosError, "/api/partner/userProfile");
      return false;
    }
  }

  /**
   * Delete user profile from the server
   * @param activeDid - The user's DID
   * @returns true if successful, false otherwise
   */
  async deleteProfile(activeDid: string): Promise<boolean> {
    try {
      const headers = await getHeaders(activeDid);
      const url = `${this.partnerApiServer}/api/partner/userProfile`;
      const response = await this.axios.delete(url, { headers });

      if (response.status === 204 || response.status === 200) {
        logger.info("Profile deleted successfully");
        return true;
      } else {
        logger.error("Unexpected response status when deleting profile:", {
          status: response.status,
          statusText: response.statusText,
          data: response.data,
        });
        throw new Error(
          `Profile not deleted - HTTP ${response.status}: ${response.statusText}`,
        );
      }
    } catch (error) {
      if (this.isApiError(error) && error.response) {
        const response = error.response;
        logger.error("API error deleting profile:", {
          status: response.status,
          statusText: response.statusText,
          data: response.data,
          url: this.getErrorUrl(error),
        });

        // Handle specific HTTP status codes
        if (response.status === 204) {
          logger.debug("Profile deleted successfully (204 No Content)");
          return true; // 204 is success for DELETE operations
        } else if (response.status === 404) {
          logger.warn("Profile not found - may already be deleted");
          return true; // Consider this a success if profile doesn't exist
        } else if (response.status === 400) {
          logger.error("Bad request when deleting profile:", response.data);
          const errorMessage =
            typeof response.data === "string"
              ? response.data
              : response.data?.message || "Bad request";
          throw new Error(`Profile deletion failed: ${errorMessage}`);
        } else if (response.status === 401) {
          logger.error("Unauthorized to delete profile");
          throw new Error("You are not authorized to delete this profile");
        } else if (response.status === 403) {
          logger.error("Forbidden to delete profile");
          throw new Error("You are not allowed to delete this profile");
        }
      }

      logger.error("Error deleting profile:", errorStringForLog(error));
      handleApiError(error as AxiosError, "/api/partner/userProfile");
      return false;
    }
  }

  /**
   * Update profile location
   * @param profileData - Current profile data
   * @param latitude - New latitude
   * @param longitude - New longitude
   * @returns Updated profile data
   */
  updateProfileLocation(
    profileData: ProfileData,
    latitude: number,
    longitude: number,
  ): ProfileData {
    return {
      ...profileData,
      latitude,
      longitude,
      includeLocation: true,
    };
  }

  /**
   * Toggle location inclusion in profile
   * @param profileData - Current profile data
   * @returns Updated profile data
   */
  toggleProfileLocation(profileData: ProfileData): ProfileData {
    const includeLocation = !profileData.includeLocation;
    return {
      ...profileData,
      latitude: includeLocation ? profileData.latitude : 0,
      longitude: includeLocation ? profileData.longitude : 0,
      includeLocation,
    };
  }

  /**
   * Clear profile location
   * @param profileData - Current profile data
   * @returns Updated profile data
   */
  clearProfileLocation(profileData: ProfileData): ProfileData {
    return {
      ...profileData,
      latitude: 0,
      longitude: 0,
      includeLocation: false,
    };
  }

  /**
   * Reset profile to default state
   * @returns Default profile data
   */
  getDefaultProfile(): ProfileData {
    return {
      description: "",
      latitude: 0,
      longitude: 0,
      includeLocation: false,
    };
  }

  /**
   * Type guard for API errors with proper typing
   */
  private isApiError(error: unknown): error is {
    response?: {
      status?: number;
      statusText?: string;
      data?: { message?: string } | string;
    };
  } {
    return typeof error === "object" && error !== null && "response" in error;
  }

  /**
   * Extract error URL safely from error object
   */
  private getErrorUrl(error: unknown): string | undefined {
    if (this.isAxiosError(error)) {
      return error.config?.url;
    }
    if (this.isApiError(error) && this.hasConfigProperty(error)) {
      const config = this.getConfigProperty(error);
      return config?.url;
    }
    return undefined;
  }

  /**
   * Type guard to check if error has config property
   */
  private hasConfigProperty(
    error: unknown,
  ): error is { config?: { url?: string } } {
    return typeof error === "object" && error !== null && "config" in error;
  }

  /**
   * Safely extract config property from error
   */
  private getConfigProperty(error: {
    config?: { url?: string };
  }): { url?: string } | undefined {
    return error.config;
  }

  /**
   * Type guard for AxiosError
   */
  private isAxiosError(error: unknown): error is AxiosError {
    return error instanceof AxiosError;
  }
}

/**
 * Factory function to create a ProfileService instance
 */
export function createProfileService(
  axios: AxiosInstance,
  partnerApiServer: string,
): ProfileService {
  return new ProfileService(axios, partnerApiServer);
}
@@ -0,0 +1,298 @@
/**
 * Standardized Error Handler
 *
 * Provides consistent error handling patterns across the TimeSafari codebase
 * to improve debugging, user experience, and maintainability.
 *
 * @author Matthew Raymer
 * @since 2025-08-25
 */

import { AxiosError } from "axios";
import { logger } from "./logger";

/**
 * Standard error context for consistent logging
 */
export interface ErrorContext {
  component: string;
  operation: string;
  timestamp: string;
  [key: string]: unknown;
}

/**
 * Enhanced error information for better debugging
 */
export interface EnhancedErrorInfo {
  errorType: "AxiosError" | "NetworkError" | "ValidationError" | "UnknownError";
  status?: number;
  statusText?: string;
  errorData?: unknown;
  errorMessage: string;
  errorStack?: string;
  requestContext?: {
    url?: string;
    method?: string;
    headers?: Record<string, unknown>;
  };
}

/**
 * Standardized error handler for API operations
 *
 * @param error - The error that occurred
 * @param context - Context information about the operation
 * @param operation - Description of the operation being performed
 * @returns Enhanced error information for consistent handling
 */
export function handleApiError(
  error: unknown,
  context: ErrorContext,
  operation: string,
): EnhancedErrorInfo {
  const baseContext = {
    ...context,
    operation,
    timestamp: new Date().toISOString(),
  };

  if (error instanceof AxiosError) {
    const axiosError = error as AxiosError;
    const status = axiosError.response?.status;
    const statusText = axiosError.response?.statusText;
    const errorData = axiosError.response?.data;

    const enhancedError: EnhancedErrorInfo = {
      errorType: "AxiosError",
      status,
      statusText,
      errorData,
      errorMessage: axiosError.message,
      errorStack: axiosError.stack,
      requestContext: {
        url: axiosError.config?.url,
        method: axiosError.config?.method,
        headers: axiosError.config?.headers,
      },
    };

    // Log with consistent format
    logger.error(
      `[${context.component}] ❌ ${operation} failed (AxiosError):`,
      {
        ...baseContext,
        ...enhancedError,
      },
    );

    return enhancedError;
  }

  if (error instanceof Error) {
    const enhancedError: EnhancedErrorInfo = {
      errorType: "UnknownError",
      errorMessage: error.message,
      errorStack: error.stack,
    };

    logger.error(`[${context.component}] ❌ ${operation} failed (Error):`, {
      ...baseContext,
      ...enhancedError,
    });

    return enhancedError;
  }

  // Handle unknown error types
  const enhancedError: EnhancedErrorInfo = {
    errorType: "UnknownError",
    errorMessage: String(error),
  };

  logger.error(`[${context.component}] ❌ ${operation} failed (Unknown):`, {
    ...baseContext,
    ...enhancedError,
  });

  return enhancedError;
}

/**
 * Extract human-readable error message from various error response formats
 *
 * @param errorData - Error response data
 * @returns Human-readable error message
 */
export function extractErrorMessage(errorData: unknown): string {
  if (typeof errorData === "string") {
    return errorData;
  }

  if (typeof errorData === "object" && errorData !== null) {
    const obj = errorData as Record<string, unknown>;

    // Try common error message fields
    if (obj.message && typeof obj.message === "string") {
      return obj.message;
    }

    if (obj.error && typeof obj.error === "string") {
      return obj.error;
    }

    if (obj.detail && typeof obj.detail === "string") {
      return obj.detail;
    }

    if (obj.reason && typeof obj.reason === "string") {
      return obj.reason;
    }

    // Fallback to stringified object
    return JSON.stringify(errorData);
  }

  return String(errorData);
}

/**
 * Create user-friendly error message from enhanced error info
 *
 * @param errorInfo - Enhanced error information
 * @param fallbackMessage - Fallback message if error details are insufficient
 * @returns User-friendly error message
 */
export function createUserMessage(
  errorInfo: EnhancedErrorInfo,
  fallbackMessage: string,
): string {
  if (errorInfo.errorType === "AxiosError") {
    const status = errorInfo.status;
    const statusText = errorInfo.statusText;
    const errorMessage = extractErrorMessage(errorInfo.errorData);

    if (status && statusText) {
      if (errorMessage && errorMessage !== "{}") {
        return `${fallbackMessage}: ${status} ${statusText} - ${errorMessage}`;
      }
      return `${fallbackMessage}: ${status} ${statusText}`;
    }
  }

  if (
    errorInfo.errorMessage &&
    errorInfo.errorMessage !== "Request failed with status code 0"
  ) {
    return `${fallbackMessage}: ${errorInfo.errorMessage}`;
  }

  return fallbackMessage;
}

/**
 * Handle specific HTTP status codes with appropriate user messages
 *
 * @param status - HTTP status code
 * @param errorData - Error response data
 * @param operation - Description of the operation
 * @returns User-friendly error message
 */
export function handleHttpStatus(
  status: number,
  errorData: unknown,
  operation: string,
): string {
  const errorMessage = extractErrorMessage(errorData);

  switch (status) {
    case 400:
      return errorMessage || `${operation} failed: Bad request`;
    case 401:
      return `${operation} failed: Authentication required`;
    case 403:
      return `${operation} failed: Access denied`;
    case 404:
      return errorMessage || `${operation} failed: Resource not found`;
    case 409:
      return errorMessage || `${operation} failed: Conflict with existing data`;
    case 422:
      return errorMessage || `${operation} failed: Validation error`;
    case 429:
      return `${operation} failed: Too many requests. Please try again later.`;
    case 500:
      return `${operation} failed: Server error. Please try again later.`;
    case 502:
    case 503:
    case 504:
      return `${operation} failed: Service temporarily unavailable. Please try again later.`;
    default:
      return errorMessage || `${operation} failed: HTTP ${status}`;
  }
}

/**
 * Check if an error is a network-related error
 *
 * @param error - The error to check
 * @returns True if the error is network-related
 */
export function isNetworkError(error: unknown): boolean {
  if (error instanceof AxiosError) {
    return !error.response && !error.request;
  }

  if (error instanceof Error) {
    const message = error.message.toLowerCase();
    return (
      message.includes("network") ||
      message.includes("timeout") ||
      message.includes("connection") ||
      message.includes("fetch")
    );
  }

  return false;
}

/**
 * Check if an error is a timeout error
 *
 * @param error - The error to check
 * @returns True if the error is a timeout
 */
export function isTimeoutError(error: unknown): boolean {
  if (error instanceof AxiosError) {
    return (
      error.code === "ECONNABORTED" ||
      error.message.toLowerCase().includes("timeout")
    );
  }

  if (error instanceof Error) {
    return error.message.toLowerCase().includes("timeout");
  }

  return false;
}

/**
 * Create standardized error context for components
 *
 * @param component - Component name
 * @param operation - Operation being performed
 * @param additionalContext - Additional context information
 * @returns Standardized error context
 */
export function createErrorContext(
  component: string,
  operation: string,
  additionalContext: Record<string, unknown> = {},
): ErrorContext {
  return {
    component,
    operation,
    timestamp: new Date().toISOString(),
    ...additionalContext,
  };
}
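
// --- Usage sketch (illustrative only; not part of the changeset) ---
// Shows how createErrorContext, handleApiError, and createUserMessage are
// meant to be combined in a caller. The endpoint path and the wrapper
// function name are assumptions for the example, not an existing API.
export async function loadProfileExample(
  axios: { get: (url: string) => Promise<{ data: unknown }> },
  activeDid: string,
): Promise<unknown> {
  try {
    const response = await axios.get(
      `/api/partner/userProfileForIssuer/${activeDid}`,
    );
    return response.data;
  } catch (error) {
    // Log once with standardized context, then surface a friendly message.
    const context = createErrorContext("AccountViewView", "loadProfile", {
      did: activeDid,
    });
    const info = handleApiError(error, context, "Load profile");
    throw new Error(createUserMessage(info, "Unable to load profile"));
  }
}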
@@ -0,0 +1,482 @@
/**
 * Performance Optimizer
 *
 * Provides utilities for optimizing API calls, database queries, and component
 * rendering to improve TimeSafari application performance.
 *
 * @author Matthew Raymer
 * @since 2025-08-25
 */

import { logger } from "./logger";

/**
 * Batch operation configuration
 */
export interface BatchConfig {
  maxBatchSize: number;
  maxWaitTime: number;
  retryAttempts: number;
  retryDelay: number;
}

/**
 * Default batch configuration
 */
export const DEFAULT_BATCH_CONFIG: BatchConfig = {
  maxBatchSize: 10,
  maxWaitTime: 100, // milliseconds
  retryAttempts: 3,
  retryDelay: 1000, // milliseconds
};

/**
 * Batched operation item
 */
export interface BatchItem<T, R> {
  id: string;
  data: T;
  resolve: (value: R) => void;
  reject: (error: Error) => void;
  timestamp: number;
}

/**
 * Batch processor for API operations
 *
 * Groups multiple similar operations into batches to reduce
 * the number of API calls and improve performance.
 */
export class BatchProcessor<T, R> {
  private items: BatchItem<T, R>[] = [];
  private timer: NodeJS.Timeout | null = null;
  private processing = false;
  private config: BatchConfig;

  constructor(
    private batchHandler: (items: T[]) => Promise<R[]>,
    private itemIdExtractor: (item: T) => string,
    config: Partial<BatchConfig> = {},
  ) {
    this.config = { ...DEFAULT_BATCH_CONFIG, ...config };
  }

  /**
   * Add an item to the batch
   *
   * @param data - Data to process
   * @returns Promise that resolves when the item is processed
   */
  async add(data: T): Promise<R> {
    return new Promise((resolve, reject) => {
      const item: BatchItem<T, R> = {
        id: this.itemIdExtractor(data),
        data,
        resolve,
        reject,
        timestamp: Date.now(),
      };

      this.items.push(item);

      // Start timer if this is the first item
      if (this.items.length === 1) {
        this.startTimer();
      }

      // Process immediately if batch is full
      if (this.items.length >= this.config.maxBatchSize) {
        this.processBatch();
      }
    });
  }

  /**
   * Start the batch timer
   */
  private startTimer(): void {
    if (this.timer) {
      clearTimeout(this.timer);
    }

    this.timer = setTimeout(() => {
      this.processBatch();
    }, this.config.maxWaitTime);
  }

  /**
   * Process the current batch
   */
  private async processBatch(): Promise<void> {
    if (this.processing || this.items.length === 0) {
      return;
    }

    this.processing = true;

    // Clear timer
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    // Get current batch
    const currentItems = [...this.items];
    this.items = [];

    try {
      logger.debug("[BatchProcessor] 🔄 Processing batch:", {
        batchSize: currentItems.length,
        itemIds: currentItems.map((item) => item.id),
        timestamp: new Date().toISOString(),
      });

      // Process batch
      const results = await this.batchHandler(
        currentItems.map((item) => item.data),
      );

      // Map results back to items
      const resultMap = new Map<string, R>();
      results.forEach((result, index) => {
        const item = currentItems[index];
        if (item) {
          resultMap.set(item.id, result);
        }
      });

      // Resolve promises
      currentItems.forEach((item) => {
        const result = resultMap.get(item.id);
        if (result !== undefined) {
          item.resolve(result);
        } else {
          item.reject(new Error(`No result found for item ${item.id}`));
        }
      });

      logger.debug("[BatchProcessor] ✅ Batch processed successfully:", {
        batchSize: currentItems.length,
        resultsCount: results.length,
        timestamp: new Date().toISOString(),
      });
    } catch (error) {
      logger.error("[BatchProcessor] ❌ Batch processing failed:", {
        batchSize: currentItems.length,
        error: error instanceof Error ? error.message : String(error),
        timestamp: new Date().toISOString(),
      });

      // Reject all items in the batch
      currentItems.forEach((item) => {
        item.reject(error instanceof Error ? error : new Error(String(error)));
      });
    } finally {
      this.processing = false;

      // Start timer for remaining items if any
      if (this.items.length > 0) {
        this.startTimer();
      }
    }
  }

  /**
   * Get current batch status
   */
  getStatus(): {
    pendingItems: number;
    isProcessing: boolean;
    hasTimer: boolean;
  } {
    return {
      pendingItems: this.items.length,
      isProcessing: this.processing,
      hasTimer: this.timer !== null,
    };
  }

  /**
   * Clear all pending items
   */
  clear(): void {
    if (this.timer) {
      clearTimeout(this.timer);
      this.timer = null;
    }

    // Reject all pending items
    this.items.forEach((item) => {
      item.reject(new Error("Batch processor cleared"));
    });

    this.items = [];
    this.processing = false;
  }
}
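
// --- Usage sketch (illustrative only; not part of the changeset) ---
// Coalesces individual DID lookups into one call per batch. The
// `fetchProfilesByDids` helper and its result shape are assumptions for the
// example; results must be returned in the same order as the input DIDs.
interface ProfileSummary {
  did: string;
  name: string;
}

async function fetchProfilesByDids(dids: string[]): Promise<ProfileSummary[]> {
  // Placeholder for a real batch endpoint; preserves input order.
  return dids.map((did) => ({ did, name: `profile-${did}` }));
}

const profileBatcher = new BatchProcessor<string, ProfileSummary>(
  (dids) => fetchProfilesByDids(dids), // batchHandler: one request per batch
  (did) => did, // itemIdExtractor: the DID itself is the key
  { maxBatchSize: 25, maxWaitTime: 50 },
);

// Callers await individual results; the processor groups them transparently:
// const profile = await profileBatcher.add("did:example:123");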

/**
 * Database query optimizer
 *
 * Provides utilities for optimizing database queries and reducing
 * the number of database operations.
 */
export class DatabaseOptimizer {
  /**
   * Batch multiple SELECT queries into a single query
   *
   * @param baseQuery - Base SELECT query
   * @param ids - Array of IDs to query
   * @param idColumn - Name of the ID column
   * @returns Optimized query string
   */
  static batchSelectQuery(
    baseQuery: string,
    ids: (string | number)[],
    idColumn: string,
  ): string {
    if (ids.length === 0) {
      return baseQuery;
    }

    if (ids.length === 1) {
      return `${baseQuery} WHERE ${idColumn} = ?`;
    }

    const placeholders = ids.map(() => "?").join(", ");
    return `${baseQuery} WHERE ${idColumn} IN (${placeholders})`;
  }

  /**
   * Create a query plan for multiple operations
   *
   * @param operations - Array of database operations
   * @returns Optimized query plan
   */
  static createQueryPlan(
    operations: Array<{
      type: "SELECT" | "INSERT" | "UPDATE" | "DELETE";
      table: string;
      priority: number;
    }>,
  ): Array<{
    type: "SELECT" | "INSERT" | "UPDATE" | "DELETE";
    table: string;
    priority: number;
    batchable: boolean;
  }> {
    return operations
      .map((op) => ({
        ...op,
        batchable: op.type === "SELECT" || op.type === "INSERT",
      }))
      .sort((a, b) => {
        // Sort by priority first, then by type
        if (a.priority !== b.priority) {
          return b.priority - a.priority;
        }

        // SELECT operations first, then INSERT, UPDATE, DELETE
        const typeOrder = { SELECT: 0, INSERT: 1, UPDATE: 2, DELETE: 3 };
        return typeOrder[a.type] - typeOrder[b.type];
      });
  }
}
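
// --- Usage sketch (illustrative only; not part of the changeset) ---
// Builds one IN (...) query instead of N single-row queries. The table and
// column names are assumptions for the example.
const contactDids = ["did:a", "did:b", "did:c"];
const contactsSql = DatabaseOptimizer.batchSelectQuery(
  "SELECT * FROM contacts",
  contactDids,
  "did",
);
// contactsSql === "SELECT * FROM contacts WHERE did IN (?, ?, ?)"
// Bind `contactDids` as the statement parameters when executing it.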

/**
 * Component rendering optimizer
 *
 * Provides utilities for optimizing Vue component rendering
 * and reducing unnecessary re-renders.
 */
export class ComponentOptimizer {
  /**
   * Debounce function calls to prevent excessive execution
   *
   * @param func - Function to debounce
   * @param wait - Wait time in milliseconds
   * @returns Debounced function
   */
  static debounce<T extends (...args: unknown[]) => unknown>(
    func: T,
    wait: number,
  ): (...args: Parameters<T>) => void {
    let timeout: NodeJS.Timeout | null = null;

    return (...args: Parameters<T>) => {
      if (timeout) {
        clearTimeout(timeout);
      }

      timeout = setTimeout(() => {
        func(...args);
      }, wait);
    };
  }

  /**
   * Throttle function calls to limit execution frequency
   *
   * @param func - Function to throttle
   * @param limit - Time limit in milliseconds
   * @returns Throttled function
   */
  static throttle<T extends (...args: unknown[]) => unknown>(
    func: T,
    limit: number,
  ): (...args: Parameters<T>) => void {
    let inThrottle = false;

    return (...args: Parameters<T>) => {
      if (!inThrottle) {
        func(...args);
        inThrottle = true;
        setTimeout(() => {
          inThrottle = false;
        }, limit);
      }
    };
  }

  /**
   * Memoize function results to avoid redundant computation
   *
   * @param func - Function to memoize
   * @param keyGenerator - Function to generate cache keys
   * @returns Memoized function
   */
  static memoize<T extends (...args: unknown[]) => unknown, K>(
    func: T,
    keyGenerator: (...args: Parameters<T>) => K,
  ): T {
    const cache = new Map<K, unknown>();

    return ((...args: Parameters<T>) => {
      const key = keyGenerator(...args);

      if (cache.has(key)) {
        return cache.get(key);
      }

      const result = func(...args);
      cache.set(key, result);
      return result;
    }) as T;
  }
}
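
// --- Usage sketch (illustrative only; not part of the changeset) ---
// Debounce a search handler and memoize an inexpensive formatter. The
// `runSearch` helper and the cache key choice are assumptions for the example.
const runSearch = (term: string): void => {
  logger.debug("[ComponentOptimizer] search:", term);
};

const debouncedSearch = ComponentOptimizer.debounce(
  (...args: unknown[]) => runSearch(String(args[0])),
  300,
);
// debouncedSearch("alice"); // only the last call within 300 ms actually runs

const formatDid = ComponentOptimizer.memoize(
  (...args: unknown[]) => `did:...${String(args[0]).slice(-6)}`,
  (...args) => String(args[0]), // the DID string itself is the cache key
);
// formatDid("did:example:1234567890"); // second call with same DID hits cache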

/**
 * Performance monitoring utility
 *
 * Tracks and reports performance metrics for optimization analysis.
 */
export class PerformanceMonitor {
  private static instance: PerformanceMonitor;
  private metrics = new Map<
    string,
    Array<{ timestamp: number; duration: number }>
  >();

  private constructor() {}

  /**
   * Get singleton instance
   */
  static getInstance(): PerformanceMonitor {
    if (!PerformanceMonitor.instance) {
      PerformanceMonitor.instance = new PerformanceMonitor();
    }
    return PerformanceMonitor.instance;
  }

  /**
   * Start timing an operation
   *
   * @param operationName - Name of the operation
   * @returns Function to call when operation completes
   */
  startTiming(operationName: string): () => void {
    const startTime = performance.now();

    return () => {
      const duration = performance.now() - startTime;
      this.recordMetric(operationName, duration);
    };
  }

  /**
   * Record a performance metric
   *
   * @param operationName - Name of the operation
   * @param duration - Duration in milliseconds
   */
  private recordMetric(operationName: string, duration: number): void {
    if (!this.metrics.has(operationName)) {
      this.metrics.set(operationName, []);
    }

    const operationMetrics = this.metrics.get(operationName)!;
    operationMetrics.push({
      timestamp: Date.now(),
      duration,
    });

    // Keep only last 100 metrics per operation
    if (operationMetrics.length > 100) {
      operationMetrics.splice(0, operationMetrics.length - 100);
    }
  }

  /**
   * Get performance summary for an operation
   *
   * @param operationName - Name of the operation
   * @returns Performance statistics
   */
  getPerformanceSummary(operationName: string): {
    count: number;
    average: number;
    min: number;
    max: number;
    recentAverage: number;
  } | null {
    const metrics = this.metrics.get(operationName);
    if (!metrics || metrics.length === 0) {
      return null;
    }

    const durations = metrics.map((m) => m.duration);
    const recentMetrics = metrics.slice(-10); // Last 10 metrics

    return {
      count: metrics.length,
      average: durations.reduce((a, b) => a + b, 0) / durations.length,
      min: Math.min(...durations),
      max: Math.max(...durations),
      recentAverage:
        recentMetrics.reduce((a, b) => a + b.duration, 0) /
        recentMetrics.length,
    };
  }

  /**
   * Get all performance metrics
   */
  getAllMetrics(): Map<string, Array<{ timestamp: number; duration: number }>> {
    return new Map(this.metrics);
  }

  /**
   * Clear all performance metrics
   */
  clearMetrics(): void {
    this.metrics.clear();
  }
}

/**
 * Convenience function to get the performance monitor
 */
export const getPerformanceMonitor = (): PerformanceMonitor => {
  return PerformanceMonitor.getInstance();
};
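
// --- Usage sketch (illustrative only; not part of the changeset) ---
// Times an operation and reads back aggregate statistics. The operation name
// "profile.load" and the simulated delay are assumptions for the example.
async function timedOperationExample(): Promise<void> {
  const monitor = getPerformanceMonitor();
  const stop = monitor.startTiming("profile.load");

  // ... perform the operation being measured ...
  await new Promise((resolve) => setTimeout(resolve, 25));

  stop(); // records the elapsed duration under "profile.load"

  const summary = monitor.getPerformanceSummary("profile.load");
  if (summary) {
    logger.debug("[PerformanceMonitor] profile.load summary:", summary);
  }
}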