daily-notification-plugin/ios/Plugin/DailyNotificationErrorHandler.swift
Matthew Raymer · 8ded555a21 · fix(ios): resolve compilation errors and enable successful build
Fixed critical compilation errors preventing iOS plugin build:
- Updated logger API calls from logger.debug(TAG, msg) to logger.log(.debug, msg)
  across all iOS plugin files to match DailyNotificationLogger interface
- Fixed async/await concurrency in makeConditionalRequest using semaphore pattern
- Fixed NotificationContent immutability by creating new instances instead of mutation
- Changed private access control to internal for extension-accessible methods
- Added iOS 15.0+ availability checks for interruptionLevel property
- Fixed static member references using Self.MEMBER_NAME syntax
- Added missing .scheduling case to exhaustive switch statement
- Fixed variable initialization in retry state closures

Added DailyNotificationStorage.swift implementation matching Android pattern.

Updated build scripts with improved error reporting and full log visibility.

iOS plugin now compiles successfully. All build errors resolved.
2025-11-04 22:22:02 -08:00
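
The semaphore fix mentioned above refers to bridging async/await work into a synchronous call path. A minimal sketch of that pattern follows; the fetchSync name, the ResultBox helper, and the URLSession call are illustrative placeholders, not the plugin's actual makeConditionalRequest implementation:

    import Foundation

    // Simple reference box so the Task closure can hand its result back to the caller.
    final class ResultBox { var data: Data? }

    // Bridge an async call into a synchronous method by blocking on a semaphore.
    // Must be called off the main thread to avoid deadlocking the UI.
    func fetchSync(url: URL) -> Data? {
        let semaphore = DispatchSemaphore(value: 0)
        let box = ResultBox()
        Task {
            if let (data, _) = try? await URLSession.shared.data(from: url) {
                box.data = data
            }
            semaphore.signal()
        }
        semaphore.wait()  // block until the async work signals completion
        return box.data
    }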

/**
 * DailyNotificationErrorHandler.swift
 *
 * iOS Error Handler for comprehensive error management
 * Implements error categorization, retry logic, and telemetry
 *
 * @author Matthew Raymer
 * @version 1.0.0
 */

import Foundation

/**
 * Manages comprehensive error handling with categorization, retry logic, and telemetry
 *
 * This class implements the critical error handling functionality:
 * - Categorizes errors by type, code, and severity
 * - Implements exponential backoff retry logic
 * - Tracks error metrics and telemetry
 * - Provides debugging information
 * - Manages retry state and limits
 */
class DailyNotificationErrorHandler {

    // MARK: - Constants

    private static let TAG = "DailyNotificationErrorHandler"

    // Retry configuration
    private static let DEFAULT_MAX_RETRIES = 3
    private static let DEFAULT_BASE_DELAY_SECONDS: TimeInterval = 1.0
    private static let DEFAULT_MAX_DELAY_SECONDS: TimeInterval = 30.0
    private static let DEFAULT_BACKOFF_MULTIPLIER: Double = 2.0

    // Error severity levels
    enum ErrorSeverity {
        case low       // Minor issues, non-critical
        case medium    // Moderate issues, may affect functionality
        case high      // Serious issues, significant impact
        case critical  // Critical issues, system failure
    }

    // Error categories
    enum ErrorCategory {
        case network        // Network-related errors
        case storage        // Storage/database errors
        case scheduling     // Notification scheduling errors
        case permission     // Permission-related errors
        case configuration  // Configuration errors
        case system         // System-level errors
        case unknown        // Unknown/unclassified errors
    }

    // MARK: - Properties

    private let logger: DailyNotificationLogger
    private var retryStates: [String: RetryState] = [:]
    private let retryQueue = DispatchQueue(label: "error.retry", attributes: .concurrent)
    private let metrics = ErrorMetrics()
    private let config: ErrorConfiguration

    // MARK: - Initialization

    /**
     * Constructor with default configuration
     */
    init(logger: DailyNotificationLogger) {
        self.logger = logger
        self.config = ErrorConfiguration()
        logger.log(.debug, "ErrorHandler initialized with max retries: \(config.maxRetries)")
    }

    /**
     * Constructor with custom configuration
     *
     * @param logger Logger instance for debugging
     * @param config Error handling configuration
     */
    init(logger: DailyNotificationLogger, config: ErrorConfiguration) {
        self.logger = logger
        self.config = config
        logger.log(.debug, "ErrorHandler initialized with max retries: \(config.maxRetries)")
    }

    // MARK: - Error Handling

    /**
     * Handle error with automatic retry logic
     *
     * @param operationId Unique identifier for the operation
     * @param error Error to handle
     * @param retryable Whether this error is retryable
     * @return ErrorResult with handling information
     */
    func handleError(operationId: String, error: Error, retryable: Bool) -> ErrorResult {
        do {
            logger.log(.debug, "Handling error for operation: \(operationId)")

            // Categorize error
            let errorInfo = categorizeError(error)

            // Update metrics
            metrics.recordError(errorInfo)

            // Check if retryable and within limits
            if retryable && shouldRetry(operationId: operationId, errorInfo: errorInfo) {
                return handleRetryableError(operationId: operationId, errorInfo: errorInfo)
            } else {
                return handleNonRetryableError(operationId: operationId, errorInfo: errorInfo)
            }
        } catch {
            logger.log(.error, "Error in error handler: \(error)")
            return ErrorResult.fatal(message: "Error handler failure: \(error.localizedDescription)")
        }
    }
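
    // Illustrative usage (sketch, not part of the original file): a caller passes a
    // stable operation id and reacts to the returned ErrorResult, re-running the
    // operation itself after the suggested delay.
    //
    //     let result = errorHandler.handleError(operationId: "fetch-daily-content",
    //                                           error: error,
    //                                           retryable: true)
    //     if result.retryable {
    //         // re-schedule the operation after result.retryDelaySeconds
    //     }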

    /**
     * Handle error with custom retry configuration
     *
     * @param operationId Unique identifier for the operation
     * @param error Error to handle
     * @param retryConfig Custom retry configuration
     * @return ErrorResult with handling information
     */
    func handleError(operationId: String, error: Error, retryConfig: RetryConfiguration) -> ErrorResult {
        do {
            logger.log(.debug, "Handling error with custom retry config for operation: \(operationId)")

            // Categorize error
            let errorInfo = categorizeError(error)

            // Update metrics
            metrics.recordError(errorInfo)

            // Check if retryable with custom config
            if shouldRetry(operationId: operationId, errorInfo: errorInfo, retryConfig: retryConfig) {
                return handleRetryableError(operationId: operationId, errorInfo: errorInfo, retryConfig: retryConfig)
            } else {
                return handleNonRetryableError(operationId: operationId, errorInfo: errorInfo)
            }
        } catch {
            logger.log(.error, "Error in error handler with custom config: \(error)")
            return ErrorResult.fatal(message: "Error handler failure: \(error.localizedDescription)")
        }
    }

    // MARK: - Error Categorization

    /**
     * Categorize error by type, code, and severity
     *
     * @param error Error to categorize
     * @return ErrorInfo with categorization
     */
    private func categorizeError(_ error: Error) -> ErrorInfo {
        do {
            let category = determineCategory(error)
            let errorCode = determineErrorCode(error)
            let severity = determineSeverity(error, category: category)

            let errorInfo = ErrorInfo(
                error: error,
                category: category,
                errorCode: errorCode,
                severity: severity,
                timestamp: Date()
            )

            logger.log(.debug, "Error categorized: \(errorInfo)")
            return errorInfo
        } catch {
            logger.log(.error, "Error during categorization: \(error)")
            return ErrorInfo(
                error: error,
                category: .unknown,
                errorCode: "CATEGORIZATION_FAILED",
                severity: .high,
                timestamp: Date()
            )
        }
    }

    /**
     * Determine error category based on error type
     *
     * @param error Error to analyze
     * @return ErrorCategory
     */
    private func determineCategory(_ error: Error) -> ErrorCategory {
        let errorType = String(describing: type(of: error))
        let errorMessage = error.localizedDescription

        // Network errors
        if errorType.contains("URLError") || errorType.contains("Network") ||
            errorType.contains("Connection") || errorType.contains("Timeout") {
            return .network
        }

        // Storage errors
        if errorType.contains("SQLite") || errorType.contains("Database") ||
            errorType.contains("Storage") || errorType.contains("File") {
            return .storage
        }

        // Permission errors
        if errorType.contains("Security") || errorType.contains("Permission") ||
            errorMessage.contains("permission") {
            return .permission
        }

        // Configuration errors
        if errorType.contains("IllegalArgument") || errorType.contains("Configuration") ||
            errorMessage.contains("config") {
            return .configuration
        }

        // System errors
        if errorType.contains("OutOfMemory") || errorType.contains("StackOverflow") ||
            errorType.contains("Runtime") {
            return .system
        }

        return .unknown
    }

    /**
     * Determine error code based on error details
     *
     * @param error Error to analyze
     * @return Error code string
     */
    private func determineErrorCode(_ error: Error) -> String {
        let errorType = String(describing: type(of: error))
        let errorMessage = error.localizedDescription

        // Generate error code based on type and message
        if !errorMessage.isEmpty {
            return "\(errorType)_\(errorMessage.hashValue)"
        } else {
            return "\(errorType)_\(Date().timeIntervalSince1970)"
        }
    }

    /**
     * Determine error severity based on error and category
     *
     * @param error Error to analyze
     * @param category Error category
     * @return ErrorSeverity
     */
    private func determineSeverity(_ error: Error, category: ErrorCategory) -> ErrorSeverity {
        let errorType = String(describing: type(of: error))

        // Critical errors
        if errorType.contains("OutOfMemory") || errorType.contains("StackOverflow") {
            return .critical
        }

        // High severity errors
        if category == .system || category == .storage {
            return .high
        }

        // Medium severity errors
        if category == .network || category == .permission {
            return .medium
        }

        // Low severity errors
        return .low
    }

    // MARK: - Retry Logic

    /**
     * Check if error should be retried
     *
     * @param operationId Operation identifier
     * @param errorInfo Error information
     * @return true if should retry
     */
    private func shouldRetry(operationId: String, errorInfo: ErrorInfo) -> Bool {
        return shouldRetry(operationId: operationId, errorInfo: errorInfo, retryConfig: nil)
    }

    /**
     * Check if error should be retried with custom config
     *
     * @param operationId Operation identifier
     * @param errorInfo Error information
     * @param retryConfig Custom retry configuration
     * @return true if should retry
     */
    private func shouldRetry(operationId: String, errorInfo: ErrorInfo, retryConfig: RetryConfiguration?) -> Bool {
        do {
            // Get retry state
            var state: RetryState!
            retryQueue.sync {
                if retryStates[operationId] == nil {
                    retryStates[operationId] = RetryState()
                }
                state = retryStates[operationId]!
            }

            // Check retry limits
            let maxRetries = retryConfig?.maxRetries ?? config.maxRetries
            if state.attemptCount >= maxRetries {
                logger.log(.debug, "Max retries exceeded for operation: \(operationId)")
                return false
            }

            // Check if error is retryable based on category
            let isRetryable = isErrorRetryable(errorInfo.category)
            logger.log(.debug, "Should retry: \(isRetryable) (attempt: \(state.attemptCount)/\(maxRetries))")
            return isRetryable
        } catch {
            logger.log(.error, "Error checking retry eligibility: \(error)")
            return false
        }
    }

    /**
     * Check if error category is retryable
     *
     * @param category Error category
     * @return true if retryable
     */
    private func isErrorRetryable(_ category: ErrorCategory) -> Bool {
        switch category {
        case .network, .storage:
            return true
        case .permission, .configuration, .system, .unknown, .scheduling:
            return false
        }
    }

    /**
     * Handle retryable error
     *
     * @param operationId Operation identifier
     * @param errorInfo Error information
     * @return ErrorResult with retry information
     */
    private func handleRetryableError(operationId: String, errorInfo: ErrorInfo) -> ErrorResult {
        return handleRetryableError(operationId: operationId, errorInfo: errorInfo, retryConfig: nil)
    }

    /**
     * Handle retryable error with custom config
     *
     * @param operationId Operation identifier
     * @param errorInfo Error information
     * @param retryConfig Custom retry configuration
     * @return ErrorResult with retry information
     */
    private func handleRetryableError(operationId: String, errorInfo: ErrorInfo, retryConfig: RetryConfiguration?) -> ErrorResult {
        do {
            var state: RetryState!
            retryQueue.sync {
                if retryStates[operationId] == nil {
                    retryStates[operationId] = RetryState()
                }
                state = retryStates[operationId]!
                state.attemptCount += 1
            }

            // Calculate delay with exponential backoff
            let delay = calculateRetryDelay(attemptCount: state.attemptCount, retryConfig: retryConfig)
            state.nextRetryTime = Date().addingTimeInterval(delay)

            logger.log(.info, "Retryable error handled - retry in \(delay)s (attempt \(state.attemptCount))")
            return ErrorResult.retryable(errorInfo: errorInfo, retryDelaySeconds: delay, attemptCount: state.attemptCount)
        } catch {
            logger.log(.error, "Error handling retryable error: \(error)")
            return ErrorResult.fatal(message: "Retry handling failure: \(error.localizedDescription)")
        }
    }

    /**
     * Handle non-retryable error
     *
     * @param operationId Operation identifier
     * @param errorInfo Error information
     * @return ErrorResult with failure information
     */
    private func handleNonRetryableError(operationId: String, errorInfo: ErrorInfo) -> ErrorResult {
        do {
            logger.log(.warning, "Non-retryable error handled for operation: \(operationId)")

            // Clean up retry state
            retryQueue.async(flags: .barrier) {
                self.retryStates.removeValue(forKey: operationId)
            }

            return ErrorResult.fatal(errorInfo: errorInfo)
        } catch {
            logger.log(.error, "Error handling non-retryable error: \(error)")
            return ErrorResult.fatal(message: "Non-retryable error handling failure: \(error.localizedDescription)")
        }
    }

    /**
     * Calculate retry delay with exponential backoff
     *
     * @param attemptCount Current attempt number
     * @param retryConfig Custom retry configuration
     * @return Delay in seconds
     */
    private func calculateRetryDelay(attemptCount: Int, retryConfig: RetryConfiguration?) -> TimeInterval {
        do {
            let baseDelay = retryConfig?.baseDelaySeconds ?? config.baseDelaySeconds
            let multiplier = retryConfig?.backoffMultiplier ?? config.backoffMultiplier
            let maxDelay = retryConfig?.maxDelaySeconds ?? config.maxDelaySeconds

            // Calculate exponential backoff: baseDelay * (multiplier ^ (attemptCount - 1))
            var delay = baseDelay * pow(multiplier, Double(attemptCount - 1))

            // Cap at maximum delay
            delay = min(delay, maxDelay)

            // Add jitter to prevent thundering herd
            let jitter = delay * 0.1 * Double.random(in: 0...1)
            delay += jitter

            logger.log(.debug, "Calculated retry delay: \(delay)s (attempt \(attemptCount))")
            return delay
        } catch {
            logger.log(.error, "Error calculating retry delay: \(error)")
            return config.baseDelaySeconds
        }
    }
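
    // Worked example with the default configuration (base 1.0s, multiplier 2.0, cap 30s):
    //   attempt 1 -> 1.0s, attempt 2 -> 2.0s, attempt 3 -> 4.0s, attempt 4 -> 8.0s, ...
    //   capped at 30s, with up to 10% random jitter added to each delay.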

    // MARK: - Metrics and Telemetry

    /**
     * Get error metrics
     *
     * @return ErrorMetrics with current statistics
     */
    func getMetrics() -> ErrorMetrics {
        return metrics
    }

    /**
     * Reset error metrics
     */
    func resetMetrics() {
        metrics.reset()
        logger.log(.debug, "Error metrics reset")
    }

    /**
     * Get retry statistics
     *
     * @return RetryStatistics with retry information
     */
    func getRetryStatistics() -> RetryStatistics {
        var totalOperations = 0
        var activeRetries = 0
        var totalRetries = 0

        retryQueue.sync {
            totalOperations = retryStates.count
            for state in retryStates.values {
                if state.attemptCount > 0 {
                    activeRetries += 1
                    totalRetries += state.attemptCount
                }
            }
        }

        return RetryStatistics(totalOperations: totalOperations, activeRetries: activeRetries, totalRetries: totalRetries)
    }
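
    // Illustrative telemetry read (sketch, not part of the original file):
    //     let stats = errorHandler.getRetryStatistics()
    //     logger.log(.info, stats.description)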

    /**
     * Clear retry states
     */
    func clearRetryStates() {
        retryQueue.async(flags: .barrier) {
            self.retryStates.removeAll()
        }
        logger.log(.debug, "Retry states cleared")
    }

    // MARK: - Data Classes

    /**
     * Error information
     */
    struct ErrorInfo {
        let error: Error
        let category: ErrorCategory
        let errorCode: String
        let severity: ErrorSeverity
        let timestamp: Date

        var description: String {
            return "ErrorInfo{category=\(category), code=\(errorCode), severity=\(severity), error=\(String(describing: type(of: error)))}"
        }
    }

    /**
     * Retry state for an operation
     */
    private class RetryState {
        var attemptCount = 0
        var nextRetryTime = Date()
    }

    /**
     * Error result
     */
    struct ErrorResult {
        let success: Bool
        let retryable: Bool
        let errorInfo: ErrorInfo?
        let retryDelaySeconds: TimeInterval
        let attemptCount: Int
        let message: String

        static func retryable(errorInfo: ErrorInfo, retryDelaySeconds: TimeInterval, attemptCount: Int) -> ErrorResult {
            return ErrorResult(success: false, retryable: true, errorInfo: errorInfo, retryDelaySeconds: retryDelaySeconds, attemptCount: attemptCount, message: "Retryable error")
        }

        static func fatal(errorInfo: ErrorInfo) -> ErrorResult {
            return ErrorResult(success: false, retryable: false, errorInfo: errorInfo, retryDelaySeconds: 0, attemptCount: 0, message: "Fatal error")
        }

        static func fatal(message: String) -> ErrorResult {
            return ErrorResult(success: false, retryable: false, errorInfo: nil, retryDelaySeconds: 0, attemptCount: 0, message: message)
        }
    }

    /**
     * Error configuration
     */
    struct ErrorConfiguration {
        let maxRetries: Int
        let baseDelaySeconds: TimeInterval
        let maxDelaySeconds: TimeInterval
        let backoffMultiplier: Double

        init() {
            self.maxRetries = DailyNotificationErrorHandler.DEFAULT_MAX_RETRIES
            self.baseDelaySeconds = DailyNotificationErrorHandler.DEFAULT_BASE_DELAY_SECONDS
            self.maxDelaySeconds = DailyNotificationErrorHandler.DEFAULT_MAX_DELAY_SECONDS
            self.backoffMultiplier = DailyNotificationErrorHandler.DEFAULT_BACKOFF_MULTIPLIER
        }

        init(maxRetries: Int, baseDelaySeconds: TimeInterval, maxDelaySeconds: TimeInterval, backoffMultiplier: Double) {
            self.maxRetries = maxRetries
            self.baseDelaySeconds = baseDelaySeconds
            self.maxDelaySeconds = maxDelaySeconds
            self.backoffMultiplier = backoffMultiplier
        }
    }
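
    // Illustrative custom configuration (values are examples only, not the plugin's defaults):
    //     let config = ErrorConfiguration(maxRetries: 5,
    //                                     baseDelaySeconds: 2.0,
    //                                     maxDelaySeconds: 60.0,
    //                                     backoffMultiplier: 2.0)
    //     let handler = DailyNotificationErrorHandler(logger: logger, config: config)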

    /**
     * Retry configuration
     */
    struct RetryConfiguration {
        let maxRetries: Int
        let baseDelaySeconds: TimeInterval
        let maxDelaySeconds: TimeInterval
        let backoffMultiplier: Double

        init(maxRetries: Int, baseDelaySeconds: TimeInterval, maxDelaySeconds: TimeInterval, backoffMultiplier: Double) {
            self.maxRetries = maxRetries
            self.baseDelaySeconds = baseDelaySeconds
            self.maxDelaySeconds = maxDelaySeconds
            self.backoffMultiplier = backoffMultiplier
        }
    }
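
    // Per-operation override (illustrative, not part of the original file): a
    // RetryConfiguration can be passed to handleError(operationId:error:retryConfig:)
    // to tighten or relax retries for a single operation without changing the
    // handler-wide defaults.
    //     let quickFail = RetryConfiguration(maxRetries: 1, baseDelaySeconds: 0.5,
    //                                        maxDelaySeconds: 5.0, backoffMultiplier: 2.0)
    //     _ = errorHandler.handleError(operationId: "refresh-token", error: error, retryConfig: quickFail)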

    /**
     * Error metrics
     */
    class ErrorMetrics {
        private var totalErrors = 0
        private var networkErrors = 0
        private var storageErrors = 0
        private var schedulingErrors = 0
        private var permissionErrors = 0
        private var configurationErrors = 0
        private var systemErrors = 0
        private var unknownErrors = 0

        func recordError(_ errorInfo: ErrorInfo) {
            totalErrors += 1
            switch errorInfo.category {
            case .network:
                networkErrors += 1
            case .storage:
                storageErrors += 1
            case .scheduling:
                schedulingErrors += 1
            case .permission:
                permissionErrors += 1
            case .configuration:
                configurationErrors += 1
            case .system:
                systemErrors += 1
            case .unknown:
                unknownErrors += 1
            }
        }

        func reset() {
            totalErrors = 0
            networkErrors = 0
            storageErrors = 0
            schedulingErrors = 0
            permissionErrors = 0
            configurationErrors = 0
            systemErrors = 0
            unknownErrors = 0
        }

        var totalErrorsCount: Int { return totalErrors }
        var networkErrorsCount: Int { return networkErrors }
        var storageErrorsCount: Int { return storageErrors }
        var schedulingErrorsCount: Int { return schedulingErrors }
        var permissionErrorsCount: Int { return permissionErrors }
        var configurationErrorsCount: Int { return configurationErrors }
        var systemErrorsCount: Int { return systemErrors }
        var unknownErrorsCount: Int { return unknownErrors }
    }

    /**
     * Retry statistics
     */
    struct RetryStatistics {
        let totalOperations: Int
        let activeRetries: Int
        let totalRetries: Int

        var description: String {
            return "RetryStatistics{totalOps=\(totalOperations), activeRetries=\(activeRetries), totalRetries=\(totalRetries)}"
        }
    }
}