/**
* DailyNotificationErrorHandler.java
*
* Android Error Handler for comprehensive error management
* Implements error categorization, retry logic, and telemetry
*
* @author Matthew Raymer
* @version 1.0.0
*/
package com.timesafari.dailynotification;
import android.util.Log;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Manages comprehensive error handling with categorization, retry logic, and telemetry
*
* This class implements the critical error handling functionality:
* - Categorizes errors by type, code, and severity
* - Implements exponential backoff retry logic
* - Tracks error metrics and telemetry
* - Provides debugging information
* - Manages retry state and limits
*/
public class DailyNotificationErrorHandler {
// MARK: - Constants
private static final String TAG = "DailyNotifErrorHandler"; // kept within Android's 23-character log tag limit
// Retry configuration
private static final int DEFAULT_MAX_RETRIES = 3;
private static final long DEFAULT_BASE_DELAY_MS = 1000; // 1 second
private static final long DEFAULT_MAX_DELAY_MS = 30000; // 30 seconds
private static final double DEFAULT_BACKOFF_MULTIPLIER = 2.0;
// Error severity levels
public enum ErrorSeverity {
LOW, // Minor issues, non-critical
MEDIUM, // Moderate issues, may affect functionality
HIGH, // Serious issues, significant impact
CRITICAL // Critical issues, system failure
}
// Error categories
public enum ErrorCategory {
NETWORK, // Network-related errors
STORAGE, // Storage/database errors
SCHEDULING, // Notification scheduling errors
PERMISSION, // Permission-related errors
CONFIGURATION, // Configuration errors
SYSTEM, // System-level errors
UNKNOWN // Unknown/unclassified errors
}
// MARK: - Properties
private final ConcurrentHashMap<String, RetryState> retryStates;
private final ErrorMetrics metrics;
private final ErrorConfiguration config;
// MARK: - Initialization
/**
* Constructor with default configuration
*/
public DailyNotificationErrorHandler() {
this(new ErrorConfiguration());
}
/**
* Constructor with custom configuration
*
* @param config Error handling configuration
*/
public DailyNotificationErrorHandler(ErrorConfiguration config) {
this.retryStates = new ConcurrentHashMap<>();
this.metrics = new ErrorMetrics();
this.config = config;
Log.d(TAG, "ErrorHandler initialized with max retries: " + config.maxRetries);
}
// MARK: - Error Handling
/**
* Handle error with automatic retry logic
*
* @param operationId Unique identifier for the operation
* @param error Error to handle
* @param retryable Whether this error is retryable
* @return ErrorResult with handling information
*/
public ErrorResult handleError(String operationId, Throwable error, boolean retryable) {
try {
Log.d(TAG, "Handling error for operation: " + operationId);
// Categorize error
ErrorInfo errorInfo = categorizeError(error);
// Update metrics
metrics.recordError(errorInfo);
// Check if retryable and within limits
if (retryable && shouldRetry(operationId, errorInfo)) {
return handleRetryableError(operationId, errorInfo);
} else {
return handleNonRetryableError(operationId, errorInfo);
}
} catch (Exception e) {
Log.e(TAG, "Error in error handler", e);
return ErrorResult.fatal("Error handler failure: " + e.getMessage());
}
}
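/*
 * Illustrative usage (a minimal sketch; fetchContent() and the caller-side
 * scheduleRetry(...) helper are hypothetical and not part of this plugin):
 *
 *   DailyNotificationErrorHandler handler = new DailyNotificationErrorHandler();
 *   try {
 *       fetchContent();
 *   } catch (Exception e) {
 *       ErrorResult result = handler.handleError("fetch_content", e, true);
 *       if (result.retryable) {
 *           scheduleRetry(result.retryDelayMs); // re-run after the suggested backoff
 *       } else {
 *           Log.e(TAG, "Giving up: " + result.message);
 *       }
 *   }
 */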
/**
* Handle error with custom retry configuration
*
* @param operationId Unique identifier for the operation
* @param error Error to handle
* @param retryConfig Custom retry configuration
* @return ErrorResult with handling information
*/
public ErrorResult handleError(String operationId, Throwable error, RetryConfiguration retryConfig) {
try {
Log.d(TAG, "Handling error with custom retry config for operation: " + operationId);
// Categorize error
ErrorInfo errorInfo = categorizeError(error);
// Update metrics
metrics.recordError(errorInfo);
// Check if retryable with custom config
if (shouldRetry(operationId, errorInfo, retryConfig)) {
return handleRetryableError(operationId, errorInfo, retryConfig);
} else {
return handleNonRetryableError(operationId, errorInfo);
}
} catch (Exception e) {
Log.e(TAG, "Error in error handler with custom config", e);
return ErrorResult.fatal("Error handler failure: " + e.getMessage());
}
}
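/*
 * Illustrative per-operation override (a sketch; the operation name and values
 * are examples only). A long-running sync might tolerate more attempts and a
 * longer delay cap than the handler-wide defaults:
 *
 *   RetryConfiguration syncRetries = new RetryConfiguration(
 *       5,      // maxRetries
 *       2000,   // baseDelayMs
 *       60000,  // maxDelayMs
 *       2.0);   // backoffMultiplier
 *   ErrorResult result = handler.handleError("daily_sync", e, syncRetries);
 */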
// MARK: - Error Categorization
/**
* Categorize error by type, code, and severity
*
* @param error Error to categorize
* @return ErrorInfo with categorization
*/
private ErrorInfo categorizeError(Throwable error) {
try {
ErrorCategory category = determineCategory(error);
String errorCode = determineErrorCode(error);
ErrorSeverity severity = determineSeverity(error, category);
ErrorInfo errorInfo = new ErrorInfo(
error,
category,
errorCode,
severity,
System.currentTimeMillis()
);
Log.d(TAG, "Error categorized: " + errorInfo);
return errorInfo;
} catch (Exception e) {
Log.e(TAG, "Error during categorization", e);
return new ErrorInfo(error, ErrorCategory.UNKNOWN, "CATEGORIZATION_FAILED", ErrorSeverity.HIGH, System.currentTimeMillis());
}
}
/**
* Determine error category based on error type
*
* @param error Error to analyze
* @return ErrorCategory
*/
private ErrorCategory determineCategory(Throwable error) {
String errorMessage = error.getMessage();
String errorType = error.getClass().getSimpleName();
// Network errors
if (errorType.contains("IOException") || errorType.contains("Socket") ||
errorType.contains("Connect") || errorType.contains("Timeout")) {
return ErrorCategory.NETWORK;
}
// Storage errors
if (errorType.contains("SQLite") || errorType.contains("Database") ||
errorType.contains("Storage") || errorType.contains("File")) {
return ErrorCategory.STORAGE;
}
// Permission errors
if (errorType.contains("Security") || errorType.contains("Permission") ||
errorMessage != null && errorMessage.contains("permission")) {
return ErrorCategory.PERMISSION;
}
// Configuration errors
if (errorType.contains("IllegalArgument") || errorType.contains("Configuration") ||
errorMessage != null && errorMessage.contains("config")) {
return ErrorCategory.CONFIGURATION;
}
// System errors
if (errorType.contains("OutOfMemory") || errorType.contains("StackOverflow") ||
errorType.contains("Runtime")) {
return ErrorCategory.SYSTEM;
}
return ErrorCategory.UNKNOWN;
}
/**
* Determine error code based on error details
*
* @param error Error to analyze
* @return Error code string
*/
private String determineErrorCode(Throwable error) {
String errorType = error.getClass().getSimpleName();
String errorMessage = error.getMessage();
// Generate error code based on type and message
if (errorMessage != null && errorMessage.length() > 0) {
return errorType + "_" + errorMessage.hashCode();
} else {
return errorType + "_" + System.currentTimeMillis();
}
}
/**
* Determine error severity based on error and category
*
* @param error Error to analyze
* @param category Error category
* @return ErrorSeverity
*/
private ErrorSeverity determineSeverity(Throwable error, ErrorCategory category) {
// Critical errors
if (error instanceof OutOfMemoryError || error instanceof StackOverflowError) {
return ErrorSeverity.CRITICAL;
}
// High severity errors
if (category == ErrorCategory.SYSTEM || category == ErrorCategory.STORAGE) {
return ErrorSeverity.HIGH;
}
// Medium severity errors
if (category == ErrorCategory.NETWORK || category == ErrorCategory.PERMISSION) {
return ErrorSeverity.MEDIUM;
}
// Low severity errors
return ErrorSeverity.LOW;
}
// MARK: - Retry Logic
/**
* Check if error should be retried
*
* @param operationId Operation identifier
* @param errorInfo Error information
* @return true if should retry
*/
private boolean shouldRetry(String operationId, ErrorInfo errorInfo) {
return shouldRetry(operationId, errorInfo, null);
}
/**
* Check if error should be retried with custom config
*
* @param operationId Operation identifier
* @param errorInfo Error information
* @param retryConfig Custom retry configuration, or null to use the handler defaults
* @return true if should retry
*/
private boolean shouldRetry(String operationId, ErrorInfo errorInfo, RetryConfiguration retryConfig) {
try {
// Get or atomically create retry state; putIfAbsent avoids losing attempt counts when callers race
RetryState state = retryStates.get(operationId);
if (state == null) {
RetryState created = new RetryState();
RetryState existing = retryStates.putIfAbsent(operationId, created);
state = (existing != null) ? existing : created;
}
// Check retry limits
int maxRetries = retryConfig != null ? retryConfig.maxRetries : config.maxRetries;
if (state.attemptCount >= maxRetries) {
Log.d(TAG, "Max retries exceeded for operation: " + operationId);
return false;
}
// Check if error is retryable based on category
boolean isRetryable = isErrorRetryable(errorInfo.category);
Log.d(TAG, "Should retry: " + isRetryable + " (attempt: " + state.attemptCount + "/" + maxRetries + ")");
return isRetryable;
} catch (Exception e) {
Log.e(TAG, "Error checking retry eligibility", e);
return false;
}
}
/**
* Check if error category is retryable
*
* @param category Error category
* @return true if retryable
*/
private boolean isErrorRetryable(ErrorCategory category) {
switch (category) {
case NETWORK:
case STORAGE:
return true;
case PERMISSION:
case CONFIGURATION:
case SYSTEM:
case UNKNOWN:
default:
return false;
}
}
/**
* Handle retryable error
*
* @param operationId Operation identifier
* @param errorInfo Error information
* @return ErrorResult with retry information
*/
private ErrorResult handleRetryableError(String operationId, ErrorInfo errorInfo) {
return handleRetryableError(operationId, errorInfo, null);
}
/**
* Handle retryable error with custom config
*
* @param operationId Operation identifier
* @param errorInfo Error information
* @param retryConfig Custom retry configuration, or null to use the handler defaults
* @return ErrorResult with retry information
*/
private ErrorResult handleRetryableError(String operationId, ErrorInfo errorInfo, RetryConfiguration retryConfig) {
try {
RetryState state = retryStates.get(operationId);
if (state == null) {
// Defensive guard: shouldRetry() normally creates this entry, but it may have been cleared
state = new RetryState();
retryStates.put(operationId, state);
}
state.attemptCount++;
// Calculate delay with exponential backoff
long delay = calculateRetryDelay(state.attemptCount, retryConfig);
state.nextRetryTime = System.currentTimeMillis() + delay;
Log.i(TAG, "Retryable error handled - retry in " + delay + "ms (attempt " + state.attemptCount + ")");
return ErrorResult.retryable(errorInfo, delay, state.attemptCount);
} catch (Exception e) {
Log.e(TAG, "Error handling retryable error", e);
return ErrorResult.fatal("Retry handling failure: " + e.getMessage());
}
}
/**
* Handle non-retryable error
*
* @param operationId Operation identifier
* @param errorInfo Error information
* @return ErrorResult with failure information
*/
private ErrorResult handleNonRetryableError(String operationId, ErrorInfo errorInfo) {
try {
Log.w(TAG, "Non-retryable error handled for operation: " + operationId);
// Clean up retry state
retryStates.remove(operationId);
return ErrorResult.fatal(errorInfo);
} catch (Exception e) {
Log.e(TAG, "Error handling non-retryable error", e);
return ErrorResult.fatal("Non-retryable error handling failure: " + e.getMessage());
}
}
/**
* Calculate retry delay with exponential backoff
*
* @param attemptCount Current attempt number
* @param retryConfig Custom retry configuration, or null to use the handler defaults
* @return Delay in milliseconds
*/
private long calculateRetryDelay(int attemptCount, RetryConfiguration retryConfig) {
try {
long baseDelay = retryConfig != null ? retryConfig.baseDelayMs : config.baseDelayMs;
double multiplier = retryConfig != null ? retryConfig.backoffMultiplier : config.backoffMultiplier;
long maxDelay = retryConfig != null ? retryConfig.maxDelayMs : config.maxDelayMs;
// Calculate exponential backoff: baseDelay * (multiplier ^ (attemptCount - 1))
long delay = (long) (baseDelay * Math.pow(multiplier, attemptCount - 1));
// Cap at maximum delay
delay = Math.min(delay, maxDelay);
// Add jitter to prevent thundering herd
long jitter = (long) (delay * 0.1 * Math.random());
delay += jitter;
Log.d(TAG, "Calculated retry delay: " + delay + "ms (attempt " + attemptCount + ")");
return delay;
} catch (Exception e) {
Log.e(TAG, "Error calculating retry delay", e);
return config.baseDelayMs;
}
}
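/*
 * Worked example with the default configuration (base 1000ms, multiplier 2.0,
 * cap 30000ms): the nominal delays before jitter are
 *   attempt 1 -> 1000ms, attempt 2 -> 2000ms, attempt 3 -> 4000ms
 * Each value then gains up to 10% random jitter, so for example the second
 * attempt waits between 2000ms and 2200ms.
 */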
// MARK: - Metrics and Telemetry
/**
* Get error metrics
*
* @return ErrorMetrics with current statistics
*/
public ErrorMetrics getMetrics() {
return metrics;
}
/**
* Reset error metrics
*/
public void resetMetrics() {
metrics.reset();
Log.d(TAG, "Error metrics reset");
}
/**
* Get retry statistics
*
* @return RetryStatistics with retry information
*/
public RetryStatistics getRetryStatistics() {
int totalOperations = retryStates.size();
int activeRetries = 0;
int totalRetries = 0;
for (RetryState state : retryStates.values()) {
if (state.attemptCount > 0) {
activeRetries++;
totalRetries += state.attemptCount;
}
}
return new RetryStatistics(totalOperations, activeRetries, totalRetries);
}
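/*
 * Illustrative telemetry read-out (a sketch; the log format is an example only):
 *
 *   ErrorMetrics m = handler.getMetrics();
 *   RetryStatistics stats = handler.getRetryStatistics();
 *   Log.i(TAG, "errors=" + m.getTotalErrors()
 *           + " network=" + m.getNetworkErrors()
 *           + " " + stats);
 */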
/**
* Clear retry states
*/
public void clearRetryStates() {
retryStates.clear();
Log.d(TAG, "Retry states cleared");
}
// MARK: - Data Classes
/**
* Error information
*/
public static class ErrorInfo {
public final Throwable error;
public final ErrorCategory category;
public final String errorCode;
public final ErrorSeverity severity;
public final long timestamp;
public ErrorInfo(Throwable error, ErrorCategory category, String errorCode, ErrorSeverity severity, long timestamp) {
this.error = error;
this.category = category;
this.errorCode = errorCode;
this.severity = severity;
this.timestamp = timestamp;
}
@Override
public String toString() {
return String.format("ErrorInfo{category=%s, code=%s, severity=%s, error=%s}",
category, errorCode, severity, error.getClass().getSimpleName());
}
}
/**
* Retry state for an operation
*/
private static class RetryState {
public int attemptCount = 0;
public long nextRetryTime = 0;
}
/**
* Error result
*/
public static class ErrorResult {
public final boolean success;
public final boolean retryable;
public final ErrorInfo errorInfo;
public final long retryDelayMs;
public final int attemptCount;
public final String message;
private ErrorResult(boolean success, boolean retryable, ErrorInfo errorInfo, long retryDelayMs, int attemptCount, String message) {
this.success = success;
this.retryable = retryable;
this.errorInfo = errorInfo;
this.retryDelayMs = retryDelayMs;
this.attemptCount = attemptCount;
this.message = message;
}
public static ErrorResult retryable(ErrorInfo errorInfo, long retryDelayMs, int attemptCount) {
return new ErrorResult(false, true, errorInfo, retryDelayMs, attemptCount, "Retryable error");
}
public static ErrorResult fatal(ErrorInfo errorInfo) {
return new ErrorResult(false, false, errorInfo, 0, 0, "Fatal error");
}
public static ErrorResult fatal(String message) {
return new ErrorResult(false, false, null, 0, 0, message);
}
}
/**
* Error configuration
*/
public static class ErrorConfiguration {
public final int maxRetries;
public final long baseDelayMs;
public final long maxDelayMs;
public final double backoffMultiplier;
public ErrorConfiguration() {
this(DEFAULT_MAX_RETRIES, DEFAULT_BASE_DELAY_MS, DEFAULT_MAX_DELAY_MS, DEFAULT_BACKOFF_MULTIPLIER);
}
public ErrorConfiguration(int maxRetries, long baseDelayMs, long maxDelayMs, double backoffMultiplier) {
this.maxRetries = maxRetries;
this.baseDelayMs = baseDelayMs;
this.maxDelayMs = maxDelayMs;
this.backoffMultiplier = backoffMultiplier;
}
}
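/*
 * Illustrative handler-wide configuration (a sketch; the values are examples
 * only, not recommended defaults):
 *
 *   DailyNotificationErrorHandler handler = new DailyNotificationErrorHandler(
 *       new ErrorConfiguration(5, 500, 15000, 1.5));
 */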
/**
* Retry configuration
*/
public static class RetryConfiguration {
public final int maxRetries;
public final long baseDelayMs;
public final long maxDelayMs;
public final double backoffMultiplier;
public RetryConfiguration(int maxRetries, long baseDelayMs, long maxDelayMs, double backoffMultiplier) {
this.maxRetries = maxRetries;
this.baseDelayMs = baseDelayMs;
this.maxDelayMs = maxDelayMs;
this.backoffMultiplier = backoffMultiplier;
}
}
/**
* Error metrics
*/
public static class ErrorMetrics {
private final AtomicInteger totalErrors = new AtomicInteger(0);
private final AtomicInteger networkErrors = new AtomicInteger(0);
private final AtomicInteger storageErrors = new AtomicInteger(0);
private final AtomicInteger schedulingErrors = new AtomicInteger(0);
private final AtomicInteger permissionErrors = new AtomicInteger(0);
private final AtomicInteger configurationErrors = new AtomicInteger(0);
private final AtomicInteger systemErrors = new AtomicInteger(0);
private final AtomicInteger unknownErrors = new AtomicInteger(0);
public void recordError(ErrorInfo errorInfo) {
totalErrors.incrementAndGet();
switch (errorInfo.category) {
case NETWORK:
networkErrors.incrementAndGet();
break;
case STORAGE:
storageErrors.incrementAndGet();
break;
case SCHEDULING:
schedulingErrors.incrementAndGet();
break;
case PERMISSION:
permissionErrors.incrementAndGet();
break;
case CONFIGURATION:
configurationErrors.incrementAndGet();
break;
case SYSTEM:
systemErrors.incrementAndGet();
break;
case UNKNOWN:
default:
unknownErrors.incrementAndGet();
break;
}
}
public void reset() {
totalErrors.set(0);
networkErrors.set(0);
storageErrors.set(0);
schedulingErrors.set(0);
permissionErrors.set(0);
configurationErrors.set(0);
systemErrors.set(0);
unknownErrors.set(0);
}
public int getTotalErrors() { return totalErrors.get(); }
public int getNetworkErrors() { return networkErrors.get(); }
public int getStorageErrors() { return storageErrors.get(); }
public int getSchedulingErrors() { return schedulingErrors.get(); }
public int getPermissionErrors() { return permissionErrors.get(); }
public int getConfigurationErrors() { return configurationErrors.get(); }
public int getSystemErrors() { return systemErrors.get(); }
public int getUnknownErrors() { return unknownErrors.get(); }
}
/**
* Retry statistics
*/
public static class RetryStatistics {
public final int totalOperations;
public final int activeRetries;
public final int totalRetries;
public RetryStatistics(int totalOperations, int activeRetries, int totalRetries) {
this.totalOperations = totalOperations;
this.activeRetries = activeRetries;
this.totalRetries = totalRetries;
}
@Override
public String toString() {
return String.format("RetryStatistics{totalOps=%d, activeRetries=%d, totalRetries=%d}",
totalOperations, activeRetries, totalRetries);
}
}
}