Analytics
Advanced analytics and insights for Athena.Cache help you understand usage patterns, optimize performance, and make data-driven decisions about your caching strategy.
Cache Usage Analytics
Analyze how your cache is being used to identify optimization opportunities.
Usage Pattern Analysis
/// <summary>
/// Analyzes cache usage over a time window and produces a <see cref="CacheUsageReport"/>
/// covering overall statistics, usage patterns, key analysis and recommendations.
/// </summary>
public class CacheUsageAnalyzer : ICacheUsageAnalyzer
{
    private readonly ICacheMetricsCollector _metrics;
    private readonly ILogger<CacheUsageAnalyzer> _logger;

    // FIX: the readonly fields were declared but never assigned, so every call
    // would have failed with a NullReferenceException. Inject the dependencies.
    public CacheUsageAnalyzer(ICacheMetricsCollector metrics, ILogger<CacheUsageAnalyzer> logger)
    {
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Builds a usage report for the window of length <paramref name="period"/> ending now (UTC).
    /// </summary>
    public async Task<CacheUsageReport> AnalyzeUsageAsync(TimeSpan period)
    {
        var endTime = DateTimeOffset.UtcNow;
        var startTime = endTime - period;
        // Materialize once: the series is aggregated several times below.
        var metrics = (await _metrics.GetMetricsHistoryAsync(startTime, endTime)).ToList();
        return new CacheUsageReport
        {
            Period = period,
            StartTime = startTime,
            EndTime = endTime,

            // Overall statistics
            TotalRequests = metrics.Sum(m => m.TotalRequests),
            TotalHits = metrics.Sum(m => m.HitCount),
            TotalMisses = metrics.Sum(m => m.MissCount),
            OverallHitRate = CalculateOverallHitRate(metrics),

            // Performance metrics. FIX: Average() throws InvalidOperationException
            // on an empty window; report 0 instead.
            AverageResponseTime = metrics.Count > 0 ? metrics.Average(m => m.AverageResponseTime) : 0,
            P95ResponseTime = CalculatePercentile(metrics.Select(m => m.P95ResponseTime), 95),
            P99ResponseTime = CalculatePercentile(metrics.Select(m => m.P99ResponseTime), 99),

            // Usage patterns
            PeakUsageHours = IdentifyPeakUsageHours(metrics),
            UsageByController = await AnalyzeUsageByController(startTime, endTime),
            UsageByEndpoint = await AnalyzeUsageByEndpoint(startTime, endTime),

            // Key analysis (top/bottom 50 keys by access)
            MostAccessedKeys = await GetMostAccessedKeys(startTime, endTime, 50),
            LeastAccessedKeys = await GetLeastAccessedKeys(startTime, endTime, 50),
            UnusedKeys = await GetUnusedKeys(startTime, endTime),

            // Memory analysis
            MemoryUsageTrend = AnalyzeMemoryTrend(metrics),
            CacheSizeTrend = AnalyzeCacheSizeTrend(metrics),

            // Recommendations
            Recommendations = await GenerateRecommendations(metrics)
        };
    }

    // Aggregates per-key access history into per-controller statistics.
    // The controller name is derived from each cache key via ExtractControllerFromKey.
    private async Task<Dictionary<string, ControllerUsageStats>> AnalyzeUsageByController(DateTimeOffset start, DateTimeOffset end)
    {
        var controllerStats = new Dictionary<string, ControllerUsageStats>();
        var keyAccesses = await _metrics.GetKeyAccessHistoryAsync(start, end);
        foreach (var access in keyAccesses)
        {
            var controller = ExtractControllerFromKey(access.Key);
            // Single lookup instead of ContainsKey + indexer (double hash).
            if (!controllerStats.TryGetValue(controller, out var stats))
            {
                stats = new ControllerUsageStats
                {
                    ControllerName = controller,
                    TotalRequests = 0,
                    HitCount = 0,
                    MissCount = 0,
                    AverageResponseTime = 0,
                    UniqueKeys = new HashSet<string>()
                };
                controllerStats[controller] = stats;
            }
            stats.TotalRequests += access.AccessCount;
            stats.HitCount += access.HitCount;
            stats.MissCount += access.MissCount;
            stats.UniqueKeys.Add(access.Key);
        }

        // Calculate derived metrics (hit rate as a 0-100 percentage).
        foreach (var stats in controllerStats.Values)
        {
            stats.HitRate = stats.TotalRequests > 0 ? (double)stats.HitCount / stats.TotalRequests * 100 : 0;
            stats.KeyCount = stats.UniqueKeys.Count;
        }
        return controllerStats;
    }

    // Builds prioritized recommendations from the most recent metrics sample.
    // FIX: was `async` with no awaits (CS1998); returns a completed task so the
    // caller can still await it. Also guards the previously-throwing empty case.
    private Task<List<CacheRecommendation>> GenerateRecommendations(IEnumerable<CacheMetrics> metrics)
    {
        var recommendations = new List<CacheRecommendation>();
        var ordered = metrics.OrderBy(m => m.Timestamp).ToList();
        if (ordered.Count == 0)
            return Task.FromResult(recommendations); // no data, no recommendations
        var latestMetrics = ordered[^1];

        // Hit rate recommendations: below 70% is considered actionable.
        if (latestMetrics.HitRate < 70)
        {
            recommendations.Add(new CacheRecommendation
            {
                Type = RecommendationType.HitRateImprovement,
                Priority = RecommendationPriority.High,
                Title = "Low cache hit rate detected",
                Description = $"Current hit rate is {latestMetrics.HitRate:F1}%. Consider increasing expiration times or reviewing cache key patterns.",
                Impact = EstimateImpact(RecommendationType.HitRateImprovement, latestMetrics),
                Actions = new[]
                {
                    "Analyze most frequently missed cache keys",
                    "Consider increasing expiration times for stable data",
                    "Review cache key patterns for better reusability",
                    "Implement pre-warming for critical data"
                }
            });
        }

        // Performance recommendations: P95 above 100ms.
        if (latestMetrics.P95ResponseTime > 100)
        {
            recommendations.Add(new CacheRecommendation
            {
                Type = RecommendationType.Performance,
                Priority = RecommendationPriority.Medium,
                Title = "High response time detected",
                Description = $"P95 response time is {latestMetrics.P95ResponseTime:F1}ms. Consider optimizing cache operations.",
                Impact = EstimateImpact(RecommendationType.Performance, latestMetrics),
                Actions = new[]
                {
                    "Enable Source Generator for compile-time optimizations",
                    "Review serialization settings",
                    "Consider connection pooling optimizations",
                    "Analyze slow cache operations"
                }
            });
        }

        // Memory recommendations: above 1GB.
        if (latestMetrics.MemoryUsage > 1_000_000_000)
        {
            recommendations.Add(new CacheRecommendation
            {
                Type = RecommendationType.Memory,
                Priority = RecommendationPriority.Medium,
                Title = "High memory usage detected",
                Description = $"Cache is using {latestMetrics.MemoryUsage / 1024 / 1024:F0}MB of memory.",
                Impact = EstimateImpact(RecommendationType.Memory, latestMetrics),
                Actions = new[]
                {
                    "Enable memory pressure management",
                    "Review cache expiration policies",
                    "Consider implementing cache size limits",
                    "Analyze largest cached objects"
                }
            });
        }
        return Task.FromResult(recommendations);
    }
}
/// <summary>
/// Aggregated cache usage statistics for one reporting window, produced by
/// CacheUsageAnalyzer.AnalyzeUsageAsync.
/// </summary>
public class CacheUsageReport
{
// Reporting window: [StartTime, EndTime], where EndTime - StartTime == Period.
public TimeSpan Period { get; set; }
public DateTimeOffset StartTime { get; set; }
public DateTimeOffset EndTime { get; set; }
// Overall request counters summed over the window.
public long TotalRequests { get; set; }
public long TotalHits { get; set; }
public long TotalMisses { get; set; }
// Hit rate, presumably a 0-100 percentage like the per-controller HitRate — TODO confirm CalculateOverallHitRate.
public double OverallHitRate { get; set; }
// Response-time statistics in milliseconds (recommendation text reports them as ms).
public double AverageResponseTime { get; set; }
public double P95ResponseTime { get; set; }
public double P99ResponseTime { get; set; }
// Hours with the highest traffic (presumably 0-23 hour-of-day — verify IdentifyPeakUsageHours).
public List<int> PeakUsageHours { get; set; }
// Usage broken down by controller / endpoint, keyed by name.
public Dictionary<string, ControllerUsageStats> UsageByController { get; set; }
public Dictionary<string, EndpointUsageStats> UsageByEndpoint { get; set; }
// Top and bottom 50 keys by access count for the window.
public List<KeyUsageStats> MostAccessedKeys { get; set; }
public List<KeyUsageStats> LeastAccessedKeys { get; set; }
// Keys that received no accesses during the window.
public List<string> UnusedKeys { get; set; }
// Trend analyses for memory usage and cache size over the window.
public TrendAnalysis MemoryUsageTrend { get; set; }
public TrendAnalysis CacheSizeTrend { get; set; }
// Actionable recommendations derived from the latest metrics sample.
public List<CacheRecommendation> Recommendations { get; set; }
}
Performance Trend Analysis
Track performance trends over time to identify patterns and degradation.
Trend Analysis Engine
/// <summary>
/// Analyzes a time-ordered cache-metric series for trend direction, change rate,
/// volatility, hourly seasonality, anomalies and a simple linear forecast.
/// </summary>
public class PerformanceTrendAnalyzer : IPerformanceTrendAnalyzer
{
    /// <summary>
    /// Runs all trend analyses for the given metric type over the supplied history.
    /// </summary>
    public async Task<TrendAnalysis> AnalyzeTrendsAsync(IEnumerable<CacheMetrics> metrics, MetricType metricType)
    {
        var sortedMetrics = metrics.OrderBy(m => m.Timestamp).ToList();
        var values = ExtractValues(sortedMetrics, metricType);
        return new TrendAnalysis
        {
            MetricType = metricType,
            TrendDirection = CalculateTrendDirection(values),
            ChangeRate = CalculateChangeRate(values),
            Volatility = CalculateVolatility(values),
            Seasonality = DetectSeasonality(values),
            Anomalies = DetectAnomalies(values),
            Forecast = GenerateForecast(values),
            Correlation = await AnalyzeCorrelations(sortedMetrics, metricType)
        };
    }

    // Classifies the series' slope via its correlation with time.
    // Fewer than 2 points is reported as Stable.
    private TrendDirection CalculateTrendDirection(IList<double> values)
    {
        if (values.Count < 2) return TrendDirection.Stable;
        var correlationCoefficient = CalculateCorrelationWithTime(values);
        if (correlationCoefficient > 0.7) return TrendDirection.Increasing;
        if (correlationCoefficient < -0.7) return TrendDirection.Decreasing;

        // No strong long-term trend: re-check the last 10 points with a looser
        // threshold to catch a trend that has only just started.
        var recentValues = values.Skip(Math.Max(0, values.Count - 10)).ToList();
        var recentCorrelation = CalculateCorrelationWithTime(recentValues);
        if (recentCorrelation > 0.5) return TrendDirection.RecentlyIncreasing;
        if (recentCorrelation < -0.5) return TrendDirection.RecentlyDecreasing;
        return TrendDirection.Stable;
    }

    // Percentage change from the first to the last observation (0 when the
    // first value is 0 to avoid division by zero).
    private double CalculateChangeRate(IList<double> values)
    {
        if (values.Count < 2) return 0;
        var firstValue = values[0];
        var lastValue = values[^1];
        return firstValue != 0 ? (lastValue - firstValue) / firstValue * 100 : 0;
    }

    // Volatility is the population standard deviation of the series.
    private double CalculateVolatility(IList<double> values)
        => values.Count < 2 ? 0 : StandardDeviation(values, values.Average());

    // Population standard deviation around a precomputed mean. Shared by
    // CalculateVolatility and DetectAnomalies, which previously duplicated it.
    private static double StandardDeviation(IList<double> values, double mean)
        => Math.Sqrt(values.Select(v => Math.Pow(v - mean, 2)).Average());

    // Simplified seasonality detection assuming one sample per hour; a more
    // sophisticated algorithm (e.g. FFT) could be used in practice.
    private SeasonalityInfo DetectSeasonality(IList<double> values)
    {
        if (values.Count < 24) return new SeasonalityInfo { HasSeasonality = false };
        var hourlyPatterns = new Dictionary<int, List<double>>();
        for (int i = 0; i < values.Count; i++)
        {
            var hour = i % 24;
            // Single lookup instead of ContainsKey + indexer.
            if (!hourlyPatterns.TryGetValue(hour, out var bucket))
            {
                bucket = new List<double>();
                hourlyPatterns[hour] = bucket;
            }
            bucket.Add(values[i]);
        }
        var hourlyAverages = hourlyPatterns.ToDictionary(
            kvp => kvp.Key,
            kvp => kvp.Value.Average()
        );
        var overallAverage = values.Average();
        var seasonalVariance = hourlyAverages.Values.Select(avg => Math.Pow(avg - overallAverage, 2)).Average();
        // NOTE(review): this compares a variance against 10% of the mean, which
        // mixes units (squared vs. linear); kept as-is to preserve behavior.
        var hasSeasonality = seasonalVariance > overallAverage * 0.1;
        return new SeasonalityInfo
        {
            HasSeasonality = hasSeasonality,
            Pattern = hasSeasonality ? "Hourly" : "None",
            // Peak/low hours deviate by more than 20% from the overall average.
            PeakHours = hourlyAverages.Where(kvp => kvp.Value > overallAverage * 1.2).Select(kvp => kvp.Key).ToList(),
            LowHours = hourlyAverages.Where(kvp => kvp.Value < overallAverage * 0.8).Select(kvp => kvp.Key).ToList()
        };
    }

    // Flags points further than 2.5 standard deviations from the series mean.
    // Requires at least 10 samples to have a meaningful baseline.
    private List<AnomalyDetection> DetectAnomalies(IList<double> values)
    {
        var anomalies = new List<AnomalyDetection>();
        if (values.Count < 10) return anomalies;
        var mean = values.Average();
        var stdDev = StandardDeviation(values, mean);
        var threshold = 2.5 * stdDev;
        for (int i = 0; i < values.Count; i++)
        {
            var deviation = Math.Abs(values[i] - mean);
            if (deviation > threshold)
            {
                anomalies.Add(new AnomalyDetection
                {
                    Index = i,
                    Value = values[i],
                    ExpectedValue = mean,
                    Deviation = deviation,
                    Severity = CalculateAnomalySeverity(values[i], mean, stdDev)
                });
            }
        }
        return anomalies;
    }

    // Forecasts the next data point via ordinary least-squares linear regression
    // over indices 0..n-1. Needs at least 5 samples.
    private ForecastInfo GenerateForecast(IList<double> values)
    {
        if (values.Count < 5) return new ForecastInfo { CanForecast = false };
        var n = values.Count;
        // FIX: sumX and sumX2 were computed in int arithmetic, which overflows
        // for long series (sum of i*i exceeds int.MaxValue around n = 1862).
        // Accumulate everything as double instead.
        double sumX = Enumerable.Range(0, n).Sum(i => (double)i);
        double sumY = values.Sum();
        double sumXY = Enumerable.Range(0, n).Sum(i => i * values[i]);
        double sumX2 = Enumerable.Range(0, n).Sum(i => (double)i * i);
        var denominator = n * sumX2 - sumX * sumX;
        // Degenerate x-spread cannot occur for n >= 5, but guard defensively.
        if (denominator == 0) return new ForecastInfo { CanForecast = false };
        var slope = (n * sumXY - sumX * sumY) / denominator;
        var intercept = (sumY - slope * sumX) / n;
        var nextValue = slope * n + intercept;
        var confidence = CalculateForecastConfidence(values, slope, intercept);
        return new ForecastInfo
        {
            CanForecast = true,
            NextValue = nextValue,
            Confidence = confidence,
            Method = "Linear Regression",
            TimeHorizon = TimeSpan.FromMinutes(30) // next data point
        };
    }
}
/// <summary>
/// Result of analyzing one metric series: direction, change rate, volatility,
/// seasonality, anomalies, forecast and cross-metric correlations.
/// </summary>
public class TrendAnalysis
{
// Which metric this analysis describes.
public MetricType MetricType { get; set; }
public TrendDirection TrendDirection { get; set; }
// Percent change from the first to the last observation.
public double ChangeRate { get; set; }
// Standard deviation of the series (see CalculateVolatility).
public double Volatility { get; set; }
public SeasonalityInfo Seasonality { get; set; }
// Points deviating more than 2.5 standard deviations from the mean.
public List<AnomalyDetection> Anomalies { get; set; }
// Linear-regression forecast of the next data point.
public ForecastInfo Forecast { get; set; }
// Correlation of this metric against other metric types (see AnalyzeCorrelations).
public Dictionary<MetricType, double> Correlation { get; set; }
}
/// <summary>Slope classification of a metric series over time.</summary>
public enum TrendDirection
{
// Strong positive correlation with time (coefficient > 0.7).
Increasing,
// Strong negative correlation with time (coefficient < -0.7).
Decreasing,
// No significant long-term or recent trend.
Stable,
// Upward trend only in the most recent ~10 samples.
RecentlyIncreasing,
// Downward trend only in the most recent ~10 samples.
RecentlyDecreasing,
// High variance without a clear direction. NOTE(review): never produced by
// CalculateTrendDirection in this file — confirm it is assigned elsewhere.
Volatile
}
Cache Efficiency Analysis
Analyze cache effectiveness and identify opportunities for improvement.
Efficiency Analyzer
/// <summary>
/// Scores overall cache efficiency (hit rate, response time, memory, key usage)
/// against fixed targets and surfaces prioritized improvement opportunities
/// together with an ROI analysis.
/// </summary>
public class CacheEfficiencyAnalyzer : ICacheEfficiencyAnalyzer
{
    // FIX: _usageAnalyzer and _metrics were referenced throughout the class but
    // never declared, which does not compile. Declare and inject them.
    private readonly ICacheUsageAnalyzer _usageAnalyzer;
    private readonly ICacheMetricsCollector _metrics;

    public CacheEfficiencyAnalyzer(ICacheUsageAnalyzer usageAnalyzer, ICacheMetricsCollector metrics)
    {
        _usageAnalyzer = usageAnalyzer ?? throw new ArgumentNullException(nameof(usageAnalyzer));
        _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
    }

    /// <summary>
    /// Builds the efficiency report for the window of length <paramref name="period"/>.
    /// Targets: hit rate >= 80%, P95 <= 50ms, memory utilization <= 70%.
    /// </summary>
    public async Task<EfficiencyReport> AnalyzeEfficiencyAsync(TimeSpan period)
    {
        var usage = await _usageAnalyzer.AnalyzeUsageAsync(period);
        var keyAnalysis = await AnalyzeKeyEfficiency(period);
        var resourceAnalysis = await AnalyzeResourceEfficiency(period);
        return new EfficiencyReport
        {
            Period = period,
            OverallEfficiencyScore = CalculateOverallEfficiency(usage, keyAnalysis, resourceAnalysis),
            // Higher hit rate is better: Good >= 80%, Fair >= 60%.
            HitRateEfficiency = new EfficiencyMetric
            {
                Score = CalculateHitRateScore(usage.OverallHitRate),
                Current = usage.OverallHitRate,
                Target = 80.0,
                Status = usage.OverallHitRate >= 80 ? EfficiencyStatus.Good :
                         usage.OverallHitRate >= 60 ? EfficiencyStatus.Fair : EfficiencyStatus.Poor
            },
            // Lower response time is better: Good <= 50ms, Fair <= 100ms.
            ResponseTimeEfficiency = new EfficiencyMetric
            {
                Score = CalculateResponseTimeScore(usage.P95ResponseTime),
                Current = usage.P95ResponseTime,
                Target = 50.0,
                Status = usage.P95ResponseTime <= 50 ? EfficiencyStatus.Good :
                         usage.P95ResponseTime <= 100 ? EfficiencyStatus.Fair : EfficiencyStatus.Poor
            },
            // Lower memory utilization is better: Good <= 70%, Fair <= 85%.
            MemoryEfficiency = new EfficiencyMetric
            {
                Score = CalculateMemoryScore(resourceAnalysis.MemoryUtilization),
                Current = resourceAnalysis.MemoryUtilization,
                Target = 70.0,
                Status = resourceAnalysis.MemoryUtilization <= 70 ? EfficiencyStatus.Good :
                         resourceAnalysis.MemoryUtilization <= 85 ? EfficiencyStatus.Fair : EfficiencyStatus.Poor
            },
            KeyEfficiency = keyAnalysis,
            ResourceEfficiency = resourceAnalysis,
            ImprovementOpportunities = await IdentifyImprovementOpportunities(usage, keyAnalysis, resourceAnalysis),
            ROIAnalysis = await PerformROIAnalysis(usage, keyAnalysis)
        };
    }

    // Classifies keys by value (hit rate x traffic) and measures how much of
    // the key space is actually used.
    private async Task<KeyEfficiencyAnalysis> AnalyzeKeyEfficiency(TimeSpan period)
    {
        var keyStats = await _metrics.GetKeyStatsAsync(period);
        return new KeyEfficiencyAnalysis
        {
            TotalKeys = keyStats.Count,
            ActiveKeys = keyStats.Count(k => k.AccessCount > 0),
            UnusedKeys = keyStats.Count(k => k.AccessCount == 0),
            // Percentage of keys that saw at least one access.
            KeyUtilizationRate = keyStats.Count > 0 ? (double)keyStats.Count(k => k.AccessCount > 0) / keyStats.Count * 100 : 0,
            // Top 20 hot, well-hitting keys (hit rate > 80%, > 100 accesses).
            HighValueKeys = keyStats
                .Where(k => k.HitRate > 80 && k.AccessCount > 100)
                .OrderByDescending(k => k.AccessCount)
                .Take(20)
                .ToList(),
            // Bottom 20 keys: rarely hit or rarely accessed.
            LowValueKeys = keyStats
                .Where(k => k.HitRate < 20 || k.AccessCount < 10)
                .OrderBy(k => k.HitRate)
                .Take(20)
                .ToList(),
            KeySizeDistribution = AnalyzeKeySizeDistribution(keyStats),
            ExpirationPatterns = AnalyzeExpirationPatterns(keyStats)
        };
    }

    // Builds improvement opportunities sorted by estimated ROI (highest first).
    // FIX: was `async` with no awaits (CS1998); returns a completed task instead.
    private Task<List<ImprovementOpportunity>> IdentifyImprovementOpportunities(
        CacheUsageReport usage,
        KeyEfficiencyAnalysis keyAnalysis,
        ResourceEfficiencyAnalysis resourceAnalysis)
    {
        var opportunities = new List<ImprovementOpportunity>();

        // Hit rate improvement: below the 80% target.
        if (usage.OverallHitRate < 80)
        {
            opportunities.Add(new ImprovementOpportunity
            {
                Type = ImprovementType.HitRate,
                Title = "Improve Cache Hit Rate",
                Description = $"Hit rate is {usage.OverallHitRate:F1}%, target is 80%+",
                PotentialImpact = CalculateHitRateImpact(usage.OverallHitRate, 80),
                Effort = EstimateEffort(ImprovementType.HitRate),
                Actions = new[]
                {
                    "Increase expiration times for stable data",
                    "Implement cache warming strategies",
                    "Review and optimize cache key patterns",
                    "Add pre-emptive cache refresh"
                },
                ROI = CalculateROI(ImprovementType.HitRate, usage)
            });
        }

        // Memory optimization: utilization beyond the 85% "Fair" ceiling.
        if (resourceAnalysis.MemoryUtilization > 85)
        {
            opportunities.Add(new ImprovementOpportunity
            {
                Type = ImprovementType.Memory,
                Title = "Optimize Memory Usage",
                Description = $"Memory utilization is {resourceAnalysis.MemoryUtilization:F1}%",
                PotentialImpact = CalculateMemoryImpact(resourceAnalysis),
                Effort = EstimateEffort(ImprovementType.Memory),
                Actions = new[]
                {
                    "Enable aggressive memory management",
                    "Implement cache size limits",
                    "Remove unused cache keys",
                    "Optimize object serialization"
                },
                ROI = CalculateROI(ImprovementType.Memory, usage)
            });
        }

        // Key optimization: less than 60% of keys ever used.
        if (keyAnalysis.KeyUtilizationRate < 60)
        {
            opportunities.Add(new ImprovementOpportunity
            {
                Type = ImprovementType.KeyOptimization,
                Title = "Optimize Cache Keys",
                Description = $"Only {keyAnalysis.KeyUtilizationRate:F1}% of keys are being used",
                PotentialImpact = CalculateKeyOptimizationImpact(keyAnalysis),
                Effort = EstimateEffort(ImprovementType.KeyOptimization),
                Actions = new[]
                {
                    "Remove unused cache keys",
                    "Consolidate similar cache patterns",
                    "Implement key lifecycle management",
                    "Review cache key naming conventions"
                },
                ROI = CalculateROI(ImprovementType.KeyOptimization, usage)
            });
        }
        return Task.FromResult(opportunities.OrderByDescending(o => o.ROI).ToList());
    }

    // Estimates current monthly cost, potential savings and payback period.
    // FIX: was `async` with no awaits (CS1998); returns a completed task instead.
    private Task<ROIAnalysis> PerformROIAnalysis(CacheUsageReport usage, KeyEfficiencyAnalysis keyAnalysis)
    {
        var currentCost = CalculateCurrentCost(usage);
        var potentialSavings = CalculatePotentialSavings(usage, keyAnalysis);
        return Task.FromResult(new ROIAnalysis
        {
            CurrentMonthlyCost = currentCost,
            PotentialMonthlySavings = potentialSavings,
            ImplementationCost = EstimateImplementationCost(usage),
            // No savings means the investment never pays back.
            PaybackPeriodMonths = potentialSavings > 0 ? EstimateImplementationCost(usage) / potentialSavings : double.MaxValue,
            CostBreakdown = new CostBreakdown
            {
                ComputeCost = CalculateComputeCost(usage),
                MemoryCost = CalculateMemoryCost(usage),
                NetworkCost = CalculateNetworkCost(usage),
                StorageCost = CalculateStorageCost(usage)
            },
            BenefitBreakdown = new BenefitBreakdown
            {
                PerformanceImprovement = CalculatePerformanceBenefit(usage),
                ResourceSavings = CalculateResourceSavings(usage),
                // NOTE(review): property is named ScalabilityCost but is filled
                // from CalculateScalabilityBenefit — confirm which name is intended.
                ScalabilityCost = CalculateScalabilityBenefit(usage),
                UserExperienceValue = CalculateUXBenefit(usage)
            }
        });
    }
}
/// <summary>
/// Cache efficiency assessment for one reporting window, combining scored
/// metrics with improvement opportunities and an ROI analysis.
/// </summary>
public class EfficiencyReport
{
// Length of the analyzed window.
public TimeSpan Period { get; set; }
// Composite score computed by CalculateOverallEfficiency (scale defined there).
public double OverallEfficiencyScore { get; set; }
// Per-dimension scores with Current/Target values and a Good/Fair/Poor status.
public EfficiencyMetric HitRateEfficiency { get; set; }
public EfficiencyMetric ResponseTimeEfficiency { get; set; }
public EfficiencyMetric MemoryEfficiency { get; set; }
public KeyEfficiencyAnalysis KeyEfficiency { get; set; }
public ResourceEfficiencyAnalysis ResourceEfficiency { get; set; }
// Opportunities ordered by descending estimated ROI.
public List<ImprovementOpportunity> ImprovementOpportunities { get; set; }
public ROIAnalysis ROIAnalysis { get; set; }
}
Business Impact Analysis
Correlate cache performance with business metrics.
Business Impact Analyzer
/// <summary>
/// Correlates cache performance metrics with business metrics (conversion,
/// bounce rate, revenue) and generates impact predictions.
/// </summary>
public class BusinessImpactAnalyzer : IBusinessImpactAnalyzer
{
    // FIX: both services were referenced but never declared in the class,
    // which does not compile. Declare and inject them.
    private readonly ICacheMetricsCollector _metricsCollector;
    private readonly IBusinessMetricsService _businessMetricsService;

    public BusinessImpactAnalyzer(
        ICacheMetricsCollector metricsCollector,
        IBusinessMetricsService businessMetricsService)
    {
        _metricsCollector = metricsCollector ?? throw new ArgumentNullException(nameof(metricsCollector));
        _businessMetricsService = businessMetricsService ?? throw new ArgumentNullException(nameof(businessMetricsService));
    }

    /// <summary>
    /// Builds the business-impact report for the window ending now (UTC).
    /// </summary>
    public async Task<BusinessImpactReport> AnalyzeBusinessImpactAsync(TimeSpan period)
    {
        var endTime = DateTimeOffset.UtcNow;
        // FIX: materialize once — the sequence was previously enumerated several
        // times (two Selects plus Last()), re-running the query each time if the
        // collector returns a lazy IEnumerable.
        var cacheMetrics = (await _metricsCollector.GetMetricsHistoryAsync(endTime - period, endTime)).ToList();
        var businessMetrics = await _businessMetricsService.GetMetricsAsync(period);
        return new BusinessImpactReport
        {
            Period = period,
            UserExperience = new UserExperienceImpact
            {
                AveragePageLoadTime = CalculatePageLoadTime(cacheMetrics),
                PageLoadTimeImprovement = CalculatePageLoadImprovement(cacheMetrics),
                UserSatisfactionScore = await CalculateUserSatisfaction(businessMetrics),
                BounceRateImpact = CalculateBounceRateImpact(cacheMetrics, businessMetrics)
            },
            Performance = new PerformanceImpact
            {
                ResponseTimeReduction = CalculateResponseTimeReduction(cacheMetrics),
                ThroughputImprovement = CalculateThroughputImprovement(cacheMetrics),
                ErrorRateReduction = CalculateErrorRateReduction(cacheMetrics),
                AvailabilityImprovement = CalculateAvailabilityImprovement(cacheMetrics)
            },
            Cost = new CostImpact
            {
                DatabaseLoadReduction = CalculateDatabaseLoadReduction(cacheMetrics),
                InfrastructureCostSavings = CalculateInfrastructureSavings(cacheMetrics),
                BandwidthSavings = CalculateBandwidthSavings(cacheMetrics),
                DeveloperProductivityGain = CalculateProductivityGain(cacheMetrics)
            },
            Revenue = new RevenueImpact
            {
                ConversionRateImpact = await CalculateConversionImpact(businessMetrics),
                CustomerRetentionImpact = await CalculateRetentionImpact(businessMetrics),
                AverageOrderValueImpact = await CalculateAOVImpact(businessMetrics),
                CustomerLifetimeValueImpact = await CalculateLTVImpact(businessMetrics)
            },
            Correlations = await AnalyzeCorrelations(cacheMetrics, businessMetrics),
            Predictions = await GenerateBusinessPredictions(cacheMetrics, businessMetrics)
        };
    }

    // Computes cache-metric vs. business-metric correlations.
    // FIX: was `async` with no awaits (CS1998); returns a completed task instead.
    private Task<List<MetricCorrelation>> AnalyzeCorrelations(
        IEnumerable<CacheMetrics> cacheMetrics,
        BusinessMetrics businessMetrics)
    {
        var correlations = new List<MetricCorrelation>();

        // Correlation between hit rate and conversion rate.
        var hitRateConversionCorrelation = CalculateCorrelation(
            cacheMetrics.Select(m => m.HitRate),
            businessMetrics.ConversionRates);
        correlations.Add(new MetricCorrelation
        {
            CacheMetric = "Hit Rate",
            BusinessMetric = "Conversion Rate",
            CorrelationCoefficient = hitRateConversionCorrelation,
            Strength = GetCorrelationStrength(hitRateConversionCorrelation),
            Description = GenerateCorrelationDescription("Hit Rate", "Conversion Rate", hitRateConversionCorrelation)
        });

        // Correlation between response time and bounce rate.
        var responseTimeBounceCorrelation = CalculateCorrelation(
            cacheMetrics.Select(m => m.P95ResponseTime),
            businessMetrics.BounceRates);
        correlations.Add(new MetricCorrelation
        {
            CacheMetric = "Response Time",
            BusinessMetric = "Bounce Rate",
            CorrelationCoefficient = responseTimeBounceCorrelation,
            Strength = GetCorrelationStrength(responseTimeBounceCorrelation),
            Description = GenerateCorrelationDescription("Response Time", "Bounce Rate", responseTimeBounceCorrelation)
        });
        return Task.FromResult(correlations);
    }

    // Predicts the business impact of raising the hit rate by up to 10 points
    // (capped at 95%). FIX: was `async` with no awaits (CS1998).
    private Task<List<BusinessPrediction>> GenerateBusinessPredictions(
        IEnumerable<CacheMetrics> cacheMetrics,
        BusinessMetrics businessMetrics)
    {
        var predictions = new List<BusinessPrediction>();
        // NOTE(review): Last() assumes the history is ordered oldest-first and
        // non-empty — confirm against the collector's contract.
        var currentHitRate = cacheMetrics.Last().HitRate;
        var targetHitRate = Math.Min(95, currentHitRate + 10);
        predictions.Add(new BusinessPrediction
        {
            Scenario = $"Improve hit rate from {currentHitRate:F1}% to {targetHitRate:F1}%",
            PredictedImpact = new PredictedImpact
            {
                ConversionRateChange = PredictConversionRateChange(currentHitRate, targetHitRate),
                RevenueImpact = PredictRevenueImpact(currentHitRate, targetHitRate, businessMetrics),
                CostSavings = PredictCostSavings(currentHitRate, targetHitRate),
                UserExperienceScore = PredictUXScore(currentHitRate, targetHitRate)
            },
            Confidence = CalculatePredictionConfidence("HitRate", cacheMetrics),
            Timeframe = TimeSpan.FromDays(30)
        });
        return Task.FromResult(predictions);
    }
}
/// <summary>
/// Business-impact assessment for one reporting window, produced by
/// BusinessImpactAnalyzer.AnalyzeBusinessImpactAsync.
/// </summary>
public class BusinessImpactReport
{
// Length of the analyzed window.
public TimeSpan Period { get; set; }
// Impact broken down by area: UX, performance, cost, revenue.
public UserExperienceImpact UserExperience { get; set; }
public PerformanceImpact Performance { get; set; }
public CostImpact Cost { get; set; }
public RevenueImpact Revenue { get; set; }
// Cache-metric vs. business-metric correlations (e.g. hit rate vs. conversion).
public List<MetricCorrelation> Correlations { get; set; }
// What-if predictions (e.g. impact of a 10-point hit-rate improvement).
public List<BusinessPrediction> Predictions { get; set; }
}
/// <summary>
/// Correlation between one cache metric and one business metric.
/// </summary>
public class MetricCorrelation
{
// Display names of the correlated metrics (e.g. "Hit Rate", "Conversion Rate").
public string CacheMetric { get; set; }
public string BusinessMetric { get; set; }
// Coefficient computed by CalculateCorrelation (presumably in [-1, 1] — TODO confirm).
public double CorrelationCoefficient { get; set; }
// Qualitative bucket derived from the coefficient (see GetCorrelationStrength).
public CorrelationStrength Strength { get; set; }
// Human-readable explanation of the relationship.
public string Description { get; set; }
}
/// <summary>
/// Qualitative strength of a metric correlation, ordered from weakest to
/// strongest (thresholds defined by GetCorrelationStrength).
/// </summary>
public enum CorrelationStrength
{
None,
Weak,
Moderate,
Strong,
VeryStrong
}
Analytics Dashboard API
Provide comprehensive analytics data through REST APIs.
Analytics API Controller
/// <summary>
/// REST API exposing cache analytics: usage, trends, efficiency,
/// business impact, recommendations and export.
/// </summary>
[ApiController]
[Route("api/cache/analytics")]
public class CacheAnalyticsController : ControllerBase
{
    private readonly ICacheUsageAnalyzer _usageAnalyzer;
    private readonly IPerformanceTrendAnalyzer _trendAnalyzer;
    private readonly ICacheEfficiencyAnalyzer _efficiencyAnalyzer;
    private readonly IBusinessImpactAnalyzer _businessImpactAnalyzer;
    // FIX: used by GetTrendAnalytics but was never declared.
    private readonly ICacheMetricsCollector _metricsCollector;

    // FIX: the readonly fields were never assigned (no constructor existed);
    // inject all dependencies.
    public CacheAnalyticsController(
        ICacheUsageAnalyzer usageAnalyzer,
        IPerformanceTrendAnalyzer trendAnalyzer,
        ICacheEfficiencyAnalyzer efficiencyAnalyzer,
        IBusinessImpactAnalyzer businessImpactAnalyzer,
        ICacheMetricsCollector metricsCollector)
    {
        _usageAnalyzer = usageAnalyzer;
        _trendAnalyzer = trendAnalyzer;
        _efficiencyAnalyzer = efficiencyAnalyzer;
        _businessImpactAnalyzer = businessImpactAnalyzer;
        _metricsCollector = metricsCollector;
    }

    // Rejects non-positive windows so analyzers never receive an empty or
    // inverted time range.
    private static bool TryGetPeriod(int hours, out TimeSpan period)
    {
        period = TimeSpan.FromHours(hours);
        return hours > 0;
    }

    /// <summary>GET usage analytics for the last <paramref name="hours"/> hours.</summary>
    [HttpGet("usage")]
    public async Task<ActionResult<CacheUsageReport>> GetUsageAnalytics(
        [FromQuery] int hours = 24)
    {
        if (!TryGetPeriod(hours, out var period))
            return BadRequest("hours must be a positive integer");
        var report = await _usageAnalyzer.AnalyzeUsageAsync(period);
        return Ok(report);
    }

    /// <summary>GET trend analyses per metric for the last <paramref name="hours"/> hours.</summary>
    [HttpGet("trends")]
    public async Task<ActionResult<Dictionary<string, TrendAnalysis>>> GetTrendAnalytics(
        [FromQuery] int hours = 24)
    {
        if (!TryGetPeriod(hours, out var period))
            return BadRequest("hours must be a positive integer");
        var metrics = await _metricsCollector.GetMetricsHistoryAsync(
            DateTimeOffset.UtcNow - period, DateTimeOffset.UtcNow);
        var trends = new Dictionary<string, TrendAnalysis>
        {
            ["HitRate"] = await _trendAnalyzer.AnalyzeTrendsAsync(metrics, MetricType.HitRate),
            ["ResponseTime"] = await _trendAnalyzer.AnalyzeTrendsAsync(metrics, MetricType.ResponseTime),
            ["MemoryUsage"] = await _trendAnalyzer.AnalyzeTrendsAsync(metrics, MetricType.MemoryUsage),
            ["Throughput"] = await _trendAnalyzer.AnalyzeTrendsAsync(metrics, MetricType.Throughput)
        };
        return Ok(trends);
    }

    /// <summary>GET efficiency report for the last <paramref name="hours"/> hours.</summary>
    [HttpGet("efficiency")]
    public async Task<ActionResult<EfficiencyReport>> GetEfficiencyAnalytics(
        [FromQuery] int hours = 24)
    {
        if (!TryGetPeriod(hours, out var period))
            return BadRequest("hours must be a positive integer");
        var report = await _efficiencyAnalyzer.AnalyzeEfficiencyAsync(period);
        return Ok(report);
    }

    /// <summary>GET business-impact report for the last <paramref name="hours"/> hours.</summary>
    [HttpGet("business-impact")]
    public async Task<ActionResult<BusinessImpactReport>> GetBusinessImpactAnalytics(
        [FromQuery] int hours = 24)
    {
        if (!TryGetPeriod(hours, out var period))
            return BadRequest("hours must be a positive integer");
        var report = await _businessImpactAnalyzer.AnalyzeBusinessImpactAsync(period);
        return Ok(report);
    }

    /// <summary>
    /// GET the top 10 recommendations (by impact) merged from the usage and
    /// efficiency analyses.
    /// </summary>
    [HttpGet("recommendations")]
    public async Task<ActionResult<List<CacheRecommendation>>> GetRecommendations(
        [FromQuery] int hours = 24)
    {
        if (!TryGetPeriod(hours, out var period))
            return BadRequest("hours must be a positive integer");
        var usage = await _usageAnalyzer.AnalyzeUsageAsync(period);
        var efficiency = await _efficiencyAnalyzer.AnalyzeEfficiencyAsync(period);
        var recommendations = new List<CacheRecommendation>();
        recommendations.AddRange(usage.Recommendations);
        // Map improvement opportunities into the common recommendation shape.
        recommendations.AddRange(efficiency.ImprovementOpportunities.Select(o => new CacheRecommendation
        {
            Type = MapImprovementTypeToRecommendationType(o.Type),
            Title = o.Title,
            Description = o.Description,
            Priority = MapEffortToPriority(o.Effort),
            Actions = o.Actions,
            Impact = o.PotentialImpact
        }));
        return Ok(recommendations.OrderByDescending(r => r.Impact).Take(10));
    }

    /// <summary>POST export of the selected analytics sections in the requested format.</summary>
    [HttpPost("export")]
    public async Task<IActionResult> ExportAnalytics(
        [FromBody] AnalyticsExportRequest request)
    {
        var data = await GatherAnalyticsData(request);
        return request.Format switch
        {
            ExportFormat.Json => Ok(data),
            ExportFormat.Csv => File(ConvertToCsv(data), "text/csv", "cache-analytics.csv"),
            ExportFormat.Excel => File(ConvertToExcel(data), "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", "cache-analytics.xlsx"),
            _ => BadRequest("Unsupported export format")
        };
    }
}
/// <summary>
/// Request body for POST api/cache/analytics/export.
/// </summary>
public class AnalyticsExportRequest
{
// Reporting window; defaults to the last 7 days.
public TimeSpan Period { get; set; } = TimeSpan.FromDays(7);
// Output format; defaults to JSON.
public ExportFormat Format { get; set; } = ExportFormat.Json;
// Sections to include; defaults to everything.
public List<AnalyticsSection> Sections { get; set; } = new() { AnalyticsSection.All };
}
/// <summary>
/// Supported output formats for analytics export (see ExportAnalytics:
/// JSON response body, CSV file, or Excel .xlsx file).
/// </summary>
public enum ExportFormat
{
Json,
Csv,
Excel
}
/// <summary>
/// Analytics sections selectable in an export request; All includes every section.
/// </summary>
public enum AnalyticsSection
{
All,
Usage,
Performance,
Efficiency,
BusinessImpact,
Recommendations
}
For detailed implementation and additional analytics features:
- Performance Metrics - Core metrics collection
- Real-time Dashboards - Live monitoring
- Production Tuning - Apply insights for optimization