38 KiB
38 KiB
Performance Patterns and Optimization - Centron Enterprise Application
Overview
This document provides comprehensive coverage of performance patterns and optimizations within the Centron .NET 8 enterprise application. Performance patterns encompass caching strategies, query optimization, lazy loading, asynchronous programming, resource management, and monitoring techniques that ensure optimal application performance under various load conditions.
Performance Architecture
Performance Design Principles
The Centron application implements performance optimization through multiple strategies:
- Caching: Multi-level caching from database to UI components
- Asynchronous Processing: Non-blocking I/O operations and background tasks
- Query Optimization: Efficient database queries with proper indexing
- Resource Management: Proper disposal and pooling of expensive resources
- Lazy Loading: On-demand data loading to reduce initial load times
- Background Processing: Offload heavy operations to background services
Caching Patterns
1. Multi-Level Caching Strategy
// Abstraction over the application's cache store. All operations report
// failures through the project's Result wrapper instead of throwing.
public interface ICacheService
{
// Gets a cached value; an error Result signals a miss or a cache failure.
Task<Result<T>> GetAsync<T>(string key) where T : class;
// Stores a value; a null expiry falls back to the configured default.
Task<Result> SetAsync<T>(string key, T value, TimeSpan? expiry = null) where T : class;
// Removes a single entry by exact key.
Task<Result> RemoveAsync(string key);
// Removes every entry whose key matches the pattern (e.g. "accounts:list:*").
Task<Result> RemoveByPatternAsync(string pattern);
// Empties the cache entirely.
Task<Result> ClearAsync();
}
/// <summary>
/// Two-level cache: an in-process memory cache (L1) in front of a
/// distributed cache (L2, e.g. Redis or SQL Server). Reads promote L2 hits
/// into L1; writes populate both levels.
/// </summary>
public class HybridCacheService : ICacheService
{
    private readonly IMemoryCache _memoryCache;
    private readonly IDistributedCache _distributedCache;
    private readonly ILogger<HybridCacheService> _logger;
    private readonly CacheConfiguration _config;

    public HybridCacheService(
        IMemoryCache memoryCache,
        IDistributedCache distributedCache,
        ILogger<HybridCacheService> logger,
        CacheConfiguration config)
    {
        _memoryCache = memoryCache;
        _distributedCache = distributedCache;
        // FIX: _logger was referenced in both catch blocks but never injected,
        // so any cache failure crashed with a NullReferenceException instead
        // of being logged and reported as an error Result.
        _logger = logger;
        _config = config;
    }

    /// <summary>
    /// Looks the key up in L1 first, then L2. An L2 hit is re-cached in L1
    /// for faster subsequent access. Returns an error Result on a miss.
    /// </summary>
    public async Task<Result<T>> GetAsync<T>(string key) where T : class
    {
        try
        {
            // Level 1: Memory Cache (fastest)
            if (_memoryCache.TryGetValue(key, out T cachedValue))
            {
                return Result<T>.AsSuccess(cachedValue);
            }
            // Level 2: Distributed Cache (Redis/SQL Server)
            var distributedValue = await _distributedCache.GetStringAsync(key);
            if (!string.IsNullOrEmpty(distributedValue))
            {
                var deserializedValue = JsonSerializer.Deserialize<T>(distributedValue);
                // FIX: Deserialize can return null (e.g. the literal "null" was
                // stored); treat that as a miss instead of promoting a null entry.
                if (deserializedValue != null)
                {
                    // Promote into memory cache for faster subsequent access.
                    var memoryExpiry = TimeSpan.FromMinutes(_config.MemoryCacheMinutes);
                    _memoryCache.Set(key, deserializedValue, memoryExpiry);
                    return Result<T>.AsSuccess(deserializedValue);
                }
            }
            return Result<T>.AsError("Cache miss");
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Cache get operation failed for key: {Key}", key);
            return Result<T>.AsError($"Cache operation failed: {ex.Message}");
        }
    }

    /// <summary>
    /// Writes the value to both cache levels. The distributed entry uses
    /// <paramref name="expiry"/> (or the configured default); the memory
    /// entry always uses the configured memory expiry.
    /// </summary>
    public async Task<Result> SetAsync<T>(string key, T value, TimeSpan? expiry = null) where T : class
    {
        try
        {
            var cacheExpiry = expiry ?? TimeSpan.FromMinutes(_config.DefaultExpiryMinutes);
            // Set in memory cache
            _memoryCache.Set(key, value, TimeSpan.FromMinutes(_config.MemoryCacheMinutes));
            // Set in distributed cache
            var serializedValue = JsonSerializer.Serialize(value);
            var options = new DistributedCacheEntryOptions
            {
                AbsoluteExpirationRelativeToNow = cacheExpiry
            };
            await _distributedCache.SetStringAsync(key, serializedValue, options);
            return Result.AsSuccess();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Cache set operation failed for key: {Key}", key);
            return Result.AsError($"Cache operation failed: {ex.Message}");
        }
    }
}
// Cache-Aside Pattern Implementation
/// <summary>
/// Cache-aside decorator around an IAccountRepository: reads consult the
/// cache first; writes refresh the account's entry and invalidate related
/// cached aggregates.
/// </summary>
public class CachedAccountRepository : IAccountRepository
{
    private readonly IAccountRepository _inner;
    private readonly ICacheService _cache;
    private readonly TimeSpan _cacheExpiry = TimeSpan.FromMinutes(15);

    public CachedAccountRepository(IAccountRepository baseRepository, ICacheService cacheService)
    {
        _inner = baseRepository;
        _cache = cacheService;
    }

    public async Task<Result<Account>> GetAccountByIdAsync(int accountId)
    {
        var cacheKey = $"account:{accountId}";

        // Fast path: serve straight from the cache.
        var cached = await _cache.GetAsync<Account>(cacheKey);
        if (cached.Status == ResultStatus.Success)
        {
            return cached;
        }

        // Slow path: load from the database and remember a successful result.
        var loaded = await _inner.GetAccountByIdAsync(accountId);
        if (loaded.Status == ResultStatus.Success)
        {
            await _cache.SetAsync(cacheKey, loaded.Data, _cacheExpiry);
        }
        return loaded;
    }

    public async Task<Result<Account>> SaveAccountAsync(Account account)
    {
        var saved = await _inner.SaveAccountAsync(account);
        if (saved.Status != ResultStatus.Success)
        {
            return saved;
        }

        // Write-through: refresh this account's own entry, then drop cached
        // aggregates that may embed a stale copy of it.
        await _cache.SetAsync($"account:{account.I3D}", saved.Data, _cacheExpiry);
        await InvalidateRelatedCacheAsync(account);
        return saved;
    }

    // Removes list/aggregate entries that can contain this account.
    private async Task InvalidateRelatedCacheAsync(Account account)
    {
        await _cache.RemoveByPatternAsync("accounts:list:*");
        await _cache.RemoveByPatternAsync($"customer:{account.CustomerNumber}:*");
    }
}
2. Application-Level Caching Patterns
/// <summary>
/// Serves whole reference tables from the cache, revalidating against the
/// table's last-modified timestamp and refreshing from the database when
/// the entry is stale or missing.
/// </summary>
public class CachedTableService
{
    private readonly ICacheService _cacheService;
    private readonly ICachedTableRepository _repository;

    // FIX: the dependencies were declared but never assigned; without this
    // constructor every call failed with a NullReferenceException.
    public CachedTableService(ICacheService cacheService, ICachedTableRepository repository)
    {
        _cacheService = cacheService;
        _repository = repository;
    }

    public async Task<Result<List<T>>> GetCachedDataAsync<T>(string tableName) where T : class
    {
        var cacheKey = $"cached_table:{tableName}";
        var cachedResult = await _cacheService.GetAsync<CachedTableData<T>>(cacheKey);
        if (cachedResult.Status == ResultStatus.Success)
        {
            var cachedData = cachedResult.Data;
            // Revalidate: the entry is only usable if the table has not been
            // modified since the entry was cached.
            var lastModified = await _repository.GetLastModifiedAsync(tableName);
            if (lastModified <= cachedData.CacheTime)
            {
                return Result<List<T>>.AsSuccess(cachedData.Data);
            }
        }
        // Cache is stale or missing - refresh from database
        return await RefreshCachedDataAsync<T>(tableName);
    }

    private async Task<Result<List<T>>> RefreshCachedDataAsync<T>(string tableName) where T : class
    {
        var dataResult = await _repository.GetDataAsync<T>(tableName);
        if (dataResult.Status != ResultStatus.Success)
            return dataResult;
        var cachedData = new CachedTableData<T>
        {
            Data = dataResult.Data,
            CacheTime = DateTime.UtcNow,
            TableName = tableName
        };
        // Cache with an expiry matched to how volatile the table is.
        var expiry = GetCacheExpiryForTable(tableName);
        await _cacheService.SetAsync($"cached_table:{tableName}", cachedData, expiry);
        return Result<List<T>>.AsSuccess(cachedData.Data);
    }

    private TimeSpan GetCacheExpiryForTable(string tableName)
    {
        // FIX: ToLowerInvariant instead of culture-sensitive ToLower - under
        // e.g. the Turkish culture "PRICES".ToLower() is "prıces" (dotless i),
        // which would silently fall through to the default expiry.
        return tableName.ToLowerInvariant() switch
        {
            "countries" => TimeSpan.FromHours(24), // Static reference data
            "currencies" => TimeSpan.FromHours(12),
            "products" => TimeSpan.FromMinutes(30), // More dynamic data
            "prices" => TimeSpan.FromMinutes(5), // Frequently changing
            _ => TimeSpan.FromHours(1) // Default
        };
    }
}
// Background cache warming service
/// <summary>
/// Periodically pre-loads frequently used reference tables so the first
/// real request after a deploy or cache flush does not pay the load cost.
/// </summary>
public class CacheWarmupService : BackgroundService
{
    private readonly ICachedTableService _cachedTableService;
    private readonly ILogger<CacheWarmupService> _logger;

    // FIX: the fields were declared but never assigned; without this
    // constructor the first execution crashed with a NullReferenceException.
    public CacheWarmupService(ICachedTableService cachedTableService, ILogger<CacheWarmupService> logger)
    {
        _cachedTableService = cachedTableService;
        _logger = logger;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await WarmupCriticalCaches();
                // Normal cadence: every 30 minutes.
                await Task.Delay(TimeSpan.FromMinutes(30), stoppingToken);
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error during cache warmup");
                // Back off briefly after a failure before retrying.
                await Task.Delay(TimeSpan.FromMinutes(5), stoppingToken);
            }
        }
    }

    // Warms all critical tables concurrently; a failure on one table is
    // logged but does not stop the others.
    private async Task WarmupCriticalCaches()
    {
        var criticalTables = new[] { "countries", "currencies", "taxrates", "paymentmethods" };
        var warmupTasks = criticalTables.Select(async table =>
        {
            try
            {
                await _cachedTableService.GetCachedDataAsync<object>(table);
                _logger.LogDebug("Warmed up cache for table: {Table}", table);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Failed to warm up cache for table: {Table}", table);
            }
        });
        await Task.WhenAll(warmupTasks);
    }
}
Asynchronous Programming Patterns
1. Async/Await Best Practices
/// <summary>
/// Business-logic examples of async composition: a simple awaited call,
/// a concurrent fan-out/join, and a throttled batch.
/// </summary>
public class OptimizedBusinessLogic : BaseBL
{
    /// <summary>Loads accounts matching the filter; failures become an error Result.</summary>
    public async Task<Result<List<Account>>> GetAccountsAsync(AccountFilter filter)
    {
        try
        {
            // ConfigureAwait(false) in library code avoids capturing the caller's context.
            var accounts = await _accountRepository
                .GetAccountsAsync(filter)
                .ConfigureAwait(false);
            return Result<List<Account>>.AsSuccess(accounts);
        }
        catch (Exception ex)
        {
            return Result<List<Account>>.AsError($"Failed to get accounts: {ex.Message}");
        }
    }

    /// <summary>
    /// Builds an account summary by starting the four independent lookups
    /// concurrently and joining on all of them.
    /// </summary>
    public async Task<Result<AccountSummary>> GetAccountSummaryAsync(int accountId)
    {
        try
        {
            // Start all async operations concurrently.
            var accountTask = _accountRepository.GetAccountByIdAsync(accountId);
            var contractsTask = _contractRepository.GetContractsByAccountAsync(accountId);
            var invoicesTask = _invoiceRepository.GetRecentInvoicesAsync(accountId);
            var paymentsTask = _paymentRepository.GetRecentPaymentsAsync(accountId);
            await Task.WhenAll(accountTask, contractsTask, invoicesTask, paymentsTask)
                .ConfigureAwait(false);
            // The tasks are already complete here; these awaits just unwrap results.
            var account = await accountTask.ConfigureAwait(false);
            var contracts = await contractsTask.ConfigureAwait(false);
            var invoices = await invoicesTask.ConfigureAwait(false);
            var payments = await paymentsTask.ConfigureAwait(false);
            if (account.Status != ResultStatus.Success)
                return Result<AccountSummary>.FromResult(account);
            var summary = new AccountSummary
            {
                Account = account.Data,
                ActiveContracts = contracts.Data?.Where(c => c.IsActive).ToList() ?? new List<Contract>(),
                RecentInvoices = invoices.Data ?? new List<Invoice>(),
                RecentPayments = payments.Data ?? new List<Payment>(),
                TotalOutstanding = CalculateOutstanding(invoices.Data, payments.Data)
            };
            return Result<AccountSummary>.AsSuccess(summary);
        }
        catch (Exception ex)
        {
            return Result<AccountSummary>.AsError($"Failed to get account summary: {ex.Message}");
        }
    }

    /// <summary>
    /// Processes accounts with at most <paramref name="maxConcurrency"/> in
    /// flight at once; per-account failures are collected, not fatal.
    /// </summary>
    public async Task<Result<BatchProcessResult>> ProcessAccountsBatchAsync(List<int> accountIds, int maxConcurrency = 10)
    {
        // FIX: the SemaphoreSlim was never disposed; 'using' releases its
        // wait handle. Safe because all workers finish before the method returns.
        using var semaphore = new SemaphoreSlim(maxConcurrency, maxConcurrency);
        var results = new ConcurrentBag<ProcessResult>();
        var errors = new ConcurrentBag<string>();
        var tasks = accountIds.Select(async accountId =>
        {
            await semaphore.WaitAsync().ConfigureAwait(false);
            try
            {
                var result = await ProcessSingleAccountAsync(accountId).ConfigureAwait(false);
                results.Add(result);
            }
            catch (Exception ex)
            {
                errors.Add($"Account {accountId}: {ex.Message}");
            }
            finally
            {
                semaphore.Release();
            }
        });
        await Task.WhenAll(tasks).ConfigureAwait(false);
        return Result<BatchProcessResult>.AsSuccess(new BatchProcessResult
        {
            SuccessfulResults = results.ToList(),
            Errors = errors.ToList(),
            TotalProcessed = accountIds.Count
        });
    }
}
2. Background Task Processing Patterns
/// <summary>
/// Base class for interval-driven background services: runs ExecuteService
/// in a fresh DI scope, logs failures, and spaces executions so the
/// configured interval is measured start-to-start.
/// </summary>
public abstract class ManagedBackgroundService : BackgroundService
{
    // FIX: the fields were never assigned, and _logger was private even
    // though derived services (e.g. CacheUpdateService) log through it.
    // Optional parameters keep existing parameterless derived constructors compiling.
    protected readonly ILogger _logger;
    protected readonly IServiceProvider _serviceProvider;

    protected ManagedBackgroundService(ILogger logger = null, IServiceProvider serviceProvider = null)
    {
        _logger = logger;
        _serviceProvider = serviceProvider;
    }

    protected abstract string ServiceName { get; }
    protected abstract TimeSpan GetExecutionInterval();
    protected abstract void ExecuteService(CancellationToken stoppingToken);
    protected virtual void InitializeService(CancellationToken stoppingToken) { }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("Starting background service: {ServiceName}", ServiceName);
        try
        {
            InitializeService(stoppingToken);
            while (!stoppingToken.IsCancellationRequested)
            {
                var stopwatch = Stopwatch.StartNew();
                try
                {
                    // NOTE(review): the scope is created but not handed to
                    // ExecuteService; implementations resolve their own
                    // dependencies (e.g. via BLSession) - confirm before removing.
                    using (var scope = _serviceProvider.CreateScope())
                    {
                        ExecuteService(stoppingToken);
                    }
                }
                catch (OperationCanceledException)
                {
                    break;
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "Error executing background service: {ServiceName}", ServiceName);
                }
                stopwatch.Stop();
                // Sleep only for the remainder of the interval so the cadence
                // stays stable regardless of how long a run took.
                var delay = GetExecutionInterval() - stopwatch.Elapsed;
                if (delay > TimeSpan.Zero)
                {
                    try
                    {
                        await Task.Delay(delay, stoppingToken);
                    }
                    catch (OperationCanceledException)
                    {
                        // FIX: a cancelled delay previously escaped ExecuteAsync as
                        // an exception; shutdown should exit the loop cleanly.
                        break;
                    }
                }
            }
        }
        finally
        {
            _logger.LogInformation("Background service stopped: {ServiceName}", ServiceName);
        }
    }
}
// Specialized cache update service
/// <summary>
/// High-frequency cache maintenance: regular updates every tick, plus a
/// heavier "optimized" pass at most once per minute.
/// </summary>
public class CacheUpdateService : ManagedBackgroundService
{
    // Timestamp of the last optimized pass; null until the first run.
    private DateTime? _lastOptimizedUpdate = null;

    protected override string ServiceName => "CacheUpdateService";

    protected override void ExecuteService(CancellationToken stoppingToken)
    {
        using (var session = new BLSession())
        {
            // Regular cache updates run on every tick.
            session.GetBL<CachedTableBL>().ExecuteCacheUpdates().ThrowIfError();
            // FIX: use UtcNow instead of Now - wall-clock time can jump
            // backwards (DST, clock sync), which would stall or double-run
            // the optimized pass.
            if (_lastOptimizedUpdate == null || DateTime.UtcNow - _lastOptimizedUpdate > TimeSpan.FromMinutes(1))
            {
                session.GetBL<CachedTableBL>().ExecuteOptimizedCacheUpdates().ThrowIfError();
                _lastOptimizedUpdate = DateTime.UtcNow;
            }
        }
    }

    protected override TimeSpan GetExecutionInterval()
    {
        return TimeSpan.FromSeconds(2); // High frequency for immediate updates
    }

    protected override void InitializeService(CancellationToken stoppingToken)
    {
        try
        {
            using var session = new BLSession();
            session.GetBL<CachedTableBL>().RepairIfNecessaryCacheStatistic();
        }
        catch (Exception e)
        {
            // NOTE(review): this only compiles if the base class exposes
            // _logger as protected (it is written as private above) - confirm.
            _logger.LogError(e, "Failed to repair the cached statistic table");
        }
    }
}
// Task queue background processor
/// <summary>
/// Dequeues background tasks and executes each inside its own DI scope so
/// scoped services are disposed per task. Task state transitions and the
/// final CompletedAt/update are persisted via the queue.
/// </summary>
public class TaskQueueProcessor : BackgroundService
{
    private readonly ITaskQueue _taskQueue;
    private readonly IServiceProvider _serviceProvider;
    private readonly ILogger<TaskQueueProcessor> _logger;

    // FIX: the dependencies were declared but never assigned; without this
    // constructor the processor failed with a NullReferenceException.
    public TaskQueueProcessor(ITaskQueue taskQueue, IServiceProvider serviceProvider, ILogger<TaskQueueProcessor> logger)
    {
        _taskQueue = taskQueue;
        _serviceProvider = serviceProvider;
        _logger = logger;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                var task = await _taskQueue.DequeueAsync(stoppingToken);
                // Process task in separate scope for proper DI cleanup
                using (var scope = _serviceProvider.CreateScope())
                {
                    await ProcessTaskAsync(task, scope.ServiceProvider);
                }
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error processing background task");
            }
        }
    }

    private async Task ProcessTaskAsync(BackgroundTask task, IServiceProvider serviceProvider)
    {
        try
        {
            task.Status = TaskStatus.Processing;
            task.StartedAt = DateTime.UtcNow;
            // FIX: the unchecked 'as' cast caused a NullReferenceException when a
            // registered processor type did not implement ITaskProcessor; fail
            // with a descriptive error instead (handled by the catch below).
            if (serviceProvider.GetRequiredService(task.ProcessorType) is not ITaskProcessor processor)
            {
                throw new InvalidOperationException($"Type {task.ProcessorType} does not implement ITaskProcessor");
            }
            var result = await processor.ProcessAsync(task.Data);
            task.Status = result.Status == ResultStatus.Success ? TaskStatus.Completed : TaskStatus.Failed;
            task.Result = result.Data;
            task.Error = result.Error;
        }
        catch (Exception ex)
        {
            task.Status = TaskStatus.Failed;
            task.Error = ex.Message;
            _logger.LogError(ex, "Task processing failed: {TaskId}", task.Id);
        }
        finally
        {
            // Always record completion time and persist the final state.
            task.CompletedAt = DateTime.UtcNow;
            await _taskQueue.UpdateTaskAsync(task);
        }
    }
}
Database Query Optimization Patterns
1. Efficient Query Patterns
/// <summary>
/// NHibernate QueryOver examples: projection to DTOs, eager batch loading
/// to avoid N+1, and server-side aggregation.
/// </summary>
public class OptimizedQueryRepository
{
    private readonly ISession _session;

    /// <summary>Projects active accounts into lightweight summary DTOs with optional filters and paging.</summary>
    public async Task<Result<List<AccountSummaryDto>>> GetAccountSummariesAsync(AccountFilter filter)
    {
        try
        {
            // FIX: 'alias' was referenced in the WithAlias lambdas but never
            // declared; QueryOver projections need this declared marker variable.
            AccountSummaryDto alias = null;
            var query = _session.QueryOver<Account>()
                .Where(a => a.IsActive)
                .SelectList(list => list
                    .Select(a => a.I3D).WithAlias(() => alias.Id)
                    .Select(a => a.CompanyName).WithAlias(() => alias.CompanyName)
                    .Select(a => a.CustomerNumber).WithAlias(() => alias.CustomerNumber)
                    .Select(a => a.CreditLimit).WithAlias(() => alias.CreditLimit))
                .TransformUsing(Transformers.AliasToBean<AccountSummaryDto>());
            // Apply filters
            if (filter.CustomerNumbers?.Any() == true)
                query.WhereRestrictionOn(a => a.CustomerNumber).IsIn(filter.CustomerNumbers);
            if (!string.IsNullOrEmpty(filter.CompanyNameFilter))
                query.Where(Restrictions.InsensitiveLike("CompanyName", filter.CompanyNameFilter, MatchMode.Anywhere));
            // Pagination
            if (filter.PageSize > 0)
            {
                query.Skip(filter.Page * filter.PageSize).Take(filter.PageSize);
            }
            var results = await Task.FromResult(query.List<AccountSummaryDto>());
            return Result<List<AccountSummaryDto>>.AsSuccess(results.ToList());
        }
        catch (Exception ex)
        {
            return Result<List<AccountSummaryDto>>.AsError($"Query failed: {ex.Message}");
        }
    }

    /// <summary>Eager-loads contacts and addresses with the accounts to avoid N+1 queries.</summary>
    public async Task<Result<List<Account>>> GetAccountsWithContactsAsync(List<int> accountIds)
    {
        try
        {
            var accounts = await Task.FromResult(
                _session.QueryOver<Account>()
                    .Fetch(a => a.Contacts).Eager
                    .Fetch(a => a.Addresses).Eager
                    .WhereRestrictionOn(a => a.I3D).IsIn(accountIds)
                    .List());
            return Result<List<Account>>.AsSuccess(accounts.ToList());
        }
        catch (Exception ex)
        {
            return Result<List<Account>>.AsError($"Batch query failed: {ex.Message}");
        }
    }

    /// <summary>Computes count/sum/average of active-account credit limits on the server.</summary>
    public async Task<Result<AccountStatistics>> GetAccountStatisticsAsync()
    {
        try
        {
            // FIX: same missing alias declaration as above.
            AccountStatistics alias = null;
            var statistics = await Task.FromResult(
                _session.QueryOver<Account>()
                    .SelectList(list => list
                        .SelectCount(a => a.I3D).WithAlias(() => alias.TotalAccounts)
                        .SelectSum(a => a.CreditLimit).WithAlias(() => alias.TotalCreditLimit)
                        .SelectAvg(a => a.CreditLimit).WithAlias(() => alias.AverageCreditLimit))
                    .Where(a => a.IsActive)
                    .TransformUsing(Transformers.AliasToBean<AccountStatistics>())
                    .SingleOrDefault<AccountStatistics>());
            return Result<AccountStatistics>.AsSuccess(statistics);
        }
        catch (Exception ex)
        {
            return Result<AccountStatistics>.AsError($"Statistics query failed: {ex.Message}");
        }
    }
}
// Lazy loading pattern for large datasets
/// <summary>
/// Generic paging helper: counts the query, then fetches one page of items.
/// </summary>
public class LazyLoadingService
{
    /// <param name="baseQuery">Query to page over (not enumerated more than needed).</param>
    /// <param name="page">Zero-based page index.</param>
    /// <param name="pageSize">Items per page; must be positive.</param>
    public async Task<Result<PagedResult<T>>> GetPagedDataAsync<T>(
        IQueryable<T> baseQuery,
        int page,
        int pageSize,
        CancellationToken cancellationToken = default)
    {
        // FIX: pageSize <= 0 previously divided by zero when computing
        // TotalPages (casting the resulting Infinity/NaN to int).
        if (pageSize <= 0)
        {
            return Result<PagedResult<T>>.AsError("Page size must be greater than zero");
        }
        try
        {
            var totalCount = await baseQuery.CountAsync(cancellationToken);
            var items = await baseQuery
                .Skip(page * pageSize)
                .Take(pageSize)
                .ToListAsync(cancellationToken);
            return Result<PagedResult<T>>.AsSuccess(new PagedResult<T>
            {
                Items = items,
                TotalCount = totalCount,
                Page = page,
                PageSize = pageSize,
                TotalPages = (int)Math.Ceiling((double)totalCount / pageSize)
            });
        }
        catch (Exception ex)
        {
            return Result<PagedResult<T>>.AsError($"Paged query failed: {ex.Message}");
        }
    }
}
2. Connection and Transaction Optimization
// Demonstrates connection/transaction handling and batched writes on top of
// an NHibernate-style ISession.
public class OptimizedDataAccess
{
// Wraps a session plus an explicit READ COMMITTED transaction: commits once
// on success, rolls back on any exception, disposes both on Dispose.
public class OptimizedSession : IDisposable
{
private readonly ISession _session;
private readonly ITransaction _transaction;
public OptimizedSession()
{
// NOTE(review): SessionFactory is resolved statically here - presumably the
// application's configured NHibernate session factory; confirm.
_session = SessionFactory.OpenSession();
_transaction = _session.BeginTransaction(IsolationLevel.ReadCommitted);
}
// Runs one unit of work inside the transaction. A failed operation is
// rolled back and reported as an error Result rather than rethrown.
public async Task<Result<T>> ExecuteAsync<T>(Func<ISession, Task<T>> operation)
{
try
{
var result = await operation(_session);
await _transaction.CommitAsync();
return Result<T>.AsSuccess(result);
}
catch (Exception ex)
{
await _transaction.RollbackAsync();
return Result<T>.AsError($"Operation failed: {ex.Message}");
}
}
public void Dispose()
{
// Dispose the transaction before the session that owns it.
_transaction?.Dispose();
_session?.Dispose();
}
}
// Inserts entities in batches of 100, flushing and clearing the session's
// first-level cache between batches so memory stays flat for large inputs.
public async Task<Result> BatchInsertAsync<T>(IEnumerable<T> entities) where T : class
{
try
{
using var session = new OptimizedSession();
return await session.ExecuteAsync(async s =>
{
var batchSize = 100;
var batch = 0;
foreach (var entity in entities)
{
await s.SaveAsync(entity);
if (++batch % batchSize == 0)
{
// Push pending inserts to the database and detach them so the
// session does not accumulate every entity in memory.
await s.FlushAsync();
s.Clear();
}
}
// Flush whatever remains of the final partial batch.
await s.FlushAsync();
return "Success";
});
}
catch (Exception ex)
{
return Result.AsError($"Batch insert failed: {ex.Message}");
}
}
}
Resource Management and Pooling Patterns
1. Object Pooling Pattern
/// <summary>
/// Thread-safe bounded pool of disposable objects. Get() hands out a pooled
/// instance (or a freshly created one); disposing the returned wrapper puts
/// the object back, and overflow beyond maxSize is disposed instead.
/// </summary>
public class ObjectPool<T> : IDisposable where T : class, IDisposable, new()
{
    private readonly ConcurrentQueue<T> _objects = new();
    private readonly Func<T> _objectFactory;
    private readonly Action<T> _resetAction;
    private readonly int _maxSize;
    // Approximate number of objects currently sitting in the queue.
    private int _currentCount = 0;

    public ObjectPool(Func<T> objectFactory = null, Action<T> resetAction = null, int maxSize = 100)
    {
        _objectFactory = objectFactory ?? (() => new T());
        _resetAction = resetAction;
        _maxSize = maxSize;
    }

    /// <summary>Dequeues a pooled object or creates a new one; dispose the wrapper to return it.</summary>
    public PooledObject<T> Get()
    {
        if (_objects.TryDequeue(out T item))
        {
            Interlocked.Decrement(ref _currentCount);
            return new PooledObject<T>(item, this);
        }
        return new PooledObject<T>(_objectFactory(), this);
    }

    internal void Return(T item)
    {
        // FIX: the old "if (_currentCount < _maxSize) { enqueue; increment; }"
        // was a check-then-act race - concurrent returns could push the pool
        // past _maxSize. Reserve the slot atomically first and roll back on overflow.
        if (Interlocked.Increment(ref _currentCount) <= _maxSize)
        {
            _resetAction?.Invoke(item);
            _objects.Enqueue(item);
        }
        else
        {
            Interlocked.Decrement(ref _currentCount);
            item.Dispose();
        }
    }

    public void Dispose()
    {
        // Drain and dispose everything still pooled.
        while (_objects.TryDequeue(out T item))
        {
            item.Dispose();
        }
    }
}
/// <summary>
/// Lightweight RAII-style wrapper handed out by <c>ObjectPool&lt;T&gt;</c>:
/// disposing the wrapper returns the wrapped object to its pool.
/// </summary>
public struct PooledObject<T> : IDisposable where T : class, IDisposable
{
    private readonly ObjectPool<T> _owner;

    /// <summary>The pooled instance to use while this wrapper is alive.</summary>
    public T Object { get; }

    internal PooledObject(T obj, ObjectPool<T> pool)
    {
        Object = obj;
        _owner = pool;
    }

    /// <summary>Hands the object back to the owning pool.</summary>
    public void Dispose() => _owner.Return(Object);
}
// Usage example - HTTP client pooling
// Usage example built on ObjectPool<T>.
// NOTE(review): pooling HttpClient is unusual - HttpClient is designed to be
// shared as a single long-lived instance (or obtained via IHttpClientFactory),
// and per-instance pooling adds no benefit here; confirm this example is
// intentional before copying it into production code.
public class HttpClientPool
{
// Up to 50 clients kept; the reset callback is currently a no-op.
private static readonly ObjectPool<HttpClient> _httpClientPool =
new ObjectPool<HttpClient>(
() => new HttpClient(),
client => { /* Reset client state */ },
50);
// Performs a GET via a pooled client and returns the response body, or an
// error Result if the request throws.
public static async Task<Result<string>> MakeRequestAsync(string url)
{
using var pooledClient = _httpClientPool.Get();
var client = pooledClient.Object;
try
{
var response = await client.GetStringAsync(url);
return Result<string>.AsSuccess(response);
}
catch (Exception ex)
{
return Result<string>.AsError($"Request failed: {ex.Message}");
}
}
}
2. Memory Management Patterns
/// <summary>
/// Streams input through a pooled buffer so large payloads are processed in
/// fixed-size chunks instead of one big allocation.
/// </summary>
public class MemoryOptimizedService
{
    private readonly MemoryPool<byte> _memoryPool = MemoryPool<byte>.Shared;

    public async Task<Result<ProcessedData>> ProcessLargeDataAsync(Stream dataStream)
    {
        // Rent a scratch buffer (at least 8 KB); it is returned to the pool
        // automatically when the owner is disposed.
        using var bufferOwner = _memoryPool.Rent(8192);
        try
        {
            var processedData = new ProcessedData();
            while (true)
            {
                var bytesRead = await dataStream.ReadAsync(bufferOwner.Memory);
                if (bytesRead <= 0)
                {
                    break;
                }
                // Only the bytes actually read form this chunk.
                ProcessChunk(bufferOwner.Memory.Slice(0, bytesRead).Span, processedData);
            }
            return Result<ProcessedData>.AsSuccess(processedData);
        }
        catch (Exception ex)
        {
            return Result<ProcessedData>.AsError($"Processing failed: {ex.Message}");
        }
    }

    // Folds a chunk into the running checksum/byte count without allocating.
    private void ProcessChunk(Span<byte> chunk, ProcessedData result)
    {
        foreach (var value in chunk)
        {
            result.Checksum += value;
        }
        result.BytesProcessed += chunk.Length;
    }
}
// Large object heap optimization
/// <summary>
/// Processes a (possibly huge) item stream in fixed-size batches, reusing
/// one batch list to keep allocation pressure off the large object heap.
/// </summary>
public class LargeDataProcessor
{
    public Result<ProcessedResult> ProcessLargeDataset(IEnumerable<LargeDataItem> items)
    {
        try
        {
            var result = new ProcessedResult();
            // Process in batches to avoid LOH pressure
            const int batchSize = 1000;
            var batch = new List<LargeDataItem>(batchSize);
            foreach (var item in items)
            {
                batch.Add(item);
                if (batch.Count == batchSize)
                {
                    ProcessBatch(batch, result);
                    batch.Clear(); // Clear instead of creating new list
                }
            }
            // Process remaining items
            if (batch.Count > 0)
            {
                ProcessBatch(batch, result);
            }
            return Result<ProcessedResult>.AsSuccess(result);
        }
        catch (Exception ex)
        {
            return Result<ProcessedResult>.AsError($"Large data processing failed: {ex.Message}");
        }
    }

    // Accumulates one batch into the running totals.
    private void ProcessBatch(List<LargeDataItem> batch, ProcessedResult result)
    {
        foreach (var item in batch)
        {
            result.ProcessedCount++;
            result.TotalValue += item.Value;
        }
        // FIX: removed the explicit GC.Collect that ran after every full batch.
        // Forcing collections from application code defeats the GC's own
        // scheduling and measurably hurts throughput; reusing the cleared
        // batch list already keeps allocation pressure low.
    }
}
Performance Monitoring and Profiling Patterns
1. Performance Metrics Collection
/// <summary>
/// Wraps operations with timing/metric collection and builds aggregate
/// performance reports over a time window.
/// </summary>
public class PerformanceMonitoringService
{
    private readonly IMetricsCollector _metricsCollector;
    private readonly ILogger<PerformanceMonitoringService> _logger;

    // FIX: the dependencies were declared but never assigned; without this
    // constructor every monitored call crashed with a NullReferenceException.
    public PerformanceMonitoringService(IMetricsCollector metricsCollector, ILogger<PerformanceMonitoringService> logger)
    {
        _metricsCollector = metricsCollector;
        _logger = logger;
    }

    /// <summary>
    /// Runs <paramref name="operation"/>, records its duration and outcome,
    /// and warns when it exceeds 5 seconds. Exceptions are recorded as a
    /// failed metric and surfaced as an error Result.
    /// </summary>
    public async Task<Result<T>> MonitorOperationAsync<T>(
        string operationName,
        Func<Task<Result<T>>> operation,
        Dictionary<string, object> additionalMetrics = null)
    {
        var stopwatch = Stopwatch.StartNew();
        var startTime = DateTime.UtcNow;
        try
        {
            var result = await operation();
            stopwatch.Stop();
            var duration = stopwatch.Elapsed;
            var metrics = new OperationMetrics
            {
                OperationName = operationName,
                Duration = duration,
                Success = result.Status == ResultStatus.Success,
                StartTime = startTime,
                EndTime = DateTime.UtcNow,
                AdditionalMetrics = additionalMetrics ?? new Dictionary<string, object>()
            };
            await _metricsCollector.RecordOperationAsync(metrics);
            // Log slow operations
            if (duration > TimeSpan.FromSeconds(5))
            {
                _logger.LogWarning("Slow operation detected: {Operation} took {Duration}ms",
                    operationName, duration.TotalMilliseconds);
            }
            return result;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            await _metricsCollector.RecordOperationAsync(new OperationMetrics
            {
                OperationName = operationName,
                Duration = stopwatch.Elapsed,
                Success = false,
                StartTime = startTime,
                EndTime = DateTime.UtcNow,
                Error = ex.Message
            });
            return Result<T>.AsError($"Operation {operationName} failed: {ex.Message}");
        }
    }

    /// <summary>Aggregates recorded metrics over the trailing <paramref name="period"/>.</summary>
    public async Task<Result<PerformanceReport>> GeneratePerformanceReportAsync(TimeSpan period)
    {
        try
        {
            var endTime = DateTime.UtcNow;
            var startTime = endTime - period;
            var metrics = await _metricsCollector.GetMetricsAsync(startTime, endTime);
            // FIX: Average() throws InvalidOperationException on an empty
            // sequence; return an empty report instead of an error.
            if (metrics.Count == 0)
            {
                return Result<PerformanceReport>.AsSuccess(new PerformanceReport
                {
                    Period = period,
                    TotalOperations = 0,
                    SuccessfulOperations = 0,
                    AverageDuration = TimeSpan.Zero,
                    SlowestOperations = new List<OperationMetrics>(),
                    MostFrequentOperations = new Dictionary<string, int>()
                });
            }
            var report = new PerformanceReport
            {
                Period = period,
                TotalOperations = metrics.Count,
                SuccessfulOperations = metrics.Count(m => m.Success),
                AverageDuration = TimeSpan.FromMilliseconds(metrics.Average(m => m.Duration.TotalMilliseconds)),
                SlowestOperations = metrics.OrderByDescending(m => m.Duration).Take(10).ToList(),
                MostFrequentOperations = metrics.GroupBy(m => m.OperationName)
                    .OrderByDescending(g => g.Count())
                    .Take(10)
                    .ToDictionary(g => g.Key, g => g.Count())
            };
            return Result<PerformanceReport>.AsSuccess(report);
        }
        catch (Exception ex)
        {
            return Result<PerformanceReport>.AsError($"Failed to generate performance report: {ex.Message}");
        }
    }
}
// Performance measurement attribute
/// <summary>
/// Marks a method for automatic timing by the performance interceptor.
/// </summary>
[AttributeUsage(AttributeTargets.Method)]
public class MeasurePerformanceAttribute : Attribute
{
    /// <summary>Logical operation name; the interceptor falls back to "Type.Method" when null.</summary>
    public string OperationName { get; set; }

    /// <summary>Whether executions slower than the threshold should be logged.</summary>
    public bool LogSlowOperations { get; set; } = true;

    /// <summary>Duration in milliseconds above which an execution counts as slow.</summary>
    public int SlowThresholdMs { get; set; } = 1000;
}
// Performance interceptor
// Intercepts method invocations and routes ones marked with
// [MeasurePerformance] through the performance monitoring service.
public class PerformanceInterceptor
{
private readonly PerformanceMonitoringService _performanceService;
// Measures the invocation when the target method carries
// [MeasurePerformance]; otherwise proceeds untouched.
// NOTE(review): on the measured path this returns the Result<object>
// produced by MonitorOperationAsync, while the unmeasured path returns the
// method's raw return value - confirm callers expect this asymmetry.
public async Task<object> InterceptAsync(MethodInvocationContext context)
{
var perfAttribute = context.Method.GetCustomAttribute<MeasurePerformanceAttribute>();
if (perfAttribute == null)
return await context.ProceedAsync();
// Fall back to "Type.Method" when no explicit operation name was given.
var operationName = perfAttribute.OperationName ?? $"{context.TargetType.Name}.{context.Method.Name}";
return await _performanceService.MonitorOperationAsync(operationName, async () =>
{
var result = await context.ProceedAsync();
return Result<object>.AsSuccess(result);
});
}
}
2. Resource Usage Monitoring
/// <summary>
/// Samples process/GC resource usage once per minute, records the metrics,
/// and warns on high memory or thread counts.
/// </summary>
public class ResourceMonitoringService : BackgroundService
{
    private readonly ILogger<ResourceMonitoringService> _logger;
    private readonly IMetricsCollector _metricsCollector;

    // FIX: the fields were declared but never assigned; without this
    // constructor the first collection crashed with a NullReferenceException.
    public ResourceMonitoringService(ILogger<ResourceMonitoringService> logger, IMetricsCollector metricsCollector)
    {
        _logger = logger;
        _metricsCollector = metricsCollector;
    }

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await CollectResourceMetricsAsync();
                await Task.Delay(TimeSpan.FromMinutes(1), stoppingToken);
            }
            catch (OperationCanceledException)
            {
                break;
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error collecting resource metrics");
            }
        }
    }

    private async Task CollectResourceMetricsAsync()
    {
        using var process = Process.GetCurrentProcess();
        var metrics = new ResourceMetrics
        {
            Timestamp = DateTime.UtcNow,
            WorkingSet = process.WorkingSet64,
            PrivateMemory = process.PrivateMemorySize64,
            VirtualMemory = process.VirtualMemorySize64,
            ProcessorTime = process.TotalProcessorTime,
            ThreadCount = process.Threads.Count,
            HandleCount = process.HandleCount
        };
        // Add GC metrics.
        // NOTE(review): assumes ResourceMetrics initializes GcCollections so it
        // is indexable for generations 0..MaxGeneration - confirm.
        for (int generation = 0; generation <= GC.MaxGeneration; generation++)
        {
            metrics.GcCollections[generation] = GC.CollectionCount(generation);
        }
        metrics.TotalMemory = GC.GetTotalMemory(false);
        await _metricsCollector.RecordResourceMetricsAsync(metrics);
        // Alert on high resource usage
        if (metrics.WorkingSet > 1_000_000_000) // > 1GB
        {
            _logger.LogWarning("High memory usage detected: {WorkingSet}MB",
                metrics.WorkingSet / 1_000_000);
        }
        if (metrics.ThreadCount > 100)
        {
            _logger.LogWarning("High thread count detected: {ThreadCount}",
                metrics.ThreadCount);
        }
    }
}
Performance Best Practices and Guidelines
1. General Performance Guidelines
- Measure First: Always measure performance before optimizing
- Profile Regularly: Use profiling tools to identify bottlenecks
- Cache Strategically: Cache expensive operations but avoid over-caching
- Optimize Database Access: Use proper indexing and query optimization
- Manage Resources: Properly dispose of resources and use pooling
2. Async Programming Guidelines
- Use ConfigureAwait(false): In library code to avoid deadlocks
- Avoid Blocking: Never use .Result or .Wait() on async methods
- Parallel Processing: Use Task.WhenAll for independent operations
- Limit Concurrency: Use SemaphoreSlim to control concurrent operations
3. Memory Management Guidelines
- Avoid Large Object Heap: Keep objects under 85KB when possible
- Use Object Pooling: For frequently created/disposed objects
- Stream Processing: Process large datasets in chunks
- Monitor GC Pressure: Track garbage collection frequency and duration
4. Caching Guidelines
- Cache Expiry: Set appropriate expiry times based on data volatility
- Cache Invalidation: Implement proper cache invalidation strategies
- Cache Levels: Use multi-level caching for optimal performance
- Cache Monitoring: Monitor cache hit rates and effectiveness
Conclusion
The Centron application implements comprehensive performance patterns that ensure:
- Scalability: Multi-level caching and efficient resource management
- Responsiveness: Asynchronous programming and background processing
- Efficiency: Query optimization and memory management
- Monitoring: Performance metrics collection and resource monitoring
- Maintainability: Clear patterns and best practices for performance optimization
These performance patterns provide the foundation for a high-performance enterprise application that can scale with growing user demands while maintaining optimal response times and resource utilization.