Performance Optimization Patterns

#performance #optimization #caching #database #profiling #scalability

Last Updated: May 18, 2025 Related: .NET vs Laravel Complete Developer Guide, Debugging .NET Applications, PHP Best Practices


Quick Navigation


Fundamental Principles

🎯 Performance Optimization Hierarchy

1. Measure First, Optimize Second

Never optimize without measuring:
1. Establish baseline performance metrics
2. Identify actual bottlenecks through profiling
3. Implement targeted optimizations
4. Measure improvement and validate
5. Monitor in production environment

2. The 80/20 Rule in Performance

Focus on the 20% of code that affects 80% of performance:
- Database queries (typically the biggest bottleneck)
- External API calls
- File I/O operations
- Complex algorithms in hot paths
- Memory allocation patterns

3. Scalability Patterns

Horizontal vs Vertical Scaling:

Vertical Scaling (Scale Up):
- Add more power to existing servers
- Simpler to implement
- Has hard limits
- Single point of failure

Horizontal Scaling (Scale Out):
- Add more servers
- Better fault tolerance
- Requires architecture changes
- More complex load balancing

📊 Performance Budgets

## Performance Budget Template

### Response Time Targets
- Page Load: < 2 seconds
- API Response: < 200ms
- Database Query: < 100ms
- Background Job: < 30 seconds

### Resource Budgets
- JavaScript Bundle: < 200KB gzipped
- CSS Bundle: < 50KB gzipped
- Images: < 1MB total per page
- Total Page Size: < 2MB

### User Experience Metrics
- First Contentful Paint: < 1.5s
- Largest Contentful Paint: < 2.5s
- Cumulative Layout Shift: < 0.1
- First Input Delay: < 100ms

Database Optimization

🗄️ Query Optimization Patterns

N+1 Query Problem Solutions

// ❌ Bad: N+1 Query Problem
// Teaching example — deliberately wrong. One query loads all posts, then the
// loop issues one Users lookup PER post, so N posts cost N+1 round trips.
public async Task<List<PostDto>> GetPostsWithAuthorsAsync()
{
    var posts = await _context.Posts.ToListAsync();
    var postDtos = new List<PostDto>();
    
    foreach (var post in posts)
    {
        // This creates N additional queries
        var author = await _context.Users.FindAsync(post.AuthorId);
        postDtos.Add(new PostDto
        {
            Title = post.Title,
            AuthorName = author.Name // N+1 problem!
        });
    }
    
    return postDtos;
}

// ✅ Good: Projection into a DTO (single SQL query)
// NOTE: the original chained .Include(p => p.Author) before the Select, but
// EF Core ignores Include when the query projects with Select — the
// projection alone is what produces the join. The redundant Include is
// removed so the example doesn't teach a no-op.
public async Task<List<PostDto>> GetPostsWithAuthorsOptimizedAsync()
{
    return await _context.Posts
        .Select(p => new PostDto
        {
            Title = p.Title,
            AuthorName = p.Author.Name
        })
        .ToListAsync();
}

// ✅ Alternative: Explicit Join
// Same query as the query-expression version, written in method syntax;
// the compiler lowers a `join … on … equals …` clause to exactly this
// Join call, so both translate to the same SQL INNER JOIN.
public async Task<List<PostDto>> GetPostsWithAuthorsJoinAsync()
{
    return await _context.Posts
        .Join(
            _context.Users,
            p => p.AuthorId,
            u => u.Id,
            (p, u) => new PostDto
            {
                Title = p.Title,
                AuthorName = u.Name
            })
        .ToListAsync();
}
// PHP Laravel Example
// ❌ Bad: N+1 Query
// Teaching example — deliberately wrong. Post::all() is one query; each
// $post->author access then lazily loads that author, one query per post.
class PostController
{
    public function index()
    {
        $posts = Post::all();
        
        foreach ($posts as $post) {
            // N+1 query problem
            echo $post->author->name;
        }
    }
}

// ✅ Good: Eager Loading
class PostController
{
    public function index()
    {
        // Single query with join
        // with('author') eager-loads all authors in one extra query, so the
        // loop below touches no database at all.
        $posts = Post::with('author')->get();
        
        foreach ($posts as $post) {
            echo $post->author->name;
        }
    }
    
    // ✅ Even Better: Specific columns
    // Restricts both queries to the columns actually needed. Note that the
    // foreign key (author_id here, id on the relation) must stay in the
    // select lists or Eloquent cannot match authors to posts.
    public function indexOptimized()
    {
        return Post::with('author:id,name')
                   ->select('id', 'title', 'author_id')
                   ->get();
    }
}

Pagination for Large Datasets

// ✅ Cursor-based pagination for large datasets
public class CursorPaginationService
{
    /// <summary>
    /// Pages through an Id-ordered query using an opaque Base64 cursor
    /// instead of OFFSET, which stays fast regardless of how deep the
    /// caller pages.
    /// </summary>
    /// <param name="query">Source query; must expose a monotonically increasing Id.</param>
    /// <param name="cursor">Opaque cursor returned by a previous page, or null for the first page.</param>
    /// <param name="pageSize">Maximum number of items per page.</param>
    /// <exception cref="ArgumentOutOfRangeException">pageSize is less than 1.</exception>
    /// <exception cref="ArgumentException">cursor is present but malformed.</exception>
    public async Task<CursorPage<T>> GetPageAsync<T>(
        IQueryable<T> query,
        string cursor = null,
        int pageSize = 20) where T : class, IEntity
    {
        if (pageSize < 1)
        {
            throw new ArgumentOutOfRangeException(nameof(pageSize));
        }

        // Cursors come from clients (untrusted); reject malformed values with
        // a clear ArgumentException instead of leaking a raw FormatException
        // from Convert.FromBase64String.
        if (!string.IsNullOrEmpty(cursor))
        {
            if (!TryDecodeCursor(cursor, out var cursorValue))
            {
                throw new ArgumentException("Invalid pagination cursor.", nameof(cursor));
            }
            query = query.Where(x => x.Id > cursorValue);
        }
        
        // Fetch one extra row purely to detect whether a next page exists.
        var items = await query
            .OrderBy(x => x.Id)
            .Take(pageSize + 1)
            .ToListAsync();
        
        var hasNext = items.Count > pageSize;
        if (hasNext)
        {
            items.RemoveAt(items.Count - 1); // drop the sentinel row
        }
        
        var nextCursor = hasNext && items.Any()
            ? EncodeBase64Cursor(items.Last().Id)
            : null;
        
        return new CursorPage<T>
        {
            Items = items,
            NextCursor = nextCursor,
            HasNext = hasNext
        };
    }
    
    // Encodes the 8-byte little/big-endian machine representation of the Id.
    // NOTE(review): BitConverter output is endianness-dependent — cursors are
    // not portable across architectures; acceptable while they stay opaque
    // and short-lived.
    private static string EncodeBase64Cursor(long id)
    {
        return Convert.ToBase64String(BitConverter.GetBytes(id));
    }
    
    // Safe counterpart to EncodeBase64Cursor: returns false instead of
    // throwing on bad Base64 or wrong payload length.
    private static bool TryDecodeCursor(string cursor, out long id)
    {
        id = 0;
        var buffer = new byte[sizeof(long)];
        if (!Convert.TryFromBase64String(cursor, buffer, out var written) || written != sizeof(long))
        {
            return false;
        }
        id = BitConverter.ToInt64(buffer, 0);
        return true;
    }
}

// ✅ Efficient offset pagination (when cursor isn't suitable)
// Returns one page of results plus the metadata (total count / total pages)
// that offset-style UIs need.
public async Task<PagedResult<T>> GetPagedAsync<T>(
    IQueryable<T> query,
    int page = 1,
    int pageSize = 20)
{
    // Guard sloppy input: page=0 or negative values would otherwise produce
    // a negative OFFSET and fail at query translation time.
    if (page < 1) page = 1;
    if (pageSize < 1) pageSize = 20;

    var skip = (page - 1) * pageSize;
    
    // Separate COUNT round trip; consider caching it for hot lists.
    var totalCount = await query.CountAsync();
    
    // Translates to OFFSET/FETCH (OFFSET/LIMIT). NOTE: cost grows with the
    // offset — prefer the cursor-based service above for deep pages.
    var items = await query
        .Skip(skip)
        .Take(pageSize)
        .ToListAsync();
    
    return new PagedResult<T>
    {
        Items = items,
        Page = page,
        PageSize = pageSize,
        TotalCount = totalCount,
        TotalPages = (int)Math.Ceiling((double)totalCount / pageSize)
    };
}

Index Optimization

-- Covering indexes for common queries
-- INCLUDE columns let the query be answered from the index alone, and the
-- filter keeps the index small (published posts only).
CREATE INDEX IX_Posts_AuthorId_Published_Title 
ON Posts (AuthorId, IsPublished)
INCLUDE (Title, CreatedAt)
WHERE IsPublished = 1;

-- Partial (filtered) indexes for specific conditions
CREATE INDEX IX_Posts_Draft 
ON Posts (AuthorId, UpdatedAt)
WHERE IsPublished = 0;

-- Composite indexes for range queries: equality column (CustomerId) first,
-- then the range column (OrderDate)
CREATE INDEX IX_Orders_DateRange
ON Orders (CustomerId, OrderDate, Status);

-- Full-text search index. In T-SQL the statement takes NO index name and
-- REQUIRES a KEY INDEX clause naming a unique index on the table (here the
-- primary key); the original "CREATE FULLTEXT INDEX IX_Posts_FullText ON …"
-- form is invalid SQL Server syntax.
CREATE FULLTEXT INDEX ON Posts (Title, Content)
KEY INDEX PK_Posts;

🔄 Connection Pool Optimization

// ✅ Proper connection string configuration
// string.Join guarantees exactly one ';' between fragments — the original
// concatenation appended fragments that both began and ended with ';',
// producing stray ";;" separators. TrimEnd also tolerates a base string
// that already ends with ';'.
// NOTE: "Command Timeout" as a connection-string keyword requires
// Microsoft.Data.SqlClient 2.0 or later.
var connectionString = string.Join(";",
    builder.Configuration.GetConnectionString("DefaultConnection")?.TrimEnd(';'),
    "Max Pool Size=100",
    "Min Pool Size=5",
    "Connection Lifetime=30",
    "Connection Timeout=30",
    "Command Timeout=30");

// ✅ Using connection pooling effectively
public class OptimizedRepository
{
    // A factory lets every operation use a short-lived DbContext, so the
    // underlying connection goes back to the pool as soon as the query ends.
    private readonly IDbContextFactory<AppDbContext> _contextFactory;
    
    public OptimizedRepository(IDbContextFactory<AppDbContext> contextFactory) =>
        _contextFactory = contextFactory;
    
    // Read-only query: AsNoTracking skips change-tracker bookkeeping.
    public async Task<List<User>> GetUsersAsync()
    {
        using var db = await _contextFactory.CreateDbContextAsync();
        
        var users = await db.Users
            .AsNoTracking()
            .ToListAsync();
        return users;
    }
    
    // ✅ Batch operations — one SaveChanges for the whole set.
    public async Task UpdateUsersAsync(List<User> users)
    {
        using var db = await _contextFactory.CreateDbContextAsync();
        
        db.Users.UpdateRange(users);
        await db.SaveChangesAsync();
    }
}

Caching Strategies

💾 Multi-Level Caching Architecture

Cache Hierarchy

public class CacheManager
{
    // NOTE(review): constructor wiring omitted in this snippet — both caches
    // are presumably injected. The original also held an unused IDatabase
    // field; it has been removed.
    private readonly IMemoryCache _l1Cache;      // In-process cache
    private readonly IDistributedCache _l2Cache; // Redis cache

    // L1 entries are capped at this lifetime so the in-process copy can never
    // outlive a SHORTER caller-supplied TTL on the distributed entry (the
    // original always used 5 minutes, serving stale L1 data when ttl < 5 min).
    private static readonly TimeSpan L1MaxLifetime = TimeSpan.FromMinutes(5);

    /// <summary>
    /// Reads through L1 (memory) then L2 (distributed); on a full miss the
    /// factory produces the value and both levels are populated.
    /// NOTE(review): concurrent misses each run the factory (cache stampede);
    /// add per-key locking if the factory is expensive.
    /// </summary>
    public async Task<T> GetAsync<T>(string key, Func<Task<T>> factory, TimeSpan? ttl = null)
    {
        // L1: Check in-memory cache
        if (_l1Cache.TryGetValue(key, out T cachedValue))
        {
            return cachedValue;
        }

        var effectiveTtl = ttl ?? TimeSpan.FromHours(1);
        var l1Ttl = effectiveTtl < L1MaxLifetime ? effectiveTtl : L1MaxLifetime;

        // L2: Check distributed cache
        var json = await _l2Cache.GetStringAsync(key);
        if (!string.IsNullOrEmpty(json))
        {
            var value = JsonSerializer.Deserialize<T>(json);
            // Promote to L1 for subsequent in-process hits
            _l1Cache.Set(key, value, l1Ttl);
            return value;
        }
        
        // L3: Generate from source
        var newValue = await factory();
        
        // Populate both cache levels
        var serialized = JsonSerializer.Serialize(newValue);
        await _l2Cache.SetStringAsync(key, serialized, new DistributedCacheEntryOptions
        {
            AbsoluteExpirationRelativeToNow = effectiveTtl
        });
        
        _l1Cache.Set(key, newValue, l1Ttl);
        
        return newValue;
    }
}

// Usage with dependency injection
public class UserService
{
    // NOTE(review): constructor wiring omitted in this snippet — _cache and
    // _repository are presumably injected via DI.
    private readonly CacheManager _cache;
    private readonly IUserRepository _repository;
    
    // Reads through the two-level cache; a full miss falls back to the
    // repository and caches the result for 2 hours.
    public async Task<User> GetUserAsync(int id)
    {
        return await _cache.GetAsync(
            $"user:{id}",
            () => _repository.GetByIdAsync(id),
            TimeSpan.FromHours(2)
        );
    }
}

Cache Invalidation Patterns

// ✅ Tag-based cache invalidation
public class TaggedCacheService
{
    // The original also declared an in-process ISet<string> _tags that was
    // never read or written; it has been removed.
    private readonly IDistributedCache _cache;
    
    /// <summary>
    /// Stores a value and records its key under each tag so the whole group
    /// can be invalidated at once. The original accepted <paramref name="expiry"/>
    /// but never applied it; it is now honoured on the value entry.
    /// </summary>
    public async Task SetAsync<T>(string key, T value, string[] tags, TimeSpan? expiry = null)
    {
        // Store the actual value, applying the caller's expiry when given
        var options = new DistributedCacheEntryOptions();
        if (expiry.HasValue)
        {
            options.AbsoluteExpirationRelativeToNow = expiry;
        }
        await _cache.SetStringAsync(key, JsonSerializer.Serialize(value), options);
        
        // Store tag associations.
        // NOTE(review): this read-modify-write of each tag set is not atomic —
        // concurrent SetAsync calls for the same tag can drop each other's
        // keys. Use an atomic set structure (e.g. Redis SADD) if that matters.
        foreach (var tag in tags)
        {
            var tagKey = $"tag:{tag}";
            var taggedKeys = await GetTaggedKeysAsync(tag);
            taggedKeys.Add(key);
            await _cache.SetStringAsync(tagKey, JsonSerializer.Serialize(taggedKeys));
        }
    }
    
    // Removes every key recorded under the tag, then the tag set itself.
    public async Task InvalidateTagAsync(string tag)
    {
        var taggedKeys = await GetTaggedKeysAsync(tag);
        
        foreach (var key in taggedKeys)
        {
            await _cache.RemoveAsync(key);
        }
        
        await _cache.RemoveAsync($"tag:{tag}");
    }
    
    // Loads the set of keys associated with a tag (empty set when absent).
    private async Task<HashSet<string>> GetTaggedKeysAsync(string tag)
    {
        var tagKey = $"tag:{tag}";
        var json = await _cache.GetStringAsync(tagKey);
        
        return string.IsNullOrEmpty(json)
            ? new HashSet<string>()
            : JsonSerializer.Deserialize<HashSet<string>>(json);
    }
}

// ✅ Cache-aside pattern with write-through
public class CacheAsideService
{
    // NOTE(review): declarations of _cache, _repository and _taggedCache are
    // omitted in this snippet.

    // Cache-aside read: try the cache, fall back to the database, populate on miss.
    public async Task<User> GetUserAsync(int id)
    {
        var key = $"user:{id}";
        
        // Try cache first
        var cached = await _cache.GetStringAsync(key);
        if (!string.IsNullOrEmpty(cached))
        {
            return JsonSerializer.Deserialize<User>(cached);
        }
        
        // Load from database
        var user = await _repository.GetByIdAsync(id);
        if (user != null)
        {
            // Store in cache
            // NOTE(review): no expiration options are set here, so the entry
            // lives until explicitly invalidated — confirm that is intended.
            await _cache.SetStringAsync(key, JsonSerializer.Serialize(user));
        }
        
        return user;
    }
    
    // Write path: database first, then refresh the cache (write-through) and
    // invalidate any tag-grouped entries that embed user data.
    public async Task UpdateUserAsync(User user)
    {
        // Update database
        await _repository.UpdateAsync(user);
        
        // Update cache (write-through)
        var key = $"user:{user.Id}";
        await _cache.SetStringAsync(key, JsonSerializer.Serialize(user));
        
        // Invalidate related caches
        await _taggedCache.InvalidateTagAsync("users");
    }
}

HTTP Caching Headers

// ✅ Response caching middleware
public class ResponseCachingService
{
    // Wires up ASP.NET Core response caching plus a small inline middleware
    // that stamps Cache-Control headers by path prefix.
    public void ConfigureCaching(IApplicationBuilder app)
    {
        app.UseResponseCaching();
        app.Use(async (context, next) =>
        {
            // Set cache headers based on content type
            if (context.Request.Path.StartsWithSegments("/api"))
            {
                // Private: per-user API responses must not be shared by proxies
                context.Response.Headers.CacheControl = "private, max-age=300";
            }
            else if (context.Request.Path.StartsWithSegments("/static"))
            {
                // One year for immutable static assets
                context.Response.Headers.CacheControl = "public, max-age=31536000";
                context.Response.Headers.ETag = CalculateETag(context.Request.Path);
            }
            
            await next();
        });
    }
    
    private string CalculateETag(string path)
    {
        // NOTE(review): this hashes the PATH string, not the file contents —
        // the ETag therefore never changes when the file changes. Derive it
        // from the content (or last-write time + length) for real validation.
        // MD5 is acceptable here only because the value is a cache validator,
        // not a security artifact.
        var hash = MD5.HashData(Encoding.UTF8.GetBytes(path));
        return $"\"{Convert.ToHexString(hash)}\"";
    }
}

// ✅ Action-level caching
// NOTE: [ResponseCache] only emits headers; actual server-side caching
// requires the ResponseCaching middleware registered in the pipeline.
[ApiController]
[Route("api/[controller]")]
public class ProductsController : ControllerBase
{
    [HttpGet]
    [ResponseCache(Duration = 300, VaryByQueryKeys = new[] { "category", "page" })]
    public async Task<ActionResult<IEnumerable<Product>>> GetProducts(
        string category = null,
        int page = 1)
    {
        // This response will be cached for 5 minutes
        // and vary by category and page parameters
        var products = await _productService.GetProductsAsync(category, page);
        return Ok(products);
    }
    
    [HttpGet("{id}")]
    [ResponseCache(Duration = 3600, VaryByHeader = "Accept-Language")]
    public async Task<ActionResult<Product>> GetProduct(int id)
    {
        var product = await _productService.GetProductAsync(id);
        if (product == null)
        {
            return NotFound();
        }
        
        // Set custom cache headers ("R" = RFC1123 format required by HTTP)
        Response.Headers.LastModified = product.UpdatedAt.ToString("R");
        Response.Headers.ETag = $"\"{product.Version}\"";
        
        return Ok(product);
    }
}

Memory Management

🧠 .NET Memory Optimization

Object Pooling

// ✅ Object pooling for frequently allocated objects
public class StringBuilderPool
{
    // Single process-wide pool; renting avoids allocating a fresh
    // StringBuilder (and its internal char buffer) on every call.
    private static readonly ObjectPool<StringBuilder> Pool =
        new DefaultObjectPool<StringBuilder>(new StringBuilderPooledObjectPolicy());
    
    public static StringBuilder Get()
    {
        return Pool.Get();
    }
    
    public static void Return(StringBuilder sb) => Pool.Return(sb);
}

// Pool policy: clear returned builders and refuse to retain oversized ones.
// NOTE(review): Microsoft.Extensions.ObjectPool ships a type with this exact
// name — an ambiguous-reference risk if both namespaces are imported.
public class StringBuilderPooledObjectPolicy : IPooledObjectPolicy<StringBuilder>
{
    // Builders that grew beyond this capacity are discarded so one huge
    // string doesn't pin a large buffer for the lifetime of the process.
    private const int MaxRetainedCapacity = 1024;

    public StringBuilder Create()
    {
        return new StringBuilder();
    }
    
    public bool Return(StringBuilder obj)
    {
        obj.Clear();
        return obj.Capacity <= MaxRetainedCapacity;
    }
}

// Usage
// Joins all items line-by-line using a pooled StringBuilder; the finally
// block guarantees the builder goes back to the pool even if enumeration throws.
public string ProcessData(IEnumerable<string> items)
{
    var builder = StringBuilderPool.Get();
    try
    {
        foreach (var line in items)
        {
            builder.AppendLine(line);
        }
        var result = builder.ToString();
        return result;
    }
    finally
    {
        StringBuilderPool.Return(builder);
    }
}

// ✅ Array pooling for temporary buffers
public class BufferProcessor
{
    private static readonly ArrayPool<byte> Pool = ArrayPool<byte>.Shared;
    
    // Streams the input through a rented scratch buffer instead of
    // allocating 4 KB per call; the buffer is always returned to the pool.
    public async Task ProcessStreamAsync(Stream stream)
    {
        var buffer = Pool.Rent(4096);
        try
        {
            while (true)
            {
                var bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length);
                if (bytesRead == 0)
                {
                    break; // end of stream
                }
                // Only the filled prefix is valid — rented arrays may be larger
                ProcessBuffer(buffer.AsSpan(0, bytesRead));
            }
        }
        finally
        {
            Pool.Return(buffer);
        }
    }
}

Span and Memory Usage

// ✅ Using Span<T> for zero-allocation operations
public class SpanOptimizations
{
    // Avoid string allocations with Span
    // Splits on the FIRST '@' only, so "a@b@c" reaches the domain validator
    // as "b@c" — the helpers are expected to reject that.
    // NOTE(review): IsValidLocalPart / IsValidDomain are not shown in this snippet.
    public bool IsValidEmail(ReadOnlySpan<char> email)
    {
        int atIndex = email.IndexOf('@');
        // '@' must exist and be neither the first nor the last character
        if (atIndex <= 0 || atIndex == email.Length - 1)
            return false;
        
        // Slices are views into the original chars — no string allocation
        var localPart = email.Slice(0, atIndex);
        var domainPart = email.Slice(atIndex + 1);
        
        return IsValidLocalPart(localPart) && IsValidDomain(domainPart);
    }
    
    // Process arrays without allocation
    public void ProcessNumbers(ReadOnlySpan<int> numbers)
    {
        foreach (var number in numbers)
        {
            // Process without copying
        }
    }
    
    // Parse without string allocation
    // Accepts "ID<digits>" (e.g. "ID42"); span StartsWith compares chars
    // ordinally, and int.TryParse works directly on the char slice.
    public bool TryParseCustomFormat(ReadOnlySpan<char> input, out int result)
    {
        result = 0;
        
        // Need at least "ID" plus one digit
        if (input.Length < 3 || !input.StartsWith("ID".AsSpan()))
            return false;
        
        return int.TryParse(input.Slice(2), out result);
    }
}

// ✅ Memory<T> for async operations
public class MemoryOptimizations
{
    // Unlike Span<T>, Memory<T> may live on the heap and cross awaits.
    private readonly Memory<byte> _buffer = new byte[1024];
    
    // Reads into the long-lived buffer; Memory<T> is legal across the await.
    public async Task<int> ReadDataAsync(Stream stream)
    {
        return await stream.ReadAsync(_buffer);
    }
    
    // Accepts a Memory view and hands the synchronous part a Span — no copy.
    public void ProcessData(Memory<byte> data)
    {
        ProcessSpan(data.Span);
    }
    
    // Doubles each byte in place (wrapping on overflow via the byte cast).
    private void ProcessSpan(Span<byte> span)
    {
        var index = 0;
        while (index < span.Length)
        {
            span[index] = (byte)(span[index] * 2);
            index++;
        }
    }
}

Garbage Collection Optimization

// ✅ Minimize allocations in hot paths
public class AllocationOptimizer
{
    // Cache compiled Regex instances once instead of rebuilding them per call.
    // (The original field used a non-existent `CompiledRegex` type.)
    private static readonly ConcurrentDictionary<string, Regex> RegexCache = new();
    
    // Reused builder/list avoid per-call allocations.
    // NOTE: this makes the class single-threaded by design — do NOT share
    // one instance across threads.
    private readonly StringBuilder _reusableStringBuilder = new();
    private readonly List<Item> _reusableList = new();
    
    // Concatenates the string form of every valid item, one per line,
    // reusing the instance-level builder and list.
    public string ProcessItems(IEnumerable<Item> items)
    {
        _reusableStringBuilder.Clear();
        _reusableList.Clear();
        
        // Reuse collections to avoid allocations
        _reusableList.AddRange(items.Where(i => i.IsValid));
        
        foreach (var item in _reusableList)
        {
            _reusableStringBuilder.AppendLine(item.ToString());
        }
        
        return _reusableStringBuilder.ToString();
    }
    
    // Use static readonly for constants
    private static readonly TimeSpan CacheExpiry = TimeSpan.FromMinutes(30);
    private static readonly string[] CommonPrefixes = { "http://", "https://", "ftp://" };
    
    // Avoid allocations in loops
    public void ProcessLargeDataset(IEnumerable<Data> dataset)
    {
        foreach (var data in dataset)
        {
            // Process without creating temporary objects
            ProcessDataInPlace(data);
        }
        // NOTE: the original forced GC.Collect() whenever managed memory
        // crossed 500 MB — explicit collections in production code stall the
        // application and usually hurt throughput. Let the GC decide, and
        // tune via GC configuration/container limits instead.
    }
}

Async Programming Patterns

⚡ Efficient Async Operations

Parallel Processing

// ✅ Parallel processing with controlled concurrency
public class ParallelProcessor : IDisposable
{
    // Gate limiting how many processor callbacks run simultaneously.
    // NOTE: the original also created an HttpClient that nothing used (and
    // never disposed); it has been removed. IDisposable was added so the
    // semaphore can be released.
    private readonly SemaphoreSlim _semaphore;
    
    public ParallelProcessor(int maxConcurrency)
    {
        _semaphore = new SemaphoreSlim(maxConcurrency);
    }
    
    /// <summary>
    /// Runs <paramref name="processor"/> over all items with at most
    /// maxConcurrency in flight. Results come back in input order
    /// (Task.WhenAll preserves task order).
    /// </summary>
    public async Task<List<TResult>> ProcessConcurrentlyAsync<T, TResult>(
        IEnumerable<T> items,
        Func<T, Task<TResult>> processor)
    {
        var tasks = items.Select(async item =>
        {
            await _semaphore.WaitAsync();
            try
            {
                return await processor(item);
            }
            finally
            {
                _semaphore.Release();
            }
        });
        
        return (await Task.WhenAll(tasks)).ToList();
    }
    
    // ✅ Batch processing with progress reporting
    // Processes items batch-by-batch (each batch fully concurrent) and
    // reports fractional completion after every batch.
    public async Task<List<TResult>> ProcessInBatchesAsync<T, TResult>(
        IEnumerable<T> items,
        Func<T, Task<TResult>> processor,
        int batchSize = 10,
        IProgress<double> progress = null)
    {
        var allItems = items.ToList();
        var results = new List<TResult>(allItems.Count);
        var completed = 0;
        
        // Chunk (LINQ, .NET 6+) replaces the manual Select/GroupBy batching.
        foreach (var batch in allItems.Chunk(batchSize))
        {
            var batchResults = await Task.WhenAll(batch.Select(processor));
            
            results.AddRange(batchResults);
            completed += batch.Length;
            
            progress?.Report((double)completed / allItems.Count);
        }
        
        return results;
    }
    
    public void Dispose() => _semaphore.Dispose();
}

// ✅ Producer-Consumer pattern with channels
public class ChannelProcessor<T>
{
    // Bounded so a fast producer cannot outrun slow consumers unboundedly.
    // (The original also stored the capacity in an unused field; removed.)
    private readonly Channel<T> _channel;
    
    public ChannelProcessor(int capacity = 100)
    {
        var options = new BoundedChannelOptions(capacity)
        {
            FullMode = BoundedChannelFullMode.Wait, // back-pressure: writer waits when full
            SingleReader = false,
            SingleWriter = false
        };
        _channel = Channel.CreateBounded<T>(options);
    }
    
    // Writes every item, then marks the channel complete so consumers
    // terminate — completion happens even if enumeration throws.
    public async Task ProduceAsync(IEnumerable<T> items)
    {
        var writer = _channel.Writer;
        
        try
        {
            foreach (var item in items)
            {
                await writer.WriteAsync(item);
            }
        }
        finally
        {
            writer.Complete();
        }
    }
    
    // Drains the channel, invoking the processor per item, until the
    // channel completes or the token is cancelled.
    public async Task ConsumeAsync(Func<T, Task> processor, CancellationToken cancellationToken)
    {
        var reader = _channel.Reader;
        
        try
        {
            await foreach (var item in reader.ReadAllAsync(cancellationToken))
            {
                await processor(item);
            }
        }
        catch (OperationCanceledException)
        {
            // Expected when cancelled
        }
    }
    
    /// <summary>
    /// Runs one producer plus <paramref name="consumerCount"/> consumers to
    /// completion. Pass null (the default) for one consumer per logical CPU —
    /// the original declared `int consumerCount = Environment.ProcessorCount`,
    /// which is not a compile-time constant and therefore did not compile.
    /// </summary>
    public async Task ProcessAsync(
        IEnumerable<T> items,
        Func<T, Task> processor,
        int? consumerCount = null,
        CancellationToken cancellationToken = default)
    {
        var consumers = consumerCount ?? Environment.ProcessorCount;
        
        // Start producer
        var producerTask = ProduceAsync(items);
        
        // Start consumers
        var consumerTasks = Enumerable.Range(0, consumers)
            .Select(_ => ConsumeAsync(processor, cancellationToken))
            .ToArray();
        
        // Wait for completion
        await Task.WhenAll(new[] { producerTask }.Concat(consumerTasks));
    }
}

Async Resource Management

// ✅ Async disposable pattern
// Standard IAsyncDisposable layout: DisposeAsync runs the async cleanup in
// DisposeAsyncCore, then the synchronous Dispose(false) path, then suppresses
// finalization.
public class AsyncResourceManager : IAsyncDisposable
{
    private readonly HttpClient _httpClient;
    private readonly Database _database; // assumed async-disposable project type — not shown here
    private bool _disposed; // guards against double dispose
    
    public AsyncResourceManager()
    {
        _httpClient = new HttpClient();
        _database = new Database();
    }
    
    public async ValueTask DisposeAsync()
    {
        if (_disposed) return;
        
        await DisposeAsyncCore();
        
        // false: managed cleanup already ran in DisposeAsyncCore above
        Dispose(false);
        GC.SuppressFinalize(this);
        _disposed = true;
    }
    
    protected virtual async ValueTask DisposeAsyncCore()
    {
        // Dispose async resources
        if (_database != null)
        {
            await _database.DisposeAsync();
        }
        
        // Dispose sync resources (HttpClient.Dispose tolerates repeat calls,
        // so the overlap with Dispose(true) is harmless)
        _httpClient?.Dispose();
    }
    
    protected virtual void Dispose(bool disposing)
    {
        if (disposing)
        {
            _httpClient?.Dispose();
        }
    }
}

// ✅ Async lazy initialization
// Wraps Lazy<Task<T>> so the factory runs at most once and the instance can
// be awaited directly (the custom GetAwaiter makes `await lazy` legal).
public class AsyncLazy<T>
{
    private readonly Lazy<Task<T>> _lazy;
    
    public AsyncLazy(Func<Task<T>> taskFactory) =>
        _lazy = new Lazy<Task<T>>(taskFactory);
    
    // First access triggers the factory; later accesses reuse the same task.
    public Task<T> Value => _lazy.Value;
    
    public bool IsValueCreated => _lazy.IsValueCreated;
    
    public TaskAwaiter<T> GetAwaiter() => Value.GetAwaiter();
}

// Usage
public class ExpensiveService
{
    // The expensive resource is built at most once, on first await,
    // no matter how many callers race on GetDataAsync.
    private readonly AsyncLazy<ExpensiveResource> _resource;
    
    public ExpensiveService()
    {
        _resource = new AsyncLazy<ExpensiveResource>(InitializeResourceAsync);
    }
    
    private async Task<ExpensiveResource> InitializeResourceAsync()
    {
        // Expensive initialization (simulated with a delay here)
        await Task.Delay(5000);
        return new ExpensiveResource();
    }
    
    public async Task<string> GetDataAsync()
    {
        // AsyncLazy.GetAwaiter lets us await the wrapper directly
        var resource = await _resource;
        return resource.GetData();
    }
}

Frontend Performance

🎨 Client-Side Optimization

Bundle Optimization

// ✅ Code splitting and lazy loading
// webpack.config.js
module.exports = {
  optimization: {
    splitChunks: {
      chunks: 'all',
      cacheGroups: {
        // Everything under node_modules lands in a long-cacheable vendor bundle
        vendor: {
          test: /[\\/]node_modules[\\/]/,
          name: 'vendors',
          chunks: 'all',
        },
        // App code shared by at least two chunks is hoisted into "common"
        common: {
          name: 'common',
          minChunks: 2,
          chunks: 'all',
          enforce: true,
        },
      },
    },
  },
};

// Lazy component loading
// React renders the Suspense fallback until the chunk for
// ExpensiveComponent has been fetched and evaluated.
const LazyComponent = React.lazy(() => import('./ExpensiveComponent'));

function App() {
  return (
    <Suspense fallback={<div>Loading...</div>}>
      <LazyComponent />
    </Suspense>
  );
}

// Dynamic imports for features
// The feature module is only downloaded the first time loadFeature() runs;
// subsequent calls reuse the cached module.
async function loadFeature() {
  const { feature } = await import('./feature');
  return feature;
}

Image Optimization

// ✅ Progressive image loading
class ImageOptimizer {
  // Builds a responsive srcset covering common device widths.
  static generateSrcSet(imageName: string): string {
    const sizes = [320, 640, 1024, 1920];
    return sizes
      .map(size => `/images/${imageName}_${size}w.webp ${size}w`)
      .join(', ');
  }
  
  // Defers loading of any <img data-src="..."> until it scrolls into view.
  static lazyLoad(): void {
    const images = document.querySelectorAll('img[data-src]');
    
    if ('IntersectionObserver' in window) {
      const imageObserver = new IntersectionObserver((entries) => {
        entries.forEach(entry => {
          if (entry.isIntersecting) {
            const img = entry.target as HTMLImageElement;
            img.src = img.dataset.src!;
            img.classList.remove('lazy');
            imageObserver.unobserve(img);
          }
        });
      });
      
      images.forEach(img => imageObserver.observe(img));
    } else {
      // Fallback: without IntersectionObserver the original never assigned
      // src at all, so the images simply never loaded. Load them eagerly
      // instead — slower, but correct.
      images.forEach(el => {
        const img = el as HTMLImageElement;
        img.src = img.dataset.src!;
        img.classList.remove('lazy');
      });
    }
  }
}

// React component for optimized images
interface OptimizedImageProps {
  src: string;
  alt: string;
  className?: string;
}

// Defers setting `src` until the element is ~10% visible, then fades the
// image in once it has actually loaded.
const OptimizedImage: React.FC<OptimizedImageProps> = ({ src, alt, className }) => {
  const [isLoaded, setIsLoaded] = useState(false);
  const [inView, setInView] = useState(false);
  const imgRef = useRef<HTMLImageElement>(null);
  
  useEffect(() => {
    // One-shot observer: disconnect as soon as the image enters the viewport
    const observer = new IntersectionObserver(
      ([entry]) => {
        if (entry.isIntersecting) {
          setInView(true);
          observer.disconnect();
        }
      },
      { threshold: 0.1 }
    );
    
    if (imgRef.current) {
      observer.observe(imgRef.current);
    }
    
    return () => observer.disconnect();
  }, []);
  
  // src stays undefined (no network request) until the element is in view
  return (
    <img
      ref={imgRef}
      src={inView ? src : undefined}
      alt={alt}
      className={`${className} ${isLoaded ? 'loaded' : 'loading'}`}
      onLoad={() => setIsLoaded(true)}
      style={{
        transition: 'opacity 0.3s',
        opacity: isLoaded ? 1 : 0,
      }}
    />
  );
};

State Management Optimization

// ✅ Optimized Redux patterns
// Selector memoization
// createSelector caches the result: the filter+sort below re-runs only when
// selectItems or selectFilters return new references.
const selectExpensiveComputation = createSelector(
  [selectItems, selectFilters],
  (items, filters) => {
    // Expensive computation only runs when dependencies change
    return items.filter(item => 
      filters.every(filter => filter(item))
    ).sort((a, b) => a.priority - b.priority);
  }
);

// Normalized state structure
// Entities keyed by id (byId) plus ordered id lists (allIds) — lookups are
// O(1) and no entity is stored twice.
interface NormalizedState {
  users: {
    byId: Record<string, User>;
    allIds: string[];
  };
  posts: {
    byId: Record<string, Post>;
    allIds: string[];
    byAuthor: Record<string, string[]>; // secondary index: author id -> post ids
  };
}

// ✅ React optimization patterns
// Memoized components
// React.memo skips re-rendering when `items` and `onItemClick` are
// referentially unchanged — which is why callers must memoize the callback.
// NOTE(review): MemoizedListItem is defined elsewhere, not in this snippet.
const MemoizedList = React.memo<ListProps>(({ items, onItemClick }) => {
  return (
    <ul>
      {items.map(item => (
        <MemoizedListItem
          key={item.id}
          item={item}
          onClick={onItemClick}
        />
      ))}
    </ul>
  );
});

// Optimized callbacks
const ListContainer: React.FC = () => {
  const [items, setItems] = useState<Item[]>([]);
  
  // Memoize callback to prevent unnecessary re-renders of MemoizedList
  const handleItemClick = useCallback((id: string) => {
    setItems(prev => prev.map(item =>
      item.id === id ? { ...item, selected: !item.selected } : item
    ));
  }, []);
  
  // Debounced search
  // NOTE(review): searchTerm is not used for filtering in this snippet —
  // presumably wired up elsewhere.
  const [searchTerm, setSearchTerm] = useState('');
  const debouncedSearch = useMemo(
    () => debounce(setSearchTerm, 300),
    []
  );
  
  // Cancel any pending debounced invocation on unmount so we never call a
  // state setter on an unmounted component (the original leaked the timer).
  useEffect(() => () => debouncedSearch.cancel(), [debouncedSearch]);
  
  return (
    <div>
      <input onChange={(e) => debouncedSearch(e.target.value)} />
      <MemoizedList items={items} onItemClick={handleItemClick} />
    </div>
  );
};

Monitoring & Profiling

📊 Performance Monitoring

Application Performance Monitoring (APM)

// ✅ Custom performance tracking
public class PerformanceTracker
{
    private readonly ILogger<PerformanceTracker> _logger;
    private readonly DiagnosticSource _diagnosticSource;
    
    // Creates one DiagnosticListener per tracker instance — register this
    // class as a singleton so listeners aren't multiplied per request.
    public PerformanceTracker(ILogger<PerformanceTracker> logger)
    {
        _logger = logger;
        _diagnosticSource = new DiagnosticListener("Application.Performance");
    }
    
    // Returns a scope that times the operation; logging and the Stop
    // diagnostic event fire when the scope is disposed.
    public IDisposable Track(string operationName, object parameters = null)
    {
        return new PerformanceScope(_logger, _diagnosticSource, operationName, parameters);
    }
}

public class PerformanceScope : IDisposable
{
    private readonly ILogger _logger;
    private readonly DiagnosticSource _diagnosticSource;
    private readonly string _operationName;
    private readonly Stopwatch _stopwatch;
    private readonly Activity _activity;
    
    // Starts the stopwatch and an Activity, and emits "<operation>.Start"
    // when any listener subscribed to it.
    public PerformanceScope(
        ILogger logger,
        DiagnosticSource diagnosticSource,
        string operationName,
        object parameters)
    {
        _logger = logger;
        _diagnosticSource = diagnosticSource;
        _operationName = operationName;
        _stopwatch = Stopwatch.StartNew();
        
        _activity = new Activity(operationName);
        _activity.Start();
        
        // IsEnabled guard avoids building the payload when nobody listens
        if (_diagnosticSource.IsEnabled($"{operationName}.Start"))
        {
            _diagnosticSource.Write($"{operationName}.Start", parameters);
        }
    }
    
    // Stops timing, logs the duration, and emits "<operation>.Stop".
    // NOTE: the scope must NOT dispose _diagnosticSource — it is owned by the
    // PerformanceTracker that created this scope and is shared across all
    // scopes. The original called _diagnosticSource.Dispose() here, which
    // would kill the listener after the first tracked operation (and does not
    // even compile: DiagnosticSource itself exposes no Dispose).
    public void Dispose()
    {
        _stopwatch.Stop();
        _activity.Stop();
        
        var duration = _stopwatch.ElapsedMilliseconds;
        
        _logger.LogInformation(
            "Operation {OperationName} completed in {Duration}ms",
            _operationName,
            duration);
        
        if (_diagnosticSource.IsEnabled($"{_operationName}.Stop"))
        {
            _diagnosticSource.Write($"{_operationName}.Stop", new
            {
                Duration = duration,
                Success = true
            });
        }
    }
}

// Usage
public class UserService
{
    // NOTE(review): _repository is referenced below but its declaration and
    // the constructor are omitted in this snippet.
    private readonly PerformanceTracker _tracker;
    
    public async Task<User> GetUserAsync(int id)
    {
        // The scope logs duration and emits Start/Stop diagnostics when the
        // `using` disposes it — i.e. after the awaited lookup completes.
        using var scope = _tracker.Track("GetUser", new { UserId = id });
        
        return await _repository.GetByIdAsync(id);
    }
}

Custom Metrics Collection

// ✅ Business metrics tracking
// ✅ Business metrics tracking
/// <summary>
/// Wraps a System.Diagnostics.Metrics meter with typed instruments for
/// request counts, request durations, and the active-connection gauge.
/// </summary>
public class MetricsCollector
{
    private readonly Counter<int> _requestCounter;
    private readonly Histogram<double> _requestDuration;
    private readonly UpDownCounter<int> _activeConnections;
    
    public MetricsCollector(IMeterFactory meterFactory)
    {
        var meter = meterFactory.Create("MyApp.Business");
        
        _requestCounter = meter.CreateCounter<int>(
            "requests_total",
            "requests",
            "Total number of requests processed");
        
        _requestDuration = meter.CreateHistogram<double>(
            "request_duration",
            "seconds",
            "Request processing duration");
        
        _activeConnections = meter.CreateUpDownCounter<int>(
            "active_connections",
            "connections",
            "Number of active connections");
    }
    
    /// <summary>
    /// Records one request against the counter and duration histogram,
    /// tagged with the endpoint and a success/failure status.
    /// </summary>
    public void RecordRequest(string endpoint, TimeSpan duration, bool success)
    {
        // TagList has no string indexer; use its Add(string, object)
        // collection-initializer form (the ["key"] = value syntax from the
        // original does not compile).
        var tags = new TagList
        {
            { "endpoint", endpoint },
            { "status", success ? "success" : "failure" }
        };
        
        _requestCounter.Add(1, tags);
        _requestDuration.Record(duration.TotalSeconds, tags);
    }
    
    public void ConnectionOpened() => _activeConnections.Add(1);
    public void ConnectionClosed() => _activeConnections.Add(-1);
}

// ✅ Health metrics
// ✅ Health metrics
/// <summary>
/// Collects process-level health metrics: memory usage, GC collection
/// counts, thread-pool saturation, and average response time.
/// </summary>
public class HealthMetrics
{
    private readonly ILogger<HealthMetrics> _logger;
    
    public HealthMetrics(ILogger<HealthMetrics> logger)
    {
        _logger = logger;
    }
    
    /// <summary>Builds a snapshot health report from current process counters.</summary>
    public async Task<HealthReport> CollectAsync()
    {
        var metrics = new Dictionary<string, object>();
        
        // Memory metrics (all reported in MB)
        var process = Process.GetCurrentProcess();
        metrics["memory_working_set_mb"] = process.WorkingSet64 / 1024 / 1024;
        metrics["memory_private_mb"] = process.PrivateMemorySize64 / 1024 / 1024;
        // GetTotalMemory(false): don't force a collection just to measure
        metrics["memory_managed_mb"] = GC.GetTotalMemory(false) / 1024 / 1024;
        
        // GC metrics: cumulative collection counts per generation
        metrics["gc_gen0_collections"] = GC.CollectionCount(0);
        metrics["gc_gen1_collections"] = GC.CollectionCount(1);
        metrics["gc_gen2_collections"] = GC.CollectionCount(2);
        
        // Thread pool metrics: busy = max - currently available
        ThreadPool.GetAvailableThreads(out int availableWorkerThreads, out int availableCompletionPortThreads);
        ThreadPool.GetMaxThreads(out int maxWorkerThreads, out int maxCompletionPortThreads);
        
        metrics["threadpool_worker_threads_busy"] = maxWorkerThreads - availableWorkerThreads;
        metrics["threadpool_completion_port_threads_busy"] = maxCompletionPortThreads - availableCompletionPortThreads;
        
        // Response times
        metrics["average_response_time_ms"] = await CalculateAverageResponseTime();
        
        return new HealthReport
        {
            Status = HealthStatus.Healthy,
            Metrics = metrics,
            Timestamp = DateTimeOffset.UtcNow
        };
    }
    
    // Not async: the placeholder has nothing to await, so return a completed
    // task instead of an async method with no await (CS1998).
    private Task<double> CalculateAverageResponseTime()
    {
        // Implement based on your monitoring system
        return Task.FromResult(150.0); // Example value
    }
}

Automated Performance Testing

// ✅ Benchmark-driven development
[MemoryDiagnoser]
[SimpleJob(RuntimeMoniker.Net80)]
// BenchmarkDotNet suite comparing three search strategies over the same
// product set. NOTE: the exact shape of each benchmark body is deliberate —
// do not "clean up" the baseline into LINQ, or the comparison is meaningless.
public class SearchBenchmarks
{
    private List<Product> _products;           // 10k products built in Setup
    private Dictionary<int, Product> _productLookup;  // id -> product (unused by the benchmarks below)
    private string[] _searchTerms;             // fixed query workload
    
    [GlobalSetup]
    public void Setup()
    {
        // GenerateProducts is defined elsewhere in the guide.
        _products = GenerateProducts(10000);
        _productLookup = _products.ToDictionary(p => p.Id);
        _searchTerms = new[] { "laptop", "phone", "tablet", "watch" };
    }
    
    // Baseline: hand-rolled nested scan, O(terms * products).
    [Benchmark(Baseline = true)]
    public List<Product> LinearSearch()
    {
        var results = new List<Product>();
        foreach (var term in _searchTerms)
        {
            foreach (var product in _products)
            {
                if (product.Name.Contains(term, StringComparison.OrdinalIgnoreCase))
                {
                    results.Add(product);
                }
            }
        }
        return results;
    }
    
    // Same algorithm expressed via LINQ — measures the abstraction overhead
    // relative to the baseline.
    [Benchmark]
    public List<Product> LinqSearch()
    {
        var results = new List<Product>();
        foreach (var term in _searchTerms)
        {
            results.AddRange(_products.Where(p => 
                p.Name.Contains(term, StringComparison.OrdinalIgnoreCase)));
        }
        return results;
    }
    
    // Pre-built index lookup — SearchIndex is assumed to be populated
    // elsewhere before the run; TODO confirm it indexes the same 10k set,
    // otherwise results are not comparable to the scans above.
    [Benchmark]
    public List<Product> IndexedSearch()
    {
        // Assuming pre-built search index
        var results = new List<Product>();
        foreach (var term in _searchTerms)
        {
            // Fast indexed lookup
            results.AddRange(SearchIndex.Search(term));
        }
        return results;
    }
}

// ✅ Load testing automation
public class LoadTestRunner
{
    private readonly HttpClient _httpClient;
    private readonly ILogger<LoadTestRunner> _logger;
    
    /// <summary>
    /// Hammers <paramref name="endpoint"/> with <paramref name="concurrentUsers"/>
    /// virtual users for <paramref name="duration"/>, then returns aggregate
    /// latency/error statistics.
    /// </summary>
    public async Task<LoadTestResult> RunLoadTestAsync(
        string endpoint,
        int concurrentUsers,
        TimeSpan duration)
    {
        var results = new ConcurrentBag<RequestResult>();
        // Cancels automatically when the test duration elapses.
        using var cancellationTokenSource = new CancellationTokenSource(duration);
        
        // One task per virtual user; concurrency is already bounded by the
        // task count, so no extra semaphore throttle is needed (the original's
        // N-permit semaphore shared by N tasks never blocked anyone).
        var tasks = Enumerable.Range(0, concurrentUsers).Select(async _ =>
        {
            while (!cancellationTokenSource.Token.IsCancellationRequested)
            {
                var result = await MakeRequestAsync(endpoint);
                results.Add(result);
                
                try
                {
                    // Pacing delay between requests. When the duration elapses
                    // this throws TaskCanceledException — swallow it and end
                    // this user's loop, instead of letting it propagate through
                    // Task.WhenAll and fail the whole run (the original bug).
                    await Task.Delay(100, cancellationTokenSource.Token);
                }
                catch (OperationCanceledException)
                {
                    break;
                }
            }
        }).ToList();
        
        await Task.WhenAll(tasks);
        
        return AnalyzeResults(results.ToList());
    }
    
    // Issues a single GET and captures success/latency; never throws —
    // transport failures are recorded as failed results.
    private async Task<RequestResult> MakeRequestAsync(string endpoint)
    {
        var stopwatch = Stopwatch.StartNew();
        try
        {
            var response = await _httpClient.GetAsync(endpoint);
            stopwatch.Stop();
            
            return new RequestResult
            {
                Success = response.IsSuccessStatusCode,
                Duration = stopwatch.Elapsed,
                StatusCode = response.StatusCode
            };
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return new RequestResult
            {
                Success = false,
                Duration = stopwatch.Elapsed,
                Error = ex.Message
            };
        }
    }
    
    // Aggregates raw request results into the summary report.
    // Percentiles are computed over successful requests only.
    private LoadTestResult AnalyzeResults(List<RequestResult> results)
    {
        var successfulRequests = results.Where(r => r.Success).ToList();
        var durations = successfulRequests.Select(r => r.Duration.TotalMilliseconds).ToList();
        
        return new LoadTestResult
        {
            TotalRequests = results.Count,
            SuccessfulRequests = successfulRequests.Count,
            FailedRequests = results.Count - successfulRequests.Count,
            AverageResponseTime = durations.Any() ? durations.Average() : 0,
            MedianResponseTime = CalculatePercentile(durations, 50),
            NinetyFifthPercentile = CalculatePercentile(durations, 95),
            NinetyNinthPercentile = CalculatePercentile(durations, 99)
        };
    }
    
    // Nearest-rank percentile; returns 0 for an empty sample.
    private double CalculatePercentile(List<double> values, int percentile)
    {
        if (!values.Any()) return 0;
        
        // Sort a copy so the caller's list is not mutated as a side effect.
        var sorted = values.OrderBy(v => v).ToList();
        var index = (int)Math.Ceiling(sorted.Count * percentile / 100.0) - 1;
        return sorted[Math.Max(0, Math.Min(index, sorted.Count - 1))];
    }
}


Performance Optimization Checklist

✅ Backend Optimization

✅ Frontend Optimization

✅ Infrastructure Optimization


Tags

#performance #optimization #caching #database #profiling #scalability #monitoring


This performance guide should be regularly updated based on new optimization techniques and monitoring insights.