Memoization: Accelerating Function Execution with Caching
Why Memoization is Needed
Imagine you're solving a complex math problem. The first time you calculate it, you need to carefully work through it with pen and paper, taking 10 minutes. But if someone asks you the same question a second or third time, you don't need to recalculate—you already know the answer and can retrieve it directly from memory.
Memoization is this idea applied in programming: when a function is called with the same arguments, instead of recalculating, it directly returns the previously cached result. This technique is particularly suitable for handling:
- Expensive computations: Such as complex mathematical operations
- Repeated calls: Functions that are called multiple times with the same arguments
- Pure functions: Same input always produces the same output
Basic Implementation
1. Simplest Memoization
// Unoptimized Fibonacci
// Naive recursive Fibonacci — deliberately exponential-time, used as the
// slow baseline that memoization will later accelerate.
function fibonacci(n) {
  return n <= 1 ? n : fibonacci(n - 1) + fibonacci(n - 2);
}
console.time("fib(40)");
console.log(fibonacci(40)); // 102334155
console.timeEnd("fib(40)"); // ~1000ms (very slow!)
// Manual memoization version
// Shared memo table mapping n -> fib(n) for every value computed so far.
const cache = {};

// Fibonacci with manual memoization: identical results to the naive
// version, but each distinct n is computed at most once (O(n) overall).
function fibonacciMemo(n) {
  // Served straight from the memo table when available.
  if (n in cache) {
    return cache[n];
  }
  // Base cases fib(0) = 0 and fib(1) = 1 are cheap; no need to cache them.
  if (n <= 1) {
    return n;
  }
  // Compute once, remember forever.
  cache[n] = fibonacciMemo(n - 1) + fibonacciMemo(n - 2);
  return cache[n];
}
console.time("fibMemo(40)");
console.log(fibonacciMemo(40)); // 102334155
console.timeEnd("fibMemo(40)"); // ~1ms (1000 times faster!)
This simple change brings huge performance improvements. Let's see why:
// fibonacci(5) call tree (unoptimized)
fib(5)
/ \
fib(4) fib(3)
/ \ / \
fib(3) fib(2) fib(2) fib(1)
/ \ / \ / \
fib(2) fib(1) ... ... ... ...
// Notice fib(3) is calculated 2 times, fib(2) is calculated 3 times!
// After memoization, each value is calculated only once
2. Generic Memoization Function
/**
 * Generic memoizer: wraps `fn` so repeated calls with the same arguments
 * (compared via their JSON serialization) return the cached result.
 * Logs every hit and miss so the caching behaviour is visible.
 */
function memoize(fn) {
  const cache = new Map();
  return function (...args) {
    // The serialized argument list is the cache key.
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      console.log(`Cache hit: ${key}`);
    } else {
      console.log(`Calculating new value: ${key}`);
      cache.set(key, fn(...args));
    }
    return cache.get(key);
  };
}
// Apply to any function
// Plain addition with a console log so it is obvious when the real
// function runs versus when the memoized wrapper serves a cached value.
const add = (a, b) => {
  console.log("Executing addition");
  const sum = a + b;
  return sum;
};
const memoizedAdd = memoize(add);
console.log(memoizedAdd(2, 3)); // Calculating new value: [2,3] Executing addition => 5
console.log(memoizedAdd(2, 3)); // Cache hit: [2,3] => 5
console.log(memoizedAdd(5, 7)); // Calculating new value: [5,7] Executing addition => 12
Advanced Memoization Techniques
1. Custom Cache Key Generator
/**
 * Memoize `fn`, deriving the cache key with a caller-supplied generator
 * when one is given, otherwise from the JSON-serialized argument list.
 * Useful for object arguments where only part of the object identifies
 * the call (e.g. an id field).
 */
function memoizeWithKeyGen(fn, keyGenerator) {
  const cache = new Map();
  return function (...args) {
    // Custom key wins; fall back to full-argument serialization.
    const key = keyGenerator ? keyGenerator(...args) : JSON.stringify(args);
    if (!cache.has(key)) {
      cache.set(key, fn(...args));
    }
    return cache.get(key);
  };
}
// When handling object arguments, you can customize key generation
// Simulated fetch: returns a copy of `user` stamped with the retrieval
// time. The log makes real "fetches" distinguishable from cache hits.
const getUserData = (user) => {
  console.log("Fetching user data...");
  const snapshot = { ...user };
  snapshot.fetched = new Date();
  return snapshot;
};
const memoizedGetUserData = memoizeWithKeyGen(
getUserData,
(user) => user.id // Use only id as key
);
const user1 = { id: 1, name: "John" };
const user2 = { id: 1, name: "John Doe" }; // Different name
console.log(memoizedGetUserData(user1)); // Fetching user data...
console.log(memoizedGetUserData(user2)); // Uses cache (because id is the same)
2. LRU (Least Recently Used) Cache
Limit cache size, remove least recently used entries:
/**
 * Fixed-capacity cache that evicts the least-recently-used entry.
 * Relies on Map preserving insertion order: every access re-inserts the
 * key, so the first key in iteration order is always the coldest one.
 */
class LRUCache {
  constructor(maxSize = 100) {
    this.maxSize = maxSize;
    this.cache = new Map();
  }

  // Returns the value for `key` (refreshing its recency) or undefined.
  get(key) {
    if (!this.cache.has(key)) return undefined;
    const value = this.cache.get(key);
    // Re-insert so this key becomes the most recently used.
    this.cache.delete(key);
    this.cache.set(key, value);
    return value;
  }

  // Inserts or overwrites `key`, evicting the oldest entry when full.
  set(key, value) {
    if (this.cache.has(key)) this.cache.delete(key);
    if (this.cache.size >= this.maxSize) {
      // Oldest entry = first key in insertion order.
      const oldest = this.cache.keys().next().value;
      this.cache.delete(oldest);
    }
    this.cache.set(key, value);
  }

  has(key) {
    return this.cache.has(key);
  }
}
// Memoize `fn` behind a bounded LRU cache (default 100 entries), so
// memory stays capped while the hottest results remain cached.
function memoizeWithLRU(fn, maxSize = 100) {
  const cache = new LRUCache(maxSize);
  return function (...args) {
    const key = JSON.stringify(args);
    if (!cache.has(key)) {
      cache.set(key, fn(...args));
    }
    // get() also refreshes the entry's recency.
    return cache.get(key);
  };
}
// Usage example: cache only last 5 calls
const expensiveCalc = memoizeWithLRU(
(n) => {
let sum = 0;
for (let i = 0; i < n; i++) {
sum += i;
}
return sum;
},
5 // Cache maximum 5 results
);
3. Cache with Expiration Time
/**
 * Memoize `fn` with a time-to-live: cached entries older than `ttl`
 * milliseconds are discarded and recomputed on the next call.
 * Defaults to a 60-second TTL.
 */
function memoizeWithTTL(fn, ttl = 60000) {
  const cache = new Map();
  const timestamps = new Map();
  return function (...args) {
    const key = JSON.stringify(args);
    const now = Date.now();
    // Fresh = cached AND younger than the TTL window.
    const isFresh = cache.has(key) && now - timestamps.get(key) < ttl;
    if (isFresh) {
      console.log("Cache hit:", key);
      return cache.get(key);
    }
    if (cache.has(key)) {
      // Entry exists but is stale — drop it before recomputing.
      console.log("Cache expired:", key);
      cache.delete(key);
      timestamps.delete(key);
    }
    console.log("Calculating new value:", key);
    const result = fn(...args);
    cache.set(key, result);
    timestamps.set(key, now);
    return result;
  };
}
// Simulate API call
// Simulated weather API: a random temperature plus the request timestamp,
// so repeated real calls visibly return different data.
const fetchWeather = (city) => {
  console.log(`Fetching weather for ${city}...`);
  const temp = Math.random() * 30;
  return { city, temp, time: new Date() };
};
const cachedFetchWeather = memoizeWithTTL(fetchWeather, 5000); // Cache for 5 seconds
console.log(cachedFetchWeather("Beijing")); // Calculate new value
console.log(cachedFetchWeather("Beijing")); // Cache hit
setTimeout(() => {
console.log(cachedFetchWeather("Beijing")); // Cache expired, fetch again
}, 6000);
4. Conditional Memoization
Only cache calls that meet specific conditions:
/**
 * Memoize `fn` only for calls where `shouldCache(...args)` is truthy;
 * all other calls go straight through and are never stored. Useful when
 * only a subset of inputs is worth the memory cost of caching.
 */
function memoizeIf(fn, shouldCache) {
  const cache = new Map();
  return function (...args) {
    // Calls deemed cheap bypass the cache entirely.
    if (!shouldCache(...args)) {
      return fn(...args);
    }
    const key = JSON.stringify(args);
    if (!cache.has(key)) {
      cache.set(key, fn(...args));
    }
    return cache.get(key);
  };
}
// Only cache calculations greater than 10
const heavyCalc = memoizeIf(
(n) => {
let result = 0;
for (let i = 0; i < n; i++) {
result += Math.sqrt(i);
}
return result;
},
(n) => n > 10 // Only cache results with parameters greater than 10
);
console.log(heavyCalc(5)); // Not cached
console.log(heavyCalc(100)); // Cached
console.log(heavyCalc(100)); // Use cache
Real-World Application Scenarios
1. React Component Optimization
// React.memo is a form of memoization
import React, { useMemo, useCallback } from "react";
// Demonstrates React's built-in memoization hooks on a list component.
// Props: `data` (array of items with id/name/category) and `filter`
// (the category string to keep).
function ExpensiveComponent({ data, filter }) {
  // Memoize expensive computation: the filter only re-runs when `data`
  // or `filter` changes identity, not on every render.
  const filteredData = useMemo(() => {
    console.log("Filtering data...");
    return data.filter((item) => item.category === filter);
  }, [data, filter]); // Only recalculate when data or filter changes
  // Memoize callback function: child <div>s receive a stable handler
  // reference across renders (empty deps — created once).
  const handleClick = useCallback(() => {
    console.log("Handling click");
  }, []); // Callback function won't be recreated on every render
  return (
    <div>
      {filteredData.map((item) => (
        <div key={item.id} onClick={handleClick}>
          {item.name}
        </div>
      ))}
    </div>
  );
}
// Memoize entire component
const MemoizedComponent = React.memo(ExpensiveComponent);
2. API Request Caching
/**
 * Minimal HTTP client with response caching and request de-duplication:
 * - identical GETs within `options.maxAge` ms are served from cache;
 * - concurrent identical GETs share one in-flight request.
 *
 * Fixes over the previous version:
 * - a failed fetch used to leave its rejected promise in
 *   `pendingRequests` forever, so every later call for the same key
 *   re-received the stale rejection; the marker is now always cleared.
 * - `maxAge` uses `??` so an explicit `maxAge: 0` disables caching
 *   instead of silently becoming 60 seconds.
 */
class APIClient {
  constructor() {
    this.cache = new Map();           // cacheKey -> { data, timestamp }
    this.pendingRequests = new Map(); // cacheKey -> in-flight Promise
  }

  async get(url, options = {}) {
    const cacheKey = `${url}:${JSON.stringify(options)}`;

    // Serve from cache while the entry is still fresh.
    if (this.cache.has(cacheKey)) {
      const { data, timestamp } = this.cache.get(cacheKey);
      const maxAge = options.maxAge ?? 60000;
      if (Date.now() - timestamp < maxAge) {
        console.log("Using cache:", url);
        return data;
      }
    }

    // Join an identical request that is already in flight.
    if (this.pendingRequests.has(cacheKey)) {
      console.log("Waiting for ongoing request:", url);
      return this.pendingRequests.get(cacheKey);
    }

    // Make new request
    console.log("Making new request:", url);
    const promise = fetch(url, options)
      .then((res) => res.json())
      .then((data) => {
        // Store in cache (only successful responses reach this point).
        this.cache.set(cacheKey, { data, timestamp: Date.now() });
        return data;
      })
      .finally(() => {
        // Always remove the in-flight marker, on success AND failure.
        this.pendingRequests.delete(cacheKey);
      });

    // Mark as ongoing so concurrent callers can piggyback on it.
    this.pendingRequests.set(cacheKey, promise);
    return promise;
  }

  clearCache() {
    this.cache.clear();
  }
}
// Usage
const api = new APIClient();
// These three calls will share the same request
Promise.all([
api.get("/api/users", { maxAge: 5000 }),
api.get("/api/users", { maxAge: 5000 }),
api.get("/api/users", { maxAge: 5000 }),
]).then((results) => {
console.log("All requests completed");
});
3. Complex Computation Caching
// Calculate shortest path between two points
// Calculate shortest path between two points.
// Placeholder for a real Dijkstra implementation — returns a fixed route
// so the memoization behaviour can be demonstrated cheaply.
function findShortestPath(graph, start, end) {
  console.log(`Calculating shortest path from ${start} to ${end}...`);
  const path = [start, "A", "B", end];
  return { path, distance: 100 };
}
const memoizedFindPath = memoize(findShortestPath);
// In map applications, users might query the same route multiple times
console.log(memoizedFindPath(graph, "Beijing", "Shanghai")); // Calculate
console.log(memoizedFindPath(graph, "Beijing", "Shanghai")); // Cache
console.log(memoizedFindPath(graph, "Shanghai", "Guangzhou")); // Calculate
console.log(memoizedFindPath(graph, "Beijing", "Shanghai")); // Cache
4. Data Transformation Pipeline Caching
// Complex data processing pipeline
const processData = memoize((data) => {
  console.log("Processing data...");
  // Keep only active records.
  const active = data.filter((item) => item.active);
  // Add a display name and a computed age to each record.
  const enriched = active.map((item) => ({
    ...item,
    fullName: `${item.firstName} ${item.lastName}`,
    age: new Date().getFullYear() - item.birthYear,
  }));
  // Oldest first (descending age).
  return enriched.sort((a, b) => b.age - a.age);
});
const rawData = [
{ id: 1, firstName: "John", lastName: "Doe", birthYear: 1990, active: true },
{
id: 2,
firstName: "Jane",
lastName: "Smith",
birthYear: 1985,
active: true,
},
{
id: 3,
firstName: "Bob",
lastName: "Johnson",
birthYear: 1995,
active: false,
},
];
console.log(processData(rawData)); // Processing data...
console.log(processData(rawData)); // Use cache
Performance Analysis
Let's compare performance differences before and after memoization:
// Performance testing tool
// Runs `fn` `iterations` times (default 1000) and returns the total
// elapsed wall-clock time in milliseconds.
function benchmark(fn, iterations = 1000) {
  const start = performance.now();
  let remaining = iterations;
  while (remaining-- > 0) {
    fn();
  }
  return performance.now() - start;
}
// Test function: calculate factorial
// n! computed recursively; factorial(0) and factorial(1) are both 1.
function factorial(n) {
  return n <= 1 ? 1 : n * factorial(n - 1);
}
const memoizedFactorial = memoize(factorial);
// Benchmark
console.log("=== Performance Comparison ===");
console.log("Non-memoized:");
console.log(benchmark(() => factorial(20)));
console.log("Memoized (first time):");
console.log(benchmark(() => memoizedFactorial(20)));
console.log("Memoized (cached):");
console.log(benchmark(() => memoizedFactorial(20)));
// Typical results:
// Non-memoized: ~15ms
// Memoized (first): ~15ms
// Memoized (cached): ~0.1ms (150 times faster!)
Considerations and Best Practices
1. When to Use Memoization
✅ Scenarios suitable for memoization:
- Pure functions (same input always produces same output)
- Expensive computations
- Functions that are called repeatedly
- Limited parameter space
❌ Scenarios not suitable for memoization:
- Functions with side effects
- Very large parameter space (will consume lots of memory)
- Rarely repeated calls
- Fast computations themselves
// ❌ Not suitable for memoization - has side effects
// Anti-example: performs a network side effect (a POST), so memoizing it
// would silently skip real writes. Fire-and-forget by design in this demo.
function saveUser(user) {
  const body = JSON.stringify(user);
  fetch("/api/users", { method: "POST", body });
}
// ❌ Not suitable for memoization - returns random values
// Anti-example: output is non-deterministic, so a cache would freeze the
// first value forever.
function random() {
  const value = Math.random();
  return value;
}
// ✅ Suitable for memoization - pure function, expensive computation
function isPrime(n) {
if (n <= 1) return false;
for (let i = 2; i <= Math.sqrt(n); i++) {
if (n % i === 0) return false;
}
return true;
}2. Memory Management
// Monitor cache size
/**
 * Memoize `fn` while capping the cache at `maxSize` entries; exposes
 * `cache`, `clearCache()` and `getCacheSize()` so callers can monitor
 * and manage memory usage.
 */
function memoizeWithMonitoring(fn, maxSize = 1000) {
  const cache = new Map();

  const memoized = function (...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      return cache.get(key);
    }
    const result = fn(...args);
    // Evict the oldest entry once the cap is reached (Map iterates in
    // insertion order, so the first key is the earliest one).
    if (cache.size >= maxSize) {
      console.warn(`Cache full (${maxSize}), clearing earliest entries`);
      cache.delete(cache.keys().next().value);
    }
    cache.set(key, result);
    return result;
  };

  // Management hooks for inspection and manual cleanup.
  memoized.cache = cache;
  memoized.clearCache = () => cache.clear();
  memoized.getCacheSize = () => cache.size;
  return memoized;
}
const fn = memoizeWithMonitoring(expensiveCalc, 100);
// Monitor and manage
console.log("Cache size:", fn.getCacheSize());
fn.clearCache(); // Manual cleanup
3. Debugging Support
/**
 * Memoize `fn` while logging and counting cache hits and misses.
 * `memoized.getStats()` returns { hits, misses, hitRate, cacheSize }.
 * Fix: hitRate was NaN (0/0) when getStats() was called before any
 * invocation; it now reports 0 in that case.
 */
function memoizeWithDebug(fn, name = fn.name) {
  const cache = new Map();
  let hits = 0;
  let misses = 0;
  const memoized = function (...args) {
    const key = JSON.stringify(args);
    if (cache.has(key)) {
      hits++;
      console.log(`[${name}] Cache hit (${hits}/${hits + misses})`);
      return cache.get(key);
    }
    misses++;
    console.log(`[${name}] Cache miss (${hits}/${hits + misses})`);
    const result = fn(...args);
    cache.set(key, result);
    return result;
  };
  memoized.getStats = () => {
    const total = hits + misses;
    return {
      hits,
      misses,
      // Guard the division: 0 calls means a 0 hit rate, not NaN.
      hitRate: total === 0 ? 0 : hits / total,
      cacheSize: cache.size,
    };
  };
  return memoized;
}
const fn = memoizeWithDebug(expensiveFunction, "myFunc");
// View statistics after usage
console.log(fn.getStats());
// { hits: 15, misses: 5, hitRate: 0.75, cacheSize: 5 }
Summary
Memoization is a powerful performance optimization technique that avoids repeated computations by caching function results. Key takeaways:
- Simple principle: Use space for time, cache results of expensive computations
- Applicable scenarios: Pure functions, expensive computations, repeated calls
- Advanced features: LRU cache, TTL, conditional caching
- Real applications: React optimization, API caching, complex computations
- Considerations: Memory management, only use for pure functions
Mastering memoization techniques can bring order-of-magnitude performance improvements to your applications, especially when handling complex computations and data transformations. But don't optimize prematurely—ensure code correctness first, then apply memoization based on actual performance bottlenecks.