feat: add Redis caching for recipe queries with automatic invalidation
All checks were successful
CI / update (push) Successful in 13s
All checks were successful
CI / update (push) Successful in 13s
Implements Redis caching layer for recipe endpoints to reduce MongoDB load and improve response times:

- Install ioredis for Redis client with TypeScript support
- Create cache.ts with namespaced keys (homepage: prefix) to avoid conflicts with other Redis applications
- Add caching to recipe query endpoints (all_brief, by tag, in_season) with 1-hour TTL
- Implement automatic cache invalidation on recipe create/edit/delete operations
- Cache recipes before randomization to maximize cache reuse while maintaining random order per request
- Add graceful fallback to MongoDB if Redis is unavailable
- Update .env.example with Redis configuration (REDIS_HOST, REDIS_PORT)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
311
src/lib/server/cache.ts
Normal file
311
src/lib/server/cache.ts
Normal file
@@ -0,0 +1,311 @@
|
||||
import Redis from 'ioredis';
|
||||
|
||||
// Key prefix for namespace isolation
|
||||
const KEY_PREFIX = 'homepage:';
|
||||
|
||||
// Redis client configuration
|
||||
const redis = new Redis({
|
||||
host: process.env.REDIS_HOST || 'localhost',
|
||||
port: parseInt(process.env.REDIS_PORT || '6379'),
|
||||
// Reconnection strategy: exponential backoff with max 2 seconds
|
||||
retryStrategy: (times) => Math.min(times * 50, 2000),
|
||||
// Lazy connect to avoid blocking startup
|
||||
lazyConnect: true,
|
||||
// Connection timeout
|
||||
connectTimeout: 10000,
|
||||
// Enable offline queue to buffer commands during reconnection
|
||||
enableOfflineQueue: true,
|
||||
});
|
||||
|
||||
// Track connection status
|
||||
let isConnected = false;
|
||||
let isConnecting = false;
|
||||
|
||||
// Graceful connection with error handling
|
||||
async function ensureConnection(): Promise<boolean> {
|
||||
if (isConnected) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isConnecting) {
|
||||
// Wait for ongoing connection attempt
|
||||
return new Promise((resolve) => {
|
||||
const checkInterval = setInterval(() => {
|
||||
if (isConnected || !isConnecting) {
|
||||
clearInterval(checkInterval);
|
||||
resolve(isConnected);
|
||||
}
|
||||
}, 100);
|
||||
});
|
||||
}
|
||||
|
||||
isConnecting = true;
|
||||
try {
|
||||
await redis.connect();
|
||||
isConnected = true;
|
||||
console.log('[Redis] Connected successfully');
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error('[Redis] Connection failed:', err);
|
||||
isConnected = false;
|
||||
return false;
|
||||
} finally {
|
||||
isConnecting = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Handle connection events
|
||||
redis.on('connect', () => {
|
||||
isConnected = true;
|
||||
console.log('[Redis] Connected');
|
||||
});
|
||||
|
||||
redis.on('ready', () => {
|
||||
isConnected = true;
|
||||
console.log('[Redis] Ready');
|
||||
});
|
||||
|
||||
redis.on('error', (err) => {
|
||||
console.error('[Redis] Error:', err);
|
||||
});
|
||||
|
||||
redis.on('close', () => {
|
||||
isConnected = false;
|
||||
console.log('[Redis] Connection closed');
|
||||
});
|
||||
|
||||
redis.on('reconnecting', () => {
|
||||
console.log('[Redis] Reconnecting...');
|
||||
});
|
||||
|
||||
// Helper function to add prefix to keys
|
||||
function prefixKey(key: string): string {
|
||||
return `${KEY_PREFIX}${key}`;
|
||||
}
|
||||
|
||||
// Helper function to add prefix to multiple keys
|
||||
function prefixKeys(keys: string[]): string[] {
|
||||
return keys.map(prefixKey);
|
||||
}
|
||||
|
||||
/**
|
||||
* Cache wrapper with automatic key prefixing and error handling
|
||||
*/
|
||||
export const cache = {
|
||||
/**
|
||||
* Get a value from cache
|
||||
*/
|
||||
async get(key: string): Promise<string | null> {
|
||||
if (!(await ensureConnection())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return await redis.get(prefixKey(key));
|
||||
} catch (err) {
|
||||
console.error(`[Redis] GET error for key "${key}":`, err);
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Set a value in cache with optional TTL (in seconds)
|
||||
*/
|
||||
async set(key: string, value: string, ttl?: number): Promise<boolean> {
|
||||
if (!(await ensureConnection())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
if (ttl) {
|
||||
await redis.setex(prefixKey(key), ttl, value);
|
||||
} else {
|
||||
await redis.set(prefixKey(key), value);
|
||||
}
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error(`[Redis] SET error for key "${key}":`, err);
|
||||
return false;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete one or more keys from cache
|
||||
*/
|
||||
async del(...keys: string[]): Promise<number> {
|
||||
if (!(await ensureConnection())) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
try {
|
||||
const prefixedKeys = prefixKeys(keys);
|
||||
return await redis.del(...prefixedKeys);
|
||||
} catch (err) {
|
||||
console.error(`[Redis] DEL error for keys "${keys.join(', ')}":`, err);
|
||||
return 0;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete all keys matching a pattern (uses SCAN for safety)
|
||||
* Pattern should NOT include the prefix (it will be added automatically)
|
||||
*/
|
||||
async delPattern(pattern: string): Promise<number> {
|
||||
if (!(await ensureConnection())) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
try {
|
||||
const prefixedPattern = prefixKey(pattern);
|
||||
const keys: string[] = [];
|
||||
let cursor = '0';
|
||||
|
||||
// Use SCAN to safely iterate through keys
|
||||
do {
|
||||
const [nextCursor, matchedKeys] = await redis.scan(
|
||||
cursor,
|
||||
'MATCH',
|
||||
prefixedPattern,
|
||||
'COUNT',
|
||||
100
|
||||
);
|
||||
cursor = nextCursor;
|
||||
keys.push(...matchedKeys);
|
||||
} while (cursor !== '0');
|
||||
|
||||
if (keys.length > 0) {
|
||||
return await redis.del(...keys);
|
||||
}
|
||||
return 0;
|
||||
} catch (err) {
|
||||
console.error(`[Redis] DEL PATTERN error for pattern "${pattern}":`, err);
|
||||
return 0;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Redis Set operations for managing sets (e.g., user favorites)
|
||||
*/
|
||||
sets: {
|
||||
/**
|
||||
* Add members to a set
|
||||
*/
|
||||
async add(key: string, ...members: string[]): Promise<number> {
|
||||
if (!(await ensureConnection())) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
try {
|
||||
return await redis.sadd(prefixKey(key), ...members);
|
||||
} catch (err) {
|
||||
console.error(`[Redis] SADD error for key "${key}":`, err);
|
||||
return 0;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Remove members from a set
|
||||
*/
|
||||
async remove(key: string, ...members: string[]): Promise<number> {
|
||||
if (!(await ensureConnection())) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
try {
|
||||
return await redis.srem(prefixKey(key), ...members);
|
||||
} catch (err) {
|
||||
console.error(`[Redis] SREM error for key "${key}":`, err);
|
||||
return 0;
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Get all members of a set
|
||||
*/
|
||||
async members(key: string): Promise<string[]> {
|
||||
if (!(await ensureConnection())) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
return await redis.smembers(prefixKey(key));
|
||||
} catch (err) {
|
||||
console.error(`[Redis] SMEMBERS error for key "${key}":`, err);
|
||||
return [];
|
||||
}
|
||||
},
|
||||
|
||||
/**
|
||||
* Check if a member exists in a set
|
||||
*/
|
||||
async isMember(key: string, member: string): Promise<boolean> {
|
||||
if (!(await ensureConnection())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await redis.sismember(prefixKey(key), member);
|
||||
return result === 1;
|
||||
} catch (err) {
|
||||
console.error(`[Redis] SISMEMBER error for key "${key}":`, err);
|
||||
return false;
|
||||
}
|
||||
},
|
||||
},
|
||||
|
||||
/**
|
||||
* Get cache statistics
|
||||
*/
|
||||
async getStats(): Promise<{ hits: number; misses: number; hitRate: string } | null> {
|
||||
if (!(await ensureConnection())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const info = await redis.info('stats');
|
||||
const hitsMatch = info.match(/keyspace_hits:(\d+)/);
|
||||
const missesMatch = info.match(/keyspace_misses:(\d+)/);
|
||||
|
||||
const hits = hitsMatch ? parseInt(hitsMatch[1]) : 0;
|
||||
const misses = missesMatch ? parseInt(missesMatch[1]) : 0;
|
||||
const total = hits + misses;
|
||||
const hitRate = total > 0 ? ((hits / total) * 100).toFixed(2) : '0.00';
|
||||
|
||||
return { hits, misses, hitRate: `${hitRate}%` };
|
||||
} catch (err) {
|
||||
console.error('[Redis] Error getting stats:', err);
|
||||
return null;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Graceful shutdown
|
||||
process.on('SIGTERM', () => {
|
||||
redis.quit();
|
||||
});
|
||||
|
||||
process.on('SIGINT', () => {
|
||||
redis.quit();
|
||||
});
|
||||
|
||||
/**
|
||||
* Helper function to invalidate all recipe caches
|
||||
* Call this after recipe create/update/delete operations
|
||||
*/
|
||||
export async function invalidateRecipeCaches(): Promise<void> {
|
||||
try {
|
||||
// Clear all recipe-related caches in parallel
|
||||
await Promise.all([
|
||||
cache.del('recipes:all_brief'),
|
||||
cache.delPattern('recipes:tag:*'),
|
||||
cache.delPattern('recipes:in_season:*'),
|
||||
cache.delPattern('recipes:category:*'),
|
||||
cache.delPattern('recipes:icon:*'),
|
||||
]);
|
||||
console.log('[Cache] Invalidated all recipe caches');
|
||||
} catch (err) {
|
||||
console.error('[Cache] Error invalidating recipe caches:', err);
|
||||
}
|
||||
}
|
||||
|
||||
export default cache;
|
||||
@@ -2,6 +2,7 @@ import type { RequestHandler } from '@sveltejs/kit';
|
||||
import { Recipe } from '../../../../models/Recipe';
|
||||
import { dbConnect } from '../../../../utils/db';
|
||||
import { error } from '@sveltejs/kit';
|
||||
import { invalidateRecipeCaches } from '$lib/server/cache';
|
||||
// header: use for bearer token for now
|
||||
// recipe json in body
|
||||
export const POST: RequestHandler = async ({request, cookies, locals}) => {
|
||||
@@ -19,6 +20,8 @@ export const POST: RequestHandler = async ({request, cookies, locals}) => {
|
||||
await dbConnect();
|
||||
try{
|
||||
await Recipe.create(recipe_json);
|
||||
// Invalidate recipe caches after successful creation
|
||||
await invalidateRecipeCaches();
|
||||
} catch(e){
|
||||
throw error(400, e)
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import { UserFavorites } from '../../../../models/UserFavorites';
|
||||
import { dbConnect } from '../../../../utils/db';
|
||||
import type {RecipeModelType} from '../../../../types/types';
|
||||
import { error } from '@sveltejs/kit';
|
||||
import { invalidateRecipeCaches } from '$lib/server/cache';
|
||||
// header: use for bearer token for now
|
||||
// recipe json in body
|
||||
export const POST: RequestHandler = async ({request, locals}) => {
|
||||
@@ -69,6 +70,9 @@ export const POST: RequestHandler = async ({request, locals}) => {
|
||||
// Delete the recipe
|
||||
await Recipe.findOneAndDelete({short_name: short_name});
|
||||
|
||||
// Invalidate recipe caches after successful deletion
|
||||
await invalidateRecipeCaches();
|
||||
|
||||
return new Response(JSON.stringify({msg: "Deleted recipe successfully"}),{
|
||||
status: 200,
|
||||
});
|
||||
|
||||
@@ -6,6 +6,7 @@ import { error } from '@sveltejs/kit';
|
||||
import { rename } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { existsSync } from 'fs';
|
||||
import { invalidateRecipeCaches } from '$lib/server/cache';
|
||||
|
||||
// header: use for bearer token for now
|
||||
// recipe json in body
|
||||
@@ -46,6 +47,10 @@ export const POST: RequestHandler = async ({request, locals}) => {
|
||||
}
|
||||
|
||||
await Recipe.findOneAndUpdate({short_name: message.old_short_name }, recipe_json);
|
||||
|
||||
// Invalidate recipe caches after successful update
|
||||
await invalidateRecipeCaches();
|
||||
|
||||
return new Response(JSON.stringify({msg: "Edited recipe successfully"}),{
|
||||
status: 200,
|
||||
});
|
||||
|
||||
@@ -3,9 +3,27 @@ import type { BriefRecipeType } from '../../../../../types/types';
|
||||
import { Recipe } from '../../../../../models/Recipe'
|
||||
import { dbConnect } from '../../../../../utils/db';
|
||||
import { rand_array } from '$lib/js/randomize';
|
||||
import cache from '$lib/server/cache';
|
||||
|
||||
export const GET: RequestHandler = async ({params}) => {
|
||||
await dbConnect();
|
||||
let found_brief = rand_array(await Recipe.find({}, 'name short_name tags category icon description season dateModified').lean()) as BriefRecipeType[];
|
||||
return json(JSON.parse(JSON.stringify(found_brief)));
|
||||
const cacheKey = 'recipes:all_brief';
|
||||
|
||||
// Try cache first
|
||||
let recipes: BriefRecipeType[] | null = null;
|
||||
const cached = await cache.get(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
recipes = JSON.parse(cached);
|
||||
} else {
|
||||
// Cache miss - fetch from DB
|
||||
await dbConnect();
|
||||
recipes = await Recipe.find({}, 'name short_name tags category icon description season dateModified').lean() as BriefRecipeType[];
|
||||
|
||||
// Store in cache (1 hour TTL)
|
||||
await cache.set(cacheKey, JSON.stringify(recipes), 3600);
|
||||
}
|
||||
|
||||
// Apply randomization after fetching (so each request gets different order)
|
||||
const randomized = rand_array(recipes);
|
||||
return json(JSON.parse(JSON.stringify(randomized)));
|
||||
};
|
||||
|
||||
@@ -3,10 +3,27 @@ import { json, type RequestHandler } from '@sveltejs/kit';
|
||||
import { Recipe } from '../../../../../../models/Recipe'
|
||||
import { dbConnect } from '../../../../../../utils/db';
|
||||
import { rand_array } from '$lib/js/randomize';
|
||||
import cache from '$lib/server/cache';
|
||||
|
||||
export const GET: RequestHandler = async ({params}) => {
|
||||
await dbConnect();
|
||||
let found_in_season = rand_array(await Recipe.find({season: params.month, icon: {$ne: "🍽️"}}, 'name short_name images tags category icon description season dateModified').lean());
|
||||
found_in_season = JSON.parse(JSON.stringify(found_in_season));
|
||||
return json(found_in_season);
|
||||
const cacheKey = `recipes:in_season:${params.month}`;
|
||||
|
||||
// Try cache first
|
||||
let recipes = null;
|
||||
const cached = await cache.get(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
recipes = JSON.parse(cached);
|
||||
} else {
|
||||
// Cache miss - fetch from DB
|
||||
await dbConnect();
|
||||
recipes = await Recipe.find({season: params.month, icon: {$ne: "🍽️"}}, 'name short_name images tags category icon description season dateModified').lean();
|
||||
|
||||
// Store in cache (1 hour TTL)
|
||||
await cache.set(cacheKey, JSON.stringify(recipes), 3600);
|
||||
}
|
||||
|
||||
// Apply randomization after fetching (so each request gets different order)
|
||||
const randomized = rand_array(recipes);
|
||||
return json(JSON.parse(JSON.stringify(randomized)));
|
||||
};
|
||||
|
||||
@@ -3,11 +3,27 @@ import { Recipe } from '../../../../../../models/Recipe';
|
||||
import { dbConnect } from '../../../../../../utils/db';
|
||||
import type {BriefRecipeType} from '../../../../../../types/types';
|
||||
import { rand_array } from '$lib/js/randomize';
|
||||
import cache from '$lib/server/cache';
|
||||
|
||||
export const GET: RequestHandler = async ({params}) => {
|
||||
await dbConnect();
|
||||
let recipes = rand_array(await Recipe.find({tags: params.tag}, 'name short_name images tags category icon description season dateModified').lean()) as BriefRecipeType[];
|
||||
const cacheKey = `recipes:tag:${params.tag}`;
|
||||
|
||||
recipes = JSON.parse(JSON.stringify(recipes));
|
||||
return json(recipes);
|
||||
// Try cache first
|
||||
let recipes: BriefRecipeType[] | null = null;
|
||||
const cached = await cache.get(cacheKey);
|
||||
|
||||
if (cached) {
|
||||
recipes = JSON.parse(cached);
|
||||
} else {
|
||||
// Cache miss - fetch from DB
|
||||
await dbConnect();
|
||||
recipes = await Recipe.find({tags: params.tag}, 'name short_name images tags category icon description season dateModified').lean() as BriefRecipeType[];
|
||||
|
||||
// Store in cache (1 hour TTL)
|
||||
await cache.set(cacheKey, JSON.stringify(recipes), 3600);
|
||||
}
|
||||
|
||||
// Apply randomization after fetching (so each request gets different order)
|
||||
const randomized = rand_array(recipes);
|
||||
return json(JSON.parse(JSON.stringify(randomized)));
|
||||
};
|
||||
|
||||
Reference in New Issue
Block a user