feat: add Redis caching for recipe queries with automatic invalidation
All checks were successful
CI / update (push) Successful in 13s
Implements a Redis caching layer for recipe endpoints to reduce MongoDB load and improve response times:

- Install ioredis for the Redis client with TypeScript support
- Create cache.ts with namespaced keys (homepage: prefix) to avoid conflicts with other Redis applications
- Add caching to recipe query endpoints (all_brief, by tag, in_season) with a 1-hour TTL
- Implement automatic cache invalidation on recipe create/edit/delete operations
- Cache recipes before randomization to maximize cache reuse while maintaining random order per request
- Add graceful fallback to MongoDB if Redis is unavailable
- Update .env.example with Redis configuration (REDIS_HOST, REDIS_PORT)

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
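All three cached query endpoints in the diff below follow the same read-through pattern; condensed here into one hedged sketch (the `cachedRecipes` helper and its `fetchFromMongo` parameter are illustrative, not part of the commit; each real endpoint inlines this logic with its own cache key and `Recipe.find(...)` query):

```ts
import cache from '$lib/server/cache';
import { rand_array } from '$lib/js/randomize';

// Illustrative helper, not part of this commit.
async function cachedRecipes<T>(cacheKey: string, fetchFromMongo: () => Promise<T[]>): Promise<T[]> {
  // 1. Ask Redis first; cache.get() returns null on a miss or when Redis is unavailable.
  const cached = await cache.get(cacheKey);
  let recipes: T[];
  if (cached) {
    recipes = JSON.parse(cached);
  } else {
    // 2. Cache miss (or Redis down): fall back to MongoDB.
    recipes = await fetchFromMongo();
    // 3. Store the un-randomized list for one hour so every request can reuse it.
    await cache.set(cacheKey, JSON.stringify(recipes), 3600);
  }
  // 4. Randomize after the cache read so each request still gets a different order.
  return rand_array(recipes);
}
```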
.env.example

@@ -1,6 +1,10 @@
# Database Configuration
MONGO_URL="mongodb://user:password@host:port/database?authSource=admin"

# Redis Cache Configuration (optional - falls back to direct DB queries if unavailable)
REDIS_HOST="localhost" # Redis server hostname
REDIS_PORT="6379" # Redis server port

# Authentication Secrets (runtime only - not embedded in build)
AUTHENTIK_ID="your-authentik-client-id"
AUTHENTIK_SECRET="your-authentik-client-secret"
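For reference, a minimal sketch of how these two variables are consumed; it mirrors the client setup in src/lib/server/cache.ts added later in this diff, and the defaults apply whenever the variables are unset:

```ts
import Redis from 'ioredis';

// Defaults line up with the values suggested in .env.example above.
const redis = new Redis({
  host: process.env.REDIS_HOST || 'localhost',
  port: parseInt(process.env.REDIS_PORT || '6379'),
  lazyConnect: true, // do not block app startup when Redis is unreachable
});
```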
package.json

@@ -41,6 +41,7 @@
"@sveltejs/adapter-node": "^5.0.0",
"chart.js": "^4.5.0",
"cheerio": "1.0.0-rc.12",
"ioredis": "^5.9.0",
"mongoose": "^8.0.0",
"node-cron": "^4.2.1",
"sharp": "^0.33.0"
pnpm-lock.yaml (generated, 67 lines changed)

@@ -20,6 +20,9 @@ importers:
cheerio:
  specifier: 1.0.0-rc.12
  version: 1.0.0-rc.12
ioredis:
  specifier: ^5.9.0
  version: 5.9.0
mongoose:
  specifier: ^8.0.0
  version: 8.18.0(socks@2.7.1)

@@ -437,6 +440,9 @@ packages:
  cpu: [x64]
  os: [win32]

'@ioredis/commands@1.5.0':
  resolution: {integrity: sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow==}

'@jridgewell/gen-mapping@0.3.13':
  resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==}

@@ -822,6 +828,10 @@ packages:
  resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==}
  engines: {node: '>=6'}

cluster-key-slot@1.1.2:
  resolution: {integrity: sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==}
  engines: {node: '>=0.10.0'}

color-convert@2.0.1:
  resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
  engines: {node: '>=7.0.0'}

@@ -899,6 +909,10 @@ packages:
  resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==}
  engines: {node: '>=0.10.0'}

denque@2.1.0:
  resolution: {integrity: sha512-HVQE3AAb/pxF8fQAoiqpvg9i3evqug3hoiwakOyZAwJm+6vZehbkYXZ0l4JxS+I3QxM97v5aaRNhj8v5oBhekw==}
  engines: {node: '>=0.10'}

dequal@2.0.3:
  resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==}
  engines: {node: '>=6'}

@@ -1016,6 +1030,10 @@ packages:
  resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==}
  engines: {node: '>=8'}

ioredis@5.9.0:
  resolution: {integrity: sha512-T3VieIilNumOJCXI9SDgo4NnF6sZkd6XcmPi6qWtw4xqbt8nNz/ZVNiIH1L9puMTSHZh1mUWA4xKa2nWPF4NwQ==}
  engines: {node: '>=12.22.0'}

ip@2.0.1:
  resolution: {integrity: sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==}

@@ -1063,6 +1081,12 @@ packages:
locate-character@3.0.0:
  resolution: {integrity: sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==}

lodash.defaults@4.2.0:
  resolution: {integrity: sha512-qjxPLHd3r5DnsdGacqOMU6pb/avJzdh9tFX2ymgoZE27BmjXrNy/y4LoaiTeAb+O3gL8AfpJGtqfX/ae2leYYQ==}

lodash.isarguments@3.1.0:
  resolution: {integrity: sha512-chi4NHZlZqZD18a0imDHnZPrDeBbTtVN7GXMwuGdRH9qotxAjYs3aVLKc7zNOG9eddR5Ksd8rvFEBc9SsggPpg==}

lru-cache@11.2.2:
  resolution: {integrity: sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==}
  engines: {node: 20 || >=22}

@@ -1228,6 +1252,14 @@ packages:
  resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==}
  engines: {node: '>=8'}

redis-errors@1.2.0:
  resolution: {integrity: sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w==}
  engines: {node: '>=4'}

redis-parser@3.0.0:
  resolution: {integrity: sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A==}
  engines: {node: '>=4'}

require-from-string@2.0.2:
  resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
  engines: {node: '>=0.10.0'}

@@ -1302,6 +1334,9 @@ packages:
stackback@0.0.2:
  resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==}

standard-as-callback@2.1.0:
  resolution: {integrity: sha512-qoRRSyROncaz1z0mvYqIE4lCd9p2R90i6GxW3uZv5ucSu8tU7B5HXUP1gG8pVZsYNVaXjk8ClXHPttLyxAL48A==}

std-env@3.10.0:
  resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==}

@@ -1746,6 +1781,8 @@ snapshots:
'@img/sharp-win32-x64@0.33.5':
  optional: true

'@ioredis/commands@1.5.0': {}

'@jridgewell/gen-mapping@0.3.13':
  dependencies:
    '@jridgewell/sourcemap-codec': 1.5.5

@@ -2111,6 +2148,8 @@ snapshots:

clsx@2.1.1: {}

cluster-key-slot@1.1.2: {}

color-convert@2.0.1:
  dependencies:
    color-name: 1.1.4

@@ -2175,6 +2214,8 @@ snapshots:

deepmerge@4.3.1: {}

denque@2.1.0: {}

dequal@2.0.3: {}

detect-libc@2.0.4: {}

@@ -2303,6 +2344,20 @@ snapshots:

indent-string@4.0.0: {}

ioredis@5.9.0:
  dependencies:
    '@ioredis/commands': 1.5.0
    cluster-key-slot: 1.1.2
    debug: 4.4.3
    denque: 2.1.0
    lodash.defaults: 4.2.0
    lodash.isarguments: 3.1.0
    redis-errors: 1.2.0
    redis-parser: 3.0.0
    standard-as-callback: 2.1.0
  transitivePeerDependencies:
    - supports-color

ip@2.0.1:
  optional: true

@@ -2361,6 +2416,10 @@ snapshots:

locate-character@3.0.0: {}

lodash.defaults@4.2.0: {}

lodash.isarguments@3.1.0: {}

lru-cache@11.2.2: {}

lz-string@1.5.0: {}

@@ -2499,6 +2558,12 @@ snapshots:
    indent-string: 4.0.0
    strip-indent: 3.0.0

redis-errors@1.2.0: {}

redis-parser@3.0.0:
  dependencies:
    redis-errors: 1.2.0

require-from-string@2.0.2: {}

resolve@1.22.2:

@@ -2613,6 +2678,8 @@ snapshots:

stackback@0.0.2: {}

standard-as-callback@2.1.0: {}

std-env@3.10.0: {}

strip-indent@3.0.0:
src/lib/server/cache.ts (new file, 311 lines)

@@ -0,0 +1,311 @@
import Redis from 'ioredis';

// Key prefix for namespace isolation
const KEY_PREFIX = 'homepage:';

// Redis client configuration
const redis = new Redis({
  host: process.env.REDIS_HOST || 'localhost',
  port: parseInt(process.env.REDIS_PORT || '6379'),
  // Reconnection strategy: exponential backoff with max 2 seconds
  retryStrategy: (times) => Math.min(times * 50, 2000),
  // Lazy connect to avoid blocking startup
  lazyConnect: true,
  // Connection timeout
  connectTimeout: 10000,
  // Enable offline queue to buffer commands during reconnection
  enableOfflineQueue: true,
});

// Track connection status
let isConnected = false;
let isConnecting = false;

// Graceful connection with error handling
async function ensureConnection(): Promise<boolean> {
  if (isConnected) {
    return true;
  }

  if (isConnecting) {
    // Wait for ongoing connection attempt
    return new Promise((resolve) => {
      const checkInterval = setInterval(() => {
        if (isConnected || !isConnecting) {
          clearInterval(checkInterval);
          resolve(isConnected);
        }
      }, 100);
    });
  }

  isConnecting = true;
  try {
    await redis.connect();
    isConnected = true;
    console.log('[Redis] Connected successfully');
    return true;
  } catch (err) {
    console.error('[Redis] Connection failed:', err);
    isConnected = false;
    return false;
  } finally {
    isConnecting = false;
  }
}

// Handle connection events
redis.on('connect', () => {
  isConnected = true;
  console.log('[Redis] Connected');
});

redis.on('ready', () => {
  isConnected = true;
  console.log('[Redis] Ready');
});

redis.on('error', (err) => {
  console.error('[Redis] Error:', err);
});

redis.on('close', () => {
  isConnected = false;
  console.log('[Redis] Connection closed');
});

redis.on('reconnecting', () => {
  console.log('[Redis] Reconnecting...');
});

// Helper function to add prefix to keys
function prefixKey(key: string): string {
  return `${KEY_PREFIX}${key}`;
}

// Helper function to add prefix to multiple keys
function prefixKeys(keys: string[]): string[] {
  return keys.map(prefixKey);
}

/**
 * Cache wrapper with automatic key prefixing and error handling
 */
export const cache = {
  /**
   * Get a value from cache
   */
  async get(key: string): Promise<string | null> {
    if (!(await ensureConnection())) {
      return null;
    }

    try {
      return await redis.get(prefixKey(key));
    } catch (err) {
      console.error(`[Redis] GET error for key "${key}":`, err);
      return null;
    }
  },

  /**
   * Set a value in cache with optional TTL (in seconds)
   */
  async set(key: string, value: string, ttl?: number): Promise<boolean> {
    if (!(await ensureConnection())) {
      return false;
    }

    try {
      if (ttl) {
        await redis.setex(prefixKey(key), ttl, value);
      } else {
        await redis.set(prefixKey(key), value);
      }
      return true;
    } catch (err) {
      console.error(`[Redis] SET error for key "${key}":`, err);
      return false;
    }
  },

  /**
   * Delete one or more keys from cache
   */
  async del(...keys: string[]): Promise<number> {
    if (!(await ensureConnection())) {
      return 0;
    }

    try {
      const prefixedKeys = prefixKeys(keys);
      return await redis.del(...prefixedKeys);
    } catch (err) {
      console.error(`[Redis] DEL error for keys "${keys.join(', ')}":`, err);
      return 0;
    }
  },

  /**
   * Delete all keys matching a pattern (uses SCAN for safety)
   * Pattern should NOT include the prefix (it will be added automatically)
   */
  async delPattern(pattern: string): Promise<number> {
    if (!(await ensureConnection())) {
      return 0;
    }

    try {
      const prefixedPattern = prefixKey(pattern);
      const keys: string[] = [];
      let cursor = '0';

      // Use SCAN to safely iterate through keys
      do {
        const [nextCursor, matchedKeys] = await redis.scan(
          cursor,
          'MATCH',
          prefixedPattern,
          'COUNT',
          100
        );
        cursor = nextCursor;
        keys.push(...matchedKeys);
      } while (cursor !== '0');

      if (keys.length > 0) {
        return await redis.del(...keys);
      }
      return 0;
    } catch (err) {
      console.error(`[Redis] DEL PATTERN error for pattern "${pattern}":`, err);
      return 0;
    }
  },

  /**
   * Redis Set operations for managing sets (e.g., user favorites)
   */
  sets: {
    /**
     * Add members to a set
     */
    async add(key: string, ...members: string[]): Promise<number> {
      if (!(await ensureConnection())) {
        return 0;
      }

      try {
        return await redis.sadd(prefixKey(key), ...members);
      } catch (err) {
        console.error(`[Redis] SADD error for key "${key}":`, err);
        return 0;
      }
    },

    /**
     * Remove members from a set
     */
    async remove(key: string, ...members: string[]): Promise<number> {
      if (!(await ensureConnection())) {
        return 0;
      }

      try {
        return await redis.srem(prefixKey(key), ...members);
      } catch (err) {
        console.error(`[Redis] SREM error for key "${key}":`, err);
        return 0;
      }
    },

    /**
     * Get all members of a set
     */
    async members(key: string): Promise<string[]> {
      if (!(await ensureConnection())) {
        return [];
      }

      try {
        return await redis.smembers(prefixKey(key));
      } catch (err) {
        console.error(`[Redis] SMEMBERS error for key "${key}":`, err);
        return [];
      }
    },

    /**
     * Check if a member exists in a set
     */
    async isMember(key: string, member: string): Promise<boolean> {
      if (!(await ensureConnection())) {
        return false;
      }

      try {
        const result = await redis.sismember(prefixKey(key), member);
        return result === 1;
      } catch (err) {
        console.error(`[Redis] SISMEMBER error for key "${key}":`, err);
        return false;
      }
    },
  },

  /**
   * Get cache statistics
   */
  async getStats(): Promise<{ hits: number; misses: number; hitRate: string } | null> {
    if (!(await ensureConnection())) {
      return null;
    }

    try {
      const info = await redis.info('stats');
      const hitsMatch = info.match(/keyspace_hits:(\d+)/);
      const missesMatch = info.match(/keyspace_misses:(\d+)/);

      const hits = hitsMatch ? parseInt(hitsMatch[1]) : 0;
      const misses = missesMatch ? parseInt(missesMatch[1]) : 0;
      const total = hits + misses;
      const hitRate = total > 0 ? ((hits / total) * 100).toFixed(2) : '0.00';

      return { hits, misses, hitRate: `${hitRate}%` };
    } catch (err) {
      console.error('[Redis] Error getting stats:', err);
      return null;
    }
  },
};

// Graceful shutdown
process.on('SIGTERM', () => {
  redis.quit();
});

process.on('SIGINT', () => {
  redis.quit();
});

/**
 * Helper function to invalidate all recipe caches
 * Call this after recipe create/update/delete operations
 */
export async function invalidateRecipeCaches(): Promise<void> {
  try {
    // Clear all recipe-related caches in parallel
    await Promise.all([
      cache.del('recipes:all_brief'),
      cache.delPattern('recipes:tag:*'),
      cache.delPattern('recipes:in_season:*'),
      cache.delPattern('recipes:category:*'),
      cache.delPattern('recipes:icon:*'),
    ]);
    console.log('[Cache] Invalidated all recipe caches');
  } catch (err) {
    console.error('[Cache] Error invalidating recipe caches:', err);
  }
}

export default cache;
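A minimal usage sketch of the module exported above (key names other than the recipe keys used by the endpoints are illustrative only):

```ts
import cache, { invalidateRecipeCaches } from '$lib/server/cache';

async function example(): Promise<void> {
  // Plain key/value with a 5-minute TTL (key name is illustrative).
  await cache.set('recipes:featured', JSON.stringify({ id: 'abc' }), 300);
  const raw = await cache.get('recipes:featured'); // null on a miss or when Redis is down
  const featured = raw ? JSON.parse(raw) : null;
  console.log(featured);

  // Set helpers, e.g. for membership-style data (key shape is illustrative).
  await cache.sets.add('favorites:user123', 'pancakes', 'ramen');
  console.log(await cache.sets.isMember('favorites:user123', 'ramen'));

  // Drop every cached recipe listing after a write, as the mutation endpoints below do.
  await invalidateRecipeCaches();
}
```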
@@ -2,6 +2,7 @@ import type { RequestHandler } from '@sveltejs/kit';
import { Recipe } from '../../../../models/Recipe';
import { dbConnect } from '../../../../utils/db';
import { error } from '@sveltejs/kit';
import { invalidateRecipeCaches } from '$lib/server/cache';
// header: use for bearer token for now
// recipe json in body
export const POST: RequestHandler = async ({request, cookies, locals}) => {

@@ -19,6 +20,8 @@ export const POST: RequestHandler = async ({request, cookies, locals}) => {
  await dbConnect();
  try{
    await Recipe.create(recipe_json);
    // Invalidate recipe caches after successful creation
    await invalidateRecipeCaches();
  } catch(e){
    throw error(400, e)
  }
@@ -4,6 +4,7 @@ import { UserFavorites } from '../../../../models/UserFavorites';
import { dbConnect } from '../../../../utils/db';
import type {RecipeModelType} from '../../../../types/types';
import { error } from '@sveltejs/kit';
import { invalidateRecipeCaches } from '$lib/server/cache';
// header: use for bearer token for now
// recipe json in body
export const POST: RequestHandler = async ({request, locals}) => {

@@ -69,6 +70,9 @@ export const POST: RequestHandler = async ({request, locals}) => {
  // Delete the recipe
  await Recipe.findOneAndDelete({short_name: short_name});

  // Invalidate recipe caches after successful deletion
  await invalidateRecipeCaches();

  return new Response(JSON.stringify({msg: "Deleted recipe successfully"}),{
    status: 200,
  });
@@ -6,6 +6,7 @@ import { error } from '@sveltejs/kit';
import { rename } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
import { invalidateRecipeCaches } from '$lib/server/cache';

// header: use for bearer token for now
// recipe json in body

@@ -46,6 +47,10 @@ export const POST: RequestHandler = async ({request, locals}) => {
  }

  await Recipe.findOneAndUpdate({short_name: message.old_short_name }, recipe_json);

  // Invalidate recipe caches after successful update
  await invalidateRecipeCaches();

  return new Response(JSON.stringify({msg: "Edited recipe successfully"}),{
    status: 200,
  });
@@ -3,9 +3,27 @@ import type { BriefRecipeType } from '../../../../../types/types';
 import { Recipe } from '../../../../../models/Recipe'
 import { dbConnect } from '../../../../../utils/db';
 import { rand_array } from '$lib/js/randomize';
+import cache from '$lib/server/cache';
 
 export const GET: RequestHandler = async ({params}) => {
+  const cacheKey = 'recipes:all_brief';
+
+  // Try cache first
+  let recipes: BriefRecipeType[] | null = null;
+  const cached = await cache.get(cacheKey);
+
+  if (cached) {
+    recipes = JSON.parse(cached);
+  } else {
+    // Cache miss - fetch from DB
   await dbConnect();
-  let found_brief = rand_array(await Recipe.find({}, 'name short_name tags category icon description season dateModified').lean()) as BriefRecipeType[];
-  return json(JSON.parse(JSON.stringify(found_brief)));
+    recipes = await Recipe.find({}, 'name short_name tags category icon description season dateModified').lean() as BriefRecipeType[];
+
+    // Store in cache (1 hour TTL)
+    await cache.set(cacheKey, JSON.stringify(recipes), 3600);
+  }
+
+  // Apply randomization after fetching (so each request gets different order)
+  const randomized = rand_array(recipes);
+  return json(JSON.parse(JSON.stringify(randomized)));
 };
@@ -3,10 +3,27 @@ import { json, type RequestHandler } from '@sveltejs/kit';
 import { Recipe } from '../../../../../../models/Recipe'
 import { dbConnect } from '../../../../../../utils/db';
 import { rand_array } from '$lib/js/randomize';
+import cache from '$lib/server/cache';
 
 export const GET: RequestHandler = async ({params}) => {
+  const cacheKey = `recipes:in_season:${params.month}`;
+
+  // Try cache first
+  let recipes = null;
+  const cached = await cache.get(cacheKey);
+
+  if (cached) {
+    recipes = JSON.parse(cached);
+  } else {
+    // Cache miss - fetch from DB
   await dbConnect();
-  let found_in_season = rand_array(await Recipe.find({season: params.month, icon: {$ne: "🍽️"}}, 'name short_name images tags category icon description season dateModified').lean());
-  found_in_season = JSON.parse(JSON.stringify(found_in_season));
-  return json(found_in_season);
+    recipes = await Recipe.find({season: params.month, icon: {$ne: "🍽️"}}, 'name short_name images tags category icon description season dateModified').lean();
+
+    // Store in cache (1 hour TTL)
+    await cache.set(cacheKey, JSON.stringify(recipes), 3600);
+  }
+
+  // Apply randomization after fetching (so each request gets different order)
+  const randomized = rand_array(recipes);
+  return json(JSON.parse(JSON.stringify(randomized)));
 };
@@ -3,11 +3,27 @@ import { Recipe } from '../../../../../../models/Recipe';
 import { dbConnect } from '../../../../../../utils/db';
 import type {BriefRecipeType} from '../../../../../../types/types';
 import { rand_array } from '$lib/js/randomize';
+import cache from '$lib/server/cache';
 
 export const GET: RequestHandler = async ({params}) => {
-  await dbConnect();
-  let recipes = rand_array(await Recipe.find({tags: params.tag}, 'name short_name images tags category icon description season dateModified').lean()) as BriefRecipeType[];
+  const cacheKey = `recipes:tag:${params.tag}`;
 
-  recipes = JSON.parse(JSON.stringify(recipes));
-  return json(recipes);
+  // Try cache first
+  let recipes: BriefRecipeType[] | null = null;
+  const cached = await cache.get(cacheKey);
+
+  if (cached) {
+    recipes = JSON.parse(cached);
+  } else {
+    // Cache miss - fetch from DB
+    await dbConnect();
+    recipes = await Recipe.find({tags: params.tag}, 'name short_name images tags category icon description season dateModified').lean() as BriefRecipeType[];
+
+    // Store in cache (1 hour TTL)
+    await cache.set(cacheKey, JSON.stringify(recipes), 3600);
+  }
+
+  // Apply randomization after fetching (so each request gets different order)
+  const randomized = rand_array(recipes);
+  return json(JSON.parse(JSON.stringify(randomized)));
 };
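Not part of this commit, but the exported getStats() helper makes it straightforward to surface cache effectiveness; a hypothetical SvelteKit endpoint (the route path and fallback response shape are assumptions) could look like:

```ts
import { json, type RequestHandler } from '@sveltejs/kit';
import cache from '$lib/server/cache';

// Hypothetical route, e.g. src/routes/api/cache/stats/+server.ts
export const GET: RequestHandler = async () => {
  const stats = await cache.getStats(); // null when Redis is unavailable
  return json(stats ?? { hits: 0, misses: 0, hitRate: 'n/a' });
};
```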