feat: add real-time collaborative shopping list at /cospend/list
All checks were successful
CI / update (push) Successful in 1m18s

Real-time shopping list with SSE sync between multiple clients, automatic
item categorization using embedding-based classification + Bring icon
matching, and card-based UI with category grouping.

- SSE broadcast for live sync (add/check/remove items across tabs)
- Hybrid categorizer: direct catalog lookup → category-scoped embedding
  search → per-category default icons, with DB caching
- 388 Bring catalog icons matched via multilingual-e5-base embeddings
- 170+ English→German icon aliases for reliable cross-language matching
- Move cospend dashboard to /cospend/dash, /cospend redirects to list
- Shopping icon on homepage links to /cospend/list
This commit is contained in:
2026-04-07 23:50:50 +02:00
parent d9f2a27700
commit 738875e89f
28 changed files with 2281 additions and 49 deletions

View File

@@ -0,0 +1,74 @@
/**
* Pre-assign each Bring catalog icon to a shopping category using embeddings.
* This enables category-scoped icon search at runtime.
*
* Run: pnpm exec vite-node scripts/assign-icon-categories.ts
*/
import { pipeline } from '@huggingface/transformers';
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
// Multilingual embedding model; must match the model used by embed-shopping-categories.ts.
const MODEL_NAME = 'Xenova/multilingual-e5-base';
// Pre-computed category embeddings (written by scripts/embed-shopping-categories.ts).
const CATEGORY_EMBEDDINGS_PATH = resolve('src/lib/data/shoppingCategoryEmbeddings.json');
// Icon-name → filename catalog (written by scripts/download-bring-icons.ts).
const CATALOG_PATH = resolve('static/shopping-icons/catalog.json');
// Destination for the resulting { iconName: category } assignment map.
const OUTPUT_PATH = resolve('src/lib/data/shoppingIconCategories.json');
/**
 * Cosine similarity of two equal-length vectors: dot(a, b) / (|a| * |b|).
 * Returns a value in [-1, 1] for non-zero inputs.
 */
function cosineSimilarity(a: number[], b: number[]): number {
  let dotProduct = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  for (let idx = 0; idx < a.length; idx++) {
    const x = a[idx];
    const y = b[idx];
    dotProduct += x * y;
    sumSqA += x * x;
    sumSqB += y * y;
  }
  return dotProduct / (Math.sqrt(sumSqA) * Math.sqrt(sumSqB));
}
/**
 * Assign every catalog icon name to its closest shopping category.
 *
 * Embeds each icon name with the same e5 model used for the category
 * embeddings, keeps the category with the highest cosine similarity,
 * writes the { iconName: category } map to OUTPUT_PATH, and prints a
 * per-category distribution summary.
 */
async function main() {
  const catData = JSON.parse(readFileSync(CATEGORY_EMBEDDINGS_PATH, 'utf-8'));
  const catalog: Record<string, string> = JSON.parse(readFileSync(CATALOG_PATH, 'utf-8'));
  console.log(`Loading model ${MODEL_NAME}...`);
  const embedder = await pipeline('feature-extraction', MODEL_NAME, { dtype: 'q8' });
  const iconNames = Object.keys(catalog);
  console.log(`Assigning ${iconNames.length} icons to categories...`);
  const assignments: Record<string, string> = {};
  let processed = 0;
  for (const iconName of iconNames) {
    // e5 models expect a "query: " prefix on search-side inputs
    const embedding = await embedder(`query: ${iconName.toLowerCase()}`, { pooling: 'mean', normalize: true });
    const queryVector = Array.from(embedding.data as Float32Array);
    // Keep the category whose representative embedding is most similar
    let bestCategory = 'Sonstiges';
    let bestScore = -1;
    for (const entry of catData.entries) {
      const similarity = cosineSimilarity(queryVector, entry.vector);
      if (similarity > bestScore) {
        bestScore = similarity;
        bestCategory = entry.category;
      }
    }
    assignments[iconName] = bestCategory;
    processed += 1;
    if (processed % 50 === 0) {
      console.log(` ${processed}/${iconNames.length}`);
    }
  }
  writeFileSync(OUTPUT_PATH, JSON.stringify(assignments, null, 2), 'utf-8');
  console.log(`Written ${OUTPUT_PATH} (${iconNames.length} entries)`);
  // Summarize how the icons are distributed across the categories
  const counts: Record<string, number> = {};
  for (const category of Object.values(assignments)) {
    counts[category] = (counts[category] || 0) + 1;
  }
  console.log('\nCategory distribution:');
  const ranked = Object.entries(counts).sort(([, a], [, b]) => b - a);
  for (const [category, count] of ranked) {
    console.log(` ${category}: ${count}`);
  }
}
main().catch(console.error);

View File

@@ -0,0 +1,107 @@
/**
* Downloads all Bring! shopping list item icons locally.
* Icons are stored at static/shopping-icons/{key}.png
*
* Run: pnpm exec vite-node scripts/download-bring-icons.ts
*/
import { writeFileSync, mkdirSync, existsSync } from 'fs';
import { resolve } from 'path';
// Bring! web app's German article catalog: item keys mapped to display names.
const CATALOG_URL = 'https://web.getbring.com/locale/articles.de-DE.json';
// Base URL under which each item's PNG icon lives ({normalizedKey}.png).
const ICON_BASE = 'https://web.getbring.com/assets/images/items/';
// Local directory the icons and the generated catalog.json are written to.
const OUTPUT_DIR = resolve('static/shopping-icons');
/** Normalize key to icon filename (matches Bring's normalizeStringPath) */
/**
 * Normalize a catalog key to its icon filename (mirrors Bring's
 * normalizeStringPath): lowercase, transliterate umlauts/accents,
 * strip '!', and collapse whitespace/hyphen runs to '_'.
 */
function normalizeKey(key: string): string {
  const substitutions: [RegExp, string][] = [
    [/ä/g, 'ae'],
    [/ö/g, 'oe'],
    [/ü/g, 'ue'],
    [/[éèê]/g, 'e'],
    [/à/g, 'a'],
    [/!/g, ''],
    [/[\s\-]+/g, '_'],
  ];
  let result = key.toLowerCase();
  for (const [pattern, replacement] of substitutions) {
    result = result.replace(pattern, replacement);
  }
  return result;
}
/**
 * Fetch the Bring! article catalog, download every item icon (plus a-z letter
 * fallbacks) that is not already cached locally, and write a lowercase-name →
 * normalized-filename mapping to static/shopping-icons/catalog.json.
 *
 * Side effects: creates OUTPUT_DIR, writes PNG files and catalog.json, logs
 * progress and per-icon failures to the console. Individual icon failures are
 * counted but do not abort the run.
 */
async function main() {
  console.log('Fetching catalog...');
  const res = await fetch(CATALOG_URL);
  // Fail fast on an HTTP error instead of failing later with a confusing
  // JSON parse error on the error body.
  if (!res.ok) {
    throw new Error(`Catalog fetch failed: ${res.status} ${res.statusText}`);
  }
  const catalog: Record<string, string> = await res.json();
  // Filter out category headers and meta entries
  const SKIP = [
    'Früchte & Gemüse', 'Fleisch & Fisch', 'Milch & Käse', 'Brot & Gebäck',
    'Getreideprodukte', 'Snacks & Süsswaren', 'Getränke & Tabak', 'Getränke',
    'Haushalt & Gesundheit', 'Fertig- & Tiefkühlprodukte', 'Zutaten & Gewürze',
    'Baumarkt & Garten', 'Tierbedarf', 'Eigene Artikel', 'Zuletzt verwendet',
    'Bring!', 'Vielen Dank', 'Früchte', 'Fleisch', 'Gemüse',
  ];
  const items = Object.keys(catalog).filter(k => !SKIP.includes(k));
  console.log(`Found ${items.length} items to download`);
  mkdirSync(OUTPUT_DIR, { recursive: true });
  // Also download letter fallbacks a-z
  const allKeys = [
    ...items.map(k => ({ original: k, normalized: normalizeKey(k) })),
    ...'abcdefghijklmnopqrstuvwxyz'.split('').map(l => ({ original: l, normalized: l })),
  ];
  let downloaded = 0;
  let skipped = 0;
  let failed = 0;
  for (const { original, normalized } of allKeys) {
    const outPath = resolve(OUTPUT_DIR, `${normalized}.png`);
    // Icons already on disk are never re-fetched, so reruns are cheap
    if (existsSync(outPath)) {
      skipped++;
      continue;
    }
    const url = `${ICON_BASE}${normalized}.png`;
    try {
      const iconRes = await fetch(url);
      if (iconRes.ok) {
        const buffer = Buffer.from(await iconRes.arrayBuffer());
        writeFileSync(outPath, buffer);
        downloaded++;
      } else {
        console.warn(`${original} (${normalized}.png) → ${iconRes.status}`);
        failed++;
      }
    } catch (err) {
      console.warn(`${original} (${normalized}.png) → ${err}`);
      failed++;
    }
    // Progress output every 50 processed items. (The previous comment called
    // this "Rate limiting", but no delay is applied — requests run back-to-back.)
    if ((downloaded + skipped + failed) % 50 === 0) {
      console.log(` ${downloaded + skipped + failed}/${allKeys.length} (${downloaded} new, ${skipped} cached, ${failed} failed)`);
    }
  }
  // Save the catalog mapping (key → normalized filename) for runtime lookup
  const mapping: Record<string, string> = {};
  for (const item of items) {
    mapping[item.toLowerCase()] = normalizeKey(item);
  }
  // Also add the display names as lookups
  for (const [key, displayName] of Object.entries(catalog)) {
    if (!SKIP.includes(key)) {
      mapping[displayName.toLowerCase()] = normalizeKey(key);
    }
  }
  const mappingPath = resolve(OUTPUT_DIR, 'catalog.json');
  writeFileSync(mappingPath, JSON.stringify(mapping, null, 2));
  console.log(`\nDone: ${downloaded} downloaded, ${skipped} cached, ${failed} failed`);
  console.log(`Catalog: ${Object.keys(mapping).length} entries → ${mappingPath}`);
}
main().catch(console.error);

View File

@@ -7,6 +7,7 @@ import { pipeline } from '@huggingface/transformers';
const MODELS = [
'Xenova/all-MiniLM-L6-v2',
'Xenova/multilingual-e5-small',
'Xenova/multilingual-e5-base',
];
for (const name of MODELS) {

View File

@@ -0,0 +1,55 @@
/**
* Pre-compute sentence embeddings for shopping category representative items.
* Uses multilingual-e5-base for good DE/EN understanding.
*
* Run: pnpm exec vite-node scripts/embed-shopping-categories.ts
*/
import { pipeline } from '@huggingface/transformers';
import { writeFileSync } from 'fs';
import { resolve } from 'path';
// Representative items per category, loaded via top-level await (the script
// runs as ESM under vite-node, per the Run comment above).
const { CATEGORY_ITEMS } = await import('../src/lib/data/shoppingCategoryItems');
// Multilingual embedding model; consumers of the output must embed queries with the same model.
const MODEL_NAME = 'Xenova/multilingual-e5-base';
// Destination for the serialized category embeddings.
const OUTPUT_FILE = resolve('src/lib/data/shoppingCategoryEmbeddings.json');
/**
 * Embed each representative category item with multilingual-e5-base and write
 * the vectors to OUTPUT_FILE as
 * { model, dimensions, count, entries: [{ name, category, vector }] }.
 */
async function main() {
  console.log(`Loading model ${MODEL_NAME}...`);
  const embedder = await pipeline('feature-extraction', MODEL_NAME, {
    dtype: 'q8',
  });
  console.log(`Embedding ${CATEGORY_ITEMS.length} category items...`);
  const entries: { name: string; category: string; vector: number[] }[] = [];
  let done = 0;
  for (const item of CATEGORY_ITEMS) {
    // e5 models require "passage: " prefix for documents
    const output = await embedder(`passage: ${item.name}`, { pooling: 'mean', normalize: true });
    // Round to 4 decimals to keep the serialized file compact
    const vector = Array.from(output.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
    entries.push({
      name: item.name,
      category: item.category,
      vector,
    });
    done += 1;
    if (done % 50 === 0) {
      console.log(` ${done}/${CATEGORY_ITEMS.length}`);
    }
  }
  const payload = {
    model: MODEL_NAME,
    dimensions: entries[0]?.vector.length || 768,
    count: entries.length,
    entries,
  };
  const json = JSON.stringify(payload);
  writeFileSync(OUTPUT_FILE, json, 'utf-8');
  console.log(`Written ${OUTPUT_FILE} (${(json.length / 1024).toFixed(1)}KB, ${entries.length} entries)`);
}
main().catch(console.error);

View File

@@ -0,0 +1,55 @@
/**
* Pre-compute embeddings for Bring! catalog items to enable icon matching.
* Maps item names to their icon filenames via semantic similarity.
*
* Run: pnpm exec vite-node scripts/embed-shopping-icons.ts
*/
import { pipeline } from '@huggingface/transformers';
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
// Multilingual embedding model; must match the model used for runtime queries.
const MODEL_NAME = 'Xenova/multilingual-e5-base';
// Name → icon-filename catalog (written by scripts/download-bring-icons.ts).
const CATALOG_PATH = resolve('static/shopping-icons/catalog.json');
// Destination for the serialized per-item icon embeddings.
const OUTPUT_FILE = resolve('src/lib/data/shoppingIconEmbeddings.json');
/**
 * Embed every catalog item name with multilingual-e5-base and persist the
 * vectors (rounded to 4 decimals) with their target icon filename, as
 * { model, dimensions, count, entries: [{ name, icon, vector }] }.
 */
async function main() {
  const catalog: Record<string, string> = JSON.parse(readFileSync(CATALOG_PATH, 'utf-8'));
  // Object keys are unique by construction, so Object.entries already yields
  // exactly one entry per display name — the previous Map-based "dedup" copy
  // was a no-op and has been removed. Multiple names may still share an icon.
  const items = Object.entries(catalog);
  console.log(`Loading model ${MODEL_NAME}...`);
  const embedder = await pipeline('feature-extraction', MODEL_NAME, { dtype: 'q8' });
  console.log(`Embedding ${items.length} catalog items...`);
  const entries: { name: string; icon: string; vector: number[] }[] = [];
  for (let i = 0; i < items.length; i++) {
    const [name, icon] = items[i];
    // e5 models require the "passage: " prefix for document embeddings
    const result = await embedder(`passage: ${name}`, { pooling: 'mean', normalize: true });
    // Round to 4 decimals to keep the serialized file compact
    const vector = Array.from(result.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
    entries.push({ name, icon, vector });
    if ((i + 1) % 50 === 0) {
      console.log(` ${i + 1}/${items.length}`);
    }
  }
  const output = {
    model: MODEL_NAME,
    dimensions: entries[0]?.vector.length || 768,
    count: entries.length,
    entries,
  };
  const json = JSON.stringify(output);
  writeFileSync(OUTPUT_FILE, json, 'utf-8');
  console.log(`Written ${OUTPUT_FILE} (${(json.length / 1024).toFixed(1)}KB, ${entries.length} entries)`);
}
main().catch(console.error);