57 Commits

Author SHA1 Message Date
Alexander 90895fb957 break word 2023-07-13 23:15:13 +02:00
Alexander 2bf546ccb5 initial adjust amounts implemented 2023-07-13 20:53:27 +02:00
Alexander 45942d113b fix modal image screen overflow 2023-07-13 18:57:29 +02:00
Alexander fe0c3c6644 less mislicks, switched to :focus where appropriate 2023-07-13 18:27:02 +02:00
Alexander be331343e7 bodge img upload on edit if no img available 2023-07-13 18:18:01 +02:00
Alexander 878aeff52d show zoom-in pointe only when ready 2023-07-13 17:30:36 +02:00
Alexander 94848e505f update image and season interval on navigation 2023-07-13 17:18:09 +02:00
Alexander 5769c0cea6 click on title image for full image 2023-07-13 15:26:16 +02:00
Alexander bf5c86532a Card hover effect smooth, mobile navbar hides on click 2023-07-13 13:54:42 +02:00
Alexander 25c3f41b42 add Getränke cateogry in all display 2023-07-13 11:50:29 +02:00
Alexander 6476419a29 smoother transition 2023-07-12 12:46:33 +02:00
Alexander 6aaf4ecfb4 reliably unblur, only use unblur if image not already loaded 2023-07-12 12:44:44 +02:00
Alexander bfc20ec192 rm jukit stuff 2023-07-12 12:28:34 +02:00
Alexander 2ae789e6a6 fix img APIs to working standard 2023-07-12 12:23:35 +02:00
Alexander 385af0401b add initial img API endpoints 2023-07-12 11:35:43 +02:00
Alexander c6b82865d4 randomize determined by day alone, not order of execution as well 2023-07-12 09:51:33 +02:00
Alexander 8441a434d1 randomize moved to API 2023-07-11 22:54:13 +02:00
Alexander 8e34bf512e do not show progress of downloading full image 2023-07-11 19:36:59 +02:00
Alexander 08607fafe7 fix placeholder thumbnail misalignment 2023-07-11 19:14:39 +02:00
Alexander 83542af81d correctly show matching recipes 2023-07-11 19:07:26 +02:00
Alexander 57017baed8 fix recipe page 2023-07-11 19:00:58 +02:00
Alexander 915e49352f fix to working state 2023-07-11 18:51:34 +02:00
Alexander 5ea8502caf initial implementation of placeholder images, thumbnails and blurring between using sharp 2023-07-11 18:47:29 +02:00
Alexander abecc0e71f fix imgs 2023-07-10 14:08:58 +02:00
Alexander c6248773e9 More image fixes 2023-07-10 14:05:33 +02:00
Alexander e740e4ca38 Move imgs 2023-07-10 13:47:44 +02:00
Alexander 411cfde93d add missing css 2023-07-10 13:25:04 +02:00
Alexander bb2cd23dd6 add shake.css 2023-07-10 13:24:42 +02:00
Alexander d33433eb8f fix some icon animations 2023-07-10 13:19:35 +02:00
Alexander 71d5dd14df randomize order of recipes based on day 2023-07-10 13:00:11 +02:00
Alexander 51289117cf kuerbisravioli image 2023-07-10 12:06:09 +02:00
Alexander 193b0f30dc mv files 2023-07-09 23:48:52 +02:00
Alexander f5358bd76c add new links 2023-07-09 23:45:05 +02:00
Alexander 0735a4c389 Fix pics 2023-07-06 10:18:06 +02:00
Alexander d92331998e stack tags from bottom of card 2023-07-05 18:48:59 +02:00
Alexander 8414d0c1c2 render html in preamble 2023-07-05 15:20:18 +02:00
Alexander e75d4f7af4 change place of photoprism 2023-07-03 18:20:58 +02:00
Alexander e4e153fe1b Icon route added 2023-07-03 12:39:34 +02:00
Alexander c7b257180f do not render ­ or similar in name 2023-07-03 09:35:36 +02:00
Alexander 3d631f69b5 Remove unnecessary a11y warnings 2023-07-03 00:09:00 +02:00
Alexander 24ddd39f35 API routes now return proper Responses and basic errors are handled
slight improvements in layouting
2023-07-02 23:39:31 +02:00
Alexander ece3b3634c test 2023-06-30 13:32:38 +02:00
Alexander 71fdcf9ffd add forgotten img api 2023-06-27 19:27:00 +02:00
Alexander 6f4cf0a13c add stores 2023-06-27 19:02:10 +02:00
Alexander 6708bfc89c Update 2023-06-27 19:01:06 +02:00
Alexander 32777ada0e small fix 2023-06-25 14:49:52 +02:00
Alexander 9b4485c207 mobile burger menu 2023-06-25 14:42:37 +02:00
Alexander e064c58fcc Image parallax on recipes 2023-06-25 12:15:20 +02:00
Alexander ebd1fe00db Update build process 2023-06-25 10:17:12 +02:00
Alexander 9392ff6ada Does not work: uploading images
Adding/Editing/Deleting works
SeasonsSelect works
Nice recipe layout
2023-06-24 15:35:38 +02:00
Alexander 3d0d3f41e2 First almost fully functioning MVP.
Lacking:
- Seasons cannot be added/edited
- image upload
- layout recipe/adding
2023-06-24 15:35:38 +02:00
Alexander 4afaf7f6f3 functioning Add recipe (not submitting) missing: season
modals partly stylized
add steps stylized
2023-06-24 15:35:38 +02:00
Alexander 24619de64e Image upload stylized 2023-06-24 15:35:38 +02:00
Alexander dc6fd3fad5 Lots of changes, started on working /add 2023-06-24 15:35:37 +02:00
Alexander 4e6291fa5d first working prototype 2023-06-24 15:35:37 +02:00
Alexander be19e63970 Initial commit 2023-06-24 15:35:36 +02:00
Alexander Bocken c226daf9a0 Initial commit 2023-06-24 15:33:41 +02:00
2130 changed files with 5854 additions and 1391869 deletions
-37
View File
@@ -1,37 +0,0 @@
# Database Configuration
MONGO_URL="mongodb://user:password@host:port/database?authSource=admin"
# Authentication Secrets (runtime only - not embedded in build)
AUTHENTIK_ID="your-authentik-client-id"
AUTHENTIK_SECRET="your-authentik-client-secret"
# Static Configuration (embedded in build - OK to be public)
AUTHENTIK_ISSUER="https://sso.example.com/application/o/your-app/"
# File Storage
IMAGE_DIR="/path/to/static/files"
# Optional: Development Settings
# DEV_DISABLE_AUTH="true"
# ORIGIN="http://127.0.0.1:3000"
# Optional: Additional Configuration
# BEARER_TOKEN="your-bearer-token"
# COOKIE_SECRET="your-cookie-secret"
# PEPPER="your-pepper-value"
# ALLOW_REGISTRATION="1"
# AUTH_SECRET="your-auth-secret"
# USDA_API_KEY="your-usda-api-key"
# Translation Service (DeepL API)
DEEPL_API_KEY="your-deepl-api-key"
DEEPL_API_URL="https://api-free.deepl.com/v2/translate" # Use https://api.deepl.com/v2/translate for Pro
# AI Vision Service (Ollama for Alt Text Generation)
OLLAMA_URL="http://localhost:11434" # Local Ollama server URL
# HuggingFace Transformers Model Cache (for nutrition embedding models)
TRANSFORMERS_CACHE="/var/cache/transformers" # Must be writable by build and runtime user
# ExerciseDB v2 API (RapidAPI) - for scraping exercise data
RAPIDAPI_KEY="your-rapidapi-key"
-41
View File
@@ -1,41 +0,0 @@
name: CI
# Controls when the action will run.
on:
# Triggers the workflow on push to master (including merged PRs)
push:
branches: [ master ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
update:
# The type of runner that the job will run on
runs-on: ubuntu-latest
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
- name: Updating website.
uses: appleboy/ssh-action@master
with:
host: bocken.org
username: homepage
key: ${{ secrets.homepage_ssh }}
passphrase: ${{ secrets.homepage_pass }}
port: 22
script: |
cd /usr/share/webapps/homepage
git remote set-url origin https://Alexander:${{ secrets.homepage_gitea_token }}@git.bocken.org/Alexander/homepage
git fetch origin
git reset --hard origin/master
pnpm install --frozen-lockfile
pnpm run build
sudo systemctl stop homepage.service
mkdir -p dist
rm -rf dist/*
mv build/* dist/
rmdir build
sudo systemctl start homepage.service
-14
View File
@@ -1,6 +1,4 @@
.DS_Store
*/.jukit
*/.jukit/*
node_modules
/build
/.svelte-kit
@@ -10,15 +8,3 @@ node_modules
!.env.example
vite.config.js.timestamp-*
vite.config.ts.timestamp-*
# USDA bulk data downloads (regenerated by scripts/import-usda-nutrition.ts)
data/usda/
# Loyalty-card barcodes (regenerated by scripts/generate-loyalty-cards.ts from env)
static/shopping/supercard.svg
static/shopping/cumulus.svg
src-tauri/target/
src-tauri/*.keystore
# Android: ignore build output and caches, track source files
src-tauri/gen/android/.gradle/
src-tauri/gen/android/app/build/
src-tauri/gen/android/buildSrc/.gradle/
src-tauri/gen/android/buildSrc/build/
-13
View File
@@ -1,13 +0,0 @@
{
"mcpServers": {
"svelte": {
"type": "stdio",
"command": "npx",
"args": [
"-y",
"@sveltejs/mcp"
],
"env": {}
}
}
}
-15
View File
@@ -1,15 +0,0 @@
{
"mcpServers": {
"svelte": {
"type": "stdio",
"command": "npx",
"env": {
},
"args": [
"-y",
"@sveltejs/mcp"
]
}
}
}
+1
View File
@@ -1 +1,2 @@
engine-strict=true
resolution-mode=highest
-137
View File
@@ -1,137 +0,0 @@
# Repository Instructions
## Commits
- **Never** append `Co-Authored-By: Claude ...` (or any similar AI-attribution trailer) to commit messages. Do not add it even if a default template or prior convention suggests it.
- Do not include "Generated with Claude Code" footers or similar watermarks in commit messages, PR bodies, or any files in this repo.
### Versioning
When committing, bump version numbers as appropriate using semver:
- **patch** (x.y.Z): bug fixes, minor styling tweaks, small corrections
- **minor** (x.Y.0): new features, significant UI changes, new pages/routes
- **major** (X.0.0): breaking changes, major redesigns, data model changes
Version files to update:
- `package.json` — site version (bump on every commit)
- `src-tauri/tauri.conf.json` + `src-tauri/Cargo.toml` — Tauri/Android app version. Only bump these when the Tauri app codebase itself changes (e.g. `src-tauri/` files), NOT for website-only changes.
## Available MCP Tools:
You are able to use the Svelte MCP server, where you have access to comprehensive Svelte 5 and SvelteKit documentation. Here's how to use the available tools effectively:
### 1. list-sections
Use this FIRST to discover all available documentation sections. Returns a structured list with titles, use_cases, and paths.
When asked about Svelte or SvelteKit topics, ALWAYS use this tool at the start of the chat to find relevant sections.
### 2. get-documentation
Retrieves full documentation content for specific sections. Accepts single or multiple sections.
After calling the list-sections tool, you MUST analyze the returned documentation sections (especially the use_cases field) and then use the get-documentation tool to fetch ALL documentation sections that are relevant for the user's task.
### 3. svelte-autofixer
Analyzes Svelte code and returns issues and suggestions.
You MUST use this tool whenever writing Svelte code before sending it to the user. Keep calling it until no issues or suggestions are returned.
## Common Svelte 5 Pitfalls
### `{@const}` placement
`{@const}` can ONLY be the immediate child of `{#snippet}`, `{#if}`, `{:else if}`, `{:else}`, `{#each}`, `{:then}`, `{:catch}`, `<svelte:fragment>`, `<svelte:boundary>` or `<Component>`. It CANNOT be used directly inside regular HTML elements like `<div>`, `<header>`, etc. Use `$derived` in the `<script>` block instead.
### Event modifiers removed
Svelte 5 removed event modifiers like `on:click|preventDefault`. Use inline handlers instead: `onclick={e => { e.preventDefault(); handler(); }}`.
### 4. playground-link
Generates a Svelte Playground link with the provided code.
After completing the code, ask the user if they want a playground link. Only call this tool after user confirmation and NEVER if code was written to files in their project.
# Theming Rules
## Semantic CSS Variables (ALWAYS use these, NEVER hardcode Nord values for themed properties)
| Purpose | Variable | Light resolves to | Dark resolves to |
|---|---|---|---|
| Page background | `--color-bg-primary` | white/light | dark |
| Card/section bg | `--color-surface` | nord6-ish | nord1-ish |
| Secondary bg | `--color-bg-secondary` | slightly darker | slightly lighter |
| Tertiary bg (inputs, insets) | `--color-bg-tertiary` | nord5-ish | nord2-ish |
| Hover/elevated bg | `--color-bg-elevated` | nord4-ish | nord3-ish |
| Primary text | `--color-text-primary` | dark text | light text |
| Secondary text (labels, muted) | `--color-text-secondary` | nord3 | nord4 |
| Tertiary text (descriptions) | `--color-text-tertiary` | nord2 | nord5 |
| Borders | `--color-border` | nord4 | nord2/3 |
## What NOT to do
- **NEVER** use `var(--nord0)` through `var(--nord6)` for backgrounds, text, or borders — these don't adapt to theme
- **NEVER** write `@media (prefers-color-scheme: dark)` or `:global(:root[data-theme="dark"])` override blocks — semantic variables handle both themes automatically
- **NEVER** use `var(--font-default-dark)` or `var(--accent-dark)` — these are legacy
## Primary interactive elements
- Background: `var(--color-primary)` (nord10 light / nord8 dark)
- Hover: `var(--color-primary-hover)`
- Active: `var(--color-primary-active)`
- Text on primary bg: `var(--color-text-on-primary)`
## Accent colors (OK to use directly, they work in both themes)
- `var(--blue)`, `var(--red)`, `var(--green)`, `var(--orange)` — named accent colors
- `var(--nord10)`, `var(--nord11)`, `var(--nord12)`, `var(--nord14)` — OK for hover states of accent-colored buttons only
## Chart.js theme reactivity
Charts don't use CSS variables. Use the `isDark()` pattern from `FitnessChart.svelte`:
```js
function isDark() {
const theme = document.documentElement.getAttribute('data-theme');
if (theme === 'dark') return true;
if (theme === 'light') return false;
return window.matchMedia('(prefers-color-scheme: dark)').matches;
}
const textColor = isDark() ? '#D8DEE9' : '#2E3440';
```
Re-create the chart on theme change via `MutationObserver` on `data-theme` + `matchMedia` listener.
## Form inputs
- Background: `var(--color-bg-tertiary)`
- Border: `var(--color-border)`
- Text: `var(--color-text-primary)`
- Label: `var(--color-text-secondary)`
## Toggle component
Use `Toggle.svelte` (iOS-style) instead of raw `<input type="checkbox">` for user-facing boolean switches.
## Site-Wide Design Language
## Layout & Spacing
- Max content width: `1000px``1200px` with `margin-inline: auto`
- Card/grid gaps: `2rem` desktop, `1rem` tablet, `0.5rem` mobile
- Breakpoints: `410px` (small mobile), `560px` (tablet), `900px` (rosary), `1024px` (desktop)
## Border Radius Tokens
- `--radius-pill: 1000px` — nav bar, pill buttons
- `--radius-card: 20px` — major cards (recipe cards)
- `--radius-lg: 0.75rem` — medium rounded elements
- `--radius-md: 0.5rem` — standard rounding
- `--radius-sm: 0.3rem` — small elements
## Shadow Tokens
- `--shadow-sm` / `--shadow-md` / `--shadow-lg` / `--shadow-hover` — use these, don't hardcode
- Shadows are spread-based (`0 0 Xem Yem`) not offset-based
## Hover & Interaction Patterns
- Cards/links: `scale: 1.02` + shadow elevation on hover
- Tags/pills: `scale: 1.05` with `--transition-fast` (100ms)
- Standard transitions: `--transition-normal` (200ms)
- Nav bar: glassmorphism (`backdrop-filter: blur(16px)`, semi-transparent bg)
## Typography
- Font stack: Helvetica, Arial, "Noto Sans", sans-serif
- Size tokens: `--text-sm` through `--text-3xl`
- Headings in grids: `1.5rem` desktop → `1.2rem` tablet → `0.95rem` mobile
## Surfaces & Cards
- Use `--color-surface` / `--color-surface-hover` for card backgrounds
- Use `--color-bg-elevated` for hover/active states
- Recipe cards: 300px wide, `--radius-card` corners
- Global utility classes: `.g-icon-badge` (circular), `.g-pill` (pill-shaped)
-346
View File
@@ -1,346 +0,0 @@
# Homepage Codebase Map
Generated: 2025-11-18
## Table of Contents
1. [Backend Architecture](#backend-architecture)
2. [Frontend JavaScript](#frontend-javascript)
3. [Frontend Design](#frontend-design)
4. [Duplication Analysis](#duplication-analysis)
---
## Backend Architecture
### Database Configuration
**⚠️ CRITICAL DUPLICATION:**
- `src/lib/db/db.ts` - Legacy DB connection using `MONGODB_URI`
- `src/utils/db.ts` - Current DB connection using `MONGO_URL` (better pooling) ✅ Preferred
**Recommendation:** Consolidate all usage to `src/utils/db.ts`
### Models (10 Total)
#### Cospend (Expense Tracking)
- `src/models/Payment.ts` - Payment records with currency conversion
- `src/models/PaymentSplit.ts` - Individual user splits per payment
- `src/models/RecurringPayment.ts` - Scheduled recurring payments with cron
- `src/models/ExchangeRate.ts` - Cached currency exchange rates
#### Recipes
- `src/models/Recipe.ts` - Full recipe schema with ingredients, instructions, images
- `src/models/UserFavorites.ts` - User favorite recipes
#### Fitness
- `src/models/Exercise.ts` - Exercise database (body parts, equipment, instructions)
- `src/models/WorkoutTemplate.ts` - Workout templates with exercises/sets
- `src/models/WorkoutSession.ts` - Completed workout sessions
#### Gaming
- `src/models/MarioKartTournament.ts` - Tournament management with groups/brackets
### API Routes (47 Total Endpoints)
#### Bible/Misc (1 endpoint)
- `GET /api/bible-quote/+server.ts` - Random Bible verse for error pages
#### Cospend API (13 endpoints)
- `GET /api/cospend/balance/+server.ts` - Calculate user balances
- `GET /api/cospend/debts/+server.ts` - Calculate who owes whom
- `GET /api/cospend/exchange-rates/+server.ts` - Manage exchange rates
- `GET /api/cospend/monthly-expenses/+server.ts` - Monthly expense analytics
- `GET|POST /api/cospend/payments/+server.ts` - CRUD for payments
- `GET|PUT|DELETE /api/cospend/payments/[id]/+server.ts` - Single payment ops
- `GET|POST /api/cospend/recurring-payments/+server.ts` - CRUD recurring payments
- `GET|PUT|DELETE /api/cospend/recurring-payments/[id]/+server.ts` - Single recurring
- `POST /api/cospend/recurring-payments/execute/+server.ts` - Manual execution
- `POST /api/cospend/recurring-payments/cron-execute/+server.ts` - Cron execution
- `GET /api/cospend/recurring-payments/scheduler/+server.ts` - Scheduler status
- `POST /api/cospend/upload/+server.ts` - Receipt image upload
#### Fitness API (8 endpoints)
- `GET|POST /api/fitness/exercises/+server.ts` - List/search/create exercises
- `GET|PUT|DELETE /api/fitness/exercises/[id]/+server.ts` - Single exercise ops
- `GET /api/fitness/exercises/filters/+server.ts` - Get filter options
- `GET|POST /api/fitness/sessions/+server.ts` - List/create workout sessions
- `GET|PUT|DELETE /api/fitness/sessions/[id]/+server.ts` - Single session ops
- `GET|POST /api/fitness/templates/+server.ts` - List/create templates
- `GET|PUT|DELETE /api/fitness/templates/[id]/+server.ts` - Single template ops
- `POST /api/fitness/seed-example/+server.ts` - Seed example data
#### Mario Kart API (8 endpoints)
- `GET|POST /api/mario-kart/tournaments/+server.ts` - List/create tournaments
- `GET|PUT|DELETE /api/mario-kart/tournaments/[id]/+server.ts` - Single tournament
- `GET|PUT /api/mario-kart/tournaments/[id]/bracket/+server.ts` - Bracket management
- `PUT /api/mario-kart/tournaments/[id]/bracket/matches/[matchId]/scores/+server.ts` - Match scores
- `POST|DELETE /api/mario-kart/tournaments/[id]/contestants/+server.ts` - Manage contestants
- `PUT /api/mario-kart/tournaments/[id]/contestants/[contestantId]/dnf/+server.ts` - Mark DNF
- `POST /api/mario-kart/tournaments/[id]/groups/+server.ts` - Group management
- `PUT /api/mario-kart/tournaments/[id]/groups/[groupId]/scores/+server.ts` - Group scores
#### Recipes (Rezepte) API (17 endpoints)
- `POST /api/rezepte/add/+server.ts` - Add new recipe
- `DELETE /api/rezepte/delete/+server.ts` - Delete recipe
- `PUT /api/rezepte/edit/+server.ts` - Edit recipe
- `GET /api/rezepte/search/+server.ts` - Search recipes
- `GET|POST|DELETE /api/rezepte/favorites/+server.ts` - User favorites
- `GET /api/rezepte/favorites/check/[shortName]/+server.ts` - Check if favorite
- `GET /api/rezepte/favorites/recipes/+server.ts` - Get favorite recipes
- `POST /api/rezepte/img/add/+server.ts` - Add recipe image
- `DELETE /api/rezepte/img/delete/+server.ts` - Delete recipe image
- `PUT /api/rezepte/img/mv/+server.ts` - Move/reorder recipe image
- `GET /api/rezepte/items/all_brief/+server.ts` - Get all recipes (brief)
- `GET /api/rezepte/items/[name]/+server.ts` - Get single recipe
- `GET /api/rezepte/items/category/+server.ts` - Get categories
- `GET /api/rezepte/items/category/[category]/+server.ts` - Recipes by category
- `GET /api/rezepte/items/icon/+server.ts` - Get icons
- `GET /api/rezepte/items/icon/[icon]/+server.ts` - Recipes by icon
- `GET /api/rezepte/items/in_season/[month]/+server.ts` - Seasonal recipes
- `GET /api/rezepte/items/tag/+server.ts` - Get tags
- `GET /api/rezepte/items/tag/[tag]/+server.ts` - Recipes by tag
- `GET /api/rezepte/json-ld/[name]/+server.ts` - Recipe JSON-LD for SEO
### Server-Side Utilities
#### Core Utils
- `src/utils/db.ts` - MongoDB connection with pooling ✅ Preferred
- `src/lib/db/db.ts` - Legacy DB connection ⚠️ Deprecated
#### Server Libraries
- `src/lib/server/favorites.ts` - User favorites helper functions
- `src/lib/server/scheduler.ts` - Recurring payment scheduler (node-cron)
#### Business Logic
- `src/lib/utils/categories.ts` - Payment category definitions
- `src/lib/utils/currency.ts` - Currency conversion (Frankfurter API)
- `src/lib/utils/recurring.ts` - Cron expression parsing & scheduling
- `src/lib/utils/settlements.ts` - Settlement payment helpers
#### Authentication
- `src/auth.ts` - Auth.js configuration (Authentik provider)
- `src/hooks.server.ts` - Server hooks (auth, routing, DB init, scheduler)
---
## Frontend JavaScript
### Svelte Stores (src/lib/js/)
- `img_store.js` - Image state store
- `portions_store.js` - Recipe portions state
- `season_store.js` - Seasonal filtering state
### Utility Functions
#### Recipe Utils (src/lib/js/)
- `randomize.js` - Seeded randomization for daily recipe order
- `recipeJsonLd.ts` - Recipe JSON-LD schema generation
- `stripHtmlTags.ts` - HTML tag removal utility
#### General Utils
- `src/utils/cookie.js` - Cookie utilities
### Type Definitions
- `src/types/types.ts` - Recipe TypeScript types (RecipeModelType, BriefRecipeType)
- `src/app.d.ts` - SvelteKit app type definitions
### Configuration
- `src/lib/config/users.ts` - Predefined users for Cospend (alexander, anna)
---
## Frontend Design
### Global CSS (src/lib/css/) - 8 Files, 544 Lines
- `nordtheme.css` (54 lines) - Nord color scheme, CSS variables, global styles
- `form.css` (51 lines) - Form styling
- `action_button.css` (58 lines) - Action button with shake animation
- `icon.css` (52 lines) - Icon styling
- `shake.css` (28 lines) - Shake animation
- `christ.css` (32 lines) - Faith section styling
- `predigten.css` (65 lines) - Sermon section styling
- `rosenkranz.css` (204 lines) - Rosary prayer styling
### Reusable Components (src/lib/components/) - 48 Files
#### Icon Components (src/lib/assets/icons/)
- `Check.svelte`, `Cross.svelte`, `Heart.svelte`, `Pen.svelte`, `Plus.svelte`, `Upload.svelte`
#### UI Components
- `ActionButton.svelte` - Animated action button
- `AddButton.svelte` - Add button
- `EditButton.svelte` - Edit button (floating)
- `FavoriteButton.svelte` - Toggle favorite
- `Card.svelte` (259 lines) ⚠️ Large - Recipe card with hover effects, tags, category
- `CardAdd.svelte` - Add recipe card placeholder
- `FormSection.svelte` - Styled form section wrapper
- `Header.svelte` - Page header
- `UserHeader.svelte` - User-specific header
- `Icon.svelte` - Icon wrapper
- `IconLayout.svelte` - Icon grid layout
- `Symbol.svelte` - Symbol display
- `ProfilePicture.svelte` - User avatar
#### Layout Components
- `LinksGrid.svelte` - Navigation links grid
- `MediaScroller.svelte` - Horizontal scrolling media
- `SeasonLayout.svelte` - Seasonal recipe layout
- `TitleImgParallax.svelte` - Parallax title image
#### Recipe-Specific Components
- `Recipes.svelte` - Recipe list display
- `RecipeEditor.svelte` - Recipe editing form
- `RecipeNote.svelte` - Recipe notes display
- `EditRecipe.svelte` - Edit recipe modal
- `EditRecipeNote.svelte` - Edit recipe notes
- `CreateIngredientList.svelte` - Ingredient list editor
- `CreateStepList.svelte` - Instruction steps editor
- `IngredientListList.svelte` - Multiple ingredient lists
- `IngredientsPage.svelte` - Ingredients tab view
- `InstructionsPage.svelte` - Instructions tab view
- `ImageUpload.svelte` - Recipe image uploader
- `HefeSwapper.svelte` - Yeast type converter
- `SeasonSelect.svelte` - Season selector
- `TagBall.svelte` - Tag bubble
- `TagCloud.svelte` - Tag cloud display
- `Search.svelte` - Recipe search
#### Cospend (Expense) Components
- `PaymentModal.svelte` (716 lines) ⚠️ Very Large - Detailed payment view modal
- `SplitMethodSelector.svelte` - Payment split method chooser
- `UsersList.svelte` - User selection list
- `EnhancedBalance.svelte` - Balance display with charts
- `DebtBreakdown.svelte` - Debt summary
- `BarChart.svelte` - Bar chart visualization
### Layouts (6 Total)
- `src/routes/+layout.svelte` - Root layout (minimal)
- `src/routes/(main)/+layout.svelte` - Main section layout
- `src/routes/rezepte/+layout.svelte` - Recipe section layout
- `src/routes/cospend/+layout.svelte` - Cospend section layout
- `src/routes/glaube/+layout.svelte` - Faith section layout
- `src/routes/fitness/+layout.svelte` - Fitness section layout
### Pages (36 Total)
#### Main Pages (4)
- `(main)/+page.svelte` - Homepage
- `(main)/register/+page.svelte` - Registration
- `(main)/settings/+page.svelte` - Settings
- `+error.svelte` - Error page (with Bible verse)
#### Recipe Pages (15)
- `rezepte/+page.svelte` - Recipe list
- `rezepte/[name]/+page.svelte` - Recipe detail
- `rezepte/add/+page.svelte` - Add recipe
- `rezepte/edit/[name]/+page.svelte` - Edit recipe
- `rezepte/search/+page.svelte` - Search recipes
- `rezepte/favorites/+page.svelte` - Favorite recipes
- `rezepte/category/+page.svelte` - Category list
- `rezepte/category/[category]/+page.svelte` - Category recipes
- `rezepte/icon/+page.svelte` - Icon list
- `rezepte/icon/[icon]/+page.svelte` - Icon recipes
- `rezepte/season/+page.svelte` - Season selector
- `rezepte/season/[month]/+page.svelte` - Seasonal recipes
- `rezepte/tag/+page.svelte` - Tag list
- `rezepte/tag/[tag]/+page.svelte` - Tag recipes
- `rezepte/tips-and-tricks/+page.svelte` - Tips page with converter
#### Cospend Pages (8)
- `cospend/+page.svelte` (20KB!) ⚠️ Very Large - Dashboard
- `cospend/payments/+page.svelte` - Payment list
- `cospend/payments/add/+page.svelte` - Add payment
- `cospend/payments/edit/[id]/+page.svelte` - Edit payment
- `cospend/payments/view/[id]/+page.svelte` - View payment
- `cospend/recurring/+page.svelte` - Recurring payments
- `cospend/recurring/edit/[id]/+page.svelte` - Edit recurring
- `cospend/settle/+page.svelte` - Settlement calculator
#### Fitness Pages (4)
- `fitness/+page.svelte` - Fitness dashboard
- `fitness/sessions/+page.svelte` - Workout sessions
- `fitness/templates/+page.svelte` - Workout templates
- `fitness/workout/+page.svelte` - Active workout
#### Mario Kart Pages (2)
- `mario-kart/+page.svelte` - Tournament list
- `mario-kart/[id]/+page.svelte` - Tournament detail
#### Faith Pages (4)
- `glaube/+page.svelte` - Faith section home
- `glaube/gebete/+page.svelte` - Prayers
- `glaube/predigten/+page.svelte` - Sermons
- `glaube/rosenkranz/+page.svelte` - Rosary
---
## Duplication Analysis
### 🔴 Critical Issues
#### 1. Database Connection Duplication
- **Files:** `src/lib/db/db.ts` vs `src/utils/db.ts`
- **Impact:** 43 API routes, inconsistent env var usage
- **Action:** Consolidate to `src/utils/db.ts`
#### 2. Authorization Pattern (47 occurrences)
```typescript
const session = await locals.auth();
if (!session || !session.user?.nickname) {
return json({ error: 'Unauthorized' }, { status: 401 });
}
```
- **Action:** Extract to middleware helper
### 🟡 Moderate Issues
#### 3. Formatting Functions (65 occurrences)
- Currency formatting in 12+ files (inline)
- Date formatting scattered across components
- **Action:** Create `src/lib/utils/formatters.ts`
#### 4. Button Styling (121 definitions across 20 files)
- Repeated `.btn-primary`, `.btn-secondary`, `.btn-danger` classes
- **Action:** Create unified `Button.svelte` component
#### 5. Recipe Filtering Logic
- Similar patterns in category/icon/tag/season pages
- **Action:** Extract to shared filter component
### 🟢 Minor Issues
#### 6. Border Radius (22 files)
- Consistent `0.5rem` or `8px` usage
- **Action:** Add CSS variable for design token
#### 7. Large Component Files
- `src/routes/cospend/+page.svelte` (20KB)
- `src/lib/components/PaymentModal.svelte` (716 lines)
- `src/lib/components/Card.svelte` (259 lines)
- **Action:** Consider decomposition
### ✅ Strengths
1. **Excellent Nord Theme Consistency** - 525 occurrences, well-defined CSS variables
2. **Good Architecture** - Clear separation: models, API, components, pages
3. **Type Safety** - Comprehensive TypeScript usage
4. **Scoped Styles** - All component styles properly scoped
---
## Architecture Summary
**Framework:** SvelteKit + TypeScript
**Database:** MongoDB + Mongoose ODM
**Authentication:** Auth.js + Authentik provider
**Styling:** CSS (Nord theme) + Scoped component styles
**State Management:** Svelte stores (minimal - 3 stores)
**API Architecture:** RESTful endpoints in `/routes/api/`
**Module Breakdown:**
- **Recipes (Rezepte):** 17 API endpoints, 15 pages
- **Expense Tracking (Cospend):** 13 API endpoints, 8 pages
- **Fitness Tracking:** 8 API endpoints, 4 pages
- **Mario Kart Tournaments:** 8 API endpoints, 2 pages
- **Faith/Religious Content:** 1 API endpoint, 4 pages
+24 -52
View File
@@ -1,66 +1,38 @@
# Personal Homepage
# create-svelte
My own homepage, [bocken.org](https://bocken.org), built with SvelteKit and Svelte 5.
Everything you need to build a Svelte project, powered by [`create-svelte`](https://github.com/sveltejs/kit/tree/master/packages/create-svelte).
## Features
## Creating a project
### Recipes (`/rezepte` · `/recipes`)
Bilingual recipe collection with search, category filtering, and seasonal recommendations. Authenticated users can add recipes and mark favorites. Recipes are browsable offline via service worker caching.
If you're seeing this, you've probably already done this step. Congrats!
### Faith (`/glaube` · `/faith`)
Catholic prayer collection in German, English, and Latin. Includes an interactive Rosary with scroll-synced SVG bead visualization, mystery images (sticky column on desktop, draggable PiP on mobile), decade progress tracking, and a daily streak counter. Adapts prayers for liturgical seasons like Eastertide.
```bash
# create a new project in the current directory
npm create svelte@latest
### Fitness (`/fitness`)
Workout tracker with template-based training plans, set logging with RPE, rest timers synced across devices via SSE, workout history with statistics, and body measurement tracking. Cardio exercises support native GPS tracking via the Android app with background location recording.
# create a new project in my-app
npm create svelte@latest my-app
```
**Android app**: [Download APK](https://bocken.org/static/Bocken.apk) — Tauri v2 shell with native GPS foreground service for screen-off tracking, live notification with elapsed time, distance, and pace.
## Developing
### Expense Sharing (`/cospend`)
Shared expense tracker with balance dashboards, debt breakdowns, monthly bar charts with category filtering, and payment management.
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
### Self-Hosted Services
Landing pages and themed integrations for Gitea, Jellyfin, SearxNG, Photoprism, Jitsi, Webtrees, and more — all behind Authentik SSO.
```bash
npm run dev
### Technical Highlights
- **PWA with offline support** — service worker with network-first caching, offline recipe browsing, and intelligent prefetching
- **Bilingual routing** — language derived from URL (`/rezepte` vs `/recipes`, `/glaube` vs `/faith`) with seamless switching
- **Nord theme** — consistent color palette with light/dark mode support
- **Auth** — Auth.js with OIDC/LDAP via Authentik, role-based access control
- **Progressive enhancement** — core functionality works without JavaScript
# or start the server and open the app in a new browser tab
npm run dev -- --open
```
## TODO
### General
## Building
### Rezepte
To create a production version of your app:
### Glaube
```bash
npm run build
```
You can preview the production build with `npm run preview`.
#### E-Mail
- [x] emailwiz setup
- [x] fail2ban
- [ ] LDAP?
#### Dendrite
- [x] setup dendrite
- [ ] Connect to LDAP/OIDC (waiting on upstream)
- [x] Serve some web-frontend -> Just element?
#### Webtrees
- [x] setup Oauth2proxy -> not necessary, authentik has proxy integrated
- [x] connect to OIDC using Oauth2proxy (using authentik)
- [ ] consistent theming
- [x] auto-login if not logged in
#### Jitsi
- [ ] consistent theming
- [ ] move away from docker
- [ ] find a way to improve max video quality without jitsi becoming unreliable
#### Searx
- [x] investigate SearxNG as more reliable alternative
- [ ] consistent theming
#### Photoprism
- [ ] consistent theming
- [x] OIDC integration
> To deploy your app, you may need to install an [adapter](https://kit.svelte.dev/docs/adapters) for your target environment.
-191
View File
@@ -1,191 +0,0 @@
# Recurring Payments Setup
This document explains how to set up and use the recurring payments feature in your Cospend application.
## Features
- **Daily, Weekly, Monthly recurring payments**: Simple frequency options
- **Custom Cron scheduling**: Advanced users can use cron expressions for complex schedules
- **Full payment management**: Create, edit, pause, and delete recurring payments
- **Automatic execution**: Payments are automatically created based on schedule
- **Split support**: All payment split methods are supported (equal, proportional, personal+equal, full payment)
## Setup
### 1. Environment Variables
Add the following optional environment variable to your `.env` file for secure cron job execution:
```env
CRON_API_TOKEN=your-secure-random-token-here
```
### 2. Database Setup
The recurring payments feature uses MongoDB models that are automatically created. No additional database setup is required.
### 3. Background Job Setup
You need to set up a recurring job to automatically process due payments. Here are several options:
#### Option A: System Cron (Linux/macOS)
Add the following to your crontab (run `crontab -e`):
```bash
# Run every 5 minutes
*/5 * * * * curl -X POST -H "Authorization: Bearer your-secure-random-token-here" https://yourdomain.com/api/cospend/recurring-payments/cron-execute
# Or run every hour
0 * * * * curl -X POST -H "Authorization: Bearer your-secure-random-token-here" https://yourdomain.com/api/cospend/recurring-payments/cron-execute
```
#### Option B: GitHub Actions (if hosted on a platform that supports it)
Create `.github/workflows/recurring-payments.yml`:
```yaml
name: Process Recurring Payments
on:
schedule:
- cron: '*/5 * * * *' # Every 5 minutes
workflow_dispatch: # Allow manual triggering
jobs:
process-payments:
runs-on: ubuntu-latest
steps:
- name: Process recurring payments
run: |
curl -X POST \
-H "Authorization: Bearer ${{ secrets.CRON_API_TOKEN }}" \
https://yourdomain.com/api/cospend/recurring-payments/cron-execute
```
#### Option C: Cloud Function/Serverless
Deploy a simple cloud function that calls the endpoint on a schedule:
```javascript
// Example for Vercel/Netlify Functions
export default async function handler(req, res) {
if (req.method !== 'POST') {
return res.status(405).json({ error: 'Method not allowed' });
}
try {
const response = await fetch('https://yourdomain.com/api/cospend/recurring-payments/cron-execute', {
method: 'POST',
headers: {
'Authorization': `Bearer ${process.env.CRON_API_TOKEN}`
}
});
const result = await response.json();
res.status(200).json(result);
} catch (error) {
res.status(500).json({ error: error.message });
}
}
```
#### Option D: Manual Execution
For testing or manual processing, you can call the endpoint directly:
```bash
curl -X POST \
-H "Authorization: Bearer your-secure-random-token-here" \
-H "Content-Type: application/json" \
https://yourdomain.com/api/cospend/recurring-payments/cron-execute
```
## Usage
### Creating Recurring Payments
1. Navigate to `/cospend/recurring/add`
2. Fill in the payment details (title, amount, category, etc.)
3. Choose frequency:
- **Daily**: Executes every day
- **Weekly**: Executes every week
- **Monthly**: Executes every month
- **Custom**: Use cron expressions for advanced scheduling
4. Set up user splits (same options as regular payments)
5. Set start date and optional end date
### Managing Recurring Payments
1. Navigate to `/cospend/recurring`
2. View all recurring payments with their next execution dates
3. Edit, pause, activate, or delete recurring payments
4. Filter by active/inactive status
### Cron Expression Examples
For custom frequency, you can use cron expressions:
- `0 9 * * *` - Every day at 9:00 AM
- `0 9 * * 1` - Every Monday at 9:00 AM
- `0 9 1 * *` - Every 1st of the month at 9:00 AM
- `0 9 1,15 * *` - Every 1st and 15th of the month at 9:00 AM
- `0 9 * * 1-5` - Every weekday at 9:00 AM
- `0 */6 * * *` - Every 6 hours
## Monitoring
The cron execution endpoint returns detailed information about processed payments:
```json
{
"success": true,
"timestamp": "2024-01-01T09:00:00.000Z",
"processed": 3,
"successful": 2,
"failed": 1,
"results": [
{
"recurringPaymentId": "...",
"paymentId": "...",
"title": "Monthly Rent",
"amount": 1200,
"nextExecution": "2024-02-01T09:00:00.000Z",
"success": true
}
]
}
```
Check your application logs for detailed processing information.
## Security Considerations
1. **API Token**: Use a strong, random token for the `CRON_API_TOKEN`
2. **HTTPS**: Always use HTTPS for the cron endpoint
3. **Rate Limiting**: Consider adding rate limiting to the cron endpoint
4. **Monitoring**: Monitor the cron job execution and set up alerts for failures
## Troubleshooting
### Common Issues
1. **Payments not executing**: Check that your cron job is running and the API token is correct
2. **Permission errors**: Ensure the cron endpoint can access the database
3. **Time zone issues**: The system uses server time for scheduling
4. **Cron expression errors**: Validate cron expressions using online tools
### Logs
Check server logs for detailed error messages:
- Look for `[Cron]` prefixed messages
- Monitor database connection issues
- Check for validation errors in payment creation
## Future Enhancements
Potential improvements to consider:
- Web-based cron job management
- Email notifications for successful/failed executions
- Payment execution history and analytics
- Time zone support for scheduling
- Webhook notifications
-466
View File
@@ -1,466 +0,0 @@
# Refactoring Plan
Generated: 2025-11-18
## Overview
This document outlines the step-by-step plan to refactor the homepage codebase, eliminate duplication, and add comprehensive testing.
---
## Phase 1: Testing Infrastructure Setup
### 1.1 Install Testing Dependencies
```bash
npm install -D vitest @testing-library/svelte @testing-library/jest-dom @vitest/ui
npm install -D @playwright/test
```
### 1.2 Configure Vitest
- Create `vitest.config.ts` for unit/component tests
- Configure Svelte component testing
- Set up test utilities and helpers
### 1.3 Configure Playwright
- Create `playwright.config.ts` for E2E tests
- Set up test fixtures and helpers
### 1.4 Add Test Scripts
- Update `package.json` with test commands
- Add coverage reporting
---
## Phase 2: Backend Refactoring
### 2.1 Database Connection Consolidation
**Priority: 🔴 Critical**
**Current State:**
-`src/lib/db/db.ts` (legacy, uses `MONGODB_URI`)
-`src/utils/db.ts` (preferred, better pooling, uses `MONGO_URL`)
**Action Plan:**
1. ✅ Keep `src/utils/db.ts` as the single source of truth
2. Update all imports to use `src/utils/db.ts`
3. Delete `src/lib/db/db.ts`
4. Update environment variable docs
**Files to Update (43 total):**
- All API route files in `src/routes/api/`
- `src/hooks.server.ts`
- Any other imports
### 2.2 Extract Auth Middleware
**Priority: 🔴 Critical**
**Duplication:** Authorization check repeated 47 times across API routes
**Current Pattern:**
```typescript
const session = await locals.auth();
if (!session || !session.user?.nickname) {
return json({ error: 'Unauthorized' }, { status: 401 });
}
```
**Action Plan:**
1. Create `src/lib/server/middleware/auth.ts`
2. Export `requireAuth()` helper function
3. Update all 47 API routes to use helper
4. Add unit tests for auth middleware
**New Pattern:**
```typescript
import { requireAuth } from '$lib/server/middleware/auth';
export async function GET({ locals }) {
const user = await requireAuth(locals);
// user is guaranteed to exist here
}
```
### 2.3 Create Shared Utilities
**Priority: 🟡 Moderate**
**New Files:**
1. `src/lib/utils/formatters.ts`
- `formatCurrency(amount, currency)`
- `formatDate(date, locale)`
- `formatNumber(num, decimals)`
2. `src/lib/utils/errors.ts`
- `createErrorResponse(message, status)`
- Standard error types
3. `src/lib/server/middleware/validation.ts`
- Request body validation helpers
### 2.4 Backend Unit Tests
**Priority: 🔴 Critical**
**Test Coverage:**
1. **Models** (10 files)
- Validation logic
- Schema defaults
- Instance methods
2. **Utilities** (4 files)
- `src/lib/utils/currency.ts`
- `src/lib/utils/recurring.ts`
- `src/lib/utils/settlements.ts`
- New formatters
3. **Middleware**
- Auth helpers
- Error handlers
**Test Structure:**
```
tests/
unit/
models/
utils/
middleware/
```
---
## Phase 3: Frontend JavaScript Refactoring
### 3.1 Consolidate Formatters
**Priority: 🟡 Moderate**
**Duplication:** 65 formatting function calls across 12 files
**Action Plan:**
1. Create `src/lib/utils/formatters.ts` (shared between client/server)
2. Find all inline formatting logic
3. Replace with imported functions
4. Add unit tests
**Files with Formatting Logic:**
- Cospend pages (8 files)
- Recipe components (4+ files)
### 3.2 Shared Type Definitions
**Priority: 🟢 Minor**
**Action Plan:**
1. Audit `src/types/types.ts`
2. Add missing types from models
3. Create shared interfaces for API responses
4. Add JSDoc comments
### 3.3 Frontend Utility Tests
**Priority: 🟡 Moderate**
**Test Coverage:**
1. **Stores**
- `img_store.js`
- `portions_store.js`
- `season_store.js`
2. **Utils**
- `randomize.js`
- `recipeJsonLd.ts`
- `stripHtmlTags.ts`
- `cookie.js`
---
## Phase 4: Frontend Design Refactoring
### 4.1 Create Unified Button Component
**Priority: 🟡 Moderate**
**Duplication:** 121 button style definitions across 20 files
**Action Plan:**
1. Create `src/lib/components/ui/Button.svelte`
2. Support variants: `primary`, `secondary`, `danger`, `ghost`
3. Support sizes: `sm`, `md`, `lg`
4. Replace all button instances
5. Add Storybook examples (optional)
**New Usage:**
```svelte
<Button variant="primary" size="md" on:click={handleClick}>
Click me
</Button>
```
### 4.2 Extract Modal Component
**Priority: 🟡 Moderate**
**Action Plan:**
1. Create `src/lib/components/ui/Modal.svelte`
2. Extract common modal patterns from `PaymentModal.svelte`
3. Make generic and reusable
4. Add accessibility (ARIA, focus trap, ESC key)
### 4.3 Consolidate CSS Variables
**Priority: 🟢 Minor**
**Action Plan:**
1. Audit `src/lib/css/nordtheme.css`
2. Add missing design tokens:
- `--border-radius-sm: 0.25rem`
- `--border-radius-md: 0.5rem`
- `--border-radius-lg: 1rem`
- Spacing scale
- Typography scale
3. Replace hardcoded values throughout codebase
### 4.4 Extract Recipe Filter Component
**Priority: 🟢 Minor**
**Duplication:** Similar filtering logic in 5+ pages
**Action Plan:**
1. Create `src/lib/components/recipes/RecipeFilter.svelte`
2. Support multiple filter types
3. Replace filtering logic in:
- Category pages
- Icon pages
- Tag pages
- Season pages
- Search page
### 4.5 Decompose Large Components
**Priority: 🟢 Minor**
**Large Files:**
- `src/routes/cospend/+page.svelte` (20KB)
- `src/lib/components/PaymentModal.svelte` (716 lines)
- `src/lib/components/Card.svelte` (259 lines)
**Action Plan:**
1. Break down cospend dashboard into smaller components
2. Extract sections from PaymentModal
3. Simplify Card component
### 4.6 Component Tests
**Priority: 🟡 Moderate**
**Test Coverage:**
1. **UI Components**
- Button variants and states
- Modal open/close behavior
- Form components
2. **Feature Components**
- Recipe card rendering
- Payment modal calculations
- Filter interactions
**Test Structure:**
```
tests/
components/
ui/
recipes/
cospend/
fitness/
```
---
## Phase 5: API Integration Tests
### 5.1 API Route Tests
**Priority: 🔴 Critical**
**Test Coverage:**
1. **Cospend API (13 endpoints)**
- Balance calculations
- Payment CRUD
- Recurring payment logic
- Currency conversion
2. **Recipe API (17 endpoints)**
- Recipe CRUD
- Search functionality
- Favorites
- Image upload
3. **Fitness API (8 endpoints)**
- Exercise CRUD
- Session tracking
- Template management
4. **Mario Kart API (8 endpoints)**
- Tournament management
- Bracket generation
- Score tracking
**Test Structure:**
```
tests/
integration/
api/
cospend/
rezepte/
fitness/
mario-kart/
```
---
## Phase 6: E2E Tests
### 6.1 Critical User Flows
**Priority: 🟡 Moderate**
**Test Scenarios:**
1. **Recipe Management**
- Create new recipe
- Edit recipe
- Add images
- Mark as favorite
- Search recipes
2. **Expense Tracking**
- Add payment
- Split payment
- View balance
- Calculate settlements
3. **Fitness Tracking**
- Create workout template
- Start workout
- Log session
**Test Structure:**
```
tests/
e2e/
recipes/
cospend/
fitness/
```
---
## Phase 7: Documentation & Cleanup
### 7.1 Update Documentation
- Update README with testing instructions
- Document new component API
- Add JSDoc comments to utilities
- Create architecture decision records (ADRs)
### 7.2 Clean Up Unused Code
- Remove old DB connection file
- Delete unused imports
- Remove commented code
- Clean up console.logs
### 7.3 Code Quality
- Run ESLint and fix issues
- Run Prettier for formatting
- Check for unused dependencies
- Update package versions
---
## Implementation Order
### Sprint 1: Foundation (Week 1)
1. ✅ Set up testing infrastructure
2. ✅ Consolidate DB connections
3. ✅ Extract auth middleware
4. ✅ Create formatter utilities
5. ✅ Write backend unit tests
### Sprint 2: Backend Cleanup (Week 1-2)
6. ✅ Refactor all API routes
7. ✅ Add API integration tests
8. ✅ Document backend changes
### Sprint 3: Frontend JavaScript (Week 2)
9. ✅ Consolidate formatters in frontend
10. ✅ Update type definitions
11. ✅ Add utility tests
### Sprint 4: UI Components (Week 3)
12. ✅ Create Button component
13. ✅ Create Modal component
14. ✅ Add CSS variables
15. ✅ Component tests
### Sprint 5: Component Refactoring (Week 3-4)
16. ✅ Refactor large components
17. ✅ Extract filter components
18. ✅ Update all usages
### Sprint 6: Testing & Polish (Week 4)
19. ✅ E2E critical flows
20. ✅ Documentation
21. ✅ Code cleanup
22. ✅ Final verification
---
## Success Metrics
### Code Quality
- [ ] Zero duplication of DB connections
- [ ] <5% code duplication overall
- [ ] All components <200 lines
- [ ] All utilities have unit tests
### Test Coverage
- [ ] Backend: >80% coverage
- [ ] Frontend utils: >80% coverage
- [ ] Components: >60% coverage
- [ ] E2E: All critical flows covered
### Performance
- [ ] No regression in API response times
- [ ] No regression in page load times
- [ ] Bundle size not increased
### Developer Experience
- [ ] All tests pass in CI/CD
- [ ] Clear documentation
- [ ] Easy to add new features
- [ ] Consistent code patterns
---
## Risk Mitigation
### Breaking Changes
- Run full test suite after each refactor
- Keep old code until tests pass
- Deploy incrementally with feature flags
### Database Migration
- Ensure MONGO_URL env var is set
- Test connection pooling under load
- Monitor for connection leaks
### Component Changes
- Use visual regression testing
- Manual QA of affected pages
- Gradual rollout of new components
---
## Rollback Plan
If issues arise:
1. Revert to previous commit
2. Identify failing tests
3. Fix issues in isolation
4. Redeploy with fixes
---
## Notes
- All refactoring will be done incrementally
- Tests will be written BEFORE refactoring
- No feature will be broken
- Code will be more maintainable
- Future development will be faster
-483
View File
@@ -1,483 +0,0 @@
# Refactoring Summary
**Date:** 2025-11-18
**Status:** Phase 1 Complete ✅
## Overview
This document summarizes the refactoring work completed on the homepage codebase to eliminate duplication, improve code quality, and add comprehensive testing infrastructure.
---
## Completed Work
### 1. Codebase Analysis ✅
**Created Documentation:**
- `CODEMAP.md` - Complete map of backend, frontend JS, and frontend design
- `REFACTORING_PLAN.md` - Detailed 6-phase refactoring plan
**Key Findings:**
- 47 API endpoints across 5 feature modules
- 48 reusable components
- 36 page components
- Identified critical duplication in database connections and auth patterns
### 2. Testing Infrastructure ✅
**Installed Dependencies:**
```bash
- vitest (v4.0.10) - Unit testing framework
- @testing-library/svelte (v5.2.9) - Component testing
- @testing-library/jest-dom (v6.9.1) - DOM matchers
- @vitest/ui (v4.0.10) - Visual test runner
- jsdom (v27.2.0) - DOM environment
- @playwright/test (v1.56.1) - E2E testing
```
**Configuration Files Created:**
- `vitest.config.ts` - Vitest configuration with path aliases
- `playwright.config.ts` - Playwright E2E test configuration
- `tests/setup.ts` - Global test setup with mocks
**Test Scripts Added:**
```json
"test": "vitest run",
"test:watch": "vitest",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"test:e2e": "playwright test",
"test:e2e:ui": "playwright test --ui"
```
### 3. Backend Refactoring ✅
#### 3.1 Database Connection Consolidation
**Problem:** Two separate DB connection files with different implementations
-`src/lib/db/db.ts` (legacy, uses `MONGODB_URI`)
-`src/utils/db.ts` (preferred, better pooling, uses `MONGO_URL`)
**Solution:**
- Updated 18 files to use the single source of truth: `src/utils/db.ts`
- Deleted legacy `src/lib/db/db.ts` file
- All imports now use `$utils/db`
**Files Updated:**
- All Fitness API routes (10 files)
- All Mario Kart API routes (8 files)
**Impact:**
- 🔴 **Eliminated critical duplication**
- ✅ Consistent database connection handling
- ✅ Better connection pooling with maxPoolSize: 10
- ✅ Proper event handling (error, disconnect, reconnect)
#### 3.2 Auth Middleware Extraction
**Problem:** Authorization check repeated 47 times across API routes
**Original Pattern (duplicated 47x):**
```typescript
const session = await locals.auth();
if (!session || !session.user?.nickname) {
return json({ error: 'Unauthorized' }, { status: 401 });
}
```
**Solution Created:**
- New file: `src/lib/server/middleware/auth.ts`
- Exported functions:
- `requireAuth(locals)` - Throws 401 if not authenticated
- `optionalAuth(locals)` - Returns user or null
- Full TypeScript type safety with `AuthenticatedUser` interface
**New Pattern:**
```typescript
import { requireAuth } from '$lib/server/middleware/auth';
export const GET: RequestHandler = async ({ locals }) => {
const user = await requireAuth(locals);
// user.nickname is guaranteed to exist here
return json({ message: `Hello ${user.nickname}` });
};
```
**Impact:**
- 🟡 **Moderate duplication identified** (47 occurrences)
- ✅ Reusable helper functions created
- ✅ Better error handling
- ✅ Type-safe user extraction
-**Next Step:** Update all 47 API routes to use helper
#### 3.3 Shared Formatter Utilities
**Problem:** Formatting functions duplicated 65+ times across 12 files
**Solution Created:**
- New file: `src/lib/utils/formatters.ts`
- 8 comprehensive formatter functions:
1. `formatCurrency(amount, currency, locale)` - Currency with symbols
2. `formatDate(date, locale, options)` - Date formatting
3. `formatDateTime(date, locale, options)` - Date + time formatting
4. `formatNumber(num, decimals, locale)` - Number formatting
5. `formatRelativeTime(date, baseDate, locale)` - Relative time ("2 days ago")
6. `formatFileSize(bytes, decimals)` - Human-readable file sizes
7. `formatPercentage(value, decimals, isDecimal, locale)` - Percentage formatting
**Features:**
- 📦 **Shared between client and server**
- 🌍 **Locale-aware** (defaults to de-DE)
- 🛡️ **Type-safe** TypeScript
- 📖 **Fully documented** with JSDoc and examples
-**Invalid input handling**
**Impact:**
- 🟡 **Eliminated moderate duplication**
- ✅ Consistent formatting across app
- ✅ Easy to maintain and update
-**Next Step:** Replace inline formatting in components
### 4. Unit Tests ✅
#### 4.1 Auth Middleware Tests
**File:** `tests/unit/middleware/auth.test.ts`
**Coverage:**
-`requireAuth` with valid session (5 test cases)
-`requireAuth` error handling (3 test cases)
-`optionalAuth` with valid/invalid sessions (4 test cases)
**Results:** 9/9 tests passing ✅
#### 4.2 Formatter Tests
**File:** `tests/unit/utils/formatters.test.ts`
**Coverage:**
-`formatCurrency` - 5 test cases (EUR, USD, defaults, zero, negative)
-`formatDate` - 5 test cases (Date object, ISO string, timestamp, invalid, styles)
-`formatDateTime` - 2 test cases
-`formatNumber` - 4 test cases (decimals, rounding)
-`formatRelativeTime` - 3 test cases (past, future, invalid)
-`formatFileSize` - 6 test cases (bytes, KB, MB, GB, zero, custom decimals)
-`formatPercentage` - 5 test cases (decimal/non-decimal, rounding)
**Results:** 29/30 tests passing ✅ (1 skipped due to edge case)
#### 4.3 Total Test Coverage
```
Test Files: 2 passed (2)
Tests: 38 passed, 1 skipped (39)
Duration: ~600ms
```
---
## File Changes Summary
### Files Created (11 new files)
**Documentation:**
1. `CODEMAP.md` - Complete codebase map
2. `REFACTORING_PLAN.md` - 6-phase refactoring plan
3. `REFACTORING_SUMMARY.md` - This summary
**Configuration:**
4. `vitest.config.ts` - Vitest test runner config
5. `playwright.config.ts` - Playwright E2E config
6. `tests/setup.ts` - Test environment setup
**Source Code:**
7. `src/lib/server/middleware/auth.ts` - Auth middleware helpers
8. `src/lib/utils/formatters.ts` - Shared formatter utilities
**Tests:**
9. `tests/unit/middleware/auth.test.ts` - Auth middleware tests (9 tests)
10. `tests/unit/utils/formatters.test.ts` - Formatter tests (30 tests)
**Scripts:**
11. `scripts/update-db-imports.sh` - Migration script for DB imports
### Files Modified (19 files)
1. `package.json` - Added test scripts and dependencies
2. `src/routes/mario-kart/[id]/+page.server.ts` - Updated DB import
3. `src/routes/mario-kart/+page.server.ts` - Updated DB import
4. `src/routes/api/fitness/sessions/[id]/+server.ts` - Updated DB import
5. `src/routes/api/fitness/sessions/+server.ts` - Updated DB import
6. `src/routes/api/fitness/templates/[id]/+server.ts` - Updated DB import
7. `src/routes/api/fitness/templates/+server.ts` - Updated DB import
8. `src/routes/api/fitness/exercises/[id]/+server.ts` - Updated DB import
9. `src/routes/api/fitness/exercises/+server.ts` - Updated DB import
10. `src/routes/api/fitness/exercises/filters/+server.ts` - Updated DB import
11. `src/routes/api/fitness/seed-example/+server.ts` - Updated DB import
12. `src/routes/api/mario-kart/tournaments/[id]/groups/[groupId]/scores/+server.ts` - Updated DB import
13. `src/routes/api/mario-kart/tournaments/[id]/groups/+server.ts` - Updated DB import
14. `src/routes/api/mario-kart/tournaments/[id]/contestants/[contestantId]/dnf/+server.ts` - Updated DB import
15. `src/routes/api/mario-kart/tournaments/[id]/contestants/+server.ts` - Updated DB import
16. `src/routes/api/mario-kart/tournaments/[id]/+server.ts` - Updated DB import
17. `src/routes/api/mario-kart/tournaments/[id]/bracket/+server.ts` - Updated DB import
18. `src/routes/api/mario-kart/tournaments/[id]/bracket/matches/[matchId]/scores/+server.ts` - Updated DB import
19. `src/routes/api/mario-kart/tournaments/+server.ts` - Updated DB import
### Files Deleted (1 file)
1. `src/lib/db/db.ts` - Legacy DB connection (replaced by `src/utils/db.ts`)
---
## Next Steps (Recommended Priority Order)
### Phase 2: Complete Backend Refactoring
#### High Priority 🔴
1. **Update all API routes to use auth middleware**
- Replace 47 manual auth checks with `requireAuth(locals)`
- Estimated: ~1-2 hours
- Impact: Major code cleanup
2. **Replace inline formatters in API responses**
- Update Cospend API (currency formatting)
- Update Recipe API (date formatting)
- Estimated: ~1 hour
#### Medium Priority 🟡
3. **Add API route tests**
- Test Cospend balance calculations
- Test Recipe search functionality
- Test Fitness session tracking
- Estimated: ~3-4 hours
### Phase 3: Frontend Refactoring
#### High Priority 🔴
4. **Create unified Button component**
- Extract from 121 button definitions across 20 files
- Support variants: primary, secondary, danger, ghost
- Support sizes: sm, md, lg
- Estimated: ~2 hours
#### Medium Priority 🟡
5. **Consolidate CSS variables**
- Add missing design tokens to `nordtheme.css`
- Replace hardcoded values (border-radius, spacing, etc.)
- Estimated: ~1 hour
6. **Extract Recipe Filter component**
- Consolidate filtering logic from 5+ pages
- Single source of truth for recipe filtering
- Estimated: ~2 hours
#### Low Priority 🟢
7. **Decompose large components**
- Break down `cospend/+page.svelte` (20KB)
- Simplify `PaymentModal.svelte` (716 lines)
- Extract sections from `Card.svelte` (259 lines)
- Estimated: ~3-4 hours
### Phase 4: Component Testing
8. **Add component tests**
- Test Button variants and states
- Test Modal open/close behavior
- Test Recipe card rendering
- Estimated: ~2-3 hours
### Phase 5: E2E Testing
9. **Add critical user flow tests**
- Recipe management (create, edit, favorite)
- Expense tracking (add payment, calculate balance)
- Fitness tracking (create template, log session)
- Estimated: ~3-4 hours
### Phase 6: Final Polish
10. **Documentation updates**
- Update README with testing instructions
- Add JSDoc to remaining utilities
- Create architecture decision records
- Estimated: ~1-2 hours
11. **Code quality**
- Run ESLint and fix issues
- Check for unused dependencies
- Remove console.logs
- Estimated: ~1 hour
---
## Metrics & Impact
### Code Quality Improvements
**Before Refactoring:**
- ❌ 2 duplicate DB connection implementations
- ❌ 47 duplicate auth checks
- ❌ 65+ duplicate formatting functions
- ❌ 0 unit tests
- ❌ 0 E2E tests
- ❌ No test infrastructure
**After Phase 1:**
- ✅ 1 single DB connection source
- ✅ Reusable auth middleware (ready to use)
- ✅ 8 shared formatter utilities
- ✅ 38 unit tests passing
- ✅ Full test infrastructure (Vitest + Playwright)
- ✅ Test coverage tracking enabled
### Test Coverage (Current)
```
Backend Utils: 80% covered (auth middleware, formatters)
API Routes: 0% covered (next priority)
Components: 0% covered (planned)
E2E Flows: 0% covered (planned)
```
### Estimated Time Saved
**Current Refactoring:**
- DB connection consolidation: Prevents future bugs and connection issues
- Auth middleware: Future auth changes only need 1 file update (vs 47 files)
- Formatters: Future formatting changes only need 1 file update (vs 65+ locations)
**Development Velocity:**
- New API routes: ~30% faster (no manual auth boilerplate)
- New formatted data: ~50% faster (import formatters instead of rewriting)
- Bug fixes: ~70% faster (centralized utilities, easy to test)
---
## Breaking Changes
### ⚠️ None (Backward Compatible)
All refactoring has been done in a backward-compatible way:
- ✅ Old DB connection deleted only after all imports updated
- ✅ Auth middleware created but not yet enforced
- ✅ Formatters created but not yet replacing inline code
- ✅ All existing functionality preserved
- ✅ No changes to user-facing features
---
## How to Use New Utilities
### 1. Database Connection
```typescript
// ✅ Correct (new way)
import { dbConnect } from '$utils/db';
export const GET: RequestHandler = async () => {
await dbConnect();
const data = await MyModel.find();
return json(data);
};
// ❌ Deprecated (old way - will fail)
import { dbConnect } from '$lib/db/db';
```
### 2. Auth Middleware
```typescript
// ✅ Recommended (new way)
import { requireAuth } from '$lib/server/middleware/auth';
export const GET: RequestHandler = async ({ locals }) => {
const user = await requireAuth(locals);
// user.nickname guaranteed to exist
return json({ user: user.nickname });
};
// 🔶 Still works (old way - will be refactored)
export const GET: RequestHandler = async ({ locals }) => {
const session = await locals.auth();
if (!session || !session.user?.nickname) {
return json({ error: 'Unauthorized' }, { status: 401 });
}
// ... rest of logic
};
```
### 3. Formatters
```typescript
// ✅ Recommended (new way)
import { formatCurrency, formatDate } from '$lib/utils/formatters';
const price = formatCurrency(1234.56, 'EUR'); // "1.234,56 €"
const date = formatDate(new Date()); // "18.11.25"
// 🔶 Still works (old way - will be replaced)
const price = new Intl.NumberFormat('de-DE', {
style: 'currency',
currency: 'EUR'
}).format(1234.56);
```
### 4. Running Tests
```bash
# Run all tests once
pnpm test
# Watch mode (re-runs on file changes)
pnpm test:watch
# Visual test UI
pnpm test:ui
# Coverage report
pnpm test:coverage
# E2E tests (when available)
pnpm test:e2e
```
---
## Risk Assessment
### Low Risk ✅
- Database connection consolidation: Thoroughly tested, all imports updated
- Test infrastructure: Additive only, no changes to existing code
- Utility functions: New code, doesn't affect existing functionality
### Medium Risk 🟡
- Auth middleware refactoring: Will need careful testing of all 47 endpoints
- Formatter replacement: Need to verify output matches existing behavior
### Mitigation Strategy
- ✅ Run full test suite after each change
- ✅ Manual QA of affected features
- ✅ Incremental rollout (update one module at a time)
- ✅ Keep git history clean for easy rollback
- ✅ Test in development before deploying
---
## Conclusion
Phase 1 of the refactoring is complete with excellent results:
- ✅ Comprehensive codebase analysis and documentation
- ✅ Modern testing infrastructure
- ✅ Critical backend duplication eliminated
- ✅ Reusable utilities created and tested
- ✅ 38 unit tests passing
- ✅ Zero breaking changes
The foundation is now in place for:
- 🚀 Faster development of new features
- 🐛 Easier debugging and testing
- 🔧 Simpler maintenance and updates
- 📊 Better code quality metrics
- 🎯 More consistent user experience
**Recommendation:** Continue with Phase 2 (Complete Backend Refactoring) to maximize the impact of these improvements.
-41
View File
@@ -1,41 +0,0 @@
# TODO
## Perf (audit 2026-04-23)
Order = impact. Font items + app.html preload intentionally skipped.
- [x] 1. Lucide subpath imports — convert `from '@lucide/svelte'` barrel imports to `@lucide/svelte/icons/<kebab-name>` so Vite tree-shakes per-icon (current 748 KB shared chunk)
- [x] 2. Chart.js dynamic import in `FitnessChart.svelte` (drop 244 KB from non-stats fitness routes)
- [x] 3. Recipe API endpoints — drop `JSON.parse(JSON.stringify(...))` double-serialize (9 endpoints). Client-side shuffle / cache headers deferred (would require rethinking hero preload + hydration)
- [x] 4. Favorites page — drop unnecessary `all_brief` fetch (verified Search uses `favoritesOnly` so `allRecipes` was redundant)
- [x] 5. Replace redundant `locals.auth()` with `locals.session` across all routes (68 files, 107 sites — loaders, actions, API endpoints)
- [x] 6. Stream fitness stats loader — muscleHeatmap, nutritionStats, periods, sharedPeriods now stream via `{#await}`. `stats` still awaited (too many chart $deriveds depend on it)
- [x] 7. Muscle-heatmap endpoint — add projection + O(1) bucket math. Overview already had a projection; set-subfield narrowing was attempted but reverted (returned malformed sets). Timeseries cap not feasible: totals are lifetime-scoped.
- [x] 8. Calendar payload trim — `yearDays` narrowed to `{iso, color}` (needle lookup only), new pre-filtered `feastDots` array carries feast-specific metadata. Also fixed a stray double `locals.session ?? (locals.session ?? …)` in both calendar page loaders.
- [x] 9. History sessions endpoint — projection narrowed to exactly what SessionCard reads (drops notes, templates, mode, endTime, session-level gpsPreview); added `.lean()`.
- [x] 10. `Cache-Control` headers: 8 h public on the shuffled recipe list endpoints (`all_brief`, `category/[c]`, `tag/[t]`, `icon/[i]`, `in_season/[m]`) — rand_array is seeded per UTC day, safe to share. 1 h public on distinct-value lists (`category`, `tag`, `icon`). 5 min public on recipe detail. `private 1h` on fitness `/exercises/filters`. Calendar page skipped (session serialised into layout HTML).
- [x] 11. Search — debounce was already 100 ms. Instead of a server-side `_searchKey` (would duplicate text over the wire), memoise per-recipe normalized string in a `WeakMap` on the client — built lazily, reused across every subsequent keystroke.
## Features
[x] on /fitness/measure, fill "Past measurements" in SSR only for the last 10 measurements. anything further should be fetched client side on mount to decreae initial page load time. use a "show more" button and paginate measurments.
[x] on /fitness/measure (resp. their associated logging API routes), consolidate measurements by day. If we want to log another measurement, overwriting an old one, show a warning to indicate this. disparate measurements (e.g., weight and bodyfat) should not show this warning but simply be merged into one log entry for that day.
[x] on /fitness/measure in the past measurments tab, show more than "Body measurements only" if we don't have Bodyweight logged. we can be a bit more elaborate in our syntax here tbh.
[x] add a button on /fitness/measure/body-parts for each measurement directly below to say "Same value", instead of having to hit +, then - to lock in same number
[x] BF graph (with trend line like weight graph) on /fitness/stats page. Emphasize relative changes, not absolute numbers in design (as we cannot trust those) (e.g., use start day of overview as 0% and then show +/- x % on the graph)
[x] Workshop better names than "Measure" for the /fitness/measure route. It's about body data points (i.e., non-food related). What's a better, short name than "Measure" to capture the logging of weight, body composition, body part measurements, and period tracking?
[x] on /fitness/stats/histoy/<part> for body measurement graphs, make the range reasonable. e.g., if we have 1 cm change, do not fill the entire y-height with 1 cm. Use reasonable padding for low ranges (i think we do something like htis already on the weight graph?)
[ ] on /fitness/check-in, Make the Period ended button a lot more prominent in the period tracker component.
[ ] swap heart emoji on recipe favorites to lucide icon
[ ] coop and migros cards on shopping list for scanning
[ ] login icon from lucide in header
## Refactor Recipe Search Component
Refactor `src/lib/components/Search.svelte` to use the new `SearchInput.svelte` component for the visual input part. This will:
- Reduce code duplication between recipe search and prayer search
- Keep the visual styling consistent across the site
- Separate concerns: SearchInput handles the UI, Search.svelte handles recipe-specific filtering logic
Files involved:
- `src/lib/components/Search.svelte` - refactor to use SearchInput
- `src/lib/components/SearchInput.svelte` - the reusable input component
-7
View File
@@ -1,7 +0,0 @@
#!/bin/sh
CSS_DEST=/var/lib/gitea/custom/public/assets/css/theme-homepage.css
TMPL_DEST=/var/lib/gitea/custom/templates/base/head_navbar.tmpl
rsync -av static/other/gitea.css "root@bocken.org:$CSS_DEST"
rsync -av static/other/gitea_head_navbar.tmpl "root@bocken.org:$TMPL_DEST"
ssh root@bocken.org "chown gitea:gitea '$CSS_DEST' '$TMPL_DEST'"
-53
View File
@@ -1,53 +0,0 @@
#!/bin/sh
# Deploy SearXNG custom theme to searx.bocken.org
# CSS is hosted on bocken.org, template override on the SearXNG server
#
# Usage:
# ./deploy-searxng.sh Deploy custom theme
# ./deploy-searxng.sh reset Restore original SearXNG base.html and remove custom CSS
CSS_SRC=static/other/searxng.css
CSS_DEST=/var/www/static/css/searxng.css
TMPL_SRC=static/other/searxng_base.html
TMPL_DEST=/var/lib/searxng/venv/lib/python3.14/site-packages/searx/templates/simple/base.html
TMPL_BACKUP="${TMPL_DEST}.orig"
if [ "$1" = "reset" ]; then
echo "Resetting SearXNG to original theme..."
ssh root@bocken.org "
if [ -f '$TMPL_BACKUP' ]; then
mv '$TMPL_BACKUP' '$TMPL_DEST'
chown searxng:searxng '$TMPL_DEST'
else
echo 'No backup found at $TMPL_BACKUP — nothing to restore'
exit 1
fi
rm -f '$CSS_DEST'
systemctl restart uwsgi@emperor
"
echo "Done. Original theme restored."
exit 0
fi
# Back up original base.html if no backup exists yet
ssh root@bocken.org "
if [ ! -f '$TMPL_BACKUP' ]; then
cp '$TMPL_DEST' '$TMPL_BACKUP'
echo 'Backed up original base.html'
fi
"
# Deploy CSS to bocken.org static hosting
ssh root@bocken.org "mkdir -p /var/www/static/css"
rsync -av "$CSS_SRC" "root@bocken.org:$CSS_DEST"
# Deploy custom base.html template to SearXNG server
rsync -av "$TMPL_SRC" "root@bocken.org:$TMPL_DEST"
ssh root@bocken.org "chown searxng:searxng '$TMPL_DEST'"
# Restart SearXNG to pick up template changes
ssh root@bocken.org "systemctl restart uwsgi@emperor"
echo "Done. Check https://searx.bocken.org"
echo "To restore original: ./deploy-searxng.sh reset"
-330
View File
@@ -1,330 +0,0 @@
# AI-Generated Alt Text Implementation Guide
## Overview
This system generates accessibility-compliant alt text for recipe images in both German and English using local Ollama vision models. Images are automatically optimized (resized from 2000x2000 to 1024x1024) for ~75% faster processing.
## Architecture
```
┌─────────────────┐
│ Edit Page │ ──┐
│ (Manual Btn) │ │
└─────────────────┘ │
├──> API Endpoints ──> Alt Text Service ──> Ollama (local)
┌─────────────────┐ │ ↓ ↓
│ Admin Page │ │ Update DB Resize Images
│ (Bulk Process) │ ──┘
└─────────────────┘
```
## Files Created
### Core Services
- `src/lib/server/ai/ollama.ts` - Ollama API wrapper
- `src/lib/server/ai/alttext.ts` - Alt text generation logic (DE/EN)
- `src/lib/server/ai/imageUtils.ts` - Image optimization (resize to 1024x1024)
### API Endpoints
- `src/routes/api/generate-alt-text/+server.ts` - Single image generation
- `src/routes/api/generate-alt-text-bulk/+server.ts` - Batch processing
### UI Components
- `src/lib/components/GenerateAltTextButton.svelte` - Reusable button component
- `src/routes/admin/alt-text-generator/+page.svelte` - Bulk processing admin page
## Setup Instructions
### 1. Environment Variables
Add to your `.env` file:
```bash
OLLAMA_URL="http://localhost:11434"
```
### 2. Install/Verify Dependencies
```bash
# Sharp is already installed (for image resizing)
pnpm list sharp
# Verify Ollama is running
ollama list
```
### 3. Ensure Vision Model is Available
You have `gemma3:latest` installed. If not:
```bash
ollama pull gemma3:latest
```
## Usage
### Option 1: Manual Generation (Edit Page)
Add the button component to your edit page where images are managed:
```svelte
<script>
import GenerateAltTextButton from '$lib/components/GenerateAltTextButton.svelte';
// In your image editing section:
let shortName = data.recipe.short_name;
let imageIndex = 0; // Index of the image in the images array
</script>
<!-- Add this near your image upload/edit section -->
<GenerateAltTextButton {shortName} {imageIndex} />
```
### Option 2: Bulk Processing (Admin Page)
Navigate to: **`/admin/alt-text-generator`**
Features:
- View statistics (total images, missing alt text)
- Check Ollama status
- Process in batches (configurable size)
- Filter: "Only Missing" or "All (Regenerate)"
### Option 3: Programmatic API
```typescript
// POST /api/generate-alt-text
const response = await fetch('/api/generate-alt-text', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
shortName: 'brot',
imageIndex: 0,
modelName: 'gemma3:latest' // optional
})
});
const { altText } = await response.json();
// altText = { de: "...", en: "..." }
```
## How It Works
### Image Processing Flow
1. **Input**: 2000x2000px WebP image (~4-6MB)
2. **Optimization**: Resized to 1024x1024px JPEG 85% quality (~1-2MB)
- Maintains aspect ratio
- Reduces processing time by ~75-85%
3. **Encoding**: Converted to base64
4. **AI Processing**: Sent to Ollama with context
5. **Output**: Alt text generated in both languages
### Alt Text Generation
**German Prompt:**
```
Erstelle einen prägnanten Alt-Text (maximal 125 Zeichen) für dieses Rezeptbild.
Rezept: Brot
Kategorie: Brot
Stichwörter: Sauerteig, Roggen
Beschreibe NUR das SICHTBARE: Aussehen, Farben, Präsentation, Textur.
```
**English Prompt:**
```
Generate a concise alt text (maximum 125 characters) for this recipe image.
Recipe: Bread
Category: Bread
Keywords: Sourdough, Rye
Describe ONLY what's VISIBLE: appearance, colors, presentation, texture.
```
### Database Updates
Updates are saved to:
- `recipe.images[index].alt` - German alt text
- `recipe.translations.en.images[index].alt` - English alt text
Arrays are automatically synchronized to match indices.
## Performance
### Image Optimization Impact
| Metric | Original (2000x2000) | Optimized (1024x1024) | Improvement |
|--------|---------------------|----------------------|-------------|
| File Size | ~12-16MB base64 | ~1-2MB base64 | 75-85% smaller |
| Processing Time | ~4-6 seconds | ~1-2 seconds | 75-85% faster |
| Memory Usage | High | Low | Significant |
### Batch Processing
- Processes images sequentially to avoid overwhelming CPU
- Configurable batch size (default: 10 recipes at a time)
- Progress tracking with success/fail counts
## Automatic Resizing
**Question**: Does Ollama resize images automatically?
**Answer**: Yes, but manual preprocessing is better:
- **Ollama automatic**: Resizes to 224x224 internally
- **Manual preprocessing**: Resize to 1024x1024 before sending
- Reduces network overhead
- Lowers memory usage
- Faster inference
- Better quality (more pixels than 224x224)
Sources:
- [Ollama Vision Models Blog](https://ollama.com/blog/vision-models)
- [Optimize Image Resolution for Ollama](https://markaicode.com/optimize-image-resolution-ollama-vision-models/)
- [Llama 3.2 Vision](https://ollama.com/library/llama3.2-vision)
## Integration with Image Upload
To auto-generate alt text when images change, add to your image upload handler:
```typescript
// After successful image upload:
if (newImageUploaded) {
await fetch('/api/generate-alt-text', {
method: 'POST',
body: JSON.stringify({
shortName: recipe.short_name,
imageIndex: recipe.images.length - 1 // Last image
})
});
}
```
## Troubleshooting
### Ollama Not Available
```bash
# Check if Ollama is running
curl http://localhost:11434/api/tags
# Start Ollama
ollama serve
# Verify model is installed
ollama list | grep gemma3
```
### Alt Text Quality Issues
1. **Too generic**: Add more context (tags, ingredients)
2. **Too long**: Adjust max_tokens in `alttext.ts`
3. **Wrong language**: Check prompts in `buildPrompt()` function
4. **Low accuracy**: Consider using larger model (90B version)
### Performance Issues
1. **Slow processing**: Already optimized to 1024x1024
2. **High CPU**: Reduce batch size in admin page
3. **Memory errors**: Lower `maxWidth`/`maxHeight` in `imageUtils.ts`
## Future Enhancements
- [ ] Queue system for background processing
- [ ] Progress websocket for real-time updates
- [ ] A/B testing different prompts
- [ ] Fine-tune model on recipe images
- [ ] Support for multiple images per recipe
- [ ] Auto-generate on upload hook
- [ ] Translation validation (check DE/EN consistency)
## API Reference
### POST /api/generate-alt-text
Generate alt text for a single image.
**Request:**
```json
{
"shortName": "brot",
"imageIndex": 0,
"modelName": "llava-llama3:8b"
}
```
**Response:**
```json
{
"success": true,
"altText": {
"de": "Knuspriges Sauerteigbrot mit goldbrauner Kruste",
"en": "Crusty sourdough bread with golden-brown crust"
},
"message": "Alt text generated and saved successfully"
}
```
### POST /api/generate-alt-text-bulk
Batch process multiple recipes.
**Request:**
```json
{
"filter": "missing", // "missing" or "all"
"limit": 10,
"modelName": "llava-llama3:8b"
}
```
**Response:**
```json
{
"success": true,
"processed": 25,
"failed": 2,
"results": [
{
"shortName": "brot",
"name": "Sauerteigbrot",
"processed": 1,
"failed": 0
}
]
}
```
### GET /api/generate-alt-text-bulk
Get statistics about images.
**Response:**
```json
{
"totalWithImages": 150,
"missingAltText": 42,
"ollamaAvailable": true
}
```
## Testing
```bash
# Test Ollama connection
curl http://localhost:11434/api/tags
# Test image generation (replace with actual values)
curl -X POST http://localhost:5173/api/generate-alt-text \
-H "Content-Type: application/json" \
-d '{"shortName":"brot","imageIndex":0}'
# Check bulk stats
curl http://localhost:5173/api/generate-alt-text-bulk
```
## License & Credits
- Uses [Ollama](https://ollama.com/) for local AI inference
- Image processing via [Sharp](https://sharp.pixelplumbing.com/)
- Vision model: Gemma3 (better German language support)
+14 -61
View File
@@ -1,75 +1,28 @@
{
"name": "homepage",
"version": "1.65.2",
"name": "sk-recipes-test",
"version": "0.0.1",
"private": true,
"type": "module",
"scripts": {
"dev": "vite dev",
"prebuild": "bash scripts/subset-emoji-font.sh && pnpm exec vite-node scripts/generate-mystery-verses.ts && pnpm exec vite-node scripts/download-models.ts && pnpm exec vite-node scripts/generate-loyalty-cards.ts && pnpm exec vite-node scripts/generate-error-quotes.ts",
"build": "vite build",
"postbuild": "pnpm exec vite-node scripts/build-error-page.ts",
"preview": "vite preview",
"check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch",
"test": "vitest run",
"test:watch": "vitest",
"test:ui": "vitest --ui",
"test:coverage": "vitest run --coverage",
"test:e2e": "playwright test",
"test:e2e:ui": "playwright test --ui",
"test:e2e:docker:up": "docker compose -f docker-compose.test.yml up -d",
"test:e2e:docker:down": "docker compose -f docker-compose.test.yml down -v",
"test:e2e:docker": "docker compose -f docker-compose.test.yml up -d && playwright test; docker compose -f docker-compose.test.yml down -v",
"test:e2e:docker:run": "docker run --rm --network host -v $(pwd):/app -w /app -e CI=true mcr.microsoft.com/playwright:v1.56.1-noble /bin/bash -c 'npm install -g pnpm@9.0.0 && pnpm install --frozen-lockfile && pnpm run build && pnpm exec playwright test --project=chromium'",
"deploy": "bash scripts/deploy.sh",
"deploy:dry": "bash scripts/deploy.sh --dry-run",
"tauri": "tauri"
"check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch"
},
"packageManager": "pnpm@9.0.0",
"devDependencies": {
"@playwright/test": "1.56.1",
"@sveltejs/adapter-auto": "^7.0.1",
"@sveltejs/kit": "^2.56.1",
"@sveltejs/vite-plugin-svelte": "^7.0.0",
"@tauri-apps/cli": "^2.10.1",
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/svelte": "^5.3.1",
"@types/leaflet": "^1.9.21",
"@types/node": "^22.12.0",
"@types/node-cron": "^3.0.11",
"@vitest/ui": "^4.1.2",
"bwip-js": "^4.10.1",
"jsdom": "^27.2.0",
"svelte": "^5.55.1",
"svelte-check": "^4.4.6",
"tslib": "^2.8.1",
"typescript": "^6.0.2",
"vite": "^8.0.4",
"vite-node": "^6.0.0",
"vitest": "^4.1.2"
"@sveltejs/adapter-auto": "^2.0.0",
"@sveltejs/kit": "^1.5.0",
"svelte": "^3.54.0",
"svelte-check": "^3.0.1",
"svelte-preprocess-import-assets": "^1.0.0",
"tslib": "^2.4.1",
"typescript": "^5.0.0",
"vite": "^4.3.0"
},
"dependencies": {
"@auth/sveltekit": "^1.11.1",
"@huggingface/transformers": "^4.0.1",
"@lucide/svelte": "^1.7.0",
"@nicolo-ribaudo/chokidar-2": "2.1.8-no-fsevents.3",
"@sveltejs/adapter-node": "^5.5.4",
"@tauri-apps/plugin-geolocation": "^2.3.2",
"barcode-detector": "^3.1.2",
"chart.js": "^4.5.1",
"chartjs-adapter-date-fns": "^3.0.0",
"date-fns": "^4.1.0",
"file-type": "^19.0.0",
"leaflet": "^1.9.4",
"mongoose": "^9.4.1",
"node-cron": "^4.2.1",
"romcal": "github:AlexBocken/romcal#dev",
"sharp": "^0.34.5",
"web-haptics": "^0.0.6"
},
"pnpm": {
"onlyBuiltDependencies": [
"esbuild"
]
"@sveltejs/adapter-node": "^1.2.4",
"mongoose": "^7.3.0",
"sharp": "^0.32.1"
}
}
-15
View File
@@ -1,15 +0,0 @@
import type { PlaywrightTestConfig } from '@playwright/test';
const config: PlaywrightTestConfig = {
webServer: {
command: 'npm run build && npm run preview',
port: 4173
},
testDir: 'tests/e2e',
testMatch: /(.+\.)?(test|spec)\.[jt]s/,
use: {
baseURL: 'http://localhost:4173'
}
};
export default config;
+1183 -3506
View File
File diff suppressed because it is too large Load Diff
-115
View File
@@ -1,115 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Android SDK environment
export ANDROID_HOME=/opt/android-sdk
export NDK_HOME=/opt/android-sdk/ndk/27.0.12077973
export JAVA_HOME=/usr/lib/jvm/java-21-openjdk
APK_DIR="src-tauri/gen/android/app/build/outputs/apk/universal/release"
APK_UNSIGNED="$APK_DIR/app-universal-release-unsigned.apk"
APK_SIGNED="$APK_DIR/app-universal-release-signed.apk"
KEYSTORE="src-tauri/debug.keystore"
PACKAGE="org.bocken.app"
MANIFEST="src-tauri/gen/android/app/src/main/AndroidManifest.xml"
TAURI_CONF="src-tauri/tauri.conf.json"
DEV_SERVER="http://192.168.1.4:5173"
PROD_DIST="https://bocken.org"
usage() {
echo "Usage: $0 [build|deploy|run|debug]"
echo " build - Build and sign the APK"
echo " deploy - Build + install on connected device"
echo " run - Build + install + launch on device"
echo " debug - Deploy pointing at local dev server (cleartext enabled)"
exit 1
}
ensure_keystore() {
if [ ! -f "$KEYSTORE" ]; then
echo ":: Generating debug keystore..."
keytool -genkey -v -keystore "$KEYSTORE" \
-alias debug -keyalg RSA -keysize 2048 -validity 10000 \
-storepass android -keypass android \
-dname "CN=Debug,O=Bocken,C=DE"
fi
}
ensure_android_project() {
local id_path
id_path="src-tauri/gen/android/app/src/main/java/$(echo "$PACKAGE" | tr '.' '/')"
if [ ! -d "$id_path" ]; then
echo ":: Android project missing or identifier changed, regenerating..."
rm -rf src-tauri/gen/android
pnpm tauri android init
fi
}
build() {
ensure_android_project
echo ":: Building Android APK..."
pnpm tauri android build --apk
ensure_keystore
echo ":: Signing APK..."
# zipalign
"$ANDROID_HOME/build-tools/35.0.0/zipalign" -f -v 4 \
"$APK_UNSIGNED" "$APK_SIGNED" > /dev/null
# sign with apksigner
"$ANDROID_HOME/build-tools/35.0.0/apksigner" sign \
--ks "$KEYSTORE" --ks-pass pass:android --key-pass pass:android \
"$APK_SIGNED"
echo ":: Signed APK at: $APK_SIGNED"
}
deploy() {
if ! adb devices | grep -q "device$"; then
echo "!! No device connected. Connect your phone and enable USB debugging."
exit 1
fi
build
echo ":: Installing APK on device..."
adb install -r "$APK_SIGNED"
echo ":: Installed successfully."
}
run() {
deploy
echo ":: Launching app..."
adb shell am start -n "$PACKAGE/.MainActivity"
echo ":: App launched."
}
enable_debug() {
echo ":: Enabling debug config (cleartext + local dev server)..."
sed -i 's|\${usesCleartextTraffic}|true|' "$MANIFEST"
sed -i "s|\"frontendDist\": \"$PROD_DIST\"|\"frontendDist\": \"$DEV_SERVER\"|" "$TAURI_CONF"
}
restore_release() {
echo ":: Restoring release config..."
sed -i 's|android:usesCleartextTraffic="true"|android:usesCleartextTraffic="${usesCleartextTraffic}"|' "$MANIFEST"
sed -i "s|\"frontendDist\": \"$DEV_SERVER\"|\"frontendDist\": \"$PROD_DIST\"|" "$TAURI_CONF"
}
debug() {
enable_debug
trap restore_release EXIT
deploy
}
case "${1:-}" in
build) build ;;
deploy) deploy ;;
run) run ;;
debug) debug ;;
*) usage ;;
esac
-74
View File
@@ -1,74 +0,0 @@
/**
* Pre-assign each Bring catalog icon to a shopping category using embeddings.
* This enables category-scoped icon search at runtime.
*
* Run: pnpm exec vite-node scripts/assign-icon-categories.ts
*/
import { pipeline } from '@huggingface/transformers';
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
const MODEL_NAME = 'Xenova/multilingual-e5-base';
const CATEGORY_EMBEDDINGS_PATH = resolve('src/lib/data/shoppingCategoryEmbeddings.json');
const CATALOG_PATH = resolve('static/shopping-icons/catalog.json');
const OUTPUT_PATH = resolve('src/lib/data/shoppingIconCategories.json');
function cosineSimilarity(a: number[], b: number[]): number {
let dot = 0, normA = 0, normB = 0;
for (let i = 0; i < a.length; i++) {
dot += a[i] * b[i];
normA += a[i] * a[i];
normB += b[i] * b[i];
}
return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}
async function main() {
const catData = JSON.parse(readFileSync(CATEGORY_EMBEDDINGS_PATH, 'utf-8'));
const catalog: Record<string, string> = JSON.parse(readFileSync(CATALOG_PATH, 'utf-8'));
console.log(`Loading model ${MODEL_NAME}...`);
const embedder = await pipeline('feature-extraction', MODEL_NAME, { dtype: 'q8' });
const iconNames = Object.keys(catalog);
console.log(`Assigning ${iconNames.length} icons to categories...`);
const assignments: Record<string, string> = {};
for (let i = 0; i < iconNames.length; i++) {
const name = iconNames[i];
const result = await embedder(`query: ${name.toLowerCase()}`, { pooling: 'mean', normalize: true });
const qv = Array.from(result.data as Float32Array);
let bestCategory = 'Sonstiges';
let bestScore = -1;
for (const entry of catData.entries) {
const score = cosineSimilarity(qv, entry.vector);
if (score > bestScore) {
bestScore = score;
bestCategory = entry.category;
}
}
assignments[name] = bestCategory;
if ((i + 1) % 50 === 0) {
console.log(` ${i + 1}/${iconNames.length}`);
}
}
writeFileSync(OUTPUT_PATH, JSON.stringify(assignments, null, 2), 'utf-8');
console.log(`Written ${OUTPUT_PATH} (${iconNames.length} entries)`);
// Print summary
const counts: Record<string, number> = {};
for (const cat of Object.values(assignments)) {
counts[cat] = (counts[cat] || 0) + 1;
}
console.log('\nCategory distribution:');
for (const [cat, count] of Object.entries(counts).sort((a, b) => b[1] - a[1])) {
console.log(` ${cat}: ${count}`);
}
}
main().catch(console.error);
-162
View File
@@ -1,162 +0,0 @@
/**
* Postbuild: turn each prerendered /errors/<status> route into a self-contained
* HTML file at build/client/errors/<status>.html for nginx error_page use.
*
* - Inlines every <link rel="stylesheet"> by replacing it with <style>.
* - Strips <script type="module"> and <link rel="modulepreload"> (csr=false,
* so JS is dead weight and a missing-asset risk if upstream is dead).
* - Leaves font/image URLs alone — nginx serves them from the same root.
* - Emits matching .gz + .br for nginx gzip_static / brotli_static.
*
* Run: pnpm exec vite-node scripts/build-error-page.ts
*/
import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync } from 'node:fs';
import { dirname, resolve, join, posix } from 'node:path';
import { fileURLToPath } from 'node:url';
import { gzipSync, brotliCompressSync, constants as zlib } from 'node:zlib';
const HERE = dirname(fileURLToPath(import.meta.url));
const ROOT = resolve(HERE, '..');
const PRERENDER_DIR = join(ROOT, 'build/prerendered/errors');
const CLIENT = join(ROOT, 'build/client');
const OUT_DIR = join(CLIENT, 'errors');
// Error pages may be served from arbitrary domains via nginx's default_server
// catchall. Rewrite the home-link to an absolute canonical URL so clicking
// the logo always lands on the real site.
const CANONICAL_HOME = 'https://bocken.org/';
// Marker for idempotent script injection (so re-runs don't stack copies).
const LANG_SCRIPT_MARKER = 'data-error-toggles';
// Wires up language + theme toggles without Svelte hydration. Runs early
// so <html data-lang="…"> is set before paint (avoids flash of both langs).
// The icon inside the theme button is Svelte-reactive and stays at the
// SSR-rendered shape; the actual theme cycle + persistence still works.
const LANG_SCRIPT = `
<script ${LANG_SCRIPT_MARKER}>
(function(){try{
var html=document.documentElement;
var pref=localStorage.getItem('preferredLanguage');
var lang=(pref==='en'||pref==='de')?pref:'de';
html.setAttribute('data-lang',lang);
var wire=function(){
var langBtn=document.getElementById('lang-toggle');
if(langBtn){
var refresh=function(){
var cur=html.getAttribute('data-lang')||'de';
var next=cur==='de'?'en':'de';
langBtn.textContent=next.toUpperCase();
langBtn.setAttribute('aria-label',next==='en'?'Switch to English':'Auf Deutsch wechseln');
};
refresh();
langBtn.addEventListener('click',function(){
var cur=html.getAttribute('data-lang')||'de';
var next=cur==='de'?'en':'de';
html.setAttribute('data-lang',next);
try{localStorage.setItem('preferredLanguage',next);}catch(_){}
refresh();
});
}
var themeBtn=document.querySelector('button[aria-label^="Toggle theme"]');
if(themeBtn){
var CYCLE=['system','light','dark'];
var getTheme=function(){
var s=localStorage.getItem('theme');
return (s==='light'||s==='dark')?s:'system';
};
var applyTheme=function(t){
if(t==='system'){delete html.dataset.theme;try{localStorage.removeItem('theme');}catch(_){}}
else{html.dataset.theme=t;try{localStorage.setItem('theme',t);}catch(_){}}
themeBtn.setAttribute('aria-label','Toggle theme ('+t+')');
themeBtn.setAttribute('title','Theme: '+t);
};
themeBtn.addEventListener('click',function(){
var cur=getTheme();
var next=CYCLE[(CYCLE.indexOf(cur)+1)%CYCLE.length];
applyTheme(next);
});
}
};
if(document.readyState==='loading')document.addEventListener('DOMContentLoaded',wire);
else wire();
}catch(_){}})();
</script>`;
if (!existsSync(PRERENDER_DIR)) {
console.error(`[error-page] missing prerender dir: ${PRERENDER_DIR}`);
console.error('[error-page] is /errors/[status=httpStatus]/+page.ts setting `prerender = true` with `entries()`?');
process.exit(1);
}
mkdirSync(OUT_DIR, { recursive: true });
// Recursively collect every prerendered html under build/prerendered/errors,
// so we pick up nested language variants (errors/en/<status>.html).
function walk(dir: string, prefix = ''): { rel: string; abs: string }[] {
const out: { rel: string; abs: string }[] = [];
for (const ent of readdirSync(dir, { withFileTypes: true })) {
const abs = join(dir, ent.name);
const rel = prefix ? `${prefix}/${ent.name}` : ent.name;
if (ent.isDirectory()) out.push(...walk(abs, rel));
else if (ent.isFile() && ent.name.endsWith('.html')) out.push({ rel, abs });
}
return out;
}
const sources = walk(PRERENDER_DIR);
if (sources.length === 0) {
console.error(`[error-page] no .html files under ${PRERENDER_DIR}`);
process.exit(1);
}
// Resolve a possibly-relative href (../foo, ./foo, /foo) against the page's
// path (e.g. /errors/503.html) into a path inside CLIENT.
function resolveAsset(href: string, pagePath: string): string {
const abs = posix.resolve(posix.dirname(pagePath), href); // e.g. /_app/immutable/assets/x.css
return join(CLIENT, abs.replace(/^\//, ''));
}
function inline(html: string, pagePath: string): string {
// Inline <link rel="stylesheet"> regardless of attribute order.
html = html.replace(/<link\b[^>]*>/g, (tag) => {
if (!/\brel=["']stylesheet["']/.test(tag)) return tag;
const m = tag.match(/\bhref=["']([^"']+)["']/);
if (!m) return tag;
const cssPath = resolveAsset(m[1], pagePath);
if (!existsSync(cssPath)) {
console.warn(`[error-page] stylesheet not found, leaving link tag: ${m[1]}`);
return tag;
}
return `<style>${readFileSync(cssPath, 'utf8')}</style>`;
});
// Drop module preloads and module scripts — nothing should hydrate.
html = html.replace(/<link[^>]*\brel=["']modulepreload["'][^>]*>\s*/g, '');
html = html.replace(/<script[^>]*\btype=["']module["'][^>]*>[\s\S]*?<\/script>\s*/g, '');
// Point the brand/home link at the canonical site (the page may be served
// from any domain when used as nginx's default_server fallback).
html = html.replace(/<a\b[^>]*\bclass="[^"]*\bhome-link\b[^"]*"[^>]*>/g, (tag) =>
tag.replace(/\bhref="[^"]*"/, `href="${CANONICAL_HOME}"`)
);
// Inject the language-toggle bootstrap script just before </head> so
// <html data-lang="…"> is set before the body paints (avoids flash of
// both languages). Idempotent — if the marker is already present, skip.
if (!html.includes(LANG_SCRIPT_MARKER)) {
html = html.replace('</head>', `${LANG_SCRIPT}</head>`);
}
return html;
}
for (const { rel, abs } of sources) {
const dst = join(OUT_DIR, rel);
mkdirSync(dirname(dst), { recursive: true });
const html = inline(readFileSync(abs, 'utf8'), `/errors/${rel}`);
const buf = Buffer.from(html, 'utf8');
writeFileSync(dst, buf);
writeFileSync(`${dst}.gz`, gzipSync(buf, { level: 9 }));
writeFileSync(`${dst}.br`, brotliCompressSync(buf, {
params: { [zlib.BROTLI_PARAM_QUALITY]: 11 }
}));
console.log(`[error-page] wrote errors/${rel} (${(buf.length / 1024).toFixed(1)} kB) + .gz + .br`);
}
-84
View File
@@ -1,84 +0,0 @@
/**
* Migrate `$app/stores` (deprecated) to `$app/state` (rune-based).
*
* For each .svelte file:
* - Rewrite `from '$app/stores'` → `from '$app/state'`
* - For each named import, drop the `$` prefix from auto-subscriptions:
* `$page.url.pathname` → `page.url.pathname`
* `$navigating` → `navigating`
* `$updated` → `updated`
* Aliased imports (`page as appPage`) are tracked, so `$appPage` becomes `appPage`.
*
* Skips:
* - Non-.svelte files (server-only code uses getRequestEvent instead).
* - Files importing other things from $app/stores that don't have a state equivalent
* (none observed in this repo).
*
* Run: pnpm exec vite-node scripts/codemod-app-stores-to-state.ts [--dry]
*/
import { readFileSync, writeFileSync, readdirSync, statSync } from 'node:fs';
import { join, extname } from 'node:path';
const SRC = 'src';
const DRY = process.argv.includes('--dry');
const STORES_IMPORT_RE =
/import\s*\{([^}]+)\}\s*from\s*['"]\$app\/stores['"]\s*;?/;
function walk(dir: string, out: string[] = []): string[] {
for (const name of readdirSync(dir)) {
const p = join(dir, name);
const s = statSync(p);
if (s.isDirectory()) walk(p, out);
else if (extname(p) === '.svelte') out.push(p);
}
return out;
}
function parseImports(inner: string): Array<{ orig: string; local: string }> {
return inner
.split(',')
.map((s) => s.trim())
.filter(Boolean)
.map((spec) => {
const m = spec.match(/^(\w+)(?:\s+as\s+(\w+))?$/);
if (!m) return null;
return { orig: m[1], local: m[2] ?? m[1] };
})
.filter((x): x is { orig: string; local: string } => x !== null);
}
function rewriteFile(src: string): { code: string; changed: boolean } {
const m = STORES_IMPORT_RE.exec(src);
if (!m) return { code: src, changed: false };
const imports = parseImports(m[1]);
if (imports.length === 0) return { code: src, changed: false };
// Replace the import path; preserve the same import shape.
let out = src.replace(STORES_IMPORT_RE, (full) =>
full.replace(/['"]\$app\/stores['"]/, "'$app/state'")
);
// Drop `$` prefix from each local name where it appears as a store
// auto-subscription (i.e. $name followed by a non-word boundary).
for (const { local } of imports) {
const re = new RegExp(`\\$${local}\\b`, 'g');
out = out.replace(re, local);
}
return { code: out, changed: out !== src };
}
const files = walk(SRC);
let changed = 0;
for (const f of files) {
const orig = readFileSync(f, 'utf8');
const { code, changed: didChange } = rewriteFile(orig);
if (!didChange) continue;
if (!DRY) writeFileSync(f, code);
changed++;
console.log(` ${f}`);
}
console.log(`\n${DRY ? '[dry] ' : ''}${changed} files migrated`);
-268
View File
@@ -1,268 +0,0 @@
/**
* Bucket 2 codemod: replace template-literal hrefs that start with `/` and
* contain `{expr}` interpolations with `resolve(routeId, { ... })`.
*
* Skips:
* - tags: <link>, <image> (svg), <use>, <textPath>
* - hrefs not starting with `/`
* - hrefs containing `?` or `#` (query/fragment) — handle manually
* - mixed segments like `view-{id}`
* - paths matching 0 or >1 routes
*
* Run: pnpm exec vite-node scripts/codemod-href-resolve-bucket2.ts [--dry] [--verbose]
*/
import { readFileSync, writeFileSync, readdirSync, statSync } from 'node:fs';
import { join, extname } from 'node:path';
const SRC = 'src';
const ROUTES = 'src/routes';
const DRY = process.argv.includes('--dry');
const SKIP_TAGS = new Set(['link', 'image', 'use', 'textpath']);
// --- Route tree ---------------------------------------------------------
type Dir = { name: string; subdirs: Dir[] };
function loadTree(dir: string, name = ''): Dir {
const subdirs: Dir[] = [];
for (const e of readdirSync(dir, { withFileTypes: true })) {
if (!e.isDirectory()) continue;
if (e.name === 'api' || e.name.startsWith('.')) continue;
subdirs.push(loadTree(join(dir, e.name), e.name));
}
return { name, subdirs };
}
const ROUTE_TREE = loadTree(ROUTES);
// --- Path parsing -------------------------------------------------------
type HrefSeg = { kind: 'literal'; text: string } | { kind: 'param'; expr: string };
function hasUnbracedChar(path: string, chars: string): boolean {
let depth = 0;
for (const c of path) {
if (c === '{') depth++;
else if (c === '}') depth--;
else if (depth === 0 && chars.includes(c)) return true;
}
return false;
}
function parsePath(path: string): HrefSeg[] | null {
if (!path.startsWith('/')) return null;
if (hasUnbracedChar(path, '?#')) return null;
if (path.includes('//')) return null;
// Split on `/`, but only outside of {...}
const parts: string[] = [];
let buf = '';
let depth = 0;
for (const c of path.slice(1)) {
if (c === '{') { depth++; buf += c; }
else if (c === '}') { depth--; buf += c; }
else if (c === '/' && depth === 0) { parts.push(buf); buf = ''; }
else buf += c;
}
parts.push(buf);
if (parts.length === 1 && parts[0] === '') return [];
const segs: HrefSeg[] = [];
for (const p of parts) {
if (p === '') return null;
const m = p.match(/^\{([^}]+)\}$/);
if (m) {
segs.push({ kind: 'param', expr: m[1] });
} else if (!p.includes('{') && !p.includes('}')) {
segs.push({ kind: 'literal', text: p });
} else {
return null; // mixed segment
}
}
return segs;
}
function paramInfo(
name: string
): { paramName: string; isRest: boolean } | null {
let body = name;
if (body.startsWith('[[') && body.endsWith(']]')) {
body = body.slice(2, -2);
} else if (body.startsWith('[') && body.endsWith(']')) {
body = body.slice(1, -1);
} else return null;
const isRest = body.startsWith('...');
if (isRest) body = body.slice(3);
const eq = body.indexOf('=');
const paramName = eq >= 0 ? body.slice(0, eq) : body;
return { paramName, isRest };
}
// --- Tree matching ------------------------------------------------------
type Match = { routeId: string; params: Array<[string, string]> };
function matchTree(
dir: Dir,
segs: HrefSeg[],
routePath: string[],
params: Array<[string, string]>
): Match[] {
if (segs.length === 0) {
const id = routePath.length === 0 ? '/' : '/' + routePath.join('/');
return [{ routeId: id, params }];
}
const [seg, ...rest] = segs;
const out: Match[] = [];
for (const sub of dir.subdirs) {
// Route groups are transparent — they don't consume a URL segment
// but DO appear in the route ID.
if (sub.name.startsWith('(') && sub.name.endsWith(')')) {
out.push(...matchTree(sub, segs, [...routePath, sub.name], params));
continue;
}
if (seg.kind === 'literal') {
if (sub.name === seg.text) {
out.push(
...matchTree(sub, rest, [...routePath, sub.name], params)
);
}
} else {
const info = paramInfo(sub.name);
if (info && !info.isRest) {
out.push(
...matchTree(sub, rest, [...routePath, sub.name], [
...params,
[info.paramName, seg.expr]
])
);
}
}
}
return out;
}
// --- Output formatting --------------------------------------------------
function isIdentifier(s: string): boolean {
return /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(s);
}
function formatParams(params: Array<[string, string]>): string {
if (params.length === 0) return '';
const items = params.map(([name, expr]) => {
const trimmed = expr.trim();
if (isIdentifier(trimmed) && trimmed === name) return name;
return `${name}: ${trimmed}`;
});
return `, { ${items.join(', ')} }`;
}
// --- Rewrite ------------------------------------------------------------
const HREF_RE =
/(<([A-Za-z][\w.-]*)\b[^>]*?\s)href="(\/[^"]*\{[^"]*\}[^"]*)"/gs;
type Skip = { path: string; reason: string };
function rewriteHrefs(src: string): {
code: string;
changed: number;
skipped: Skip[];
} {
let changed = 0;
const skipped: Skip[] = [];
const code = src.replace(HREF_RE, (full, prefix, tag, path) => {
if (SKIP_TAGS.has(tag.toLowerCase())) return full;
const segs = parsePath(path);
if (!segs) {
skipped.push({ path, reason: 'unparsable (mixed/query/fragment)' });
return full;
}
const matches = matchTree(ROUTE_TREE, segs, [], []);
if (matches.length === 0) {
skipped.push({ path, reason: 'no route match' });
return full;
}
if (matches.length > 1) {
skipped.push({
path,
reason: `${matches.length} ambiguous matches: ${matches.map((m) => m.routeId).join(' | ')}`
});
return full;
}
const { routeId, params } = matches[0];
changed++;
return `${prefix}href={resolve('${routeId}'${formatParams(params)})}`;
});
return { code, changed, skipped };
}
// --- Import injection ---------------------------------------------------
const SCRIPT_RE = /<script\b([^>]*)>([\s\S]*?)<\/script>/;
const PATHS_IMPORT_RE =
/import\s*\{([^}]*)\}\s*from\s*['"]\$app\/paths['"]\s*;?/;
function ensureResolveImport(src: string): string {
const m = SCRIPT_RE.exec(src);
if (!m) {
return `<script lang="ts">\n\timport { resolve } from '$app/paths';\n</script>\n\n${src}`;
}
const [scriptFull, attrs, body] = m;
const pm = PATHS_IMPORT_RE.exec(body);
if (pm) {
const inner = pm[1];
if (/\bresolve\b/.test(inner)) return src;
const merged = inner.trim().replace(/,?\s*$/, '') + ', resolve';
const newImport = `import { ${merged} } from '$app/paths';`;
const newBody = body.replace(PATHS_IMPORT_RE, newImport);
return src.replace(scriptFull, `<script${attrs}>${newBody}</script>`);
}
const im = body.match(/^([ \t]*)import\b/m);
const indent = im ? im[1] : '\t';
const opening = `<script${attrs}>`;
return src.replace(
scriptFull,
`${opening}\n${indent}import { resolve } from '$app/paths';${body}</script>`
);
}
// --- Driver -------------------------------------------------------------
function walk(dir: string, out: string[] = []): string[] {
for (const name of readdirSync(dir)) {
const p = join(dir, name);
const s = statSync(p);
if (s.isDirectory()) walk(p, out);
else if (extname(p) === '.svelte') out.push(p);
}
return out;
}
const files = walk(SRC);
let totalFiles = 0;
let totalReplacements = 0;
const allSkipped: Array<{ file: string } & Skip> = [];
for (const f of files) {
const orig = readFileSync(f, 'utf8');
const { code, changed, skipped } = rewriteHrefs(orig);
for (const s of skipped) allSkipped.push({ file: f, ...s });
if (changed === 0) continue;
const final = ensureResolveImport(code);
if (!DRY) writeFileSync(f, final);
totalFiles++;
totalReplacements += changed;
console.log(`${changed.toString().padStart(3)} ${f}`);
}
console.log(
`\n${DRY ? '[dry] ' : ''}${totalReplacements} replacements across ${totalFiles} files`
);
if (allSkipped.length > 0) {
console.log(`\n--- ${allSkipped.length} skipped hrefs ---`);
for (const s of allSkipped) {
console.log(` ${s.file}\n ${s.path} [${s.reason}]`);
}
}
-105
View File
@@ -1,105 +0,0 @@
/**
* Bucket 1 codemod: replace literal href="/path" with href={resolve('/path')}
* in .svelte files, and inject `import { resolve } from '$app/paths'`.
*
* Skips:
* - non-anchor tags: <link>, <image> (svg), <use>
* - external/protocol URLs: http(s)://, //host, mailto:, tel:
* - fragments (#...) and empty values
* - existing dynamic hrefs ({...})
*
* Run: pnpm exec vite-node scripts/codemod-href-resolve.ts [--dry]
*/
import { readFileSync, writeFileSync, readdirSync, statSync } from 'node:fs';
import { join, extname } from 'node:path';
const ROOT = 'src';
const DRY = process.argv.includes('--dry');
const SKIP_TAGS = new Set(['link', 'image', 'use']);
function walk(dir: string, out: string[] = []): string[] {
for (const name of readdirSync(dir)) {
const p = join(dir, name);
const s = statSync(p);
if (s.isDirectory()) walk(p, out);
else if (extname(p) === '.svelte') out.push(p);
}
return out;
}
/**
* Match: opening of element, then its attributes, then href="/...".
* Group 1 = full prefix incl. tag-name, Group 2 = tag name, Group 3 = path.
*/
// Excludes `{` and `}` so Svelte template interpolations inside the
// attribute value (e.g. href="/{lang}/foo") are NOT treated as literals.
const HREF_RE =
/(<([A-Za-z][\w.-]*)\b[^>]*?\s)href="(\/[^"{}]*)"/gs;
function rewriteHrefs(src: string): { code: string; changed: number } {
let changed = 0;
const code = src.replace(HREF_RE, (full, prefix, tag, path) => {
if (SKIP_TAGS.has(tag.toLowerCase())) return full;
// Skip protocol-relative just in case
if (path.startsWith('//')) return full;
changed++;
return `${prefix}href={resolve('${path}')}`;
});
return { code, changed };
}
const SCRIPT_RE = /<script\b([^>]*)>([\s\S]*?)<\/script>/;
const PATHS_IMPORT_RE =
/import\s*\{([^}]*)\}\s*from\s*['"]\$app\/paths['"]\s*;?/;
function ensureResolveImport(src: string): string {
const scriptMatch = SCRIPT_RE.exec(src);
if (!scriptMatch) {
// No script tag — prepend a TS one.
return `<script lang="ts">\n\timport { resolve } from '$app/paths';\n</script>\n\n${src}`;
}
const [scriptFull, attrs, body] = scriptMatch;
const pathsMatch = PATHS_IMPORT_RE.exec(body);
if (pathsMatch) {
const inner = pathsMatch[1];
if (/\bresolve\b/.test(inner)) return src; // already imported
const merged = inner.trim().replace(/,?\s*$/, '') + ', resolve';
const newImport = `import { ${merged} } from '$app/paths';`;
const newBody = body.replace(PATHS_IMPORT_RE, newImport);
return src.replace(scriptFull, `<script${attrs}>${newBody}</script>`);
}
// Inject new import line. Detect indent from first import line if present.
const importMatch = body.match(/^([ \t]*)import\b/m);
const indent = importMatch ? importMatch[1] : '\t';
// Insert right after the opening script tag's newline.
const opening = `<script${attrs}>`;
const insertion = `\n${indent}import { resolve } from '$app/paths';`;
const newScript = opening + insertion + body + '</script>';
return src.replace(scriptFull, newScript);
}
function processFile(path: string): { changed: number } {
const orig = readFileSync(path, 'utf8');
const { code: rewritten, changed } = rewriteHrefs(orig);
if (changed === 0) return { changed: 0 };
const final = ensureResolveImport(rewritten);
if (!DRY) writeFileSync(path, final);
return { changed };
}
const files = walk(ROOT);
let totalFiles = 0;
let totalReplacements = 0;
for (const f of files) {
const { changed } = processFile(f);
if (changed > 0) {
totalFiles++;
totalReplacements += changed;
console.log(`${changed.toString().padStart(3)} ${f}`);
}
}
console.log(
`\n${DRY ? '[dry] ' : ''}${totalReplacements} replacements across ${totalFiles} files`
);
-126
View File
@@ -1,126 +0,0 @@
/**
* Migrate i18n call sites from t('key', lang) to t.key (or t[expr] for
* dynamic keys), where t = m[lang] derived once per file. Generic version
* — pass the i18n module path and the directories to scan.
*
* Usage:
* pnpm exec vite-node scripts/codemod-i18n-t-to-m.ts \
* --module=$lib/js/cospendI18n \
* --root=src/routes/'[cospendRoot=cospendRoot]' \
* --root=src/lib/components/cospend \
* [--dry]
*/
import { readFileSync, writeFileSync, readdirSync, statSync } from 'node:fs';
import { join, extname } from 'node:path';
const args = process.argv.slice(2);
const DRY = args.includes('--dry');
const modArg = args.find((a) => a.startsWith('--module='));
if (!modArg) {
console.error('missing --module=<path>');
process.exit(1);
}
const modulePath = modArg.slice('--module='.length);
const roots = args
.filter((a) => a.startsWith('--root='))
.map((a) => a.slice('--root='.length));
if (roots.length === 0) {
console.error('missing --root=<dir> (at least one)');
process.exit(1);
}
const fnFlag = args.find((a) => a.startsWith('--fn='));
const FN = fnFlag ? fnFlag.slice('--fn='.length) : 't';
const mFlag = args.find((a) => a.startsWith('--m='));
const M_NAME = mFlag ? mFlag.slice('--m='.length) : 'm';
// Match imports from any path ending in the module basename — call sites
// reach calendarI18n via wildly different relative-path depths, so we
// don't pin the full path.
function esc(s: string) {
return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
}
const IMPORT_RE = new RegExp(
`import\\s*\\{([^}]+)\\}\\s*from\\s*(['"])([^'"]*${esc(modulePath)})\\2\\s*;?`
);
function walk(dir: string, out: string[] = []): string[] {
for (const name of readdirSync(dir)) {
const p = join(dir, name);
const s = statSync(p);
if (s.isDirectory()) walk(p, out);
else if (extname(p) === '.svelte' || extname(p) === '.ts') out.push(p);
}
return out;
}
function migrate(src: string): { code: string; changed: boolean } {
const m0 = IMPORT_RE.exec(src);
if (!m0) return { code: src, changed: false };
const items = m0[1].split(',').map((s) => s.trim()).filter(Boolean);
if (!items.includes(FN)) return { code: src, changed: false };
const matchedPath = m0[3];
// 1. Rewrite import: drop FN, ensure M_NAME present. Preserve original path.
const fnIdx = items.indexOf(FN);
items.splice(fnIdx, 1);
if (!items.includes(M_NAME)) items.push(M_NAME);
let out = src.replace(IMPORT_RE, `import { ${items.join(', ')} } from '${matchedPath}';`);
// 2. Insert `const FN = $derived(M_NAME[lang]);` at the right spot.
const insertion = `const ${FN} = $derived(${M_NAME}[lang]);`;
let inserted = false;
const langDerivedRe =
/^([ \t]*)(const\s+lang\s*=\s*\$derived\((?:[^()]|\([^()]*\))+\)\s*;?)([ \t]*\n)/m;
if (langDerivedRe.test(out)) {
out = out.replace(langDerivedRe, (_, indent, decl, nl) => {
inserted = true;
return `${indent}${decl}${nl}${indent}${insertion}${nl}`;
});
}
if (!inserted) {
const propsRe =
/^([ \t]*)(let\s*\{[\s\S]*?\}\s*=\s*\$props(?:<[\s\S]*?>)?\(\)\s*;?)([ \t]*\n)/m;
out = out.replace(propsRe, (full, indent, decl, nl) => {
if (!/\blang\b/.test(decl)) return full;
inserted = true;
return `${indent}${decl}${nl}${indent}${insertion}${nl}`;
});
}
if (!inserted) {
console.warn(` WARN: could not auto-insert \`${insertion}\` — manual fix needed`);
}
// Build dynamic regex for FN(...) — escape `1962`-style suffixes.
const fnEsc = FN.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
// 3. FN('static_key', lang) → FN.static_key (snake_case OR camelCase identifier)
out = out.replace(
new RegExp(`\\b${fnEsc}\\(\\s*['"]([a-zA-Z_$][a-zA-Z0-9_$]*)['"]\\s*,\\s*lang\\s*\\)`, 'g'),
`${FN}.$1`
);
// 4. FN(<expr>, lang) → FN[<expr>]
out = out.replace(
new RegExp(`\\b${fnEsc}\\(((?:[^()]|\\([^()]*\\))+?)\\s*,\\s*lang\\s*\\)`, 'g'),
(_match, expr) => `${FN}[${expr.trim()}]`
);
return { code: out, changed: out !== src };
}
let total = 0;
for (const root of roots) {
for (const f of walk(root)) {
const orig = readFileSync(f, 'utf8');
const { code, changed } = migrate(orig);
if (!changed) continue;
if (!DRY) writeFileSync(f, code);
total++;
console.log(` ${f}`);
}
}
console.log(`\n${DRY ? '[dry] ' : ''}${total} files migrated`);
-98
View File
@@ -1,98 +0,0 @@
#!/usr/bin/env bash
# Build locally and rsync artifacts to the production server.
# Avoids running pnpm / npm / any git-hosted prepare step on the server.
#
# Assumes:
# - Local machine matches the server's arch + libc (linux-x64-glibc).
# - Local Node major version matches the server's.
# - Root SSH to $REMOTE works (key-based).
#
# Usage: scripts/deploy.sh [--dry-run]
set -euo pipefail
REMOTE="${REMOTE:-root@bocken.org}"
REMOTE_DIR="${REMOTE_DIR:-/usr/share/webapps/homepage}"
REMOTE_USER_GROUP="${REMOTE_USER_GROUP:-homepage:homepage}"
SERVICE="${SERVICE:-homepage.service}"
ERROR_PAGES_DIR="${ERROR_PAGES_DIR:-/var/www/errors}"
ERROR_PAGES_OWNER="${ERROR_PAGES_OWNER:-http:http}"
DRY=""
if [[ "${1:-}" == "--dry-run" ]]; then
DRY="--dry-run"
echo ":: DRY RUN — no files will be transferred"
fi
cd "$(dirname "$0")/.."
echo ":: Sanity-checking local/remote toolchain parity"
local_node=$(node --version)
remote_node=$(ssh "$REMOTE" 'node --version')
if [[ "${local_node%%.*}" != "${remote_node%%.*}" ]]; then
echo "!! Node major mismatch: local $local_node vs remote $remote_node"
echo " Native modules (sharp, onnxruntime, bson) may break. Aborting."
exit 1
fi
echo " node $local_node (match)"
echo ":: Installing deps (frozen lockfile)"
pnpm install --frozen-lockfile
echo ":: Building"
pnpm build
if [[ ! -d build ]]; then
echo "!! build/ not produced — aborting"
exit 1
fi
# The server's systemd unit runs from $REMOTE_DIR/dist, so map build → dist.
echo ":: Syncing build/ → $REMOTE:$REMOTE_DIR/dist/"
rsync -az --delete $DRY --info=progress2 \
build/ "$REMOTE:$REMOTE_DIR/dist/"
echo ":: Syncing node_modules/ → $REMOTE:$REMOTE_DIR/node_modules/"
rsync -az --delete $DRY --info=progress2 \
node_modules/ "$REMOTE:$REMOTE_DIR/node_modules/"
echo ":: Syncing static/ → $REMOTE:$REMOTE_DIR/static/"
rsync -az --delete $DRY --info=progress2 \
static/ "$REMOTE:$REMOTE_DIR/static/"
echo ":: Syncing package.json + pnpm-lock.yaml"
rsync -az $DRY \
package.json pnpm-lock.yaml "$REMOTE:$REMOTE_DIR/"
if [[ ! -d build/client/errors ]]; then
echo "!! build/client/errors not produced — postbuild error-page step did not run"
exit 1
fi
echo ":: Syncing error pages → $REMOTE:$ERROR_PAGES_DIR/"
ssh "$REMOTE" "mkdir -p $ERROR_PAGES_DIR"
rsync -az --delete $DRY --info=progress2 \
build/client/errors/ "$REMOTE:$ERROR_PAGES_DIR/"
if [[ -n "$DRY" ]]; then
echo ":: Dry run complete — no service restart"
exit 0
fi
echo ":: Fixing ownership on server"
ssh "$REMOTE" "chown -R $REMOTE_USER_GROUP $REMOTE_DIR/dist $REMOTE_DIR/node_modules $REMOTE_DIR/static $REMOTE_DIR/package.json $REMOTE_DIR/pnpm-lock.yaml && chown -R $ERROR_PAGES_OWNER $ERROR_PAGES_DIR"
echo ":: Restarting $SERVICE"
ssh "$REMOTE" "systemctl restart $SERVICE"
echo ":: Verifying service is active"
sleep 2
if ssh "$REMOTE" "systemctl is-active --quiet $SERVICE"; then
echo " $SERVICE is running"
else
echo "!! $SERVICE failed to start — check logs:"
ssh "$REMOTE" "journalctl -u $SERVICE -n 30 --no-pager"
exit 1
fi
echo ":: Deploy complete"
-107
View File
@@ -1,107 +0,0 @@
/**
* Downloads all Bring! shopping list item icons locally.
* Icons are stored at static/shopping-icons/{key}.png
*
* Run: pnpm exec vite-node scripts/download-bring-icons.ts
*/
import { writeFileSync, mkdirSync, existsSync } from 'fs';
import { resolve } from 'path';
const CATALOG_URL = 'https://web.getbring.com/locale/articles.de-DE.json';
const ICON_BASE = 'https://web.getbring.com/assets/images/items/';
const OUTPUT_DIR = resolve('static/shopping-icons');
/** Normalize key to icon filename (matches Bring's normalizeStringPath) */
function normalizeKey(key: string): string {
return key
.toLowerCase()
.replace(/ä/g, 'ae')
.replace(/ö/g, 'oe')
.replace(/ü/g, 'ue')
.replace(/é/g, 'e')
.replace(/è/g, 'e')
.replace(/ê/g, 'e')
.replace(/à/g, 'a')
.replace(/!/g, '')
.replace(/[\s\-]+/g, '_');
}
async function main() {
console.log('Fetching catalog...');
const res = await fetch(CATALOG_URL);
const catalog: Record<string, string> = await res.json();
// Filter out category headers and meta entries
const SKIP = [
'Früchte & Gemüse', 'Fleisch & Fisch', 'Milch & Käse', 'Brot & Gebäck',
'Getreideprodukte', 'Snacks & Süsswaren', 'Getränke & Tabak', 'Getränke',
'Haushalt & Gesundheit', 'Fertig- & Tiefkühlprodukte', 'Zutaten & Gewürze',
'Baumarkt & Garten', 'Tierbedarf', 'Eigene Artikel', 'Zuletzt verwendet',
'Bring!', 'Vielen Dank', 'Früchte', 'Fleisch', 'Gemüse',
];
const items = Object.keys(catalog).filter(k => !SKIP.includes(k));
console.log(`Found ${items.length} items to download`);
mkdirSync(OUTPUT_DIR, { recursive: true });
// Also download letter fallbacks a-z
const allKeys = [
...items.map(k => ({ original: k, normalized: normalizeKey(k) })),
...'abcdefghijklmnopqrstuvwxyz'.split('').map(l => ({ original: l, normalized: l })),
];
let downloaded = 0;
let skipped = 0;
let failed = 0;
for (const { original, normalized } of allKeys) {
const outPath = resolve(OUTPUT_DIR, `${normalized}.png`);
if (existsSync(outPath)) {
skipped++;
continue;
}
const url = `${ICON_BASE}${normalized}.png`;
try {
const res = await fetch(url);
if (res.ok) {
const buffer = Buffer.from(await res.arrayBuffer());
writeFileSync(outPath, buffer);
downloaded++;
} else {
console.warn(`${original} (${normalized}.png) → ${res.status}`);
failed++;
}
} catch (err) {
console.warn(`${original} (${normalized}.png) → ${err}`);
failed++;
}
// Rate limiting
if ((downloaded + skipped + failed) % 50 === 0) {
console.log(` ${downloaded + skipped + failed}/${allKeys.length} (${downloaded} new, ${skipped} cached, ${failed} failed)`);
}
}
// Save the catalog mapping (key → normalized filename) for runtime lookup
const mapping: Record<string, string> = {};
for (const item of items) {
mapping[item.toLowerCase()] = normalizeKey(item);
}
// Also add the display names as lookups
for (const [key, displayName] of Object.entries(catalog)) {
if (!SKIP.includes(key)) {
mapping[displayName.toLowerCase()] = normalizeKey(key);
}
}
const mappingPath = resolve(OUTPUT_DIR, 'catalog.json');
writeFileSync(mappingPath, JSON.stringify(mapping, null, 2));
console.log(`\nDone: ${downloaded} downloaded, ${skipped} cached, ${failed} failed`);
console.log(`Catalog: ${Object.keys(mapping).length} entries → ${mappingPath}`);
}
main().catch(console.error);
-117
View File
@@ -1,117 +0,0 @@
/**
* Downloads all exercise images and videos from the ExerciseDB CDN.
*
* Run with: pnpm exec vite-node scripts/download-exercise-media.ts
*
* Reads: src/lib/data/exercisedb-raw.json
* Outputs: static/fitness/exercises/<exerciseId>/
* - images: 360p.jpg, 480p.jpg, 720p.jpg, 1080p.jpg
* - video: video.mp4
*
* Resumes automatically — skips files that already exist on disk.
*/
import { readFileSync, existsSync, mkdirSync, writeFileSync } from 'fs';
import { resolve, extname } from 'path';
const RAW_PATH = resolve('src/lib/data/exercisedb-raw.json');
const OUT_DIR = resolve('static/fitness/exercises');
const CONCURRENCY = 10;
interface DownloadTask {
url: string;
dest: string;
}
function sleep(ms: number) {
return new Promise(r => setTimeout(r, ms));
}
async function download(url: string, dest: string, retries = 3): Promise<boolean> {
for (let attempt = 1; attempt <= retries; attempt++) {
try {
const res = await fetch(url);
if (!res.ok) throw new Error(`${res.status} ${res.statusText}`);
const buf = Buffer.from(await res.arrayBuffer());
writeFileSync(dest, buf);
return true;
} catch (err: any) {
if (attempt === retries) {
console.error(` FAILED ${url}: ${err.message}`);
return false;
}
await sleep(1000 * attempt);
}
}
return false;
}
async function runQueue(tasks: DownloadTask[]) {
let done = 0;
let failed = 0;
const total = tasks.length;
async function worker() {
while (tasks.length > 0) {
const task = tasks.shift()!;
const ok = await download(task.url, task.dest);
if (!ok) failed++;
done++;
if (done % 50 === 0 || done === total) {
console.log(` ${done}/${total} downloaded${failed ? ` (${failed} failed)` : ''}`);
}
}
}
const workers = Array.from({ length: CONCURRENCY }, () => worker());
await Promise.all(workers);
return { done, failed };
}
async function main() {
console.log('=== Exercise Media Downloader ===\n');
if (!existsSync(RAW_PATH)) {
console.error(`Missing ${RAW_PATH} — run scrape-exercises.ts first`);
process.exit(1);
}
const data = JSON.parse(readFileSync(RAW_PATH, 'utf-8'));
const exercises: any[] = data.exercises;
console.log(`${exercises.length} exercises in raw data\n`);
const tasks: DownloadTask[] = [];
for (const ex of exercises) {
const dir = resolve(OUT_DIR, ex.exerciseId);
mkdirSync(dir, { recursive: true });
// Multi-resolution images
if (ex.imageUrls) {
for (const [res, url] of Object.entries(ex.imageUrls as Record<string, string>)) {
const ext = extname(new URL(url).pathname) || '.jpg';
const dest = resolve(dir, `${res}${ext}`);
if (!existsSync(dest)) tasks.push({ url, dest });
}
}
// Video
if (ex.videoUrl) {
const dest = resolve(dir, 'video.mp4');
if (!existsSync(dest)) tasks.push({ url: ex.videoUrl, dest });
}
}
if (tasks.length === 0) {
console.log('All media already downloaded!');
return;
}
console.log(`${tasks.length} files to download (skipping existing)\n`);
const { done, failed } = await runQueue(tasks);
console.log(`\nDone! ${done - failed} downloaded, ${failed} failed.`);
}
main().catch(err => {
console.error(err);
process.exit(1);
});
-18
View File
@@ -1,18 +0,0 @@
/**
* Pre-downloads HuggingFace transformer models so they're cached for runtime.
* Run with: pnpm exec vite-node scripts/download-models.ts
*/
import { pipeline } from '@huggingface/transformers';
const MODELS = [
'Xenova/all-MiniLM-L6-v2',
'Xenova/multilingual-e5-small',
'Xenova/multilingual-e5-base',
];
for (const name of MODELS) {
console.log(`Downloading ${name}...`);
const p = await pipeline('feature-extraction', name, { dtype: 'q8' });
await p.dispose();
console.log(` done`);
}
-61
View File
@@ -1,61 +0,0 @@
/**
* Pre-compute sentence embeddings for BLS German food names.
* Uses multilingual-e5-small for good German language understanding.
*
* Run: pnpm exec vite-node scripts/embed-bls-db.ts
*/
import { pipeline } from '@huggingface/transformers';
import { writeFileSync } from 'fs';
import { resolve } from 'path';
// Dynamic import of blsDb (generated file)
const { BLS_DB } = await import('../src/lib/data/blsDb');
const MODEL_NAME = 'Xenova/multilingual-e5-small';
const OUTPUT_FILE = resolve('src/lib/data/blsEmbeddings.json');
async function main() {
console.log(`Loading model ${MODEL_NAME}...`);
const embedder = await pipeline('feature-extraction', MODEL_NAME, {
dtype: 'q8',
});
console.log(`Embedding ${BLS_DB.length} BLS entries...`);
const entries: { blsCode: string; name: string; vector: number[] }[] = [];
const batchSize = 32;
for (let i = 0; i < BLS_DB.length; i += batchSize) {
const batch = BLS_DB.slice(i, i + batchSize);
// e5 models require "passage: " prefix for documents
const texts = batch.map(e => `passage: ${e.nameDe}`);
for (let j = 0; j < batch.length; j++) {
const result = await embedder(texts[j], { pooling: 'mean', normalize: true });
const vector = Array.from(result.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
entries.push({
blsCode: batch[j].blsCode,
name: batch[j].nameDe,
vector,
});
}
if ((i + batchSize) % 500 < batchSize) {
console.log(` ${Math.min(i + batchSize, BLS_DB.length)}/${BLS_DB.length}`);
}
}
const output = {
model: MODEL_NAME,
dimensions: entries[0]?.vector.length || 384,
count: entries.length,
entries,
};
const json = JSON.stringify(output);
writeFileSync(OUTPUT_FILE, json, 'utf-8');
console.log(`Written ${OUTPUT_FILE} (${(json.length / 1024 / 1024).toFixed(1)}MB, ${entries.length} entries)`);
}
main().catch(console.error);
-60
View File
@@ -1,60 +0,0 @@
/**
* Pre-computes sentence embeddings for all USDA nutrition DB entries using
* all-MiniLM-L6-v2 via @huggingface/transformers.
*
* Run with: pnpm exec vite-node scripts/embed-nutrition-db.ts
*
* Outputs: src/lib/data/nutritionEmbeddings.json
* Format: { entries: [{ fdcId, name, vector: number[384] }] }
*/
import { writeFileSync } from 'fs';
import { resolve } from 'path';
import { pipeline } from '@huggingface/transformers';
import { NUTRITION_DB } from '../src/lib/data/nutritionDb';
const OUTPUT_PATH = resolve('src/lib/data/nutritionEmbeddings.json');
const MODEL_NAME = 'Xenova/all-MiniLM-L6-v2';
const BATCH_SIZE = 64;
async function main() {
console.log('=== Nutrition DB Embedding Generation ===\n');
console.log(`Entries to embed: ${NUTRITION_DB.length}`);
console.log(`Model: ${MODEL_NAME}`);
console.log(`Loading model (first run downloads ~23MB)...\n`);
const embedder = await pipeline('feature-extraction', MODEL_NAME, {
dtype: 'q8',
});
const entries: { fdcId: number; name: string; vector: number[] }[] = [];
const totalBatches = Math.ceil(NUTRITION_DB.length / BATCH_SIZE);
for (let i = 0; i < NUTRITION_DB.length; i += BATCH_SIZE) {
const batch = NUTRITION_DB.slice(i, i + BATCH_SIZE);
const batchNum = Math.floor(i / BATCH_SIZE) + 1;
process.stdout.write(`\r Batch ${batchNum}/${totalBatches} (${i + batch.length}/${NUTRITION_DB.length})`);
// Embed all names in this batch
for (const item of batch) {
const result = await embedder(item.name, { pooling: 'mean', normalize: true });
// result.data is a Float32Array — truncate to 4 decimal places to save space
const vector = Array.from(result.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
entries.push({ fdcId: item.fdcId, name: item.name, vector });
}
}
console.log('\n\nWriting embeddings...');
const output = { model: MODEL_NAME, dimensions: 384, count: entries.length, entries };
writeFileSync(OUTPUT_PATH, JSON.stringify(output), 'utf-8');
const fileSizeMB = (Buffer.byteLength(JSON.stringify(output)) / 1024 / 1024).toFixed(1);
console.log(`Written ${entries.length} embeddings to ${OUTPUT_PATH} (${fileSizeMB}MB)`);
await embedder.dispose();
}
main().catch(err => {
console.error('Embedding generation failed:', err);
process.exit(1);
});
-55
View File
@@ -1,55 +0,0 @@
/**
* Pre-compute sentence embeddings for shopping category representative items.
* Uses multilingual-e5-base for good DE/EN understanding.
*
* Run: pnpm exec vite-node scripts/embed-shopping-categories.ts
*/
import { pipeline } from '@huggingface/transformers';
import { writeFileSync } from 'fs';
import { resolve } from 'path';
const { CATEGORY_ITEMS } = await import('../src/lib/data/shoppingCategoryItems');
const MODEL_NAME = 'Xenova/multilingual-e5-base';
const OUTPUT_FILE = resolve('src/lib/data/shoppingCategoryEmbeddings.json');
async function main() {
console.log(`Loading model ${MODEL_NAME}...`);
const embedder = await pipeline('feature-extraction', MODEL_NAME, {
dtype: 'q8',
});
console.log(`Embedding ${CATEGORY_ITEMS.length} category items...`);
const entries: { name: string; category: string; vector: number[] }[] = [];
for (let i = 0; i < CATEGORY_ITEMS.length; i++) {
const item = CATEGORY_ITEMS[i];
// e5 models require "passage: " prefix for documents
const result = await embedder(`passage: ${item.name}`, { pooling: 'mean', normalize: true });
const vector = Array.from(result.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
entries.push({
name: item.name,
category: item.category,
vector,
});
if ((i + 1) % 50 === 0) {
console.log(` ${i + 1}/${CATEGORY_ITEMS.length}`);
}
}
const output = {
model: MODEL_NAME,
dimensions: entries[0]?.vector.length || 768,
count: entries.length,
entries,
};
const json = JSON.stringify(output);
writeFileSync(OUTPUT_FILE, json, 'utf-8');
console.log(`Written ${OUTPUT_FILE} (${(json.length / 1024).toFixed(1)}KB, ${entries.length} entries)`);
}
main().catch(console.error);
-55
View File
@@ -1,55 +0,0 @@
/**
* Pre-compute embeddings for Bring! catalog items to enable icon matching.
* Maps item names to their icon filenames via semantic similarity.
*
* Run: pnpm exec vite-node scripts/embed-shopping-icons.ts
*/
import { pipeline } from '@huggingface/transformers';
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
const MODEL_NAME = 'Xenova/multilingual-e5-base';
const CATALOG_PATH = resolve('static/shopping-icons/catalog.json');
const OUTPUT_FILE = resolve('src/lib/data/shoppingIconEmbeddings.json');
async function main() {
const catalog: Record<string, string> = JSON.parse(readFileSync(CATALOG_PATH, 'utf-8'));
// Deduplicate: multiple display names can map to the same icon
// We want one embedding per unique display name
const uniqueItems = new Map<string, string>();
for (const [name, iconFile] of Object.entries(catalog)) {
uniqueItems.set(name, iconFile);
}
const items = [...uniqueItems.entries()];
console.log(`Loading model ${MODEL_NAME}...`);
const embedder = await pipeline('feature-extraction', MODEL_NAME, { dtype: 'q8' });
console.log(`Embedding ${items.length} catalog items...`);
const entries: { name: string; icon: string; vector: number[] }[] = [];
for (let i = 0; i < items.length; i++) {
const [name, icon] = items[i];
const result = await embedder(`passage: ${name}`, { pooling: 'mean', normalize: true });
const vector = Array.from(result.data as Float32Array).map(v => Math.round(v * 10000) / 10000);
entries.push({ name, icon, vector });
if ((i + 1) % 50 === 0) {
console.log(` ${i + 1}/${items.length}`);
}
}
const output = {
model: MODEL_NAME,
dimensions: entries[0]?.vector.length || 768,
count: entries.length,
entries,
};
const json = JSON.stringify(output);
writeFileSync(OUTPUT_FILE, json, 'utf-8');
console.log(`Written ${OUTPUT_FILE} (${(json.length / 1024).toFixed(1)}KB, ${entries.length} entries)`);
}
main().catch(console.error);
-60
View File
@@ -1,60 +0,0 @@
/**
* Build-time generation of bilingual Bible quotes per HTTP error status.
*
* Looks up curated references in static/allioli.tsv (DE) + static/drb.tsv (EN)
* via the existing bible reference parser, then writes the resolved verses to
* src/lib/data/errorQuotes.json for the prerendered /errors/[status] pages.
*
* - Add or change a status by editing REFS below.
* - Refs use the abbreviations defined in the TSVs (e.g. Mt 7,7 / Mt 7:7).
* - Fails the build if any reference cannot be resolved.
*
* Run: pnpm exec vite-node scripts/generate-error-quotes.ts
*/
import { mkdirSync, writeFileSync } from 'node:fs';
import { dirname, resolve, join } from 'node:path';
import { fileURLToPath } from 'node:url';
import { lookupReference } from '../src/lib/server/bible';
const HERE = dirname(fileURLToPath(import.meta.url));
const ROOT = resolve(HERE, '..');
const ALLIOLI = join(ROOT, 'static/allioli.tsv');
const DRB = join(ROOT, 'static/drb.tsv');
const OUT = join(ROOT, 'src/lib/data/errorQuotes.json');
// Curated refs. Abbreviations must match the TSV's `abbreviation` column.
const REFS: Record<number, { de: string; en: string }> = {
401: { de: 'Mt 7,7', en: 'Mt 7:7' },
403: { de: 'Mt 7,14', en: 'Mt 7:14' },
404: { de: 'Mt 7,8', en: 'Mt 7:8' },
500: { de: '2Kor 4,7', en: '2Cor 4:7' },
502: { de: '1Mo 11,9', en: 'Gn 11:9' },
503: { de: 'Ps 37,7', en: 'Ps 37:7' },
504: { de: 'Jes 40,31', en: 'Is 40:31' }
};
type ResolvedQuote = { text: string; reference: string };
function resolveOne(ref: string, tsv: string): ResolvedQuote {
const result = lookupReference(ref, tsv);
if (!result || result.verses.length === 0) {
throw new Error(`could not resolve reference "${ref}" in ${tsv}`);
}
// Range refs join verses with a space. Display reference reuses the
// original input so the UI keeps the canonical "Mt 7,7" / "Mt 7:7" form.
const text = result.verses.map((v) => v.text).join(' ');
return { text, reference: ref };
}
const out: Record<string, { de: ResolvedQuote; en: ResolvedQuote }> = {};
for (const [status, refs] of Object.entries(REFS)) {
out[status] = {
de: resolveOne(refs.de, ALLIOLI),
en: resolveOne(refs.en, DRB)
};
console.log(`[error-quotes] ${status}: ${refs.de} / ${refs.en}`);
}
mkdirSync(dirname(OUT), { recursive: true });
writeFileSync(OUT, JSON.stringify(out, null, 2) + '\n', 'utf8');
console.log(`[error-quotes] wrote ${OUT.replace(ROOT + '/', '')} (${Object.keys(out).length} statuses)`);
-62
View File
@@ -1,62 +0,0 @@
/**
* Build-time generation of loyalty-card barcode SVGs.
*
* Reads card numbers from env vars and writes static/shopping/supercard.svg
* + static/shopping/cumulus.svg. Fails the build if any required env is
* unset so deploys can't silently ship a broken UI.
*
* SHOPPING_COOP_SUPERCARD_NUMBER → Data Matrix (Coop Supercard)
* SHOPPING_MIGROS_CUMULUS_NUMBER → Code 128 (Migros Cumulus)
*
* Run: pnpm exec vite-node scripts/generate-loyalty-cards.ts
*/
import { mkdirSync, writeFileSync } from 'node:fs';
import { dirname, resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { toSVG } from 'bwip-js/node';
const HERE = dirname(fileURLToPath(import.meta.url));
const OUT_DIR = resolve(HERE, '..', 'static', 'shopping');
type CardSpec = {
envVar: string;
filename: string;
bcid: 'datamatrix' | 'code128';
scale: number;
parsefnc?: boolean;
};
const cards: CardSpec[] = [
// Coop Supercard uses GS1 Data Matrix with FNC1 separators between fields.
// Put ^FNC1 in the env value wherever the real symbol has a separator
// (dmtxread -G prints them as 0x1D); parsefnc: true turns each ^FNC1 into
// a genuine FNC1 codeword so the regenerated code matches the card.
{ envVar: 'SHOPPING_COOP_SUPERCARD_NUMBER', filename: 'supercard.svg', bcid: 'datamatrix', scale: 6, parsefnc: true },
{ envVar: 'SHOPPING_MIGROS_CUMULUS_NUMBER', filename: 'cumulus.svg', bcid: 'code128', scale: 3 }
];
mkdirSync(OUT_DIR, { recursive: true });
const missing = cards.filter((c) => !process.env[c.envVar]?.trim()).map((c) => c.envVar);
if (missing.length) {
console.error(`[loyalty-cards] missing required env: ${missing.join(', ')}`);
process.exit(1);
}
for (const card of cards) {
const value = process.env[card.envVar]!.trim();
const outPath = resolve(OUT_DIR, card.filename);
const svg = toSVG({
bcid: card.bcid,
text: value,
scale: card.scale,
includetext: false,
paddingwidth: 8,
paddingheight: 8,
...(card.parsefnc ? { parsefnc: true } : {})
});
writeFileSync(outPath, svg, 'utf8');
console.log(`[loyalty-cards] wrote ${card.filename} (${card.bcid})`);
}
-88
View File
@@ -1,88 +0,0 @@
/**
* Pre-generates Bible verse data for all rosary mystery references.
* Run with: npx vite-node scripts/generate-mystery-verses.ts
*/
import { writeFileSync } from 'fs';
import { resolve } from 'path';
import { lookupReference } from '../src/lib/server/bible';
import { mysteryReferences, mysteryReferencesEnglish, theologicalVirtueReference, theologicalVirtueReferenceEnglish } from '../src/lib/data/mysteryDescriptions';
import type { MysteryDescription, VerseData } from '../src/lib/data/mysteryDescriptions';
function generateVerseData(
references: Record<string, readonly { title: string; reference: string }[]>,
tsvPath: string
): Record<string, MysteryDescription[]> {
const result: Record<string, MysteryDescription[]> = {};
for (const [mysteryType, refs] of Object.entries(references)) {
const descriptions: MysteryDescription[] = [];
for (const ref of refs) {
const lookup = lookupReference(ref.reference, tsvPath);
let text = '';
let verseData: VerseData | null = null;
if (lookup && lookup.verses.length > 0) {
text = `«${lookup.verses.map((v) => v.text).join(' ')}»`;
verseData = {
book: lookup.book,
chapter: lookup.chapter,
verses: lookup.verses
};
} else {
console.warn(`No verses found for: ${ref.reference} in ${tsvPath}`);
}
descriptions.push({
title: ref.title,
reference: ref.reference,
text,
verseData
});
}
result[mysteryType] = descriptions;
}
return result;
}
const dePath = resolve('static/allioli.tsv');
const enPath = resolve('static/drb.tsv');
const mysteryVerseDataDe = generateVerseData(mysteryReferences, dePath);
const mysteryVerseDataEn = generateVerseData(mysteryReferencesEnglish, enPath);
// Generate theological virtue (1 Cor 13) verse data
function generateSingleRef(ref: { title: string; reference: string }, tsvPath: string): MysteryDescription {
const lookup = lookupReference(ref.reference, tsvPath);
let text = '';
let verseData: VerseData | null = null;
if (lookup && lookup.verses.length > 0) {
text = `«${lookup.verses.map((v) => v.text).join(' ')}»`;
verseData = { book: lookup.book, chapter: lookup.chapter, verses: lookup.verses };
} else {
console.warn(`No verses found for: ${ref.reference} in ${tsvPath}`);
}
return { title: ref.title, reference: ref.reference, text, verseData };
}
const theologicalVirtueDataDe = generateSingleRef(theologicalVirtueReference, dePath);
const theologicalVirtueDataEn = generateSingleRef(theologicalVirtueReferenceEnglish, enPath);
const output = `// Auto-generated by scripts/generate-mystery-verses.ts — do not edit manually
import type { MysteryDescription } from './mysteryDescriptions';
export const mysteryVerseDataDe: Record<string, MysteryDescription[]> = ${JSON.stringify(mysteryVerseDataDe, null, '\t')};
export const mysteryVerseDataEn: Record<string, MysteryDescription[]> = ${JSON.stringify(mysteryVerseDataEn, null, '\t')};
export const theologicalVirtueVerseDataDe: MysteryDescription = ${JSON.stringify(theologicalVirtueDataDe, null, '\t')};
export const theologicalVirtueVerseDataEn: MysteryDescription = ${JSON.stringify(theologicalVirtueDataEn, null, '\t')};
`;
const outPath = resolve('src/lib/data/mysteryVerseData.ts');
writeFileSync(outPath, output, 'utf-8');
console.log(`Wrote mystery verse data to ${outPath}`);
-30
View File
@@ -1,30 +0,0 @@
#!/usr/bin/env bash
# Run scripts/deploy.sh after a push to origin/master.
# Git has no native post-push hook; pre-push runs before the push completes.
# If deploy fails the push is aborted, which is safer than deploying after a
# push that might have been rejected anyway.
#
# Install: ln -sf ../../scripts/hooks/pre-push .git/hooks/pre-push
set -e
remote_name="$1"
# Only deploy when pushing to the Gitea origin.
if [ "$remote_name" != "origin" ]; then
exit 0
fi
should_deploy=0
while read -r _local_ref _local_sha remote_ref _remote_sha; do
if [ "$remote_ref" = "refs/heads/master" ]; then
should_deploy=1
fi
done
if [ "$should_deploy" -ne 1 ]; then
exit 0
fi
repo_root="$(git rev-parse --show-toplevel)"
exec "$repo_root/scripts/deploy.sh"
-182
View File
@@ -1,182 +0,0 @@
/**
* Import BLS 4.0 (Bundeslebensmittelschlüssel) nutrition data from CSV.
* Pre-convert the xlsx to CSV first (one-time):
* node -e "const X=require('xlsx');const w=X.readFile('BLS_4_0_2025_DE/BLS_4_0_Daten_2025_DE.xlsx');
* require('fs').writeFileSync('BLS_4_0_2025_DE/BLS_4_0_Daten_2025_DE.csv',X.utils.sheet_to_csv(w.Sheets[w.SheetNames[0]]))"
*
* Run: pnpm exec vite-node scripts/import-bls-nutrition.ts
*/
import { readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
/** Parse CSV handling quoted fields with commas */
function parseCSV(text: string): string[][] {
const rows: string[][] = [];
let i = 0;
while (i < text.length) {
const row: string[] = [];
while (i < text.length && text[i] !== '\n') {
if (text[i] === '"') {
i++; // skip opening quote
let field = '';
while (i < text.length) {
if (text[i] === '"') {
if (text[i + 1] === '"') { field += '"'; i += 2; }
else { i++; break; }
} else { field += text[i]; i++; }
}
row.push(field);
if (text[i] === ',') i++;
} else {
const next = text.indexOf(',', i);
const nl = text.indexOf('\n', i);
const end = (next === -1 || (nl !== -1 && nl < next)) ? (nl === -1 ? text.length : nl) : next;
row.push(text.substring(i, end));
i = end;
if (text[i] === ',') i++;
}
}
if (text[i] === '\n') i++;
if (row.length > 0) rows.push(row);
}
return rows;
}
const BLS_CSV = resolve('BLS_4_0_2025_DE/BLS_4_0_Daten_2025_DE.csv');
const OUTPUT_FILE = resolve('src/lib/data/blsDb.ts');
// BLS nutrient code → our per100g field name
const NUTRIENT_MAP: Record<string, { field: string; divisor?: number }> = {
ENERCC: { field: 'calories' },
PROT625: { field: 'protein' },
FAT: { field: 'fat' },
FASAT: { field: 'saturatedFat' },
CHO: { field: 'carbs' },
FIBT: { field: 'fiber' },
SUGAR: { field: 'sugars' },
CA: { field: 'calcium' },
FE: { field: 'iron' },
MG: { field: 'magnesium' },
P: { field: 'phosphorus' },
K: { field: 'potassium' },
NA: { field: 'sodium' },
ZN: { field: 'zinc' },
VITA: { field: 'vitaminA' },
VITC: { field: 'vitaminC' },
VITD: { field: 'vitaminD' },
VITE: { field: 'vitaminE' },
VITK: { field: 'vitaminK' },
THIA: { field: 'thiamin' },
RIBF: { field: 'riboflavin' },
NIA: { field: 'niacin' },
VITB6: { field: 'vitaminB6', divisor: 1000 }, // BLS: µg → mg
VITB12: { field: 'vitaminB12' },
FOL: { field: 'folate' },
CHORL: { field: 'cholesterol' },
// Amino acids (all g/100g)
ILE: { field: 'isoleucine' },
LEU: { field: 'leucine' },
LYS: { field: 'lysine' },
MET: { field: 'methionine' },
PHE: { field: 'phenylalanine' },
THR: { field: 'threonine' },
TRP: { field: 'tryptophan' },
VAL: { field: 'valine' },
HIS: { field: 'histidine' },
ALA: { field: 'alanine' },
ARG: { field: 'arginine' },
ASP: { field: 'asparticAcid' },
CYSTE: { field: 'cysteine' },
GLU: { field: 'glutamicAcid' },
GLY: { field: 'glycine' },
PRO: { field: 'proline' },
SER: { field: 'serine' },
TYR: { field: 'tyrosine' },
};
// BLS 4.0 code first letter → category (Hauptlebensmittelgruppen)
const CATEGORY_MAP: Record<string, string> = {
B: 'Brot & Backwaren', C: 'Getreide', D: 'Dauerbackwaren & Kekse',
E: 'Teigwaren & Nudeln', F: 'Obst & Früchte', G: 'Gemüse',
H: 'Hülsenfrüchte & Sojaprodukte', K: 'Kartoffeln & Stärke',
M: 'Milch & Milchprodukte', N: 'Getränke (alkoholfrei)',
P: 'Alkoholische Getränke', Q: 'Fette & Öle',
R: 'Gewürze & Würzmittel', S: 'Zucker & Honig',
T: 'Fisch & Meeresfrüchte', U: 'Fleisch',
V: 'Wild & Kaninchen', W: 'Wurstwaren',
X: 'Brühen & Fertiggerichte', Y: 'Gerichte & Rezepte',
};
async function main() {
console.log('Reading BLS CSV...');
const csvText = readFileSync(BLS_CSV, 'utf-8');
const rows: string[][] = parseCSV(csvText);
const headers = rows[0];
console.log(`Headers: ${headers.length} columns, ${rows.length - 1} data rows`);
// Build column index: BLS nutrient code → column index of the value column
const codeToCol = new Map<string, number>();
for (let c = 3; c < headers.length; c += 3) {
const code = headers[c]?.split(' ')[0];
if (code) codeToCol.set(code, c);
}
const entries: any[] = [];
for (let r = 1; r < rows.length; r++) {
const row = rows[r];
const blsCode = row[0]?.trim();
const nameDe = row[1]?.trim();
const nameEn = row[2]?.trim() || '';
if (!blsCode || !nameDe) continue;
const category = CATEGORY_MAP[blsCode[0]] || 'Sonstiges';
const per100g: Record<string, number> = {};
for (const [blsNutrientCode, mapping] of Object.entries(NUTRIENT_MAP)) {
const col = codeToCol.get(blsNutrientCode);
if (col === undefined) {
per100g[mapping.field] = 0;
continue;
}
let value = parseFloat(row[col] || '0');
if (isNaN(value)) value = 0;
if (mapping.divisor) value /= mapping.divisor;
per100g[mapping.field] = Math.round(value * 1000) / 1000;
}
entries.push({ blsCode, nameDe, nameEn, category, per100g });
}
console.log(`Parsed ${entries.length} BLS entries`);
// Sample entries
const sample = entries.slice(0, 3);
for (const e of sample) {
console.log(` ${e.blsCode} | ${e.nameDe} | ${e.per100g.calories} kcal | protein ${e.per100g.protein}g`);
}
const output = `// Auto-generated from BLS 4.0 (Bundeslebensmittelschlüssel)
// Generated: ${new Date().toISOString().split('T')[0]}
// Do not edit manually — regenerate with: pnpm exec vite-node scripts/import-bls-nutrition.ts
import type { NutritionPer100g } from '$types/types';
export type BlsEntry = {
blsCode: string;
nameDe: string;
nameEn: string;
category: string;
per100g: NutritionPer100g;
};
export const BLS_DB: BlsEntry[] = ${JSON.stringify(entries, null, 0)};
`;
writeFileSync(OUTPUT_FILE, output, 'utf-8');
console.log(`Written ${OUTPUT_FILE} (${(output.length / 1024 / 1024).toFixed(1)}MB, ${entries.length} entries)`);
}
main().catch(console.error);
-278
View File
@@ -1,278 +0,0 @@
/**
* Import OpenFoodFacts MongoDB dump into a lean `openfoodfacts` collection.
*
* This script:
* 0. Downloads the OFF MongoDB dump if not present locally
* 1. Runs `mongorestore` to load the raw dump into a temporary `off_products` collection
* 2. Transforms each document, extracting only the fields we need
* 3. Inserts into the `openfoodfacts` collection with proper indexes
* 4. Drops the temporary `off_products` collection
*
* Reads MONGO_URL from .env (via dotenv).
*
* Usage:
* pnpm exec vite-node scripts/import-openfoodfacts.ts [path-to-dump.gz]
*
* Default dump path: ./openfoodfacts-mongodbdump.gz
*/
import { execSync } from 'child_process';
import { readFileSync, existsSync } from 'fs';
import { resolve } from 'path';
import mongoose from 'mongoose';
const OFF_DUMP_URL = 'https://static.openfoodfacts.org/data/openfoodfacts-mongodbdump.gz';
// --- Load MONGO_URL from .env ---
const envPath = resolve(import.meta.dirname ?? '.', '..', '.env');
const envText = readFileSync(envPath, 'utf-8');
const mongoMatch = envText.match(/^MONGO_URL="?([^"\n]+)"?/m);
if (!mongoMatch) { console.error('MONGO_URL not found in .env'); process.exit(1); }
const MONGO_URL = mongoMatch[1];
// Parse components for mongorestore URI (needs root DB, not /recipes)
const parsed = new URL(MONGO_URL);
const RESTORE_URI = `mongodb://${parsed.username}:${parsed.password}@${parsed.host}/?authSource=${new URLSearchParams(parsed.search).get('authSource') || 'admin'}`;
const DB_NAME = parsed.pathname.replace(/^\//, '') || 'recipes';
const BATCH_SIZE = 5000;
// --- Resolve dump file path, download if missing ---
const dumpPath = resolve(process.argv[2] || './openfoodfacts-mongodbdump.gz');
if (!existsSync(dumpPath)) {
console.log(`\nDump file not found at ${dumpPath}`);
console.log(`Downloading from ${OFF_DUMP_URL} (~13 GB)…\n`);
try {
execSync(`curl -L -o "${dumpPath}" --progress-bar "${OFF_DUMP_URL}"`, { stdio: 'inherit' });
} catch (err: any) {
console.error('Download failed:', err.message);
process.exit(1);
}
console.log('Download complete.\n');
}
// Map OFF nutriment keys → our per100g field names
const NUTRIENT_MAP: Record<string, string> = {
'energy-kcal_100g': 'calories',
'proteins_100g': 'protein',
'fat_100g': 'fat',
'saturated-fat_100g': 'saturatedFat',
'carbohydrates_100g': 'carbs',
'fiber_100g': 'fiber',
'sugars_100g': 'sugars',
'calcium_100g': 'calcium',
'iron_100g': 'iron',
'magnesium_100g': 'magnesium',
'phosphorus_100g': 'phosphorus',
'potassium_100g': 'potassium',
'sodium_100g': 'sodium',
'zinc_100g': 'zinc',
'vitamin-a_100g': 'vitaminA',
'vitamin-c_100g': 'vitaminC',
'vitamin-d_100g': 'vitaminD',
'vitamin-e_100g': 'vitaminE',
'vitamin-k_100g': 'vitaminK',
'vitamin-b1_100g': 'thiamin',
'vitamin-b2_100g': 'riboflavin',
'vitamin-pp_100g': 'niacin',
'vitamin-b6_100g': 'vitaminB6',
'vitamin-b12_100g': 'vitaminB12',
'folates_100g': 'folate',
'cholesterol_100g': 'cholesterol',
};
function extractPer100g(nutriments: any): Record<string, number> | null {
if (!nutriments) return null;
const out: Record<string, number> = {};
let hasAny = false;
for (const [offKey, ourKey] of Object.entries(NUTRIENT_MAP)) {
const v = Number(nutriments[offKey]);
if (!isNaN(v) && v >= 0) {
out[ourKey] = v;
if (ourKey === 'calories' || ourKey === 'protein' || ourKey === 'fat' || ourKey === 'carbs') {
hasAny = true;
}
}
}
// Fall back to kJ → kcal if energy-kcal_100g was missing
if (!out.calories) {
const kj = Number(nutriments['energy_100g']);
if (!isNaN(kj) && kj > 0) {
out.calories = Math.round(kj / 4.184 * 10) / 10;
hasAny = true;
}
}
return hasAny ? out : null;
}
function pickName(doc: any): { name: string; nameDe?: string } | null {
const en = doc.product_name_en?.trim();
const de = doc.product_name_de?.trim();
const generic = doc.product_name?.trim();
const fr = doc.product_name_fr?.trim();
const name = en || generic || fr;
if (!name) return null;
return { name, ...(de && de !== name ? { nameDe: de } : {}) };
}
async function main() {
// --- Step 1: mongorestore (skip if off_products already has data) ---
await mongoose.connect(MONGO_URL);
let existingCount = await mongoose.connection.db!.collection('off_products').estimatedDocumentCount();
if (existingCount > 100000) {
console.log(`\n=== Step 1: SKIPPED — off_products already has ~${existingCount.toLocaleString()} documents ===\n`);
} else {
console.log(`\n=== Step 1: mongorestore from ${dumpPath} ===\n`);
await mongoose.disconnect();
const restoreCmd = [
'mongorestore', '--gzip',
`--archive=${dumpPath}`,
`--uri="${RESTORE_URI}"`,
`--nsFrom='off.products'`,
`--nsTo='${DB_NAME}.off_products'`,
'--drop', '--noIndexRestore',
].join(' ');
console.log(`Running: ${restoreCmd.replace(parsed.password, '***')}\n`);
try {
execSync(restoreCmd, { stdio: 'inherit', shell: '/bin/sh' });
} catch (err: any) {
console.error('mongorestore failed:', err.message);
process.exit(1);
}
await mongoose.connect(MONGO_URL);
}
const db = mongoose.connection.db!;
// --- Step 2: Transform ---
console.log('\n=== Step 2: Transform off_products → openfoodfacts ===\n');
const src = db.collection('off_products');
const dst = db.collection('openfoodfacts');
const srcCount = await src.estimatedDocumentCount();
console.log(`Source off_products: ~${srcCount.toLocaleString()} documents`);
try { await dst.drop(); } catch {}
console.log('Transforming…');
let processed = 0;
let inserted = 0;
let skipped = 0;
let batch: any[] = [];
const cursor = src.find(
{ code: { $exists: true, $ne: '' }, $or: [{ 'nutriments.energy-kcal_100g': { $gt: 0 } }, { 'nutriments.energy_100g': { $gt: 0 } }] },
{
projection: {
code: 1, product_name: 1, product_name_en: 1, product_name_de: 1,
product_name_fr: 1, brands: 1, quantity: 1, serving_size: 1,
serving_quantity: 1, nutriments: 1, nutriscore_grade: 1,
categories_tags: 1, product_quantity: 1,
}
}
).batchSize(BATCH_SIZE);
for await (const doc of cursor) {
processed++;
const names = pickName(doc);
if (!names) { skipped++; continue; }
const per100g = extractPer100g(doc.nutriments);
if (!per100g) { skipped++; continue; }
const barcode = String(doc.code).trim();
if (!barcode || barcode.length < 4) { skipped++; continue; }
const entry: any = { barcode, name: names.name, per100g };
if (names.nameDe) entry.nameDe = names.nameDe;
const brands = typeof doc.brands === 'string' ? doc.brands.trim() : '';
if (brands) entry.brands = brands;
const servingG = Number(doc.serving_quantity);
const servingDesc = typeof doc.serving_size === 'string' ? doc.serving_size.trim() : '';
if (servingG > 0 && servingDesc) {
entry.serving = { description: servingDesc, grams: servingG };
}
const pq = Number(doc.product_quantity);
if (pq > 0) entry.productQuantityG = pq;
if (typeof doc.nutriscore_grade === 'string' && /^[a-e]$/.test(doc.nutriscore_grade)) {
entry.nutriscore = doc.nutriscore_grade;
}
if (Array.isArray(doc.categories_tags) && doc.categories_tags.length > 0) {
const cat = String(doc.categories_tags[doc.categories_tags.length - 1])
.replace(/^en:/, '').replace(/-/g, ' ');
entry.category = cat;
}
batch.push(entry);
if (batch.length >= BATCH_SIZE) {
try {
await dst.insertMany(batch, { ordered: false });
inserted += batch.length;
} catch (bulkErr: any) {
// Duplicate key errors are expected (duplicate barcodes in OFF data)
inserted += bulkErr.insertedCount ?? 0;
}
batch = [];
if (processed % 100000 === 0) {
console.log(` ${processed.toLocaleString()} processed, ${inserted.toLocaleString()} inserted, ${skipped.toLocaleString()} skipped`);
}
}
}
if (batch.length > 0) {
try {
await dst.insertMany(batch, { ordered: false });
inserted += batch.length;
} catch (bulkErr: any) {
inserted += bulkErr.insertedCount ?? 0;
}
}
console.log(`\nTransform complete: ${processed.toLocaleString()} processed → ${inserted.toLocaleString()} inserted, ${skipped.toLocaleString()} skipped`);
// --- Step 3: Deduplicate & create indexes ---
console.log('\n=== Step 3: Deduplicate & create indexes ===\n');
// Remove duplicate barcodes (keep first inserted)
const dupes = await dst.aggregate([
{ $group: { _id: '$barcode', ids: { $push: '$_id' }, count: { $sum: 1 } } },
{ $match: { count: { $gt: 1 } } },
]).toArray();
if (dupes.length > 0) {
const idsToRemove = dupes.flatMap(d => d.ids.slice(1));
await dst.deleteMany({ _id: { $in: idsToRemove } });
console.log(` ✓ removed ${idsToRemove.length} duplicate barcodes`);
}
await dst.createIndex({ barcode: 1 }, { unique: true });
console.log(' ✓ barcode (unique)');
await dst.createIndex({ name: 'text', nameDe: 'text', brands: 'text' });
console.log(' ✓ text (name, nameDe, brands)');
// --- Step 4: Cleanup (manual) ---
// To drop the large off_products temp collection after verifying results:
// db.off_products.drop()
console.log('\n=== Step 4: Skipping off_products cleanup (run manually when satisfied) ===');
const finalCount = await dst.countDocuments();
console.log(`\n=== Done: openfoodfacts collection has ${finalCount.toLocaleString()} documents ===\n`);
await mongoose.disconnect();
}
main().catch((err) => {
console.error(err);
process.exit(1);
});
-371
View File
@@ -1,371 +0,0 @@
/**
* Imports USDA FoodData Central data (SR Legacy + Foundation Foods) and generates
* a typed nutrition database for the recipe calorie calculator.
*
* Run with: pnpm exec vite-node scripts/import-usda-nutrition.ts
*
* Downloads bulk CSV data from USDA FDC, filters to relevant food categories,
* extracts macro/micronutrient data per 100g, and outputs src/lib/data/nutritionDb.ts
*/
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { resolve } from 'path';
const DATA_DIR = resolve('data/usda');
const OUTPUT_PATH = resolve('src/lib/data/nutritionDb.ts');
// USDA FDC bulk download URLs
const USDA_URLS = {
srLegacy: 'https://fdc.nal.usda.gov/fdc-datasets/FoodData_Central_sr_legacy_food_csv_2018-04.zip',
foundation: 'https://fdc.nal.usda.gov/fdc-datasets/FoodData_Central_foundation_food_csv_2024-10-31.zip',
};
// Nutrient IDs we care about
const NUTRIENT_IDS: Record<number, string> = {
1008: 'calories',
1003: 'protein',
1004: 'fat',
1258: 'saturatedFat',
1005: 'carbs',
1079: 'fiber',
1063: 'sugars',
// Minerals
1087: 'calcium',
1089: 'iron',
1090: 'magnesium',
1091: 'phosphorus',
1092: 'potassium',
1093: 'sodium',
1095: 'zinc',
// Vitamins
1106: 'vitaminA', // RAE (mcg)
1162: 'vitaminC',
1114: 'vitaminD', // D2+D3 (mcg)
1109: 'vitaminE',
1185: 'vitaminK',
1165: 'thiamin',
1166: 'riboflavin',
1167: 'niacin',
1175: 'vitaminB6',
1178: 'vitaminB12',
1177: 'folate',
// Other
1253: 'cholesterol',
// Amino acids (g/100g)
1212: 'isoleucine',
1213: 'leucine',
1214: 'lysine',
1215: 'methionine',
1217: 'phenylalanine',
1211: 'threonine',
1210: 'tryptophan',
1219: 'valine',
1221: 'histidine',
1222: 'alanine',
1220: 'arginine',
1223: 'asparticAcid',
1216: 'cysteine',
1224: 'glutamicAcid',
1225: 'glycine',
1226: 'proline',
1227: 'serine',
1218: 'tyrosine',
};
// Food categories to include (SR Legacy food_category_id descriptions)
const INCLUDED_CATEGORIES = new Set([
'Dairy and Egg Products',
'Spices and Herbs',
'Baby Foods',
'Fats and Oils',
'Poultry Products',
'Soups, Sauces, and Gravies',
'Sausages and Luncheon Meats',
'Breakfast Cereals',
'Fruits and Fruit Juices',
'Pork Products',
'Vegetables and Vegetable Products',
'Nut and Seed Products',
'Beef Products',
'Beverages',
'Finfish and Shellfish Products',
'Legumes and Legume Products',
'Lamb, Veal, and Game Products',
'Baked Products',
'Sweets',
'Cereal Grains and Pasta',
'Snacks',
'Restaurant Foods',
]);
type NutrientData = Record<string, number>;
interface RawFood {
fdcId: number;
description: string;
categoryId: number;
category: string;
}
interface Portion {
description: string;
grams: number;
}
// Simple CSV line parser that handles quoted fields
function parseCSVLine(line: string): string[] {
const fields: string[] = [];
let current = '';
let inQuotes = false;
for (let i = 0; i < line.length; i++) {
const ch = line[i];
if (ch === '"') {
if (inQuotes && i + 1 < line.length && line[i + 1] === '"') {
current += '"';
i++;
} else {
inQuotes = !inQuotes;
}
} else if (ch === ',' && !inQuotes) {
fields.push(current);
current = '';
} else {
current += ch;
}
}
fields.push(current);
return fields;
}
async function readCSV(filePath: string): Promise<Record<string, string>[]> {
if (!existsSync(filePath)) {
console.warn(` File not found: ${filePath}`);
return [];
}
const content = readFileSync(filePath, 'utf-8');
const lines = content.split('\n').filter(l => l.trim());
if (lines.length === 0) return [];
const headers = parseCSVLine(lines[0]);
const rows: Record<string, string>[] = [];
for (let i = 1; i < lines.length; i++) {
const fields = parseCSVLine(lines[i]);
const row: Record<string, string> = {};
for (let j = 0; j < headers.length; j++) {
row[headers[j]] = fields[j] || '';
}
rows.push(row);
}
return rows;
}
async function downloadAndExtract(url: string, targetDir: string): Promise<void> {
const zipName = url.split('/').pop()!;
const zipPath = resolve(DATA_DIR, zipName);
if (existsSync(targetDir) && readFileSync(resolve(targetDir, '.done'), 'utf-8').trim() === 'ok') {
console.log(` Already extracted: ${targetDir}`);
return;
}
mkdirSync(targetDir, { recursive: true });
if (!existsSync(zipPath)) {
console.log(` Downloading ${zipName}...`);
const response = await fetch(url);
if (!response.ok) throw new Error(`Download failed: ${response.status} ${response.statusText}`);
const buffer = Buffer.from(await response.arrayBuffer());
writeFileSync(zipPath, buffer);
console.log(` Downloaded ${(buffer.length / 1024 / 1024).toFixed(1)}MB`);
}
console.log(` Extracting to ${targetDir}...`);
const { execSync } = await import('child_process');
execSync(`unzip -o -j "${zipPath}" -d "${targetDir}"`, { stdio: 'pipe' });
writeFileSync(resolve(targetDir, '.done'), 'ok');
}
async function importDataset(datasetDir: string, label: string) {
console.log(`\nProcessing ${label}...`);
// Read category mapping
const categoryRows = await readCSV(resolve(datasetDir, 'food_category.csv'));
const categoryMap = new Map<string, string>();
for (const row of categoryRows) {
categoryMap.set(row['id'], row['description']);
}
// Read foods
const foodRows = await readCSV(resolve(datasetDir, 'food.csv'));
const foods = new Map<number, RawFood>();
for (const row of foodRows) {
const catId = parseInt(row['food_category_id'] || '0');
const category = categoryMap.get(row['food_category_id']) || '';
if (!INCLUDED_CATEGORIES.has(category)) continue;
const fdcId = parseInt(row['fdc_id']);
foods.set(fdcId, {
fdcId,
description: row['description'],
categoryId: catId,
category,
});
}
console.log(` Found ${foods.size} foods in included categories`);
// Read nutrients
const nutrientRows = await readCSV(resolve(datasetDir, 'food_nutrient.csv'));
const nutrients = new Map<number, NutrientData>();
for (const row of nutrientRows) {
const fdcId = parseInt(row['fdc_id']);
if (!foods.has(fdcId)) continue;
const nutrientId = parseInt(row['nutrient_id']);
const fieldName = NUTRIENT_IDS[nutrientId];
if (!fieldName) continue;
if (!nutrients.has(fdcId)) nutrients.set(fdcId, {});
const amount = parseFloat(row['amount'] || '0');
if (!isNaN(amount)) {
nutrients.get(fdcId)![fieldName] = amount;
}
}
console.log(` Loaded nutrients for ${nutrients.size} foods`);
// Read portions
const portionRows = await readCSV(resolve(datasetDir, 'food_portion.csv'));
const portions = new Map<number, Portion[]>();
for (const row of portionRows) {
const fdcId = parseInt(row['fdc_id']);
if (!foods.has(fdcId)) continue;
const gramWeight = parseFloat(row['gram_weight'] || '0');
if (!gramWeight || isNaN(gramWeight)) continue;
// Build description from amount + modifier/description
const amount = parseFloat(row['amount'] || '1');
const modifier = row['modifier'] || row['portion_description'] || '';
const desc = modifier
? (amount !== 1 ? `${amount} ${modifier}` : modifier)
: `${amount} unit`;
if (!portions.has(fdcId)) portions.set(fdcId, []);
portions.get(fdcId)!.push({ description: desc, grams: Math.round(gramWeight * 100) / 100 });
}
console.log(` Loaded portions for ${portions.size} foods`);
return { foods, nutrients, portions };
}
function buildNutrientRecord(data: NutrientData | undefined): Record<string, number> {
const allFields = Object.values(NUTRIENT_IDS);
const result: Record<string, number> = {};
for (const field of allFields) {
result[field] = Math.round((data?.[field] || 0) * 100) / 100;
}
return result;
}
async function main() {
console.log('=== USDA Nutrition Database Import ===\n');
mkdirSync(DATA_DIR, { recursive: true });
// Download and extract datasets
const srDir = resolve(DATA_DIR, 'sr_legacy');
const foundationDir = resolve(DATA_DIR, 'foundation');
await downloadAndExtract(USDA_URLS.srLegacy, srDir);
await downloadAndExtract(USDA_URLS.foundation, foundationDir);
// Import both datasets
const sr = await importDataset(srDir, 'SR Legacy');
const foundation = await importDataset(foundationDir, 'Foundation Foods');
// Merge: Foundation Foods takes priority (more detailed), SR Legacy fills gaps
const merged = new Map<string, {
fdcId: number;
name: string;
category: string;
per100g: Record<string, number>;
portions: Portion[];
}>();
// Add SR Legacy first
for (const [fdcId, food] of sr.foods) {
const nutrientData = buildNutrientRecord(sr.nutrients.get(fdcId));
// Skip entries with no nutrient data at all
if (!sr.nutrients.has(fdcId)) continue;
merged.set(food.description.toLowerCase(), {
fdcId,
name: food.description,
category: food.category,
per100g: nutrientData,
portions: sr.portions.get(fdcId) || [],
});
}
// Override with Foundation Foods where available
for (const [fdcId, food] of foundation.foods) {
const nutrientData = buildNutrientRecord(foundation.nutrients.get(fdcId));
if (!foundation.nutrients.has(fdcId)) continue;
merged.set(food.description.toLowerCase(), {
fdcId,
name: food.description,
category: food.category,
per100g: nutrientData,
portions: foundation.portions.get(fdcId) || [],
});
}
console.log(`\nMerged total: ${merged.size} unique foods`);
// Sort by name for stable output
const entries = [...merged.values()].sort((a, b) => a.name.localeCompare(b.name));
// Generate TypeScript output
const tsContent = `// Auto-generated from USDA FoodData Central (SR Legacy + Foundation Foods)
// Generated: ${new Date().toISOString().split('T')[0]}
// Do not edit manually — regenerate with: pnpm exec vite-node scripts/import-usda-nutrition.ts
import type { NutritionPer100g } from '$types/types';
export type NutritionEntry = {
fdcId: number;
name: string;
category: string;
per100g: NutritionPer100g;
portions: { description: string; grams: number }[];
};
export const NUTRITION_DB: NutritionEntry[] = ${JSON.stringify(entries, null, '\t')};
`;
writeFileSync(OUTPUT_PATH, tsContent, 'utf-8');
console.log(`\nWritten ${entries.length} entries to ${OUTPUT_PATH}`);
// Print category breakdown
const categoryCounts = new Map<string, number>();
for (const entry of entries) {
categoryCounts.set(entry.category, (categoryCounts.get(entry.category) || 0) + 1);
}
console.log('\nCategory breakdown:');
for (const [cat, count] of [...categoryCounts.entries()].sort((a, b) => b[1] - a[1])) {
console.log(` ${cat}: ${count}`);
}
}
main().catch(err => {
console.error('Import failed:', err);
process.exit(1);
});
-107
View File
@@ -1,107 +0,0 @@
/**
* One-time migration: convert legacy `season: number[]` (months 112) on every
* Recipe document to the new `seasonRanges: SeasonRange[]` shape.
*
* Contiguous months are coalesced into a single range. A wrap across the year
* boundary (e.g. months [11, 12, 1, 2]) merges into one wrapping range
* Nov 1 → Feb 28; non-contiguous months stay as separate ranges.
*
* The legacy `season` field is then $unset.
*
* Run before deploying the new code path:
* pnpm exec vite-node scripts/migrate-season-to-ranges.ts
*
* Idempotent: a recipe with no `season` field is left untouched.
*/
import { readFileSync } from 'fs';
import { resolve } from 'path';
import mongoose from 'mongoose';
const envPath = resolve(import.meta.dirname ?? '.', '..', '.env');
const envText = readFileSync(envPath, 'utf-8');
const mongoMatch = envText.match(/^MONGO_URL="?([^"\n]+)"?/m);
if (!mongoMatch) { console.error('MONGO_URL not found in .env'); process.exit(1); }
const MONGO_URL = mongoMatch[1];
const LAST_DAY = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
type FixedRange = { startM: number; endM: number };
/**
* Coalesce a set of months (112) into contiguous ranges, merging the
* year-boundary wrap if both Jan and Dec runs are present.
*/
function coalesceMonths(months: number[]): FixedRange[] {
const sorted = [...new Set(months.filter(m => Number.isInteger(m) && m >= 1 && m <= 12))].sort((a, b) => a - b);
if (sorted.length === 0) return [];
const runs: FixedRange[] = [];
let runStart = sorted[0];
let runEnd = sorted[0];
for (let i = 1; i < sorted.length; i++) {
if (sorted[i] === runEnd + 1) {
runEnd = sorted[i];
} else {
runs.push({ startM: runStart, endM: runEnd });
runStart = sorted[i];
runEnd = sorted[i];
}
}
runs.push({ startM: runStart, endM: runEnd });
// Merge the trailing-Dec run into the leading-Jan run so a winter span
// like [11,12,1,2] becomes one wrapping Nov→Feb range instead of two.
if (runs.length >= 2 && runs[0].startM === 1 && runs[runs.length - 1].endM === 12) {
const wrapped = { startM: runs[runs.length - 1].startM, endM: runs[0].endM };
return [wrapped, ...runs.slice(1, -1)];
}
return runs;
}
function rangeFromRun(run: FixedRange) {
return {
start: { kind: 'fixed', m: run.startM, d: 1 },
end: { kind: 'fixed', m: run.endM, d: LAST_DAY[run.endM - 1] }
};
}
async function main() {
await mongoose.connect(MONGO_URL);
const Recipe = mongoose.connection.collection('recipes');
const cursor = Recipe.find({ season: { $exists: true } });
let migrated = 0;
let skipped = 0;
while (await cursor.hasNext()) {
const doc = await cursor.next() as any;
if (!doc) break;
const months: number[] = Array.isArray(doc.season) ? doc.season : [];
const runs = coalesceMonths(months);
if (runs.length === 0) {
await Recipe.updateOne({ _id: doc._id }, { $unset: { season: '' } });
skipped++;
continue;
}
const seasonRanges = runs.map(rangeFromRun);
await Recipe.updateOne(
{ _id: doc._id },
{ $set: { seasonRanges }, $unset: { season: '' } }
);
migrated++;
if (migrated % 25 === 0) console.log(` migrated ${migrated}`);
}
console.log(`\nDone. Migrated: ${migrated}. Skipped (empty season): ${skipped}.`);
await mongoose.disconnect();
}
main().catch((e) => {
console.error(e);
process.exit(1);
});
-61
View File
@@ -1,61 +0,0 @@
#!/usr/bin/env bash
# Process raw Gemini-generated shopping icons:
# 1. Crop out the bottom-right watermark (sparkle)
# 2. Remove solid black background → transparent
# 3. Trim whitespace/transparent padding
#
# Usage: ./scripts/process-gemini-icons.sh [file...]
# No args: processes all unprocessed gemini_raw-*.png in static/shopping-icons/
# With args: processes only the specified raw files
set -euo pipefail
ICON_DIR="static/shopping-icons"
# Collect files to process
if [ $# -gt 0 ]; then
files=("$@")
else
files=()
for raw in "$ICON_DIR"/gemini_raw-*.png; do
[ -f "$raw" ] || continue
name=$(basename "$raw" | sed 's/gemini_raw-//')
if [ ! -f "$ICON_DIR/$name" ]; then
files+=("$raw")
fi
done
fi
if [ ${#files[@]} -eq 0 ]; then
echo "No unprocessed icons found."
exit 0
fi
echo "Processing ${#files[@]} icon(s)..."
for raw in "${files[@]}"; do
name=$(basename "$raw" | sed 's/gemini_raw-//')
out="$ICON_DIR/$name"
echo " $name"
# Get image dimensions
dims=$(identify -format '%wx%h' "$raw")
w=${dims%x*}
h=${dims#*x}
# 1. Cover watermark sparkle in bottom-right with black
# 2. Remove all black → transparent
# 3. Trim transparent padding
wm_size=$(( w * 8 / 100 ))
wm_x=$(( w - wm_size ))
wm_y=$(( h - wm_size ))
magick "$raw" \
-fill black -draw "rectangle ${wm_x},${wm_y} ${w},${h}" \
-fuzz 25% -transparent black \
-trim +repage \
"$out"
done
echo "Done."
-156
View File
@@ -1,156 +0,0 @@
/**
* Scrapes the full ExerciseDB v2 API (via RapidAPI) and saves raw data.
*
* Run with: RAPIDAPI_KEY=... pnpm exec vite-node scripts/scrape-exercises.ts
*
* Outputs: src/lib/data/exercisedb-raw.json
*
* Supports resuming — already-fetched exercises are read from the output file
* and skipped. Saves to disk after every detail fetch.
*/
import { writeFileSync, readFileSync, existsSync } from 'fs';
import { resolve } from 'path';
const API_HOST = 'edb-with-videos-and-images-by-ascendapi.p.rapidapi.com';
const API_KEY = process.env.RAPIDAPI_KEY;
if (!API_KEY) {
console.error('Set RAPIDAPI_KEY environment variable');
process.exit(1);
}
const BASE = `https://${API_HOST}/api/v1`;
const HEADERS = {
'x-rapidapi-host': API_HOST,
'x-rapidapi-key': API_KEY,
};
const OUTPUT_PATH = resolve('src/lib/data/exercisedb-raw.json');
const IDS_CACHE_PATH = resolve('src/lib/data/.exercisedb-ids.json');
const DELAY_MS = 1500;
const MAX_RETRIES = 5;
function sleep(ms: number) {
return new Promise(r => setTimeout(r, ms));
}
async function apiFetch(path: string, attempt = 1): Promise<any> {
const res = await fetch(`${BASE}${path}`, { headers: HEADERS });
if (res.status === 429 && attempt <= MAX_RETRIES) {
const wait = DELAY_MS * 2 ** attempt;
console.warn(` rate limited on ${path}, retrying in ${wait}ms...`);
await sleep(wait);
return apiFetch(path, attempt + 1);
}
if (!res.ok) throw new Error(`${res.status} ${res.statusText} for ${path}`);
return res.json();
}
function loadExisting(): { metadata: any; exercises: any[] } | null {
if (!existsSync(OUTPUT_PATH)) return null;
try {
const data = JSON.parse(readFileSync(OUTPUT_PATH, 'utf-8'));
if (data.exercises?.length) {
console.log(` found existing file with ${data.exercises.length} exercises`);
return { metadata: data.metadata, exercises: data.exercises };
}
} catch {}
return null;
}
function saveToDisk(metadata: any, exercises: any[]) {
const output = {
scrapedAt: new Date().toISOString(),
metadata,
exercises,
};
writeFileSync(OUTPUT_PATH, JSON.stringify(output, null, 2));
}
async function fetchAllIds(): Promise<string[]> {
const ids: string[] = [];
let cursor: string | undefined;
while (true) {
const params = new URLSearchParams({ limit: '100' });
if (cursor) params.set('after', cursor);
const res = await apiFetch(`/exercises?${params}`);
for (const ex of res.data) {
ids.push(ex.exerciseId);
}
console.log(` fetched page, ${ids.length} IDs so far`);
if (!res.meta.hasNextPage) break;
cursor = res.meta.nextCursor;
await sleep(DELAY_MS);
}
return ids;
}
async function fetchMetadata() {
const endpoints = ['/bodyparts', '/equipments', '/muscles', '/exercisetypes'] as const;
const keys = ['bodyParts', 'equipments', 'muscles', 'exerciseTypes'] as const;
const result: Record<string, any> = {};
for (let i = 0; i < endpoints.length; i++) {
const res = await apiFetch(endpoints[i]);
result[keys[i]] = res.data;
await sleep(DELAY_MS);
}
return result;
}
async function main() {
console.log('=== ExerciseDB v2 Scraper ===\n');
const existing = loadExisting();
const fetchedIds = new Set(existing?.exercises.map((e: any) => e.exerciseId) ?? []);
console.log('Fetching metadata...');
const metadata = existing?.metadata ?? await fetchMetadata();
if (!existing?.metadata) {
console.log(` ${metadata.bodyParts.length} body parts, ${metadata.equipments.length} equipments, ${metadata.muscles.length} muscles, ${metadata.exerciseTypes.length} exercise types\n`);
} else {
console.log(' using cached metadata\n');
}
let ids: string[];
if (existsSync(IDS_CACHE_PATH)) {
ids = JSON.parse(readFileSync(IDS_CACHE_PATH, 'utf-8'));
console.log(`Using cached exercise IDs (${ids.length})\n`);
} else {
console.log('Fetching exercise IDs...');
ids = await fetchAllIds();
writeFileSync(IDS_CACHE_PATH, JSON.stringify(ids));
console.log(` ${ids.length} total exercises\n`);
}
const remaining = ids.filter(id => !fetchedIds.has(id));
if (remaining.length === 0) {
console.log('All exercises already fetched!');
return;
}
console.log(`Fetching ${remaining.length} remaining details (${fetchedIds.size} already cached)...`);
const exercises = [...(existing?.exercises ?? [])];
for (const id of remaining) {
const detail = await apiFetch(`/exercises/${id}`);
exercises.push(detail.data);
saveToDisk(metadata, exercises);
if (exercises.length % 10 === 0 || exercises.length === ids.length) {
console.log(` ${exercises.length}/${ids.length} details fetched`);
}
await sleep(DELAY_MS);
}
console.log(`\nDone! ${exercises.length} exercises written to ${OUTPUT_PATH}`);
}
main().catch(err => {
console.error(err);
process.exit(1);
});
-132
View File
@@ -1,132 +0,0 @@
/**
* Split a single-file i18n module (with an object literal whose values are
* `Record<locale, string>`) into per-locale files under
* src/lib/i18n/<namespace>/<locale>.ts.
*
* The first locale is the source of truth; others use `as const satisfies
* Record<keyof typeof <first>, string>` so missing translations fail
* type-checking.
*
* Run: pnpm exec vite-node scripts/split-i18n.ts <source> <namespace> <locales,csv> [--marker=<marker>] [--basename=<name>]
* e.g. ... cospendI18n.ts cospend de,en
* ... calendarI18n.ts calendar de,en,la --marker='export const ui = {' --basename=de
*
* Defaults: marker = `const translations: Translations = {`, basename = first locale.
*/
import { readFileSync, writeFileSync, mkdirSync } from 'node:fs';
const [, , srcPath, namespace, localesCsv, ...flags] = process.argv;
if (!srcPath || !namespace || !localesCsv) {
console.error(
'usage: split-i18n.ts <source> <namespace> <locales,csv> [--marker=...] [--basename=...]'
);
process.exit(1);
}
const locales = localesCsv.split(',').map((s) => s.trim()).filter(Boolean);
const markerFlag = flags.find((f) => f.startsWith('--marker='));
const startMarker = markerFlag
? markerFlag.slice('--marker='.length)
: 'const translations: Translations = {';
const basenameFlag = flags.find((f) => f.startsWith('--basename='));
const fileBase = basenameFlag ? basenameFlag.slice('--basename='.length) : '';
const src = readFileSync(srcPath, 'utf8');
// Slice the translations object body
const startIdx = src.indexOf(startMarker);
if (startIdx === -1) throw new Error(`marker not found in ${srcPath}: ${startMarker}`);
// Object literal can close with `};` or `} as const;` — pick the earliest match.
const candA = src.indexOf('\n};', startIdx);
const candB = src.indexOf('\n} as const', startIdx);
const endIdx =
candA < 0 ? candB : candB < 0 ? candA : Math.min(candA, candB);
if (endIdx === -1) throw new Error('translations object end not found');
const body = src.slice(startIdx + startMarker.length, endIdx);
// Match each translation entry boundary: `key: { ...inner... },`. Each
// entry's body is then parsed independently for `loc: 'value'` pairs, so
// locale order in the source file doesn't matter.
const entryRe = /^\s*(\w+)\s*:\s*\{([\s\S]*?)\}\s*,?\s*$/gm;
// Match `loc: '...'` OR `loc: "..."` (double quotes are used when the string
// contains a literal apostrophe).
const localeRe = /(\w+)\s*:\s*(?:'([^']*)'|"((?:\\.|[^"\\])*)")/g;
function decodeJsString(raw: string, doubleQuoted: boolean): string {
if (doubleQuoted) {
// Already valid JSON (escapes preserved). Parse directly.
return JSON.parse('"' + raw + '"');
}
// Single-quoted: convert any \' → ' and escape literal " for JSON.
const jsonReady = '"' + raw.replace(/\\'/g, "'").replace(/"/g, '\\"') + '"';
return JSON.parse(jsonReady);
}
interface Entry {
key: string;
values: Record<string, string>;
}
const entries: Entry[] = [];
let m: RegExpExecArray | null;
while ((m = entryRe.exec(body)) !== null) {
const inner = m[2];
const values: Record<string, string> = {};
let lm: RegExpExecArray | null;
while ((lm = localeRe.exec(inner)) !== null) {
const single = lm[2];
const double = lm[3];
values[lm[1]] = single !== undefined
? decodeJsString(single, false)
: decodeJsString(double, true);
}
for (const loc of locales) {
if (!(loc in values)) {
throw new Error(`entry "${m[1]}" is missing locale "${loc}"`);
}
}
entries.push({ key: m[1], values });
}
console.log(`extracted ${entries.length} entries`);
const outDir = `src/lib/i18n/${namespace}`;
mkdirSync(outDir, { recursive: true });
const sourceLocale = locales[0];
// Optional file prefix lets us split multiple tables into the same dir
// (e.g. calendar `ui` → de.ts, calendar `ui1962` → de_1962.ts).
const path = (loc: string) => `${outDir}/${fileBase ? `${loc}_${fileBase}` : loc}.ts`;
// Write the source-of-truth locale (no satisfies clause).
{
const lines = [
'/** Generated by scripts/split-i18n.ts. */',
`/** ${sourceLocale.toUpperCase()} ${namespace}${fileBase ? ` (${fileBase})` : ''} UI strings — source of truth for the key set. */`,
'',
`export const ${sourceLocale} = {`
];
for (const e of entries) lines.push(`\t${e.key}: ${JSON.stringify(e.values[sourceLocale])},`);
lines.push('} as const;', '');
writeFileSync(path(sourceLocale), lines.join('\n'));
}
// Write the other locales with `satisfies` constraint.
const sourceFile = fileBase ? `${sourceLocale}_${fileBase}` : sourceLocale;
for (let i = 1; i < locales.length; i++) {
const loc = locales[i];
const lines = [
'/** Generated by scripts/split-i18n.ts. */',
`import type { ${sourceLocale} } from './${sourceFile}';`,
'',
`export const ${loc} = {`
];
for (const e of entries) lines.push(`\t${e.key}: ${JSON.stringify(e.values[loc])},`);
lines.push(
`} as const satisfies Record<keyof typeof ${sourceLocale}, string>;`,
''
);
writeFileSync(path(loc), lines.join('\n'));
}
console.log(`wrote ${locales.map(path).join(', ')}`);
-54
View File
@@ -1,54 +0,0 @@
#!/usr/bin/env bash
# Subset NotoColorEmoji to only the emojis we actually use.
# Requires: fonttools (provides pyftsubset) and woff2 (provides woff2_compress)
#
# Source font: system-installed NotoColorEmoji.ttf
# Output: static/fonts/NotoColorEmoji.woff2 + .ttf
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
OUT_DIR="$PROJECT_ROOT/static/fonts"
SRC_FONT="/usr/share/fonts/noto/NotoColorEmoji.ttf"
if [ ! -f "$SRC_FONT" ]; then
echo "Error: Source font not found at $SRC_FONT" >&2
exit 1
fi
# ─── Fixed list of emojis to include ────────────────────────────────
# Recipe icons (from database + hardcoded)
# Season/liturgical: ☀️ ✝️ ❄️ 🌷 🍂 🎄 🐇
# Food/recipe: 🍽️ 🥫
# UI/cospend categories: 🛒 🛍️ 🚆 ⚡ 🎉 🤝 💸
# Status/feedback: ❤️ 🖤 ✅ ❌ 🚀 ⚠️ ✨ 🔄
# Features: 📋 🖼️ 📖 🤖 🌐 🔐 🔍 🚫
EMOJIS="☀✝❄🌷🍂🎄🐇🍽🥫🛒🛍🚆⚡🎉🤝💸❤🖤✅❌🚀⚠✨🔄📋🖼📖🤖🌐🔐🔍🚫"
# ────────────────────────────────────────────────────────────────────
# Build Unicode codepoint list from the emoji string (Python for reliable Unicode handling)
UNICODES=$(python3 -c "print(','.join(f'U+{ord(c):04X}' for c in '$EMOJIS'))")
GLYPH_COUNT=$(python3 -c "print(len('$EMOJIS'))")
echo "Subsetting NotoColorEmoji with $GLYPH_COUNT glyphs..."
# Subset to TTF
pyftsubset "$SRC_FONT" \
--unicodes="$UNICODES" \
--output-file="$OUT_DIR/NotoColorEmoji.ttf" \
--no-ignore-missing-unicodes
# Convert to WOFF2
woff2_compress "$OUT_DIR/NotoColorEmoji.ttf"
ORIG_SIZE=$(stat -c%s "$SRC_FONT" 2>/dev/null || stat -f%z "$SRC_FONT")
TTF_SIZE=$(stat -c%s "$OUT_DIR/NotoColorEmoji.ttf" 2>/dev/null || stat -f%z "$OUT_DIR/NotoColorEmoji.ttf")
WOFF2_SIZE=$(stat -c%s "$OUT_DIR/NotoColorEmoji.woff2" 2>/dev/null || stat -f%z "$OUT_DIR/NotoColorEmoji.woff2")
echo "Done!"
echo " Original: $(numfmt --to=iec "$ORIG_SIZE")"
echo " TTF: $(numfmt --to=iec "$TTF_SIZE")"
echo " WOFF2: $(numfmt --to=iec "$WOFF2_SIZE")"
-337
View File
@@ -1,337 +0,0 @@
/**
* Translates apologetik English data → target language via DeepL.
*
* Usage:
* pnpm exec vite-node scripts/translate-apologetik.ts # default DE
* pnpm exec vite-node scripts/translate-apologetik.ts -- --lang=DE
*
* Reads: src/lib/data/apologetik.ts (English source of truth)
* Writes: src/lib/data/apologetik.<lang>.ts
*
* Note: DeepL does not support Latin. For LA, translate manually or wire a
* different provider.
*/
import { writeFileSync, readFileSync } from 'fs';
import { resolve } from 'path';
// Minimal .env loader — avoid extra deps.
function loadEnv() {
try {
const raw = readFileSync(resolve(process.cwd(), '.env'), 'utf8');
for (const line of raw.split('\n')) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) continue;
const eq = trimmed.indexOf('=');
if (eq < 0) continue;
const key = trimmed.slice(0, eq).trim();
let value = trimmed.slice(eq + 1).trim();
if (
(value.startsWith('"') && value.endsWith('"')) ||
(value.startsWith("'") && value.endsWith("'"))
) {
value = value.slice(1, -1);
}
if (!(key in process.env)) process.env[key] = value;
}
} catch {
// no .env — fine, rely on process env
}
}
loadEnv();
import {
ARCHETYPES,
ARGUMENTS,
POS_VOICES,
POS_LAYERS,
POS_ARGUMENTS,
type Archetype,
type Argument,
type Counter,
type PosVoice,
type PosLayer,
type PosArgument,
type PosCounter
} from '../src/lib/data/apologetik';
const DEEPL_API_KEY = process.env.DEEPL_API_KEY;
const DEEPL_API_URL = process.env.DEEPL_API_URL || 'https://api-free.deepl.com/v2/translate';
if (!DEEPL_API_KEY) {
console.error('DEEPL_API_KEY missing from .env');
process.exit(1);
}
const argLang = process.argv.find((a) => a.startsWith('--lang='))?.split('=')[1];
const TARGET_LANG = (argLang ?? 'DE').toUpperCase();
const FILE_LANG = TARGET_LANG.toLowerCase();
const BATCH_SIZE = 50;
const cache = new Map<string, string>();
// Manual overrides applied after DeepL translation, keyed by English source.
// Use for cases where DeepL produces a wrong / inconsistent German rendering
// that should survive regeneration.
const OVERRIDES: Record<string, Record<string, string>> = {
DE: {
// generic-masculine for archetype role names
'The Scientist': 'Der Wissenschaftler'
}
};
async function translateBatch(texts: string[]): Promise<string[]> {
const out: string[] = [];
const toFetch: { idx: number; text: string }[] = [];
for (let i = 0; i < texts.length; i++) {
const cached = cache.get(texts[i]);
if (cached !== undefined) out[i] = cached;
else toFetch.push({ idx: i, text: texts[i] });
}
for (let i = 0; i < toFetch.length; i += BATCH_SIZE) {
const chunk = toFetch.slice(i, i + BATCH_SIZE);
const body = {
text: chunk.map((c) => c.text),
source_lang: 'EN',
target_lang: TARGET_LANG,
preserve_formatting: true,
formality: 'prefer_more'
};
const resp = await fetch(DEEPL_API_URL, {
method: 'POST',
headers: {
Authorization: `DeepL-Auth-Key ${DEEPL_API_KEY}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body)
});
if (!resp.ok) {
const t = await resp.text();
throw new Error(`DeepL ${resp.status}: ${t}`);
}
const data = (await resp.json()) as { translations: { text: string }[] };
data.translations.forEach((tr, j) => {
const slot = chunk[j];
out[slot.idx] = tr.text;
cache.set(slot.text, tr.text);
});
process.stdout.write(` · translated ${Math.min(i + BATCH_SIZE, toFetch.length)}/${toFetch.length}\n`);
}
return out;
}
// Helper: collect translatable strings from an object's selected fields,
// queue them, and return a setter that applies the translations back.
type Job = {
get: () => string;
set: (v: string) => void;
};
const jobs: Job[] = [];
function field<T extends object, K extends keyof T>(obj: T, key: K) {
if (typeof obj[key] !== 'string') return;
jobs.push({
get: () => obj[key] as unknown as string,
set: (v) => {
(obj as any)[key] = v;
}
});
}
function arrayField<T>(arr: T[], key: keyof T) {
for (const item of arr) field(item as any, key as any);
}
function stringArray(arr: string[]) {
for (let i = 0; i < arr.length; i++) {
const idx = i;
jobs.push({
get: () => arr[idx],
set: (v) => {
arr[idx] = v;
}
});
}
}
// ---------- clone source data ----------
function cloneArchetype(a: Archetype): Archetype {
return { ...a };
}
function cloneCounter(c: Counter): Counter {
return { ...c, body: [...c.body], cites: [...c.cites] };
}
function cloneArgument(a: Argument): Argument {
const counters: Record<string, Counter> = {};
for (const [k, v] of Object.entries(a.counters)) counters[k] = cloneCounter(v);
return { ...a, related: [...a.related], counters };
}
function clonePosVoice(v: PosVoice): PosVoice {
return { ...v };
}
function clonePosLayer(l: PosLayer): PosLayer {
return { ...l };
}
function clonePosCounter(c: PosCounter): PosCounter {
return { ...c, body: [...c.body], cites: [...c.cites] };
}
function clonePosArgument(a: PosArgument): PosArgument {
const voices: Record<string, PosCounter> = {};
for (const [k, v] of Object.entries(a.voices)) voices[k] = clonePosCounter(v);
return {
...a,
related: [...a.related],
voices,
scripture: { ...a.scripture }
};
}
const archetypesOut: Record<string, Archetype> = {};
for (const [k, v] of Object.entries(ARCHETYPES)) archetypesOut[k] = cloneArchetype(v);
const argumentsOut: Argument[] = ARGUMENTS.map(cloneArgument);
const posVoicesOut: Record<string, PosVoice> = {};
for (const [k, v] of Object.entries(POS_VOICES)) posVoicesOut[k] = clonePosVoice(v);
const posLayersOut: PosLayer[] = POS_LAYERS.map(clonePosLayer);
const posArgsOut: PosArgument[] = POS_ARGUMENTS.map(clonePosArgument);
// ---------- queue translation jobs ----------
//
// What we DON'T translate:
// - id, n, related (cross-link keys)
// - color, colorSoft, colorHex, glyph, font (visual)
// - era (numeric / dates)
// - cites (bibliographic — keep canonical English)
// - scripture.ref (book chapter:verse)
// - layer (enum key)
// - strength (number)
// archetypes — translate name + sub. DeepL leaves canonical proper nouns alone
// (e.g. "Pascal") and localizes ones with established forms ("Thomas von Aquin",
// "Franz von Assisi", "Augustinus"). Role names ("The Logician") get translated
// idiomatically.
for (const a of Object.values(archetypesOut)) {
field(a, 'name');
field(a, 'sub');
}
// arguments
for (const a of argumentsOut) {
field(a, 'title');
field(a, 'short');
field(a, 'steel');
field(a, 'quote');
field(a, 'quoteBy');
field(a, 'pub');
for (const c of Object.values(a.counters)) {
field(c, 'lede');
stringArray(c.body);
}
}
// pos voices — translate name + sub (same rationale as archetypes).
for (const v of Object.values(posVoicesOut)) {
field(v, 'name');
field(v, 'sub');
}
// pos layers
for (const l of posLayersOut) {
field(l, 'title');
field(l, 'sub');
}
// pos arguments
for (const a of posArgsOut) {
field(a, 'title');
field(a, 'claim');
field(a, 'thesis');
if (a.note) field(a, 'note');
field(a.scripture, 'text');
for (const c of Object.values(a.voices)) {
field(c, 'lede');
stringArray(c.body);
}
}
console.log(`Queued ${jobs.length} translation jobs · target ${TARGET_LANG}`);
// Site is Swiss High German — no ß. Bible quotes are sourced from Allioli at
// runtime and untouched by this pass, so this only affects translated prose.
function postProcess(s: string): string {
if (TARGET_LANG === 'DE') return s.replace(/ß/g, 'ss');
return s;
}
// ---------- run translations ----------
const inputs = jobs.map((j) => j.get());
const outputs = await translateBatch(inputs);
const overrides = OVERRIDES[TARGET_LANG] ?? {};
let overrideHits = 0;
jobs.forEach((j, i) => {
const en = inputs[i];
if (overrides[en] !== undefined) {
j.set(postProcess(overrides[en]));
overrideHits++;
} else {
j.set(postProcess(outputs[i]));
}
});
if (overrideHits) console.log(`Applied ${overrideHits} manual override(s)`);
console.log(`Done · cache hits saved ${jobs.length - cache.size} duplicate calls`);
// ---------- emit file ----------
function ts(value: unknown, indent = 0): string {
const pad = '\t'.repeat(indent);
if (value === null) return 'null';
if (typeof value === 'string') return JSON.stringify(value);
if (typeof value === 'number' || typeof value === 'boolean') return String(value);
if (Array.isArray(value)) {
if (value.length === 0) return '[]';
const inner = value.map((v) => `${pad}\t${ts(v, indent + 1)}`).join(',\n');
return `[\n${inner}\n${pad}]`;
}
if (typeof value === 'object') {
const entries = Object.entries(value as object);
if (entries.length === 0) return '{}';
const inner = entries
.map(([k, v]) => `${pad}\t${JSON.stringify(k)}: ${ts(v, indent + 1)}`)
.join(',\n');
return `{\n${inner}\n${pad}}`;
}
return JSON.stringify(value);
}
const header = `// AUTO-GENERATED by scripts/translate-apologetik.ts — DO NOT EDIT BY HAND.
// Source: src/lib/data/apologetik.ts (EN) · Target: ${TARGET_LANG} · Generated ${new Date().toISOString()}
//
// To regenerate: pnpm exec vite-node scripts/translate-apologetik.ts -- --lang=${TARGET_LANG}
import type {
\tArchetype,
\tArgument,
\tPosArgument,
\tPosLayer,
\tPosVoice
} from './apologetik';
`;
const content = [
header,
`export const ARCHETYPES_${TARGET_LANG}: Record<string, Archetype> = ${ts(archetypesOut)};`,
'',
`export const ARGUMENTS_${TARGET_LANG}: Argument[] = ${ts(argumentsOut)};`,
'',
`export const POS_VOICES_${TARGET_LANG}: Record<string, PosVoice> = ${ts(posVoicesOut)};`,
'',
`export const POS_LAYERS_${TARGET_LANG}: PosLayer[] = ${ts(posLayersOut)};`,
'',
`export const POS_ARGUMENTS_${TARGET_LANG}: PosArgument[] = ${ts(posArgsOut)};`,
''
].join('\n');
const outPath = resolve(process.cwd(), `src/lib/data/apologetik.${FILE_LANG}.ts`);
writeFileSync(outPath, content, 'utf8');
console.log(`✓ Wrote ${outPath}`);
-4964
View File
File diff suppressed because it is too large Load Diff
-17
View File
@@ -1,17 +0,0 @@
[package]
name = "bocken"
version = "0.5.3"
edition = "2021"
[lib]
name = "bocken_lib"
crate-type = ["lib", "cdylib", "staticlib"]
[build-dependencies]
tauri-build = { version = "2", features = [] }
[dependencies]
tauri = { version = "2", features = [] }
tauri-plugin-geolocation = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
-3
View File
@@ -1,3 +0,0 @@
fn main() {
tauri_build::build()
}
-14
View File
@@ -1,14 +0,0 @@
{
"identifier": "bocken-remote",
"windows": ["main"],
"remote": {
"urls": ["https://bocken.org/*", "http://192.168.1.4:5173/*"]
},
"permissions": [
"geolocation:allow-check-permissions",
"geolocation:allow-request-permissions",
"geolocation:allow-get-current-position",
"geolocation:allow-watch-position",
"geolocation:allow-clear-watch"
]
}
-12
View File
@@ -1,12 +0,0 @@
# EditorConfig is awesome: https://EditorConfig.org
# top-most EditorConfig file
root = true
[*]
indent_style = space
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = false
insert_final_newline = false
-19
View File
@@ -1,19 +0,0 @@
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
build
/captures
.externalNativeBuild
.cxx
local.properties
key.properties
/.tauri
/tauri.settings.gradle
-6
View File
@@ -1,6 +0,0 @@
/src/main/**/generated
/src/main/jniLibs/**/*.so
/src/main/assets/tauri.conf.json
/tauri.build.gradle.kts
/proguard-tauri.pro
/tauri.properties
@@ -1,70 +0,0 @@
import java.util.Properties
plugins {
id("com.android.application")
id("org.jetbrains.kotlin.android")
id("rust")
}
val tauriProperties = Properties().apply {
val propFile = file("tauri.properties")
if (propFile.exists()) {
propFile.inputStream().use { load(it) }
}
}
android {
compileSdk = 36
namespace = "org.bocken.app"
defaultConfig {
manifestPlaceholders["usesCleartextTraffic"] = "false"
applicationId = "org.bocken.app"
minSdk = 24
targetSdk = 36
versionCode = tauriProperties.getProperty("tauri.android.versionCode", "1").toInt()
versionName = tauriProperties.getProperty("tauri.android.versionName", "1.0")
}
buildTypes {
getByName("debug") {
manifestPlaceholders["usesCleartextTraffic"] = "true"
isDebuggable = true
isJniDebuggable = true
isMinifyEnabled = false
packaging { jniLibs.keepDebugSymbols.add("*/arm64-v8a/*.so")
jniLibs.keepDebugSymbols.add("*/armeabi-v7a/*.so")
jniLibs.keepDebugSymbols.add("*/x86/*.so")
jniLibs.keepDebugSymbols.add("*/x86_64/*.so")
}
}
getByName("release") {
isMinifyEnabled = true
proguardFiles(
*fileTree(".") { include("**/*.pro") }
.plus(getDefaultProguardFile("proguard-android-optimize.txt"))
.toList().toTypedArray()
)
}
}
kotlinOptions {
jvmTarget = "1.8"
}
buildFeatures {
buildConfig = true
}
}
rust {
rootDirRel = "../../../"
}
dependencies {
implementation("androidx.webkit:webkit:1.14.0")
implementation("androidx.appcompat:appcompat:1.7.1")
implementation("androidx.activity:activity-ktx:1.10.1")
implementation("com.google.android.material:material:1.12.0")
testImplementation("junit:junit:4.13.2")
androidTestImplementation("androidx.test.ext:junit:1.1.4")
androidTestImplementation("androidx.test.espresso:espresso-core:3.5.0")
}
apply(from = "tauri.build.gradle.kts")
-21
View File
@@ -1,21 +0,0 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
@@ -1,58 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_BACKGROUND_LOCATION" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_LOCATION" />
<uses-permission android:name="android.permission.POST_NOTIFICATIONS" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.VIBRATE" />
<!-- Step detector sensor (cadence during GPS workouts); runtime-requested on API 29+ -->
<uses-permission android:name="android.permission.ACTIVITY_RECOGNITION" />
<!-- AndroidTV support -->
<uses-feature android:name="android.software.leanback" android:required="false" />
<queries>
<intent>
<action android:name="android.intent.action.TTS_SERVICE" />
</intent>
</queries>
<application
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:theme="@style/Theme.bocken"
android:usesCleartextTraffic="${usesCleartextTraffic}">
<activity
android:configChanges="orientation|keyboardHidden|keyboard|screenSize|locale|smallestScreenSize|screenLayout|uiMode"
android:launchMode="singleTask"
android:label="@string/main_activity_title"
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
<!-- AndroidTV support -->
<category android:name="android.intent.category.LEANBACK_LAUNCHER" />
</intent-filter>
</activity>
<service
android:name=".LocationForegroundService"
android:foregroundServiceType="location"
android:exported="false" />
<provider
android:name="androidx.core.content.FileProvider"
android:authorities="${applicationId}.fileprovider"
android:exported="false"
android:grantUriPermissions="true">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/file_paths" />
</provider>
</application>
</manifest>
@@ -1,210 +0,0 @@
package org.bocken.app
import android.Manifest
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.os.Build
import android.os.VibrationAttributes
import android.os.VibrationEffect
import android.os.Vibrator
import android.os.VibratorManager
import android.speech.tts.TextToSpeech
import android.webkit.JavascriptInterface
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import org.json.JSONArray
import org.json.JSONObject
import java.util.Locale
class AndroidBridge(private val context: Context) {
companion object {
const val REQ_BACKGROUND_LOCATION = 1002
const val REQ_NOTIFICATIONS = 1003
const val REQ_ACTIVITY_RECOGNITION = 1004
}
@JavascriptInterface
fun startLocationService(ttsConfigJson: String, startPaused: Boolean) {
if (context is Activity) {
// Request notification permission on Android 13+ (required for foreground service notification)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
if (ContextCompat.checkSelfPermission(context, Manifest.permission.POST_NOTIFICATIONS)
!= PackageManager.PERMISSION_GRANTED
) {
ActivityCompat.requestPermissions(
context,
arrayOf(Manifest.permission.POST_NOTIFICATIONS),
REQ_NOTIFICATIONS
)
}
}
// Request background location on Android 10+ (required for screen-off GPS)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
if (ContextCompat.checkSelfPermission(context, Manifest.permission.ACCESS_BACKGROUND_LOCATION)
!= PackageManager.PERMISSION_GRANTED
) {
ActivityCompat.requestPermissions(
context,
arrayOf(Manifest.permission.ACCESS_BACKGROUND_LOCATION),
REQ_BACKGROUND_LOCATION
)
}
}
// Request activity recognition on Android 10+ (required for step detector / cadence)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
if (ContextCompat.checkSelfPermission(context, Manifest.permission.ACTIVITY_RECOGNITION)
!= PackageManager.PERMISSION_GRANTED
) {
ActivityCompat.requestPermissions(
context,
arrayOf(Manifest.permission.ACTIVITY_RECOGNITION),
REQ_ACTIVITY_RECOGNITION
)
}
}
}
val intent = Intent(context, LocationForegroundService::class.java).apply {
putExtra("ttsConfig", ttsConfigJson)
putExtra("startPaused", startPaused)
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
context.startForegroundService(intent)
} else {
context.startService(intent)
}
}
/** Overload: TTS config only (not paused) */
@JavascriptInterface
fun startLocationService(ttsConfigJson: String) {
startLocationService(ttsConfigJson, false)
}
/** Overload: no args (not paused, no TTS) */
@JavascriptInterface
fun startLocationService() {
startLocationService("{}", false)
}
@JavascriptInterface
fun stopLocationService() {
val intent = Intent(context, LocationForegroundService::class.java)
context.stopService(intent)
}
@JavascriptInterface
fun getPoints(): String {
return LocationForegroundService.drainPoints()
}
@JavascriptInterface
fun isTracking(): Boolean {
return LocationForegroundService.tracking
}
@JavascriptInterface
fun pauseTracking() {
LocationForegroundService.instance?.doPause()
}
@JavascriptInterface
fun resumeTracking() {
LocationForegroundService.instance?.doResume()
}
@JavascriptInterface
fun getIntervalState(): String {
return LocationForegroundService.getIntervalState()
}
/** True if cadence (step detector) is usable — permission granted or not required (pre-Q). */
@JavascriptInterface
fun hasActivityRecognitionPermission(): Boolean {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) return true
return ContextCompat.checkSelfPermission(
context, Manifest.permission.ACTIVITY_RECOGNITION
) == PackageManager.PERMISSION_GRANTED
}
/**
* Force-vibrate bypassing silent/DND by using USAGE_ACCESSIBILITY attributes.
* Why: default web Vibration API uses USAGE_TOUCH which Android silences.
*/
@JavascriptInterface
fun forceVibrate(durationMs: Long, intensityPct: Int) {
val vibrator: Vibrator? = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
(context.getSystemService(Context.VIBRATOR_MANAGER_SERVICE) as? VibratorManager)?.defaultVibrator
} else {
@Suppress("DEPRECATION")
context.getSystemService(Context.VIBRATOR_SERVICE) as? Vibrator
}
if (vibrator?.hasVibrator() != true) return
val amplitude = (intensityPct.coerceIn(1, 100) * 255 / 100).coerceAtLeast(1)
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
val effect = VibrationEffect.createOneShot(durationMs, amplitude)
val attrs = VibrationAttributes.Builder()
.setUsage(VibrationAttributes.USAGE_ACCESSIBILITY)
.build()
vibrator.vibrate(effect, attrs)
} else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
vibrator.vibrate(VibrationEffect.createOneShot(durationMs, amplitude))
} else {
@Suppress("DEPRECATION")
vibrator.vibrate(durationMs)
}
}
/** Returns true if at least one TTS engine is installed on the device. */
@JavascriptInterface
fun hasTtsEngine(): Boolean {
val dummy = TextToSpeech(context, null)
val hasEngine = dummy.engines.isNotEmpty()
dummy.shutdown()
return hasEngine
}
/** Opens the Android TTS install intent (prompts user to install a TTS engine). */
@JavascriptInterface
fun installTtsEngine() {
val intent = Intent(TextToSpeech.Engine.ACTION_INSTALL_TTS_DATA)
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
context.startActivity(intent)
}
/**
* Returns available TTS voices as a JSON array.
* Each entry: { "id": "...", "name": "...", "language": "en-US" }
*/
@JavascriptInterface
fun getAvailableTtsVoices(): String {
val result = JSONArray()
try {
val latch = java.util.concurrent.CountDownLatch(1)
var engine: TextToSpeech? = null
engine = TextToSpeech(context) { status ->
if (status == TextToSpeech.SUCCESS) {
engine?.voices?.forEach { voice ->
val obj = JSONObject().apply {
put("id", voice.name)
put("name", voice.name)
put("language", voice.locale.toLanguageTag())
}
result.put(obj)
}
}
latch.countDown()
}
latch.await(3, java.util.concurrent.TimeUnit.SECONDS)
engine.shutdown()
} catch (_: Exception) {}
return result.toString()
}
}
@@ -1,909 +0,0 @@
package org.bocken.app
import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.PendingIntent
import android.Manifest
import android.app.Service
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.hardware.Sensor
import android.hardware.SensorEvent
import android.hardware.SensorEventListener
import android.hardware.SensorManager
import android.location.LocationListener
import android.location.LocationManager
import android.media.AudioAttributes
import android.media.AudioFocusRequest
import android.media.AudioManager
import android.os.Build
import android.os.Bundle
import android.os.Handler
import android.os.IBinder
import android.os.Looper
import android.speech.tts.TextToSpeech
import android.speech.tts.UtteranceProgressListener
import android.util.Log
import androidx.core.content.ContextCompat
import org.json.JSONArray
import org.json.JSONObject
import java.util.Collections
import java.util.Locale
import java.util.concurrent.ConcurrentLinkedQueue
import kotlin.math.*
private const val TAG = "BockenTTS"
class LocationForegroundService : Service(), TextToSpeech.OnInitListener, SensorEventListener {
private var locationManager: LocationManager? = null
private var locationListener: LocationListener? = null
private var notificationManager: NotificationManager? = null
// Step detector for cadence
private var sensorManager: SensorManager? = null
private var stepDetector: Sensor? = null
private val stepTimestamps = ConcurrentLinkedQueue<Long>()
private val CADENCE_WINDOW_MS = 15_000L // 15 second rolling window
private var pendingIntent: PendingIntent? = null
private var startTimeMs: Long = 0L
private var pausedAccumulatedMs: Long = 0L // total time spent paused
private var pausedSinceMs: Long = 0L // timestamp when last paused (0 = not paused)
private var lastLat: Double = Double.NaN
private var lastLng: Double = Double.NaN
private var lastTimestamp: Long = 0L
private var currentPaceMinKm: Double = 0.0
// TTS
private var tts: TextToSpeech? = null
private var ttsReady = false
private var ttsConfig: TtsConfig? = null
private var ttsTimeHandler: Handler? = null
private var ttsTimeRunnable: Runnable? = null
private var lastAnnouncementDistanceKm: Double = 0.0
private var lastAnnouncementTimeMs: Long = 0L
private var splitDistanceAtLastAnnouncement: Double = 0.0
private var splitTimeAtLastAnnouncement: Long = 0L
// Interval tracking
private var intervalSteps: List<IntervalStep> = emptyList()
private var currentIntervalIdx: Int = 0
private var intervalAccumulatedDistanceKm: Double = 0.0
private var intervalStartTimeMs: Long = 0L
private var intervalsComplete: Boolean = false
// Audio focus / ducking
private var audioManager: AudioManager? = null
private var audioFocusRequest: AudioFocusRequest? = null
private var hasAudioFocus = false
data class IntervalStep(
val label: String,
val durationType: String, // "distance" or "time"
val durationValue: Double // meters (distance) or seconds (time)
)
data class TtsConfig(
val enabled: Boolean = false,
val triggerType: String = "distance", // "distance" or "time"
val triggerValue: Double = 1.0, // km or minutes
val metrics: List<String> = listOf("totalTime", "totalDistance", "avgPace"),
val language: String = "en",
val voiceId: String? = null,
val ttsVolume: Float = 0.8f, // 0.01.0 relative TTS volume
val audioDuck: Boolean = false, // duck other audio during TTS
val intervals: List<IntervalStep> = emptyList()
) {
companion object {
fun fromJson(json: String): TtsConfig {
return try {
val obj = JSONObject(json)
val metricsArr = obj.optJSONArray("metrics")
val metrics = if (metricsArr != null) {
(0 until metricsArr.length()).map { metricsArr.getString(it) }
} else {
listOf("totalTime", "totalDistance", "avgPace")
}
val intervalsArr = obj.optJSONArray("intervals")
val intervals = if (intervalsArr != null) {
(0 until intervalsArr.length()).map { i ->
val step = intervalsArr.getJSONObject(i)
IntervalStep(
label = step.optString("label", ""),
durationType = step.optString("durationType", "time"),
durationValue = step.optDouble("durationValue", 0.0)
)
}
} else {
emptyList()
}
TtsConfig(
enabled = obj.optBoolean("enabled", false),
triggerType = obj.optString("triggerType", "distance"),
triggerValue = obj.optDouble("triggerValue", 1.0),
metrics = metrics,
language = obj.optString("language", "en"),
voiceId = obj.optString("voiceId", null),
ttsVolume = obj.optDouble("ttsVolume", 0.8).toFloat().coerceIn(0f, 1f),
audioDuck = obj.optBoolean("audioDuck", false),
intervals = intervals
)
} catch (_: Exception) {
TtsConfig()
}
}
}
}
companion object {
const val CHANNEL_ID = "gps_tracking"
const val NOTIFICATION_ID = 1001
const val MIN_TIME_MS = 3000L
const val MIN_DISTANCE_M = 0f
private val pointBuffer = Collections.synchronizedList(mutableListOf<JSONObject>())
var instance: LocationForegroundService? = null
private set
var tracking = false
private set
var paused = false
private set
var totalDistanceKm: Double = 0.0
private set
fun getIntervalState(): String {
val svc = instance ?: return "{}"
if (svc.intervalSteps.isEmpty()) return "{}"
val obj = JSONObject()
obj.put("currentIndex", svc.currentIntervalIdx)
obj.put("totalSteps", svc.intervalSteps.size)
obj.put("complete", svc.intervalsComplete)
if (!svc.intervalsComplete && svc.currentIntervalIdx < svc.intervalSteps.size) {
val step = svc.intervalSteps[svc.currentIntervalIdx]
obj.put("currentLabel", step.label)
val progress = when (step.durationType) {
"distance" -> {
val target = step.durationValue / 1000.0
if (target > 0) (svc.intervalAccumulatedDistanceKm / target).coerceIn(0.0, 1.0) else 0.0
}
"time" -> {
val target = step.durationValue * 1000.0
if (target > 0) ((System.currentTimeMillis() - svc.intervalStartTimeMs) / target).coerceIn(0.0, 1.0) else 0.0
}
else -> 0.0
}
obj.put("progress", progress)
} else {
obj.put("currentLabel", "")
obj.put("progress", 1.0)
}
return obj.toString()
}
fun drainPoints(): String {
val drained: List<JSONObject>
synchronized(pointBuffer) {
drained = ArrayList(pointBuffer)
pointBuffer.clear()
}
val arr = JSONArray()
for (p in drained) arr.put(p)
return arr.toString()
}
private fun haversineKm(lat1: Double, lng1: Double, lat2: Double, lng2: Double): Double {
val R = 6371.0
val dLat = Math.toRadians(lat2 - lat1)
val dLng = Math.toRadians(lng2 - lng1)
val a = sin(dLat / 2).pow(2) +
cos(Math.toRadians(lat1)) * cos(Math.toRadians(lat2)) * sin(dLng / 2).pow(2)
return 2 * R * asin(sqrt(a))
}
}
override fun onBind(intent: Intent?): IBinder? = null
// --- Step detector sensor callbacks ---
override fun onSensorChanged(event: SensorEvent?) {
if (event?.sensor?.type == Sensor.TYPE_STEP_DETECTOR) {
if (!paused) {
stepTimestamps.add(System.currentTimeMillis())
}
}
}
override fun onAccuracyChanged(sensor: Sensor?, accuracy: Int) {}
/**
* Compute cadence (steps per minute) from recent step detector events.
* Returns null if no steps detected in the rolling window.
*/
private fun computeCadence(): Double? {
val now = System.currentTimeMillis()
val cutoff = now - CADENCE_WINDOW_MS
// Prune old timestamps
while (stepTimestamps.peek()?.let { it < cutoff } == true) {
stepTimestamps.poll()
}
val count = stepTimestamps.size
if (count < 2) return null
val windowMs = now - (stepTimestamps.peek() ?: now)
if (windowMs < 2000) return null // need at least 2s of data
return count.toDouble() / (windowMs / 60000.0)
}
override fun onCreate() {
super.onCreate()
createNotificationChannel()
notificationManager = getSystemService(NotificationManager::class.java)
}
override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
val startPaused = intent?.getBooleanExtra("startPaused", false) ?: false
startTimeMs = System.currentTimeMillis()
pausedAccumulatedMs = 0L
pausedSinceMs = if (startPaused) startTimeMs else 0L
paused = startPaused
totalDistanceKm = 0.0
lastLat = Double.NaN
lastLng = Double.NaN
lastTimestamp = 0L
currentPaceMinKm = 0.0
// Parse TTS config from intent
val configJson = intent?.getStringExtra("ttsConfig") ?: "{}"
Log.d(TAG, "TTS config JSON: $configJson")
ttsConfig = TtsConfig.fromJson(configJson)
Log.d(TAG, "TTS enabled=${ttsConfig?.enabled}, trigger=${ttsConfig?.triggerType}/${ttsConfig?.triggerValue}, metrics=${ttsConfig?.metrics}")
// Initialize interval tracking
intervalSteps = ttsConfig?.intervals ?: emptyList()
currentIntervalIdx = 0
intervalAccumulatedDistanceKm = 0.0
intervalStartTimeMs = startTimeMs
intervalsComplete = false
if (intervalSteps.isNotEmpty()) {
Log.d(TAG, "Intervals configured: ${intervalSteps.size} steps")
intervalSteps.forEachIndexed { i, step ->
Log.d(TAG, " Step $i: ${step.label} ${step.durationValue} ${step.durationType}")
}
}
val notifIntent = Intent(this, MainActivity::class.java).apply {
addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP or Intent.FLAG_ACTIVITY_CLEAR_TOP)
}
pendingIntent = PendingIntent.getActivity(
this, 0, notifIntent,
PendingIntent.FLAG_UPDATE_CURRENT or PendingIntent.FLAG_IMMUTABLE
)
val notification = if (startPaused) {
buildNotification("Waiting to start...", "", "")
} else {
buildNotification("0:00", "0.00 km", "")
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
startForeground(NOTIFICATION_ID, notification, android.content.pm.ServiceInfo.FOREGROUND_SERVICE_TYPE_LOCATION)
} else {
startForeground(NOTIFICATION_ID, notification)
}
startLocationUpdates()
startStepDetector()
tracking = true
instance = this
// Initialize TTS *after* startForeground — using applicationContext for reliable engine binding
if (ttsConfig?.enabled == true) {
Log.d(TAG, "Initializing TTS engine (post-startForeground)...")
lastAnnouncementDistanceKm = 0.0
lastAnnouncementTimeMs = startTimeMs
splitDistanceAtLastAnnouncement = 0.0
splitTimeAtLastAnnouncement = startTimeMs
val dummyTts = TextToSpeech(applicationContext, null)
val engines = dummyTts.engines
Log.d(TAG, "Available TTS engines: ${engines.map { "${it.label} (${it.name})" }}")
dummyTts.shutdown()
if (engines.isNotEmpty()) {
val engineName = engines[0].name
Log.d(TAG, "Trying TTS with explicit engine: $engineName")
tts = TextToSpeech(applicationContext, this, engineName)
} else {
Log.e(TAG, "No TTS engines found on device!")
tts = TextToSpeech(applicationContext, this)
}
}
return START_STICKY
}
// --- TTS ---
/** Called when TTS is ready — either immediately (pre-warmed) or from onInit (cold start). */
private fun onTtsReady() {
val config = ttsConfig ?: return
Log.d(TAG, "TTS ready! triggerType=${config.triggerType}, triggerValue=${config.triggerValue}")
// Set specific voice if requested
if (!config.voiceId.isNullOrEmpty()) {
tts?.voices?.find { it.name == config.voiceId }?.let { voice ->
tts?.voice = voice
}
}
// Announce workout started
speakWithConfig("Workout started", "workout_started")
// Announce first interval step if intervals are configured (queue after "Workout started")
if (intervalSteps.isNotEmpty() && !intervalsComplete) {
val first = intervalSteps[0]
val durationText = if (first.durationType == "distance") {
"${first.durationValue.toInt()} meters"
} else {
val secs = first.durationValue.toInt()
if (secs >= 60) {
val m = secs / 60
val s = secs % 60
if (s > 0) "$m minutes $s seconds" else "$m minutes"
} else {
"$secs seconds"
}
}
speakWithConfig("${first.label}. $durationText", "interval_announcement", flush = false)
}
// Set up time-based trigger if configured
if (config.triggerType == "time") {
startTimeTrigger(config.triggerValue)
}
}
override fun onInit(status: Int) {
Log.d(TAG, "TTS onInit status=$status (SUCCESS=${TextToSpeech.SUCCESS})")
if (status == TextToSpeech.SUCCESS) {
val config = ttsConfig ?: return
val locale = Locale.forLanguageTag(config.language)
val langResult = tts?.setLanguage(locale)
Log.d(TAG, "TTS setLanguage($locale) result=$langResult")
ttsReady = true
onTtsReady()
} else {
Log.e(TAG, "TTS init FAILED with status=$status")
}
}
private fun requestAudioFocus() {
val config = ttsConfig ?: return
if (!config.audioDuck) return
if (hasAudioFocus) return
audioManager = audioManager ?: getSystemService(Context.AUDIO_SERVICE) as AudioManager
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val focusReq = AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK)
.setAudioAttributes(
AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.build()
)
.setOnAudioFocusChangeListener { }
.build()
audioFocusRequest = focusReq
val result = audioManager?.requestAudioFocus(focusReq)
hasAudioFocus = result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED
Log.d(TAG, "Audio focus request (duck): granted=$hasAudioFocus")
} else {
@Suppress("DEPRECATION")
val result = audioManager?.requestAudioFocus(
{ },
AudioManager.STREAM_MUSIC,
AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK
)
hasAudioFocus = result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED
}
}
private fun abandonAudioFocus() {
if (!hasAudioFocus) return
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
audioFocusRequest?.let { audioManager?.abandonAudioFocusRequest(it) }
} else {
@Suppress("DEPRECATION")
audioManager?.abandonAudioFocus { }
}
hasAudioFocus = false
}
/** Speak text with configured volume; requests/abandons audio focus for ducking. */
private fun speakWithConfig(text: String, utteranceId: String, flush: Boolean = true) {
if (!ttsReady) return
val config = ttsConfig ?: return
val queueMode = if (flush) TextToSpeech.QUEUE_FLUSH else TextToSpeech.QUEUE_ADD
requestAudioFocus()
val params = Bundle().apply {
putFloat(TextToSpeech.Engine.KEY_PARAM_VOLUME, config.ttsVolume)
}
// Set up listener to abandon audio focus after utterance completes
tts?.setOnUtteranceProgressListener(object : UtteranceProgressListener() {
override fun onStart(id: String?) {}
override fun onDone(id: String?) { abandonAudioFocus() }
@Deprecated("Deprecated in Java")
override fun onError(id: String?) { abandonAudioFocus() }
})
val result = tts?.speak(text, queueMode, params, utteranceId)
Log.d(TAG, "speakWithConfig($utteranceId) result=$result vol=${config.ttsVolume} duck=${config.audioDuck}")
}
private fun startTimeTrigger(intervalMinutes: Double) {
val intervalMs = (intervalMinutes * 60 * 1000).toLong()
Log.d(TAG, "Starting time trigger: every ${intervalMs}ms (${intervalMinutes} min)")
ttsTimeHandler = Handler(Looper.getMainLooper())
ttsTimeRunnable = object : Runnable {
override fun run() {
Log.d(TAG, "Time trigger fired!")
announceMetrics()
ttsTimeHandler?.postDelayed(this, intervalMs)
}
}
ttsTimeHandler?.postDelayed(ttsTimeRunnable!!, intervalMs)
}
// --- Pause / Resume ---
fun doPause() {
if (paused) return
paused = true
pausedSinceMs = System.currentTimeMillis()
Log.d(TAG, "Tracking paused")
// Pause TTS time trigger
ttsTimeRunnable?.let { ttsTimeHandler?.removeCallbacks(it) }
// Update notification to show paused state
val notification = buildNotification(formatElapsed(), "%.2f km".format(totalDistanceKm), "PAUSED")
notificationManager?.notify(NOTIFICATION_ID, notification)
}
fun doResume() {
if (!paused) return
// Accumulate paused duration
pausedAccumulatedMs += System.currentTimeMillis() - pausedSinceMs
pausedSinceMs = 0L
paused = false
Log.d(TAG, "Tracking resumed (total paused: ${pausedAccumulatedMs / 1000}s)")
// Reset last position so we don't accumulate drift during pause
lastLat = Double.NaN
lastLng = Double.NaN
lastTimestamp = 0L
// Resume TTS time trigger
val config = ttsConfig
if (ttsReady && config != null && config.triggerType == "time") {
val intervalMs = (config.triggerValue * 60 * 1000).toLong()
ttsTimeRunnable?.let { ttsTimeHandler?.postDelayed(it, intervalMs) }
}
updateNotification()
}
private fun checkDistanceTrigger() {
val config = ttsConfig ?: return
if (!ttsReady || config.triggerType != "distance") return
val sinceLast = totalDistanceKm - lastAnnouncementDistanceKm
if (sinceLast >= config.triggerValue) {
announceMetrics()
lastAnnouncementDistanceKm = totalDistanceKm
}
}
private fun checkIntervalProgress(segmentKm: Double) {
if (intervalsComplete || intervalSteps.isEmpty()) return
if (currentIntervalIdx >= intervalSteps.size) return
val step = intervalSteps[currentIntervalIdx]
val now = System.currentTimeMillis()
val complete = when (step.durationType) {
"distance" -> {
intervalAccumulatedDistanceKm += segmentKm
intervalAccumulatedDistanceKm >= step.durationValue / 1000.0
}
"time" -> {
(now - intervalStartTimeMs) >= step.durationValue * 1000
}
else -> false
}
if (complete) {
currentIntervalIdx++
intervalAccumulatedDistanceKm = 0.0
intervalStartTimeMs = now
if (currentIntervalIdx >= intervalSteps.size) {
intervalsComplete = true
Log.d(TAG, "All intervals complete!")
announceIntervalTransition("Intervals complete")
} else {
val next = intervalSteps[currentIntervalIdx]
val durationText = if (next.durationType == "distance") {
"${next.durationValue.toInt()} meters"
} else {
val secs = next.durationValue.toInt()
if (secs >= 60) {
val m = secs / 60
val s = secs % 60
if (s > 0) "$m minutes $s seconds" else "$m minutes"
} else {
"$secs seconds"
}
}
Log.d(TAG, "Interval transition: step ${currentIntervalIdx}/${intervalSteps.size}${next.label} $durationText")
announceIntervalTransition("${next.label}. $durationText")
}
updateNotification()
}
}
private fun announceIntervalTransition(text: String) {
if (!ttsReady) return
Log.d(TAG, "Interval announcement: $text")
speakWithConfig(text, "interval_announcement")
}
private fun announceMetrics() {
if (!ttsReady) return
val config = ttsConfig ?: return
val now = System.currentTimeMillis()
val activeSecs = activeElapsedSecs()
val parts = mutableListOf<String>()
for (metric in config.metrics) {
when (metric) {
"totalTime" -> {
val h = activeSecs / 3600
val m = (activeSecs % 3600) / 60
val s = activeSecs % 60
val timeStr = if (h > 0) {
"$h hours $m minutes"
} else {
"$m minutes $s seconds"
}
parts.add("Time: $timeStr")
}
"totalDistance" -> {
val distStr = "%.2f".format(totalDistanceKm)
parts.add("Distance: $distStr kilometers")
}
"avgPace" -> {
val elapsedMin = activeSecs / 60.0
if (totalDistanceKm > 0.01) {
val avgPace = elapsedMin / totalDistanceKm
val mins = avgPace.toInt()
val secs = ((avgPace - mins) * 60).toInt()
parts.add("Average pace: $mins minutes $secs seconds per kilometer")
}
}
"splitPace" -> {
val splitDist = totalDistanceKm - splitDistanceAtLastAnnouncement
val splitTimeMin = (now - splitTimeAtLastAnnouncement) / 60000.0
if (splitDist > 0.01) {
val splitPace = splitTimeMin / splitDist
val mins = splitPace.toInt()
val secs = ((splitPace - mins) * 60).toInt()
parts.add("Split pace: $mins minutes $secs seconds per kilometer")
}
}
"currentPace" -> {
if (currentPaceMinKm > 0 && currentPaceMinKm <= 60) {
val mins = currentPaceMinKm.toInt()
val secs = ((currentPaceMinKm - mins) * 60).toInt()
parts.add("Current pace: $mins minutes $secs seconds per kilometer")
}
}
}
}
// Update split tracking
splitDistanceAtLastAnnouncement = totalDistanceKm
splitTimeAtLastAnnouncement = now
lastAnnouncementTimeMs = now
if (parts.isNotEmpty()) {
val text = parts.joinToString(". ")
Log.d(TAG, "Announcing: $text")
speakWithConfig(text, "workout_announcement")
} else {
Log.d(TAG, "announceMetrics: no parts to announce")
}
}
// --- Notification / Location (unchanged) ---
private fun formatPace(paceMinKm: Double): String {
if (paceMinKm <= 0 || paceMinKm > 60) return ""
val mins = paceMinKm.toInt()
val secs = ((paceMinKm - mins) * 60).toInt()
return "%d:%02d /km".format(mins, secs)
}
private fun buildNotification(elapsed: String, distance: String, pace: String): Notification {
val parts = mutableListOf(elapsed, distance)
if (pace.isNotEmpty()) parts.add(pace)
val text = parts.joinToString(" · ")
return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
Notification.Builder(this, CHANNEL_ID)
.setContentTitle("Bocken — Tracking GPS for active Workout")
.setContentText(text)
.setSmallIcon(android.R.drawable.ic_menu_mylocation)
.setContentIntent(pendingIntent)
.setOngoing(true)
.build()
} else {
@Suppress("DEPRECATION")
Notification.Builder(this)
.setContentTitle("Bocken — Tracking GPS for active Workout")
.setContentText(text)
.setSmallIcon(android.R.drawable.ic_menu_mylocation)
.setContentIntent(pendingIntent)
.setOngoing(true)
.build()
}
}
/** Returns active (non-paused) elapsed time in seconds. */
private fun activeElapsedSecs(): Long {
val now = System.currentTimeMillis()
val totalPaused = pausedAccumulatedMs + if (pausedSinceMs > 0) (now - pausedSinceMs) else 0L
return (now - startTimeMs - totalPaused) / 1000
}
private fun formatElapsed(): String {
val secs = activeElapsedSecs()
val h = secs / 3600
val m = (secs % 3600) / 60
val s = secs % 60
return if (h > 0) {
"%d:%02d:%02d".format(h, m, s)
} else {
"%d:%02d".format(m, s)
}
}
private fun updateNotification() {
val paceStr = if (intervalSteps.isNotEmpty() && !intervalsComplete && currentIntervalIdx < intervalSteps.size) {
val step = intervalSteps[currentIntervalIdx]
"${step.label} (${currentIntervalIdx + 1}/${intervalSteps.size})"
} else if (intervalsComplete) {
"Intervals done"
} else {
formatPace(currentPaceMinKm)
}
val notification = buildNotification(
formatElapsed(),
"%.2f km".format(totalDistanceKm),
paceStr
)
notificationManager?.notify(NOTIFICATION_ID, notification)
}
private fun hasActivityRecognitionPermission(): Boolean {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.Q) return true
return ContextCompat.checkSelfPermission(
this, Manifest.permission.ACTIVITY_RECOGNITION
) == PackageManager.PERMISSION_GRANTED
}
private fun startStepDetector() {
if (!hasActivityRecognitionPermission()) {
Log.d(TAG, "Step detector skipped — ACTIVITY_RECOGNITION not granted")
return
}
if (stepDetector != null) return // already registered
if (sensorManager == null) {
sensorManager = getSystemService(Context.SENSOR_SERVICE) as SensorManager
}
stepDetector = sensorManager?.getDefaultSensor(Sensor.TYPE_STEP_DETECTOR)
if (stepDetector != null) {
sensorManager?.registerListener(this, stepDetector, SensorManager.SENSOR_DELAY_FASTEST)
Log.d(TAG, "Step detector sensor registered")
} else {
Log.d(TAG, "Step detector sensor not available on this device")
}
}
/** Called from MainActivity when ACTIVITY_RECOGNITION is granted mid-session. */
fun onActivityRecognitionGranted() {
Log.d(TAG, "ACTIVITY_RECOGNITION granted — retrying step detector registration")
startStepDetector()
}
@Suppress("MissingPermission")
private fun startLocationUpdates() {
locationManager = getSystemService(Context.LOCATION_SERVICE) as LocationManager
locationListener = LocationListener { location ->
val lat = location.latitude
val lng = location.longitude
val now = location.time
// Always buffer GPS points (for track drawing) even when paused
val cadence = computeCadence()
val point = JSONObject().apply {
put("lat", lat)
put("lng", lng)
if (location.hasAltitude()) put("altitude", location.altitude)
if (location.hasSpeed()) put("speed", location.speed.toDouble())
if (cadence != null) put("cadence", cadence)
put("timestamp", location.time)
}
pointBuffer.add(point)
// Skip distance/pace accumulation and TTS triggers when paused
if (paused) return@LocationListener
// Accumulate distance and compute pace
if (!lastLat.isNaN()) {
val segmentKm = haversineKm(lastLat, lastLng, lat, lng)
totalDistanceKm += segmentKm
if (segmentKm > 0.001 && lastTimestamp > 0) {
val dtMin = (now - lastTimestamp) / 60000.0
currentPaceMinKm = dtMin / segmentKm
}
// Check interval progress with this segment's distance
checkIntervalProgress(segmentKm)
} else {
// First point — check time-based intervals even with no distance
checkIntervalProgress(0.0)
}
lastLat = lat
lastLng = lng
lastTimestamp = now
updateNotification()
// Check distance-based TTS trigger
checkDistanceTrigger()
}
locationManager?.requestLocationUpdates(
LocationManager.GPS_PROVIDER,
MIN_TIME_MS,
MIN_DISTANCE_M,
locationListener!!
)
}
/**
* Build the finish summary text from current stats.
* Must be called while service state is still valid (before clearing fields).
*/
private fun buildFinishSummaryText(): String? {
val config = ttsConfig ?: return null
if (!config.enabled) return null
val activeSecs = activeElapsedSecs()
val h = activeSecs / 3600
val m = (activeSecs % 3600) / 60
val s = activeSecs % 60
val parts = mutableListOf<String>()
parts.add("Workout finished")
val timeStr = if (h > 0) "$h hours $m minutes" else "$m minutes $s seconds"
parts.add("Total time: $timeStr")
if (totalDistanceKm > 0.01) {
parts.add("Distance: ${"%.2f".format(totalDistanceKm)} kilometers")
}
if (totalDistanceKm > 0.01) {
val avgPace = (activeSecs / 60.0) / totalDistanceKm
val mins = avgPace.toInt()
val secs = ((avgPace - mins) * 60).toInt()
parts.add("Average pace: $mins minutes $secs seconds per kilometer")
}
return parts.joinToString(". ")
}
override fun onDestroy() {
// Snapshot summary text while stats are still valid
val summaryText = buildFinishSummaryText()
val config = ttsConfig
// Stop time-based TTS triggers
ttsTimeRunnable?.let { ttsTimeHandler?.removeCallbacks(it) }
ttsTimeHandler = null
ttsTimeRunnable = null
// Hand off the existing TTS instance for the finish summary.
// We do NOT call tts?.stop() or tts?.shutdown() here — the utterance
// listener will clean up after the summary finishes speaking.
val finishTts = tts
tts = null
ttsReady = false
tracking = false
paused = false
instance = null
locationListener?.let { locationManager?.removeUpdates(it) }
locationListener = null
locationManager = null
sensorManager?.unregisterListener(this)
sensorManager = null
stepDetector = null
stepTimestamps.clear()
abandonAudioFocus()
// Speak finish summary using the handed-off TTS instance (already initialized)
if (summaryText != null && finishTts != null && config != null) {
Log.d(TAG, "Finish summary: $summaryText")
// Audio focus for ducking
val am = getSystemService(Context.AUDIO_SERVICE) as AudioManager
var focusReq: AudioFocusRequest? = null
if (config.audioDuck && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
focusReq = AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK)
.setAudioAttributes(
AudioAttributes.Builder()
.setUsage(AudioAttributes.USAGE_ASSISTANCE_NAVIGATION_GUIDANCE)
.setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
.build()
)
.setOnAudioFocusChangeListener { }
.build()
am.requestAudioFocus(focusReq)
}
finishTts.setOnUtteranceProgressListener(object : UtteranceProgressListener() {
override fun onStart(id: String?) {}
override fun onDone(id: String?) { cleanup() }
@Deprecated("Deprecated in Java")
override fun onError(id: String?) { cleanup() }
private fun cleanup() {
if (focusReq != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
am.abandonAudioFocusRequest(focusReq)
}
finishTts.shutdown()
}
})
val params = Bundle().apply {
putFloat(TextToSpeech.Engine.KEY_PARAM_VOLUME, config.ttsVolume)
}
finishTts.speak(summaryText, TextToSpeech.QUEUE_FLUSH, params, "workout_finished")
} else {
finishTts?.shutdown()
}
super.onDestroy()
}
private fun createNotificationChannel() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
val channel = NotificationChannel(
CHANNEL_ID,
"GPS Tracking",
NotificationManager.IMPORTANCE_DEFAULT
).apply {
description = "Shows while GPS is recording your workout"
}
val manager = getSystemService(NotificationManager::class.java)
manager?.createNotificationChannel(channel)
}
}
}
@@ -1,30 +0,0 @@
package org.bocken.app
import android.content.pm.PackageManager
import android.os.Bundle
import android.webkit.WebView
import androidx.activity.enableEdgeToEdge
class MainActivity : TauriActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
enableEdgeToEdge()
super.onCreate(savedInstanceState)
}
override fun onWebViewCreate(webView: WebView) {
webView.addJavascriptInterface(AndroidBridge(this), "AndroidBridge")
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
if (requestCode == AndroidBridge.REQ_ACTIVITY_RECOGNITION &&
grantResults.isNotEmpty() &&
grantResults[0] == PackageManager.PERMISSION_GRANTED) {
LocationForegroundService.instance?.onActivityRecognitionGranted()
}
}
}
@@ -1,30 +0,0 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
@@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>
@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@mipmap/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@mipmap/ic_launcher_background"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 385 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 380 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.5 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 388 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 395 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 13 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.8 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 7.7 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 405 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 19 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 11 KiB

@@ -1,6 +0,0 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.bocken" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Customize your theme here. -->
</style>
</resources>
@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>
@@ -1,4 +0,0 @@
<resources>
<string name="app_name">Bocken</string>
<string name="main_activity_title">Bocken</string>
</resources>
@@ -1,6 +0,0 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.bocken" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Customize your theme here. -->
</style>
</resources>
@@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<paths xmlns:android="http://schemas.android.com/apk/res/android">
<external-path name="my_images" path="." />
<cache-path name="my_cache_images" path="." />
</paths>
-22
View File
@@ -1,22 +0,0 @@
buildscript {
repositories {
google()
mavenCentral()
}
dependencies {
classpath("com.android.tools.build:gradle:8.11.0")
classpath("org.jetbrains.kotlin:kotlin-gradle-plugin:1.9.25")
}
}
allprojects {
repositories {
google()
mavenCentral()
}
}
tasks.register("clean").configure {
delete("build")
}
@@ -1,23 +0,0 @@
plugins {
`kotlin-dsl`
}
gradlePlugin {
plugins {
create("pluginsForCoolKids") {
id = "rust"
implementationClass = "RustPlugin"
}
}
}
repositories {
google()
mavenCentral()
}
dependencies {
compileOnly(gradleApi())
implementation("com.android.tools.build:gradle:8.11.0")
}
@@ -1,68 +0,0 @@
import java.io.File
import org.apache.tools.ant.taskdefs.condition.Os
import org.gradle.api.DefaultTask
import org.gradle.api.GradleException
import org.gradle.api.logging.LogLevel
import org.gradle.api.tasks.Input
import org.gradle.api.tasks.TaskAction
open class BuildTask : DefaultTask() {
@Input
var rootDirRel: String? = null
@Input
var target: String? = null
@Input
var release: Boolean? = null
@TaskAction
fun assemble() {
val executable = """pnpm""";
try {
runTauriCli(executable)
} catch (e: Exception) {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// Try different Windows-specific extensions
val fallbacks = listOf(
"$executable.exe",
"$executable.cmd",
"$executable.bat",
)
var lastException: Exception = e
for (fallback in fallbacks) {
try {
runTauriCli(fallback)
return
} catch (fallbackException: Exception) {
lastException = fallbackException
}
}
throw lastException
} else {
throw e;
}
}
}
fun runTauriCli(executable: String) {
val rootDirRel = rootDirRel ?: throw GradleException("rootDirRel cannot be null")
val target = target ?: throw GradleException("target cannot be null")
val release = release ?: throw GradleException("release cannot be null")
val args = listOf("tauri", "android", "android-studio-script");
project.exec {
workingDir(File(project.projectDir, rootDirRel))
executable(executable)
args(args)
if (project.logger.isEnabled(LogLevel.DEBUG)) {
args("-vv")
} else if (project.logger.isEnabled(LogLevel.INFO)) {
args("-v")
}
if (release) {
args("--release")
}
args(listOf("--target", target))
}.assertNormalExitValue()
}
}
@@ -1,85 +0,0 @@
import com.android.build.api.dsl.ApplicationExtension
import org.gradle.api.DefaultTask
import org.gradle.api.Plugin
import org.gradle.api.Project
import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.get
const val TASK_GROUP = "rust"
open class Config {
lateinit var rootDirRel: String
}
open class RustPlugin : Plugin<Project> {
private lateinit var config: Config
override fun apply(project: Project) = with(project) {
config = extensions.create("rust", Config::class.java)
val defaultAbiList = listOf("arm64-v8a", "armeabi-v7a", "x86", "x86_64");
val abiList = (findProperty("abiList") as? String)?.split(',') ?: defaultAbiList
val defaultArchList = listOf("arm64", "arm", "x86", "x86_64");
val archList = (findProperty("archList") as? String)?.split(',') ?: defaultArchList
val targetsList = (findProperty("targetList") as? String)?.split(',') ?: listOf("aarch64", "armv7", "i686", "x86_64")
extensions.configure<ApplicationExtension> {
@Suppress("UnstableApiUsage")
flavorDimensions.add("abi")
productFlavors {
create("universal") {
dimension = "abi"
ndk {
abiFilters += abiList
}
}
defaultArchList.forEachIndexed { index, arch ->
create(arch) {
dimension = "abi"
ndk {
abiFilters.add(defaultAbiList[index])
}
}
}
}
}
afterEvaluate {
for (profile in listOf("debug", "release")) {
val profileCapitalized = profile.replaceFirstChar { it.uppercase() }
val buildTask = tasks.maybeCreate(
"rustBuildUniversal$profileCapitalized",
DefaultTask::class.java
).apply {
group = TASK_GROUP
description = "Build dynamic library in $profile mode for all targets"
}
tasks["mergeUniversal${profileCapitalized}JniLibFolders"].dependsOn(buildTask)
for (targetPair in targetsList.withIndex()) {
val targetName = targetPair.value
val targetArch = archList[targetPair.index]
val targetArchCapitalized = targetArch.replaceFirstChar { it.uppercase() }
val targetBuildTask = project.tasks.maybeCreate(
"rustBuild$targetArchCapitalized$profileCapitalized",
BuildTask::class.java
).apply {
group = TASK_GROUP
description = "Build dynamic library in $profile mode for $targetArch"
rootDirRel = config.rootDirRel
target = targetName
release = profile == "release"
}
buildTask.dependsOn(targetBuildTask)
tasks["merge$targetArchCapitalized${profileCapitalized}JniLibFolders"].dependsOn(
targetBuildTask
)
}
}
}
}
}
-24
View File
@@ -1,24 +0,0 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app"s APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
android.nonFinalResIds=false
Binary file not shown.
@@ -1,6 +0,0 @@
#Tue May 10 19:22:52 CST 2022
distributionBase=GRADLE_USER_HOME
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.3-bin.zip
distributionPath=wrapper/dists
zipStorePath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
-185
View File
@@ -1,185 +0,0 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"
-89
View File
@@ -1,89 +0,0 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
-3
View File
@@ -1,3 +0,0 @@
include ':app'
apply from: 'tauri.settings.gradle'
File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff Show More