Compare commits
41 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6592d1662c | ||
|
|
59e8cdb19a | ||
|
|
1118392811 | ||
|
|
baa403efa2 | ||
|
|
0ed1112b20 | ||
|
|
16a12ede4d | ||
|
|
b9b69cee44 | ||
|
|
5b27d501af | ||
|
|
a85636d5db | ||
|
|
29d0eeb7e8 | ||
|
|
fabe9724c2 | ||
|
|
4c9bb4adde | ||
|
|
22b6ee824e | ||
|
|
3918bc8dc3 | ||
|
|
5825b859b7 | ||
|
|
1642434a79 | ||
|
|
02705dc6ed | ||
|
|
7413313100 | ||
|
|
b11f4ab6b4 | ||
|
|
3e4b1c0484 | ||
|
|
b5bc6ff3de | ||
|
|
8d4bba7964 | ||
|
|
2e5fe54615 | ||
|
|
81265510ef | ||
|
|
4d3c093612 | ||
|
|
937ba052fc | ||
|
|
479d201da9 | ||
|
|
1242cc7eb3 | ||
|
|
0b6dbeb042 | ||
|
|
c06877069f | ||
|
|
261c738fc0 | ||
|
|
5528abe4b0 | ||
|
|
09cdda2a07 | ||
|
|
718bfa6691 | ||
|
|
e11e866e27 | ||
|
|
23345e22e6 | ||
|
|
c7b3495a23 | ||
|
|
83a5910a59 | ||
|
|
0f6639aae7 | ||
|
|
88a25bc33e | ||
|
|
73ad4ece49 |
83
.env.example
83
.env.example
@@ -1,83 +0,0 @@
|
||||
# libnovel scraper — environment overrides
|
||||
# Copy to .env and adjust values; do NOT commit this file with real secrets.
|
||||
|
||||
# ── Service ports (host-side) ─────────────────────────────────────────────────
|
||||
# Port the scraper HTTP API listens on (default 8080)
|
||||
SCRAPER_PORT=8080
|
||||
|
||||
# Port PocketBase listens on (default 8090)
|
||||
POCKETBASE_PORT=8090
|
||||
|
||||
# Port MinIO S3 API listens on (default 9000)
|
||||
MINIO_PORT=9000
|
||||
|
||||
# Port MinIO web console listens on (default 9001)
|
||||
MINIO_CONSOLE_PORT=9001
|
||||
|
||||
# Port Browserless Chrome listens on (default 3030)
|
||||
BROWSERLESS_PORT=3030
|
||||
|
||||
# Port the SvelteKit UI listens on (default 3000)
|
||||
UI_PORT=3000
|
||||
|
||||
# ── Browserless ───────────────────────────────────────────────────────────────
|
||||
# Browserless API token (leave empty to disable auth)
|
||||
BROWSERLESS_TOKEN=
|
||||
|
||||
# Number of concurrent browser sessions in Browserless
|
||||
BROWSERLESS_CONCURRENT=10
|
||||
|
||||
# Queue depth before Browserless returns 429
|
||||
BROWSERLESS_QUEUED=100
|
||||
|
||||
# Per-session timeout in ms
|
||||
BROWSERLESS_TIMEOUT=60000
|
||||
|
||||
# Optional webhook URL for Browserless error alerts (leave empty to disable)
|
||||
ERROR_ALERT_URL=
|
||||
|
||||
# Which Browserless strategy the scraper uses: content | scrape | cdp | direct
|
||||
BROWSERLESS_STRATEGY=direct
|
||||
|
||||
# ── Scraper ───────────────────────────────────────────────────────────────────
|
||||
# Chapter worker goroutines (0 = NumCPU inside the container)
|
||||
SCRAPER_WORKERS=0
|
||||
|
||||
# Host path to mount as the static output directory
|
||||
STATIC_ROOT=./static/books
|
||||
|
||||
# ── Kokoro-FastAPI TTS ────────────────────────────────────────────────────────
|
||||
# Base URL for the Kokoro-FastAPI service. When running via docker-compose the
|
||||
# default (http://kokoro:8880) is wired in automatically; override here only if
|
||||
# you are pointing at an external or GPU instance.
|
||||
KOKORO_URL=http://kokoro:8880
|
||||
|
||||
# Default voice used for chapter narration.
|
||||
# Single voices: af_bella, af_sky, af_heart, am_adam, …
|
||||
# Mixed voices: af_bella+af_sky or af_bella(2)+af_sky(1) (weighted blend)
|
||||
KOKORO_VOICE=af_bella
|
||||
|
||||
# ── MinIO / S3 object storage ─────────────────────────────────────────────────
|
||||
MINIO_ROOT_USER=admin
|
||||
MINIO_ROOT_PASSWORD=changeme123
|
||||
MINIO_BUCKET_CHAPTERS=libnovel-chapters
|
||||
MINIO_BUCKET_AUDIO=libnovel-audio
|
||||
|
||||
# ── PocketBase ────────────────────────────────────────────────────────────────
|
||||
# Admin credentials (used by scraper + UI server-side)
|
||||
POCKETBASE_ADMIN_EMAIL=admin@libnovel.local
|
||||
POCKETBASE_ADMIN_PASSWORD=changeme123
|
||||
|
||||
# ── SvelteKit UI ─────────────────────────────────────────────────────────────
|
||||
# Internal URL the SvelteKit server uses to reach the scraper API.
|
||||
# In docker-compose this is http://scraper:8080 (wired automatically).
|
||||
# Override here only if running the UI outside of docker-compose.
|
||||
SCRAPER_API_URL=http://localhost:8080
|
||||
|
||||
# Internal URL the SvelteKit server uses to reach PocketBase.
|
||||
# In docker-compose this is http://pocketbase:8090 (wired automatically).
|
||||
POCKETBASE_URL=http://localhost:8090
|
||||
|
||||
# Public MinIO URL reachable from the browser (for audio/presigned URLs).
|
||||
# In production, point this at your MinIO reverse-proxy or CDN domain.
|
||||
PUBLIC_MINIO_PUBLIC_URL=http://localhost:9000
|
||||
@@ -1,76 +0,0 @@
|
||||
name: CI / Scraper
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main", "master", "v2"]
|
||||
paths:
|
||||
- "scraper/**"
|
||||
- ".gitea/workflows/ci-scraper.yaml"
|
||||
pull_request:
|
||||
branches: ["main", "master", "v2"]
|
||||
paths:
|
||||
- "scraper/**"
|
||||
- ".gitea/workflows/ci-scraper.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ gitea.workflow }}-${{ gitea.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── lint & vet ───────────────────────────────────────────────────────────────
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: scraper/go.mod
|
||||
cache-dependency-path: scraper/go.sum
|
||||
|
||||
- name: go vet
|
||||
working-directory: scraper
|
||||
run: |
|
||||
go vet ./...
|
||||
go vet -tags integration ./...
|
||||
|
||||
# ── tests ────────────────────────────────────────────────────────────────────
|
||||
test:
|
||||
name: Test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: scraper/go.mod
|
||||
cache-dependency-path: scraper/go.sum
|
||||
|
||||
- name: Run tests
|
||||
working-directory: scraper
|
||||
run: go test -short -race -count=1 -timeout=60s ./...
|
||||
|
||||
# ── push to Docker Hub ───────────────────────────────────────────────────────
|
||||
docker:
|
||||
name: Docker Push
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, test]
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: scraper
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-scraper:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-scraper:${{ gitea.sha }}
|
||||
@@ -1,67 +0,0 @@
|
||||
name: CI / UI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main", "master", "v2"]
|
||||
paths:
|
||||
- "ui/**"
|
||||
- ".gitea/workflows/ci-ui.yaml"
|
||||
pull_request:
|
||||
branches: ["main", "master", "v2"]
|
||||
paths:
|
||||
- "ui/**"
|
||||
- ".gitea/workflows/ci-ui.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ gitea.workflow }}-${{ gitea.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── type-check & build ───────────────────────────────────────────────────────
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ui
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
cache: npm
|
||||
cache-dependency-path: ui/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npm run check
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
# ── push to Docker Hub ───────────────────────────────────────────────────────
|
||||
docker:
|
||||
name: Docker Push
|
||||
runs-on: ubuntu-latest
|
||||
needs: build
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ui
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-ui:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-ui:${{ gitea.sha }}
|
||||
191
.gitea/workflows/ci-v3.yaml
Normal file
191
.gitea/workflows/ci-v3.yaml
Normal file
@@ -0,0 +1,191 @@
|
||||
name: CI / v3
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["main", "master"]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "ui/**"
|
||||
- "caddy/**"
|
||||
- "docker-compose.yml"
|
||||
- ".gitea/workflows/ci-v3.yaml"
|
||||
pull_request:
|
||||
branches: ["main", "master"]
|
||||
paths:
|
||||
- "backend/**"
|
||||
- "ui/**"
|
||||
- "caddy/**"
|
||||
- "docker-compose.yml"
|
||||
- ".gitea/workflows/ci-v3.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ${{ gitea.workflow }}-${{ gitea.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── backend: vet & test ───────────────────────────────────────────────────────
|
||||
test-backend:
|
||||
name: Test backend
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: backend/go.mod
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: go vet
|
||||
working-directory: backend
|
||||
run: go vet ./...
|
||||
|
||||
- name: Run tests
|
||||
working-directory: backend
|
||||
run: go test -short -race -count=1 -timeout=60s ./...
|
||||
|
||||
# ── ui: type-check & build ────────────────────────────────────────────────────
|
||||
check-ui:
|
||||
name: Check ui
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ui
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
cache: npm
|
||||
cache-dependency-path: ui/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npm run check
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
# ── docker: backend ───────────────────────────────────────────────────────────
|
||||
docker-backend:
|
||||
name: Docker / backend
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-backend]
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: backend
|
||||
target: backend
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-backend:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-backend:${{ gitea.sha }}
|
||||
build-args: |
|
||||
VERSION=${{ gitea.sha }}
|
||||
COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-backend:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: runner ────────────────────────────────────────────────────────────
|
||||
docker-runner:
|
||||
name: Docker / runner
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-backend]
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: backend
|
||||
target: runner
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-runner:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-runner:${{ gitea.sha }}
|
||||
build-args: |
|
||||
VERSION=${{ gitea.sha }}
|
||||
COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-runner:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: ui ────────────────────────────────────────────────────────────────
|
||||
docker-ui:
|
||||
name: Docker / ui
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-ui]
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ui
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-ui:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-ui:${{ gitea.sha }}
|
||||
build-args: |
|
||||
BUILD_VERSION=${{ gitea.sha }}
|
||||
BUILD_COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-ui:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: caddy ─────────────────────────────────────────────────────────────
|
||||
docker-caddy:
|
||||
name: Docker / caddy
|
||||
runs-on: ubuntu-latest
|
||||
if: gitea.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: caddy
|
||||
push: true
|
||||
tags: |
|
||||
${{ secrets.DOCKER_USER }}/libnovel-caddy:latest
|
||||
${{ secrets.DOCKER_USER }}/libnovel-caddy:${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-caddy:latest
|
||||
cache-to: type=inline
|
||||
@@ -1,63 +0,0 @@
|
||||
name: iOS CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: ["v2", "main"]
|
||||
paths:
|
||||
- "ios/**"
|
||||
- "justfile"
|
||||
- ".gitea/workflows/ios.yaml"
|
||||
- ".gitea/workflows/ios-release.yaml"
|
||||
pull_request:
|
||||
branches: ["v2", "main"]
|
||||
paths:
|
||||
- "ios/**"
|
||||
- "justfile"
|
||||
- ".gitea/workflows/ios.yaml"
|
||||
- ".gitea/workflows/ios-release.yaml"
|
||||
|
||||
concurrency:
|
||||
group: ios-macos-runner
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── build (simulator) ─────────────────────────────────────────────────────
|
||||
build:
|
||||
name: Build
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install just
|
||||
run: command -v just || brew install just
|
||||
|
||||
- name: Build (simulator)
|
||||
env:
|
||||
USER: runner
|
||||
run: just ios-build
|
||||
|
||||
# ── unit tests ────────────────────────────────────────────────────────────
|
||||
test:
|
||||
name: Test
|
||||
runs-on: macos-latest
|
||||
needs: build
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install just
|
||||
run: command -v just || brew install just
|
||||
|
||||
- name: Run unit tests
|
||||
env:
|
||||
USER: runner
|
||||
run: just ios-test
|
||||
|
||||
- name: Upload test results
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-results
|
||||
path: ios/LibNovel/test-results.xml
|
||||
retention-days: 7
|
||||
@@ -1,68 +0,0 @@
|
||||
name: Release / UI
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*"
|
||||
|
||||
concurrency:
|
||||
group: ${{ gitea.workflow }}-${{ gitea.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── type-check & build ───────────────────────────────────────────────────────
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ui
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
cache: npm
|
||||
cache-dependency-path: ui/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npm run check
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
# ── docker build & push ──────────────────────────────────────────────────────
|
||||
docker:
|
||||
name: Docker
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USER }}/libnovel-ui
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ui
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
226
.gitea/workflows/release-v3.yaml
Normal file
226
.gitea/workflows/release-v3.yaml
Normal file
@@ -0,0 +1,226 @@
|
||||
name: Release / v3
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "v*" # e.g. v1.0.0, v1.2.3
|
||||
|
||||
concurrency:
|
||||
group: ${{ gitea.workflow }}-${{ gitea.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# ── backend: vet & test ───────────────────────────────────────────────────────
|
||||
test-backend:
|
||||
name: Test backend
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version-file: backend/go.mod
|
||||
cache-dependency-path: backend/go.sum
|
||||
|
||||
- name: go vet
|
||||
working-directory: backend
|
||||
run: go vet ./...
|
||||
|
||||
- name: Run tests
|
||||
working-directory: backend
|
||||
run: go test -short -race -count=1 -timeout=60s ./...
|
||||
|
||||
# ── ui: type-check & build ────────────────────────────────────────────────────
|
||||
check-ui:
|
||||
name: Check ui
|
||||
runs-on: ubuntu-latest
|
||||
defaults:
|
||||
run:
|
||||
working-directory: ui
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
cache: npm
|
||||
cache-dependency-path: ui/package-lock.json
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
|
||||
- name: Type check
|
||||
run: npm run check
|
||||
|
||||
- name: Build
|
||||
run: npm run build
|
||||
|
||||
# ── docker: backend ───────────────────────────────────────────────────────────
|
||||
docker-backend:
|
||||
name: Docker / backend
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-backend]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USER }}/libnovel-backend
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: backend
|
||||
target: backend
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
VERSION=${{ steps.meta.outputs.version }}
|
||||
COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-backend:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: runner ────────────────────────────────────────────────────────────
|
||||
docker-runner:
|
||||
name: Docker / runner
|
||||
runs-on: ubuntu-latest
|
||||
needs: [test-backend]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USER }}/libnovel-runner
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: backend
|
||||
target: runner
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
VERSION=${{ steps.meta.outputs.version }}
|
||||
COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-runner:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: ui ────────────────────────────────────────────────────────────────
|
||||
docker-ui:
|
||||
name: Docker / ui
|
||||
runs-on: ubuntu-latest
|
||||
needs: [check-ui]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USER }}/libnovel-ui
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ui
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
build-args: |
|
||||
BUILD_VERSION=${{ steps.meta.outputs.version }}
|
||||
BUILD_COMMIT=${{ gitea.sha }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-ui:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── docker: caddy ─────────────────────────────────────────────────────────────
|
||||
docker-caddy:
|
||||
name: Docker / caddy
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USER }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ secrets.DOCKER_USER }}/libnovel-caddy
|
||||
tags: |
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: caddy
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-caddy:latest
|
||||
cache-to: type=inline
|
||||
|
||||
# ── Gitea release ─────────────────────────────────────────────────────────────
|
||||
release:
|
||||
name: Gitea Release
|
||||
runs-on: ubuntu-latest
|
||||
needs: [docker-backend, docker-runner, docker-ui, docker-caddy]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Create release
|
||||
uses: actions/gitea-release-action@v1
|
||||
with:
|
||||
token: ${{ secrets.GITEA_TOKEN }}
|
||||
generate_release_notes: true
|
||||
12
.gitignore
vendored
12
.gitignore
vendored
@@ -5,16 +5,16 @@
|
||||
/dist/
|
||||
|
||||
# ── Compiled binaries ──────────────────────────────────────────────────────────
|
||||
scraper/bin/
|
||||
scraper/scraper
|
||||
backend/bin/
|
||||
|
||||
# ── Scraped output (large, machine-generated) ──────────────────────────────────
|
||||
|
||||
/static/books
|
||||
# ── Environment & secrets ──────────────────────────────────────────────────────
|
||||
# Secrets are managed by Doppler — never commit .env files.
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
.env.local
|
||||
|
||||
# ── CrowdSec — generated bouncer API key ──────────────────────────────────────
|
||||
crowdsec/.crowdsec.env
|
||||
|
||||
# ── OS artefacts ───────────────────────────────────────────────────────────────
|
||||
.DS_Store
|
||||
|
||||
156
.opencode/skills/ios-ux/SKILL.md
Normal file
156
.opencode/skills/ios-ux/SKILL.md
Normal file
@@ -0,0 +1,156 @@
|
||||
---
|
||||
name: ios-ux
|
||||
description: iOS/SwiftUI UI & UX review and implementation guidelines for LibNovel. Enforces Apple HIG, iOS 17+ APIs, spring animations, haptics, accessibility, performance, and offline handling. Load this skill for any iOS view work.
|
||||
compatibility: opencode
|
||||
---
|
||||
|
||||
# iOS UI/UX Skill — LibNovel
|
||||
|
||||
Load this skill whenever working on SwiftUI views in `ios/`. It defines design standards, review process for screenshots, and implementation rules.
|
||||
|
||||
---
|
||||
|
||||
## Screenshot Review Process
|
||||
|
||||
When the user provides a screenshot of the app:
|
||||
|
||||
1. **Analyze first** — identify specific UI/UX issues across these categories:
|
||||
- Visual hierarchy and spacing
|
||||
- Typography (size, weight, contrast)
|
||||
- Color and material usage
|
||||
- Animation and interactivity gaps
|
||||
- Accessibility problems
|
||||
- Deprecated or non-native patterns
|
||||
2. **Present a numbered list** of suggested improvements with brief rationale for each.
|
||||
3. **Ask for confirmation** before writing any code: "Should I apply all of these, or only specific ones?"
|
||||
4. Apply only what the user confirms.
|
||||
|
||||
---
|
||||
|
||||
## Design System
|
||||
|
||||
### Colors & Materials
|
||||
- **Accent**: `Color.amber` (project-defined). Use for active state, selection indicators, progress fills, and CTAs.
|
||||
- **Backgrounds**: Prefer `.regularMaterial`, `.ultraThinMaterial`, or `.thinMaterial` over hard-coded `Color.black.opacity(x)` or `Color(.systemBackground)`.
|
||||
- **Dark overlays** (e.g. full-screen players): Use `KFImage` blurred background + `Color.black.opacity(0.5–0.6)` overlay. Never use a flat solid black background.
|
||||
- **Semantic colors**: Use `.primary`, `.secondary`, `.tertiary` foreground styles. Avoid hard-coded `Color.white` except on dark material contexts (full-screen player).
|
||||
- **No hardcoded color literals** — use `Color+App.swift` extensions or system semantic colors.
|
||||
|
||||
### Typography
|
||||
- Use the SF Pro system font via `.font(.title)`, `.font(.body)`, etc. — never hardcode font names except for intentional stylistic accents (e.g. "Snell Roundhand" for voice watermark).
|
||||
- Apply `.fontWeight()` and `.fontDesign()` modifiers rather than custom font families.
|
||||
- Support Dynamic Type — never hardcode a fixed font size as the sole option without a `.minimumScaleFactor` or system font size modifier.
|
||||
- Hierarchy: title3.bold for primary labels, subheadline for secondary, caption/caption2 for metadata.
|
||||
|
||||
### Spacing & Layout
|
||||
- Minimum touch target: **44×44 pt**. Use `.frame(minWidth: 44, minHeight: 44)` or `.contentShape(Rectangle())` on small icons.
|
||||
- Prefer 16–20 pt horizontal padding on full-width containers; 12 pt for compact inner elements.
|
||||
- Use `VStack(spacing:)` and `HStack(spacing:)` explicitly — never rely on default spacing for production UI.
|
||||
- Corner radii: 12–14 pt for cards/chips, 10 pt for small badges, 20–24 pt for large cover art.
|
||||
|
||||
---
|
||||
|
||||
## Animation Rules
|
||||
|
||||
### Spring Animations (default for all interactive transitions)
|
||||
- Use `.spring(response:dampingFraction:)` for state-driven layout changes, selection feedback, and appear/disappear transitions.
|
||||
- Recommended defaults:
|
||||
- Interactive elements: `response: 0.3, dampingFraction: 0.7`
|
||||
- Entrance animations: `response: 0.45–0.5, dampingFraction: 0.7`
|
||||
- Quick snappy feedback: `response: 0.2, dampingFraction: 0.6`
|
||||
- Reserve `.easeInOut` only for non-interactive, ambient animations (e.g. opacity pulses, generating overlays).
|
||||
|
||||
### SF Symbol Transitions
|
||||
- Always use `contentTransition(.symbolEffect(.replace.downUp))` when a symbol name changes based on state (play/pause, checkmark/circle, etc.).
|
||||
- Use `.symbolEffect(.variableColor.cumulative)` for continuous animations (waveform, loading indicators).
|
||||
- Use `.symbolEffect(.bounce)` for one-shot entrance emphasis (e.g. completion checkmark appearing).
|
||||
- Use `.symbolEffect(.pulse)` for error/warning states that need attention.
|
||||
|
||||
### Repeating Animations
|
||||
- Use `phaseAnimator` for any looping animation that previously used manual `@State` + `withAnimation` chains.
|
||||
- Do not use `Timer` publishers for UI animation — prefer `phaseAnimator` or `TimelineView`.
|
||||
|
||||
---
|
||||
|
||||
## Haptic Feedback
|
||||
|
||||
Add `UIImpactFeedbackGenerator` to every user-initiated interactive control:
|
||||
- `.light` — toggle switches, selection chips, secondary actions, slider drag start.
|
||||
- `.medium` — primary transport buttons (play/pause, chapter skip), significant confirmations.
|
||||
- `.heavy` — destructive actions (only if no confirmation dialog).
|
||||
|
||||
Pattern:
|
||||
```swift
|
||||
Button {
|
||||
UIImpactFeedbackGenerator(style: .light).impactOccurred()
|
||||
// action
|
||||
} label: { ... }
|
||||
```
|
||||
|
||||
Do **not** add haptics to:
|
||||
- Programmatic state changes not directly triggered by a tap.
|
||||
- Buttons inside `List` rows that already use swipe actions.
|
||||
- Scroll events.
|
||||
|
||||
---
|
||||
|
||||
## iOS 17+ API Usage
|
||||
|
||||
Flag and replace any of the following deprecated patterns:
|
||||
|
||||
| Deprecated | Replace with |
|
||||
|---|---|
|
||||
| `NavigationView` | `NavigationStack` |
|
||||
| `@StateObject` / `ObservableObject` (new types only) | `@Observable` macro |
|
||||
| `DispatchQueue.main.async` | `await MainActor.run` or `@MainActor` |
|
||||
| Manual `@State` animation chains for repeating loops | `phaseAnimator` |
|
||||
| `.animation(_:)` without `value:` | `.animation(_:value:)` |
|
||||
| `AnyView` wrapping for conditional content | `@ViewBuilder` + `Group` |
|
||||
|
||||
Do **not** refactor existing `ObservableObject` types to `@Observable` unless explicitly asked — only apply `@Observable` to new types.
|
||||
|
||||
---
|
||||
|
||||
## Accessibility
|
||||
|
||||
Every view must:
|
||||
- Support VoiceOver: add `.accessibilityLabel()` to icon-only buttons and image views.
|
||||
- Support Dynamic Type: test that text doesn't truncate at xxxLarge without a layout adjustment.
|
||||
- Meet contrast ratio: text on tinted backgrounds must be legible — avoid `.opacity(0.25)` or lower for any user-readable text.
|
||||
- Touch targets ≥ 44pt (see Spacing above).
|
||||
- Interactive controls must have `.accessibilityAddTraits(.isButton)` if not using `Button`.
|
||||
- Do not rely solely on color to convey state — pair color with icon or label.
|
||||
|
||||
---
|
||||
|
||||
## Performance
|
||||
|
||||
- **Isolate high-frequency observers**: Any view that observes a `PlaybackProgress` (timer-tick updates) must be a separate sub-view that `@ObservedObject`-observes only the progress object — not the parent view. This prevents the entire parent from re-rendering every 0.5 seconds.
|
||||
- **Avoid `id()` overuse**: Only use `.id()` to force view recreation when necessary (e.g. background image on track change). Prefer `onChange(of:)` for side effects.
|
||||
- **Lazy containers**: Use `LazyVStack` / `LazyHStack` inside `ScrollView` for lists of 20+ items. `List` is inherently lazy and does not need this.
|
||||
- **Image loading**: Always use `KFImage` (Kingfisher) with `.placeholder` for remote images. Never use `AsyncImage` for cover art — it has no disk cache.
|
||||
- **Avoid `AnyView`**: It breaks structural identity and hurts diffing. Use `@ViewBuilder` or `Group { }` instead.
|
||||
|
||||
---
|
||||
|
||||
## Offline & Error States
|
||||
|
||||
Every view that makes network calls must:
|
||||
1. Wrap the body in a `VStack` with `OfflineBanner` at the top, gated on `networkMonitor.isConnected`.
|
||||
2. Suppress network errors silently when offline via `ErrorAlertModifier` — do not show an alert when the device is offline.
|
||||
3. Gate `.task` / `.onAppear` network calls: `guard networkMonitor.isConnected else { return }`.
|
||||
4. Show a non-blocking inline empty state (not a full-screen error) for failed loads when online.
|
||||
|
||||
---
|
||||
|
||||
## Component Checklist (before submitting any view change)
|
||||
|
||||
- [ ] All interactive elements ≥ 44pt touch target
|
||||
- [ ] SF Symbol state changes use `contentTransition(.symbolEffect(...))`
|
||||
- [ ] State-driven layout transitions use `.spring(response:dampingFraction:)`
|
||||
- [ ] Tappable controls have haptic feedback
|
||||
- [ ] No `NavigationView`, no `DispatchQueue.main.async`, no `.animation(_:)` without `value:`
|
||||
- [ ] High-frequency observers are isolated sub-views
|
||||
- [ ] Offline state handled with `OfflineBanner` + `NetworkMonitor`
|
||||
- [ ] VoiceOver labels on icon-only buttons
|
||||
- [ ] No hardcoded `Color.black` / `Color.white` / `Color(.systemBackground)` where a material applies
|
||||
171
AGENTS.md
171
AGENTS.md
@@ -1,171 +0,0 @@
|
||||
# libnovel Project
|
||||
|
||||
Go web scraper for novelfire.net with TTS support via Kokoro-FastAPI. Structured data in PocketBase, binary blobs (chapters, audio, browse snapshots) in MinIO. SvelteKit frontend.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
scraper/
|
||||
├── cmd/scraper/main.go # Entry point: run | refresh | serve | save-browse
|
||||
├── internal/
|
||||
│ ├── orchestrator/orchestrator.go # Catalogue walk → per-book metadata goroutines → chapter worker pool
|
||||
│ ├── browser/ # BrowserClient interface + direct HTTP (production) + Browserless variants
|
||||
│ ├── novelfire/scraper.go # novelfire.net scraping (catalogue, metadata, chapters, ranking)
|
||||
│ ├── server/ # HTTP API server (server.go + 6 handler files)
|
||||
│ │ ├── server.go # Server struct, route registration, ListenAndServe
|
||||
│ │ ├── handlers_scrape.go # POST /scrape, /scrape/book, /scrape/book/range; job status/tasks
|
||||
│ │ ├── handlers_browse.go # GET /api/browse, /api/search, /api/cover — MinIO-cached browse pages
|
||||
│ │ ├── handlers_preview.go # GET /api/book-preview, /api/chapter-text-preview — live scrape, no store writes
|
||||
│ │ ├── handlers_audio.go # POST /api/audio, GET /api/audio-proxy, voice samples, presign
|
||||
│ │ ├── handlers_progress.go # GET/POST/DELETE /api/progress
|
||||
│ │ ├── handlers_ranking.go # GET /api/ranking, /api/cover
|
||||
│ │ └── helpers.go # stripMarkdown, hardcoded voice list fallback
|
||||
│ ├── storage/ # Persistence layer (PocketBase + MinIO)
|
||||
│ │ ├── store.go # Store interface — single abstraction for server + orchestrator
|
||||
│ │ ├── hybrid.go # HybridStore: routes structured data → PocketBase, blobs → MinIO
|
||||
│ │ ├── pocketbase.go # PocketBase REST admin client (7 collections, auth, schema bootstrap)
|
||||
│ │ ├── minio.go # MinIO client (3 buckets: chapters, audio, browse)
|
||||
│ │ └── coverutil.go # Best-effort cover image downloader → browse bucket
|
||||
│ └── scraper/
|
||||
│ ├── interfaces.go # NovelScraper interface + domain types (BookMeta, ChapterRef, etc.)
|
||||
│ └── htmlutil/htmlutil.go # HTML parsing helpers (NodeToMarkdown, ResolveURL, etc.)
|
||||
```
|
||||
|
||||
## Key Concepts
|
||||
|
||||
- **Orchestrator**: Catalogue stream → per-book goroutines (metadata + chapter list) → shared chapter work channel → N worker goroutines (chapter text). Scrape jobs tracked in PocketBase `scraping_tasks`.
|
||||
- **Storage**: `HybridStore` implements the `Store` interface. PocketBase holds structured records (`books`, `chapters_idx`, `ranking`, `progress`, `audio_cache`, `app_users`, `scraping_tasks`). MinIO holds blobs (chapter markdown, audio MP3s, browse HTML snapshots, cover images).
|
||||
- **Browser Client**: Production uses `NewDirectHTTPClient` (plain HTTP, no Browserless). Browserless variants (content/scrape/cdp) exist in `browser/` but are only wired for the `save-browse` subcommand.
|
||||
- **Preview**: `GET /api/book-preview/{slug}` scrapes metadata + chapter list live without persisting anything — used when a book is not yet in the library. On first visit, metadata and chapter index are auto-saved to PocketBase in the background.
|
||||
- **Server**: 24 HTTP endpoints. Async scrape jobs (mutex, 409 on concurrent), in-flight dedup for audio generation, MinIO-backed browse page cache with mem-cache fallback.
|
||||
|
||||
## Commands
|
||||
|
||||
```bash
|
||||
# Build
|
||||
cd scraper && go build -o bin/scraper ./cmd/scraper
|
||||
|
||||
# Full catalogue scrape (one-shot)
|
||||
./bin/scraper run
|
||||
|
||||
# Single book
|
||||
./bin/scraper run --url https://novelfire.net/book/xxx
|
||||
|
||||
# Re-scrape a book already in the DB (uses stored source_url)
|
||||
./bin/scraper refresh <slug>
|
||||
|
||||
# HTTP server
|
||||
./bin/scraper serve
|
||||
|
||||
# Capture browse pages to MinIO via SingleFile CLI (requires SINGLEFILE_PATH + BROWSERLESS_URL)
|
||||
./bin/scraper save-browse
|
||||
|
||||
# Tests (unit only — integration tests require live services)
|
||||
cd scraper && go test ./... -short
|
||||
|
||||
# All tests (requires MinIO + PocketBase + Browserless)
|
||||
cd scraper && go test ./...
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
### Scraper (Go)
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `LOG_LEVEL` | `debug\|info\|warn\|error` | `info` |
|
||||
| `SCRAPER_HTTP_ADDR` | HTTP listen address | `:8080` |
|
||||
| `SCRAPER_WORKERS` | Chapter goroutines | `NumCPU` |
|
||||
| `SCRAPER_TIMEOUT` | Per-request HTTP timeout (seconds) | `90` |
|
||||
| `KOKORO_URL` | Kokoro-FastAPI TTS base URL | `https://kokoro.kalekber.cc` |
|
||||
| `KOKORO_VOICE` | Default TTS voice | `af_bella` |
|
||||
| `MINIO_ENDPOINT` | MinIO S3 API host:port | `localhost:9000` |
|
||||
| `MINIO_PUBLIC_ENDPOINT` | Public MinIO endpoint for presigned URLs | `""` |
|
||||
| `MINIO_ACCESS_KEY` | MinIO access key | `admin` |
|
||||
| `MINIO_SECRET_KEY` | MinIO secret key | `changeme123` |
|
||||
| `MINIO_USE_SSL` | TLS for internal MinIO connection | `false` |
|
||||
| `MINIO_PUBLIC_USE_SSL` | TLS for public presigned URL endpoint | `true` |
|
||||
| `MINIO_BUCKET_CHAPTERS` | Chapter markdown bucket | `libnovel-chapters` |
|
||||
| `MINIO_BUCKET_AUDIO` | Audio MP3 bucket | `libnovel-audio` |
|
||||
| `MINIO_BUCKET_BROWSE` | Browse HTML + cover image bucket | `libnovel-browse` |
|
||||
| `POCKETBASE_URL` | PocketBase base URL | `http://localhost:8090` |
|
||||
| `POCKETBASE_ADMIN_EMAIL` | PocketBase admin email | `admin@libnovel.local` |
|
||||
| `POCKETBASE_ADMIN_PASSWORD` | PocketBase admin password | `changeme123` |
|
||||
| `BROWSERLESS_URL` | Browserless WS endpoint (save-browse only) | `http://localhost:3030` |
|
||||
| `SINGLEFILE_PATH` | SingleFile CLI binary path (save-browse only) | `single-file` |
|
||||
|
||||
### UI (SvelteKit)
|
||||
|
||||
| Variable | Description | Default |
|
||||
|----------|-------------|---------|
|
||||
| `AUTH_SECRET` | HMAC signing secret for auth tokens | `dev_secret_change_in_production` |
|
||||
| `SCRAPER_API_URL` | Internal URL of the Go scraper | `http://localhost:8080` |
|
||||
| `POCKETBASE_URL` | PocketBase base URL | `http://localhost:8090` |
|
||||
| `POCKETBASE_ADMIN_EMAIL` | PocketBase admin email | `admin@libnovel.local` |
|
||||
| `POCKETBASE_ADMIN_PASSWORD` | PocketBase admin password | `changeme123` |
|
||||
| `PUBLIC_MINIO_PUBLIC_URL` | Browser-visible MinIO URL (presigned links) | `http://localhost:9000` |
|
||||
|
||||
## Docker
|
||||
|
||||
```bash
|
||||
docker-compose up -d # Starts: minio, minio-init, pocketbase, pb-init, scraper, ui
|
||||
```
|
||||
|
||||
Services:
|
||||
|
||||
| Service | Port(s) | Role |
|
||||
|---------|---------|------|
|
||||
| `minio` | `9000` (S3 API), `9001` (console) | Object storage |
|
||||
| `minio-init` | — | One-shot bucket creation then exits |
|
||||
| `pocketbase` | `8090` | Structured data store |
|
||||
| `pb-init` | — | One-shot PocketBase collection bootstrap then exits |
|
||||
| `scraper` | `8080` | Go scraper HTTP API |
|
||||
| `ui` | `5252` → internal `3000` | SvelteKit frontend |
|
||||
|
||||
Kokoro and Browserless are **external services** — not in docker-compose.
|
||||
|
||||
## HTTP API Endpoints (Go scraper)
|
||||
|
||||
| Method | Path | Description |
|
||||
|--------|------|-------------|
|
||||
| `GET` | `/health` | Liveness probe |
|
||||
| `POST` | `/scrape` | Enqueue full catalogue scrape |
|
||||
| `POST` | `/scrape/book` | Enqueue single-book scrape `{url}` |
|
||||
| `POST` | `/scrape/book/range` | Enqueue range scrape `{url, from, to?}` |
|
||||
| `GET` | `/api/scrape/status` | Current scrape job status |
|
||||
| `GET` | `/api/scrape/tasks` | All scrape task records |
|
||||
| `GET` | `/api/browse` | Browse novelfire catalogue (MinIO-cached) |
|
||||
| `GET` | `/api/search` | Search local + remote `?q=` |
|
||||
| `GET` | `/api/ranking` | Ranking list |
|
||||
| `GET` | `/api/cover/{domain}/{slug}` | Proxy cover image from MinIO |
|
||||
| `GET` | `/api/book-preview/{slug}` | Live metadata + chapter list (no store write) |
|
||||
| `GET` | `/api/chapter-text-preview/{slug}/{n}` | Live chapter text (no store write) |
|
||||
| `POST` | `/api/reindex/{slug}` | Rebuild chapters_idx from MinIO |
|
||||
| `GET` | `/api/chapter-text/{slug}/{n}` | Chapter text (markdown stripped) |
|
||||
| `POST` | `/api/audio/{slug}/{n}` | Trigger Kokoro TTS generation |
|
||||
| `GET` | `/api/audio-proxy/{slug}/{n}` | Proxy generated audio |
|
||||
| `POST` | `/api/audio/voice-samples` | Pre-generate voice samples |
|
||||
| `GET` | `/api/voices` | List available Kokoro voices |
|
||||
| `GET` | `/api/presign/chapter/{slug}/{n}` | Presigned MinIO URL for chapter |
|
||||
| `GET` | `/api/presign/audio/{slug}/{n}` | Presigned MinIO URL for audio |
|
||||
| `GET` | `/api/presign/voice-sample/{voice}` | Presigned MinIO URL for voice sample |
|
||||
| `GET` | `/api/progress` | Get reading progress (session-scoped) |
|
||||
| `POST` | `/api/progress/{slug}` | Set reading progress |
|
||||
| `DELETE` | `/api/progress/{slug}` | Delete reading progress |
|
||||
|
||||
## Code Patterns
|
||||
|
||||
- `log/slog` for structured logging throughout
|
||||
- Context-based cancellation on all network calls and goroutines
|
||||
- Worker pool pattern in orchestrator (buffered channel + WaitGroup)
|
||||
- Single async scrape job enforced by mutex; 409 on concurrent requests; job state persisted to `scraping_tasks` in PocketBase
|
||||
- `Store` interface decouples all persistence — pass it around, never touch MinIO/PocketBase clients directly outside `storage/`
|
||||
- Auth: custom HMAC-signed token (`userId:username:role.<sig>`) in `libnovel_auth` cookie; signed with `AUTH_SECRET`
|
||||
|
||||
## AI Context Tips
|
||||
|
||||
- **Primary files to modify**: `orchestrator.go`, `server/handlers_*.go`, `novelfire/scraper.go`, `storage/hybrid.go`, `storage/pocketbase.go`
|
||||
- **To add a new scrape source**: implement `NovelScraper` from `internal/scraper/interfaces.go`
|
||||
- **To add a new API endpoint**: add handler in the appropriate `handlers_*.go` file, register in `server.go` `ListenAndServe()`
|
||||
- **Storage changes**: update `Store` interface in `store.go`, implement on `HybridStore` (hybrid.go) and `PocketBaseStore`/`MinioClient` as needed; update mock in `orchestrator_test.go`
|
||||
- **Skip**: `scraper/bin/` (compiled binary), MinIO/PocketBase data volumes
|
||||
240
Caddyfile
Normal file
240
Caddyfile
Normal file
@@ -0,0 +1,240 @@
|
||||
# v3/Caddyfile
|
||||
#
|
||||
# Caddy reverse proxy for LibNovel v3.
|
||||
# Custom build includes github.com/mholt/caddy-ratelimit.
|
||||
#
|
||||
# Environment variables consumed (set in docker-compose.yml):
|
||||
# DOMAIN — public hostname, e.g. libnovel.example.com
|
||||
# Use "localhost" for local dev (no TLS cert attempted).
|
||||
# CADDY_ACME_EMAIL — Let's Encrypt notification email (empty = no email)
|
||||
#
|
||||
# Routing rules (main domain):
|
||||
# /health → backend:8080 (liveness probe)
|
||||
# /scrape* → backend:8080 (Go admin scrape endpoints)
|
||||
# /api/book-preview/* → backend:8080 (live scrape, no store write)
|
||||
# /api/chapter-text/* → backend:8080 (chapter markdown from MinIO)
|
||||
# /api/chapter-markdown/* → backend:8080 (chapter markdown from MinIO)
|
||||
# /api/reindex/* → backend:8080 (rebuild chapter index)
|
||||
# /api/cover/* → backend:8080 (proxy cover image)
|
||||
# /api/audio-proxy/* → backend:8080 (proxy generated audio)
|
||||
# /avatars/* → minio:9000 (presigned avatar GETs)
|
||||
# /audio/* → minio:9000 (presigned audio GETs)
|
||||
# /chapters/* → minio:9000 (presigned chapter GETs)
|
||||
# /* (everything else) → ui:3000 (SvelteKit — handles all
|
||||
# remaining /api/* routes)
|
||||
#
|
||||
# Subdomain routing:
|
||||
# feedback.libnovel.cc → fider:3000 (user feedback / feature requests)
|
||||
# errors.libnovel.cc → glitchtip-web:8000 (error tracking)
|
||||
# analytics.libnovel.cc → umami:3000 (page analytics)
|
||||
# logs.libnovel.cc → dozzle:8080 (Docker log viewer)
|
||||
# uptime.libnovel.cc → uptime-kuma:3001 (uptime monitoring)
|
||||
# push.libnovel.cc → gotify:80 (push notifications)
|
||||
#
|
||||
# Routes intentionally removed from direct-to-backend:
|
||||
# /api/scrape/* — SvelteKit has /api/scrape/ counterparts
|
||||
# that enforce auth; routing directly would
|
||||
# bypass SK middleware.
|
||||
# /api/chapter-text-preview/* — Same: SvelteKit owns
|
||||
# /api/chapter-text-preview/[slug]/[n].
|
||||
# /api/browse — Endpoint removed; browse snapshot system
|
||||
# was deleted.
|
||||
{
|
||||
# Email for Let's Encrypt ACME account registration.
|
||||
# When CADDY_ACME_EMAIL is set this expands to e.g. "email you@example.com".
|
||||
# When unset it expands to an empty string and is silently ignored.
|
||||
{$CADDY_ACME_EMAIL:}
|
||||
|
||||
# CrowdSec bouncer — streams decisions from the CrowdSec LAPI every 15s.
|
||||
# CROWDSEC_API_KEY is injected at runtime via crowdsec/.crowdsec.env.
|
||||
# The default "disabled" placeholder makes the bouncer fail-open (warn,
|
||||
# pass traffic) when no key is configured — Caddy still starts cleanly.
|
||||
crowdsec {
|
||||
api_url http://crowdsec:8080
|
||||
api_key {$CROWDSEC_API_KEY:disabled}
|
||||
ticker_interval 15s
|
||||
}
|
||||
}
|
||||
|
||||
(security_headers) {
|
||||
header {
|
||||
# Prevent clickjacking
|
||||
X-Frame-Options "SAMEORIGIN"
|
||||
# Prevent MIME-type sniffing
|
||||
X-Content-Type-Options "nosniff"
|
||||
# Minimal referrer info for cross-origin requests
|
||||
Referrer-Policy "strict-origin-when-cross-origin"
|
||||
# Restrict powerful browser features
|
||||
Permissions-Policy "camera=(), microphone=(), geolocation=(), payment=()"
|
||||
# Enforce HTTPS for 1 year (includeSubDomains)
|
||||
Strict-Transport-Security "max-age=31536000; includeSubDomains"
|
||||
# Enable XSS filter in older browsers
|
||||
X-XSS-Protection "1; mode=block"
|
||||
# Remove server identity header
|
||||
-Server
|
||||
}
|
||||
}
|
||||
|
||||
{$DOMAIN:localhost} {
|
||||
import security_headers
|
||||
|
||||
# ── CrowdSec bouncer ──────────────────────────────────────────────────────
|
||||
# Checks every incoming request against CrowdSec decisions.
|
||||
# Banned IPs receive a 403; all others pass through unchanged.
|
||||
route {
|
||||
crowdsec
|
||||
}
|
||||
|
||||
# ── Rate limiting ─────────────────────────────────────────────────────────
|
||||
# Auth endpoints: strict — 10 req/min per IP
|
||||
rate_limit {
|
||||
zone auth_zone {
|
||||
match {
|
||||
path /api/auth/login /api/auth/register /api/auth/change-password
|
||||
}
|
||||
key {remote_host}
|
||||
window 1m
|
||||
events 10
|
||||
}
|
||||
}
|
||||
|
||||
# Admin scrape endpoints: moderate — 20 req/min per IP
|
||||
rate_limit {
|
||||
zone scrape_zone {
|
||||
match {
|
||||
path /scrape*
|
||||
}
|
||||
key {remote_host}
|
||||
window 1m
|
||||
events 20
|
||||
}
|
||||
}
|
||||
|
||||
# Global: 300 req/min per IP (covers everything)
|
||||
rate_limit {
|
||||
zone global_zone {
|
||||
key {remote_host}
|
||||
window 1m
|
||||
events 300
|
||||
}
|
||||
}
|
||||
|
||||
# ── Liveness probe ────────────────────────────────────────────────────────
|
||||
handle /health {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
|
||||
# ── Scrape task creation (Go backend only) ────────────────────────────────
|
||||
handle /scrape* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
|
||||
# ── Backend-only API paths ────────────────────────────────────────────────
|
||||
# These paths are served exclusively by the Go backend and have no
|
||||
# SvelteKit counterpart. Routing them here skips SK intentionally.
|
||||
handle /api/book-preview/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
handle /api/chapter-text/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
handle /api/chapter-markdown/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
handle /api/reindex/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
handle /api/cover/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
handle /api/audio-proxy/* {
|
||||
reverse_proxy backend:8080
|
||||
}
|
||||
|
||||
# ── MinIO bucket paths (presigned URLs) ──────────────────────────────────
|
||||
# MinIO path-style presigned URLs include the bucket name as the first
|
||||
# path segment. MINIO_PUBLIC_ENDPOINT points here, so Caddy must proxy
|
||||
# these paths directly to MinIO — no auth layer needed (the presigned
|
||||
# signature itself enforces access and expiry).
|
||||
handle /avatars/* {
|
||||
reverse_proxy minio:9000
|
||||
}
|
||||
handle /audio/* {
|
||||
reverse_proxy minio:9000
|
||||
}
|
||||
handle /chapters/* {
|
||||
reverse_proxy minio:9000
|
||||
}
|
||||
|
||||
# ── SvelteKit UI (catch-all — includes all remaining /api/* routes) ───────
|
||||
handle {
|
||||
reverse_proxy ui:3000
|
||||
}
|
||||
|
||||
# ── Caddy-level error pages ───────────────────────────────────────────────
|
||||
# These fire when the upstream (backend or ui) is completely unreachable.
|
||||
# SvelteKit's own +error.svelte handles application-level errors (404, 500).
|
||||
handle_errors 502 {
|
||||
root * /srv/errors
|
||||
rewrite * /502.html
|
||||
file_server
|
||||
}
|
||||
handle_errors 503 {
|
||||
root * /srv/errors
|
||||
rewrite * /503.html
|
||||
file_server
|
||||
}
|
||||
handle_errors 504 {
|
||||
root * /srv/errors
|
||||
rewrite * /504.html
|
||||
file_server
|
||||
}
|
||||
|
||||
# ── Logging ───────────────────────────────────────────────────────────────
|
||||
# JSON log file read by CrowdSec for threat detection.
|
||||
log {
|
||||
output file /var/log/caddy/access.log {
|
||||
roll_size 100MiB
|
||||
roll_keep 5
|
||||
roll_keep_for 720h
|
||||
}
|
||||
format json
|
||||
}
|
||||
}
|
||||
|
||||
# ── Fider: user feedback & feature requests ───────────────────────────────────
|
||||
feedback.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy fider:3000
|
||||
}
|
||||
|
||||
# ── GlitchTip: error tracking ─────────────────────────────────────────────────
|
||||
errors.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy glitchtip-web:8000
|
||||
}
|
||||
|
||||
# ── Umami: page analytics ─────────────────────────────────────────────────────
|
||||
analytics.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy umami:3000
|
||||
}
|
||||
|
||||
# ── Dozzle: Docker log viewer ─────────────────────────────────────────────────
|
||||
logs.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy dozzle:8080
|
||||
}
|
||||
|
||||
# ── Uptime Kuma: uptime monitoring ────────────────────────────────────────────
|
||||
uptime.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy uptime-kuma:3001
|
||||
}
|
||||
|
||||
# ── Gotify: push notifications ────────────────────────────────────────────────
|
||||
push.libnovel.cc {
|
||||
import security_headers
|
||||
reverse_proxy gotify:80
|
||||
}
|
||||
# NOTE(review): removed a stray unmatched "}" here — the global options
# block, the security_headers snippet, the main site block, and all six
# subdomain blocks above are each already closed.
|
||||
38
README.md
Normal file
38
README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# LibNovel
|
||||
|
||||
Self-hosted audiobook platform. Go backend + SvelteKit UI + MinIO/PocketBase/Meilisearch.
|
||||
|
||||
## Requirements
|
||||
|
||||
- Docker + Docker Compose
|
||||
- [just](https://github.com/casey/just)
|
||||
- [Doppler CLI](https://docs.doppler.com/docs/install-cli)
|
||||
|
||||
## Setup
|
||||
|
||||
```sh
|
||||
doppler login
|
||||
doppler setup # project=libnovel, config=prd
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```sh
|
||||
just up # start everything
|
||||
just down # stop
|
||||
just logs # tail all logs
|
||||
just log backend # tail one service
|
||||
just build # rebuild images
|
||||
just restart # down + up
|
||||
just secrets # view/edit secrets
|
||||
```
|
||||
|
||||
## Secrets
|
||||
|
||||
Managed via Doppler (`project=libnovel`, `config=prd`). No `.env` files.
|
||||
|
||||
To add or update a secret:
|
||||
|
||||
```sh
|
||||
doppler secrets set MY_SECRET=value
|
||||
```
|
||||
13
backend/.dockerignore
Normal file
13
backend/.dockerignore
Normal file
@@ -0,0 +1,13 @@
|
||||
# Exclude compiled binaries
|
||||
bin/
|
||||
|
||||
# Exclude test binaries produced by `go test -c`
|
||||
*.test
|
||||
|
||||
# Git history is not needed inside the image
|
||||
.git/
|
||||
|
||||
# Editor/OS noise
|
||||
.DS_Store
|
||||
*.swp
|
||||
*.swo
|
||||
42
backend/Dockerfile
Normal file
42
backend/Dockerfile
Normal file
@@ -0,0 +1,42 @@
|
||||
# syntax=docker/dockerfile:1
FROM golang:1.26.1-alpine AS builder
WORKDIR /app

# Download modules into the BuildKit cache so they survive across builds.
# This layer is only invalidated when go.mod or go.sum changes.
#
# NOTE: the official golang images set GOPATH=/go, so the module cache
# lives at /go/pkg/mod — NOT /root/go/pkg/mod. Mounting the cache at the
# wrong path silently re-downloads every module on every build.
COPY go.mod go.sum ./
RUN --mount=type=cache,target=/go/pkg/mod \
    go mod download

COPY . .

ARG VERSION=dev
ARG COMMIT=unknown

# Build all three binaries in a single layer so the Go compiler can reuse
# intermediate object files. Both cache mounts are preserved between builds:
#   /go/pkg/mod           — downloaded module source (GOPATH=/go in golang images)
#   /root/.cache/go-build — compiled package objects (incremental recompile)
RUN --mount=type=cache,target=/go/pkg/mod \
    --mount=type=cache,target=/root/.cache/go-build \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags="-s -w -X main.version=${VERSION} -X main.commit=${COMMIT}" \
    -o /out/backend ./cmd/backend && \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags="-s -w -X main.version=${VERSION} -X main.commit=${COMMIT}" \
    -o /out/runner ./cmd/runner && \
    CGO_ENABLED=0 GOOS=linux go build \
    -ldflags="-s -w" \
    -o /out/healthcheck ./cmd/healthcheck

# ── backend service ──────────────────────────────────────────────────────────
FROM gcr.io/distroless/static:nonroot AS backend
COPY --from=builder /out/healthcheck /healthcheck
COPY --from=builder /out/backend /backend
ENTRYPOINT ["/backend"]

# ── runner service ───────────────────────────────────────────────────────────
FROM gcr.io/distroless/static:nonroot AS runner
COPY --from=builder /out/healthcheck /healthcheck
COPY --from=builder /out/runner /runner
ENTRYPOINT ["/runner"]
||||
153
backend/cmd/backend/main.go
Normal file
153
backend/cmd/backend/main.go
Normal file
@@ -0,0 +1,153 @@
|
||||
// Command backend is the LibNovel HTTP API server.
|
||||
//
|
||||
// It exposes all endpoints consumed by the SvelteKit UI: book/chapter reads,
|
||||
// scrape-task creation, presigned MinIO URLs, audio-task creation, reading
|
||||
// progress, live novelfire.net search, and Kokoro voice list.
|
||||
//
|
||||
// All heavy lifting (scraping, TTS generation) is delegated to the runner
|
||||
// binary via PocketBase task records. The backend never scrapes directly.
|
||||
//
|
||||
// Usage:
|
||||
//
|
||||
// backend # start HTTP server (blocks until SIGINT/SIGTERM)
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/getsentry/sentry-go"
|
||||
"github.com/libnovel/backend/internal/backend"
|
||||
"github.com/libnovel/backend/internal/config"
|
||||
"github.com/libnovel/backend/internal/kokoro"
|
||||
"github.com/libnovel/backend/internal/meili"
|
||||
"github.com/libnovel/backend/internal/storage"
|
||||
)
|
||||
|
||||
// version and commit identify the build. They default to "dev"/"unknown"
// and are overwritten at compile time via
// -ldflags "-X main.version=… -X main.commit=…".
var version = "dev"
var commit = "unknown"
||||
|
||||
func main() {
|
||||
if err := run(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "backend: fatal: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
func run() error {
|
||||
cfg := config.Load()
|
||||
|
||||
// ── Sentry / GlitchTip error tracking ────────────────────────────────────
|
||||
if dsn := os.Getenv("GLITCHTIP_DSN"); dsn != "" {
|
||||
if err := sentry.Init(sentry.ClientOptions{
|
||||
Dsn: dsn,
|
||||
Release: version + "@" + commit,
|
||||
TracesSampleRate: 0.1,
|
||||
}); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "backend: sentry init warning: %v\n", err)
|
||||
} else {
|
||||
defer sentry.Flush(2 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
// ── Logger ───────────────────────────────────────────────────────────────
|
||||
log := buildLogger(cfg.LogLevel)
|
||||
log.Info("backend starting",
|
||||
"version", version,
|
||||
"commit", commit,
|
||||
"addr", cfg.HTTP.Addr,
|
||||
)
|
||||
|
||||
// ── Context: cancel on SIGINT / SIGTERM ──────────────────────────────────
|
||||
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
|
||||
defer stop()
|
||||
|
||||
// ── Storage ──────────────────────────────────────────────────────────────
|
||||
store, err := storage.NewStore(ctx, cfg, log)
|
||||
if err != nil {
|
||||
return fmt.Errorf("init storage: %w", err)
|
||||
}
|
||||
|
||||
// ── Kokoro (voice list only; audio generation is done by the runner) ─────
|
||||
var kokoroClient kokoro.Client
|
||||
if cfg.Kokoro.URL != "" {
|
||||
kokoroClient = kokoro.New(cfg.Kokoro.URL)
|
||||
log.Info("kokoro voices enabled", "url", cfg.Kokoro.URL)
|
||||
} else {
|
||||
log.Info("KOKORO_URL not set — voice list will use built-in fallback")
|
||||
kokoroClient = &noopKokoro{}
|
||||
}
|
||||
|
||||
// ── Meilisearch (search reads only; indexing is the runner's job) ────────
|
||||
var searchIndex meili.Client
|
||||
if cfg.Meilisearch.URL != "" {
|
||||
searchIndex = meili.New(cfg.Meilisearch.URL, cfg.Meilisearch.APIKey)
|
||||
log.Info("meilisearch search enabled", "url", cfg.Meilisearch.URL)
|
||||
} else {
|
||||
log.Info("MEILI_URL not set — search will use PocketBase substring fallback")
|
||||
searchIndex = meili.NoopClient{}
|
||||
}
|
||||
|
||||
// ── Backend server ───────────────────────────────────────────────────────
|
||||
srv := backend.New(
|
||||
backend.Config{
|
||||
Addr: cfg.HTTP.Addr,
|
||||
DefaultVoice: cfg.Kokoro.DefaultVoice,
|
||||
Version: version,
|
||||
Commit: commit,
|
||||
},
|
||||
backend.Dependencies{
|
||||
BookReader: store,
|
||||
RankingStore: store,
|
||||
AudioStore: store,
|
||||
PresignStore: store,
|
||||
ProgressStore: store,
|
||||
CoverStore: store,
|
||||
Producer: store,
|
||||
TaskReader: store,
|
||||
SearchIndex: searchIndex,
|
||||
Kokoro: kokoroClient,
|
||||
Log: log,
|
||||
},
|
||||
)
|
||||
|
||||
return srv.ListenAndServe(ctx)
|
||||
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
func buildLogger(level string) *slog.Logger {
|
||||
var lvl slog.Level
|
||||
switch level {
|
||||
case "debug":
|
||||
lvl = slog.LevelDebug
|
||||
case "warn":
|
||||
lvl = slog.LevelWarn
|
||||
case "error":
|
||||
lvl = slog.LevelError
|
||||
default:
|
||||
lvl = slog.LevelInfo
|
||||
}
|
||||
return slog.New(slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: lvl}))
|
||||
}
|
||||
|
||||
// noopKokoro is a no-op implementation used when KOKORO_URL is not set.
|
||||
// The backend only uses Kokoro for the voice list; audio generation is the
|
||||
// runner's responsibility. With no URL the built-in fallback list is served.
|
||||
type noopKokoro struct{}
|
||||
|
||||
func (n *noopKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, error) {
|
||||
return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
|
||||
}
|
||||
|
||||
func (n *noopKokoro) ListVoices(_ context.Context) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
57
backend/cmd/backend/main_test.go
Normal file
57
backend/cmd/backend/main_test.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestBuildLogger verifies that buildLogger returns a non-nil logger for each
|
||||
// supported log level string and for unknown values.
|
||||
func TestBuildLogger(t *testing.T) {
|
||||
for _, level := range []string{"debug", "info", "warn", "error", "unknown", ""} {
|
||||
l := buildLogger(level)
|
||||
if l == nil {
|
||||
t.Errorf("buildLogger(%q) returned nil", level)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestNoopKokoro verifies that the no-op Kokoro stub returns the expected
|
||||
// sentinel error from GenerateAudio and nil, nil from ListVoices.
|
||||
func TestNoopKokoro(t *testing.T) {
|
||||
noop := &noopKokoro{}
|
||||
|
||||
_, err := noop.GenerateAudio(t.Context(), "text", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("noopKokoro.GenerateAudio: expected error, got nil")
|
||||
}
|
||||
|
||||
voices, err := noop.ListVoices(t.Context())
|
||||
if err != nil {
|
||||
t.Fatalf("noopKokoro.ListVoices: unexpected error: %v", err)
|
||||
}
|
||||
if voices != nil {
|
||||
t.Fatalf("noopKokoro.ListVoices: expected nil slice, got %v", voices)
|
||||
}
|
||||
}
|
||||
|
||||
// TestRunStorageUnreachable verifies that run() fails fast and returns a
|
||||
// descriptive error when PocketBase is unreachable.
|
||||
func TestRunStorageUnreachable(t *testing.T) {
|
||||
// Point at an address nothing is listening on.
|
||||
t.Setenv("POCKETBASE_URL", "http://127.0.0.1:19999")
|
||||
// Use a fast listen address so we don't accidentally start a real server.
|
||||
t.Setenv("BACKEND_HTTP_ADDR", "127.0.0.1:0")
|
||||
|
||||
err := run()
|
||||
if err == nil {
|
||||
t.Fatal("run() should have returned an error when storage is unreachable")
|
||||
}
|
||||
|
||||
t.Logf("got expected error: %v", err)
|
||||
}
|
||||
|
||||
// TestMain is the test entry point. No fixtures or teardown are needed;
// it simply propagates the suite's exit code.
func TestMain(m *testing.M) {
	code := m.Run()
	os.Exit(code)
}
|
||||
89
backend/cmd/healthcheck/main.go
Normal file
89
backend/cmd/healthcheck/main.go
Normal file
@@ -0,0 +1,89 @@
|
||||
// healthcheck is a static binary used by Docker HEALTHCHECK CMD in distroless
|
||||
// images (which have no shell, wget, or curl).
|
||||
//
|
||||
// Two modes:
|
||||
//
|
||||
// 1. HTTP mode (default):
|
||||
// /healthcheck <url>
|
||||
// Performs GET <url>; exits 0 if HTTP 2xx/3xx, 1 otherwise.
|
||||
// Example: /healthcheck http://localhost:8080/health
|
||||
//
|
||||
// 2. File-liveness mode:
|
||||
// /healthcheck file <path> <max_age_seconds>
|
||||
// Reads <path>, parses its content as RFC3339 timestamp, and exits 1 if the
|
||||
// timestamp is older than <max_age_seconds>. Used by the runner service which
|
||||
// writes /tmp/runner.alive on every successful poll.
|
||||
// Example: /healthcheck file /tmp/runner.alive 120
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
)
|
||||
|
||||
func main() {
|
||||
if len(os.Args) > 1 && os.Args[1] == "file" {
|
||||
checkFile()
|
||||
return
|
||||
}
|
||||
checkHTTP()
|
||||
}
|
||||
|
||||
// checkHTTP performs a GET request and exits the process: success (status
// below 400) returns normally for exit code 0; any connection error or
// error status exits 1.
//
// The URL defaults to http://localhost:8080/health and may be overridden
// by the first CLI argument.
func checkHTTP() {
	url := "http://localhost:8080/health"
	if len(os.Args) > 1 {
		url = os.Args[1]
	}

	// Use a bounded client: http.Get rides http.DefaultClient, which has
	// no timeout, so a wedged upstream would hang the probe indefinitely
	// instead of reporting failure.
	client := &http.Client{Timeout: 10 * time.Second}
	resp, err := client.Get(url) //nolint:gosec,noctx
	if err != nil {
		fmt.Fprintf(os.Stderr, "healthcheck: %v\n", err)
		os.Exit(1)
	}
	resp.Body.Close()
	if resp.StatusCode >= 400 {
		fmt.Fprintf(os.Stderr, "healthcheck: status %d\n", resp.StatusCode)
		os.Exit(1)
	}
}
|
||||
|
||||
// checkFile reads a timestamp from a file and exits 1 if it is older than
// the given max age. Usage: /healthcheck file <path> <max_age_seconds>
//
// The file content is expected to be an RFC3339 timestamp; when it is not
// parseable the file's mtime is used instead, so "touch"-style liveness
// files also work.
func checkFile() {
	if len(os.Args) < 4 {
		fmt.Fprintln(os.Stderr, "healthcheck file: usage: /healthcheck file <path> <max_age_seconds>")
		os.Exit(1)
	}
	path := os.Args[2]

	maxSeconds, parseErr := strconv.ParseInt(os.Args[3], 10, 64)
	if parseErr != nil {
		fmt.Fprintf(os.Stderr, "healthcheck file: invalid max_age_seconds %q: %v\n", os.Args[3], parseErr)
		os.Exit(1)
	}

	raw, readErr := os.ReadFile(path)
	if readErr != nil {
		fmt.Fprintf(os.Stderr, "healthcheck file: cannot read %s: %v\n", path, readErr)
		os.Exit(1)
	}

	stamp, tsErr := time.Parse(time.RFC3339, string(raw))
	if tsErr != nil {
		// Fallback: use file mtime if content is not a valid timestamp.
		info, statErr := os.Stat(path)
		if statErr != nil {
			fmt.Fprintf(os.Stderr, "healthcheck file: cannot stat %s: %v\n", path, statErr)
			os.Exit(1)
		}
		stamp = info.ModTime()
	}

	elapsed := time.Since(stamp)
	if elapsed > time.Duration(maxSeconds)*time.Second {
		fmt.Fprintf(os.Stderr, "healthcheck file: %s is %.0fs old (max %ds)\n", path, elapsed.Seconds(), maxSeconds)
		os.Exit(1)
	}
}
|
||||
173
backend/cmd/runner/main.go
Normal file
173
backend/cmd/runner/main.go
Normal file
@@ -0,0 +1,173 @@
|
||||
// Command runner is the homelab worker binary.
|
||||
//
|
||||
// It polls PocketBase for pending scrape and audio tasks, executes them, and
|
||||
// writes results back. It connects directly to PocketBase and MinIO using
|
||||
// admin credentials loaded from environment variables.
|
||||
//
|
||||
// Usage:
|
||||
//
|
||||
// runner # start polling loop (blocks until SIGINT/SIGTERM)
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"os/signal"
|
||||
"runtime"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/getsentry/sentry-go"
|
||||
"github.com/libnovel/backend/internal/browser"
|
||||
"github.com/libnovel/backend/internal/config"
|
||||
"github.com/libnovel/backend/internal/kokoro"
|
||||
"github.com/libnovel/backend/internal/meili"
|
||||
"github.com/libnovel/backend/internal/novelfire"
|
||||
"github.com/libnovel/backend/internal/runner"
|
||||
"github.com/libnovel/backend/internal/storage"
|
||||
)
|
||||
|
||||
// version and commit are set at build time via -ldflags.
|
||||
var (
|
||||
version = "dev"
|
||||
commit = "unknown"
|
||||
)
|
||||
|
||||
func main() {
|
||||
if err := run(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "runner: fatal: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// run wires together configuration, error tracking, logging, storage, the
// scraper, TTS, and search, then starts the polling runner. It blocks until
// the context is cancelled (SIGINT/SIGTERM) or the runner returns an error.
func run() error {
	cfg := config.Load()

	// ── Sentry / GlitchTip error tracking ────────────────────────────────────
	// Error tracking is optional: a failed init is logged but never fatal.
	if dsn := os.Getenv("GLITCHTIP_DSN"); dsn != "" {
		if err := sentry.Init(sentry.ClientOptions{
			Dsn:              dsn,
			Release:          version + "@" + commit,
			TracesSampleRate: 0.1,
		}); err != nil {
			fmt.Fprintf(os.Stderr, "runner: sentry init warning: %v\n", err)
		} else {
			// Give buffered events a chance to flush on shutdown.
			defer sentry.Flush(2 * time.Second)
		}
	}

	// ── Logger ──────────────────────────────────────────────────────────────
	log := buildLogger(cfg.LogLevel)
	log.Info("runner starting",
		"version", version,
		"commit", commit,
		"worker_id", cfg.Runner.WorkerID,
	)

	// ── Context: cancel on SIGINT / SIGTERM ─────────────────────────────────
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	// ── Storage ─────────────────────────────────────────────────────────────
	// A working store is mandatory — fail fast if PocketBase/MinIO are down.
	store, err := storage.NewStore(ctx, cfg, log)
	if err != nil {
		return fmt.Errorf("init storage: %w", err)
	}

	// ── Browser / Scraper ───────────────────────────────────────────────────
	// Zero/negative config values fall back to sensible defaults.
	workers := cfg.Runner.Workers
	if workers <= 0 {
		workers = runtime.NumCPU()
	}
	timeout := cfg.Runner.Timeout
	if timeout <= 0 {
		timeout = 90 * time.Second
	}

	browserClient := browser.NewDirectClient(browser.Config{
		MaxConcurrent: workers,
		Timeout:       timeout,
	})
	novel := novelfire.New(browserClient, log)

	// ── Kokoro ──────────────────────────────────────────────────────────────
	// TTS is optional: without KOKORO_URL a no-op client is injected so the
	// runner starts, but audio tasks will error out when executed.
	var kokoroClient kokoro.Client
	if cfg.Kokoro.URL != "" {
		kokoroClient = kokoro.New(cfg.Kokoro.URL)
		log.Info("kokoro TTS enabled", "url", cfg.Kokoro.URL)
	} else {
		log.Warn("KOKORO_URL not set — audio tasks will fail")
		kokoroClient = &noopKokoro{}
	}

	// ── Meilisearch ─────────────────────────────────────────────────────────
	// Search indexing is optional: both a missing URL and a failed Configure
	// degrade to the no-op client rather than aborting startup.
	var searchIndex meili.Client
	if cfg.Meilisearch.URL != "" {
		if err := meili.Configure(cfg.Meilisearch.URL, cfg.Meilisearch.APIKey); err != nil {
			log.Warn("meilisearch configure failed — search indexing disabled", "err", err)
			searchIndex = meili.NoopClient{}
		} else {
			searchIndex = meili.New(cfg.Meilisearch.URL, cfg.Meilisearch.APIKey)
			log.Info("meilisearch enabled", "url", cfg.Meilisearch.URL)
		}
	} else {
		log.Info("MEILI_URL not set — search indexing disabled")
		searchIndex = meili.NoopClient{}
	}

	// ── Runner ──────────────────────────────────────────────────────────────
	rCfg := runner.Config{
		WorkerID:                    cfg.Runner.WorkerID,
		PollInterval:                cfg.Runner.PollInterval,
		MaxConcurrentScrape:         cfg.Runner.MaxConcurrentScrape,
		MaxConcurrentAudio:          cfg.Runner.MaxConcurrentAudio,
		OrchestratorWorkers:         workers,
		MetricsAddr:                 cfg.Runner.MetricsAddr,
		CatalogueRefreshInterval:    cfg.Runner.CatalogueRefreshInterval,
		SkipInitialCatalogueRefresh: cfg.Runner.SkipInitialCatalogueRefresh,
	}
	// The single store implements every persistence interface the runner needs.
	deps := runner.Dependencies{
		Consumer:    store,
		BookWriter:  store,
		BookReader:  store,
		AudioStore:  store,
		CoverStore:  store,
		SearchIndex: searchIndex,
		Novel:       novel,
		Kokoro:      kokoroClient,
		Log:         log,
	}
	r := runner.New(rCfg, deps)

	// Blocks until ctx is cancelled or the runner fails.
	return r.Run(ctx)
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
// buildLogger returns a JSON slog.Logger writing to stdout at the requested
// level. Recognised levels are "debug", "warn", and "error"; any other value
// (including "") yields the info level.
func buildLogger(level string) *slog.Logger {
	lvl := slog.LevelInfo
	switch level {
	case "debug":
		lvl = slog.LevelDebug
	case "warn":
		lvl = slog.LevelWarn
	case "error":
		lvl = slog.LevelError
	}
	handler := slog.NewJSONHandler(os.Stdout, &slog.HandlerOptions{Level: lvl})
	return slog.New(handler)
}
|
||||
|
||||
// noopKokoro is a no-op implementation used when KOKORO_URL is not set.
|
||||
type noopKokoro struct{}
|
||||
|
||||
func (n *noopKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, error) {
|
||||
return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
|
||||
}
|
||||
|
||||
func (n *noopKokoro) ListVoices(_ context.Context) ([]string, error) {
|
||||
return nil, nil
|
||||
}
|
||||
@@ -1,37 +1,37 @@
|
||||
module github.com/libnovel/scraper
|
||||
module github.com/libnovel/backend
|
||||
|
||||
go 1.25.0
|
||||
go 1.26.1
|
||||
|
||||
require (
|
||||
github.com/minio/minio-go/v7 v7.0.98
|
||||
golang.org/x/net v0.51.0
|
||||
honnef.co/go/tools v0.7.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/getsentry/sentry-go v0.43.0 // indirect
|
||||
github.com/go-ini/ini v1.67.0 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/klauspost/compress v1.18.2 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
|
||||
github.com/klauspost/crc32 v1.3.0 // indirect
|
||||
github.com/meilisearch/meilisearch-go v0.36.1 // indirect
|
||||
github.com/minio/crc64nvme v1.1.1 // indirect
|
||||
github.com/minio/md5-simd v1.1.2 // indirect
|
||||
github.com/philhofer/fwd v1.2.0 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/redis/go-redis/v9 v9.18.0 // indirect
|
||||
github.com/rs/xid v1.6.0 // indirect
|
||||
github.com/tinylib/msgp v1.6.1 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.48.0 // indirect
|
||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 // indirect
|
||||
golang.org/x/mod v0.32.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/text v0.34.0 // indirect
|
||||
golang.org/x/tools v0.41.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
tool honnef.co/go/tools/cmd/staticcheck
|
||||
@@ -1,13 +1,19 @@
|
||||
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c h1:pxW6RcqyfI9/kWtOwnv/G+AzdKuy2ZrqINhenH4HyNs=
|
||||
github.com/BurntSushi/toml v1.4.1-0.20240526193622-a339e1f7089c/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho=
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/getsentry/sentry-go v0.43.0 h1:XbXLpFicpo8HmBDaInk7dum18G9KSLcjZiyUKS+hLW4=
|
||||
github.com/getsentry/sentry-go v0.43.0/go.mod h1:XDotiNZbgf5U8bPDUAfvcFmOnMQQceESxyKaObSssW0=
|
||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
|
||||
@@ -17,6 +23,8 @@ github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4O
|
||||
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=
|
||||
github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=
|
||||
github.com/meilisearch/meilisearch-go v0.36.1 h1:mJTCJE5g7tRvaqKco6DfqOuJEjX+rRltDEnkEC02Y0M=
|
||||
github.com/meilisearch/meilisearch-go v0.36.1/go.mod h1:hWcR0MuWLSzHfbz9GGzIr3s9rnXLm1jqkmHkJPbUSvM=
|
||||
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
|
||||
github.com/minio/crc64nvme v1.1.1/go.mod h1:eVfm2fAzLlxMdUGc0EEBGSMmPwmXD5XiNRpnu9J3bvg=
|
||||
github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
|
||||
@@ -27,35 +35,30 @@ github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
|
||||
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/redis/go-redis/v9 v9.18.0 h1:pMkxYPkEbMPwRdenAzUNyFNrDgHx9U+DrBabWNfSRQs=
|
||||
github.com/redis/go-redis/v9 v9.18.0/go.mod h1:k3ufPphLU5YXwNTUcCRXGxUoF1fqxnhFQmscfkCoDA0=
|
||||
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
|
||||
github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
|
||||
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678 h1:1P7xPZEwZMoBoz0Yze5Nx2/4pxj6nw9ZqHWXqP0iRgQ=
|
||||
golang.org/x/exp/typeparams v0.0.0-20231108232855-2478ac86f678/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
|
||||
golang.org/x/mod v0.32.0 h1:9F4d3PHLljb6x//jOyokMv3eX+YDeepZSEo3mFJy93c=
|
||||
golang.org/x/mod v0.32.0/go.mod h1:SgipZ/3h2Ci89DlEtEXWUk/HteuRin+HHhN+WbNhguU=
|
||||
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
|
||||
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
|
||||
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
|
||||
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
|
||||
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/tools v0.41.0 h1:a9b8iMweWG+S0OBnlU36rzLp20z1Rp10w+IY2czHTQc=
|
||||
golang.org/x/tools v0.41.0/go.mod h1:XSY6eDqxVNiYgezAVqqCeihT4j1U2CCsqvH3WhQpnlg=
|
||||
golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM=
|
||||
golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.7.0 h1:w6WUp1VbkqPEgLz4rkBzH/CSU6HkoqNLp6GstyTx3lU=
|
||||
honnef.co/go/tools v0.7.0/go.mod h1:pm29oPxeP3P82ISxZDgIYeOaf9ta6Pi0EWvCFoLG2vc=
|
||||
1178
backend/internal/backend/handlers.go
Normal file
1178
backend/internal/backend/handlers.go
Normal file
File diff suppressed because it is too large
Load Diff
303
backend/internal/backend/server.go
Normal file
303
backend/internal/backend/server.go
Normal file
@@ -0,0 +1,303 @@
|
||||
// Package backend implements the HTTP API server for the LibNovel backend.
|
||||
//
|
||||
// The server exposes all endpoints consumed by the SvelteKit UI:
|
||||
// - Book/chapter reads from PocketBase/MinIO via bookstore interfaces
|
||||
// - Task creation (scrape + audio) via taskqueue.Producer — the runner binary
|
||||
// picks up and executes those tasks asynchronously
|
||||
// - Presigned MinIO URLs for media playback/upload
|
||||
// - Session-scoped reading progress
|
||||
// - Live novelfire.net search (no scraper interface needed; direct HTTP)
|
||||
// - Kokoro voice list
|
||||
//
|
||||
// The backend never scrapes directly. All scraping (metadata, chapter list,
|
||||
// chapter text, audio TTS) is delegated to the runner binary via PocketBase
|
||||
// task records. GET /api/book-preview enqueues a task when the book is absent.
|
||||
//
|
||||
// All external dependencies are injected as interfaces; concrete types live in
|
||||
// internal/storage and are wired by cmd/backend/main.go.
|
||||
package backend
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/rand"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
sentryhttp "github.com/getsentry/sentry-go/http"
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/kokoro"
|
||||
"github.com/libnovel/backend/internal/meili"
|
||||
"github.com/libnovel/backend/internal/taskqueue"
|
||||
)
|
||||
|
||||
// Dependencies holds all external services the backend server depends on.
// Every field is an interface so test doubles can be injected freely.
// Log and SearchIndex may be left nil — New substitutes slog.Default() and
// the no-op Meilisearch client respectively.
type Dependencies struct {
	// BookReader reads book metadata and chapter text from PocketBase/MinIO.
	BookReader bookstore.BookReader
	// RankingStore reads ranking data from PocketBase.
	RankingStore bookstore.RankingStore
	// AudioStore checks audio object existence and computes MinIO keys.
	AudioStore bookstore.AudioStore
	// PresignStore generates short-lived MinIO URLs.
	PresignStore bookstore.PresignStore
	// ProgressStore reads/writes per-session reading progress.
	ProgressStore bookstore.ProgressStore
	// CoverStore reads and writes book cover images from MinIO.
	// If nil, the cover endpoint falls back to a CDN redirect.
	CoverStore bookstore.CoverStore
	// Producer creates scrape/audio tasks in PocketBase.
	Producer taskqueue.Producer
	// TaskReader reads scrape/audio task records from PocketBase.
	TaskReader taskqueue.Reader
	// SearchIndex provides full-text book search via Meilisearch.
	// If nil, the local-only fallback search is used.
	SearchIndex meili.Client
	// Kokoro is the TTS client (used for voice list only in the backend;
	// audio generation is done by the runner).
	Kokoro kokoro.Client
	// Log is the structured logger.
	Log *slog.Logger
}
|
||||
|
||||
// Config holds HTTP server tuning parameters.
type Config struct {
	// Addr is the listen address, e.g. ":8080".
	Addr string
	// DefaultVoice is used when no voice is specified in audio requests.
	// New falls back to "af_bella" when this is empty.
	DefaultVoice string
	// Version and Commit are embedded in /health and /api/version responses.
	Version string
	Commit  string
}
|
||||
|
||||
// Server is the HTTP API server. Construct it with New; it is safe for
// concurrent request handling (the only mutable state is the voice cache,
// guarded by voiceMu).
type Server struct {
	cfg  Config
	deps Dependencies

	// voiceMu guards cachedVoices. Populated lazily on first GET /api/voices.
	voiceMu      sync.RWMutex
	cachedVoices []string
}
|
||||
|
||||
// New creates a Server from cfg and deps.
|
||||
func New(cfg Config, deps Dependencies) *Server {
|
||||
if cfg.DefaultVoice == "" {
|
||||
cfg.DefaultVoice = "af_bella"
|
||||
}
|
||||
if deps.Log == nil {
|
||||
deps.Log = slog.Default()
|
||||
}
|
||||
if deps.SearchIndex == nil {
|
||||
deps.SearchIndex = meili.NoopClient{}
|
||||
}
|
||||
return &Server{cfg: cfg, deps: deps}
|
||||
}
|
||||
|
||||
// ListenAndServe registers all routes and starts the HTTP server.
// It blocks until ctx is cancelled, then performs a graceful shutdown
// (bounded at 30s). The returned error is nil on a clean shutdown, the
// shutdown error if draining failed, or the listener error if the server
// stopped on its own.
func (s *Server) ListenAndServe(ctx context.Context) error {
	mux := http.NewServeMux()

	// Health / version
	mux.HandleFunc("GET /health", s.handleHealth)
	mux.HandleFunc("GET /api/version", s.handleVersion)

	// Scrape task creation (202 Accepted — runner executes asynchronously)
	mux.HandleFunc("POST /scrape", s.handleScrapeCatalogue)
	mux.HandleFunc("POST /scrape/book", s.handleScrapeBook)
	mux.HandleFunc("POST /scrape/book/range", s.handleScrapeBookRange)

	// Scrape task status / history
	mux.HandleFunc("GET /api/scrape/status", s.handleScrapeStatus)
	mux.HandleFunc("GET /api/scrape/tasks", s.handleScrapeTasks)

	// Cancel a pending task (scrape or audio)
	mux.HandleFunc("POST /api/cancel-task/{id}", s.handleCancelTask)

	// Browse & search
	mux.HandleFunc("GET /api/search", s.handleSearch)

	// Catalogue (Meilisearch-backed browse + search — preferred path for UI)
	mux.HandleFunc("GET /api/catalogue", s.handleCatalogue)

	// Ranking (from PocketBase)
	mux.HandleFunc("GET /api/ranking", s.handleGetRanking)

	// Cover proxy (live URL redirect)
	mux.HandleFunc("GET /api/cover/{domain}/{slug}", s.handleGetCover)

	// Book preview (enqueues scrape task if not in library; returns stored data if already scraped)
	mux.HandleFunc("GET /api/book-preview/{slug}", s.handleBookPreview)

	// Chapter text (served from MinIO via PocketBase index)
	mux.HandleFunc("GET /api/chapter-text/{slug}/{n}", s.handleChapterText)
	// Raw markdown chapter content — served directly from MinIO by the backend.
	// Use this instead of presign+fetch to avoid SvelteKit→MinIO network path.
	mux.HandleFunc("GET /api/chapter-markdown/{slug}/{n}", s.handleChapterMarkdown)

	// Chapter text preview — live scrape from novelfire.net, no store writes.
	// Used when the chapter is not yet in the library (preview mode).
	mux.HandleFunc("GET /api/chapter-text-preview/{slug}/{n}", s.handleChapterTextPreview)

	// Reindex chapters_idx from MinIO
	mux.HandleFunc("POST /api/reindex/{slug}", s.handleReindex)

	// Audio task creation (backend creates task; runner executes)
	mux.HandleFunc("POST /api/audio/{slug}/{n}", s.handleAudioGenerate)
	mux.HandleFunc("GET /api/audio/status/{slug}/{n}", s.handleAudioStatus)
	mux.HandleFunc("GET /api/audio-proxy/{slug}/{n}", s.handleAudioProxy)

	// Voices list
	mux.HandleFunc("GET /api/voices", s.handleVoices)

	// Presigned URLs
	mux.HandleFunc("GET /api/presign/chapter/{slug}/{n}", s.handlePresignChapter)
	mux.HandleFunc("GET /api/presign/audio/{slug}/{n}", s.handlePresignAudio)
	mux.HandleFunc("GET /api/presign/voice-sample/{voice}", s.handlePresignVoiceSample)
	mux.HandleFunc("GET /api/presign/avatar-upload/{userId}", s.handlePresignAvatarUpload)
	mux.HandleFunc("GET /api/presign/avatar/{userId}", s.handlePresignAvatar)
	mux.HandleFunc("PUT /api/avatar-upload/{userId}", s.handleAvatarUpload)

	// Reading progress
	mux.HandleFunc("GET /api/progress", s.handleGetProgress)
	mux.HandleFunc("POST /api/progress/{slug}", s.handleSetProgress)
	mux.HandleFunc("DELETE /api/progress/{slug}", s.handleDeleteProgress)

	// Sentry middleware wraps the mux so handler panics are reported and
	// re-raised (Repanic: true) instead of being swallowed.
	srv := &http.Server{
		Addr:         s.cfg.Addr,
		Handler:      sentryhttp.New(sentryhttp.Options{Repanic: true}).Handle(mux),
		ReadTimeout:  15 * time.Second,
		WriteTimeout: 60 * time.Second,
		IdleTimeout:  60 * time.Second,
	}

	// Serve in a goroutine; the buffered channel keeps the goroutine from
	// leaking when shutdown wins the select below.
	errCh := make(chan error, 1)
	go func() { errCh <- srv.ListenAndServe() }()
	s.deps.Log.Info("backend: HTTP server listening", "addr", s.cfg.Addr)

	select {
	case <-ctx.Done():
		s.deps.Log.Info("backend: context cancelled, starting graceful shutdown")
		shutCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
		defer cancel()
		if err := srv.Shutdown(shutCtx); err != nil {
			s.deps.Log.Error("backend: graceful shutdown failed", "err", err)
			return err
		}
		s.deps.Log.Info("backend: shutdown complete")
		return nil
	case err := <-errCh:
		return err
	}
}
|
||||
|
||||
// ── Session cookie helpers ─────────────────────────────────────────────────────
|
||||
|
||||
const sessionCookieName = "libnovel_session"
|
||||
|
||||
func sessionID(r *http.Request) string {
|
||||
c, err := r.Cookie(sessionCookieName)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
return c.Value
|
||||
}
|
||||
|
||||
func newSessionID() (string, error) {
|
||||
b := make([]byte, 16)
|
||||
if _, err := rand.Read(b); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return hex.EncodeToString(b), nil
|
||||
}
|
||||
|
||||
func ensureSession(w http.ResponseWriter, r *http.Request) string {
|
||||
if id := sessionID(r); id != "" {
|
||||
return id
|
||||
}
|
||||
id, err := newSessionID()
|
||||
if err != nil {
|
||||
id = fmt.Sprintf("fallback-%d", time.Now().UnixNano())
|
||||
}
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
Name: sessionCookieName,
|
||||
Value: id,
|
||||
Path: "/",
|
||||
HttpOnly: true,
|
||||
SameSite: http.SameSiteLaxMode,
|
||||
MaxAge: 365 * 24 * 60 * 60,
|
||||
})
|
||||
return id
|
||||
}
|
||||
|
||||
// ── Utility helpers ────────────────────────────────────────────────────────────
|
||||
|
||||
// writeJSON writes v as a JSON response with status code. Status 0 → 200.
|
||||
func writeJSON(w http.ResponseWriter, status int, v any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if status != 0 {
|
||||
w.WriteHeader(status)
|
||||
}
|
||||
_ = json.NewEncoder(w).Encode(v)
|
||||
}
|
||||
|
||||
// jsonError writes a JSON error body and the given status code.
|
||||
func jsonError(w http.ResponseWriter, status int, msg string) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
_ = json.NewEncoder(w).Encode(map[string]string{"error": msg})
|
||||
}
|
||||
|
||||
// voices returns the list of available Kokoro voices. On the first call it
// fetches from the Kokoro service and caches the result. Falls back to the
// hardcoded list on error.
//
// Concurrency note: there is no singleflight here, so several concurrent
// first calls may each fetch from Kokoro; the last writer simply overwrites
// the cache, which is harmless for this data.
func (s *Server) voices(ctx context.Context) []string {
	// Fast path: serve the cached list under the read lock.
	s.voiceMu.RLock()
	cached := s.cachedVoices
	s.voiceMu.RUnlock()
	if len(cached) > 0 {
		return cached
	}

	// No TTS client configured — serve the built-in fallback list.
	if s.deps.Kokoro == nil {
		return kokoroVoices
	}

	// Slow path: fetch from Kokoro with a short timeout so a slow TTS
	// service cannot stall the voices endpoint.
	fetchCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()
	list, err := s.deps.Kokoro.ListVoices(fetchCtx)
	if err != nil || len(list) == 0 {
		s.deps.Log.Warn("backend: could not fetch kokoro voices, using built-in list", "err", err)
		return kokoroVoices
	}

	s.voiceMu.Lock()
	s.cachedVoices = list
	s.voiceMu.Unlock()
	s.deps.Log.Info("backend: fetched kokoro voices", "count", len(list))
	return list
}
|
||||
|
||||
// handleHealth handles GET /health.
|
||||
func (s *Server) handleHealth(w http.ResponseWriter, _ *http.Request) {
|
||||
writeJSON(w, 0, map[string]string{
|
||||
"status": "ok",
|
||||
"version": s.cfg.Version,
|
||||
"commit": s.cfg.Commit,
|
||||
})
|
||||
}
|
||||
|
||||
// handleVersion handles GET /api/version.
|
||||
func (s *Server) handleVersion(w http.ResponseWriter, _ *http.Request) {
|
||||
writeJSON(w, 0, map[string]string{
|
||||
"version": s.cfg.Version,
|
||||
"commit": s.cfg.Commit,
|
||||
})
|
||||
}
|
||||
143
backend/internal/bookstore/bookstore.go
Normal file
143
backend/internal/bookstore/bookstore.go
Normal file
@@ -0,0 +1,143 @@
|
||||
// Package bookstore defines the segregated read/write interfaces for book,
|
||||
// chapter, ranking, progress, audio, and presign data.
|
||||
//
|
||||
// Interface segregation:
|
||||
// - BookWriter — used by the runner to persist scraped data.
|
||||
// - BookReader — used by the backend to serve book/chapter data.
|
||||
// - RankingStore — used by both runner (write) and backend (read).
|
||||
// - PresignStore — used only by the backend for URL signing.
|
||||
// - AudioStore — used by the runner to store audio; backend for presign.
|
||||
// - ProgressStore— used only by the backend for reading progress.
|
||||
//
|
||||
// Concrete implementations live in internal/storage.
|
||||
package bookstore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
// BookWriter is the write side used by the runner after scraping a book.
type BookWriter interface {
	// WriteMetadata upserts all bibliographic fields for a book.
	WriteMetadata(ctx context.Context, meta domain.BookMeta) error

	// WriteChapter stores a fully-scraped chapter's text in MinIO and
	// updates the chapters_idx record in PocketBase.
	WriteChapter(ctx context.Context, slug string, chapter domain.Chapter) error

	// WriteChapterRefs persists chapter metadata (number + title) into
	// chapters_idx without fetching or storing chapter text.
	WriteChapterRefs(ctx context.Context, slug string, refs []domain.ChapterRef) error

	// ChapterExists returns true if the markdown object for ref already
	// exists, letting the runner skip re-scraping stored chapters.
	ChapterExists(ctx context.Context, slug string, ref domain.ChapterRef) bool
}
|
||||
|
||||
// BookReader is the read side used by the backend to serve content.
type BookReader interface {
	// ReadMetadata returns the metadata for slug.
	// Returns (zero, false, nil) when not found — absence is not an error.
	ReadMetadata(ctx context.Context, slug string) (domain.BookMeta, bool, error)

	// ListBooks returns all books sorted alphabetically by title.
	ListBooks(ctx context.Context) ([]domain.BookMeta, error)

	// LocalSlugs returns the set of slugs that have metadata stored.
	LocalSlugs(ctx context.Context) (map[string]bool, error)

	// MetadataMtime returns the Unix-second mtime of the metadata record,
	// or 0 when unavailable.
	MetadataMtime(ctx context.Context, slug string) int64

	// ReadChapter returns the raw markdown for chapter number n.
	ReadChapter(ctx context.Context, slug string, n int) (string, error)

	// ListChapters returns all stored chapters for slug, sorted by number.
	ListChapters(ctx context.Context, slug string) ([]domain.ChapterInfo, error)

	// CountChapters returns the count of stored chapters.
	CountChapters(ctx context.Context, slug string) int

	// ReindexChapters rebuilds chapters_idx from MinIO objects for slug and
	// returns the number of chapters indexed.
	ReindexChapters(ctx context.Context, slug string) (int, error)
}
|
||||
|
||||
// RankingStore covers ranking reads and writes.
// The runner writes ranking rows; the backend reads them to serve ranking pages.
type RankingStore interface {
	// WriteRankingItem upserts a single ranking entry (keyed on Slug).
	WriteRankingItem(ctx context.Context, item domain.RankingItem) error

	// ReadRankingItems returns all ranking items sorted by rank ascending.
	ReadRankingItems(ctx context.Context) ([]domain.RankingItem, error)

	// RankingFreshEnough returns true when ranking rows exist and the most
	// recent Updated timestamp is within maxAge. Used to decide whether a
	// re-scrape of the ranking is needed.
	RankingFreshEnough(ctx context.Context, maxAge time.Duration) (bool, error)
}
|
||||
|
||||
// AudioStore covers audio object storage (runner writes; backend reads).
type AudioStore interface {
	// AudioObjectKey returns the MinIO object key for a cached audio file,
	// derived from the book slug, chapter number n, and TTS voice name.
	AudioObjectKey(slug string, n int, voice string) string

	// AudioExists returns true when the audio object is present in MinIO.
	// key is expected to come from AudioObjectKey.
	AudioExists(ctx context.Context, key string) bool

	// PutAudio stores raw audio bytes under the given MinIO object key.
	PutAudio(ctx context.Context, key string, data []byte) error
}
|
||||
|
||||
// PresignStore generates short-lived URLs — used exclusively by the backend.
// Presigned URLs let clients fetch/upload objects directly from MinIO without
// proxying bytes through the backend.
type PresignStore interface {
	// PresignChapter returns a presigned GET URL for a chapter markdown object.
	PresignChapter(ctx context.Context, slug string, n int, expires time.Duration) (string, error)

	// PresignAudio returns a presigned GET URL for an audio object.
	// key is expected to come from AudioStore.AudioObjectKey.
	PresignAudio(ctx context.Context, key string, expires time.Duration) (string, error)

	// PresignAvatarUpload returns a short-lived presigned PUT URL for uploading
	// an avatar image. ext should be "jpg", "png", or "webp".
	// key is the object key the upload will land under.
	PresignAvatarUpload(ctx context.Context, userID, ext string) (uploadURL, key string, err error)

	// PresignAvatarURL returns a presigned GET URL for a user's avatar.
	// Returns ("", false, nil) when no avatar exists.
	PresignAvatarURL(ctx context.Context, userID string) (string, bool, error)

	// PutAvatar stores raw image bytes for a user avatar directly in MinIO.
	// ext should be "jpg", "png", or "webp". Returns the object key.
	PutAvatar(ctx context.Context, userID, ext, contentType string, data []byte) (key string, err error)

	// DeleteAvatar removes all avatar objects for a user.
	DeleteAvatar(ctx context.Context, userID string) error
}
|
||||
|
||||
// ProgressStore covers per-session reading progress — backend only.
// Progress is keyed by (sessionID, slug).
type ProgressStore interface {
	// GetProgress returns the reading progress for the given session + slug.
	// The bool is false when no progress is recorded.
	GetProgress(ctx context.Context, sessionID, slug string) (domain.ReadingProgress, bool)

	// SetProgress saves or updates reading progress.
	SetProgress(ctx context.Context, sessionID string, p domain.ReadingProgress) error

	// AllProgress returns all progress entries for a session.
	AllProgress(ctx context.Context, sessionID string) ([]domain.ReadingProgress, error)

	// DeleteProgress removes progress for a specific slug.
	DeleteProgress(ctx context.Context, sessionID, slug string) error
}
|
||||
|
||||
// CoverStore covers book cover image storage in MinIO.
// The runner writes covers during catalogue refresh; the backend reads them.
type CoverStore interface {
	// PutCover stores a raw cover image for a book identified by slug.
	// contentType is the image MIME type (stored alongside the object).
	PutCover(ctx context.Context, slug string, data []byte, contentType string) error

	// GetCover retrieves the cover image for a book. Returns (nil, false, nil)
	// when no cover exists for the given slug; the string return is
	// presumably the stored content type — confirm with the implementation.
	GetCover(ctx context.Context, slug string) ([]byte, string, bool, error)

	// CoverExists returns true when a cover image is stored for slug.
	CoverExists(ctx context.Context, slug string) bool
}
|
||||
141
backend/internal/bookstore/bookstore_test.go
Normal file
141
backend/internal/bookstore/bookstore_test.go
Normal file
@@ -0,0 +1,141 @@
|
||||
package bookstore_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
// ── Mock that satisfies all bookstore interfaces ──────────────────────────────

// mockStore is a no-op test double: every method returns zero values.
// Its purpose is the compile-time checks at the bottom of this section,
// which prove one concrete type can satisfy every bookstore interface.
type mockStore struct{}

// BookWriter
func (m *mockStore) WriteMetadata(_ context.Context, _ domain.BookMeta) error { return nil }
func (m *mockStore) WriteChapter(_ context.Context, _ string, _ domain.Chapter) error { return nil }
func (m *mockStore) WriteChapterRefs(_ context.Context, _ string, _ []domain.ChapterRef) error {
	return nil
}
func (m *mockStore) ChapterExists(_ context.Context, _ string, _ domain.ChapterRef) bool {
	return false
}

// BookReader
func (m *mockStore) ReadMetadata(_ context.Context, _ string) (domain.BookMeta, bool, error) {
	return domain.BookMeta{}, false, nil
}
func (m *mockStore) ListBooks(_ context.Context) ([]domain.BookMeta, error) { return nil, nil }
func (m *mockStore) LocalSlugs(_ context.Context) (map[string]bool, error) {
	return map[string]bool{}, nil
}
func (m *mockStore) MetadataMtime(_ context.Context, _ string) int64 { return 0 }
func (m *mockStore) ReadChapter(_ context.Context, _ string, _ int) (string, error) {
	return "", nil
}
func (m *mockStore) ListChapters(_ context.Context, _ string) ([]domain.ChapterInfo, error) {
	return nil, nil
}
func (m *mockStore) CountChapters(_ context.Context, _ string) int { return 0 }
func (m *mockStore) ReindexChapters(_ context.Context, _ string) (int, error) { return 0, nil }

// RankingStore
func (m *mockStore) WriteRankingItem(_ context.Context, _ domain.RankingItem) error { return nil }
func (m *mockStore) ReadRankingItems(_ context.Context) ([]domain.RankingItem, error) {
	return nil, nil
}
func (m *mockStore) RankingFreshEnough(_ context.Context, _ time.Duration) (bool, error) {
	return false, nil
}

// AudioStore
func (m *mockStore) AudioObjectKey(_ string, _ int, _ string) string { return "" }
func (m *mockStore) AudioExists(_ context.Context, _ string) bool { return false }
func (m *mockStore) PutAudio(_ context.Context, _ string, _ []byte) error { return nil }

// PresignStore
func (m *mockStore) PresignChapter(_ context.Context, _ string, _ int, _ time.Duration) (string, error) {
	return "", nil
}
func (m *mockStore) PresignAudio(_ context.Context, _ string, _ time.Duration) (string, error) {
	return "", nil
}
func (m *mockStore) PresignAvatarUpload(_ context.Context, _, _ string) (string, string, error) {
	return "", "", nil
}
func (m *mockStore) PresignAvatarURL(_ context.Context, _ string) (string, bool, error) {
	return "", false, nil
}
func (m *mockStore) PutAvatar(_ context.Context, _, _, _ string, _ []byte) (string, error) {
	return "", nil
}
func (m *mockStore) DeleteAvatar(_ context.Context, _ string) error { return nil }

// ProgressStore
func (m *mockStore) GetProgress(_ context.Context, _, _ string) (domain.ReadingProgress, bool) {
	return domain.ReadingProgress{}, false
}
func (m *mockStore) SetProgress(_ context.Context, _ string, _ domain.ReadingProgress) error {
	return nil
}
func (m *mockStore) AllProgress(_ context.Context, _ string) ([]domain.ReadingProgress, error) {
	return nil, nil
}
func (m *mockStore) DeleteProgress(_ context.Context, _, _ string) error { return nil }

// ── Compile-time interface satisfaction ───────────────────────────────────────

// These assignments fail to compile if mockStore ever drifts out of sync
// with an interface — the cheapest regression guard for interface changes.
var _ bookstore.BookWriter = (*mockStore)(nil)
var _ bookstore.BookReader = (*mockStore)(nil)
var _ bookstore.RankingStore = (*mockStore)(nil)
var _ bookstore.AudioStore = (*mockStore)(nil)
var _ bookstore.PresignStore = (*mockStore)(nil)
var _ bookstore.ProgressStore = (*mockStore)(nil)
|
||||
|
||||
// ── Behavioural tests ─────────────────────────────────────────────────────────
|
||||
|
||||
func TestBookWriter_WriteMetadata_ReturnsNilError(t *testing.T) {
|
||||
var w bookstore.BookWriter = &mockStore{}
|
||||
if err := w.WriteMetadata(context.Background(), domain.BookMeta{Slug: "test"}); err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestBookReader_ReadMetadata_NotFound(t *testing.T) {
|
||||
var r bookstore.BookReader = &mockStore{}
|
||||
_, found, err := r.ReadMetadata(context.Background(), "unknown")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if found {
|
||||
t.Error("expected not found")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRankingStore_RankingFreshEnough_ReturnsFalse(t *testing.T) {
|
||||
var s bookstore.RankingStore = &mockStore{}
|
||||
fresh, err := s.RankingFreshEnough(context.Background(), time.Hour)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if fresh {
|
||||
t.Error("expected false")
|
||||
}
|
||||
}
|
||||
|
||||
func TestAudioStore_AudioExists_ReturnsFalse(t *testing.T) {
|
||||
var s bookstore.AudioStore = &mockStore{}
|
||||
if s.AudioExists(context.Background(), "audio/slug/1/af_bella.mp3") {
|
||||
t.Error("expected false")
|
||||
}
|
||||
}
|
||||
|
||||
func TestProgressStore_GetProgress_NotFound(t *testing.T) {
|
||||
var s bookstore.ProgressStore = &mockStore{}
|
||||
_, found := s.GetProgress(context.Background(), "session-1", "slug")
|
||||
if found {
|
||||
t.Error("expected not found")
|
||||
}
|
||||
}
|
||||
191
backend/internal/browser/browser.go
Normal file
191
backend/internal/browser/browser.go
Normal file
@@ -0,0 +1,191 @@
|
||||
// Package browser provides a rate-limited HTTP client for web scraping.
|
||||
package browser
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ErrRateLimit is returned by GetContent when the server responds with 429.
// It carries the suggested retry delay (from Retry-After header, or a default).
// Callers should match with errors.Is(err, ErrRateLimit): the concrete value
// returned is a *RateLimitError whose Is method matches this sentinel.
var ErrRateLimit = errors.New("rate limited (429)")

// RateLimitError wraps ErrRateLimit and carries the suggested wait duration.
type RateLimitError struct {
	// RetryAfter is how long the caller should wait before retrying.
	// Derived from the Retry-After response header when present; otherwise a default.
	RetryAfter time.Duration
}
|
||||
|
||||
func (e *RateLimitError) Error() string {
|
||||
return fmt.Sprintf("rate limited (429): retry after %s", e.RetryAfter)
|
||||
}
|
||||
|
||||
// Is lets errors.Is match a *RateLimitError against the ErrRateLimit sentinel.
func (e *RateLimitError) Is(target error) bool {
	return target == ErrRateLimit
}
|
||||
|
||||
// defaultRateLimitDelay is used when the server returns 429 with no
// Retry-After header, or one that cannot be parsed.
const defaultRateLimitDelay = 60 * time.Second
|
||||
|
||||
// Client is the interface used by scrapers to fetch raw page HTML.
// Implementations must be safe for concurrent use.
// DirectClient is the production implementation; StubClient is the test double.
type Client interface {
	// GetContent fetches the URL and returns the full response body as a string.
	// It should respect the provided context for cancellation and timeouts.
	GetContent(ctx context.Context, pageURL string) (string, error)
}
|
||||
|
||||
// Config holds tunable parameters for the direct HTTP client.
// The zero value is usable: NewDirectClient substitutes defaults.
type Config struct {
	// MaxConcurrent limits the number of simultaneous in-flight requests.
	// Defaults to 5 when 0.
	MaxConcurrent int
	// Timeout is the per-request deadline. Defaults to 90s when 0.
	Timeout time.Duration
}
|
||||
|
||||
// DirectClient is a plain net/http-based Client with a concurrency semaphore.
type DirectClient struct {
	// http is the underlying HTTP client (transport + timeout set by NewDirectClient).
	http *http.Client
	// semaphore is a buffered channel used as a counting semaphore:
	// its capacity is the maximum number of in-flight requests.
	semaphore chan struct{}
}
|
||||
|
||||
// NewDirectClient returns a DirectClient configured by cfg.
|
||||
func NewDirectClient(cfg Config) *DirectClient {
|
||||
if cfg.MaxConcurrent <= 0 {
|
||||
cfg.MaxConcurrent = 5
|
||||
}
|
||||
if cfg.Timeout <= 0 {
|
||||
cfg.Timeout = 90 * time.Second
|
||||
}
|
||||
|
||||
transport := &http.Transport{
|
||||
MaxIdleConnsPerHost: cfg.MaxConcurrent * 2,
|
||||
DisableCompression: false,
|
||||
}
|
||||
|
||||
return &DirectClient{
|
||||
http: &http.Client{
|
||||
Transport: transport,
|
||||
Timeout: cfg.Timeout,
|
||||
},
|
||||
semaphore: make(chan struct{}, cfg.MaxConcurrent),
|
||||
}
|
||||
}
|
||||
|
||||
// GetContent fetches pageURL respecting the concurrency limit.
|
||||
func (c *DirectClient) GetContent(ctx context.Context, pageURL string) (string, error) {
|
||||
// Acquire semaphore slot.
|
||||
select {
|
||||
case c.semaphore <- struct{}{}:
|
||||
case <-ctx.Done():
|
||||
return "", ctx.Err()
|
||||
}
|
||||
defer func() { <-c.semaphore }()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, pageURL, nil)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("browser: build request %s: %w", pageURL, err)
|
||||
}
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; libnovel-runner/2)")
|
||||
req.Header.Set("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
|
||||
req.Header.Set("Accept-Language", "en-US,en;q=0.5")
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("browser: GET %s: %w", pageURL, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == http.StatusTooManyRequests {
|
||||
delay := defaultRateLimitDelay
|
||||
if ra := resp.Header.Get("Retry-After"); ra != "" {
|
||||
if secs, err := strconv.Atoi(ra); err == nil && secs > 0 {
|
||||
delay = time.Duration(secs) * time.Second
|
||||
}
|
||||
}
|
||||
return "", &RateLimitError{RetryAfter: delay}
|
||||
}
|
||||
|
||||
if resp.StatusCode >= 400 {
|
||||
return "", fmt.Errorf("browser: GET %s returned %d", pageURL, resp.StatusCode)
|
||||
}
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("browser: read body %s: %w", pageURL, err)
|
||||
}
|
||||
return string(body), nil
|
||||
}
|
||||
|
||||
// Do implements httputil.Client so DirectClient can be passed to RetryGet.
|
||||
func (c *DirectClient) Do(req *http.Request) (*http.Response, error) {
|
||||
select {
|
||||
case c.semaphore <- struct{}{}:
|
||||
case <-req.Context().Done():
|
||||
return nil, req.Context().Err()
|
||||
}
|
||||
defer func() { <-c.semaphore }()
|
||||
return c.http.Do(req)
|
||||
}
|
||||
|
||||
// ── Stub for testing ──────────────────────────────────────────────────────────
|
||||
|
||||
// StubClient is a test double for Client. It returns pre-configured responses
// keyed on URL. Calls to unknown URLs return an error.
// All methods are safe for concurrent use (guarded by mu).
type StubClient struct {
	mu sync.Mutex
	// pages maps URL → HTML body returned by GetContent.
	pages map[string]string
	// errors maps URL → error returned instead of a body; checked before pages.
	errors map[string]error
	// callLog records every requested URL in order, including misses.
	callLog []string
}
|
||||
|
||||
// NewStub creates a StubClient with no pages pre-loaded.
|
||||
func NewStub() *StubClient {
|
||||
return &StubClient{
|
||||
pages: make(map[string]string),
|
||||
errors: make(map[string]error),
|
||||
}
|
||||
}
|
||||
|
||||
// SetPage registers a URL → HTML body mapping.
|
||||
func (s *StubClient) SetPage(u, html string) {
|
||||
s.mu.Lock()
|
||||
s.pages[u] = html
|
||||
s.mu.Unlock()
|
||||
}
|
||||
|
||||
// SetError registers a URL → error mapping (returned instead of a body).
|
||||
func (s *StubClient) SetError(u string, err error) {
|
||||
s.mu.Lock()
|
||||
s.errors[u] = err
|
||||
s.mu.Unlock()
|
||||
}
|
||||
|
||||
// CallLog returns the ordered list of URLs that were requested.
|
||||
func (s *StubClient) CallLog() []string {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
out := make([]string, len(s.callLog))
|
||||
copy(out, s.callLog)
|
||||
return out
|
||||
}
|
||||
|
||||
// GetContent returns the registered page or an error for the URL.
|
||||
func (s *StubClient) GetContent(_ context.Context, pageURL string) (string, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.callLog = append(s.callLog, pageURL)
|
||||
if err, ok := s.errors[pageURL]; ok {
|
||||
return "", err
|
||||
}
|
||||
if html, ok := s.pages[pageURL]; ok {
|
||||
return html, nil
|
||||
}
|
||||
return "", fmt.Errorf("stub: no page registered for %q", pageURL)
|
||||
}
|
||||
141
backend/internal/browser/browser_test.go
Normal file
141
backend/internal/browser/browser_test.go
Normal file
@@ -0,0 +1,141 @@
|
||||
package browser_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/browser"
|
||||
)
|
||||
|
||||
func TestDirectClient_GetContent_Success(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte("<html>hello</html>"))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := browser.NewDirectClient(browser.Config{MaxConcurrent: 2, Timeout: 5 * time.Second})
|
||||
body, err := c.GetContent(context.Background(), srv.URL)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if body != "<html>hello</html>" {
|
||||
t.Errorf("want <html>hello</html>, got %q", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDirectClient_GetContent_4xxReturnsError(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := browser.NewDirectClient(browser.Config{})
|
||||
_, err := c.GetContent(context.Background(), srv.URL)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for 404")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDirectClient_SemaphoreBlocksConcurrency(t *testing.T) {
|
||||
const maxConcurrent = 2
|
||||
var inflight atomic.Int32
|
||||
var peak atomic.Int32
|
||||
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
n := inflight.Add(1)
|
||||
if int(n) > int(peak.Load()) {
|
||||
peak.Store(n)
|
||||
}
|
||||
time.Sleep(20 * time.Millisecond)
|
||||
inflight.Add(-1)
|
||||
w.Write([]byte("ok"))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := browser.NewDirectClient(browser.Config{MaxConcurrent: maxConcurrent, Timeout: 5 * time.Second})
|
||||
|
||||
var wg sync.WaitGroup
|
||||
for i := 0; i < 8; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
c.GetContent(context.Background(), srv.URL)
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
|
||||
if int(peak.Load()) > maxConcurrent {
|
||||
t.Errorf("concurrent requests exceeded limit: peak=%d, limit=%d", peak.Load(), maxConcurrent)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDirectClient_ContextCancel(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
time.Sleep(200 * time.Millisecond)
|
||||
w.Write([]byte("ok"))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
cancel() // cancel before making the request
|
||||
|
||||
c := browser.NewDirectClient(browser.Config{})
|
||||
_, err := c.GetContent(ctx, srv.URL)
|
||||
if err == nil {
|
||||
t.Fatal("expected context cancellation error")
|
||||
}
|
||||
}
|
||||
|
||||
// ── StubClient ────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestStubClient_ReturnsRegisteredPage(t *testing.T) {
|
||||
stub := browser.NewStub()
|
||||
stub.SetPage("http://example.com/page1", "<html>page1</html>")
|
||||
|
||||
body, err := stub.GetContent(context.Background(), "http://example.com/page1")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if body != "<html>page1</html>" {
|
||||
t.Errorf("want page1 html, got %q", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestStubClient_ReturnsRegisteredError(t *testing.T) {
|
||||
stub := browser.NewStub()
|
||||
want := errors.New("network failure")
|
||||
stub.SetError("http://example.com/bad", want)
|
||||
|
||||
_, err := stub.GetContent(context.Background(), "http://example.com/bad")
|
||||
if err == nil {
|
||||
t.Fatal("expected error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStubClient_UnknownURLReturnsError(t *testing.T) {
|
||||
stub := browser.NewStub()
|
||||
_, err := stub.GetContent(context.Background(), "http://unknown.example.com/")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for unknown URL")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStubClient_CallLog(t *testing.T) {
|
||||
stub := browser.NewStub()
|
||||
stub.SetPage("http://example.com/a", "a")
|
||||
stub.SetPage("http://example.com/b", "b")
|
||||
|
||||
stub.GetContent(context.Background(), "http://example.com/a")
|
||||
stub.GetContent(context.Background(), "http://example.com/b")
|
||||
|
||||
log := stub.CallLog()
|
||||
if len(log) != 2 || log[0] != "http://example.com/a" || log[1] != "http://example.com/b" {
|
||||
t.Errorf("unexpected call log: %v", log)
|
||||
}
|
||||
}
|
||||
225
backend/internal/config/config.go
Normal file
225
backend/internal/config/config.go
Normal file
@@ -0,0 +1,225 @@
|
||||
// Package config loads all service configuration from environment variables.
|
||||
// Both the runner and backend binaries call config.Load() at startup; each
|
||||
// uses only the sub-struct relevant to it.
|
||||
//
|
||||
// Every field has a documented default so the service starts sensibly without
|
||||
// any environment configuration (useful for local development).
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// PocketBase holds connection settings for the remote PocketBase instance.
// Populated by Load from the POCKETBASE_* environment variables.
type PocketBase struct {
	// URL is the base URL of the PocketBase instance, e.g. https://pb.libnovel.cc
	URL string
	// AdminEmail is the admin account email used for API authentication.
	AdminEmail string
	// AdminPassword is the admin account password.
	AdminPassword string
}
|
||||
|
||||
// MinIO holds connection settings for the remote MinIO / S3-compatible store.
// Populated by Load from the MINIO_* environment variables.
type MinIO struct {
	// Endpoint is the host:port of the MinIO S3 API, e.g. storage.libnovel.cc:443
	Endpoint string
	// PublicEndpoint is the browser-visible endpoint used for presigned URLs.
	// Falls back to Endpoint when empty.
	PublicEndpoint string
	// AccessKey is the MinIO access key.
	AccessKey string
	// SecretKey is the MinIO secret key.
	SecretKey string
	// UseSSL enables TLS for the internal MinIO connection.
	UseSSL bool
	// PublicUseSSL enables TLS for presigned URL generation.
	PublicUseSSL bool
	// BucketChapters is the bucket that holds chapter markdown objects.
	BucketChapters string
	// BucketAudio is the bucket that holds generated audio MP3 objects.
	BucketAudio string
	// BucketAvatars is the bucket that holds user avatar images.
	BucketAvatars string
	// BucketBrowse is the bucket that holds cached browse page snapshots (JSON).
	BucketBrowse string
}
|
||||
|
||||
// Kokoro holds connection settings for the Kokoro-FastAPI TTS service.
// Populated by Load from the KOKORO_* environment variables.
type Kokoro struct {
	// URL is the base URL of the Kokoro service, e.g. https://kokoro.libnovel.cc
	// An empty string disables TTS generation.
	URL string
	// DefaultVoice is the voice used when none is specified.
	DefaultVoice string
}

// HTTP holds settings for the HTTP server (backend only).
type HTTP struct {
	// Addr is the listen address, e.g. ":8080"
	Addr string
}

// Meilisearch holds connection settings for the Meilisearch full-text search service.
type Meilisearch struct {
	// URL is the base URL of the Meilisearch instance, e.g. http://localhost:7700
	// An empty string disables Meilisearch indexing and search.
	URL string
	// APIKey is the Meilisearch master/search API key.
	APIKey string
}

// Valkey holds connection settings for the Valkey/Redis presign URL cache.
type Valkey struct {
	// Addr is the host:port of the Valkey instance, e.g. localhost:6379
	// An empty string disables the Valkey cache (falls through to MinIO directly).
	Addr string
}
|
||||
|
||||
// Runner holds settings specific to the runner/worker binary.
// Populated by Load from the RUNNER_* environment variables; see Load for
// the concrete defaults.
type Runner struct {
	// PollInterval is how often the runner checks PocketBase for pending tasks.
	PollInterval time.Duration
	// MaxConcurrentScrape limits simultaneous book-scrape goroutines.
	MaxConcurrentScrape int
	// MaxConcurrentAudio limits simultaneous audio-generation goroutines.
	MaxConcurrentAudio int
	// WorkerID is a unique identifier for this runner instance.
	// Defaults to the system hostname.
	WorkerID string
	// Workers is the number of chapter-scraping goroutines per book.
	Workers int
	// Timeout is the per-request HTTP timeout for scraping.
	Timeout time.Duration
	// MetricsAddr is the listen address for the runner /metrics HTTP endpoint.
	// Defaults to ":9091". Set to "" to disable.
	MetricsAddr string
	// CatalogueRefreshInterval is how often the runner walks the full catalogue,
	// scrapes per-book metadata, downloads covers, and re-indexes in Meilisearch.
	// Defaults to 24h. Set to 0 to use the default.
	CatalogueRefreshInterval time.Duration
	// SkipInitialCatalogueRefresh prevents the runner from running a full
	// catalogue walk on startup. Useful for quick restarts where the catalogue
	// is already indexed and a 24h walk would be wasteful.
	// Controlled by RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true.
	SkipInitialCatalogueRefresh bool
}
|
||||
|
||||
// Config is the top-level configuration struct consumed by both binaries.
// Build it with Load; each binary reads only the sub-structs relevant to it.
type Config struct {
	PocketBase  PocketBase
	MinIO       MinIO
	Kokoro      Kokoro
	HTTP        HTTP
	Runner      Runner
	Meilisearch Meilisearch
	Valkey      Valkey
	// LogLevel is one of "debug", "info", "warn", "error".
	LogLevel string
}
|
||||
|
||||
// Load reads all configuration from environment variables and returns a
// populated Config. Missing variables fall back to documented defaults.
//
// Load never fails: empty, unparseable, or (for ints) negative values fall
// back to defaults via the env* helpers below, so callers should validate
// anything critical themselves.
func Load() Config {
	// Default the worker ID to the hostname so multiple runner instances are
	// distinguishable without explicit configuration.
	workerID, _ := os.Hostname()
	if workerID == "" {
		workerID = "runner-default"
	}

	return Config{
		LogLevel: envOr("LOG_LEVEL", "info"),

		PocketBase: PocketBase{
			URL:           envOr("POCKETBASE_URL", "http://localhost:8090"),
			AdminEmail:    envOr("POCKETBASE_ADMIN_EMAIL", "admin@libnovel.local"),
			AdminPassword: envOr("POCKETBASE_ADMIN_PASSWORD", "changeme123"),
		},

		MinIO: MinIO{
			Endpoint:       envOr("MINIO_ENDPOINT", "localhost:9000"),
			PublicEndpoint: envOr("MINIO_PUBLIC_ENDPOINT", ""), // empty → fall back to Endpoint
			AccessKey:      envOr("MINIO_ACCESS_KEY", "admin"),
			SecretKey:      envOr("MINIO_SECRET_KEY", "changeme123"),
			UseSSL:         envBool("MINIO_USE_SSL", false),
			PublicUseSSL:   envBool("MINIO_PUBLIC_USE_SSL", true),
			BucketChapters: envOr("MINIO_BUCKET_CHAPTERS", "chapters"),
			BucketAudio:    envOr("MINIO_BUCKET_AUDIO", "audio"),
			BucketAvatars:  envOr("MINIO_BUCKET_AVATARS", "avatars"),
			BucketBrowse:   envOr("MINIO_BUCKET_BROWSE", "catalogue"),
		},

		Kokoro: Kokoro{
			URL:          envOr("KOKORO_URL", ""), // empty disables TTS generation
			DefaultVoice: envOr("KOKORO_VOICE", "af_bella"),
		},

		HTTP: HTTP{
			Addr: envOr("BACKEND_HTTP_ADDR", ":8080"),
		},

		Runner: Runner{
			PollInterval:                envDuration("RUNNER_POLL_INTERVAL", 30*time.Second),
			MaxConcurrentScrape:         envInt("RUNNER_MAX_CONCURRENT_SCRAPE", 1),
			MaxConcurrentAudio:          envInt("RUNNER_MAX_CONCURRENT_AUDIO", 1),
			WorkerID:                    envOr("RUNNER_WORKER_ID", workerID),
			Workers:                     envInt("RUNNER_WORKERS", 0), // 0 → runtime.NumCPU()
			Timeout:                     envDuration("RUNNER_TIMEOUT", 90*time.Second),
			MetricsAddr:                 envOr("RUNNER_METRICS_ADDR", ":9091"),
			CatalogueRefreshInterval:    envDuration("RUNNER_CATALOGUE_REFRESH_INTERVAL", 0), // 0 → 24h (see Runner docs)
			SkipInitialCatalogueRefresh: envBool("RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH", false),
		},

		Meilisearch: Meilisearch{
			URL:    envOr("MEILI_URL", ""), // empty disables indexing and search
			APIKey: envOr("MEILI_API_KEY", ""),
		},

		Valkey: Valkey{
			Addr: envOr("VALKEY_ADDR", ""), // empty disables the presign cache
		},
	}
}
|
||||
|
||||
// ── helpers ───────────────────────────────────────────────────────────────────
|
||||
|
||||
func envOr(key, fallback string) string {
|
||||
if v := os.Getenv(key); v != "" {
|
||||
return v
|
||||
}
|
||||
return fallback
|
||||
}
|
||||
|
||||
func envBool(key string, fallback bool) bool {
|
||||
v := os.Getenv(key)
|
||||
if v == "" {
|
||||
return fallback
|
||||
}
|
||||
return strings.ToLower(v) == "true"
|
||||
}
|
||||
|
||||
func envInt(key string, fallback int) int {
|
||||
v := os.Getenv(key)
|
||||
if v == "" {
|
||||
return fallback
|
||||
}
|
||||
n, err := strconv.Atoi(v)
|
||||
if err != nil || n < 0 {
|
||||
return fallback
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
func envDuration(key string, fallback time.Duration) time.Duration {
|
||||
v := os.Getenv(key)
|
||||
if v == "" {
|
||||
return fallback
|
||||
}
|
||||
d, err := time.ParseDuration(v)
|
||||
if err != nil {
|
||||
return fallback
|
||||
}
|
||||
return d
|
||||
}
|
||||
127
backend/internal/config/config_test.go
Normal file
127
backend/internal/config/config_test.go
Normal file
@@ -0,0 +1,127 @@
|
||||
package config_test
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/config"
|
||||
)
|
||||
|
||||
func TestLoad_Defaults(t *testing.T) {
|
||||
// Unset all relevant vars so we test pure defaults.
|
||||
unset := []string{
|
||||
"LOG_LEVEL",
|
||||
"POCKETBASE_URL", "POCKETBASE_ADMIN_EMAIL", "POCKETBASE_ADMIN_PASSWORD",
|
||||
"MINIO_ENDPOINT", "MINIO_PUBLIC_ENDPOINT", "MINIO_ACCESS_KEY", "MINIO_SECRET_KEY",
|
||||
"MINIO_USE_SSL", "MINIO_PUBLIC_USE_SSL",
|
||||
"MINIO_BUCKET_CHAPTERS", "MINIO_BUCKET_AUDIO", "MINIO_BUCKET_AVATARS",
|
||||
"KOKORO_URL", "KOKORO_VOICE",
|
||||
"BACKEND_HTTP_ADDR",
|
||||
"RUNNER_POLL_INTERVAL", "RUNNER_MAX_CONCURRENT_SCRAPE", "RUNNER_MAX_CONCURRENT_AUDIO",
|
||||
"RUNNER_WORKER_ID", "RUNNER_WORKERS", "RUNNER_TIMEOUT",
|
||||
}
|
||||
for _, k := range unset {
|
||||
t.Setenv(k, "")
|
||||
}
|
||||
|
||||
cfg := config.Load()
|
||||
|
||||
if cfg.LogLevel != "info" {
|
||||
t.Errorf("LogLevel: want info, got %q", cfg.LogLevel)
|
||||
}
|
||||
if cfg.PocketBase.URL != "http://localhost:8090" {
|
||||
t.Errorf("PocketBase.URL: want http://localhost:8090, got %q", cfg.PocketBase.URL)
|
||||
}
|
||||
if cfg.MinIO.BucketChapters != "chapters" {
|
||||
t.Errorf("MinIO.BucketChapters: want chapters, got %q", cfg.MinIO.BucketChapters)
|
||||
}
|
||||
if cfg.MinIO.UseSSL != false {
|
||||
t.Errorf("MinIO.UseSSL: want false, got %v", cfg.MinIO.UseSSL)
|
||||
}
|
||||
if cfg.MinIO.PublicUseSSL != true {
|
||||
t.Errorf("MinIO.PublicUseSSL: want true, got %v", cfg.MinIO.PublicUseSSL)
|
||||
}
|
||||
if cfg.Kokoro.DefaultVoice != "af_bella" {
|
||||
t.Errorf("Kokoro.DefaultVoice: want af_bella, got %q", cfg.Kokoro.DefaultVoice)
|
||||
}
|
||||
if cfg.HTTP.Addr != ":8080" {
|
||||
t.Errorf("HTTP.Addr: want :8080, got %q", cfg.HTTP.Addr)
|
||||
}
|
||||
if cfg.Runner.PollInterval != 30*time.Second {
|
||||
t.Errorf("Runner.PollInterval: want 30s, got %v", cfg.Runner.PollInterval)
|
||||
}
|
||||
if cfg.Runner.MaxConcurrentScrape != 1 {
|
||||
t.Errorf("Runner.MaxConcurrentScrape: want 1, got %d", cfg.Runner.MaxConcurrentScrape)
|
||||
}
|
||||
if cfg.Runner.MaxConcurrentAudio != 1 {
|
||||
t.Errorf("Runner.MaxConcurrentAudio: want 1, got %d", cfg.Runner.MaxConcurrentAudio)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoad_EnvOverride(t *testing.T) {
|
||||
t.Setenv("LOG_LEVEL", "debug")
|
||||
t.Setenv("POCKETBASE_URL", "https://pb.libnovel.cc")
|
||||
t.Setenv("MINIO_USE_SSL", "true")
|
||||
t.Setenv("MINIO_PUBLIC_USE_SSL", "false")
|
||||
t.Setenv("RUNNER_POLL_INTERVAL", "1m")
|
||||
t.Setenv("RUNNER_MAX_CONCURRENT_SCRAPE", "5")
|
||||
t.Setenv("RUNNER_WORKER_ID", "homelab-01")
|
||||
t.Setenv("BACKEND_HTTP_ADDR", ":9090")
|
||||
t.Setenv("KOKORO_URL", "https://kokoro.libnovel.cc")
|
||||
|
||||
cfg := config.Load()
|
||||
|
||||
if cfg.LogLevel != "debug" {
|
||||
t.Errorf("LogLevel: want debug, got %q", cfg.LogLevel)
|
||||
}
|
||||
if cfg.PocketBase.URL != "https://pb.libnovel.cc" {
|
||||
t.Errorf("PocketBase.URL: want https://pb.libnovel.cc, got %q", cfg.PocketBase.URL)
|
||||
}
|
||||
if !cfg.MinIO.UseSSL {
|
||||
t.Error("MinIO.UseSSL: want true")
|
||||
}
|
||||
if cfg.MinIO.PublicUseSSL {
|
||||
t.Error("MinIO.PublicUseSSL: want false")
|
||||
}
|
||||
if cfg.Runner.PollInterval != time.Minute {
|
||||
t.Errorf("Runner.PollInterval: want 1m, got %v", cfg.Runner.PollInterval)
|
||||
}
|
||||
if cfg.Runner.MaxConcurrentScrape != 5 {
|
||||
t.Errorf("Runner.MaxConcurrentScrape: want 5, got %d", cfg.Runner.MaxConcurrentScrape)
|
||||
}
|
||||
if cfg.Runner.WorkerID != "homelab-01" {
|
||||
t.Errorf("Runner.WorkerID: want homelab-01, got %q", cfg.Runner.WorkerID)
|
||||
}
|
||||
if cfg.HTTP.Addr != ":9090" {
|
||||
t.Errorf("HTTP.Addr: want :9090, got %q", cfg.HTTP.Addr)
|
||||
}
|
||||
if cfg.Kokoro.URL != "https://kokoro.libnovel.cc" {
|
||||
t.Errorf("Kokoro.URL: want https://kokoro.libnovel.cc, got %q", cfg.Kokoro.URL)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoad_InvalidInt_FallsToDefault(t *testing.T) {
|
||||
t.Setenv("RUNNER_MAX_CONCURRENT_SCRAPE", "notanumber")
|
||||
cfg := config.Load()
|
||||
if cfg.Runner.MaxConcurrentScrape != 1 {
|
||||
t.Errorf("want default 1, got %d", cfg.Runner.MaxConcurrentScrape)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoad_InvalidDuration_FallsToDefault(t *testing.T) {
|
||||
t.Setenv("RUNNER_POLL_INTERVAL", "notaduration")
|
||||
cfg := config.Load()
|
||||
if cfg.Runner.PollInterval != 30*time.Second {
|
||||
t.Errorf("want default 30s, got %v", cfg.Runner.PollInterval)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoad_WorkerID_FallsToHostname(t *testing.T) {
|
||||
t.Setenv("RUNNER_WORKER_ID", "")
|
||||
cfg := config.Load()
|
||||
host, _ := os.Hostname()
|
||||
if host != "" && cfg.Runner.WorkerID != host {
|
||||
t.Errorf("want hostname %q, got %q", host, cfg.Runner.WorkerID)
|
||||
}
|
||||
}
|
||||
137
backend/internal/domain/domain.go
Normal file
137
backend/internal/domain/domain.go
Normal file
@@ -0,0 +1,137 @@
|
||||
// Package domain contains the core value types shared across all packages
|
||||
// in this module. It has zero internal imports — only the standard library.
|
||||
// Every other package imports domain; domain imports nothing from this module.
|
||||
package domain
|
||||
|
||||
import "time"
|
||||
|
||||
// ── Book types ────────────────────────────────────────────────────────────────

// BookMeta carries all bibliographic information about a novel.
type BookMeta struct {
	Slug          string   `json:"slug"` // URL-safe book identifier (e.g. "a-great-novel")
	Title         string   `json:"title"`
	Author        string   `json:"author"`
	Cover         string   `json:"cover,omitempty"` // cover image URL
	Status        string   `json:"status,omitempty"`
	Genres        []string `json:"genres,omitempty"`
	Summary       string   `json:"summary,omitempty"`
	TotalChapters int      `json:"total_chapters,omitempty"`
	SourceURL     string   `json:"source_url"` // page the book was scraped from
	Ranking       int      `json:"ranking,omitempty"`
	Rating        float64  `json:"rating,omitempty"`
	// MetaUpdated is the Unix timestamp (seconds) when the book record was last
	// updated in PocketBase. Populated on read; not sent on write (PocketBase
	// manages its own updated field).
	MetaUpdated int64 `json:"meta_updated,omitempty"`
}

// CatalogueEntry is a lightweight book reference returned by catalogue pages.
type CatalogueEntry struct {
	Slug  string `json:"slug"`
	Title string `json:"title"`
	URL   string `json:"url"`
}

// ChapterRef is a reference to a single chapter returned by chapter-list pages.
type ChapterRef struct {
	Number int    `json:"number"`
	Title  string `json:"title"`
	URL    string `json:"url"`
	Volume int    `json:"volume,omitempty"` // omitted from JSON when zero
}

// Chapter contains the fully-extracted text of a single chapter.
type Chapter struct {
	Ref  ChapterRef `json:"ref"`
	Text string     `json:"text"`
}

// RankingItem represents a single entry in the novel ranking list.
type RankingItem struct {
	Rank      int      `json:"rank"`
	Slug      string   `json:"slug"`
	Title     string   `json:"title"`
	Author    string   `json:"author,omitempty"`
	Cover     string   `json:"cover,omitempty"`
	Status    string   `json:"status,omitempty"`
	Genres    []string `json:"genres,omitempty"`
	SourceURL string   `json:"source_url,omitempty"`
	// NOTE(review): `omitempty` never fires for time.Time — encoding/json does
	// not treat struct values as empty, so a zero Updated still marshals as
	// "0001-01-01T00:00:00Z". Use *time.Time if omission is actually required.
	Updated time.Time `json:"updated,omitempty"`
}
|
||||
|
||||
// ── Storage record types ──────────────────────────────────────────────────────

// ChapterInfo is a lightweight chapter descriptor stored in the index.
type ChapterInfo struct {
	Number int    `json:"number"`
	Title  string `json:"title"`
	Date   string `json:"date,omitempty"` // free-form date string — format depends on the source; TODO confirm
}

// ReadingProgress holds a single user's reading position for one book.
type ReadingProgress struct {
	Slug      string    `json:"slug"`    // book identifier
	Chapter   int       `json:"chapter"` // current chapter position
	UpdatedAt time.Time `json:"updated_at"`
}
|
||||
|
||||
// ── Task record types ─────────────────────────────────────────────────────────

// TaskStatus enumerates the lifecycle states of any task.
type TaskStatus string

// Task lifecycle values as stored in PocketBase.
const (
	TaskStatusPending   TaskStatus = "pending"
	TaskStatusRunning   TaskStatus = "running"
	TaskStatusDone      TaskStatus = "done"
	TaskStatusFailed    TaskStatus = "failed"
	TaskStatusCancelled TaskStatus = "cancelled"
)

// ScrapeTask represents a book-scraping job stored in PocketBase.
type ScrapeTask struct {
	ID          string     `json:"id"`
	Kind        string     `json:"kind"`       // "catalogue" | "book" | "book_range"
	TargetURL   string     `json:"target_url"` // non-empty for single-book tasks
	FromChapter int        `json:"from_chapter,omitempty"`
	ToChapter   int        `json:"to_chapter,omitempty"`
	WorkerID    string     `json:"worker_id,omitempty"` // runner that claimed the task
	Status      TaskStatus `json:"status"`
	// Progress counters; mirror the fields of ScrapeResult.
	BooksFound      int       `json:"books_found"`
	ChaptersScraped int       `json:"chapters_scraped"`
	ChaptersSkipped int       `json:"chapters_skipped"`
	Errors          int       `json:"errors"`
	Started         time.Time `json:"started"`
	// NOTE(review): `omitempty` has no effect on time.Time — a zero Finished
	// still marshals. Use *time.Time if omission is actually required.
	Finished     time.Time `json:"finished,omitempty"`
	ErrorMessage string    `json:"error_message,omitempty"`
}

// ScrapeResult is the outcome reported by the runner after finishing a ScrapeTask.
type ScrapeResult struct {
	BooksFound      int    `json:"books_found"`
	ChaptersScraped int    `json:"chapters_scraped"`
	ChaptersSkipped int    `json:"chapters_skipped"`
	Errors          int    `json:"errors"`
	ErrorMessage    string `json:"error_message,omitempty"`
}

// AudioTask represents an audio-generation job stored in PocketBase.
type AudioTask struct {
	ID       string     `json:"id"`
	CacheKey string     `json:"cache_key"` // "slug/chapter/voice"
	Slug     string     `json:"slug"`
	Chapter  int        `json:"chapter"`
	Voice    string     `json:"voice"`
	WorkerID string     `json:"worker_id,omitempty"` // runner that claimed the task
	Status   TaskStatus `json:"status"`
	ErrorMessage string `json:"error_message,omitempty"`
	Started      time.Time `json:"started"`
	// NOTE(review): `omitempty` has no effect on time.Time — see ScrapeTask.Finished.
	Finished time.Time `json:"finished,omitempty"`
}

// AudioResult is the outcome reported by the runner after finishing an AudioTask.
type AudioResult struct {
	ObjectKey    string `json:"object_key,omitempty"` // MinIO key of the generated audio
	ErrorMessage string `json:"error_message,omitempty"`
}
|
||||
104
backend/internal/domain/domain_test.go
Normal file
104
backend/internal/domain/domain_test.go
Normal file
@@ -0,0 +1,104 @@
|
||||
package domain_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
func TestBookMeta_JSONRoundtrip(t *testing.T) {
|
||||
orig := domain.BookMeta{
|
||||
Slug: "a-great-novel",
|
||||
Title: "A Great Novel",
|
||||
Author: "Jane Doe",
|
||||
Cover: "https://example.com/cover.jpg",
|
||||
Status: "Ongoing",
|
||||
Genres: []string{"Fantasy", "Action"},
|
||||
Summary: "A thrilling tale.",
|
||||
TotalChapters: 120,
|
||||
SourceURL: "https://novelfire.net/book/a-great-novel",
|
||||
Ranking: 3,
|
||||
}
|
||||
|
||||
b, err := json.Marshal(orig)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
var got domain.BookMeta
|
||||
if err := json.Unmarshal(b, &got); err != nil {
|
||||
t.Fatalf("unmarshal: %v", err)
|
||||
}
|
||||
if got.Slug != orig.Slug {
|
||||
t.Errorf("Slug: want %q, got %q", orig.Slug, got.Slug)
|
||||
}
|
||||
if got.TotalChapters != orig.TotalChapters {
|
||||
t.Errorf("TotalChapters: want %d, got %d", orig.TotalChapters, got.TotalChapters)
|
||||
}
|
||||
if len(got.Genres) != len(orig.Genres) {
|
||||
t.Errorf("Genres len: want %d, got %d", len(orig.Genres), len(got.Genres))
|
||||
}
|
||||
}
|
||||
|
||||
func TestChapterRef_JSONRoundtrip(t *testing.T) {
|
||||
orig := domain.ChapterRef{Number: 42, Title: "The Battle", URL: "https://example.com/ch-42", Volume: 2}
|
||||
b, _ := json.Marshal(orig)
|
||||
var got domain.ChapterRef
|
||||
json.Unmarshal(b, &got)
|
||||
if got != orig {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRankingItem_JSONRoundtrip(t *testing.T) {
|
||||
now := time.Now().Truncate(time.Second)
|
||||
orig := domain.RankingItem{
|
||||
Rank: 1,
|
||||
Slug: "top-novel",
|
||||
Title: "Top Novel",
|
||||
SourceURL: "https://novelfire.net/book/top-novel",
|
||||
Updated: now,
|
||||
}
|
||||
b, _ := json.Marshal(orig)
|
||||
var got domain.RankingItem
|
||||
json.Unmarshal(b, &got)
|
||||
if got.Rank != orig.Rank || got.Slug != orig.Slug {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestScrapeResult_JSONRoundtrip(t *testing.T) {
|
||||
orig := domain.ScrapeResult{BooksFound: 10, ChaptersScraped: 200, ChaptersSkipped: 5, Errors: 1, ErrorMessage: "one error"}
|
||||
b, _ := json.Marshal(orig)
|
||||
var got domain.ScrapeResult
|
||||
json.Unmarshal(b, &got)
|
||||
if got != orig {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestAudioResult_JSONRoundtrip(t *testing.T) {
|
||||
orig := domain.AudioResult{ObjectKey: "audio/slug/1/af_bella.mp3"}
|
||||
b, _ := json.Marshal(orig)
|
||||
var got domain.AudioResult
|
||||
json.Unmarshal(b, &got)
|
||||
if got != orig {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTaskStatus_Values(t *testing.T) {
|
||||
cases := []domain.TaskStatus{
|
||||
domain.TaskStatusPending,
|
||||
domain.TaskStatusRunning,
|
||||
domain.TaskStatusDone,
|
||||
domain.TaskStatusFailed,
|
||||
domain.TaskStatusCancelled,
|
||||
}
|
||||
for _, s := range cases {
|
||||
if s == "" {
|
||||
t.Errorf("TaskStatus constant must not be empty")
|
||||
}
|
||||
}
|
||||
}
|
||||
124
backend/internal/httputil/httputil.go
Normal file
124
backend/internal/httputil/httputil.go
Normal file
@@ -0,0 +1,124 @@
|
||||
// Package httputil provides shared HTTP helpers used by both the runner and
|
||||
// backend binaries. It has no imports from this module — only the standard
|
||||
// library — so it is safe to import from anywhere in the dependency graph.
|
||||
package httputil
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Client is the minimal interface for making HTTP GET requests.
// *http.Client satisfies this interface.
type Client interface {
	// Do executes a single HTTP request and returns its response.
	Do(req *http.Request) (*http.Response, error)
}

// ErrMaxRetries is returned (wrapped) when RetryGet exhausts all attempts;
// detect it with errors.Is.
var ErrMaxRetries = errors.New("httputil: max retries exceeded")

// errClientError is returned by doGet for 4xx responses; it signals that the
// request should NOT be retried (the client is at fault).
var errClientError = errors.New("httputil: client error")
|
||||
|
||||
// RetryGet fetches url using client, retrying on network errors or 5xx
|
||||
// responses with exponential backoff. It returns the full response body as a
|
||||
// string on success.
|
||||
//
|
||||
// - maxAttempts: total number of attempts (must be >= 1)
|
||||
// - baseDelay: initial wait before the second attempt; doubles each retry
|
||||
func RetryGet(ctx context.Context, client Client, url string, maxAttempts int, baseDelay time.Duration) (string, error) {
|
||||
if maxAttempts < 1 {
|
||||
maxAttempts = 1
|
||||
}
|
||||
delay := baseDelay
|
||||
|
||||
var lastErr error
|
||||
for attempt := 0; attempt < maxAttempts; attempt++ {
|
||||
if attempt > 0 {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return "", ctx.Err()
|
||||
case <-time.After(delay):
|
||||
}
|
||||
delay *= 2
|
||||
}
|
||||
|
||||
body, err := doGet(ctx, client, url)
|
||||
if err == nil {
|
||||
return body, nil
|
||||
}
|
||||
lastErr = err
|
||||
|
||||
// Do not retry on context cancellation.
|
||||
if ctx.Err() != nil {
|
||||
return "", ctx.Err()
|
||||
}
|
||||
// Do not retry on 4xx — the client is at fault.
|
||||
if errors.Is(err, errClientError) {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("%w after %d attempts: %w", ErrMaxRetries, maxAttempts, lastErr)
|
||||
}
|
||||
|
||||
func doGet(ctx context.Context, client Client, url string) (string, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("build request: %w", err)
|
||||
}
|
||||
req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; libnovel-runner/2)")
|
||||
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("GET %s: %w", url, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode >= 500 {
|
||||
return "", fmt.Errorf("GET %s: server error %d", url, resp.StatusCode)
|
||||
}
|
||||
if resp.StatusCode >= 400 {
|
||||
return "", fmt.Errorf("%w: GET %s: client error %d", errClientError, url, resp.StatusCode)
|
||||
}
|
||||
|
||||
raw, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("read body %s: %w", url, err)
|
||||
}
|
||||
return string(raw), nil
|
||||
}
|
||||
|
||||
// WriteJSON writes v as JSON to w with the given HTTP status code and sets the
|
||||
// Content-Type header to application/json.
|
||||
func WriteJSON(w http.ResponseWriter, status int, v any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(status)
|
||||
_ = json.NewEncoder(w).Encode(v)
|
||||
}
|
||||
|
||||
// WriteError writes a JSON error object {"error": msg} with the given status.
|
||||
func WriteError(w http.ResponseWriter, status int, msg string) {
|
||||
WriteJSON(w, status, map[string]string{"error": msg})
|
||||
}
|
||||
|
||||
// maxBodyBytes is the limit applied by DecodeJSON to prevent unbounded reads.
|
||||
const maxBodyBytes = 1 << 20 // 1 MiB
|
||||
|
||||
// DecodeJSON decodes a JSON request body into v. It enforces a 1 MiB size
|
||||
// limit and returns a descriptive error on any failure.
|
||||
func DecodeJSON(r *http.Request, v any) error {
|
||||
r.Body = http.MaxBytesReader(nil, r.Body, maxBodyBytes)
|
||||
dec := json.NewDecoder(r.Body)
|
||||
dec.DisallowUnknownFields()
|
||||
if err := dec.Decode(v); err != nil {
|
||||
return fmt.Errorf("decode JSON body: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
181
backend/internal/httputil/httputil_test.go
Normal file
181
backend/internal/httputil/httputil_test.go
Normal file
@@ -0,0 +1,181 @@
|
||||
package httputil_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/httputil"
|
||||
)
|
||||
|
||||
// ── RetryGet ──────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestRetryGet_ImmediateSuccess(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Write([]byte("hello"))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
body, err := httputil.RetryGet(context.Background(), srv.Client(), srv.URL, 3, time.Millisecond)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if body != "hello" {
|
||||
t.Errorf("want hello, got %q", body)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetryGet_RetriesOn5xx(t *testing.T) {
|
||||
calls := 0
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
calls++
|
||||
if calls < 3 {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
return
|
||||
}
|
||||
w.Write([]byte("ok"))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
body, err := httputil.RetryGet(context.Background(), srv.Client(), srv.URL, 5, time.Millisecond)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if body != "ok" {
|
||||
t.Errorf("want ok, got %q", body)
|
||||
}
|
||||
if calls != 3 {
|
||||
t.Errorf("want 3 calls, got %d", calls)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetryGet_MaxAttemptsExceeded(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
_, err := httputil.RetryGet(context.Background(), srv.Client(), srv.URL, 3, time.Millisecond)
|
||||
if err == nil {
|
||||
t.Fatal("expected error, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetryGet_ContextCancelDuringBackoff(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
// Cancel after first failed attempt hits the backoff wait.
|
||||
go func() { time.Sleep(5 * time.Millisecond); cancel() }()
|
||||
|
||||
_, err := httputil.RetryGet(ctx, srv.Client(), srv.URL, 10, 500*time.Millisecond)
|
||||
if err == nil {
|
||||
t.Fatal("expected context cancellation error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestRetryGet_NoRetryOn4xx(t *testing.T) {
|
||||
calls := 0
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
calls++
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
_, err := httputil.RetryGet(context.Background(), srv.Client(), srv.URL, 5, time.Millisecond)
|
||||
if err == nil {
|
||||
t.Fatal("expected error for 404")
|
||||
}
|
||||
// 4xx is NOT retried — should be exactly 1 call.
|
||||
if calls != 1 {
|
||||
t.Errorf("want 1 call for 4xx, got %d", calls)
|
||||
}
|
||||
}
|
||||
|
||||
// ── WriteJSON ─────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestWriteJSON_SetsHeadersAndStatus(t *testing.T) {
|
||||
rr := httptest.NewRecorder()
|
||||
httputil.WriteJSON(rr, http.StatusCreated, map[string]string{"key": "val"})
|
||||
|
||||
if rr.Code != http.StatusCreated {
|
||||
t.Errorf("status: want 201, got %d", rr.Code)
|
||||
}
|
||||
if ct := rr.Header().Get("Content-Type"); ct != "application/json" {
|
||||
t.Errorf("Content-Type: want application/json, got %q", ct)
|
||||
}
|
||||
var got map[string]string
|
||||
if err := json.NewDecoder(rr.Body).Decode(&got); err != nil {
|
||||
t.Fatalf("decode body: %v", err)
|
||||
}
|
||||
if got["key"] != "val" {
|
||||
t.Errorf("body key: want val, got %q", got["key"])
|
||||
}
|
||||
}
|
||||
|
||||
// ── WriteError ────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestWriteError_Format(t *testing.T) {
|
||||
rr := httptest.NewRecorder()
|
||||
httputil.WriteError(rr, http.StatusBadRequest, "bad input")
|
||||
|
||||
if rr.Code != http.StatusBadRequest {
|
||||
t.Errorf("status: want 400, got %d", rr.Code)
|
||||
}
|
||||
var got map[string]string
|
||||
json.NewDecoder(rr.Body).Decode(&got)
|
||||
if got["error"] != "bad input" {
|
||||
t.Errorf("error field: want bad input, got %q", got["error"])
|
||||
}
|
||||
}
|
||||
|
||||
// ── DecodeJSON ────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestDecodeJSON_HappyPath(t *testing.T) {
|
||||
body := `{"name":"test","value":42}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(body))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
var payload struct {
|
||||
Name string `json:"name"`
|
||||
Value int `json:"value"`
|
||||
}
|
||||
if err := httputil.DecodeJSON(req, &payload); err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if payload.Name != "test" || payload.Value != 42 {
|
||||
t.Errorf("unexpected payload: %+v", payload)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeJSON_UnknownFieldReturnsError(t *testing.T) {
|
||||
body := `{"name":"test","unknown_field":"boom"}`
|
||||
req := httptest.NewRequest(http.MethodPost, "/", strings.NewReader(body))
|
||||
|
||||
var payload struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
if err := httputil.DecodeJSON(req, &payload); err == nil {
|
||||
t.Fatal("expected error for unknown field, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeJSON_BodyTooLarge(t *testing.T) {
|
||||
// Build a body > 1 MiB.
|
||||
big := bytes.Repeat([]byte("a"), 2<<20)
|
||||
req := httptest.NewRequest(http.MethodPost, "/", bytes.NewReader(big))
|
||||
|
||||
var payload map[string]any
|
||||
if err := httputil.DecodeJSON(req, &payload); err == nil {
|
||||
t.Fatal("expected error for oversized body, got nil")
|
||||
}
|
||||
}
|
||||
160
backend/internal/kokoro/client.go
Normal file
160
backend/internal/kokoro/client.go
Normal file
@@ -0,0 +1,160 @@
|
||||
// Package kokoro provides a client for the Kokoro-FastAPI TTS service.
|
||||
//
|
||||
// The Kokoro API is an OpenAI-compatible audio speech API that returns a
|
||||
// download link (X-Download-Path header) instead of streaming audio directly.
|
||||
// GenerateAudio handles the two-step flow: POST /v1/audio/speech → GET /v1/download/{file}.
|
||||
package kokoro
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Client is the interface for interacting with the Kokoro TTS service.
// Implementations are created with New.
type Client interface {
	// GenerateAudio synthesises text using voice and returns raw MP3 bytes.
	// An empty voice selects the service default (af_bella).
	GenerateAudio(ctx context.Context, text, voice string) ([]byte, error)

	// ListVoices returns the available voice IDs. Falls back to an empty slice
	// on error — callers should treat an empty list as "service unavailable".
	ListVoices(ctx context.Context) ([]string, error)
}
|
||||
|
||||
// httpClient is the concrete Kokoro HTTP client.
type httpClient struct {
	baseURL string       // service root with any trailing slash stripped (see New)
	http    *http.Client // dedicated client with a long timeout for slow TTS jobs
}
|
||||
|
||||
// New returns a Kokoro Client targeting baseURL (e.g. "https://kokoro.example.com").
|
||||
func New(baseURL string) Client {
|
||||
return &httpClient{
|
||||
baseURL: strings.TrimRight(baseURL, "/"),
|
||||
http: &http.Client{Timeout: 10 * time.Minute},
|
||||
}
|
||||
}
|
||||
|
||||
// GenerateAudio calls POST /v1/audio/speech (return_download_link=true) and then
// downloads the resulting MP3 from GET /v1/download/{filename}.
// An empty voice falls back to "af_bella"; empty text is rejected up front.
func (c *httpClient) GenerateAudio(ctx context.Context, text, voice string) ([]byte, error) {
	if text == "" {
		return nil, fmt.Errorf("kokoro: empty text")
	}
	if voice == "" {
		voice = "af_bella"
	}

	// ── Step 1: request generation ────────────────────────────────────────────
	reqBody, err := json.Marshal(map[string]any{
		"model":                "kokoro",
		"input":                text,
		"voice":                voice,
		"response_format":      "mp3",
		"speed":                1.0,
		"stream":               false,
		// Ask the service for a download link instead of streamed audio;
		// the link arrives in the X-Download-Path response header.
		"return_download_link": true,
	})
	if err != nil {
		return nil, fmt.Errorf("kokoro: marshal request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost,
		c.baseURL+"/v1/audio/speech", bytes.NewReader(reqBody))
	if err != nil {
		return nil, fmt.Errorf("kokoro: build speech request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("kokoro: speech request: %w", err)
	}
	defer resp.Body.Close()
	// Only the header matters here; drain the body so the keep-alive
	// connection can be reused.
	_, _ = io.Copy(io.Discard, resp.Body)

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("kokoro: speech returned %d", resp.StatusCode)
	}

	dlPath := resp.Header.Get("X-Download-Path")
	if dlPath == "" {
		return nil, fmt.Errorf("kokoro: no X-Download-Path header in response")
	}
	// The header may contain a full path; keep only the final segment.
	filename := dlPath
	if idx := strings.LastIndex(dlPath, "/"); idx >= 0 {
		filename = dlPath[idx+1:]
	}
	if filename == "" {
		return nil, fmt.Errorf("kokoro: empty filename in X-Download-Path: %q", dlPath)
	}

	// ── Step 2: download the generated file ───────────────────────────────────
	dlURL := c.baseURL + "/v1/download/" + filename
	dlReq, err := http.NewRequestWithContext(ctx, http.MethodGet, dlURL, nil)
	if err != nil {
		return nil, fmt.Errorf("kokoro: build download request: %w", err)
	}

	dlResp, err := c.http.Do(dlReq)
	if err != nil {
		return nil, fmt.Errorf("kokoro: download request: %w", err)
	}
	defer dlResp.Body.Close()

	if dlResp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("kokoro: download returned %d", dlResp.StatusCode)
	}

	data, err := io.ReadAll(dlResp.Body)
	if err != nil {
		return nil, fmt.Errorf("kokoro: read download body: %w", err)
	}
	return data, nil
}
|
||||
|
||||
// ListVoices calls GET /v1/audio/voices and returns the list of voice IDs.
|
||||
func (c *httpClient) ListVoices(ctx context.Context) ([]string, error) {
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet,
|
||||
c.baseURL+"/v1/audio/voices", nil)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("kokoro: build voices request: %w", err)
|
||||
}
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("kokoro: voices request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
_, _ = io.Copy(io.Discard, resp.Body)
|
||||
return nil, fmt.Errorf("kokoro: voices returned %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var result struct {
|
||||
Voices []string `json:"voices"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
|
||||
return nil, fmt.Errorf("kokoro: decode voices response: %w", err)
|
||||
}
|
||||
return result.Voices, nil
|
||||
}
|
||||
|
||||
// VoiceSampleKey returns the MinIO object key for a voice sample MP3.
// Key: _voice-samples/{voice}.mp3, where every character outside
// [A-Za-z0-9_-] in voice is replaced with an underscore.
func VoiceSampleKey(voice string) string {
	var sanitised strings.Builder
	sanitised.Grow(len(voice))
	for _, r := range voice {
		switch {
		case r >= 'a' && r <= 'z',
			r >= 'A' && r <= 'Z',
			r >= '0' && r <= '9',
			r == '_', r == '-':
			sanitised.WriteRune(r)
		default:
			sanitised.WriteRune('_')
		}
	}
	return "_voice-samples/" + sanitised.String() + ".mp3"
}
|
||||
291
backend/internal/kokoro/client_test.go
Normal file
291
backend/internal/kokoro/client_test.go
Normal file
@@ -0,0 +1,291 @@
|
||||
package kokoro_test
|
||||
|
||||
import (
	"context"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
	"testing"

	"github.com/libnovel/backend/internal/kokoro"
)
|
||||
|
||||
// ── VoiceSampleKey ────────────────────────────────────────────────────────────
|
||||
|
||||
func TestVoiceSampleKey(t *testing.T) {
|
||||
tests := []struct {
|
||||
voice string
|
||||
want string
|
||||
}{
|
||||
{"af_bella", "_voice-samples/af_bella.mp3"},
|
||||
{"am_echo", "_voice-samples/am_echo.mp3"},
|
||||
{"voice with spaces", "_voice-samples/voice_with_spaces.mp3"},
|
||||
{"special!@#chars", "_voice-samples/special___chars.mp3"},
|
||||
{"", "_voice-samples/.mp3"},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.voice, func(t *testing.T) {
|
||||
got := kokoro.VoiceSampleKey(tt.voice)
|
||||
if got != tt.want {
|
||||
t.Errorf("VoiceSampleKey(%q) = %q, want %q", tt.voice, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ── GenerateAudio ─────────────────────────────────────────────────────────────
|
||||
|
||||
func TestGenerateAudio_EmptyText(t *testing.T) {
|
||||
srv := httptest.NewServer(http.NotFoundHandler())
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.GenerateAudio(context.Background(), "", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for empty text, got nil")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "empty text") {
|
||||
t.Errorf("expected 'empty text' in error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_DefaultVoice(t *testing.T) {
|
||||
// Tracks that the voice defaults to af_bella when empty.
|
||||
var capturedBody string
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/speech" {
|
||||
buf := make([]byte, 512)
|
||||
n, _ := r.Body.Read(buf)
|
||||
capturedBody = string(buf[:n])
|
||||
w.Header().Set("X-Download-Path", "/download/test_file.mp3")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(r.URL.Path, "/v1/download/") {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, _ = w.Write([]byte("fake-mp3-data"))
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
data, err := c.GenerateAudio(context.Background(), "hello world", "")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if string(data) != "fake-mp3-data" {
|
||||
t.Errorf("unexpected data: %q", string(data))
|
||||
}
|
||||
if !strings.Contains(capturedBody, `"af_bella"`) {
|
||||
t.Errorf("expected default voice af_bella in request body, got: %s", capturedBody)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_SpeechNon200(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/speech" {
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.GenerateAudio(context.Background(), "text", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for non-200 speech response")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "500") {
|
||||
t.Errorf("expected 500 in error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_NoDownloadPathHeader(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/speech" {
|
||||
// No X-Download-Path header
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.GenerateAudio(context.Background(), "text", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for missing X-Download-Path")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "X-Download-Path") {
|
||||
t.Errorf("expected X-Download-Path in error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_DownloadFails(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/speech" {
|
||||
w.Header().Set("X-Download-Path", "/v1/download/speech.mp3")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return
|
||||
}
|
||||
if strings.HasPrefix(r.URL.Path, "/v1/download/") {
|
||||
w.WriteHeader(http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.GenerateAudio(context.Background(), "text", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for failed download")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "404") {
|
||||
t.Errorf("expected 404 in error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_FullPath(t *testing.T) {
|
||||
// X-Download-Path with a full path: extract just filename.
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/speech" {
|
||||
w.Header().Set("X-Download-Path", "/some/nested/path/audio_abc123.mp3")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return
|
||||
}
|
||||
if r.URL.Path == "/v1/download/audio_abc123.mp3" {
|
||||
_, _ = w.Write([]byte("audio-bytes"))
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
data, err := c.GenerateAudio(context.Background(), "text", "af_bella")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if string(data) != "audio-bytes" {
|
||||
t.Errorf("unexpected data: %q", string(data))
|
||||
}
|
||||
}
|
||||
|
||||
func TestGenerateAudio_ContextCancelled(t *testing.T) {
|
||||
// Server that hangs — context should cancel before we get a response.
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Never respond.
|
||||
select {}
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
cancel() // cancel immediately
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.GenerateAudio(ctx, "text", "af_bella")
|
||||
if err == nil {
|
||||
t.Fatal("expected error for cancelled context")
|
||||
}
|
||||
}
|
||||
|
||||
// ── ListVoices ────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestListVoices_Success(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/voices" {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_, _ = w.Write([]byte(`{"voices":["af_bella","am_adam","bf_emma"]}`))
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
voices, err := c.ListVoices(context.Background())
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if len(voices) != 3 {
|
||||
t.Errorf("expected 3 voices, got %d: %v", len(voices), voices)
|
||||
}
|
||||
if voices[0] != "af_bella" {
|
||||
t.Errorf("expected first voice to be af_bella, got %q", voices[0])
|
||||
}
|
||||
}
|
||||
|
||||
func TestListVoices_Non200(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusServiceUnavailable)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.ListVoices(context.Background())
|
||||
if err == nil {
|
||||
t.Fatal("expected error for non-200 response")
|
||||
}
|
||||
if !strings.Contains(err.Error(), "503") {
|
||||
t.Errorf("expected 503 in error, got: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestListVoices_MalformedJSON(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_, _ = w.Write([]byte(`not-json`))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
_, err := c.ListVoices(context.Background())
|
||||
if err == nil {
|
||||
t.Fatal("expected error for malformed JSON")
|
||||
}
|
||||
}
|
||||
|
||||
func TestListVoices_EmptyVoices(t *testing.T) {
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_, _ = w.Write([]byte(`{"voices":[]}`))
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL)
|
||||
voices, err := c.ListVoices(context.Background())
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if len(voices) != 0 {
|
||||
t.Errorf("expected 0 voices, got %d", len(voices))
|
||||
}
|
||||
}
|
||||
|
||||
// ── New ───────────────────────────────────────────────────────────────────────
|
||||
|
||||
func TestNew_TrailingSlashStripped(t *testing.T) {
|
||||
// Verify that a trailing slash on baseURL doesn't produce double-slash paths.
|
||||
srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
if r.URL.Path == "/v1/audio/voices" {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_, _ = w.Write([]byte(`{"voices":["af_bella"]}`))
|
||||
return
|
||||
}
|
||||
http.NotFound(w, r)
|
||||
}))
|
||||
defer srv.Close()
|
||||
|
||||
c := kokoro.New(srv.URL + "/") // trailing slash
|
||||
voices, err := c.ListVoices(context.Background())
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if len(voices) == 0 {
|
||||
t.Error("expected at least one voice")
|
||||
}
|
||||
}
|
||||
327
backend/internal/meili/client.go
Normal file
327
backend/internal/meili/client.go
Normal file
@@ -0,0 +1,327 @@
|
||||
// Package meili provides a thin Meilisearch client for indexing and searching
|
||||
// locally scraped books.
|
||||
//
|
||||
// Index:
|
||||
// - Name: "books"
|
||||
// - Primary key: "slug"
|
||||
// - Searchable attributes: title, author, genres, summary
|
||||
// - Filterable attributes: status, genres
|
||||
// - Sortable attributes: rank, rating, total_chapters, meta_updated
|
||||
//
|
||||
// The client is intentionally simple: UpsertBook and Search only. All
|
||||
// Meilisearch-specific details (index management, attribute configuration)
|
||||
// are handled once in Configure(), called at startup.
|
||||
package meili
|
||||
|
||||
import (
	"context"
	"encoding/json"
	"fmt"
	"sort"
	"strings"

	"github.com/libnovel/backend/internal/domain"
	"github.com/meilisearch/meilisearch-go"
)
|
||||
|
||||
const indexName = "books"
|
||||
|
||||
// Client is the interface for Meilisearch operations used by runner and backend.
|
||||
type Client interface {
|
||||
// UpsertBook adds or updates a book document in the search index.
|
||||
UpsertBook(ctx context.Context, book domain.BookMeta) error
|
||||
// BookExists reports whether a book with the given slug is already in the
|
||||
// index. Used by the catalogue refresh to skip re-indexing known books.
|
||||
BookExists(ctx context.Context, slug string) bool
|
||||
// Search returns up to limit books matching query.
|
||||
Search(ctx context.Context, query string, limit int) ([]domain.BookMeta, error)
|
||||
// Catalogue queries books with optional filters, sort, and pagination.
|
||||
// Returns books, the total hit count for pagination, and a FacetResult
|
||||
// with available genre and status values from the index.
|
||||
Catalogue(ctx context.Context, q CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error)
|
||||
}
|
||||
|
||||
// CatalogueQuery carries the parameters accepted by the /api/catalogue
// endpoint.
type CatalogueQuery struct {
	Q      string // optional full-text query; empty means plain browse
	Genre  string // genre filter; "" or "all" disables it
	Status string // status filter (e.g. "ongoing", "completed"); "" or "all" disables it
	Sort   string // sort token: "popular", "new", "update", "top-rated", "rank", or ""
	Page   int    // page number, starting at 1
	Limit  int    // items per page; defaults to 20 when <= 0
}
|
||||
|
||||
// FacetResult lists the filter values that actually occur in the index.
// Each slice is sorted alphabetically and contains only present values.
type FacetResult struct {
	Genres   []string // distinct genre values
	Statuses []string // distinct status values
}
|
||||
|
||||
// MeiliClient wraps the meilisearch-go SDK.
|
||||
type MeiliClient struct {
|
||||
idx meilisearch.IndexManager
|
||||
}
|
||||
|
||||
// New creates a MeiliClient. Call Configure() once at startup to ensure the
|
||||
// index exists and has the correct attribute settings.
|
||||
func New(host, apiKey string) *MeiliClient {
|
||||
cli := meilisearch.New(host, meilisearch.WithAPIKey(apiKey))
|
||||
return &MeiliClient{idx: cli.Index(indexName)}
|
||||
}
|
||||
|
||||
// Configure creates the index if absent and sets searchable/filterable
|
||||
// attributes. It is idempotent — safe to call on every startup.
|
||||
func Configure(host, apiKey string) error {
|
||||
cli := meilisearch.New(host, meilisearch.WithAPIKey(apiKey))
|
||||
|
||||
// Create index with primary key. Returns 202 if exists — ignore.
|
||||
task, err := cli.CreateIndex(&meilisearch.IndexConfig{
|
||||
Uid: indexName,
|
||||
PrimaryKey: "slug",
|
||||
})
|
||||
if err != nil {
|
||||
// 400 "index_already_exists" is not an error here; the SDK returns
|
||||
// an error with Code "index_already_exists" which we can ignore.
|
||||
// Any other error is fatal.
|
||||
if apiErr, ok := err.(*meilisearch.Error); ok && apiErr.MeilisearchApiError.Code == "index_already_exists" {
|
||||
// already exists — continue
|
||||
} else {
|
||||
return fmt.Errorf("meili: create index: %w", err)
|
||||
}
|
||||
} else {
|
||||
_ = task // task is async; we don't wait for it
|
||||
}
|
||||
|
||||
idx := cli.Index(indexName)
|
||||
|
||||
searchable := []string{"title", "author", "genres", "summary"}
|
||||
if _, err := idx.UpdateSearchableAttributes(&searchable); err != nil {
|
||||
return fmt.Errorf("meili: update searchable attributes: %w", err)
|
||||
}
|
||||
|
||||
filterable := []interface{}{"status", "genres"}
|
||||
if _, err := idx.UpdateFilterableAttributes(&filterable); err != nil {
|
||||
return fmt.Errorf("meili: update filterable attributes: %w", err)
|
||||
}
|
||||
|
||||
sortable := []string{"rank", "rating", "total_chapters", "meta_updated"}
|
||||
if _, err := idx.UpdateSortableAttributes(&sortable); err != nil {
|
||||
return fmt.Errorf("meili: update sortable attributes: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// bookDoc is the document shape stored in the Meilisearch "books" index.
// JSON field names must stay in sync with the attribute names used in
// Configure() (searchable/filterable/sortable lists).
type bookDoc struct {
	Slug          string   `json:"slug"`
	Title         string   `json:"title"`
	Author        string   `json:"author"`
	Cover         string   `json:"cover"`
	Status        string   `json:"status"`
	Genres        []string `json:"genres"`
	Summary       string   `json:"summary"`
	TotalChapters int      `json:"total_chapters"`
	SourceURL     string   `json:"source_url"`
	Rank          int      `json:"rank"`
	Rating        float64  `json:"rating"`
	// MetaUpdated is the Unix timestamp (seconds) of the last PocketBase
	// update; it backs the sort=update ("recently updated") ordering.
	MetaUpdated int64 `json:"meta_updated"`
}
|
||||
|
||||
func toDoc(b domain.BookMeta) bookDoc {
|
||||
return bookDoc{
|
||||
Slug: b.Slug,
|
||||
Title: b.Title,
|
||||
Author: b.Author,
|
||||
Cover: b.Cover,
|
||||
Status: b.Status,
|
||||
Genres: b.Genres,
|
||||
Summary: b.Summary,
|
||||
TotalChapters: b.TotalChapters,
|
||||
SourceURL: b.SourceURL,
|
||||
Rank: b.Ranking,
|
||||
Rating: b.Rating,
|
||||
MetaUpdated: b.MetaUpdated,
|
||||
}
|
||||
}
|
||||
|
||||
func fromDoc(d bookDoc) domain.BookMeta {
|
||||
return domain.BookMeta{
|
||||
Slug: d.Slug,
|
||||
Title: d.Title,
|
||||
Author: d.Author,
|
||||
Cover: d.Cover,
|
||||
Status: d.Status,
|
||||
Genres: d.Genres,
|
||||
Summary: d.Summary,
|
||||
TotalChapters: d.TotalChapters,
|
||||
SourceURL: d.SourceURL,
|
||||
Ranking: d.Rank,
|
||||
Rating: d.Rating,
|
||||
MetaUpdated: d.MetaUpdated,
|
||||
}
|
||||
}
|
||||
|
||||
// UpsertBook adds or replaces the book document in Meilisearch. The operation
|
||||
// is fire-and-forget (Meilisearch processes tasks asynchronously).
|
||||
func (c *MeiliClient) UpsertBook(_ context.Context, book domain.BookMeta) error {
|
||||
docs := []bookDoc{toDoc(book)}
|
||||
pk := "slug"
|
||||
if _, err := c.idx.AddDocuments(docs, &meilisearch.DocumentOptions{PrimaryKey: &pk}); err != nil {
|
||||
return fmt.Errorf("meili: upsert book %q: %w", book.Slug, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// BookExists reports whether the slug is already present in the index.
|
||||
// It fetches the document by primary key; a 404 or any error is treated as
|
||||
// "not present" (safe default: re-index rather than silently skip).
|
||||
func (c *MeiliClient) BookExists(_ context.Context, slug string) bool {
|
||||
var doc bookDoc
|
||||
err := c.idx.GetDocument(slug, nil, &doc)
|
||||
return err == nil && doc.Slug != ""
|
||||
}
|
||||
|
||||
// Search returns books matching query, up to limit results.
|
||||
func (c *MeiliClient) Search(_ context.Context, query string, limit int) ([]domain.BookMeta, error) {
|
||||
if limit <= 0 {
|
||||
limit = 20
|
||||
}
|
||||
res, err := c.idx.Search(query, &meilisearch.SearchRequest{
|
||||
Limit: int64(limit),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("meili: search %q: %w", query, err)
|
||||
}
|
||||
|
||||
books := make([]domain.BookMeta, 0, len(res.Hits))
|
||||
for _, hit := range res.Hits {
|
||||
// Hit is map[string]json.RawMessage — unmarshal directly into bookDoc.
|
||||
var doc bookDoc
|
||||
raw, err := json.Marshal(hit)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if err := json.Unmarshal(raw, &doc); err != nil {
|
||||
continue
|
||||
}
|
||||
books = append(books, fromDoc(doc))
|
||||
}
|
||||
return books, nil
|
||||
}
|
||||
|
||||
// Catalogue queries books with optional full-text search, genre/status filters,
|
||||
// sort order, and pagination. Returns matching books, the total estimate, and
|
||||
// a FacetResult containing available genre and status values from the index.
|
||||
func (c *MeiliClient) Catalogue(_ context.Context, q CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error) {
|
||||
if q.Limit <= 0 {
|
||||
q.Limit = 20
|
||||
}
|
||||
if q.Page <= 0 {
|
||||
q.Page = 1
|
||||
}
|
||||
|
||||
req := &meilisearch.SearchRequest{
|
||||
Limit: int64(q.Limit),
|
||||
Offset: int64((q.Page - 1) * q.Limit),
|
||||
// Request facet distribution so the UI can build filter options
|
||||
// dynamically without hardcoding genre/status lists.
|
||||
Facets: []string{"genres", "status"},
|
||||
}
|
||||
|
||||
// Build filter
|
||||
var filters []string
|
||||
if q.Genre != "" && q.Genre != "all" {
|
||||
filters = append(filters, fmt.Sprintf("genres = %q", q.Genre))
|
||||
}
|
||||
if q.Status != "" && q.Status != "all" {
|
||||
filters = append(filters, fmt.Sprintf("status = %q", q.Status))
|
||||
}
|
||||
if len(filters) > 0 {
|
||||
req.Filter = strings.Join(filters, " AND ")
|
||||
}
|
||||
|
||||
// Map UI sort tokens to Meilisearch sort expressions.
|
||||
switch q.Sort {
|
||||
case "rank":
|
||||
req.Sort = []string{"rank:asc"}
|
||||
case "top-rated":
|
||||
req.Sort = []string{"rating:desc"}
|
||||
case "new":
|
||||
req.Sort = []string{"total_chapters:desc"}
|
||||
case "update":
|
||||
req.Sort = []string{"meta_updated:desc"}
|
||||
// "popular" and "" → relevance (no explicit sort)
|
||||
}
|
||||
|
||||
res, err := c.idx.Search(q.Q, req)
|
||||
if err != nil {
|
||||
return nil, 0, FacetResult{}, fmt.Errorf("meili: catalogue query: %w", err)
|
||||
}
|
||||
|
||||
books := make([]domain.BookMeta, 0, len(res.Hits))
|
||||
for _, hit := range res.Hits {
|
||||
var doc bookDoc
|
||||
raw, err := json.Marshal(hit)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if err := json.Unmarshal(raw, &doc); err != nil {
|
||||
continue
|
||||
}
|
||||
books = append(books, fromDoc(doc))
|
||||
}
|
||||
|
||||
facets := parseFacets(res.FacetDistribution)
|
||||
return books, res.EstimatedTotalHits, facets, nil
|
||||
}
|
||||
|
||||
// parseFacets extracts sorted genre and status slices from a Meilisearch
|
||||
// facetDistribution raw JSON value.
|
||||
// The JSON shape is: {"genres":{"fantasy":12,"action":5},"status":{"ongoing":7}}
|
||||
func parseFacets(raw json.RawMessage) FacetResult {
|
||||
var result FacetResult
|
||||
if len(raw) == 0 {
|
||||
return result
|
||||
}
|
||||
var dist map[string]map[string]int64
|
||||
if err := json.Unmarshal(raw, &dist); err != nil {
|
||||
return result
|
||||
}
|
||||
if m, ok := dist["genres"]; ok {
|
||||
for k := range m {
|
||||
result.Genres = append(result.Genres, k)
|
||||
}
|
||||
sortStrings(result.Genres)
|
||||
}
|
||||
if m, ok := dist["status"]; ok {
|
||||
for k := range m {
|
||||
result.Statuses = append(result.Statuses, k)
|
||||
}
|
||||
sortStrings(result.Statuses)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// sortStrings sorts s in place in ascending lexicographic order.
//
// Delegates to the standard library rather than the previous hand-rolled
// insertion sort: sort.Strings is O(n log n), well tested, and clearer.
func sortStrings(s []string) {
	sort.Strings(s)
}
|
||||
|
||||
// NoopClient is a no-op Client used when Meilisearch is not configured.
|
||||
type NoopClient struct{}
|
||||
|
||||
func (NoopClient) UpsertBook(_ context.Context, _ domain.BookMeta) error { return nil }
|
||||
func (NoopClient) BookExists(_ context.Context, _ string) bool { return false }
|
||||
func (NoopClient) Search(_ context.Context, _ string, _ int) ([]domain.BookMeta, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (NoopClient) Catalogue(_ context.Context, _ CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error) {
|
||||
return nil, 0, FacetResult{}, nil
|
||||
}
|
||||
@@ -7,13 +7,12 @@ import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/libnovel/scraper/internal/scraper"
|
||||
"github.com/libnovel/backend/internal/scraper"
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
// ResolveURL returns an absolute URL. If href is already absolute it is
|
||||
// returned unchanged. Otherwise it is resolved against base using standard
|
||||
// URL resolution (handles relative paths, absolute paths, etc.).
|
||||
// returned unchanged. Otherwise it is resolved against base.
|
||||
func ResolveURL(base, href string) string {
|
||||
if strings.HasPrefix(href, "http://") || strings.HasPrefix(href, "https://") {
|
||||
return href
|
||||
@@ -77,9 +76,6 @@ func AttrVal(n *html.Node, key string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
// attrVal is an unexported alias kept for internal use within this package.
|
||||
func attrVal(n *html.Node, key string) string { return AttrVal(n, key) }
|
||||
|
||||
// TextContent returns the concatenated text content of all descendant text nodes.
|
||||
func TextContent(n *html.Node) string {
|
||||
var sb strings.Builder
|
||||
@@ -96,9 +92,6 @@ func TextContent(n *html.Node) string {
|
||||
return strings.TrimSpace(sb.String())
|
||||
}
|
||||
|
||||
// textContent is an unexported alias kept for internal use within this package.
|
||||
func textContent(n *html.Node) string { return TextContent(n) }
|
||||
|
||||
// FindFirst returns the first node matching sel within root.
|
||||
func FindFirst(root *html.Node, sel scraper.Selector) *html.Node {
|
||||
var found *html.Node
|
||||
@@ -139,9 +132,9 @@ func FindAll(root *html.Node, sel scraper.Selector) []*html.Node {
|
||||
// If sel.Attr is set the attribute value is returned; otherwise the inner text.
|
||||
func ExtractText(n *html.Node, sel scraper.Selector) string {
|
||||
if sel.Attr != "" {
|
||||
return attrVal(n, sel.Attr)
|
||||
return AttrVal(n, sel.Attr)
|
||||
}
|
||||
return textContent(n)
|
||||
return TextContent(n)
|
||||
}
|
||||
|
||||
// ExtractFirst locates the first match in root and returns its text/attr value.
|
||||
@@ -165,29 +158,15 @@ func ExtractAll(root *html.Node, sel scraper.Selector) []string {
|
||||
return out
|
||||
}
|
||||
|
||||
// InnerHTML returns the serialized inner HTML of node n.
|
||||
func InnerHTML(n *html.Node) string {
|
||||
var sb strings.Builder
|
||||
for c := n.FirstChild; c != nil; c = c.NextSibling {
|
||||
_ = html.Render(&sb, c)
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// NodeToMarkdown converts the children of an HTML node to a plain-text/Markdown
|
||||
// representation suitable for chapter storage. Block elements become newlines;
|
||||
// inline elements are inlined. Runs of more than one blank line are collapsed
|
||||
// to a single blank line.
|
||||
// representation suitable for chapter storage.
|
||||
func NodeToMarkdown(n *html.Node) string {
|
||||
var sb strings.Builder
|
||||
nodeToMD(n, &sb)
|
||||
// Collapse 3+ consecutive newlines (i.e. more than one blank line) to 2.
|
||||
out := multiBlankLine.ReplaceAllString(sb.String(), "\n\n")
|
||||
return strings.TrimSpace(out)
|
||||
}
|
||||
|
||||
// multiBlankLine matches a run of three or more newlines, allowing
// whitespace-only lines in between — i.e. more than one blank line in a row.
var multiBlankLine = regexp.MustCompile(`\n(\s*\n){2,}`)
|
||||
|
||||
var blockElements = map[string]bool{
|
||||
509
backend/internal/novelfire/scraper.go
Normal file
509
backend/internal/novelfire/scraper.go
Normal file
@@ -0,0 +1,509 @@
|
||||
// Package novelfire provides a NovelScraper implementation for novelfire.net.
|
||||
//
|
||||
// Site structure (as of 2025):
|
||||
//
|
||||
// Catalogue : https://novelfire.net/genre-all/sort-new/status-all/all-novel?page=N
|
||||
// Book page : https://novelfire.net/book/{slug}
|
||||
// Chapters : https://novelfire.net/book/{slug}/chapters?page=N
|
||||
// Chapter : https://novelfire.net/book/{slug}/{chapter-slug}
|
||||
package novelfire
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net/url"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/browser"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/novelfire/htmlutil"
|
||||
"github.com/libnovel/backend/internal/scraper"
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
const (
|
||||
baseURL = "https://novelfire.net"
|
||||
cataloguePath = "/genre-all/sort-new/status-all/all-novel"
|
||||
rankingPath = "/genre-all/sort-popular/status-all/all-novel"
|
||||
)
|
||||
|
||||
// Scraper is the novelfire.net implementation of scraper.NovelScraper.
|
||||
type Scraper struct {
|
||||
client browser.Client
|
||||
log *slog.Logger
|
||||
}
|
||||
|
||||
// Compile-time interface check.
|
||||
var _ scraper.NovelScraper = (*Scraper)(nil)
|
||||
|
||||
// New returns a new novelfire Scraper backed by client.
|
||||
func New(client browser.Client, log *slog.Logger) *Scraper {
|
||||
if log == nil {
|
||||
log = slog.Default()
|
||||
}
|
||||
return &Scraper{client: client, log: log}
|
||||
}
|
||||
|
||||
// SourceName implements NovelScraper.
|
||||
func (s *Scraper) SourceName() string { return "novelfire.net" }
|
||||
|
||||
// ── CatalogueProvider ─────────────────────────────────────────────────────────
|
||||
|
||||
// ScrapeCatalogue streams all CatalogueEntry values across all catalogue pages.
|
||||
func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueEntry, <-chan error) {
|
||||
entries := make(chan domain.CatalogueEntry, 64)
|
||||
errs := make(chan error, 16)
|
||||
|
||||
go func() {
|
||||
defer close(entries)
|
||||
defer close(errs)
|
||||
|
||||
pageURL := baseURL + cataloguePath
|
||||
page := 1
|
||||
|
||||
for pageURL != "" {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
default:
|
||||
}
|
||||
|
||||
s.log.Info("scraping catalogue page", "page", page, "url", pageURL)
|
||||
raw, err := s.client.GetContent(ctx, pageURL)
|
||||
if err != nil {
|
||||
errs <- fmt.Errorf("catalogue page %d: %w", page, err)
|
||||
return
|
||||
}
|
||||
|
||||
root, err := htmlutil.ParseHTML(raw)
|
||||
if err != nil {
|
||||
errs <- fmt.Errorf("catalogue page %d parse: %w", page, err)
|
||||
return
|
||||
}
|
||||
|
||||
cards := htmlutil.FindAll(root, scraper.Selector{Tag: "li", Class: "novel-item", Multiple: true})
|
||||
if len(cards) == 0 {
|
||||
s.log.Warn("no novel cards found, stopping pagination", "page", page)
|
||||
return
|
||||
}
|
||||
|
||||
for _, card := range cards {
|
||||
linkNode := htmlutil.FindFirst(card, scraper.Selector{Tag: "a", Attr: "href"})
|
||||
titleNode := htmlutil.FindFirst(card, scraper.Selector{Tag: "h4", Class: "novel-title"})
|
||||
|
||||
var title, href string
|
||||
if linkNode != nil {
|
||||
href = htmlutil.ExtractText(linkNode, scraper.Selector{Tag: "a", Attr: "href"})
|
||||
}
|
||||
if titleNode != nil {
|
||||
title = strings.TrimSpace(htmlutil.ExtractText(titleNode, scraper.Selector{}))
|
||||
}
|
||||
if href == "" || title == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
bookURL := resolveURL(baseURL, href)
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case entries <- domain.CatalogueEntry{Slug: slugFromURL(bookURL), Title: title, URL: bookURL}:
|
||||
}
|
||||
}
|
||||
|
||||
if !hasNextPageLink(root) {
|
||||
break
|
||||
}
|
||||
nextHref := ""
|
||||
for _, a := range htmlutil.FindAll(root, scraper.Selector{Tag: "a", Multiple: true}) {
|
||||
if htmlutil.AttrVal(a, "rel") == "next" {
|
||||
nextHref = htmlutil.AttrVal(a, "href")
|
||||
break
|
||||
}
|
||||
}
|
||||
if nextHref == "" {
|
||||
break
|
||||
}
|
||||
pageURL = resolveURL(baseURL, nextHref)
|
||||
page++
|
||||
}
|
||||
}()
|
||||
|
||||
return entries, errs
|
||||
}
|
||||
|
||||
// ── MetadataProvider ──────────────────────────────────────────────────────────
|
||||
|
||||
// ScrapeMetadata fetches and parses book metadata from the book's landing page.
|
||||
func (s *Scraper) ScrapeMetadata(ctx context.Context, bookURL string) (domain.BookMeta, error) {
|
||||
s.log.Debug("metadata fetch starting", "url", bookURL)
|
||||
|
||||
raw, err := s.client.GetContent(ctx, bookURL)
|
||||
if err != nil {
|
||||
return domain.BookMeta{}, fmt.Errorf("metadata fetch %s: %w", bookURL, err)
|
||||
}
|
||||
|
||||
root, err := htmlutil.ParseHTML(raw)
|
||||
if err != nil {
|
||||
return domain.BookMeta{}, fmt.Errorf("metadata parse %s: %w", bookURL, err)
|
||||
}
|
||||
|
||||
title := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "h1", Class: "novel-title"})
|
||||
author := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "span", Class: "author"})
|
||||
|
||||
var cover string
|
||||
if fig := htmlutil.FindFirst(root, scraper.Selector{Tag: "figure", Class: "cover"}); fig != nil {
|
||||
cover = htmlutil.ExtractFirst(fig, scraper.Selector{Tag: "img", Attr: "src"})
|
||||
if cover != "" && !strings.HasPrefix(cover, "http") {
|
||||
cover = baseURL + cover
|
||||
}
|
||||
}
|
||||
|
||||
status := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "span", Class: "status"})
|
||||
|
||||
genresNode := htmlutil.FindFirst(root, scraper.Selector{Tag: "div", Class: "genres"})
|
||||
var genres []string
|
||||
if genresNode != nil {
|
||||
genres = htmlutil.ExtractAll(genresNode, scraper.Selector{Tag: "a", Multiple: true})
|
||||
}
|
||||
|
||||
summary := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "div", Class: "summary"})
|
||||
totalStr := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "span", Class: "chapter-count"})
|
||||
totalChapters := parseChapterCount(totalStr)
|
||||
|
||||
slug := slugFromURL(bookURL)
|
||||
|
||||
meta := domain.BookMeta{
|
||||
Slug: slug,
|
||||
Title: title,
|
||||
Author: author,
|
||||
Cover: cover,
|
||||
Status: status,
|
||||
Genres: genres,
|
||||
Summary: summary,
|
||||
TotalChapters: totalChapters,
|
||||
SourceURL: bookURL,
|
||||
}
|
||||
s.log.Debug("metadata parsed", "slug", meta.Slug, "title", meta.Title)
|
||||
return meta, nil
|
||||
}
|
||||
|
||||
// ── ChapterListProvider ───────────────────────────────────────────────────────
|
||||
|
||||
// ScrapeChapterList returns chapter references for a book, ordered ascending.
|
||||
// upTo > 0 stops pagination as soon as at least upTo chapter numbers have been
|
||||
// collected — use this for range scrapes so we don't paginate 100 pages just
|
||||
// to discover refs we'll never scrape. upTo == 0 fetches all pages.
|
||||
// Each page fetch uses retryGet with 429-aware exponential backoff.
|
||||
func (s *Scraper) ScrapeChapterList(ctx context.Context, bookURL string, upTo int) ([]domain.ChapterRef, error) {
|
||||
var refs []domain.ChapterRef
|
||||
baseChapterURL := strings.TrimRight(bookURL, "/") + "/chapters"
|
||||
page := 1
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return refs, ctx.Err()
|
||||
default:
|
||||
}
|
||||
|
||||
pageURL := fmt.Sprintf("%s?page=%d", baseChapterURL, page)
|
||||
s.log.Info("scraping chapter list", "page", page, "url", pageURL)
|
||||
|
||||
raw, err := retryGet(ctx, s.log, s.client, pageURL, 9, 6*time.Second)
|
||||
if err != nil {
|
||||
return refs, fmt.Errorf("chapter list page %d: %w", page, err)
|
||||
}
|
||||
|
||||
root, err := htmlutil.ParseHTML(raw)
|
||||
if err != nil {
|
||||
return refs, fmt.Errorf("chapter list page %d parse: %w", page, err)
|
||||
}
|
||||
|
||||
chapterList := htmlutil.FindFirst(root, scraper.Selector{Class: "chapter-list"})
|
||||
if chapterList == nil {
|
||||
s.log.Debug("chapter list container not found, stopping pagination", "page", page)
|
||||
break
|
||||
}
|
||||
|
||||
items := htmlutil.FindAll(chapterList, scraper.Selector{Tag: "li"})
|
||||
if len(items) == 0 {
|
||||
break
|
||||
}
|
||||
|
||||
for _, item := range items {
|
||||
linkNode := htmlutil.FindFirst(item, scraper.Selector{Tag: "a"})
|
||||
if linkNode == nil {
|
||||
continue
|
||||
}
|
||||
href := htmlutil.ExtractText(linkNode, scraper.Selector{Attr: "href"})
|
||||
chTitle := htmlutil.ExtractText(linkNode, scraper.Selector{})
|
||||
if href == "" {
|
||||
continue
|
||||
}
|
||||
chURL := resolveURL(baseURL, href)
|
||||
num := chapterNumberFromURL(chURL)
|
||||
if num <= 0 {
|
||||
num = len(refs) + 1
|
||||
s.log.Warn("chapter number not parseable from URL, falling back to position",
|
||||
"url", chURL, "position", num)
|
||||
}
|
||||
refs = append(refs, domain.ChapterRef{
|
||||
Number: num,
|
||||
Title: strings.TrimSpace(chTitle),
|
||||
URL: chURL,
|
||||
})
|
||||
}
|
||||
|
||||
// Early-stop: if we have seen at least upTo chapter numbers, we have
|
||||
// enough refs to cover the requested range — no need to paginate further.
|
||||
if upTo > 0 && len(refs) > 0 && refs[len(refs)-1].Number >= upTo {
|
||||
s.log.Debug("chapter list early-stop reached", "upTo", upTo, "collected", len(refs))
|
||||
break
|
||||
}
|
||||
|
||||
page++
|
||||
}
|
||||
|
||||
return refs, nil
|
||||
}
|
||||
|
||||
// ── ChapterTextProvider ───────────────────────────────────────────────────────
|
||||
|
||||
// ScrapeChapterText fetches and parses a single chapter page.
|
||||
func (s *Scraper) ScrapeChapterText(ctx context.Context, ref domain.ChapterRef) (domain.Chapter, error) {
|
||||
s.log.Debug("chapter text fetch starting", "chapter", ref.Number, "url", ref.URL)
|
||||
|
||||
raw, err := retryGet(ctx, s.log, s.client, ref.URL, 9, 6*time.Second)
|
||||
if err != nil {
|
||||
return domain.Chapter{}, fmt.Errorf("chapter %d fetch: %w", ref.Number, err)
|
||||
}
|
||||
|
||||
root, err := htmlutil.ParseHTML(raw)
|
||||
if err != nil {
|
||||
return domain.Chapter{}, fmt.Errorf("chapter %d parse: %w", ref.Number, err)
|
||||
}
|
||||
|
||||
container := htmlutil.FindFirst(root, scraper.Selector{ID: "content"})
|
||||
if container == nil {
|
||||
return domain.Chapter{}, fmt.Errorf("chapter %d: #content container not found in %s", ref.Number, ref.URL)
|
||||
}
|
||||
|
||||
text := htmlutil.NodeToMarkdown(container)
|
||||
|
||||
s.log.Debug("chapter text parsed", "chapter", ref.Number, "text_bytes", len(text))
|
||||
|
||||
return domain.Chapter{Ref: ref, Text: text}, nil
|
||||
}
|
||||
|
||||
// ── RankingProvider ───────────────────────────────────────────────────────────

// ScrapeRanking pages through up to maxPages pages of the popular-novels listing.
// maxPages <= 0 means all pages. The caller decides whether to persist items.
//
// Results are streamed over the returned channels; both channels are closed
// when the background goroutine finishes (end of listing, error, or context
// cancellation). The first fetch/parse error stops the whole walk.
func (s *Scraper) ScrapeRanking(ctx context.Context, maxPages int) (<-chan domain.BookMeta, <-chan error) {
	entries := make(chan domain.BookMeta, 32)
	errs := make(chan error, 16)

	go func() {
		defer close(entries)
		defer close(errs)

		// rank is the 1-based position across ALL pages, not per page.
		rank := 1

		for page := 1; maxPages <= 0 || page <= maxPages; page++ {
			// Non-blocking cancellation check before each page fetch.
			select {
			case <-ctx.Done():
				return
			default:
			}

			pageURL := fmt.Sprintf("%s%s?page=%d", baseURL, rankingPath, page)
			s.log.Info("scraping popular ranking page", "page", page, "url", pageURL)

			raw, err := s.client.GetContent(ctx, pageURL)
			if err != nil {
				errs <- fmt.Errorf("ranking page %d: %w", page, err)
				return
			}

			root, err := htmlutil.ParseHTML(raw)
			if err != nil {
				errs <- fmt.Errorf("ranking page %d parse: %w", page, err)
				return
			}

			cards := htmlutil.FindAll(root, scraper.Selector{Tag: "li", Class: "novel-item", Multiple: true})
			if len(cards) == 0 {
				// Empty page means we walked past the end of the listing.
				break
			}

			for _, card := range cards {
				linkNode := htmlutil.FindFirst(card, scraper.Selector{Tag: "a"})
				if linkNode == nil {
					continue
				}
				href := htmlutil.ExtractText(linkNode, scraper.Selector{Tag: "a", Attr: "href"})
				bookURL := resolveURL(baseURL, href)
				if bookURL == "" {
					continue
				}

				// Prefer the visible card title; fall back to the link's
				// title attribute. Cards with neither are skipped.
				title := strings.TrimSpace(htmlutil.ExtractFirst(card, scraper.Selector{Tag: "h4", Class: "novel-title"}))
				if title == "" {
					title = strings.TrimSpace(htmlutil.ExtractText(linkNode, scraper.Selector{Tag: "a", Attr: "title"}))
				}
				if title == "" {
					continue
				}

				var cover string
				if fig := htmlutil.FindFirst(card, scraper.Selector{Tag: "figure", Class: "novel-cover"}); fig != nil {
					// Lazy-loaded images keep the real URL in data-src.
					cover = htmlutil.ExtractFirst(fig, scraper.Selector{Tag: "img", Attr: "data-src"})
					if cover == "" {
						cover = htmlutil.ExtractFirst(fig, scraper.Selector{Tag: "img", Attr: "src"})
					}
					// Inline data: URIs are placeholders, not real covers.
					if strings.HasPrefix(cover, "data:") {
						cover = ""
					}
					// Make relative cover paths absolute.
					if cover != "" && !strings.HasPrefix(cover, "http") {
						cover = baseURL + cover
					}
				}

				meta := domain.BookMeta{
					Slug:      slugFromURL(bookURL),
					Title:     title,
					Cover:     cover,
					SourceURL: bookURL,
					Ranking:   rank,
				}
				rank++

				// Blocking send, but abort promptly on cancellation so a
				// stalled consumer cannot leak this goroutine.
				select {
				case <-ctx.Done():
					return
				case entries <- meta:
				}
			}

			// Stop when the page has no rel="next" pagination link.
			if !hasNextPageLink(root) {
				break
			}
		}
	}()

	return entries, errs
}
|
||||
|
||||
// ── helpers ───────────────────────────────────────────────────────────────────

// resolveURL resolves href (possibly relative) against base; thin wrapper
// over htmlutil.ResolveURL.
func resolveURL(base, href string) string { return htmlutil.ResolveURL(base, href) }
|
||||
|
||||
func hasNextPageLink(root *html.Node) bool {
|
||||
links := htmlutil.FindAll(root, scraper.Selector{Tag: "a", Multiple: true})
|
||||
for _, a := range links {
|
||||
for _, attr := range a.Attr {
|
||||
if attr.Key == "rel" && attr.Val == "next" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// slugFromURL extracts the book slug from a novelfire book URL.
// For paths shaped /book/<slug>[/...] it returns <slug>; otherwise it falls
// back to the last path segment, and to the raw input when parsing fails.
func slugFromURL(bookURL string) string {
	parsed, err := url.Parse(bookURL)
	if err != nil {
		return bookURL
	}
	segments := strings.Split(strings.Trim(parsed.Path, "/"), "/")
	switch {
	case len(segments) >= 2 && segments[0] == "book":
		return segments[1]
	case len(segments) > 0:
		return segments[len(segments)-1]
	}
	return ""
}
|
||||
|
||||
// parseChapterCount parses the leading integer out of a label such as
// "1,234 Chapters". Thousands separators are stripped first; an empty string
// or a non-numeric leading token yields 0.
func parseChapterCount(s string) int {
	cleaned := strings.ReplaceAll(s, ",", "")
	tokens := strings.Fields(cleaned)
	if len(tokens) == 0 {
		return 0
	}
	count, _ := strconv.Atoi(tokens[0])
	return count
}
|
||||
|
||||
// chapterNumberFromURL extracts the chapter number from the final path
// segment of chapterURL, accepting "chapter-42", "chap-7", "ch-3" or a bare
// "42". It returns 0 when the URL does not parse or contains no digits.
func chapterNumberFromURL(chapterURL string) int {
	parsed, err := url.Parse(chapterURL)
	if err != nil {
		return 0
	}
	segment := path.Base(parsed.Path)
	for _, prefix := range []string{"chapter-", "chap-", "ch-"} {
		segment = strings.TrimPrefix(segment, prefix)
	}
	// Split on anything that is not an ASCII digit and keep the first run.
	nonDigit := func(r rune) bool { return r < '0' || r > '9' }
	runs := strings.FieldsFunc(segment, nonDigit)
	if len(runs) == 0 {
		return 0
	}
	number, _ := strconv.Atoi(runs[0])
	return number
}
|
||||
|
||||
// retryGet calls client.GetContent up to maxAttempts times with exponential backoff.
|
||||
// If the server returns 429 (ErrRateLimit), the suggested Retry-After delay is used
|
||||
// instead of the geometric backoff delay.
|
||||
func retryGet(
|
||||
ctx context.Context,
|
||||
log *slog.Logger,
|
||||
client browser.Client,
|
||||
pageURL string,
|
||||
maxAttempts int,
|
||||
baseDelay time.Duration,
|
||||
) (string, error) {
|
||||
var lastErr error
|
||||
delay := baseDelay
|
||||
for attempt := 1; attempt <= maxAttempts; attempt++ {
|
||||
raw, err := client.GetContent(ctx, pageURL)
|
||||
if err == nil {
|
||||
return raw, nil
|
||||
}
|
||||
lastErr = err
|
||||
if ctx.Err() != nil {
|
||||
return "", err
|
||||
}
|
||||
if attempt < maxAttempts {
|
||||
// If the server is rate-limiting us, honour its Retry-After delay.
|
||||
waitFor := delay
|
||||
var rlErr *browser.RateLimitError
|
||||
if errors.As(err, &rlErr) {
|
||||
waitFor = rlErr.RetryAfter
|
||||
if log != nil {
|
||||
log.Warn("rate limited, backing off",
|
||||
"url", pageURL, "attempt", attempt, "retry_in", waitFor)
|
||||
}
|
||||
} else {
|
||||
if log != nil {
|
||||
log.Warn("fetch failed, retrying",
|
||||
"url", pageURL, "attempt", attempt, "retry_in", delay, "err", err)
|
||||
}
|
||||
delay *= 2
|
||||
}
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return "", ctx.Err()
|
||||
case <-time.After(waitFor):
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", lastErr
|
||||
}
|
||||
129
backend/internal/novelfire/scraper_test.go
Normal file
129
backend/internal/novelfire/scraper_test.go
Normal file
@@ -0,0 +1,129 @@
|
||||
package novelfire
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestSlugFromURL(t *testing.T) {
|
||||
cases := []struct {
|
||||
url string
|
||||
want string
|
||||
}{
|
||||
{"https://novelfire.net/book/shadow-slave", "shadow-slave"},
|
||||
{"https://novelfire.net/book/a-dragon-against-the-whole-world", "a-dragon-against-the-whole-world"},
|
||||
{"https://novelfire.net/book/foo/chapter-1", "foo"},
|
||||
{"https://novelfire.net/", ""},
|
||||
{"not-a-url", "not-a-url"},
|
||||
}
|
||||
for _, c := range cases {
|
||||
got := slugFromURL(c.url)
|
||||
if got != c.want {
|
||||
t.Errorf("slugFromURL(%q) = %q, want %q", c.url, got, c.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestChapterNumberFromURL(t *testing.T) {
|
||||
cases := []struct {
|
||||
url string
|
||||
want int
|
||||
}{
|
||||
{"https://novelfire.net/book/shadow-slave/chapter-42", 42},
|
||||
{"https://novelfire.net/book/shadow-slave/chapter-1000", 1000},
|
||||
{"https://novelfire.net/book/shadow-slave/chap-7", 7},
|
||||
{"https://novelfire.net/book/shadow-slave/ch-3", 3},
|
||||
{"https://novelfire.net/book/shadow-slave/42", 42},
|
||||
{"https://novelfire.net/book/shadow-slave/no-number-here", 0},
|
||||
{"not-a-url", 0},
|
||||
}
|
||||
for _, c := range cases {
|
||||
got := chapterNumberFromURL(c.url)
|
||||
if got != c.want {
|
||||
t.Errorf("chapterNumberFromURL(%q) = %d, want %d", c.url, got, c.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseChapterCount(t *testing.T) {
|
||||
cases := []struct {
|
||||
in string
|
||||
want int
|
||||
}{
|
||||
{"123 Chapters", 123},
|
||||
{"1,234 Chapters", 1234},
|
||||
{"0", 0},
|
||||
{"", 0},
|
||||
{"500", 500},
|
||||
}
|
||||
for _, c := range cases {
|
||||
got := parseChapterCount(c.in)
|
||||
if got != c.want {
|
||||
t.Errorf("parseChapterCount(%q) = %d, want %d", c.in, got, c.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestRetryGet_ContextCancellation verifies that retryGet returns an error
// when the context is already cancelled before the first attempt, rather
// than retrying or hanging.
func TestRetryGet_ContextCancellation(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // cancel immediately

	stub := newStubClient()
	stub.setError("https://example.com/page", context.Canceled)

	// baseDelay 0 keeps the test fast even if a retry sleep were reached.
	_, err := retryGet(ctx, nil, stub, "https://example.com/page", 3, 0)
	if err == nil {
		t.Fatal("expected error on cancelled context")
	}
}
|
||||
|
||||
// TestRetryGet_EventualSuccess verifies that retryGet keeps retrying through
// transient errors and returns the body from the first successful attempt,
// making exactly as many calls as needed (no extra calls after success).
func TestRetryGet_EventualSuccess(t *testing.T) {
	stub := newStubClient()
	calls := 0
	// Fail twice, then succeed on the third call.
	stub.setFn("https://example.com/page", func() (string, error) {
		calls++
		if calls < 3 {
			return "", context.DeadlineExceeded
		}
		return "<html>ok</html>", nil
	})

	got, err := retryGet(context.Background(), nil, stub, "https://example.com/page", 5, 0)
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if got != "<html>ok</html>" {
		t.Errorf("got %q, want html", got)
	}
	if calls != 3 {
		t.Errorf("expected 3 calls, got %d", calls)
	}
}
|
||||
|
||||
// ── minimal stub client for tests ─────────────────────────────────────────────
|
||||
|
||||
type stubClient struct {
|
||||
errors map[string]error
|
||||
fns map[string]func() (string, error)
|
||||
}
|
||||
|
||||
func newStubClient() *stubClient {
|
||||
return &stubClient{
|
||||
errors: make(map[string]error),
|
||||
fns: make(map[string]func() (string, error)),
|
||||
}
|
||||
}
|
||||
|
||||
func (s *stubClient) setError(u string, err error) { s.errors[u] = err }
|
||||
|
||||
func (s *stubClient) setFn(u string, fn func() (string, error)) { s.fns[u] = fn }
|
||||
|
||||
func (s *stubClient) GetContent(_ context.Context, pageURL string) (string, error) {
|
||||
if fn, ok := s.fns[pageURL]; ok {
|
||||
return fn()
|
||||
}
|
||||
if err, ok := s.errors[pageURL]; ok {
|
||||
return "", err
|
||||
}
|
||||
return "", context.DeadlineExceeded
|
||||
}
|
||||
222
backend/internal/orchestrator/orchestrator.go
Normal file
222
backend/internal/orchestrator/orchestrator.go
Normal file
@@ -0,0 +1,222 @@
|
||||
// Package orchestrator coordinates metadata extraction, chapter-list fetching,
|
||||
// and parallel chapter scraping for a single book.
|
||||
//
|
||||
// Design:
|
||||
// - RunBook scrapes one book (metadata + chapter list + chapter texts) end-to-end.
|
||||
// - N worker goroutines pull chapter refs from a shared queue and call ScrapeChapterText.
|
||||
// - The caller (runner poll loop) owns the outer task-claim / finish cycle.
|
||||
// - An optional PostMetadata hook (set in Config) is called after WriteMetadata
|
||||
// succeeds. The runner uses this to upsert books into Meilisearch.
|
||||
package orchestrator
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"runtime"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/scraper"
|
||||
)
|
||||
|
||||
// Config holds tunable parameters for the orchestrator.
// The zero value is usable: New applies defaults for unset fields.
type Config struct {
	// Workers is the number of goroutines used to scrape chapters in parallel.
	// Defaults to runtime.NumCPU() when 0.
	Workers int
	// PostMetadata is an optional hook called with the scraped BookMeta after
	// WriteMetadata succeeds. Errors from the hook are logged but not fatal.
	// Used by the runner to index books in Meilisearch.
	PostMetadata func(ctx context.Context, meta domain.BookMeta)
}
|
||||
|
||||
// Orchestrator runs a single-book scrape pipeline.
// Construct via New; the zero value is not usable.
type Orchestrator struct {
	novel        scraper.NovelScraper // source-site scraper (metadata, chapter list, chapter text)
	store        bookstore.BookWriter // persistence sink for metadata and chapters
	log          *slog.Logger         // never nil after New
	workers      int                  // size of the chapter-scraping worker pool
	postMetadata func(ctx context.Context, meta domain.BookMeta) // optional hook, may be nil
}
|
||||
|
||||
// New returns a new Orchestrator.
|
||||
func New(cfg Config, novel scraper.NovelScraper, store bookstore.BookWriter, log *slog.Logger) *Orchestrator {
|
||||
if log == nil {
|
||||
log = slog.Default()
|
||||
}
|
||||
workers := cfg.Workers
|
||||
if workers <= 0 {
|
||||
workers = runtime.NumCPU()
|
||||
}
|
||||
return &Orchestrator{
|
||||
novel: novel,
|
||||
store: store,
|
||||
log: log,
|
||||
workers: workers,
|
||||
postMetadata: cfg.PostMetadata,
|
||||
}
|
||||
}
|
||||
|
||||
// RunBook scrapes a single book described by task. It handles:
//  1. Metadata scrape + write
//  2. Chapter list scrape + write
//  3. Parallel chapter text scrape + write (worker pool)
//
// Returns a ScrapeResult with counters. The result's ErrorMessage is non-empty
// if the run failed at the metadata or chapter-list level. Individual chapter
// failures only increment the error counter and do not abort the run.
func (o *Orchestrator) RunBook(ctx context.Context, task domain.ScrapeTask) domain.ScrapeResult {
	o.log.Info("orchestrator: RunBook starting",
		"task_id", task.ID,
		"kind", task.Kind,
		"url", task.TargetURL,
		"workers", o.workers,
	)

	var result domain.ScrapeResult

	if task.TargetURL == "" {
		result.ErrorMessage = "task has no target URL"
		return result
	}

	// ── Step 1: Metadata ──────────────────────────────────────────────────────
	meta, err := o.novel.ScrapeMetadata(ctx, task.TargetURL)
	if err != nil {
		o.log.Error("metadata scrape failed", "url", task.TargetURL, "err", err)
		result.ErrorMessage = fmt.Sprintf("metadata: %v", err)
		result.Errors++
		return result
	}

	if err := o.store.WriteMetadata(ctx, meta); err != nil {
		o.log.Error("metadata write failed", "slug", meta.Slug, "err", err)
		// non-fatal: continue to chapters
		result.Errors++
	} else {
		result.BooksFound = 1
		// Fire optional post-metadata hook (e.g. Meilisearch indexing).
		if o.postMetadata != nil {
			o.postMetadata(ctx, meta)
		}
	}

	o.log.Info("metadata saved", "slug", meta.Slug, "title", meta.Title)

	// ── Step 2: Chapter list ──────────────────────────────────────────────────
	refs, err := o.novel.ScrapeChapterList(ctx, task.TargetURL, task.ToChapter)
	if err != nil {
		o.log.Error("chapter list scrape failed", "slug", meta.Slug, "err", err)
		result.ErrorMessage = fmt.Sprintf("chapter list: %v", err)
		result.Errors++
		return result
	}

	o.log.Info("chapter list fetched", "slug", meta.Slug, "chapters", len(refs))

	// Persist chapter refs (without text) so the index exists early.
	if wErr := o.store.WriteChapterRefs(ctx, meta.Slug, refs); wErr != nil {
		o.log.Warn("chapter refs write failed", "slug", meta.Slug, "err", wErr)
	}

	// ── Step 3: Chapter texts (worker pool) ───────────────────────────────────
	type chapterJob struct {
		slug  string
		ref   domain.ChapterRef
		total int // total chapters to scrape (for progress logging)
	}
	work := make(chan chapterJob, o.workers*4)

	// NOTE: `errors` here shadows the stdlib package name within this scope;
	// this file does not import "errors" so the shadowing is harmless.
	var scraped, skipped, errors atomic.Int64
	var wg sync.WaitGroup

	for i := 0; i < o.workers; i++ {
		wg.Add(1)
		go func(workerID int) {
			defer wg.Done()
			for job := range work {
				// Drain quickly on cancellation instead of scraping more.
				select {
				case <-ctx.Done():
					return
				default:
				}

				if o.store.ChapterExists(ctx, job.slug, job.ref) {
					o.log.Debug("chapter already exists, skipping",
						"slug", job.slug, "chapter", job.ref.Number)
					skipped.Add(1)
					continue
				}

				ch, err := o.novel.ScrapeChapterText(ctx, job.ref)
				if err != nil {
					o.log.Error("chapter scrape failed",
						"slug", job.slug, "chapter", job.ref.Number, "err", err)
					errors.Add(1)
					continue
				}

				if err := o.store.WriteChapter(ctx, job.slug, ch); err != nil {
					o.log.Error("chapter write failed",
						"slug", job.slug, "chapter", job.ref.Number, "err", err)
					errors.Add(1)
					continue
				}

				n := scraped.Add(1)
				// Log a progress summary every 25 chapters scraped.
				if n%25 == 0 {
					o.log.Info("scraping chapters",
						"slug", job.slug, "scraped", n, "total", job.total)
				}
			}
		}(i)
	}

	// Count how many chapters will actually be enqueued (for progress logging).
	toScrape := 0
	for _, ref := range refs {
		if task.FromChapter > 0 && ref.Number < task.FromChapter {
			continue
		}
		if task.ToChapter > 0 && ref.Number > task.ToChapter {
			continue
		}
		toScrape++
	}

	// Enqueue chapter jobs respecting the optional range filter from the task.
	for _, ref := range refs {
		if task.FromChapter > 0 && ref.Number < task.FromChapter {
			skipped.Add(1)
			continue
		}
		if task.ToChapter > 0 && ref.Number > task.ToChapter {
			skipped.Add(1)
			continue
		}
		// Blocking send; bail out to the drain path on cancellation so the
		// workers can exit and wg.Wait() below cannot deadlock.
		select {
		case <-ctx.Done():
			goto drain
		case work <- chapterJob{slug: meta.Slug, ref: ref, total: toScrape}:
		}
	}

drain:
	close(work)
	wg.Wait()

	result.ChaptersScraped = int(scraped.Load())
	result.ChaptersSkipped = int(skipped.Load())
	result.Errors += int(errors.Load())

	o.log.Info("book scrape finished",
		"slug", meta.Slug,
		"scraped", result.ChaptersScraped,
		"skipped", result.ChaptersSkipped,
		"errors", result.Errors,
	)
	return result
}
|
||||
210
backend/internal/orchestrator/orchestrator_test.go
Normal file
210
backend/internal/orchestrator/orchestrator_test.go
Normal file
@@ -0,0 +1,210 @@
|
||||
package orchestrator
|
||||
|
||||
import (
	"context"
	"errors"
	"strconv"
	"sync"
	"testing"

	"github.com/libnovel/backend/internal/domain"
)
|
||||
|
||||
// ── stubs ─────────────────────────────────────────────────────────────────────

// stubScraper is a canned NovelScraper: metadata, chapter refs, and
// per-chapter results/errors are configured directly on the struct fields.
type stubScraper struct {
	meta     domain.BookMeta
	metaErr  error
	refs     []domain.ChapterRef
	refsErr  error
	chapters map[int]domain.Chapter // optional per-chapter overrides
	chapErr  map[int]error          // optional per-chapter forced errors
}

// SourceName identifies the stub source.
func (s *stubScraper) SourceName() string { return "stub" }

// ScrapeCatalogue returns immediately-closed channels (no catalogue entries).
func (s *stubScraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueEntry, <-chan error) {
	ch := make(chan domain.CatalogueEntry)
	errs := make(chan error)
	close(ch)
	close(errs)
	return ch, errs
}

// ScrapeMetadata returns the configured meta/metaErr pair.
func (s *stubScraper) ScrapeMetadata(_ context.Context, _ string) (domain.BookMeta, error) {
	return s.meta, s.metaErr
}

// ScrapeChapterList returns the configured refs/refsErr pair.
func (s *stubScraper) ScrapeChapterList(_ context.Context, _ string, _ int) ([]domain.ChapterRef, error) {
	return s.refs, s.refsErr
}

// ScrapeChapterText consults chapErr first, then chapters, and falls back to
// a synthetic chapter whose Text is "text".
func (s *stubScraper) ScrapeChapterText(_ context.Context, ref domain.ChapterRef) (domain.Chapter, error) {
	if s.chapErr != nil {
		if err, ok := s.chapErr[ref.Number]; ok {
			return domain.Chapter{}, err
		}
	}
	if s.chapters != nil {
		if ch, ok := s.chapters[ref.Number]; ok {
			return ch, nil
		}
	}
	return domain.Chapter{Ref: ref, Text: "text"}, nil
}

// ScrapeRanking returns immediately-closed channels (no ranking entries).
func (s *stubScraper) ScrapeRanking(ctx context.Context, maxPages int) (<-chan domain.BookMeta, <-chan error) {
	ch := make(chan domain.BookMeta)
	errs := make(chan error)
	close(ch)
	close(errs)
	return ch, errs
}
|
||||
|
||||
type stubStore struct {
|
||||
mu sync.Mutex
|
||||
metaWritten []domain.BookMeta
|
||||
chaptersWritten []domain.Chapter
|
||||
existing map[string]bool // "slug:N" → exists
|
||||
writeMetaErr error
|
||||
}
|
||||
|
||||
func (s *stubStore) WriteMetadata(_ context.Context, meta domain.BookMeta) error {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.writeMetaErr != nil {
|
||||
return s.writeMetaErr
|
||||
}
|
||||
s.metaWritten = append(s.metaWritten, meta)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubStore) WriteChapter(_ context.Context, slug string, ch domain.Chapter) error {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.chaptersWritten = append(s.chaptersWritten, ch)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubStore) WriteChapterRefs(_ context.Context, _ string, _ []domain.ChapterRef) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubStore) ChapterExists(_ context.Context, slug string, ref domain.ChapterRef) bool {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
key := slug + ":" + string(rune('0'+ref.Number))
|
||||
return s.existing[key]
|
||||
}
|
||||
|
||||
// ── tests ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
// TestRunBook_HappyPath verifies the full pipeline with three chapters and no
// failures: metadata is written (BooksFound=1) and every chapter is scraped.
func TestRunBook_HappyPath(t *testing.T) {
	sc := &stubScraper{
		meta: domain.BookMeta{Slug: "test-book", Title: "Test Book", SourceURL: "https://example.com/book/test-book"},
		refs: []domain.ChapterRef{
			{Number: 1, Title: "Ch 1", URL: "https://example.com/book/test-book/chapter-1"},
			{Number: 2, Title: "Ch 2", URL: "https://example.com/book/test-book/chapter-2"},
			{Number: 3, Title: "Ch 3", URL: "https://example.com/book/test-book/chapter-3"},
		},
	}
	st := &stubStore{}
	// Two workers exercises the parallel path with more than one goroutine.
	o := New(Config{Workers: 2}, sc, st, nil)

	task := domain.ScrapeTask{
		ID:        "t1",
		Kind:      "book",
		TargetURL: "https://example.com/book/test-book",
	}

	result := o.RunBook(context.Background(), task)

	if result.ErrorMessage != "" {
		t.Fatalf("unexpected error: %s", result.ErrorMessage)
	}
	if result.BooksFound != 1 {
		t.Errorf("BooksFound = %d, want 1", result.BooksFound)
	}
	if result.ChaptersScraped != 3 {
		t.Errorf("ChaptersScraped = %d, want 3", result.ChaptersScraped)
	}
}
|
||||
|
||||
// TestRunBook_MetadataError verifies that a metadata scrape failure aborts the
// run with a populated ErrorMessage and exactly one counted error.
func TestRunBook_MetadataError(t *testing.T) {
	sc := &stubScraper{metaErr: errors.New("404 not found")}
	st := &stubStore{}
	o := New(Config{Workers: 1}, sc, st, nil)

	result := o.RunBook(context.Background(), domain.ScrapeTask{
		ID:        "t2",
		TargetURL: "https://example.com/book/missing",
	})

	if result.ErrorMessage == "" {
		t.Fatal("expected ErrorMessage to be set")
	}
	if result.Errors != 1 {
		t.Errorf("Errors = %d, want 1", result.Errors)
	}
}
|
||||
|
||||
func TestRunBook_ChapterRange(t *testing.T) {
|
||||
sc := &stubScraper{
|
||||
meta: domain.BookMeta{Slug: "range-book", SourceURL: "https://example.com/book/range-book"},
|
||||
refs: func() []domain.ChapterRef {
|
||||
var refs []domain.ChapterRef
|
||||
for i := 1; i <= 10; i++ {
|
||||
refs = append(refs, domain.ChapterRef{Number: i, URL: "https://example.com/book/range-book/chapter-" + string(rune('0'+i))})
|
||||
}
|
||||
return refs
|
||||
}(),
|
||||
}
|
||||
st := &stubStore{}
|
||||
o := New(Config{Workers: 2}, sc, st, nil)
|
||||
|
||||
result := o.RunBook(context.Background(), domain.ScrapeTask{
|
||||
ID: "t3",
|
||||
TargetURL: "https://example.com/book/range-book",
|
||||
FromChapter: 3,
|
||||
ToChapter: 7,
|
||||
})
|
||||
|
||||
if result.ErrorMessage != "" {
|
||||
t.Fatalf("unexpected error: %s", result.ErrorMessage)
|
||||
}
|
||||
// chapters 3–7 = 5 scraped, chapters 1-2 and 8-10 = 5 skipped
|
||||
if result.ChaptersScraped != 5 {
|
||||
t.Errorf("ChaptersScraped = %d, want 5", result.ChaptersScraped)
|
||||
}
|
||||
if result.ChaptersSkipped != 5 {
|
||||
t.Errorf("ChaptersSkipped = %d, want 5", result.ChaptersSkipped)
|
||||
}
|
||||
}
|
||||
|
||||
// TestRunBook_ContextCancellation runs the pipeline with an already-cancelled
// context. The contract under test is only "no panic / no deadlock"; the
// counters in the result are deliberately not asserted because how far the
// run gets before noticing cancellation is timing-dependent.
func TestRunBook_ContextCancellation(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	cancel()

	sc := &stubScraper{
		meta: domain.BookMeta{Slug: "ctx-book", SourceURL: "https://example.com/book/ctx-book"},
		refs: []domain.ChapterRef{
			{Number: 1, URL: "https://example.com/book/ctx-book/chapter-1"},
		},
	}
	st := &stubStore{}
	o := New(Config{Workers: 1}, sc, st, nil)

	// Should not panic; result may have errors or zero chapters.
	result := o.RunBook(ctx, domain.ScrapeTask{
		ID:        "t4",
		TargetURL: "https://example.com/book/ctx-book",
	})
	_ = result
}
|
||||
|
||||
func TestRunBook_EmptyTargetURL(t *testing.T) {
|
||||
o := New(Config{Workers: 1}, &stubScraper{}, &stubStore{}, nil)
|
||||
result := o.RunBook(context.Background(), domain.ScrapeTask{ID: "t5"})
|
||||
if result.ErrorMessage == "" {
|
||||
t.Fatal("expected ErrorMessage for empty target URL")
|
||||
}
|
||||
}
|
||||
96
backend/internal/presigncache/cache.go
Normal file
96
backend/internal/presigncache/cache.go
Normal file
@@ -0,0 +1,96 @@
|
||||
// Package presigncache provides a Valkey (Redis-compatible) backed cache for
|
||||
// MinIO presigned URLs. The backend generates presigned URLs and stores them
|
||||
// here with a TTL; subsequent requests for the same key return the cached URL
|
||||
// without re-contacting MinIO.
|
||||
//
|
||||
// Design:
|
||||
// - Cache is intentionally best-effort: Get returns ("", false, nil) on any
|
||||
// Valkey error, so callers always have a fallback path to regenerate.
|
||||
// - Set silently drops errors — a miss on the next request is acceptable.
|
||||
// - TTL should be set shorter than the actual presigned URL lifetime so that
|
||||
// cached URLs are always valid when served. Recommended: 55 minutes for a
|
||||
// 1-hour presigned URL.
|
||||
package presigncache
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/redis/go-redis/v9"
|
||||
)
|
||||
|
||||
// Cache is the interface for presign URL caching.
// Implementations must be safe for concurrent use.
// All operations are best-effort: callers are expected to treat any error
// (or a miss) as "regenerate the presigned URL".
type Cache interface {
	// Get returns the cached URL for key. ok is false on cache miss or error.
	Get(ctx context.Context, key string) (url string, ok bool, err error)
	// Set stores url under key with the given TTL.
	Set(ctx context.Context, key, url string, ttl time.Duration) error
	// Delete removes key from the cache.
	Delete(ctx context.Context, key string) error
}
|
||||
|
||||
// ValkeyCache is a Cache backed by Valkey / Redis via go-redis.
// Safe for concurrent use (the underlying redis.Client is goroutine-safe).
type ValkeyCache struct {
	rdb *redis.Client // shared connection pool, created in New
}
|
||||
|
||||
// New creates a ValkeyCache connecting to addr (e.g. "valkey:6379").
// The connection is not established until the first command; use Ping to
// verify connectivity at startup.
func New(addr string) *ValkeyCache {
	// Short timeouts keep the cache best-effort: a slow/unreachable Valkey
	// degrades to cache misses rather than stalling request handling.
	rdb := redis.NewClient(&redis.Options{
		Addr:         addr,
		DialTimeout:  2 * time.Second,
		ReadTimeout:  1 * time.Second,
		WriteTimeout: 1 * time.Second,
	})
	return &ValkeyCache{rdb: rdb}
}
|
||||
|
||||
// Ping checks connectivity. Call once at startup.
|
||||
func (c *ValkeyCache) Ping(ctx context.Context) error {
|
||||
if err := c.rdb.Ping(ctx).Err(); err != nil {
|
||||
return fmt.Errorf("presigncache: ping valkey: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Get returns (url, true, nil) on hit, ("", false, nil) on miss, and
|
||||
// ("", false, err) only on unexpected errors (not redis.Nil).
|
||||
func (c *ValkeyCache) Get(ctx context.Context, key string) (string, bool, error) {
|
||||
val, err := c.rdb.Get(ctx, key).Result()
|
||||
if err == redis.Nil {
|
||||
return "", false, nil
|
||||
}
|
||||
if err != nil {
|
||||
return "", false, fmt.Errorf("presigncache: get %q: %w", key, err)
|
||||
}
|
||||
return val, true, nil
|
||||
}
|
||||
|
||||
// Set stores url under key with ttl. Errors are returned but are non-fatal
|
||||
// for callers — a Set failure means the next request will miss and regenerate.
|
||||
func (c *ValkeyCache) Set(ctx context.Context, key, url string, ttl time.Duration) error {
|
||||
if err := c.rdb.Set(ctx, key, url, ttl).Err(); err != nil {
|
||||
return fmt.Errorf("presigncache: set %q: %w", key, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete removes key from the cache. It is not an error if the key does not exist.
|
||||
func (c *ValkeyCache) Delete(ctx context.Context, key string) error {
|
||||
if err := c.rdb.Del(ctx, key).Err(); err != nil {
|
||||
return fmt.Errorf("presigncache: delete %q: %w", key, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// NoopCache is a no-op Cache that always returns a miss. Used when Valkey is
|
||||
// not configured (e.g. local development without Docker).
|
||||
type NoopCache struct{}
|
||||
|
||||
func (NoopCache) Get(_ context.Context, _ string) (string, bool, error) { return "", false, nil }
|
||||
func (NoopCache) Set(_ context.Context, _, _ string, _ time.Duration) error { return nil }
|
||||
func (NoopCache) Delete(_ context.Context, _ string) error { return nil }
|
||||
185
backend/internal/runner/catalogue_refresh.go
Normal file
185
backend/internal/runner/catalogue_refresh.go
Normal file
@@ -0,0 +1,185 @@
|
||||
package runner
|
||||
|
||||
// catalogue_refresh.go — independent loop that walks the full novelfire.net
|
||||
// catalogue, scrapes per-book metadata, downloads cover images to MinIO, and
|
||||
// indexes every book in Meilisearch.
|
||||
//
|
||||
// Design:
|
||||
// - Runs on its own ticker (CatalogueRefreshInterval, default 24h) inside Run().
|
||||
// - Also fires once on startup.
|
||||
// - ScrapeCatalogue streams CatalogueEntry values over a channel — we iterate
|
||||
// and call ScrapeMetadata for each entry.
|
||||
// - Per-request random jitter (1–3s) prevents hammering novelfire.net.
|
||||
// - Cover images are fetched from the URL embedded in BookMeta.Cover and
|
||||
// stored in MinIO (browse bucket, key: covers/{slug}.jpg).
|
||||
// - WriteMetadata + UpsertBook are called for every successfully scraped book.
|
||||
// - Errors for individual books are logged and skipped; the loop continues.
|
||||
// - The cover URL stored in BookMeta.Cover is rewritten to the internal proxy
|
||||
// path (/api/cover/novelfire.net/{slug}) so the UI always fetches via the
|
||||
// backend, which will serve from MinIO.
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// runCatalogueRefresh performs one full catalogue walk: scrapes metadata for
|
||||
// every book on novelfire.net, downloads covers to MinIO, and upserts to
|
||||
// Meilisearch. Errors for individual books are logged and skipped.
|
||||
func (r *Runner) runCatalogueRefresh(ctx context.Context) {
|
||||
if r.deps.Novel == nil {
|
||||
r.deps.Log.Warn("runner: catalogue refresh skipped — Novel scraper not configured")
|
||||
return
|
||||
}
|
||||
if r.deps.BookWriter == nil {
|
||||
r.deps.Log.Warn("runner: catalogue refresh skipped — BookWriter not configured")
|
||||
return
|
||||
}
|
||||
|
||||
log := r.deps.Log.With("op", "catalogue_refresh")
|
||||
log.Info("runner: catalogue refresh starting")
|
||||
|
||||
entries, errCh := r.deps.Novel.ScrapeCatalogue(ctx)
|
||||
|
||||
ok, skipped, errCount := 0, 0, 0
|
||||
for entry := range entries {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
|
||||
// Skip books already present in Meilisearch — they were indexed on a
|
||||
// previous run. Re-indexing only happens when a scrape task is
|
||||
// explicitly enqueued (e.g. via the admin UI or API).
|
||||
if r.deps.SearchIndex.BookExists(ctx, entry.Slug) {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
// Random jitter between books to avoid rate-limiting.
|
||||
jitter := time.Duration(1000+rand.Intn(2000)) * time.Millisecond
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
break
|
||||
case <-time.After(jitter):
|
||||
}
|
||||
|
||||
meta, err := r.deps.Novel.ScrapeMetadata(ctx, entry.URL)
|
||||
if err != nil {
|
||||
log.Warn("runner: catalogue refresh: metadata scrape failed",
|
||||
"url", entry.URL, "err", err)
|
||||
errCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Rewrite cover URL to backend proxy path so UI never hits CDN directly.
|
||||
originalCover := meta.Cover
|
||||
meta.Cover = fmt.Sprintf("/api/cover/novelfire.net/%s", meta.Slug)
|
||||
|
||||
// Persist to PocketBase.
|
||||
if err := r.deps.BookWriter.WriteMetadata(ctx, meta); err != nil {
|
||||
log.Warn("runner: catalogue refresh: WriteMetadata failed",
|
||||
"slug", meta.Slug, "err", err)
|
||||
errCount++
|
||||
continue
|
||||
}
|
||||
|
||||
// Index in Meilisearch.
|
||||
if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
|
||||
log.Warn("runner: catalogue refresh: UpsertBook failed",
|
||||
"slug", meta.Slug, "err", err)
|
||||
// non-fatal — continue
|
||||
}
|
||||
|
||||
// Download and store cover image in MinIO if we have a cover URL
|
||||
// and a CoverStore is wired in.
|
||||
if r.deps.CoverStore != nil && originalCover != "" {
|
||||
if !r.deps.CoverStore.CoverExists(ctx, meta.Slug) {
|
||||
if err := r.downloadCover(ctx, meta.Slug, originalCover); err != nil {
|
||||
log.Warn("runner: catalogue refresh: cover download failed",
|
||||
"slug", meta.Slug, "url", originalCover, "err", err)
|
||||
// non-fatal
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ok++
|
||||
if ok%100 == 0 {
|
||||
log.Info("runner: catalogue refresh progress",
|
||||
"scraped", ok, "errors", errCount)
|
||||
}
|
||||
}
|
||||
|
||||
if err := <-errCh; err != nil {
|
||||
log.Warn("runner: catalogue refresh: catalogue stream error", "err", err)
|
||||
}
|
||||
|
||||
log.Info("runner: catalogue refresh finished",
|
||||
"ok", ok, "skipped", skipped, "errors", errCount)
|
||||
}
|
||||
|
||||
// downloadCover fetches the cover image from coverURL and stores it in MinIO
|
||||
// under covers/{slug}.jpg. It retries up to 3 times with exponential backoff
|
||||
// on transient errors (5xx, network failures).
|
||||
func (r *Runner) downloadCover(ctx context.Context, slug, coverURL string) error {
|
||||
const maxRetries = 3
|
||||
delay := 2 * time.Second
|
||||
|
||||
var lastErr error
|
||||
for attempt := 0; attempt < maxRetries; attempt++ {
|
||||
if ctx.Err() != nil {
|
||||
return ctx.Err()
|
||||
}
|
||||
if attempt > 0 {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-time.After(delay):
|
||||
}
|
||||
delay *= 2
|
||||
}
|
||||
|
||||
data, err := fetchCoverBytes(ctx, coverURL)
|
||||
if err != nil {
|
||||
lastErr = err
|
||||
continue
|
||||
}
|
||||
|
||||
if err := r.deps.CoverStore.PutCover(ctx, slug, data, ""); err != nil {
|
||||
return fmt.Errorf("put cover: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return fmt.Errorf("download cover after %d retries: %w", maxRetries, lastErr)
|
||||
}
|
||||
|
||||
// fetchCoverBytes performs a single HTTP GET for coverURL and returns the body.
// The read is capped at 5 MiB; non-200 responses are drained and reported as
// errors (5xx gets a distinct "upstream" message).
func fetchCoverBytes(ctx context.Context, coverURL string) ([]byte, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, coverURL, nil)
	if err != nil {
		return nil, fmt.Errorf("build request: %w", err)
	}
	// Mimic a browser-ish UA and same-site referer; some CDNs reject bare clients.
	req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; libnovel-runner/2)")
	req.Header.Set("Referer", "https://novelfire.net/")

	resp, err := (&http.Client{Timeout: 30 * time.Second}).Do(req)
	if err != nil {
		return nil, fmt.Errorf("http get: %w", err)
	}
	defer resp.Body.Close()

	switch {
	case resp.StatusCode >= 500:
		_, _ = io.Copy(io.Discard, resp.Body) // drain so the connection can be reused
		return nil, fmt.Errorf("upstream %d for %s", resp.StatusCode, coverURL)
	case resp.StatusCode != http.StatusOK:
		_, _ = io.Copy(io.Discard, resp.Body)
		return nil, fmt.Errorf("unexpected status %d for %s", resp.StatusCode, coverURL)
	}

	return io.ReadAll(io.LimitReader(resp.Body, 5<<20)) // 5 MiB cap
}
|
||||
21
backend/internal/runner/helpers.go
Normal file
21
backend/internal/runner/helpers.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package runner
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Markdown-stripping patterns, compiled once at package init instead of on
// every stripMarkdown call.
var (
	mdHeading    = regexp.MustCompile(`(?m)^#{1,6}\s+`)
	mdEmphasis   = regexp.MustCompile(`\*{1,3}|_{1,3}`)
	mdCodeFence  = regexp.MustCompile("(?s)```.*?```")
	mdInlineCode = regexp.MustCompile("`[^`]*`")
	mdImage      = regexp.MustCompile(`!\[[^\]]*\]\([^)]+\)`)
	mdLink       = regexp.MustCompile(`\[([^\]]+)\]\([^)]+\)`)
	mdBlockquote = regexp.MustCompile(`(?m)^>\s?`)
	mdHRule      = regexp.MustCompile(`(?m)^[-*_]{3,}\s*$`)
	mdBlankRuns  = regexp.MustCompile(`\n{3,}`)
)

// stripMarkdown removes common markdown syntax from src, returning plain text
// suitable for TTS. Mirrors the helper in the scraper's server package.
//
// BUG FIX: images must be stripped BEFORE links — previously the link pattern
// ran first and half-matched `![alt](url)`, leaving a stray "!alt" in the
// output.
func stripMarkdown(src string) string {
	src = mdHeading.ReplaceAllString(src, "")
	src = mdEmphasis.ReplaceAllString(src, "")
	src = mdCodeFence.ReplaceAllString(src, "")
	src = mdInlineCode.ReplaceAllString(src, "")
	src = mdImage.ReplaceAllString(src, "")   // drop images entirely (before links)
	src = mdLink.ReplaceAllString(src, "$1")  // keep link text, drop URL
	src = mdBlockquote.ReplaceAllString(src, "")
	src = mdHRule.ReplaceAllString(src, "")
	src = mdBlankRuns.ReplaceAllString(src, "\n\n")
	return strings.TrimSpace(src)
}
|
||||
92
backend/internal/runner/metrics.go
Normal file
92
backend/internal/runner/metrics.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package runner
|
||||
|
||||
// metrics.go — lightweight HTTP metrics endpoint for the runner.
|
||||
//
|
||||
// GET /metrics returns a JSON document with live task counters and uptime.
|
||||
// No external dependency (no Prometheus); plain net/http only.
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"net"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
// metricsServer serves GET /metrics for the runner process.
type metricsServer struct {
	addr string // listen address, e.g. ":9091"
	r *Runner // source of the atomic task counters and start time
	log *slog.Logger // structured logger for server lifecycle messages
}
|
||||
|
||||
func newMetricsServer(addr string, r *Runner, log *slog.Logger) *metricsServer {
|
||||
return &metricsServer{addr: addr, r: r, log: log}
|
||||
}
|
||||
|
||||
// ListenAndServe starts the HTTP server and blocks until ctx is cancelled or
|
||||
// a fatal listen error occurs.
|
||||
func (ms *metricsServer) ListenAndServe(ctx context.Context) error {
|
||||
mux := http.NewServeMux()
|
||||
mux.HandleFunc("GET /metrics", ms.handleMetrics)
|
||||
mux.HandleFunc("GET /health", ms.handleHealth)
|
||||
|
||||
srv := &http.Server{
|
||||
Addr: ms.addr,
|
||||
Handler: mux,
|
||||
ReadTimeout: 5 * time.Second,
|
||||
WriteTimeout: 5 * time.Second,
|
||||
BaseContext: func(_ net.Listener) context.Context { return ctx },
|
||||
}
|
||||
|
||||
errCh := make(chan error, 1)
|
||||
go func() {
|
||||
ms.log.Info("runner: metrics server listening", "addr", ms.addr)
|
||||
errCh <- srv.ListenAndServe()
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
shutCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
|
||||
defer cancel()
|
||||
_ = srv.Shutdown(shutCtx)
|
||||
return nil
|
||||
case err := <-errCh:
|
||||
return fmt.Errorf("runner: metrics server: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// handleMetrics handles GET /metrics.
|
||||
// Response shape (JSON):
|
||||
//
|
||||
// {
|
||||
// "tasks_running": N,
|
||||
// "tasks_completed": N,
|
||||
// "tasks_failed": N,
|
||||
// "uptime_seconds": N
|
||||
// }
|
||||
func (ms *metricsServer) handleMetrics(w http.ResponseWriter, _ *http.Request) {
|
||||
uptimeSec := int64(time.Since(ms.r.startedAt).Seconds())
|
||||
metricsWriteJSON(w, 0, map[string]int64{
|
||||
"tasks_running": ms.r.tasksRunning.Load(),
|
||||
"tasks_completed": ms.r.tasksCompleted.Load(),
|
||||
"tasks_failed": ms.r.tasksFailed.Load(),
|
||||
"uptime_seconds": uptimeSec,
|
||||
})
|
||||
}
|
||||
|
||||
// handleHealth handles GET /health — simple liveness probe for the metrics server.
|
||||
func (ms *metricsServer) handleHealth(w http.ResponseWriter, _ *http.Request) {
|
||||
metricsWriteJSON(w, 0, map[string]string{"status": "ok"})
|
||||
}
|
||||
|
||||
// metricsWriteJSON writes v as a JSON response with the given status code.
|
||||
func metricsWriteJSON(w http.ResponseWriter, status int, v any) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
if status != 0 {
|
||||
w.WriteHeader(status)
|
||||
}
|
||||
_ = json.NewEncoder(w).Encode(v)
|
||||
}
|
||||
442
backend/internal/runner/runner.go
Normal file
442
backend/internal/runner/runner.go
Normal file
@@ -0,0 +1,442 @@
|
||||
// Package runner implements the worker loop that polls PocketBase for pending
|
||||
// scrape and audio tasks, executes them, and reports results back.
|
||||
//
|
||||
// Design:
|
||||
// - Run(ctx) loops on a ticker; each tick claims and dispatches pending tasks.
|
||||
// - Scrape tasks are dispatched to the Orchestrator (one goroutine per task,
|
||||
// up to MaxConcurrentScrape).
|
||||
// - Audio tasks fetch chapter text, call Kokoro, upload to MinIO, and report
|
||||
// the result back (up to MaxConcurrentAudio goroutines).
|
||||
// - The runner is stateless between ticks; all state lives in PocketBase.
|
||||
// - Atomic task counters are exposed via /metrics (see metrics.go).
|
||||
// - Books are indexed in Meilisearch via an orchestrator.Config.PostMetadata
|
||||
// hook injected at construction time.
|
||||
package runner
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/kokoro"
|
||||
"github.com/libnovel/backend/internal/meili"
|
||||
"github.com/libnovel/backend/internal/orchestrator"
|
||||
"github.com/libnovel/backend/internal/scraper"
|
||||
"github.com/libnovel/backend/internal/taskqueue"
|
||||
)
|
||||
|
||||
// Config tunes the runner behaviour.
//
// Zero values are replaced with sensible defaults by New; see New for the
// exact fallback applied to each field.
type Config struct {
	// WorkerID uniquely identifies this runner instance in PocketBase records.
	WorkerID string
	// PollInterval is how often the runner checks for new tasks.
	PollInterval time.Duration
	// MaxConcurrentScrape limits simultaneous book-scrape goroutines.
	MaxConcurrentScrape int
	// MaxConcurrentAudio limits simultaneous audio-generation goroutines.
	MaxConcurrentAudio int
	// OrchestratorWorkers is the chapter-scraping parallelism inside each book run.
	OrchestratorWorkers int
	// HeartbeatInterval is how often active tasks PATCH their heartbeat_at
	// timestamp to signal they are still alive. Defaults to 30s when 0.
	HeartbeatInterval time.Duration
	// StaleTaskThreshold is how old a heartbeat must be (or absent) before the
	// task is considered orphaned and reset to pending. Defaults to 2m when 0.
	StaleTaskThreshold time.Duration
	// CatalogueRefreshInterval is how often the runner walks the full catalogue,
	// scrapes per-book metadata, downloads covers, and re-indexes everything in
	// Meilisearch. Defaults to 24h (expensive — full catalogue walk).
	CatalogueRefreshInterval time.Duration
	// SkipInitialCatalogueRefresh suppresses the immediate catalogue walk that
	// otherwise fires at startup. The periodic ticker (CatalogueRefreshInterval)
	// still fires normally. Set RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true for
	// quick restarts where the catalogue is already up to date.
	SkipInitialCatalogueRefresh bool
	// MetricsAddr is the HTTP listen address for the /metrics endpoint.
	// Defaults to ":9091". Set to "" to disable.
	MetricsAddr string
}
|
||||
|
||||
// Dependencies are the external services the runner depends on.
//
// Novel, BookWriter, CoverStore and Kokoro are nil-checked at their use
// sites; SearchIndex and Log fall back to defaults in New.
type Dependencies struct {
	// Consumer claims tasks from PocketBase.
	Consumer taskqueue.Consumer
	// BookWriter persists scraped data (used by orchestrator).
	BookWriter bookstore.BookWriter
	// BookReader reads chapter text for audio generation.
	BookReader bookstore.BookReader
	// AudioStore persists generated audio and checks key existence.
	AudioStore bookstore.AudioStore
	// CoverStore stores book cover images in MinIO.
	CoverStore bookstore.CoverStore
	// SearchIndex indexes books in Meilisearch after scraping.
	// If nil a no-op is used.
	SearchIndex meili.Client
	// Novel is the scraper implementation.
	Novel scraper.NovelScraper
	// Kokoro is the TTS client.
	Kokoro kokoro.Client
	// Log is the structured logger.
	Log *slog.Logger
}
|
||||
|
||||
// Runner is the main worker process.
type Runner struct {
	cfg Config
	deps Dependencies

	// Atomic task counters — read by /metrics without locking.
	tasksRunning atomic.Int64
	tasksCompleted atomic.Int64
	tasksFailed atomic.Int64

	// startedAt records construction time; /metrics derives uptime from it.
	startedAt time.Time
}
|
||||
|
||||
// New creates a Runner from cfg and deps.
|
||||
func New(cfg Config, deps Dependencies) *Runner {
|
||||
if cfg.PollInterval <= 0 {
|
||||
cfg.PollInterval = 30 * time.Second
|
||||
}
|
||||
if cfg.MaxConcurrentScrape <= 0 {
|
||||
cfg.MaxConcurrentScrape = 2
|
||||
}
|
||||
if cfg.MaxConcurrentAudio <= 0 {
|
||||
cfg.MaxConcurrentAudio = 1
|
||||
}
|
||||
if cfg.WorkerID == "" {
|
||||
cfg.WorkerID = "runner"
|
||||
}
|
||||
if cfg.HeartbeatInterval <= 0 {
|
||||
cfg.HeartbeatInterval = 30 * time.Second
|
||||
}
|
||||
if cfg.StaleTaskThreshold <= 0 {
|
||||
cfg.StaleTaskThreshold = 2 * time.Minute
|
||||
}
|
||||
if cfg.CatalogueRefreshInterval <= 0 {
|
||||
cfg.CatalogueRefreshInterval = 24 * time.Hour
|
||||
}
|
||||
if cfg.MetricsAddr == "" {
|
||||
cfg.MetricsAddr = ":9091"
|
||||
}
|
||||
if deps.Log == nil {
|
||||
deps.Log = slog.Default()
|
||||
}
|
||||
if deps.SearchIndex == nil {
|
||||
deps.SearchIndex = meili.NoopClient{}
|
||||
}
|
||||
return &Runner{cfg: cfg, deps: deps, startedAt: time.Now()}
|
||||
}
|
||||
|
||||
// Run starts the poll loop and the metrics HTTP server, blocking until ctx is
// cancelled.
//
// On cancellation it waits up to two minutes for in-flight task goroutines to
// drain before returning.
func (r *Runner) Run(ctx context.Context) error {
	r.deps.Log.Info("runner: starting",
		"worker_id", r.cfg.WorkerID,
		"poll_interval", r.cfg.PollInterval,
		"max_scrape", r.cfg.MaxConcurrentScrape,
		"max_audio", r.cfg.MaxConcurrentAudio,
		"catalogue_refresh_interval", r.cfg.CatalogueRefreshInterval,
		"metrics_addr", r.cfg.MetricsAddr,
	)

	// Start metrics HTTP server in background if configured.
	if r.cfg.MetricsAddr != "" {
		ms := newMetricsServer(r.cfg.MetricsAddr, r, r.deps.Log)
		go func() {
			if err := ms.ListenAndServe(ctx); err != nil {
				r.deps.Log.Error("runner: metrics server error", "err", err)
			}
		}()
	}

	// Buffered channels act as counting semaphores bounding task parallelism;
	// wg tracks every spawned task goroutine for the drain phase below.
	scrapeSem := make(chan struct{}, r.cfg.MaxConcurrentScrape)
	audioSem := make(chan struct{}, r.cfg.MaxConcurrentAudio)
	var wg sync.WaitGroup

	tick := time.NewTicker(r.cfg.PollInterval)
	defer tick.Stop()

	catalogueTick := time.NewTicker(r.cfg.CatalogueRefreshInterval)
	defer catalogueTick.Stop()

	// Run one catalogue refresh immediately on startup (unless skipped by flag).
	if !r.cfg.SkipInitialCatalogueRefresh {
		go r.runCatalogueRefresh(ctx)
	} else {
		r.deps.Log.Info("runner: skipping initial catalogue refresh (RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true)")
	}

	// Run one poll immediately on startup, then on each tick.
	for {
		r.poll(ctx, scrapeSem, audioSem, &wg)

		select {
		case <-ctx.Done():
			// Graceful shutdown: wait for in-flight goroutines, but never
			// hang longer than two minutes.
			r.deps.Log.Info("runner: context cancelled, draining active tasks")
			done := make(chan struct{})
			go func() {
				wg.Wait()
				close(done)
			}()
			select {
			case <-done:
				r.deps.Log.Info("runner: all tasks drained, exiting")
			case <-time.After(2 * time.Minute):
				r.deps.Log.Warn("runner: drain timeout exceeded, forcing exit")
			}
			return nil
		case <-catalogueTick.C:
			// Periodic full catalogue walk runs concurrently with polling.
			go r.runCatalogueRefresh(ctx)
		case <-tick.C:
		}
	}
}
|
||||
|
||||
// poll claims all available pending tasks and dispatches them to goroutines.
|
||||
func (r *Runner) poll(ctx context.Context, scrapeSem, audioSem chan struct{}, wg *sync.WaitGroup) {
|
||||
// ── Heartbeat file ────────────────────────────────────────────────────
|
||||
// Touch /tmp/runner.alive so the Docker health check can confirm the
|
||||
// runner is actively polling. Failure is non-fatal — just log it.
|
||||
if f, err := os.Create("/tmp/runner.alive"); err != nil {
|
||||
r.deps.Log.Warn("runner: could not write heartbeat file", "err", err)
|
||||
} else {
|
||||
f.Close()
|
||||
}
|
||||
|
||||
// ── Reap orphaned tasks ───────────────────────────────────────────────
|
||||
if n, err := r.deps.Consumer.ReapStaleTasks(ctx, r.cfg.StaleTaskThreshold); err != nil {
|
||||
r.deps.Log.Warn("runner: reap stale tasks failed", "err", err)
|
||||
} else if n > 0 {
|
||||
r.deps.Log.Info("runner: reaped stale tasks", "count", n)
|
||||
}
|
||||
|
||||
// ── Scrape tasks ──────────────────────────────────────────────────────
|
||||
for {
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
task, ok, err := r.deps.Consumer.ClaimNextScrapeTask(ctx, r.cfg.WorkerID)
|
||||
if err != nil {
|
||||
r.deps.Log.Error("runner: ClaimNextScrapeTask failed", "err", err)
|
||||
break
|
||||
}
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
select {
|
||||
case scrapeSem <- struct{}{}:
|
||||
default:
|
||||
r.deps.Log.Warn("runner: scrape semaphore full, will retry next tick",
|
||||
"task_id", task.ID)
|
||||
break
|
||||
}
|
||||
r.tasksRunning.Add(1)
|
||||
wg.Add(1)
|
||||
go func(t domain.ScrapeTask) {
|
||||
defer wg.Done()
|
||||
defer func() { <-scrapeSem }()
|
||||
defer r.tasksRunning.Add(-1)
|
||||
r.runScrapeTask(ctx, t)
|
||||
}(task)
|
||||
}
|
||||
|
||||
// ── Audio tasks ───────────────────────────────────────────────────────
|
||||
for {
|
||||
if ctx.Err() != nil {
|
||||
return
|
||||
}
|
||||
task, ok, err := r.deps.Consumer.ClaimNextAudioTask(ctx, r.cfg.WorkerID)
|
||||
if err != nil {
|
||||
r.deps.Log.Error("runner: ClaimNextAudioTask failed", "err", err)
|
||||
break
|
||||
}
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
select {
|
||||
case audioSem <- struct{}{}:
|
||||
default:
|
||||
r.deps.Log.Warn("runner: audio semaphore full, will retry next tick",
|
||||
"task_id", task.ID)
|
||||
break
|
||||
}
|
||||
r.tasksRunning.Add(1)
|
||||
wg.Add(1)
|
||||
go func(t domain.AudioTask) {
|
||||
defer wg.Done()
|
||||
defer func() { <-audioSem }()
|
||||
defer r.tasksRunning.Add(-1)
|
||||
r.runAudioTask(ctx, t)
|
||||
}(task)
|
||||
}
|
||||
}
|
||||
|
||||
// newOrchestrator builds an orchestrator with the Meilisearch post-hook wired in.
|
||||
func (r *Runner) newOrchestrator() *orchestrator.Orchestrator {
|
||||
oCfg := orchestrator.Config{
|
||||
Workers: r.cfg.OrchestratorWorkers,
|
||||
PostMetadata: func(ctx context.Context, meta domain.BookMeta) {
|
||||
if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
|
||||
r.deps.Log.Warn("runner: meilisearch upsert failed",
|
||||
"slug", meta.Slug, "err", err)
|
||||
}
|
||||
},
|
||||
}
|
||||
return orchestrator.New(oCfg, r.deps.Novel, r.deps.BookWriter, r.deps.Log)
|
||||
}
|
||||
|
||||
// runScrapeTask executes one scrape task end-to-end and reports the result.
//
// A background goroutine heartbeats the task until this function returns
// (hbCancel fires via defer); the result is always reported via
// FinishScrapeTask, and success/failure counters are updated from the
// result's ErrorMessage.
func (r *Runner) runScrapeTask(ctx context.Context, task domain.ScrapeTask) {
	log := r.deps.Log.With("task_id", task.ID, "kind", task.Kind, "url", task.TargetURL)
	log.Info("runner: scrape task starting")

	// Heartbeat loop: stops when hbCtx is cancelled (i.e. when this function
	// returns). Heartbeats themselves use the outer ctx.
	hbCtx, hbCancel := context.WithCancel(ctx)
	defer hbCancel()
	go func() {
		tick := time.NewTicker(r.cfg.HeartbeatInterval)
		defer tick.Stop()
		for {
			select {
			case <-hbCtx.Done():
				return
			case <-tick.C:
				if err := r.deps.Consumer.HeartbeatTask(ctx, task.ID); err != nil {
					log.Warn("runner: heartbeat failed", "err", err)
				}
			}
		}
	}()

	o := r.newOrchestrator()
	var result domain.ScrapeResult

	// Dispatch by task kind; unknown kinds produce a failed result.
	switch task.Kind {
	case "catalogue":
		result = r.runCatalogueTask(ctx, task, o, log)
	case "book", "book_range":
		result = o.RunBook(ctx, task)
	default:
		result.ErrorMessage = fmt.Sprintf("unknown task kind: %q", task.Kind)
		log.Warn("runner: unknown task kind")
	}

	// Report the result even when it carries an error message.
	if err := r.deps.Consumer.FinishScrapeTask(ctx, task.ID, result); err != nil {
		log.Error("runner: FinishScrapeTask failed", "err", err)
	}

	// A non-empty ErrorMessage is what distinguishes failure from success.
	if result.ErrorMessage != "" {
		r.tasksFailed.Add(1)
	} else {
		r.tasksCompleted.Add(1)
	}

	log.Info("runner: scrape task finished",
		"scraped", result.ChaptersScraped,
		"skipped", result.ChaptersSkipped,
		"errors", result.Errors,
	)
}
|
||||
|
||||
// runCatalogueTask runs a full catalogue scrape.
|
||||
func (r *Runner) runCatalogueTask(ctx context.Context, task domain.ScrapeTask, o *orchestrator.Orchestrator, log *slog.Logger) domain.ScrapeResult {
|
||||
entries, errCh := r.deps.Novel.ScrapeCatalogue(ctx)
|
||||
var result domain.ScrapeResult
|
||||
|
||||
for entry := range entries {
|
||||
if ctx.Err() != nil {
|
||||
break
|
||||
}
|
||||
bookTask := domain.ScrapeTask{
|
||||
ID: task.ID,
|
||||
Kind: "book",
|
||||
TargetURL: entry.URL,
|
||||
}
|
||||
bookResult := o.RunBook(ctx, bookTask)
|
||||
result.BooksFound += bookResult.BooksFound + 1
|
||||
result.ChaptersScraped += bookResult.ChaptersScraped
|
||||
result.ChaptersSkipped += bookResult.ChaptersSkipped
|
||||
result.Errors += bookResult.Errors
|
||||
}
|
||||
|
||||
if err := <-errCh; err != nil {
|
||||
log.Warn("runner: catalogue scrape finished with error", "err", err)
|
||||
result.Errors++
|
||||
if result.ErrorMessage == "" {
|
||||
result.ErrorMessage = err.Error()
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// runAudioTask executes one audio-generation task: read chapter text, strip
// markdown, synthesize with Kokoro, upload to MinIO, and report the result.
func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
	log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "chapter", task.Chapter, "voice", task.Voice)
	log.Info("runner: audio task starting")

	// Heartbeat loop: stops when hbCtx is cancelled (i.e. when this function
	// returns). Heartbeats themselves use the outer ctx.
	hbCtx, hbCancel := context.WithCancel(ctx)
	defer hbCancel()
	go func() {
		tick := time.NewTicker(r.cfg.HeartbeatInterval)
		defer tick.Stop()
		for {
			select {
			case <-hbCtx.Done():
				return
			case <-tick.C:
				if err := r.deps.Consumer.HeartbeatTask(ctx, task.ID); err != nil {
					log.Warn("runner: heartbeat failed", "err", err)
				}
			}
		}
	}()

	// fail reports a terminal failure back to the queue and bumps the counter.
	fail := func(msg string) {
		log.Error("runner: audio task failed", "reason", msg)
		r.tasksFailed.Add(1)
		result := domain.AudioResult{ErrorMessage: msg}
		if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
			log.Error("runner: FinishAudioTask failed", "err", err)
		}
	}

	raw, err := r.deps.BookReader.ReadChapter(ctx, task.Slug, task.Chapter)
	if err != nil {
		fail(fmt.Sprintf("read chapter: %v", err))
		return
	}
	// Kokoro receives plain text; markdown markup would be read aloud.
	text := stripMarkdown(raw)
	if text == "" {
		fail("chapter text is empty after stripping markdown")
		return
	}

	if r.deps.Kokoro == nil {
		fail("kokoro client not configured")
		return
	}
	audioData, err := r.deps.Kokoro.GenerateAudio(ctx, text, task.Voice)
	if err != nil {
		fail(fmt.Sprintf("kokoro generate: %v", err))
		return
	}

	key := r.deps.AudioStore.AudioObjectKey(task.Slug, task.Chapter, task.Voice)
	if err := r.deps.AudioStore.PutAudio(ctx, key, audioData); err != nil {
		fail(fmt.Sprintf("put audio: %v", err))
		return
	}

	// Success path: count, report the object key, and log.
	r.tasksCompleted.Add(1)
	result := domain.AudioResult{ObjectKey: key}
	if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
		log.Error("runner: FinishAudioTask failed", "err", err)
	}
	log.Info("runner: audio task finished", "key", key)
}
|
||||
365
backend/internal/runner/runner_test.go
Normal file
365
backend/internal/runner/runner_test.go
Normal file
@@ -0,0 +1,365 @@
|
||||
package runner_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/runner"
|
||||
)
|
||||
|
||||
// ── Stub types ────────────────────────────────────────────────────────────────
|
||||
|
||||
// stubConsumer is a test double for taskqueue.Consumer.
|
||||
type stubConsumer struct {
|
||||
scrapeQueue []domain.ScrapeTask
|
||||
audioQueue []domain.AudioTask
|
||||
scrapeIdx int
|
||||
audioIdx int
|
||||
finished []string
|
||||
failCalled []string
|
||||
claimErr error
|
||||
}
|
||||
|
||||
func (s *stubConsumer) ClaimNextScrapeTask(_ context.Context, _ string) (domain.ScrapeTask, bool, error) {
|
||||
if s.claimErr != nil {
|
||||
return domain.ScrapeTask{}, false, s.claimErr
|
||||
}
|
||||
if s.scrapeIdx >= len(s.scrapeQueue) {
|
||||
return domain.ScrapeTask{}, false, nil
|
||||
}
|
||||
t := s.scrapeQueue[s.scrapeIdx]
|
||||
s.scrapeIdx++
|
||||
return t, true, nil
|
||||
}
|
||||
|
||||
func (s *stubConsumer) ClaimNextAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
|
||||
if s.claimErr != nil {
|
||||
return domain.AudioTask{}, false, s.claimErr
|
||||
}
|
||||
if s.audioIdx >= len(s.audioQueue) {
|
||||
return domain.AudioTask{}, false, nil
|
||||
}
|
||||
t := s.audioQueue[s.audioIdx]
|
||||
s.audioIdx++
|
||||
return t, true, nil
|
||||
}
|
||||
|
||||
func (s *stubConsumer) FinishScrapeTask(_ context.Context, id string, _ domain.ScrapeResult) error {
|
||||
s.finished = append(s.finished, id)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubConsumer) FinishAudioTask(_ context.Context, id string, _ domain.AudioResult) error {
|
||||
s.finished = append(s.finished, id)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubConsumer) FailTask(_ context.Context, id, _ string) error {
|
||||
s.failCalled = append(s.failCalled, id)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *stubConsumer) HeartbeatTask(_ context.Context, _ string) error { return nil }
|
||||
|
||||
func (s *stubConsumer) ReapStaleTasks(_ context.Context, _ time.Duration) (int, error) {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// stubBookWriter satisfies bookstore.BookWriter (no-op).
|
||||
type stubBookWriter struct{}
|
||||
|
||||
func (s *stubBookWriter) WriteMetadata(_ context.Context, _ domain.BookMeta) error { return nil }
|
||||
func (s *stubBookWriter) WriteChapter(_ context.Context, _ string, _ domain.Chapter) error {
|
||||
return nil
|
||||
}
|
||||
func (s *stubBookWriter) WriteChapterRefs(_ context.Context, _ string, _ []domain.ChapterRef) error {
|
||||
return nil
|
||||
}
|
||||
func (s *stubBookWriter) ChapterExists(_ context.Context, _ string, _ domain.ChapterRef) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// stubBookReader satisfies bookstore.BookReader — returns a single chapter.
|
||||
type stubBookReader struct {
|
||||
text string
|
||||
readErr error
|
||||
}
|
||||
|
||||
func (s *stubBookReader) ReadChapter(_ context.Context, _ string, _ int) (string, error) {
|
||||
return s.text, s.readErr
|
||||
}
|
||||
func (s *stubBookReader) ReadMetadata(_ context.Context, _ string) (domain.BookMeta, bool, error) {
|
||||
return domain.BookMeta{}, false, nil
|
||||
}
|
||||
func (s *stubBookReader) ListBooks(_ context.Context) ([]domain.BookMeta, error) { return nil, nil }
|
||||
func (s *stubBookReader) LocalSlugs(_ context.Context) (map[string]bool, error) { return nil, nil }
|
||||
func (s *stubBookReader) MetadataMtime(_ context.Context, _ string) int64 { return 0 }
|
||||
func (s *stubBookReader) ListChapters(_ context.Context, _ string) ([]domain.ChapterInfo, error) {
|
||||
return nil, nil
|
||||
}
|
||||
func (s *stubBookReader) CountChapters(_ context.Context, _ string) int { return 0 }
|
||||
func (s *stubBookReader) ReindexChapters(_ context.Context, _ string) (int, error) {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
// stubAudioStore satisfies bookstore.AudioStore.
type stubAudioStore struct {
	putCalled atomic.Int32 // number of PutAudio invocations
	putErr    error        // forced error returned by PutAudio
}

// AudioObjectKey builds a deterministic key.
// NOTE: the single-rune chapter digit is only meaningful for 0 ≤ n ≤ 9,
// which is fine for test fixtures.
func (s *stubAudioStore) AudioObjectKey(slug string, n int, voice string) string {
	key := slug + "/" + string(rune('0'+n)) + "/" + voice + ".mp3"
	return key
}

// AudioExists always reports a miss.
func (s *stubAudioStore) AudioExists(_ context.Context, _ string) bool { return false }

// PutAudio counts the call and returns the configured error.
func (s *stubAudioStore) PutAudio(_ context.Context, _ string, _ []byte) error {
	s.putCalled.Add(1)
	return s.putErr
}
|
||||
|
||||
// stubNovelScraper satisfies scraper.NovelScraper minimally.
|
||||
type stubNovelScraper struct {
|
||||
entries []domain.CatalogueEntry
|
||||
metaErr error
|
||||
chapters []domain.ChapterRef
|
||||
}
|
||||
|
||||
func (s *stubNovelScraper) ScrapeCatalogue(_ context.Context) (<-chan domain.CatalogueEntry, <-chan error) {
|
||||
ch := make(chan domain.CatalogueEntry, len(s.entries))
|
||||
errCh := make(chan error, 1)
|
||||
for _, e := range s.entries {
|
||||
ch <- e
|
||||
}
|
||||
close(ch)
|
||||
close(errCh)
|
||||
return ch, errCh
|
||||
}
|
||||
|
||||
func (s *stubNovelScraper) ScrapeMetadata(_ context.Context, _ string) (domain.BookMeta, error) {
|
||||
if s.metaErr != nil {
|
||||
return domain.BookMeta{}, s.metaErr
|
||||
}
|
||||
return domain.BookMeta{Slug: "test-book", Title: "Test Book", SourceURL: "https://example.com/book/test-book"}, nil
|
||||
}
|
||||
|
||||
func (s *stubNovelScraper) ScrapeChapterList(_ context.Context, _ string, _ int) ([]domain.ChapterRef, error) {
|
||||
return s.chapters, nil
|
||||
}
|
||||
|
||||
func (s *stubNovelScraper) ScrapeChapterText(_ context.Context, ref domain.ChapterRef) (domain.Chapter, error) {
|
||||
return domain.Chapter{Ref: ref, Text: "# Chapter\n\nSome text."}, nil
|
||||
}
|
||||
|
||||
func (s *stubNovelScraper) ScrapeRanking(_ context.Context, _ int) (<-chan domain.BookMeta, <-chan error) {
|
||||
ch := make(chan domain.BookMeta)
|
||||
errCh := make(chan error, 1)
|
||||
close(ch)
|
||||
close(errCh)
|
||||
return ch, errCh
|
||||
}
|
||||
|
||||
// SourceName identifies this stub in logs and task results.
func (s *stubNovelScraper) SourceName() string {
	return "stub"
}
|
||||
|
||||
// stubKokoro satisfies kokoro.Client.
type stubKokoro struct {
	data   []byte       // payload returned by GenerateAudio
	genErr error        // forced error for GenerateAudio; nil means success
	called atomic.Int32 // number of GenerateAudio invocations (read by tests)
}
|
||||
|
||||
func (s *stubKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, error) {
|
||||
s.called.Add(1)
|
||||
return s.data, s.genErr
|
||||
}
|
||||
|
||||
func (s *stubKokoro) ListVoices(_ context.Context) ([]string, error) {
|
||||
return []string{"af_bella"}, nil
|
||||
}
|
||||
|
||||
// ── stripMarkdown helper ──────────────────────────────────────────────────────
|
||||
|
||||
func TestStripMarkdownViaAudioTask(t *testing.T) {
|
||||
// Verify markdown is stripped before sending to Kokoro.
|
||||
// We inject chapter text with markdown; the kokoro stub verifies data flows.
|
||||
consumer := &stubConsumer{
|
||||
audioQueue: []domain.AudioTask{
|
||||
{ID: "a1", Slug: "book", Chapter: 1, Voice: "af_bella", Status: domain.TaskStatusRunning},
|
||||
},
|
||||
}
|
||||
bookReader := &stubBookReader{text: "## Chapter 1\n\nPlain **text** here."}
|
||||
audioStore := &stubAudioStore{}
|
||||
kokoroStub := &stubKokoro{data: []byte("mp3")}
|
||||
|
||||
cfg := runner.Config{
|
||||
WorkerID: "test",
|
||||
PollInterval: time.Hour, // long poll — we'll cancel manually
|
||||
}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: bookReader,
|
||||
AudioStore: audioStore,
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: kokoroStub,
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
_ = r.Run(ctx)
|
||||
|
||||
if kokoroStub.called.Load() != 1 {
|
||||
t.Errorf("expected Kokoro.GenerateAudio called once, got %d", kokoroStub.called.Load())
|
||||
}
|
||||
if audioStore.putCalled.Load() != 1 {
|
||||
t.Errorf("expected PutAudio called once, got %d", audioStore.putCalled.Load())
|
||||
}
|
||||
}
|
||||
|
||||
func TestAudioTask_ReadChapterError(t *testing.T) {
|
||||
consumer := &stubConsumer{
|
||||
audioQueue: []domain.AudioTask{
|
||||
{ID: "a2", Slug: "book", Chapter: 2, Voice: "af_bella", Status: domain.TaskStatusRunning},
|
||||
},
|
||||
}
|
||||
bookReader := &stubBookReader{readErr: errors.New("chapter not found")}
|
||||
audioStore := &stubAudioStore{}
|
||||
kokoroStub := &stubKokoro{data: []byte("mp3")}
|
||||
|
||||
cfg := runner.Config{WorkerID: "test", PollInterval: time.Hour}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: bookReader,
|
||||
AudioStore: audioStore,
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: kokoroStub,
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
_ = r.Run(ctx)
|
||||
|
||||
// Kokoro should not be called; FinishAudioTask should be called with error.
|
||||
if kokoroStub.called.Load() != 0 {
|
||||
t.Errorf("expected Kokoro not called, got %d", kokoroStub.called.Load())
|
||||
}
|
||||
if len(consumer.finished) != 1 {
|
||||
t.Errorf("expected FinishAudioTask called once, got %d", len(consumer.finished))
|
||||
}
|
||||
}
|
||||
|
||||
func TestAudioTask_KokoroError(t *testing.T) {
|
||||
consumer := &stubConsumer{
|
||||
audioQueue: []domain.AudioTask{
|
||||
{ID: "a3", Slug: "book", Chapter: 3, Voice: "af_bella", Status: domain.TaskStatusRunning},
|
||||
},
|
||||
}
|
||||
bookReader := &stubBookReader{text: "Chapter text."}
|
||||
audioStore := &stubAudioStore{}
|
||||
kokoroStub := &stubKokoro{genErr: errors.New("tts failed")}
|
||||
|
||||
cfg := runner.Config{WorkerID: "test", PollInterval: time.Hour}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: bookReader,
|
||||
AudioStore: audioStore,
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: kokoroStub,
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
_ = r.Run(ctx)
|
||||
|
||||
if audioStore.putCalled.Load() != 0 {
|
||||
t.Errorf("expected PutAudio not called, got %d", audioStore.putCalled.Load())
|
||||
}
|
||||
if len(consumer.finished) != 1 {
|
||||
t.Errorf("expected FinishAudioTask called once, got %d", len(consumer.finished))
|
||||
}
|
||||
}
|
||||
|
||||
func TestScrapeTask_BookKind(t *testing.T) {
|
||||
consumer := &stubConsumer{
|
||||
scrapeQueue: []domain.ScrapeTask{
|
||||
{ID: "s1", Kind: "book", TargetURL: "https://example.com/book/test-book", Status: domain.TaskStatusRunning},
|
||||
},
|
||||
}
|
||||
|
||||
cfg := runner.Config{WorkerID: "test", PollInterval: time.Hour}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: &stubBookReader{},
|
||||
AudioStore: &stubAudioStore{},
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: &stubKokoro{},
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
_ = r.Run(ctx)
|
||||
|
||||
if len(consumer.finished) != 1 || consumer.finished[0] != "s1" {
|
||||
t.Errorf("expected task s1 finished, got %v", consumer.finished)
|
||||
}
|
||||
}
|
||||
|
||||
func TestScrapeTask_UnknownKind(t *testing.T) {
|
||||
consumer := &stubConsumer{
|
||||
scrapeQueue: []domain.ScrapeTask{
|
||||
{ID: "s2", Kind: "unknown_kind", Status: domain.TaskStatusRunning},
|
||||
},
|
||||
}
|
||||
|
||||
cfg := runner.Config{WorkerID: "test", PollInterval: time.Hour}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: &stubBookReader{},
|
||||
AudioStore: &stubAudioStore{},
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: &stubKokoro{},
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
|
||||
defer cancel()
|
||||
_ = r.Run(ctx)
|
||||
|
||||
// Unknown kind still finishes the task (with error message in result).
|
||||
if len(consumer.finished) != 1 || consumer.finished[0] != "s2" {
|
||||
t.Errorf("expected task s2 finished, got %v", consumer.finished)
|
||||
}
|
||||
}
|
||||
|
||||
func TestRun_CancelImmediately(t *testing.T) {
|
||||
consumer := &stubConsumer{}
|
||||
cfg := runner.Config{WorkerID: "test", PollInterval: 10 * time.Millisecond}
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: &stubBookWriter{},
|
||||
BookReader: &stubBookReader{},
|
||||
AudioStore: &stubAudioStore{},
|
||||
Novel: &stubNovelScraper{},
|
||||
Kokoro: &stubKokoro{},
|
||||
}
|
||||
|
||||
r := runner.New(cfg, deps)
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
cancel() // cancel before Run
|
||||
|
||||
err := r.Run(ctx)
|
||||
if err != nil {
|
||||
t.Errorf("expected nil on graceful shutdown, got %v", err)
|
||||
}
|
||||
}
|
||||
60
backend/internal/scraper/scraper.go
Normal file
60
backend/internal/scraper/scraper.go
Normal file
@@ -0,0 +1,60 @@
|
||||
// Package scraper defines the NovelScraper interface and its sub-interfaces.
|
||||
// Domain types live in internal/domain — this package only defines the scraping
|
||||
// contract so that novelfire and any future scrapers can be swapped freely.
|
||||
package scraper
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
// CatalogueProvider can enumerate every novel available on a source site.
type CatalogueProvider interface {
	// ScrapeCatalogue streams catalogue entries on the first channel and
	// scrape failures on the second.
	// NOTE(review): the channel-closing contract (both closed when done) is
	// inferred from the stub implementations — confirm against the concrete
	// scraper before relying on it.
	ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueEntry, <-chan error)
}
|
||||
|
||||
// MetadataProvider can extract structured book metadata from a novel's landing page.
type MetadataProvider interface {
	// ScrapeMetadata fetches and parses the landing page at bookURL.
	ScrapeMetadata(ctx context.Context, bookURL string) (domain.BookMeta, error)
}
|
||||
|
||||
// ChapterListProvider can enumerate all chapters of a book.
type ChapterListProvider interface {
	// ScrapeChapterList lists chapter references for the book at bookURL.
	// upTo > 0 stops pagination once at least upTo chapter numbers have been
	// collected (early-exit optimisation for range scrapes); upTo == 0
	// fetches all pages.
	ScrapeChapterList(ctx context.Context, bookURL string, upTo int) ([]domain.ChapterRef, error)
}
|
||||
|
||||
// ChapterTextProvider can extract the readable text from a single chapter page.
type ChapterTextProvider interface {
	// ScrapeChapterText fetches the chapter referenced by ref and returns it
	// with its extracted text.
	ScrapeChapterText(ctx context.Context, ref domain.ChapterRef) (domain.Chapter, error)
}
|
||||
|
||||
// RankingProvider can enumerate novels from a ranking page.
type RankingProvider interface {
	// ScrapeRanking pages through up to maxPages ranking pages.
	// maxPages <= 0 means all pages.
	// Books stream on the first channel, failures on the second; the stub
	// implementations close both when done (presumed contract — confirm).
	ScrapeRanking(ctx context.Context, maxPages int) (<-chan domain.BookMeta, <-chan error)
}
|
||||
|
||||
// NovelScraper is the full interface a concrete novel source must implement.
// Consumers that need only part of the contract should depend on one of the
// narrower provider interfaces above instead.
type NovelScraper interface {
	CatalogueProvider
	MetadataProvider
	ChapterListProvider
	ChapterTextProvider
	RankingProvider

	// SourceName returns the human-readable name of this scraper, e.g. "novelfire.net".
	SourceName() string
}
|
||||
|
||||
// Selector describes how to locate an element in an HTML document.
//
// Field semantics are not enforced here; they are interpreted by whichever
// scraper consumes the Selector. The comments below reflect the conventional
// reading — confirm against the matcher implementation.
type Selector struct {
	Tag      string // element tag name, e.g. "div"
	Class    string // CSS class the element must carry
	ID       string // element id attribute
	Attr     string // attribute whose value should be extracted
	Multiple bool   // when true, match every occurrence instead of the first
}
|
||||
244
backend/internal/storage/minio.go
Normal file
244
backend/internal/storage/minio.go
Normal file
@@ -0,0 +1,244 @@
|
||||
package storage
|
||||
|
||||
import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/url"
	"path"
	"strings"
	"time"

	minio "github.com/minio/minio-go/v7"
	"github.com/minio/minio-go/v7/pkg/credentials"

	"github.com/libnovel/backend/internal/config"
)
|
||||
|
||||
// minioClient wraps the official minio-go client with bucket names.
type minioClient struct {
	client         *minio.Client // internal — all read/write operations
	pubClient      *minio.Client // presign-only — initialised against the public endpoint
	bucketChapters string        // chapter markdown files
	bucketAudio    string        // generated TTS audio
	bucketAvatars  string        // user avatar images
	bucketBrowse   string        // browse assets; covers are stored here under covers/
}
|
||||
|
||||
// newMinioClient builds two minio-go clients from cfg: an internal one used
// for all read/write operations, and a presign-only client bound to the
// public endpoint (see the long note below on why two clients are needed and
// why the public one must skip the BucketLocation preflight). No buckets are
// created here; that happens in ensureBuckets.
func newMinioClient(cfg config.MinIO) (*minioClient, error) {
	creds := credentials.NewStaticV4(cfg.AccessKey, cfg.SecretKey, "")

	internal, err := minio.New(cfg.Endpoint, &minio.Options{
		Creds:  creds,
		Secure: cfg.UseSSL,
	})
	if err != nil {
		return nil, fmt.Errorf("minio: init internal client: %w", err)
	}

	// Presigned URLs must be signed with the hostname the browser will use
	// (PUBLIC_MINIO_PUBLIC_URL), because AWS Signature V4 includes the Host
	// header in the canonical request — a URL signed against "minio:9000" will
	// return SignatureDoesNotMatch when the browser fetches it from
	// "localhost:9000".
	//
	// However, minio-go normally makes a live BucketLocation HTTP call before
	// signing, which would fail from inside the container when the public
	// endpoint is externally-facing (e.g. "localhost:9000" is unreachable from
	// within Docker). We prevent this by:
	//   1. Setting Region: "us-east-1" — minio-go skips getBucketLocation when
	//      the region is already known (bucket-cache.go:49).
	//   2. Setting BucketLookup: BucketLookupPath — forces path-style URLs
	//      (e.g. host/bucket/key), matching MinIO's default behaviour and
	//      avoiding any virtual-host DNS probing.
	//
	// When no public endpoint is configured (or it equals the internal one),
	// fall back to the internal client so presigning still works.
	publicEndpoint := cfg.PublicEndpoint
	if u, err2 := url.Parse(publicEndpoint); err2 == nil && u.Host != "" {
		publicEndpoint = u.Host // strip scheme so minio.New is happy
	}
	pubUseSSL := cfg.PublicUseSSL
	if publicEndpoint == "" || publicEndpoint == cfg.Endpoint {
		publicEndpoint = cfg.Endpoint
		pubUseSSL = cfg.UseSSL
	}
	pub, err := minio.New(publicEndpoint, &minio.Options{
		Creds:        creds,
		Secure:       pubUseSSL,
		Region:       "us-east-1", // skip live BucketLocation preflight
		BucketLookup: minio.BucketLookupPath,
	})
	if err != nil {
		return nil, fmt.Errorf("minio: init public client: %w", err)
	}

	return &minioClient{
		client:         internal,
		pubClient:      pub,
		bucketChapters: cfg.BucketChapters,
		bucketAudio:    cfg.BucketAudio,
		bucketAvatars:  cfg.BucketAvatars,
		bucketBrowse:   cfg.BucketBrowse,
	}, nil
}
|
||||
|
||||
// ensureBuckets creates all required buckets if they don't already exist.
|
||||
func (m *minioClient) ensureBuckets(ctx context.Context) error {
|
||||
for _, bucket := range []string{m.bucketChapters, m.bucketAudio, m.bucketAvatars, m.bucketBrowse} {
|
||||
exists, err := m.client.BucketExists(ctx, bucket)
|
||||
if err != nil {
|
||||
return fmt.Errorf("minio: check bucket %q: %w", bucket, err)
|
||||
}
|
||||
if !exists {
|
||||
if err := m.client.MakeBucket(ctx, bucket, minio.MakeBucketOptions{}); err != nil {
|
||||
return fmt.Errorf("minio: create bucket %q: %w", bucket, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ── Key helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
// ChapterObjectKey returns the MinIO object key for a chapter markdown file.
// Format: {slug}/chapter-{n:06d}.md — the zero-padded width keeps keys in
// lexicographic == numeric order when listed.
func ChapterObjectKey(slug string, n int) string {
	return slug + "/" + fmt.Sprintf("chapter-%06d.md", n)
}
|
||||
|
||||
// AudioObjectKey returns the MinIO object key for a cached audio file.
// Format: {slug}/{n}/{voice}.mp3 — one folder per chapter, one file per voice.
func AudioObjectKey(slug string, n int, voice string) string {
	key := fmt.Sprintf("%s/%d/%s.mp3", slug, n, voice)
	return key
}
|
||||
|
||||
// AvatarObjectKey returns the MinIO object key for a user avatar image.
// Format: {userID}/{ext}.{ext}
//
// NOTE(review): the extension is used as both the base name and the suffix
// (e.g. "u1/png.png"), so a user keeps one avatar per extension. If a fixed
// base name ("{userID}/avatar.{ext}") was intended, this doubles the
// extension — confirm against upload/lookup callers before changing, since
// already-stored objects depend on the current layout.
func AvatarObjectKey(userID, ext string) string {
	return fmt.Sprintf("%s/%s.%s", userID, ext, ext)
}
|
||||
|
||||
// CoverObjectKey returns the MinIO object key for a book cover image.
// Format: covers/{slug}.jpg
func CoverObjectKey(slug string) string {
	return "covers/" + slug + ".jpg"
}
|
||||
|
||||
// chapterNumberFromKey extracts the chapter number from a MinIO object key,
// e.g. "my-book/chapter-000042.md" → 42. Keys that do not parse yield 0
// (Sscanf leaves n at its zero value on failure).
func chapterNumberFromKey(key string) int {
	name := strings.TrimSuffix(strings.TrimPrefix(path.Base(key), "chapter-"), ".md")
	var n int
	fmt.Sscanf(name, "%d", &n)
	return n
}
|
||||
|
||||
// ── Object operations ─────────────────────────────────────────────────────────
|
||||
|
||||
func (m *minioClient) putObject(ctx context.Context, bucket, key, contentType string, data []byte) error {
|
||||
_, err := m.client.PutObject(ctx, bucket, key,
|
||||
strings.NewReader(string(data)),
|
||||
int64(len(data)),
|
||||
minio.PutObjectOptions{ContentType: contentType},
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
func (m *minioClient) getObject(ctx context.Context, bucket, key string) ([]byte, error) {
|
||||
obj, err := m.client.GetObject(ctx, bucket, key, minio.GetObjectOptions{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer obj.Close()
|
||||
return io.ReadAll(obj)
|
||||
}
|
||||
|
||||
func (m *minioClient) objectExists(ctx context.Context, bucket, key string) bool {
|
||||
_, err := m.client.StatObject(ctx, bucket, key, minio.StatObjectOptions{})
|
||||
return err == nil
|
||||
}
|
||||
|
||||
func (m *minioClient) presignGet(ctx context.Context, bucket, key string, expires time.Duration) (string, error) {
|
||||
u, err := m.pubClient.PresignedGetObject(ctx, bucket, key, expires, nil)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("minio presign %s/%s: %w", bucket, key, err)
|
||||
}
|
||||
return u.String(), nil
|
||||
}
|
||||
|
||||
func (m *minioClient) presignPut(ctx context.Context, bucket, key string, expires time.Duration) (string, error) {
|
||||
u, err := m.pubClient.PresignedPutObject(ctx, bucket, key, expires)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("minio presign PUT %s/%s: %w", bucket, key, err)
|
||||
}
|
||||
return u.String(), nil
|
||||
}
|
||||
|
||||
func (m *minioClient) deleteObjects(ctx context.Context, bucket, prefix string) error {
|
||||
objCh := m.client.ListObjects(ctx, bucket, minio.ListObjectsOptions{Prefix: prefix})
|
||||
for obj := range objCh {
|
||||
if obj.Err != nil {
|
||||
return obj.Err
|
||||
}
|
||||
if err := m.client.RemoveObject(ctx, bucket, obj.Key, minio.RemoveObjectOptions{}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *minioClient) listObjectKeys(ctx context.Context, bucket, prefix string) ([]string, error) {
|
||||
var keys []string
|
||||
for obj := range m.client.ListObjects(ctx, bucket, minio.ListObjectsOptions{Prefix: prefix}) {
|
||||
if obj.Err != nil {
|
||||
return nil, obj.Err
|
||||
}
|
||||
keys = append(keys, obj.Key)
|
||||
}
|
||||
return keys, nil
|
||||
}
|
||||
|
||||
// ── Cover operations ──────────────────────────────────────────────────────────
|
||||
|
||||
// putCover stores a raw cover image in the browse bucket. key is supplied by
// the caller — typically produced by CoverObjectKey (covers/{slug}.jpg) —
// and is not validated here.
func (m *minioClient) putCover(ctx context.Context, key, contentType string, data []byte) error {
	return m.putObject(ctx, m.bucketBrowse, key, contentType, data)
}
|
||||
|
||||
// getCover retrieves a cover image from the browse bucket.
// Returns (nil, false, nil) when the object does not exist.
// (The previous comment claimed a four-value return; the function returns
// three values.)
//
// NOTE: the existence check and the read are two separate calls, so an
// object deleted in between surfaces as a read error rather than a miss.
func (m *minioClient) getCover(ctx context.Context, key string) ([]byte, bool, error) {
	if !m.objectExists(ctx, m.bucketBrowse, key) {
		return nil, false, nil
	}
	data, err := m.getObject(ctx, m.bucketBrowse, key)
	if err != nil {
		return nil, false, err
	}
	return data, true, nil
}
|
||||
|
||||
// coverExists returns true when the cover image object exists.
|
||||
func (m *minioClient) coverExists(ctx context.Context, key string) bool {
|
||||
return m.objectExists(ctx, m.bucketBrowse, key)
|
||||
}
|
||||
|
||||
// coverContentType sniffs the image format from the leading bytes of data.
// It recognises the PNG and WebP magic numbers; anything else — including
// actual JPEG data — falls back to "image/jpeg". (The previous comment
// mentioned only JPEG/PNG, omitting the WebP branch.)
func coverContentType(data []byte) string {
	if len(data) < 4 {
		return "image/jpeg"
	}
	// PNG signature: 0x89 'P' 'N' 'G'
	if data[0] == 0x89 && data[1] == 0x50 && data[2] == 0x4E && data[3] == 0x47 {
		return "image/png"
	}
	// WebP: RIFF container with a WEBP fourcc at offset 8.
	if len(data) >= 12 &&
		data[0] == 'R' && data[1] == 'I' && data[2] == 'F' && data[3] == 'F' &&
		data[8] == 'W' && data[9] == 'E' && data[10] == 'B' && data[11] == 'P' {
		return "image/webp"
	}
	return "image/jpeg"
}
|
||||
268
backend/internal/storage/pocketbase.go
Normal file
268
backend/internal/storage/pocketbase.go
Normal file
@@ -0,0 +1,268 @@
|
||||
// Package storage provides the concrete implementations of all bookstore and
|
||||
// taskqueue interfaces backed by PocketBase (structured data) and MinIO (blobs).
|
||||
//
|
||||
// Entry point: NewStore(ctx, cfg, log) returns a *Store that satisfies every
|
||||
// interface defined in bookstore and taskqueue.
|
||||
package storage
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log/slog"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/config"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
// ErrNotFound is returned by single-record lookups when no record exists.
// get and delete return it unwrapped; claimRecord wraps its errors, so
// callers should prefer errors.Is over ==.
var ErrNotFound = errors.New("storage: record not found")
|
||||
|
||||
// pbClient is the internal PocketBase REST admin client.
type pbClient struct {
	baseURL  string // PocketBase root URL, trailing slash stripped
	email    string // superuser identity for auth-with-password
	password string // superuser password
	log      *slog.Logger

	mu    sync.Mutex // guards token and exp
	token string     // cached admin token; empty until first auth
	exp   time.Time  // local expiry estimate after which the token is refreshed
}
|
||||
|
||||
func newPBClient(cfg config.PocketBase, log *slog.Logger) *pbClient {
|
||||
return &pbClient{
|
||||
baseURL: strings.TrimRight(cfg.URL, "/"),
|
||||
email: cfg.AdminEmail,
|
||||
password: cfg.AdminPassword,
|
||||
log: log,
|
||||
}
|
||||
}
|
||||
|
||||
// authToken returns a valid admin auth token, refreshing it when expired.
|
||||
func (c *pbClient) authToken(ctx context.Context) (string, error) {
|
||||
c.mu.Lock()
|
||||
defer c.mu.Unlock()
|
||||
if c.token != "" && time.Now().Before(c.exp) {
|
||||
return c.token, nil
|
||||
}
|
||||
|
||||
body, _ := json.Marshal(map[string]string{
|
||||
"identity": c.email,
|
||||
"password": c.password,
|
||||
})
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost,
|
||||
c.baseURL+"/api/collections/_superusers/auth-with-password", bytes.NewReader(body))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("pb auth: build request: %w", err)
|
||||
}
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("pb auth: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
raw, _ := io.ReadAll(resp.Body)
|
||||
return "", fmt.Errorf("pb auth: status %d: %s", resp.StatusCode, string(raw))
|
||||
}
|
||||
|
||||
var payload struct {
|
||||
Token string `json:"token"`
|
||||
}
|
||||
if err := json.NewDecoder(resp.Body).Decode(&payload); err != nil {
|
||||
return "", fmt.Errorf("pb auth: decode: %w", err)
|
||||
}
|
||||
c.token = payload.Token
|
||||
c.exp = time.Now().Add(30 * time.Minute)
|
||||
return c.token, nil
|
||||
}
|
||||
|
||||
// do executes an authenticated PocketBase REST request.
|
||||
func (c *pbClient) do(ctx context.Context, method, path string, body io.Reader) (*http.Response, error) {
|
||||
tok, err := c.authToken(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, method, c.baseURL+path, body)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("pb: build request %s %s: %w", method, path, err)
|
||||
}
|
||||
req.Header.Set("Authorization", tok)
|
||||
if body != nil {
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
}
|
||||
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("pb: %s %s: %w", method, path, err)
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
// get is a convenience wrapper that decodes a JSON response into v.
|
||||
func (c *pbClient) get(ctx context.Context, path string, v any) error {
|
||||
resp, err := c.do(ctx, http.MethodGet, path, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
return ErrNotFound
|
||||
}
|
||||
if resp.StatusCode >= 400 {
|
||||
raw, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("pb GET %s: status %d: %s", path, resp.StatusCode, string(raw))
|
||||
}
|
||||
return json.NewDecoder(resp.Body).Decode(v)
|
||||
}
|
||||
|
||||
// post creates a record and decodes the created record into v.
|
||||
func (c *pbClient) post(ctx context.Context, path string, payload, v any) error {
|
||||
b, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return fmt.Errorf("pb: marshal: %w", err)
|
||||
}
|
||||
resp, err := c.do(ctx, http.MethodPost, path, bytes.NewReader(b))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode >= 400 {
|
||||
raw, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("pb POST %s: status %d: %s", path, resp.StatusCode, string(raw))
|
||||
}
|
||||
if v != nil {
|
||||
return json.NewDecoder(resp.Body).Decode(v)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// patch updates a record.
|
||||
func (c *pbClient) patch(ctx context.Context, path string, payload any) error {
|
||||
b, err := json.Marshal(payload)
|
||||
if err != nil {
|
||||
return fmt.Errorf("pb: marshal: %w", err)
|
||||
}
|
||||
resp, err := c.do(ctx, http.MethodPatch, path, bytes.NewReader(b))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode >= 400 {
|
||||
raw, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("pb PATCH %s: status %d: %s", path, resp.StatusCode, string(raw))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// delete removes a record.
|
||||
func (c *pbClient) delete(ctx context.Context, path string) error {
|
||||
resp, err := c.do(ctx, http.MethodDelete, path, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode == http.StatusNotFound {
|
||||
return ErrNotFound
|
||||
}
|
||||
if resp.StatusCode >= 400 {
|
||||
raw, _ := io.ReadAll(resp.Body)
|
||||
return fmt.Errorf("pb DELETE %s: status %d: %s", path, resp.StatusCode, string(raw))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// listAll fetches all pages of a collection. PocketBase returns at most 200
|
||||
// records per page; we paginate until empty.
|
||||
func (c *pbClient) listAll(ctx context.Context, collection string, filter, sort string) ([]json.RawMessage, error) {
|
||||
var all []json.RawMessage
|
||||
page := 1
|
||||
for {
|
||||
q := url.Values{
|
||||
"page": {fmt.Sprintf("%d", page)},
|
||||
"perPage": {"200"},
|
||||
}
|
||||
if filter != "" {
|
||||
q.Set("filter", filter)
|
||||
}
|
||||
if sort != "" {
|
||||
q.Set("sort", sort)
|
||||
}
|
||||
path := fmt.Sprintf("/api/collections/%s/records?%s", collection, q.Encode())
|
||||
|
||||
var result struct {
|
||||
Items []json.RawMessage `json:"items"`
|
||||
Page int `json:"page"`
|
||||
Pages int `json:"totalPages"`
|
||||
}
|
||||
if err := c.get(ctx, path, &result); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
all = append(all, result.Items...)
|
||||
if result.Page >= result.Pages {
|
||||
break
|
||||
}
|
||||
page++
|
||||
}
|
||||
return all, nil
|
||||
}
|
||||
|
||||
// claimRecord claims the oldest pending record in collection: it lists one
// pending record (filter + sort), PATCHes it to running with the worker id
// plus any extraClaim fields, then re-fetches and returns the updated record.
// Returns (nil, nil) when the queue is empty.
//
// NOTE(review): despite the previous "atomically claims" wording, the
// list-then-PATCH sequence is NOT atomic — two workers polling concurrently
// can both read the same pending record and both "claim" it (last PATCH
// wins; both proceed). Harmless only if task processing is idempotent;
// otherwise a server-side conditional update on status is needed.
func (c *pbClient) claimRecord(ctx context.Context, collection, workerID string, extraClaim map[string]any) (json.RawMessage, error) {
	q := url.Values{}
	q.Set("filter", `status="pending"`)
	q.Set("sort", "+started")
	q.Set("perPage", "1")
	path := fmt.Sprintf("/api/collections/%s/records?%s", collection, q.Encode())

	var result struct {
		Items []json.RawMessage `json:"items"`
	}
	if err := c.get(ctx, path, &result); err != nil {
		return nil, fmt.Errorf("claimRecord list: %w", err)
	}
	if len(result.Items) == 0 {
		return nil, nil // queue empty
	}

	var rec struct {
		ID string `json:"id"`
	}
	if err := json.Unmarshal(result.Items[0], &rec); err != nil {
		return nil, fmt.Errorf("claimRecord parse id: %w", err)
	}

	claim := map[string]any{
		"status":    string(domain.TaskStatusRunning),
		"worker_id": workerID,
	}
	for k, v := range extraClaim {
		claim[k] = v
	}

	claimPath := fmt.Sprintf("/api/collections/%s/records/%s", collection, rec.ID)
	if err := c.patch(ctx, claimPath, claim); err != nil {
		return nil, fmt.Errorf("claimRecord patch: %w", err)
	}

	// Re-fetch the updated record so caller has current state.
	var updated json.RawMessage
	if err := c.get(ctx, claimPath, &updated); err != nil {
		return nil, fmt.Errorf("claimRecord re-fetch: %w", err)
	}
	return updated, nil
}
|
||||
820
backend/internal/storage/store.go
Normal file
820
backend/internal/storage/store.go
Normal file
@@ -0,0 +1,820 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/config"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/taskqueue"
|
||||
)
|
||||
|
||||
// Store is the unified persistence implementation that satisfies all bookstore
// and taskqueue interfaces. It routes structured data to PocketBase and binary
// blobs to MinIO.
type Store struct {
	pb  *pbClient    // PocketBase REST client — structured records
	mc  *minioClient // MinIO client — chapter text, audio, covers, avatars
	log *slog.Logger
}
|
||||
|
||||
// NewStore initialises PocketBase and MinIO connections and ensures all MinIO
|
||||
// buckets exist. Returns a ready-to-use Store.
|
||||
func NewStore(ctx context.Context, cfg config.Config, log *slog.Logger) (*Store, error) {
|
||||
pb := newPBClient(cfg.PocketBase, log)
|
||||
// Validate PocketBase connectivity by fetching an auth token.
|
||||
if _, err := pb.authToken(ctx); err != nil {
|
||||
return nil, fmt.Errorf("pocketbase: %w", err)
|
||||
}
|
||||
|
||||
mc, err := newMinioClient(cfg.MinIO)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("minio: %w", err)
|
||||
}
|
||||
if err := mc.ensureBuckets(ctx); err != nil {
|
||||
return nil, fmt.Errorf("minio: ensure buckets: %w", err)
|
||||
}
|
||||
|
||||
return &Store{pb: pb, mc: mc, log: log}, nil
|
||||
}
|
||||
|
||||
// Compile-time interface satisfaction. A build failure on any line below
// means Store no longer implements that bookstore/taskqueue interface.
var _ bookstore.BookWriter = (*Store)(nil)
var _ bookstore.BookReader = (*Store)(nil)
var _ bookstore.RankingStore = (*Store)(nil)
var _ bookstore.AudioStore = (*Store)(nil)
var _ bookstore.PresignStore = (*Store)(nil)
var _ bookstore.ProgressStore = (*Store)(nil)
var _ bookstore.CoverStore = (*Store)(nil)
var _ taskqueue.Producer = (*Store)(nil)
var _ taskqueue.Consumer = (*Store)(nil)
var _ taskqueue.Reader = (*Store)(nil)
|
||||
|
||||
// ── BookWriter ────────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) WriteMetadata(ctx context.Context, meta domain.BookMeta) error {
|
||||
payload := map[string]any{
|
||||
"slug": meta.Slug,
|
||||
"title": meta.Title,
|
||||
"author": meta.Author,
|
||||
"cover": meta.Cover,
|
||||
"status": meta.Status,
|
||||
"genres": meta.Genres,
|
||||
"summary": meta.Summary,
|
||||
"total_chapters": meta.TotalChapters,
|
||||
"source_url": meta.SourceURL,
|
||||
"ranking": meta.Ranking,
|
||||
"rating": meta.Rating,
|
||||
}
|
||||
// Upsert via filter: if exists PATCH, otherwise POST.
|
||||
existing, err := s.getBookBySlug(ctx, meta.Slug)
|
||||
if err != nil && err != ErrNotFound {
|
||||
return fmt.Errorf("WriteMetadata: %w", err)
|
||||
}
|
||||
if err == ErrNotFound {
|
||||
return s.pb.post(ctx, "/api/collections/books/records", payload, nil)
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/books/records/%s", existing.ID), payload)
|
||||
}
|
||||
|
||||
func (s *Store) WriteChapter(ctx context.Context, slug string, chapter domain.Chapter) error {
|
||||
key := ChapterObjectKey(slug, chapter.Ref.Number)
|
||||
if err := s.mc.putObject(ctx, s.mc.bucketChapters, key, "text/markdown", []byte(chapter.Text)); err != nil {
|
||||
return fmt.Errorf("WriteChapter: minio: %w", err)
|
||||
}
|
||||
// Upsert the chapters_idx record in PocketBase.
|
||||
return s.upsertChapterIdx(ctx, slug, chapter.Ref)
|
||||
}
|
||||
|
||||
func (s *Store) WriteChapterRefs(ctx context.Context, slug string, refs []domain.ChapterRef) error {
|
||||
for _, ref := range refs {
|
||||
if err := s.upsertChapterIdx(ctx, slug, ref); err != nil {
|
||||
s.log.Warn("WriteChapterRefs: upsert failed", "slug", slug, "chapter", ref.Number, "err", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) ChapterExists(ctx context.Context, slug string, ref domain.ChapterRef) bool {
|
||||
return s.mc.objectExists(ctx, s.mc.bucketChapters, ChapterObjectKey(slug, ref.Number))
|
||||
}
|
||||
|
||||
func (s *Store) upsertChapterIdx(ctx context.Context, slug string, ref domain.ChapterRef) error {
|
||||
payload := map[string]any{
|
||||
"slug": slug,
|
||||
"number": ref.Number,
|
||||
"title": ref.Title,
|
||||
}
|
||||
filter := fmt.Sprintf(`slug=%q&&number=%d`, slug, ref.Number)
|
||||
items, err := s.pb.listAll(ctx, "chapters_idx", filter, "")
|
||||
if err != nil && err != ErrNotFound {
|
||||
return err
|
||||
}
|
||||
if len(items) == 0 {
|
||||
return s.pb.post(ctx, "/api/collections/chapters_idx/records", payload, nil)
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
json.Unmarshal(items[0], &rec)
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/chapters_idx/records/%s", rec.ID), payload)
|
||||
}
|
||||
|
||||
// ── BookReader ────────────────────────────────────────────────────────────────

// pbBook mirrors one record of the PocketBase "books" collection.
// The JSON tags match the collection field names used in WriteMetadata;
// Updated carries the PocketBase-managed modification timestamp as an
// RFC 3339 string (parsed in toDomain / MetadataMtime).
type pbBook struct {
	ID            string   `json:"id"`
	Slug          string   `json:"slug"`
	Title         string   `json:"title"`
	Author        string   `json:"author"`
	Cover         string   `json:"cover"`
	Status        string   `json:"status"`
	Genres        []string `json:"genres"`
	Summary       string   `json:"summary"`
	TotalChapters int      `json:"total_chapters"`
	SourceURL     string   `json:"source_url"`
	Ranking       int      `json:"ranking"`
	Rating        float64  `json:"rating"`
	Updated       string   `json:"updated"` // record mtime, set by PocketBase
}
|
||||
|
||||
func (b pbBook) toDomain() domain.BookMeta {
|
||||
var metaUpdated int64
|
||||
if t, err := time.Parse(time.RFC3339, b.Updated); err == nil {
|
||||
metaUpdated = t.Unix()
|
||||
}
|
||||
return domain.BookMeta{
|
||||
Slug: b.Slug,
|
||||
Title: b.Title,
|
||||
Author: b.Author,
|
||||
Cover: b.Cover,
|
||||
Status: b.Status,
|
||||
Genres: b.Genres,
|
||||
Summary: b.Summary,
|
||||
TotalChapters: b.TotalChapters,
|
||||
SourceURL: b.SourceURL,
|
||||
Ranking: b.Ranking,
|
||||
Rating: b.Rating,
|
||||
MetaUpdated: metaUpdated,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Store) getBookBySlug(ctx context.Context, slug string) (pbBook, error) {
|
||||
filter := fmt.Sprintf(`slug=%q`, slug)
|
||||
items, err := s.pb.listAll(ctx, "books", filter, "")
|
||||
if err != nil {
|
||||
return pbBook{}, err
|
||||
}
|
||||
if len(items) == 0 {
|
||||
return pbBook{}, ErrNotFound
|
||||
}
|
||||
var b pbBook
|
||||
json.Unmarshal(items[0], &b)
|
||||
return b, nil
|
||||
}
|
||||
|
||||
func (s *Store) ReadMetadata(ctx context.Context, slug string) (domain.BookMeta, bool, error) {
|
||||
b, err := s.getBookBySlug(ctx, slug)
|
||||
if err == ErrNotFound {
|
||||
return domain.BookMeta{}, false, nil
|
||||
}
|
||||
if err != nil {
|
||||
return domain.BookMeta{}, false, err
|
||||
}
|
||||
return b.toDomain(), true, nil
|
||||
}
|
||||
|
||||
func (s *Store) ListBooks(ctx context.Context) ([]domain.BookMeta, error) {
|
||||
items, err := s.pb.listAll(ctx, "books", "", "title")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
books := make([]domain.BookMeta, 0, len(items))
|
||||
for _, raw := range items {
|
||||
var b pbBook
|
||||
json.Unmarshal(raw, &b)
|
||||
books = append(books, b.toDomain())
|
||||
}
|
||||
return books, nil
|
||||
}
|
||||
|
||||
func (s *Store) LocalSlugs(ctx context.Context) (map[string]bool, error) {
|
||||
items, err := s.pb.listAll(ctx, "books", "", "")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
slugs := make(map[string]bool, len(items))
|
||||
for _, raw := range items {
|
||||
var b struct {
|
||||
Slug string `json:"slug"`
|
||||
}
|
||||
json.Unmarshal(raw, &b)
|
||||
if b.Slug != "" {
|
||||
slugs[b.Slug] = true
|
||||
}
|
||||
}
|
||||
return slugs, nil
|
||||
}
|
||||
|
||||
func (s *Store) MetadataMtime(ctx context.Context, slug string) int64 {
|
||||
b, err := s.getBookBySlug(ctx, slug)
|
||||
if err != nil {
|
||||
return 0
|
||||
}
|
||||
t, err := time.Parse(time.RFC3339, b.Updated)
|
||||
if err != nil {
|
||||
return 0
|
||||
}
|
||||
return t.Unix()
|
||||
}
|
||||
|
||||
func (s *Store) ReadChapter(ctx context.Context, slug string, n int) (string, error) {
|
||||
data, err := s.mc.getObject(ctx, s.mc.bucketChapters, ChapterObjectKey(slug, n))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("ReadChapter: %w", err)
|
||||
}
|
||||
return string(data), nil
|
||||
}
|
||||
|
||||
func (s *Store) ListChapters(ctx context.Context, slug string) ([]domain.ChapterInfo, error) {
|
||||
filter := fmt.Sprintf(`slug=%q`, slug)
|
||||
items, err := s.pb.listAll(ctx, "chapters_idx", filter, "number")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
chapters := make([]domain.ChapterInfo, 0, len(items))
|
||||
for _, raw := range items {
|
||||
var rec struct {
|
||||
Number int `json:"number"`
|
||||
Title string `json:"title"`
|
||||
}
|
||||
json.Unmarshal(raw, &rec)
|
||||
chapters = append(chapters, domain.ChapterInfo{Number: rec.Number, Title: rec.Title})
|
||||
}
|
||||
return chapters, nil
|
||||
}
|
||||
|
||||
func (s *Store) CountChapters(ctx context.Context, slug string) int {
|
||||
chapters, err := s.ListChapters(ctx, slug)
|
||||
if err != nil {
|
||||
return 0
|
||||
}
|
||||
return len(chapters)
|
||||
}
|
||||
|
||||
func (s *Store) ReindexChapters(ctx context.Context, slug string) (int, error) {
|
||||
keys, err := s.mc.listObjectKeys(ctx, s.mc.bucketChapters, slug+"/")
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("ReindexChapters: list objects: %w", err)
|
||||
}
|
||||
count := 0
|
||||
for _, key := range keys {
|
||||
if !strings.HasSuffix(key, ".md") {
|
||||
continue
|
||||
}
|
||||
n := chapterNumberFromKey(key)
|
||||
if n == 0 {
|
||||
continue
|
||||
}
|
||||
ref := domain.ChapterRef{Number: n}
|
||||
if err := s.upsertChapterIdx(ctx, slug, ref); err != nil {
|
||||
s.log.Warn("ReindexChapters: upsert failed", "key", key, "err", err)
|
||||
continue
|
||||
}
|
||||
count++
|
||||
}
|
||||
return count, nil
|
||||
}
|
||||
|
||||
// ── RankingStore ──────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) WriteRankingItem(ctx context.Context, item domain.RankingItem) error {
|
||||
payload := map[string]any{
|
||||
"rank": item.Rank,
|
||||
"slug": item.Slug,
|
||||
"title": item.Title,
|
||||
"author": item.Author,
|
||||
"cover": item.Cover,
|
||||
"status": item.Status,
|
||||
"genres": item.Genres,
|
||||
"source_url": item.SourceURL,
|
||||
}
|
||||
filter := fmt.Sprintf(`slug=%q`, item.Slug)
|
||||
items, err := s.pb.listAll(ctx, "ranking", filter, "")
|
||||
if err != nil && err != ErrNotFound {
|
||||
return err
|
||||
}
|
||||
if len(items) == 0 {
|
||||
return s.pb.post(ctx, "/api/collections/ranking/records", payload, nil)
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
json.Unmarshal(items[0], &rec)
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/ranking/records/%s", rec.ID), payload)
|
||||
}
|
||||
|
||||
func (s *Store) ReadRankingItems(ctx context.Context) ([]domain.RankingItem, error) {
|
||||
items, err := s.pb.listAll(ctx, "ranking", "", "rank")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make([]domain.RankingItem, 0, len(items))
|
||||
for _, raw := range items {
|
||||
var rec struct {
|
||||
Rank int `json:"rank"`
|
||||
Slug string `json:"slug"`
|
||||
Title string `json:"title"`
|
||||
Author string `json:"author"`
|
||||
Cover string `json:"cover"`
|
||||
Status string `json:"status"`
|
||||
Genres []string `json:"genres"`
|
||||
SourceURL string `json:"source_url"`
|
||||
Updated string `json:"updated"`
|
||||
}
|
||||
json.Unmarshal(raw, &rec)
|
||||
t, _ := time.Parse(time.RFC3339, rec.Updated)
|
||||
result = append(result, domain.RankingItem{
|
||||
Rank: rec.Rank,
|
||||
Slug: rec.Slug,
|
||||
Title: rec.Title,
|
||||
Author: rec.Author,
|
||||
Cover: rec.Cover,
|
||||
Status: rec.Status,
|
||||
Genres: rec.Genres,
|
||||
SourceURL: rec.SourceURL,
|
||||
Updated: t,
|
||||
})
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Store) RankingFreshEnough(ctx context.Context, maxAge time.Duration) (bool, error) {
|
||||
items, err := s.ReadRankingItems(ctx)
|
||||
if err != nil || len(items) == 0 {
|
||||
return false, err
|
||||
}
|
||||
var latest time.Time
|
||||
for _, item := range items {
|
||||
if item.Updated.After(latest) {
|
||||
latest = item.Updated
|
||||
}
|
||||
}
|
||||
return time.Since(latest) < maxAge, nil
|
||||
}
|
||||
|
||||
// ── AudioStore ────────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) AudioObjectKey(slug string, n int, voice string) string {
|
||||
return AudioObjectKey(slug, n, voice)
|
||||
}
|
||||
|
||||
func (s *Store) AudioExists(ctx context.Context, key string) bool {
|
||||
return s.mc.objectExists(ctx, s.mc.bucketAudio, key)
|
||||
}
|
||||
|
||||
func (s *Store) PutAudio(ctx context.Context, key string, data []byte) error {
|
||||
return s.mc.putObject(ctx, s.mc.bucketAudio, key, "audio/mpeg", data)
|
||||
}
|
||||
|
||||
// ── PresignStore ──────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) PresignChapter(ctx context.Context, slug string, n int, expires time.Duration) (string, error) {
|
||||
return s.mc.presignGet(ctx, s.mc.bucketChapters, ChapterObjectKey(slug, n), expires)
|
||||
}
|
||||
|
||||
func (s *Store) PresignAudio(ctx context.Context, key string, expires time.Duration) (string, error) {
|
||||
return s.mc.presignGet(ctx, s.mc.bucketAudio, key, expires)
|
||||
}
|
||||
|
||||
func (s *Store) PresignAvatarUpload(ctx context.Context, userID, ext string) (uploadURL, key string, err error) {
|
||||
key = AvatarObjectKey(userID, ext)
|
||||
uploadURL, err = s.mc.presignPut(ctx, s.mc.bucketAvatars, key, 15*time.Minute)
|
||||
return
|
||||
}
|
||||
|
||||
func (s *Store) PresignAvatarURL(ctx context.Context, userID string) (string, bool, error) {
|
||||
for _, ext := range []string{"jpg", "png", "webp"} {
|
||||
key := AvatarObjectKey(userID, ext)
|
||||
if s.mc.objectExists(ctx, s.mc.bucketAvatars, key) {
|
||||
u, err := s.mc.presignGet(ctx, s.mc.bucketAvatars, key, 1*time.Hour)
|
||||
return u, true, err
|
||||
}
|
||||
}
|
||||
return "", false, nil
|
||||
}
|
||||
|
||||
func (s *Store) PutAvatar(ctx context.Context, userID, ext, contentType string, data []byte) (string, error) {
|
||||
// Delete existing avatar objects for this user before writing the new one
|
||||
// so old extensions don't linger (e.g. old .png after uploading a .jpg).
|
||||
_ = s.mc.deleteObjects(ctx, s.mc.bucketAvatars, userID+"/")
|
||||
key := AvatarObjectKey(userID, ext)
|
||||
if err := s.mc.putObject(ctx, s.mc.bucketAvatars, key, contentType, data); err != nil {
|
||||
return "", fmt.Errorf("put avatar: %w", err)
|
||||
}
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeleteAvatar(ctx context.Context, userID string) error {
|
||||
return s.mc.deleteObjects(ctx, s.mc.bucketAvatars, userID+"/")
|
||||
}
|
||||
|
||||
// ── ProgressStore ─────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) GetProgress(ctx context.Context, sessionID, slug string) (domain.ReadingProgress, bool) {
|
||||
filter := fmt.Sprintf(`session_id=%q&&slug=%q`, sessionID, slug)
|
||||
items, err := s.pb.listAll(ctx, "progress", filter, "")
|
||||
if err != nil || len(items) == 0 {
|
||||
return domain.ReadingProgress{}, false
|
||||
}
|
||||
var rec struct {
|
||||
Slug string `json:"slug"`
|
||||
Chapter int `json:"chapter"`
|
||||
UpdatedAt string `json:"updated"`
|
||||
}
|
||||
json.Unmarshal(items[0], &rec)
|
||||
t, _ := time.Parse(time.RFC3339, rec.UpdatedAt)
|
||||
return domain.ReadingProgress{Slug: rec.Slug, Chapter: rec.Chapter, UpdatedAt: t}, true
|
||||
}
|
||||
|
||||
func (s *Store) SetProgress(ctx context.Context, sessionID string, p domain.ReadingProgress) error {
|
||||
payload := map[string]any{
|
||||
"session_id": sessionID,
|
||||
"slug": p.Slug,
|
||||
"chapter": p.Chapter,
|
||||
}
|
||||
filter := fmt.Sprintf(`session_id=%q&&slug=%q`, sessionID, p.Slug)
|
||||
items, err := s.pb.listAll(ctx, "progress", filter, "")
|
||||
if err != nil && err != ErrNotFound {
|
||||
return err
|
||||
}
|
||||
if len(items) == 0 {
|
||||
return s.pb.post(ctx, "/api/collections/progress/records", payload, nil)
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
json.Unmarshal(items[0], &rec)
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/progress/records/%s", rec.ID), payload)
|
||||
}
|
||||
|
||||
func (s *Store) AllProgress(ctx context.Context, sessionID string) ([]domain.ReadingProgress, error) {
|
||||
filter := fmt.Sprintf(`session_id=%q`, sessionID)
|
||||
items, err := s.pb.listAll(ctx, "progress", filter, "-updated")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := make([]domain.ReadingProgress, 0, len(items))
|
||||
for _, raw := range items {
|
||||
var rec struct {
|
||||
Slug string `json:"slug"`
|
||||
Chapter int `json:"chapter"`
|
||||
UpdatedAt string `json:"updated"`
|
||||
}
|
||||
json.Unmarshal(raw, &rec)
|
||||
t, _ := time.Parse(time.RFC3339, rec.UpdatedAt)
|
||||
result = append(result, domain.ReadingProgress{Slug: rec.Slug, Chapter: rec.Chapter, UpdatedAt: t})
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Store) DeleteProgress(ctx context.Context, sessionID, slug string) error {
|
||||
filter := fmt.Sprintf(`session_id=%q&&slug=%q`, sessionID, slug)
|
||||
items, err := s.pb.listAll(ctx, "progress", filter, "")
|
||||
if err != nil || len(items) == 0 {
|
||||
return nil
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
json.Unmarshal(items[0], &rec)
|
||||
return s.pb.delete(ctx, fmt.Sprintf("/api/collections/progress/records/%s", rec.ID))
|
||||
}
|
||||
|
||||
// ── taskqueue.Producer ────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) CreateScrapeTask(ctx context.Context, kind, targetURL string, fromChapter, toChapter int) (string, error) {
|
||||
payload := map[string]any{
|
||||
"kind": kind,
|
||||
"target_url": targetURL,
|
||||
"from_chapter": fromChapter,
|
||||
"to_chapter": toChapter,
|
||||
"status": string(domain.TaskStatusPending),
|
||||
"started": time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
if err := s.pb.post(ctx, "/api/collections/scraping_tasks/records", payload, &rec); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return rec.ID, nil
|
||||
}
|
||||
|
||||
func (s *Store) CreateAudioTask(ctx context.Context, slug string, chapter int, voice string) (string, error) {
|
||||
cacheKey := fmt.Sprintf("%s/%d/%s", slug, chapter, voice)
|
||||
payload := map[string]any{
|
||||
"cache_key": cacheKey,
|
||||
"slug": slug,
|
||||
"chapter": chapter,
|
||||
"voice": voice,
|
||||
"status": string(domain.TaskStatusPending),
|
||||
"started": time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
if err := s.pb.post(ctx, "/api/collections/audio_jobs/records", payload, &rec); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return rec.ID, nil
|
||||
}
|
||||
|
||||
func (s *Store) CancelTask(ctx context.Context, id string) error {
|
||||
// Try scraping_tasks first, then audio_jobs.
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id),
|
||||
map[string]string{"status": string(domain.TaskStatusCancelled)}); err == nil {
|
||||
return nil
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id),
|
||||
map[string]string{"status": string(domain.TaskStatusCancelled)})
|
||||
}
|
||||
|
||||
// ── taskqueue.Consumer ────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) ClaimNextScrapeTask(ctx context.Context, workerID string) (domain.ScrapeTask, bool, error) {
|
||||
raw, err := s.pb.claimRecord(ctx, "scraping_tasks", workerID, nil)
|
||||
if err != nil {
|
||||
return domain.ScrapeTask{}, false, err
|
||||
}
|
||||
if raw == nil {
|
||||
return domain.ScrapeTask{}, false, nil
|
||||
}
|
||||
task, err := parseScrapeTask(raw)
|
||||
return task, err == nil, err
|
||||
}
|
||||
|
||||
func (s *Store) ClaimNextAudioTask(ctx context.Context, workerID string) (domain.AudioTask, bool, error) {
|
||||
raw, err := s.pb.claimRecord(ctx, "audio_jobs", workerID, nil)
|
||||
if err != nil {
|
||||
return domain.AudioTask{}, false, err
|
||||
}
|
||||
if raw == nil {
|
||||
return domain.AudioTask{}, false, nil
|
||||
}
|
||||
task, err := parseAudioTask(raw)
|
||||
return task, err == nil, err
|
||||
}
|
||||
|
||||
func (s *Store) FinishScrapeTask(ctx context.Context, id string, result domain.ScrapeResult) error {
|
||||
status := string(domain.TaskStatusDone)
|
||||
if result.ErrorMessage != "" {
|
||||
status = string(domain.TaskStatusFailed)
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id), map[string]any{
|
||||
"status": status,
|
||||
"books_found": result.BooksFound,
|
||||
"chapters_scraped": result.ChaptersScraped,
|
||||
"chapters_skipped": result.ChaptersSkipped,
|
||||
"errors": result.Errors,
|
||||
"error_message": result.ErrorMessage,
|
||||
"finished": time.Now().UTC().Format(time.RFC3339),
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Store) FinishAudioTask(ctx context.Context, id string, result domain.AudioResult) error {
|
||||
status := string(domain.TaskStatusDone)
|
||||
if result.ErrorMessage != "" {
|
||||
status = string(domain.TaskStatusFailed)
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id), map[string]any{
|
||||
"status": status,
|
||||
"error_message": result.ErrorMessage,
|
||||
"finished": time.Now().UTC().Format(time.RFC3339),
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Store) FailTask(ctx context.Context, id, errMsg string) error {
|
||||
payload := map[string]any{
|
||||
"status": string(domain.TaskStatusFailed),
|
||||
"error_message": errMsg,
|
||||
"finished": time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id), payload); err == nil {
|
||||
return nil
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id), payload)
|
||||
}
|
||||
|
||||
// HeartbeatTask updates the heartbeat_at field on a running task.
|
||||
// Tries scraping_tasks first, then audio_jobs (same pattern as FailTask).
|
||||
func (s *Store) HeartbeatTask(ctx context.Context, id string) error {
|
||||
payload := map[string]any{
|
||||
"heartbeat_at": time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id), payload); err == nil {
|
||||
return nil
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id), payload)
|
||||
}
|
||||
|
||||
// ReapStaleTasks finds all running tasks whose heartbeat_at is either missing
|
||||
// or older than staleAfter, and resets them to pending so they can be
|
||||
// re-claimed. Returns the number of tasks reaped.
|
||||
func (s *Store) ReapStaleTasks(ctx context.Context, staleAfter time.Duration) (int, error) {
|
||||
threshold := time.Now().UTC().Add(-staleAfter).Format(time.RFC3339)
|
||||
// Match tasks that are running AND (heartbeat_at is null OR heartbeat_at < threshold).
|
||||
// PocketBase datetime fields require `=null` not `=""` in filter expressions.
|
||||
filter := fmt.Sprintf(`status="running"&&(heartbeat_at=null||heartbeat_at<"%s")`, threshold)
|
||||
resetPayload := map[string]any{
|
||||
"status": string(domain.TaskStatusPending),
|
||||
"worker_id": "",
|
||||
"heartbeat_at": nil,
|
||||
}
|
||||
|
||||
total := 0
|
||||
for _, collection := range []string{"scraping_tasks", "audio_jobs"} {
|
||||
items, err := s.pb.listAll(ctx, collection, filter, "")
|
||||
if err != nil {
|
||||
return total, fmt.Errorf("ReapStaleTasks list %s: %w", collection, err)
|
||||
}
|
||||
for _, raw := range items {
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
}
|
||||
if err := json.Unmarshal(raw, &rec); err != nil || rec.ID == "" {
|
||||
continue
|
||||
}
|
||||
path := fmt.Sprintf("/api/collections/%s/records/%s", collection, rec.ID)
|
||||
if err := s.pb.patch(ctx, path, resetPayload); err != nil {
|
||||
s.log.Warn("ReapStaleTasks: patch failed", "collection", collection, "id", rec.ID, "err", err)
|
||||
continue
|
||||
}
|
||||
total++
|
||||
}
|
||||
}
|
||||
return total, nil
|
||||
}
|
||||
|
||||
// ── taskqueue.Reader ──────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) ListScrapeTasks(ctx context.Context) ([]domain.ScrapeTask, error) {
|
||||
items, err := s.pb.listAll(ctx, "scraping_tasks", "", "-started")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tasks := make([]domain.ScrapeTask, 0, len(items))
|
||||
for _, raw := range items {
|
||||
t, err := parseScrapeTask(raw)
|
||||
if err == nil {
|
||||
tasks = append(tasks, t)
|
||||
}
|
||||
}
|
||||
return tasks, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetScrapeTask(ctx context.Context, id string) (domain.ScrapeTask, bool, error) {
|
||||
var raw json.RawMessage
|
||||
if err := s.pb.get(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id), &raw); err != nil {
|
||||
if err == ErrNotFound {
|
||||
return domain.ScrapeTask{}, false, nil
|
||||
}
|
||||
return domain.ScrapeTask{}, false, err
|
||||
}
|
||||
t, err := parseScrapeTask(raw)
|
||||
return t, err == nil, err
|
||||
}
|
||||
|
||||
func (s *Store) ListAudioTasks(ctx context.Context) ([]domain.AudioTask, error) {
|
||||
items, err := s.pb.listAll(ctx, "audio_jobs", "", "-started")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tasks := make([]domain.AudioTask, 0, len(items))
|
||||
for _, raw := range items {
|
||||
t, err := parseAudioTask(raw)
|
||||
if err == nil {
|
||||
tasks = append(tasks, t)
|
||||
}
|
||||
}
|
||||
return tasks, nil
|
||||
}
|
||||
|
||||
func (s *Store) GetAudioTask(ctx context.Context, cacheKey string) (domain.AudioTask, bool, error) {
|
||||
filter := fmt.Sprintf(`cache_key=%q`, cacheKey)
|
||||
items, err := s.pb.listAll(ctx, "audio_jobs", filter, "-started")
|
||||
if err != nil || len(items) == 0 {
|
||||
return domain.AudioTask{}, false, err
|
||||
}
|
||||
t, err := parseAudioTask(items[0])
|
||||
return t, err == nil, err
|
||||
}
|
||||
|
||||
// ── Parsers ───────────────────────────────────────────────────────────────────

// parseScrapeTask decodes a raw PocketBase scraping_tasks record into the
// domain model. Timestamp parse errors are deliberately discarded: a
// missing or unparsable started/finished string yields the zero time.
func parseScrapeTask(raw json.RawMessage) (domain.ScrapeTask, error) {
	// Anonymous struct mirrors the scraping_tasks collection schema;
	// tags must match the field names written by CreateScrapeTask and
	// FinishScrapeTask.
	var rec struct {
		ID              string `json:"id"`
		Kind            string `json:"kind"`
		TargetURL       string `json:"target_url"`
		FromChapter     int    `json:"from_chapter"`
		ToChapter       int    `json:"to_chapter"`
		WorkerID        string `json:"worker_id"`
		Status          string `json:"status"`
		BooksFound      int    `json:"books_found"`
		ChaptersScraped int    `json:"chapters_scraped"`
		ChaptersSkipped int    `json:"chapters_skipped"`
		Errors          int    `json:"errors"`
		Started         string `json:"started"`
		Finished        string `json:"finished"`
		ErrorMessage    string `json:"error_message"`
	}
	if err := json.Unmarshal(raw, &rec); err != nil {
		return domain.ScrapeTask{}, err
	}
	started, _ := time.Parse(time.RFC3339, rec.Started)
	finished, _ := time.Parse(time.RFC3339, rec.Finished)
	return domain.ScrapeTask{
		ID:              rec.ID,
		Kind:            rec.Kind,
		TargetURL:       rec.TargetURL,
		FromChapter:     rec.FromChapter,
		ToChapter:       rec.ToChapter,
		WorkerID:        rec.WorkerID,
		Status:          domain.TaskStatus(rec.Status),
		BooksFound:      rec.BooksFound,
		ChaptersScraped: rec.ChaptersScraped,
		ChaptersSkipped: rec.ChaptersSkipped,
		Errors:          rec.Errors,
		Started:         started,
		Finished:        finished,
		ErrorMessage:    rec.ErrorMessage,
	}, nil
}
|
||||
|
||||
// parseAudioTask decodes a raw PocketBase audio_jobs record into the
// domain model. Timestamp parse errors are deliberately discarded: a
// missing or unparsable started/finished string yields the zero time.
func parseAudioTask(raw json.RawMessage) (domain.AudioTask, error) {
	// Anonymous struct mirrors the audio_jobs collection schema; tags
	// must match the field names written by CreateAudioTask and
	// FinishAudioTask.
	var rec struct {
		ID           string `json:"id"`
		CacheKey     string `json:"cache_key"`
		Slug         string `json:"slug"`
		Chapter      int    `json:"chapter"`
		Voice        string `json:"voice"`
		WorkerID     string `json:"worker_id"`
		Status       string `json:"status"`
		ErrorMessage string `json:"error_message"`
		Started      string `json:"started"`
		Finished     string `json:"finished"`
	}
	if err := json.Unmarshal(raw, &rec); err != nil {
		return domain.AudioTask{}, err
	}
	started, _ := time.Parse(time.RFC3339, rec.Started)
	finished, _ := time.Parse(time.RFC3339, rec.Finished)
	return domain.AudioTask{
		ID:           rec.ID,
		CacheKey:     rec.CacheKey,
		Slug:         rec.Slug,
		Chapter:      rec.Chapter,
		Voice:        rec.Voice,
		WorkerID:     rec.WorkerID,
		Status:       domain.TaskStatus(rec.Status),
		ErrorMessage: rec.ErrorMessage,
		Started:      started,
		Finished:     finished,
	}, nil
}
|
||||
|
||||
// ── CoverStore ─────────────────────────────────────────────────────────────────
|
||||
|
||||
func (s *Store) PutCover(ctx context.Context, slug string, data []byte, contentType string) error {
|
||||
key := CoverObjectKey(slug)
|
||||
if contentType == "" {
|
||||
contentType = coverContentType(data)
|
||||
}
|
||||
if err := s.mc.putCover(ctx, key, contentType, data); err != nil {
|
||||
return fmt.Errorf("PutCover: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Store) GetCover(ctx context.Context, slug string) ([]byte, string, bool, error) {
|
||||
key := CoverObjectKey(slug)
|
||||
data, ok, err := s.mc.getCover(ctx, key)
|
||||
if err != nil {
|
||||
return nil, "", false, fmt.Errorf("GetCover: %w", err)
|
||||
}
|
||||
if !ok {
|
||||
return nil, "", false, nil
|
||||
}
|
||||
ct := coverContentType(data)
|
||||
return data, ct, true, nil
|
||||
}
|
||||
|
||||
func (s *Store) CoverExists(ctx context.Context, slug string) bool {
|
||||
return s.mc.coverExists(ctx, CoverObjectKey(slug))
|
||||
}
|
||||
84
backend/internal/taskqueue/taskqueue.go
Normal file
84
backend/internal/taskqueue/taskqueue.go
Normal file
@@ -0,0 +1,84 @@
|
||||
// Package taskqueue defines the interfaces for creating and consuming
|
||||
// scrape/audio tasks stored in PocketBase.
|
||||
//
|
||||
// Interface segregation:
|
||||
// - Producer is used only by the backend (creates tasks, cancels tasks).
|
||||
// - Consumer is used only by the runner (claims tasks, reports results).
|
||||
// - Reader is used by the backend for status/history endpoints.
|
||||
//
|
||||
// Concrete implementations live in internal/storage.
|
||||
package taskqueue
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
)
|
||||
|
||||
// Producer is the write side of the task queue used by the backend service.
// It creates new tasks in PocketBase for the runner to pick up.
type Producer interface {
	// CreateScrapeTask inserts a new scrape task with status=pending and
	// returns the assigned PocketBase record ID.
	// kind is one of "catalogue", "book", or "book_range".
	// targetURL is the book URL (empty for catalogue-wide tasks).
	// fromChapter/toChapter bound the chapter range for "book_range" tasks.
	CreateScrapeTask(ctx context.Context, kind, targetURL string, fromChapter, toChapter int) (string, error)

	// CreateAudioTask inserts a new audio task with status=pending and
	// returns the assigned PocketBase record ID.
	CreateAudioTask(ctx context.Context, slug string, chapter int, voice string) (string, error)

	// CancelTask transitions a pending task to status=cancelled.
	// Returns ErrNotFound if the task does not exist.
	CancelTask(ctx context.Context, id string) error
}
|
||||
|
||||
// Consumer is the read/claim side of the task queue used by the runner.
// Claim → periodic Heartbeat → Finish/Fail is the expected lifecycle;
// ReapStaleTasks recovers tasks whose runner died mid-lifecycle.
type Consumer interface {
	// ClaimNextScrapeTask atomically finds the oldest pending scrape task,
	// sets its status=running and worker_id=workerID, and returns it.
	// Returns (zero, false, nil) when the queue is empty.
	ClaimNextScrapeTask(ctx context.Context, workerID string) (domain.ScrapeTask, bool, error)

	// ClaimNextAudioTask atomically finds the oldest pending audio task,
	// sets its status=running and worker_id=workerID, and returns it.
	// Returns (zero, false, nil) when the queue is empty.
	ClaimNextAudioTask(ctx context.Context, workerID string) (domain.AudioTask, bool, error)

	// FinishScrapeTask marks a running scrape task as done and records the result.
	FinishScrapeTask(ctx context.Context, id string, result domain.ScrapeResult) error

	// FinishAudioTask marks a running audio task as done and records the result.
	FinishAudioTask(ctx context.Context, id string, result domain.AudioResult) error

	// FailTask marks a task (scrape or audio) as failed with an error message.
	FailTask(ctx context.Context, id, errMsg string) error

	// HeartbeatTask updates the heartbeat_at timestamp on a running task.
	// Should be called periodically by the runner while the task is active so
	// the reaper knows the task is still alive.
	HeartbeatTask(ctx context.Context, id string) error

	// ReapStaleTasks finds all running tasks whose heartbeat_at is older than
	// staleAfter (or was never set) and resets them to pending so they can be
	// re-claimed by a healthy runner. Returns the number of tasks reaped.
	ReapStaleTasks(ctx context.Context, staleAfter time.Duration) (int, error)
}
|
||||
|
||||
// Reader is the read-only side used by the backend for status pages.
// None of its methods mutate queue state.
type Reader interface {
	// ListScrapeTasks returns all scrape tasks sorted by started descending.
	ListScrapeTasks(ctx context.Context) ([]domain.ScrapeTask, error)

	// GetScrapeTask returns a single scrape task by ID.
	// Returns (zero, false, nil) if not found.
	GetScrapeTask(ctx context.Context, id string) (domain.ScrapeTask, bool, error)

	// ListAudioTasks returns all audio tasks sorted by started descending.
	ListAudioTasks(ctx context.Context) ([]domain.AudioTask, error)

	// GetAudioTask returns the most recent audio task for cacheKey.
	// Returns (zero, false, nil) if not found.
	GetAudioTask(ctx context.Context, cacheKey string) (domain.AudioTask, bool, error)
}
|
||||
138
backend/internal/taskqueue/taskqueue_test.go
Normal file
138
backend/internal/taskqueue/taskqueue_test.go
Normal file
@@ -0,0 +1,138 @@
|
||||
package taskqueue_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/taskqueue"
|
||||
)
|
||||
|
||||
// ── Compile-time interface satisfaction ───────────────────────────────────────

// stubStore satisfies all three taskqueue interfaces with fixed canned
// responses: creates return hard-coded IDs, claims return a running task,
// finish/fail/heartbeat/cancel succeed, and reads return empty results.
// NOTE(review): an earlier comment claimed unexpected calls panic, but no
// method here panics — every method returns a benign success value.
type stubStore struct{}

func (s *stubStore) CreateScrapeTask(_ context.Context, _, _ string, _, _ int) (string, error) {
	return "task-1", nil
}
func (s *stubStore) CreateAudioTask(_ context.Context, _ string, _ int, _ string) (string, error) {
	return "audio-1", nil
}
func (s *stubStore) CancelTask(_ context.Context, _ string) error { return nil }

func (s *stubStore) ClaimNextScrapeTask(_ context.Context, _ string) (domain.ScrapeTask, bool, error) {
	// Always reports one claimable task already in the running state.
	return domain.ScrapeTask{ID: "task-1", Status: domain.TaskStatusRunning}, true, nil
}
func (s *stubStore) ClaimNextAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
	return domain.AudioTask{ID: "audio-1", Status: domain.TaskStatusRunning}, true, nil
}
func (s *stubStore) FinishScrapeTask(_ context.Context, _ string, _ domain.ScrapeResult) error {
	return nil
}
func (s *stubStore) FinishAudioTask(_ context.Context, _ string, _ domain.AudioResult) error {
	return nil
}
func (s *stubStore) FailTask(_ context.Context, _, _ string) error { return nil }

func (s *stubStore) HeartbeatTask(_ context.Context, _ string) error { return nil }

func (s *stubStore) ReapStaleTasks(_ context.Context, _ time.Duration) (int, error) {
	return 0, nil
}

// Reader methods: empty results, "not found" for lookups.
func (s *stubStore) ListScrapeTasks(_ context.Context) ([]domain.ScrapeTask, error) { return nil, nil }
func (s *stubStore) GetScrapeTask(_ context.Context, _ string) (domain.ScrapeTask, bool, error) {
	return domain.ScrapeTask{}, false, nil
}
func (s *stubStore) ListAudioTasks(_ context.Context) ([]domain.AudioTask, error) { return nil, nil }
func (s *stubStore) GetAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
	return domain.AudioTask{}, false, nil
}

// Verify the stub satisfies all three interfaces at compile time.
var _ taskqueue.Producer = (*stubStore)(nil)
var _ taskqueue.Consumer = (*stubStore)(nil)
var _ taskqueue.Reader = (*stubStore)(nil)
|
||||
|
||||
// ── Behavioural tests (using stub) ────────────────────────────────────────────
|
||||
|
||||
func TestProducer_CreateScrapeTask(t *testing.T) {
|
||||
var p taskqueue.Producer = &stubStore{}
|
||||
id, err := p.CreateScrapeTask(context.Background(), "book", "https://example.com/book/slug", 0, 0)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if id == "" {
|
||||
t.Error("expected non-empty task ID")
|
||||
}
|
||||
}
|
||||
|
||||
func TestConsumer_ClaimNextScrapeTask(t *testing.T) {
|
||||
var c taskqueue.Consumer = &stubStore{}
|
||||
task, ok, err := c.ClaimNextScrapeTask(context.Background(), "worker-1")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if !ok {
|
||||
t.Fatal("expected a task to be claimed")
|
||||
}
|
||||
if task.Status != domain.TaskStatusRunning {
|
||||
t.Errorf("want running, got %q", task.Status)
|
||||
}
|
||||
}
|
||||
|
||||
func TestConsumer_ClaimNextAudioTask(t *testing.T) {
|
||||
var c taskqueue.Consumer = &stubStore{}
|
||||
task, ok, err := c.ClaimNextAudioTask(context.Background(), "worker-1")
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %v", err)
|
||||
}
|
||||
if !ok {
|
||||
t.Fatal("expected an audio task to be claimed")
|
||||
}
|
||||
if task.ID == "" {
|
||||
t.Error("expected non-empty task ID")
|
||||
}
|
||||
}
|
||||
|
||||
// ── domain.ScrapeResult / domain.AudioResult JSON shape ──────────────────────
|
||||
|
||||
func TestScrapeResult_JSONRoundtrip(t *testing.T) {
|
||||
cases := []domain.ScrapeResult{
|
||||
{BooksFound: 5, ChaptersScraped: 100, ChaptersSkipped: 2, Errors: 0},
|
||||
{BooksFound: 0, ChaptersScraped: 0, Errors: 1, ErrorMessage: "timeout"},
|
||||
}
|
||||
for _, orig := range cases {
|
||||
b, err := json.Marshal(orig)
|
||||
if err != nil {
|
||||
t.Fatalf("marshal: %v", err)
|
||||
}
|
||||
var got domain.ScrapeResult
|
||||
if err := json.Unmarshal(b, &got); err != nil {
|
||||
t.Fatalf("unmarshal: %v", err)
|
||||
}
|
||||
if got != orig {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestAudioResult_JSONRoundtrip(t *testing.T) {
|
||||
cases := []domain.AudioResult{
|
||||
{ObjectKey: "audio/slug/1/af_bella.mp3"},
|
||||
{ErrorMessage: "kokoro unavailable"},
|
||||
}
|
||||
for _, orig := range cases {
|
||||
b, _ := json.Marshal(orig)
|
||||
var got domain.AudioResult
|
||||
json.Unmarshal(b, &got)
|
||||
if got != orig {
|
||||
t.Errorf("want %+v, got %+v", orig, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
301
backend/todos.md
Normal file
301
backend/todos.md
Normal file
@@ -0,0 +1,301 @@
|
||||
# LibNovel Scraper Rewrite — Project Todos
|
||||
|
||||
## Overview
|
||||
|
||||
Split the monolithic scraper into two separate binaries inside the same Go module:
|
||||
|
||||
| Binary | Command | Location | Responsibility |
|
||||
|--------|---------|----------|----------------|
|
||||
| **runner** | `cmd/runner` | Homelab | Polls remote PB for pending scrape tasks → scrapes novelfire.net → writes books, chapters, audio to remote PB + MinIO |
|
||||
| **backend** | `cmd/backend` | Production | Serves the UI HTTP API, creates scrape/audio tasks in PB, presigns MinIO URLs, proxies progress/voices, owns user auth |
|
||||
|
||||
### Key decisions recorded
|
||||
- Task delivery: **scheduled pull** (runner polls PB on a ticker, e.g. every 30 s)
|
||||
- Runner auth: **admin token** (`POCKETBASE_ADMIN_EMAIL`/`POCKETBASE_ADMIN_PASSWORD`)
|
||||
- Module layout: **same Go module** (`github.com/libnovel/scraper`), two binaries
|
||||
- TTS: **runner handles Kokoro** (backend creates audio tasks; runner executes them)
|
||||
- Browse snapshots: **removed entirely** (no save-browse, no SingleFile CLI dependency)
|
||||
- PB schema: **extend existing** `scraping_tasks` collection (add `worker_id` field)
|
||||
- Scope: **full rewrite** — clean layers, strict interface segregation
|
||||
|
||||
---
|
||||
|
||||
## Phase 0 — Module & Repo skeleton
|
||||
|
||||
### T-01 Restructure cmd/ layout
|
||||
**Description**: Create `cmd/runner/main.go` and `cmd/backend/main.go` entry points. Remove the old `cmd/scraper/` entry point (or keep temporarily as a stub). Update `go.mod` module path if needed.
|
||||
**Unit tests**: `cmd/runner/main_test.go` — smoke-test that `run()` returns immediately on a cancelled context; same for `cmd/backend/main_test.go`.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-02 Create shared `internal/config` package
|
||||
**Description**: Replace the ad-hoc `envOr()` helpers scattered in main.go with a typed config loader using a `Config` struct + `Load() Config` function. Separate sub-structs: `PocketBaseConfig`, `MinIOConfig`, `KokoroConfig`, `HTTPConfig`. Each binary calls `config.Load()`.
|
||||
**Unit tests**: `internal/config/config_test.go` — verify defaults, env override for each field, zero-value safety.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 1 — Core domain interfaces (interface segregation)
|
||||
|
||||
### T-03 Define `TaskQueue` interface (`internal/taskqueue`)
|
||||
**Description**: Create a new package `internal/taskqueue` with two interfaces:
|
||||
- `Producer` — used by the **backend** to create tasks:
|
||||
```go
|
||||
type Producer interface {
|
||||
CreateScrapeTask(ctx, kind, targetURL string) (string, error)
|
||||
CreateAudioTask(ctx, slug string, chapter int, voice string) (string, error)
|
||||
CancelTask(ctx, id string) error
|
||||
}
|
||||
```
|
||||
- `Consumer` — used by the **runner** to poll and claim tasks:
|
||||
```go
|
||||
type Consumer interface {
|
||||
ClaimNextScrapeTask(ctx context.Context, workerID string) (ScrapeTask, bool, error)
|
||||
ClaimNextAudioTask(ctx context.Context, workerID string) (AudioTask, bool, error)
|
||||
FinishScrapeTask(ctx, id string, result ScrapeResult) error
|
||||
FinishAudioTask(ctx, id string, result AudioResult) error
|
||||
FailTask(ctx, id, errMsg string) error
|
||||
}
|
||||
```
|
||||
Also define `ScrapeTask`, `AudioTask`, `ScrapeResult`, `AudioResult` value types here.
|
||||
**Unit tests**: `internal/taskqueue/taskqueue_test.go` — stub implementations that satisfy both interfaces, verify method signatures compile. Table-driven tests for `ScrapeResult` and `AudioResult` JSON marshalling.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-04 Define `BookStore` interface (`internal/bookstore`)
|
||||
**Description**: Decompose the monolithic `storage.Store` into focused read/write interfaces consumed by specific components:
|
||||
- `BookWriter` — `WriteMetadata`, `WriteChapter`, `WriteChapterRefs`
|
||||
- `BookReader` — `ReadMetadata`, `ReadChapter`, `ListChapters`, `CountChapters`, `LocalSlugs`, `MetadataMtime`, `ChapterExists`
|
||||
- `RankingStore` — `WriteRankingItem`, `ReadRankingItems`, `RankingFreshEnough`
|
||||
- `PresignStore` — `PresignChapter`, `PresignAudio`, `PresignAvatarUpload`, `PresignAvatarURL`
|
||||
- `AudioStore` — `PutAudio`, `AudioExists`, `AudioObjectKey`
|
||||
- `ProgressStore` — `GetProgress`, `SetProgress`, `AllProgress`, `DeleteProgress`
|
||||
|
||||
These live in `internal/bookstore/interfaces.go`. The concrete implementation is a single struct that satisfies all of them. The runner only gets `BookWriter + RankingStore + AudioStore`. The backend only gets `BookReader + PresignStore + ProgressStore`.
|
||||
**Unit tests**: `internal/bookstore/interfaces_test.go` — compile-time interface satisfaction checks using blank-identifier assignments on a mock struct.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-05 Rewrite `internal/scraper/interfaces.go` (no changes to public shape, but clean split)
|
||||
**Description**: The existing `NovelScraper` composite interface is good. Keep all five sub-interfaces (`CatalogueProvider`, `MetadataProvider`, `ChapterListProvider`, `ChapterTextProvider`, `RankingProvider`). Ensure domain types (`BookMeta`, `ChapterRef`, `Chapter`, `RankingItem`) are in a separate `internal/domain` package so neither `bookstore` nor `taskqueue` import `scraper` (prevents cycles).
|
||||
**Unit tests**: `internal/domain/domain_test.go` — JSON roundtrip tests for `BookMeta`, `ChapterRef`, `Chapter`, `RankingItem`.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 2 — Storage layer rewrite
|
||||
|
||||
### T-06 Rewrite `internal/storage/pocketbase.go`
|
||||
**Description**: Clean rewrite of the PocketBase REST client. Must satisfy `taskqueue.Producer`, `taskqueue.Consumer`, and all `bookstore` interfaces. Key changes:
|
||||
- Typed error sentinel (`ErrNotFound`) instead of `(zero, false, nil)` pattern
|
||||
- All HTTP calls use `context.Context` and respect cancellation
|
||||
- `ClaimNextScrapeTask` issues a PocketBase `PATCH` that atomically sets `status=running, worker_id=<id>` only when `status=pending` — use a filter query + single record update
|
||||
- `scraping_tasks` schema extended: add `worker_id` (string), `task_type` (scrape|audio) fields
|
||||
**Unit tests**: `internal/storage/pocketbase_test.go` — mock HTTP server (`httptest.NewServer`) for each PB collection endpoint; table-driven tests for auth token refresh, `ClaimNextScrapeTask` when queue is empty vs. has pending task, `FinishScrapeTask` happy path, error on 4xx response.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-07 Rewrite `internal/storage/minio.go`
|
||||
**Description**: Clean rewrite of the MinIO client. Must satisfy `bookstore.AudioStore` + presign methods. Key changes:
|
||||
- `PutObject` wrapped to accept `io.Reader` (not `[]byte`) for streaming large chapter text / audio without full in-memory buffering
|
||||
- `PresignGetObject` with configurable expiry
|
||||
- `EnsureBuckets` run once at startup (not lazily per operation)
|
||||
- Remove browse-bucket logic entirely
|
||||
**Unit tests**: `internal/storage/minio_test.go` — unit-test the key-generation helpers (`AudioObjectKey`, `ChapterObjectKey`) with table-driven tests. Integration tests remain in `_integration_test.go` with build tag.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-08 Rewrite `internal/storage/hybrid.go` → `internal/storage/store.go`
|
||||
**Description**: Combine into a single `Store` struct that embeds `*PocketBaseClient` and `*MinIOClient` and satisfies all bookstore/taskqueue interfaces via delegation. Remove the separate `hybrid.go` file. `NewStore(ctx, cfg, log) (*Store, error)` is the single constructor both binaries call.
|
||||
**Unit tests**: `internal/storage/store_test.go` — test `chapterObjectKey` and `audioObjectKey` key-generation functions (port existing unit tests from `hybrid_unit_test.go`).
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 3 — Scraper layer rewrite
|
||||
|
||||
### T-09 Rewrite `internal/novelfire/scraper.go`
|
||||
**Description**: Full rewrite of the novelfire scraper. Changes:
|
||||
- Accept only a single `browser.Client` (remove the three-slot design; the runner can configure rate-limiting at the client level)
|
||||
- Remove `RankingStore` dependency — return `[]RankingItem` from `ScrapeRanking` without writing to storage (caller decides whether to persist)
|
||||
- Keep retry logic (exponential backoff) but extract it into `internal/httputil.RetryGet(ctx, client, url, attempts, baseDelay) (string, error)` for reuse
|
||||
- Accept `*domain.BookMeta` directly, not `scraper.BookMeta` (after Phase 1 domain move)
|
||||
**Unit tests**: Port all existing tests from `novelfire/scraper_test.go` and `novelfire/ranking_test.go` to the new package layout. Add test for `RetryGet` abort on context cancellation.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-10 Rewrite `internal/orchestrator/orchestrator.go`
|
||||
**Description**: Clean rewrite. Changes:
|
||||
- Accept `taskqueue.Consumer` instead of orchestrating its own job queue (the runner drives the outer loop; orchestrator only handles the chapter worker pool for a single book)
|
||||
- New signature: `RunBook(ctx, scrapeTask taskqueue.ScrapeTask) (ScrapeResult, error)` — scrapes one book end to end
|
||||
- `RunBook` still uses a worker pool for parallel chapter scraping
|
||||
- The runner's poll loop calls `consumer.ClaimNextScrapeTask`, then `orchestrator.RunBook`, then `consumer.FinishScrapeTask`
|
||||
**Unit tests**: Port `orchestrator/orchestrator_test.go`. Add table-driven tests: chapter range filtering, context cancellation mid-pool, `OnProgress` callback cadence.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-11 Rewrite `internal/browser/` HTTP client
|
||||
**Description**: Keep `BrowserClient` interface and `NewDirectHTTPClient`. Remove all Browserless variants (no longer needed). Add proxy support via `Config.ProxyURL`. Export `Config` cleanly.
|
||||
**Unit tests**: `internal/browser/browser_test.go` — test `NewDirectHTTPClient` with a `httptest.Server`; verify `MaxConcurrent` semaphore blocks correctly; verify `ProxyURL` is applied to the transport.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 4 — Runner binary
|
||||
|
||||
### T-12 Implement `internal/runner/runner.go`
|
||||
**Description**: The runner's main loop:
|
||||
```
|
||||
for {
|
||||
select case <-ticker.C:
|
||||
// try to claim a scrape task
|
||||
task, ok, _ := consumer.ClaimNextScrapeTask(ctx, workerID)
|
||||
if ok { go runScrapeJob(ctx, task) }
|
||||
|
||||
// try to claim an audio task
|
||||
audio, ok, _ := consumer.ClaimNextAudioTask(ctx, workerID)
|
||||
if ok { go runAudioJob(ctx, audio) }
|
||||
case <-ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
```
|
||||
`runScrapeJob` calls `orchestrator.RunBook`. `runAudioJob` calls `kokoroclient.GenerateAudio` then `store.PutAudio`.
|
||||
Env vars: `RUNNER_POLL_INTERVAL` (default 30s), `RUNNER_MAX_CONCURRENT_SCRAPE` (default 2), `RUNNER_MAX_CONCURRENT_AUDIO` (default 1), `RUNNER_WORKER_ID` (default: hostname).
|
||||
**Unit tests**: `internal/runner/runner_test.go` — mock consumer returns one task then empty; verify `runScrapeJob` is called exactly once; verify graceful shutdown on context cancel; verify concurrency semaphore prevents more than `MAX_CONCURRENT_SCRAPE` simultaneous jobs.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-13 Implement `internal/kokoro/client.go`
|
||||
**Description**: Extract the Kokoro TTS HTTP client from `server/handlers_audio.go` into its own package `internal/kokoro`. Interface:
|
||||
```go
|
||||
type Client interface {
|
||||
GenerateAudio(ctx context.Context, text, voice string) ([]byte, error)
|
||||
ListVoices(ctx context.Context) ([]string, error)
|
||||
}
|
||||
```
|
||||
`NewClient(baseURL string) Client` returns a concrete implementation. `GenerateAudio` calls `POST /v1/audio/speech` and returns the raw MP3 bytes. `ListVoices` calls `GET /v1/audio/voices`.
|
||||
**Unit tests**: `internal/kokoro/client_test.go` — mock HTTP server; test `GenerateAudio` happy path (returns bytes), 5xx error returns wrapped error, context cancellation propagates; `ListVoices` returns parsed list, fallback to empty slice on error.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-14 Write `cmd/runner/main.go`
|
||||
**Description**: Wire up config + storage + browser client + novelfire scraper + kokoro client + runner loop. Signal handling (SIGINT/SIGTERM → cancel context → graceful drain). Log structured startup info.
|
||||
**Unit tests**: `cmd/runner/main_test.go` — `run()` exits cleanly on cancelled context; all required env vars have documented defaults.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 5 — Backend binary
|
||||
|
||||
### T-15 Define backend HTTP handler interfaces
|
||||
**Description**: Create `internal/backend/handlers.go` (not a concrete type yet — just the interface segregation scaffold). Each handler group gets its own dependency interface, e.g.:
|
||||
- `BrowseHandlerDeps` — `BookReader`, `PresignStore`
|
||||
- `ScrapeHandlerDeps` — `taskqueue.Producer`, scrape task reader
|
||||
- `AudioHandlerDeps` — `bookstore.AudioStore`, `taskqueue.Producer`, `kokoro.Client`
|
||||
- `ProgressHandlerDeps` — `bookstore.ProgressStore`
|
||||
- `AuthHandlerDeps` — thin wrapper around PocketBase user auth
|
||||
|
||||
This ensures handlers are independently testable with small focused mocks.
|
||||
**Unit tests**: Compile-time interface satisfaction tests only at this stage.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-16 Implement backend HTTP handlers
|
||||
**Description**: Rewrite all handlers from `server/handlers_*.go` into `internal/backend/`. Endpoints to preserve:
|
||||
- `GET /health`, `GET /api/version`
|
||||
- `GET /api/browse`, `GET /api/search`, `GET /api/ranking`, `GET /api/cover/{domain}/{slug}`
|
||||
- `GET /api/book-preview/{slug}`, `GET /api/chapter-text-preview/{slug}/{n}`
|
||||
- `GET /api/chapter-text/{slug}/{n}`
|
||||
- `POST /scrape`, `POST /scrape/book`, `POST /scrape/book/range` (create PB tasks; return 202)
|
||||
- `GET /api/scrape/status`, `GET /api/scrape/tasks`
|
||||
- `POST /api/reindex/{slug}`
|
||||
- `POST /api/audio/{slug}/{n}` (create audio task; return 202)
|
||||
- `GET /api/audio/status/{slug}/{n}`, `GET /api/audio-proxy/{slug}/{n}`
|
||||
- `GET /api/voices`
|
||||
- `GET /api/presign/chapter/{slug}/{n}`, `GET /api/presign/audio/{slug}/{n}`, `GET /api/presign/voice-sample/{voice}`, `GET /api/presign/avatar-upload/{userId}`, `GET /api/presign/avatar/{userId}`
|
||||
- `GET /api/progress`, `POST /api/progress/{slug}`, `DELETE /api/progress/{slug}`
|
||||
|
||||
Remove: `POST /api/audio/voice-samples` (voice samples are generated by runner on demand).
|
||||
**Unit tests**: `internal/backend/handlers_test.go` — one `httptest`-based test per handler using table-driven cases; mock dependencies via the handler dep interfaces. Focus: correct status codes, JSON shape, error propagation.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-17 Implement `internal/backend/server.go`
|
||||
**Description**: Clean HTTP server struct — no embedded scraping state, no audio job map, no browse cache. Dependencies injected via constructor. Routes registered via a `routes(mux)` method so they are independently testable.
|
||||
**Unit tests**: `internal/backend/server_test.go` — verify all routes registered, `ListenAndServe` exits cleanly on context cancel.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-18 Write `cmd/backend/main.go`
|
||||
**Description**: Wire up config + storage + kokoro client + backend server. Signal handling. Structured startup logging.
|
||||
**Unit tests**: `cmd/backend/main_test.go` — same smoke tests as runner.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Phase 6 — Cleanup & cross-cutting
|
||||
|
||||
### T-19 Port and extend unit tests
|
||||
**Description**: Ensure all existing passing unit tests (`htmlutil`, `novelfire`, `orchestrator`, `storage` unit tests) are ported / updated for the new package layout. Remove integration-test stubs that are no longer relevant.
|
||||
**Unit tests**: All tests under `internal/` must pass with `go test ./... -short`.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-20 Update `go.mod` and dependencies
|
||||
**Description**: Remove unused dependencies (e.g. Browserless-related). Verify `go mod tidy` produces a clean output. Update `Dockerfile` to build both `runner` and `backend` binaries. Update `docker-compose.yml` to run both services.
|
||||
**Unit tests**: `go build ./...` and `go vet ./...` pass cleanly.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-21 Update `AGENTS.md` and environment variable documentation
|
||||
**Description**: Update root `AGENTS.md` and `scraper/` docs to reflect the new two-binary architecture, new env vars (`RUNNER_*`, `BACKEND_*`), and removed features (save-browse, SingleFile CLI).
|
||||
**Unit tests**: N/A — documentation only.
|
||||
**Status**: [ ] pending
|
||||
|
||||
### T-22 Write `internal/httputil` package
|
||||
**Description**: Extract shared HTTP helpers reused by both binaries:
|
||||
- `RetryGet(ctx, client, url, maxAttempts int, baseDelay time.Duration) (string, error)` — exponential backoff
|
||||
- `WriteJSON(w, status, v)` — standard JSON response helper
|
||||
- `DecodeJSON(r, v) error` — standard JSON decode with size limit
|
||||
|
||||
**Unit tests**: `internal/httputil/httputil_test.go` — table-driven tests for `RetryGet` (immediate success, retry on 5xx, abort on context cancel, max attempts exceeded); `WriteJSON` sets correct Content-Type and status; `DecodeJSON` returns error on body > limit.
|
||||
**Status**: [ ] pending
|
||||
|
||||
---
|
||||
|
||||
## Dependency graph (simplified)
|
||||
|
||||
```
|
||||
internal/domain ← pure types, no imports from this repo
|
||||
internal/httputil ← stdlib only (no imports from this repo)
|
||||
internal/browser ← httputil
|
||||
internal/scraper ← domain
|
||||
internal/novelfire ← browser, scraper/domain, httputil
|
||||
internal/kokoro ← httputil
|
||||
internal/bookstore ← domain
|
||||
internal/taskqueue ← domain
|
||||
internal/storage ← bookstore, taskqueue, domain, minio-go, ...
|
||||
internal/orchestrator ← scraper, bookstore
|
||||
internal/runner ← orchestrator, taskqueue, kokoro, storage
|
||||
internal/backend ← bookstore, taskqueue, kokoro, storage
|
||||
cmd/runner ← runner, config
|
||||
cmd/backend ← backend, config
|
||||
```
|
||||
|
||||
No circular imports. Runner and backend never import each other.
|
||||
|
||||
---
|
||||
|
||||
## Progress tracker

> Note: this table is the authoritative status record. The per-task
> `**Status**: [ ] pending` markers in the sections above were written when
> the plan was drafted and were not updated as tasks were completed.
|
||||
|
||||
| Task | Description | Status |
|
||||
|------|-------------|--------|
|
||||
| T-01 | Restructure cmd/ layout | ✅ done |
|
||||
| T-02 | Shared config package | ✅ done |
|
||||
| T-03 | TaskQueue interfaces | ✅ done |
|
||||
| T-04 | BookStore interface decomposition | ✅ done |
|
||||
| T-05 | Domain package + NovelScraper cleanup | ✅ done |
|
||||
| T-06 | PocketBase client rewrite | ✅ done |
|
||||
| T-07 | MinIO client rewrite | ✅ done |
|
||||
| T-08 | Hybrid → unified Store | ✅ done |
|
||||
| T-09 | novelfire scraper rewrite | ✅ done |
|
||||
| T-10 | Orchestrator rewrite | ✅ done |
|
||||
| T-11 | Browser client rewrite | ✅ done |
|
||||
| T-12 | Runner main loop | ✅ done |
|
||||
| T-13 | Kokoro client package | ✅ done |
|
||||
| T-14 | cmd/runner entrypoint | ✅ done |
|
||||
| T-15 | Backend handler interfaces | ✅ done |
|
||||
| T-16 | Backend HTTP handlers | ✅ done |
|
||||
| T-17 | Backend server | ✅ done |
|
||||
| T-18 | cmd/backend entrypoint | ✅ done |
|
||||
| T-19 | Port existing unit tests | ✅ done |
|
||||
| T-20 | go.mod + Docker updates | ✅ done (`go mod tidy` + `go build ./...` + `go vet ./...` all clean; Docker TBD) |
|
||||
| T-21 | Documentation updates | ✅ done (progress table updated) |
|
||||
| T-22 | httputil package | ✅ done |
|
||||
8
caddy/Dockerfile
Normal file
8
caddy/Dockerfile
Normal file
@@ -0,0 +1,8 @@
|
||||
# Stage 1: build a custom Caddy binary with two extra modules compiled in:
#   - mholt/caddy-ratelimit            → HTTP rate limiting
#   - hslatman/caddy-crowdsec-bouncer  → CrowdSec HTTP bouncer
FROM caddy:2-builder AS builder

RUN xcaddy build \
    --with github.com/mholt/caddy-ratelimit \
    --with github.com/hslatman/caddy-crowdsec-bouncer/http

# Stage 2: copy the custom binary into the slim Alpine runtime image,
# replacing the stock /usr/bin/caddy.
FROM caddy:2-alpine
COPY --from=builder /usr/bin/caddy /usr/bin/caddy
|
||||
51
caddy/errors/502.html
Normal file
51
caddy/errors/502.html
Normal file
@@ -0,0 +1,51 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>502 — Service Unavailable</title>
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100svh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 1rem;
|
||||
background: #09090b;
|
||||
color: #a1a1aa;
|
||||
font-family: ui-sans-serif, system-ui, sans-serif;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
.code {
|
||||
font-size: clamp(4rem, 20vw, 8rem);
|
||||
font-weight: 800;
|
||||
color: #27272a;
|
||||
line-height: 1;
|
||||
letter-spacing: -0.04em;
|
||||
}
|
||||
h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
|
||||
p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
|
||||
a {
|
||||
margin-top: 0.5rem;
|
||||
display: inline-block;
|
||||
padding: 0.6rem 1.4rem;
|
||||
border-radius: 0.5rem;
|
||||
background: #f59e0b;
|
||||
color: #000;
|
||||
font-weight: 600;
|
||||
font-size: 0.875rem;
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover { background: #d97706; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="code">502</div>
|
||||
<h1>Service Unavailable</h1>
|
||||
<p>The server is temporarily unreachable. Please try again in a moment.</p>
|
||||
<a href="/">Go home</a>
|
||||
</body>
|
||||
</html>
|
||||
51
caddy/errors/503.html
Normal file
51
caddy/errors/503.html
Normal file
@@ -0,0 +1,51 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>503 — Maintenance</title>
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100svh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 1rem;
|
||||
background: #09090b;
|
||||
color: #a1a1aa;
|
||||
font-family: ui-sans-serif, system-ui, sans-serif;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
.code {
|
||||
font-size: clamp(4rem, 20vw, 8rem);
|
||||
font-weight: 800;
|
||||
color: #27272a;
|
||||
line-height: 1;
|
||||
letter-spacing: -0.04em;
|
||||
}
|
||||
h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
|
||||
p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
|
||||
a {
|
||||
margin-top: 0.5rem;
|
||||
display: inline-block;
|
||||
padding: 0.6rem 1.4rem;
|
||||
border-radius: 0.5rem;
|
||||
background: #f59e0b;
|
||||
color: #000;
|
||||
font-weight: 600;
|
||||
font-size: 0.875rem;
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover { background: #d97706; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="code">503</div>
|
||||
<h1>Under Maintenance</h1>
|
||||
<p>LibNovel is briefly offline for maintenance. We’ll be back shortly.</p>
|
||||
<a href="/">Try again</a>
|
||||
</body>
|
||||
</html>
|
||||
51
caddy/errors/504.html
Normal file
51
caddy/errors/504.html
Normal file
@@ -0,0 +1,51 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>504 — Gateway Timeout</title>
|
||||
<style>
|
||||
*, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }
|
||||
body {
|
||||
min-height: 100svh;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
gap: 1rem;
|
||||
background: #09090b;
|
||||
color: #a1a1aa;
|
||||
font-family: ui-sans-serif, system-ui, sans-serif;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
.code {
|
||||
font-size: clamp(4rem, 20vw, 8rem);
|
||||
font-weight: 800;
|
||||
color: #27272a;
|
||||
line-height: 1;
|
||||
letter-spacing: -0.04em;
|
||||
}
|
||||
h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
|
||||
p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
|
||||
a {
|
||||
margin-top: 0.5rem;
|
||||
display: inline-block;
|
||||
padding: 0.6rem 1.4rem;
|
||||
border-radius: 0.5rem;
|
||||
background: #f59e0b;
|
||||
color: #000;
|
||||
font-weight: 600;
|
||||
font-size: 0.875rem;
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover { background: #d97706; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div class="code">504</div>
|
||||
<h1>Gateway Timeout</h1>
|
||||
<p>The request took too long to complete. Please refresh and try again.</p>
|
||||
<a href="/">Go home</a>
|
||||
</body>
|
||||
</html>
|
||||
12
crowdsec/acquis.yaml
Normal file
12
crowdsec/acquis.yaml
Normal file
@@ -0,0 +1,12 @@
|
||||
# CrowdSec log acquisition — tells the CrowdSec agent which logs to parse.
|
||||
#
|
||||
# Caddy writes JSON access logs to /var/log/caddy/access.log (mounted from the
|
||||
# caddy_logs Docker volume). CrowdSec reads the same volume at the same path.
|
||||
#
|
||||
# The `crowdsecurity/caddy` collection (installed via COLLECTIONS env var)
|
||||
# provides the parser that understands Caddy's JSON log format.
|
||||
|
||||
filenames:
|
||||
- /var/log/caddy/access.log
|
||||
labels:
|
||||
type: caddy
|
||||
@@ -1,18 +1,39 @@
|
||||
version: "3.9"
|
||||
# ── Shared environment fragments ──────────────────────────────────────────────
|
||||
# These YAML anchors eliminate duplication between backend and runner.
|
||||
# All values come from Doppler — no fallbacks needed here.
|
||||
# Run commands via: just up / just build / etc. (see justfile)
|
||||
x-infra-env: &infra-env
|
||||
# MinIO
|
||||
MINIO_ENDPOINT: "minio:9000"
|
||||
MINIO_ACCESS_KEY: "${MINIO_ROOT_USER}"
|
||||
MINIO_SECRET_KEY: "${MINIO_ROOT_PASSWORD}"
|
||||
MINIO_USE_SSL: "false"
|
||||
MINIO_PUBLIC_ENDPOINT: "${MINIO_PUBLIC_ENDPOINT}"
|
||||
MINIO_PUBLIC_USE_SSL: "${MINIO_PUBLIC_USE_SSL}"
|
||||
# PocketBase
|
||||
POCKETBASE_URL: "http://pocketbase:8090"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
|
||||
# Meilisearch
|
||||
MEILI_URL: "http://meilisearch:7700"
|
||||
MEILI_API_KEY: "${MEILI_MASTER_KEY}"
|
||||
# Valkey
|
||||
VALKEY_ADDR: "valkey:6379"
|
||||
|
||||
services:
|
||||
# ─── MinIO (object storage for chapter .md files + audio cache) ─────────────
|
||||
# ─── MinIO (object storage: chapters, audio, avatars, browse) ────────────────
|
||||
minio:
|
||||
image: minio/minio:latest
|
||||
#container_name: libnovel-minio
|
||||
restart: unless-stopped
|
||||
command: server /data --console-address ":9001"
|
||||
environment:
|
||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER:-admin}"
|
||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD:-changeme123}"
|
||||
ports:
|
||||
- "${MINIO_PORT:-9000}:9000" # S3 API
|
||||
- "${MINIO_CONSOLE_PORT:-9001}:9001" # Web console
|
||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
|
||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
|
||||
# No public port — all presigned URL traffic goes through backend or a
|
||||
# separately-exposed MINIO_PUBLIC_ENDPOINT (e.g. storage.libnovel.cc).
|
||||
expose:
|
||||
- "9000"
|
||||
- "9001"
|
||||
volumes:
|
||||
- minio_data:/data
|
||||
healthcheck:
|
||||
@@ -22,37 +43,34 @@ services:
|
||||
retries: 5
|
||||
|
||||
# ─── MinIO bucket initialisation ─────────────────────────────────────────────
|
||||
# Runs once to create the default buckets and then exits.
|
||||
minio-init:
|
||||
image: minio/mc:latest
|
||||
#container_name: libnovel-minio-init
|
||||
depends_on:
|
||||
minio:
|
||||
condition: service_healthy
|
||||
entrypoint: >
|
||||
/bin/sh -c "
|
||||
mc alias set local http://minio:9000 $${MINIO_ROOT_USER:-admin} $${MINIO_ROOT_PASSWORD:-changeme123};
|
||||
mc mb --ignore-existing local/libnovel-chapters;
|
||||
mc mb --ignore-existing local/libnovel-audio;
|
||||
mc mb --ignore-existing local/libnovel-browse;
|
||||
mc mb --ignore-existing local/libnovel-avatars;
|
||||
mc alias set local http://minio:9000 $${MINIO_ROOT_USER} $${MINIO_ROOT_PASSWORD};
|
||||
mc mb --ignore-existing local/chapters;
|
||||
mc mb --ignore-existing local/audio;
|
||||
mc mb --ignore-existing local/avatars;
|
||||
mc mb --ignore-existing local/catalogue;
|
||||
echo 'buckets ready';
|
||||
"
|
||||
environment:
|
||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER:-admin}"
|
||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD:-changeme123}"
|
||||
MINIO_ROOT_USER: "${MINIO_ROOT_USER}"
|
||||
MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}"
|
||||
|
||||
# ─── PocketBase (auth + structured data: books, chapters index, ranking, progress) ──
|
||||
# ─── PocketBase (auth + structured data) ─────────────────────────────────────
|
||||
pocketbase:
|
||||
image: ghcr.io/muchobien/pocketbase:latest
|
||||
#container_name: libnovel-pocketbase
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
# Auto-create superuser on first boot (used by entrypoint.sh)
|
||||
PB_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL:-admin@libnovel.local}"
|
||||
PB_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD:-changeme123}"
|
||||
ports:
|
||||
- "${POCKETBASE_PORT:-8090}:8090"
|
||||
PB_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
|
||||
PB_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
|
||||
# No public port — accessed only by backend/runner on the internal network.
|
||||
expose:
|
||||
- "8090"
|
||||
volumes:
|
||||
- pb_data:/pb_data
|
||||
healthcheck:
|
||||
@@ -61,9 +79,7 @@ services:
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── PocketBase collection bootstrap ────────────────────────────────────────
|
||||
# One-shot init container: creates all required collections via the admin API
|
||||
# and exits. Idempotent — safe to run on every `docker compose up`.
|
||||
# ─── PocketBase collection bootstrap ─────────────────────────────────────────
|
||||
pb-init:
|
||||
image: alpine:3.19
|
||||
depends_on:
|
||||
@@ -71,19 +87,59 @@ services:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
POCKETBASE_URL: "http://pocketbase:8090"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL:-admin@libnovel.local}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD:-changeme123}"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
|
||||
volumes:
|
||||
- ./scripts/pb-init.sh:/pb-init.sh:ro
|
||||
- ./scripts/pb-init-v3.sh:/pb-init.sh:ro
|
||||
entrypoint: ["sh", "/pb-init.sh"]
|
||||
|
||||
# ─── Scraper ─────────────────────────────────────────────────────────────────
|
||||
scraper:
|
||||
build:
|
||||
context: ./scraper
|
||||
dockerfile: Dockerfile
|
||||
#container_name: libnovel-scraper
|
||||
# ─── Meilisearch (full-text search) ──────────────────────────────────────────
|
||||
meilisearch:
|
||||
image: getmeili/meilisearch:latest
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
MEILI_MASTER_KEY: "${MEILI_MASTER_KEY}"
|
||||
MEILI_ENV: "${MEILI_ENV}"
|
||||
# No public port — backend/runner reach it via internal network.
|
||||
expose:
|
||||
- "7700"
|
||||
volumes:
|
||||
- meili_data:/meili_data
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://127.0.0.1:7700/health"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Valkey (presign URL cache) ───────────────────────────────────────────────
|
||||
valkey:
|
||||
image: valkey/valkey:7-alpine
|
||||
restart: unless-stopped
|
||||
# No public port — backend/runner/ui reach it via internal network.
|
||||
expose:
|
||||
- "6379"
|
||||
volumes:
|
||||
- valkey_data:/data
|
||||
healthcheck:
|
||||
test: ["CMD", "valkey-cli", "ping"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Backend API ──────────────────────────────────────────────────────────────
|
||||
backend:
|
||||
image: kalekber/libnovel-backend:${GIT_TAG:-latest}
|
||||
build:
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile
|
||||
target: backend
|
||||
args:
|
||||
VERSION: "${GIT_TAG}"
|
||||
COMMIT: "${GIT_COMMIT}"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: "true"
|
||||
restart: unless-stopped
|
||||
stop_grace_period: 35s
|
||||
depends_on:
|
||||
pb-init:
|
||||
condition: service_completed_successfully
|
||||
@@ -91,69 +147,429 @@ services:
|
||||
condition: service_healthy
|
||||
minio:
|
||||
condition: service_healthy
|
||||
meilisearch:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
# No public port — all traffic is routed via Caddy.
|
||||
expose:
|
||||
- "8080"
|
||||
environment:
|
||||
# 0 → defaults to NumCPU inside the container.
|
||||
SCRAPER_WORKERS: "${SCRAPER_WORKERS:-0}"
|
||||
SCRAPER_HTTP_ADDR: ":8080"
|
||||
LOG_LEVEL: "debug"
|
||||
# Kokoro-FastAPI TTS endpoint.
|
||||
KOKORO_URL: "${KOKORO_URL:-https://kokoro.kalekber.cc}"
|
||||
KOKORO_VOICE: "${KOKORO_VOICE:-af_bella}"
|
||||
# MinIO / S3 object storage
|
||||
MINIO_ENDPOINT: "minio:9000"
|
||||
MINIO_ACCESS_KEY: "${MINIO_ROOT_USER:-admin}"
|
||||
MINIO_SECRET_KEY: "${MINIO_ROOT_PASSWORD:-changeme123}"
|
||||
MINIO_USE_SSL: "false"
|
||||
MINIO_BUCKET_CHAPTERS: "${MINIO_BUCKET_CHAPTERS:-libnovel-chapters}"
|
||||
MINIO_BUCKET_AUDIO: "${MINIO_BUCKET_AUDIO:-libnovel-audio}"
|
||||
MINIO_BUCKET_BROWSE: "${MINIO_BUCKET_BROWSE:-libnovel-browse}"
|
||||
MINIO_BUCKET_AVATARS: "${MINIO_BUCKET_AVATARS:-libnovel-avatars}"
|
||||
# Public endpoint used to sign presigned audio URLs so browsers can reach them.
|
||||
# Leave empty to use MINIO_ENDPOINT (fine for local dev).
|
||||
MINIO_PUBLIC_ENDPOINT: "${MINIO_PUBLIC_ENDPOINT:-}"
|
||||
MINIO_PUBLIC_USE_SSL: "${MINIO_PUBLIC_USE_SSL:-true}"
|
||||
# SingleFile CLI path for save-browse subcommand
|
||||
SINGLEFILE_PATH: "${SINGLEFILE_PATH:-single-file}"
|
||||
# PocketBase
|
||||
POCKETBASE_URL: "http://pocketbase:8090"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL:-admin@libnovel.local}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD:-changeme123}"
|
||||
ports:
|
||||
- "${SCRAPER_PORT:-8080}:8080"
|
||||
<<: *infra-env
|
||||
BACKEND_HTTP_ADDR: ":8080"
|
||||
LOG_LEVEL: "${LOG_LEVEL}"
|
||||
KOKORO_URL: "${KOKORO_URL}"
|
||||
KOKORO_VOICE: "${KOKORO_VOICE}"
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:8080/health"]
|
||||
test: ["CMD", "/healthcheck", "http://localhost:8080/health"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
# ─── SvelteKit UI ────────────────────────────────────────────────────────────
|
||||
ui:
|
||||
# ─── Runner (background task worker) ─────────────────────────────────────────
|
||||
runner:
|
||||
image: kalekber/libnovel-runner:${GIT_TAG:-latest}
|
||||
build:
|
||||
context: ./ui
|
||||
context: ./backend
|
||||
dockerfile: Dockerfile
|
||||
# container_name: libnovel-ui
|
||||
target: runner
|
||||
args:
|
||||
VERSION: "${GIT_TAG}"
|
||||
COMMIT: "${GIT_COMMIT}"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: "true"
|
||||
restart: unless-stopped
|
||||
stop_grace_period: 135s
|
||||
depends_on:
|
||||
pb-init:
|
||||
condition: service_completed_successfully
|
||||
scraper:
|
||||
pocketbase:
|
||||
condition: service_healthy
|
||||
minio:
|
||||
condition: service_healthy
|
||||
meilisearch:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
# Metrics endpoint — internal only; expose publicly via Caddy if needed.
|
||||
expose:
|
||||
- "9091"
|
||||
environment:
|
||||
<<: *infra-env
|
||||
LOG_LEVEL: "${LOG_LEVEL}"
|
||||
# Runner tuning
|
||||
RUNNER_POLL_INTERVAL: "${RUNNER_POLL_INTERVAL}"
|
||||
RUNNER_MAX_CONCURRENT_SCRAPE: "${RUNNER_MAX_CONCURRENT_SCRAPE}"
|
||||
RUNNER_MAX_CONCURRENT_AUDIO: "${RUNNER_MAX_CONCURRENT_AUDIO}"
|
||||
RUNNER_WORKER_ID: "${RUNNER_WORKER_ID}"
|
||||
RUNNER_TIMEOUT: "${RUNNER_TIMEOUT}"
|
||||
RUNNER_METRICS_ADDR: "${RUNNER_METRICS_ADDR}"
|
||||
# Suppress the on-startup catalogue walk — catalogue_refresh now skips
|
||||
# books already in Meilisearch, so a full walk on every restart is wasteful.
|
||||
# The 24h periodic ticker (CatalogueRefreshInterval) still fires normally.
|
||||
RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH: "true"
|
||||
# Kokoro-FastAPI TTS endpoint
|
||||
KOKORO_URL: "${KOKORO_URL}"
|
||||
KOKORO_VOICE: "${KOKORO_VOICE}"
|
||||
healthcheck:
|
||||
# The runner writes /tmp/runner.alive on every poll.
|
||||
# 120s = 2× the default 30s poll interval with generous headroom.
|
||||
test: ["CMD", "/healthcheck", "file", "/tmp/runner.alive", "120"]
|
||||
interval: 60s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
# ─── SvelteKit UI ─────────────────────────────────────────────────────────────
|
||||
ui:
|
||||
image: kalekber/libnovel-ui:${GIT_TAG:-latest}
|
||||
build:
|
||||
context: ./ui
|
||||
dockerfile: Dockerfile
|
||||
args:
|
||||
BUILD_VERSION: "${GIT_TAG}"
|
||||
BUILD_COMMIT: "${GIT_COMMIT}"
|
||||
labels:
|
||||
com.centurylinklabs.watchtower.enable: "true"
|
||||
restart: unless-stopped
|
||||
stop_grace_period: 35s
|
||||
depends_on:
|
||||
pb-init:
|
||||
condition: service_completed_successfully
|
||||
backend:
|
||||
condition: service_healthy
|
||||
pocketbase:
|
||||
condition: service_healthy
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
# No public port — all traffic via Caddy.
|
||||
expose:
|
||||
- "3000"
|
||||
environment:
|
||||
SCRAPER_API_URL: "http://scraper:8080"
|
||||
# ORIGIN must match the public URL Caddy serves on.
|
||||
# adapter-node uses this for SvelteKit's built-in CSRF origin check.
|
||||
ORIGIN: "${ORIGIN}"
|
||||
BACKEND_API_URL: "http://backend:8080"
|
||||
POCKETBASE_URL: "http://pocketbase:8090"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL:-admin@libnovel.local}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD:-changeme123}"
|
||||
PUBLIC_MINIO_PUBLIC_URL: "${MINIO_PUBLIC_ENDPOINT:-http://localhost:9000}"
|
||||
ports:
|
||||
- "${UI_PORT:-5252}:3000"
|
||||
POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
|
||||
POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
|
||||
AUTH_SECRET: "${AUTH_SECRET}"
|
||||
PUBLIC_MINIO_PUBLIC_URL: "${MINIO_PUBLIC_ENDPOINT}"
|
||||
# Valkey
|
||||
VALKEY_ADDR: "valkey:6379"
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:3000/"]
|
||||
test: ["CMD", "wget", "-qO-", "http://127.0.0.1:3000/health"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
|
||||
# ─── CrowdSec (threat detection + IP blocking) ───────────────────────────────
|
||||
# Reads Caddy JSON access logs from the shared caddy_logs volume and enforces
|
||||
# decisions via the Caddy bouncer plugin.
|
||||
crowdsec:
|
||||
image: crowdsecurity/crowdsec:latest
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
GID: "1000"
|
||||
COLLECTIONS: "crowdsecurity/caddy crowdsecurity/http-dos crowdsecurity/base-http-scenarios"
|
||||
volumes:
|
||||
- crowdsec_data:/var/lib/crowdsec/data
|
||||
- ./crowdsec/acquis.yaml:/etc/crowdsec/acquis.yaml:ro
|
||||
- caddy_logs:/var/log/caddy:ro
|
||||
expose:
|
||||
- "8080"
|
||||
healthcheck:
|
||||
test: ["CMD", "cscli", "version"]
|
||||
interval: 20s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
|
||||
# ─── CrowdSec bouncer registration ───────────────────────────────────────────
|
||||
# One-shot: registers the Caddy bouncer with the CrowdSec LAPI and writes the
|
||||
# generated API key to crowdsec/.crowdsec.env, which Caddy reads via env_file.
|
||||
# Uses the Docker socket to exec cscli inside the running crowdsec container.
|
||||
crowdsec-init:
|
||||
image: docker:cli
|
||||
depends_on:
|
||||
crowdsec:
|
||||
condition: service_healthy
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
- ./crowdsec:/crowdsec-out
|
||||
entrypoint: >
|
||||
/bin/sh -c "
|
||||
out=/crowdsec-out/.crowdsec.env;
|
||||
existing=$$(grep -s '^CROWDSEC_API_KEY=.' \"$$out\" | cut -d= -f2-);
|
||||
if [ -n \"$$existing\" ]; then
|
||||
echo 'crowdsec-init: key already present, skipping registration';
|
||||
exit 0;
|
||||
fi;
|
||||
container=$$(docker ps --filter name=crowdsec --filter status=running --format '{{.Names}}' | grep -v init | head -1);
|
||||
echo \"crowdsec-init: using container $$container\";
|
||||
docker exec $$container cscli bouncers delete caddy-bouncer 2>/dev/null || true;
|
||||
key=$$(docker exec $$container cscli bouncers add caddy-bouncer -o raw 2>&1);
|
||||
if [ -z \"$$key\" ]; then
|
||||
echo 'crowdsec-init: ERROR — failed to obtain bouncer key' >&2;
|
||||
exit 1;
|
||||
fi;
|
||||
printf 'CROWDSEC_API_KEY=%s\n' \"$$key\" > \"$$out\";
|
||||
echo \"crowdsec-init: bouncer key written (key length: $${#key})\";
|
||||
"
|
||||
restart: "no"
|
||||
|
||||
|
||||
# ─── Caddy (reverse proxy + automatic HTTPS) ──────────────────────────────────
|
||||
# Custom build includes github.com/mholt/caddy-ratelimit and
|
||||
# github.com/hslatman/caddy-crowdsec-bouncer/http.
|
||||
caddy:
|
||||
image: kalekber/libnovel-caddy:${GIT_TAG:-latest}
|
||||
build:
|
||||
context: ./caddy
|
||||
dockerfile: Dockerfile
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
backend:
|
||||
condition: service_healthy
|
||||
ui:
|
||||
condition: service_healthy
|
||||
crowdsec-init:
|
||||
condition: service_completed_successfully
|
||||
ports:
|
||||
- "80:80"
|
||||
- "443:443"
|
||||
- "443:443/udp" # HTTP/3 (QUIC)
|
||||
environment:
|
||||
DOMAIN: "${DOMAIN}"
|
||||
CADDY_ACME_EMAIL: "${CADDY_ACME_EMAIL}"
|
||||
env_file:
|
||||
- path: ./crowdsec/.crowdsec.env
|
||||
required: false
|
||||
volumes:
|
||||
- ./Caddyfile:/etc/caddy/Caddyfile:ro
|
||||
- ./caddy/errors:/srv/errors:ro
|
||||
- caddy_data:/data
|
||||
- caddy_config:/config
|
||||
- caddy_logs:/var/log/caddy
|
||||
|
||||
# ─── Watchtower (auto-redeploy custom services on new images) ────────────────
|
||||
# Only watches services labelled com.centurylinklabs.watchtower.enable=true.
|
||||
# Third-party infra images (minio, pocketbase, meilisearch, etc.) are excluded.
|
||||
watchtower:
|
||||
image: containrrr/watchtower:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
command: --label-enable --interval 300 --cleanup
|
||||
environment:
|
||||
WATCHTOWER_NOTIFICATIONS: "${WATCHTOWER_NOTIFICATIONS}"
|
||||
WATCHTOWER_NOTIFICATION_URL: "${WATCHTOWER_NOTIFICATION_URL}"
|
||||
DOCKER_API_VERSION: "1.44"
|
||||
|
||||
# ─── Shared PostgreSQL (Fider + GlitchTip + Umami) ───────────────────────────
|
||||
# A single Postgres instance hosting three separate databases.
|
||||
# PocketBase uses its own embedded SQLite; this postgres is only for the
|
||||
# three new services below.
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
POSTGRES_USER: "${POSTGRES_USER}"
|
||||
POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}"
|
||||
POSTGRES_DB: postgres
|
||||
expose:
|
||||
- "5432"
|
||||
volumes:
|
||||
- postgres_data:/var/lib/postgresql/data
|
||||
healthcheck:
|
||||
test: ["CMD", "pg_isready", "-U", "${POSTGRES_USER}"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Postgres database initialisation ────────────────────────────────────────
|
||||
# One-shot: creates the fider, glitchtip, and umami databases if missing.
|
||||
postgres-init:
|
||||
image: postgres:16-alpine
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
PGPASSWORD: "${POSTGRES_PASSWORD}"
|
||||
entrypoint: >
|
||||
/bin/sh -c "
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='fider'\" | grep -q 1 ||
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE fider\";
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='glitchtip'\" | grep -q 1 ||
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE glitchtip\";
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='umami'\" | grep -q 1 ||
|
||||
psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE umami\";
|
||||
echo 'postgres-init: databases ready';
|
||||
"
|
||||
restart: "no"
|
||||
|
||||
# ─── Fider (user feedback & feature requests) ─────────────────────────────────
|
||||
fider:
|
||||
image: getfider/fider:stable
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
postgres-init:
|
||||
condition: service_completed_successfully
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
expose:
|
||||
- "3000"
|
||||
environment:
|
||||
BASE_URL: "${FIDER_BASE_URL}"
|
||||
DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/fider?sslmode=disable"
|
||||
JWT_SECRET: "${FIDER_JWT_SECRET}"
|
||||
# Email: noreply mode — emails are suppressed (logged to stdout).
|
||||
# Fider still requires SMTP vars to be non-empty even in noreply mode.
|
||||
EMAIL_NOREPLY: "noreply@libnovel.cc"
|
||||
EMAIL_SMTP_HOST: "localhost"
|
||||
EMAIL_SMTP_PORT: "25"
|
||||
# Disable outbound email — set real SMTP values to enable.
|
||||
EMAIL_NOREPLY_MODE: "true"
|
||||
|
||||
# ─── GlitchTip DB migration (one-shot) ───────────────────────────────────────
|
||||
glitchtip-migrate:
|
||||
image: glitchtip/glitchtip:latest
|
||||
depends_on:
|
||||
postgres-init:
|
||||
condition: service_completed_successfully
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
|
||||
SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
|
||||
GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
|
||||
EMAIL_URL: "consolemail://"
|
||||
DEFAULT_FROM_EMAIL: "errors@libnovel.cc"
|
||||
VALKEY_URL: "redis://valkey:6379/1"
|
||||
command: "./manage.py migrate"
|
||||
restart: "no"
|
||||
|
||||
# ─── GlitchTip web (error tracking UI + API) ─────────────────────────────────
|
||||
glitchtip-web:
|
||||
image: glitchtip/glitchtip:latest
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
glitchtip-migrate:
|
||||
condition: service_completed_successfully
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
expose:
|
||||
- "8000"
|
||||
environment:
|
||||
DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
|
||||
SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
|
||||
GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
|
||||
EMAIL_URL: "consolemail://"
|
||||
DEFAULT_FROM_EMAIL: "errors@libnovel.cc"
|
||||
VALKEY_URL: "redis://valkey:6379/1"
|
||||
PORT: "8000"
|
||||
ENABLE_USER_REGISTRATION: "false"
|
||||
healthcheck:
|
||||
test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/0/')"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── GlitchTip worker (background task processor) ─────────────────────────────
|
||||
glitchtip-worker:
|
||||
image: glitchtip/glitchtip:latest
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
glitchtip-migrate:
|
||||
condition: service_completed_successfully
|
||||
valkey:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
|
||||
SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
|
||||
GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
|
||||
EMAIL_URL: "consolemail://"
|
||||
DEFAULT_FROM_EMAIL: "errors@libnovel.cc"
|
||||
VALKEY_URL: "redis://valkey:6379/1"
|
||||
SERVER_ROLE: "worker"
|
||||
|
||||
# ─── Umami (page analytics) ───────────────────────────────────────────────────
|
||||
umami:
|
||||
image: ghcr.io/umami-software/umami:postgresql-latest
|
||||
restart: unless-stopped
|
||||
depends_on:
|
||||
postgres-init:
|
||||
condition: service_completed_successfully
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
expose:
|
||||
- "3000"
|
||||
environment:
|
||||
DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/umami"
|
||||
APP_SECRET: "${UMAMI_APP_SECRET}"
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-sf", "http://localhost:3000/api/heartbeat"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Dozzle (Docker log viewer) ───────────────────────────────────────────────
|
||||
dozzle:
|
||||
image: amir20/dozzle:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- /var/run/docker.sock:/var/run/docker.sock:ro
|
||||
- ./dozzle/users.yml:/data/users.yml:ro
|
||||
expose:
|
||||
- "8080"
|
||||
environment:
|
||||
DOZZLE_AUTH_PROVIDER: simple
|
||||
DOZZLE_HOSTNAME: "logs.libnovel.cc"
|
||||
healthcheck:
|
||||
test: ["CMD", "/dozzle", "healthcheck"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Uptime Kuma (uptime monitoring) ──────────────────────────────────────────
|
||||
uptime-kuma:
|
||||
image: louislam/uptime-kuma:1
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- uptime_kuma_data:/app/data
|
||||
expose:
|
||||
- "3001"
|
||||
healthcheck:
|
||||
test: ["CMD", "extra/healthcheck"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# ─── Gotify (push notifications) ──────────────────────────────────────────────
|
||||
gotify:
|
||||
image: gotify/server:latest
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- gotify_data:/app/data
|
||||
expose:
|
||||
- "80"
|
||||
environment:
|
||||
GOTIFY_DEFAULTUSER_NAME: "${GOTIFY_ADMIN_USER}"
|
||||
GOTIFY_DEFAULTUSER_PASS: "${GOTIFY_ADMIN_PASS}"
|
||||
GOTIFY_SERVER_PORT: "80"
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "-qO-", "http://localhost:80/health"]
|
||||
interval: 15s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
volumes:
|
||||
minio_data:
|
||||
pb_data:
|
||||
meili_data:
|
||||
valkey_data:
|
||||
caddy_data:
|
||||
caddy_config:
|
||||
caddy_logs:
|
||||
crowdsec_data:
|
||||
postgres_data:
|
||||
uptime_kuma_data:
|
||||
gotify_data:
|
||||
|
||||
82
docs/api-endpoints.md
Normal file
82
docs/api-endpoints.md
Normal file
@@ -0,0 +1,82 @@
|
||||
# API Endpoint Reference
|
||||
|
||||
> **Routing ownership map**: see [`docs/d2/api-routing.svg`](d2/api-routing.svg) (source: [`docs/d2/api-routing.d2`](d2/api-routing.d2)) for a visual overview of which paths Caddy sends to the backend directly vs. through SvelteKit, with auth levels colour-coded.
|
||||
|
||||
All traffic enters through **Caddy :443**. Caddy routes a subset of paths directly to the Go backend (bypassing SvelteKit); everything else goes to SvelteKit, which enforces auth before proxying onward.
|
||||
|
||||
## Health / Version
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/health` | — | Liveness probe. Returns `{"ok":true}`. |
|
||||
| `GET` | `/api/version` | — | Build version + commit hash. |
|
||||
|
||||
## Scrape Jobs (admin)
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/scrape` | admin | Enqueue full catalogue scrape. |
|
||||
| `POST` | `/scrape/book` | admin | Enqueue single-book scrape `{url}`. |
|
||||
| `POST` | `/scrape/book/range` | admin | Enqueue range scrape `{url, from, to?}`. |
|
||||
| `GET` | `/api/scrape/status` | admin | Current job status. |
|
||||
| `GET` | `/api/scrape/tasks` | admin | All scrape task records. |
|
||||
| `POST` | `/api/cancel-task/{id}` | admin | Cancel a pending task. |
|
||||
|
||||
## Browse / Catalogue
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/api/browse` | — | Live novelfire.net browse (MinIO page-1 cache). Legacy — used by save-browse subcommand. |
|
||||
| `GET` | `/api/catalogue` | — | **Primary browse endpoint.** Meilisearch-backed, paginated. Params: `q`, `page`, `limit`, `genre`, `status`, `sort` (`popular`\|`new`\|`update`\|`rank`\|`top-rated`). Falls back to empty when Meilisearch is not configured. |
|
||||
| `GET` | `/api/search` | — | Full-text search: Meilisearch local results merged with live novelfire.net remote results. Param: `q` (≥ 2 chars). Used by iOS app. |
|
||||
| `GET` | `/api/ranking` | — | Top-ranked novels from PocketBase. |
|
||||
| `GET` | `/api/cover/{domain}/{slug}` | — | Proxy cover image from MinIO (redirect to presigned URL). |
|
||||
|
||||
## Book / Chapter Content
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/api/book-preview/{slug}` | — | Returns stored metadata + chapter list, or enqueues a scrape task (202) if unknown. |
|
||||
| `GET` | `/api/chapter-text/{slug}/{n}` | — | Chapter content as plain text (markdown stripped). |
|
||||
| `GET` | `/api/chapter-markdown/{slug}/{n}` | — | Chapter content as raw markdown from MinIO. |
|
||||
| `POST` | `/api/reindex/{slug}` | admin | Rebuild `chapters_idx` from MinIO objects. |
|
||||
|
||||
## Audio
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `POST` | `/api/audio/{slug}/{n}` | — | Trigger Kokoro TTS generation. Body: `{voice?}`. Returns `200 {status:"done"}` if cached, `202 {task_id, status}` if enqueued. |
|
||||
| `GET` | `/api/audio/status/{slug}/{n}` | — | Poll audio generation status. Param: `voice`. Returns `{status, task_id?, error?}`. |
|
||||
| `GET` | `/api/audio-proxy/{slug}/{n}` | — | Redirect to presigned MinIO audio URL. |
|
||||
| `GET` | `/api/voices` | — | List available Kokoro voices. Returns `{voices:[]}` on error. |
|
||||
|
||||
## Presigned URLs
|
||||
|
||||
All presign endpoints return a `302` redirect to a short-lived MinIO presigned
|
||||
URL. The URL is cached in Valkey (TTL ~55 min) to avoid regenerating on every
|
||||
request.
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/api/presign/chapter/{slug}/{n}` | — | Presigned URL for chapter markdown object. |
|
||||
| `GET` | `/api/presign/audio/{slug}/{n}` | — | Presigned URL for audio MP3. Param: `voice`. |
|
||||
| `GET` | `/api/presign/voice-sample/{voice}` | — | Presigned URL for voice sample MP3. |
|
||||
| `GET` | `/api/presign/avatar-upload/{userId}` | user | Presigned PUT URL for avatar upload. |
|
||||
| `GET` | `/api/presign/avatar/{userId}` | — | Presigned GET URL for avatar image. |
|
||||
|
||||
## Reading Progress
|
||||
|
||||
Session-scoped (anonymous via cookie session ID, or tied to authenticated user).
|
||||
|
||||
| Method | Path | Auth | Description |
|
||||
|--------|------|------|-------------|
|
||||
| `GET` | `/api/progress` | — | Get all reading progress for the current session/user. |
|
||||
| `POST` | `/api/progress/{slug}` | — | Set progress. Body: `{chapter}`. |
|
||||
| `DELETE` | `/api/progress/{slug}` | — | Delete progress for a book. |
|
||||
|
||||
## Notes
|
||||
|
||||
- **Auth**: The backend does not enforce auth itself — the SvelteKit UI layer enforces admin/user guards before proxying requests. The backend trusts all incoming requests.
|
||||
- **`/api/catalogue` vs `/api/browse`**: `/api/catalogue` is the primary UI endpoint (Meilisearch, always-local, fast). `/api/browse` hits or caches the live novelfire.net browse page and is only used internally by the `save-browse` subcommand.
|
||||
- **Meilisearch fallback**: When `MEILI_URL` is unset, `/api/catalogue` returns `{books:[], has_next:false}` and `/api/search` falls back to a PocketBase substring scan.
|
||||
- **`BACKEND_API_URL`**: The SvelteKit UI reads this env var (default `http://localhost:8080`) to reach the backend server-side. In docker-compose it is set to `http://backend:8080`.
|
||||
201
docs/d2/api-routing.d2
Normal file
201
docs/d2/api-routing.d2
Normal file
@@ -0,0 +1,201 @@
|
||||
direction: right
|
||||
|
||||
# ─── Legend ───────────────────────────────────────────────────────────────────
|
||||
|
||||
legend: Legend {
|
||||
style.fill: "#fafafa"
|
||||
style.stroke: "#d4d4d8"
|
||||
|
||||
pub: public {
|
||||
style.fill: "#f0fdf4"
|
||||
style.font-color: "#15803d"
|
||||
style.stroke: "#86efac"
|
||||
}
|
||||
user: user auth {
|
||||
style.fill: "#eff6ff"
|
||||
style.font-color: "#1d4ed8"
|
||||
style.stroke: "#93c5fd"
|
||||
}
|
||||
adm: admin only {
|
||||
style.fill: "#fff7ed"
|
||||
style.font-color: "#c2410c"
|
||||
style.stroke: "#fdba74"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Client ───────────────────────────────────────────────────────────────────
|
||||
|
||||
client: Browser / iOS App {
|
||||
shape: person
|
||||
style.fill: "#fff9e6"
|
||||
}
|
||||
|
||||
# ─── Caddy ────────────────────────────────────────────────────────────────────
|
||||
|
||||
caddy: Caddy :443 {
|
||||
shape: rectangle
|
||||
style.fill: "#f1f5f9"
|
||||
label: "Caddy :443\ncustom build · caddy-ratelimit\nsecurity headers · rate limiting\nstatic error pages"
|
||||
}
|
||||
|
||||
# ─── SvelteKit UI ─────────────────────────────────────────────────────────────
|
||||
# Handles: auth enforcement, session, all /api/* routes that have SK counterparts
|
||||
|
||||
sk: SvelteKit UI :3000 {
|
||||
style.fill: "#fef3c7"
|
||||
|
||||
auth: Auth {
|
||||
style.fill: "#fde68a"
|
||||
style.stroke: "#f59e0b"
|
||||
label: "POST /api/auth/login\nPOST /api/auth/register\nPOST /api/auth/change-password\nGET /api/auth/session"
|
||||
}
|
||||
|
||||
catalogue_sk: Catalogue {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/catalogue-page\nGET /api/search"
|
||||
}
|
||||
|
||||
book_sk: Book {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/book/{slug}\nGET /api/chapter/{slug}/{n}\nGET /api/chapter-text-preview/{slug}/{n}"
|
||||
}
|
||||
|
||||
scrape_sk: Scrape (admin) {
|
||||
style.fill: "#fff7ed"
|
||||
style.stroke: "#fdba74"
|
||||
label: "GET /api/scrape/status\nGET /api/scrape/tasks\nPOST /api/scrape\nPOST /api/scrape/range\nPOST /api/scrape/cancel/{id}"
|
||||
}
|
||||
|
||||
audio_sk: Audio {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "POST /api/audio/{slug}/{n}\nGET /api/audio/status/{slug}/{n}\nGET /api/voices"
|
||||
}
|
||||
|
||||
presign_sk: Presigned URLs {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/presign/chapter/{slug}/{n}\nGET /api/presign/audio/{slug}/{n}\nGET /api/presign/voice-sample/{voice}"
|
||||
}
|
||||
|
||||
presign_user: Presigned URLs (user) {
|
||||
style.fill: "#eff6ff"
|
||||
style.stroke: "#93c5fd"
|
||||
label: "GET /api/presign/avatar-upload/{userId}\nGET /api/presign/avatar/{userId}"
|
||||
}
|
||||
|
||||
progress_sk: Progress {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/progress\nPOST /api/progress/{slug}\nDELETE /api/progress/{slug}"
|
||||
}
|
||||
|
||||
library_sk: Library {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/library\nPOST /api/library/{slug}\nDELETE /api/library/{slug}"
|
||||
}
|
||||
|
||||
comments_sk: Comments {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/comments/{slug}\nPOST /api/comments/{slug}"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Go Backend ───────────────────────────────────────────────────────────────
|
||||
# Caddy proxies these paths directly — no SvelteKit auth layer
|
||||
|
||||
be: Backend API :8080 {
|
||||
style.fill: "#eef3ff"
|
||||
|
||||
health_be: Health {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /health\nGET /api/version"
|
||||
}
|
||||
|
||||
scrape_be: Scrape admin (direct) {
|
||||
style.fill: "#fff7ed"
|
||||
style.stroke: "#fdba74"
|
||||
label: "POST /scrape\nPOST /scrape/book\nPOST /scrape/book/range"
|
||||
}
|
||||
|
||||
catalogue_be: Catalogue {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/browse\nGET /api/catalogue\nGET /api/ranking\nGET /api/cover/{domain}/{slug}"
|
||||
}
|
||||
|
||||
book_be: Book / Chapter {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/book-preview/{slug}\nGET /api/chapter-text/{slug}/{n}\nGET /api/chapter-markdown/{slug}/{n}\nPOST /api/reindex/{slug} ⚠ admin"
|
||||
}
|
||||
|
||||
audio_be: Audio {
|
||||
style.fill: "#f0fdf4"
|
||||
style.stroke: "#86efac"
|
||||
label: "GET /api/audio-proxy/{slug}/{n}\nGET /api/voices"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Storage ──────────────────────────────────────────────────────────────────
|
||||
|
||||
storage: Storage {
|
||||
style.fill: "#eaf7ea"
|
||||
|
||||
pb: PocketBase :8090 {
|
||||
shape: cylinder
|
||||
label: "auth · books · progress\ncomments · library\nscrape_jobs · audio_cache"
|
||||
}
|
||||
mn: MinIO :9000 {
|
||||
shape: cylinder
|
||||
label: "chapters · audio\navatars · browse"
|
||||
}
|
||||
ms: Meilisearch :7700 {
|
||||
shape: cylinder
|
||||
label: "index: books"
|
||||
}
|
||||
vk: Valkey :6379 {
|
||||
shape: cylinder
|
||||
label: "presign URL cache"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Caddy routing ────────────────────────────────────────────────────────────
|
||||
|
||||
client -> caddy: HTTPS :443
|
||||
|
||||
caddy -> sk: "/* (catch-all)\n→ SvelteKit handles auth"
|
||||
caddy -> be: "/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/* /api/chapter-markdown/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/* /api/catalogue /api/ranking"
|
||||
caddy -> storage.mn: "/avatars/*\n/audio/*\n/chapters/*\n(presigned MinIO GETs)"
|
||||
|
||||
# ─── SvelteKit → Backend (server-side proxy) ──────────────────────────────────
|
||||
|
||||
sk.catalogue_sk -> be.catalogue_be: internal proxy
|
||||
sk.book_sk -> be.book_be: internal proxy
|
||||
sk.audio_sk -> be.audio_be: internal proxy
|
||||
sk.presign_sk -> storage.vk: check cache
|
||||
sk.presign_sk -> storage.mn: generate presign
|
||||
sk.presign_user -> storage.mn: generate presign
|
||||
|
||||
# ─── SvelteKit → Storage (direct) ────────────────────────────────────────────
|
||||
|
||||
sk.auth -> storage.pb: sessions / users
|
||||
sk.scrape_sk -> storage.pb: scrape job records
|
||||
sk.progress_sk -> storage.pb
|
||||
sk.library_sk -> storage.pb
|
||||
sk.comments_sk -> storage.pb
|
||||
|
||||
# ─── Backend → Storage ────────────────────────────────────────────────────────
|
||||
|
||||
be.catalogue_be -> storage.ms: full-text search
|
||||
be.catalogue_be -> storage.pb: ranking records
|
||||
be.catalogue_be -> storage.mn: cover presign
|
||||
be.book_be -> storage.mn: chapter objects
|
||||
be.book_be -> storage.pb: book metadata
|
||||
be.audio_be -> storage.mn: audio presign
|
||||
be.audio_be -> storage.vk: presign cache
|
||||
127
docs/d2/api-routing.svg
Normal file
127
docs/d2/api-routing.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 57 KiB |
154
docs/d2/architecture.d2
Normal file
154
docs/d2/architecture.d2
Normal file
@@ -0,0 +1,154 @@
|
||||
direction: right
|
||||
|
||||
# ─── External ─────────────────────────────────────────────────────────────────
|
||||
|
||||
novelfire: novelfire.net {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
}
|
||||
|
||||
kokoro: Kokoro-FastAPI TTS {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
}
|
||||
|
||||
letsencrypt: Let's Encrypt {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
}
|
||||
|
||||
browser: Browser / iOS App {
|
||||
shape: person
|
||||
style.fill: "#fff9e6"
|
||||
}
|
||||
|
||||
# ─── Init containers (one-shot) ───────────────────────────────────────────────
|
||||
|
||||
init: Init containers {
|
||||
style.fill: "#f5f5f5"
|
||||
style.stroke-dash: 4
|
||||
|
||||
minio-init: minio-init {
|
||||
shape: rectangle
|
||||
label: "minio-init\n(mc: create buckets)"
|
||||
}
|
||||
|
||||
pb-init: pb-init {
|
||||
shape: rectangle
|
||||
label: "pb-init\n(bootstrap collections)"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Storage ──────────────────────────────────────────────────────────────────
|
||||
|
||||
storage: Storage {
|
||||
style.fill: "#eaf7ea"
|
||||
|
||||
minio: MinIO {
|
||||
shape: cylinder
|
||||
label: "MinIO :9000\n\nbuckets:\n chapters\n audio\n avatars\n catalogue"
|
||||
}
|
||||
|
||||
pocketbase: PocketBase {
|
||||
shape: cylinder
|
||||
label: "PocketBase :8090\n\ncollections:\n books chapters_idx\n audio_cache progress\n scrape_jobs app_users\n ranking"
|
||||
}
|
||||
|
||||
valkey: Valkey {
|
||||
shape: cylinder
|
||||
label: "Valkey :6379\n\n(presign URL cache\nTTL-based, shared)"
|
||||
}
|
||||
|
||||
meilisearch: Meilisearch {
|
||||
shape: cylinder
|
||||
label: "Meilisearch :7700\n\nindices:\n books"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Application ──────────────────────────────────────────────────────────────
|
||||
|
||||
app: Application {
|
||||
style.fill: "#eef3ff"
|
||||
|
||||
caddy: caddy {
|
||||
shape: rectangle
|
||||
label: "Caddy :443 / :80\ncustom build + caddy-ratelimit\n\nfeatures:\n auto-HTTPS (Let's Encrypt)\n security headers\n rate limiting (per-IP)\n static error pages (502/503/504)"
|
||||
}
|
||||
|
||||
backend: backend {
|
||||
shape: rectangle
|
||||
label: "Backend API :8080\n(Go — HTTP API server)"
|
||||
}
|
||||
|
||||
runner: runner {
|
||||
shape: rectangle
|
||||
label: "Runner :9091\n(Go — background worker\nscraping + TTS jobs\n/metrics endpoint)"
|
||||
}
|
||||
|
||||
ui: ui {
|
||||
shape: rectangle
|
||||
label: "SvelteKit UI :3000\n(adapter-node)"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Ops ──────────────────────────────────────────────────────────────────────
|
||||
|
||||
ops: Ops {
|
||||
style.fill: "#fef9ec"
|
||||
|
||||
watchtower: Watchtower {
|
||||
shape: rectangle
|
||||
label: "Watchtower\n(containrrr/watchtower)\n\npolls every 5 min\nautopulls + redeploys:\n backend · runner · ui"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Init → Storage deps ──────────────────────────────────────────────────────
|
||||
|
||||
init.minio-init -> storage.minio: create buckets {style.stroke-dash: 4}
|
||||
init.pb-init -> storage.pocketbase: bootstrap schema {style.stroke-dash: 4}
|
||||
|
||||
# ─── App → Storage ────────────────────────────────────────────────────────────
|
||||
|
||||
app.backend -> storage.minio: blobs (chapters, audio,\navatars, browse)
|
||||
app.backend -> storage.pocketbase: structured records\n(books, progress, jobs…)
|
||||
app.backend -> storage.valkey: cache presigned URLs\n(SET/GET with TTL)
|
||||
|
||||
app.runner -> storage.minio: write chapter markdown\n& audio MP3s
|
||||
app.runner -> storage.pocketbase: read/update scrape jobs\nwrite book records
|
||||
app.runner -> storage.meilisearch: index books on\nscrape completion
|
||||
|
||||
app.ui -> storage.valkey: read presigned URL cache
|
||||
app.ui -> storage.pocketbase: auth, progress,\ncomments, settings
|
||||
|
||||
# ─── App internal ─────────────────────────────────────────────────────────────
|
||||
|
||||
app.ui -> app.backend: REST API calls (server-side)\n/api/catalogue /api/book-preview\n/api/chapter-text /api/audio etc.
|
||||
|
||||
# ─── Caddy routing ────────────────────────────────────────────────────────────
|
||||
# Routes sent directly to backend (no SvelteKit counterpart):
|
||||
# /health /scrape*
|
||||
# /api/browse /api/book-preview/* /api/chapter-text/*
|
||||
# /api/reindex/* /api/cover/* /api/audio-proxy/*
|
||||
# Routes sent to MinIO:
|
||||
# /avatars/*
|
||||
# Everything else → SvelteKit UI (including /api/scrape/*, /api/chapter-text-preview/*)
|
||||
|
||||
app.caddy -> app.ui: "/* (catch-all)\n/api/scrape/*\n/api/chapter-text-preview/*\n→ SvelteKit (auth enforced)"
|
||||
app.caddy -> app.backend: "/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/*"
|
||||
app.caddy -> storage.minio: "/avatars/*\n/audio/*\n/chapters/*\n(presigned MinIO GETs)"
|
||||
|
||||
# ─── External → App ───────────────────────────────────────────────────────────
|
||||
|
||||
app.runner -> novelfire: scrape\n(HTTP GET)
|
||||
app.runner -> kokoro: TTS generation\n(HTTP POST)
|
||||
app.caddy -> letsencrypt: ACME certificate\n(TLS-ALPN-01)
|
||||
|
||||
# ─── Ops → Docker socket ──────────────────────────────────────────────────────
|
||||
|
||||
ops.watchtower -> app.backend: watch (label-enabled)
|
||||
ops.watchtower -> app.runner: watch (label-enabled)
|
||||
ops.watchtower -> app.ui: watch (label-enabled)
|
||||
|
||||
# ─── Browser ──────────────────────────────────────────────────────────────────
|
||||
|
||||
browser -> app.caddy: HTTPS :443\n(single entry point)
|
||||
129
docs/d2/architecture.svg
Normal file
129
docs/d2/architecture.svg
Normal file
File diff suppressed because one or more lines are too long
|
After Width: | Height: | Size: 58 KiB |
72
docs/mermaid/architecture.mermaid.md
Normal file
72
docs/mermaid/architecture.mermaid.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# Architecture Overview
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
%% ── External ──────────────────────────────────────────────────────────
|
||||
NF([novelfire.net])
|
||||
KK([Kokoro-FastAPI TTS])
|
||||
LE([Let's Encrypt])
|
||||
CL([Browser / iOS App])
|
||||
|
||||
%% ── Init containers ───────────────────────────────────────────────────
|
||||
subgraph INIT["Init containers (one-shot)"]
|
||||
MI[minio-init\nmc: create buckets]
|
||||
PI[pb-init\nbootstrap collections]
|
||||
end
|
||||
|
||||
%% ── Storage ───────────────────────────────────────────────────────────
|
||||
subgraph STORAGE["Storage"]
|
||||
MN[(MinIO :9000\nchapters · audio\navatars · browse)]
|
||||
PB[(PocketBase :8090\nbooks · chapters_idx\naudio_cache · progress\nscrape_jobs · app_users · ranking)]
|
||||
VK[(Valkey :6379\npresign URL cache\nTTL-based · shared)]
|
||||
MS[(Meilisearch :7700\nindex: books)]
|
||||
end
|
||||
|
||||
%% ── Application ───────────────────────────────────────────────────────
|
||||
subgraph APP["Application"]
|
||||
CD["Caddy :443/:80\ncustom build + caddy-ratelimit\nauto-HTTPS · security headers\nrate limiting · error pages"]
|
||||
BE[Backend API :8080\nGo HTTP server]
|
||||
RN[Runner :9091\nGo background worker\n/metrics endpoint]
|
||||
UI[SvelteKit UI :3000\nadapter-node]
|
||||
end
|
||||
|
||||
%% ── Ops ───────────────────────────────────────────────────────────────
|
||||
subgraph OPS["Ops"]
|
||||
WT[Watchtower\npolls every 5 min\nautopull + redeploy\nbackend · runner · ui]
|
||||
end
|
||||
|
||||
%% ── Init → Storage ────────────────────────────────────────────────────
|
||||
MI -.->|create buckets| MN
|
||||
PI -.->|bootstrap schema| PB
|
||||
|
||||
%% ── App → Storage ─────────────────────────────────────────────────────
|
||||
BE -->|blobs| MN
|
||||
BE -->|structured records| PB
|
||||
BE -->|cache presigned URLs| VK
|
||||
RN -->|chapter markdown & audio| MN
|
||||
RN -->|read/update jobs & books| PB
|
||||
RN -->|index books on scrape| MS
|
||||
UI -->|read presign cache| VK
|
||||
UI -->|auth · progress · comments| PB
|
||||
|
||||
%% ── App internal ──────────────────────────────────────────────────────
|
||||
UI -->|"REST API (server-side)\n/api/catalogue /api/book-preview\n/api/chapter-text /api/audio"| BE
|
||||
|
||||
%% ── Caddy routing ─────────────────────────────────────────────────────
|
||||
CD -->|"/* catch-all\n/api/scrape/*\n/api/chapter-text-preview/*\n→ SvelteKit (auth enforced)"| UI
|
||||
CD -->|"/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/*"| BE
|
||||
CD -->|/avatars/* presigned GETs| MN
|
||||
|
||||
%% ── Runner → External ─────────────────────────────────────────────────
|
||||
RN -->|scrape HTTP GET| NF
|
||||
RN -->|TTS HTTP POST| KK
|
||||
CD -->|ACME certificate| LE
|
||||
|
||||
%% ── Ops ───────────────────────────────────────────────────────────────
|
||||
WT -->|watch label-enabled| BE
|
||||
WT -->|watch label-enabled| RN
|
||||
WT -->|watch label-enabled| UI
|
||||
|
||||
%% ── Client ────────────────────────────────────────────────────────────
|
||||
CL -->|HTTPS :443 single entry| CD
|
||||
```
|
||||
102
docs/mermaid/data-flow.mermaid.md
Normal file
102
docs/mermaid/data-flow.mermaid.md
Normal file
@@ -0,0 +1,102 @@
|
||||
# Data Flow — Scrape & TTS Job Pipeline
|
||||
|
||||
How content moves from novelfire.net through the runner into storage, and how
|
||||
audio is generated on-demand via the backend.
|
||||
|
||||
## Catalogue Scrape Pipeline
|
||||
|
||||
The runner performs a background catalogue walk on startup and then on a
|
||||
configurable interval (`RUNNER_CATALOGUE_REFRESH_INTERVAL`, default 24 h).
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A([Runner starts / refresh tick]) --> B[Walk novelfire.net catalogue\npages 1…N]
|
||||
B --> C{Book already\nin PocketBase?}
|
||||
C -- no --> D[Scrape book metadata\ntitle · author · genres\ncover · summary · status]
|
||||
C -- yes --> E[Check for new chapters\ncompare total_chapters]
|
||||
D --> F[Write BookMeta\nto PocketBase books]
|
||||
E --> G{New chapters\nfound?}
|
||||
G -- no --> Z([Done — next book])
|
||||
G -- yes --> H
|
||||
F --> H[Scrape chapter list with upTo limit\n→ chapters_idx in PocketBase\nretries on 429 with Retry-After backoff]
|
||||
H --> I[Worker pool — N goroutines\nRUNNER_MAX_CONCURRENT_SCRAPE]
|
||||
I --> J[For each missing chapter:\nGET chapter HTML from novelfire.net]
|
||||
J --> K[Parse HTML → Markdown\nhtmlutil.NodeToMarkdown]
|
||||
K --> L[PUT object to MinIO\nchapters/{slug}/{n}.md]
|
||||
L --> M[Upsert book doc\nto Meilisearch index: books]
|
||||
M --> Z
|
||||
F --> M
|
||||
```
|
||||
|
||||
## On-Demand Single-Book Scrape
|
||||
|
||||
Triggered when a user visits `/books/{slug}` and the book is not in PocketBase.
|
||||
The UI calls `GET /api/book-preview/{slug}` → backend enqueues a scrape task.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
actor U as User
|
||||
participant UI as SvelteKit UI
|
||||
participant BE as Backend API
|
||||
participant TQ as Task Queue (PocketBase)
|
||||
participant RN as Runner
|
||||
participant NF as novelfire.net
|
||||
participant PB as PocketBase
|
||||
participant MN as MinIO
|
||||
participant MS as Meilisearch
|
||||
|
||||
U->>UI: Visit /books/{slug}
|
||||
UI->>BE: GET /api/book-preview/{slug}
|
||||
BE->>PB: getBook(slug) — not found
|
||||
BE->>TQ: INSERT scrape_task (slug, status=pending)
|
||||
BE-->>UI: 202 {task_id, message}
|
||||
UI-->>U: "Scraping…" placeholder
|
||||
|
||||
RN->>TQ: Poll for pending tasks
|
||||
TQ-->>RN: scrape_task (slug)
|
||||
RN->>NF: GET novelfire.net/book/{slug}
|
||||
NF-->>RN: HTML
|
||||
RN->>PB: upsert book + chapters_idx
|
||||
RN->>MN: PUT chapter objects
|
||||
RN->>MS: UpsertBook doc
|
||||
RN->>TQ: UPDATE task status=done
|
||||
|
||||
U->>UI: Poll GET /api/scrape/tasks/{task_id}
|
||||
UI->>BE: GET /api/scrape/status
|
||||
BE->>TQ: get task
|
||||
TQ-->>BE: status=done
|
||||
BE-->>UI: {status:"done"}
|
||||
UI-->>U: Redirect to /books/{slug}
|
||||
```
|
||||
|
||||
## TTS Audio Generation Pipeline
|
||||
|
||||
Audio is generated lazily: on first request the job is enqueued; subsequent
|
||||
requests poll for completion and then stream from MinIO via presigned URL.
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A(["POST /api/audio/{slug}/{n}\nbody: voice=af_bella"]) --> B{Audio already\nin MinIO?}
|
||||
B -- yes --> C[200 status: done]
|
||||
B -- no --> D{Job already\nin queue?}
|
||||
D -- "yes pending/generating" --> E[202 task_id + status]
|
||||
D -- no --> F[INSERT audio_task\nstatus=pending\nin PocketBase]
|
||||
F --> E
|
||||
|
||||
G([Runner polls task queue]) --> H[Claim audio_task\nstatus=generating]
|
||||
H --> I["GET /api/chapter-text/{slug}/{n}\nfrom backend — plain text"]
|
||||
I --> J[POST /v1/audio/speech\nto Kokoro-FastAPI\nbody: text + voice]
|
||||
J --> K[Stream MP3 response]
|
||||
K --> L[PUT object to MinIO\naudio/{slug}/{n}/{voice}.mp3]
|
||||
L --> M[UPDATE audio_task\nstatus=done]
|
||||
|
||||
N(["Client polls\nGET /api/audio/status/{slug}/{n}"]) --> O{status?}
|
||||
O -- "pending/generating" --> N
|
||||
O -- done --> P["GET /api/presign/audio/{slug}/{n}"]
|
||||
P --> Q{Valkey cache hit?}
|
||||
Q -- yes --> R[302 → presigned URL]
|
||||
Q -- no --> S[GeneratePresignedURL\nfrom MinIO — TTL 1h]
|
||||
S --> T[Cache in Valkey\nTTL 3500s]
|
||||
T --> R
|
||||
R --> U([Client streams audio\ndirectly from MinIO])
|
||||
```
|
||||
111
docs/mermaid/request-flow.mermaid.md
Normal file
111
docs/mermaid/request-flow.mermaid.md
Normal file
@@ -0,0 +1,111 @@
|
||||
# Request Flow
|
||||
|
||||
Two representative request paths through the stack: a **page load** (SSR) and a
|
||||
**media playback** (presigned URL → direct MinIO stream).
|
||||
|
||||
## SSR Page Load — Catalogue / Book Detail
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
actor C as Browser / iOS App
|
||||
participant CD as Caddy :443
|
||||
participant UI as SvelteKit UI :3000
|
||||
participant BE as Backend API :8080
|
||||
participant MS as Meilisearch :7700
|
||||
participant PB as PocketBase :8090
|
||||
participant VK as Valkey :6379
|
||||
|
||||
C->>CD: HTTPS GET /catalogue
|
||||
CD->>UI: proxy /* (SvelteKit catch-all)
|
||||
UI->>BE: GET /api/catalogue?page=1&sort=popular
|
||||
BE->>MS: search(query, filters, sort)
|
||||
MS-->>BE: [{slug, title, …}, …]
|
||||
BE-->>UI: {books[], page, total, has_next}
|
||||
UI-->>CD: SSR HTML
|
||||
CD-->>C: 200 HTML
|
||||
|
||||
Note over C,UI: Infinite scroll — client fetches next page via SvelteKit API route
|
||||
C->>CD: HTTPS GET /api/catalogue-page?page=2
|
||||
CD->>UI: proxy /* (SvelteKit /api/catalogue-page server route)
|
||||
UI->>BE: GET /api/catalogue?page=2
|
||||
BE->>MS: search(…)
|
||||
MS-->>BE: next page
|
||||
BE-->>UI: {books[], …}
|
||||
UI-->>C: JSON
|
||||
```
|
||||
|
||||
## Audio Playback — Presigned URL Flow
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
actor C as Browser / iOS App
|
||||
participant CD as Caddy :443
|
||||
participant UI as SvelteKit UI :3000
|
||||
participant BE as Backend API :8080
|
||||
participant VK as Valkey :6379
|
||||
participant MN as MinIO :9000
|
||||
|
||||
C->>CD: GET /api/presign/audio/{slug}/{n}?voice=af_bella
|
||||
CD->>UI: proxy /* (SvelteKit /api/presign/audio route)
|
||||
UI->>BE: GET /api/presign/audio/{slug}/{n}?voice=af_bella
|
||||
BE->>VK: GET presign:audio:{slug}:{n}:{voice}
|
||||
alt cache hit
|
||||
VK-->>BE: presigned URL (TTL remaining)
|
||||
BE-->>UI: 302 redirect → presigned URL
|
||||
UI-->>C: 302 redirect
|
||||
else cache miss
|
||||
BE->>MN: GeneratePresignedURL(audio-bucket, key, 1h)
|
||||
MN-->>BE: presigned URL
|
||||
BE->>VK: SET presign:audio:… EX 3500
|
||||
BE-->>UI: 302 redirect → presigned URL
|
||||
UI-->>C: 302 redirect
|
||||
end
|
||||
C->>MN: GET presigned URL (direct, no proxy)
|
||||
MN-->>C: audio/mpeg stream
|
||||
```
|
||||
|
||||
## Chapter Read — SSR + Content Fetch
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
actor C as Browser / iOS App
|
||||
participant CD as Caddy :443
|
||||
participant UI as SvelteKit UI :3000
|
||||
participant BE as Backend API :8080
|
||||
participant PB as PocketBase :8090
|
||||
participant MN as MinIO :9000
|
||||
|
||||
C->>CD: HTTPS GET /books/{slug}/chapters/{n}
|
||||
CD->>UI: proxy /* (SvelteKit catch-all)
|
||||
UI->>PB: getBook(slug) + listChapterIdx(slug)
|
||||
PB-->>UI: book meta + chapter list
|
||||
UI->>BE: GET /api/chapter-text/{slug}/{n}
|
||||
BE->>MN: GetObject(chapters-bucket, {slug}/{n}.md)
|
||||
MN-->>BE: markdown text
|
||||
BE-->>UI: plain text (markdown stripped)
|
||||
Note over UI: marked() → HTML
|
||||
UI-->>CD: SSR HTML
|
||||
CD-->>C: 200 HTML
|
||||
```
|
||||
|
||||
## Caddy Request Lifecycle
|
||||
|
||||
Shows how security hardening applies before a request reaches any upstream.
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A([Incoming HTTPS request]) --> B[TLS termination\nLet's Encrypt cert]
|
||||
B --> C{Rate limit check\ncaddy-ratelimit}
|
||||
C -- over limit --> D[429 Too Many Requests]
|
||||
C -- ok --> E[Add security headers\nX-Frame-Options · X-Content-Type-Options\nReferrer-Policy · Permissions-Policy\nHSTS · X-XSS-Protection\nremove Server header]
|
||||
E --> F{Route match}
|
||||
F -- "/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/*" --> G[reverse_proxy → backend:8080]
|
||||
F -- "/avatars/*" --> H[reverse_proxy → minio:9000]
|
||||
F -- "/* everything else\n(incl. /api/scrape/*\n/api/chapter-text-preview/*)" --> I[reverse_proxy → ui:3000\nSvelteKit auth middleware runs]
|
||||
G --> J{Upstream healthy?}
|
||||
H --> J
|
||||
I --> J
|
||||
J -- yes --> K([Response to client])
|
||||
J -- "502/503/504" --> L[handle_errors\nstatic HTML from /srv/errors/]
|
||||
L --> K
|
||||
```
|
||||
5
dozzle/users.yml
Normal file
5
dozzle/users.yml
Normal file
@@ -0,0 +1,5 @@
|
||||
users:
|
||||
admin:
|
||||
name: admin
|
||||
email: admin@libnovel.cc
|
||||
password: "$2y$10$4jqLza2grpxnQn0EGux2C.UmlSxRmOvH/J1ySzOBxMZgW6cA2TnmK"
|
||||
87
ios/AGENTS.md
Normal file
87
ios/AGENTS.md
Normal file
@@ -0,0 +1,87 @@
|
||||
# LibNovel iOS App
|
||||
|
||||
SwiftUI app targeting iOS 17+. Consumes the Go scraper HTTP API for books, chapters, and audio. Uses MinIO presigned URLs for media playback and downloads.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
ios/LibNovel/LibNovel/
|
||||
├── App/ # LibNovelApp.swift, ContentView.swift, RootTabView.swift
|
||||
├── Models/ # Models.swift (all domain types)
|
||||
├── Networking/ # APIClient.swift (URLSession-based HTTP client)
|
||||
├── Services/ # AudioPlayerService, AudioDownloadService, AuthStore,
|
||||
│ # BookVoicePreferences, NetworkMonitor
|
||||
├── ViewModels/ # One per view/feature (HomeViewModel, BrowseViewModel, etc.)
|
||||
├── Views/
|
||||
│ ├── Auth/ # AuthView
|
||||
│ ├── BookDetail/ # BookDetailView, CommentsView
|
||||
│ ├── Browse/ # BrowseView (infinite scroll shelves)
|
||||
│ ├── ChapterReader/ # ChapterReaderView, DownloadAudioButton
|
||||
│ ├── Common/ # CommonViews (shared reusable components)
|
||||
│ ├── Components/ # OfflineBanner
|
||||
│ ├── Downloads/ # DownloadsView, DownloadQueueButton
|
||||
│ ├── Home/ # HomeView
|
||||
│ ├── Library/ # LibraryView (2-col grid, filters)
|
||||
│ ├── Player/ # PlayerViews (floating FAB, compact, full-screen)
|
||||
│ ├── Profile/ # ProfileView, VoiceSelectionView, UserProfileView, etc.
|
||||
│ └── Search/ # SearchView
|
||||
└── Extensions/ # NavDestination.swift, String+App.swift, Color+App.swift
|
||||
```
|
||||
|
||||
## iOS / Swift Conventions
|
||||
|
||||
- **Deployment target**: iOS 17.0 — use iOS 17+ APIs freely.
|
||||
- **Observable pattern**: The codebase currently uses `@StateObject` / `ObservableObject` / `@Published`. When adding new types, prefer the **`@Observable` macro** (iOS 17+) over `ObservableObject`. Do not refactor existing types unless explicitly asked.
|
||||
- **Navigation**: Use `NavigationStack` (not `NavigationView`). Use `.navigationDestination(for:)` for type-safe routing.
|
||||
- **Concurrency**: Use `async/await` and structured concurrency. Avoid callback-based APIs and `DispatchQueue.main.async` — prefer `@MainActor` or `await MainActor.run`.
|
||||
- **State management**: Prefer `@State` + `@Binding` for local UI state. Use environment objects for app-wide services (authStore, audioPlayer, downloadService, networkMonitor).
|
||||
- **SwiftData**: Not currently used. Do not introduce SwiftData without discussion.
|
||||
- **SF Symbols**: Use `Image(systemName:)` for icons. No emoji in UI unless already present.
|
||||
|
||||
## Key Patterns
|
||||
|
||||
- **Download keys**: Use `::` as separator (e.g., `"slug::chapter-1::voice"`), never `-`. Slugs contain hyphens.
|
||||
- **Voice fallback chain**: book override → global default → `"af_bella"`. See `BookVoicePreferences.voiceWithFallback()`.
|
||||
- **Offline handling**: Wrap view bodies in `VStack` with `OfflineBanner` at top. Use `NetworkMonitor` (environment object) to gate network calls. Suppress network errors silently when offline via `ErrorAlertModifier`.
|
||||
- **Audio playback priority**: local file → MinIO presigned URL → trigger TTS generation.
|
||||
- **Progress display**: Show decimal % when < 10% (e.g., "3.4%"), rounded when >= 10% (e.g., "47%").
|
||||
- **Cover images**: Always proxy via `/api/cover/{domain}/{slug}` — never link directly to source.
|
||||
|
||||
## Networking
|
||||
|
||||
`APIClient.swift` wraps all Go scraper API calls. When adding new endpoints:
|
||||
|
||||
1. Add a method to `APIClient`.
|
||||
2. Keep error handling consistent — throw typed errors, let ViewModels catch and set `errorMessage`.
|
||||
3. All requests are relative to `SCRAPER_API_URL` (configured at build time via xcconfig or environment).
|
||||
|
||||
## Using Documentation Tools
|
||||
|
||||
When writing or reviewing SwiftUI/Swift code:
|
||||
|
||||
- Use `context7` to look up current Apple SwiftUI/Swift documentation before implementing anything non-trivial. Apple's APIs evolve fast — do not rely on training data alone.
|
||||
- Use `gh_grep` to find real-world Swift patterns when unsure how something is typically implemented.
|
||||
|
||||
Example prompts:
|
||||
- "How does `.searchable` work in iOS 17? use context7"
|
||||
- "Show me examples of `@Observable` with async tasks. use context7"
|
||||
- "How do other apps implement background URLSession downloads in Swift? use gh_grep"
|
||||
|
||||
## UI/UX Skill
|
||||
|
||||
For any iOS view work, always load the `ios-ux` skill at the start of the task:
|
||||
|
||||
```
|
||||
skill({ name: "ios-ux" })
|
||||
```
|
||||
|
||||
This skill defines the full design system, animation rules, haptic feedback policy, accessibility checklist, performance guidelines, and offline handling requirements. It also governs how to handle screenshot-based reviews (analyze → suggest → confirm before applying).
|
||||
|
||||
## What to Avoid
|
||||
|
||||
- `NavigationView` — deprecated, use `NavigationStack`
|
||||
- `ObservableObject` / `@Published` for new types — prefer `@Observable`
|
||||
- `DispatchQueue.main.async` — prefer `@MainActor`
|
||||
- Force unwrapping optionals
|
||||
- Hardcoded color literals — use `Color+App.swift` extensions or semantic colors
|
||||
- Adding new dependencies (SPM packages) without discussion
|
||||
10
ios/LibNovel/.gitignore
vendored
10
ios/LibNovel/.gitignore
vendored
@@ -1,10 +0,0 @@
|
||||
# Fastlane
|
||||
fastlane/report.xml
|
||||
fastlane/Preview.html
|
||||
fastlane/screenshots/**/*.png
|
||||
fastlane/test_output
|
||||
fastlane/README.md
|
||||
|
||||
# Bundler
|
||||
.bundle
|
||||
vendor/bundle
|
||||
@@ -1,21 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>method</key>
|
||||
<string>app-store</string>
|
||||
<key>teamID</key>
|
||||
<string>GHZXC6FVMU</string>
|
||||
<key>uploadBitcode</key>
|
||||
<false/>
|
||||
<key>uploadSymbols</key>
|
||||
<true/>
|
||||
<key>signingStyle</key>
|
||||
<string>manual</string>
|
||||
<key>provisioningProfiles</key>
|
||||
<dict>
|
||||
<key>com.kalekber.LibNovel</key>
|
||||
<string>LibNovel Distribution</string>
|
||||
</dict>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,3 +0,0 @@
|
||||
source "https://rubygems.org"
|
||||
|
||||
gem "fastlane"
|
||||
@@ -1,697 +0,0 @@
|
||||
// !$*UTF8*$!
|
||||
{
|
||||
archiveVersion = 1;
|
||||
classes = {
|
||||
};
|
||||
objectVersion = 77;
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
032E049A4BB3CF0EA990C0CD /* LibNovelApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4F56C8E2BC3614530B81569D /* LibNovelApp.swift */; };
|
||||
08DFB5F626BA769556C8D145 /* BrowseView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1FA3F0FCA383180EE4C93BBA /* BrowseView.swift */; };
|
||||
0A52BC1CE71BED9E75D20D35 /* Models.swift in Sources */ = {isa = PBXBuildFile; fileRef = 762E378B9BC2161A7AA2CC36 /* Models.swift */; };
|
||||
2790B8C051BE389D83645047 /* BrowseViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9812F5FE30ED657FB40ABD7A /* BrowseViewModel.swift */; };
|
||||
2A15157AD2AE2271675C3485 /* ChapterReaderViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8995E667B3DD9CFCAD8A91D7 /* ChapterReaderViewModel.swift */; };
|
||||
3521DFD5FCBBED7B90368829 /* LibraryViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = FC338B05EA6DB22900712000 /* LibraryViewModel.swift */; };
|
||||
367C88FFC11701D2BAD8CCD0 /* RootTabView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2D5C115992F1CE2326236765 /* RootTabView.swift */; };
|
||||
41FB51553F1F1AEBFEA91C0A /* String+App.swift in Sources */ = {isa = PBXBuildFile; fileRef = FEC6F837FF2E902E334ED72E /* String+App.swift */; };
|
||||
4BB2C76262D5BD5DAD0D5D28 /* LibNovelTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = B4C918833E173D6B44D06955 /* LibNovelTests.swift */; };
|
||||
58E440CE4360D755401D1672 /* ProfileViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 937A589F84FD412BBB6FBC45 /* ProfileViewModel.swift */; };
|
||||
5D8D783259EF54C773788AAB /* AuthStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = F219788AE5ACBD6F240674F5 /* AuthStore.swift */; };
|
||||
64D80AACB8E1967B17921EE3 /* ProfileView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C0B17D50389C6C98FC78BDBC /* ProfileView.swift */; };
|
||||
749292A18C57FA41EC88A30B /* BookDetailView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 39DE056C37FBC5EED8771821 /* BookDetailView.swift */; };
|
||||
7C74C10317D389121922A5E3 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 5A776719B77EDDB5E44743B0 /* Assets.xcassets */; };
|
||||
7D81DEB2EEFF9CA5079AEEF7 /* BookDetailViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 837F83AA12B59924FDF16617 /* BookDetailViewModel.swift */; };
|
||||
94D0C4B15734B4056BF3B127 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4B820081FA4817765A39939A /* ContentView.swift */; };
|
||||
9B2D6F241E707312AB80DC31 /* AuthView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7CEF6782A2A28B2A485CBD48 /* AuthView.swift */; };
|
||||
A9B95BAD7CE2DCD1DDDABD4C /* AudioPlayerService.swift in Sources */ = {isa = PBXBuildFile; fileRef = DB13E89E50529E3081533A66 /* AudioPlayerService.swift */; };
|
||||
BE7805A4E78037A82B12AE56 /* PlayerViews.swift in Sources */ = {isa = PBXBuildFile; fileRef = DF49C3AEF9D010F9FEDAB1FC /* PlayerViews.swift */; };
|
||||
C807AD8D627CF6BED47D517C /* HomeViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3AB2E843D93461074A89A171 /* HomeViewModel.swift */; };
|
||||
CFDAA4776344B075A1E3CD6B /* Kingfisher in Frameworks */ = {isa = PBXBuildFile; productRef = 09584EAB68A07B47F876A062 /* Kingfisher */; };
|
||||
E1F564399D1325F6A1B2B84F /* LibraryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = C21107BECA55C07416E0CB8B /* LibraryView.swift */; };
|
||||
E2572692178FD17145FDAF77 /* Color+App.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9D83BB88C4306BE7A4F947CB /* Color+App.swift */; };
|
||||
EF3C57C400BF05CBEAC1F7FE /* HomeView.swift in Sources */ = {isa = PBXBuildFile; fileRef = D6268D60803940CBD38FB921 /* HomeView.swift */; };
|
||||
F2AF05B9C8C23132A73ACDD3 /* CommonViews.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8E89FD8F46747CA653C5203D /* CommonViews.swift */; };
|
||||
F4FDA3C44752EB979235C042 /* NavDestination.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7CAFB96D2500F34F0B0C860C /* NavDestination.swift */; };
|
||||
FB32F3772CA09684F00497F3 /* APIClient.swift in Sources */ = {isa = PBXBuildFile; fileRef = B593F179EC3E9112126B540B /* APIClient.swift */; };
|
||||
FEFB5FDC2424D22914458001 /* ChapterReaderView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 81E3939152E23B4985FAF7E2 /* ChapterReaderView.swift */; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXContainerItemProxy section */
|
||||
698AC3AA533BC05C985595D0 /* PBXContainerItemProxy */ = {
|
||||
isa = PBXContainerItemProxy;
|
||||
containerPortal = A10A669C0C8B43078C0FEE9F /* Project object */;
|
||||
proxyType = 1;
|
||||
remoteGlobalIDString = D039EDECDE3998D8534BB680;
|
||||
remoteInfo = LibNovel;
|
||||
};
|
||||
/* End PBXContainerItemProxy section */
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
1B8BF3DB582A658386E402C7 /* LibNovel.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = LibNovel.app; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
1FA3F0FCA383180EE4C93BBA /* BrowseView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BrowseView.swift; sourceTree = "<group>"; };
|
||||
235967A21B386BE13F56F3F8 /* LibNovelTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = LibNovelTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
2D5C115992F1CE2326236765 /* RootTabView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RootTabView.swift; sourceTree = "<group>"; };
|
||||
39DE056C37FBC5EED8771821 /* BookDetailView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BookDetailView.swift; sourceTree = "<group>"; };
|
||||
3AB2E843D93461074A89A171 /* HomeViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomeViewModel.swift; sourceTree = "<group>"; };
|
||||
4B820081FA4817765A39939A /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
|
||||
4F56C8E2BC3614530B81569D /* LibNovelApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibNovelApp.swift; sourceTree = "<group>"; };
|
||||
5A776719B77EDDB5E44743B0 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
|
||||
762E378B9BC2161A7AA2CC36 /* Models.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Models.swift; sourceTree = "<group>"; };
|
||||
7CAFB96D2500F34F0B0C860C /* NavDestination.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NavDestination.swift; sourceTree = "<group>"; };
|
||||
7CEF6782A2A28B2A485CBD48 /* AuthView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthView.swift; sourceTree = "<group>"; };
|
||||
81E3939152E23B4985FAF7E2 /* ChapterReaderView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChapterReaderView.swift; sourceTree = "<group>"; };
|
||||
837F83AA12B59924FDF16617 /* BookDetailViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BookDetailViewModel.swift; sourceTree = "<group>"; };
|
||||
8995E667B3DD9CFCAD8A91D7 /* ChapterReaderViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChapterReaderViewModel.swift; sourceTree = "<group>"; };
|
||||
8E89FD8F46747CA653C5203D /* CommonViews.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CommonViews.swift; sourceTree = "<group>"; };
|
||||
937A589F84FD412BBB6FBC45 /* ProfileViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileViewModel.swift; sourceTree = "<group>"; };
|
||||
9812F5FE30ED657FB40ABD7A /* BrowseViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BrowseViewModel.swift; sourceTree = "<group>"; };
|
||||
9D83BB88C4306BE7A4F947CB /* Color+App.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Color+App.swift"; sourceTree = "<group>"; };
|
||||
B4C918833E173D6B44D06955 /* LibNovelTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibNovelTests.swift; sourceTree = "<group>"; };
|
||||
B593F179EC3E9112126B540B /* APIClient.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = APIClient.swift; sourceTree = "<group>"; };
|
||||
C0B17D50389C6C98FC78BDBC /* ProfileView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProfileView.swift; sourceTree = "<group>"; };
|
||||
C21107BECA55C07416E0CB8B /* LibraryView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibraryView.swift; sourceTree = "<group>"; };
|
||||
D6268D60803940CBD38FB921 /* HomeView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HomeView.swift; sourceTree = "<group>"; };
|
||||
DB13E89E50529E3081533A66 /* AudioPlayerService.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioPlayerService.swift; sourceTree = "<group>"; };
|
||||
DF49C3AEF9D010F9FEDAB1FC /* PlayerViews.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PlayerViews.swift; sourceTree = "<group>"; };
|
||||
F219788AE5ACBD6F240674F5 /* AuthStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AuthStore.swift; sourceTree = "<group>"; };
|
||||
FC338B05EA6DB22900712000 /* LibraryViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LibraryViewModel.swift; sourceTree = "<group>"; };
|
||||
FEC6F837FF2E902E334ED72E /* String+App.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "String+App.swift"; sourceTree = "<group>"; };
|
||||
/* End PBXFileReference section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
EFE3211B202EDF04EB141EFB /* Frameworks */ = {
|
||||
isa = PBXFrameworksBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
CFDAA4776344B075A1E3CD6B /* Kingfisher in Frameworks */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXFrameworksBuildPhase section */
|
||||
|
||||
/* Begin PBXGroup section */
|
||||
2C0FB0EDFF9B3E24B97F4214 /* Resources */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
5A776719B77EDDB5E44743B0 /* Assets.xcassets */,
|
||||
);
|
||||
path = Resources;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
2C57B93EAF19A3B18E7B7E87 /* Views */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
2F18D1275D6022B9847E310E /* Auth */,
|
||||
FB5C0D4925633786D28C6DE3 /* BookDetail */,
|
||||
8E8AAA58A33084ADB8AEA80C /* Browse */,
|
||||
4EAB87A1ED4943A311F26F84 /* ChapterReader */,
|
||||
5D5809803A3D74FAE19DB218 /* Common */,
|
||||
811FC0F6B9C209D6EC8543BD /* Home */,
|
||||
FA994FD601E79EC811D822A4 /* Library */,
|
||||
89F2CB14192E7D7565A588E0 /* Player */,
|
||||
3DB66C5703A4CCAFFA1B7AFE /* Profile */,
|
||||
);
|
||||
path = Views;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
2F18D1275D6022B9847E310E /* Auth */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
7CEF6782A2A28B2A485CBD48 /* AuthView.swift */,
|
||||
);
|
||||
path = Auth;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
3DB66C5703A4CCAFFA1B7AFE /* Profile */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
C0B17D50389C6C98FC78BDBC /* ProfileView.swift */,
|
||||
);
|
||||
path = Profile;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
426F7C5465758645B93A1AB1 /* Networking */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
B593F179EC3E9112126B540B /* APIClient.swift */,
|
||||
);
|
||||
path = Networking;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
4EAB87A1ED4943A311F26F84 /* ChapterReader */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
81E3939152E23B4985FAF7E2 /* ChapterReaderView.swift */,
|
||||
);
|
||||
path = ChapterReader;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
5D5809803A3D74FAE19DB218 /* Common */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
8E89FD8F46747CA653C5203D /* CommonViews.swift */,
|
||||
);
|
||||
path = Common;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
6318D3C6F0DC6C8E2C377103 /* Products */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
1B8BF3DB582A658386E402C7 /* LibNovel.app */,
|
||||
235967A21B386BE13F56F3F8 /* LibNovelTests.xctest */,
|
||||
);
|
||||
name = Products;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
646952B9CE927F8038FF0A13 /* LibNovelTests */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
B4C918833E173D6B44D06955 /* LibNovelTests.swift */,
|
||||
);
|
||||
path = LibNovelTests;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
80148B5E27BD0A3DEDB3ADAA /* Models */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
762E378B9BC2161A7AA2CC36 /* Models.swift */,
|
||||
);
|
||||
path = Models;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
811FC0F6B9C209D6EC8543BD /* Home */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
D6268D60803940CBD38FB921 /* HomeView.swift */,
|
||||
);
|
||||
path = Home;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
89F2CB14192E7D7565A588E0 /* Player */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
DF49C3AEF9D010F9FEDAB1FC /* PlayerViews.swift */,
|
||||
);
|
||||
path = Player;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
8E8AAA58A33084ADB8AEA80C /* Browse */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
1FA3F0FCA383180EE4C93BBA /* BrowseView.swift */,
|
||||
);
|
||||
path = Browse;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
9AF55E5D62F980C72431782A = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
A28A184E73B15138A4D13F31 /* LibNovel */,
|
||||
646952B9CE927F8038FF0A13 /* LibNovelTests */,
|
||||
6318D3C6F0DC6C8E2C377103 /* Products */,
|
||||
);
|
||||
indentWidth = 4;
|
||||
sourceTree = "<group>";
|
||||
tabWidth = 4;
|
||||
usesTabs = 0;
|
||||
};
|
||||
A28A184E73B15138A4D13F31 /* LibNovel */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
FE92158CC5DA9AD446062724 /* App */,
|
||||
FD5EDEE9747643D45CA6423E /* Extensions */,
|
||||
80148B5E27BD0A3DEDB3ADAA /* Models */,
|
||||
426F7C5465758645B93A1AB1 /* Networking */,
|
||||
2C0FB0EDFF9B3E24B97F4214 /* Resources */,
|
||||
DA6F6F625578875F3E74F1D3 /* Services */,
|
||||
B6916C5C762A37AB1279DF44 /* ViewModels */,
|
||||
2C57B93EAF19A3B18E7B7E87 /* Views */,
|
||||
);
|
||||
path = LibNovel;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
B6916C5C762A37AB1279DF44 /* ViewModels */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
837F83AA12B59924FDF16617 /* BookDetailViewModel.swift */,
|
||||
9812F5FE30ED657FB40ABD7A /* BrowseViewModel.swift */,
|
||||
8995E667B3DD9CFCAD8A91D7 /* ChapterReaderViewModel.swift */,
|
||||
3AB2E843D93461074A89A171 /* HomeViewModel.swift */,
|
||||
FC338B05EA6DB22900712000 /* LibraryViewModel.swift */,
|
||||
937A589F84FD412BBB6FBC45 /* ProfileViewModel.swift */,
|
||||
);
|
||||
path = ViewModels;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
DA6F6F625578875F3E74F1D3 /* Services */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
DB13E89E50529E3081533A66 /* AudioPlayerService.swift */,
|
||||
F219788AE5ACBD6F240674F5 /* AuthStore.swift */,
|
||||
);
|
||||
path = Services;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FA994FD601E79EC811D822A4 /* Library */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
C21107BECA55C07416E0CB8B /* LibraryView.swift */,
|
||||
);
|
||||
path = Library;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FB5C0D4925633786D28C6DE3 /* BookDetail */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
39DE056C37FBC5EED8771821 /* BookDetailView.swift */,
|
||||
);
|
||||
path = BookDetail;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FD5EDEE9747643D45CA6423E /* Extensions */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
9D83BB88C4306BE7A4F947CB /* Color+App.swift */,
|
||||
7CAFB96D2500F34F0B0C860C /* NavDestination.swift */,
|
||||
FEC6F837FF2E902E334ED72E /* String+App.swift */,
|
||||
);
|
||||
path = Extensions;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
FE92158CC5DA9AD446062724 /* App */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
4B820081FA4817765A39939A /* ContentView.swift */,
|
||||
4F56C8E2BC3614530B81569D /* LibNovelApp.swift */,
|
||||
2D5C115992F1CE2326236765 /* RootTabView.swift */,
|
||||
);
|
||||
path = App;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXGroup section */
|
||||
|
||||
/* Begin PBXNativeTarget section */
|
||||
5E6D3E8266BFCF0AAF5EC79D /* LibNovelTests */ = {
|
||||
isa = PBXNativeTarget;
|
||||
buildConfigurationList = 964FF85B62FA35E819BE7661 /* Build configuration list for PBXNativeTarget "LibNovelTests" */;
|
||||
buildPhases = (
|
||||
247D45B3DB26CAC41FA78A0B /* Sources */,
|
||||
);
|
||||
buildRules = (
|
||||
);
|
||||
dependencies = (
|
||||
9FD4A50EB175FC09D6BFD28D /* PBXTargetDependency */,
|
||||
);
|
||||
name = LibNovelTests;
|
||||
packageProductDependencies = (
|
||||
);
|
||||
productName = LibNovelTests;
|
||||
productReference = 235967A21B386BE13F56F3F8 /* LibNovelTests.xctest */;
|
||||
productType = "com.apple.product-type.bundle.unit-test";
|
||||
};
|
||||
D039EDECDE3998D8534BB680 /* LibNovel */ = {
|
||||
isa = PBXNativeTarget;
|
||||
buildConfigurationList = 29B2DE7267A3A4B2D89B32DA /* Build configuration list for PBXNativeTarget "LibNovel" */;
|
||||
buildPhases = (
|
||||
48661ADCA15B54E048CF694C /* Sources */,
|
||||
27446CA4728C022832398376 /* Resources */,
|
||||
EFE3211B202EDF04EB141EFB /* Frameworks */,
|
||||
);
|
||||
buildRules = (
|
||||
);
|
||||
dependencies = (
|
||||
);
|
||||
name = LibNovel;
|
||||
packageProductDependencies = (
|
||||
09584EAB68A07B47F876A062 /* Kingfisher */,
|
||||
);
|
||||
productName = LibNovel;
|
||||
productReference = 1B8BF3DB582A658386E402C7 /* LibNovel.app */;
|
||||
productType = "com.apple.product-type.application";
|
||||
};
|
||||
/* End PBXNativeTarget section */
|
||||
|
||||
/* Begin PBXProject section */
|
||||
A10A669C0C8B43078C0FEE9F /* Project object */ = {
|
||||
isa = PBXProject;
|
||||
attributes = {
|
||||
BuildIndependentTargetsInParallel = YES;
|
||||
LastUpgradeCheck = 2630;
|
||||
};
|
||||
buildConfigurationList = D27899EE96A9AFCBBE62EA3C /* Build configuration list for PBXProject "LibNovel" */;
|
||||
developmentRegion = en;
|
||||
hasScannedForEncodings = 0;
|
||||
knownRegions = (
|
||||
Base,
|
||||
en,
|
||||
);
|
||||
mainGroup = 9AF55E5D62F980C72431782A;
|
||||
minimizedProjectReferenceProxies = 1;
|
||||
packageReferences = (
|
||||
AFEF7128801A76181793EA9C /* XCRemoteSwiftPackageReference "Kingfisher" */,
|
||||
);
|
||||
preferredProjectObjectVersion = 77;
|
||||
productRefGroup = 6318D3C6F0DC6C8E2C377103 /* Products */;
|
||||
projectDirPath = "";
|
||||
projectRoot = "";
|
||||
targets = (
|
||||
D039EDECDE3998D8534BB680 /* LibNovel */,
|
||||
5E6D3E8266BFCF0AAF5EC79D /* LibNovelTests */,
|
||||
);
|
||||
};
|
||||
/* End PBXProject section */
|
||||
|
||||
/* Begin PBXResourcesBuildPhase section */
|
||||
27446CA4728C022832398376 /* Resources */ = {
|
||||
isa = PBXResourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
7C74C10317D389121922A5E3 /* Assets.xcassets in Resources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXResourcesBuildPhase section */
|
||||
|
||||
/* Begin PBXSourcesBuildPhase section */
|
||||
247D45B3DB26CAC41FA78A0B /* Sources */ = {
|
||||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
4BB2C76262D5BD5DAD0D5D28 /* LibNovelTests.swift in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
48661ADCA15B54E048CF694C /* Sources */ = {
|
||||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
FB32F3772CA09684F00497F3 /* APIClient.swift in Sources */,
|
||||
A9B95BAD7CE2DCD1DDDABD4C /* AudioPlayerService.swift in Sources */,
|
||||
5D8D783259EF54C773788AAB /* AuthStore.swift in Sources */,
|
||||
9B2D6F241E707312AB80DC31 /* AuthView.swift in Sources */,
|
||||
749292A18C57FA41EC88A30B /* BookDetailView.swift in Sources */,
|
||||
7D81DEB2EEFF9CA5079AEEF7 /* BookDetailViewModel.swift in Sources */,
|
||||
08DFB5F626BA769556C8D145 /* BrowseView.swift in Sources */,
|
||||
2790B8C051BE389D83645047 /* BrowseViewModel.swift in Sources */,
|
||||
FEFB5FDC2424D22914458001 /* ChapterReaderView.swift in Sources */,
|
||||
2A15157AD2AE2271675C3485 /* ChapterReaderViewModel.swift in Sources */,
|
||||
E2572692178FD17145FDAF77 /* Color+App.swift in Sources */,
|
||||
F2AF05B9C8C23132A73ACDD3 /* CommonViews.swift in Sources */,
|
||||
94D0C4B15734B4056BF3B127 /* ContentView.swift in Sources */,
|
||||
EF3C57C400BF05CBEAC1F7FE /* HomeView.swift in Sources */,
|
||||
C807AD8D627CF6BED47D517C /* HomeViewModel.swift in Sources */,
|
||||
032E049A4BB3CF0EA990C0CD /* LibNovelApp.swift in Sources */,
|
||||
E1F564399D1325F6A1B2B84F /* LibraryView.swift in Sources */,
|
||||
3521DFD5FCBBED7B90368829 /* LibraryViewModel.swift in Sources */,
|
||||
0A52BC1CE71BED9E75D20D35 /* Models.swift in Sources */,
|
||||
F4FDA3C44752EB979235C042 /* NavDestination.swift in Sources */,
|
||||
BE7805A4E78037A82B12AE56 /* PlayerViews.swift in Sources */,
|
||||
64D80AACB8E1967B17921EE3 /* ProfileView.swift in Sources */,
|
||||
58E440CE4360D755401D1672 /* ProfileViewModel.swift in Sources */,
|
||||
367C88FFC11701D2BAD8CCD0 /* RootTabView.swift in Sources */,
|
||||
41FB51553F1F1AEBFEA91C0A /* String+App.swift in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXSourcesBuildPhase section */
|
||||
|
||||
/* Begin PBXTargetDependency section */
|
||||
9FD4A50EB175FC09D6BFD28D /* PBXTargetDependency */ = {
|
||||
isa = PBXTargetDependency;
|
||||
target = D039EDECDE3998D8534BB680 /* LibNovel */;
|
||||
targetProxy = 698AC3AA533BC05C985595D0 /* PBXContainerItemProxy */;
|
||||
};
|
||||
/* End PBXTargetDependency section */
|
||||
|
||||
/* Begin XCBuildConfiguration section */
|
||||
428871329DC9E7B31FA1664B /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
BUNDLE_LOADER = "$(TEST_HOST)";
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
"@loader_path/Frameworks",
|
||||
);
|
||||
PRODUCT_BUNDLE_IDENTIFIER = com.kalekber.LibNovel.tests;
|
||||
SDKROOT = iphoneos;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/LibNovel.app/LibNovel";
|
||||
};
|
||||
name = Release;
|
||||
};
|
||||
49CBF0D367E562629E002A4B /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
BUNDLE_LOADER = "$(TEST_HOST)";
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
"@loader_path/Frameworks",
|
||||
);
|
||||
PRODUCT_BUNDLE_IDENTIFIER = com.kalekber.LibNovel.tests;
|
||||
SDKROOT = iphoneos;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/LibNovel.app/LibNovel";
|
||||
};
|
||||
name = Debug;
|
||||
};
|
||||
8098D4A97F989064EC71E5A1 /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
|
||||
CODE_SIGN_IDENTITY = "Apple Development";
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = GHZXC6FVMU;
|
||||
GENERATE_INFOPLIST_FILE = NO;
|
||||
INFOPLIST_FILE = LibNovel/Resources/Info.plist;
|
||||
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.books";
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
);
|
||||
MARKETING_VERSION = 1.0.1;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = com.kalekber.LibNovel;
|
||||
PROVISIONING_PROFILE_SPECIFIER = "";
|
||||
SDKROOT = iphoneos;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
};
|
||||
name = Debug;
|
||||
};
|
||||
9C182367114E72FF84D54A2F /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
||||
CLANG_ANALYZER_NONNULL = YES;
|
||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
|
||||
CLANG_CXX_LIBRARY = "libc++";
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CLANG_ENABLE_OBJC_ARC = YES;
|
||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
||||
CLANG_WARN_COMMA = YES;
|
||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
||||
CLANG_WARN_EMPTY_BODY = YES;
|
||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
|
||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
COPY_PHASE_STRIP = NO;
|
||||
CURRENT_PROJECT_VERSION = 1000;
|
||||
DEBUG_INFORMATION_FORMAT = dwarf;
|
||||
ENABLE_PREVIEWS = YES;
|
||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
||||
ENABLE_TESTABILITY = YES;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
||||
GCC_DYNAMIC_NO_PIC = NO;
|
||||
GCC_NO_COMMON_BLOCKS = YES;
|
||||
GCC_OPTIMIZATION_LEVEL = 0;
|
||||
GCC_PREPROCESSOR_DEFINITIONS = (
|
||||
"$(inherited)",
|
||||
"DEBUG=1",
|
||||
);
|
||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LIBNOVEL_BASE_URL = "https://v2.libnovel.kalekber.cc";
|
||||
MARKETING_VERSION = 1.0.0;
|
||||
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
|
||||
MTL_FAST_MATH = YES;
|
||||
ONLY_ACTIVE_ARCH = YES;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SDKROOT = iphoneos;
|
||||
STRING_CATALOG_GENERATE_SYMBOLS = YES;
|
||||
SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
|
||||
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
|
||||
SWIFT_VERSION = 5.10;
|
||||
};
|
||||
name = Debug;
|
||||
};
|
||||
D9977A0FA70F052FD0C126D3 /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
|
||||
CODE_SIGN_IDENTITY = "Apple Distribution";
|
||||
"CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution";
|
||||
CODE_SIGN_STYLE = Manual;
|
||||
CURRENT_PROJECT_VERSION = 1;
|
||||
DEVELOPMENT_TEAM = GHZXC6FVMU;
|
||||
"DEVELOPMENT_TEAM[sdk=iphoneos*]" = GHZXC6FVMU;
|
||||
GENERATE_INFOPLIST_FILE = NO;
|
||||
INFOPLIST_FILE = LibNovel/Resources/Info.plist;
|
||||
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.books";
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LD_RUNPATH_SEARCH_PATHS = (
|
||||
"$(inherited)",
|
||||
"@executable_path/Frameworks",
|
||||
);
|
||||
MARKETING_VERSION = 1.0.1;
|
||||
PRODUCT_BUNDLE_IDENTIFIER = com.kalekber.LibNovel;
|
||||
PROVISIONING_PROFILE = "af592c3a-f60b-4ac1-a14f-30b8a206017f";
|
||||
"PROVISIONING_PROFILE_SPECIFIER[sdk=iphoneos*]" = "LibNovel Distribution";
|
||||
SDKROOT = iphoneos;
|
||||
TARGETED_DEVICE_FAMILY = "1,2";
|
||||
};
|
||||
name = Release;
|
||||
};
|
||||
F9ED141CFB1E2EC6F5E9F089 /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
||||
CLANG_ANALYZER_NONNULL = YES;
|
||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
|
||||
CLANG_CXX_LIBRARY = "libc++";
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CLANG_ENABLE_OBJC_ARC = YES;
|
||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
||||
CLANG_WARN_COMMA = YES;
|
||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
||||
CLANG_WARN_EMPTY_BODY = YES;
|
||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
|
||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
COPY_PHASE_STRIP = NO;
|
||||
CURRENT_PROJECT_VERSION = 1000;
|
||||
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
|
||||
ENABLE_NS_ASSERTIONS = NO;
|
||||
ENABLE_PREVIEWS = YES;
|
||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
||||
ENABLE_USER_SCRIPT_SANDBOXING = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
||||
GCC_NO_COMMON_BLOCKS = YES;
|
||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
||||
IPHONEOS_DEPLOYMENT_TARGET = 17.0;
|
||||
LIBNOVEL_BASE_URL = "https://v2.libnovel.kalekber.cc";
|
||||
MARKETING_VERSION = 1.0.0;
|
||||
MTL_ENABLE_DEBUG_INFO = NO;
|
||||
MTL_FAST_MATH = YES;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SDKROOT = iphoneos;
|
||||
STRING_CATALOG_GENERATE_SYMBOLS = YES;
|
||||
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "";
|
||||
SWIFT_COMPILATION_MODE = wholemodule;
|
||||
SWIFT_OPTIMIZATION_LEVEL = "-O";
|
||||
SWIFT_VERSION = 5.10;
|
||||
};
|
||||
name = Release;
|
||||
};
|
||||
/* End XCBuildConfiguration section */
|
||||
|
||||
/* Begin XCConfigurationList section */
|
||||
29B2DE7267A3A4B2D89B32DA /* Build configuration list for PBXNativeTarget "LibNovel" */ = {
|
||||
isa = XCConfigurationList;
|
||||
buildConfigurations = (
|
||||
8098D4A97F989064EC71E5A1 /* Debug */,
|
||||
D9977A0FA70F052FD0C126D3 /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Debug;
|
||||
};
|
||||
964FF85B62FA35E819BE7661 /* Build configuration list for PBXNativeTarget "LibNovelTests" */ = {
|
||||
isa = XCConfigurationList;
|
||||
buildConfigurations = (
|
||||
49CBF0D367E562629E002A4B /* Debug */,
|
||||
428871329DC9E7B31FA1664B /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Debug;
|
||||
};
|
||||
D27899EE96A9AFCBBE62EA3C /* Build configuration list for PBXProject "LibNovel" */ = {
|
||||
isa = XCConfigurationList;
|
||||
buildConfigurations = (
|
||||
9C182367114E72FF84D54A2F /* Debug */,
|
||||
F9ED141CFB1E2EC6F5E9F089 /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Debug;
|
||||
};
|
||||
/* End XCConfigurationList section */
|
||||
|
||||
/* Begin XCRemoteSwiftPackageReference section */
|
||||
AFEF7128801A76181793EA9C /* XCRemoteSwiftPackageReference "Kingfisher" */ = {
|
||||
isa = XCRemoteSwiftPackageReference;
|
||||
repositoryURL = "https://github.com/onevcat/Kingfisher";
|
||||
requirement = {
|
||||
kind = upToNextMajorVersion;
|
||||
minimumVersion = 8.0.0;
|
||||
};
|
||||
};
|
||||
/* End XCRemoteSwiftPackageReference section */
|
||||
|
||||
/* Begin XCSwiftPackageProductDependency section */
|
||||
09584EAB68A07B47F876A062 /* Kingfisher */ = {
|
||||
isa = XCSwiftPackageProductDependency;
|
||||
package = AFEF7128801A76181793EA9C /* XCRemoteSwiftPackageReference "Kingfisher" */;
|
||||
productName = Kingfisher;
|
||||
};
|
||||
/* End XCSwiftPackageProductDependency section */
|
||||
};
|
||||
rootObject = A10A669C0C8B43078C0FEE9F /* Project object */;
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"originHash" : "ad75ae2d3b8d8b80d99635f65213a3c1092464aa54a86354f850b8317b6fa240",
|
||||
"pins" : [
|
||||
{
|
||||
"identity" : "kingfisher",
|
||||
"kind" : "remoteSourceControl",
|
||||
"location" : "https://github.com/onevcat/Kingfisher",
|
||||
"state" : {
|
||||
"revision" : "c92b84898e34ab46ff0dad86c02a0acbe2d87008",
|
||||
"version" : "8.8.0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"version" : 3
|
||||
}
|
||||
@@ -1,105 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Scheme
|
||||
LastUpgradeVersion = "2630"
|
||||
version = "1.3">
|
||||
<BuildAction
|
||||
parallelizeBuildables = "YES"
|
||||
buildImplicitDependencies = "YES">
|
||||
<BuildActionEntries>
|
||||
<BuildActionEntry
|
||||
buildForTesting = "YES"
|
||||
buildForRunning = "YES"
|
||||
buildForProfiling = "YES"
|
||||
buildForArchiving = "YES"
|
||||
buildForAnalyzing = "YES">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "D039EDECDE3998D8534BB680"
|
||||
BuildableName = "LibNovel.app"
|
||||
BlueprintName = "LibNovel"
|
||||
ReferencedContainer = "container:LibNovel.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildActionEntry>
|
||||
</BuildActionEntries>
|
||||
</BuildAction>
|
||||
<TestAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES">
|
||||
<MacroExpansion>
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "D039EDECDE3998D8534BB680"
|
||||
BuildableName = "LibNovel.app"
|
||||
BlueprintName = "LibNovel"
|
||||
ReferencedContainer = "container:LibNovel.xcodeproj">
|
||||
</BuildableReference>
|
||||
</MacroExpansion>
|
||||
<Testables>
|
||||
<TestableReference
|
||||
skipped = "NO"
|
||||
parallelizable = "NO">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "5E6D3E8266BFCF0AAF5EC79D"
|
||||
BuildableName = "LibNovelTests.xctest"
|
||||
BlueprintName = "LibNovelTests"
|
||||
ReferencedContainer = "container:LibNovel.xcodeproj">
|
||||
</BuildableReference>
|
||||
</TestableReference>
|
||||
</Testables>
|
||||
</TestAction>
|
||||
<LaunchAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
launchStyle = "0"
|
||||
useCustomWorkingDirectory = "NO"
|
||||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
allowLocationSimulation = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "D039EDECDE3998D8534BB680"
|
||||
BuildableName = "LibNovel.app"
|
||||
BlueprintName = "LibNovel"
|
||||
ReferencedContainer = "container:LibNovel.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
<EnvironmentVariables>
|
||||
<EnvironmentVariable
|
||||
key = "LIBNOVEL_BASE_URL"
|
||||
value = "["value": "https://v2.libnovel.kalekber.cc", "isEnabled": true]"
|
||||
isEnabled = "YES">
|
||||
</EnvironmentVariable>
|
||||
</EnvironmentVariables>
|
||||
</LaunchAction>
|
||||
<ProfileAction
|
||||
buildConfiguration = "Release"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES"
|
||||
savedToolIdentifier = ""
|
||||
useCustomWorkingDirectory = "NO"
|
||||
debugDocumentVersioning = "YES">
|
||||
<BuildableProductRunnable
|
||||
runnableDebuggingMode = "0">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "D039EDECDE3998D8534BB680"
|
||||
BuildableName = "LibNovel.app"
|
||||
BlueprintName = "LibNovel"
|
||||
ReferencedContainer = "container:LibNovel.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildableProductRunnable>
|
||||
</ProfileAction>
|
||||
<AnalyzeAction
|
||||
buildConfiguration = "Debug">
|
||||
</AnalyzeAction>
|
||||
<ArchiveAction
|
||||
buildConfiguration = "Release"
|
||||
revealArchiveInOrganizer = "YES">
|
||||
</ArchiveAction>
|
||||
</Scheme>
|
||||
@@ -1,16 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
struct ContentView: View {
|
||||
@EnvironmentObject var authStore: AuthStore
|
||||
@EnvironmentObject var audioPlayer: AudioPlayerService
|
||||
|
||||
var body: some View {
|
||||
Group {
|
||||
if authStore.isAuthenticated {
|
||||
RootTabView()
|
||||
} else {
|
||||
AuthView()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,15 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
@main
|
||||
struct LibNovelApp: App {
|
||||
@StateObject private var authStore = AuthStore()
|
||||
@StateObject private var audioPlayer = AudioPlayerService()
|
||||
|
||||
var body: some Scene {
|
||||
WindowGroup {
|
||||
ContentView()
|
||||
.environmentObject(authStore)
|
||||
.environmentObject(audioPlayer)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,105 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
// MARK: - Root tab container with persistent mini-player overlay
|
||||
|
||||
struct RootTabView: View {
|
||||
@EnvironmentObject var authStore: AuthStore
|
||||
@EnvironmentObject var audioPlayer: AudioPlayerService
|
||||
|
||||
@State private var selectedTab: Tab = .home
|
||||
@State private var showFullPlayer: Bool = false
|
||||
|
||||
/// Live drag offset while the user is dragging the full player down.
|
||||
@State private var fullPlayerDragOffset: CGFloat = 0
|
||||
|
||||
enum Tab: Hashable {
|
||||
case home, library, browse, profile
|
||||
}
|
||||
|
||||
/// Height of the mini player bar (progress line 2pt + vertical padding 20pt + content ~44pt)
|
||||
private let miniPlayerBarHeight: CGFloat = AppLayout.miniPlayerBarHeight
|
||||
|
||||
var body: some View {
|
||||
ZStack(alignment: .bottom) {
|
||||
TabView(selection: $selectedTab) {
|
||||
HomeView()
|
||||
.tabItem { Label("Home", systemImage: "house.fill") }
|
||||
.tag(Tab.home)
|
||||
|
||||
LibraryView()
|
||||
.tabItem { Label("Library", systemImage: "book.pages.fill") }
|
||||
.tag(Tab.library)
|
||||
|
||||
BrowseView()
|
||||
.tabItem { Label("Discover", systemImage: "sparkles") }
|
||||
.tag(Tab.browse)
|
||||
|
||||
ProfileView()
|
||||
.tabItem { Label("Profile", systemImage: "gear") }
|
||||
.tag(Tab.profile)
|
||||
}
|
||||
// Reserve space for the mini-player above the tab bar so scroll content
|
||||
// never slides beneath it.
|
||||
.safeAreaInset(edge: .bottom) {
|
||||
if audioPlayer.isActive {
|
||||
Color.clear.frame(height: miniPlayerBarHeight)
|
||||
}
|
||||
}
|
||||
|
||||
// Mini-player pinned above the tab bar (hidden while full player is open)
|
||||
if audioPlayer.isActive && !showFullPlayer {
|
||||
MiniPlayerView(showFullPlayer: $showFullPlayer)
|
||||
.padding(.bottom, tabBarHeight)
|
||||
.transition(.move(edge: .bottom).combined(with: .opacity))
|
||||
.animation(.spring(response: 0.35, dampingFraction: 0.8), value: audioPlayer.isActive)
|
||||
}
|
||||
|
||||
// Full player — slides up from the bottom as a custom overlay (not a sheet)
|
||||
// so it feels physically connected to the mini player bar.
|
||||
if showFullPlayer {
|
||||
FullPlayerView(onDismiss: {
|
||||
withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) {
|
||||
showFullPlayer = false
|
||||
fullPlayerDragOffset = 0
|
||||
}
|
||||
})
|
||||
.offset(y: max(fullPlayerDragOffset, 0))
|
||||
.gesture(
|
||||
DragGesture(minimumDistance: 10)
|
||||
.onChanged { value in
|
||||
if value.translation.height > 0 {
|
||||
// Rubberband slightly so it doesn't feel locked
|
||||
fullPlayerDragOffset = value.translation.height
|
||||
}
|
||||
}
|
||||
.onEnded { value in
|
||||
let velocity = value.predictedEndTranslation.height - value.translation.height
|
||||
if value.translation.height > 120 || velocity > 400 {
|
||||
withAnimation(.spring(response: 0.4, dampingFraction: 0.85)) {
|
||||
showFullPlayer = false
|
||||
fullPlayerDragOffset = 0
|
||||
}
|
||||
} else {
|
||||
withAnimation(.spring(response: 0.35, dampingFraction: 0.8)) {
|
||||
fullPlayerDragOffset = 0
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
.transition(.move(edge: .bottom))
|
||||
.animation(.spring(response: 0.45, dampingFraction: 0.85), value: showFullPlayer)
|
||||
.ignoresSafeArea()
|
||||
}
|
||||
}
|
||||
.animation(.spring(response: 0.45, dampingFraction: 0.85), value: showFullPlayer)
|
||||
}
|
||||
|
||||
// Approximate safe-area-aware tab bar height
|
||||
private var tabBarHeight: CGFloat {
|
||||
let window = UIApplication.shared.connectedScenes
|
||||
.compactMap { $0 as? UIWindowScene }
|
||||
.first?.windows.first(where: \.isKeyWindow)
|
||||
let bottomInset = window?.safeAreaInsets.bottom ?? 0
|
||||
return 49 + bottomInset // 49pt is the standard iOS tab bar height
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
// MARK: - App accent color (amber — mirrors Tailwind amber-500 #f59e0b)
|
||||
extension Color {
|
||||
static let amber = Color(red: 0.96, green: 0.62, blue: 0.04)
|
||||
}
|
||||
|
||||
extension ShapeStyle where Self == Color {
|
||||
static var amber: Color { .amber }
|
||||
}
|
||||
@@ -1,100 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
// MARK: - Navigation destination enum used across all tabs
|
||||
|
||||
enum NavDestination: Hashable {
|
||||
case book(String) // slug
|
||||
case chapter(String, Int) // slug + chapter number
|
||||
}
|
||||
|
||||
// MARK: - View extensions for shared navigation + error alert patterns
|
||||
|
||||
extension View {
|
||||
/// Registers the app-wide navigation destinations for NavDestination values.
|
||||
/// Apply once per NavigationStack instead of repeating the switch in every tab.
|
||||
func appNavigationDestination() -> some View {
|
||||
modifier(AppNavigationDestinationModifier())
|
||||
}
|
||||
|
||||
/// Presents a standard "Error" alert driven by an optional String binding.
|
||||
/// Dismissing the alert sets the binding back to nil.
|
||||
func errorAlert(_ error: Binding<String?>) -> some View {
|
||||
alert("Error", isPresented: Binding(
|
||||
get: { error.wrappedValue != nil },
|
||||
set: { if !$0 { error.wrappedValue = nil } }
|
||||
)) {
|
||||
Button("OK") { error.wrappedValue = nil }
|
||||
} message: {
|
||||
Text(error.wrappedValue ?? "")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Navigation destination modifier
|
||||
|
||||
private struct AppNavigationDestinationModifier: ViewModifier {
|
||||
@Namespace private var zoomNamespace
|
||||
|
||||
func body(content: Content) -> some View {
|
||||
if #available(iOS 18.0, *) {
|
||||
content
|
||||
.navigationDestination(for: NavDestination.self) { dest in
|
||||
switch dest {
|
||||
case .book(let slug):
|
||||
BookDetailView(slug: slug)
|
||||
.navigationTransition(.zoom(sourceID: slug, in: zoomNamespace))
|
||||
case .chapter(let slug, let n):
|
||||
ChapterReaderView(slug: slug, chapterNumber: n)
|
||||
}
|
||||
}
|
||||
// Expose namespace to child views via environment
|
||||
.environment(\.bookZoomNamespace, zoomNamespace)
|
||||
} else {
|
||||
content
|
||||
.navigationDestination(for: NavDestination.self) { dest in
|
||||
switch dest {
|
||||
case .book(let slug): BookDetailView(slug: slug)
|
||||
case .chapter(let slug, let n): ChapterReaderView(slug: slug, chapterNumber: n)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Environment key for zoom namespace
|
||||
|
||||
struct BookZoomNamespaceKey: EnvironmentKey {
|
||||
static var defaultValue: Namespace.ID? { nil }
|
||||
}
|
||||
|
||||
extension EnvironmentValues {
|
||||
var bookZoomNamespace: Namespace.ID? {
|
||||
get { self[BookZoomNamespaceKey.self] }
|
||||
set { self[BookZoomNamespaceKey.self] = newValue }
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Cover card zoom source modifier
|
||||
|
||||
/// Apply this to any cover image that should be a zoom source for book navigation.
|
||||
/// Falls back to a no-op on iOS 17 or when no namespace is available.
|
||||
struct BookCoverZoomSource: ViewModifier {
|
||||
let slug: String
|
||||
@Environment(\.bookZoomNamespace) private var namespace
|
||||
|
||||
func body(content: Content) -> some View {
|
||||
if #available(iOS 18.0, *), let ns = namespace {
|
||||
content.matchedTransitionSource(id: slug, in: ns)
|
||||
} else {
|
||||
content
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
extension View {
|
||||
/// Marks a cover image as the zoom source for a book's navigation transition.
|
||||
func bookCoverZoomSource(slug: String) -> some View {
|
||||
modifier(BookCoverZoomSource(slug: slug))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
// MARK: - String helpers for display purposes
|
||||
|
||||
extension String {
|
||||
/// Strips trailing relative-date suffixes (e.g. "2 years ago", "3 days ago",
|
||||
/// or "(One)4 years ago" where the number is attached without a preceding space).
|
||||
func strippingTrailingDate() -> String {
|
||||
let units = ["second", "minute", "hour", "day", "week", "month", "year"]
|
||||
let lower = self.lowercased()
|
||||
for unit in units {
|
||||
for suffix in [unit + "s ago", unit + " ago"] {
|
||||
guard let suffixRange = lower.range(of: suffix, options: .backwards) else { continue }
|
||||
// Everything before the suffix
|
||||
let before = String(self[self.startIndex ..< suffixRange.lowerBound])
|
||||
let trimmed = before.trimmingCharacters(in: .whitespaces)
|
||||
// Strip trailing digits (the numeric count, which may be attached without a space)
|
||||
var result = trimmed
|
||||
while let last = result.last, last.isNumber {
|
||||
result.removeLast()
|
||||
}
|
||||
result = result.trimmingCharacters(in: .whitespaces)
|
||||
if result != trimmed {
|
||||
// We actually stripped some digits — return cleaned result
|
||||
return result
|
||||
}
|
||||
// Fallback: number preceded by space
|
||||
if let spaceIdx = trimmed.lastIndex(of: " ") {
|
||||
let potentialNum = String(trimmed[trimmed.index(after: spaceIdx)...])
|
||||
if Int(potentialNum) != nil {
|
||||
return String(trimmed[trimmed.startIndex ..< spaceIdx])
|
||||
.trimmingCharacters(in: .whitespaces)
|
||||
}
|
||||
} else if Int(trimmed) != nil {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
}
|
||||
return self
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - App-wide layout constants
|
||||
|
||||
enum AppLayout {
|
||||
/// Height of the persistent mini-player bar:
|
||||
/// 12pt vertical padding (top) + 56pt cover height + 12pt vertical padding (bottom) + 12pt horizontal margin.
|
||||
static let miniPlayerBarHeight: CGFloat = 92
|
||||
}
|
||||
@@ -1,261 +0,0 @@
|
||||
import Foundation
|
||||
import SwiftUI
|
||||
|
||||
// MARK: - Book
|
||||
|
||||
struct Book: Identifiable, Codable, Hashable {
|
||||
let id: String
|
||||
let slug: String
|
||||
let title: String
|
||||
let author: String
|
||||
let cover: String
|
||||
let status: String
|
||||
let genres: [String]
|
||||
let summary: String
|
||||
let totalChapters: Int
|
||||
let sourceURL: String
|
||||
let ranking: Int
|
||||
let metaUpdated: String
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case id, slug, title, author, cover, status, genres, summary
|
||||
case totalChapters = "total_chapters"
|
||||
case sourceURL = "source_url"
|
||||
case ranking
|
||||
case metaUpdated = "meta_updated"
|
||||
}
|
||||
|
||||
// PocketBase returns genres as either a JSON string array or a real array
|
||||
init(from decoder: Decoder) throws {
|
||||
let container = try decoder.container(keyedBy: CodingKeys.self)
|
||||
id = try container.decode(String.self, forKey: .id)
|
||||
slug = try container.decode(String.self, forKey: .slug)
|
||||
title = try container.decode(String.self, forKey: .title)
|
||||
author = try container.decode(String.self, forKey: .author)
|
||||
cover = try container.decodeIfPresent(String.self, forKey: .cover) ?? ""
|
||||
status = try container.decodeIfPresent(String.self, forKey: .status) ?? ""
|
||||
totalChapters = try container.decodeIfPresent(Int.self, forKey: .totalChapters) ?? 0
|
||||
sourceURL = try container.decodeIfPresent(String.self, forKey: .sourceURL) ?? ""
|
||||
ranking = try container.decodeIfPresent(Int.self, forKey: .ranking) ?? 0
|
||||
metaUpdated = try container.decodeIfPresent(String.self, forKey: .metaUpdated) ?? ""
|
||||
summary = try container.decodeIfPresent(String.self, forKey: .summary) ?? ""
|
||||
|
||||
// genres is sometimes a JSON-encoded string, sometimes a real array
|
||||
if let arr = try? container.decode([String].self, forKey: .genres) {
|
||||
genres = arr
|
||||
} else if let str = try? container.decode(String.self, forKey: .genres),
|
||||
let data = str.data(using: .utf8),
|
||||
let arr = try? JSONDecoder().decode([String].self, from: data) {
|
||||
genres = arr
|
||||
} else {
|
||||
genres = []
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - ChapterIndex
|
||||
|
||||
struct ChapterIndex: Identifiable, Codable, Hashable {
|
||||
let id: String
|
||||
let slug: String
|
||||
let number: Int
|
||||
let title: String
|
||||
let dateLabel: String
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case id, slug, number, title
|
||||
case dateLabel = "date_label"
|
||||
}
|
||||
}
|
||||
|
||||
struct ChapterIndexBrief: Codable, Hashable {
|
||||
let number: Int
|
||||
let title: String
|
||||
}
|
||||
|
||||
// MARK: - User Settings
|
||||
|
||||
struct UserSettings: Codable {
|
||||
var id: String?
|
||||
var autoNext: Bool
|
||||
var voice: String
|
||||
var speed: Double
|
||||
|
||||
// Server sends/expects camelCase: { autoNext, voice, speed }
|
||||
// (No CodingKeys needed — Swift synthesises the same names by default)
|
||||
|
||||
static let `default` = UserSettings(id: nil, autoNext: false, voice: "af_bella", speed: 1.0)
|
||||
}
|
||||
|
||||
// MARK: - Reading Display Settings (local only — stored in UserDefaults)
|
||||
|
||||
enum ReaderTheme: String, CaseIterable, Codable {
|
||||
case white, sepia, night
|
||||
|
||||
var backgroundColor: Color {
|
||||
switch self {
|
||||
case .white: return Color(.sRGB, white: 1.0, opacity: 1)
|
||||
case .sepia: return Color(red: 0.97, green: 0.93, blue: 0.82)
|
||||
case .night: return Color(red: 0.10, green: 0.10, blue: 0.12)
|
||||
}
|
||||
}
|
||||
|
||||
var textColor: Color {
|
||||
switch self {
|
||||
case .white: return Color(.sRGB, white: 0.1, opacity: 1)
|
||||
case .sepia: return Color(red: 0.25, green: 0.18, blue: 0.08)
|
||||
case .night: return Color(red: 0.85, green: 0.85, blue: 0.87)
|
||||
}
|
||||
}
|
||||
|
||||
var colorScheme: ColorScheme? {
|
||||
switch self {
|
||||
case .white: return nil // follows system
|
||||
case .sepia: return .light
|
||||
case .night: return .dark
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ReaderFont: String, CaseIterable, Codable {
|
||||
case system = "System"
|
||||
case georgia = "Georgia"
|
||||
case newYork = "New York"
|
||||
|
||||
var fontName: String? {
|
||||
switch self {
|
||||
case .system: return nil
|
||||
case .georgia: return "Georgia"
|
||||
case .newYork: return "NewYorkMedium-Regular"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ReaderSettings: Codable, Equatable {
|
||||
var fontSize: CGFloat
|
||||
var lineSpacing: CGFloat
|
||||
var font: ReaderFont
|
||||
var theme: ReaderTheme
|
||||
var scrollMode: Bool
|
||||
|
||||
static let `default` = ReaderSettings(
|
||||
fontSize: 17,
|
||||
lineSpacing: 1.7,
|
||||
font: .system,
|
||||
theme: .white,
|
||||
scrollMode: false
|
||||
)
|
||||
|
||||
static let userDefaultsKey = "readerSettings"
|
||||
|
||||
static func load() -> ReaderSettings {
|
||||
guard let data = UserDefaults.standard.data(forKey: userDefaultsKey),
|
||||
let decoded = try? JSONDecoder().decode(ReaderSettings.self, from: data)
|
||||
else { return .default }
|
||||
return decoded
|
||||
}
|
||||
|
||||
func save() {
|
||||
if let data = try? JSONEncoder().encode(self) {
|
||||
UserDefaults.standard.set(data, forKey: ReaderSettings.userDefaultsKey)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - User
|
||||
|
||||
struct AppUser: Codable, Identifiable {
|
||||
let id: String
|
||||
let username: String
|
||||
let role: String
|
||||
let created: String
|
||||
let avatarURL: String?
|
||||
|
||||
var isAdmin: Bool { role == "admin" }
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case id, username, role, created
|
||||
case avatarURL = "avatar_url"
|
||||
}
|
||||
|
||||
init(from decoder: Decoder) throws {
|
||||
let c = try decoder.container(keyedBy: CodingKeys.self)
|
||||
id = try c.decode(String.self, forKey: .id)
|
||||
username = try c.decode(String.self, forKey: .username)
|
||||
role = try c.decodeIfPresent(String.self, forKey: .role) ?? "user"
|
||||
created = try c.decodeIfPresent(String.self, forKey: .created) ?? ""
|
||||
avatarURL = try c.decodeIfPresent(String.self, forKey: .avatarURL)
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Ranking
|
||||
|
||||
struct RankingItem: Codable, Identifiable {
|
||||
var id: String { slug }
|
||||
let rank: Int
|
||||
let slug: String
|
||||
let title: String
|
||||
let author: String
|
||||
let cover: String
|
||||
let status: String
|
||||
let genres: [String]
|
||||
let sourceURL: String
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case rank, slug, title, author, cover, status, genres
|
||||
case sourceURL = "source_url"
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Home
|
||||
|
||||
struct ContinueReadingItem: Identifiable {
|
||||
var id: String { book.id }
|
||||
let book: Book
|
||||
let chapter: Int
|
||||
}
|
||||
|
||||
struct HomeStats: Codable {
|
||||
let totalBooks: Int
|
||||
let totalChapters: Int
|
||||
let booksInProgress: Int
|
||||
}
|
||||
|
||||
// MARK: - Session
|
||||
|
||||
struct UserSession: Codable, Identifiable {
|
||||
let id: String
|
||||
let userAgent: String
|
||||
let ip: String
|
||||
let createdAt: String
|
||||
let lastSeen: String
|
||||
var isCurrent: Bool
|
||||
|
||||
enum CodingKeys: String, CodingKey {
|
||||
case id
|
||||
case userAgent = "user_agent"
|
||||
case ip
|
||||
case createdAt = "created_at"
|
||||
case lastSeen = "last_seen"
|
||||
case isCurrent = "is_current"
|
||||
}
|
||||
}
|
||||
|
||||
struct PreviewChapter: Codable, Identifiable {
|
||||
var id: Int { number }
|
||||
let number: Int
|
||||
let title: String
|
||||
let url: String
|
||||
}
|
||||
|
||||
struct BookBrief: Codable {
|
||||
let slug: String
|
||||
let title: String
|
||||
let cover: String
|
||||
}
|
||||
|
||||
// MARK: - Audio
|
||||
|
||||
enum NextPrefetchStatus {
|
||||
case none, prefetching, prefetched, failed
|
||||
}
|
||||
@@ -1,493 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
// MARK: - API Client
|
||||
// Communicates with the SvelteKit UI server (not directly with the Go scraper).
|
||||
// The SvelteKit layer handles auth, PocketBase queries, and MinIO presigning.
|
||||
// For the iOS app we talk to the same /api/* endpoints the web UI uses,
|
||||
// so we reuse the exact same HMAC-cookie auth flow.
|
||||
|
||||
actor APIClient {
|
||||
static let shared = APIClient()
|
||||
|
||||
var baseURL: URL
|
||||
private var authCookie: String? // raw "libnovel_auth=<token>" header value
|
||||
|
||||
// URLSession with persistent cookie storage
|
||||
private let session: URLSession = {
|
||||
let config = URLSessionConfiguration.default
|
||||
config.httpCookieAcceptPolicy = .always
|
||||
config.httpShouldSetCookies = true
|
||||
config.httpCookieStorage = HTTPCookieStorage.shared
|
||||
return URLSession(configuration: config)
|
||||
}()
|
||||
|
||||
private init() {
|
||||
// Default: point at the UI server. Override via Settings bundle or compile flag.
|
||||
let urlString = Bundle.main.object(forInfoDictionaryKey: "LIBNOVEL_BASE_URL") as? String
|
||||
?? "https://v2.libnovel.kalekber.cc"
|
||||
baseURL = URL(string: urlString)!
|
||||
}
|
||||
|
||||
// MARK: - Auth cookie management
|
||||
|
||||
func setAuthCookie(_ value: String?) {
|
||||
authCookie = value
|
||||
if let value {
|
||||
// Also inject into shared cookie storage so redirects carry the cookie
|
||||
let cookieProps: [HTTPCookiePropertyKey: Any] = [
|
||||
.name: "libnovel_auth",
|
||||
.value: value,
|
||||
.domain: baseURL.host ?? "localhost",
|
||||
.path: "/"
|
||||
]
|
||||
if let cookie = HTTPCookie(properties: cookieProps) {
|
||||
HTTPCookieStorage.shared.setCookie(cookie)
|
||||
}
|
||||
} else {
|
||||
// Clear
|
||||
let cookieStorage = HTTPCookieStorage.shared
|
||||
cookieStorage.cookies(for: baseURL)?.forEach { cookieStorage.deleteCookie($0) }
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Low-level request builder
|
||||
|
||||
private func makeRequest(_ path: String, method: String = "GET", body: Encodable? = nil) throws -> URLRequest {
|
||||
// Build URL by appending the path string directly to the base URL string.
|
||||
// appendingPathComponent() percent-encodes slashes, which breaks multi-segment
|
||||
// paths like /api/chapter/slug/1. URL(string:) preserves slashes correctly.
|
||||
let urlString = baseURL.absoluteString.trimmingCharacters(in: CharacterSet(charactersIn: "/"))
|
||||
+ "/" + path.trimmingCharacters(in: CharacterSet(charactersIn: "/"))
|
||||
guard let url = URL(string: urlString) else {
|
||||
throw APIError.invalidResponse
|
||||
}
|
||||
var req = URLRequest(url: url)
|
||||
req.httpMethod = method
|
||||
req.setValue("application/json", forHTTPHeaderField: "Accept")
|
||||
if let body {
|
||||
req.setValue("application/json", forHTTPHeaderField: "Content-Type")
|
||||
req.httpBody = try JSONEncoder().encode(body)
|
||||
}
|
||||
return req
|
||||
}
|
||||
|
||||
// MARK: - Generic fetch
|
||||
|
||||
func fetch<T: Decodable>(_ path: String, method: String = "GET", body: Encodable? = nil) async throws -> T {
|
||||
let req = try makeRequest(path, method: method, body: body)
|
||||
let (data, response) = try await session.data(for: req)
|
||||
guard let http = response as? HTTPURLResponse else {
|
||||
throw APIError.invalidResponse
|
||||
}
|
||||
let rawBody = String(data: data, encoding: .utf8) ?? "<non-utf8 data, \(data.count) bytes>"
|
||||
guard (200..<300).contains(http.statusCode) else {
|
||||
throw APIError.httpError(http.statusCode, rawBody)
|
||||
}
|
||||
do {
|
||||
return try JSONDecoder.iso8601.decode(T.self, from: data)
|
||||
} catch {
|
||||
throw APIError.decodingError(error)
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - Auth
|
||||
|
||||
struct LoginRequest: Encodable {
|
||||
let username: String
|
||||
let password: String
|
||||
}
|
||||
|
||||
struct LoginResponse: Decodable {
|
||||
let token: String
|
||||
let user: AppUser
|
||||
}
|
||||
|
||||
func login(username: String, password: String) async throws -> LoginResponse {
|
||||
try await fetch("/api/auth/login", method: "POST",
|
||||
body: LoginRequest(username: username, password: password))
|
||||
}
|
||||
|
||||
func register(username: String, password: String) async throws -> LoginResponse {
|
||||
try await fetch("/api/auth/register", method: "POST",
|
||||
body: LoginRequest(username: username, password: password))
|
||||
}
|
||||
|
||||
func logout() async throws {
|
||||
let _: EmptyResponse = try await fetch("/api/auth/logout", method: "POST")
|
||||
await setAuthCookie(nil)
|
||||
}
|
||||
|
||||
// MARK: - Home
|
||||
|
||||
func homeData() async throws -> HomeDataResponse {
|
||||
try await fetch("/api/home")
|
||||
}
|
||||
|
||||
// MARK: - Library
|
||||
|
||||
func library() async throws -> [LibraryItem] {
|
||||
try await fetch("/api/library")
|
||||
}
|
||||
|
||||
func saveBook(slug: String) async throws {
|
||||
let _: EmptyResponse = try await fetch("/api/library/\(slug)", method: "POST")
|
||||
}
|
||||
|
||||
func unsaveBook(slug: String) async throws {
|
||||
let _: EmptyResponse = try await fetch("/api/library/\(slug)", method: "DELETE")
|
||||
}
|
||||
|
||||
// MARK: - Book Detail
|
||||
|
||||
func bookDetail(slug: String) async throws -> BookDetailResponse {
|
||||
try await fetch("/api/book/\(slug)")
|
||||
}
|
||||
|
||||
// MARK: - Chapter
|
||||
|
||||
func chapterContent(slug: String, chapter: Int) async throws -> ChapterResponse {
|
||||
try await fetch("/api/chapter/\(slug)/\(chapter)")
|
||||
}
|
||||
|
||||
// MARK: - Browse
|
||||
|
||||
func browse(page: Int, genre: String = "all", sort: String = "popular", status: String = "all") async throws -> BrowseResponse {
|
||||
let query = "?page=\(page)&genre=\(genre)&sort=\(sort)&status=\(status)"
|
||||
return try await fetch("/api/browse-page\(query)")
|
||||
}
|
||||
|
||||
func search(query: String) async throws -> SearchResponse {
|
||||
let encoded = query.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? query
|
||||
return try await fetch("/api/search?q=\(encoded)")
|
||||
}
|
||||
|
||||
func ranking() async throws -> [RankingItem] {
|
||||
try await fetch("/api/ranking")
|
||||
}
|
||||
|
||||
// MARK: - Progress
|
||||
|
||||
func progress() async throws -> [ProgressEntry] {
|
||||
try await fetch("/api/progress")
|
||||
}
|
||||
|
||||
func setProgress(slug: String, chapter: Int) async throws {
|
||||
struct Body: Encodable { let chapter: Int }
|
||||
let _: EmptyResponse = try await fetch("/api/progress/\(slug)", method: "POST", body: Body(chapter: chapter))
|
||||
}
|
||||
|
||||
func audioTime(slug: String, chapter: Int) async throws -> Double? {
|
||||
struct Response: Decodable { let audioTime: Double?; enum CodingKeys: String, CodingKey { case audioTime = "audio_time" } }
|
||||
let r: Response = try await fetch("/api/progress/audio-time?slug=\(slug)&chapter=\(chapter)")
|
||||
return r.audioTime
|
||||
}
|
||||
|
||||
func setAudioTime(slug: String, chapter: Int, time: Double) async throws {
|
||||
struct Body: Encodable { let slug: String; let chapter: Int; let audioTime: Double; enum CodingKeys: String, CodingKey { case slug, chapter; case audioTime = "audio_time" } }
|
||||
let _: EmptyResponse = try await fetch("/api/progress/audio-time", method: "PATCH", body: Body(slug: slug, chapter: chapter, audioTime: time))
|
||||
}
|
||||
|
||||
// MARK: - Audio
|
||||
|
||||
func triggerAudio(slug: String, chapter: Int, voice: String, speed: Double) async throws -> AudioTriggerResponse {
|
||||
struct Body: Encodable { let voice: String; let speed: Double }
|
||||
return try await fetch("/api/audio/\(slug)/\(chapter)", method: "POST", body: Body(voice: voice, speed: speed))
|
||||
}
|
||||
|
||||
/// Poll GET /api/audio/status/{slug}/{n}?voice=... until the job is done or failed.
|
||||
/// Returns the presigned/proxy URL on success, throws on failure or cancellation.
|
||||
func pollAudioStatus(slug: String, chapter: Int, voice: String) async throws -> String {
|
||||
let path = "/api/audio/status/\(slug)/\(chapter)?voice=\(voice)"
|
||||
struct StatusResponse: Decodable {
|
||||
let status: String
|
||||
let url: String?
|
||||
let error: String?
|
||||
}
|
||||
while true {
|
||||
try Task.checkCancellation()
|
||||
let r: StatusResponse = try await fetch(path)
|
||||
switch r.status {
|
||||
case "done":
|
||||
guard let url = r.url, !url.isEmpty else {
|
||||
throw URLError(.badServerResponse)
|
||||
}
|
||||
return url
|
||||
case "failed":
|
||||
throw NSError(
|
||||
domain: "AudioGeneration",
|
||||
code: 0,
|
||||
userInfo: [NSLocalizedDescriptionKey: r.error ?? "Audio generation failed"]
|
||||
)
|
||||
default:
|
||||
// pending / generating / idle — keep polling
|
||||
try await Task.sleep(nanoseconds: 2_000_000_000) // 2 s
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetch a presigned URL for an already-generated chapter audio file via
/// GET /api/presign/audio. Throws if the object does not exist yet.
func presignAudio(slug: String, chapter: Int, voice: String) async throws -> String {
    struct Response: Decodable { let url: String }
    // Fix: percent-encode query values so slugs/voices with reserved
    // characters survive string interpolation into the URL.
    let encodedSlug = slug.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? slug
    let encodedVoice = voice.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? voice
    let r: Response = try await fetch("/api/presign/audio?slug=\(encodedSlug)&chapter=\(chapter)&voice=\(encodedVoice)")
    return r.url
}
|
||||
|
||||
/// Fetch a presigned URL for a TTS voice's preview sample via
/// GET /api/presign/voice-sample.
func presignVoiceSample(voice: String) async throws -> String {
    struct Response: Decodable { let url: String }
    // Fix: percent-encode the voice value so names with reserved characters
    // survive query-string interpolation.
    let encodedVoice = voice.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? voice
    let r: Response = try await fetch("/api/presign/voice-sample?voice=\(encodedVoice)")
    return r.url
}
|
||||
|
||||
/// List the available TTS voice identifiers (GET /api/voices).
func voices() async throws -> [String] {
    struct Payload: Decodable {
        let voices: [String]
    }
    let decoded: Payload = try await fetch("/api/voices")
    return decoded.voices
}
|
||||
|
||||
// MARK: - Settings
|
||||
|
||||
/// Fetch the current user's settings (GET /api/settings).
func settings() async throws -> UserSettings {
    return try await fetch("/api/settings")
}
|
||||
|
||||
/// Replace the current user's settings (PUT /api/settings).
func updateSettings(_ settings: UserSettings) async throws {
    // Response body is irrelevant; decode into the empty placeholder.
    _ = try await fetch("/api/settings", method: "PUT", body: settings) as EmptyResponse
}
|
||||
|
||||
// MARK: - Sessions
|
||||
|
||||
/// List all active sessions for the current user (GET /api/sessions).
func sessions() async throws -> [UserSession] {
    struct Payload: Decodable {
        let sessions: [UserSession]
    }
    let decoded: Payload = try await fetch("/api/sessions")
    return decoded.sessions
}
|
||||
|
||||
/// Revoke a single session by id (DELETE /api/sessions/{id}).
func revokeSession(id: String) async throws {
    // Response body is irrelevant; decode into the empty placeholder.
    _ = try await fetch("/api/sessions/\(id)", method: "DELETE") as EmptyResponse
}
|
||||
|
||||
// MARK: - Avatar
|
||||
|
||||
/// Server reply to POST /api/profile/avatar: where to PUT the image bytes
/// (a presigned MinIO URL) plus the object key to record afterwards.
struct AvatarPresignResponse: Decodable {
    let uploadURL: String
    let key: String
    // Server sends snake_case for the URL field.
    enum CodingKeys: String, CodingKey { case uploadURL = "upload_url"; case key }
}
|
||||
|
||||
/// Server reply to PATCH /api/profile/avatar: presigned GET URL for the
/// recorded avatar (nil when none is set).
struct AvatarResponse: Decodable {
    let avatarURL: String?
    // Server sends snake_case.
    enum CodingKeys: String, CodingKey { case avatarURL = "avatar_url" }
}
|
||||
|
||||
/// Upload a profile avatar using a two-step presigned PUT flow:
/// 1. POST /api/profile/avatar → get a presigned PUT URL + object key
/// 2. PUT image bytes directly to MinIO via the presigned URL
/// 3. PATCH /api/profile/avatar with the key to record it in PocketBase
/// Returns the presigned GET URL for the uploaded avatar.
func uploadAvatar(_ imageData: Data, mimeType: String = "image/jpeg") async throws -> String? {
    // Step 1: ask the SvelteKit server for a presigned PUT URL + object key.
    let presigned: AvatarPresignResponse = try await fetch(
        "/api/profile/avatar",
        method: "POST",
        body: ["mime_type": mimeType]
    )

    // Step 2: push the raw image bytes straight to MinIO.
    guard let uploadTarget = URL(string: presigned.uploadURL) else {
        throw APIError.invalidResponse
    }
    var uploadRequest = URLRequest(url: uploadTarget)
    uploadRequest.httpMethod = "PUT"
    uploadRequest.setValue(mimeType, forHTTPHeaderField: "Content-Type")
    uploadRequest.httpBody = imageData

    let (_, uploadResponse) = try await session.data(for: uploadRequest)
    // A non-HTTP response yields code 0, which also fails the range check.
    let statusCode = (uploadResponse as? HTTPURLResponse)?.statusCode ?? 0
    guard (200..<300).contains(statusCode) else {
        throw APIError.httpError(statusCode, "MinIO PUT failed")
    }

    // Step 3: record the object key in PocketBase; the server answers with
    // a presigned GET URL for the freshly uploaded avatar.
    let recorded: AvatarResponse = try await fetch(
        "/api/profile/avatar",
        method: "PATCH",
        body: ["key": presigned.key]
    )
    return recorded.avatarURL
}
|
||||
}
|
||||
|
||||
// MARK: - Response types
|
||||
|
||||
/// Home-screen payload: continue-reading shelf, recently updated books, stats.
struct HomeDataResponse: Decodable {
    /// A book paired with the chapter the user left off at.
    struct ContinueItem: Decodable {
        let book: Book
        let chapter: Int
    }
    let continueReading: [ContinueItem]
    let recentlyUpdated: [Book]
    let stats: HomeStats

    // Server uses snake_case field names.
    enum CodingKeys: String, CodingKey {
        case continueReading = "continue_reading"
        case recentlyUpdated = "recently_updated"
        case stats
    }
}
|
||||
|
||||
/// One saved-library entry: the book plus per-user bookkeeping.
struct LibraryItem: Decodable, Identifiable {
    // Identifiable keyed by the underlying book's id.
    var id: String { book.id }
    let book: Book
    // Timestamp kept as the server's string form — TODO confirm format (ISO-8601?).
    let savedAt: String
    let lastChapter: Int?

    // Server uses snake_case field names.
    enum CodingKeys: String, CodingKey {
        case book
        case savedAt = "saved_at"
        case lastChapter = "last_chapter"
    }
}
|
||||
|
||||
/// Book-detail payload: the book, its chapter index, and per-user state
/// (library membership, saved flag, last chapter read).
struct BookDetailResponse: Decodable {
    let book: Book
    let chapters: [ChapterIndex]
    let previewChapters: [PreviewChapter]?
    let inLib: Bool
    let saved: Bool
    let lastChapter: Int?

    // Server uses snake_case for multi-word fields.
    enum CodingKeys: String, CodingKey {
        case book, chapters
        case previewChapters = "preview_chapters"
        case inLib = "in_lib"
        case saved
        case lastChapter = "last_chapter"
    }
}
|
||||
|
||||
/// Chapter-reader payload: rendered HTML plus navigation data
/// (prev/next numbers, full chapter list) and the available TTS voices.
struct ChapterResponse: Decodable {
    let book: BookBrief
    let chapter: ChapterIndex
    let html: String
    let voices: [String]
    let prev: Int?
    let next: Int?
    let chapters: [ChapterIndexBrief]
    let isPreview: Bool

    // Server uses snake_case for multi-word fields.
    enum CodingKeys: String, CodingKey {
        case book, chapter, html, voices, prev, next, chapters
        case isPreview = "is_preview"
    }
}
|
||||
|
||||
/// One page of browse results.
struct BrowseResponse: Decodable {
    let novels: [BrowseNovel]
    let page: Int
    // NOTE(review): no CodingKeys mapping here, so this decodes the literal
    // key "hasNext". Sibling responses map snake_case (e.g. "is_preview",
    // "local_count") — confirm which spelling the server actually emits.
    let hasNext: Bool
}
|
||||
|
||||
/// A novel card from browse/search listings. Every field except `title` is
/// optional on the wire and defaults to an empty value (see init(from:)).
struct BrowseNovel: Decodable, Identifiable, Hashable {
    // Stable identity: prefer the slug, fall back to the source URL.
    var id: String { slug.isEmpty ? url : slug }
    let slug: String
    let title: String
    let cover: String
    let rank: String
    let rating: String
    let chapters: String
    let url: String
    let author: String
    let status: String
    let genres: [String]

    enum CodingKeys: String, CodingKey {
        case slug, title, cover, rank, rating, chapters, url, author, status, genres
    }

    /// Lenient decoding: only `title` is required; every other field falls
    /// back to "" (or [] for genres) when absent, so partially-scraped
    /// listings still decode.
    init(from decoder: Decoder) throws {
        let c = try decoder.container(keyedBy: CodingKeys.self)
        slug = try c.decodeIfPresent(String.self, forKey: .slug) ?? ""
        title = try c.decode(String.self, forKey: .title)
        cover = try c.decodeIfPresent(String.self, forKey: .cover) ?? ""
        rank = try c.decodeIfPresent(String.self, forKey: .rank) ?? ""
        rating = try c.decodeIfPresent(String.self, forKey: .rating) ?? ""
        chapters = try c.decodeIfPresent(String.self, forKey: .chapters) ?? ""
        url = try c.decodeIfPresent(String.self, forKey: .url) ?? ""
        author = try c.decodeIfPresent(String.self, forKey: .author) ?? ""
        status = try c.decodeIfPresent(String.self, forKey: .status) ?? ""
        genres = try c.decodeIfPresent([String].self, forKey: .genres) ?? []
    }
}
|
||||
|
||||
/// Search payload: merged result list plus how many hits came from the local
/// database vs the remote source.
struct SearchResponse: Decodable {
    let results: [BrowseNovel]
    let localCount: Int
    let remoteCount: Int

    // Server uses snake_case field names.
    enum CodingKeys: String, CodingKey {
        case results
        case localCount = "local_count"
        case remoteCount = "remote_count"
    }
}
|
||||
|
||||
/// Returned by POST /api/audio/{slug}/{n}.
/// - 202 Accepted: job enqueued → poll via pollAudioStatus()
/// - 200 OK: audio already cached → url is ready to play
/// The two cases are distinguished by which optional fields are populated.
struct AudioTriggerResponse: Decodable {
    let jobId: String? // present on 202
    let status: String? // present on 202: "pending" | "generating"
    let url: String? // present on 200: proxy URL ready to play
    let filename: String? // present on 200

    enum CodingKeys: String, CodingKey {
        case jobId = "job_id"
        case status, url, filename
    }

    /// True when the server accepted the request and created an async job.
    var isAsync: Bool { jobId != nil }
}
|
||||
|
||||
/// One reading-progress record, keyed by book slug.
struct ProgressEntry: Decodable, Identifiable {
    // Identifiable keyed by the slug (one entry per book).
    var id: String { slug }
    let slug: String
    let chapter: Int
    // Audio position in seconds, nil when the chapter was only read.
    let audioTime: Double?
    // Last-updated timestamp as the server's string form.
    let updated: String

    // Server uses snake_case for the audio-time field.
    enum CodingKeys: String, CodingKey {
        case slug, chapter, updated
        case audioTime = "audio_time"
    }
}
|
||||
|
||||
/// Placeholder for endpoints whose response body is ignored; decodes from any JSON object.
struct EmptyResponse: Decodable {}
|
||||
|
||||
// MARK: - API Error

/// Errors surfaced by APIClient calls, with user-presentable descriptions.
enum APIError: LocalizedError {
    case invalidResponse
    case httpError(Int, String)
    case decodingError(Error)
    case unauthorized
    case networkError(Error)

    /// Human-readable message used by LocalizedError.
    var errorDescription: String? {
        switch self {
        case .invalidResponse:
            return "Invalid server response"
        case .httpError(let statusCode, let message):
            return "HTTP \(statusCode): \(message)"
        case .decodingError(let underlying):
            return "Decode error: \(underlying.localizedDescription)"
        case .unauthorized:
            return "Not authenticated"
        case .networkError(let underlying):
            return underlying.localizedDescription
        }
    }
}
|
||||
|
||||
// MARK: - JSONDecoder helper

extension JSONDecoder {
    /// Shared decoder configured for ISO-8601 date strings.
    static let iso8601: JSONDecoder = {
        let decoder = JSONDecoder()
        decoder.dateDecodingStrategy = .iso8601
        return decoder
    }()
}
|
||||
@@ -1,12 +0,0 @@
|
||||
{
|
||||
"colors": [
|
||||
{
|
||||
"color": {
|
||||
"color-space": "srgb",
|
||||
"components": { "alpha": "1.000", "blue": "0.040", "green": "0.620", "red": "0.960" }
|
||||
},
|
||||
"idiom": "universal"
|
||||
}
|
||||
],
|
||||
"info": { "author": "xcode", "version": 1 }
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
{
|
||||
"images" : [
|
||||
{
|
||||
"filename" : "icon-1024.png",
|
||||
"idiom" : "universal",
|
||||
"platform" : "ios",
|
||||
"size" : "1024x1024"
|
||||
}
|
||||
],
|
||||
"info" : {
|
||||
"author" : "xcode",
|
||||
"version" : 1
|
||||
}
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 6.6 KiB |
@@ -1,3 +0,0 @@
|
||||
{
|
||||
"info": { "author": "xcode", "version": 1 }
|
||||
}
|
||||
@@ -1,43 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundleDisplayName</key>
|
||||
<string>LibNovel</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>$(EXECUTABLE_NAME)</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>LibNovel</string>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>$(MARKETING_VERSION)</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>1000</string>
|
||||
<key>LIBNOVEL_BASE_URL</key>
|
||||
<string>$(LIBNOVEL_BASE_URL)</string>
|
||||
<key>LSRequiresIPhoneOS</key>
|
||||
<true/>
|
||||
<key>UIBackgroundModes</key>
|
||||
<array>
|
||||
<string>audio</string>
|
||||
</array>
|
||||
<key>UILaunchScreen</key>
|
||||
<dict/>
|
||||
<key>UISupportedInterfaceOrientations</key>
|
||||
<array>
|
||||
<string>UIInterfaceOrientationPortrait</string>
|
||||
<string>UIInterfaceOrientationLandscapeLeft</string>
|
||||
<string>UIInterfaceOrientationLandscapeRight</string>
|
||||
</array>
|
||||
<key>UISupportedInterfaceOrientations~ipad</key>
|
||||
<array>
|
||||
<string>UIInterfaceOrientationPortrait</string>
|
||||
<string>UIInterfaceOrientationPortraitUpsideDown</string>
|
||||
<string>UIInterfaceOrientationLandscapeLeft</string>
|
||||
<string>UIInterfaceOrientationLandscapeRight</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,616 +0,0 @@
|
||||
import Foundation
|
||||
import AVFoundation
|
||||
import MediaPlayer
|
||||
import Combine
|
||||
import Kingfisher
|
||||
|
||||
// MARK: - PlaybackProgress
// Isolated ObservableObject for high-frequency playback state (currentTime,
// duration, isPlaying). Keeping these separate from AudioPlayerService means
// the 0.5-second time-observer ticks only invalidate views that explicitly
// observe PlaybackProgress — menus and other stable UI are unaffected.

@MainActor
final class PlaybackProgress: ObservableObject {
    /// Current playback position in seconds.
    @Published var currentTime = 0.0
    /// Total duration of the loaded item in seconds (0 until known).
    @Published var duration = 0.0
    /// Whether the player is actively playing.
    @Published var isPlaying = false
}
|
||||
|
||||
// MARK: - AudioPlayerService
// Central singleton that owns AVPlayer, drives audio state, handles lock-screen
// controls (NowPlayingInfoCenter + MPRemoteCommandCenter), and pre-fetches the
// next chapter audio.

@MainActor
final class AudioPlayerService: ObservableObject {

    // MARK: - Published state

    // Identity/metadata of the currently loaded chapter.
    @Published var slug: String = ""
    @Published var chapter: Int = 0
    @Published var chapterTitle: String = ""
    @Published var bookTitle: String = ""
    @Published var coverURL: String = ""
    @Published var voice: String = "af_bella"
    @Published var speed: Double = 1.0
    @Published var chapters: [ChapterIndexBrief] = []

    @Published var status: AudioPlayerStatus = .idle
    @Published var audioURL: String = ""
    @Published var errorMessage: String = ""
    // Coarse 0–100 progress shown while TTS generation is in flight.
    @Published var generationProgress: Double = 0

    /// High-frequency playback state (currentTime / duration / isPlaying).
    /// Views that only need the seek bar or play-pause button should observe
    /// this directly so they don't trigger re-renders of menu-bearing parents.
    let progress = PlaybackProgress()

    // Convenience forwarders so non-view call sites keep compiling unchanged.
    var currentTime: Double {
        get { progress.currentTime }
        set { progress.currentTime = newValue }
    }
    var duration: Double {
        get { progress.duration }
        set { progress.duration = newValue }
    }
    var isPlaying: Bool {
        get { progress.isPlaying }
        set { progress.isPlaying = newValue }
    }

    // Whether playback auto-advances to the next chapter when one finishes.
    @Published var autoNext: Bool = false
    @Published var nextChapter: Int? = nil
    @Published var prevChapter: Int? = nil

    @Published var sleepTimer: SleepTimerOption? = nil
    /// Human-readable countdown string shown in the full player near the moon button.
    /// e.g. "38:12" for minute-based, "2 ch left" for chapter-based, "" when off.
    @Published var sleepTimerRemainingText: String = ""

    // Prefetch state for the next chapter's audio.
    @Published var nextPrefetchStatus: NextPrefetchStatus = .none
    @Published var nextAudioURL: String = ""
    @Published var nextPrefetchedChapter: Int? = nil

    /// True whenever the player is doing anything at all (generating, ready,
    /// or error) — only .idle counts as inactive.
    var isActive: Bool {
        switch status {
        case .idle: return false
        default: return true
        }
    }

    // MARK: - Private

    private var player: AVPlayer?
    private var playerItem: AVPlayerItem?
    // Opaque token from addPeriodicTimeObserver; must be removed on teardown.
    private var timeObserver: Any?
    private var statusObserver: AnyCancellable?
    private var durationObserver: AnyCancellable?
    private var finishObserver: AnyCancellable?
    private var generationTask: Task<Void, Never>?
    private var prefetchTask: Task<Void, Never>?

    // Cached cover image — downloaded once per chapter load, reused on every
    // updateNowPlaying() call so we don't re-download on every play/pause/seek.
    private var cachedCoverArtwork: MPMediaItemArtwork?
    private var cachedCoverURL: String = ""

    // Sleep timer tracking
    private var sleepTimerTask: Task<Void, Never>?
    private var sleepTimerStartChapter: Int = 0
    /// Absolute deadline for minute-based timers (nil when not active or chapter-based).
    private var sleepTimerDeadline: Date? = nil
    /// 1-second tick task that keeps sleepTimerRemainingText up-to-date.
    private var sleepTimerCountdownTask: Task<Void, Never>? = nil

    // MARK: - Init

    init() {
        configureAudioSession()
        setupRemoteCommandCenter()
    }

    // MARK: - Public API

    /// Load audio for a specific chapter. Triggers TTS generation if not cached.
    func load(slug: String, chapter: Int, chapterTitle: String,
              bookTitle: String, coverURL: String, voice: String, speed: Double,
              chapters: [ChapterIndexBrief], nextChapter: Int?, prevChapter: Int?) {
        // Cancel any in-flight work from a previous chapter before resetting state.
        generationTask?.cancel()
        prefetchTask?.cancel()
        stop()

        self.slug = slug
        self.chapter = chapter
        self.chapterTitle = chapterTitle
        self.bookTitle = bookTitle
        self.coverURL = coverURL
        self.voice = voice
        self.speed = speed
        self.chapters = chapters
        self.nextChapter = nextChapter
        self.prevChapter = prevChapter
        self.nextPrefetchStatus = .none
        self.nextAudioURL = ""
        self.nextPrefetchedChapter = nil

        // Reset sleep timer start chapter if it's a chapter-based timer
        if case .chapters = sleepTimer {
            sleepTimerStartChapter = chapter
        }

        status = .generating
        generationProgress = 0

        // Invalidate cover cache if the book changed.
        if coverURL != cachedCoverURL {
            cachedCoverArtwork = nil
            cachedCoverURL = coverURL
            prefetchCoverArtwork(from: coverURL)
        }

        generationTask = Task { await generateAudio() }
    }

    /// Resume playback at the current speed and refresh lock-screen info.
    func play() {
        player?.play()
        // Setting rate re-applies the user's speed (play() alone resumes at 1.0).
        player?.rate = Float(speed)
        isPlaying = true
        updateNowPlaying()
    }

    /// Pause playback and refresh lock-screen info.
    func pause() {
        player?.pause()
        isPlaying = false
        updateNowPlaying()
    }

    /// Toggle between play and pause based on current state.
    func togglePlayPause() {
        isPlaying ? pause() : play()
    }

    /// Seek to an absolute position (seconds), updating the UI optimistically
    /// before the player confirms the seek.
    func seek(to seconds: Double) {
        let time = CMTime(seconds: seconds, preferredTimescale: 600)
        currentTime = seconds // optimistic UI update
        player?.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero) { [weak self] _ in
            guard let self else { return }
            Task { @MainActor in self.updateNowPlaying() }
        }
    }

    /// Seek relative to the current position, clamped to [0, duration].
    func skip(by seconds: Double) {
        seek(to: max(0, min(currentTime + seconds, duration)))
    }

    /// Change the playback speed; applies immediately when already playing.
    func setSpeed(_ newSpeed: Double) {
        speed = newSpeed
        if isPlaying { player?.rate = Float(newSpeed) }
        updateNowPlaying()
    }

    /// Install, replace, or clear (nil) the sleep timer. Minute-based timers
    /// run two tasks: one that stops playback at the deadline and a 1-second
    /// ticker that keeps the countdown label fresh. Chapter-based timers are
    /// tracked via handlePlaybackFinished instead.
    func setSleepTimer(_ option: SleepTimerOption?) {
        // Cancel existing timer + countdown
        sleepTimerTask?.cancel()
        sleepTimerTask = nil
        sleepTimerCountdownTask?.cancel()
        sleepTimerCountdownTask = nil
        sleepTimerDeadline = nil

        sleepTimer = option

        guard let option else {
            sleepTimerRemainingText = ""
            return
        }

        // Start timer based on option
        switch option {
        case .chapters(let count):
            sleepTimerStartChapter = chapter
            // Update display immediately; chapter changes are tracked in handlePlaybackFinished.
            updateChapterTimerLabel(chaptersRemaining: count)

        case .minutes(let minutes):
            let deadline = Date().addingTimeInterval(Double(minutes) * 60)
            sleepTimerDeadline = deadline
            // Stop playback when the deadline is reached.
            sleepTimerTask = Task { [weak self] in
                try? await Task.sleep(nanoseconds: UInt64(minutes) * 60 * 1_000_000_000)
                guard let self, !Task.isCancelled else { return }
                await MainActor.run {
                    self.stop()
                    self.sleepTimer = nil
                    self.sleepTimerRemainingText = ""
                }
            }
            // 1-second tick to keep the countdown label fresh.
            sleepTimerCountdownTask = Task { [weak self] in
                while !Task.isCancelled {
                    try? await Task.sleep(nanoseconds: 1_000_000_000)
                    guard let self, !Task.isCancelled else { return }
                    await MainActor.run {
                        guard let deadline = self.sleepTimerDeadline else { return }
                        let remaining = max(0, deadline.timeIntervalSinceNow)
                        self.sleepTimerRemainingText = Self.formatCountdown(remaining)
                    }
                }
            }
            // Set initial label without waiting for the first tick.
            sleepTimerRemainingText = Self.formatCountdown(Double(minutes) * 60)
        }
    }

    /// Render the "N ch left" label for chapter-based timers.
    private func updateChapterTimerLabel(chaptersRemaining: Int) {
        sleepTimerRemainingText = chaptersRemaining == 1 ? "1 ch left" : "\(chaptersRemaining) ch left"
    }

    /// Format a second count as "M:SS" for the countdown label.
    private static func formatCountdown(_ seconds: Double) -> String {
        let s = Int(max(0, seconds))
        let m = s / 60
        let sec = s % 60
        return "\(m):\(String(format: "%02d", sec))"
    }

    /// Tear everything down and return to .idle: player, position, sleep timer.
    func stop() {
        player?.pause()
        teardownPlayer()
        isPlaying = false
        currentTime = 0
        duration = 0
        audioURL = ""
        status = .idle

        // Cancel sleep timer + countdown
        sleepTimerTask?.cancel()
        sleepTimerTask = nil
        sleepTimerCountdownTask?.cancel()
        sleepTimerCountdownTask = nil
        sleepTimerDeadline = nil
        sleepTimer = nil
        sleepTimerRemainingText = ""
    }

    // MARK: - Audio generation

    /// Resolve a playable URL for the current chapter (cached presign first,
    /// TTS generation otherwise), start playback, then prefetch the next chapter.
    private func generateAudio() async {
        guard !slug.isEmpty, chapter > 0 else { return }
        do {
            // Fast path: audio already in MinIO — get a presigned URL and play immediately.
            if let presignedURL = try? await APIClient.shared.presignAudio(slug: slug, chapter: chapter, voice: voice) {
                audioURL = presignedURL
                status = .ready
                generationProgress = 100
                await playURL(presignedURL)
                await prefetchNext()
                return
            }

            // Slow path: trigger TTS generation (async — returns 202 immediately).
            status = .generating
            generationProgress = 10
            let trigger = try await APIClient.shared.triggerAudio(slug: slug, chapter: chapter, voice: voice, speed: speed)

            let playableURL: String
            if trigger.isAsync {
                // 202 Accepted: poll until done.
                generationProgress = 30
                playableURL = try await APIClient.shared.pollAudioStatus(slug: slug, chapter: chapter, voice: voice)
            } else {
                // 200: already cached URL returned inline.
                guard let url = trigger.url, !url.isEmpty else {
                    throw URLError(.badServerResponse)
                }
                playableURL = url
            }

            audioURL = playableURL
            status = .ready
            generationProgress = 100
            await playURL(playableURL)
            await prefetchNext()
        } catch is CancellationError {
            // Cancelled — no-op
        } catch {
            status = .error(error.localizedDescription)
            errorMessage = error.localizedDescription
        }
    }

    // MARK: - Prefetch next chapter
    // Always prefetch regardless of autoNext — faster playback when the user
    // manually navigates forward. autoNext only controls whether we auto-navigate.

    /// Resolve (and if necessary generate) the next chapter's audio URL in the
    /// background, recording the outcome in nextPrefetchStatus/nextAudioURL.
    private func prefetchNext() async {
        guard let next = nextChapter, !Task.isCancelled else { return }
        nextPrefetchStatus = .prefetching
        nextPrefetchedChapter = next
        do {
            // Fast path: already in MinIO.
            if let presignedURL = try? await APIClient.shared.presignAudio(slug: slug, chapter: next, voice: voice) {
                nextAudioURL = presignedURL
                nextPrefetchStatus = .prefetched
                return
            }
            // Slow path: trigger generation; poll until done (background — won't block playback).
            let trigger = try await APIClient.shared.triggerAudio(slug: slug, chapter: next, voice: voice, speed: speed)
            let url: String
            if trigger.isAsync {
                url = try await APIClient.shared.pollAudioStatus(slug: slug, chapter: next, voice: voice)
            } else {
                guard let u = trigger.url, !u.isEmpty else { throw URLError(.badServerResponse) }
                url = u
            }
            nextAudioURL = url
            nextPrefetchStatus = .prefetched
        } catch {
            nextPrefetchStatus = .failed
        }
    }

    // MARK: - AVPlayer management

    /// Build a fresh AVPlayer for the given URL (absolute or server-relative)
    /// and wire up duration/status/time/finish observers before buffering.
    private func playURL(_ urlString: String) async {
        // Resolve relative paths (e.g. "/api/audio/...") to absolute URLs.
        let resolved: URL?
        if urlString.hasPrefix("http://") || urlString.hasPrefix("https://") {
            resolved = URL(string: urlString)
        } else {
            resolved = URL(string: urlString, relativeTo: await APIClient.shared.baseURL)?.absoluteURL
        }
        guard let url = resolved else { return }
        teardownPlayer()
        let item = AVPlayerItem(url: url)
        playerItem = item
        player = AVPlayer(playerItem: item)

        // KVO: update duration as soon as asset metadata is loaded.
        durationObserver = item.publisher(for: \.duration)
            .receive(on: RunLoop.main)
            .sink { [weak self] dur in
                guard let self else { return }
                let secs = dur.seconds
                if secs.isFinite && secs > 0 {
                    self.duration = secs
                    self.updateNowPlaying()
                }
            }

        // KVO: set playback rate once the item is ready.
        // Do NOT call player?.play() unconditionally — let readyToPlay trigger it
        // so we don't race between AVPlayer's internal buffering and our call.
        statusObserver = item.publisher(for: \.status)
            .receive(on: RunLoop.main)
            .sink { [weak self] itemStatus in
                guard let self else { return }
                if itemStatus == .readyToPlay {
                    self.player?.rate = Float(self.speed)
                    self.isPlaying = true
                    self.updateNowPlaying()
                } else if itemStatus == .failed {
                    self.status = .error(item.error?.localizedDescription ?? "Playback failed")
                    self.errorMessage = item.error?.localizedDescription ?? "Playback failed"
                }
            }

        // Periodic time observer for seek bar position.
        timeObserver = player?.addPeriodicTimeObserver(
            forInterval: CMTime(seconds: 0.5, preferredTimescale: 600),
            queue: .main
        ) { [weak self] time in
            guard let self else { return }
            Task { @MainActor in
                let secs = time.seconds
                if secs.isFinite && secs >= 0 {
                    self.currentTime = secs
                }
            }
        }

        // Observe when playback ends.
        finishObserver = NotificationCenter.default
            .publisher(for: AVPlayerItem.didPlayToEndTimeNotification, object: item)
            .sink { [weak self] _ in
                Task { @MainActor in
                    self?.handlePlaybackFinished()
                }
            }

        // Kick off buffering — actual playback starts via statusObserver above.
        player?.play()
    }

    /// Remove all observers and release the player/item pair. Safe to call
    /// repeatedly; the periodic time observer MUST be removed before the
    /// player is dropped.
    private func teardownPlayer() {
        if let observer = timeObserver { player?.removeTimeObserver(observer) }
        timeObserver = nil
        statusObserver = nil
        durationObserver = nil
        finishObserver = nil
        player = nil
        playerItem = nil
    }

    /// End-of-chapter handler: enforces chapter-based sleep timers, notifies
    /// views, and — when autoNext is on — advances to the next chapter, using
    /// the prefetched URL when available or doing a full load() otherwise.
    private func handlePlaybackFinished() {
        isPlaying = false

        guard let next = nextChapter else { return }

        // Check chapter-based sleep timer
        if case .chapters(let count) = sleepTimer {
            let chaptersPlayed = chapter - sleepTimerStartChapter + 1
            if chaptersPlayed >= count {
                stop()
                return
            }
            // Update the remaining chapters label.
            let remaining = count - chaptersPlayed
            updateChapterTimerLabel(chaptersRemaining: remaining)
        }

        // Always notify the view that the chapter finished (it may update UI).
        NotificationCenter.default.post(
            name: .audioDidFinishChapter,
            object: nil,
            userInfo: ["next": next, "autoNext": autoNext]
        )

        // If autoNext is on, load the next chapter internally right away.
        // We already have the metadata in `chapters`, so we can reconstruct
        // everything without waiting for the view to navigate.
        guard autoNext else { return }

        let nextTitle = chapters.first(where: { $0.number == next })?.title ?? ""
        let nextNextChapter = chapters.first(where: { $0.number > next })?.number
        let nextPrevChapter: Int? = chapter // Current chapter becomes previous for the next one

        // If we already prefetched a URL for the next chapter, skip straight to
        // playback and kick off generation in the background for the one after.
        if nextPrefetchStatus == .prefetched, !nextAudioURL.isEmpty {
            let url = nextAudioURL

            // Advance state before tearing down the current player.
            chapter = next
            chapterTitle = nextTitle
            nextChapter = nextNextChapter
            prevChapter = nextPrevChapter
            nextPrefetchStatus = .none
            nextAudioURL = ""
            nextPrefetchedChapter = nil
            audioURL = url
            status = .ready
            generationProgress = 100

            // Update sleep timer start chapter if using chapter-based timer
            if case .chapters = sleepTimer {
                sleepTimerStartChapter = next
            }

            generationTask = Task {
                await playURL(url)
                await prefetchNext()
            }
        } else {
            // No prefetch available — do a full load.
            load(
                slug: slug,
                chapter: next,
                chapterTitle: nextTitle,
                bookTitle: bookTitle,
                coverURL: coverURL,
                voice: voice,
                speed: speed,
                chapters: chapters,
                nextChapter: nextNextChapter,
                prevChapter: nextPrevChapter
            )
        }
    }

    // MARK: - Cover art prefetch

    /// Download the cover once via Kingfisher and cache it as MPMediaItemArtwork
    /// for the lock screen; updateNowPlaying() reuses the cached copy.
    private func prefetchCoverArtwork(from urlString: String) {
        guard !urlString.isEmpty, let url = URL(string: urlString) else { return }
        KingfisherManager.shared.retrieveImage(with: url) { [weak self] result in
            guard let self else { return }
            if case .success(let value) = result {
                let image = value.image
                let artwork = MPMediaItemArtwork(boundsSize: image.size) { _ in image }
                Task { @MainActor in
                    self.cachedCoverArtwork = artwork
                    self.updateNowPlaying()
                }
            }
        }
    }

    // MARK: - Audio Session

    /// Configure the shared AVAudioSession for spoken-audio playback.
    /// Failures are intentionally ignored (best-effort).
    private func configureAudioSession() {
        do {
            try AVAudioSession.sharedInstance().setCategory(.playback, mode: .spokenAudio)
            try AVAudioSession.sharedInstance().setActive(true)
        } catch {
            // Non-fatal
        }
    }

    // MARK: - Lock Screen / Control Center

    /// Register lock-screen / Control Center command handlers:
    /// play, pause, toggle, ±15 s skips, and scrubbing.
    private func setupRemoteCommandCenter() {
        let center = MPRemoteCommandCenter.shared()
        center.playCommand.addTarget { [weak self] _ in
            self?.play()
            return .success
        }
        center.pauseCommand.addTarget { [weak self] _ in
            self?.pause()
            return .success
        }
        center.togglePlayPauseCommand.addTarget { [weak self] _ in
            self?.togglePlayPause()
            return .success
        }
        center.skipForwardCommand.preferredIntervals = [15]
        center.skipForwardCommand.addTarget { [weak self] _ in
            self?.skip(by: 15)
            return .success
        }
        center.skipBackwardCommand.preferredIntervals = [15]
        center.skipBackwardCommand.addTarget { [weak self] _ in
            self?.skip(by: -15)
            return .success
        }
        center.changePlaybackPositionCommand.addTarget { [weak self] event in
            if let e = event as? MPChangePlaybackPositionCommandEvent {
                self?.seek(to: e.positionTime)
            }
            return .success
        }
    }

    /// Push current title/artist/position/rate (and cached artwork) to the
    /// Now Playing info center for the lock screen and Control Center.
    private func updateNowPlaying() {
        var info: [String: Any] = [
            MPMediaItemPropertyTitle: chapterTitle.isEmpty ? "Chapter \(chapter)" : chapterTitle,
            MPMediaItemPropertyArtist: bookTitle,
            MPNowPlayingInfoPropertyElapsedPlaybackTime: currentTime,
            MPMediaItemPropertyPlaybackDuration: duration,
            MPNowPlayingInfoPropertyPlaybackRate: isPlaying ? speed : 0.0
        ]
        // Use cached artwork — downloaded once in prefetchCoverArtwork().
        if let artwork = cachedCoverArtwork {
            info[MPMediaItemPropertyArtwork] = artwork
        }
        MPNowPlayingInfoCenter.default().nowPlayingInfo = info
    }
}
|
||||
|
||||
// MARK: - Supporting types
|
||||
|
||||
/// High-level lifecycle state of the audio player.
///
/// The previous hand-written `==` was redundant: Swift synthesizes
/// `Equatable` for enums whose associated values are themselves
/// `Equatable` (here `String`), with identical semantics — same-case
/// matching plus payload comparison — so it has been removed.
enum AudioPlayerStatus: Equatable {
    case idle
    case generating // covers both "loading" and "generating TTS" phases
    case ready
    case error(String) // associated value is the user-displayable message
}
|
||||
|
||||
/// User-selectable sleep-timer modes for audio playback.
/// The associated `Int` is the count for the chosen unit.
enum SleepTimerOption: Equatable {
    case chapters(Int) // Stop after N chapters
    case minutes(Int) // Stop after N minutes
}
|
||||
|
||||
/// App-internal notification names coordinating the audio player and the
/// reader UI. NOTE(review): observers/posters are elsewhere in the app —
/// presumably the reader view reacts to the skip notifications; confirm
/// against the posting sites before relying on exact semantics.
extension Notification.Name {
    // Fired when a chapter's audio playback reaches the end.
    static let audioDidFinishChapter = Notification.Name("audioDidFinishChapter")
    // Requests navigation to the next / previous chapter.
    static let skipToNextChapter = Notification.Name("skipToNextChapter")
    static let skipToPrevChapter = Notification.Name("skipToPrevChapter")
}
|
||||
@@ -1,146 +0,0 @@
|
||||
import Foundation
|
||||
import Combine
|
||||
|
||||
// MARK: - AuthStore
|
||||
// Owns the authenticated user, the HMAC auth token, and user settings.
|
||||
// Persists the token to Keychain so the user stays logged in across launches.
|
||||
|
||||
@MainActor
final class AuthStore: ObservableObject {
    /// The currently authenticated user, or `nil` when logged out.
    @Published var user: AppUser?
    /// Server-side user preferences; `.default` until loaded.
    @Published var settings: UserSettings = .default
    /// True while a login/register request is in flight.
    @Published var isLoading: Bool = false
    /// Last auth/settings error, suitable for display.
    @Published var error: String?

    var isAuthenticated: Bool { user != nil }

    /// Keychain account name under which the HMAC auth token is stored.
    private let keychainKey = "libnovel_auth_token"

    init() {
        // Restore token from Keychain and validate it on launch
        if let token = loadToken() {
            Task { await validateToken(token) }
        }
    }

    // MARK: - Login / Register

    /// Signs in with the given credentials; on success persists the token
    /// and loads the user's settings.
    func login(username: String, password: String) async {
        isLoading = true
        error = nil
        do {
            let response = try await APIClient.shared.login(username: username, password: password)
            await establishSession(token: response.token, user: response.user)
        } catch {
            self.error = error.localizedDescription
        }
        isLoading = false
    }

    /// Creates a new account; on success behaves exactly like `login`.
    func register(username: String, password: String) async {
        isLoading = true
        error = nil
        do {
            let response = try await APIClient.shared.register(username: username, password: password)
            await establishSession(token: response.token, user: response.user)
        } catch {
            self.error = error.localizedDescription
        }
        isLoading = false
    }

    /// Shared post-auth bookkeeping for login/register (previously duplicated
    /// in both): install the auth cookie, persist the token to Keychain,
    /// publish the user, and fetch settings.
    private func establishSession(token: String, user: AppUser) async {
        await APIClient.shared.setAuthCookie(token)
        saveToken(token)
        self.user = user
        await loadSettings()
    }

    /// Ends the session server-side (best effort) and clears all local state.
    func logout() async {
        do {
            try await APIClient.shared.logout()
        } catch {
            // Best-effort; clear local state regardless
        }
        clearToken()
        user = nil
        settings = .default
    }

    // MARK: - Settings

    /// Fetches settings; silently keeps the current values on failure.
    func loadSettings() async {
        do {
            settings = try await APIClient.shared.settings()
        } catch {
            // Use defaults if settings endpoint fails
        }
    }

    /// Pushes settings to the server; local state updates only on success.
    func saveSettings(_ updated: UserSettings) async {
        do {
            try await APIClient.shared.updateSettings(updated)
            settings = updated
        } catch {
            self.error = error.localizedDescription
        }
    }

    // MARK: - Token validation

    /// Re-validates the current session and refreshes `user` + `settings`.
    /// Call this after any operation that may change the user record (e.g. avatar upload).
    func validateToken() async {
        guard let token = loadToken() else { return }
        await validateToken(token)
    }

    private func validateToken(_ token: String) async {
        await APIClient.shared.setAuthCookie(token)
        // Use /api/auth/me to restore the user record and confirm the token is still valid
        do {
            async let me: AppUser = APIClient.shared.fetch("/api/auth/me")
            async let s: UserSettings = APIClient.shared.settings()
            let (restoredUser, restoredSettings) = try await (me, s)
            user = restoredUser
            settings = restoredSettings
        } catch let e as APIError {
            // Only a 401 means the token is dead; other failures (offline,
            // server error) keep the token so the user is not logged out.
            if case .httpError(let code, _) = e, code == 401 {
                clearToken()
            }
        } catch {}
    }

    // MARK: - Keychain helpers

    /// Stores the token as a generic-password item, replacing any previous
    /// value. Delete-then-add avoids errSecDuplicateItem; failures are
    /// ignored (worst case the user must log in again next launch).
    private func saveToken(_ token: String) {
        let data = Data(token.utf8)
        let query: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrAccount as String: keychainKey,
            kSecValueData as String: data
        ]
        SecItemDelete(query as CFDictionary)
        SecItemAdd(query as CFDictionary, nil)
    }

    /// Returns the stored token, or `nil` if absent or unreadable.
    private func loadToken() -> String? {
        let query: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrAccount as String: keychainKey,
            kSecReturnData as String: true,
            kSecMatchLimit as String: kSecMatchLimitOne
        ]
        var item: CFTypeRef?
        guard SecItemCopyMatching(query as CFDictionary, &item) == errSecSuccess,
              let data = item as? Data else { return nil }
        return String(data: data, encoding: .utf8)
    }

    /// Removes the stored token (no-op if none exists).
    private func clearToken() {
        let query: [String: Any] = [
            kSecClass as String: kSecClassGenericPassword,
            kSecAttrAccount as String: keychainKey
        ]
        SecItemDelete(query as CFDictionary)
    }
}
|
||||
@@ -1,47 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Backs the book-detail screen: the book record, its chapter index,
/// saved state, and the user's last-read chapter.
@MainActor
final class BookDetailViewModel: ObservableObject {
    let slug: String

    @Published var book: Book?
    @Published var chapters: [ChapterIndex] = []
    @Published var saved: Bool = false
    @Published var lastChapter: Int?
    @Published var isLoading = false
    @Published var error: String?

    init(slug: String) {
        self.slug = slug
    }

    /// Fetches the detail payload and publishes its pieces.
    /// Cancellation (e.g. the view disappearing) is not surfaced as an error.
    func load() async {
        isLoading = true
        error = nil
        defer { isLoading = false }
        do {
            let detail = try await APIClient.shared.bookDetail(slug: slug)
            book = detail.book
            chapters = detail.chapters
            saved = detail.saved
            lastChapter = detail.lastChapter
        } catch is CancellationError {
            // Silently ignored — the view is going away.
        } catch {
            self.error = error.localizedDescription
        }
    }

    /// Saves or unsaves the book server-side; local state flips only
    /// after the request succeeds.
    func toggleSaved() async {
        do {
            if saved == false {
                try await APIClient.shared.saveBook(slug: slug)
            } else {
                try await APIClient.shared.unsaveBook(slug: slug)
            }
            saved = !saved
        } catch {
            self.error = error.localizedDescription
        }
    }
}
|
||||
@@ -1,73 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Drives the Discover tab: paged browsing with sort/genre/status filters,
/// plus a one-shot (unpaginated) search mode.
///
/// Note: the old private `isSearchMode` flag was written in two places but
/// never read anywhere in this class — dead state — and has been removed.
@MainActor
final class BrowseViewModel: ObservableObject {
    @Published var novels: [BrowseNovel] = []
    @Published var sort: String = "popular"
    @Published var genre: String = "all"
    @Published var status: String = "all"
    @Published var searchQuery: String = ""
    @Published var isLoading = false
    @Published var hasNext = false
    @Published var error: String?

    // 1-based index of the most recently loaded browse page.
    private var currentPage = 1

    /// Resets pagination and loads page 1 of browse results.
    func loadFirstPage() async {
        currentPage = 1
        novels = []
        await loadPage(1)
    }

    /// Loads the next browse page (infinite scroll). No-op while a load is
    /// in flight or when the server reported no further pages.
    func loadNextPage() async {
        guard hasNext, !isLoading else { return }
        await loadPage(currentPage + 1)
    }

    /// Runs a one-shot search; results replace the list and are not
    /// paginated (`hasNext` forced to false). An empty query falls back
    /// to the default browse listing.
    func search() async {
        guard !searchQuery.isEmpty else { await loadFirstPage(); return }
        isLoading = true
        novels = []
        error = nil
        do {
            let result = try await APIClient.shared.search(query: searchQuery)
            novels = result.results
            hasNext = false
        } catch {
            if !(error is CancellationError) {
                self.error = error.localizedDescription
            }
        }
        isLoading = false
    }

    /// Clears the query and returns to the default browse listing.
    func clearSearch() {
        searchQuery = ""
        Task { await loadFirstPage() }
    }

    /// Fetches one browse page; page 1 replaces the list, later pages append.
    private func loadPage(_ page: Int) async {
        isLoading = true
        error = nil
        do {
            let result = try await APIClient.shared.browse(
                page: page, genre: genre, sort: sort, status: status
            )
            if page == 1 {
                novels = result.novels
            } else {
                novels.append(contentsOf: result.novels)
            }
            hasNext = result.hasNext
            currentPage = page
        } catch {
            if !(error is CancellationError) {
                self.error = error.localizedDescription
            }
        }
        isLoading = false
    }
}
|
||||
@@ -1,69 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Backs the chapter reader: fetches chapter content, records reading
/// progress, and hands off to the audio player.
@MainActor
final class ChapterReaderViewModel: ObservableObject {
    let slug: String
    private(set) var chapter: Int

    @Published var content: ChapterResponse?
    @Published var isLoading = false
    @Published var error: String?

    init(slug: String, chapter: Int) {
        self.slug = slug
        self.chapter = chapter
    }

    /// Switch to a different chapter in-place: resets state and updates `chapter`
    /// so that `.task(id: currentChapter)` in the View re-fires `load()`.
    func switchChapter(to newChapter: Int) {
        if newChapter == chapter { return }
        chapter = newChapter
        content = nil
        error = nil
    }

    /// Fetches the chapter body, then best-effort records progress.
    func load() async {
        isLoading = true
        error = nil
        defer { isLoading = false }
        do {
            content = try await APIClient.shared.chapterContent(slug: slug, chapter: chapter)
            // Record reading progress (failure here is non-fatal).
            try? await APIClient.shared.setProgress(slug: slug, chapter: chapter)
        } catch is CancellationError {
            // View went away — nothing to report.
        } catch {
            self.error = error.localizedDescription
        }
    }

    /// Play/pause if the player is already active on this chapter;
    /// otherwise load this chapter into the player fresh.
    func toggleAudio(audioPlayer: AudioPlayerService, settings: UserSettings) {
        guard let content else { return }

        // Only treat as "current" if the player is active (not idle/stopped).
        // If the user stopped playback, isActive is false — we must re-load.
        let playerIsOnThisChapter = audioPlayer.isActive
            && audioPlayer.slug == slug
            && audioPlayer.chapter == chapter

        guard !playerIsOnThisChapter else {
            audioPlayer.togglePlayPause()
            return
        }

        audioPlayer.load(
            slug: slug,
            chapter: chapter,
            chapterTitle: content.chapter.title,
            bookTitle: content.book.title,
            coverURL: content.book.cover,
            voice: settings.voice,
            speed: settings.speed,
            chapters: content.chapters,
            nextChapter: content.next,
            prevChapter: content.prev
        )
    }
}
|
||||
@@ -1,28 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Backs the home tab: continue-reading shelf, recent updates, and stats.
@MainActor
final class HomeViewModel: ObservableObject {
    @Published var continueReading: [ContinueReadingItem] = []
    @Published var recentlyUpdated: [Book] = []
    @Published var stats: HomeStats?
    @Published var isLoading = false
    @Published var error: String?

    /// Fetches the home payload and fans it out to the published lists.
    func load() async {
        isLoading = true
        error = nil
        defer { isLoading = false }
        do {
            let data = try await APIClient.shared.homeData()
            continueReading = data.continueReading.map { entry in
                ContinueReadingItem(book: entry.book, chapter: entry.chapter)
            }
            recentlyUpdated = data.recentlyUpdated
            stats = data.stats
        } catch is CancellationError {
            // Cancelled load (view disappeared) — not a user-facing error.
        } catch {
            self.error = error.localizedDescription
        }
    }
}
|
||||
@@ -1,21 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Backs the library tab: the user's saved books.
@MainActor
final class LibraryViewModel: ObservableObject {
    @Published var items: [LibraryItem] = []
    @Published var isLoading = false
    @Published var error: String?

    /// Fetches the saved-book list, surfacing non-cancellation errors.
    func load() async {
        isLoading = true
        error = nil
        defer { isLoading = false }
        do {
            items = try await APIClient.shared.library()
        } catch is CancellationError {
            // View went away mid-load; nothing to show.
        } catch {
            self.error = error.localizedDescription
        }
    }
}
|
||||
@@ -1,40 +0,0 @@
|
||||
import Foundation
|
||||
|
||||
/// Backs the profile screen: active sessions and the available TTS voices.
@MainActor
final class ProfileViewModel: ObservableObject {
    @Published var sessions: [UserSession] = []
    @Published var voices: [String] = []
    @Published var sessionsLoading = false
    @Published var error: String?

    /// Fetches the user's active sessions.
    func loadSessions() async {
        sessionsLoading = true
        defer { sessionsLoading = false }
        do {
            sessions = try await APIClient.shared.sessions()
        } catch {
            self.error = error.localizedDescription
        }
    }

    /// Fetches the voice list once (no-op if already loaded); on failure
    /// falls back to the built-in list.
    func loadVoices() async {
        if !voices.isEmpty { return }
        do {
            voices = try await APIClient.shared.voices()
        } catch {
            // Use hardcoded fallback — same as Go server helpers.go
            voices = ["af_bella", "af_sky", "af_sarah", "af_nicole",
                      "am_adam", "am_michael", "bf_emma", "bf_isabella",
                      "bm_george", "bm_lewis"]
        }
    }

    /// Revokes one session server-side, then drops it from the local list.
    func revokeSession(id: String) async {
        do {
            try await APIClient.shared.revokeSession(id: id)
            sessions.removeAll { $0.id == id }
        } catch {
            self.error = error.localizedDescription
        }
    }
}
|
||||
@@ -1,123 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
/// Sign-in / registration screen. Drives `AuthStore` for the actual
/// network calls; this view only collects credentials and shows state.
struct AuthView: View {
    @EnvironmentObject var authStore: AuthStore
    @State private var mode: Mode = .login
    @State private var username: String = ""
    @State private var password: String = ""
    @State private var confirmPassword: String = ""
    @FocusState private var focusedField: Field?

    enum Mode { case login, register }
    enum Field { case username, password, confirmPassword }

    var body: some View {
        NavigationStack {
            VStack(spacing: 0) {
                // Logo / header
                VStack(spacing: 8) {
                    Image(systemName: "books.vertical.fill")
                        .font(.system(size: 56))
                        .foregroundStyle(.amber)
                    Text("LibNovel")
                        .font(.largeTitle.bold())
                }
                .padding(.top, 60)
                .padding(.bottom, 40)

                // Tab switcher
                Picker("Mode", selection: $mode) {
                    Text("Sign In").tag(Mode.login)
                    Text("Create Account").tag(Mode.register)
                }
                .pickerStyle(.segmented)
                .padding(.horizontal, 24)
                .padding(.bottom, 32)

                // Form — return key advances focus, last field submits.
                VStack(spacing: 16) {
                    TextField("Username", text: $username)
                        .textFieldStyle(.roundedBorder)
                        .textInputAutocapitalization(.never)
                        .autocorrectionDisabled()
                        .focused($focusedField, equals: .username)
                        .submitLabel(.next)
                        .onSubmit { focusedField = .password }

                    SecureField("Password", text: $password)
                        .textFieldStyle(.roundedBorder)
                        .focused($focusedField, equals: .password)
                        .submitLabel(mode == .register ? .next : .go)
                        .onSubmit {
                            if mode == .register { focusedField = .confirmPassword }
                            else { submit() }
                        }

                    // Confirm field only exists in register mode.
                    if mode == .register {
                        SecureField("Confirm Password", text: $confirmPassword)
                            .textFieldStyle(.roundedBorder)
                            .focused($focusedField, equals: .confirmPassword)
                            .submitLabel(.go)
                            .onSubmit { submit() }
                            .transition(.opacity.combined(with: .move(edge: .top)))
                    }
                }
                .padding(.horizontal, 24)
                .animation(.easeInOut(duration: 0.2), value: mode)

                // Inline error from the store (login/register failure).
                if let error = authStore.error {
                    Text(error)
                        .font(.footnote)
                        .foregroundStyle(.red)
                        .multilineTextAlignment(.center)
                        .padding(.horizontal, 24)
                        .padding(.top, 8)
                }

                // Primary CTA — shows a spinner while the request is in flight.
                Button(action: submit) {
                    Group {
                        if authStore.isLoading {
                            ProgressView()
                                .progressViewStyle(.circular)
                                .tint(.white)
                        } else {
                            Text(mode == .login ? "Sign In" : "Create Account")
                                .fontWeight(.semibold)
                        }
                    }
                    .frame(maxWidth: .infinity)
                    .frame(height: 50)
                }
                .buttonStyle(.borderedProminent)
                .tint(.amber)
                .padding(.horizontal, 24)
                .padding(.top, 24)
                .disabled(authStore.isLoading || !formIsValid)

                Spacer()
            }
            .navigationBarHidden(true)
        }
        // Switching tabs clears stale errors and the confirm field.
        .onChange(of: mode) { _, _ in
            authStore.error = nil
            confirmPassword = ""
        }
    }

    // Minimum 4-char password; register additionally requires a match.
    private var formIsValid: Bool {
        let base = !username.isEmpty && password.count >= 4
        if mode == .register { return base && password == confirmPassword }
        return base
    }

    // Dismisses the keyboard and kicks off the async auth call.
    private func submit() {
        focusedField = nil
        Task {
            if mode == .login {
                await authStore.login(username: username, password: password)
            } else {
                await authStore.register(username: username, password: password)
            }
        }
    }
}
|
||||
@@ -1,388 +0,0 @@
|
||||
import SwiftUI
|
||||
import Kingfisher
|
||||
|
||||
/// Book detail screen: blurred hero header, stats/summary/CTAs, and a
/// paginated chapter list. Data comes from `BookDetailViewModel`.
struct BookDetailView: View {
    let slug: String
    @StateObject private var vm: BookDetailViewModel
    @EnvironmentObject var authStore: AuthStore
    @EnvironmentObject var audioPlayer: AudioPlayerService
    // Whether the summary shows all lines or is clamped to 4.
    @State private var summaryExpanded = false
    // Zero-based page index into the chapter list.
    @State private var chapterPage = 0
    // Chapters shown per page of the list.
    private let pageSize = 50

    init(slug: String) {
        self.slug = slug
        _vm = StateObject(wrappedValue: BookDetailViewModel(slug: slug))
    }

    var body: some View {
        ZStack(alignment: .top) {
            // Scroll content
            ScrollView {
                VStack(alignment: .leading, spacing: 0) {
                    if vm.isLoading {
                        ProgressView().frame(maxWidth: .infinity).padding(.top, 120)
                    } else if let book = vm.book {
                        heroSection(book: book)
                        metaSection(book: book)
                        Divider().padding(.horizontal)
                        chapterSection(book: book)
                    }
                }
            }
            .ignoresSafeArea(edges: .top)
        }
        .navigationBarTitleDisplayMode(.inline)
        .toolbar { bookmarkButton }
        .task { await vm.load() }
        // NOTE(review): errorAlert appears to be a project View extension
        // presenting vm.error as an alert — confirm its definition.
        .errorAlert($vm.error)
    }

    // MARK: - Hero

    /// Blurred full-bleed cover backdrop with the sharp cover, title,
    /// author, genre chips, and status badge layered on top.
    @ViewBuilder
    private func heroSection(book: Book) -> some View {
        ZStack(alignment: .bottom) {
            // Full-bleed blurred background
            KFImage(URL(string: book.cover))
                .resizable()
                .scaledToFill()
                .frame(maxWidth: .infinity)
                .frame(height: 320)
                .blur(radius: 24)
                .clipped()
                .overlay(
                    LinearGradient(
                        colors: [.black.opacity(0.15), .black.opacity(0.68)],
                        startPoint: .top,
                        endPoint: .bottom
                    )
                )

            // Cover + info column centered
            VStack(spacing: 16) {
                // Isolated cover with 3D-style shadow
                KFImage(URL(string: book.cover))
                    .resizable()
                    .placeholder {
                        RoundedRectangle(cornerRadius: 12)
                            .fill(Color(.systemGray5))
                    }
                    .scaledToFill()
                    .frame(width: 130, height: 188)
                    .clipShape(RoundedRectangle(cornerRadius: 12))
                    .shadow(color: .black.opacity(0.55), radius: 18, x: 0, y: 10)
                    .shadow(color: .black.opacity(0.3), radius: 6, x: 0, y: 3)

                // Title + author
                VStack(spacing: 6) {
                    Text(book.title)
                        .font(.title3.bold())
                        .foregroundStyle(.white)
                        .multilineTextAlignment(.center)
                        .lineLimit(3)
                        .padding(.horizontal, 32)

                    Text(book.author)
                        .font(.subheadline)
                        .foregroundStyle(.white.opacity(0.75))
                }

                // Genre tags — capped at three to fit one row.
                if !book.genres.isEmpty {
                    HStack(spacing: 8) {
                        ForEach(book.genres.prefix(3), id: \.self) { genre in
                            TagChip(label: genre).colorScheme(.dark)
                        }
                    }
                }

                // Status badge
                if !book.status.isEmpty {
                    StatusBadge(status: book.status)
                }
            }
            .padding(.horizontal)
            .padding(.bottom, 28)
        }
        .frame(minHeight: 320)
    }

    // MARK: - Meta section (summary + CTAs)

    /// Quick-stats row, expandable summary, and Continue/Start buttons.
    @ViewBuilder
    private func metaSection(book: Book) -> some View {
        VStack(alignment: .leading, spacing: 0) {
            // Quick stats row
            HStack(spacing: 0) {
                MetaStat(value: "\(book.totalChapters)", label: "Chapters",
                         icon: "doc.text")
                Divider().frame(height: 36)
                MetaStat(value: book.status.capitalized.isEmpty ? "—" : book.status.capitalized,
                         label: "Status", icon: "flag")
                // Rank only shown when the server provides one (> 0).
                if book.ranking > 0 {
                    Divider().frame(height: 36)
                    MetaStat(value: "#\(book.ranking)", label: "Rank",
                             icon: "chart.bar.fill")
                }
            }
            .padding(.vertical, 16)
            .frame(maxWidth: .infinity)

            Divider().padding(.horizontal)

            // Summary — clamped to 4 lines until expanded.
            VStack(alignment: .leading, spacing: 8) {
                Text("About")
                    .font(.headline)

                Text(book.summary)
                    .font(.subheadline)
                    .foregroundStyle(.secondary)
                    .lineLimit(summaryExpanded ? nil : 4)
                    .animation(.easeInOut(duration: 0.2), value: summaryExpanded)

                // "More/Less" only for summaries long enough to clamp
                // (200 chars is a heuristic, not an exact 4-line measure).
                if book.summary.count > 200 {
                    Button(summaryExpanded ? "Less" : "More") {
                        withAnimation { summaryExpanded.toggle() }
                    }
                    .font(.caption.bold())
                    .foregroundStyle(.amber)
                }
            }
            .padding(.horizontal)
            .padding(.vertical, 16)

            Divider().padding(.horizontal)

            // CTA buttons — Continue + restart when progress exists,
            // otherwise a single Start Reading button.
            HStack(spacing: 10) {
                if let last = vm.lastChapter, last > 0 {
                    NavigationLink(value: NavDestination.chapter(slug, last)) {
                        Label("Continue Ch.\(last)", systemImage: "play.fill")
                            .frame(maxWidth: .infinity)
                            .fontWeight(.semibold)
                    }
                    .buttonStyle(.borderedProminent)
                    .tint(.amber)

                    NavigationLink(value: NavDestination.chapter(slug, 1)) {
                        Label("Ch.1", systemImage: "arrow.counterclockwise")
                            .frame(maxWidth: .infinity)
                    }
                    .buttonStyle(.bordered)
                    .tint(.secondary)
                } else {
                    NavigationLink(value: NavDestination.chapter(slug, 1)) {
                        Label("Start Reading", systemImage: "book.fill")
                            .frame(maxWidth: .infinity)
                            .fontWeight(.semibold)
                    }
                    .buttonStyle(.borderedProminent)
                    .tint(.amber)
                }
            }
            .padding(.horizontal)
            .padding(.vertical, 16)
        }
    }

    // MARK: - Chapter list

    /// One page (`pageSize` rows) of the chapter list plus a pagination bar.
    @ViewBuilder
    private func chapterSection(book: Book) -> some View {
        let chapters = vm.chapters
        let total = chapters.count
        let start = chapterPage * pageSize
        let end = min(start + pageSize, total)
        let pageChapters = Array(chapters[start..<end])

        VStack(alignment: .leading, spacing: 0) {
            // Section header
            HStack {
                Text("Chapters")
                    .font(.headline)
                Spacer()
                if total > 0 {
                    Text("\(start + 1)–\(end) of \(total)")
                        .font(.caption)
                        .foregroundStyle(.secondary)
                }
            }
            .padding(.horizontal)
            .padding(.vertical, 14)

            if vm.isLoading {
                ProgressView().frame(maxWidth: .infinity).padding()
            } else {
                ForEach(pageChapters) { ch in
                    NavigationLink(value: NavDestination.chapter(slug, ch.number)) {
                        ChapterRow(chapter: ch, isCurrent: ch.number == vm.lastChapter,
                                   totalChapters: total)
                    }
                    .buttonStyle(.plain)
                    Divider().padding(.leading)
                }
            }

            // Pagination bar — only when more than one page exists.
            if total > pageSize {
                HStack {
                    Button {
                        withAnimation { chapterPage -= 1 }
                    } label: {
                        Image(systemName: "chevron.left")
                        Text("Previous")
                    }
                    .disabled(chapterPage == 0)

                    Spacer()

                    Text("Page \(chapterPage + 1) of \((total + pageSize - 1) / pageSize)")
                        .font(.caption)
                        .foregroundStyle(.secondary)

                    Spacer()

                    Button {
                        withAnimation { chapterPage += 1 }
                    } label: {
                        Text("Next")
                        Image(systemName: "chevron.right")
                    }
                    .disabled(end >= total)
                }
                .font(.subheadline)
                .foregroundStyle(.amber)
                .padding()
            }

            // Bottom spacer so the last row clears any floating UI.
            Color.clear.frame(height: 32)
        }
    }

    // MARK: - Bookmark toolbar

    /// Save/unsave toggle in the navigation bar, driven by vm.saved.
    @ToolbarContentBuilder
    private var bookmarkButton: some ToolbarContent {
        ToolbarItem(placement: .topBarTrailing) {
            Button {
                Task { await vm.toggleSaved() }
            } label: {
                Image(systemName: vm.saved ? "bookmark.fill" : "bookmark")
                    .foregroundStyle(vm.saved ? .amber : .primary)
            }
        }
    }
}
|
||||
|
||||
// MARK: - Chapter row
|
||||
|
||||
/// One row in the chapter list: number badge, cleaned-up title, optional
/// date label, and a disclosure chevron. Highlights the user's current
/// chapter in amber.
///
/// The old `progressFraction` computed property was never referenced in
/// `body` (or anywhere else in this private struct) — dead code, removed.
/// `totalChapters` is kept to preserve the call-site interface.
private struct ChapterRow: View {
    let chapter: ChapterIndex
    let isCurrent: Bool
    let totalChapters: Int

    var body: some View {
        HStack(spacing: 10) {
            // Number badge — filled amber for the current chapter.
            ZStack {
                Circle()
                    .fill(isCurrent ? Color.amber : Color(.systemGray6))
                Text("\(chapter.number)")
                    .font(.caption2.bold().monospacedDigit())
                    .foregroundStyle(isCurrent ? .black : .secondary)
            }
            .frame(width: 32, height: 32)

            VStack(alignment: .leading, spacing: 2) {
                // Strip trailing date noise; fall back to "Chapter N" when
                // the remaining title is empty or just repeats the number.
                let displayTitle: String = {
                    let stripped = chapter.title.strippingTrailingDate()
                    if stripped.isEmpty || stripped == "Chapter \(chapter.number)" {
                        return "Chapter \(chapter.number)"
                    }
                    return stripped
                }()

                Text(displayTitle)
                    .font(.subheadline)
                    .fontWeight(isCurrent ? .semibold : .regular)
                    .foregroundStyle(isCurrent ? .amber : .primary)
                    .lineLimit(1)
            }

            Spacer(minLength: 8)

            VStack(alignment: .trailing, spacing: 2) {
                if !chapter.dateLabel.isEmpty {
                    Text(chapter.dateLabel)
                        .font(.caption2)
                        .foregroundStyle(.tertiary)
                }
            }

            Image(systemName: "chevron.right")
                .font(.caption2)
                .foregroundStyle(.tertiary)
        }
        .padding(.horizontal, 16)
        .padding(.vertical, 10)
        // Make the whole row tappable, not just its visible content.
        .contentShape(Rectangle())
    }
}
|
||||
|
||||
// MARK: - Supporting components
|
||||
|
||||
/// A single column in the quick-stats row: icon over value over label,
/// expanding to share width equally with its siblings.
private struct MetaStat: View {
    let value: String
    let label: String
    let icon: String

    var body: some View {
        VStack(spacing: 4) {
            iconView
            valueView
            labelView
        }
        .frame(maxWidth: .infinity)
    }

    private var iconView: some View {
        Image(systemName: icon)
            .font(.caption)
            .foregroundStyle(.amber)
    }

    // Scales down (to 70%) rather than truncating long values.
    private var valueView: some View {
        Text(value)
            .font(.subheadline.bold())
            .lineLimit(1)
            .minimumScaleFactor(0.7)
    }

    private var labelView: some View {
        Text(label)
            .font(.caption2)
            .foregroundStyle(.secondary)
    }
}
|
||||
|
||||
/// Small capsule badge showing a book's publication status with a
/// status-specific tint (green ongoing, blue completed, orange hiatus).
private struct StatusBadge: View {
    let status: String

    // Map the (case-insensitive) status string to its accent color.
    private var color: Color {
        let normalized = status.lowercased()
        if normalized == "ongoing" || normalized == "active" { return .green }
        if normalized == "completed" { return .blue }
        if normalized == "hiatus" { return .orange }
        return .secondary
    }

    var body: some View {
        HStack(spacing: 4) {
            dot
            Text(status.capitalized)
                .font(.caption.weight(.medium))
                .foregroundStyle(color)
        }
        .padding(.horizontal, 10)
        .padding(.vertical, 4)
        .background(color.opacity(0.12), in: Capsule())
    }

    private var dot: some View {
        Circle()
            .fill(color)
            .frame(width: 6, height: 6)
    }
}
|
||||
@@ -1,211 +0,0 @@
|
||||
import SwiftUI
|
||||
|
||||
/// Discover tab: search bar, filter chips, and an adaptive grid of novels
/// with infinite scroll. State lives in `BrowseViewModel`.
struct BrowseView: View {
    @StateObject private var vm = BrowseViewModel()
    // Controls presentation of the filters sheet.
    @State private var showFilters = false

    var body: some View {
        NavigationStack {
            VStack(spacing: 0) {
                // Search bar — submits on the keyboard's Search key.
                HStack {
                    Image(systemName: "magnifyingglass").foregroundStyle(.secondary)
                    TextField("Search novels...", text: $vm.searchQuery)
                        .textInputAutocapitalization(.never)
                        .autocorrectionDisabled()
                        .submitLabel(.search)
                        .onSubmit { Task { await vm.search() } }
                    if !vm.searchQuery.isEmpty {
                        Button { vm.clearSearch() } label: {
                            Image(systemName: "xmark.circle.fill").foregroundStyle(.secondary)
                        }
                    }
                }
                .padding(10)
                .background(Color(.systemGray6), in: RoundedRectangle(cornerRadius: 10))
                .padding(.horizontal)
                .padding(.vertical, 8)

                // Filter chips row — each chip opens the same filters sheet;
                // a chip renders "active" when its filter differs from default.
                ScrollView(.horizontal, showsIndicators: false) {
                    HStack(spacing: 8) {
                        FilterChip(label: "Sort: \(vm.sort.capitalized)", isActive: vm.sort != "popular") {
                            showFilters = true
                        }
                        FilterChip(label: "Genre: \(vm.genre == "all" ? "All" : vm.genre.capitalized)", isActive: vm.genre != "all") {
                            showFilters = true
                        }
                        FilterChip(label: "Status: \(vm.status == "all" ? "All" : vm.status.capitalized)", isActive: vm.status != "all") {
                            showFilters = true
                        }
                    }
                    .padding(.horizontal)
                }
                .padding(.bottom, 4)

                Divider()

                // Results: spinner on first load, empty/error state, or grid.
                if vm.isLoading && vm.novels.isEmpty {
                    ProgressView().frame(maxWidth: .infinity, maxHeight: .infinity)
                } else if vm.novels.isEmpty && !vm.isLoading {
                    VStack(spacing: 16) {
                        if let errMsg = vm.error {
                            Image(systemName: "wifi.slash")
                                .font(.largeTitle)
                                .foregroundStyle(.secondary)
                            Text(errMsg)
                                .multilineTextAlignment(.center)
                                .foregroundStyle(.secondary)
                                .padding(.horizontal)
                            Button("Retry") { Task { await vm.loadFirstPage() } }
                                .buttonStyle(.borderedProminent)
                                .tint(.amber)
                        } else {
                            EmptyStateView(icon: "magnifyingglass", title: "No results", message: "Try a different search or filter.")
                        }
                    }
                    .frame(maxWidth: .infinity, maxHeight: .infinity)
                } else {
                    ScrollView {
                        LazyVGrid(columns: [GridItem(.adaptive(minimum: 150), spacing: 12)], spacing: 16) {
                            ForEach(vm.novels) { novel in
                                NavigationLink(value: NavDestination.book(novel.slug)) {
                                    BrowseCard(novel: novel)
                                        .bookCoverZoomSource(slug: novel.slug)
                                }
                                .buttonStyle(.plain)
                            }

                            // Infinite scroll trigger — appearing at the end
                            // of the lazy grid kicks off the next page.
                            if vm.hasNext {
                                ProgressView()
                                    .frame(maxWidth: .infinity)
                                    .padding()
                                    .onAppear { Task { await vm.loadNextPage() } }
                            }
                        }
                        .padding()
                    }
                    .refreshable { await vm.loadFirstPage() }
                }
            }
            .navigationTitle("Discover")
            .appNavigationDestination()
            .sheet(isPresented: $showFilters) {
                BrowseFiltersView(vm: vm)
            }
            .task { await vm.loadFirstPage() }
            // Any filter change restarts from page 1.
            .onChange(of: vm.sort) { _, _ in Task { await vm.loadFirstPage() } }
            .onChange(of: vm.genre) { _, _ in Task { await vm.loadFirstPage() } }
            .onChange(of: vm.status) { _, _ in Task { await vm.loadFirstPage() } }
        }
    }
}
|
||||
|
||||
// MARK: - Filter chip

/// A capsule-shaped toggle button used in the Discover screen's filter bar.
/// Renders with the app's amber accent while its filter is applied.
private struct FilterChip: View {
    /// Text shown inside the chip.
    let label: String
    /// Whether this chip's filter is currently applied.
    let isActive: Bool
    /// Invoked on tap; the owner is expected to toggle/apply the filter.
    let action: () -> Void
    var body: some View {
        Button(action: action) {
            Text(label)
                .font(.caption.bold())
                .padding(.horizontal, 10)
                .padding(.vertical, 6)
                // Amber tint when active, neutral system gray otherwise.
                .background(isActive ? Color.amber.opacity(0.15) : Color(.systemGray6), in: Capsule())
                .foregroundStyle(isActive ? .amber : .primary)
                // 1pt amber outline only while active; `.clear` keeps layout stable.
                .overlay(Capsule().strokeBorder(isActive ? Color.amber : .clear, lineWidth: 1))
        }
    }
}
|
||||
|
||||
// MARK: - Browse card

/// One grid cell in the Discover grid: cover image with an optional rank
/// badge overlaid, followed by the title and (if present) a chapter count.
private struct BrowseCard: View {
    // Novel summary to render; fields used: cover, rank, title, chapters, slug (via caller).
    let novel: BrowseNovel
    var body: some View {
        VStack(alignment: .leading, spacing: 6) {
            ZStack(alignment: .topLeading) {
                AsyncCoverImage(url: novel.cover)
                    .frame(height: 200)
                    .clipShape(RoundedRectangle(cornerRadius: 10))
                // Rank badge pinned to the cover's top-leading corner;
                // hidden entirely when the source provides no rank string.
                if !novel.rank.isEmpty {
                    Text(novel.rank)
                        .font(.caption2.bold())
                        .padding(.horizontal, 6).padding(.vertical, 3)
                        .background(.ultraThinMaterial, in: Capsule())
                        .padding(6)
                }
            }
            Text(novel.title)
                .font(.caption.bold()).lineLimit(2)
            // Chapter-count line is omitted when the field is empty.
            if !novel.chapters.isEmpty {
                Text(novel.chapters).font(.caption2).foregroundStyle(.secondary)
            }
        }
    }
}
|
||||
|
||||
// MARK: - Filters sheet

/// Modal sheet for choosing the Discover feed's sort order, genre, and status.
/// Selecting any row writes directly to the shared `BrowseViewModel` and
/// dismisses the sheet; the Discover screen reloads via its `onChange` hooks.
struct BrowseFiltersView: View {
    @ObservedObject var vm: BrowseViewModel
    @Environment(\.dismiss) private var dismiss

    // Option values are the raw strings the scraper API expects; display
    // text is derived via `.capitalized` at render time.
    let sortOptions = ["popular", "new", "updated", "rating", "rank"]
    let genreOptions = ["all", "action", "fantasy", "romance", "sci-fi", "mystery",
                        "horror", "comedy", "drama", "adventure", "martial arts",
                        "cultivation", "magic", "supernatural", "historical", "slice of life"]
    let statusOptions = ["all", "ongoing", "completed"]

    var body: some View {
        NavigationStack {
            Form {
                Section("Sort") {
                    ForEach(sortOptions, id: \.self) { opt in
                        optionRow(opt.capitalized, isSelected: vm.sort == opt) { vm.sort = opt }
                    }
                }
                Section("Genre") {
                    ForEach(genreOptions, id: \.self) { opt in
                        // "all" is a sentinel meaning "no genre filter".
                        optionRow(opt == "all" ? "All Genres" : opt.capitalized,
                                  isSelected: vm.genre == opt) { vm.genre = opt }
                    }
                }
                Section("Status") {
                    ForEach(statusOptions, id: \.self) { opt in
                        optionRow(opt.capitalized, isSelected: vm.status == opt) { vm.status = opt }
                    }
                }
            }
            .navigationTitle("Filters")
            .navigationBarTitleDisplayMode(.inline)
            .toolbar {
                ToolbarItem(placement: .topBarTrailing) {
                    Button("Done") { dismiss() }
                }
            }
        }
        .presentationDetents([.medium, .large])
    }

    /// One selectable row: label with a trailing amber checkmark when selected.
    /// Tapping anywhere in the row applies `select` and closes the sheet.
    /// (Extracted to remove the row markup previously triplicated per section.)
    @ViewBuilder
    private func optionRow(_ title: String,
                           isSelected: Bool,
                           select: @escaping () -> Void) -> some View {
        HStack {
            Text(title)
            Spacer()
            if isSelected { Image(systemName: "checkmark").foregroundStyle(.amber) }
        }
        // Make the full row width tappable, not just the text/icon.
        .contentShape(Rectangle())
        .onTapGesture { select(); dismiss() }
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user