Compare commits

254 Commits (a47cc0e711 … a50b968b95 — the compare table contained bare SHA1 values only; the author, message, and date columns were empty)
@@ -2,20 +2,11 @@ name: CI

on:
  push:
    branches: ["main", "master"]
    tags-ignore:
      - "v*"
    paths:
      - "backend/**"
      - "ui/**"
      - "caddy/**"
      - "docker-compose.yml"
      - ".gitea/workflows/ci.yaml"
  pull_request:
    branches: ["main", "master"]
    paths:
      - "backend/**"
      - "ui/**"
      - "caddy/**"
      - "docker-compose.yml"
      - ".gitea/workflows/ci.yaml"

concurrency:
@@ -23,10 +14,13 @@ concurrency:
  cancel-in-progress: true

jobs:
  # ── backend: vet & test ───────────────────────────────────────────────────────
  test-backend:
    name: Test backend
  # ── Go: vet + build + test ────────────────────────────────────────────────
  backend:
    name: Backend
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: backend
    steps:
      - uses: actions/checkout@v4

@@ -36,16 +30,23 @@ jobs:
          cache-dependency-path: backend/go.sum

      - name: go vet
        working-directory: backend
        run: go vet ./...

      - name: Build backend
        run: go build -o /dev/null ./cmd/backend

      - name: Build runner
        run: go build -o /dev/null ./cmd/runner

      - name: Build healthcheck
        run: go build -o /dev/null ./cmd/healthcheck

      - name: Run tests
        working-directory: backend
        run: go test -short -race -count=1 -timeout=60s ./...

  # ── ui: type-check & build ────────────────────────────────────────────────────
  check-ui:
    name: Check ui
  # ── UI: type-check + build ────────────────────────────────────────────────
  ui:
    name: UI
    runs-on: ubuntu-latest
    defaults:
      run:
@@ -62,62 +63,11 @@ jobs:
      - name: Install dependencies
        run: npm ci

      - name: Check Paraglide codegen is up to date
        run: npm run paraglide && git diff --exit-code src/lib/paraglide/

      - name: Type check
        run: npm run check

      - name: Build
        run: npm run build

  # ── docker: validate Dockerfiles build (no push) ──────────────────────────────
  docker-backend:
    name: Docker / backend
    runs-on: ubuntu-latest
    needs: [test-backend]
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: backend
          target: backend
          push: false

  docker-runner:
    name: Docker / runner
    runs-on: ubuntu-latest
    needs: [test-backend]
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: backend
          target: runner
          push: false

  docker-ui:
    name: Docker / ui
    runs-on: ubuntu-latest
    needs: [check-ui]
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: ui
          push: false

  docker-caddy:
    name: Docker / caddy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: docker/setup-buildx-action@v3
      - name: Build
        uses: docker/build-push-action@v6
        with:
          context: caddy
          push: false

@@ -55,24 +55,122 @@ jobs:
      - name: Build
        run: npm run build

  # ── docker: backend ───────────────────────────────────────────────────────────
  docker-backend:
    name: Docker / backend
      - name: Upload build artifacts
        uses: actions/upload-artifact@v3
        with:
          name: ui-build
          path: ui/build
          retention-days: 1

  # ── ui: source map upload ─────────────────────────────────────────────────────
  # Commented out — re-enable when GlitchTip source map uploads are needed again.
  #
  # upload-sourcemaps:
  #   name: Upload source maps
  #   runs-on: ubuntu-latest
  #   needs: [check-ui]
  #   steps:
  #     - name: Compute release version (strip leading v)
  #       id: ver
  #       run: |
  #         V="${{ gitea.ref_name }}"
  #         echo "version=${V#v}" >> "$GITHUB_OUTPUT"
  #
  #     - name: Download build artifacts
  #       uses: actions/download-artifact@v3
  #       with:
  #         name: ui-build
  #         path: build
  #
  #     - name: Install sentry-cli
  #       run: npm install -g @sentry/cli
  #
  #     - name: Inject debug IDs into build artifacts
  #       run: sentry-cli sourcemaps inject ./build
  #       env:
  #         SENTRY_URL: https://errors.libnovel.cc/
  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
  #         SENTRY_ORG: libnovel
  #         SENTRY_PROJECT: ui
  #
  #     - name: Upload injected build (for docker-ui)
  #       uses: actions/upload-artifact@v3
  #       with:
  #         name: ui-build-injected
  #         path: build
  #         retention-days: 1
  #
  #     - name: Create GlitchTip release
  #       run: sentry-cli releases new ${{ steps.ver.outputs.version }}
  #       env:
  #         SENTRY_URL: https://errors.libnovel.cc/
  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
  #         SENTRY_ORG: libnovel
  #         SENTRY_PROJECT: ui
  #
  #     - name: Upload source maps to GlitchTip
  #       run: sentry-cli sourcemaps upload ./build --release ${{ steps.ver.outputs.version }}
  #       env:
  #         SENTRY_URL: https://errors.libnovel.cc/
  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
  #         SENTRY_ORG: libnovel
  #         SENTRY_PROJECT: ui
  #
  #     - name: Finalize GlitchTip release
  #       run: sentry-cli releases finalize ${{ steps.ver.outputs.version }}
  #       env:
  #         SENTRY_URL: https://errors.libnovel.cc/
  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
  #         SENTRY_ORG: libnovel
  #         SENTRY_PROJECT: ui
  #
  #     - name: Prune old GlitchTip releases (keep latest 10)
  #       run: |
  #         set -euo pipefail
  #         KEEP=10
  #         OLD=$(curl -sf \
  #           -H "Authorization: Bearer $SENTRY_AUTH_TOKEN" \
  #           "$SENTRY_URL/api/0/organizations/$SENTRY_ORG/releases/?project=$SENTRY_PROJECT&per_page=100" \
  #           | python3 -c "
  #         import sys, json
  #         releases = json.load(sys.stdin)
  #         for r in releases[$KEEP:]:
  #             print(r['version'])
  #         " KEEP=$KEEP)
  #         for ver in $OLD; do
  #           echo "Deleting old release: $ver"
  #           sentry-cli releases delete "$ver" || true
  #         done
  #       env:
  #         SENTRY_URL: https://errors.libnovel.cc
  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
  #         SENTRY_ORG: libnovel
  #         SENTRY_PROJECT: ui

  # ── docker: all images in one job (single login) ──────────────────────────────
  # backend, runner, ui, and caddy are built sequentially in one job so Docker
  # Hub only needs to be authenticated once. This also eliminates 3 redundant
  # checkout + setup-buildx + scheduler round-trips compared to separate jobs.
  docker:
    name: Docker
    runs-on: ubuntu-latest
    needs: [test-backend]
    needs: [test-backend, check-ui]
    steps:
      - uses: actions/checkout@v4

      - uses: docker/setup-buildx-action@v3

      # Single login — credential is written to ~/.docker/config.json and
      # reused by all subsequent build-push-action steps in this job.
      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Docker meta
        id: meta
      # ── backend ──────────────────────────────────────────────────────────────
      - name: Docker meta / backend
        id: meta-backend
        uses: docker/metadata-action@v5
        with:
          images: ${{ secrets.DOCKER_USER }}/libnovel-backend
@@ -81,38 +179,23 @@ jobs:
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest

      - name: Build and push
      - name: Build and push / backend
        uses: docker/build-push-action@v6
        with:
          context: backend
          target: backend
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ${{ steps.meta-backend.outputs.tags }}
          labels: ${{ steps.meta-backend.outputs.labels }}
          build-args: |
            VERSION=${{ steps.meta.outputs.version }}
            VERSION=${{ steps.meta-backend.outputs.version }}
            COMMIT=${{ gitea.sha }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-backend:latest
          cache-to: type=inline

  # ── docker: runner ────────────────────────────────────────────────────────────
  docker-runner:
    name: Docker / runner
    runs-on: ubuntu-latest
    needs: [test-backend]
    steps:
      - uses: actions/checkout@v4

      - uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Docker meta
        id: meta
      # ── runner ───────────────────────────────────────────────────────────────
      - name: Docker meta / runner
        id: meta-runner
        uses: docker/metadata-action@v5
        with:
          images: ${{ secrets.DOCKER_USER }}/libnovel-runner
@@ -121,67 +204,34 @@ jobs:
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest

      - name: Build and push
      - name: Build and push / runner
        uses: docker/build-push-action@v6
        with:
          context: backend
          target: runner
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ${{ steps.meta-runner.outputs.tags }}
          labels: ${{ steps.meta-runner.outputs.labels }}
          build-args: |
            VERSION=${{ steps.meta.outputs.version }}
            VERSION=${{ steps.meta-runner.outputs.version }}
            COMMIT=${{ gitea.sha }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-runner:latest
          cache-to: type=inline

  # ── ui: source map upload ─────────────────────────────────────────────────────
  # Builds the UI with source maps and uploads them to GlitchTip so that error
  # stack traces resolve to original .svelte/.ts file names and line numbers.
  # Runs in parallel with docker-ui (both need check-ui to pass first).
  upload-sourcemaps:
    name: Upload source maps
    runs-on: ubuntu-latest
    needs: [check-ui]
    defaults:
      run:
        working-directory: ui
    steps:
      - uses: actions/checkout@v4

      - uses: actions/setup-node@v4
      # ── ui ───────────────────────────────────────────────────────────────────
      - name: Download ui build artifacts
        uses: actions/download-artifact@v3
        with:
          node-version: "22"
          cache: npm
          cache-dependency-path: ui/package-lock.json
          name: ui-build
          path: ui/build

      - name: Install dependencies
        run: npm ci
      - name: Allow build/ into Docker context (override .dockerignore)
        run: |
          grep -v '^build$' ui/.dockerignore > ui/.dockerignore.tmp
          mv ui/.dockerignore.tmp ui/.dockerignore

      - name: Build with source maps and upload to GlitchTip
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
          BUILD_VERSION: ${{ gitea.ref_name }}
        run: npm run build

  # ── docker: ui ────────────────────────────────────────────────────────────────
  docker-ui:
    name: Docker / ui
    runs-on: ubuntu-latest
    needs: [check-ui]
    steps:
      - uses: actions/checkout@v4

      - uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Docker meta
        id: meta
      - name: Docker meta / ui
        id: meta-ui
        uses: docker/metadata-action@v5
        with:
          images: ${{ secrets.DOCKER_USER }}/libnovel-ui
@@ -190,36 +240,24 @@ jobs:
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest

      - name: Build and push
      - name: Build and push / ui
        uses: docker/build-push-action@v6
        with:
          context: ui
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ${{ steps.meta-ui.outputs.tags }}
          labels: ${{ steps.meta-ui.outputs.labels }}
          build-args: |
            BUILD_VERSION=${{ steps.meta.outputs.version }}
            BUILD_VERSION=${{ steps.meta-ui.outputs.version }}
            BUILD_COMMIT=${{ gitea.sha }}
            BUILD_TIME=${{ gitea.event.head_commit.timestamp }}
            PREBUILT=1
          cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-ui:latest
          cache-to: type=inline

  # ── docker: caddy ─────────────────────────────────────────────────────────────
  docker-caddy:
    name: Docker / caddy
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: docker/setup-buildx-action@v3

      - name: Log in to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Docker meta
        id: meta
      # ── caddy ────────────────────────────────────────────────────────────────
      - name: Docker meta / caddy
        id: meta-caddy
        uses: docker/metadata-action@v5
        with:
          images: ${{ secrets.DOCKER_USER }}/libnovel-caddy
@@ -228,13 +266,13 @@ jobs:
            type=semver,pattern={{major}}.{{minor}}
            type=raw,value=latest

      - name: Build and push
      - name: Build and push / caddy
        uses: docker/build-push-action@v6
        with:
          context: caddy
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          tags: ${{ steps.meta-caddy.outputs.tags }}
          labels: ${{ steps.meta-caddy.outputs.labels }}
          cache-from: type=registry,ref=${{ secrets.DOCKER_USER }}/libnovel-caddy:latest
          cache-to: type=inline

@@ -242,14 +280,31 @@ jobs:
  release:
    name: Gitea Release
    runs-on: ubuntu-latest
    needs: [docker-backend, docker-runner, docker-ui, docker-caddy, upload-sourcemaps]
    needs: [docker]
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Extract release notes from tag commit
        id: notes
        run: |
          set -euo pipefail
          # Subject line (first line of commit message) → release title
          SUBJECT=$(git log -1 --format="%s" "${{ gitea.sha }}")
          # Body (everything after the blank line) → release body
          BODY=$(git log -1 --format="%b" "${{ gitea.sha }}" | sed '/^Co-Authored-By:/d' | sed '/^[[:space:]]*$/{ N; /^\n$/d }' | sed 's/^[[:space:]]*$//' | awk 'NF || !p; {p = !NF}')
          echo "title=${SUBJECT}" >> "$GITHUB_OUTPUT"
          # Use a heredoc delimiter to safely handle multi-line body
          {
            echo "body<<RELEASE_BODY_EOF"
            echo "${BODY}"
            echo "RELEASE_BODY_EOF"
          } >> "$GITHUB_OUTPUT"

      - name: Create release
        uses: actions/gitea-release-action@v1
        uses: https://gitea.com/actions/gitea-release-action@v1
        with:
          token: ${{ secrets.GITEA_TOKEN }}
          generate_release_notes: true
          title: ${{ steps.notes.outputs.title }}
          body: ${{ steps.notes.outputs.body }}
14  .githooks/pre-commit  Executable file
@@ -0,0 +1,14 @@
#!/usr/bin/env bash
# Auto-recompile paraglide messages when ui/messages/*.json files are staged.
# Prevents svelte-check / CI failures caused by stale generated JS files.

set -euo pipefail

STAGED=$(git diff --cached --name-only)

if echo "$STAGED" | grep -q '^ui/messages/'; then
  echo "[pre-commit] ui/messages/*.json changed — recompiling paraglide..."
  (cd ui && npm run paraglide --silent)
  git add -f ui/src/lib/paraglide/messages/
  echo "[pre-commit] paraglide output re-staged."
fi
2  .gitignore  vendored
@@ -6,6 +6,8 @@

# ── Compiled binaries ──────────────────────────────────────────────────────────
backend/bin/
backend/backend
backend/runner

# ── Environment & secrets ──────────────────────────────────────────────────────
# Secrets are managed by Doppler — never commit .env files.

@@ -1,156 +0,0 @@
|
||||
---
|
||||
name: ios-ux
|
||||
description: iOS/SwiftUI UI & UX review and implementation guidelines for LibNovel. Enforces Apple HIG, iOS 17+ APIs, spring animations, haptics, accessibility, performance, and offline handling. Load this skill for any iOS view work.
|
||||
compatibility: opencode
|
||||
---
|
||||
|
||||
# iOS UI/UX Skill — LibNovel
|
||||
|
||||
Load this skill whenever working on SwiftUI views in `ios/`. It defines design standards, review process for screenshots, and implementation rules.
|
||||
|
||||
---
|
||||
|
||||
## Screenshot Review Process
|
||||
|
||||
When the user provides a screenshot of the app:
|
||||
|
||||
1. **Analyze first** — identify specific UI/UX issues across these categories:
|
||||
- Visual hierarchy and spacing
|
||||
- Typography (size, weight, contrast)
|
||||
- Color and material usage
|
||||
- Animation and interactivity gaps
|
||||
- Accessibility problems
|
||||
- Deprecated or non-native patterns
|
||||
2. **Present a numbered list** of suggested improvements with brief rationale for each.
|
||||
3. **Ask for confirmation** before writing any code: "Should I apply all of these, or only specific ones?"
|
||||
4. Apply only what the user confirms.
|
||||
|
||||
---
|
||||
|
||||
## Design System
|
||||
|
||||
### Colors & Materials
|
||||
- **Accent**: `Color.amber` (project-defined). Use for active state, selection indicators, progress fills, and CTAs.
|
||||
- **Backgrounds**: Prefer `.regularMaterial`, `.ultraThinMaterial`, or `.thinMaterial` over hard-coded `Color.black.opacity(x)` or `Color(.systemBackground)`.
|
||||
- **Dark overlays** (e.g. full-screen players): Use `KFImage` blurred background + `Color.black.opacity(0.5–0.6)` overlay. Never use a flat solid black background.
|
||||
- **Semantic colors**: Use `.primary`, `.secondary`, `.tertiary` foreground styles. Avoid hard-coded `Color.white` except on dark material contexts (full-screen player).
|
||||
- **No hardcoded color literals** — use `Color+App.swift` extensions or system semantic colors.
|
||||
|
||||
### Typography
|
||||
- Use the SF Pro system font via `.font(.title)`, `.font(.body)`, etc. — never hardcode font names except for intentional stylistic accents (e.g. "Snell Roundhand" for voice watermark).
|
||||
- Apply `.fontWeight()` and `.fontDesign()` modifiers rather than custom font families.
|
||||
- Support Dynamic Type — never hardcode a fixed font size as the sole option without a `.minimumScaleFactor` or system font size modifier.
|
||||
- Hierarchy: title3.bold for primary labels, subheadline for secondary, caption/caption2 for metadata.
|
||||
|
||||
### Spacing & Layout
|
||||
- Minimum touch target: **44×44 pt**. Use `.frame(minWidth: 44, minHeight: 44)` or `.contentShape(Rectangle())` on small icons.
|
||||
- Prefer 16–20 pt horizontal padding on full-width containers; 12 pt for compact inner elements.
|
||||
- Use `VStack(spacing:)` and `HStack(spacing:)` explicitly — never rely on default spacing for production UI.
|
||||
- Corner radii: 12–14 pt for cards/chips, 10 pt for small badges, 20–24 pt for large cover art.
|
||||
|
||||
---
|
||||
|
||||
## Animation Rules
|
||||
|
||||
### Spring Animations (default for all interactive transitions)
|
||||
- Use `.spring(response:dampingFraction:)` for state-driven layout changes, selection feedback, and appear/disappear transitions.
|
||||
- Recommended defaults:
|
||||
- Interactive elements: `response: 0.3, dampingFraction: 0.7`
|
||||
- Entrance animations: `response: 0.45–0.5, dampingFraction: 0.7`
|
||||
- Quick snappy feedback: `response: 0.2, dampingFraction: 0.6`
|
||||
- Reserve `.easeInOut` only for non-interactive, ambient animations (e.g. opacity pulses, generating overlays).
|
||||
|
||||
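
A minimal sketch of the interactive default above. The chip itself is hypothetical, and `Color.orange` stands in for the project-defined `Color.amber` so the snippet compiles on its own:

```swift
import SwiftUI

// Hypothetical selection chip; only the spring parameters come from the
// recommended defaults above.
struct GenreChip: View {
    @State private var isSelected = false

    var body: some View {
        Text("Fantasy")
            .padding(.horizontal, 12)
            .padding(.vertical, 8)
            .background(isSelected ? Color.orange.opacity(0.3) : .clear, in: Capsule())
            .scaleEffect(isSelected ? 1.05 : 1.0)
            // Interactive default: response 0.3, dampingFraction 0.7.
            .animation(.spring(response: 0.3, dampingFraction: 0.7), value: isSelected)
            .onTapGesture { isSelected.toggle() }
    }
}
```
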
### SF Symbol Transitions
- Always use `contentTransition(.symbolEffect(.replace.downUp))` when a symbol name changes based on state (play/pause, checkmark/circle, etc.) — see the sketch after this list.
- Use `.symbolEffect(.variableColor.cumulative)` for continuous animations (waveform, loading indicators).
- Use `.symbolEffect(.bounce)` for one-shot entrance emphasis (e.g. a completion checkmark appearing).
- Use `.symbolEffect(.pulse)` for error/warning states that need attention.
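
A sketch of the play/pause case from the first bullet; the button is hypothetical, the modifiers are the ones named above:

```swift
import SwiftUI

struct PlayPauseButton: View {
    @State private var isPlaying = false

    var body: some View {
        Button {
            isPlaying.toggle()
        } label: {
            Image(systemName: isPlaying ? "pause.fill" : "play.fill")
                .font(.title)
                // Animates the glyph swap itself instead of cross-fading
                // two unrelated images.
                .contentTransition(.symbolEffect(.replace.downUp))
        }
        .animation(.spring(response: 0.3, dampingFraction: 0.7), value: isPlaying)
    }
}
```
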
### Repeating Animations
- Use `phaseAnimator` for any looping animation that previously used manual `@State` + `withAnimation` chains (see the sketch below).
- Do not use `Timer` publishers for UI animation — prefer `phaseAnimator` or `TimelineView`.
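
A sketch of a looping ambient pulse with `phaseAnimator` (iOS 17+); the waveform indicator is hypothetical:

```swift
import SwiftUI

// An ambient "generating…" pulse. phaseAnimator cycles through the listed
// phases forever — no Timer, no manual @State/withAnimation chain.
struct GeneratingWaveform: View {
    var body: some View {
        Image(systemName: "waveform")
            .phaseAnimator([0.4, 1.0]) { view, phase in
                view.opacity(phase)
            } animation: { _ in
                // Non-interactive and ambient, so easeInOut is appropriate.
                .easeInOut(duration: 0.8)
            }
    }
}
```
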
---

## Haptic Feedback

Add `UIImpactFeedbackGenerator` to every user-initiated interactive control:
- `.light` — toggle switches, selection chips, secondary actions, slider drag start.
- `.medium` — primary transport buttons (play/pause, chapter skip), significant confirmations.
- `.heavy` — destructive actions (only if there is no confirmation dialog).

Pattern:

```swift
Button {
    UIImpactFeedbackGenerator(style: .light).impactOccurred()
    // action
} label: { ... }
```

Do **not** add haptics to:
- Programmatic state changes not directly triggered by a tap.
- Buttons inside `List` rows that already use swipe actions.
- Scroll events.

---

## iOS 17+ API Usage

Flag and replace any of the following deprecated patterns (a before/after sketch follows the table):

| Deprecated | Replace with |
|---|---|
| `NavigationView` | `NavigationStack` |
| `@StateObject` / `ObservableObject` (new types only) | `@Observable` macro |
| `DispatchQueue.main.async` | `await MainActor.run` or `@MainActor` |
| Manual `@State` animation chains for repeating loops | `phaseAnimator` |
| `.animation(_:)` without `value:` | `.animation(_:value:)` |
| `AnyView` wrapping for conditional content | `@ViewBuilder` + `Group` |
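
A sketch combining several of the replacements from the table; the `PlayerModel` type and its property are hypothetical, only the APIs in the right-hand column come from the table:

```swift
import SwiftUI
import Observation

// @Observable (Observation framework, iOS 17+) for NEW model types only,
// per the note below.
@Observable final class PlayerModel {
    var isPlaying = false
}

struct PlayerScreen: View {
    @State private var model = PlayerModel()

    var body: some View {
        NavigationStack {                       // not NavigationView
            Text(model.isPlaying ? "Playing" : "Paused")
                // .animation is always paired with a value: trigger.
                .animation(.spring(response: 0.3, dampingFraction: 0.7),
                           value: model.isPlaying)
                .navigationTitle("Player")
        }
        .task { @MainActor in                   // not DispatchQueue.main.async
            model.isPlaying = true
        }
    }
}
```
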
Do **not** refactor existing `ObservableObject` types to `@Observable` unless explicitly asked — only apply `@Observable` to new types.

---

## Accessibility

Every view must:
- Support VoiceOver: add `.accessibilityLabel()` to icon-only buttons and image views.
- Support Dynamic Type: test that text doesn't truncate at xxxLarge without a layout adjustment.
- Meet contrast ratios: text on tinted backgrounds must be legible — avoid `.opacity(0.25)` or lower for any user-readable text.
- Touch targets ≥ 44 pt (see Spacing above).
- Interactive controls must have `.accessibilityAddTraits(.isButton)` if not using `Button`.
- Do not rely solely on color to convey state — pair color with an icon or label.

---

## Performance

- **Isolate high-frequency observers**: Any view that observes a `PlaybackProgress` (timer-tick updates) must be a separate sub-view that `@ObservedObject`-observes only the progress object — not the parent view. This prevents the entire parent from re-rendering every 0.5 seconds (see the sketch after this list).
- **Avoid `id()` overuse**: Only use `.id()` to force view recreation when necessary (e.g. the background image on track change). Prefer `onChange(of:)` for side effects.
- **Lazy containers**: Use `LazyVStack` / `LazyHStack` inside `ScrollView` for lists of 20+ items. `List` is inherently lazy and does not need this.
- **Image loading**: Always use `KFImage` (Kingfisher) with a `.placeholder` for remote images. Never use `AsyncImage` for cover art — it has no disk cache.
- **Avoid `AnyView`**: It breaks structural identity and hurts diffing. Use `@ViewBuilder` or `Group { }` instead.
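
A sketch of the isolation pattern from the first bullet. The `PlaybackProgress` shape shown here is an assumed stand-in for the project's type:

```swift
import SwiftUI
import Combine

// Stand-in: a model whose `fraction` the player updates every 0.5 s.
final class PlaybackProgress: ObservableObject {
    @Published var fraction: Double = 0
}

// Leaf view — the ONLY thing that re-renders on each tick.
struct ProgressBarView: View {
    @ObservedObject var progress: PlaybackProgress

    var body: some View {
        ProgressView(value: progress.fraction)
    }
}

// The parent holds a plain reference — no @ObservedObject — so the chapter
// title, artwork, etc. are not diffed twice a second.
struct NowPlayingView: View {
    let progress: PlaybackProgress

    var body: some View {
        VStack(spacing: 16) {
            Text("Chapter 12")          // unaffected by progress ticks
            ProgressBarView(progress: progress)
        }
    }
}
```
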
---

## Offline & Error States

Every view that makes network calls must (see the sketch after this list):
1. Wrap the body in a `VStack` with `OfflineBanner` at the top, gated on `networkMonitor.isConnected`.
2. Suppress network errors silently when offline via `ErrorAlertModifier` — do not show an alert when the device is offline.
3. Gate `.task` / `.onAppear` network calls: `guard networkMonitor.isConnected else { return }`.
4. Show a non-blocking inline empty state (not a full-screen error) for failed loads when online.
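
A sketch of rules 1 and 3, using assumed minimal stand-ins for the project's `NetworkMonitor` and `OfflineBanner` (their real shapes may differ; `fetchBooks` is hypothetical):

```swift
import SwiftUI

final class NetworkMonitor: ObservableObject {
    @Published var isConnected = true
}

struct OfflineBanner: View {
    var body: some View {
        Text("You're offline")
            .frame(maxWidth: .infinity)
            .padding(8)
            .background(.yellow.opacity(0.3))
    }
}

func fetchBooks() async throws -> [String] { [] } // hypothetical loader

struct BookListView: View {
    @EnvironmentObject var networkMonitor: NetworkMonitor
    @State private var books: [String] = []

    var body: some View {
        VStack(spacing: 0) {
            if !networkMonitor.isConnected {
                OfflineBanner()                               // rule 1
            }
            List(books, id: \.self) { Text($0) }
        }
        .task {
            guard networkMonitor.isConnected else { return }  // rule 3
            books = (try? await fetchBooks()) ?? []
        }
    }
}
```
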
---

## Component Checklist (before submitting any view change)

- [ ] All interactive elements ≥ 44 pt touch target
- [ ] SF Symbol state changes use `contentTransition(.symbolEffect(...))`
- [ ] State-driven layout transitions use `.spring(response:dampingFraction:)`
- [ ] Tappable controls have haptic feedback
- [ ] No `NavigationView`, no `DispatchQueue.main.async`, no `.animation(_:)` without `value:`
- [ ] High-frequency observers are isolated sub-views
- [ ] Offline state handled with `OfflineBanner` + `NetworkMonitor`
- [ ] VoiceOver labels on icon-only buttons
- [ ] No hardcoded `Color.black` / `Color.white` / `Color(.systemBackground)` where a material applies
193  AGENTS.md  Normal file
@@ -0,0 +1,193 @@
# LibNovel v2 — Agent Context

This file is the root-level knowledge base for LLM coding agents (OpenCode, Claude, Cursor, Copilot, etc.).
Sub-directories have their own `AGENTS.md` with deeper context (e.g. `ios/AGENTS.md`).

---

## Stack

| Layer | Technology |
|---|---|
| UI | SvelteKit 2 + Svelte 5, TypeScript, TailwindCSS |
| Backend / Runner | Go (single repo, two binaries: `backend`, `runner`) |
| iOS app | SwiftUI, iOS 17+, Swift 5.9+ |
| Database | PocketBase (SQLite) + MinIO (object storage) |
| Search | Meilisearch |
| Queue | Asynq over Redis (local) / Valkey (prod) |
| Scraping | Novelfire scraper in `backend/novelfire/` |

---

## Repository Layout

```
.
├── .gitea/workflows/    # CI/CD — Gitea Actions (NOT .github/)
├── .opencode/           # OpenCode agent config (memory, skills)
├── backend/             # Go backend + runner (single module)
├── caddy/               # Caddy reverse proxy Dockerfile
├── homelab/             # Homelab docker-compose + observability stack
├── ios/                 # SwiftUI iOS app (see ios/AGENTS.md)
├── scripts/             # Utility scripts
├── ui/                  # SvelteKit UI
├── docker-compose.yml   # Prod compose (all services)
├── AGENTS.md            # This file
└── opencode.json        # OpenCode config
```

---

## CI/CD — Gitea Actions

- Workflows live in `.gitea/workflows/` — **not** `.github/workflows/`
- Self-hosted Gitea instance; use `gitea.ref_name` / `gitea.sha` (not `github.*`)
- Two workflows:
  - `ci.yaml` — runs on every push to `main` (test + type-check)
  - `release.yaml` — runs on `v*` tags (build Docker images, upload source maps, create Gitea release)
- Secrets: `DOCKER_USER`, `DOCKER_TOKEN`, `GITEA_TOKEN`, `GLITCHTIP_AUTH_TOKEN`

### Git credentials

Credentials are embedded in the remote URL — no `HOME=/root` or credential helper is needed for push:

```
https://kamil:95782641Apple%24@gitea.kalekber.cc/kamil/libnovel.git
```

All git commands still use the `HOME=/root` prefix for consistency (it picks up `/root/.gitconfig` for user name/email), but push auth works without it.

### Releasing a new version

```bash
HOME=/root git tag v2.6.X -m "Short title"
HOME=/root git push origin v3-cleanup --tags
```

CI will build all Docker images, upload source maps to GlitchTip, and create a Gitea release automatically.

---

## GlitchTip Error Tracking

- Instance: `https://errors.libnovel.cc/`
- Org: `libnovel`
- Projects: `ui` (id/1), `backend` (id/2), `runner` (id/3)
- Tool: `glitchtip-cli` v0.1.0

### Per-service DSNs (stored in Doppler)

| Service | Doppler key | GlitchTip project |
|---|---|---|
| UI (SvelteKit) | `PUBLIC_GLITCHTIP_DSN` | ui (1) |
| Backend (Go) | `GLITCHTIP_DSN_BACKEND` | backend (2) |
| Runner (Go) | `GLITCHTIP_DSN_RUNNER` | runner (3) |

### Source map upload flow (release.yaml)

The correct order is **critical** — uploading before `releases new` results in 0 files shown in the GlitchTip UI:

```
glitchtip-cli sourcemaps inject ./build      # inject debug IDs
glitchtip-cli releases new <version>         # MUST come before upload
glitchtip-cli sourcemaps upload ./build \
  --release <version>                        # associate files with release
glitchtip-cli releases finalize <version>    # mark release complete
```

---

## Infrastructure

| Environment | Host | Path | Doppler config |
|---|---|---|---|
| Prod | `165.22.70.138` | `/opt/libnovel/` | `prd` |
| Homelab runner | `192.168.0.109` | `/opt/libnovel-runner/` | `prd_homelab` |

### Docker Compose — always use Doppler

```bash
# Prod
doppler run --project libnovel --config prd -- docker compose <cmd>

# Homelab full-stack (runs from .bak file on server)
doppler run --project libnovel --config prd_homelab -- docker compose -f homelab/docker-compose.yml.bak <cmd>

# Homelab runner only
doppler run --project libnovel --config prd_homelab -- docker compose -f homelab/runner/docker-compose.yml <cmd>
```

- Prod runner has `profiles: [runner]` — `docker compose up -d` will NOT accidentally start it
- When deploying, always sync `docker-compose.yml` to the server before running `up -d`
- **Caddyfile is NOT in git** — it lives at `/opt/libnovel/Caddyfile` on the prod server only. Edit directly on the server and restart the `caddy` container.

---

## Observability

| Tool | Purpose |
|---|---|
| GlitchTip | Error tracking (UI + backend + runner) |
| Grafana Faro | RUM / Web Vitals (collector at `faro.libnovel.cc/collect`) → Alloy (port 12347) |
| OpenTelemetry | Distributed tracing (OTLP → cloudflared → OTel collector → Tempo) |
| Grafana | Dashboards at `https://grafana.libnovel.cc` |

### Grafana dashboards: `homelab/otel/grafana/provisioning/dashboards/`

Key dashboards:
- `backend.json` — Backend logs (Loki: `{service_name="backend"}`, plain text)
- `runner.json` — Runner logs (Loki: `{service_name="runner"}`) + Asynq Prometheus metrics
- `web-vitals.json` — Web Vitals (Loki: `{service_name="unknown_service"} kind=measurement` + pattern parser)
- `catalogue.json` — Scrape progress (Loki: `{service_name="runner"} | json | body="..."`)

### Data pipeline (2026-04-07 working state)

**Browser → Grafana Faro:**
Browser sends RUM data → `https://faro.libnovel.cc/collect` → **Alloy** `faro.receiver` (port 12347) → Loki (logs/exceptions) + OTel collector → **Tempo** (traces)

**Backend/Runner → OTel:**
Backend/Runner Go SDK → `https://otel.libnovel.cc` (cloudflared tunnel) → **OTel collector** (port 4318) → Tempo (traces) + Loki (logs via `otlphttp/loki` exporter)
Runner also sends to **Alloy** `otelcol.receiver.otlp` (port 4318) → `otelcol.exporter.loki` → Loki

### Loki log format per service

- `service_name="backend"`: plain text (e.g. `backend: asynq task dispatch enabled`)
- `service_name="runner"`: JSON with `body`, `attributes{slug,chapters,page}`, `severity`
- `service_name="unknown_service"`: Faro RUM text format (e.g. `kind=measurement lcp=5428.0 ...`)

### OTel Collector ports (homelab)

- gRPC: `4317` — receives from cloudflared (`otel.libnovel.cc`)
- HTTP: `4318` — receives from cloudflared + Alloy
- Metrics: `8888`

### Known issues / pending fixes

- Web Vitals use `service_name="unknown_service"` (the Faro SDK doesn't set `service.name` in the browser) — works with the `unknown_service` label
- Runner logs go to both Alloy→Loki AND OTel collector→Loki (dual pipeline — intentional, for resilience)

---

## Go Backend

- Primary files: `orchestrator.go`, `server/handlers_*.go`, `novelfire/scraper.go`, `storage/hybrid.go`, `storage/pocketbase.go`
- Store interface: `store.go` — never touch MinIO/PocketBase clients directly outside `storage/`
- Two binaries built from the same module: `backend` (HTTP API) and `runner` (Asynq worker)

---

## SvelteKit UI

- Source: `ui/src/`
- i18n: Paraglide — translation files in `ui/messages/*.json` (5 locales)
- Auth debug bypass: `GET /api/auth/debug-login?token=<DEBUG_LOGIN_TOKEN>&username=<username>&next=<path>`

---

## iOS App

Full context in `ios/AGENTS.md`. Quick notes:
- SwiftUI, iOS 17+, `@Observable` for new types
- Download key separator: `::` (not `-`)
- Voice fallback: book override → global default → `"af_bella"` (see the sketch below)
- Offline pattern: `NetworkMonitor` env object + `OfflineBanner` + `ErrorAlertModifier`
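
A sketch of the voice fallback order only — the types and property names below are hypothetical, not the app's real API:

```swift
struct Book {
    var voiceOverride: String?   // per-book override, if the user set one
}

func resolvedVoice(for book: Book, globalDefault: String?) -> String {
    // book override → global default → "af_bella"
    book.voiceOverride ?? globalDefault ?? "af_bella"
}
```
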
107  Caddyfile
@@ -30,6 +30,7 @@
# logs.libnovel.cc → dozzle:8080 (Docker log viewer)
# uptime.libnovel.cc → uptime-kuma:3001 (uptime monitoring)
# push.libnovel.cc → gotify:80 (push notifications)
# search.libnovel.cc → meilisearch:7700 (search index — homelab runner)
#
# Routes intentionally removed from direct-to-backend:
#   /api/scrape/* — SvelteKit has /api/scrape/ counterparts
@@ -55,6 +56,28 @@
        ticker_interval 15s
    }

    # ── Redis TCP proxy via layer4 ────────────────────────────────────────────
    # Exposes prod Redis over TLS for Asynq job enqueueing from the homelab runner.
    # Listens on :6380 (all interfaces). TLS is terminated here using the cert
    # for redis.libnovel.cc; traffic is proxied to the local Redis sidecar.
    # Requires the caddy-l4 module in the custom Caddy build.
    layer4 {
        :6380 {
            route {
                tls {
                    connection_policy {
                        match {
                            sni redis.libnovel.cc
                        }
                    }
                }
                proxy {
                    upstream redis:6379
                }
            }
        }
    }
}

(security_headers) {
    header {
@@ -169,12 +192,36 @@
    # ── SvelteKit UI (catch-all — includes all remaining /api/* routes) ───────
    handle {
        reverse_proxy ui:3000 {
        }
            # Active health check: Caddy polls /health every 5 s and marks the
            # upstream down immediately when it fails. Combined with
            # lb_try_duration this means Watchtower container replacements
            # show the maintenance page within a few seconds instead of
            # hanging or returning a raw connection error to the browser.
            health_uri /health
            health_interval 5s
            health_timeout 2s
            health_status 200

            # If the upstream is down, fail fast (don't retry for longer than
            # 3 s) and let Caddy's handle_errors 502/503 take over.
            lb_try_duration 3s
        }
    }

    # ── Caddy-level error pages ───────────────────────────────────────────────
    # These fire when the upstream (backend or ui) is completely unreachable.
    # SvelteKit's own +error.svelte handles application-level errors (404, 500).
    handle_errors 404 {
        root * /srv/errors
        rewrite * /404.html
        file_server
    }
    handle_errors 500 {
        root * /srv/errors
        rewrite * /500.html
        file_server
    }
    handle_errors 502 {
        root * /srv/errors
        rewrite * /502.html
        file_server
@@ -203,41 +250,11 @@
}

# ── Tooling subdomains ────────────────────────────────────────────────────────
feedback.libnovel.cc {
    import security_headers
    reverse_proxy fider:3000
}

# ── GlitchTip: error tracking ─────────────────────────────────────────────────
errors.libnovel.cc {
    import security_headers
    reverse_proxy glitchtip-web:8000
}

# ── Umami: page analytics ─────────────────────────────────────────────────────
analytics.libnovel.cc {
    import security_headers
    reverse_proxy umami:3000
}

# ── Dozzle: Docker log viewer ─────────────────────────────────────────────────
logs.libnovel.cc {
    import security_headers
    reverse_proxy dozzle:8080
}

# ── Uptime Kuma: uptime monitoring ────────────────────────────────────────────
uptime.libnovel.cc {
    import security_headers
    reverse_proxy uptime-kuma:3001
}

# ── Gotify: push notifications ────────────────────────────────────────────────
push.libnovel.cc {
    import security_headers
    reverse_proxy gotify:80
}

# feedback.libnovel.cc, errors.libnovel.cc, analytics.libnovel.cc,
# logs.libnovel.cc, uptime.libnovel.cc, push.libnovel.cc, grafana.libnovel.cc
# are now routed via Cloudflare Tunnel directly to the homelab (192.168.0.109).
# No Caddy rules needed here — Cloudflare handles TLS termination and routing.

# ── PocketBase: exposed for homelab runner task polling ───────────────────────
# Allows the homelab runner to claim tasks and write results via the PB API.
# Admin UI is also accessible here for convenience.
@@ -254,3 +271,21 @@ storage.libnovel.cc {
    reverse_proxy minio:9000
}

# ── Meilisearch: exposed for homelab runner search indexing ──────────────────
# The homelab runner connects here as MEILI_URL to index books after scraping.
# Protected by MEILI_MASTER_KEY bearer token — Meilisearch enforces auth on
# every request; Caddy just terminates TLS.
search.libnovel.cc {
    import security_headers
    reverse_proxy meilisearch:7700
}

# ── Redis TLS cert anchor ─────────────────────────────────────────────────────
# This virtual host exists solely so Caddy obtains and caches a TLS certificate
# for redis.libnovel.cc. The layer4 block above uses that cert to terminate TLS
# on :6380 (Asynq job-queue channel from prod → homelab Redis).
# The HTTP route itself just returns 404 — no real traffic expected here.
redis.libnovel.cc {
    respond 404
}
}

@@ -30,13 +30,23 @@ RUN --mount=type=cache,target=/root/go/pkg/mod \
    -o /out/healthcheck ./cmd/healthcheck

# ── backend service ──────────────────────────────────────────────────────────
FROM gcr.io/distroless/static:nonroot AS backend
# Uses Alpine (not distroless) so ffmpeg is available for on-demand voice
# sample generation via pocket-tts (WAV→MP3 transcoding).
FROM alpine:3.21 AS backend
RUN apk add --no-cache ffmpeg ca-certificates && \
    addgroup -S appgroup && adduser -S appuser -G appgroup
COPY --from=builder /out/healthcheck /healthcheck
COPY --from=builder /out/backend /backend
USER appuser
ENTRYPOINT ["/backend"]

# ── runner service ───────────────────────────────────────────────────────────
FROM gcr.io/distroless/static:nonroot AS runner
# Uses Alpine (not distroless) so ffmpeg is available for WAV→MP3 transcoding
# when pocket-tts voices are used.
FROM alpine:3.21 AS runner
RUN apk add --no-cache ffmpeg ca-certificates && \
    addgroup -S appgroup && adduser -S appuser -G appgroup
COPY --from=builder /out/healthcheck /healthcheck
COPY --from=builder /out/runner /runner
USER appuser
ENTRYPOINT ["/runner"]

@@ -15,6 +15,7 @@ package main

import (
    "context"
    "fmt"
    "io"
    "log/slog"
    "os"
    "os/signal"
@@ -22,11 +23,17 @@ import (
    "time"

    "github.com/getsentry/sentry-go"
    "github.com/hibiken/asynq"
    "github.com/libnovel/backend/internal/asynqqueue"
    "github.com/libnovel/backend/internal/backend"
    "github.com/libnovel/backend/internal/cfai"
    "github.com/libnovel/backend/internal/config"
    "github.com/libnovel/backend/internal/kokoro"
    "github.com/libnovel/backend/internal/meili"
    "github.com/libnovel/backend/internal/otelsetup"
    "github.com/libnovel/backend/internal/pockettts"
    "github.com/libnovel/backend/internal/storage"
    "github.com/libnovel/backend/internal/taskqueue"
)

// version and commit are set at build time via -ldflags.
@@ -70,6 +77,19 @@ func run() error {
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
    defer stop()

    // ── OpenTelemetry tracing + logs ──────────────────────────────────────────
    otelShutdown, otelLog, err := otelsetup.Init(ctx, version)
    if err != nil {
        return fmt.Errorf("init otel: %w", err)
    }
    if otelShutdown != nil {
        defer otelShutdown()
        // Replace the plain slog logger with the OTel-bridged one so all
        // structured log lines are forwarded to Loki with trace IDs attached.
        log = otelLog
        log.Info("otel tracing + logs enabled", "endpoint", os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT"))
    }

    // ── Storage ──────────────────────────────────────────────────────────────
    store, err := storage.NewStore(ctx, cfg, log)
    if err != nil {
@@ -86,6 +106,42 @@ func run() error {
        kokoroClient = &noopKokoro{}
    }

    // ── Pocket-TTS (voice list + sample generation; audio generation is the runner's job) ──
    var pocketTTSClient pockettts.Client
    if cfg.PocketTTS.URL != "" {
        pocketTTSClient = pockettts.New(cfg.PocketTTS.URL)
        log.Info("pocket-tts voices enabled", "url", cfg.PocketTTS.URL)
    } else {
        log.Info("POCKET_TTS_URL not set — pocket-tts voices unavailable in backend")
    }

    // ── Cloudflare Workers AI (voice sample generation + audio-stream live TTS) ──
    var cfaiClient cfai.Client
    if cfg.CFAI.AccountID != "" && cfg.CFAI.APIToken != "" {
        cfaiClient = cfai.New(cfg.CFAI.AccountID, cfg.CFAI.APIToken, cfg.CFAI.Model)
        log.Info("cloudflare AI TTS enabled", "model", cfg.CFAI.Model)
    } else {
        log.Info("CFAI_ACCOUNT_ID/CFAI_API_TOKEN not set — CF AI voices unavailable in backend")
    }

    // ── Cloudflare Workers AI Image Generation ────────────────────────────────
    var imageGenClient cfai.ImageGenClient
    if cfg.CFAI.AccountID != "" && cfg.CFAI.APIToken != "" {
        imageGenClient = cfai.NewImageGen(cfg.CFAI.AccountID, cfg.CFAI.APIToken)
        log.Info("cloudflare AI image generation enabled")
    } else {
        log.Info("CFAI_ACCOUNT_ID/CFAI_API_TOKEN not set — image generation unavailable")
    }

    // ── Cloudflare Workers AI Text Generation ─────────────────────────────────
    var textGenClient cfai.TextGenClient
    if cfg.CFAI.AccountID != "" && cfg.CFAI.APIToken != "" {
        textGenClient = cfai.NewTextGen(cfg.CFAI.AccountID, cfg.CFAI.APIToken)
        log.Info("cloudflare AI text generation enabled")
    } else {
        log.Info("CFAI_ACCOUNT_ID/CFAI_API_TOKEN not set — text generation unavailable")
    }

    // ── Meilisearch (search reads only; indexing is the runner's job) ────────
    var searchIndex meili.Client
    if cfg.Meilisearch.URL != "" {
@@ -96,6 +152,24 @@ func run() error {
        searchIndex = meili.NoopClient{}
    }

    // ── Task Producer ────────────────────────────────────────────────────────
    // When REDIS_ADDR is set the backend dual-writes: PocketBase record (audit)
    // + Asynq job (immediate delivery). Otherwise it writes to PocketBase only
    // and the runner picks up on the next poll tick.
    var producer taskqueue.Producer = store
    if cfg.Redis.Addr != "" {
        redisOpt, parseErr := parseRedisOpt(cfg.Redis)
        if parseErr != nil {
            return fmt.Errorf("parse REDIS_ADDR: %w", parseErr)
        }
        asynqProducer := asynqqueue.NewProducer(store, redisOpt, log)
        defer asynqProducer.Close() //nolint:errcheck
        producer = asynqProducer
        log.Info("backend: asynq task dispatch enabled", "addr", cfg.Redis.Addr)
    } else {
        log.Info("backend: poll-mode task dispatch (REDIS_ADDR not set)")
    }

    // ── Backend server ───────────────────────────────────────────────────────
    srv := backend.New(
        backend.Config{
@@ -105,17 +179,27 @@ func run() error {
            Commit: commit,
        },
        backend.Dependencies{
            BookReader:    store,
            RankingStore:  store,
            AudioStore:    store,
            PresignStore:  store,
            ProgressStore: store,
            CoverStore:    store,
            Producer:      store,
            TaskReader:    store,
            SearchIndex:   searchIndex,
            Kokoro:        kokoroClient,
            Log:           log,
            BookReader:        store,
            RankingStore:      store,
            AudioStore:        store,
            TranslationStore:  store,
            PresignStore:      store,
            ProgressStore:     store,
            CoverStore:        store,
            ChapterImageStore: store,
            Producer:          producer,
            TaskReader:        store,
            ImportFileStore:   store,
            SearchIndex:       searchIndex,
            Kokoro:            kokoroClient,
            PocketTTS:         pocketTTSClient,
            CFAI:              cfaiClient,
            ImageGen:          imageGenClient,
            TextGen:           textGenClient,
            BookWriter:        store,
            AIJobStore:        store,
            BookAdminStore:    store,
            Log:               log,
        },
    )

@@ -148,6 +232,27 @@ func (n *noopKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, erro
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) StreamAudioMP3(_ context.Context, _, _ string) (io.ReadCloser, error) {
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) StreamAudioWAV(_ context.Context, _, _ string) (io.ReadCloser, error) {
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) ListVoices(_ context.Context) ([]string, error) {
    return nil, nil
}

// parseRedisOpt converts a config.Redis into an asynq.RedisConnOpt.
// Handles full "redis://" / "rediss://" URLs and plain "host:port".
func parseRedisOpt(cfg config.Redis) (asynq.RedisConnOpt, error) {
    addr := cfg.Addr
    if len(addr) > 7 && (addr[:8] == "redis://" || (len(addr) > 8 && addr[:9] == "rediss://")) {
        return asynq.ParseRedisURI(addr)
    }
    return asynq.RedisClientOpt{
        Addr:     addr,
        Password: cfg.Password,
    }, nil
}

@@ -12,6 +12,7 @@ package main
import (
    "context"
    "fmt"
    "io"
    "log/slog"
    "os"
    "os/signal"
@@ -20,13 +21,20 @@ import (
    "time"

    "github.com/getsentry/sentry-go"
    "github.com/libnovel/backend/internal/asynqqueue"
    "github.com/libnovel/backend/internal/browser"
    "github.com/libnovel/backend/internal/cfai"
    "github.com/libnovel/backend/internal/config"
    "github.com/libnovel/backend/internal/kokoro"
    "github.com/libnovel/backend/internal/libretranslate"
    "github.com/libnovel/backend/internal/meili"
    "github.com/libnovel/backend/internal/novelfire"
    "github.com/libnovel/backend/internal/otelsetup"
    "github.com/libnovel/backend/internal/pockettts"
    "github.com/libnovel/backend/internal/runner"
    "github.com/libnovel/backend/internal/storage"
    "github.com/libnovel/backend/internal/taskqueue"
    "github.com/libnovel/backend/internal/webpush"
)

// version and commit are set at build time via -ldflags.
@@ -70,6 +78,19 @@ func run() error {
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
    defer stop()

    // ── OpenTelemetry tracing + logs ─────────────────────────────────────────
    otelShutdown, otelLog, err := otelsetup.Init(ctx, version)
    if err != nil {
        return fmt.Errorf("init otel: %w", err)
    }
    if otelShutdown != nil {
        defer otelShutdown()
        // Switch to the OTel-bridged logger so all structured log lines are
        // forwarded to Loki with trace IDs attached.
        log = otelLog
        log.Info("otel tracing + logs enabled", "endpoint", os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT"))
    }

    // ── Storage ─────────────────────────────────────────────────────────────
    store, err := storage.NewStore(ctx, cfg, log)
    if err != nil {
@@ -98,10 +119,36 @@ func run() error {
        kokoroClient = kokoro.New(cfg.Kokoro.URL)
        log.Info("kokoro TTS enabled", "url", cfg.Kokoro.URL)
    } else {
        log.Warn("KOKORO_URL not set — audio tasks will fail")
        log.Warn("KOKORO_URL not set — kokoro voice tasks will fail")
        kokoroClient = &noopKokoro{}
    }

    // ── pocket-tts ──────────────────────────────────────────────────────────
    var pocketTTSClient pockettts.Client
    if cfg.PocketTTS.URL != "" {
        pocketTTSClient = pockettts.New(cfg.PocketTTS.URL)
        log.Info("pocket-tts enabled", "url", cfg.PocketTTS.URL)
    } else {
        log.Warn("POCKET_TTS_URL not set — pocket-tts voice tasks will fail")
    }

    // ── Cloudflare Workers AI ────────────────────────────────────────────────
    var cfaiClient cfai.Client
    if cfg.CFAI.AccountID != "" && cfg.CFAI.APIToken != "" {
        cfaiClient = cfai.New(cfg.CFAI.AccountID, cfg.CFAI.APIToken, cfg.CFAI.Model)
        log.Info("cloudflare AI TTS enabled", "model", cfg.CFAI.Model)
    } else {
        log.Info("CFAI_ACCOUNT_ID/CFAI_API_TOKEN not set — CF AI voice tasks will fail")
    }

    // ── LibreTranslate ──────────────────────────────────────────────────────
    ltClient := libretranslate.New(cfg.LibreTranslate.URL, cfg.LibreTranslate.APIKey)
    if ltClient != nil {
        log.Info("libretranslate enabled", "url", cfg.LibreTranslate.URL)
    } else {
        log.Info("LIBRETRANSLATE_URL not set — machine translation disabled")
    }

    // ── Meilisearch ─────────────────────────────────────────────────────────
    var searchIndex meili.Client
    if cfg.Meilisearch.URL != "" {
@@ -123,21 +170,56 @@ func run() error {
        PollInterval:                cfg.Runner.PollInterval,
        MaxConcurrentScrape:         cfg.Runner.MaxConcurrentScrape,
        MaxConcurrentAudio:          cfg.Runner.MaxConcurrentAudio,
        MaxConcurrentTranslation:    cfg.Runner.MaxConcurrentTranslation,
        OrchestratorWorkers:         workers,
        MetricsAddr:                 cfg.Runner.MetricsAddr,
        CatalogueRefreshInterval:    cfg.Runner.CatalogueRefreshInterval,
        CatalogueRequestDelay:       cfg.Runner.CatalogueRequestDelay,
        SkipInitialCatalogueRefresh: cfg.Runner.SkipInitialCatalogueRefresh,
        RedisAddr:                   cfg.Redis.Addr,
        RedisPassword:               cfg.Redis.Password,
    }

    // In Asynq mode the Consumer is a thin wrapper: claim/heartbeat/reap are
    // no-ops, but FinishAudioTask / FinishScrapeTask / FailTask write back to
    // PocketBase as before.
    var consumer taskqueue.Consumer = store
    if cfg.Redis.Addr != "" {
        log.Info("runner: asynq mode — using Redis for task dispatch", "addr", cfg.Redis.Addr)
        consumer = asynqqueue.NewConsumer(store)
    } else {
        log.Info("runner: poll mode — using PocketBase for task dispatch")
    }

    // ── Web Push ─────────────────────────────────────────────────────────────
    var pushSender *webpush.Sender
    if cfg.VAPID.PublicKey != "" && cfg.VAPID.PrivateKey != "" {
        pushSender = webpush.New(cfg.VAPID.PublicKey, cfg.VAPID.PrivateKey, cfg.VAPID.Subject, log)
        log.Info("runner: web push notifications enabled")
    } else {
        log.Info("runner: VAPID_PUBLIC_KEY/VAPID_PRIVATE_KEY not set — push notifications disabled")
    }

    deps := runner.Dependencies{
        Consumer:    store,
        BookWriter:  store,
        BookReader:  store,
        AudioStore:  store,
        CoverStore:  store,
        SearchIndex: searchIndex,
        Novel:       novel,
        Kokoro:      kokoroClient,
        Log:         log,
        Consumer:           consumer,
        BookWriter:         store,
        BookReader:         store,
        AudioStore:         store,
        CoverStore:         store,
        TranslationStore:   store,
        BookImport:         storage.NewBookImporter(store),
        ImportChapterStore: store,
        ChapterIngester:    store,
        SearchIndex:        searchIndex,
        Novel:              novel,
        Kokoro:             kokoroClient,
        PocketTTS:          pocketTTSClient,
        CFAI:               cfaiClient,
        LibreTranslate:     ltClient,
        Notifier:           store,
        WebPush:            pushSender,
        Store:              store,
        Log:                log,
    }
    r := runner.New(rCfg, deps)

@@ -168,6 +250,14 @@ func (n *noopKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, erro
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) StreamAudioMP3(_ context.Context, _, _ string) (io.ReadCloser, error) {
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) StreamAudioWAV(_ context.Context, _, _ string) (io.ReadCloser, error) {
    return nil, fmt.Errorf("kokoro not configured (KOKORO_URL is empty)")
}

func (n *noopKokoro) ListVoices(_ context.Context) ([]string, error) {
    return nil, nil
}

@@ -3,35 +3,77 @@ module github.com/libnovel/backend
go 1.26.1

require (
    github.com/getsentry/sentry-go v0.43.0
    github.com/hibiken/asynq v0.26.0
    github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d
    github.com/meilisearch/meilisearch-go v0.36.1
    github.com/minio/minio-go/v7 v7.0.98
    github.com/pdfcpu/pdfcpu v0.11.1
    github.com/prometheus/client_golang v1.23.2
    github.com/redis/go-redis/v9 v9.18.0
    github.com/yuin/goldmark v1.8.2
    go.opentelemetry.io/contrib/bridges/otelslog v0.17.0
    go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0
    go.opentelemetry.io/otel v1.42.0
    go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.18.0
    go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0
    go.opentelemetry.io/otel/log v0.18.0
    go.opentelemetry.io/otel/sdk v1.42.0
    go.opentelemetry.io/otel/sdk/log v0.18.0
    golang.org/x/net v0.51.0
)

require (
    github.com/SherClockHolmes/webpush-go v1.4.0 // indirect
    github.com/andybalholm/brotli v1.1.1 // indirect
    github.com/beorn7/perks v1.0.1 // indirect
    github.com/cenkalti/backoff/v5 v5.0.3 // indirect
    github.com/cespare/xxhash/v2 v2.3.0 // indirect
    github.com/davecgh/go-spew v1.1.1 // indirect
    github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
    github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
    github.com/dustin/go-humanize v1.0.1 // indirect
    github.com/getsentry/sentry-go v0.43.0 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
    github.com/go-ini/ini v1.67.0 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
    github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
    github.com/google/uuid v1.6.0 // indirect
    github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 // indirect
    github.com/hhrutter/lzw v1.0.0 // indirect
    github.com/hhrutter/pkcs7 v0.2.0 // indirect
    github.com/hhrutter/tiff v1.0.2 // indirect
    github.com/klauspost/compress v1.18.2 // indirect
    github.com/klauspost/cpuid/v2 v2.2.11 // indirect
    github.com/klauspost/crc32 v1.3.0 // indirect
    github.com/meilisearch/meilisearch-go v0.36.1 // indirect
    github.com/mattn/go-runewidth v0.0.19 // indirect
    github.com/minio/crc64nvme v1.1.1 // indirect
    github.com/minio/md5-simd v1.1.2 // indirect
    github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
    github.com/philhofer/fwd v1.2.0 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/redis/go-redis/v9 v9.18.0 // indirect
    github.com/pkg/errors v0.9.1 // indirect
    github.com/prometheus/client_model v0.6.2 // indirect
    github.com/prometheus/common v0.66.1 // indirect
    github.com/prometheus/procfs v0.16.1 // indirect
    github.com/robfig/cron/v3 v3.0.1 // indirect
    github.com/rs/xid v1.6.0 // indirect
    github.com/spf13/cast v1.10.0 // indirect
    github.com/tinylib/msgp v1.6.1 // indirect
    go.opentelemetry.io/auto/sdk v1.2.1 // indirect
    go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0 // indirect
    go.opentelemetry.io/otel/metric v1.42.0 // indirect
    go.opentelemetry.io/otel/trace v1.42.0 // indirect
    go.opentelemetry.io/proto/otlp v1.9.0 // indirect
    go.uber.org/atomic v1.11.0 // indirect
    go.yaml.in/yaml/v2 v2.4.2 // indirect
    go.yaml.in/yaml/v3 v3.0.4 // indirect
    golang.org/x/crypto v0.48.0 // indirect
    golang.org/x/image v0.32.0 // indirect
    golang.org/x/sys v0.41.0 // indirect
    golang.org/x/text v0.34.0 // indirect
    gopkg.in/yaml.v3 v3.0.1 // indirect
    golang.org/x/time v0.14.0 // indirect
    google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 // indirect
    google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
    google.golang.org/grpc v1.79.2 // indirect
    google.golang.org/protobuf v1.36.11 // indirect
    gopkg.in/yaml.v2 v2.4.0 // indirect
)

194 backend/go.sum
@@ -1,21 +1,62 @@
github.com/SherClockHolmes/webpush-go v1.4.0 h1:ocnzNKWN23T9nvHi6IfyrQjkIc0oJWv1B1pULsf9i3s=
github.com/SherClockHolmes/webpush-go v1.4.0/go.mod h1:XSq8pKX11vNV8MJEMwjrlTkxhAj1zKfxmyhdV7Pd6UA=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/getsentry/sentry-go v0.43.0 h1:XbXLpFicpo8HmBDaInk7dum18G9KSLcjZiyUKS+hLW4=
github.com/getsentry/sentry-go v0.43.0/go.mod h1:XDotiNZbgf5U8bPDUAfvcFmOnMQQceESxyKaObSssW0=
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c=
github.com/hhrutter/lzw v1.0.0 h1:laL89Llp86W3rRs83LvKbwYRx6INE8gDn0XNb1oXtm0=
github.com/hhrutter/lzw v1.0.0/go.mod h1:2HC6DJSn/n6iAZfgM3Pg+cP1KxeWc3ezG8bBqW5+WEo=
github.com/hhrutter/pkcs7 v0.2.0 h1:i4HN2XMbGQpZRnKBLsUwO3dSckzgX142TNqY/KfXg+I=
github.com/hhrutter/pkcs7 v0.2.0/go.mod h1:aEzKz0+ZAlz7YaEMY47jDHL14hVWD6iXt0AgqgAvWgE=
github.com/hhrutter/tiff v1.0.2 h1:7H3FQQpKu/i5WaSChoD1nnJbGx4MxU5TlNqqpxw55z8=
github.com/hhrutter/tiff v1.0.2/go.mod h1:pcOeuK5loFUE7Y/WnzGw20YxUdnqjY1P0Jlcieb/cCw=
github.com/hibiken/asynq v0.26.0 h1:1Zxr92MlDnb1Zt/QR5g2vSCqUS03i95lUfqx5X7/wrw=
github.com/hibiken/asynq v0.26.0/go.mod h1:Qk4e57bTnWDoyJ67VkchuV6VzSM9IQW2nPvAGuDyw58=
github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d h1:Ld5m8EIK5QVOq/owOexKIbETij3skACg4eU1pArHsrw=
github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d/go.mod h1:hhpStehaxSGg3ib9wJXzw5AXY1YS6lQ9BNavAgPbIhE=
github.com/klauspost/compress v1.18.2 h1:iiPHWW0YrcFgpBYhsA6D1+fqHssJscY/Tm/y2Uqnapk=
github.com/klauspost/compress v1.18.2/go.mod h1:R0h/fSBs8DE4ENlcrlib3PsXS61voFxhIs2DeRhCvJ4=
github.com/klauspost/cpuid/v2 v2.0.1/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
@@ -23,6 +64,14 @@ github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4O
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=
github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
github.com/meilisearch/meilisearch-go v0.36.1 h1:mJTCJE5g7tRvaqKco6DfqOuJEjX+rRltDEnkEC02Y0M=
github.com/meilisearch/meilisearch-go v0.36.1/go.mod h1:hWcR0MuWLSzHfbz9GGzIr3s9rnXLm1jqkmHkJPbUSvM=
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
@@ -31,34 +80,173 @@ github.com/minio/md5-simd v1.1.2 h1:Gdi1DZK69+ZVMoNHRXJyNcxrMA4dSxoYHZSQbirFg34=
github.com/minio/md5-simd v1.1.2/go.mod h1:MzdKDxYpY2BT9XQFocsiZf/NKVtR7nkE4RoEpN+20RM=
github.com/minio/minio-go/v7 v7.0.98 h1:MeAVKjLVz+XJ28zFcuYyImNSAh8Mq725uNW4beRisi0=
github.com/minio/minio-go/v7 v7.0.98/go.mod h1:cY0Y+W7yozf0mdIclrttzo1Iiu7mEf9y7nk2uXqMOvM=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
github.com/pdfcpu/pdfcpu v0.11.1 h1:htHBSkGH5jMKWC6e0sihBFbcKZ8vG1M67c8/dJxhjas=
github.com/pdfcpu/pdfcpu v0.11.1/go.mod h1:pP3aGga7pRvwFWAm9WwFvo+V68DfANi9kxSQYioNYcw=
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
github.com/redis/go-redis/v9 v9.18.0 h1:pMkxYPkEbMPwRdenAzUNyFNrDgHx9U+DrBabWNfSRQs=
github.com/redis/go-redis/v9 v9.18.0/go.mod h1:k3ufPphLU5YXwNTUcCRXGxUoF1fqxnhFQmscfkCoDA0=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yuin/goldmark v1.8.2 h1:kEGpgqJXdgbkhcOgBxkC0X0PmoPG1ZyoZ117rDVp4zE=
github.com/yuin/goldmark v1.8.2/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
go.opentelemetry.io/contrib/bridges/otelslog v0.17.0 h1:NFIS6x7wyObQ7cR84x7bt1sr8nYBx89s3x3GwRjw40k=
go.opentelemetry.io/contrib/bridges/otelslog v0.17.0/go.mod h1:39SaByOyDMRMe872AE7uelMuQZidIw7LLFAnQi0FWTE=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 h1:OyrsyzuttWTSur2qN/Lm0m2a8yqyIjUVBZcxFPuXq2o=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0/go.mod h1:C2NGBr+kAB4bk3xtMXfZ94gqFDtg/GkI7e9zqGh5Beg=
go.opentelemetry.io/otel v1.42.0 h1:lSQGzTgVR3+sgJDAU/7/ZMjN9Z+vUip7leaqBKy4sho=
go.opentelemetry.io/otel v1.42.0/go.mod h1:lJNsdRMxCUIWuMlVJWzecSMuNjE7dOYyWlqOXWkdqCc=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.18.0 h1:icqq3Z34UrEFk2u+HMhTtRsvo7Ues+eiJVjaJt62njs=
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.18.0/go.mod h1:W2m8P+d5Wn5kipj4/xmbt9uMqezEKfBjzVJadfABSBE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0 h1:THuZiwpQZuHPul65w4WcwEnkX2QIuMT+UFoOrygtoJw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0/go.mod h1:J2pvYM5NGHofZ2/Ru6zw/TNWnEQp5crgyDeSrYpXkAw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 h1:uLXP+3mghfMf7XmV4PkGfFhFKuNWoCvvx5wP/wOXo0o=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0/go.mod h1:v0Tj04armyT59mnURNUJf7RCKcKzq+lgJs6QSjHjaTc=
go.opentelemetry.io/otel/log v0.18.0 h1:XgeQIIBjZZrliksMEbcwMZefoOSMI1hdjiLEiiB0bAg=
go.opentelemetry.io/otel/log v0.18.0/go.mod h1:KEV1kad0NofR3ycsiDH4Yjcoj0+8206I6Ox2QYFSNgI=
go.opentelemetry.io/otel/metric v1.42.0 h1:2jXG+3oZLNXEPfNmnpxKDeZsFI5o4J+nz6xUlaFdF/4=
go.opentelemetry.io/otel/metric v1.42.0/go.mod h1:RlUN/7vTU7Ao/diDkEpQpnz3/92J9ko05BIwxYa2SSI=
go.opentelemetry.io/otel/sdk v1.42.0 h1:LyC8+jqk6UJwdrI/8VydAq/hvkFKNHZVIWuslJXYsDo=
go.opentelemetry.io/otel/sdk v1.42.0/go.mod h1:rGHCAxd9DAph0joO4W6OPwxjNTYWghRWmkHuGbayMts=
go.opentelemetry.io/otel/sdk/log v0.18.0 h1:n8OyZr7t7otkeTnPTbDNom6rW16TBYGtvyy2Gk6buQw=
go.opentelemetry.io/otel/sdk/log v0.18.0/go.mod h1:C0+wxkTwKpOCZLrlJ3pewPiiQwpzycPI/u6W0Z9fuYk=
go.opentelemetry.io/otel/sdk/log/logtest v0.18.0 h1:l3mYuPsuBx6UKE47BVcPrZoZ0q/KER57vbj2qkgDLXA=
go.opentelemetry.io/otel/sdk/log/logtest v0.18.0/go.mod h1:7cHtiVJpZebB3wybTa4NG+FUo5NPe3PROz1FqB0+qdw=
go.opentelemetry.io/otel/sdk/metric v1.42.0 h1:D/1QR46Clz6ajyZ3G8SgNlTJKBdGp84q9RKCAZ3YGuA=
go.opentelemetry.io/otel/sdk/metric v1.42.0/go.mod h1:Ua6AAlDKdZ7tdvaQKfSmnFTdHx37+J4ba8MwVCYM5hc=
go.opentelemetry.io/otel/trace v1.42.0 h1:OUCgIPt+mzOnaUTpOQcBiM/PLQ/Op7oq6g4LenLmOYY=
go.opentelemetry.io/otel/trace v1.42.0/go.mod h1:f3K9S+IFqnumBkKhRJMeaZeNk9epyhnCmQh/EysQCdc=
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4=
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ=
golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 h1:JLQynH/LBHfCTSbDWl+py8C+Rg/k1OVH3xfcaiANuF0=
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:kSJwQxqmFXeo79zOmbrALdflXQeAYcUbgS7PbpMknCY=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU=
google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

81 backend/internal/asynqqueue/consumer.go Normal file
@@ -0,0 +1,81 @@
package asynqqueue

import (
    "context"
    "time"

    "github.com/libnovel/backend/internal/domain"
    "github.com/libnovel/backend/internal/taskqueue"
)

// Consumer wraps the PocketBase-backed Consumer for result write-back only.
//
// When using Asynq, the runner no longer polls for scrape/audio work — Asynq
// delivers those tasks via the ServeMux handlers. However, translation tasks
// live in PocketBase (not Redis), so ClaimNextTranslationTask and HeartbeatTask
// still delegate to the underlying PocketBase consumer.
//
// ClaimNextAudioTask and ClaimNextScrapeTask are no-ops here because Asynq owns
// those responsibilities.
type Consumer struct {
    pb taskqueue.Consumer // underlying PocketBase consumer (for write-back)
}

// NewConsumer wraps an existing PocketBase Consumer.
func NewConsumer(pb taskqueue.Consumer) *Consumer {
    return &Consumer{pb: pb}
}

// ── Write-back (delegated to PocketBase) ──────────────────────────────────────

func (c *Consumer) FinishScrapeTask(ctx context.Context, id string, result domain.ScrapeResult) error {
    return c.pb.FinishScrapeTask(ctx, id, result)
}

func (c *Consumer) FinishAudioTask(ctx context.Context, id string, result domain.AudioResult) error {
    return c.pb.FinishAudioTask(ctx, id, result)
}

func (c *Consumer) FinishTranslationTask(ctx context.Context, id string, result domain.TranslationResult) error {
    return c.pb.FinishTranslationTask(ctx, id, result)
}

func (c *Consumer) FinishImportTask(ctx context.Context, id string, result domain.ImportResult) error {
    return c.pb.FinishImportTask(ctx, id, result)
}

func (c *Consumer) FailTask(ctx context.Context, id, errMsg string) error {
    return c.pb.FailTask(ctx, id, errMsg)
}

// ── No-ops (Asynq owns claiming / heartbeating / reaping) ───────────────────

func (c *Consumer) ClaimNextScrapeTask(_ context.Context, _ string) (domain.ScrapeTask, bool, error) {
    return domain.ScrapeTask{}, false, nil
}

func (c *Consumer) ClaimNextAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
    return domain.AudioTask{}, false, nil
}

// ClaimNextTranslationTask delegates to PocketBase because translation tasks
// are stored in PocketBase (not Redis/Asynq) and must still be polled directly.
func (c *Consumer) ClaimNextTranslationTask(ctx context.Context, workerID string) (domain.TranslationTask, bool, error) {
    return c.pb.ClaimNextTranslationTask(ctx, workerID)
}

// ClaimNextImportTask delegates to PocketBase because import tasks
// are stored in PocketBase (not Redis/Asynq) and must still be polled directly.
func (c *Consumer) ClaimNextImportTask(ctx context.Context, workerID string) (domain.ImportTask, bool, error) {
    return c.pb.ClaimNextImportTask(ctx, workerID)
}

func (c *Consumer) HeartbeatTask(ctx context.Context, id string) error {
    return c.pb.HeartbeatTask(ctx, id)
}

// ReapStaleTasks delegates to PocketBase so stale translation tasks are reset
// to pending and can be reclaimed.
func (c *Consumer) ReapStaleTasks(ctx context.Context, staleAfter time.Duration) (int, error) {
    return c.pb.ReapStaleTasks(ctx, staleAfter)
}
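To see why the no-op claims are safe, consider how the runner's poll loop would interact with this wrapper (a sketch; the runner internals are not part of this diff, and the translate helper and task.ID field name are assumptions):

    // In Asynq mode, scrape/audio claims return (zero value, false, nil), so the
    // poll loop finds nothing to do for them, while translation tasks are still
    // claimed from PocketBase through the same taskqueue.Consumer interface.
    task, ok, err := consumer.ClaimNextTranslationTask(ctx, workerID)
    if err != nil {
        return err
    }
    if ok {
        result, terr := translate(ctx, task) // hypothetical translation step
        if terr != nil {
            return consumer.FailTask(ctx, task.ID, terr.Error())
        }
        return consumer.FinishTranslationTask(ctx, task.ID, result)
    }
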
135 backend/internal/asynqqueue/producer.go Normal file
@@ -0,0 +1,135 @@
package asynqqueue

import (
    "context"
    "encoding/json"
    "fmt"
    "log/slog"

    "github.com/hibiken/asynq"
    "github.com/libnovel/backend/internal/domain"
    "github.com/libnovel/backend/internal/taskqueue"
)

// Producer dual-writes every task: first to PocketBase (via pb, for audit /
// UI status), then to Redis via Asynq so the runner picks it up immediately.
type Producer struct {
    pb     taskqueue.Producer // underlying PocketBase producer
    client *asynq.Client
    log    *slog.Logger
}

// NewProducer wraps an existing PocketBase Producer with Asynq dispatch.
func NewProducer(pb taskqueue.Producer, redisOpt asynq.RedisConnOpt, log *slog.Logger) *Producer {
    return &Producer{
        pb:     pb,
        client: asynq.NewClient(redisOpt),
        log:    log,
    }
}

// Close shuts down the underlying Asynq client connection.
func (p *Producer) Close() error {
    return p.client.Close()
}

// CreateScrapeTask creates a PocketBase record then enqueues an Asynq job.
func (p *Producer) CreateScrapeTask(ctx context.Context, kind, targetURL string, fromChapter, toChapter int) (string, error) {
    id, err := p.pb.CreateScrapeTask(ctx, kind, targetURL, fromChapter, toChapter)
    if err != nil {
        return "", err
    }

    payload := ScrapePayload{
        PBTaskID:    id,
        Kind:        kind,
        TargetURL:   targetURL,
        FromChapter: fromChapter,
        ToChapter:   toChapter,
    }
    taskType := TypeScrapeBook
    if kind == "catalogue" {
        taskType = TypeScrapeCatalogue
    }
    if err := p.enqueue(ctx, taskType, payload); err != nil {
        // Non-fatal: PB record exists; runner will pick it up on next poll.
        p.log.Warn("asynq enqueue scrape failed (task still in PB, runner will poll)",
            "task_id", id, "err", err)
        return id, nil
    }
    return id, nil
}

// CreateAudioTask creates a PocketBase record then enqueues an Asynq job.
func (p *Producer) CreateAudioTask(ctx context.Context, slug string, chapter int, voice string) (string, error) {
    id, err := p.pb.CreateAudioTask(ctx, slug, chapter, voice)
    if err != nil {
        return "", err
    }

    payload := AudioPayload{
        PBTaskID: id,
        Slug:     slug,
        Chapter:  chapter,
        Voice:    voice,
    }
    if err := p.enqueue(ctx, TypeAudioGenerate, payload); err != nil {
        // Non-fatal: PB record exists; runner will pick it up on next poll.
        p.log.Warn("asynq enqueue audio failed (task still in PB, runner will poll)",
            "task_id", id, "err", err)
        return id, nil
    }
    return id, nil
}

// CreateTranslationTask creates a PocketBase record. Translation tasks are
// not currently dispatched via Asynq — the runner picks them up via polling.
func (p *Producer) CreateTranslationTask(ctx context.Context, slug string, chapter int, lang string) (string, error) {
    return p.pb.CreateTranslationTask(ctx, slug, chapter, lang)
}

// CreateImportTask creates a PocketBase record then enqueues an Asynq job for PDF/EPUB import.
func (p *Producer) CreateImportTask(ctx context.Context, task domain.ImportTask) (string, error) {
    id, err := p.pb.CreateImportTask(ctx, task)
    if err != nil {
        return "", err
    }

    payload := ImportPayload{
        PBTaskID:    id,
        Slug:        task.Slug,
        Title:       task.Title,
        FileType:    task.FileType,
        ObjectKey:   task.ObjectKey,
        ChaptersKey: task.ChaptersKey,
    }
    if err := p.enqueue(ctx, TypeImportBook, payload); err != nil {
        // Non-fatal: PB record exists; runner will pick it up on next poll.
        p.log.Warn("asynq enqueue import failed (task still in PB, runner will poll)",
            "task_id", id, "err", err)
        return id, nil
    }
    return id, nil
}

// CancelTask delegates to PocketBase; Asynq jobs may already be running and
// cannot be reliably cancelled, so we only update the audit record.
func (p *Producer) CancelTask(ctx context.Context, id string) error {
    return p.pb.CancelTask(ctx, id)
}

// CancelAudioTasksBySlug delegates to PocketBase to cancel all pending/running
// audio tasks for slug.
func (p *Producer) CancelAudioTasksBySlug(ctx context.Context, slug string) (int, error) {
    return p.pb.CancelAudioTasksBySlug(ctx, slug)
}

// enqueue serialises payload and dispatches it to Asynq.
func (p *Producer) enqueue(_ context.Context, taskType string, payload any) error {
    b, err := json.Marshal(payload)
    if err != nil {
        return fmt.Errorf("marshal payload: %w", err)
    }
    _, err = p.client.Enqueue(asynq.NewTask(taskType, b))
    return err
}
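Wiring the producer up mirrors the consumer side. A sketch of how the API process might construct it, assuming a store that implements taskqueue.Producer and the parseRedisOpt helper shown earlier (the slug and voice values are made up):

    opt, err := parseRedisOpt(cfg.Redis)
    if err != nil {
        return fmt.Errorf("parse redis opt: %w", err)
    }
    producer := asynqqueue.NewProducer(store, opt, log)
    defer producer.Close()

    // Dual-write: the PB record is created first, then the Asynq job. If the
    // enqueue fails, the record still exists and the runner polls it instead.
    id, err := producer.CreateAudioTask(ctx, "example-book", 12, "example-voice")
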
57 backend/internal/asynqqueue/tasks.go Normal file
@@ -0,0 +1,57 @@
// Package asynqqueue provides Asynq-backed implementations of the
// taskqueue.Producer and taskqueue.Consumer interfaces.
//
// Architecture:
//   - Producer: dual-writes — creates a PocketBase record for audit/UI, then
//     enqueues an Asynq job so the runner picks it up immediately (sub-ms).
//   - Consumer: thin wrapper used only for result write-back (FinishAudioTask,
//     FinishScrapeTask, FailTask). ClaimNext*/Heartbeat/Reap are no-ops because
//     Asynq owns those responsibilities.
//   - Handlers: asynq.HandlerFunc wrappers that decode job payloads and invoke
//     the existing runner logic (runScrapeTask / runAudioTask).
//
// Fallback: when REDIS_ADDR is empty the caller should use the plain
// storage.Store (PocketBase-polling) implementation unchanged.
package asynqqueue

// Queue names — keep all jobs on the default queue for now.
// Add separate queues (e.g. "audio", "scrape") later if you need priority.
const QueueDefault = "default"

// Task type constants used for Asynq routing.
const (
    TypeAudioGenerate   = "audio:generate"
    TypeScrapeBook      = "scrape:book"
    TypeScrapeCatalogue = "scrape:catalogue"
    TypeImportBook      = "import:book"
)

// AudioPayload is the Asynq job payload for audio generation tasks.
type AudioPayload struct {
    // PBTaskID is the PocketBase record ID created before enqueueing.
    // The handler uses it to write results back via Consumer.FinishAudioTask.
    PBTaskID string `json:"pb_task_id"`
    Slug     string `json:"slug"`
    Chapter  int    `json:"chapter"`
    Voice    string `json:"voice"`
}

// ScrapePayload is the Asynq job payload for scrape tasks.
type ScrapePayload struct {
    // PBTaskID is the PocketBase record ID created before enqueueing.
    PBTaskID    string `json:"pb_task_id"`
    Kind        string `json:"kind"`         // "catalogue", "book", or "book_range"
    TargetURL   string `json:"target_url"`   // empty for catalogue tasks
    FromChapter int    `json:"from_chapter"` // 0 unless Kind=="book_range"
    ToChapter   int    `json:"to_chapter"`   // 0 unless Kind=="book_range"
}

// ImportPayload is the Asynq job payload for PDF/EPUB import tasks.
type ImportPayload struct {
    PBTaskID    string `json:"pb_task_id"`
    Slug        string `json:"slug"`
    Title       string `json:"title"`
    FileType    string `json:"file_type"`    // "pdf" or "epub"
    ObjectKey   string `json:"object_key"`   // MinIO path to uploaded file
    ChaptersKey string `json:"chapters_key"` // MinIO path to pre-parsed chapters JSON
}
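The handler side named in the package comment is not part of this hunk. A minimal sketch of the shape it would take, using the real asynq ServeMux API (runAudioTask is the hypothetical runner entry point mentioned in the comment above):

    mux := asynq.NewServeMux()
    mux.HandleFunc(TypeAudioGenerate, func(ctx context.Context, t *asynq.Task) error {
        var p AudioPayload
        if err := json.Unmarshal(t.Payload(), &p); err != nil {
            return fmt.Errorf("decode audio payload: %w", err)
        }
        // Run the existing audio logic, then write results back to PocketBase
        // via the Consumer using p.PBTaskID (FinishAudioTask / FailTask).
        return runAudioTask(ctx, p)
    })
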
143 backend/internal/backend/epub.go Normal file
@@ -0,0 +1,143 @@
package backend

import (
    "archive/zip"
    "bytes"
    "fmt"
    "strings"
)

type epubChapter struct {
    Number int
    Title  string
    HTML   string
}

func generateEPUB(slug, title, author string, chapters []epubChapter) ([]byte, error) {
    var buf bytes.Buffer
    w := zip.NewWriter(&buf)

    // 1. mimetype — MUST be first, MUST be uncompressed (Store method)
    mw, err := w.CreateHeader(&zip.FileHeader{
        Name:   "mimetype",
        Method: zip.Store,
    })
    if err != nil {
        return nil, err
    }
    mw.Write([]byte("application/epub+zip"))

    // 2. META-INF/container.xml
    addFile(w, "META-INF/container.xml", containerXML())

    // 3. OEBPS/style.css
    addFile(w, "OEBPS/style.css", epubCSS())

    // 4. OEBPS/content.opf
    addFile(w, "OEBPS/content.opf", contentOPF(slug, title, author, chapters))

    // 5. OEBPS/toc.ncx
    addFile(w, "OEBPS/toc.ncx", tocNCX(slug, title, chapters))

    // 6. Chapter files
    for _, ch := range chapters {
        name := fmt.Sprintf("OEBPS/chapter-%04d.xhtml", ch.Number)
        addFile(w, name, chapterXHTML(ch))
    }

    w.Close()
    return buf.Bytes(), nil
}

func addFile(w *zip.Writer, name, content string) {
    f, _ := w.Create(name)
    f.Write([]byte(content))
}

func containerXML() string {
    return `<?xml version="1.0" encoding="UTF-8"?>
<container version="1.0" xmlns="urn:oasis:names:tc:opendocument:xmlns:container">
  <rootfiles>
    <rootfile full-path="OEBPS/content.opf" media-type="application/oebps-package+xml"/>
  </rootfiles>
</container>`
}

func contentOPF(slug, title, author string, chapters []epubChapter) string {
    var items, spine strings.Builder
    for _, ch := range chapters {
        id := fmt.Sprintf("ch%04d", ch.Number)
        href := fmt.Sprintf("chapter-%04d.xhtml", ch.Number)
        items.WriteString(fmt.Sprintf(`    <item id="%s" href="%s" media-type="application/xhtml+xml"/>`+"\n", id, href))
        spine.WriteString(fmt.Sprintf(`    <itemref idref="%s"/>`+"\n", id))
    }
    return fmt.Sprintf(`<?xml version="1.0" encoding="UTF-8"?>
<package xmlns="http://www.idpf.org/2007/opf" unique-identifier="uid" version="2.0">
  <metadata xmlns:dc="http://purl.org/dc/elements/1.1/">
    <dc:title>%s</dc:title>
    <dc:creator>%s</dc:creator>
    <dc:identifier id="uid">%s</dc:identifier>
    <dc:language>en</dc:language>
  </metadata>
  <manifest>
    <item id="ncx" href="toc.ncx" media-type="application/x-dtbncx+xml"/>
    <item id="css" href="style.css" media-type="text/css"/>
%s  </manifest>
  <spine toc="ncx">
%s  </spine>
</package>`, escapeXML(title), escapeXML(author), slug, items.String(), spine.String())
}

func tocNCX(slug, title string, chapters []epubChapter) string {
    var points strings.Builder
    for i, ch := range chapters {
        chTitle := ch.Title
        if chTitle == "" {
            chTitle = fmt.Sprintf("Chapter %d", ch.Number)
        }
        points.WriteString(fmt.Sprintf(`    <navPoint id="np%d" playOrder="%d">
      <navLabel><text>%s</text></navLabel>
      <content src="chapter-%04d.xhtml"/>
    </navPoint>`+"\n", i+1, i+1, escapeXML(chTitle), ch.Number))
    }
    return fmt.Sprintf(`<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE ncx PUBLIC "-//NISO//DTD ncx 2005-1//EN" "http://www.daisy.org/z3986/2005/ncx-2005-1.dtd">
<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1">
  <head><meta name="dtb:uid" content="%s"/></head>
  <docTitle><text>%s</text></docTitle>
  <navMap>
%s  </navMap>
</ncx>`, slug, escapeXML(title), points.String())
}

func chapterXHTML(ch epubChapter) string {
    title := ch.Title
    if title == "" {
        title = fmt.Sprintf("Chapter %d", ch.Number)
    }
    return fmt.Sprintf(`<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head><title>%s</title><link rel="stylesheet" href="style.css"/></head>
<body>
  <h1 class="chapter-title">%s</h1>
  %s
</body>
</html>`, escapeXML(title), escapeXML(title), ch.HTML)
}

func epubCSS() string {
    return `body { font-family: Georgia, serif; font-size: 1em; line-height: 1.6; margin: 1em 2em; }
h1.chapter-title { font-size: 1.4em; margin-bottom: 1em; }
p { margin: 0 0 0.8em 0; text-indent: 1.5em; }
p:first-of-type { text-indent: 0; }
`
}

func escapeXML(s string) string {
    s = strings.ReplaceAll(s, "&", "&amp;")
    s = strings.ReplaceAll(s, "<", "&lt;")
    s = strings.ReplaceAll(s, ">", "&gt;")
    s = strings.ReplaceAll(s, `"`, "&quot;")
    return s
}
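A quick usage sketch for the generator above; the chapter data is invented for illustration:

    chapters := []epubChapter{
        {Number: 1, Title: "The Beginning", HTML: "<p>Once upon a time...</p>"},
        {Number: 2, HTML: "<p>It continued.</p>"}, // empty Title falls back to "Chapter 2"
    }
    data, err := generateEPUB("example-book", "Example Book", "Anonymous", chapters)
    if err != nil {
        return err
    }
    _ = os.WriteFile("example-book.epub", data, 0o644) // or stream data to the client

Note that chapterXHTML interpolates ch.HTML verbatim, so callers must supply well-formed XHTML fragments.
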
File diff suppressed because it is too large
233 backend/internal/backend/handlers_aijobs.go Normal file
@@ -0,0 +1,233 @@
package backend

import (
    "context"
    "encoding/json"
    "fmt"
    "net/http"
    "strings"
    "sync"
    "time"

    "github.com/libnovel/backend/internal/cfai"
    "github.com/libnovel/backend/internal/domain"
)

// ── Cancel registry ────────────────────────────────────────────────────────────
// cancelJobsMu guards cancelJobs.
var cancelJobsMu sync.Mutex

// cancelJobs maps a job ID to its CancelFunc. Entries are added when a batch
// job starts and removed when it finishes or is cancelled.
var cancelJobs = map[string]context.CancelFunc{}

func registerCancelJob(id string, cancel context.CancelFunc) {
    cancelJobsMu.Lock()
    cancelJobs[id] = cancel
    cancelJobsMu.Unlock()
}

func deregisterCancelJob(id string) {
    cancelJobsMu.Lock()
    delete(cancelJobs, id)
    cancelJobsMu.Unlock()
}

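The intended life cycle of a registry entry, sketched below; the surrounding batch-job code lives outside this hunk, so the jobID and ctx names are assumed:

    // At batch-job start: derive a cancellable context, register it under the
    // job ID, and clean both up on every exit path.
    jobCtx, cancel := context.WithCancel(ctx)
    registerCancelJob(jobID, cancel)
    defer func() {
        cancel()
        deregisterCancelJob(jobID)
    }()
    // ... long-running work that checks jobCtx.Err() between items ...
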
// ── AI Job list / get / cancel ─────────────────────────────────────────────────
|
||||
|
||||
// handleAdminListAIJobs handles GET /api/admin/ai-jobs.
|
||||
// Returns all ai_job records sorted by started descending.
|
||||
func (s *Server) handleAdminListAIJobs(w http.ResponseWriter, r *http.Request) {
|
||||
if s.deps.AIJobStore == nil {
|
||||
jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
|
||||
return
|
||||
}
|
||||
jobs, err := s.deps.AIJobStore.ListAIJobs(r.Context())
|
||||
if err != nil {
|
||||
s.deps.Log.Error("admin: list ai jobs failed", "err", err)
|
||||
jsonError(w, http.StatusInternalServerError, "list ai jobs: "+err.Error())
|
||||
return
|
||||
}
|
||||
writeJSON(w, 0, map[string]any{"jobs": jobs})
|
||||
}
|
||||
|
||||
// handleAdminGetAIJob handles GET /api/admin/ai-jobs/{id}.
|
||||
func (s *Server) handleAdminGetAIJob(w http.ResponseWriter, r *http.Request) {
|
||||
if s.deps.AIJobStore == nil {
|
||||
jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
|
||||
return
|
||||
}
|
||||
id := r.PathValue("id")
|
||||
job, ok, err := s.deps.AIJobStore.GetAIJob(r.Context(), id)
|
||||
if err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, err.Error())
|
||||
return
|
||||
}
|
||||
if !ok {
|
||||
jsonError(w, http.StatusNotFound, fmt.Sprintf("job %q not found", id))
|
||||
return
|
||||
}
|
||||
writeJSON(w, 0, job)
|
||||
}
|
||||
|
||||
// handleAdminCancelAIJob handles POST /api/admin/ai-jobs/{id}/cancel.
|
||||
// Marks the job as cancelled in PB and cancels the in-memory context if present.
|
||||
func (s *Server) handleAdminCancelAIJob(w http.ResponseWriter, r *http.Request) {
|
||||
if s.deps.AIJobStore == nil {
|
||||
jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
|
||||
return
|
||||
}
|
||||
id := r.PathValue("id")
|
||||
|
||||
// Cancel in-memory context if the job is still running in this process.
|
||||
cancelJobsMu.Lock()
|
||||
if cancel, ok := cancelJobs[id]; ok {
|
||||
cancel()
|
||||
}
|
||||
cancelJobsMu.Unlock()
|
||||
|
||||
// Mark as cancelled in PB.
|
||||
if err := s.deps.AIJobStore.UpdateAIJob(r.Context(), id, map[string]any{
|
||||
"status": string(domain.TaskStatusCancelled),
|
||||
"finished": time.Now().Format(time.RFC3339),
|
||||
}); err != nil {
|
||||
s.deps.Log.Error("admin: cancel ai job failed", "id", id, "err", err)
|
||||
jsonError(w, http.StatusInternalServerError, "cancel ai job: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
s.deps.Log.Info("admin: ai job cancelled", "id", id)
|
||||
writeJSON(w, 0, map[string]any{"cancelled": true})
|
||||
}
|
||||
|
||||
// ── Auto-prompt ────────────────────────────────────────────────────────────────
|
||||
|
||||
// autoPromptRequest is the JSON body for POST /api/admin/image-gen/auto-prompt.
|
||||
type autoPromptRequest struct {
|
||||
// Slug is the book slug.
|
||||
Slug string `json:"slug"`
|
||||
// Type is "cover" or "chapter".
|
||||
Type string `json:"type"`
|
||||
// Chapter number (required when type == "chapter").
|
||||
Chapter int `json:"chapter"`
|
||||
// Model is the text-gen model to use. Defaults to DefaultTextModel.
|
||||
Model string `json:"model"`
|
||||
}

// autoPromptResponse is returned by POST /api/admin/image-gen/auto-prompt.
type autoPromptResponse struct {
	Prompt string `json:"prompt"`
	Model  string `json:"model"`
}

// handleAdminImageGenAutoPrompt handles POST /api/admin/image-gen/auto-prompt.
//
// Uses the text generation model to create a vivid image generation prompt
// based on the book's description (for covers) or chapter title/content (for chapters).
func (s *Server) handleAdminImageGenAutoPrompt(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured")
		return
	}

	var req autoPromptRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if req.Type != "cover" && req.Type != "chapter" {
		jsonError(w, http.StatusBadRequest, `type must be "cover" or "chapter"`)
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := req.Model
	if model == "" {
		model = string(cfai.DefaultTextModel)
	}

	var userPrompt string
	if req.Type == "cover" {
		userPrompt = fmt.Sprintf(
			"Book: \"%s\"\nAuthor: %s\nGenres: %s\n\nDescription:\n%s",
			meta.Title,
			meta.Author,
			strings.Join(meta.Genres, ", "),
			meta.Summary,
		)
	} else {
		// For chapter images, use chapter title if available.
		chapterTitle := fmt.Sprintf("Chapter %d", req.Chapter)
		if req.Chapter > 0 {
			chapters, listErr := s.deps.BookReader.ListChapters(r.Context(), req.Slug)
			if listErr == nil {
				for _, ch := range chapters {
					if ch.Number == req.Chapter {
						chapterTitle = ch.Title
						break
					}
				}
			}
		}
		userPrompt = fmt.Sprintf(
			"Book: \"%s\"\nGenres: %s\nChapter: %s\n\nBook description:\n%s",
			meta.Title,
			strings.Join(meta.Genres, ", "),
			chapterTitle,
			meta.Summary,
		)
	}

	systemPrompt := buildAutoPromptSystem(req.Type)

	s.deps.Log.Info("admin: image auto-prompt requested",
		"slug", req.Slug, "type", req.Type, "chapter", req.Chapter, "model", model)

	result, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model: cfai.TextModel(model),
		Messages: []cfai.TextMessage{
			{Role: "system", Content: systemPrompt},
			{Role: "user", Content: userPrompt},
		},
		MaxTokens: 256,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: auto-prompt failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	writeJSON(w, 0, autoPromptResponse{
		Prompt: strings.TrimSpace(result),
		Model:  model,
	})
}
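
// A sketch of calling this endpoint, assuming a local dev server on port 8080
// and a hypothetical slug:
//
//	curl -X POST http://localhost:8080/api/admin/image-gen/auto-prompt \
//	  -H 'Content-Type: application/json' \
//	  -d '{"slug":"example-book","type":"cover"}'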

func buildAutoPromptSystem(imageType string) string {
	if imageType == "cover" {
		return `You are a professional prompt engineer for AI image generation (Stable Diffusion / FLUX models). ` +
			`Given a book's title, genres, and description, write a single vivid image generation prompt ` +
			`for a book cover. The prompt should describe the visual composition, art style, lighting, ` +
			`and mood without mentioning text or typography. ` +
			`Format: comma-separated visual descriptors, 30–60 words. ` +
			`Output ONLY the prompt — no explanation, no quotes, no labels.`
	}
	return `You are a professional prompt engineer for AI image generation (Stable Diffusion / FLUX models). ` +
		`Given a book's title, genres, and a specific chapter title, write a single vivid scene illustration prompt. ` +
		`Describe the scene, characters, setting, lighting, and art style. ` +
		`Format: comma-separated visual descriptors, 30–60 words. ` +
		`Output ONLY the prompt — no explanation, no quotes, no labels.`
}
117  backend/internal/backend/handlers_books_admin.go  Normal file
@@ -0,0 +1,117 @@
package backend

import (
	"errors"
	"net/http"

	"github.com/libnovel/backend/internal/storage"
)

// handleAdminArchiveBook handles PATCH /api/admin/books/{slug}/archive.
// Soft-deletes a book by setting archived=true in PocketBase and updating the
// Meilisearch document so it is excluded from all public search results.
// The book data is preserved and can be restored with the unarchive endpoint.
func (s *Server) handleAdminArchiveBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.ArchiveBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("archive book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Update the Meilisearch document so the archived flag takes effect
	// immediately in search/catalogue results.
	if meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), slug); err == nil && ok {
		if upsertErr := s.deps.SearchIndex.UpsertBook(r.Context(), meta); upsertErr != nil {
			s.deps.Log.Warn("archive book: meili upsert failed", "slug", slug, "err", upsertErr)
		}
	}

	s.deps.Log.Info("book archived", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "archived"})
}
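
// A sketch of calling this endpoint (hypothetical host and slug; any admin
// auth enforced by the proxy layer is omitted):
//
//	curl -X PATCH http://localhost:8080/api/admin/books/example-book/archive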

// handleAdminUnarchiveBook handles PATCH /api/admin/books/{slug}/unarchive.
// Restores a previously archived book by clearing the archived flag, making it
// publicly visible in search and catalogue results again.
func (s *Server) handleAdminUnarchiveBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.UnarchiveBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("unarchive book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Sync the updated archived=false state back to Meilisearch.
	if meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), slug); err == nil && ok {
		if upsertErr := s.deps.SearchIndex.UpsertBook(r.Context(), meta); upsertErr != nil {
			s.deps.Log.Warn("unarchive book: meili upsert failed", "slug", slug, "err", upsertErr)
		}
	}

	s.deps.Log.Info("book unarchived", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "active"})
}

// handleAdminDeleteBook handles DELETE /api/admin/books/{slug}.
// Permanently removes all data for a book:
//   - PocketBase books record and all chapters_idx records
//   - All MinIO chapter markdown objects and the cover image
//   - Meilisearch document
//
// This operation is irreversible. Use the archive endpoint for soft-deletion.
func (s *Server) handleAdminDeleteBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.DeleteBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("delete book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Remove from Meilisearch — best-effort (log on failure, don't fail request).
	if err := s.deps.SearchIndex.DeleteBook(r.Context(), slug); err != nil {
		s.deps.Log.Warn("delete book: meili delete failed", "slug", slug, "err", err)
	}

	s.deps.Log.Info("book deleted", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "deleted"})
}
792  backend/internal/backend/handlers_catalogue.go  Normal file
@@ -0,0 +1,792 @@
package backend

// Catalogue enrichment handlers: tagline, genre tagging, content warnings,
// quality scoring, batch cover regeneration, and per-book metadata refresh.
//
// All generation endpoints are admin-only (enforced by the SvelteKit proxy layer).
// All long-running operations support cancellation via r.Context().Done().
// Batch operations use an in-memory cancel registry (cancelJobs map) so the
// frontend can send a cancel request by job ID.

import (
	"context"
	"crypto/rand"
	"encoding/hex"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"time"

	"github.com/libnovel/backend/internal/cfai"
	"github.com/libnovel/backend/internal/domain"
)

// ── Tagline ───────────────────────────────────────────────────────────────

// textGenTaglineRequest is the JSON body for POST /api/admin/text-gen/tagline.
type textGenTaglineRequest struct {
	Slug      string `json:"slug"`
	Model     string `json:"model"`
	MaxTokens int    `json:"max_tokens"`
}

// textGenTaglineResponse is returned by POST /api/admin/text-gen/tagline.
type textGenTaglineResponse struct {
	OldTagline string `json:"old_tagline"`
	NewTagline string `json:"new_tagline"`
	Model      string `json:"model"`
}

// handleAdminTextGenTagline handles POST /api/admin/text-gen/tagline.
// Generates a one-sentence marketing hook for a book.
func (s *Server) handleAdminTextGenTagline(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured")
		return
	}

	var req textGenTaglineRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	system := `You are a copywriter for a web novel platform. ` +
		`Given a book's title, genres, and description, write a single punchy tagline ` +
		`(one sentence, under 20 words) that hooks a reader. ` +
		`Output ONLY the tagline — no quotes, no labels, no explanation.`

	user := fmt.Sprintf("Title: %s\nGenres: %s\n\nDescription:\n%s",
		meta.Title,
		strings.Join(meta.Genres, ", "),
		meta.Summary,
	)

	s.deps.Log.Info("admin: text-gen tagline requested", "slug", req.Slug, "model", model)

	result, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model:     model,
		Messages:  []cfai.TextMessage{{Role: "system", Content: system}, {Role: "user", Content: user}},
		MaxTokens: 64,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: text-gen tagline failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	writeJSON(w, 0, textGenTaglineResponse{
		OldTagline: "", // BookMeta has no tagline field yet — always empty
		NewTagline: strings.TrimSpace(result),
		Model:      string(model),
	})
}
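
// Illustrative response (tagline text and model ID are hypothetical):
//
//	{"old_tagline":"","new_tagline":"One choice. Two worlds. No way back.","model":"<text model id>"}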

// ── Genres ────────────────────────────────────────────────────────────────

// textGenGenresRequest is the JSON body for POST /api/admin/text-gen/genres.
type textGenGenresRequest struct {
	Slug      string `json:"slug"`
	Model     string `json:"model"`
	MaxTokens int    `json:"max_tokens"`
}

// textGenGenresResponse is returned by POST /api/admin/text-gen/genres.
type textGenGenresResponse struct {
	CurrentGenres  []string `json:"current_genres"`
	ProposedGenres []string `json:"proposed_genres"`
	Model          string   `json:"model"`
}

// handleAdminTextGenGenres handles POST /api/admin/text-gen/genres.
// Suggests a refined genre list based on the book's description.
func (s *Server) handleAdminTextGenGenres(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured")
		return
	}

	var req textGenGenresRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	system := `You are a genre classification expert for a web novel platform. ` +
		`Given a book's title and description, return a JSON array of 2–6 genre tags. ` +
		`Use only well-known web novel genres such as: ` +
		`Action, Adventure, Comedy, Drama, Fantasy, Historical, Horror, Isekai, Josei, ` +
		`Martial Arts, Mature, Mecha, Mystery, Psychological, Romance, School Life, ` +
		`Sci-fi, Seinen, Shoujo, Shounen, Slice of Life, Supernatural, System, Tragedy, Wuxia, Xianxia. ` +
		`Output ONLY a raw JSON array of strings — no prose, no markdown, no explanation. ` +
		`Example: ["Fantasy","Adventure","Action"]`

	user := fmt.Sprintf("Title: %s\nCurrent genres: %s\n\nDescription:\n%s",
		meta.Title,
		strings.Join(meta.Genres, ", "),
		meta.Summary,
	)

	s.deps.Log.Info("admin: text-gen genres requested", "slug", req.Slug, "model", model)

	raw, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model:     model,
		Messages:  []cfai.TextMessage{{Role: "system", Content: system}, {Role: "user", Content: user}},
		MaxTokens: 128,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: text-gen genres failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	proposed := parseStringArrayJSON(raw)

	writeJSON(w, 0, textGenGenresResponse{
		CurrentGenres:  meta.Genres,
		ProposedGenres: proposed,
		Model:          string(model),
	})
}

// handleAdminTextGenApplyGenres handles POST /api/admin/text-gen/genres/apply.
// Persists the confirmed genre list to PocketBase.
func (s *Server) handleAdminTextGenApplyGenres(w http.ResponseWriter, r *http.Request) {
	if s.deps.BookWriter == nil {
		jsonError(w, http.StatusServiceUnavailable, "book writer not configured")
		return
	}

	var req struct {
		Slug   string   `json:"slug"`
		Genres []string `json:"genres"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	meta.Genres = req.Genres
	if err := s.deps.BookWriter.WriteMetadata(r.Context(), meta); err != nil {
		s.deps.Log.Error("admin: apply genres failed", "slug", req.Slug, "err", err)
		jsonError(w, http.StatusInternalServerError, "write metadata: "+err.Error())
		return
	}

	s.deps.Log.Info("admin: genres applied", "slug", req.Slug, "genres", req.Genres)
	writeJSON(w, 0, map[string]any{"updated": true})
}

// ── Content warnings ──────────────────────────────────────────────────────

// textGenContentWarningsRequest is the JSON body for POST /api/admin/text-gen/content-warnings.
type textGenContentWarningsRequest struct {
	Slug      string `json:"slug"`
	Model     string `json:"model"`
	MaxTokens int    `json:"max_tokens"`
}

// textGenContentWarningsResponse is returned by POST /api/admin/text-gen/content-warnings.
type textGenContentWarningsResponse struct {
	Warnings []string `json:"warnings"`
	Model    string   `json:"model"`
}

// handleAdminTextGenContentWarnings handles POST /api/admin/text-gen/content-warnings.
// Detects mature or sensitive themes in a book's description.
func (s *Server) handleAdminTextGenContentWarnings(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured")
		return
	}

	var req textGenContentWarningsRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	system := `You are a content moderation assistant for a web novel platform. ` +
		`Given a book's title, genres, and description, detect any content warnings that should be shown to readers. ` +
		`Choose only relevant warnings from: Violence, Strong Language, Sexual Content, Mature Themes, ` +
		`Dark Themes, Gore, Torture, Abuse, Drug Use, Suicide/Self-Harm. ` +
		`If the book is clean, return an empty array. ` +
		`Output ONLY a raw JSON array of strings — no prose, no markdown. ` +
		`Example: ["Violence","Dark Themes"]`

	user := fmt.Sprintf("Title: %s\nGenres: %s\n\nDescription:\n%s",
		meta.Title,
		strings.Join(meta.Genres, ", "),
		meta.Summary,
	)

	s.deps.Log.Info("admin: text-gen content-warnings requested", "slug", req.Slug, "model", model)

	raw, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model:     model,
		Messages:  []cfai.TextMessage{{Role: "system", Content: system}, {Role: "user", Content: user}},
		MaxTokens: 128,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: text-gen content-warnings failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	warnings := parseStringArrayJSON(raw)

	writeJSON(w, 0, textGenContentWarningsResponse{
		Warnings: warnings,
		Model:    string(model),
	})
}

// ── Quality score ─────────────────────────────────────────────────────────

// textGenQualityScoreRequest is the JSON body for POST /api/admin/text-gen/quality-score.
type textGenQualityScoreRequest struct {
	Slug      string `json:"slug"`
	Model     string `json:"model"`
	MaxTokens int    `json:"max_tokens"`
}

// textGenQualityScoreResponse is returned by POST /api/admin/text-gen/quality-score.
type textGenQualityScoreResponse struct {
	Score    int    `json:"score"`    // 1–5
	Feedback string `json:"feedback"` // brief reasoning
	Model    string `json:"model"`
}

// handleAdminTextGenQualityScore handles POST /api/admin/text-gen/quality-score.
// Rates the book description quality on a 1–5 scale with brief feedback.
func (s *Server) handleAdminTextGenQualityScore(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured")
		return
	}

	var req textGenQualityScoreRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	system := `You are a book description quality reviewer for a web novel platform. ` +
		`Rate the provided description on a scale of 1–5 where: ` +
		`1=poor (vague/too short), 2=below average, 3=average, 4=good, 5=excellent (engaging/detailed). ` +
		`Respond with ONLY a JSON object: {"score": <1-5>, "feedback": "<one sentence explanation>"}. ` +
		`No markdown, no extra text.`

	user := fmt.Sprintf("Title: %s\nGenres: %s\n\nDescription:\n%s",
		meta.Title,
		strings.Join(meta.Genres, ", "),
		meta.Summary,
	)

	s.deps.Log.Info("admin: text-gen quality-score requested", "slug", req.Slug, "model", model)

	raw, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model:     model,
		Messages:  []cfai.TextMessage{{Role: "system", Content: system}, {Role: "user", Content: user}},
		MaxTokens: 128,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: text-gen quality-score failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	var parsed struct {
		Score    int    `json:"score"`
		Feedback string `json:"feedback"`
	}
	// Strip markdown fences if any.
	clean := extractJSONObject(raw)
	if err := json.Unmarshal([]byte(clean), &parsed); err != nil {
		// Fallback: try to extract a digit.
		parsed.Score = 0
		for _, ch := range raw {
			if ch >= '1' && ch <= '5' {
				parsed.Score = int(ch - '0')
				break
			}
		}
		parsed.Feedback = strings.TrimSpace(raw)
	}

	writeJSON(w, 0, textGenQualityScoreResponse{
		Score:    parsed.Score,
		Feedback: parsed.Feedback,
		Model:    string(model),
	})
}

// ── Batch cover regeneration ──────────────────────────────────────────────

// batchCoverEvent is one SSE event emitted during batch cover regeneration.
type batchCoverEvent struct {
	// JobID is the opaque identifier clients use to cancel this job.
	JobID   string `json:"job_id,omitempty"`
	Done    int    `json:"done"`
	Total   int    `json:"total"`
	Slug    string `json:"slug,omitempty"`
	Error   string `json:"error,omitempty"`
	Skipped bool   `json:"skipped,omitempty"`
	Finish  bool   `json:"finish,omitempty"`
}
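
// Illustrative wire format (hypothetical values): the initial event carries the
// job ID, skip/progress events carry the slug, and the last event sets finish.
//
//	data: {"job_id":"abc123","done":0,"total":42}
//
//	data: {"done":1,"total":42,"slug":"example-book","skipped":true}
//
//	data: {"done":42,"total":42,"finish":true}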

// handleAdminBatchCovers handles POST /api/admin/catalogue/batch-covers.
//
// Streams SSE events as it generates covers for every book that has no cover
// stored in MinIO. Each event carries progress info. The final event has Finish=true.
//
// Supports from_item/to_item to process a sub-range of the catalogue (0-based indices).
// Supports job_id to resume a previously interrupted job.
// The job can be cancelled by calling POST /api/admin/ai-jobs/{id}/cancel.
func (s *Server) handleAdminBatchCovers(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil || s.deps.ImageGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "image/text generation not configured")
		return
	}
	if s.deps.CoverStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "cover store not configured")
		return
	}

	var reqBody struct {
		Model    string `json:"model"`
		NumSteps int    `json:"num_steps"`
		Width    int    `json:"width"`
		Height   int    `json:"height"`
		FromItem int    `json:"from_item"`
		ToItem   int    `json:"to_item"`
		JobID    string `json:"job_id"`
	}
	// Body is optional — defaults used if absent.
	json.NewDecoder(r.Body).Decode(&reqBody) //nolint:errcheck

	allBooks, err := s.deps.BookReader.ListBooks(r.Context())
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "list books: "+err.Error())
		return
	}

	// Apply range filter.
	books := allBooks
	if reqBody.FromItem > 0 || reqBody.ToItem > 0 {
		from := reqBody.FromItem
		to := reqBody.ToItem
		if to == 0 || to >= len(allBooks) {
			to = len(allBooks) - 1
		}
		if from < 0 {
			from = 0
		}
		if from <= to && from < len(allBooks) {
			books = allBooks[from : to+1]
		}
	}

	// SSE headers.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("X-Accel-Buffering", "no")
	flusher, canFlush := w.(http.Flusher)

	sseWrite := func(evt batchCoverEvent) {
		b, _ := json.Marshal(evt)
		fmt.Fprintf(w, "data: %s\n\n", b)
		if canFlush {
			flusher.Flush()
		}
	}

	total := len(books)
	done := 0

	// Create or resume PB ai_job and register cancel context.
	var pbJobID string
	resumeFrom := 0
	ctx, cancel := context.WithCancel(r.Context())
	defer cancel()

	if s.deps.AIJobStore != nil {
		if reqBody.JobID != "" {
			if existing, ok, _ := s.deps.AIJobStore.GetAIJob(r.Context(), reqBody.JobID); ok {
				pbJobID = reqBody.JobID
				resumeFrom = existing.ItemsDone
				done = resumeFrom
				_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), pbJobID, map[string]any{
					"status":      string(domain.TaskStatusRunning),
					"items_total": total,
				})
			}
		}
		if pbJobID == "" {
			id, createErr := s.deps.AIJobStore.CreateAIJob(r.Context(), domain.AIJob{
				Kind:       "batch-covers",
				Status:     domain.TaskStatusRunning,
				FromItem:   reqBody.FromItem,
				ToItem:     reqBody.ToItem,
				ItemsTotal: total,
				Started:    time.Now(),
			})
			if createErr == nil {
				pbJobID = id
			}
		}
		if pbJobID != "" {
			registerCancelJob(pbJobID, cancel)
			defer deregisterCancelJob(pbJobID)
		}
	}

	// Use pbJobID as the SSE job_id when available, else a random hex fallback.
	sseJobID := pbJobID
	if sseJobID == "" {
		sseJobID = randomHex(8)
		ctx2, cancel2 := context.WithCancel(r.Context())
		registerCancelJob(sseJobID, cancel2)
		defer deregisterCancelJob(sseJobID)
		defer cancel2()
		cancel() // release the now-unused original context before swapping in ctx2
		ctx = ctx2
	}

	// Send initial event with jobID so frontend can store it for cancellation.
	sseWrite(batchCoverEvent{JobID: sseJobID, Done: done, Total: total})

	for i, book := range books {
		if ctx.Err() != nil {
			break
		}
		// Skip already-processed items when resuming.
		if i < resumeFrom {
			continue
		}

		// Check if cover already exists.
		hasCover := s.deps.CoverStore.CoverExists(ctx, book.Slug)
		if hasCover {
			done++
			sseWrite(batchCoverEvent{Done: done, Total: total, Slug: book.Slug, Skipped: true})
			if pbJobID != "" && s.deps.AIJobStore != nil {
				_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), pbJobID, map[string]any{"items_done": done})
			}
			continue
		}

		// Build a prompt from the book metadata.
		prompt := buildCoverPrompt(book)

		// Generate the image via CF AI.
		imgBytes, genErr := s.deps.ImageGen.GenerateImage(ctx, cfai.ImageRequest{
			Prompt:   prompt,
			NumSteps: reqBody.NumSteps,
			Width:    reqBody.Width,
			Height:   reqBody.Height,
		})
		if genErr != nil {
			done++
			s.deps.Log.Error("batch-covers: image gen failed", "slug", book.Slug, "err", genErr)
			sseWrite(batchCoverEvent{Done: done, Total: total, Slug: book.Slug, Error: genErr.Error()})
			continue
		}

		// Save to CoverStore.
		if saveErr := s.deps.CoverStore.PutCover(ctx, book.Slug, imgBytes, "image/png"); saveErr != nil {
			done++
			s.deps.Log.Error("batch-covers: save failed", "slug", book.Slug, "err", saveErr)
			sseWrite(batchCoverEvent{Done: done, Total: total, Slug: book.Slug, Error: saveErr.Error()})
			continue
		}

		done++
		s.deps.Log.Info("batch-covers: cover generated", "slug", book.Slug)
		sseWrite(batchCoverEvent{Done: done, Total: total, Slug: book.Slug})
		if pbJobID != "" && s.deps.AIJobStore != nil {
			_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), pbJobID, map[string]any{"items_done": done})
		}
	}

	if pbJobID != "" && s.deps.AIJobStore != nil {
		status := domain.TaskStatusDone
		if ctx.Err() != nil {
			status = domain.TaskStatusCancelled
		}
		_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), pbJobID, map[string]any{
			"status":     string(status),
			"items_done": done,
			"finished":   time.Now().Format(time.RFC3339),
		})
	}

	sseWrite(batchCoverEvent{Done: done, Total: total, Finish: true})
}

// handleAdminBatchCoversCancel handles POST /api/admin/catalogue/batch-covers/cancel.
// Cancels an in-progress batch cover job by its job ID.
func (s *Server) handleAdminBatchCoversCancel(w http.ResponseWriter, r *http.Request) {
	var req struct {
		JobID string `json:"job_id"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil || req.JobID == "" {
		jsonError(w, http.StatusBadRequest, "job_id is required")
		return
	}

	cancelJobsMu.Lock()
	cancel, ok := cancelJobs[req.JobID]
	cancelJobsMu.Unlock()

	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("job %q not found", req.JobID))
		return
	}
	cancel()
	s.deps.Log.Info("batch-covers: job cancelled", "job_id", req.JobID)
	writeJSON(w, 0, map[string]any{"cancelled": true})
}
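
// A sketch of cancelling a running job (hypothetical host and job ID):
//
//	curl -X POST http://localhost:8080/api/admin/catalogue/batch-covers/cancel \
//	  -H 'Content-Type: application/json' \
//	  -d '{"job_id":"abc123"}'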

// ── Refresh metadata (per-book) ────────────────────────────────────────────

// refreshMetadataEvent is one SSE event during per-book metadata refresh.
type refreshMetadataEvent struct {
	Step  string `json:"step"` // "description" | "cover" | "done"
	Done  bool   `json:"done"`
	Error string `json:"error,omitempty"`
}
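
// Illustrative event sequence for a successful refresh (hypothetical):
//
//	data: {"step":"description","done":false}
//	data: {"step":"cover","done":false}
//	data: {"step":"done","done":true}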

// handleAdminRefreshMetadata handles POST /api/admin/catalogue/refresh-metadata/{slug}.
//
// Runs description → cover generation in sequence for a single book and
// streams SSE progress. Interruptible via client disconnect (r.Context()).
func (s *Server) handleAdminRefreshMetadata(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", slug))
		return
	}

	// SSE headers.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("X-Accel-Buffering", "no")
	flusher, canFlush := w.(http.Flusher)

	sseWrite := func(evt refreshMetadataEvent) {
		b, _ := json.Marshal(evt)
		fmt.Fprintf(w, "data: %s\n\n", b)
		if canFlush {
			flusher.Flush()
		}
	}

	ctx := r.Context()

	// Step 1 — description.
	if s.deps.TextGen != nil {
		if ctx.Err() == nil {
			newDesc, genErr := s.deps.TextGen.Generate(ctx, cfai.TextRequest{
				Model: cfai.DefaultTextModel,
				Messages: []cfai.TextMessage{
					{Role: "system", Content: `You are a book description writer for a web novel platform. Write an improved description. Respond with ONLY the new description text — no title, no labels, no markdown.`},
					{Role: "user", Content: fmt.Sprintf("Title: %s\nGenres: %s\n\nCurrent description:\n%s\n\nInstructions: Write a compelling 2–4 sentence description. Keep it spoiler-free and engaging.", meta.Title, strings.Join(meta.Genres, ", "), meta.Summary)},
				},
				MaxTokens: 512,
			})
			if genErr == nil && strings.TrimSpace(newDesc) != "" && s.deps.BookWriter != nil {
				meta.Summary = strings.TrimSpace(newDesc)
				if writeErr := s.deps.BookWriter.WriteMetadata(ctx, meta); writeErr != nil {
					sseWrite(refreshMetadataEvent{Step: "description", Error: writeErr.Error()})
				} else {
					sseWrite(refreshMetadataEvent{Step: "description"})
				}
			} else if genErr != nil {
				sseWrite(refreshMetadataEvent{Step: "description", Error: genErr.Error()})
			}
		}
	}

	// Step 2 — cover.
	if s.deps.ImageGen != nil && s.deps.CoverStore != nil {
		if ctx.Err() == nil {
			prompt := buildCoverPrompt(meta)
			imgBytes, genErr := s.deps.ImageGen.GenerateImage(ctx, cfai.ImageRequest{Prompt: prompt})
			if genErr == nil {
				if saveErr := s.deps.CoverStore.PutCover(ctx, slug, imgBytes, "image/png"); saveErr != nil {
					sseWrite(refreshMetadataEvent{Step: "cover", Error: saveErr.Error()})
				} else {
					sseWrite(refreshMetadataEvent{Step: "cover"})
				}
			} else {
				sseWrite(refreshMetadataEvent{Step: "cover", Error: genErr.Error()})
			}
		}
	}

	sseWrite(refreshMetadataEvent{Step: "done", Done: true})
}

// ── Helpers ───────────────────────────────────────────────────────────────

// parseStringArrayJSON extracts a JSON string array from model output,
// tolerating markdown fences and surrounding prose.
func parseStringArrayJSON(raw string) []string {
	s := raw
	if idx := strings.Index(s, "```json"); idx >= 0 {
		s = s[idx+7:]
	} else if idx := strings.Index(s, "```"); idx >= 0 {
		s = s[idx+3:]
	}
	if idx := strings.LastIndex(s, "```"); idx >= 0 {
		s = s[:idx]
	}
	start := strings.Index(s, "[")
	end := strings.LastIndex(s, "]")
	if start < 0 || end <= start {
		return nil
	}
	s = s[start : end+1]
	var out []string
	json.Unmarshal([]byte(s), &out) //nolint:errcheck
	return out
}
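
// For example, given a hypothetical fenced model reply:
//
//	parseStringArrayJSON("Here you go:\n```json\n[\"Fantasy\",\"Action\"]\n```")
//	// → []string{"Fantasy", "Action"}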

// extractJSONObject returns the substring from the first "{" to the last "}"
// in raw, falling back to raw itself when no object is found.
func extractJSONObject(raw string) string {
	start := strings.Index(raw, "{")
	end := strings.LastIndex(raw, "}")
	if start < 0 || end <= start {
		return raw
	}
	return raw[start : end+1]
}
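
// For example, given a hypothetical chatty model reply:
//
//	extractJSONObject(`Sure! {"score": 4, "feedback": "solid"} Hope that helps.`)
//	// → `{"score": 4, "feedback": "solid"}`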

// buildCoverPrompt constructs a prompt string for cover generation from a book.
func buildCoverPrompt(meta domain.BookMeta) string {
	parts := []string{"book cover art"}
	if meta.Title != "" {
		parts = append(parts, "titled \""+meta.Title+"\"")
	}
	if len(meta.Genres) > 0 {
		parts = append(parts, strings.Join(meta.Genres, ", ")+" genre")
	}
	if meta.Summary != "" {
		summary := meta.Summary
		if len(summary) > 200 {
			summary = summary[:200]
		}
		parts = append(parts, summary)
	}
	return strings.Join(parts, ", ")
}
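
// For a hypothetical book titled "Ashes of Dawn" with genres [Fantasy, Action],
// the result is roughly:
//
//	book cover art, titled "Ashes of Dawn", Fantasy, Action genre, <first 200 bytes of summary>
//
// Note that the 200 cut is byte-based, so a multi-byte rune at the boundary
// may be split.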

// randomHex returns the hex encoding of n random bytes (a 2n-character string).
func randomHex(n int) string {
	b := make([]byte, n)
	_, _ = rand.Read(b)
	return hex.EncodeToString(b)
}
645  backend/internal/backend/handlers_image.go  Normal file
@@ -0,0 +1,645 @@
package backend

import (
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/libnovel/backend/internal/cfai"
	"github.com/libnovel/backend/internal/domain"
)

// handleAdminImageGenModels handles GET /api/admin/image-gen/models.
// Returns the list of supported Cloudflare AI image generation models.
func (s *Server) handleAdminImageGenModels(w http.ResponseWriter, r *http.Request) {
	if s.deps.ImageGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "image generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}
	models := s.deps.ImageGen.Models()
	writeJSON(w, 0, map[string]any{"models": models})
}

// imageGenRequest is the JSON body for POST /api/admin/image-gen.
type imageGenRequest struct {
	// Prompt is the text description of the desired image.
	Prompt string `json:"prompt"`

	// Model is the CF Workers AI model ID (e.g. "@cf/black-forest-labs/flux-2-dev").
	// Defaults to the recommended model for the given type.
	Model string `json:"model"`

	// Type is either "cover" or "chapter".
	Type string `json:"type"`

	// Slug is the book slug. Required for both types.
	Slug string `json:"slug"`

	// Chapter number (1-based). Required when type == "chapter".
	Chapter int `json:"chapter"`

	// ReferenceImageB64 is an optional base64-encoded PNG/JPEG reference image.
	// When present the img2img path is used.
	ReferenceImageB64 string `json:"reference_image_b64"`

	// NumSteps overrides inference steps (default 20).
	NumSteps int `json:"num_steps"`

	// Width / Height override output dimensions (0 = model default).
	Width  int `json:"width"`
	Height int `json:"height"`

	// Guidance overrides prompt guidance scale (0 = model default).
	Guidance float64 `json:"guidance"`

	// Strength for img2img: 0.0–1.0, default 0.75.
	Strength float64 `json:"strength"`

	// SaveToCover when true stores the result as the book cover in MinIO
	// (overwriting any existing cover) and sets the book's cover URL.
	// Only valid when type == "cover".
	SaveToCover bool `json:"save_to_cover"`
}
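
// Illustrative request body (all values hypothetical):
//
//	{
//	  "prompt": "stormy sea at dusk, oil painting, dramatic lighting",
//	  "type": "cover",
//	  "slug": "example-book",
//	  "num_steps": 20,
//	  "save_to_cover": true
//	}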

// imageGenResponse is the JSON body returned by POST /api/admin/image-gen.
type imageGenResponse struct {
	// ImageB64 is the generated image as a base64-encoded string.
	ImageB64 string `json:"image_b64"`
	// ContentType is "image/png", "image/jpeg", or "image/webp".
	ContentType string `json:"content_type"`
	// Saved indicates whether the image was persisted to MinIO.
	Saved bool `json:"saved"`
	// CoverURL is the URL the cover is now served from (only set when Saved==true).
	CoverURL string `json:"cover_url,omitempty"`
	// Model is the model that was used.
	Model string `json:"model"`
	// Bytes is the raw image size in bytes.
	Bytes int `json:"bytes"`
}

// handleAdminImageGen handles POST /api/admin/image-gen.
//
// Generates an image using Cloudflare Workers AI and optionally stores it.
// Multipart/form-data is also accepted so the reference image can be uploaded
// directly; otherwise the reference is expected as base64 JSON.
func (s *Server) handleAdminImageGen(w http.ResponseWriter, r *http.Request) {
	if s.deps.ImageGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "image generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}

	var req imageGenRequest
	var refImageData []byte

	ct := r.Header.Get("Content-Type")
	if strings.HasPrefix(ct, "multipart/form-data") {
		// Multipart: parse JSON fields from a "json" part + optional "reference" file part.
		if err := r.ParseMultipartForm(32 << 20); err != nil {
			jsonError(w, http.StatusBadRequest, "parse multipart: "+err.Error())
			return
		}
		if jsonPart := r.FormValue("json"); jsonPart != "" {
			if err := json.Unmarshal([]byte(jsonPart), &req); err != nil {
				jsonError(w, http.StatusBadRequest, "parse json field: "+err.Error())
				return
			}
		}
		if f, _, err := r.FormFile("reference"); err == nil {
			defer f.Close()
			refImageData, _ = io.ReadAll(f)
		}
	} else {
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
			return
		}
		if req.ReferenceImageB64 != "" {
			var decErr error
			refImageData, decErr = base64.StdEncoding.DecodeString(req.ReferenceImageB64)
			if decErr != nil {
				// Fall back to the unpadded standard encoding.
				refImageData, decErr = base64.RawStdEncoding.DecodeString(req.ReferenceImageB64)
				if decErr != nil {
					jsonError(w, http.StatusBadRequest, "decode reference_image_b64: "+decErr.Error())
					return
				}
			}
		}
	}

	if strings.TrimSpace(req.Prompt) == "" {
		jsonError(w, http.StatusBadRequest, "prompt is required")
		return
	}
	if req.Type != "cover" && req.Type != "chapter" {
		jsonError(w, http.StatusBadRequest, `type must be "cover" or "chapter"`)
		return
	}
	if req.Slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if req.Type == "chapter" && req.Chapter <= 0 {
		jsonError(w, http.StatusBadRequest, "chapter must be > 0 when type is chapter")
		return
	}

	// Resolve model.
	model := cfai.ImageModel(req.Model)
	if model == "" {
		if req.Type == "cover" {
			model = cfai.DefaultImageModel
		} else {
			model = cfai.ImageModelFlux2Klein4B
		}
	}

	imgReq := cfai.ImageRequest{
		Prompt:   req.Prompt,
		Model:    model,
		NumSteps: req.NumSteps,
		Width:    req.Width,
		Height:   req.Height,
		Guidance: req.Guidance,
		Strength: req.Strength,
	}

	s.deps.Log.Info("admin: image gen requested",
		"type", req.Type, "slug", req.Slug, "chapter", req.Chapter,
		"model", model, "has_reference", len(refImageData) > 0)

	var imgData []byte
	var genErr error
	if len(refImageData) > 0 {
		imgData, genErr = s.deps.ImageGen.GenerateImageFromReference(r.Context(), imgReq, refImageData)
	} else {
		imgData, genErr = s.deps.ImageGen.GenerateImage(r.Context(), imgReq)
	}
	if genErr != nil {
		s.deps.Log.Error("admin: image gen failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "image generation failed: "+genErr.Error())
		return
	}

	contentType := sniffImageContentType(imgData)

	// ── Optional persistence ──────────────────────────────────────────────────
	var saved bool
	var coverURL string

	if req.SaveToCover && req.Type == "cover" && s.deps.CoverStore != nil {
		if err := s.deps.CoverStore.PutCover(r.Context(), req.Slug, imgData, contentType); err != nil {
			s.deps.Log.Error("admin: save generated cover failed", "slug", req.Slug, "err", err)
			// Non-fatal: still return the image.
		} else {
			saved = true
			coverURL = fmt.Sprintf("/api/cover/novelfire.net/%s", req.Slug)
			s.deps.Log.Info("admin: generated cover saved", "slug", req.Slug, "bytes", len(imgData))
		}
	}

	// Encode result as base64.
	b64 := base64.StdEncoding.EncodeToString(imgData)

	writeJSON(w, 0, imageGenResponse{
		ImageB64:    b64,
		ContentType: contentType,
		Saved:       saved,
		CoverURL:    coverURL,
		Model:       string(model),
		Bytes:       len(imgData),
	})
}

// saveCoverRequest is the JSON body for POST /api/admin/image-gen/save-cover.
type saveCoverRequest struct {
	// Slug is the book slug whose cover should be overwritten.
	Slug string `json:"slug"`
	// ImageB64 is the base64-encoded image bytes (PNG or JPEG).
	ImageB64 string `json:"image_b64"`
}

// handleAdminImageGenSaveCover handles POST /api/admin/image-gen/save-cover.
//
// Accepts a pre-generated image as base64 and stores it as the book cover in
// MinIO, replacing the existing one. Does not call Cloudflare AI at all.
func (s *Server) handleAdminImageGenSaveCover(w http.ResponseWriter, r *http.Request) {
	if s.deps.CoverStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "cover store not configured")
		return
	}

	var req saveCoverRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if req.Slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if req.ImageB64 == "" {
		jsonError(w, http.StatusBadRequest, "image_b64 is required")
		return
	}

	imgData, err := base64.StdEncoding.DecodeString(req.ImageB64)
	if err != nil {
		imgData, err = base64.RawStdEncoding.DecodeString(req.ImageB64)
		if err != nil {
			jsonError(w, http.StatusBadRequest, "decode image_b64: "+err.Error())
			return
		}
	}

	contentType := sniffImageContentType(imgData)
	if err := s.deps.CoverStore.PutCover(r.Context(), req.Slug, imgData, contentType); err != nil {
		s.deps.Log.Error("admin: save-cover failed", "slug", req.Slug, "err", err)
		jsonError(w, http.StatusInternalServerError, "save cover: "+err.Error())
		return
	}

	s.deps.Log.Info("admin: cover saved via image-gen", "slug", req.Slug, "bytes", len(imgData))
	writeJSON(w, 0, map[string]any{
		"saved":     true,
		"cover_url": fmt.Sprintf("/api/cover/novelfire.net/%s", req.Slug),
		"bytes":     len(imgData),
	})
}

// sniffImageContentType returns the MIME type of the image bytes.
func sniffImageContentType(data []byte) string {
	if len(data) >= 4 {
		// PNG: 0x89 P N G
		if data[0] == 0x89 && data[1] == 0x50 && data[2] == 0x4e && data[3] == 0x47 {
			return "image/png"
		}
		// JPEG: FF D8 FF
		if data[0] == 0xFF && data[1] == 0xD8 && data[2] == 0xFF {
			return "image/jpeg"
		}
		// WebP: RIFF....WEBP
		if len(data) >= 12 && data[0] == 'R' && data[1] == 'I' && data[2] == 'F' && data[3] == 'F' &&
			data[8] == 'W' && data[9] == 'E' && data[10] == 'B' && data[11] == 'P' {
			return "image/webp"
		}
	}
	return "image/png"
}

// saveChapterImageRequest is the JSON body for POST /api/admin/image-gen/save-chapter-image.
type saveChapterImageRequest struct {
	// Slug is the book slug.
	Slug string `json:"slug"`
	// Chapter is the 1-based chapter number.
	Chapter int `json:"chapter"`
	// ImageB64 is the base64-encoded image bytes (PNG or JPEG).
	ImageB64 string `json:"image_b64"`
}
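
// Illustrative request body (all values hypothetical):
//
//	{"slug": "example-book", "chapter": 3, "image_b64": "<base64-encoded PNG bytes>"}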

// handleAdminImageGenSaveChapterImage handles POST /api/admin/image-gen/save-chapter-image.
//
// Accepts a pre-generated image as base64 and stores it as the chapter illustration
// in MinIO, replacing the existing one if present. Does not call Cloudflare AI.
func (s *Server) handleAdminImageGenSaveChapterImage(w http.ResponseWriter, r *http.Request) {
	if s.deps.ChapterImageStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "chapter image store not configured")
		return
	}

	var req saveChapterImageRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if req.Slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if req.Chapter <= 0 {
		jsonError(w, http.StatusBadRequest, "chapter must be > 0")
		return
	}
	if req.ImageB64 == "" {
		jsonError(w, http.StatusBadRequest, "image_b64 is required")
		return
	}

	imgData, err := base64.StdEncoding.DecodeString(req.ImageB64)
	if err != nil {
		imgData, err = base64.RawStdEncoding.DecodeString(req.ImageB64)
		if err != nil {
			jsonError(w, http.StatusBadRequest, "decode image_b64: "+err.Error())
			return
		}
	}

	contentType := sniffImageContentType(imgData)
	if err := s.deps.ChapterImageStore.PutChapterImage(r.Context(), req.Slug, req.Chapter, imgData, contentType); err != nil {
		s.deps.Log.Error("admin: save-chapter-image failed", "slug", req.Slug, "chapter", req.Chapter, "err", err)
		jsonError(w, http.StatusInternalServerError, "save chapter image: "+err.Error())
		return
	}

	s.deps.Log.Info("admin: chapter image saved", "slug", req.Slug, "chapter", req.Chapter, "bytes", len(imgData))
	writeJSON(w, 0, map[string]any{
		"saved":     true,
		"image_url": fmt.Sprintf("/api/chapter-image/novelfire.net/%s/%d", req.Slug, req.Chapter),
		"bytes":     len(imgData),
	})
}

// handleHeadChapterImage handles HEAD /api/chapter-image/{domain}/{slug}/{n}.
//
// Returns 200 when an image exists for this chapter, 404 otherwise.
// Used by the SSR loader to check existence without downloading the full image.
func (s *Server) handleHeadChapterImage(w http.ResponseWriter, r *http.Request) {
	if s.deps.ChapterImageStore == nil {
		w.WriteHeader(http.StatusNotFound)
		return
	}

	slug := r.PathValue("slug")
	nStr := r.PathValue("n")
	n, err := strconv.Atoi(nStr)
	if err != nil || n <= 0 {
		w.WriteHeader(http.StatusBadRequest)
		return
	}

	if s.deps.ChapterImageStore.ChapterImageExists(r.Context(), slug, n) {
		w.WriteHeader(http.StatusOK)
	} else {
		w.WriteHeader(http.StatusNotFound)
	}
}

// handleGetChapterImage handles GET /api/chapter-image/{domain}/{slug}/{n}.
//
// Serves the stored chapter illustration directly from MinIO.
// Returns 404 when no image has been saved for this chapter.
func (s *Server) handleGetChapterImage(w http.ResponseWriter, r *http.Request) {
	if s.deps.ChapterImageStore == nil {
		http.NotFound(w, r)
		return
	}

	slug := r.PathValue("slug")
	nStr := r.PathValue("n")
	n, err := strconv.Atoi(nStr)
	if err != nil || n <= 0 {
		jsonError(w, http.StatusBadRequest, "invalid chapter number")
		return
	}

	data, contentType, ok, err := s.deps.ChapterImageStore.GetChapterImage(r.Context(), slug, n)
	if err != nil {
		s.deps.Log.Error("chapter-image: get failed", "slug", slug, "n", n, "err", err)
		jsonError(w, http.StatusInternalServerError, "could not retrieve chapter image")
		return
	}
	if !ok {
		http.NotFound(w, r)
		return
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Cache-Control", "public, max-age=31536000, immutable")
	w.Header().Set("Content-Length", fmt.Sprintf("%d", len(data)))
	w.WriteHeader(http.StatusOK)
	_, _ = w.Write(data)
}

// handleAdminImageGenAsync handles POST /api/admin/image-gen/async.
//
// Fire-and-forget variant: validates the request, creates an ai_job record of
// kind "image-gen", spawns a background goroutine, and returns HTTP 202 with
// {job_id} immediately. The goroutine calls Cloudflare AI, stores the result
// as base64 in the job payload, and marks the job done/failed when finished.
//
// The admin can then review the result via the ai-jobs page and approve
// (save as cover) or reject (discard) the image.
func (s *Server) handleAdminImageGenAsync(w http.ResponseWriter, r *http.Request) {
	if s.deps.ImageGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "image generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}
	if s.deps.AIJobStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
		return
	}

	var req imageGenRequest
	var refImageData []byte

	ct := r.Header.Get("Content-Type")
	if strings.HasPrefix(ct, "multipart/form-data") {
		if err := r.ParseMultipartForm(32 << 20); err != nil {
			jsonError(w, http.StatusBadRequest, "parse multipart: "+err.Error())
			return
		}
		if jsonPart := r.FormValue("json"); jsonPart != "" {
			if err := json.Unmarshal([]byte(jsonPart), &req); err != nil {
				jsonError(w, http.StatusBadRequest, "parse json field: "+err.Error())
				return
			}
		}
		if f, _, err := r.FormFile("reference"); err == nil {
			defer f.Close()
			refImageData, _ = io.ReadAll(f)
		}
	} else {
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
			return
		}
		if req.ReferenceImageB64 != "" {
			var decErr error
			refImageData, decErr = base64.StdEncoding.DecodeString(req.ReferenceImageB64)
			if decErr != nil {
				refImageData, decErr = base64.RawStdEncoding.DecodeString(req.ReferenceImageB64)
				if decErr != nil {
					jsonError(w, http.StatusBadRequest, "decode reference_image_b64: "+decErr.Error())
					return
				}
			}
		}
	}

	if strings.TrimSpace(req.Prompt) == "" {
		jsonError(w, http.StatusBadRequest, "prompt is required")
		return
	}
	if req.Type != "cover" && req.Type != "chapter" {
		jsonError(w, http.StatusBadRequest, `type must be "cover" or "chapter"`)
		return
	}
	if req.Slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if req.Type == "chapter" && req.Chapter <= 0 {
		jsonError(w, http.StatusBadRequest, "chapter must be > 0 when type is chapter")
		return
	}

	// Resolve model.
	model := cfai.ImageModel(req.Model)
	if model == "" {
		if req.Type == "cover" {
			model = cfai.DefaultImageModel
		} else {
			model = cfai.ImageModelFlux2Klein4B
		}
	}

	// Encode request params as job payload so the UI can reconstruct context.
	type jobParams struct {
		Prompt   string  `json:"prompt"`
		Type     string  `json:"type"`
		Chapter  int     `json:"chapter,omitempty"`
		NumSteps int     `json:"num_steps,omitempty"`
		Width    int     `json:"width,omitempty"`
		Height   int     `json:"height,omitempty"`
		Guidance float64 `json:"guidance,omitempty"`
		Strength float64 `json:"strength,omitempty"`
		HasRef   bool    `json:"has_ref,omitempty"`
	}
	paramsJSON, _ := json.Marshal(jobParams{
		Prompt:   req.Prompt,
		Type:     req.Type,
		Chapter:  req.Chapter,
		NumSteps: req.NumSteps,
		Width:    req.Width,
		Height:   req.Height,
		Guidance: req.Guidance,
		Strength: req.Strength,
		HasRef:   len(refImageData) > 0,
	})

	jobID, createErr := s.deps.AIJobStore.CreateAIJob(r.Context(), domain.AIJob{
		Kind:    "image-gen",
		Slug:    req.Slug,
		Status:  domain.TaskStatusPending,
		Model:   string(model),
		Payload: string(paramsJSON),
		Started: time.Now(),
	})
	if createErr != nil {
		jsonError(w, http.StatusInternalServerError, "create ai job: "+createErr.Error())
		return
	}

	jobCtx, jobCancel := context.WithCancel(context.Background())
	registerCancelJob(jobID, jobCancel)

	// Mark running before returning.
	_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
		"status": string(domain.TaskStatusRunning),
	})

	s.deps.Log.Info("admin: image-gen async started",
		"job_id", jobID, "slug", req.Slug, "type", req.Type, "model", model)

	// Capture locals for the goroutine.
	store := s.deps.AIJobStore
	imageGen := s.deps.ImageGen
	coverStore := s.deps.CoverStore
	logger := s.deps.Log
	capturedReq := req
	capturedModel := model
	capturedRefImage := refImageData

	go func() {
		defer deregisterCancelJob(jobID)
		defer jobCancel()

		if jobCtx.Err() != nil {
			_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
				"status":   string(domain.TaskStatusCancelled),
				"finished": time.Now().Format(time.RFC3339),
			})
			return
		}

		imgReq := cfai.ImageRequest{
			Prompt:   capturedReq.Prompt,
			Model:    capturedModel,
			NumSteps: capturedReq.NumSteps,
			Width:    capturedReq.Width,
			Height:   capturedReq.Height,
			Guidance: capturedReq.Guidance,
			Strength: capturedReq.Strength,
		}

		var imgData []byte
		var genErr error
		if len(capturedRefImage) > 0 {
			imgData, genErr = imageGen.GenerateImageFromReference(jobCtx, imgReq, capturedRefImage)
		} else {
			imgData, genErr = imageGen.GenerateImage(jobCtx, imgReq)
		}

		if genErr != nil {
			logger.Error("admin: image-gen async failed", "job_id", jobID, "err", genErr)
			_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
				"status":        string(domain.TaskStatusFailed),
				"error_message": genErr.Error(),
				"finished":      time.Now().Format(time.RFC3339),
			})
			return
		}

		contentType := sniffImageContentType(imgData)
		b64 := base64.StdEncoding.EncodeToString(imgData)

		// Build result payload: include the original params + the generated image.
		type resultPayload struct {
			Prompt      string  `json:"prompt"`
			Type        string  `json:"type"`
			Chapter     int     `json:"chapter,omitempty"`
			ContentType string  `json:"content_type"`
			ImageB64    string  `json:"image_b64"`
			Bytes       int     `json:"bytes"`
			NumSteps    int     `json:"num_steps,omitempty"`
			Width       int     `json:"width,omitempty"`
			Height      int     `json:"height,omitempty"`
			Guidance    float64 `json:"guidance,omitempty"`
		}
		resultJSON, _ := json.Marshal(resultPayload{
			Prompt:      capturedReq.Prompt,
			Type:        capturedReq.Type,
			Chapter:     capturedReq.Chapter,
			ContentType: contentType,
			ImageB64:    b64,
			Bytes:       len(imgData),
			NumSteps:    capturedReq.NumSteps,
			Width:       capturedReq.Width,
			Height:      capturedReq.Height,
			Guidance:    capturedReq.Guidance,
		})

		_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
|
||||
"status": string(domain.TaskStatusDone),
|
||||
"items_done": 1,
|
||||
"items_total": 1,
|
||||
"payload": string(resultJSON),
|
||||
"finished": time.Now().Format(time.RFC3339),
|
||||
})
|
||||
|
||||
logger.Info("admin: image-gen async done",
|
||||
"job_id", jobID, "slug", capturedReq.Slug,
|
||||
"bytes", len(imgData), "content_type", contentType)
|
||||
|
||||
		// coverStore is not used in this async path yet (the admin approves and
		// saves the image in a separate step); keep this reference so the
		// captured variable does not become an unused-variable compile error.
		_ = coverStore
	}()

	writeJSON(w, http.StatusAccepted, map[string]any{"job_id": jobID})
}
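// Illustrative client flow (a sketch: the polling route comes from the ai-jobs
// page mentioned above; the exact POST path for this handler is assumed):
//
//	POST /api/admin/image-gen/async {"prompt":"...","type":"cover","slug":"my-book"}
//	  -> 202 {"job_id":"abc123"}
//	GET /api/admin/ai-jobs/abc123
//	  -> {"status":"running", ...}
//	  -> {"status":"done","payload":"{\"content_type\":\"image/png\",\"image_b64\":\"...\",\"bytes\":123456,...}"}
//
// The generated image travels base64-encoded inside the job payload, so the UI
// decodes image_b64 for preview before the admin approves or rejects it.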
backend/internal/backend/handlers_import.go (new file, 234 lines)
@@ -0,0 +1,234 @@
package backend

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"path/filepath"
	"strings"
	"time"

	"github.com/libnovel/backend/internal/domain"
	"github.com/libnovel/backend/internal/storage"
)

type importRequest struct {
	Title      string   `json:"title"`
	Author     string   `json:"author"`
	CoverURL   string   `json:"cover_url"`
	Genres     []string `json:"genres"`
	Summary    string   `json:"summary"`
	BookStatus string   `json:"book_status"` // "ongoing" | "completed" | "hiatus"
	FileName   string   `json:"file_name"`
	FileType   string   `json:"file_type"`  // "pdf" or "epub"
	ObjectKey  string   `json:"object_key"` // MinIO path to uploaded file
}

type importResponse struct {
	TaskID  string         `json:"task_id"`
	Slug    string         `json:"slug"`
	Preview *importPreview `json:"preview,omitempty"`
}

type importPreview struct {
	Chapters   int      `json:"chapters"`
	FirstLines []string `json:"first_lines"`
}

func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
	if s.deps.Producer == nil {
		jsonError(w, http.StatusServiceUnavailable, "task queue not configured")
		return
	}

	ct := r.Header.Get("Content-Type")
	var req importRequest
	var objectKey string
	var chaptersKey string
	var chapterCount int

	if strings.HasPrefix(ct, "multipart/form-data") {
		if err := r.ParseMultipartForm(32 << 20); err != nil {
			jsonError(w, http.StatusBadRequest, "parse multipart: "+err.Error())
			return
		}
		req.Title = r.FormValue("title")
		req.Author = r.FormValue("author")
		req.CoverURL = r.FormValue("cover_url")
		req.Summary = r.FormValue("summary")
		req.BookStatus = r.FormValue("book_status")
		if g := r.FormValue("genres"); g != "" {
			for _, s := range strings.Split(g, ",") {
				if s = strings.TrimSpace(s); s != "" {
					req.Genres = append(req.Genres, s)
				}
			}
		}
		req.FileName = r.FormValue("file_name")
		req.FileType = r.FormValue("file_type")
		analyzeOnly := r.FormValue("analyze") == "true"

		file, header, err := r.FormFile("file")
		if err != nil {
			jsonError(w, http.StatusBadRequest, "parse file: "+err.Error())
			return
		}
		defer file.Close()

		if req.FileName == "" {
			req.FileName = header.Filename
		}
		if req.FileType == "" {
			req.FileType = strings.TrimPrefix(filepath.Ext(header.Filename), ".")
		}

		data, err := io.ReadAll(file)
		if err != nil {
			jsonError(w, http.StatusBadRequest, "read file: "+err.Error())
			return
		}

		// Analyze only - just count chapters
		if analyzeOnly {
			preview := analyzeImportFile(data, req.FileType)
			writeJSON(w, 0, importResponse{
				Preview: preview,
			})
			return
		}

		// Parse PDF/EPUB on the backend (with timeout) and store chapters as JSON.
		// The runner only needs to ingest pre-parsed chapters — no PDF parsing on runner.
		parseCtx, parseCancel := context.WithTimeout(r.Context(), 3*time.Minute)
		defer parseCancel()
		chapters, parseErr := storage.ParseImportFile(parseCtx, data, req.FileType)
		if parseErr != nil || len(chapters) == 0 {
			msg := "no chapters found"
			if parseErr != nil {
				msg = parseErr.Error()
			}
			jsonError(w, http.StatusUnprocessableEntity, "could not parse file: "+msg)
			return
		}

		// Store raw file in MinIO (for reference/re-import).
		objectKey = fmt.Sprintf("imports/%d_%s", time.Now().Unix(), header.Filename)
		if s.deps.ImportFileStore == nil {
			jsonError(w, http.StatusInternalServerError, "storage not available")
			return
		}
		if err := s.deps.ImportFileStore.PutImportFile(r.Context(), objectKey, data); err != nil {
			jsonError(w, http.StatusInternalServerError, "upload file: "+err.Error())
			return
		}

		// Store pre-parsed chapters JSON in MinIO so runner can ingest without re-parsing.
		chaptersJSON, _ := json.Marshal(chapters)
		chaptersKey = fmt.Sprintf("imports/%d_%s_chapters.json", time.Now().Unix(), strings.TrimSuffix(header.Filename, filepath.Ext(header.Filename)))
		if err := s.deps.ImportFileStore.PutImportChapters(r.Context(), chaptersKey, chaptersJSON); err != nil {
			jsonError(w, http.StatusInternalServerError, "store chapters: "+err.Error())
			return
		}
		chapterCount = len(chapters)
	} else {
		if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
			jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
			return
		}
		objectKey = req.ObjectKey
	}

	if req.Title == "" {
		jsonError(w, http.StatusBadRequest, "title is required")
		return
	}
	if req.FileType != "pdf" && req.FileType != "epub" {
		jsonError(w, http.StatusBadRequest, "file_type must be 'pdf' or 'epub'")
		return
	}

	slug := strings.ToLower(strings.ReplaceAll(req.Title, " ", "-"))
	slug = strings.Map(func(r rune) rune {
		if (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' {
			return r
		}
		return -1
	}, slug)

	taskID, err := s.deps.Producer.CreateImportTask(r.Context(), domain.ImportTask{
		Slug:            slug,
		Title:           req.Title,
		Author:          req.Author,
		CoverURL:        req.CoverURL,
		Genres:          req.Genres,
		Summary:         req.Summary,
		BookStatus:      req.BookStatus,
		FileType:        req.FileType,
		ObjectKey:       objectKey,
		ChaptersKey:     chaptersKey,
		ChaptersTotal:   chapterCount,
		InitiatorUserID: "",
	})
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "create import task: "+err.Error())
		return
	}

	writeJSON(w, 0, importResponse{
		TaskID:  taskID,
		Slug:    slug,
		Preview: &importPreview{Chapters: chapterCount},
	})
}
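// Example multipart request (a sketch; the concrete route path is wired up
// elsewhere and assumed here):
//
//	curl -X POST http://localhost:8080/api/admin/import \
//	  -F title="My Novel" -F author="Jane Doe" \
//	  -F genres="fantasy, adventure" -F file_type=epub \
//	  -F file=@my-novel.epub
//
// Add -F analyze=true to receive only the {"preview":{...}} chapter count
// without uploading to MinIO or queuing an import task.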

// analyzeImportFile parses the file to count chapters and extract preview lines.
func analyzeImportFile(data []byte, fileType string) *importPreview {
	count, firstLines, err := storage.AnalyzeFile(data, fileType)
	if err != nil || count == 0 {
		// Fall back to rough size estimate so the UI still shows something
		count = estimateChapters(data, fileType)
	}
	return &importPreview{
		Chapters:   count,
		FirstLines: firstLines,
	}
}

func estimateChapters(data []byte, fileType string) int {
	// Rough estimate: ~100KB per chapter for PDF, ~50KB for EPUB
	size := len(data)
	if fileType == "pdf" {
		return size / 100000
	}
	return size / 50000
}
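// Worked example: a 1,000,000-byte PDF estimates 1000000/100000 = 10 chapters;
// the same size as EPUB estimates 20. Integer division means files smaller
// than one chapter's worth of bytes estimate 0.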

func (s *Server) handleAdminImportStatus(w http.ResponseWriter, r *http.Request) {
	taskID := r.PathValue("id")
	if taskID == "" {
		jsonError(w, http.StatusBadRequest, "task id required")
		return
	}

	task, ok, err := s.deps.TaskReader.GetImportTask(r.Context(), taskID)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "get task: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, "task not found")
		return
	}

	writeJSON(w, 0, task)
}

func (s *Server) handleAdminImportList(w http.ResponseWriter, r *http.Request) {
	tasks, err := s.deps.TaskReader.ListImportTasks(r.Context())
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "list tasks: "+err.Error())
		return
	}
	writeJSON(w, 0, map[string]any{"tasks": tasks})
}
backend/internal/backend/handlers_notifications.go (new file, 126 lines)
@@ -0,0 +1,126 @@
package backend

import (
	"encoding/json"
	"net/http"

	"github.com/libnovel/backend/internal/storage"
)

// handleDismissNotification handles DELETE /api/notifications/{id}.
func (s *Server) handleDismissNotification(w http.ResponseWriter, r *http.Request) {
	id := r.PathValue("id")
	if id == "" {
		jsonError(w, http.StatusBadRequest, "notification id required")
		return
	}
	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}
	if err := store.DeleteNotification(r.Context(), id); err != nil {
		jsonError(w, http.StatusInternalServerError, "dismiss notification: "+err.Error())
		return
	}
	writeJSON(w, 0, map[string]any{"success": true})
}

// handleClearAllNotifications handles DELETE /api/notifications?user_id=...
func (s *Server) handleClearAllNotifications(w http.ResponseWriter, r *http.Request) {
	userID := r.URL.Query().Get("user_id")
	if userID == "" {
		jsonError(w, http.StatusBadRequest, "user_id required")
		return
	}
	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}
	if err := store.ClearAllNotifications(r.Context(), userID); err != nil {
		jsonError(w, http.StatusInternalServerError, "clear notifications: "+err.Error())
		return
	}
	writeJSON(w, 0, map[string]any{"success": true})
}

// handleMarkAllNotificationsRead handles PATCH /api/notifications?user_id=...
func (s *Server) handleMarkAllNotificationsRead(w http.ResponseWriter, r *http.Request) {
	userID := r.URL.Query().Get("user_id")
	if userID == "" {
		jsonError(w, http.StatusBadRequest, "user_id required")
		return
	}
	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}
	if err := store.MarkAllNotificationsRead(r.Context(), userID); err != nil {
		jsonError(w, http.StatusInternalServerError, "mark all read: "+err.Error())
		return
	}
	writeJSON(w, 0, map[string]any{"success": true})
}

type notification struct {
	ID      string `json:"id"`
	UserID  string `json:"user_id"`
	Title   string `json:"title"`
	Message string `json:"message"`
	Link    string `json:"link"`
	Read    bool   `json:"read"`
}

func (s *Server) handleListNotifications(w http.ResponseWriter, r *http.Request) {
	userID := r.URL.Query().Get("user_id")
	if userID == "" {
		jsonError(w, http.StatusBadRequest, "user_id required")
		return
	}

	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}

	items, err := store.ListNotifications(r.Context(), userID, 50)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "list notifications: "+err.Error())
		return
	}

	// Parse each item as notification
	notifications := make([]notification, 0, len(items))
	for _, item := range items {
		b, _ := json.Marshal(item)
		var n notification
		_ = json.Unmarshal(b, &n) // best-effort decode; malformed records yield zero values
		notifications = append(notifications, n)
	}

	writeJSON(w, 0, map[string]any{"notifications": notifications})
}
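// Example response shape (values illustrative, fields from the notification
// struct above):
//
//	{"notifications":[{"id":"n1","user_id":"u1","title":"Import finished",
//	  "message":"150 chapters ingested","link":"/books/my-book","read":false}]}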

func (s *Server) handleMarkNotificationRead(w http.ResponseWriter, r *http.Request) {
	id := r.PathValue("id")
	if id == "" {
		jsonError(w, http.StatusBadRequest, "notification id required")
		return
	}

	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}

	if err := store.MarkNotificationRead(r.Context(), id); err != nil {
		jsonError(w, http.StatusInternalServerError, "mark read: "+err.Error())
		return
	}

	writeJSON(w, 0, map[string]any{"success": true})
}
backend/internal/backend/handlers_push.go (new file, 87 lines)
@@ -0,0 +1,87 @@
package backend

import (
	"encoding/json"
	"net/http"
	"os"

	"github.com/libnovel/backend/internal/storage"
)

// handleGetVAPIDPublicKey handles GET /api/push-subscriptions/vapid-public-key.
// Returns the VAPID public key so the SvelteKit frontend can subscribe browsers.
func (s *Server) handleGetVAPIDPublicKey(w http.ResponseWriter, r *http.Request) {
	key := os.Getenv("VAPID_PUBLIC_KEY")
	if key == "" {
		jsonError(w, http.StatusServiceUnavailable, "push notifications not configured")
		return
	}
	writeJSON(w, 0, map[string]string{"public_key": key})
}

// handleSavePushSubscription handles POST /api/push-subscriptions.
// Registers a new browser push subscription for the authenticated user.
func (s *Server) handleSavePushSubscription(w http.ResponseWriter, r *http.Request) {
	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}

	var body struct {
		UserID   string `json:"user_id"`
		Endpoint string `json:"endpoint"`
		P256DH   string `json:"p256dh"`
		Auth     string `json:"auth"`
	}
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		jsonError(w, http.StatusBadRequest, "invalid request body")
		return
	}
	if body.UserID == "" || body.Endpoint == "" || body.P256DH == "" || body.Auth == "" {
		jsonError(w, http.StatusBadRequest, "user_id, endpoint, p256dh and auth are required")
		return
	}

	if err := store.SavePushSubscription(r.Context(), storage.PushSubscription{
		UserID:   body.UserID,
		Endpoint: body.Endpoint,
		P256DH:   body.P256DH,
		Auth:     body.Auth,
	}); err != nil {
		jsonError(w, http.StatusInternalServerError, "save push subscription: "+err.Error())
		return
	}

	writeJSON(w, 0, map[string]any{"success": true})
}
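// Example request body (a sketch): the frontend typically derives these values
// from the browser's PushSubscription.toJSON(), which exposes the endpoint plus
// keys.p256dh and keys.auth.
//
//	{"user_id":"u1","endpoint":"https://fcm.googleapis.com/fcm/send/...",
//	 "p256dh":"BNcR...","auth":"tBHl..."}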

// handleDeletePushSubscription handles DELETE /api/push-subscriptions.
// Removes a push subscription by endpoint for the given user.
func (s *Server) handleDeletePushSubscription(w http.ResponseWriter, r *http.Request) {
	store, ok := s.deps.Producer.(*storage.Store)
	if !ok {
		jsonError(w, http.StatusInternalServerError, "storage not available")
		return
	}

	var body struct {
		UserID   string `json:"user_id"`
		Endpoint string `json:"endpoint"`
	}
	if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
		jsonError(w, http.StatusBadRequest, "invalid request body")
		return
	}
	if body.UserID == "" || body.Endpoint == "" {
		jsonError(w, http.StatusBadRequest, "user_id and endpoint are required")
		return
	}

	if err := store.DeletePushSubscription(r.Context(), body.UserID, body.Endpoint); err != nil {
		jsonError(w, http.StatusInternalServerError, "delete push subscription: "+err.Error())
		return
	}

	writeJSON(w, 0, map[string]any{"success": true})
}
backend/internal/backend/handlers_split.go (new file, 141 lines)
@@ -0,0 +1,141 @@
package backend

import (
	"encoding/json"
	"fmt"
	"net/http"
	"strings"

	"github.com/libnovel/backend/internal/bookstore"
	"github.com/libnovel/backend/internal/domain"
)

// handleAdminSplitChapters handles POST /api/admin/books/{slug}/split-chapters.
//
// Request body (JSON):
//
//	{ "text": "<full text with --- dividers and optional ## Title lines>" }
//
// The text is split on lines containing only "---". Each segment may start with
// a "## Title" line which becomes the chapter title; remaining lines are the
// chapter content. Sequential chapter numbers 1..N are assigned.
//
// Existing chapters 1..N are overwritten in place: WriteChapter is called for
// each new chapter (upsert by number). Chapters beyond N are NOT deleted —
// use the dedup endpoint afterwards if needed.
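//
// Example input (illustrative):
//
//	## Prologue
//	It began at sea.
//	---
//	This segment has no header, so it becomes "Chapter 2".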
func (s *Server) handleAdminSplitChapters(w http.ResponseWriter, r *http.Request) {
	if s.deps.BookWriter == nil {
		jsonError(w, http.StatusServiceUnavailable, "book writer not configured")
		return
	}

	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	var req struct {
		Text string `json:"text"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Text) == "" {
		jsonError(w, http.StatusBadRequest, "text is required")
		return
	}

	chapters := splitChapterText(req.Text)
	if len(chapters) == 0 {
		jsonError(w, http.StatusUnprocessableEntity, "no chapters produced from text")
		return
	}

	for _, ch := range chapters {
		var mdContent string
		if ch.Title != "" && ch.Title != fmt.Sprintf("Chapter %d", ch.Number) {
			mdContent = fmt.Sprintf("# %s\n\n%s", ch.Title, ch.Content)
		} else {
			mdContent = fmt.Sprintf("# Chapter %d\n\n%s", ch.Number, ch.Content)
		}
		domainCh := domain.Chapter{
			Ref:  domain.ChapterRef{Number: ch.Number, Title: ch.Title},
			Text: mdContent,
		}
		if err := s.deps.BookWriter.WriteChapter(r.Context(), slug, domainCh); err != nil {
			jsonError(w, http.StatusInternalServerError, fmt.Sprintf("write chapter %d: %s", ch.Number, err.Error()))
			return
		}
	}

	writeJSON(w, 0, map[string]any{
		"chapters": len(chapters),
		"slug":     slug,
	})
}

// splitChapterText splits text on "---" divider lines into bookstore.Chapter
// slices. Each segment may optionally start with a "## Title" header line.
func splitChapterText(text string) []bookstore.Chapter {
	lines := strings.Split(text, "\n")

	// Collect raw segments split on "---" dividers.
	var segments [][]string
	cur := []string{}
	for _, line := range lines {
		if strings.TrimSpace(line) == "---" {
			segments = append(segments, cur)
			cur = []string{}
		} else {
			cur = append(cur, line)
		}
	}
	segments = append(segments, cur) // last segment

	var chapters []bookstore.Chapter
	chNum := 0
	for _, seg := range segments {
		// Trim leading/trailing blank lines from the segment.
		start, end := 0, len(seg)
		for start < end && strings.TrimSpace(seg[start]) == "" {
			start++
		}
		for end > start && strings.TrimSpace(seg[end-1]) == "" {
			end--
		}
		seg = seg[start:end]
		if len(seg) == 0 {
			continue
		}

		// Check for a "## Title" header on the first line.
		title := ""
		contentStart := 0
		if strings.HasPrefix(strings.TrimSpace(seg[0]), "## ") {
			title = strings.TrimSpace(strings.TrimPrefix(strings.TrimSpace(seg[0]), "## "))
			contentStart = 1
			// Skip blank lines after the title.
			for contentStart < len(seg) && strings.TrimSpace(seg[contentStart]) == "" {
				contentStart++
			}
		}

		content := strings.TrimSpace(strings.Join(seg[contentStart:], "\n"))
		if content == "" {
			continue
		}

		chNum++
		if title == "" {
			title = fmt.Sprintf("Chapter %d", chNum)
		}
		chapters = append(chapters, bookstore.Chapter{
			Number:  chNum,
			Title:   title,
			Content: content,
		})
	}
	return chapters
}
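// For the example input in the handler doc comment above, splitChapterText
// returns (sketch):
//
//	[{Number: 1, Title: "Prologue", Content: "It began at sea."},
//	 {Number: 2, Title: "Chapter 2", Content: "This segment has no header, ..."}]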
backend/internal/backend/handlers_textgen.go (new file, 961 lines)
@@ -0,0 +1,961 @@
package backend

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"time"

	"github.com/libnovel/backend/internal/cfai"
	"github.com/libnovel/backend/internal/domain"
)

// chapterNamesBatchSize is the number of chapters sent per LLM request.
// Keeps output well within the 4096-token response limit (~30 tokens/title).
const chapterNamesBatchSize = 100

// handleAdminTextGenModels handles GET /api/admin/text-gen/models.
// Returns the list of supported Cloudflare AI text generation models.
func (s *Server) handleAdminTextGenModels(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}
	models := s.deps.TextGen.Models()
	writeJSON(w, 0, map[string]any{"models": models})
}

// ── Chapter names ─────────────────────────────────────────────────────────────

// textGenChapterNamesRequest is the JSON body for POST /api/admin/text-gen/chapter-names.
type textGenChapterNamesRequest struct {
	// Slug is the book slug whose chapters to process.
	Slug string `json:"slug"`
	// Pattern is a free-text description of the desired naming convention,
	// e.g. "Chapter {n}: {brief scene description}".
	Pattern string `json:"pattern"`
	// Model is the CF Workers AI model ID. Defaults to the recommended model when empty.
	Model string `json:"model"`
	// MaxTokens limits response length (0 = model default).
	MaxTokens int `json:"max_tokens"`
	// FromChapter is the first chapter to process (1-based). 0 = start from chapter 1.
	FromChapter int `json:"from_chapter"`
	// ToChapter is the last chapter to process (inclusive). 0 = process all.
	ToChapter int `json:"to_chapter"`
	// JobID is an optional existing ai_job ID for resuming a previous run.
	// If set, the handler resumes from items_done instead of starting from scratch.
	JobID string `json:"job_id"`
}

// proposedChapterTitle is a single chapter with its AI-proposed title.
type proposedChapterTitle struct {
	Number int `json:"number"`
	// OldTitle is the current title stored in the database.
	OldTitle string `json:"old_title"`
	// NewTitle is the AI-proposed replacement.
	NewTitle string `json:"new_title"`
}

// chapterNamesBatchEvent is one SSE event emitted per processed batch.
type chapterNamesBatchEvent struct {
	// JobID is the PB ai_job ID for this run (emitted on the first event only).
	JobID string `json:"job_id,omitempty"`
	// Batch is the 1-based batch index.
	Batch int `json:"batch"`
	// TotalBatches is the total number of batches.
	TotalBatches int `json:"total_batches"`
	// ChaptersDone is the cumulative count of chapters processed so far.
	ChaptersDone int `json:"chapters_done"`
	// TotalChapters is the total chapter count for this book.
	TotalChapters int `json:"total_chapters"`
	// Model is the CF AI model used.
	Model string `json:"model"`
	// Chapters contains the proposed titles for this batch.
	Chapters []proposedChapterTitle `json:"chapters"`
	// Error is non-empty if this batch failed.
	Error string `json:"error,omitempty"`
	// Done is true on the final sentinel event (no Chapters).
	Done bool `json:"done,omitempty"`
}

// handleAdminTextGenChapterNames handles POST /api/admin/text-gen/chapter-names.
//
// Splits all chapters into batches of chapterNamesBatchSize, sends each batch
// to the LLM sequentially, and streams results back as Server-Sent Events so
// the frontend can show live progress. Each SSE data line is a JSON-encoded
// chapterNamesBatchEvent. The final event has Done=true.
//
// Does NOT persist anything — the frontend shows a diff and the user must
// confirm via POST /api/admin/text-gen/chapter-names/apply.
func (s *Server) handleAdminTextGenChapterNames(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}

	var req textGenChapterNamesRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if strings.TrimSpace(req.Pattern) == "" {
		jsonError(w, http.StatusBadRequest, "pattern is required")
		return
	}

	// Load existing chapter list.
	allChapters, err := s.deps.BookReader.ListChapters(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "list chapters: "+err.Error())
		return
	}
	if len(allChapters) == 0 {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("no chapters found for slug %q", req.Slug))
		return
	}

	// Apply chapter range filter.
	chapters := allChapters
	if req.FromChapter > 0 || req.ToChapter > 0 {
		filtered := chapters[:0]
		for _, ch := range allChapters {
			if req.FromChapter > 0 && ch.Number < req.FromChapter {
				continue
			}
			if req.ToChapter > 0 && ch.Number > req.ToChapter {
				break
			}
			filtered = append(filtered, ch)
		}
		chapters = filtered
	}
	if len(chapters) == 0 {
		jsonError(w, http.StatusBadRequest, "no chapters in the specified range")
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}
	// 4096 tokens comfortably fits 100 chapter titles (~30 tokens each).
	maxTokens := req.MaxTokens
	if maxTokens <= 0 {
		maxTokens = 4096
	}

	// Index existing titles for old/new diff.
	existing := make(map[int]string, len(chapters))
	for _, ch := range chapters {
		existing[ch.Number] = ch.Title
	}

	// Partition chapters into batches.
	batches := chunkChapters(chapters, chapterNamesBatchSize)
	totalBatches := len(batches)

	s.deps.Log.Info("admin: text-gen chapter-names requested",
		"slug", req.Slug, "chapters", len(chapters),
		"batches", totalBatches, "model", model, "max_tokens", maxTokens)

	systemPrompt := `You are a chapter title editor for a web novel platform. ` +
		`The user provides a list of chapter numbers with their current titles, ` +
		`and a naming pattern template. ` +
		`Your job: produce one new title for every chapter, following the pattern exactly. ` +
		`Pattern placeholders: {n} = the chapter number (integer), {scene} = a very short (2–5 word) scene hint derived from the existing title. ` +
		`RULES: ` +
		`1. Do NOT include the chapter number inside the title text — the {n} placeholder is already in the pattern. ` +
		`2. Do NOT include any prefix like "Chapter X -" or "Chapter X:" inside the title field itself. ` +
		`3. The "title" field in your JSON must be the fully-rendered string (e.g. if pattern is "Chapter {n}: {scene}", output "Chapter 3: The Bet"). ` +
		`4. Respond ONLY with a raw JSON array — no prose, no markdown fences, no explanation. ` +
		`5. Each element: {"number": <int>, "title": <string>}. ` +
		`6. Output every chapter in the input list, in order. Do not skip any.`

	// Switch to SSE before writing anything.
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("X-Accel-Buffering", "no") // disable nginx/caddy buffering
	flusher, canFlush := w.(http.Flusher)

	sseWrite := func(evt chapterNamesBatchEvent) {
		b, _ := json.Marshal(evt)
		fmt.Fprintf(w, "data: %s\n\n", b)
		if canFlush {
			flusher.Flush()
		}
	}

	// Create or resume an ai_job record for tracking.
	var jobID string
	resumeFrom := 0
	jobCtx := r.Context()
	var jobCancel context.CancelFunc

	if s.deps.AIJobStore != nil {
		if req.JobID != "" {
			if existingJob, ok, _ := s.deps.AIJobStore.GetAIJob(r.Context(), req.JobID); ok {
				jobID = req.JobID
				resumeFrom = existingJob.ItemsDone
				_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
					"status":      string(domain.TaskStatusRunning),
					"items_total": len(chapters),
				})
			}
		}
		if jobID == "" {
			jobPayload := fmt.Sprintf(`{"pattern":%q}`, req.Pattern)
			id, createErr := s.deps.AIJobStore.CreateAIJob(r.Context(), domain.AIJob{
				Kind:       "chapter-names",
				Slug:       req.Slug,
				Status:     domain.TaskStatusRunning,
				FromItem:   req.FromChapter,
				ToItem:     req.ToChapter,
				ItemsTotal: len(chapters),
				Model:      string(model),
				Payload:    jobPayload,
				Started:    time.Now(),
			})
			if createErr == nil {
				jobID = id
			}
		}
		if jobID != "" {
			jobCtx, jobCancel = context.WithCancel(r.Context())
			registerCancelJob(jobID, jobCancel)
			defer deregisterCancelJob(jobID)
			defer jobCancel()
		}
	}

	chaptersDone := resumeFrom
	firstEvent := true
	for i, batch := range batches {
		if jobCtx.Err() != nil {
			return // client disconnected or cancelled
		}
		// Skip batches already processed in a previous run.
		batchEnd := (i + 1) * chapterNamesBatchSize
		if batchEnd <= resumeFrom {
			continue
		}

		var chapterListSB strings.Builder
		for _, ch := range batch {
			chapterListSB.WriteString(fmt.Sprintf("%d: %s\n", ch.Number, ch.Title))
		}
		userPrompt := fmt.Sprintf("Naming pattern: %s\n\nChapters:\n%s", req.Pattern, chapterListSB.String())

		raw, genErr := s.deps.TextGen.Generate(jobCtx, cfai.TextRequest{
			Model: model,
			Messages: []cfai.TextMessage{
				{Role: "system", Content: systemPrompt},
				{Role: "user", Content: userPrompt},
			},
			MaxTokens: maxTokens,
		})
		if genErr != nil {
			s.deps.Log.Error("admin: text-gen chapter-names batch failed",
				"batch", i+1, "err", genErr)
			evt := chapterNamesBatchEvent{
				Batch:         i + 1,
				TotalBatches:  totalBatches,
				ChaptersDone:  chaptersDone,
				TotalChapters: len(chapters),
				Model:         string(model),
				Error:         genErr.Error(),
			}
			if firstEvent {
				evt.JobID = jobID
				firstEvent = false
			}
			sseWrite(evt)
			continue
		}

		proposed := parseChapterTitlesJSON(raw)
		result := make([]proposedChapterTitle, 0, len(proposed))
		for _, p := range proposed {
			result = append(result, proposedChapterTitle{
				Number:   p.Number,
				OldTitle: existing[p.Number],
				NewTitle: p.Title,
			})
		}
		chaptersDone += len(batch)

		if jobID != "" && s.deps.AIJobStore != nil {
			_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
				"items_done": chaptersDone,
			})
		}

		evt := chapterNamesBatchEvent{
			Batch:         i + 1,
			TotalBatches:  totalBatches,
			ChaptersDone:  chaptersDone,
			TotalChapters: len(chapters),
			Model:         string(model),
			Chapters:      result,
		}
		if firstEvent {
			evt.JobID = jobID
			firstEvent = false
		}
		sseWrite(evt)
	}

	// Mark job as done in PB.
	if jobID != "" && s.deps.AIJobStore != nil {
		status := domain.TaskStatusDone
		if jobCtx.Err() != nil {
			status = domain.TaskStatusCancelled
		}
		_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
			"status":     string(status),
			"items_done": chaptersDone,
			"finished":   time.Now().Format(time.RFC3339),
		})
	}

	// Final sentinel event.
	sseWrite(chapterNamesBatchEvent{Done: true, TotalChapters: len(chapters), Model: string(model)})
}
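// Example SSE stream for a 150-chapter book (values illustrative; per the code
// above, job_id appears only on the first event and the sentinel closes the stream):
//
//	data: {"job_id":"abc123","batch":1,"total_batches":2,"chapters_done":100,"total_chapters":150,"model":"...","chapters":[{"number":1,"old_title":"1","new_title":"Chapter 1: The Bet"},...]}
//
//	data: {"batch":2,"total_batches":2,"chapters_done":150,"total_chapters":150,"model":"...","chapters":[...]}
//
//	data: {"done":true,"total_chapters":150,"model":"..."}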

// chunkChapters splits a chapter slice into batches of at most size n.
func chunkChapters(chapters []domain.ChapterInfo, n int) [][]domain.ChapterInfo {
	var batches [][]domain.ChapterInfo
	for len(chapters) > 0 {
		end := n
		if end > len(chapters) {
			end = len(chapters)
		}
		batches = append(batches, chapters[:end])
		chapters = chapters[end:]
	}
	return batches
}
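// For example, 250 chapters with n=100 yield three batches: 100, 100 and 50.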

// rawChapterTitle is one element of the model's JSON array response.
type rawChapterTitle struct {
	Number int    `json:"number"`
	Title  string `json:"title"`
}

// parseChapterTitlesJSON extracts the JSON array from a model response.
// It tolerates markdown fences and surrounding prose.
func parseChapterTitlesJSON(raw string) []rawChapterTitle {
	// Strip markdown fences if present.
	s := raw
	if idx := strings.Index(s, "```json"); idx >= 0 {
		s = s[idx+7:]
	} else if idx := strings.Index(s, "```"); idx >= 0 {
		s = s[idx+3:]
	}
	if idx := strings.LastIndex(s, "```"); idx >= 0 {
		s = s[:idx]
	}
	// Find the JSON array boundaries.
	start := strings.Index(s, "[")
	end := strings.LastIndex(s, "]")
	if start < 0 || end <= start {
		return nil
	}
	s = s[start : end+1]
	var out []rawChapterTitle
	json.Unmarshal([]byte(s), &out) //nolint:errcheck
	return out
}
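// Example of a tolerated response (sketch): given
//
//	Here are the titles:
//	```json
//	[{"number":1,"title":"Chapter 1: The Bet"}]
//	```
//
// the fences and prose are stripped and the result is
// []rawChapterTitle{{Number: 1, Title: "Chapter 1: The Bet"}}.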

// handleAdminTextGenChapterNamesAsync handles POST /api/admin/text-gen/chapter-names/async.
//
// Fire-and-forget variant: validates inputs, creates an ai_job record, spawns a
// background goroutine, and returns HTTP 202 with {job_id} immediately. The
// goroutine runs all batches, stores the proposed titles in the job payload, and
// marks the job done/failed/cancelled when finished.
//
// The client can poll GET /api/admin/ai-jobs/{id} for progress, then call
// POST /api/admin/text-gen/chapter-names/apply once the job is "done".
func (s *Server) handleAdminTextGenChapterNamesAsync(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}

	var req textGenChapterNamesRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if strings.TrimSpace(req.Pattern) == "" {
		jsonError(w, http.StatusBadRequest, "pattern is required")
		return
	}

	// Load existing chapter list (use request context — just for validation).
	allChapters, err := s.deps.BookReader.ListChapters(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "list chapters: "+err.Error())
		return
	}
	if len(allChapters) == 0 {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("no chapters found for slug %q", req.Slug))
		return
	}

	// Apply chapter range filter.
	chapters := allChapters
	if req.FromChapter > 0 || req.ToChapter > 0 {
		filtered := chapters[:0]
		for _, ch := range allChapters {
			if req.FromChapter > 0 && ch.Number < req.FromChapter {
				continue
			}
			if req.ToChapter > 0 && ch.Number > req.ToChapter {
				break
			}
			filtered = append(filtered, ch)
		}
		chapters = filtered
	}
	if len(chapters) == 0 {
		jsonError(w, http.StatusBadRequest, "no chapters in the specified range")
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}
	maxTokens := req.MaxTokens
	if maxTokens <= 0 {
		maxTokens = 4096
	}

	// Index existing titles for old/new diff.
	existing := make(map[int]string, len(chapters))
	for _, ch := range chapters {
		existing[ch.Number] = ch.Title
	}

	batches := chunkChapters(chapters, chapterNamesBatchSize)
	totalBatches := len(batches)

	if s.deps.AIJobStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
		return
	}

	jobPayload := fmt.Sprintf(`{"pattern":%q}`, req.Pattern)
	jobID, createErr := s.deps.AIJobStore.CreateAIJob(r.Context(), domain.AIJob{
		Kind:       "chapter-names",
		Slug:       req.Slug,
		Status:     domain.TaskStatusPending,
		FromItem:   req.FromChapter,
		ToItem:     req.ToChapter,
		ItemsTotal: len(chapters),
		Model:      string(model),
		Payload:    jobPayload,
		Started:    time.Now(),
	})
	if createErr != nil {
		jsonError(w, http.StatusInternalServerError, "create ai job: "+createErr.Error())
		return
	}

	jobCtx, jobCancel := context.WithCancel(context.Background())
	registerCancelJob(jobID, jobCancel)

	s.deps.Log.Info("admin: text-gen chapter-names async started",
		"job_id", jobID, "slug", req.Slug,
		"chapters", len(chapters), "batches", totalBatches, "model", model)

	// Mark running before returning so the UI sees it immediately.
	_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
		"status": string(domain.TaskStatusRunning),
	})

	systemPrompt := `You are a chapter title editor for a web novel platform. ` +
		`The user provides a list of chapter numbers with their current titles, ` +
		`and a naming pattern template. ` +
		`Your job: produce one new title for every chapter, following the pattern exactly. ` +
		`Pattern placeholders: {n} = the chapter number (integer), {scene} = a very short (2–5 word) scene hint derived from the existing title. ` +
		`RULES: ` +
		`1. Do NOT include the chapter number inside the title text — the {n} placeholder is already in the pattern. ` +
		`2. Do NOT include any prefix like "Chapter X -" or "Chapter X:" inside the title field itself. ` +
		`3. The "title" field in your JSON must be the fully-rendered string (e.g. if pattern is "Chapter {n}: {scene}", output "Chapter 3: The Bet"). ` +
		`4. Respond ONLY with a raw JSON array — no prose, no markdown fences, no explanation. ` +
		`5. Each element: {"number": <int>, "title": <string>}. ` +
		`6. Output every chapter in the input list, in order. Do not skip any.`

	// Capture all locals needed in the goroutine.
	store := s.deps.AIJobStore
	textGen := s.deps.TextGen
	logger := s.deps.Log
	capturedModel := model
	capturedMaxTokens := maxTokens
	capturedPattern := req.Pattern
	capturedSlug := req.Slug

	go func() {
		defer deregisterCancelJob(jobID)
		defer jobCancel()

		var allResults []proposedChapterTitle
		chaptersDone := 0

		for i, batch := range batches {
			if jobCtx.Err() != nil {
				_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
					"status":   string(domain.TaskStatusCancelled),
					"finished": time.Now().Format(time.RFC3339),
				})
				return
			}

			var chapterListSB strings.Builder
			for _, ch := range batch {
				chapterListSB.WriteString(fmt.Sprintf("%d: %s\n", ch.Number, ch.Title))
			}
			userPrompt := fmt.Sprintf("Naming pattern: %s\n\nChapters:\n%s", capturedPattern, chapterListSB.String())

			raw, genErr := textGen.Generate(jobCtx, cfai.TextRequest{
				Model: capturedModel,
				Messages: []cfai.TextMessage{
					{Role: "system", Content: systemPrompt},
					{Role: "user", Content: userPrompt},
				},
				MaxTokens: capturedMaxTokens,
			})
			if genErr != nil {
				logger.Error("admin: text-gen chapter-names async batch failed",
					"job_id", jobID, "batch", i+1, "err", genErr)
				// Continue — skip errored batch rather than aborting.
				continue
			}

			proposed := parseChapterTitlesJSON(raw)
			for _, p := range proposed {
				allResults = append(allResults, proposedChapterTitle{
					Number:   p.Number,
					OldTitle: existing[p.Number],
					NewTitle: p.Title,
				})
			}
			chaptersDone += len(batch)

			_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
				"items_done": chaptersDone,
			})
		}

		// Persist results into payload so the UI can load them for review.
		resultsJSON, _ := json.Marshal(allResults)
		finalPayload := fmt.Sprintf(`{"pattern":%q,"slug":%q,"results":%s}`,
			capturedPattern, capturedSlug, string(resultsJSON))

		status := domain.TaskStatusDone
		if jobCtx.Err() != nil {
			status = domain.TaskStatusCancelled
		}
		_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
			"status":     string(status),
			"items_done": chaptersDone,
			"finished":   time.Now().Format(time.RFC3339),
			"payload":    finalPayload,
		})
		logger.Info("admin: text-gen chapter-names async done",
			"job_id", jobID, "slug", capturedSlug,
			"results", len(allResults), "status", string(status))
	}()

	writeJSON(w, http.StatusAccepted, map[string]any{"job_id": jobID})
}
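// On completion the job payload carries everything the review UI needs, e.g.
// (values illustrative):
//
//	{"pattern":"Chapter {n}: {scene}","slug":"my-book",
//	 "results":[{"number":1,"old_title":"1","new_title":"Chapter 1: The Bet"}]}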

// ── Apply chapter names ───────────────────────────────────────────────────────

// applyChapterNamesRequest is the JSON body for POST /api/admin/text-gen/chapter-names/apply.
type applyChapterNamesRequest struct {
	// Slug is the book slug to update.
	Slug string `json:"slug"`
	// Chapters is the list of chapters to save (number + new_title pairs).
	// The UI may modify individual titles before confirming.
	Chapters []applyChapterEntry `json:"chapters"`
}

type applyChapterEntry struct {
	Number int    `json:"number"`
	Title  string `json:"title"`
}

// handleAdminTextGenApplyChapterNames handles POST /api/admin/text-gen/chapter-names/apply.
//
// Persists the confirmed chapter titles to PocketBase chapters_idx.
func (s *Server) handleAdminTextGenApplyChapterNames(w http.ResponseWriter, r *http.Request) {
	if s.deps.BookWriter == nil {
		jsonError(w, http.StatusServiceUnavailable, "book writer not configured")
		return
	}

	var req applyChapterNamesRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}
	if len(req.Chapters) == 0 {
		jsonError(w, http.StatusBadRequest, "chapters is required")
		return
	}

	refs := make([]domain.ChapterRef, 0, len(req.Chapters))
	for _, ch := range req.Chapters {
		if ch.Number <= 0 {
			continue
		}
		refs = append(refs, domain.ChapterRef{
			Number: ch.Number,
			Title:  strings.TrimSpace(ch.Title),
		})
	}

	if err := s.deps.BookWriter.WriteChapterRefs(r.Context(), req.Slug, refs); err != nil {
		s.deps.Log.Error("admin: apply chapter names failed", "slug", req.Slug, "err", err)
		jsonError(w, http.StatusInternalServerError, "write chapter refs: "+err.Error())
		return
	}

	s.deps.Log.Info("admin: chapter names applied", "slug", req.Slug, "count", len(refs))
	writeJSON(w, 0, map[string]any{"updated": len(refs)})
}
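// Example request body (a sketch; typically the user-edited results of a
// chapter-names job):
//
//	{"slug":"my-book","chapters":[{"number":1,"title":"Chapter 1: The Bet"},
//	  {"number":2,"title":"Chapter 2: At Sea"}]}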

// ── Book description ──────────────────────────────────────────────────────────

// textGenDescriptionRequest is the JSON body for POST /api/admin/text-gen/description.
type textGenDescriptionRequest struct {
	// Slug is the book slug whose description to regenerate.
	Slug string `json:"slug"`
	// Instructions is an optional free-text hint for the AI,
	// e.g. "Write a 3-sentence blurb, avoid spoilers, dramatic tone."
	Instructions string `json:"instructions"`
	// Model is the CF Workers AI model ID. Defaults to recommended when empty.
	Model string `json:"model"`
	// MaxTokens limits response length (0 = model default).
	MaxTokens int `json:"max_tokens"`
}

// textGenDescriptionResponse is the JSON body returned by POST /api/admin/text-gen/description.
type textGenDescriptionResponse struct {
	// OldDescription is the current summary stored in the database.
	OldDescription string `json:"old_description"`
	// NewDescription is the AI-proposed replacement.
	NewDescription string `json:"new_description"`
	// Model is the model that was used.
	Model string `json:"model"`
}

// handleAdminTextGenDescription handles POST /api/admin/text-gen/description.
//
// Reads the current book metadata, sends it to the LLM, and returns a proposed
// new description. Does NOT persist anything — the user must confirm via
// POST /api/admin/text-gen/description/apply.
func (s *Server) handleAdminTextGenDescription(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}

	var req textGenDescriptionRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	// Load current book metadata.
	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	systemPrompt := `You are a book description writer for a web novel platform. ` +
		`Given a book's title, author, genres, and current description, write an improved ` +
		`description that accurately captures the story. ` +
		`Respond with ONLY the new description text — no title, no labels, no markdown, no quotes.`

	instructions := strings.TrimSpace(req.Instructions)
	if instructions == "" {
		instructions = "Write a compelling 2–4 sentence description. Keep it spoiler-free and engaging."
	}

	userPrompt := fmt.Sprintf(
		"Title: %s\nAuthor: %s\nGenres: %s\nStatus: %s\n\nCurrent description:\n%s\n\nInstructions: %s",
		meta.Title,
		meta.Author,
		strings.Join(meta.Genres, ", "),
		meta.Status,
		meta.Summary,
		instructions,
	)

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	s.deps.Log.Info("admin: text-gen description requested",
		"slug", req.Slug, "model", model)

	newDesc, genErr := s.deps.TextGen.Generate(r.Context(), cfai.TextRequest{
		Model: model,
		Messages: []cfai.TextMessage{
			{Role: "system", Content: systemPrompt},
			{Role: "user", Content: userPrompt},
		},
		MaxTokens: req.MaxTokens,
	})
	if genErr != nil {
		s.deps.Log.Error("admin: text-gen description failed", "err", genErr)
		jsonError(w, http.StatusBadGateway, "text generation failed: "+genErr.Error())
		return
	}

	writeJSON(w, 0, textGenDescriptionResponse{
		OldDescription: meta.Summary,
		NewDescription: strings.TrimSpace(newDesc),
		Model:          string(model),
	})
}
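// Example round trip (a sketch; values illustrative):
//
//	POST /api/admin/text-gen/description
//	  {"slug":"my-book","instructions":"Dramatic tone, three sentences."}
//	  -> {"old_description":"...","new_description":"...","model":"..."}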
|
||||
// ── Apply description ─────────────────────────────────────────────────────────
|
||||
|
||||
// applyDescriptionRequest is the JSON body for POST /api/admin/text-gen/description/apply.
|
||||
type applyDescriptionRequest struct {
|
||||
// Slug is the book slug to update.
|
||||
Slug string `json:"slug"`
|
||||
// Description is the new summary text to persist.
|
||||
Description string `json:"description"`
|
||||
}
|
||||
|
||||
// handleAdminTextGenApplyDescription handles POST /api/admin/text-gen/description/apply.
|
||||
//
|
||||
// Updates only the summary field in PocketBase, leaving all other book metadata
|
||||
// unchanged.
|
||||
func (s *Server) handleAdminTextGenApplyDescription(w http.ResponseWriter, r *http.Request) {
|
||||
if s.deps.BookWriter == nil {
|
||||
jsonError(w, http.StatusServiceUnavailable, "book writer not configured")
|
||||
return
|
||||
}
|
||||
|
||||
var req applyDescriptionRequest
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
|
||||
return
|
||||
}
|
||||
if strings.TrimSpace(req.Slug) == "" {
|
||||
jsonError(w, http.StatusBadRequest, "slug is required")
|
||||
return
|
||||
}
|
||||
if strings.TrimSpace(req.Description) == "" {
|
||||
jsonError(w, http.StatusBadRequest, "description is required")
|
||||
return
|
||||
}
|
||||
|
||||
// Read existing metadata so we can write it back with only summary changed.
|
||||
meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
|
||||
if err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
|
||||
return
|
||||
}
|
||||
if !ok {
|
||||
jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
|
||||
return
|
||||
}
|
||||
|
||||
meta.Summary = strings.TrimSpace(req.Description)
|
||||
if err := s.deps.BookWriter.WriteMetadata(r.Context(), meta); err != nil {
|
||||
s.deps.Log.Error("admin: apply description failed", "slug", req.Slug, "err", err)
|
||||
jsonError(w, http.StatusInternalServerError, "write metadata: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
s.deps.Log.Info("admin: book description applied", "slug", req.Slug)
|
||||
writeJSON(w, 0, map[string]any{"updated": true})
|
||||
}

// handleAdminTextGenDescriptionAsync handles POST /api/admin/text-gen/description/async.
//
// Fire-and-forget variant: validates inputs, creates an ai_job record of kind
// "description", spawns a background goroutine that calls the LLM, stores the
// old/new description in the job payload, and marks the job done/failed.
// Returns HTTP 202 with {job_id} immediately.
func (s *Server) handleAdminTextGenDescriptionAsync(w http.ResponseWriter, r *http.Request) {
	if s.deps.TextGen == nil {
		jsonError(w, http.StatusServiceUnavailable, "text generation not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN missing)")
		return
	}
	if s.deps.AIJobStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "ai job store not configured")
		return
	}

	var req textGenDescriptionRequest
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Slug) == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	// Load current metadata eagerly so we can fail fast if the book is missing.
	meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), req.Slug)
	if err != nil {
		jsonError(w, http.StatusInternalServerError, "read metadata: "+err.Error())
		return
	}
	if !ok {
		jsonError(w, http.StatusNotFound, fmt.Sprintf("book %q not found", req.Slug))
		return
	}

	model := cfai.TextModel(req.Model)
	if model == "" {
		model = cfai.DefaultTextModel
	}

	instructions := strings.TrimSpace(req.Instructions)
	if instructions == "" {
		instructions = "Write a compelling 2–4 sentence description. Keep it spoiler-free and engaging."
	}

	// Encode the initial params (without result) as the starting payload.
	type initPayload struct {
		Instructions   string `json:"instructions"`
		OldDescription string `json:"old_description"`
	}
	initJSON, _ := json.Marshal(initPayload{
		Instructions:   instructions,
		OldDescription: meta.Summary,
	})

	jobID, createErr := s.deps.AIJobStore.CreateAIJob(r.Context(), domain.AIJob{
		Kind:    "description",
		Slug:    req.Slug,
		Status:  domain.TaskStatusPending,
		Model:   string(model),
		Payload: string(initJSON),
		Started: time.Now(),
	})
	if createErr != nil {
		jsonError(w, http.StatusInternalServerError, "create ai job: "+createErr.Error())
		return
	}

	jobCtx, jobCancel := context.WithCancel(context.Background())
	registerCancelJob(jobID, jobCancel)

	_ = s.deps.AIJobStore.UpdateAIJob(r.Context(), jobID, map[string]any{
		"status": string(domain.TaskStatusRunning),
	})

	s.deps.Log.Info("admin: text-gen description async started",
		"job_id", jobID, "slug", req.Slug, "model", model)

	// Capture locals.
	store := s.deps.AIJobStore
	textGen := s.deps.TextGen
	logger := s.deps.Log
	capturedMeta := meta
	capturedModel := model
	capturedInstructions := instructions
	capturedMaxTokens := req.MaxTokens

	go func() {
		defer deregisterCancelJob(jobID)
		defer jobCancel()

		if jobCtx.Err() != nil {
			_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
				"status":   string(domain.TaskStatusCancelled),
				"finished": time.Now().Format(time.RFC3339),
			})
			return
		}

		systemPrompt := `You are a book description writer for a web novel platform. ` +
			`Given a book's title, author, genres, and current description, write an improved ` +
			`description that accurately captures the story. ` +
			`Respond with ONLY the new description text — no title, no labels, no markdown, no quotes.`

		userPrompt := fmt.Sprintf(
			"Title: %s\nAuthor: %s\nGenres: %s\nStatus: %s\n\nCurrent description:\n%s\n\nInstructions: %s",
			capturedMeta.Title,
			capturedMeta.Author,
			strings.Join(capturedMeta.Genres, ", "),
			capturedMeta.Status,
			capturedMeta.Summary,
			capturedInstructions,
		)

		newDesc, genErr := textGen.Generate(jobCtx, cfai.TextRequest{
			Model: capturedModel,
			Messages: []cfai.TextMessage{
				{Role: "system", Content: systemPrompt},
				{Role: "user", Content: userPrompt},
			},
			MaxTokens: capturedMaxTokens,
		})
		if genErr != nil {
			logger.Error("admin: text-gen description async failed", "job_id", jobID, "err", genErr)
			_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
				"status":        string(domain.TaskStatusFailed),
				"error_message": genErr.Error(),
				"finished":      time.Now().Format(time.RFC3339),
			})
			return
		}

		type resultPayload struct {
			Instructions   string `json:"instructions"`
			OldDescription string `json:"old_description"`
			NewDescription string `json:"new_description"`
		}
		resultJSON, _ := json.Marshal(resultPayload{
			Instructions:   capturedInstructions,
			OldDescription: capturedMeta.Summary,
			NewDescription: strings.TrimSpace(newDesc),
		})

		_ = store.UpdateAIJob(context.Background(), jobID, map[string]any{
			"status":      string(domain.TaskStatusDone),
			"items_done":  1,
			"items_total": 1,
			"payload":     string(resultJSON),
			"finished":    time.Now().Format(time.RFC3339),
		})
		logger.Info("admin: text-gen description async done", "job_id", jobID, "slug", capturedMeta.Slug)
	}()

	writeJSON(w, http.StatusAccepted, map[string]any{"job_id": jobID})
}
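
// Editor's sketch (not part of the change set): how a client might drive the async
// endpoint above. It assumes only the shapes visible in this handler — a 202
// response of {"job_id": ...} and an ai_job record readable via
// GET /api/admin/ai-jobs/{id}. pollDescriptionJob, the aiJob field tags, and the
// literal status strings ("done", "failed", "cancelled") are hypothetical stand-ins
// for the unseen domain.TaskStatus values.
func pollDescriptionJob(ctx context.Context, base, jobID string) (string, error) {
	type aiJob struct {
		Status  string `json:"status"`  // assumed JSON tag on domain.AIJob
		Payload string `json:"payload"` // carries old/new description when done
	}
	for {
		req, err := http.NewRequestWithContext(ctx, http.MethodGet,
			base+"/api/admin/ai-jobs/"+jobID, nil)
		if err != nil {
			return "", err
		}
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return "", err
		}
		var job aiJob
		decErr := json.NewDecoder(resp.Body).Decode(&job)
		resp.Body.Close()
		if decErr != nil {
			return "", decErr
		}
		switch job.Status {
		case "done":
			return job.Payload, nil
		case "failed", "cancelled":
			return "", fmt.Errorf("job %s ended with status %s", jobID, job.Status)
		}
		// Poll gently; the goroutine above patches the record when it finishes.
		select {
		case <-ctx.Done():
			return "", ctx.Err()
		case <-time.After(2 * time.Second):
		}
	}
}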

@@ -30,9 +30,13 @@ import (

	sentryhttp "github.com/getsentry/sentry-go/http"
	"github.com/libnovel/backend/internal/bookstore"
	"github.com/libnovel/backend/internal/cfai"
	"github.com/libnovel/backend/internal/domain"
	"github.com/libnovel/backend/internal/kokoro"
	"github.com/libnovel/backend/internal/meili"
	"github.com/libnovel/backend/internal/pockettts"
	"github.com/libnovel/backend/internal/taskqueue"
	"go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
)

// Dependencies holds all external services the backend server depends on.
@@ -44,6 +48,8 @@ type Dependencies struct {
	RankingStore bookstore.RankingStore
	// AudioStore checks audio object existence and computes MinIO keys.
	AudioStore bookstore.AudioStore
	// TranslationStore checks translation existence and reads/writes translated markdown.
	TranslationStore bookstore.TranslationStore
	// PresignStore generates short-lived MinIO URLs.
	PresignStore bookstore.PresignStore
	// ProgressStore reads/writes per-session reading progress.
@@ -51,6 +57,9 @@ type Dependencies struct {
	// CoverStore reads and writes book cover images from MinIO.
	// If nil, the cover endpoint falls back to a CDN redirect.
	CoverStore bookstore.CoverStore
	// ChapterImageStore reads and writes per-chapter illustration images from MinIO.
	// If nil, chapter image endpoints return 404/503.
	ChapterImageStore bookstore.ChapterImageStore
	// Producer creates scrape/audio tasks in PocketBase.
	Producer taskqueue.Producer
	// TaskReader reads scrape/audio task records from PocketBase.
@@ -58,9 +67,33 @@ type Dependencies struct {
	// SearchIndex provides full-text book search via Meilisearch.
	// If nil, the local-only fallback search is used.
	SearchIndex meili.Client
	// Kokoro is the Kokoro TTS client (used for voice list only in the backend;
	// audio generation is done by the runner).
	Kokoro kokoro.Client
	// PocketTTS is the pocket-tts client (used for voice list only in the backend;
	// audio generation is done by the runner).
	PocketTTS pockettts.Client
	// CFAI is the Cloudflare Workers AI TTS client (used for voice sample
	// generation and audio-stream live TTS; audio task generation is done by the runner).
	CFAI cfai.Client
	// ImageGen is the Cloudflare Workers AI image generation client.
	// If nil, image generation endpoints return 503.
	ImageGen cfai.ImageGenClient
	// TextGen is the Cloudflare Workers AI text generation client.
	// If nil, text generation endpoints return 503.
	TextGen cfai.TextGenClient
	// BookWriter writes book metadata and chapter refs to PocketBase.
	// Used by admin text-gen apply endpoints.
	BookWriter bookstore.BookWriter
	// ImportFileStore uploads raw PDF/EPUB files to MinIO for the runner to process.
	// Always wired to the concrete *storage.Store (not the Asynq wrapper).
	ImportFileStore bookstore.ImportFileStore
	// AIJobStore tracks long-running AI generation jobs in PocketBase.
	// If nil, job persistence is disabled (jobs still run but are not recorded).
	AIJobStore bookstore.AIJobStore
	// BookAdminStore provides admin-only operations: archive, unarchive, hard-delete.
	// If nil, the admin book management endpoints return 503.
	BookAdminStore bookstore.BookAdminStore
	// Log is the structured logger.
	Log *slog.Logger
}
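
// Editor's sketch (not part of the change set): minimal wiring of the CF AI fields
// added to Dependencies above. Only cfai.New and cfai.NewImageGen appear in this
// diff; the surrounding variable names and any omitted dependencies are assumptions,
// and no TextGen constructor is shown in this change set.
//
//	deps := Dependencies{
//		CFAI:     cfai.New(accountID, apiToken, ""), // "" → cfai.DefaultModel
//		ImageGen: cfai.NewImageGen(accountID, apiToken),
//		Log:      slog.Default(),
//	}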

@@ -83,7 +116,7 @@ type Server struct {

	// voiceMu guards cachedVoices. Populated lazily on first GET /api/voices.
	voiceMu      sync.RWMutex
	cachedVoices []domain.Voice
}

// New creates a Server from cfg and deps.
@@ -153,6 +186,94 @@ func (s *Server) ListenAndServe(ctx context.Context) error {
	mux.HandleFunc("POST /api/audio/{slug}/{n}", s.handleAudioGenerate)
	mux.HandleFunc("GET /api/audio/status/{slug}/{n}", s.handleAudioStatus)
	mux.HandleFunc("GET /api/audio-proxy/{slug}/{n}", s.handleAudioProxy)
	// Streaming audio: serves from MinIO if cached, else streams live TTS
	// while simultaneously uploading to MinIO for future requests.
	mux.HandleFunc("GET /api/audio-stream/{slug}/{n}", s.handleAudioStream)
	// TTS for arbitrary short text (chapter announcements) — no MinIO caching.
	mux.HandleFunc("GET /api/tts-announce", s.handleTTSAnnounce)
	// CF AI preview: generates only the first ~1 800-char chunk so the client
	// can start playing immediately while the full audio is generated by the runner.
	mux.HandleFunc("GET /api/audio-preview/{slug}/{n}", s.handleAudioPreview)

	// Translation task creation (backend creates task; runner executes via LibreTranslate)
	mux.HandleFunc("POST /api/translation/{slug}/{n}", s.handleTranslationGenerate)
	mux.HandleFunc("GET /api/translation/status/{slug}/{n}", s.handleTranslationStatus)
	mux.HandleFunc("GET /api/translation/{slug}/{n}", s.handleTranslationRead)

	// Admin translation endpoints
	mux.HandleFunc("GET /api/admin/translation/jobs", s.handleAdminTranslationJobs)
	mux.HandleFunc("POST /api/admin/translation/bulk", s.handleAdminTranslationBulk)

	// Admin audio endpoints
	mux.HandleFunc("GET /api/admin/audio/jobs", s.handleAdminAudioJobs)
	mux.HandleFunc("POST /api/admin/audio/bulk", s.handleAdminAudioBulk)
	mux.HandleFunc("POST /api/admin/audio/cancel-bulk", s.handleAdminAudioCancelBulk)

	// Admin image generation endpoints
	mux.HandleFunc("GET /api/admin/image-gen/models", s.handleAdminImageGenModels)
	mux.HandleFunc("POST /api/admin/image-gen", s.handleAdminImageGen)
	mux.HandleFunc("POST /api/admin/image-gen/async", s.handleAdminImageGenAsync)
	mux.HandleFunc("POST /api/admin/image-gen/save-cover", s.handleAdminImageGenSaveCover)
	mux.HandleFunc("POST /api/admin/image-gen/save-chapter-image", s.handleAdminImageGenSaveChapterImage)

	// Chapter image serving
	mux.HandleFunc("GET /api/chapter-image/{domain}/{slug}/{n}", s.handleGetChapterImage)
	mux.HandleFunc("HEAD /api/chapter-image/{domain}/{slug}/{n}", s.handleHeadChapterImage)

	// Admin text generation endpoints (chapter names + book description)
	mux.HandleFunc("GET /api/admin/text-gen/models", s.handleAdminTextGenModels)
	mux.HandleFunc("POST /api/admin/text-gen/chapter-names", s.handleAdminTextGenChapterNames)
	mux.HandleFunc("POST /api/admin/text-gen/chapter-names/async", s.handleAdminTextGenChapterNamesAsync)
	mux.HandleFunc("POST /api/admin/text-gen/chapter-names/apply", s.handleAdminTextGenApplyChapterNames)
	mux.HandleFunc("POST /api/admin/text-gen/description", s.handleAdminTextGenDescription)
	mux.HandleFunc("POST /api/admin/text-gen/description/async", s.handleAdminTextGenDescriptionAsync)
	mux.HandleFunc("POST /api/admin/text-gen/description/apply", s.handleAdminTextGenApplyDescription)

	// Admin catalogue enrichment endpoints
	mux.HandleFunc("POST /api/admin/text-gen/tagline", s.handleAdminTextGenTagline)
	mux.HandleFunc("POST /api/admin/text-gen/genres", s.handleAdminTextGenGenres)
	mux.HandleFunc("POST /api/admin/text-gen/genres/apply", s.handleAdminTextGenApplyGenres)
	mux.HandleFunc("POST /api/admin/text-gen/content-warnings", s.handleAdminTextGenContentWarnings)
	mux.HandleFunc("POST /api/admin/text-gen/quality-score", s.handleAdminTextGenQualityScore)
	mux.HandleFunc("POST /api/admin/catalogue/batch-covers", s.handleAdminBatchCovers)
	mux.HandleFunc("POST /api/admin/catalogue/batch-covers/cancel", s.handleAdminBatchCoversCancel)
	mux.HandleFunc("POST /api/admin/catalogue/refresh-metadata/{slug}", s.handleAdminRefreshMetadata)

	// Admin AI job tracking endpoints
	mux.HandleFunc("GET /api/admin/ai-jobs", s.handleAdminListAIJobs)
	mux.HandleFunc("GET /api/admin/ai-jobs/{id}", s.handleAdminGetAIJob)
	mux.HandleFunc("POST /api/admin/ai-jobs/{id}/cancel", s.handleAdminCancelAIJob)

	// Auto-prompt generation from book/chapter content
	mux.HandleFunc("POST /api/admin/image-gen/auto-prompt", s.handleAdminImageGenAutoPrompt)

	// Admin data repair endpoints
	mux.HandleFunc("POST /api/admin/dedup-chapters/{slug}", s.handleDedupChapters)

	// Admin book management (soft-delete / hard-delete)
	mux.HandleFunc("PATCH /api/admin/books/{slug}/archive", s.handleAdminArchiveBook)
	mux.HandleFunc("PATCH /api/admin/books/{slug}/unarchive", s.handleAdminUnarchiveBook)
	mux.HandleFunc("DELETE /api/admin/books/{slug}", s.handleAdminDeleteBook)

	// Admin chapter split (imported books)
	mux.HandleFunc("POST /api/admin/books/{slug}/split-chapters", s.handleAdminSplitChapters)

	// Import (PDF/EPUB)
	mux.HandleFunc("POST /api/admin/import", s.handleAdminImport)
	mux.HandleFunc("GET /api/admin/import", s.handleAdminImportList)
	mux.HandleFunc("GET /api/admin/import/{id}", s.handleAdminImportStatus)

	// Notifications
	mux.HandleFunc("GET /api/notifications", s.handleListNotifications)
	mux.HandleFunc("PATCH /api/notifications", s.handleMarkAllNotificationsRead)
	mux.HandleFunc("PATCH /api/notifications/{id}", s.handleMarkNotificationRead)
	mux.HandleFunc("DELETE /api/notifications", s.handleClearAllNotifications)
	mux.HandleFunc("DELETE /api/notifications/{id}", s.handleDismissNotification)

	// Web Push subscriptions
	mux.HandleFunc("GET /api/push-subscriptions/vapid-public-key", s.handleGetVAPIDPublicKey)
	mux.HandleFunc("POST /api/push-subscriptions", s.handleSavePushSubscription)
	mux.HandleFunc("DELETE /api/push-subscriptions", s.handleDeletePushSubscription)

	// Voices list
	mux.HandleFunc("GET /api/voices", s.handleVoices)
@@ -165,16 +286,27 @@ func (s *Server) ListenAndServe(ctx context.Context) error {
	mux.HandleFunc("GET /api/presign/avatar/{userId}", s.handlePresignAvatar)
	mux.HandleFunc("PUT /api/avatar-upload/{userId}", s.handleAvatarUpload)

	// EPUB export
	mux.HandleFunc("GET /api/export/{slug}", s.handleExportEPUB)

	// Reading progress
	mux.HandleFunc("GET /api/progress", s.handleGetProgress)
	mux.HandleFunc("POST /api/progress/{slug}", s.handleSetProgress)
	mux.HandleFunc("DELETE /api/progress/{slug}", s.handleDeleteProgress)

	// Wrap mux with OTel tracing (no-op when no TracerProvider is set),
	// then with Sentry for panic recovery and error reporting.
	var handler http.Handler = mux
	handler = otelhttp.NewHandler(handler, "libnovel.backend",
		otelhttp.WithMessageEvents(otelhttp.ReadEvents, otelhttp.WriteEvents),
	)
	handler = sentryhttp.New(sentryhttp.Options{Repanic: true}).Handle(handler)

	srv := &http.Server{
		Addr:         s.cfg.Addr,
		Handler:      handler,
		ReadTimeout:  15 * time.Second,
		WriteTimeout: 15 * time.Minute, // audio-stream can take several minutes for a full chapter
		IdleTimeout:  60 * time.Second,
	}

@@ -255,10 +387,10 @@ func jsonError(w http.ResponseWriter, status int, msg string) {
	_ = json.NewEncoder(w).Encode(map[string]string{"error": msg})
}

// voices returns the merged list of available voices from Kokoro and pocket-tts.
// On the first call it fetches from both services and caches the result.
// Falls back to the hardcoded Kokoro list on error.
func (s *Server) voices(ctx context.Context) []domain.Voice {
	s.voiceMu.RLock()
	cached := s.cachedVoices
	s.voiceMu.RUnlock()
@@ -266,23 +398,106 @@ func (s *Server) voices(ctx context.Context) []string {
		return cached
	}

	fetchCtx, cancel := context.WithTimeout(ctx, 5*time.Second)
	defer cancel()

	var result []domain.Voice

	// ── Kokoro voices ─────────────────────────────────────────────────────────
	var kokoroIDs []string
	if s.deps.Kokoro != nil {
		ids, err := s.deps.Kokoro.ListVoices(fetchCtx)
		if err != nil || len(ids) == 0 {
			s.deps.Log.Warn("backend: could not fetch kokoro voices, using built-in list", "err", err)
			ids = kokoroVoiceIDs
		} else {
			s.deps.Log.Info("backend: fetched kokoro voices", "count", len(ids))
		}
		kokoroIDs = ids
	} else {
		kokoroIDs = kokoroVoiceIDs
	}
	for _, id := range kokoroIDs {
		result = append(result, kokoroVoice(id))
	}

	// ── Pocket-TTS voices ─────────────────────────────────────────────────────
	if s.deps.PocketTTS != nil {
		ids, err := s.deps.PocketTTS.ListVoices(fetchCtx)
		if err != nil {
			s.deps.Log.Warn("backend: could not fetch pocket-tts voices", "err", err)
		} else {
			for _, id := range ids {
				result = append(result, pocketTTSVoice(id))
			}
			s.deps.Log.Info("backend: fetched pocket-tts voices", "count", len(ids))
		}
	}

	// ── Cloudflare AI voices ──────────────────────────────────────────────────
	if s.deps.CFAI != nil {
		for _, speaker := range cfai.Speakers() {
			gender := "m"
			if cfai.IsFemale(speaker) {
				gender = "f"
			}
			result = append(result, domain.Voice{
				ID:     cfai.VoiceID(speaker),
				Engine: "cfai",
				Lang:   "en",
				Gender: gender,
			})
		}
		s.deps.Log.Info("backend: loaded CF AI voices", "count", len(cfai.Speakers()))
	}

	s.voiceMu.Lock()
	s.cachedVoices = result
	s.voiceMu.Unlock()
	return result
}

// kokoroVoice builds a domain.Voice for a Kokoro voice ID.
// The two-character prefix encodes language and gender:
//
//	af/am → en-us f/m | bf/bm → en-gb f/m
//	ef/em → es f/m    | ff    → fr f
//	hf/hm → hi f/m    | if/im → it f/m
//	jf/jm → ja f/m    | pf/pm → pt f/m
//	zf/zm → zh f/m
func kokoroVoice(id string) domain.Voice {
	type meta struct{ lang, gender string }
	prefixMap := map[string]meta{
		"af": {"en-us", "f"}, "am": {"en-us", "m"},
		"bf": {"en-gb", "f"}, "bm": {"en-gb", "m"},
		"ef": {"es", "f"}, "em": {"es", "m"},
		"ff": {"fr", "f"},
		"hf": {"hi", "f"}, "hm": {"hi", "m"},
		"if": {"it", "f"}, "im": {"it", "m"},
		"jf": {"ja", "f"}, "jm": {"ja", "m"},
		"pf": {"pt", "f"}, "pm": {"pt", "m"},
		"zf": {"zh", "f"}, "zm": {"zh", "m"},
	}
	if len(id) >= 2 {
		if m, ok := prefixMap[id[:2]]; ok {
			return domain.Voice{ID: id, Engine: "kokoro", Lang: m.lang, Gender: m.gender}
		}
	}
	return domain.Voice{ID: id, Engine: "kokoro", Lang: "en", Gender: ""}
}
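
// Editor's sketch (not part of the change set): the prefix table above applied to a
// few IDs. The voice names are illustrative, not a claim about the real catalogue.
//
//	kokoroVoice("af_bella") // → {ID: "af_bella", Engine: "kokoro", Lang: "en-us", Gender: "f"}
//	kokoroVoice("jm_kumo")  // → {ID: "jm_kumo", Engine: "kokoro", Lang: "ja", Gender: "m"}
//	kokoroVoice("x")        // → {ID: "x", Engine: "kokoro", Lang: "en", Gender: ""} (no prefix match)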

// pocketTTSVoice builds a domain.Voice for a pocket-tts voice ID.
// All pocket-tts voices are English audiobook narrators.
func pocketTTSVoice(id string) domain.Voice {
	femaleVoices := map[string]struct{}{
		"alba": {}, "fantine": {}, "cosette": {}, "eponine": {},
		"azelma": {}, "anna": {}, "vera": {}, "mary": {}, "jane": {}, "eve": {},
	}
	gender := "m"
	if _, ok := femaleVoices[id]; ok {
		gender = "f"
	}
	return domain.Voice{ID: id, Engine: "pocket-tts", Lang: "en", Gender: gender}
}

// handleHealth handles GET /health.

@@ -14,6 +14,7 @@ package bookstore

import (
	"context"
	"io"
	"time"

	"github.com/libnovel/backend/internal/domain"
@@ -34,6 +35,11 @@ type BookWriter interface {

	// ChapterExists returns true if the markdown object for ref already exists.
	ChapterExists(ctx context.Context, slug string, ref domain.ChapterRef) bool

	// DeduplicateChapters removes duplicate chapters_idx records for slug,
	// keeping only one record per chapter number (the one with the latest
	// updated timestamp). Returns the number of duplicate records deleted.
	DeduplicateChapters(ctx context.Context, slug string) (int, error)
}

// BookReader is the read side used by the backend to serve content.
@@ -79,14 +85,24 @@ type RankingStore interface {

// AudioStore covers audio object storage (runner writes; backend reads).
type AudioStore interface {
	// AudioObjectKey returns the MinIO object key for a cached MP3 audio file.
	// Format: {slug}/{n}/{voice}.mp3
	AudioObjectKey(slug string, n int, voice string) string

	// AudioObjectKeyExt returns the MinIO object key for a cached audio file
	// with a custom extension (e.g. "mp3" or "wav").
	AudioObjectKeyExt(slug string, n int, voice, ext string) string

	// AudioExists returns true when the audio object is present in MinIO.
	AudioExists(ctx context.Context, key string) bool

	// PutAudio stores raw audio bytes under the given MinIO object key.
	PutAudio(ctx context.Context, key string, data []byte) error

	// PutAudioStream uploads audio from r to MinIO under key.
	// size must be the exact byte length of r, or -1 to use multipart upload.
	// contentType should be "audio/mpeg" or "audio/wav".
	PutAudioStream(ctx context.Context, key string, r io.Reader, size int64, contentType string) error
}

// PresignStore generates short-lived URLs — used exclusively by the backend.
@@ -141,3 +157,93 @@ type CoverStore interface {
	// CoverExists returns true when a cover image is stored for slug.
	CoverExists(ctx context.Context, slug string) bool
}

// AIJobStore manages AI generation jobs tracked in PocketBase.
type AIJobStore interface {
	// CreateAIJob inserts a new ai_job record with the given initial status
	// and returns its ID.
	CreateAIJob(ctx context.Context, job domain.AIJob) (string, error)
	// GetAIJob retrieves a single ai_job by ID.
	// Returns (zero, false, nil) when not found.
	GetAIJob(ctx context.Context, id string) (domain.AIJob, bool, error)
	// UpdateAIJob patches an existing ai_job record with the given fields.
	UpdateAIJob(ctx context.Context, id string, fields map[string]any) error
	// ListAIJobs returns all ai_job records sorted by started descending.
	ListAIJobs(ctx context.Context) ([]domain.AIJob, error)
}
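
// Editor's sketch (not part of the change set): the partial-update style the
// handlers above use with UpdateAIJob — patch only the keys that changed. The key
// names mirror the handler code earlier in this diff; ctx, store, jobID and
// resultJSON are stand-ins for the surrounding locals.
//
//	_ = store.UpdateAIJob(ctx, jobID, map[string]any{
//		"status":   string(domain.TaskStatusDone),
//		"payload":  string(resultJSON),
//		"finished": time.Now().Format(time.RFC3339),
//	})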

// ChapterImageStore covers per-chapter illustration images stored in MinIO.
// The backend admin writes them; the backend serves them.
type ChapterImageStore interface {
	// PutChapterImage stores a raw image for chapter n of slug in MinIO.
	PutChapterImage(ctx context.Context, slug string, n int, data []byte, contentType string) error

	// GetChapterImage retrieves the image for chapter n of slug.
	// Returns (nil, "", false, nil) when no image exists.
	GetChapterImage(ctx context.Context, slug string, n int) ([]byte, string, bool, error)

	// ChapterImageExists returns true when an image is stored for slug/n.
	ChapterImageExists(ctx context.Context, slug string, n int) bool
}

// TranslationStore covers machine-translated chapter storage in MinIO.
// The runner writes translations; the backend reads them.
type TranslationStore interface {
	// TranslationObjectKey returns the MinIO object key for a cached translation.
	TranslationObjectKey(lang, slug string, n int) string

	// TranslationExists returns true when the translation object is present in MinIO.
	TranslationExists(ctx context.Context, key string) bool

	// PutTranslation stores raw translated markdown under the given MinIO object key.
	PutTranslation(ctx context.Context, key string, data []byte) error

	// GetTranslation retrieves translated markdown from MinIO.
	GetTranslation(ctx context.Context, key string) (string, error)
}

// Chapter represents a single chapter extracted from PDF/EPUB.
type Chapter struct {
	Number  int    // 1-based chapter number
	Title   string // chapter title (may be empty)
	Content string // plain text content
}

// BookImporter handles PDF/EPUB file parsing and chapter extraction.
// Used by the runner to import books from uploaded files.
type BookImporter interface {
	// Import extracts chapters from a PDF or EPUB file stored in MinIO.
	// Returns the extracted chapters or an error.
	Import(ctx context.Context, objectKey, fileType string) ([]Chapter, error)
}

// BookAdminStore covers admin-only operations for managing books in the catalogue.
// All methods require admin authorisation at the HTTP handler level.
type BookAdminStore interface {
	// ArchiveBook sets archived=true on a book record, hiding it from all
	// public search and catalogue responses. Returns ErrNotFound when the
	// slug does not exist.
	ArchiveBook(ctx context.Context, slug string) error

	// UnarchiveBook clears archived on a book record, making it publicly
	// visible again. Returns ErrNotFound when the slug does not exist.
	UnarchiveBook(ctx context.Context, slug string) error

	// DeleteBook permanently removes all data for a book:
	//   - PocketBase books record
	//   - All PocketBase chapters_idx records
	//   - All MinIO chapter markdown objects ({slug}/chapter-*.md)
	//   - MinIO cover image (covers/{slug}.jpg)
	// The caller is responsible for also deleting the Meilisearch document.
	DeleteBook(ctx context.Context, slug string) error
}

// ImportFileStore uploads raw import files to object storage.
// Kept separate from BookImporter so the HTTP handler can upload the file
// without a concrete type assertion, regardless of which Producer is wired.
type ImportFileStore interface {
	PutImportFile(ctx context.Context, objectKey string, data []byte) error
	// PutImportChapters stores the pre-parsed chapters JSON under the given key.
	PutImportChapters(ctx context.Context, key string, data []byte) error
	// GetImportChapters retrieves the pre-parsed chapters JSON.
	GetImportChapters(ctx context.Context, key string) ([]byte, error)
}

@@ -2,6 +2,7 @@ package bookstore_test

import (
	"context"
	"io"
	"testing"
	"time"

@@ -38,8 +39,9 @@ func (m *mockStore) ReadChapter(_ context.Context, _ string, _ int) (string, err
func (m *mockStore) ListChapters(_ context.Context, _ string) ([]domain.ChapterInfo, error) {
	return nil, nil
}
func (m *mockStore) CountChapters(_ context.Context, _ string) int                { return 0 }
func (m *mockStore) ReindexChapters(_ context.Context, _ string) (int, error)     { return 0, nil }
func (m *mockStore) DeduplicateChapters(_ context.Context, _ string) (int, error) { return 0, nil }

// RankingStore
func (m *mockStore) WriteRankingItem(_ context.Context, _ domain.RankingItem) error { return nil }
@@ -51,9 +53,13 @@ func (m *mockStore) RankingFreshEnough(_ context.Context, _ time.Duration) (bool
}

// AudioStore
func (m *mockStore) AudioObjectKey(_ string, _ int, _ string) string       { return "" }
func (m *mockStore) AudioObjectKeyExt(_ string, _ int, _, _ string) string { return "" }
func (m *mockStore) AudioExists(_ context.Context, _ string) bool          { return false }
func (m *mockStore) PutAudio(_ context.Context, _ string, _ []byte) error  { return nil }
func (m *mockStore) PutAudioStream(_ context.Context, _ string, _ io.Reader, _ int64, _ string) error {
	return nil
}

// PresignStore
func (m *mockStore) PresignChapter(_ context.Context, _ string, _ int, _ time.Duration) (string, error) {

315
backend/internal/cfai/client.go
Normal file
@@ -0,0 +1,315 @@
// Package cfai provides a client for Cloudflare Workers AI Text-to-Speech models.
//
// The Cloudflare Workers AI REST API is used to run TTS models:
//
//	POST https://api.cloudflare.com/client/v4/accounts/{accountID}/ai/run/{model}
//	Authorization: Bearer {apiToken}
//	Content-Type: application/json
//	{ "text": "...", "speaker": "luna" }
//
//	→ 200 audio/mpeg — raw MP3 bytes
//
// Currently supported model: @cf/deepgram/aura-2-en (40 English speakers).
// Voice IDs are prefixed with "cfai:" to distinguish them from Kokoro/pocket-tts
// voices (e.g. "cfai:luna", "cfai:orion").
//
// The API is batch-only (no streaming), so GenerateAudio waits for the full
// response. There is no 100-second Cloudflare proxy timeout because we are
// calling the Cloudflare API directly, not routing through a Cloudflare-proxied
// homelab tunnel.
//
// The aura-2-en model enforces a hard 2 000-character limit per request.
// GenerateAudio transparently splits longer texts into sentence-boundary chunks
// and concatenates the resulting MP3 frames.
package cfai

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"
)

const (
	// DefaultModel is the Cloudflare Workers AI TTS model used by default.
	DefaultModel = "@cf/deepgram/aura-2-en"

	// voicePrefix is the prefix used to namespace CF AI voice IDs.
	voicePrefix = "cfai:"
)

// aura2Speakers is the exhaustive list of speakers supported by aura-2-en.
var aura2Speakers = []string{
	"amalthea", "andromeda", "apollo", "arcas", "aries", "asteria",
	"athena", "atlas", "aurora", "callista", "cora", "cordelia",
	"delia", "draco", "electra", "harmonia", "helena", "hera",
	"hermes", "hyperion", "iris", "janus", "juno", "jupiter",
	"luna", "mars", "minerva", "neptune", "odysseus", "ophelia",
	"orion", "orpheus", "pandora", "phoebe", "pluto", "saturn",
	"thalia", "theia", "vesta", "zeus",
}

// femaleSpeakers is the set of aura-2-en speaker names that are female voices.
var femaleSpeakers = map[string]struct{}{
	"amalthea": {}, "andromeda": {}, "aries": {}, "asteria": {},
	"athena": {}, "aurora": {}, "callista": {}, "cora": {},
	"cordelia": {}, "delia": {}, "electra": {}, "harmonia": {},
	"helena": {}, "hera": {}, "iris": {}, "juno": {},
	"luna": {}, "minerva": {}, "ophelia": {}, "pandora": {},
	"phoebe": {}, "thalia": {}, "theia": {}, "vesta": {},
}

// IsCFAIVoice reports whether voice is served by the Cloudflare AI client.
// CF AI voices use the "cfai:" prefix, e.g. "cfai:luna".
func IsCFAIVoice(voice string) bool {
	return strings.HasPrefix(voice, voicePrefix)
}

// SpeakerName strips the "cfai:" prefix and returns the bare speaker name.
// If voice is not a CF AI voice the original string is returned unchanged.
func SpeakerName(voice string) string {
	return strings.TrimPrefix(voice, voicePrefix)
}

// VoiceID returns the full voice ID (with prefix) for a bare speaker name.
func VoiceID(speaker string) string {
	return voicePrefix + speaker
}

// VoiceSampleKey returns the MinIO object key for a CF AI voice sample MP3.
func VoiceSampleKey(voice string) string {
	safe := strings.Map(func(r rune) rune {
		if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') ||
			(r >= '0' && r <= '9') || r == '_' || r == '-' {
			return r
		}
		return '_'
	}, voice)
	return fmt.Sprintf("_voice-samples/%s.mp3", safe)
}

// IsFemale reports whether the given CF AI voice ID (with or without prefix)
// is a female speaker.
func IsFemale(voice string) bool {
	speaker := SpeakerName(voice)
	_, ok := femaleSpeakers[speaker]
	return ok
}

// Speakers returns all available bare speaker names for aura-2-en.
func Speakers() []string {
	out := make([]string, len(aura2Speakers))
	copy(out, aura2Speakers)
	return out
}

// Client is the interface for interacting with Cloudflare Workers AI TTS.
type Client interface {
	// GenerateAudio synthesises text using the given voice (e.g. "cfai:luna")
	// and returns raw MP3 bytes.
	GenerateAudio(ctx context.Context, text, voice string) ([]byte, error)

	// StreamAudioMP3 is not natively supported by the CF AI batch API.
	// It buffers the full response and returns an io.ReadCloser over the bytes,
	// so callers can use it like a stream without special-casing.
	StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error)

	// StreamAudioWAV is not natively supported; the CF AI model returns MP3.
	// This method returns the same MP3 bytes wrapped as an io.ReadCloser.
	StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error)

	// ListVoices returns all available voice IDs (with the "cfai:" prefix).
	ListVoices(ctx context.Context) ([]string, error)
}

// httpClient is the concrete CF AI HTTP client.
type httpClient struct {
	accountID string
	apiToken  string
	model     string
	http      *http.Client
}

// New returns a Client for the given Cloudflare account and API token.
// model defaults to DefaultModel when empty.
func New(accountID, apiToken, model string) Client {
	if model == "" {
		model = DefaultModel
	}
	return &httpClient{
		accountID: accountID,
		apiToken:  apiToken,
		model:     model,
		http:      &http.Client{Timeout: 5 * time.Minute},
	}
}

// GenerateAudio calls the Cloudflare Workers AI TTS endpoint and returns MP3 bytes.
// The aura-2-en model rejects inputs longer than 2 000 characters, so this method
// splits the text into sentence-bounded chunks and concatenates the MP3 responses.
func (c *httpClient) GenerateAudio(ctx context.Context, text, voice string) ([]byte, error) {
	if text == "" {
		return nil, fmt.Errorf("cfai: empty text")
	}
	speaker := SpeakerName(voice)
	if speaker == "" {
		speaker = "luna"
	}

	chunks := splitText(text, 1800) // stay comfortably under the 2 000-char limit
	var combined []byte
	for _, chunk := range chunks {
		part, err := c.generateChunk(ctx, chunk, speaker)
		if err != nil {
			return nil, err
		}
		combined = append(combined, part...)
	}
	return combined, nil
}

// generateChunk sends a single ≤2 000-character request and returns MP3 bytes.
func (c *httpClient) generateChunk(ctx context.Context, text, speaker string) ([]byte, error) {
	body, err := json.Marshal(map[string]any{
		"text":    text,
		"speaker": speaker,
	})
	if err != nil {
		return nil, fmt.Errorf("cfai: marshal request: %w", err)
	}

	url := fmt.Sprintf("https://api.cloudflare.com/client/v4/accounts/%s/ai/run/%s",
		c.accountID, c.model)
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(body))
	if err != nil {
		return nil, fmt.Errorf("cfai: build request: %w", err)
	}
	req.Header.Set("Authorization", "Bearer "+c.apiToken)
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("cfai: request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		body, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("cfai: server returned %d: %s", resp.StatusCode, strings.TrimSpace(string(body)))
	}

	mp3, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("cfai: read response: %w", err)
	}
	return mp3, nil
}

// splitText splits src into chunks of at most maxChars characters each.
// It tries to break at paragraph boundaries first, then at sentence-ending
// punctuation (. ! ?), and falls back to the nearest space.
func splitText(src string, maxChars int) []string {
	if len(src) <= maxChars {
		return []string{src}
	}

	var chunks []string
	remaining := src

	for len(remaining) > 0 {
		if len(remaining) <= maxChars {
			chunks = append(chunks, strings.TrimSpace(remaining))
			break
		}

		// Search window: at most the first maxChars runes of remaining.
		// Trimming by runes (not bytes) guarantees we never split a
		// multi-byte character when no natural boundary is found.
		window := remaining
		if len(window) > maxChars {
			window = runeSlice(remaining, maxChars)
		}

		cut := -1

		// 1. Prefer paragraph break (\n\n or \n).
		if i := strings.LastIndex(window, "\n\n"); i > 0 {
			cut = i + 2
		} else if i := strings.LastIndex(window, "\n"); i > 0 {
			cut = i + 1
		}

		// 2. Fall back to sentence-ending punctuation followed by a space.
		if cut < 0 {
			for _, punct := range []string{". ", "! ", "? ", ".\n", "!\n", "?\n"} {
				if i := strings.LastIndex(window, punct); i > 0 {
					candidate := i + len(punct)
					if cut < 0 || candidate > cut {
						cut = candidate
					}
				}
			}
		}

		// 3. Last resort: nearest space.
		if cut < 0 {
			if i := strings.LastIndex(window, " "); i > 0 {
				cut = i + 1
			}
		}

		// 4. Hard cut at maxChars runes if no boundary found.
		if cut < 0 {
			cut = len(window)
		}

		chunk := strings.TrimSpace(remaining[:cut])
		if chunk != "" {
			chunks = append(chunks, chunk)
		}
		remaining = remaining[cut:]
	}

	return chunks
}
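
// Editor's sketch (not part of the change set): splitText boundary selection on a
// small input. With maxChars = 16 the paragraph break wins in the first pass and
// the nearest-space fallback is used in the second:
//
//	splitText("One. Two.\n\nThree four five six.", 16)
//	// → ["One. Two.", "Three four five", "six."]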

// runeSlice returns the first n runes of s as a string.
func runeSlice(s string, n int) string {
	count := 0
	for i := range s {
		if count == n {
			return s[:i]
		}
		count++
	}
	return s
}

// StreamAudioMP3 generates audio and wraps the MP3 bytes as an io.ReadCloser.
func (c *httpClient) StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error) {
	mp3, err := c.GenerateAudio(ctx, text, voice)
	if err != nil {
		return nil, err
	}
	return io.NopCloser(bytes.NewReader(mp3)), nil
}

// StreamAudioWAV generates audio (MP3) and wraps it as an io.ReadCloser.
// Note: the CF AI aura-2-en model returns MP3 regardless of the method name.
func (c *httpClient) StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error) {
	return c.StreamAudioMP3(ctx, text, voice)
}

// ListVoices returns all available CF AI voice IDs (with the "cfai:" prefix).
func (c *httpClient) ListVoices(_ context.Context) ([]string, error) {
	ids := make([]string, len(aura2Speakers))
	for i, s := range aura2Speakers {
		ids[i] = VoiceID(s)
	}
	return ids, nil
}
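
// Editor's sketch (not part of the change set): end-to-end use of the client above.
// The env var names follow the CFAI_ACCOUNT_ID/CFAI_API_TOKEN convention seen in
// the handler error messages; everything else is this file's own API.
//
//	c := cfai.New(os.Getenv("CFAI_ACCOUNT_ID"), os.Getenv("CFAI_API_TOKEN"), "")
//	mp3, err := c.GenerateAudio(ctx, chapterText, "cfai:luna")
//	if err != nil {
//		return err
//	}
//	// Long texts are chunked at sentence boundaries automatically; mp3 holds the
//	// concatenated frames, ready to cache in MinIO.
//	_ = os.WriteFile("chapter.mp3", mp3, 0o644)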

475
backend/internal/cfai/image.go
Normal file
@@ -0,0 +1,475 @@
// Image generation via Cloudflare Workers AI text-to-image models.
//
// API reference:
//
//	POST https://api.cloudflare.com/client/v4/accounts/{accountID}/ai/run/{model}
//	Authorization: Bearer {apiToken}
//
// FLUX.2 models (flux-2-dev, flux-2-klein-4b, flux-2-klein-9b):
//
//	Content-Type: multipart/form-data
//	Fields: prompt, num_steps, width, height, guidance, image_b64 (optional)
//	Response: { "image": "<base64 JPEG>" }
//
// Other models (flux-1-schnell, SDXL, SD 1.5):
//
//	Content-Type: application/json
//	Body: { "prompt": "...", "num_steps": 20 }
//	Response: { "image": "<base64>" } or raw bytes depending on model
//
// Reference-image request (FLUX.2):
//
//	Same multipart form; include image_b64 field with base64-encoded reference.
//
// Reference-image request (SD img2img):
//
//	JSON body: { "prompt": "...", "image": [r,g,b,a,...], "strength": 0.75 }
//
// Recommended models for LibNovel:
//   - Book covers (no reference): flux-2-dev, flux-2-klein-9b, lucid-origin
//   - Chapter images (speed): flux-2-klein-4b, flux-1-schnell
//   - With reference image: flux-2-dev, flux-2-klein-9b, sd-v1-5-img2img
package cfai

import (
	"bytes"
	"context"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"image"
	"image/draw"
	"image/jpeg" // also registers the JPEG decoder
	"image/png"  // also registers the PNG decoder
	"io"
	"mime/multipart"
	"net/http"
	"strings"
	"time"
)

// ImageModel identifies a Cloudflare Workers AI text-to-image model.
type ImageModel string

const (
	// ImageModelFlux2Dev — best quality, multi-reference. Recommended for covers.
	ImageModelFlux2Dev ImageModel = "@cf/black-forest-labs/flux-2-dev"
	// ImageModelFlux2Klein9B — 9B params, multi-reference. Good for covers.
	ImageModelFlux2Klein9B ImageModel = "@cf/black-forest-labs/flux-2-klein-9b"
	// ImageModelFlux2Klein4B — ultra-fast, unified gen+edit. Recommended for chapters.
	ImageModelFlux2Klein4B ImageModel = "@cf/black-forest-labs/flux-2-klein-4b"
	// ImageModelFlux1Schnell — fastest, text-only. Good for quick illustrations.
	ImageModelFlux1Schnell ImageModel = "@cf/black-forest-labs/flux-1-schnell"
	// ImageModelSDXLLightning — fast 1024px generation.
	ImageModelSDXLLightning ImageModel = "@cf/bytedance/stable-diffusion-xl-lightning"
	// ImageModelSD15Img2Img — explicit img2img with flat RGBA reference.
	ImageModelSD15Img2Img ImageModel = "@cf/runwayml/stable-diffusion-v1-5-img2img"
	// ImageModelSDXLBase — Stability AI SDXL base.
	ImageModelSDXLBase ImageModel = "@cf/stabilityai/stable-diffusion-xl-base-1.0"
	// ImageModelLucidOrigin — Leonardo AI; strong prompt adherence.
	ImageModelLucidOrigin ImageModel = "@cf/leonardo/lucid-origin"
	// ImageModelPhoenix10 — Leonardo AI; accurate text rendering.
	ImageModelPhoenix10 ImageModel = "@cf/leonardo/phoenix-1.0"

	// DefaultImageModel is the default model for book-cover generation.
	DefaultImageModel = ImageModelFlux2Dev
)

// ImageModelInfo describes a single image generation model.
type ImageModelInfo struct {
	ID             string   `json:"id"`
	Label          string   `json:"label"`
	Provider       string   `json:"provider"`
	SupportsRef    bool     `json:"supports_ref"`
	RecommendedFor []string `json:"recommended_for"` // "cover" and/or "chapter"
	Description    string   `json:"description"`
}

// AllImageModels returns metadata about every supported image model.
func AllImageModels() []ImageModelInfo {
	return []ImageModelInfo{
		{
			ID: string(ImageModelFlux2Dev), Label: "FLUX.2 Dev", Provider: "Black Forest Labs",
			SupportsRef: true, RecommendedFor: []string{"cover"},
			Description: "Best quality; multi-reference editing. Recommended for book covers.",
		},
		{
			ID: string(ImageModelFlux2Klein9B), Label: "FLUX.2 Klein 9B", Provider: "Black Forest Labs",
			SupportsRef: true, RecommendedFor: []string{"cover"},
			Description: "9B parameters with multi-reference support.",
		},
		{
			ID: string(ImageModelFlux2Klein4B), Label: "FLUX.2 Klein 4B", Provider: "Black Forest Labs",
			SupportsRef: true, RecommendedFor: []string{"chapter"},
			Description: "Ultra-fast unified gen+edit. Recommended for chapter images.",
		},
		{
			ID: string(ImageModelFlux1Schnell), Label: "FLUX.1 Schnell", Provider: "Black Forest Labs",
			SupportsRef: false, RecommendedFor: []string{"chapter"},
			Description: "Fastest inference. Good for quick chapter illustrations.",
		},
		{
			ID: string(ImageModelSDXLLightning), Label: "SDXL Lightning", Provider: "ByteDance",
			SupportsRef: false, RecommendedFor: []string{"chapter"},
			Description: "Lightning-fast 1024px images in a few steps.",
		},
		{
			ID: string(ImageModelSD15Img2Img), Label: "SD 1.5 img2img", Provider: "RunwayML",
			SupportsRef: true, RecommendedFor: []string{"cover", "chapter"},
			Description: "Explicit img2img: generates from a reference image + prompt.",
		},
		{
			ID: string(ImageModelSDXLBase), Label: "SDXL Base 1.0", Provider: "Stability AI",
			SupportsRef: false, RecommendedFor: []string{"cover"},
			Description: "Stable Diffusion XL base model.",
		},
		{
			ID: string(ImageModelLucidOrigin), Label: "Lucid Origin", Provider: "Leonardo AI",
			SupportsRef: false, RecommendedFor: []string{"cover"},
			Description: "Highly prompt-responsive; strong graphic design and HD renders.",
		},
		{
			ID: string(ImageModelPhoenix10), Label: "Phoenix 1.0", Provider: "Leonardo AI",
			SupportsRef: false, RecommendedFor: []string{"cover"},
			Description: "Exceptional prompt adherence; accurate text rendering.",
		},
	}
}

// ImageRequest is the input to GenerateImage / GenerateImageFromReference.
type ImageRequest struct {
	// Prompt is the text description of the desired image.
	Prompt string
	// Model is the CF Workers AI model. Defaults to DefaultImageModel when empty.
	Model ImageModel
	// NumSteps controls inference quality (default 20). Range: 1–20.
	NumSteps int
	// Width and Height in pixels. 0 = model default (typically 1024x1024).
	Width, Height int
	// Guidance controls prompt adherence (default 7.5).
	Guidance float64
	// Strength for img2img: 0.0 = copy reference, 1.0 = ignore reference (default 0.75).
	Strength float64
}

// ImageGenClient generates images via Cloudflare Workers AI.
type ImageGenClient interface {
	// GenerateImage creates an image from a text prompt only.
	// Returns raw image bytes (JPEG or PNG, depending on the model).
	GenerateImage(ctx context.Context, req ImageRequest) ([]byte, error)

	// GenerateImageFromReference creates an image from a text prompt + reference image.
	// refImage should be PNG or JPEG bytes. Returns raw image bytes.
	GenerateImageFromReference(ctx context.Context, req ImageRequest, refImage []byte) ([]byte, error)

	// Models returns metadata about all supported image models.
	Models() []ImageModelInfo
}

// imageGenHTTPClient is the concrete CF AI image generation client.
type imageGenHTTPClient struct {
	accountID string
	apiToken  string
	http      *http.Client
}

// NewImageGen returns an ImageGenClient for the given Cloudflare account.
func NewImageGen(accountID, apiToken string) ImageGenClient {
	return &imageGenHTTPClient{
		accountID: accountID,
		apiToken:  apiToken,
		http:      &http.Client{Timeout: 5 * time.Minute},
	}
}

// requiresMultipart reports whether the model requires a multipart/form-data
// request body instead of JSON. FLUX.2 models on Cloudflare Workers AI changed
// their API to require multipart and return {"image":"<base64>"} instead of
// raw image bytes.
func requiresMultipart(model ImageModel) bool {
	switch model {
	case ImageModelFlux2Dev, ImageModelFlux2Klein4B, ImageModelFlux2Klein9B:
		return true
	default:
		return false
	}
}

// GenerateImage generates an image from text only.
func (c *imageGenHTTPClient) GenerateImage(ctx context.Context, req ImageRequest) ([]byte, error) {
	req = applyImageDefaults(req)

	// FLUX.2 multipart models use "steps"; JSON models use "num_steps".
	stepsKey := "num_steps"
	if requiresMultipart(req.Model) {
		stepsKey = "steps"
	}

	fields := map[string]any{
		"prompt": req.Prompt,
		stepsKey: req.NumSteps,
	}
	if req.Width > 0 {
		fields["width"] = req.Width
	}
	if req.Height > 0 {
		fields["height"] = req.Height
	}
	if req.Guidance > 0 {
		fields["guidance"] = req.Guidance
	}
	return c.callImageAPI(ctx, req.Model, fields, nil)
}

// refImageMaxDim is the maximum dimension (width or height) for reference images
// sent to Cloudflare Workers AI. CF's JSON body limit is ~4 MB; a 768px JPEG
// stays well under that while preserving enough detail for img2img guidance.
const refImageMaxDim = 768

// GenerateImageFromReference generates an image from a text prompt + reference image.
func (c *imageGenHTTPClient) GenerateImageFromReference(ctx context.Context, req ImageRequest, refImage []byte) ([]byte, error) {
	if len(refImage) == 0 {
		return c.GenerateImage(ctx, req)
	}
	req = applyImageDefaults(req)

	// Shrink the reference image if it exceeds the safe payload size.
	refImage = resizeRefImage(refImage, refImageMaxDim)

	// FLUX.2 multipart models use "steps"; JSON models use "num_steps".
	stepsKey := "num_steps"
	if requiresMultipart(req.Model) {
		stepsKey = "steps"
	}

	fields := map[string]any{
		"prompt": req.Prompt,
		stepsKey: req.NumSteps,
	}
	if req.Width > 0 {
		fields["width"] = req.Width
	}
	if req.Height > 0 {
		fields["height"] = req.Height
	}
	if req.Guidance > 0 {
		fields["guidance"] = req.Guidance
	}

	if requiresMultipart(req.Model) {
		// FLUX.2: reference image sent as base64 form field "image_b64".
		fields["image_b64"] = base64.StdEncoding.EncodeToString(refImage)
		if req.Strength > 0 {
			fields["strength"] = req.Strength
		}
		return c.callImageAPI(ctx, req.Model, fields, nil)
	}

	if req.Model == ImageModelSD15Img2Img {
		pixels, err := decodeImageToRGBA(refImage)
		if err != nil {
			return nil, fmt.Errorf("cfai/image: decode reference: %w", err)
		}
		strength := req.Strength
		if strength <= 0 {
			strength = 0.75
		}
		fields["image"] = pixels
		fields["strength"] = strength
		return c.callImageAPI(ctx, req.Model, fields, nil)
	}

	// Other FLUX models: image_b64 JSON field.
	fields["image_b64"] = base64.StdEncoding.EncodeToString(refImage)
	if req.Strength > 0 {
		fields["strength"] = req.Strength
	}
	return c.callImageAPI(ctx, req.Model, fields, nil)
}

// Models returns all supported image model metadata.
func (c *imageGenHTTPClient) Models() []ImageModelInfo {
	return AllImageModels()
}

func (c *imageGenHTTPClient) callImageAPI(ctx context.Context, model ImageModel, fields map[string]any, _ []byte) ([]byte, error) {
	cfURL := fmt.Sprintf("https://api.cloudflare.com/client/v4/accounts/%s/ai/run/%s",
		c.accountID, string(model))

	var (
		bodyReader  io.Reader
		contentType string
	)

	if requiresMultipart(model) {
		// Build a multipart/form-data body from the fields map.
		// All values are serialised to their string representation.
		var buf bytes.Buffer
		mw := multipart.NewWriter(&buf)
		for k, v := range fields {
			var strVal string
			switch tv := v.(type) {
			case string:
				strVal = tv
			default:
				encoded, merr := json.Marshal(tv)
				if merr != nil {
					return nil, fmt.Errorf("cfai/image: marshal field %q: %w", k, merr)
				}
				strVal = strings.Trim(string(encoded), `"`)
			}
			if werr := mw.WriteField(k, strVal); werr != nil {
				return nil, fmt.Errorf("cfai/image: write field %q: %w", k, werr)
			}
		}
		if cerr := mw.Close(); cerr != nil {
			return nil, fmt.Errorf("cfai/image: close multipart writer: %w", cerr)
		}
		bodyReader = &buf
		contentType = mw.FormDataContentType()
	} else {
		encoded, merr := json.Marshal(fields)
		if merr != nil {
			return nil, fmt.Errorf("cfai/image: marshal: %w", merr)
		}
		bodyReader = bytes.NewReader(encoded)
		contentType = "application/json"
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, cfURL, bodyReader)
	if err != nil {
		return nil, fmt.Errorf("cfai/image: build request: %w", err)
	}
	req.Header.Set("Authorization", "Bearer "+c.apiToken)
	req.Header.Set("Content-Type", contentType)

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("cfai/image: http: %w", err)
	}
	defer resp.Body.Close()

	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, fmt.Errorf("cfai/image: read response: %w", err)
	}

	if resp.StatusCode != http.StatusOK {
		msg := string(respBody)
		if len(msg) > 300 {
			msg = msg[:300]
		}
		return nil, fmt.Errorf("cfai/image: model %s returned %d: %s", model, resp.StatusCode, msg)
	}

	// Try to parse as {"image": "<base64>"} first (FLUX.2 and newer models).
	// Fall back to treating the body as raw image bytes for legacy models.
	var jsonResp struct {
		Image string `json:"image"`
	}
	if jerr := json.Unmarshal(respBody, &jsonResp); jerr == nil && jsonResp.Image != "" {
		imgBytes, decErr := base64.StdEncoding.DecodeString(jsonResp.Image)
		if decErr != nil {
			// Try raw (no padding) base64.
			imgBytes, decErr = base64.RawStdEncoding.DecodeString(jsonResp.Image)
			if decErr != nil {
				return nil, fmt.Errorf("cfai/image: decode base64 response: %w", decErr)
			}
		}
		return imgBytes, nil
	}

	// Legacy: model returned raw image bytes directly.
	return respBody, nil
}

func applyImageDefaults(req ImageRequest) ImageRequest {
	if req.Model == "" {
		req.Model = DefaultImageModel
	}
	if req.NumSteps <= 0 {
		req.NumSteps = 20
	}
	return req
}
|
||||
|
||||
// resizeRefImage down-scales an image so that its longest side is at most maxDim
|
||||
// pixels, then re-encodes it as JPEG (quality 85). If the image is already small
|
||||
// enough, or if decoding fails, the original bytes are returned unchanged.
|
||||
// This keeps the JSON payload well under Cloudflare Workers AI's 4 MB body limit.
|
||||
func resizeRefImage(data []byte, maxDim int) []byte {
|
||||
src, format, err := image.Decode(bytes.NewReader(data))
|
||||
if err != nil {
|
||||
return data
|
||||
}
|
||||
b := src.Bounds()
|
||||
w, h := b.Dx(), b.Dy()
|
||||
|
||||
longest := w
|
||||
if h > longest {
|
||||
longest = h
|
||||
}
|
||||
if longest <= maxDim {
|
||||
return data // already fits
|
||||
}
|
||||
|
||||
// Compute target dimensions preserving aspect ratio.
|
||||
scale := float64(maxDim) / float64(longest)
|
||||
newW := int(float64(w)*scale + 0.5)
|
||||
newH := int(float64(h)*scale + 0.5)
|
||||
if newW < 1 {
|
||||
newW = 1
|
||||
}
|
||||
if newH < 1 {
|
||||
newH = 1
|
||||
}
|
||||
|
||||
// Nearest-neighbour downsample (no extra deps, sufficient for reference guidance).
|
||||
dst := image.NewRGBA(image.Rect(0, 0, newW, newH))
|
||||
for y := 0; y < newH; y++ {
|
||||
for x := 0; x < newW; x++ {
|
||||
srcX := b.Min.X + int(float64(x)/scale)
|
||||
srcY := b.Min.Y + int(float64(y)/scale)
|
||||
draw.Draw(dst, image.Rect(x, y, x+1, y+1), src, image.Pt(srcX, srcY), draw.Src)
|
||||
}
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if format == "jpeg" {
|
||||
if encErr := jpeg.Encode(&buf, dst, &jpeg.Options{Quality: 85}); encErr != nil {
|
||||
return data
|
||||
}
|
||||
} else {
|
||||
if encErr := png.Encode(&buf, dst); encErr != nil {
|
||||
return data
|
||||
}
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
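
// Worked example: a 1920×1080 source with maxDim=768 gives
// scale = 768/1920 = 0.4, so the target is 768×432. The longest side lands
// exactly on maxDim and the aspect ratio is preserved.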

// decodeImageToRGBA decodes PNG/JPEG bytes to a flat []uint8 RGBA pixel array
// required by the stable-diffusion-v1-5-img2img model.
func decodeImageToRGBA(data []byte) ([]uint8, error) {
	img, _, err := image.Decode(bytes.NewReader(data))
	if err != nil {
		return nil, fmt.Errorf("decode image: %w", err)
	}
	bounds := img.Bounds()
	w, h := bounds.Dx(), bounds.Dy()
	pixels := make([]uint8, w*h*4)
	idx := 0
	for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
		for x := bounds.Min.X; x < bounds.Max.X; x++ {
			r, g, b, a := img.At(x, y).RGBA()
			pixels[idx] = uint8(r >> 8)
			pixels[idx+1] = uint8(g >> 8)
			pixels[idx+2] = uint8(b >> 8)
			pixels[idx+3] = uint8(a >> 8)
			idx += 4
		}
	}
	return pixels, nil
}
backend/internal/cfai/text.go (new file, 253 lines)
@@ -0,0 +1,253 @@
// Text generation via Cloudflare Workers AI LLM models.
//
// API reference:
//
//	POST https://api.cloudflare.com/client/v4/accounts/{accountID}/ai/run/{model}
//	Authorization: Bearer {apiToken}
//	Content-Type: application/json
//
// Request body (all models):
//
//	{ "messages": [{"role":"system","content":"..."},{"role":"user","content":"..."}] }
//
// Response (wrapped):
//
//	{ "result": { "response": "..." }, "success": true }
package cfai

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"time"
)

// TextModel identifies a Cloudflare Workers AI text generation model.
type TextModel string

const (
	// TextModelGemma4 — Google Gemma 4, 256k context.
	TextModelGemma4 TextModel = "@cf/google/gemma-4-26b-a4b-it"
	// TextModelLlama4Scout — Meta Llama 4 Scout 17B, multimodal.
	TextModelLlama4Scout TextModel = "@cf/meta/llama-4-scout-17b-16e-instruct"
	// TextModelLlama33_70B — Meta Llama 3.3 70B, fast fp8.
	TextModelLlama33_70B TextModel = "@cf/meta/llama-3.3-70b-instruct-fp8-fast"
	// TextModelQwen3_30B — Qwen3 30B MoE, function calling.
	TextModelQwen3_30B TextModel = "@cf/qwen/qwen3-30b-a3b-fp8"
	// TextModelMistralSmall — Mistral Small 3.1 24B, 128k context.
	TextModelMistralSmall TextModel = "@cf/mistralai/mistral-small-3.1-24b-instruct"
	// TextModelQwQ32B — Qwen QwQ 32B reasoning model.
	TextModelQwQ32B TextModel = "@cf/qwen/qwq-32b"
	// TextModelDeepSeekR1 — DeepSeek R1 distill Qwen 32B.
	TextModelDeepSeekR1 TextModel = "@cf/deepseek-ai/deepseek-r1-distill-qwen-32b"
	// TextModelGemma3_12B — Google Gemma 3 12B, 80k context.
	TextModelGemma3_12B TextModel = "@cf/google/gemma-3-12b-it"
	// TextModelGPTOSS120B — OpenAI gpt-oss-120b, high reasoning.
	TextModelGPTOSS120B TextModel = "@cf/openai/gpt-oss-120b"
	// TextModelGPTOSS20B — OpenAI gpt-oss-20b, lower latency.
	TextModelGPTOSS20B TextModel = "@cf/openai/gpt-oss-20b"
	// TextModelNemotron3 — NVIDIA Nemotron 3 120B, agentic.
	TextModelNemotron3 TextModel = "@cf/nvidia/nemotron-3-120b-a12b"
	// TextModelLlama32_3B — Meta Llama 3.2 3B, lightweight.
	TextModelLlama32_3B TextModel = "@cf/meta/llama-3.2-3b-instruct"

	// DefaultTextModel is the default model used when none is specified.
	DefaultTextModel = TextModelLlama4Scout
)

// TextModelInfo describes a single text generation model.
type TextModelInfo struct {
	ID          string `json:"id"`
	Label       string `json:"label"`
	Provider    string `json:"provider"`
	ContextSize int    `json:"context_size"` // max context in tokens
	Description string `json:"description"`
}

// AllTextModels returns metadata about every supported text generation model.
func AllTextModels() []TextModelInfo {
	return []TextModelInfo{
		{
			ID: string(TextModelGemma4), Label: "Gemma 4 26B", Provider: "Google",
			ContextSize: 256000,
			Description: "Google's most intelligent open model family. 256k context, function calling.",
		},
		{
			ID: string(TextModelLlama4Scout), Label: "Llama 4 Scout 17B", Provider: "Meta",
			ContextSize: 131000,
			Description: "Natively multimodal, 16 experts. Good all-purpose model with function calling.",
		},
		{
			ID: string(TextModelLlama33_70B), Label: "Llama 3.3 70B (fp8 fast)", Provider: "Meta",
			ContextSize: 24000,
			Description: "Llama 3.3 70B quantized to fp8 for speed. Excellent instruction following.",
		},
		{
			ID: string(TextModelQwen3_30B), Label: "Qwen3 30B MoE", Provider: "Qwen",
			ContextSize: 32768,
			Description: "MoE architecture with strong reasoning and instruction following.",
		},
		{
			ID: string(TextModelMistralSmall), Label: "Mistral Small 3.1 24B", Provider: "MistralAI",
			ContextSize: 128000,
			Description: "Strong text performance with 128k context and function calling.",
		},
		{
			ID: string(TextModelQwQ32B), Label: "QwQ 32B (reasoning)", Provider: "Qwen",
			ContextSize: 24000,
			Description: "Reasoning model — thinks before answering. Slower but more accurate.",
		},
		{
			ID: string(TextModelDeepSeekR1), Label: "DeepSeek R1 32B", Provider: "DeepSeek",
			ContextSize: 80000,
			Description: "R1-distilled reasoning model. Outperforms o1-mini on many benchmarks.",
		},
		{
			ID: string(TextModelGemma3_12B), Label: "Gemma 3 12B", Provider: "Google",
			ContextSize: 80000,
			Description: "Multimodal, 128k context, multilingual (140+ languages).",
		},
		{
			ID: string(TextModelGPTOSS120B), Label: "GPT-OSS 120B", Provider: "OpenAI",
			ContextSize: 128000,
			Description: "OpenAI open-weight model for production, general purpose, high reasoning.",
		},
		{
			ID: string(TextModelGPTOSS20B), Label: "GPT-OSS 20B", Provider: "OpenAI",
			ContextSize: 128000,
			Description: "OpenAI open-weight model for lower latency and specialized use cases.",
		},
		{
			ID: string(TextModelNemotron3), Label: "Nemotron 3 120B", Provider: "NVIDIA",
			ContextSize: 256000,
			Description: "Hybrid MoE with leading accuracy for multi-agent applications.",
		},
		{
			ID: string(TextModelLlama32_3B), Label: "Llama 3.2 3B", Provider: "Meta",
			ContextSize: 80000,
			Description: "Lightweight model for simple tasks. Fast and cheap.",
		},
	}
}

// TextMessage is a single message in a chat conversation.
type TextMessage struct {
	Role    string `json:"role"`    // "system" or "user"
	Content string `json:"content"` // message text
}

// TextRequest is the input to Generate.
type TextRequest struct {
	// Model is the CF Workers AI model ID. Defaults to DefaultTextModel when empty.
	Model TextModel
	// Messages is the conversation history (system + user messages).
	Messages []TextMessage
	// MaxTokens limits the output length (0 = model default).
	MaxTokens int
}

// TextGenClient generates text via Cloudflare Workers AI LLM models.
type TextGenClient interface {
	// Generate sends a chat-style request and returns the model's response text.
	Generate(ctx context.Context, req TextRequest) (string, error)

	// Models returns metadata about all supported text generation models.
	Models() []TextModelInfo
}

// textGenHTTPClient is the concrete CF AI text generation client.
type textGenHTTPClient struct {
	accountID string
	apiToken  string
	http      *http.Client
}

// NewTextGen returns a TextGenClient for the given Cloudflare account.
func NewTextGen(accountID, apiToken string) TextGenClient {
	return &textGenHTTPClient{
		accountID: accountID,
		apiToken:  apiToken,
		http:      &http.Client{Timeout: 5 * time.Minute},
	}
}

// Generate sends messages to the model and returns the response text.
func (c *textGenHTTPClient) Generate(ctx context.Context, req TextRequest) (string, error) {
	if req.Model == "" {
		req.Model = DefaultTextModel
	}

	body := map[string]any{
		"messages": req.Messages,
	}
	if req.MaxTokens > 0 {
		body["max_tokens"] = req.MaxTokens
	}

	encoded, err := json.Marshal(body)
	if err != nil {
		return "", fmt.Errorf("cfai/text: marshal: %w", err)
	}

	url := fmt.Sprintf("https://api.cloudflare.com/client/v4/accounts/%s/ai/run/%s",
		c.accountID, string(req.Model))
	httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(encoded))
	if err != nil {
		return "", fmt.Errorf("cfai/text: build request: %w", err)
	}
	httpReq.Header.Set("Authorization", "Bearer "+c.apiToken)
	httpReq.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(httpReq)
	if err != nil {
		return "", fmt.Errorf("cfai/text: http: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		errBody, _ := io.ReadAll(resp.Body)
		msg := string(errBody)
		if len(msg) > 300 {
			msg = msg[:300]
		}
		return "", fmt.Errorf("cfai/text: model %s returned %d: %s", req.Model, resp.StatusCode, msg)
	}

	// CF AI wraps responses: { "result": { "response": "..." }, "success": true }
	// Some models (e.g. Llama 4 Scout) return response as an array:
	// { "result": { "response": [{"generated_text":"..."}] } }
	var wrapper struct {
		Result struct {
			Response json.RawMessage `json:"response"`
		} `json:"result"`
		Success bool     `json:"success"`
		Errors  []string `json:"errors"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&wrapper); err != nil {
		return "", fmt.Errorf("cfai/text: decode response: %w", err)
	}
	if !wrapper.Success {
		return "", fmt.Errorf("cfai/text: model %s error: %v", req.Model, wrapper.Errors)
	}
	// Try plain string first.
	var text string
	if err := json.Unmarshal(wrapper.Result.Response, &text); err == nil {
		return text, nil
	}
	// Fall back: array of objects with a "generated_text" field.
	var arr []struct {
		GeneratedText string `json:"generated_text"`
	}
	if err := json.Unmarshal(wrapper.Result.Response, &arr); err == nil && len(arr) > 0 {
		return arr[0].GeneratedText, nil
	}
	return "", fmt.Errorf("cfai/text: model %s: unrecognised response shape: %s", req.Model, wrapper.Result.Response)
}
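
// A minimal usage sketch (illustrative prompt and limits):
//
//	tg := NewTextGen(accountID, apiToken)
//	reply, err := tg.Generate(ctx, TextRequest{
//		Model: TextModelLlama33_70B,
//		Messages: []TextMessage{
//			{Role: "system", Content: "You are a concise assistant."},
//			{Role: "user", Content: "Summarise this chapter in one line."},
//		},
//		MaxTokens: 256,
//	})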

// Models returns all supported text generation model metadata.
func (c *textGenHTTPClient) Models() []TextModelInfo {
	return AllTextModels()
}
@@ -46,17 +46,48 @@ type MinIO struct {
	BucketAvatars string
	// BucketBrowse is the bucket that holds cached browse page snapshots (JSON).
	BucketBrowse string
	// BucketTranslations is the bucket that holds machine-translated chapter markdown.
	BucketTranslations string
}

// Kokoro holds connection settings for the Kokoro-FastAPI TTS service.
type Kokoro struct {
	// URL is the base URL of the Kokoro service, e.g. https://tts.libnovel.cc
	// An empty string disables Kokoro TTS generation.
	URL string
	// DefaultVoice is the voice used when none is specified.
	DefaultVoice string
}

// PocketTTS holds connection settings for the kyutai-labs/pocket-tts service.
type PocketTTS struct {
	// URL is the base URL of the pocket-tts service, e.g. https://pocket-tts.libnovel.cc
	// An empty string disables pocket-tts generation.
	URL string
}

// CFAI holds credentials for Cloudflare Workers AI TTS.
type CFAI struct {
	// AccountID is the Cloudflare account ID.
	// An empty string disables CF AI generation.
	AccountID string
	// APIToken is a Workers AI API token with Workers AI Read+Edit permissions.
	APIToken string
	// Model is the Workers AI TTS model ID.
	// Defaults to "@cf/deepgram/aura-2-en" when empty.
	Model string
}

// LibreTranslate holds connection settings for a self-hosted LibreTranslate instance.
type LibreTranslate struct {
	// URL is the base URL of the LibreTranslate instance, e.g. https://translate.libnovel.cc
	// An empty string disables machine translation entirely.
	URL string
	// APIKey is the optional API key for the LibreTranslate instance.
	// Leave empty if the instance runs without authentication.
	APIKey string
}

// HTTP holds settings for the HTTP server (backend only).
type HTTP struct {
	// Addr is the listen address, e.g. ":8080"
@@ -79,6 +110,32 @@ type Valkey struct {
	Addr string
}

// Redis holds connection settings for the Asynq task queue Redis instance.
// This is separate from Valkey (presign cache) — it may point to the same
// Redis or a dedicated one. An empty Addr falls back to PocketBase polling.
type Redis struct {
	// Addr is the host:port (or rediss://... URL) of the Redis instance.
	// Use the rediss:// scheme for TLS (e.g. rediss://:password@redis.libnovel.cc:6380).
	// An empty string disables Asynq and falls back to PocketBase polling.
	Addr string
	// Password is the Redis AUTH password.
	// Not needed when Addr is a full rediss:// URL that includes the password.
	Password string
}

// VAPID holds the Web Push VAPID key pair for browser push notifications.
// Generate a pair once with: go run ./cmd/genkeys (or use the web-push CLI).
// The public key is exposed via GET /api/push-subscriptions/vapid-public-key
// and embedded in the SvelteKit app via PUBLIC_VAPID_PUBLIC_KEY.
type VAPID struct {
	// PublicKey is the base64url-encoded VAPID public key (65 bytes, uncompressed EC P-256).
	PublicKey string
	// PrivateKey is the base64url-encoded VAPID private key (32 bytes).
	PrivateKey string
	// Subject is the mailto: or https: URL used as the VAPID subscriber contact.
	Subject string
}

// Runner holds settings specific to the runner/worker binary.
type Runner struct {
	// PollInterval is how often the runner checks PocketBase for pending tasks.
@@ -87,6 +144,8 @@ type Runner struct {
	MaxConcurrentScrape int
	// MaxConcurrentAudio limits simultaneous audio-generation goroutines.
	MaxConcurrentAudio int
	// MaxConcurrentTranslation limits simultaneous translation goroutines.
	MaxConcurrentTranslation int
	// WorkerID is a unique identifier for this runner instance.
	// Defaults to the system hostname.
	WorkerID string
@@ -106,17 +165,27 @@ type Runner struct {
	// is already indexed and a 24h walk would be wasteful.
	// Controlled by RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true.
	SkipInitialCatalogueRefresh bool
	// CatalogueRequestDelay is the base delay inserted between per-book metadata
	// requests during a catalogue refresh. A random jitter of up to 50% is added
	// on top. Defaults to 2s. Increase to reduce 429 pressure on novelfire.net.
	// Controlled by RUNNER_CATALOGUE_REQUEST_DELAY (e.g. "3s", "500ms").
	CatalogueRequestDelay time.Duration
}

// Config is the top-level configuration struct consumed by both binaries.
type Config struct {
	PocketBase     PocketBase
	MinIO          MinIO
	Kokoro         Kokoro
	PocketTTS      PocketTTS
	CFAI           CFAI
	LibreTranslate LibreTranslate
	HTTP           HTTP
	Runner         Runner
	Meilisearch    Meilisearch
	Valkey         Valkey
	Redis          Redis
	VAPID          VAPID
	// LogLevel is one of "debug", "info", "warn", "error".
	LogLevel string
}
@@ -139,16 +208,17 @@ func Load() Config {
		},

		MinIO: MinIO{
			Endpoint:           envOr("MINIO_ENDPOINT", "localhost:9000"),
			PublicEndpoint:     envOr("MINIO_PUBLIC_ENDPOINT", ""),
			AccessKey:          envOr("MINIO_ACCESS_KEY", "admin"),
			SecretKey:          envOr("MINIO_SECRET_KEY", "changeme123"),
			UseSSL:             envBool("MINIO_USE_SSL", false),
			PublicUseSSL:       envBool("MINIO_PUBLIC_USE_SSL", true),
			BucketChapters:     envOr("MINIO_BUCKET_CHAPTERS", "chapters"),
			BucketAudio:        envOr("MINIO_BUCKET_AUDIO", "audio"),
			BucketAvatars:      envOr("MINIO_BUCKET_AVATARS", "avatars"),
			BucketBrowse:       envOr("MINIO_BUCKET_BROWSE", "catalogue"),
			BucketTranslations: envOr("MINIO_BUCKET_TRANSLATIONS", "translations"),
		},

		Kokoro: Kokoro{
@@ -156,6 +226,21 @@ func Load() Config {
			DefaultVoice: envOr("KOKORO_VOICE", "af_bella"),
		},

		PocketTTS: PocketTTS{
			URL: envOr("POCKET_TTS_URL", ""),
		},

		CFAI: CFAI{
			AccountID: envOr("CFAI_ACCOUNT_ID", ""),
			APIToken:  envOr("CFAI_API_TOKEN", ""),
			Model:     envOr("CFAI_TTS_MODEL", ""),
		},

		LibreTranslate: LibreTranslate{
			URL:    envOr("LIBRETRANSLATE_URL", ""),
			APIKey: envOr("LIBRETRANSLATE_API_KEY", ""),
		},

		HTTP: HTTP{
			Addr: envOr("BACKEND_HTTP_ADDR", ":8080"),
		},
@@ -164,12 +249,14 @@ func Load() Config {
			PollInterval:                envDuration("RUNNER_POLL_INTERVAL", 30*time.Second),
			MaxConcurrentScrape:         envInt("RUNNER_MAX_CONCURRENT_SCRAPE", 1),
			MaxConcurrentAudio:          envInt("RUNNER_MAX_CONCURRENT_AUDIO", 1),
			MaxConcurrentTranslation:    envInt("RUNNER_MAX_CONCURRENT_TRANSLATION", 1),
			WorkerID:                    envOr("RUNNER_WORKER_ID", workerID),
			Workers:                     envInt("RUNNER_WORKERS", 0), // 0 → runtime.NumCPU()
			Timeout:                     envDuration("RUNNER_TIMEOUT", 90*time.Second),
			MetricsAddr:                 envOr("RUNNER_METRICS_ADDR", ":9091"),
			CatalogueRefreshInterval:    envDuration("RUNNER_CATALOGUE_REFRESH_INTERVAL", 0),
			SkipInitialCatalogueRefresh: envBool("RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH", false),
			CatalogueRequestDelay:       envDuration("RUNNER_CATALOGUE_REQUEST_DELAY", 2*time.Second),
		},

		Meilisearch: Meilisearch{
@@ -180,6 +267,17 @@ func Load() Config {
		Valkey: Valkey{
			Addr: envOr("VALKEY_ADDR", ""),
		},

		Redis: Redis{
			Addr:     envOr("REDIS_ADDR", ""),
			Password: envOr("REDIS_PASSWORD", ""),
		},

		VAPID: VAPID{
			PublicKey:  envOr("VAPID_PUBLIC_KEY", ""),
			PrivateKey: envOr("VAPID_PRIVATE_KEY", ""),
			Subject:    envOr("VAPID_SUBJECT", "mailto:admin@libnovel.cc"),
		},
	}
}
@@ -24,6 +24,9 @@ type BookMeta struct {
	// updated in PocketBase. Populated on read; not sent on write (PocketBase
	// manages its own updated field).
	MetaUpdated int64 `json:"meta_updated,omitempty"`
	// Archived is true when the book has been soft-deleted by an admin.
	// Archived books are excluded from all public search and catalogue responses.
	Archived bool `json:"archived,omitempty"`
}

// CatalogueEntry is a lightweight book reference returned by catalogue pages.
@@ -60,6 +63,20 @@ type RankingItem struct {
	Updated time.Time `json:"updated,omitempty"`
}

// ── Voice types ───────────────────────────────────────────────────────────────

// Voice describes a single text-to-speech voice available in the system.
type Voice struct {
	// ID is the voice identifier passed to TTS clients (e.g. "af_bella", "alba").
	ID string `json:"id"`
	// Engine is "kokoro" or "pocket-tts".
	Engine string `json:"engine"`
	// Lang is the primary language tag (e.g. "en-us", "en-gb", "en", "es", "fr").
	Lang string `json:"lang"`
	// Gender is "f" or "m".
	Gender string `json:"gender"`
}

// ── Storage record types ──────────────────────────────────────────────────────

// ChapterInfo is a lightweight chapter descriptor stored in the index.
@@ -109,6 +126,8 @@ type ScrapeTask struct {

// ScrapeResult is the outcome reported by the runner after finishing a ScrapeTask.
type ScrapeResult struct {
	// Slug is the book slug that was scraped. Empty for catalogue tasks.
	Slug            string `json:"slug,omitempty"`
	BooksFound      int    `json:"books_found"`
	ChaptersScraped int    `json:"chapters_scraped"`
	ChaptersSkipped int    `json:"chapters_skipped"`
@@ -135,3 +154,81 @@ type AudioResult struct {
	ObjectKey    string `json:"object_key,omitempty"`
	ErrorMessage string `json:"error_message,omitempty"`
}

// TranslationTask represents a machine-translation job stored in PocketBase.
type TranslationTask struct {
	ID           string     `json:"id"`
	CacheKey     string     `json:"cache_key"` // "{slug}/{chapter}/{lang}"
	Slug         string     `json:"slug"`
	Chapter      int        `json:"chapter"`
	Lang         string     `json:"lang"`
	WorkerID     string     `json:"worker_id,omitempty"`
	Status       TaskStatus `json:"status"`
	ErrorMessage string     `json:"error_message,omitempty"`
	Started      time.Time  `json:"started"`
	Finished     time.Time  `json:"finished,omitempty"`
}
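
// An illustrative CacheKey following the "{slug}/{chapter}/{lang}" shape
// documented above (values are hypothetical): "shadow-slave/12/es".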

// TranslationResult is the outcome reported by the runner after finishing a TranslationTask.
type TranslationResult struct {
	ObjectKey    string `json:"object_key,omitempty"`
	ErrorMessage string `json:"error_message,omitempty"`
}

// ImportTask represents a PDF/EPUB import job stored in PocketBase.
type ImportTask struct {
	ID              string     `json:"id"`
	Slug            string     `json:"slug"` // derived from filename
	Title           string     `json:"title"`
	FileName        string     `json:"file_name"`
	FileType        string     `json:"file_type"` // "pdf" or "epub"
	ObjectKey       string     `json:"object_key,omitempty"`   // MinIO path to uploaded file
	ChaptersKey     string     `json:"chapters_key,omitempty"` // MinIO path to pre-parsed chapters JSON
	Author          string     `json:"author,omitempty"`
	CoverURL        string     `json:"cover_url,omitempty"`
	Genres          []string   `json:"genres,omitempty"`
	Summary         string     `json:"summary,omitempty"`
	BookStatus      string     `json:"book_status,omitempty"` // "ongoing" | "completed" | "hiatus"
	WorkerID        string     `json:"worker_id,omitempty"`
	InitiatorUserID string     `json:"initiator_user_id,omitempty"` // PocketBase user ID who submitted the import
	Status          TaskStatus `json:"status"`
	ChaptersDone    int        `json:"chapters_done"`
	ChaptersTotal   int        `json:"chapters_total"`
	ErrorMessage    string     `json:"error_message,omitempty"`
	Started         time.Time  `json:"started"`
	Finished        time.Time  `json:"finished,omitempty"`
}

// ImportResult is the outcome reported by the runner after finishing an ImportTask.
type ImportResult struct {
	Slug             string `json:"slug,omitempty"`
	ChaptersImported int    `json:"chapters_imported"`
	ErrorMessage     string `json:"error_message,omitempty"`
}

// AIJob represents an AI generation task tracked in PocketBase (ai_jobs collection).
type AIJob struct {
	ID string `json:"id"`
	// Kind is one of: "chapter-names", "batch-covers", "chapter-covers", "refresh-metadata".
	Kind string `json:"kind"`
	// Slug is the book slug for per-book jobs; empty for catalogue-wide jobs.
	Slug   string     `json:"slug"`
	Status TaskStatus `json:"status"`
	// FromItem is the first item to process (chapter number, or 0-based book index).
	// 0 = start from the beginning.
	FromItem int `json:"from_item"`
	// ToItem is the last item to process (inclusive). 0 = process all.
	ToItem int `json:"to_item"`
	// ItemsDone is the cumulative count of successfully processed items.
	ItemsDone int `json:"items_done"`
	// ItemsTotal is the total number of items in this job.
	ItemsTotal int    `json:"items_total"`
	Model      string `json:"model"`
	// Payload is a JSON-encoded string with job-specific parameters
	// (e.g. naming pattern for chapter-names, num_steps for batch-covers).
	Payload      string    `json:"payload"`
	ErrorMessage string    `json:"error_message,omitempty"`
	Started      time.Time `json:"started,omitempty"`
	Finished     time.Time `json:"finished,omitempty"`
	HeartbeatAt  time.Time `json:"heartbeat_at,omitempty"`
}
@@ -21,6 +21,17 @@ type Client interface {
	// GenerateAudio synthesises text using voice and returns raw MP3 bytes.
	GenerateAudio(ctx context.Context, text, voice string) ([]byte, error)

	// StreamAudioMP3 synthesises text and returns an io.ReadCloser that streams
	// MP3-encoded audio incrementally. Uses the kokoro-fastapi streaming mode
	// (stream:true), which delivers MP3 frames as they are generated without
	// waiting for the full output. The caller must always close the ReadCloser.
	StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error)

	// StreamAudioWAV synthesises text and returns an io.ReadCloser that streams
	// WAV-encoded audio incrementally using kokoro-fastapi's streaming mode with
	// response_format:"wav". The caller must always close the ReadCloser.
	StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error)

	// ListVoices returns the available voice IDs. Falls back to an empty slice
	// on error — callers should treat an empty list as "service unavailable".
	ListVoices(ctx context.Context) ([]string, error)
@@ -118,6 +129,90 @@ func (c *httpClient) GenerateAudio(ctx context.Context, text, voice string) ([]b
	return data, nil
}

// StreamAudioMP3 calls POST /v1/audio/speech with stream:true and returns an
// io.ReadCloser that delivers MP3 frames as kokoro generates them.
// kokoro-fastapi emits raw MP3 bytes when stream mode is enabled — no download
// redirect; the response body IS the audio stream.
func (c *httpClient) StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error) {
	if text == "" {
		return nil, fmt.Errorf("kokoro: empty text")
	}
	if voice == "" {
		voice = "af_bella"
	}

	reqBody, err := json.Marshal(map[string]any{
		"model":           "kokoro",
		"input":           text,
		"voice":           voice,
		"response_format": "mp3",
		"speed":           1.0,
		"stream":          true,
	})
	if err != nil {
		return nil, fmt.Errorf("kokoro: marshal stream request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost,
		c.baseURL+"/v1/audio/speech", bytes.NewReader(reqBody))
	if err != nil {
		return nil, fmt.Errorf("kokoro: build stream request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("kokoro: stream request: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		_, _ = io.Copy(io.Discard, resp.Body)
		resp.Body.Close()
		return nil, fmt.Errorf("kokoro: stream returned %d", resp.StatusCode)
	}
	return resp.Body, nil
}
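
// A minimal streaming sketch (illustrative; kc is a Client from this package
// and w is any io.Writer, e.g. an http.ResponseWriter for live playback):
//
//	rc, err := kc.StreamAudioMP3(ctx, chapterText, "af_bella")
//	if err != nil {
//		return err
//	}
//	defer rc.Close()
//	_, err = io.Copy(w, rc)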

// StreamAudioWAV calls POST /v1/audio/speech with stream:true and response_format:wav,
// returning an io.ReadCloser that delivers WAV bytes as kokoro generates them.
func (c *httpClient) StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error) {
	if text == "" {
		return nil, fmt.Errorf("kokoro: empty text")
	}
	if voice == "" {
		voice = "af_bella"
	}

	reqBody, err := json.Marshal(map[string]any{
		"model":           "kokoro",
		"input":           text,
		"voice":           voice,
		"response_format": "wav",
		"speed":           1.0,
		"stream":          true,
	})
	if err != nil {
		return nil, fmt.Errorf("kokoro: marshal wav stream request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost,
		c.baseURL+"/v1/audio/speech", bytes.NewReader(reqBody))
	if err != nil {
		return nil, fmt.Errorf("kokoro: build wav stream request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return nil, fmt.Errorf("kokoro: wav stream request: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		_, _ = io.Copy(io.Discard, resp.Body)
		resp.Body.Close()
		return nil, fmt.Errorf("kokoro: wav stream returned %d", resp.StatusCode)
	}
	return resp.Body, nil
}

// ListVoices calls GET /v1/audio/voices and returns the list of voice IDs.
func (c *httpClient) ListVoices(ctx context.Context) ([]string, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet,
backend/internal/libretranslate/client.go (new file, 181 lines)
@@ -0,0 +1,181 @@
// Package libretranslate provides an HTTP client for a self-hosted
// LibreTranslate instance. It handles text chunking, concurrent translation,
// and reassembly so callers can pass arbitrarily long markdown strings.
package libretranslate

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
	"sync"
	"time"
)

const (
	// maxChunkBytes is the target maximum size of each chunk sent to
	// LibreTranslate. LibreTranslate's default limit is 5000 characters;
	// we stay comfortably below that.
	maxChunkBytes = 4500
	// concurrency is the number of simultaneous translation requests per chapter.
	concurrency = 3
)

// Client translates text via LibreTranslate.
// A nil Client is valid — all calls return the original text unchanged.
type Client interface {
	// Translate translates text from sourceLang to targetLang.
	// text is a raw markdown string. The returned string is the translated
	// markdown, reassembled in original paragraph order.
	Translate(ctx context.Context, text, sourceLang, targetLang string) (string, error)
}

// New returns a Client for the given LibreTranslate URL.
// Returns nil when url is empty, which disables translation.
func New(url, apiKey string) Client {
	if url == "" {
		return nil
	}
	return &httpClient{
		url:    strings.TrimRight(url, "/"),
		apiKey: apiKey,
		http:   &http.Client{Timeout: 60 * time.Second},
	}
}

type httpClient struct {
	url    string
	apiKey string
	http   *http.Client
}

// Translate splits text into paragraph chunks, translates them concurrently
// (up to concurrency goroutines), and reassembles in order.
func (c *httpClient) Translate(ctx context.Context, text, sourceLang, targetLang string) (string, error) {
	paragraphs := splitParagraphs(text)
	if len(paragraphs) == 0 {
		return text, nil
	}
	chunks := binChunks(paragraphs, maxChunkBytes)

	translated := make([]string, len(chunks))
	errs := make([]error, len(chunks))

	sem := make(chan struct{}, concurrency)
	var wg sync.WaitGroup

	for i, chunk := range chunks {
		wg.Add(1)
		sem <- struct{}{}
		go func(idx int, chunkText string) {
			defer wg.Done()
			defer func() { <-sem }()
			result, err := c.translateChunk(ctx, chunkText, sourceLang, targetLang)
			translated[idx] = result
			errs[idx] = err
		}(i, chunk)
	}
	wg.Wait()

	for _, err := range errs {
		if err != nil {
			return "", err
		}
	}

	return strings.Join(translated, "\n\n"), nil
}
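
// A minimal usage sketch (illustrative; the URL mirrors the example in the
// config comments, and New returns nil when the URL is empty):
//
//	lt := New("https://translate.libnovel.cc", "")
//	if lt != nil {
//		translated, err := lt.Translate(ctx, markdown, "en", "es")
//		// use translated; err is non-nil if any chunk failed
//	}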

// translateChunk sends a single POST /translate request.
func (c *httpClient) translateChunk(ctx context.Context, text, sourceLang, targetLang string) (string, error) {
	reqBody := map[string]string{
		"q":      text,
		"source": sourceLang,
		"target": targetLang,
		"format": "html",
	}
	if c.apiKey != "" {
		reqBody["api_key"] = c.apiKey
	}

	b, err := json.Marshal(reqBody)
	if err != nil {
		return "", fmt.Errorf("libretranslate: marshal request: %w", err)
	}

	req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.url+"/translate", bytes.NewReader(b))
	if err != nil {
		return "", fmt.Errorf("libretranslate: build request: %w", err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := c.http.Do(req)
	if err != nil {
		return "", fmt.Errorf("libretranslate: request: %w", err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		var errBody struct {
			Error string `json:"error"`
		}
		_ = json.NewDecoder(resp.Body).Decode(&errBody)
		return "", fmt.Errorf("libretranslate: status %d: %s", resp.StatusCode, errBody.Error)
	}

	var result struct {
		TranslatedText string `json:"translatedText"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", fmt.Errorf("libretranslate: decode response: %w", err)
	}
	return result.TranslatedText, nil
}

// splitParagraphs splits markdown text on blank lines, preserving non-empty paragraphs.
func splitParagraphs(text string) []string {
	// Normalise line endings.
	text = strings.ReplaceAll(text, "\r\n", "\n")
	// Split on double newlines (blank lines between paragraphs).
	parts := strings.Split(text, "\n\n")
	var paragraphs []string
	for _, p := range parts {
		p = strings.TrimSpace(p)
		if p != "" {
			paragraphs = append(paragraphs, p)
		}
	}
	return paragraphs
}

// binChunks groups paragraphs into chunks each at most maxBytes in length.
// Each chunk is a single string with paragraphs joined by "\n\n".
func binChunks(paragraphs []string, maxBytes int) []string {
	var chunks []string
	var current strings.Builder

	for _, p := range paragraphs {
		needed := len(p)
		if current.Len() > 0 {
			needed += 2 // for the "\n\n" separator
		}

		if current.Len()+needed > maxBytes && current.Len() > 0 {
			// Flush the current chunk.
			chunks = append(chunks, current.String())
			current.Reset()
		}

		if current.Len() > 0 {
			current.WriteString("\n\n")
		}
		current.WriteString(p)
	}

	if current.Len() > 0 {
		chunks = append(chunks, current.String())
	}
	return chunks
}
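
// Worked example: with maxBytes=10, paragraphs ["aaaa", "bbbb", "cc"] bin as
// ["aaaa\n\nbbbb", "cc"]. "aaaa" plus the 2-byte separator plus "bbbb" is
// exactly 10 bytes (allowed, since only an overflow triggers a flush), while
// adding "cc" would need 4 more bytes and so starts a new chunk.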
@@ -32,11 +32,15 @@ type Client interface {
	// BookExists reports whether a book with the given slug is already in the
	// index. Used by the catalogue refresh to skip re-indexing known books.
	BookExists(ctx context.Context, slug string) bool
	// DeleteBook removes a book document from the search index by slug.
	DeleteBook(ctx context.Context, slug string) error
	// Search returns up to limit books matching query.
	// Archived books are always excluded.
	Search(ctx context.Context, query string, limit int) ([]domain.BookMeta, error)
	// Catalogue queries books with optional filters, sort, and pagination.
	// Returns books, the total hit count for pagination, and a FacetResult
	// with available genre and status values from the index.
	// Archived books are always excluded.
	Catalogue(ctx context.Context, q CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error)
}
@@ -99,7 +103,7 @@ func Configure(host, apiKey string) error {
		return fmt.Errorf("meili: update searchable attributes: %w", err)
	}

	filterable := []interface{}{"status", "genres", "archived"}
	if _, err := idx.UpdateFilterableAttributes(&filterable); err != nil {
		return fmt.Errorf("meili: update filterable attributes: %w", err)
	}
@@ -128,6 +132,9 @@ type bookDoc struct {
	// MetaUpdated is the Unix timestamp (seconds) of the last PocketBase update.
	// Used for sort=update ("recently updated" ordering).
	MetaUpdated int64 `json:"meta_updated"`
	// Archived is true when the book has been soft-deleted by an admin.
	// Used as a filter to exclude archived books from all search results.
	Archived bool `json:"archived"`
}

func toDoc(b domain.BookMeta) bookDoc {
@@ -144,6 +151,7 @@ func toDoc(b domain.BookMeta) bookDoc {
		Rank:        b.Ranking,
		Rating:      b.Rating,
		MetaUpdated: b.MetaUpdated,
		Archived:    b.Archived,
	}
}

@@ -161,6 +169,7 @@ func fromDoc(d bookDoc) domain.BookMeta {
		Ranking:     d.Rank,
		Rating:      d.Rating,
		MetaUpdated: d.MetaUpdated,
		Archived:    d.Archived,
	}
}

@@ -184,13 +193,24 @@ func (c *MeiliClient) BookExists(_ context.Context, slug string) bool {
	return err == nil && doc.Slug != ""
}

// DeleteBook removes a book document from the index by slug.
// The operation is fire-and-forget (Meilisearch processes tasks asynchronously).
func (c *MeiliClient) DeleteBook(_ context.Context, slug string) error {
	if _, err := c.idx.DeleteDocument(slug, nil); err != nil {
		return fmt.Errorf("meili: delete book %q: %w", slug, err)
	}
	return nil
}

// Search returns books matching query, up to limit results.
// Archived books are always excluded.
func (c *MeiliClient) Search(_ context.Context, query string, limit int) ([]domain.BookMeta, error) {
	if limit <= 0 {
		limit = 20
	}
	res, err := c.idx.Search(query, &meilisearch.SearchRequest{
		Limit:  int64(limit),
		Filter: "archived = false",
	})
	if err != nil {
		return nil, fmt.Errorf("meili: search %q: %w", query, err)
@@ -231,17 +251,15 @@ func (c *MeiliClient) Catalogue(_ context.Context, q CatalogueQuery) ([]domain.B
		Facets: []string{"genres", "status"},
	}

	// Build filter — always exclude archived books.
	filters := []string{"archived = false"}
	if q.Genre != "" && q.Genre != "all" {
		filters = append(filters, fmt.Sprintf("genres = %q", q.Genre))
	}
	if q.Status != "" && q.Status != "all" {
		filters = append(filters, fmt.Sprintf("status = %q", q.Status))
	}
	req.Filter = strings.Join(filters, " AND ")

	// Map UI sort tokens to Meilisearch sort expressions.
	switch q.Sort {
@@ -318,7 +336,8 @@ func sortStrings(s []string) {
type NoopClient struct{}

func (NoopClient) UpsertBook(_ context.Context, _ domain.BookMeta) error { return nil }
func (NoopClient) BookExists(_ context.Context, _ string) bool           { return false }
func (NoopClient) DeleteBook(_ context.Context, _ string) error          { return nil }
func (NoopClient) Search(_ context.Context, _ string, _ int) ([]domain.BookMeta, error) {
	return nil, nil
}
@@ -13,6 +13,7 @@ import (
	"errors"
	"fmt"
	"log/slog"
	"math/rand"
	"net/url"
	"path"
	"strconv"
@@ -55,6 +56,9 @@ func (s *Scraper) SourceName() string { return "novelfire.net" }
// ── CatalogueProvider ─────────────────────────────────────────────────────────

// ScrapeCatalogue streams all CatalogueEntry values across all catalogue pages.
// Each page fetch uses retryGet with 429-aware exponential backoff.
// A small inter-page delay (cataloguePageDelay) is inserted between requests to
// avoid hammering the server when paging through hundreds of catalogue pages.
func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueEntry, <-chan error) {
	entries := make(chan domain.CatalogueEntry, 64)
	errs := make(chan error, 16)
@@ -73,8 +77,18 @@ func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueE
		default:
		}

		// Polite inter-page delay — skipped on the very first page.
		if page > 1 {
			jitter := time.Duration(500+rand.Intn(1000)) * time.Millisecond
			select {
			case <-ctx.Done():
				return
			case <-time.After(jitter):
			}
		}

		s.log.Info("scraping catalogue page", "page", page, "url", pageURL)
		raw, err := retryGet(ctx, s.log, s.client, pageURL, 9, 10*time.Second)
		if err != nil {
			errs <- fmt.Errorf("catalogue page %d: %w", page, err)
			return
@@ -139,10 +153,11 @@ func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueE
// ── MetadataProvider ──────────────────────────────────────────────────────────

// ScrapeMetadata fetches and parses book metadata from the book's landing page.
// Uses retryGet with 429-aware exponential backoff (up to 9 attempts).
func (s *Scraper) ScrapeMetadata(ctx context.Context, bookURL string) (domain.BookMeta, error) {
	s.log.Debug("metadata fetch starting", "url", bookURL)

	raw, err := retryGet(ctx, s.log, s.client, bookURL, 9, 10*time.Second)
	if err != nil {
		return domain.BookMeta{}, fmt.Errorf("metadata fetch %s: %w", bookURL, err)
	}
@@ -163,12 +178,26 @@ func (s *Scraper) ScrapeMetadata(ctx context.Context, bookURL string) (domain.Bo
		}
	}

	// Status: novelfire renders <strong class="ongoing">Ongoing</strong> (or
	// "completed", "hiatus") inside the .header-stats block. We take the text
	// content and lowercase it so the index value is always canonical lowercase.
	var status string
	for _, cls := range []string{"ongoing", "completed", "hiatus"} {
		if v := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "strong", Class: cls}); v != "" {
			status = strings.ToLower(strings.TrimSpace(v))
			break
		}
	}

	// Genres: novelfire renders <div class="categories"><ul><li><a class="property-item">Genre</a>.
	// Each <a class="property-item"> is one genre tag. Lowercase for index consistency.
	var genres []string
	if categoriesNode := htmlutil.FindFirst(root, scraper.Selector{Tag: "div", Class: "categories"}); categoriesNode != nil {
		for _, v := range htmlutil.ExtractAll(categoriesNode, scraper.Selector{Tag: "a", Class: "property-item", Multiple: true}) {
			if v != "" {
				genres = append(genres, strings.ToLower(strings.TrimSpace(v)))
			}
		}
	}

	summary := htmlutil.ExtractFirst(root, scraper.Selector{Tag: "div", Class: "summary"})
@@ -212,7 +241,7 @@ func (s *Scraper) ScrapeChapterList(ctx context.Context, bookURL string, upTo in
	}

	pageURL := fmt.Sprintf("%s?page=%d", baseChapterURL, page)
	s.log.Debug("scraping chapter list", "page", page, "url", pageURL)

	raw, err := retryGet(ctx, s.log, s.client, pageURL, 9, 6*time.Second)
	if err != nil {
@@ -2,6 +2,7 @@ package novelfire

import (
	"context"
	"log/slog"
	"testing"
)

@@ -100,6 +101,56 @@ func TestRetryGet_EventualSuccess(t *testing.T) {
	}
}

// TestParseMetadataSelectors verifies that the status and genres selectors
// match the current novelfire.net HTML structure.
func TestParseMetadataSelectors(t *testing.T) {
	// Minimal HTML reproducing the relevant novelfire.net book page structure.
	const html = `<!DOCTYPE html>
<html><body>
<h1 class="novel-title">Shadow Slave</h1>
<span class="author">Guiltythree</span>
<figure class="cover"><img src="https://cdn.example.com/cover.jpg"></figure>
<div class="header-stats">
<span><strong>123</strong><small>Chapters</small></span>
<span> <strong class="ongoing">Ongoing</strong> <small>Status</small></span>
</div>
<div class="categories">
<h4>Genres</h4>
<ul>
<li><a href="/genre-fantasy/..." class="property-item">Fantasy</a></li>
<li><a href="/genre-action/..." class="property-item">Action</a></li>
<li><a href="/genre-adventure/..." class="property-item">Adventure</a></li>
</ul>
</div>
<span class="chapter-count">123 Chapters</span>
</body></html>`

	stub := newStubClient()
	stub.setFn("https://novelfire.net/book/shadow-slave", func() (string, error) {
		return html, nil
	})

	s := &Scraper{client: stub, log: slog.Default()}
	meta, err := s.ScrapeMetadata(t.Context(), "https://novelfire.net/book/shadow-slave")
	if err != nil {
		t.Fatalf("ScrapeMetadata: %v", err)
	}

	if meta.Status != "ongoing" {
		t.Errorf("status = %q, want %q", meta.Status, "ongoing")
	}

	wantGenres := []string{"fantasy", "action", "adventure"}
	if len(meta.Genres) != len(wantGenres) {
		t.Fatalf("genres = %v, want %v", meta.Genres, wantGenres)
	}
	for i, g := range meta.Genres {
		if g != wantGenres[i] {
			t.Errorf("genres[%d] = %q, want %q", i, g, wantGenres[i])
		}
	}
}

// ── minimal stub client for tests ─────────────────────────────────────────────

type stubClient struct {
@@ -68,7 +68,7 @@ func New(cfg Config, novel scraper.NovelScraper, store bookstore.BookWriter, log
// Returns a ScrapeResult with counters. The result's ErrorMessage is non-empty
// if the run failed at the metadata or chapter-list level.
func (o *Orchestrator) RunBook(ctx context.Context, task domain.ScrapeTask) domain.ScrapeResult {
	o.log.Debug("orchestrator: RunBook starting",
		"task_id", task.ID,
		"kind", task.Kind,
		"url", task.TargetURL,
@@ -90,6 +90,7 @@ func (o *Orchestrator) RunBook(ctx context.Context, task domain.ScrapeTask) doma
		result.Errors++
		return result
	}
	result.Slug = meta.Slug

	if err := o.store.WriteMetadata(ctx, meta); err != nil {
		o.log.Error("metadata write failed", "slug", meta.Slug, "err", err)
@@ -97,13 +98,14 @@ func (o *Orchestrator) RunBook(ctx context.Context, task domain.ScrapeTask) doma
		result.Errors++
	} else {
		result.BooksFound = 1
		// Fire optional post-metadata hook (e.g. Meilisearch indexing).
		if o.postMetadata != nil {
			o.postMetadata(ctx, meta)
		}
	}

	o.log.Debug("metadata saved", "slug", meta.Slug, "title", meta.Title)

	// ── Step 2: Chapter list ──────────────────────────────────────────────────
	refs, err := o.novel.ScrapeChapterList(ctx, task.TargetURL, task.ToChapter)
@@ -114,7 +116,7 @@ func (o *Orchestrator) RunBook(ctx context.Context, task domain.ScrapeTask) doma
		return result
	}

	o.log.Debug("chapter list fetched", "slug", meta.Slug, "chapters", len(refs))

	// Persist chapter refs (without text) so the index exists early.
	if wErr := o.store.WriteChapterRefs(ctx, meta.Slug, refs); wErr != nil {

@@ -89,6 +89,8 @@ func (s *stubStore) WriteChapterRefs(_ context.Context, _ string, _ []domain.Cha
	return nil
}

func (s *stubStore) DeduplicateChapters(_ context.Context, _ string) (int, error) { return 0, nil }

func (s *stubStore) ChapterExists(_ context.Context, slug string, ref domain.ChapterRef) bool {
	s.mu.Lock()
	defer s.mu.Unlock()
backend/internal/otelsetup/otelsetup.go (new file, 120 lines)
@@ -0,0 +1,120 @@
// Package otelsetup initialises the OpenTelemetry SDK for the LibNovel backend.
//
// It reads two environment variables:
//
//	OTEL_EXPORTER_OTLP_ENDPOINT — OTLP/HTTP endpoint; accepts either a full
//	                              URL ("https://otel.example.com") or a bare
//	                              host[:port] ("otel-collector:4318").
//	                              TLS is used when the value starts with "https://".
//	OTEL_SERVICE_NAME           — service name reported in traces (default: "backend")
//
// When OTEL_EXPORTER_OTLP_ENDPOINT is empty the function is a no-op: it
// returns a nil shutdown func and the default slog.Logger, so callers never
// need to branch on it.
//
// Usage in main.go:
//
//	shutdown, log, err := otelsetup.Init(ctx, version)
//	if err != nil { return err }
//	if shutdown != nil { defer shutdown() }
package otelsetup

import (
    "context"
    "fmt"
    "log/slog"
    "os"
    "strings"
    "time"

    "go.opentelemetry.io/contrib/bridges/otelslog"
    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp"
    "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp"
    otellog "go.opentelemetry.io/otel/log/global"
    "go.opentelemetry.io/otel/sdk/log"
    "go.opentelemetry.io/otel/sdk/resource"
    sdktrace "go.opentelemetry.io/otel/sdk/trace"
    semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
)

// Init sets up TracerProvider and LoggerProvider that export via OTLP/HTTP.
//
// Returns:
//   - shutdown: flushes and stops both providers (nil when OTel is disabled).
//   - logger: an slog.Logger bridged to OTel logs (falls back to default when disabled).
//   - err: non-nil only on SDK initialisation failure.
func Init(ctx context.Context, version string) (shutdown func(), logger *slog.Logger, err error) {
    rawEndpoint := os.Getenv("OTEL_EXPORTER_OTLP_ENDPOINT")
    if rawEndpoint == "" {
        return nil, slog.Default(), nil // OTel disabled — not an error
    }

    // WithEndpoint expects a host[:port] value — no scheme.
    // Support both "https://otel.example.com" and "otel-collector:4318".
    useTLS := strings.HasPrefix(rawEndpoint, "https://")
    endpoint := strings.TrimPrefix(rawEndpoint, "https://")
    endpoint = strings.TrimPrefix(endpoint, "http://")

    serviceName := os.Getenv("OTEL_SERVICE_NAME")
    if serviceName == "" {
        serviceName = "backend"
    }

    // ── Shared resource ───────────────────────────────────────────────────────
    res, err := resource.New(ctx,
        resource.WithAttributes(
            semconv.ServiceName(serviceName),
            semconv.ServiceVersion(version),
        ),
    )
    if err != nil {
        return nil, slog.Default(), fmt.Errorf("otelsetup: create resource: %w", err)
    }

    // ── Trace provider ────────────────────────────────────────────────────────
    traceOpts := []otlptracehttp.Option{otlptracehttp.WithEndpoint(endpoint)}
    if !useTLS {
        traceOpts = append(traceOpts, otlptracehttp.WithInsecure())
    }
    traceExp, err := otlptracehttp.New(ctx, traceOpts...)
    if err != nil {
        return nil, slog.Default(), fmt.Errorf("otelsetup: create OTLP trace exporter: %w", err)
    }

    tp := sdktrace.NewTracerProvider(
        sdktrace.WithBatcher(traceExp),
        sdktrace.WithResource(res),
        sdktrace.WithSampler(sdktrace.ParentBased(sdktrace.TraceIDRatioBased(0.2))),
    )
    otel.SetTracerProvider(tp)

    // ── Log provider ──────────────────────────────────────────────────────────
    logOpts := []otlploghttp.Option{otlploghttp.WithEndpoint(endpoint)}
    if !useTLS {
        logOpts = append(logOpts, otlploghttp.WithInsecure())
    }
    logExp, err := otlploghttp.New(ctx, logOpts...)
    if err != nil {
        return nil, slog.Default(), fmt.Errorf("otelsetup: create OTLP log exporter: %w", err)
    }

    lp := log.NewLoggerProvider(
        log.WithProcessor(log.NewBatchProcessor(logExp)),
        log.WithResource(res),
    )
    otellog.SetLoggerProvider(lp)

    // Bridge slog → OTel logs. Structured fields and trace IDs are forwarded
    // automatically; Grafana can correlate log lines with Tempo traces.
    otelLogger := otelslog.NewLogger(serviceName)

    shutdown = func() {
        shutCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
        defer cancel()
        _ = tp.Shutdown(shutCtx)
        _ = lp.Shutdown(shutCtx)
    }

    return shutdown, otelLogger, nil
}
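
For context, a minimal wiring sketch for a hypothetical main.go; the version string and the signal handling below are illustrative assumptions, not part of these commits:

package main

import (
    "context"
    "os"
    "os/signal"

    "github.com/libnovel/backend/internal/otelsetup"
)

func main() {
    // Illustrative only: a signal-aware context so shutdown() runs on Ctrl-C.
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
    defer stop()

    shutdown, log, err := otelsetup.Init(ctx, "v0.0.0-dev") // version string assumed
    if err != nil {
        panic(err)
    }
    if shutdown != nil {
        defer shutdown() // flush pending spans/logs before exit
    }
    log.Info("service starting") // exported via OTLP when configured, stderr otherwise
}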

backend/internal/pockettts/client.go (new file, 254 lines)
@@ -0,0 +1,254 @@
// Package pockettts provides a client for the kyutai-labs/pocket-tts TTS service.
//
// pocket-tts exposes a non-OpenAI API:
//
//	POST /tts    (multipart form: text, voice_url) → streaming WAV
//	GET  /health → {"status":"healthy"}
//
// GenerateAudio streams the WAV response and transcodes it to MP3 using ffmpeg,
// so callers receive MP3 bytes — the same format as the kokoro client — and the
// rest of the pipeline does not need to care which TTS engine was used.
//
// StreamAudioMP3 is the streaming variant: it returns an io.ReadCloser that
// yields MP3-encoded audio incrementally as pocket-tts generates it, without
// buffering the full output.
//
// Predefined voices (pass the bare name as the voice parameter):
//
//	alba, marius, javert, jean, fantine, cosette, eponine, azelma,
//	anna, vera, charles, paul, george, mary, jane, michael, eve,
//	bill_boerst, peter_yearsley, stuart_bell
package pockettts

import (
    "bytes"
    "context"
    "fmt"
    "io"
    "mime/multipart"
    "net/http"
    "os/exec"
    "strings"
    "time"
)

// PredefinedVoices is the set of voice names built into pocket-tts.
// The runner uses this to decide which TTS engine to route a task to.
var PredefinedVoices = map[string]struct{}{
    "alba": {}, "marius": {}, "javert": {}, "jean": {},
    "fantine": {}, "cosette": {}, "eponine": {}, "azelma": {},
    "anna": {}, "vera": {}, "charles": {}, "paul": {},
    "george": {}, "mary": {}, "jane": {}, "michael": {},
    "eve": {}, "bill_boerst": {}, "peter_yearsley": {}, "stuart_bell": {},
}

// IsPocketTTSVoice reports whether voice is served by pocket-tts.
func IsPocketTTSVoice(voice string) bool {
    _, ok := PredefinedVoices[voice]
    return ok
}

// Client is the interface for interacting with the pocket-tts service.
type Client interface {
    // GenerateAudio synthesises text using the given voice and returns MP3 bytes.
    // Voice must be one of the predefined pocket-tts voice names.
    GenerateAudio(ctx context.Context, text, voice string) ([]byte, error)

    // StreamAudioMP3 synthesises text and returns an io.ReadCloser that streams
    // MP3-encoded audio incrementally via a live ffmpeg transcode pipe.
    // The caller must always close the returned ReadCloser.
    StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error)

    // StreamAudioWAV synthesises text and returns an io.ReadCloser that streams
    // raw WAV audio directly from pocket-tts without any transcoding.
    // The stream begins with a WAV header followed by 16-bit PCM frames at 16 kHz.
    // The caller must always close the returned ReadCloser.
    StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error)

    // ListVoices returns the available predefined voice names.
    ListVoices(ctx context.Context) ([]string, error)
}

// httpClient is the concrete pocket-tts HTTP client.
type httpClient struct {
    baseURL string
    http    *http.Client
}

// New returns a Client targeting baseURL (e.g. "https://pocket-tts.libnovel.cc").
func New(baseURL string) Client {
    return &httpClient{
        baseURL: strings.TrimRight(baseURL, "/"),
        http:    &http.Client{Timeout: 10 * time.Minute},
    }
}

// GenerateAudio posts to POST /tts and transcodes the WAV response to MP3
// using the system ffmpeg binary. Requires ffmpeg to be on PATH (available in
// the runner Docker image via Alpine's ffmpeg package).
func (c *httpClient) GenerateAudio(ctx context.Context, text, voice string) ([]byte, error) {
    if text == "" {
        return nil, fmt.Errorf("pockettts: empty text")
    }
    if voice == "" {
        voice = "alba"
    }

    resp, err := c.postTTS(ctx, text, voice)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    wavData, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, fmt.Errorf("pockettts: read response body: %w", err)
    }

    // ── Transcode WAV → MP3 via ffmpeg ────────────────────────────────────────
    mp3Data, err := wavToMP3(ctx, wavData)
    if err != nil {
        return nil, fmt.Errorf("pockettts: transcode to mp3: %w", err)
    }
    return mp3Data, nil
}

// StreamAudioMP3 posts to POST /tts and returns an io.ReadCloser that delivers
// MP3 bytes as pocket-tts generates WAV frames. ffmpeg runs as a subprocess
// with stdin connected to the live WAV stream and stdout piped to the caller.
// The caller must always close the returned ReadCloser.
func (c *httpClient) StreamAudioMP3(ctx context.Context, text, voice string) (io.ReadCloser, error) {
    if text == "" {
        return nil, fmt.Errorf("pockettts: empty text")
    }
    if voice == "" {
        voice = "alba"
    }

    resp, err := c.postTTS(ctx, text, voice)
    if err != nil {
        return nil, err
    }

    // Start ffmpeg: read WAV from stdin (the live HTTP body), write MP3 to stdout.
    cmd := exec.CommandContext(ctx,
        "ffmpeg",
        "-hide_banner", "-loglevel", "error",
        "-i", "pipe:0", // WAV from stdin
        "-f", "mp3", // output format
        "-q:a", "2", // VBR ~190 kbps
        "pipe:1", // MP3 to stdout
    )
    cmd.Stdin = resp.Body

    pr, pw := io.Pipe()
    cmd.Stdout = pw

    var stderrBuf bytes.Buffer
    cmd.Stderr = &stderrBuf

    if err := cmd.Start(); err != nil {
        resp.Body.Close()
        return nil, fmt.Errorf("pockettts: start ffmpeg: %w", err)
    }

    // Close the write end of the pipe when ffmpeg exits, propagating any error.
    go func() {
        waitErr := cmd.Wait()
        resp.Body.Close()
        if waitErr != nil {
            pw.CloseWithError(fmt.Errorf("ffmpeg: %w (stderr: %s)", waitErr, stderrBuf.String()))
        } else {
            pw.Close()
        }
    }()

    return pr, nil
}

// StreamAudioWAV posts to POST /tts and returns an io.ReadCloser that delivers
// raw WAV bytes directly from pocket-tts — no ffmpeg transcoding required.
// The first bytes will be a WAV header (RIFF/fmt chunk) followed by PCM frames.
// The caller must always close the returned ReadCloser.
func (c *httpClient) StreamAudioWAV(ctx context.Context, text, voice string) (io.ReadCloser, error) {
    if text == "" {
        return nil, fmt.Errorf("pockettts: empty text")
    }
    if voice == "" {
        voice = "alba"
    }

    resp, err := c.postTTS(ctx, text, voice)
    if err != nil {
        return nil, err
    }
    return resp.Body, nil
}

// ListVoices returns the statically known predefined voice names.
// pocket-tts has no REST endpoint for listing voices.
func (c *httpClient) ListVoices(_ context.Context) ([]string, error) {
    voices := make([]string, 0, len(PredefinedVoices))
    for v := range PredefinedVoices {
        voices = append(voices, v)
    }
    return voices, nil
}

// postTTS sends a multipart POST /tts request and returns the raw response.
// The caller is responsible for closing resp.Body.
func (c *httpClient) postTTS(ctx context.Context, text, voice string) (*http.Response, error) {
    var body bytes.Buffer
    mw := multipart.NewWriter(&body)

    if err := mw.WriteField("text", text); err != nil {
        return nil, fmt.Errorf("pockettts: write text field: %w", err)
    }
    if err := mw.WriteField("voice_url", voice); err != nil {
        return nil, fmt.Errorf("pockettts: write voice_url field: %w", err)
    }
    if err := mw.Close(); err != nil {
        return nil, fmt.Errorf("pockettts: close multipart writer: %w", err)
    }

    req, err := http.NewRequestWithContext(ctx, http.MethodPost,
        c.baseURL+"/tts", &body)
    if err != nil {
        return nil, fmt.Errorf("pockettts: build request: %w", err)
    }
    req.Header.Set("Content-Type", mw.FormDataContentType())

    resp, err := c.http.Do(req)
    if err != nil {
        return nil, fmt.Errorf("pockettts: request: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        _, _ = io.Copy(io.Discard, resp.Body)
        resp.Body.Close()
        return nil, fmt.Errorf("pockettts: server returned %d", resp.StatusCode)
    }
    return resp, nil
}

// wavToMP3 converts raw WAV bytes to MP3 using ffmpeg.
// ffmpeg reads from stdin (pipe:0) and writes to stdout (pipe:1).
func wavToMP3(ctx context.Context, wav []byte) ([]byte, error) {
    cmd := exec.CommandContext(ctx,
        "ffmpeg",
        "-hide_banner", "-loglevel", "error",
        "-i", "pipe:0", // read WAV from stdin
        "-f", "mp3", // output format
        "-q:a", "2", // VBR quality ~190 kbps
        "pipe:1", // write MP3 to stdout
    )
    cmd.Stdin = bytes.NewReader(wav)

    var out, stderr bytes.Buffer
    cmd.Stdout = &out
    cmd.Stderr = &stderr

    if err := cmd.Run(); err != nil {
        return nil, fmt.Errorf("ffmpeg: %w (stderr: %s)", err, stderr.String())
    }
    return out.Bytes(), nil
}
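
To illustrate the Client interface above, a hedged usage sketch; the base URL, input text, and output file name are invented for illustration:

// Illustrative only: URL, text, and file name are assumptions, not from this commit.
func saveChapterAudio(ctx context.Context) error {
    client := pockettts.New("http://pocket-tts:8080")

    rc, err := client.StreamAudioMP3(ctx, "It was a dark and stormy night.", "alba")
    if err != nil {
        return err
    }
    defer rc.Close() // always close; this also unblocks the ffmpeg pipe so the subprocess exits

    f, err := os.Create("chapter-1.mp3")
    if err != nil {
        return err
    }
    defer f.Close()

    // Bytes arrive as ffmpeg emits them, so playback could begin before synthesis ends.
    _, err = io.Copy(f, rc)
    return err
}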

backend/internal/runner/asynq_runner.go (new file, 256 lines)
@@ -0,0 +1,256 @@
package runner

// asynq_runner.go — Asynq-based task dispatch for the runner.
//
// When cfg.RedisAddr is set, Run() calls runAsynq() instead of runPoll().
// The Asynq server replaces the polling loop: it listens on Redis for tasks
// enqueued by the backend Producer and delivers them immediately.
//
// Handlers in this file decode Asynq job payloads and call the existing
// runScrapeTask / runAudioTask methods, keeping all execution logic in one place.

import (
    "context"
    "encoding/json"
    "fmt"
    "os"
    "strings"
    "sync"
    "time"

    "github.com/hibiken/asynq"
    asynqmetrics "github.com/hibiken/asynq/x/metrics"
    "github.com/libnovel/backend/internal/asynqqueue"
    "github.com/libnovel/backend/internal/domain"
)

// runAsynq starts an Asynq server that replaces the PocketBase poll loop.
// It also starts the periodic catalogue refresh ticker.
// Blocks until ctx is cancelled.
func (r *Runner) runAsynq(ctx context.Context) error {
    redisOpt, err := r.redisConnOpt()
    if err != nil {
        return fmt.Errorf("runner: parse redis addr: %w", err)
    }

    srv := asynq.NewServer(redisOpt, asynq.Config{
        // Allocate concurrency slots for each task type.
        // Total concurrency = scrape + audio slots.
        Concurrency: r.cfg.MaxConcurrentScrape + r.cfg.MaxConcurrentAudio,
        Queues: map[string]int{
            asynqqueue.QueueDefault: 1,
        },
        // Let Asynq handle retries with exponential back-off.
        RetryDelayFunc: asynq.DefaultRetryDelayFunc,
        // Log errors from handlers via the existing structured logger.
        ErrorHandler: asynq.ErrorHandlerFunc(func(_ context.Context, task *asynq.Task, err error) {
            r.deps.Log.Error("runner: asynq task failed",
                "type", task.Type(),
                "err", err,
            )
        }),
    })

    mux := asynq.NewServeMux()
    mux.HandleFunc(asynqqueue.TypeAudioGenerate, r.handleAudioTask)
    mux.HandleFunc(asynqqueue.TypeScrapeBook, r.handleScrapeTask)
    mux.HandleFunc(asynqqueue.TypeScrapeCatalogue, r.handleScrapeTask)
    mux.HandleFunc(asynqqueue.TypeImportBook, r.handleImportTask)

    // Register Asynq queue metrics with the default Prometheus registry so
    // the /metrics endpoint (metrics.go) can expose them.
    inspector := asynq.NewInspector(redisOpt)
    collector := asynqmetrics.NewQueueMetricsCollector(inspector)
    if err := r.metricsRegistry.Register(collector); err != nil {
        r.deps.Log.Warn("runner: could not register asynq prometheus collector", "err", err)
    }

    // Start the periodic catalogue refresh.
    catalogueTick := time.NewTicker(r.cfg.CatalogueRefreshInterval)
    defer catalogueTick.Stop()
    if !r.cfg.SkipInitialCatalogueRefresh {
        go r.runCatalogueRefresh(ctx)
    } else {
        r.deps.Log.Info("runner: skipping initial catalogue refresh (RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true)")
    }

    r.deps.Log.Info("runner: asynq mode active", "redis_addr", r.cfg.RedisAddr)

    // ── Heartbeat goroutine ──────────────────────────────────────────────
    // Touch /tmp/runner.alive every StaleTaskThreshold/2 (60s by default) so
    // the Docker healthcheck passes in asynq mode.
    // This mirrors the heartbeat file behaviour from the poll() loop.
    go func() {
        heartbeatTick := time.NewTicker(r.cfg.StaleTaskThreshold / 2)
        defer heartbeatTick.Stop()
        for {
            select {
            case <-ctx.Done():
                return
            case <-heartbeatTick.C:
                if f, err := os.Create("/tmp/runner.alive"); err != nil {
                    r.deps.Log.Warn("runner: could not write heartbeat file", "err", err)
                } else {
                    f.Close()
                }
            }
        }
    }()

    // ── Translation polling goroutine ────────────────────────────────────
    // Translation tasks live in PocketBase (not Redis), so we need a separate
    // poll loop to claim and dispatch them. This runs alongside the Asynq server.
    translationSem := make(chan struct{}, r.cfg.MaxConcurrentTranslation)
    var translationWg sync.WaitGroup
    go func() {
        tick := time.NewTicker(r.cfg.PollInterval)
        defer tick.Stop()
        for {
            select {
            case <-ctx.Done():
                return
            case <-tick.C:
                r.pollTranslationTasks(ctx, translationSem, &translationWg)
            }
        }
    }()

    // Run catalogue refresh ticker in the background.
    go func() {
        for {
            select {
            case <-ctx.Done():
                return
            case <-catalogueTick.C:
                go r.runCatalogueRefresh(ctx)
            }
        }
    }()

    // Start Asynq server (non-blocking).
    if err := srv.Start(mux); err != nil {
        return fmt.Errorf("runner: asynq server start: %w", err)
    }

    // Block until context is cancelled, then gracefully stop.
    <-ctx.Done()
    r.deps.Log.Info("runner: context cancelled, shutting down asynq server")
    srv.Shutdown()

    // Wait for translation tasks to complete.
    translationWg.Wait()
    return nil
}

// redisConnOpt parses cfg.RedisAddr into an asynq.RedisConnOpt.
// Supports full "redis://" / "rediss://" URLs and plain "host:port".
func (r *Runner) redisConnOpt() (asynq.RedisConnOpt, error) {
    addr := r.cfg.RedisAddr
    // ParseRedisURI handles redis:// and rediss:// schemes.
    if strings.HasPrefix(addr, "redis://") || strings.HasPrefix(addr, "rediss://") {
        return asynq.ParseRedisURI(addr)
    }
    // Plain "host:port" — use RedisClientOpt directly.
    return asynq.RedisClientOpt{
        Addr:     addr,
        Password: r.cfg.RedisPassword,
    }, nil
}
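
Both accepted address forms, for reference (values invented):

// Plain host:port — AUTH password supplied separately via RedisPassword:
//   RedisAddr:     "redis-master:6379"
//   RedisPassword: "s3cret"
//
// Full URI — credentials and TLS ("rediss://") come from the URL itself,
// parsed by asynq.ParseRedisURI; RedisPassword is not needed:
//   RedisAddr: "rediss://:s3cret@redis.example.com:6380"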
// handleScrapeTask is the Asynq handler for TypeScrapeBook and TypeScrapeCatalogue.
func (r *Runner) handleScrapeTask(ctx context.Context, t *asynq.Task) error {
    var p asynqqueue.ScrapePayload
    if err := json.Unmarshal(t.Payload(), &p); err != nil {
        return fmt.Errorf("unmarshal scrape payload: %w", err)
    }
    task := domain.ScrapeTask{
        ID:          p.PBTaskID,
        Kind:        p.Kind,
        TargetURL:   p.TargetURL,
        FromChapter: p.FromChapter,
        ToChapter:   p.ToChapter,
    }
    r.tasksRunning.Add(1)
    defer r.tasksRunning.Add(-1)
    r.runScrapeTask(ctx, task)
    return nil
}

// handleAudioTask is the Asynq handler for TypeAudioGenerate.
func (r *Runner) handleAudioTask(ctx context.Context, t *asynq.Task) error {
    var p asynqqueue.AudioPayload
    if err := json.Unmarshal(t.Payload(), &p); err != nil {
        return fmt.Errorf("unmarshal audio payload: %w", err)
    }
    task := domain.AudioTask{
        ID:      p.PBTaskID,
        Slug:    p.Slug,
        Chapter: p.Chapter,
        Voice:   p.Voice,
    }
    r.tasksRunning.Add(1)
    defer r.tasksRunning.Add(-1)
    r.runAudioTask(ctx, task)
    return nil
}

// handleImportTask is the Asynq handler for TypeImportBook (PDF/EPUB import).
func (r *Runner) handleImportTask(ctx context.Context, t *asynq.Task) error {
    var p asynqqueue.ImportPayload
    if err := json.Unmarshal(t.Payload(), &p); err != nil {
        return fmt.Errorf("unmarshal import payload: %w", err)
    }
    task := domain.ImportTask{
        ID:          p.PBTaskID,
        Slug:        p.Slug,
        Title:       p.Title,
        FileType:    p.FileType,
        ChaptersKey: p.ChaptersKey,
    }
    r.tasksRunning.Add(1)
    defer r.tasksRunning.Add(-1)
    r.runImportTask(ctx, task, p.ObjectKey)
    return nil
}

// pollTranslationTasks claims all available translation tasks from PocketBase
// and dispatches them to goroutines. Translation tasks don't go through Redis/Asynq
// because they're stored in PocketBase, so we need this separate poll loop.
func (r *Runner) pollTranslationTasks(ctx context.Context, translationSem chan struct{}, wg *sync.WaitGroup) {
    // Reap orphaned tasks (same logic as poll() in runner.go).
    if n, err := r.deps.Consumer.ReapStaleTasks(ctx, r.cfg.StaleTaskThreshold); err != nil {
        r.deps.Log.Warn("runner: reap stale translation tasks failed", "err", err)
    } else if n > 0 {
        r.deps.Log.Info("runner: reaped stale translation tasks", "count", n)
    }

translationLoop:
    for {
        if ctx.Err() != nil {
            return
        }
        select {
        case translationSem <- struct{}{}:
            // Slot acquired — proceed to claim a task.
        default:
            // All slots busy; leave remaining pending tasks for next tick.
            break translationLoop
        }
        task, ok, err := r.deps.Consumer.ClaimNextTranslationTask(ctx, r.cfg.WorkerID)
        if err != nil {
            <-translationSem
            r.deps.Log.Error("runner: ClaimNextTranslationTask failed", "err", err)
            break
        }
        if !ok {
            <-translationSem
            break
        }
        r.tasksRunning.Add(1)
        wg.Add(1)
        go func(t domain.TranslationTask) {
            defer wg.Done()
            defer func() { <-translationSem }()
            defer r.tasksRunning.Add(-1)
            r.runTranslationTask(ctx, t)
        }(task)
    }
}
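
For orientation, the producer side that feeds these handlers could look like the sketch below. asynq.NewClient, NewTask, and Enqueue are real hibiken/asynq APIs, and the ScrapePayload fields and type/queue constants come from the code above; the Redis address, IDs, kind string, and option values are made up:

// Hypothetical backend-side enqueue (not part of this diff).
client := asynq.NewClient(asynq.RedisClientOpt{Addr: "redis:6379"})
defer client.Close()

payload, err := json.Marshal(asynqqueue.ScrapePayload{
    PBTaskID:  "pbtask_abc123", // PocketBase task record ID (illustrative)
    Kind:      "book",          // kind value assumed
    TargetURL: "https://novelfire.net/book/example-novel",
    ToChapter: 50,
})
if err != nil {
    return err
}
_, err = client.Enqueue(
    asynq.NewTask(asynqqueue.TypeScrapeBook, payload),
    asynq.Queue(asynqqueue.QueueDefault),
    asynq.MaxRetry(3),
)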

@@ -6,17 +6,20 @@ package runner
//
// Design:
// - Runs on its own ticker (CatalogueRefreshInterval, default 24h) inside Run().
// - Also fires once on startup.
// - ScrapeCatalogue streams CatalogueEntry values over a channel — we iterate
//   and call ScrapeMetadata for each entry.
// - Per-request random jitter (1–3s) prevents hammering novelfire.net.
// - Cover images are fetched from the URL embedded in BookMeta.Cover and
//   stored in MinIO (browse bucket, key: covers/{slug}.jpg).
// - WriteMetadata + UpsertBook are called for every successfully scraped book.
// - Errors for individual books are logged and skipped; the loop continues.
// - The cover URL stored in BookMeta.Cover is rewritten to the internal proxy
//   path (/api/cover/novelfire.net/{slug}) so the UI always fetches via the
//   backend, which will serve from MinIO.
// - Also fires once on startup (unless SkipInitialCatalogueRefresh is set).
// - ScrapeCatalogue streams CatalogueEntry values over a channel — already has
//   its own inter-page jitter + retryGet (see scraper.go).
// - Per-book: only metadata is scraped here (not chapters). Chapters are scraped
//   on-demand when a user opens a book or via an explicit scrape task.
// - Between each metadata request a configurable base delay plus up to 50%
//   random jitter is applied (CatalogueRequestDelay, default 2s). This keeps
//   the request rate well below novelfire.net's rate limit even for ~15k books.
// - ScrapeMetadata itself uses retryGet with 429-aware exponential backoff
//   (up to 9 attempts), so transient rate limits are handled gracefully.
// - Cover images are fetched and stored in MinIO on first sight; subsequent
//   refreshes skip covers that already exist (CoverExists check).
// - Books already present in Meilisearch are skipped entirely (fast path).
// - Errors for individual books are logged and skipped; the loop never aborts.

import (
    "context"

@@ -29,7 +32,7 @@ import (

// runCatalogueRefresh performs one full catalogue walk: scrapes metadata for
// every book on novelfire.net, downloads covers to MinIO, and upserts to
// Meilisearch. Errors for individual books are logged and skipped.
// Meilisearch. Individual book failures are logged and skipped.
func (r *Runner) runCatalogueRefresh(ctx context.Context) {
    if r.deps.Novel == nil {
        r.deps.Log.Warn("runner: catalogue refresh skipped — Novel scraper not configured")

@@ -40,8 +43,9 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {
        return
    }

    delay := r.cfg.CatalogueRequestDelay
    log := r.deps.Log.With("op", "catalogue_refresh")
    log.Info("runner: catalogue refresh starting")
    log.Info("runner: catalogue refresh starting", "request_delay", delay)

    entries, errCh := r.deps.Novel.ScrapeCatalogue(ctx)

@@ -51,26 +55,26 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {
            break
        }

        // Skip books already present in Meilisearch — they were indexed on a
        // previous run. Re-indexing only happens when a scrape task is
        // explicitly enqueued (e.g. via the admin UI or API).
        // Fast path: skip books already indexed in Meilisearch.
        if r.deps.SearchIndex.BookExists(ctx, entry.Slug) {
            skipped++
            continue
        }

        // Random jitter between books to avoid rate-limiting.
        jitter := time.Duration(1000+rand.Intn(2000)) * time.Millisecond
        // Polite delay between metadata requests: base + up to 50% jitter.
        // This applies before every fetch so we never fire bursts.
        jitter := time.Duration(rand.Int63n(int64(delay / 2)))
        select {
        case <-ctx.Done():
            break
        case <-time.After(delay + jitter):
        }

        // ScrapeMetadata internally retries on 429 with exponential back-off.
        meta, err := r.deps.Novel.ScrapeMetadata(ctx, entry.URL)
        if err != nil {
            log.Warn("runner: catalogue refresh: metadata scrape failed",
                "url", entry.URL, "err", err)
            log.Warn("runner: catalogue refresh: metadata scrape failed — skipping book",
                "slug", entry.Slug, "url", entry.URL, "err", err)
            errCount++
            continue
        }

@@ -81,35 +85,32 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {

        // Persist to PocketBase.
        if err := r.deps.BookWriter.WriteMetadata(ctx, meta); err != nil {
            log.Warn("runner: catalogue refresh: WriteMetadata failed",
            log.Warn("runner: catalogue refresh: WriteMetadata failed — skipping book",
                "slug", meta.Slug, "err", err)
            errCount++
            continue
        }

        // Index in Meilisearch.
        // Index in Meilisearch (non-fatal).
        if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
            log.Warn("runner: catalogue refresh: UpsertBook failed",
                "slug", meta.Slug, "err", err)
            // non-fatal — continue
        }

        // Download and store cover image in MinIO if we have a cover URL
        // and a CoverStore is wired in.
        // Download cover to MinIO if not already cached (non-fatal).
        if r.deps.CoverStore != nil && originalCover != "" {
            if !r.deps.CoverStore.CoverExists(ctx, meta.Slug) {
                if err := r.downloadCover(ctx, meta.Slug, originalCover); err != nil {
                    log.Warn("runner: catalogue refresh: cover download failed",
                        "slug", meta.Slug, "url", originalCover, "err", err)
                    // non-fatal
                }
            }
        }

        ok++
        if ok%100 == 0 {
        if ok%50 == 0 {
            log.Info("runner: catalogue refresh progress",
                "scraped", ok, "errors", errCount)
                "scraped", ok, "skipped", skipped, "errors", errCount)
        }
    }

@@ -19,3 +19,53 @@ func stripMarkdown(src string) string {
    src = regexp.MustCompile(`\n{3,}`).ReplaceAllString(src, "\n\n")
    return strings.TrimSpace(src)
}

// chunkText splits text into chunks of at most maxChars characters, breaking
// at sentence boundaries (". ", "! ", "? ", "\n") so that the TTS service
// receives natural prose fragments rather than mid-sentence cuts.
//
// If a single sentence exceeds maxChars it is included as its own chunk —
// never silently truncated.
func chunkText(text string, maxChars int) []string {
    if len(text) <= maxChars {
        return []string{text}
    }

    // Sentence-boundary delimiters — we split AFTER these sequences.
    // Order matters: longer sequences first.
    delimiters := []string{".\n", "!\n", "?\n", ". ", "! ", "? ", "\n\n", "\n"}

    var chunks []string
    remaining := text

    for len(remaining) > 0 {
        if len(remaining) <= maxChars {
            chunks = append(chunks, strings.TrimSpace(remaining))
            break
        }

        // Find the last sentence boundary within the maxChars window.
        window := remaining[:maxChars]
        cutAt := -1
        for _, delim := range delimiters {
            idx := strings.LastIndex(window, delim)
            if idx > 0 && idx+len(delim) > cutAt {
                cutAt = idx + len(delim)
            }
        }

        if cutAt <= 0 {
            // No boundary found — hard-break at maxChars to avoid infinite loop.
            cutAt = maxChars
        }

        chunk := strings.TrimSpace(remaining[:cutAt])
        if chunk != "" {
            chunks = append(chunks, chunk)
        }
        remaining = strings.TrimSpace(remaining[cutAt:])
    }

    return chunks
}
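
A quick worked example of the splitting behaviour (input invented for illustration):

parts := chunkText("The sun rose. Birds sang over the hills. A new day began in the village.", 40)
// parts[0] == "The sun rose."
// parts[1] == "Birds sang over the hills."
// parts[2] == "A new day began in the village."
// Each cut lands after the right-most ". " inside the 40-character window,
// so no sentence is split mid-word here.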
@@ -1,21 +1,28 @@
package runner

// metrics.go — lightweight HTTP metrics endpoint for the runner.
// metrics.go — Prometheus metrics HTTP endpoint for the runner.
//
// GET /metrics returns a JSON document with live task counters and uptime.
// No external dependency (no Prometheus); plain net/http only.
// GET /metrics returns a Prometheus text/plain scrape response.
// Exposes:
//   - Standard Go runtime metrics (via promhttp)
//   - Runner task counters (tasks_running, tasks_completed, tasks_failed)
//   - Asynq queue metrics (registered in asynq_runner.go when Redis is enabled)
//
// GET /health — simple liveness probe.

import (
    "context"
    "encoding/json"
    "fmt"
    "log/slog"
    "net"
    "net/http"
    "time"

    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promhttp"
)

// metricsServer serves GET /metrics for the runner process.
// metricsServer serves GET /metrics and GET /health for the runner process.
type metricsServer struct {
    addr string
    r    *Runner

@@ -23,21 +30,62 @@ type metricsServer struct {
}

func newMetricsServer(addr string, r *Runner, log *slog.Logger) *metricsServer {
    return &metricsServer{addr: addr, r: r, log: log}
    ms := &metricsServer{addr: addr, r: r, log: log}
    ms.registerCollectors()
    return ms
}

// registerCollectors registers runner-specific Prometheus collectors.
// Called once at construction; Asynq queue collector is registered separately
// in asynq_runner.go after the Redis connection is established.
func (ms *metricsServer) registerCollectors() {
    // Runner task gauges / counters backed by the atomic fields on Runner.
    ms.r.metricsRegistry.MustRegister(prometheus.NewGaugeFunc(
        prometheus.GaugeOpts{
            Namespace: "runner",
            Name:      "tasks_running",
            Help:      "Number of tasks currently being processed.",
        },
        func() float64 { return float64(ms.r.tasksRunning.Load()) },
    ))
    ms.r.metricsRegistry.MustRegister(prometheus.NewCounterFunc(
        prometheus.CounterOpts{
            Namespace: "runner",
            Name:      "tasks_completed_total",
            Help:      "Total number of tasks completed successfully since startup.",
        },
        func() float64 { return float64(ms.r.tasksCompleted.Load()) },
    ))
    ms.r.metricsRegistry.MustRegister(prometheus.NewCounterFunc(
        prometheus.CounterOpts{
            Namespace: "runner",
            Name:      "tasks_failed_total",
            Help:      "Total number of tasks that ended in failure since startup.",
        },
        func() float64 { return float64(ms.r.tasksFailed.Load()) },
    ))
    ms.r.metricsRegistry.MustRegister(prometheus.NewGaugeFunc(
        prometheus.GaugeOpts{
            Namespace: "runner",
            Name:      "uptime_seconds",
            Help:      "Seconds since the runner process started.",
        },
        func() float64 { return time.Since(ms.r.startedAt).Seconds() },
    ))
}
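
Given the Namespace/Name pairs registered above, a GET /metrics scrape should contain lines of this shape in the standard Prometheus exposition format (sample values invented):

# HELP runner_tasks_running Number of tasks currently being processed.
# TYPE runner_tasks_running gauge
runner_tasks_running 2
# HELP runner_tasks_completed_total Total number of tasks completed successfully since startup.
# TYPE runner_tasks_completed_total counter
runner_tasks_completed_total 147
# HELP runner_uptime_seconds Seconds since the runner process started.
# TYPE runner_uptime_seconds gauge
runner_uptime_seconds 86399.4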

// ListenAndServe starts the HTTP server and blocks until ctx is cancelled or
// a fatal listen error occurs.
func (ms *metricsServer) ListenAndServe(ctx context.Context) error {
    mux := http.NewServeMux()
    mux.HandleFunc("GET /metrics", ms.handleMetrics)
    mux.Handle("GET /metrics", promhttp.HandlerFor(ms.r.metricsRegistry, promhttp.HandlerOpts{}))
    mux.HandleFunc("GET /health", ms.handleHealth)

    srv := &http.Server{
        Addr:         ms.addr,
        Handler:      mux,
        ReadTimeout:  5 * time.Second,
        WriteTimeout: 5 * time.Second,
        WriteTimeout: 10 * time.Second,
        BaseContext:  func(_ net.Listener) context.Context { return ctx },
    }

@@ -58,35 +106,8 @@ func (ms *metricsServer) ListenAndServe(ctx context.Context) error {
    }
}

// handleMetrics handles GET /metrics.
// Response shape (JSON):
//
//	{
//	  "tasks_running": N,
//	  "tasks_completed": N,
//	  "tasks_failed": N,
//	  "uptime_seconds": N
//	}
func (ms *metricsServer) handleMetrics(w http.ResponseWriter, _ *http.Request) {
    uptimeSec := int64(time.Since(ms.r.startedAt).Seconds())
    metricsWriteJSON(w, 0, map[string]int64{
        "tasks_running":   ms.r.tasksRunning.Load(),
        "tasks_completed": ms.r.tasksCompleted.Load(),
        "tasks_failed":    ms.r.tasksFailed.Load(),
        "uptime_seconds":  uptimeSec,
    })
}

// handleHealth handles GET /health — simple liveness probe for the metrics server.
// handleHealth handles GET /health — simple liveness probe.
func (ms *metricsServer) handleHealth(w http.ResponseWriter, _ *http.Request) {
    metricsWriteJSON(w, 0, map[string]string{"status": "ok"})
}

// metricsWriteJSON writes v as a JSON response with the given status code.
func metricsWriteJSON(w http.ResponseWriter, status int, v any) {
    w.Header().Set("Content-Type", "application/json")
    if status != 0 {
        w.WriteHeader(status)
    }
    _ = json.NewEncoder(w).Encode(v)
    _, _ = w.Write([]byte(`{"status":"ok"}`))
}

@@ -15,6 +15,7 @@ package runner

import (
    "context"
    "encoding/json"
    "fmt"
    "log/slog"
    "os"

@@ -22,37 +23,71 @@ import (
    "sync/atomic"
    "time"

    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/attribute"
    "go.opentelemetry.io/otel/codes"

    "github.com/libnovel/backend/internal/bookstore"
    "github.com/libnovel/backend/internal/cfai"
    "github.com/libnovel/backend/internal/domain"
    "github.com/libnovel/backend/internal/kokoro"
    "github.com/libnovel/backend/internal/libretranslate"
    "github.com/libnovel/backend/internal/meili"
    "github.com/libnovel/backend/internal/orchestrator"
    "github.com/libnovel/backend/internal/pockettts"
    "github.com/libnovel/backend/internal/scraper"
    "github.com/libnovel/backend/internal/storage"
    "github.com/libnovel/backend/internal/taskqueue"
    "github.com/libnovel/backend/internal/webpush"
    "github.com/prometheus/client_golang/prometheus"
)

// Notifier creates notifications for users.
type Notifier interface {
    CreateNotification(ctx context.Context, userID, title, message, link string) error
}

// ChapterIngester persists imported chapters for a book.
type ChapterIngester interface {
    IngestChapters(ctx context.Context, slug string, chapters []bookstore.Chapter) error
}

// ImportChapterStore retrieves pre-parsed chapter JSON blobs from object storage.
type ImportChapterStore interface {
    GetImportChapters(ctx context.Context, key string) ([]byte, error)
}

// Config tunes the runner behaviour.
type Config struct {
    // WorkerID uniquely identifies this runner instance in PocketBase records.
    WorkerID string
    // PollInterval is how often the runner checks for new tasks.
    // Only used in PocketBase-polling mode (RedisAddr == "").
    PollInterval time.Duration
    // MaxConcurrentScrape limits simultaneous book-scrape goroutines.
    MaxConcurrentScrape int
    // MaxConcurrentAudio limits simultaneous audio-generation goroutines.
    MaxConcurrentAudio int
    // MaxConcurrentTranslation limits simultaneous translation goroutines.
    MaxConcurrentTranslation int
    // OrchestratorWorkers is the chapter-scraping parallelism inside each book run.
    OrchestratorWorkers int
    // HeartbeatInterval is how often active tasks PATCH their heartbeat_at
    // timestamp to signal they are still alive. Defaults to 30s when 0.
    // Only used in PocketBase-polling mode.
    HeartbeatInterval time.Duration
    // StaleTaskThreshold is how old a heartbeat must be (or absent) before the
    // task is considered orphaned and reset to pending. Defaults to 2m when 0.
    // Only used in PocketBase-polling mode.
    StaleTaskThreshold time.Duration
    // CatalogueRefreshInterval is how often the runner walks the full catalogue,
    // scrapes per-book metadata, downloads covers, and re-indexes everything in
    // Meilisearch. Defaults to 24h (expensive — full catalogue walk).
    CatalogueRefreshInterval time.Duration
    // CatalogueRequestDelay is the base inter-request pause during a catalogue
    // refresh metadata walk. Jitter of up to 50% is added on top.
    // Defaults to 2s. Set via RUNNER_CATALOGUE_REQUEST_DELAY.
    CatalogueRequestDelay time.Duration
    // SkipInitialCatalogueRefresh suppresses the immediate catalogue walk that
    // otherwise fires at startup. The periodic ticker (CatalogueRefreshInterval)
    // still fires normally. Set RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true for

@@ -61,6 +96,15 @@ type Config struct {
    // MetricsAddr is the HTTP listen address for the /metrics endpoint.
    // Defaults to ":9091". Set to "" to disable.
    MetricsAddr string
    // RedisAddr is the address of the Redis instance used for Asynq task
    // dispatch. When set the runner switches from PocketBase-polling mode to
    // Asynq ServeMux mode (immediate task delivery, no polling).
    // Supports plain "host:port" or a full "rediss://..." URL.
    // When empty the runner falls back to PocketBase polling.
    RedisAddr string
    // RedisPassword is the Redis AUTH password.
    // Not required when RedisAddr is a full URL that includes credentials.
    RedisPassword string
}
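
To make the mode switch concrete, a hypothetical Config literal; the values are illustrative, not defaults from this diff (except where noted):

cfg := Config{
    WorkerID:            "runner-1",
    MaxConcurrentScrape: 2,
    MaxConcurrentAudio:  1,
    MetricsAddr:         ":9091",      // the documented default
    RedisAddr:           "redis:6379", // non-empty → Asynq mode; "" → PocketBase polling
}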

// Dependencies are the external services the runner depends on.

@@ -73,15 +117,43 @@ type Dependencies struct {
    BookReader bookstore.BookReader
    // AudioStore persists generated audio and checks key existence.
    AudioStore bookstore.AudioStore
    // TranslationStore persists translated markdown and checks key existence.
    TranslationStore bookstore.TranslationStore
    // CoverStore stores book cover images in MinIO.
    CoverStore bookstore.CoverStore
    // BookImport handles PDF/EPUB file parsing and chapter extraction.
    // Kept for backward compatibility when ChaptersKey is not set.
    BookImport bookstore.BookImporter
    // ImportChapterStore retrieves pre-parsed chapter JSON blobs from MinIO.
    // When set and the task has a ChaptersKey, the runner reads from here
    // instead of calling BookImport.Import() (the new preferred path).
    ImportChapterStore ImportChapterStore
    // ChapterIngester persists extracted chapters into MinIO/PocketBase.
    ChapterIngester ChapterIngester
    // Notifier creates notifications for users.
    Notifier Notifier
    // WebPush sends browser push notifications to subscribed users.
    // If nil, push notifications are disabled.
    WebPush *webpush.Sender
    // Store is the underlying *storage.Store; used for push subscription lookups.
    // Only needed when WebPush is non-nil.
    Store *storage.Store
    // SearchIndex indexes books in Meilisearch after scraping.
    // If nil a no-op is used.
    SearchIndex meili.Client
    // Novel is the scraper implementation.
    Novel scraper.NovelScraper
    // Kokoro is the TTS client.
    // Kokoro is the Kokoro-FastAPI TTS client (GPU, OpenAI-compatible voices).
    Kokoro kokoro.Client
    // PocketTTS is the pocket-tts client (CPU, kyutai voices: alba, marius, etc.).
    // If nil, pocket-tts voice tasks will fail with a clear error.
    PocketTTS pockettts.Client
    // CFAI is the Cloudflare Workers AI TTS client (cfai:* prefixed voices).
    // If nil, CF AI voice tasks will fail with a clear error.
    CFAI cfai.Client
    // LibreTranslate is the machine translation client.
    // If nil, translation tasks will fail with a clear error.
    LibreTranslate libretranslate.Client
    // Log is the structured logger.
    Log *slog.Logger
}

@@ -91,6 +163,8 @@ type Runner struct {
    cfg  Config
    deps Dependencies

    metricsRegistry *prometheus.Registry

    // Atomic task counters — read by /metrics without locking.
    tasksRunning   atomic.Int64
    tasksCompleted atomic.Int64

@@ -110,6 +184,9 @@ func New(cfg Config, deps Dependencies) *Runner {
    if cfg.MaxConcurrentAudio <= 0 {
        cfg.MaxConcurrentAudio = 1
    }
    if cfg.MaxConcurrentTranslation <= 0 {
        cfg.MaxConcurrentTranslation = 1
    }
    if cfg.WorkerID == "" {
        cfg.WorkerID = "runner"
    }

@@ -122,6 +199,9 @@ func New(cfg Config, deps Dependencies) *Runner {
    if cfg.CatalogueRefreshInterval <= 0 {
        cfg.CatalogueRefreshInterval = 24 * time.Hour
    }
    if cfg.CatalogueRequestDelay <= 0 {
        cfg.CatalogueRequestDelay = 2 * time.Second
    }
    if cfg.MetricsAddr == "" {
        cfg.MetricsAddr = ":9091"
    }

@@ -131,17 +211,21 @@ func New(cfg Config, deps Dependencies) *Runner {
    if deps.SearchIndex == nil {
        deps.SearchIndex = meili.NoopClient{}
    }
    return &Runner{cfg: cfg, deps: deps, startedAt: time.Now()}
    return &Runner{cfg: cfg, deps: deps, startedAt: time.Now(), metricsRegistry: prometheus.NewRegistry()}
}

// Run starts the poll loop and the metrics HTTP server, blocking until ctx is
// cancelled.
// Run starts the worker loop and the metrics HTTP server, blocking until ctx
// is cancelled.
//
// When cfg.RedisAddr is set the runner uses Asynq (immediate task delivery).
// Otherwise it falls back to PocketBase polling (legacy mode).
func (r *Runner) Run(ctx context.Context) error {
    r.deps.Log.Info("runner: starting",
        "worker_id", r.cfg.WorkerID,
        "poll_interval", r.cfg.PollInterval,
        "mode", r.mode(),
        "max_scrape", r.cfg.MaxConcurrentScrape,
        "max_audio", r.cfg.MaxConcurrentAudio,
        "max_translation", r.cfg.MaxConcurrentTranslation,
        "catalogue_refresh_interval", r.cfg.CatalogueRefreshInterval,
        "metrics_addr", r.cfg.MetricsAddr,
    )

@@ -156,8 +240,27 @@ func (r *Runner) Run(ctx context.Context) error {
        }()
    }

    if r.cfg.RedisAddr != "" {
        return r.runAsynq(ctx)
    }
    return r.runPoll(ctx)
}

// mode returns a short string describing the active dispatch mode.
func (r *Runner) mode() string {
    if r.cfg.RedisAddr != "" {
        return "asynq"
    }
    return "poll"
}

// runPoll is the legacy PocketBase-polling dispatch loop.
// Used when cfg.RedisAddr is empty.
func (r *Runner) runPoll(ctx context.Context) error {
    scrapeSem := make(chan struct{}, r.cfg.MaxConcurrentScrape)
    audioSem := make(chan struct{}, r.cfg.MaxConcurrentAudio)
    translationSem := make(chan struct{}, r.cfg.MaxConcurrentTranslation)
    importSem := make(chan struct{}, 1) // Limit concurrent imports
    var wg sync.WaitGroup

    tick := time.NewTicker(r.cfg.PollInterval)

@@ -173,9 +276,11 @@ func (r *Runner) Run(ctx context.Context) error {
        r.deps.Log.Info("runner: skipping initial catalogue refresh (RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true)")
    }

    r.deps.Log.Info("runner: poll mode active", "poll_interval", r.cfg.PollInterval)

    // Run one poll immediately on startup, then on each tick.
    for {
        r.poll(ctx, scrapeSem, audioSem, &wg)
        r.poll(ctx, scrapeSem, audioSem, translationSem, importSem, &wg)

        select {
        case <-ctx.Done():

@@ -200,7 +305,7 @@ func (r *Runner) Run(ctx context.Context) error {
}

// poll claims all available pending tasks and dispatches them to goroutines.
func (r *Runner) poll(ctx context.Context, scrapeSem, audioSem chan struct{}, wg *sync.WaitGroup) {
func (r *Runner) poll(ctx context.Context, scrapeSem, audioSem, translationSem, importSem chan struct{}, wg *sync.WaitGroup) {
    // ── Heartbeat file ────────────────────────────────────────────────────
    // Touch /tmp/runner.alive so the Docker health check can confirm the
    // runner is actively polling. Failure is non-fatal — just log it.

@@ -283,6 +388,72 @@ audioLoop:
            r.runAudioTask(ctx, t)
        }(task)
    }

    // ── Translation tasks ─────────────────────────────────────────────────
translationLoop:
    for {
        if ctx.Err() != nil {
            return
        }
        select {
        case translationSem <- struct{}{}:
            // Slot acquired — proceed to claim a task.
        default:
            // All slots busy; leave remaining pending tasks for next tick.
            break translationLoop
        }
        task, ok, err := r.deps.Consumer.ClaimNextTranslationTask(ctx, r.cfg.WorkerID)
        if err != nil {
            <-translationSem
            r.deps.Log.Error("runner: ClaimNextTranslationTask failed", "err", err)
            break
        }
        if !ok {
            <-translationSem
            break
        }
        r.tasksRunning.Add(1)
        wg.Add(1)
        go func(t domain.TranslationTask) {
            defer wg.Done()
            defer func() { <-translationSem }()
            defer r.tasksRunning.Add(-1)
            r.runTranslationTask(ctx, t)
        }(task)
    }

    // ── Import tasks ─────────────────────────────────────────────────────
importLoop:
    for {
        if ctx.Err() != nil {
            return
        }
        select {
        case importSem <- struct{}{}:
            // Slot acquired — proceed to claim a task.
        default:
            // All slots busy; leave remaining pending tasks for next tick.
            break importLoop
        }
        task, ok, err := r.deps.Consumer.ClaimNextImportTask(ctx, r.cfg.WorkerID)
        if err != nil {
            <-importSem
            r.deps.Log.Error("runner: ClaimNextImportTask failed", "err", err)
            break
        }
        if !ok {
            <-importSem
            break
        }
        r.tasksRunning.Add(1)
        wg.Add(1)
        go func(t domain.ImportTask) {
            defer wg.Done()
            defer func() { <-importSem }()
            defer r.tasksRunning.Add(-1)
            r.runImportTask(ctx, t, t.ObjectKey)
        }(task)
    }
}

// newOrchestrator builds an orchestrator with the Meilisearch post-hook wired in.
|
||||
@@ -301,6 +472,14 @@ func (r *Runner) newOrchestrator() *orchestrator.Orchestrator {
|
||||
|
||||
// runScrapeTask executes one scrape task end-to-end and reports the result.
|
||||
func (r *Runner) runScrapeTask(ctx context.Context, task domain.ScrapeTask) {
|
||||
ctx, span := otel.Tracer("runner").Start(ctx, "runner.scrape_task")
|
||||
defer span.End()
|
||||
span.SetAttributes(
|
||||
attribute.String("task.id", task.ID),
|
||||
attribute.String("task.kind", task.Kind),
|
||||
attribute.String("task.url", task.TargetURL),
|
||||
)
|
||||
|
||||
log := r.deps.Log.With("task_id", task.ID, "kind", task.Kind, "url", task.TargetURL)
|
||||
log.Info("runner: scrape task starting")
|
||||
|
||||
@@ -334,14 +513,56 @@ func (r *Runner) runScrapeTask(ctx context.Context, task domain.ScrapeTask) {
|
||||
log.Warn("runner: unknown task kind")
|
||||
}
|
||||
|
||||
if err := r.deps.Consumer.FinishScrapeTask(ctx, task.ID, result); err != nil {
|
||||
// Use a fresh context for the final write so a cancelled task context doesn't
|
||||
// prevent the result counters from being persisted to PocketBase.
|
||||
finishCtx, finishCancel := context.WithTimeout(context.Background(), 15*time.Second)
|
||||
defer finishCancel()
|
||||
if err := r.deps.Consumer.FinishScrapeTask(finishCtx, task.ID, result); err != nil {
|
||||
log.Error("runner: FinishScrapeTask failed", "err", err)
|
||||
}
|
||||
|
||||
if result.ErrorMessage != "" {
|
||||
r.tasksFailed.Add(1)
|
||||
span.SetStatus(codes.Error, result.ErrorMessage)
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Scrape Failed",
|
||||
fmt.Sprintf("Scrape task (%s) failed: %s", task.Kind, result.ErrorMessage),
|
||||
"/admin/tasks")
|
||||
}
|
||||
} else {
|
||||
r.tasksCompleted.Add(1)
|
||||
span.SetStatus(codes.Ok, "")
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Scrape Complete",
|
||||
fmt.Sprintf("Scraped %d chapters, skipped %d (%s)", result.ChaptersScraped, result.ChaptersSkipped, task.Kind),
|
||||
"/admin/tasks")
|
||||
}
|
||||
// Fan-out in-app new-chapter notification to all users who have this book
|
||||
// in their library. Runs in background so it doesn't block the task loop.
|
||||
if r.deps.Store != nil && result.ChaptersScraped > 0 &&
|
||||
result.Slug != "" && task.Kind != "catalogue" {
|
||||
go func() {
|
||||
notifyCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
|
||||
defer cancel()
|
||||
title := result.Slug
|
||||
_ = r.deps.Store.NotifyUsersWithBook(notifyCtx, result.Slug,
|
||||
"New chapters available",
|
||||
fmt.Sprintf("%d new chapter(s) added to %s", result.ChaptersScraped, title),
|
||||
"/books/"+result.Slug)
|
||||
}()
|
||||
}
|
||||
// Send Web Push notifications to subscribed browsers.
|
||||
if r.deps.WebPush != nil && r.deps.Store != nil &&
|
||||
result.ChaptersScraped > 0 && result.Slug != "" && task.Kind != "catalogue" {
|
||||
go r.deps.WebPush.SendToBook(context.Background(), r.deps.Store, result.Slug, webpush.Payload{
|
||||
Title: "New chapter available",
|
||||
Body: fmt.Sprintf("%d new chapter(s) added", result.ChaptersScraped),
|
||||
URL: "/books/" + result.Slug,
|
||||
Icon: "/icon-192.png",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
log.Info("runner: scrape task finished",
|
||||
@@ -366,7 +587,7 @@ func (r *Runner) runCatalogueTask(ctx context.Context, task domain.ScrapeTask, o
|
||||
TargetURL: entry.URL,
|
||||
}
|
||||
bookResult := o.RunBook(ctx, bookTask)
|
||||
result.BooksFound += bookResult.BooksFound + 1
|
||||
result.BooksFound += bookResult.BooksFound
|
||||
        result.ChaptersScraped += bookResult.ChaptersScraped
        result.ChaptersSkipped += bookResult.ChaptersSkipped
        result.Errors += bookResult.Errors
@@ -384,6 +605,15 @@ func (r *Runner) runCatalogueTask(ctx context.Context, task domain.ScrapeTask, o

// runAudioTask executes one audio-generation task.
func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
    ctx, span := otel.Tracer("runner").Start(ctx, "runner.audio_task")
    defer span.End()
    span.SetAttributes(
        attribute.String("task.id", task.ID),
        attribute.String("book.slug", task.Slug),
        attribute.Int("chapter.number", task.Chapter),
        attribute.String("audio.voice", task.Voice),
    )

    log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "chapter", task.Chapter, "voice", task.Voice)
    log.Info("runner: audio task starting")

@@ -407,10 +637,17 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
    fail := func(msg string) {
        log.Error("runner: audio task failed", "reason", msg)
        r.tasksFailed.Add(1)
        span.SetStatus(codes.Error, msg)
        result := domain.AudioResult{ErrorMessage: msg}
        if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
            log.Error("runner: FinishAudioTask failed", "err", err)
        }
        if r.deps.Notifier != nil {
            _ = r.deps.Notifier.CreateNotification(ctx, "admin",
                "Audio Failed",
                fmt.Sprintf("Ch.%d of %s (%s): %s", task.Chapter, task.Slug, task.Voice, msg),
                fmt.Sprintf("/books/%s", task.Slug))
        }
    }

    raw, err := r.deps.BookReader.ReadChapter(ctx, task.Slug, task.Chapter)
@@ -424,14 +661,43 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
        return
    }

    if r.deps.Kokoro == nil {
        fail("kokoro client not configured")
        return
    }
    audioData, err := r.deps.Kokoro.GenerateAudio(ctx, text, task.Voice)
    if err != nil {
        fail(fmt.Sprintf("kokoro generate: %v", err))
        return
    var audioData []byte
    if pockettts.IsPocketTTSVoice(task.Voice) {
        if r.deps.PocketTTS == nil {
            fail("pocket-tts client not configured (POCKET_TTS_URL is empty)")
            return
        }
        var genErr error
        audioData, genErr = r.deps.PocketTTS.GenerateAudio(ctx, text, task.Voice)
        if genErr != nil {
            fail(fmt.Sprintf("pocket-tts generate: %v", genErr))
            return
        }
        log.Info("runner: audio generated via pocket-tts", "voice", task.Voice)
    } else if cfai.IsCFAIVoice(task.Voice) {
        if r.deps.CFAI == nil {
            fail("cloudflare AI client not configured (CFAI_ACCOUNT_ID/CFAI_API_TOKEN empty)")
            return
        }
        var genErr error
        audioData, genErr = r.deps.CFAI.GenerateAudio(ctx, text, task.Voice)
        if genErr != nil {
            fail(fmt.Sprintf("cfai generate: %v", genErr))
            return
        }
        log.Info("runner: audio generated via cloudflare AI", "voice", task.Voice)
    } else {
        if r.deps.Kokoro == nil {
            fail("kokoro client not configured (KOKORO_URL is empty)")
            return
        }
        var genErr error
        audioData, genErr = kokoroGenerateChunked(ctx, r.deps.Kokoro, text, task.Voice, log)
        if genErr != nil {
            fail(fmt.Sprintf("kokoro generate: %v", genErr))
            return
        }
        log.Info("runner: audio generated via kokoro-fastapi", "voice", task.Voice)
    }

    key := r.deps.AudioStore.AudioObjectKey(task.Slug, task.Chapter, task.Voice)
@@ -441,9 +707,178 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
    }

    r.tasksCompleted.Add(1)
    span.SetStatus(codes.Ok, "")
    result := domain.AudioResult{ObjectKey: key}
    if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
        log.Error("runner: FinishAudioTask failed", "err", err)
    }
    if r.deps.Notifier != nil {
        _ = r.deps.Notifier.CreateNotification(ctx, "admin",
            "Audio Ready",
            fmt.Sprintf("Ch.%d of %s (%s) is ready", task.Chapter, task.Slug, task.Voice),
            fmt.Sprintf("/books/%s", task.Slug))
    }
    log.Info("runner: audio task finished", "key", key)
}
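
pockettts.IsPocketTTSVoice and cfai.IsCFAIVoice, used in the routing above, are not shown in this excerpt. A sketch of the shape such a predicate could take, assuming prefix-based routing (the prefix and the mechanism are assumptions, not the repository's actual code):

// IsPocketTTSVoice reports whether voice should be routed to pocket-tts.
// Sketch only: assumes pocket-tts voice IDs share a distinguishing prefix;
// the real predicate in package pockettts may instead consult a voice list.
func IsPocketTTSVoice(voice string) bool {
    return strings.HasPrefix(voice, "pocket_") // "pocket_" is a hypothetical prefix
}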

// kokoroGenerateChunked splits text into ~1000-character sentence-boundary
// chunks, calls Kokoro.GenerateAudio for each, and concatenates the raw MP3
// bytes. This avoids EOF / timeout failures that occur when the Kokoro
// FastAPI server receives very large inputs (e.g. a full imported PDF chapter).
//
// Concatenating raw MP3 frames is valid — MP3 is a frame-based format and
// standard players handle multi-segment files correctly.
func kokoroGenerateChunked(ctx context.Context, k kokoro.Client, text, voice string, log *slog.Logger) ([]byte, error) {
    const chunkSize = 1000

    chunks := chunkText(text, chunkSize)
    log.Info("runner: kokoro chunked generation", "chunks", len(chunks), "total_chars", len(text))

    var combined []byte
    for i, chunk := range chunks {
        data, err := k.GenerateAudio(ctx, chunk, voice)
        if err != nil {
            return nil, fmt.Errorf("chunk %d/%d: %w", i+1, len(chunks), err)
        }
        combined = append(combined, data...)
        log.Info("runner: kokoro chunk done", "chunk", i+1, "of", len(chunks), "bytes", len(data))
    }
    return combined, nil
}
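
chunkText is called above but does not appear in this diff. A minimal sketch of a sentence-boundary splitter consistent with that call site (the function name matches the call; the exact splitting rule is an assumption):

// chunkText splits text into chunks of at most max bytes, preferring to
// break after sentence-ending ". " boundaries. Sketch only: the repository's
// actual helper may differ.
func chunkText(text string, max int) []string {
    var chunks []string
    var cur strings.Builder
    flush := func() {
        if cur.Len() > 0 {
            chunks = append(chunks, cur.String())
            cur.Reset()
        }
    }
    for _, sentence := range strings.SplitAfter(text, ". ") {
        // Hard-split any single sentence longer than max.
        for len(sentence) > max {
            flush()
            chunks = append(chunks, sentence[:max])
            sentence = sentence[max:]
        }
        if cur.Len()+len(sentence) > max {
            flush()
        }
        cur.WriteString(sentence)
    }
    flush()
    return chunks
}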

// runImportTask executes one PDF/EPUB import task.
// Preferred path: when task.ChaptersKey is set, it reads pre-parsed chapters
// JSON from MinIO (written by the backend at upload time) and ingests them.
// Fallback path: when ChaptersKey is empty, calls BookImport.Import() to
// parse the raw file on the runner (legacy behaviour, not used for new tasks).
func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, objectKey string) {
    ctx, span := otel.Tracer("runner").Start(ctx, "runner.import_task")
    defer span.End()
    span.SetAttributes(
        attribute.String("task.id", task.ID),
        attribute.String("book.slug", task.Slug),
        attribute.String("file.type", task.FileType),
        attribute.String("chapters_key", task.ChaptersKey),
    )

    log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "file_type", task.FileType)
    log.Info("runner: import task starting", "chapters_key", task.ChaptersKey)

    hbCtx, hbCancel := context.WithCancel(ctx)
    defer hbCancel()
    go func() {
        tick := time.NewTicker(r.cfg.HeartbeatInterval)
        defer tick.Stop()
        for {
            select {
            case <-hbCtx.Done():
                return
            case <-tick.C:
                if err := r.deps.Consumer.HeartbeatTask(ctx, task.ID); err != nil {
                    log.Warn("runner: heartbeat failed", "err", err)
                }
            }
        }
    }()

    fail := func(msg string) {
        log.Error("runner: import task failed", "reason", msg)
        r.tasksFailed.Add(1)
        span.SetStatus(codes.Error, msg)
        result := domain.ImportResult{ErrorMessage: msg}
        if err := r.deps.Consumer.FinishImportTask(ctx, task.ID, result); err != nil {
            log.Error("runner: FinishImportTask failed", "err", err)
        }
    }

    var chapters []bookstore.Chapter

    if task.ChaptersKey != "" && r.deps.ImportChapterStore != nil {
        // New path: read pre-parsed chapters JSON uploaded by the backend.
        raw, err := r.deps.ImportChapterStore.GetImportChapters(ctx, task.ChaptersKey)
        if err != nil {
            fail(fmt.Sprintf("get chapters JSON: %v", err))
            return
        }
        if err := json.Unmarshal(raw, &chapters); err != nil {
            fail(fmt.Sprintf("unmarshal chapters JSON: %v", err))
            return
        }
        log.Info("runner: loaded pre-parsed chapters", "count", len(chapters))
    } else {
        // Legacy path: parse the raw file on the runner.
        if r.deps.BookImport == nil {
            fail("book import not configured (BookImport dependency missing)")
            return
        }
        var err error
        chapters, err = r.deps.BookImport.Import(ctx, objectKey, task.FileType)
        if err != nil {
            fail(fmt.Sprintf("import file: %v", err))
            return
        }
        log.Info("runner: parsed chapters from file (legacy path)", "count", len(chapters))
    }

    if len(chapters) == 0 {
        fail("no chapters extracted from file")
        return
    }

    // Persist chapters via ChapterIngester.
    if r.deps.ChapterIngester == nil {
        fail("chapter ingester not configured")
        return
    }
    if err := r.deps.ChapterIngester.IngestChapters(ctx, task.Slug, chapters); err != nil {
        fail(fmt.Sprintf("store chapters: %v", err))
        return
    }

    // Write book metadata so the book appears in the PocketBase catalogue.
    if r.deps.BookWriter != nil {
        meta := domain.BookMeta{
            Slug:          task.Slug,
            Title:         task.Title,
            Author:        task.Author,
            Cover:         task.CoverURL,
            Status:        task.BookStatus,
            Genres:        task.Genres,
            Summary:       task.Summary,
            TotalChapters: len(chapters),
        }
        if meta.Status == "" {
            meta.Status = "completed"
        }
        if err := r.deps.BookWriter.WriteMetadata(ctx, meta); err != nil {
            log.Warn("runner: import task WriteMetadata failed (non-fatal)", "err", err)
        } else {
            // Index in Meilisearch so the book is searchable.
            if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
                log.Warn("runner: import task meilisearch upsert failed (non-fatal)", "err", err)
            }
        }
    }

    r.tasksCompleted.Add(1)
    span.SetStatus(codes.Ok, "")
    result := domain.ImportResult{
        Slug:             task.Slug,
        ChaptersImported: len(chapters),
    }
    if err := r.deps.Consumer.FinishImportTask(ctx, task.ID, result); err != nil {
        log.Error("runner: FinishImportTask failed", "err", err)
    }

    // Notify the user who initiated the import.
    if r.deps.Notifier != nil {
        msg := fmt.Sprintf("Import completed: %d chapters from %s", len(chapters), task.Title)
        targetUser := task.InitiatorUserID
        if targetUser == "" {
            targetUser = "admin"
        }
        _ = r.deps.Notifier.CreateNotification(ctx, targetUser, "Import Complete", msg, "/admin/import")
    }

    log.Info("runner: import task finished", "chapters", len(chapters))
}

@@ -1,8 +1,10 @@
package runner_test

import (
    "bytes"
    "context"
    "errors"
    "io"
    "sync/atomic"
    "testing"
    "time"
@@ -48,6 +50,14 @@ func (s *stubConsumer) ClaimNextAudioTask(_ context.Context, _ string) (domain.A
    return t, true, nil
}

func (s *stubConsumer) ClaimNextTranslationTask(_ context.Context, _ string) (domain.TranslationTask, bool, error) {
    return domain.TranslationTask{}, false, nil
}

func (s *stubConsumer) ClaimNextImportTask(_ context.Context, _ string) (domain.ImportTask, bool, error) {
    return domain.ImportTask{}, false, nil
}

func (s *stubConsumer) FinishScrapeTask(_ context.Context, id string, _ domain.ScrapeResult) error {
    s.finished = append(s.finished, id)
    return nil
@@ -58,6 +68,16 @@ func (s *stubConsumer) FinishAudioTask(_ context.Context, id string, _ domain.Au
    return nil
}

func (s *stubConsumer) FinishTranslationTask(_ context.Context, id string, _ domain.TranslationResult) error {
    s.finished = append(s.finished, id)
    return nil
}

func (s *stubConsumer) FinishImportTask(_ context.Context, id string, _ domain.ImportResult) error {
    s.finished = append(s.finished, id)
    return nil
}

func (s *stubConsumer) FailTask(_ context.Context, id, _ string) error {
    s.failCalled = append(s.failCalled, id)
    return nil
@@ -83,6 +103,10 @@ func (s *stubBookWriter) ChapterExists(_ context.Context, _ string, _ domain.Cha
    return false
}

func (s *stubBookWriter) DeduplicateChapters(_ context.Context, _ string) (int, error) {
    return 0, nil
}

// stubBookReader satisfies bookstore.BookReader — returns a single chapter.
type stubBookReader struct {
    text string
@@ -115,11 +139,18 @@ type stubAudioStore struct {
func (s *stubAudioStore) AudioObjectKey(slug string, n int, voice string) string {
    return slug + "/" + string(rune('0'+n)) + "/" + voice + ".mp3"
}
func (s *stubAudioStore) AudioObjectKeyExt(slug string, n int, voice, ext string) string {
    return slug + "/" + string(rune('0'+n)) + "/" + voice + "." + ext
}
func (s *stubAudioStore) AudioExists(_ context.Context, _ string) bool { return false }
func (s *stubAudioStore) PutAudio(_ context.Context, _ string, _ []byte) error {
    s.putCalled.Add(1)
    return s.putErr
}
func (s *stubAudioStore) PutAudioStream(_ context.Context, _ string, _ io.Reader, _ int64, _ string) error {
    s.putCalled.Add(1)
    return s.putErr
}

// stubNovelScraper satisfies scraper.NovelScraper minimally.
type stubNovelScraper struct {
@@ -176,6 +207,22 @@ func (s *stubKokoro) GenerateAudio(_ context.Context, _, _ string) ([]byte, erro
    return s.data, s.genErr
}

func (s *stubKokoro) StreamAudioMP3(_ context.Context, _, _ string) (io.ReadCloser, error) {
    s.called.Add(1)
    if s.genErr != nil {
        return nil, s.genErr
    }
    return io.NopCloser(bytes.NewReader(s.data)), nil
}

func (s *stubKokoro) StreamAudioWAV(_ context.Context, _, _ string) (io.ReadCloser, error) {
    s.called.Add(1)
    if s.genErr != nil {
        return nil, s.genErr
    }
    return io.NopCloser(bytes.NewReader(s.data)), nil
}

func (s *stubKokoro) ListVoices(_ context.Context) ([]string, error) {
    return []string{"af_bella"}, nil
}
backend/internal/runner/translation.go (new file, 109 lines)
@@ -0,0 +1,109 @@
package runner

import (
    "context"
    "fmt"
    "time"

    "go.opentelemetry.io/otel"
    "go.opentelemetry.io/otel/attribute"
    "go.opentelemetry.io/otel/codes"

    "github.com/libnovel/backend/internal/domain"
)

// runTranslationTask executes one machine-translation task end-to-end and
// reports the result back to PocketBase.
func (r *Runner) runTranslationTask(ctx context.Context, task domain.TranslationTask) {
    ctx, span := otel.Tracer("runner").Start(ctx, "runner.translation_task")
    defer span.End()
    span.SetAttributes(
        attribute.String("task.id", task.ID),
        attribute.String("book.slug", task.Slug),
        attribute.Int("chapter.number", task.Chapter),
        attribute.String("translation.lang", task.Lang),
    )

    log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "chapter", task.Chapter, "lang", task.Lang)
    log.Info("runner: translation task starting")

    // Heartbeat goroutine — keeps the task alive while translation runs.
    hbCtx, hbCancel := context.WithCancel(ctx)
    defer hbCancel()
    go func() {
        tick := time.NewTicker(r.cfg.HeartbeatInterval)
        defer tick.Stop()
        for {
            select {
            case <-hbCtx.Done():
                return
            case <-tick.C:
                if err := r.deps.Consumer.HeartbeatTask(ctx, task.ID); err != nil {
                    log.Warn("runner: heartbeat failed", "err", err)
                }
            }
        }
    }()

    fail := func(msg string) {
        log.Error("runner: translation task failed", "reason", msg)
        r.tasksFailed.Add(1)
        span.SetStatus(codes.Error, msg)
        result := domain.TranslationResult{ErrorMessage: msg}
        if err := r.deps.Consumer.FinishTranslationTask(ctx, task.ID, result); err != nil {
            log.Error("runner: FinishTranslationTask failed", "err", err)
        }
        if r.deps.Notifier != nil {
            _ = r.deps.Notifier.CreateNotification(ctx, "admin",
                "Translation Failed",
                fmt.Sprintf("Ch.%d of %s (%s): %s", task.Chapter, task.Slug, task.Lang, msg),
                fmt.Sprintf("/books/%s", task.Slug))
        }
    }

    // Guard: LibreTranslate must be configured.
    if r.deps.LibreTranslate == nil {
        fail("libretranslate client not configured (LIBRETRANSLATE_URL is empty)")
        return
    }

    // 1. Read raw markdown chapter.
    raw, err := r.deps.BookReader.ReadChapter(ctx, task.Slug, task.Chapter)
    if err != nil {
        fail(fmt.Sprintf("read chapter: %v", err))
        return
    }
    if raw == "" {
        fail("chapter text is empty")
        return
    }

    // 2. Translate (chunked, concurrent).
    translated, err := r.deps.LibreTranslate.Translate(ctx, raw, "en", task.Lang)
    if err != nil {
        fail(fmt.Sprintf("translate: %v", err))
        return
    }

    // 3. Store translated markdown in MinIO.
    key := r.deps.TranslationStore.TranslationObjectKey(task.Lang, task.Slug, task.Chapter)
    if err := r.deps.TranslationStore.PutTranslation(ctx, key, []byte(translated)); err != nil {
        fail(fmt.Sprintf("put translation: %v", err))
        return
    }

    // 4. Report success.
    r.tasksCompleted.Add(1)
    span.SetStatus(codes.Ok, "")
    result := domain.TranslationResult{ObjectKey: key}
    if err := r.deps.Consumer.FinishTranslationTask(ctx, task.ID, result); err != nil {
        log.Error("runner: FinishTranslationTask failed", "err", err)
    }
    if r.deps.Notifier != nil {
        _ = r.deps.Notifier.CreateNotification(ctx, "admin",
            "Translation Ready",
            fmt.Sprintf("Ch.%d of %s translated to %s", task.Chapter, task.Slug, task.Lang),
            fmt.Sprintf("/books/%s", task.Slug))
    }
    log.Info("runner: translation task finished", "key", key)
}

backend/internal/storage/import.go (new file, 857 lines)
@@ -0,0 +1,857 @@
package storage

import (
    "archive/zip"
    "bytes"
    "context"
    "fmt"
    "io"
    "os"
    "sort"
    "strconv"
    "strings"

    "github.com/libnovel/backend/internal/bookstore"
    "github.com/libnovel/backend/internal/domain"
    minio "github.com/minio/minio-go/v7"
    "github.com/pdfcpu/pdfcpu/pkg/api"
    "github.com/pdfcpu/pdfcpu/pkg/pdfcpu/model"
    "golang.org/x/net/html"
)

type importer struct {
    mc *minioClient
}

// NewBookImporter creates a BookImporter that reads files from MinIO.
func NewBookImporter(s *Store) bookstore.BookImporter {
    return &importer{mc: s.mc}
}

func (i *importer) Import(ctx context.Context, objectKey, fileType string) ([]bookstore.Chapter, error) {
    if fileType != "pdf" && fileType != "epub" {
        return nil, fmt.Errorf("unsupported file type: %s", fileType)
    }

    obj, err := i.mc.client.GetObject(ctx, "imports", objectKey, minio.GetObjectOptions{})
    if err != nil {
        return nil, fmt.Errorf("get object from minio: %w", err)
    }
    defer obj.Close()

    data, err := io.ReadAll(obj)
    if err != nil {
        return nil, fmt.Errorf("read object: %w", err)
    }

    if fileType == "pdf" {
        return parsePDF(data)
    }
    return parseEPUB(data)
}

// AnalyzeFile parses the given PDF or EPUB data and returns the detected
// chapter count and up to 3 preview lines (first non-empty line of each of
// the first 3 chapters). It is used by the analyze-only endpoint so users
// can preview chapter count before committing the import.
// Note: uses parsePDF which is backed by pdfcpu ExtractContent — fast, no hang risk.
func AnalyzeFile(data []byte, fileType string) (chapterCount int, firstLines []string, err error) {
    var chapters []bookstore.Chapter
    switch fileType {
    case "pdf":
        chapters, err = parsePDF(data)
    case "epub":
        chapters, err = parseEPUB(data)
    default:
        return 0, nil, fmt.Errorf("unsupported file type: %s", fileType)
    }
    if err != nil {
        return 0, nil, err
    }
    chapterCount = len(chapters)
    for i, ch := range chapters {
        if i >= 3 {
            break
        }
        line := strings.TrimSpace(ch.Content)
        if nl := strings.Index(line, "\n"); nl > 0 {
            line = line[:nl]
        }
        if len(line) > 120 {
            line = line[:120] + "…"
        }
        firstLines = append(firstLines, line)
    }
    return chapterCount, firstLines, nil
}

// decryptPDF strips encryption from a PDF using an empty user password.
// Returns the decrypted bytes, or an error if decryption is not possible.
// This handles the common case of "owner-only" encrypted PDFs (copy/print
// restrictions) which use an empty user password and open normally in readers.
func decryptPDF(data []byte) ([]byte, error) {
    conf := model.NewDefaultConfiguration()
    conf.UserPW = ""
    conf.OwnerPW = ""

    var out bytes.Buffer
    err := api.Decrypt(bytes.NewReader(data), &out, conf)
    if err != nil {
        return nil, err
    }
    return out.Bytes(), nil
}

// ParseImportFile parses a PDF or EPUB and returns chapters.
// Unlike AnalyzeFile it respects ctx cancellation so callers can apply a timeout.
// For PDFs it first attempts to strip encryption with an empty password.
func ParseImportFile(ctx context.Context, data []byte, fileType string) ([]bookstore.Chapter, error) {
    type result struct {
        chapters []bookstore.Chapter
        err      error
    }
    ch := make(chan result, 1)
    go func() {
        var chapters []bookstore.Chapter
        var err error
        switch fileType {
        case "pdf":
            chapters, err = parsePDF(data)
        case "epub":
            chapters, err = parseEPUB(data)
        default:
            err = fmt.Errorf("unsupported file type: %s", fileType)
        }
        ch <- result{chapters, err}
    }()
    select {
    case <-ctx.Done():
        return nil, fmt.Errorf("parse timed out: %w", ctx.Err())
    case r := <-ch:
        return r.chapters, r.err
    }
}
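
A usage sketch: bounding the parse with a caller-side timeout (the function name and the 30-second value are illustrative, and a "time" import is assumed):

func parseWithTimeout(data []byte, fileType string) ([]bookstore.Chapter, error) {
    // ParseImportFile returns ctx.Err() wrapped if the deadline expires;
    // the buffered result channel lets the worker goroutine finish and be GC'd.
    ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
    defer cancel()
    return ParseImportFile(ctx, data, fileType)
}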

// pdfSkipBookmarks lists bookmark titles that are front/back matter, not story chapters.
// These are skipped when building the chapter list.
var pdfSkipBookmarks = map[string]bool{
    "cover": true, "insert": true, "title page": true, "copyright": true,
    "appendix": true, "color insert": true, "color illustrations": true,
}

// parsePDF extracts text from PDF bytes and returns it as a single chapter.
//
// The full readable text is returned as one chapter so the admin can manually
// split it into chapters via the UI using --- markers.
//
// Strategy:
//  1. Decrypt owner-protected PDFs (empty user password).
//  2. Extract raw content streams for every page using pdfcpu ExtractContent.
//  3. Concatenate text from all pages in order, skipping front matter
//     (cover, title page, copyright — typically the first 10 pages).
func parsePDF(data []byte) ([]bookstore.Chapter, error) {
    // Decrypt owner-protected PDFs (empty user password).
    decrypted, err := decryptPDF(data)
    if err == nil {
        data = decrypted
    }

    conf := model.NewDefaultConfiguration()
    conf.UserPW = ""
    conf.OwnerPW = ""

    // Extract all page content streams to a temp directory.
    tmpDir, err := os.MkdirTemp("", "pdf-extract-*")
    if err != nil {
        return nil, fmt.Errorf("create temp dir: %w", err)
    }
    defer os.RemoveAll(tmpDir)

    if err := api.ExtractContent(bytes.NewReader(data), tmpDir, "out", nil, conf); err != nil {
        return nil, fmt.Errorf("extract PDF content: %w", err)
    }

    entries, err := os.ReadDir(tmpDir)
    if err != nil || len(entries) == 0 {
        return nil, fmt.Errorf("PDF has no content pages")
    }

    // Parse page number from filename and build ordered text map.
    pageTexts := make(map[int]string, len(entries))
    maxPage := 0
    for _, e := range entries {
        pageNum := pageNumFromFilename(e.Name())
        if pageNum <= 0 {
            continue
        }
        raw, readErr := os.ReadFile(tmpDir + "/" + e.Name())
        if readErr != nil {
            continue
        }
        pageTexts[pageNum] = fixWin1252(extractTextFromContentStream(raw))
        if pageNum > maxPage {
            maxPage = pageNum
        }
    }

    // Determine front-matter cutoff using bookmarks if available,
    // otherwise skip the first 10 pages (cover/title/copyright).
    bodyStart := 1
    bookmarks, bmErr := api.Bookmarks(bytes.NewReader(data), conf)
    if bmErr == nil {
        for _, bm := range bookmarks {
            title := strings.ToLower(strings.TrimSpace(bm.Title))
            if !pdfSkipBookmarks[title] && bm.PageFrom > 0 {
                // First non-front-matter bookmark — body starts here.
                bodyStart = bm.PageFrom
                break
            }
        }
    } else if maxPage > 10 {
        bodyStart = 11
    }

    // Concatenate all body pages.
    var sb strings.Builder
    for p := bodyStart; p <= maxPage; p++ {
        t := strings.TrimSpace(pageTexts[p])
        if t == "" {
            continue
        }
        sb.WriteString(t)
        sb.WriteString("\n\n")
    }

    text := strings.TrimSpace(sb.String())
    if text == "" {
        return nil, fmt.Errorf("could not extract any text from PDF")
    }

    return []bookstore.Chapter{{
        Number:  1,
        Title:   "Full Text",
        Content: text,
    }}, nil
}

// pageNumFromFilename extracts the page number from a pdfcpu content-stream
// filename like "out_Content_page_42.txt". Returns 0 if not parseable.
func pageNumFromFilename(name string) int {
    // Strip directory prefix and extension.
    base := name
    if idx := strings.LastIndex(base, "/"); idx >= 0 {
        base = base[idx+1:]
    }
    if idx := strings.LastIndex(base, "."); idx >= 0 {
        base = base[:idx]
    }
    // Find last "_" and parse the number after it.
    if idx := strings.LastIndex(base, "_"); idx >= 0 {
        n, err := strconv.Atoi(base[idx+1:])
        if err == nil && n > 0 {
            return n
        }
    }
    return 0
}
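
For instance, per the parsing rules above:

// pageNumFromFilename("out_Content_page_42.txt") == 42
// pageNumFromFilename("readme.txt")              == 0  (no "_"-delimited numeric suffix)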

// win1252ToUnicode maps the Windows-1252 control range 0x80–0x9F to the
// Unicode characters they actually represent in that encoding.
// Standard Latin-1 maps these bytes to control characters; Win-1252 maps
// them to typographic symbols that appear in publisher PDFs.
var win1252ToUnicode = map[byte]rune{
    0x80: '\u20AC', // €
    0x82: '\u201A', // ‚
    0x83: '\u0192', // ƒ
    0x84: '\u201E', // „
    0x85: '\u2026', // …
    0x86: '\u2020', // †
    0x87: '\u2021', // ‡
    0x88: '\u02C6', // ˆ
    0x89: '\u2030', // ‰
    0x8A: '\u0160', // Š
    0x8B: '\u2039', // ‹
    0x8C: '\u0152', // Œ
    0x8E: '\u017D', // Ž
    0x91: '\u2018', // ‘ (left single quotation mark)
    0x92: '\u2019', // ’ (right single quotation mark / apostrophe)
    0x93: '\u201C', // “ (left double quotation mark)
    0x94: '\u201D', // ” (right double quotation mark)
    0x95: '\u2022', // • (bullet)
    0x96: '\u2013', // – (en dash)
    0x97: '\u2014', // — (em dash)
    0x98: '\u02DC', // ˜
    0x99: '\u2122', // ™
    0x9A: '\u0161', // š
    0x9B: '\u203A', // ›
    0x9C: '\u0153', // œ
    0x9E: '\u017E', // ž
    0x9F: '\u0178', // Ÿ
}

// fixWin1252 replaces Windows-1252 specific bytes (0x80–0x9F) in a string
// that was decoded as raw Latin-1 bytes with their proper Unicode equivalents.
func fixWin1252(s string) string {
    // Fast path: if no bytes in 0x80–0x9F range, return unchanged.
    needsFix := false
    for i := 0; i < len(s); i++ {
        b := s[i]
        if b >= 0x80 && b <= 0x9F {
            needsFix = true
            break
        }
    }
    if !needsFix {
        return s
    }
    var sb strings.Builder
    sb.Grow(len(s))
    for i := 0; i < len(s); i++ {
        b := s[i]
        if b >= 0x80 && b <= 0x9F {
            if r, ok := win1252ToUnicode[b]; ok {
                sb.WriteRune(r)
                continue
            }
        }
        sb.WriteByte(b)
    }
    return sb.String()
}
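
A worked example, using the byte values from the table above:

// Input decoded as raw Latin-1 bytes: 0x93 "Hello" 0x94 (Win-1252 smart quotes)
// fixWin1252("\x93Hello\x94") == "\u201CHello\u201D" // “Hello”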

// extractTextFromContentStream parses a raw PDF content stream and extracts
// readable text from Tj and TJ operators.
//
// TJ arrays may contain a mix of literal strings (parenthesised) and hex glyph
// arrays. Only the literal strings are decoded — hex arrays require per-font
// ToUnicode CMaps and are skipped. Kerning adjustment numbers inside TJ arrays
// are also ignored (they're just spacing hints).
//
// Line breaks are inserted on ET / Td / TD / T* operators.
func extractTextFromContentStream(stream []byte) string {
    s := string(stream)
    var sb strings.Builder
    i := 0
    n := len(s)
    for i < n {
        // TJ array: [ ... ]TJ — collect all literal strings, skip hex & numbers.
        if s[i] == '[' {
            j := i + 1
            for j < n && s[j] != ']' {
                if s[j] == '(' {
                    // Literal string inside TJ array.
                    k := j + 1
                    depth := 1
                    for k < n && depth > 0 {
                        if s[k] == '\\' {
                            k += 2
                            continue
                        }
                        if s[k] == '(' {
                            depth++
                        } else if s[k] == ')' {
                            depth--
                        }
                        k++
                    }
                    lit := pdfUnescapeString(s[j+1 : k-1])
                    if hasPrintableASCII(lit) {
                        sb.WriteString(lit)
                    }
                    j = k
                    continue
                }
                j++
            }
            // Check if this is a TJ operator (skip whitespace after ']').
            end := j + 1
            for end < n && (s[end] == ' ' || s[end] == '\t' || s[end] == '\r' || s[end] == '\n') {
                end++
            }
            if end+2 <= n && s[end:end+2] == "TJ" && (end+2 == n || !isAlphaNum(s[end+2])) {
                i = end + 2
                continue
            }
            i = j + 1
            continue
        }
        // Single string: (string) Tj
        if s[i] == '(' {
            j := i + 1
            depth := 1
            for j < n && depth > 0 {
                if s[j] == '\\' {
                    j += 2
                    continue
                }
                if s[j] == '(' {
                    depth++
                } else if s[j] == ')' {
                    depth--
                }
                j++
            }
            lit := pdfUnescapeString(s[i+1 : j-1])
            if hasPrintableASCII(lit) {
                // Check for Tj operator.
                end := j
                for end < n && (s[end] == ' ' || s[end] == '\t') {
                    end++
                }
                if end+2 <= n && s[end:end+2] == "Tj" && (end+2 == n || !isAlphaNum(s[end+2])) {
                    sb.WriteString(lit)
                    i = end + 2
                    continue
                }
            }
            i = j
            continue
        }
        // Detect end of text object (ET) — add a newline.
        if i+2 <= n && s[i:i+2] == "ET" && (i+2 == n || !isAlphaNum(s[i+2])) {
            sb.WriteByte('\n')
            i += 2
            continue
        }
        // Detect Td / TD / T* — newline within text block.
        if i+2 <= n && (s[i:i+2] == "Td" || s[i:i+2] == "TD" || s[i:i+2] == "T*") &&
            (i+2 == n || !isAlphaNum(s[i+2])) {
            sb.WriteByte('\n')
            i += 2
            continue
        }
        i++
    }
    return sb.String()
}

func isAlphaNum(b byte) bool {
    return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') || b == '_'
}

func hasPrintableASCII(s string) bool {
    for _, c := range s {
        if c >= 0x20 && c < 0x7F {
            return true
        }
    }
    return false
}

// pdfUnescapeString handles PDF string escape sequences.
func pdfUnescapeString(s string) string {
    if !strings.ContainsRune(s, '\\') {
        return s
    }
    var sb strings.Builder
    i := 0
    for i < len(s) {
        if s[i] == '\\' && i+1 < len(s) {
            switch s[i+1] {
            case 'n':
                sb.WriteByte('\n')
            case 'r':
                sb.WriteByte('\r')
            case 't':
                sb.WriteByte('\t')
            case '(', ')', '\\':
                sb.WriteByte(s[i+1])
            default:
                // Octal escape \ddd
                if s[i+1] >= '0' && s[i+1] <= '7' {
                    end := i + 2
                    for end < i+5 && end < len(s) && s[end] >= '0' && s[end] <= '7' {
                        end++
                    }
                    val, _ := strconv.ParseInt(s[i+1:end], 8, 16)
                    sb.WriteByte(byte(val))
                    i = end
                    continue
                }
                sb.WriteByte(s[i+1])
            }
            i += 2
        } else {
            sb.WriteByte(s[i])
            i++
        }
    }
    return sb.String()
}

// ── EPUB parsing ──────────────────────────────────────────────────────────────

func parseEPUB(data []byte) ([]bookstore.Chapter, error) {
    zr, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
    if err != nil {
        return nil, fmt.Errorf("open EPUB zip: %w", err)
    }

    // 1. Read META-INF/container.xml → find rootfile (content.opf path).
    opfPath, err := epubRootfilePath(zr)
    if err != nil {
        return nil, fmt.Errorf("epub container: %w", err)
    }

    // 2. Parse content.opf → spine order of chapter files.
    spineFiles, titleMap, err := epubSpine(zr, opfPath)
    if err != nil {
        return nil, fmt.Errorf("epub spine: %w", err)
    }

    if len(spineFiles) == 0 {
        return nil, fmt.Errorf("EPUB spine is empty")
    }

    // Base directory of the OPF file for resolving relative hrefs.
    opfDir := ""
    if idx := strings.LastIndex(opfPath, "/"); idx >= 0 {
        opfDir = opfPath[:idx+1]
    }

    var chapters []bookstore.Chapter
    chNum := 0
    for i, href := range spineFiles {
        fullPath := opfDir + href
        content, err := epubFileContent(zr, fullPath)
        if err != nil {
            continue
        }
        text := htmlToText(content)
        if strings.TrimSpace(text) == "" {
            continue
        }
        chNum++
        title := titleMap[href]
        if title == "" {
            title = fmt.Sprintf("Chapter %d", chNum)
        }
        _ = i // spine index unused for numbering
        chapters = append(chapters, bookstore.Chapter{
            Number:  chNum,
            Title:   title,
            Content: text,
        })
    }

    if len(chapters) == 0 {
        return nil, fmt.Errorf("no readable chapters found in EPUB")
    }
    return chapters, nil
}

// epubRootfilePath parses META-INF/container.xml and returns the full-path
// of the OPF package document.
func epubRootfilePath(zr *zip.Reader) (string, error) {
    f := zipFile(zr, "META-INF/container.xml")
    if f == nil {
        return "", fmt.Errorf("META-INF/container.xml not found")
    }
    rc, err := f.Open()
    if err != nil {
        return "", err
    }
    defer rc.Close()

    doc, err := html.Parse(rc)
    if err != nil {
        return "", err
    }

    var path string
    var walk func(*html.Node)
    walk = func(n *html.Node) {
        if n.Type == html.ElementNode && strings.EqualFold(n.Data, "rootfile") {
            for _, a := range n.Attr {
                if strings.EqualFold(a.Key, "full-path") {
                    path = a.Val
                    return
                }
            }
        }
        for c := n.FirstChild; c != nil; c = c.NextSibling {
            walk(c)
        }
    }
    walk(doc)

    if path == "" {
        return "", fmt.Errorf("rootfile full-path not found in container.xml")
    }
    return path, nil
}

// epubSpine parses the OPF document and returns the spine item hrefs in order,
// plus a map from href → nav title (if available from NCX/NAV).
func epubSpine(zr *zip.Reader, opfPath string) ([]string, map[string]string, error) {
    f := zipFile(zr, opfPath)
    if f == nil {
        return nil, nil, fmt.Errorf("OPF file %q not found in EPUB", opfPath)
    }
    rc, err := f.Open()
    if err != nil {
        return nil, nil, err
    }
    defer rc.Close()

    opfData, err := io.ReadAll(rc)
    if err != nil {
        return nil, nil, err
    }

    // Build id→href map from <manifest>.
    idToHref := make(map[string]string)
    // Also keep a href→navTitle map (populated from NCX later).
    hrefTitle := make(map[string]string)

    // Parse OPF XML with html.Parse (handles malformed XML too).
    doc, _ := html.Parse(bytes.NewReader(opfData))

    var manifestItems []struct{ id, href, mediaType string }
    var spineIdrefs []string
    var ncxID string

    var walk func(*html.Node)
    walk = func(n *html.Node) {
        if n.Type == html.ElementNode {
            tag := strings.ToLower(n.Data)
            switch tag {
            case "item":
                var id, href, mt string
                for _, a := range n.Attr {
                    switch strings.ToLower(a.Key) {
                    case "id":
                        id = a.Val
                    case "href":
                        href = a.Val
                    case "media-type":
                        mt = a.Val
                    }
                }
                if id != "" && href != "" {
                    manifestItems = append(manifestItems, struct{ id, href, mediaType string }{id, href, mt})
                    idToHref[id] = href
                }
            case "itemref":
                for _, a := range n.Attr {
                    if strings.ToLower(a.Key) == "idref" {
                        spineIdrefs = append(spineIdrefs, a.Val)
                    }
                }
            case "spine":
                for _, a := range n.Attr {
                    if strings.ToLower(a.Key) == "toc" {
                        ncxID = a.Val
                    }
                }
            }
        }
        for c := n.FirstChild; c != nil; c = c.NextSibling {
            walk(c)
        }
    }
    walk(doc)

    // Build ordered spine href list.
    var spineHrefs []string
    for _, idref := range spineIdrefs {
        if href, ok := idToHref[idref]; ok {
            spineHrefs = append(spineHrefs, href)
        }
    }

    // If no explicit spine, fall back to all XHTML items in manifest order.
    if len(spineHrefs) == 0 {
        sort.Slice(manifestItems, func(i, j int) bool {
            return manifestItems[i].href < manifestItems[j].href
        })
        for _, it := range manifestItems {
            mt := strings.ToLower(it.mediaType)
            if strings.Contains(mt, "html") || strings.HasSuffix(strings.ToLower(it.href), ".html") || strings.HasSuffix(strings.ToLower(it.href), ".xhtml") {
                spineHrefs = append(spineHrefs, it.href)
            }
        }
    }

    // Try to get chapter titles from NCX (toc.ncx).
    opfDir := ""
    if idx := strings.LastIndex(opfPath, "/"); idx >= 0 {
        opfDir = opfPath[:idx+1]
    }
    if ncxHref, ok := idToHref[ncxID]; ok {
        ncxPath := opfDir + ncxHref
        if ncxFile := zipFile(zr, ncxPath); ncxFile != nil {
            if ncxRC, err := ncxFile.Open(); err == nil {
                defer ncxRC.Close()
                parseNCXTitles(ncxRC, hrefTitle)
            }
        }
    }

    return spineHrefs, hrefTitle, nil
}

// parseNCXTitles extracts navPoint label→src mappings from a toc.ncx.
func parseNCXTitles(r io.Reader, out map[string]string) {
    doc, err := html.Parse(r)
    if err != nil {
        return
    }

    // Collect navPoints: each has a <navLabel><text>…</text></navLabel> and
    // a <content src="…"/> child.
    var walk func(*html.Node)
    walk = func(n *html.Node) {
        if n.Type == html.ElementNode && strings.EqualFold(n.Data, "navpoint") {
            var label, src string
            var inner func(*html.Node)
            inner = func(c *html.Node) {
                if c.Type == html.ElementNode {
                    if strings.EqualFold(c.Data, "text") && label == "" {
                        if c.FirstChild != nil && c.FirstChild.Type == html.TextNode {
                            label = strings.TrimSpace(c.FirstChild.Data)
                        }
                    }
                    if strings.EqualFold(c.Data, "content") {
                        for _, a := range c.Attr {
                            if strings.EqualFold(a.Key, "src") {
                                // Strip fragment identifier (#...).
                                src = strings.SplitN(a.Val, "#", 2)[0]
                            }
                        }
                    }
                }
                for child := c.FirstChild; child != nil; child = child.NextSibling {
                    inner(child)
                }
            }
            inner(n)
            if label != "" && src != "" {
                out[src] = label
            }
        }
        for c := n.FirstChild; c != nil; c = c.NextSibling {
            walk(c)
        }
    }
    walk(doc)
}

// epubFileContent returns the raw bytes of a file inside the EPUB zip.
func epubFileContent(zr *zip.Reader, path string) ([]byte, error) {
    f := zipFile(zr, path)
    if f == nil {
        return nil, fmt.Errorf("file %q not in EPUB", path)
    }
    rc, err := f.Open()
    if err != nil {
        return nil, err
    }
    defer rc.Close()
    return io.ReadAll(rc)
}

// zipFile finds a file by name (case-insensitive) in a zip.Reader.
func zipFile(zr *zip.Reader, name string) *zip.File {
    nameLower := strings.ToLower(name)
    for _, f := range zr.File {
        if strings.ToLower(f.Name) == nameLower {
            return f
        }
    }
    return nil
}

// htmlToText converts HTML/XHTML content to plain text suitable for storage.
func htmlToText(data []byte) string {
    doc, err := html.Parse(bytes.NewReader(data))
    if err != nil {
        return string(data)
    }

    var sb strings.Builder
    var walk func(*html.Node)
    walk = func(n *html.Node) {
        if n.Type == html.TextNode {
            text := strings.TrimSpace(n.Data)
            if text != "" {
                sb.WriteString(text)
                sb.WriteByte(' ')
            }
        }
        if n.Type == html.ElementNode {
            switch strings.ToLower(n.Data) {
            case "p", "div", "br", "h1", "h2", "h3", "h4", "h5", "h6", "li", "tr":
                // Block-level: ensure newline before content.
                if sb.Len() > 0 {
                    s := sb.String()
                    if s[len(s)-1] != '\n' {
                        sb.WriteByte('\n')
                    }
                }
            case "script", "style", "head":
                // Skip entirely.
                return
            }
        }
        for c := n.FirstChild; c != nil; c = c.NextSibling {
            walk(c)
        }
        if n.Type == html.ElementNode {
            switch strings.ToLower(n.Data) {
            case "p", "div", "h1", "h2", "h3", "h4", "h5", "h6", "li", "tr":
                sb.WriteByte('\n')
            }
        }
    }
    walk(doc)

    // Collapse multiple blank lines.
    lines := strings.Split(sb.String(), "\n")
    var out []string
    blanks := 0
    for _, l := range lines {
        l = strings.TrimSpace(l)
        if l == "" {
            blanks++
            if blanks <= 1 {
                out = append(out, "")
            }
        } else {
            blanks = 0
            out = append(out, l)
        }
    }
    return strings.TrimSpace(strings.Join(out, "\n"))
}
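
A worked example of the conversion above (tracing the text, block-break, and blank-line-collapse rules):

// htmlToText([]byte("<html><body><p>One</p><p>Two</p></body></html>"))
// returns "One\nTwo"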

// ── Chapter ingestion ─────────────────────────────────────────────────────────

// IngestChapters stores extracted chapters for a book.
// Each chapter is written as a markdown file in the chapters MinIO bucket
// and its index record is upserted in PocketBase via WriteChapter.
func (s *Store) IngestChapters(ctx context.Context, slug string, chapters []bookstore.Chapter) error {
    for _, ch := range chapters {
        var mdContent string
        if ch.Title != "" && ch.Title != fmt.Sprintf("Chapter %d", ch.Number) {
            mdContent = fmt.Sprintf("# %s\n\n%s", ch.Title, ch.Content)
        } else {
            mdContent = fmt.Sprintf("# Chapter %d\n\n%s", ch.Number, ch.Content)
        }
        domainCh := domain.Chapter{
            Ref:  domain.ChapterRef{Number: ch.Number, Title: ch.Title},
            Text: mdContent,
        }
        if err := s.WriteChapter(ctx, slug, domainCh); err != nil {
            return fmt.Errorf("ingest chapter %d: %w", ch.Number, err)
        }
    }
    return nil
}

// GetImportObjectKey returns the MinIO object key for an uploaded import file.
func GetImportObjectKey(filename string) string {
    return fmt.Sprintf("imports/%s", filename)
}
@@ -17,12 +17,13 @@ import (

// minioClient wraps the official minio-go client with bucket names.
type minioClient struct {
    client         *minio.Client // internal — all read/write operations
    pubClient      *minio.Client // presign-only — initialised against the public endpoint
    bucketChapters string
    bucketAudio    string
    bucketAvatars  string
    bucketBrowse   string
    client             *minio.Client // internal — all read/write operations
    pubClient          *minio.Client // presign-only — initialised against the public endpoint
    bucketChapters     string
    bucketAudio        string
    bucketAvatars      string
    bucketBrowse       string
    bucketTranslations string
}

func newMinioClient(cfg config.MinIO) (*minioClient, error) {
@@ -74,18 +75,19 @@ func newMinioClient(cfg config.MinIO) (*minioClient, error) {
    }

    return &minioClient{
        client:         internal,
        pubClient:      pub,
        bucketChapters: cfg.BucketChapters,
        bucketAudio:    cfg.BucketAudio,
        bucketAvatars:  cfg.BucketAvatars,
        bucketBrowse:   cfg.BucketBrowse,
        client:             internal,
        pubClient:          pub,
        bucketChapters:     cfg.BucketChapters,
        bucketAudio:        cfg.BucketAudio,
        bucketAvatars:      cfg.BucketAvatars,
        bucketBrowse:       cfg.BucketBrowse,
        bucketTranslations: cfg.BucketTranslations,
    }, nil
}

// ensureBuckets creates all required buckets if they don't already exist.
func (m *minioClient) ensureBuckets(ctx context.Context) error {
    for _, bucket := range []string{m.bucketChapters, m.bucketAudio, m.bucketAvatars, m.bucketBrowse} {
    for _, bucket := range []string{m.bucketChapters, m.bucketAudio, m.bucketAvatars, m.bucketBrowse, m.bucketTranslations} {
        exists, err := m.client.BucketExists(ctx, bucket)
        if err != nil {
            return fmt.Errorf("minio: check bucket %q: %w", bucket, err)
@@ -107,10 +109,17 @@ func ChapterObjectKey(slug string, n int) string {
    return fmt.Sprintf("%s/chapter-%06d.md", slug, n)
}

// AudioObjectKey returns the MinIO object key for a cached audio file.
// AudioObjectKeyExt returns the MinIO object key for a cached audio file
// with a custom extension (e.g. "mp3" or "wav").
// Format: {slug}/{n}/{voice}.{ext}
func AudioObjectKeyExt(slug string, n int, voice, ext string) string {
    return fmt.Sprintf("%s/%d/%s.%s", slug, n, voice, ext)
}

// AudioObjectKey returns the MinIO object key for a cached MP3 audio file.
// Format: {slug}/{n}/{voice}.mp3
func AudioObjectKey(slug string, n int, voice string) string {
    return fmt.Sprintf("%s/%d/%s.mp3", slug, n, voice)
    return AudioObjectKeyExt(slug, n, voice, "mp3")
}

// AvatarObjectKey returns the MinIO object key for a user avatar image.
@@ -125,6 +134,18 @@ func CoverObjectKey(slug string) string {
    return fmt.Sprintf("covers/%s.jpg", slug)
}

// ChapterImageObjectKey returns the MinIO object key for a chapter illustration.
// Format: chapter-images/{slug}/{n:06d}.jpg
func ChapterImageObjectKey(slug string, n int) string {
    return fmt.Sprintf("chapter-images/%s/%06d.jpg", slug, n)
}

// TranslationObjectKey returns the MinIO object key for a translated chapter.
// Format: {lang}/{slug}/{n:06d}.md
func TranslationObjectKey(lang, slug string, n int) string {
    return fmt.Sprintf("%s/%s/%06d.md", lang, slug, n)
}
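
For concreteness, the resulting key layouts (values derived from the format strings above):

// AudioObjectKeyExt("my-book", 3, "af_bella", "wav") == "my-book/3/af_bella.wav"
// TranslationObjectKey("es", "my-book", 42)          == "es/my-book/000042.md"
// ChapterImageObjectKey("my-book", 7)                == "chapter-images/my-book/000007.jpg"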

// chapterNumberFromKey extracts the chapter number from a MinIO object key.
// e.g. "my-book/chapter-000042.md" → 42
func chapterNumberFromKey(key string) int {
@@ -147,6 +168,14 @@ func (m *minioClient) putObject(ctx context.Context, bucket, key, contentType st
    return err
}

// putObjectStream uploads from r with known size (or -1 for multipart).
func (m *minioClient) putObjectStream(ctx context.Context, bucket, key, contentType string, r io.Reader, size int64) error {
    _, err := m.client.PutObject(ctx, bucket, key, r, size,
        minio.PutObjectOptions{ContentType: contentType},
    )
    return err
}

func (m *minioClient) getObject(ctx context.Context, bucket, key string) ([]byte, error) {
    obj, err := m.client.GetObject(ctx, bucket, key, minio.GetObjectOptions{})
    if err != nil {
@@ -242,3 +271,28 @@ func coverContentType(data []byte) string {
    }
    return "image/jpeg"
}

// ── Chapter image operations ───────────────────────────────────────────────────

// putChapterImage stores a chapter illustration in the browse bucket.
func (m *minioClient) putChapterImage(ctx context.Context, key, contentType string, data []byte) error {
    return m.putObject(ctx, m.bucketBrowse, key, contentType, data)
}

// getChapterImage retrieves a chapter illustration. Returns (nil, false, nil)
// when the object does not exist.
func (m *minioClient) getChapterImage(ctx context.Context, key string) ([]byte, bool, error) {
    if !m.objectExists(ctx, m.bucketBrowse, key) {
        return nil, false, nil
    }
    data, err := m.getObject(ctx, m.bucketBrowse, key)
    if err != nil {
        return nil, false, err
    }
    return data, true, nil
}

// chapterImageExists returns true when the chapter image object exists.
func (m *minioClient) chapterImageExists(ctx context.Context, key string) bool {
    return m.objectExists(ctx, m.bucketBrowse, key)
}
@@ -26,6 +26,11 @@ import (
// ErrNotFound is returned by single-record lookups when no record exists.
var ErrNotFound = errors.New("storage: record not found")

// pbHTTPClient is a shared HTTP client with a 30 s timeout so that a slow or
// hung PocketBase never stalls the backend/runner process indefinitely.
// http.DefaultClient has no timeout and must not be used for PocketBase calls.
var pbHTTPClient = &http.Client{Timeout: 30 * time.Second}

// pbClient is the internal PocketBase REST admin client.
type pbClient struct {
    baseURL string
@@ -66,7 +71,7 @@ func (c *pbClient) authToken(ctx context.Context) (string, error) {
    }
    req.Header.Set("Content-Type", "application/json")

    resp, err := http.DefaultClient.Do(req)
    resp, err := pbHTTPClient.Do(req)
    if err != nil {
        return "", fmt.Errorf("pb auth: %w", err)
    }
@@ -104,7 +109,7 @@ func (c *pbClient) do(ctx context.Context, method, path string, body io.Reader)
        req.Header.Set("Content-Type", "application/json")
    }

    resp, err := http.DefaultClient.Do(req)
    resp, err := pbHTTPClient.Do(req)
    if err != nil {
        return nil, fmt.Errorf("pb: %s %s: %w", method, path, err)
    }
File diff suppressed because it is too large
@@ -29,9 +29,22 @@ type Producer interface {
    // returns the assigned PocketBase record ID.
    CreateAudioTask(ctx context.Context, slug string, chapter int, voice string) (string, error)

    // CreateTranslationTask inserts a new translation task with status=pending and
    // returns the assigned PocketBase record ID.
    CreateTranslationTask(ctx context.Context, slug string, chapter int, lang string) (string, error)

    // CreateImportTask inserts a new import task with status=pending and
    // returns the assigned PocketBase record ID.
    // The task struct must have at minimum Slug, Title, FileType, and ObjectKey set.
    CreateImportTask(ctx context.Context, task domain.ImportTask) (string, error)

    // CancelTask transitions a pending task to status=cancelled.
    // Returns ErrNotFound if the task does not exist.
    CancelTask(ctx context.Context, id string) error

    // CancelAudioTasksBySlug cancels all pending or running audio tasks for slug.
    // Returns the number of tasks cancelled.
    CancelAudioTasksBySlug(ctx context.Context, slug string) (int, error)
}
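
A caller-side sketch of enqueueing a translation task through this interface (the wrapper name and error wrapping are illustrative, not from this diff):

func enqueueTranslation(ctx context.Context, q taskqueue.Producer, slug string, chapter int, lang string) (string, error) {
    // CreateTranslationTask inserts the pending record and returns its ID.
    id, err := q.CreateTranslationTask(ctx, slug, chapter, lang)
    if err != nil {
        return "", fmt.Errorf("enqueue translation: %w", err)
    }
    return id, nil
}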

// Consumer is the read/claim side of the task queue used by the runner.
@@ -46,13 +59,29 @@ type Consumer interface {
    // Returns (zero, false, nil) when the queue is empty.
    ClaimNextAudioTask(ctx context.Context, workerID string) (domain.AudioTask, bool, error)

    // ClaimNextTranslationTask atomically finds the oldest pending translation task,
    // sets its status=running and worker_id=workerID, and returns it.
    // Returns (zero, false, nil) when the queue is empty.
    ClaimNextTranslationTask(ctx context.Context, workerID string) (domain.TranslationTask, bool, error)

    // ClaimNextImportTask atomically finds the oldest pending import task,
    // sets its status=running and worker_id=workerID, and returns it.
    // Returns (zero, false, nil) when the queue is empty.
    ClaimNextImportTask(ctx context.Context, workerID string) (domain.ImportTask, bool, error)

    // FinishScrapeTask marks a running scrape task as done and records the result.
    FinishScrapeTask(ctx context.Context, id string, result domain.ScrapeResult) error

    // FinishAudioTask marks a running audio task as done and records the result.
    FinishAudioTask(ctx context.Context, id string, result domain.AudioResult) error

    // FailTask marks a task (scrape or audio) as failed with an error message.
    // FinishTranslationTask marks a running translation task as done and records the result.
    FinishTranslationTask(ctx context.Context, id string, result domain.TranslationResult) error

    // FinishImportTask marks a running import task as done and records the result.
    FinishImportTask(ctx context.Context, id string, result domain.ImportResult) error

    // FailTask marks a task (scrape, audio, or translation) as failed with an error message.
    FailTask(ctx context.Context, id, errMsg string) error

    // HeartbeatTask updates the heartbeat_at timestamp on a running task.
@@ -81,4 +110,18 @@ type Reader interface {
    // GetAudioTask returns the most recent audio task for cacheKey.
    // Returns (zero, false, nil) if not found.
    GetAudioTask(ctx context.Context, cacheKey string) (domain.AudioTask, bool, error)

    // ListTranslationTasks returns all translation tasks sorted by started descending.
    ListTranslationTasks(ctx context.Context) ([]domain.TranslationTask, error)

    // GetTranslationTask returns the most recent translation task for cacheKey.
    // Returns (zero, false, nil) if not found.
    GetTranslationTask(ctx context.Context, cacheKey string) (domain.TranslationTask, bool, error)

    // ListImportTasks returns all import tasks sorted by started descending.
    ListImportTasks(ctx context.Context) ([]domain.ImportTask, error)

    // GetImportTask returns a single import task by ID.
    // Returns (zero, false, nil) if not found.
    GetImportTask(ctx context.Context, id string) (domain.ImportTask, bool, error)
}
@@ -23,7 +23,14 @@ func (s *stubStore) CreateScrapeTask(_ context.Context, _, _ string, _, _ int) (
func (s *stubStore) CreateAudioTask(_ context.Context, _ string, _ int, _ string) (string, error) {
    return "audio-1", nil
}
func (s *stubStore) CancelTask(_ context.Context, _ string) error { return nil }
func (s *stubStore) CreateTranslationTask(_ context.Context, _ string, _ int, _ string) (string, error) {
    return "translation-1", nil
}
func (s *stubStore) CreateImportTask(_ context.Context, _ domain.ImportTask) (string, error) {
    return "import-1", nil
}
func (s *stubStore) CancelTask(_ context.Context, _ string) error { return nil }
func (s *stubStore) CancelAudioTasksBySlug(_ context.Context, _ string) (int, error) { return 0, nil }

func (s *stubStore) ClaimNextScrapeTask(_ context.Context, _ string) (domain.ScrapeTask, bool, error) {
    return domain.ScrapeTask{ID: "task-1", Status: domain.TaskStatusRunning}, true, nil
@@ -31,12 +38,24 @@ func (s *stubStore) ClaimNextScrapeTask(_ context.Context, _ string) (domain.Scr
func (s *stubStore) ClaimNextAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
    return domain.AudioTask{ID: "audio-1", Status: domain.TaskStatusRunning}, true, nil
}
func (s *stubStore) ClaimNextTranslationTask(_ context.Context, _ string) (domain.TranslationTask, bool, error) {
    return domain.TranslationTask{ID: "translation-1", Status: domain.TaskStatusRunning}, true, nil
}
func (s *stubStore) ClaimNextImportTask(_ context.Context, _ string) (domain.ImportTask, bool, error) {
    return domain.ImportTask{ID: "import-1", Status: domain.TaskStatusRunning}, true, nil
}
func (s *stubStore) FinishScrapeTask(_ context.Context, _ string, _ domain.ScrapeResult) error {
    return nil
}
func (s *stubStore) FinishAudioTask(_ context.Context, _ string, _ domain.AudioResult) error {
    return nil
}
func (s *stubStore) FinishTranslationTask(_ context.Context, _ string, _ domain.TranslationResult) error {
    return nil
}
func (s *stubStore) FinishImportTask(_ context.Context, _ string, _ domain.ImportResult) error {
    return nil
}
func (s *stubStore) FailTask(_ context.Context, _, _ string) error { return nil }

func (s *stubStore) HeartbeatTask(_ context.Context, _ string) error { return nil }
@@ -53,6 +72,16 @@ func (s *stubStore) ListAudioTasks(_ context.Context) ([]domain.AudioTask, error
func (s *stubStore) GetAudioTask(_ context.Context, _ string) (domain.AudioTask, bool, error) {
    return domain.AudioTask{}, false, nil
}
func (s *stubStore) ListTranslationTasks(_ context.Context) ([]domain.TranslationTask, error) {
    return nil, nil
}
func (s *stubStore) GetTranslationTask(_ context.Context, _ string) (domain.TranslationTask, bool, error) {
    return domain.TranslationTask{}, false, nil
}
func (s *stubStore) ListImportTasks(_ context.Context) ([]domain.ImportTask, error) { return nil, nil }
func (s *stubStore) GetImportTask(_ context.Context, _ string) (domain.ImportTask, bool, error) {
    return domain.ImportTask{}, false, nil
}

// Verify the stub satisfies all three interfaces at compile time.
var _ taskqueue.Producer = (*stubStore)(nil)
backend/internal/webpush/webpush.go · 147 lines · new file
@@ -0,0 +1,147 @@
// Package webpush sends Web Push notifications using the VAPID protocol.
package webpush

import (
	"context"
	"encoding/json"
	"fmt"
	"log/slog"
	"sync"

	webpushgo "github.com/SherClockHolmes/webpush-go"

	"github.com/libnovel/backend/internal/storage"
)

// Payload is the JSON body delivered to the browser service worker.
type Payload struct {
	Title string `json:"title"`
	Body  string `json:"body"`
	URL   string `json:"url,omitempty"`
	Icon  string `json:"icon,omitempty"`
}

// Sender sends Web Push notifications to subscribed browsers.
type Sender struct {
	vapidPublic  string
	vapidPrivate string
	subject      string
	log          *slog.Logger
}

// New returns a Sender configured with the given VAPID key pair.
// subject should be a mailto: or https: contact URL per the VAPID spec.
func New(vapidPublic, vapidPrivate, subject string, log *slog.Logger) *Sender {
	if log == nil {
		log = slog.Default()
	}
	return &Sender{
		vapidPublic:  vapidPublic,
		vapidPrivate: vapidPrivate,
		subject:      subject,
		log:          log,
	}
}

// Enabled returns true when VAPID keys are configured.
func (s *Sender) Enabled() bool {
	return s.vapidPublic != "" && s.vapidPrivate != ""
}

// Send delivers payload to all provided subscriptions concurrently.
// Errors for individual subscriptions are logged but do not abort other sends.
// Returns the number of successful sends.
func (s *Sender) Send(ctx context.Context, subs []storage.PushSubscription, p Payload) int {
	if !s.Enabled() || len(subs) == 0 {
		return 0
	}

	body, err := json.Marshal(p)
	if err != nil {
		s.log.Error("webpush: marshal payload", "err", err)
		return 0
	}

	var (
		wg      sync.WaitGroup
		mu      sync.Mutex
		success int
	)

	for _, sub := range subs {
		sub := sub
		wg.Add(1)
		go func() {
			defer wg.Done()

			resp, err := webpushgo.SendNotificationWithContext(ctx, body, &webpushgo.Subscription{
				Endpoint: sub.Endpoint,
				Keys: webpushgo.Keys{
					P256dh: sub.P256DH,
					Auth:   sub.Auth,
				},
			}, &webpushgo.Options{
				VAPIDPublicKey:  s.vapidPublic,
				VAPIDPrivateKey: s.vapidPrivate,
				Subscriber:      s.subject,
				TTL:             86400,
			})
			if err != nil {
				s.log.Warn("webpush: send failed", "endpoint", truncate(sub.Endpoint, 60), "err", err)
				return
			}
			defer resp.Body.Close() //nolint:errcheck
			if resp.StatusCode >= 400 {
				s.log.Warn("webpush: push service returned error",
					"endpoint", truncate(sub.Endpoint, 60),
					"status", resp.StatusCode)
				return
			}
			mu.Lock()
			success++
			mu.Unlock()
		}()
	}

	wg.Wait()
	return success
}

// SendToBook sends a push notification to all subscribers of the given book.
// store is used to list subscriptions for the book's library followers.
func (s *Sender) SendToBook(ctx context.Context, store *storage.Store, slug string, p Payload) {
	if !s.Enabled() {
		return
	}
	subs, err := store.ListPushSubscriptionsByBook(ctx, slug)
	if err != nil {
		s.log.Warn("webpush: list push subscriptions", "slug", slug, "err", err)
		return
	}
	if len(subs) == 0 {
		return
	}
	n := s.Send(ctx, subs, p)
	s.log.Info("webpush: sent chapter notification",
		"slug", slug,
		"recipients", n,
		"total_subs", len(subs),
	)
}

// GenerateVAPIDKeys generates a new VAPID key pair and returns it.
// Useful for one-off key generation during setup.
func GenerateVAPIDKeys() (public, private string, err error) {
	private, public, err = webpushgo.GenerateVAPIDKeys()
	if err != nil {
		return "", "", fmt.Errorf("generate VAPID keys: %w", err)
	}
	return public, private, nil
}

func truncate(s string, n int) string {
	if len(s) <= n {
		return s
	}
	return s[:n] + "..."
}
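For orientation, a minimal caller-side sketch of this package. The env var names, the contact address, and the way the storage.Store is obtained are assumptions for illustration, not taken from the repo:

```go
package main

import (
	"context"
	"log/slog"
	"os"

	"github.com/libnovel/backend/internal/storage"
	"github.com/libnovel/backend/internal/webpush"
)

func main() {
	// Hypothetical wiring: VAPID_PUBLIC_KEY / VAPID_PRIVATE_KEY are assumed
	// env var names; keys come from a one-off GenerateVAPIDKeys run.
	sender := webpush.New(
		os.Getenv("VAPID_PUBLIC_KEY"),
		os.Getenv("VAPID_PRIVATE_KEY"),
		"mailto:admin@libnovel.cc",
		slog.Default(),
	)

	var store *storage.Store // placeholder: obtained from the app's normal bootstrap
	if sender.Enabled() {
		sender.SendToBook(context.Background(), store, "some-book-slug", webpush.Payload{
			Title: "New chapter available",
			Body:  "Chapter 42 has just been published.",
			URL:   "/book/some-book-slug/42",
		})
	}
}
```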
@@ -2,7 +2,9 @@ FROM caddy:2-builder AS builder

RUN xcaddy build \
    --with github.com/mholt/caddy-ratelimit \
    --with github.com/hslatman/caddy-crowdsec-bouncer/http
    --with github.com/hslatman/caddy-crowdsec-bouncer/http \
    --with github.com/mholt/caddy-l4

FROM caddy:2-alpine
COPY --from=builder /usr/bin/caddy /usr/bin/caddy
COPY errors/ /srv/errors/
caddy/errors/404.html · 138 lines · new file
@@ -0,0 +1,138 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>404 — Page Not Found — libnovel</title>
  <style>
    *, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }

    html, body {
      height: 100%;
      background: #09090b;
    }

    body {
      min-height: 100svh;
      display: flex;
      flex-direction: column;
      font-family: ui-sans-serif, system-ui, sans-serif;
      color: #a1a1aa;
    }

    header {
      padding: 1.5rem 2rem;
      border-bottom: 1px solid #27272a;
    }
    .logo {
      font-size: 1.125rem;
      font-weight: 700;
      color: #f59e0b;
      letter-spacing: -0.02em;
      text-decoration: none;
    }

    main {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      padding: 3rem 2rem;
      text-align: center;
      gap: 0;
    }

    .watermark {
      font-size: clamp(5rem, 22vw, 9rem);
      font-weight: 800;
      color: #18181b;
      line-height: 1;
      letter-spacing: -0.04em;
      user-select: none;
      margin-bottom: 2rem;
    }

    .status-row {
      display: flex;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 1.25rem;
    }
    .dot {
      width: 8px;
      height: 8px;
      border-radius: 50%;
      background: #71717a;
    }
    .status-label {
      font-size: 0.75rem;
      font-weight: 600;
      text-transform: uppercase;
      letter-spacing: 0.08em;
      color: #71717a;
    }

    h1 {
      font-size: 1.5rem;
      font-weight: 700;
      color: #e4e4e7;
      letter-spacing: -0.02em;
      margin-bottom: 0.75rem;
    }

    p {
      font-size: 0.9375rem;
      max-width: 38ch;
      line-height: 1.65;
      margin-bottom: 2rem;
    }

    .btn {
      display: inline-block;
      padding: 0.625rem 1.5rem;
      border-radius: 0.5rem;
      background: #f59e0b;
      color: #000;
      font-weight: 600;
      font-size: 0.875rem;
      text-decoration: none;
      transition: background 0.15s;
    }
    .btn:hover { background: #d97706; }

    footer {
      padding: 1.5rem 2rem;
      border-top: 1px solid #27272a;
      text-align: center;
      font-size: 0.8rem;
      color: #3f3f46;
    }
  </style>
</head>
<body>

  <header>
    <a class="logo" href="/">libnovel</a>
  </header>

  <main>
    <div class="watermark">404</div>

    <div class="status-row">
      <div class="dot"></div>
      <span class="status-label">Page not found</span>
    </div>

    <h1>Nothing here</h1>
    <p>The page you're looking for doesn't exist or has been moved.</p>

    <a class="btn" href="/">Go home</a>
  </main>

  <footer>
    © LibNovel
  </footer>

</body>
</html>
caddy/errors/500.html · 203 lines · new file
@@ -0,0 +1,203 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>500 — Internal Error — libnovel</title>
  <meta http-equiv="refresh" content="20">
  <style>
    *, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }

    html, body {
      height: 100%;
      background: #09090b;
    }

    body {
      min-height: 100svh;
      display: flex;
      flex-direction: column;
      font-family: ui-sans-serif, system-ui, sans-serif;
      color: #a1a1aa;
    }

    header {
      padding: 1.5rem 2rem;
      border-bottom: 1px solid #27272a;
    }
    .logo {
      font-size: 1.125rem;
      font-weight: 700;
      color: #f59e0b;
      letter-spacing: -0.02em;
      text-decoration: none;
    }

    main {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      padding: 3rem 2rem;
      text-align: center;
      gap: 0;
    }

    .illustration {
      width: 96px;
      height: 96px;
      margin-bottom: 2rem;
    }

    .watermark {
      font-size: clamp(5rem, 22vw, 9rem);
      font-weight: 800;
      color: #18181b;
      line-height: 1;
      letter-spacing: -0.04em;
      user-select: none;
      margin-bottom: 2rem;
    }

    .status-row {
      display: flex;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 1.25rem;
    }
    .dot {
      width: 8px;
      height: 8px;
      border-radius: 50%;
      background: #f59e0b;
      animation: pulse 2s ease-in-out infinite;
    }
    @keyframes pulse {
      0%, 100% { opacity: 1; transform: scale(1); }
      50% { opacity: 0.4; transform: scale(0.75); }
    }
    .status-label {
      font-size: 0.75rem;
      font-weight: 600;
      text-transform: uppercase;
      letter-spacing: 0.08em;
      color: #f59e0b;
    }

    h1 {
      font-size: 1.5rem;
      font-weight: 700;
      color: #e4e4e7;
      letter-spacing: -0.02em;
      margin-bottom: 0.75rem;
    }

    p {
      font-size: 0.9375rem;
      max-width: 38ch;
      line-height: 1.65;
      margin-bottom: 2rem;
    }

    .actions {
      display: flex;
      flex-wrap: wrap;
      gap: 0.75rem;
      justify-content: center;
    }

    .btn {
      display: inline-block;
      padding: 0.625rem 1.5rem;
      border-radius: 0.5rem;
      background: #f59e0b;
      color: #000;
      font-weight: 600;
      font-size: 0.875rem;
      text-decoration: none;
      transition: background 0.15s;
    }
    .btn:hover { background: #d97706; }

    .btn-secondary {
      background: transparent;
      color: #a1a1aa;
      border: 1px solid #27272a;
      cursor: pointer;
    }
    .btn-secondary:hover { background: #18181b; color: #e4e4e7; }

    .refresh-note {
      margin-top: 1.25rem;
      font-size: 0.8rem;
      color: #52525b;
    }
    #countdown { color: #71717a; }

    footer {
      padding: 1.5rem 2rem;
      border-top: 1px solid #27272a;
      text-align: center;
      font-size: 0.8rem;
      color: #3f3f46;
    }
  </style>
</head>
<body>

  <header>
    <a class="logo" href="/">libnovel</a>
  </header>

  <main>
    <!-- Book with lightning bolt SVG -->
    <svg class="illustration" viewBox="0 0 96 96" fill="none" xmlns="http://www.w3.org/2000/svg" aria-hidden="true">
      <!-- Book cover -->
      <rect x="14" y="12" width="50" height="68" rx="4" fill="#27272a" stroke="#3f3f46" stroke-width="1.5"/>
      <!-- Spine -->
      <rect x="10" y="12" width="8" height="68" rx="2" fill="#18181b" stroke="#3f3f46" stroke-width="1.5"/>
      <!-- Pages edge -->
      <rect x="62" y="14" width="4" height="64" rx="1" fill="#1c1c1f"/>
      <!-- Lightning bolt -->
      <path d="M44 22 L34 46 H42 L36 70 L58 42 H48 L56 22 Z" fill="#f59e0b" opacity="0.9"/>
      <!-- Text lines -->
      <rect x="22" y="58" width="28" height="2.5" rx="1.25" fill="#3f3f46"/>
      <rect x="22" y="63" width="18" height="2.5" rx="1.25" fill="#3f3f46"/>
      <rect x="22" y="68" width="24" height="2.5" rx="1.25" fill="#3f3f46"/>
    </svg>

    <div class="watermark">500</div>

    <div class="status-row">
      <div class="dot"></div>
      <span class="status-label">Internal error</span>
    </div>

    <h1>Something went wrong</h1>
    <p>An unexpected error occurred on our end. We're on it — try again in a moment.</p>

    <div class="actions">
      <a class="btn" href="/">Go home</a>
      <button class="btn btn-secondary" onclick="location.reload()">Retry</button>
    </div>

    <p class="refresh-note">Auto-refreshing in <span id="countdown">20</span>s</p>
  </main>

  <footer>
    © LibNovel
  </footer>

  <script>
    var s = 20;
    var el = document.getElementById('countdown');
    var t = setInterval(function () {
      s--;
      el.textContent = s;
      if (s <= 0) { clearInterval(t); location.reload(); }
    }, 1000);
  </script>

</body>
</html>
@@ -3,49 +3,160 @@
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>502 — Service Unavailable</title>
  <title>502 — Service Unavailable — libnovel</title>
  <meta http-equiv="refresh" content="20">
  <style>
    *, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }

    html, body {
      height: 100%;
      background: #09090b;
    }

    body {
      min-height: 100svh;
      display: flex;
      flex-direction: column;
      font-family: ui-sans-serif, system-ui, sans-serif;
      color: #a1a1aa;
    }

    header {
      padding: 1.5rem 2rem;
      border-bottom: 1px solid #27272a;
    }
    .logo {
      font-size: 1.125rem;
      font-weight: 700;
      color: #f59e0b;
      letter-spacing: -0.02em;
      text-decoration: none;
    }

    main {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      gap: 1rem;
      background: #09090b;
      color: #a1a1aa;
      font-family: ui-sans-serif, system-ui, sans-serif;
      padding: 2rem;
      padding: 3rem 2rem;
      text-align: center;
      gap: 0;
    }
    .code {
      font-size: clamp(4rem, 20vw, 8rem);

    .watermark {
      font-size: clamp(5rem, 22vw, 9rem);
      font-weight: 800;
      color: #27272a;
      color: #18181b;
      line-height: 1;
      letter-spacing: -0.04em;
      user-select: none;
      margin-bottom: 2rem;
    }
    h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
    p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
    a {
      margin-top: 0.5rem;

    .status-row {
      display: flex;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 1.25rem;
    }
    .dot {
      width: 8px;
      height: 8px;
      border-radius: 50%;
      background: #f59e0b;
      animation: pulse 2s ease-in-out infinite;
    }
    @keyframes pulse {
      0%, 100% { opacity: 1; transform: scale(1); }
      50% { opacity: 0.4; transform: scale(0.75); }
    }
    .status-label {
      font-size: 0.75rem;
      font-weight: 600;
      text-transform: uppercase;
      letter-spacing: 0.08em;
      color: #f59e0b;
    }

    h1 {
      font-size: 1.5rem;
      font-weight: 700;
      color: #e4e4e7;
      letter-spacing: -0.02em;
      margin-bottom: 0.75rem;
    }

    p {
      font-size: 0.9375rem;
      max-width: 38ch;
      line-height: 1.65;
      margin-bottom: 2rem;
    }

    .btn {
      display: inline-block;
      padding: 0.6rem 1.4rem;
      padding: 0.625rem 1.5rem;
      border-radius: 0.5rem;
      background: #f59e0b;
      color: #000;
      font-weight: 600;
      font-size: 0.875rem;
      text-decoration: none;
      transition: background 0.15s;
    }
    .btn:hover { background: #d97706; }

    .refresh-note {
      margin-top: 1.25rem;
      font-size: 0.8rem;
      color: #52525b;
    }
    #countdown { color: #71717a; }

    footer {
      padding: 1.5rem 2rem;
      border-top: 1px solid #27272a;
      text-align: center;
      font-size: 0.8rem;
      color: #3f3f46;
    }
    a:hover { background: #d97706; }
  </style>
</head>
<body>
  <div class="code">502</div>
  <h1>Service Unavailable</h1>
  <p>The server is temporarily unreachable. Please try again in a moment.</p>
  <a href="/">Go home</a>

  <header>
    <a class="logo" href="/">libnovel</a>
  </header>

  <main>
    <div class="watermark">502</div>

    <div class="status-row">
      <div class="dot"></div>
      <span class="status-label">Service unavailable</span>
    </div>

    <h1>Something went wrong</h1>
    <p>The server is temporarily unreachable. This usually resolves itself quickly.</p>

    <a class="btn" href="/">Try again</a>
    <p class="refresh-note">Page refreshes automatically in <span id="countdown">20</span>s</p>
  </main>

  <footer>
    © LibNovel
  </footer>

  <script>
    var s = 20;
    var el = document.getElementById('countdown');
    var t = setInterval(function () {
      s--;
      el.textContent = s;
      if (s <= 0) { clearInterval(t); location.reload(); }
    }, 1000);
  </script>

</body>
</html>
@@ -3,49 +3,163 @@
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>503 — Maintenance</title>
  <title>Under Maintenance — libnovel</title>
  <meta http-equiv="refresh" content="30">
  <style>
    *, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }

    html, body {
      height: 100%;
      background: #09090b;
    }

    body {
      min-height: 100svh;
      display: flex;
      flex-direction: column;
      font-family: ui-sans-serif, system-ui, sans-serif;
      color: #a1a1aa;
    }

    /* ── Header ── */
    header {
      padding: 1.5rem 2rem;
      border-bottom: 1px solid #27272a;
    }
    .logo {
      font-size: 1.125rem;
      font-weight: 700;
      color: #f59e0b;
      letter-spacing: -0.02em;
      text-decoration: none;
    }

    /* ── Main ── */
    main {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      gap: 1rem;
      background: #09090b;
      color: #a1a1aa;
      font-family: ui-sans-serif, system-ui, sans-serif;
      padding: 2rem;
      padding: 3rem 2rem;
      text-align: center;
      gap: 0;
    }
    .code {
      font-size: clamp(4rem, 20vw, 8rem);

    .watermark {
      font-size: clamp(5rem, 22vw, 9rem);
      font-weight: 800;
      color: #27272a;
      color: #18181b;
      line-height: 1;
      letter-spacing: -0.04em;
      user-select: none;
      margin-bottom: 2rem;
    }
    h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
    p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
    a {
      margin-top: 0.5rem;

    .status-row {
      display: flex;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 1.25rem;
    }
    .dot {
      width: 8px;
      height: 8px;
      border-radius: 50%;
      background: #f59e0b;
      animation: pulse 2s ease-in-out infinite;
    }
    @keyframes pulse {
      0%, 100% { opacity: 1; transform: scale(1); }
      50% { opacity: 0.4; transform: scale(0.75); }
    }
    .status-label {
      font-size: 0.75rem;
      font-weight: 600;
      text-transform: uppercase;
      letter-spacing: 0.08em;
      color: #f59e0b;
    }

    h1 {
      font-size: 1.5rem;
      font-weight: 700;
      color: #e4e4e7;
      letter-spacing: -0.02em;
      margin-bottom: 0.75rem;
    }

    p {
      font-size: 0.9375rem;
      max-width: 38ch;
      line-height: 1.65;
      margin-bottom: 2rem;
    }

    .btn {
      display: inline-block;
      padding: 0.6rem 1.4rem;
      padding: 0.625rem 1.5rem;
      border-radius: 0.5rem;
      background: #f59e0b;
      color: #000;
      font-weight: 600;
      font-size: 0.875rem;
      text-decoration: none;
      transition: background 0.15s;
    }
    .btn:hover { background: #d97706; }

    .refresh-note {
      margin-top: 1.25rem;
      font-size: 0.8rem;
      color: #52525b;
    }
    #countdown { color: #71717a; }

    /* ── Footer ── */
    footer {
      padding: 1.5rem 2rem;
      border-top: 1px solid #27272a;
      text-align: center;
      font-size: 0.8rem;
      color: #3f3f46;
    }
    a:hover { background: #d97706; }
  </style>
</head>
<body>
  <div class="code">503</div>
  <h1>Under Maintenance</h1>
  <p>LibNovel is briefly offline for maintenance. We’ll be back shortly.</p>
  <a href="/">Try again</a>

  <header>
    <a class="logo" href="/">libnovel</a>
  </header>

  <main>
    <div class="watermark">503</div>

    <div class="status-row">
      <div class="dot"></div>
      <span class="status-label">Maintenance in progress</span>
    </div>

    <h1>We'll be right back</h1>
    <p>LibNovel is briefly offline for scheduled maintenance. No data is being changed — hang tight.</p>

    <a class="btn" href="/">Try again</a>
    <p class="refresh-note">Page refreshes automatically in <span id="countdown">30</span>s</p>
  </main>

  <footer>
    © LibNovel
  </footer>

  <script>
    var s = 30;
    var el = document.getElementById('countdown');
    var t = setInterval(function () {
      s--;
      el.textContent = s;
      if (s <= 0) { clearInterval(t); location.reload(); }
    }, 1000);
  </script>

</body>
</html>
@@ -3,49 +3,160 @@
<head>
  <meta charset="UTF-8" />
  <meta name="viewport" content="width=device-width, initial-scale=1.0" />
  <title>504 — Gateway Timeout</title>
  <title>504 — Gateway Timeout — libnovel</title>
  <meta http-equiv="refresh" content="20">
  <style>
    *, *::before, *::after { box-sizing: border-box; margin: 0; padding: 0; }

    html, body {
      height: 100%;
      background: #09090b;
    }

    body {
      min-height: 100svh;
      display: flex;
      flex-direction: column;
      font-family: ui-sans-serif, system-ui, sans-serif;
      color: #a1a1aa;
    }

    header {
      padding: 1.5rem 2rem;
      border-bottom: 1px solid #27272a;
    }
    .logo {
      font-size: 1.125rem;
      font-weight: 700;
      color: #f59e0b;
      letter-spacing: -0.02em;
      text-decoration: none;
    }

    main {
      flex: 1;
      display: flex;
      flex-direction: column;
      align-items: center;
      justify-content: center;
      gap: 1rem;
      background: #09090b;
      color: #a1a1aa;
      font-family: ui-sans-serif, system-ui, sans-serif;
      padding: 2rem;
      padding: 3rem 2rem;
      text-align: center;
      gap: 0;
    }
    .code {
      font-size: clamp(4rem, 20vw, 8rem);

    .watermark {
      font-size: clamp(5rem, 22vw, 9rem);
      font-weight: 800;
      color: #27272a;
      color: #18181b;
      line-height: 1;
      letter-spacing: -0.04em;
      user-select: none;
      margin-bottom: 2rem;
    }
    h1 { font-size: 1.25rem; font-weight: 600; color: #e4e4e7; }
    p { font-size: 0.9rem; max-width: 36ch; line-height: 1.6; }
    a {
      margin-top: 0.5rem;

    .status-row {
      display: flex;
      align-items: center;
      gap: 0.5rem;
      margin-bottom: 1.25rem;
    }
    .dot {
      width: 8px;
      height: 8px;
      border-radius: 50%;
      background: #f59e0b;
      animation: pulse 2s ease-in-out infinite;
    }
    @keyframes pulse {
      0%, 100% { opacity: 1; transform: scale(1); }
      50% { opacity: 0.4; transform: scale(0.75); }
    }
    .status-label {
      font-size: 0.75rem;
      font-weight: 600;
      text-transform: uppercase;
      letter-spacing: 0.08em;
      color: #f59e0b;
    }

    h1 {
      font-size: 1.5rem;
      font-weight: 700;
      color: #e4e4e7;
      letter-spacing: -0.02em;
      margin-bottom: 0.75rem;
    }

    p {
      font-size: 0.9375rem;
      max-width: 38ch;
      line-height: 1.65;
      margin-bottom: 2rem;
    }

    .btn {
      display: inline-block;
      padding: 0.6rem 1.4rem;
      padding: 0.625rem 1.5rem;
      border-radius: 0.5rem;
      background: #f59e0b;
      color: #000;
      font-weight: 600;
      font-size: 0.875rem;
      text-decoration: none;
      transition: background 0.15s;
    }
    .btn:hover { background: #d97706; }

    .refresh-note {
      margin-top: 1.25rem;
      font-size: 0.8rem;
      color: #52525b;
    }
    #countdown { color: #71717a; }

    footer {
      padding: 1.5rem 2rem;
      border-top: 1px solid #27272a;
      text-align: center;
      font-size: 0.8rem;
      color: #3f3f46;
    }
    a:hover { background: #d97706; }
  </style>
</head>
<body>
  <div class="code">504</div>
  <h1>Gateway Timeout</h1>
  <p>The request took too long to complete. Please refresh and try again.</p>
  <a href="/">Go home</a>

  <header>
    <a class="logo" href="/">libnovel</a>
  </header>

  <main>
    <div class="watermark">504</div>

    <div class="status-row">
      <div class="dot"></div>
      <span class="status-label">Gateway timeout</span>
    </div>

    <h1>Request timed out</h1>
    <p>The server took too long to respond. Please refresh and try again.</p>

    <a class="btn" href="/">Try again</a>
    <p class="refresh-note">Page refreshes automatically in <span id="countdown">20</span>s</p>
  </main>

  <footer>
    © LibNovel
  </footer>

  <script>
    var s = 20;
    var el = document.getElementById('countdown');
    var t = setInterval(function () {
      s--;
      el.textContent = s;
      if (s <= 0) { clearInterval(t); location.reload(); }
    }, 1000);
  </script>

</body>
</html>
@@ -15,10 +15,13 @@ x-infra-env: &infra-env
  POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
  POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
  # Meilisearch
  MEILI_URL: "http://meilisearch:7700"
  MEILI_URL: "${MEILI_URL:-http://meilisearch:7700}"
  MEILI_API_KEY: "${MEILI_MASTER_KEY}"
  # Valkey
  VALKEY_ADDR: "valkey:6379"
  # Cloudflare AI (TTS + image generation)
  CFAI_ACCOUNT_ID: "${CFAI_ACCOUNT_ID}"
  CFAI_API_TOKEN: "${CFAI_API_TOKEN}"

services:
  # ─── MinIO (object storage: chapters, audio, avatars, browse) ────────────────
@@ -55,6 +58,8 @@ services:
        mc mb --ignore-existing local/audio;
        mc mb --ignore-existing local/avatars;
        mc mb --ignore-existing local/catalogue;
        mc mb --ignore-existing local/translations;
        mc mb --ignore-existing local/imports;
        echo 'buckets ready';
      "
    environment:
@@ -126,6 +131,26 @@ services:
      timeout: 5s
      retries: 5

  # ─── Redis (Asynq task queue — accessed locally by backend, remotely by homelab runner) ──
  redis:
    image: redis:7-alpine
    restart: unless-stopped
    command: >
      redis-server
      --appendonly yes
      --requirepass "${REDIS_PASSWORD}"
    # No public port — backend reaches it via internal network.
    # Homelab runner reaches it via Caddy TLS proxy on :6380 → redis:6379.
    expose:
      - "6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "-a", "${REDIS_PASSWORD}", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
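The comments above describe the backend enqueueing Asynq jobs into this Redis sidecar while the homelab runner consumes them over the TLS proxy. A minimal sketch of the producer side, assuming the hibiken/asynq library the comments name; the "scrape:book" task type and its payload shape are assumed names for illustration:

```go
package main

import (
	"encoding/json"
	"log"
	"os"

	"github.com/hibiken/asynq"
)

func main() {
	// Producer (backend): enqueue a job into the local Redis sidecar.
	client := asynq.NewClient(asynq.RedisClientOpt{
		Addr:     "redis:6379", // matches REDIS_ADDR in the backend env
		Password: os.Getenv("REDIS_PASSWORD"),
	})
	defer client.Close()

	payload, _ := json.Marshal(map[string]string{"slug": "some-book"})
	// "scrape:book" is an assumed task type name, not confirmed by the diff.
	if _, err := client.Enqueue(asynq.NewTask("scrape:book", payload)); err != nil {
		log.Fatal(err)
	}
}
```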
  # ─── Backend API ──────────────────────────────────────────────────────────────
  backend:
    image: kalekber/libnovel-backend:${GIT_TAG:-latest}
@@ -151,6 +176,8 @@ services:
        condition: service_healthy
      valkey:
        condition: service_healthy
      redis:
        condition: service_healthy
    # No public port — all traffic is routed via Caddy.
    expose:
      - "8080"
@@ -160,7 +187,14 @@ services:
      LOG_LEVEL: "${LOG_LEVEL}"
      KOKORO_URL: "${KOKORO_URL}"
      KOKORO_VOICE: "${KOKORO_VOICE}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN}"
      POCKET_TTS_URL: "${POCKET_TTS_URL}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN_BACKEND}"
      OTEL_EXPORTER_OTLP_ENDPOINT: "${OTEL_EXPORTER_OTLP_ENDPOINT}"
      OTEL_SERVICE_NAME: "backend"
      # Asynq task queue — backend enqueues jobs to local Redis sidecar.
      # Homelab runner connects to the same Redis via Caddy TLS proxy on :6380.
      REDIS_ADDR: "redis:6379"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"
    healthcheck:
      test: ["CMD", "/healthcheck", "http://localhost:8080/health"]
      interval: 15s
@@ -216,9 +250,11 @@ services:
      # Kokoro-FastAPI TTS endpoint
      KOKORO_URL: "${KOKORO_URL}"
      KOKORO_VOICE: "${KOKORO_VOICE}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN}"
      POCKET_TTS_URL: "${POCKET_TTS_URL}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN_RUNNER}"
      OTEL_EXPORTER_OTLP_ENDPOINT: "${OTEL_EXPORTER_OTLP_ENDPOINT}"
      OTEL_SERVICE_NAME: "runner"
    healthcheck:
      # The runner writes /tmp/runner.alive on every poll.
      # 120s = 2× the default 30s poll interval with generous headroom.
      test: ["CMD", "/healthcheck", "file", "/tmp/runner.alive", "120"]
      interval: 60s
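The file-based healthcheck above relies on the runner touching /tmp/runner.alive on every poll. A minimal sketch of the heartbeat side; the loop body and interval are illustrative, not taken from the repo:

```go
package main

import (
	"log/slog"
	"os"
	"time"
)

func main() {
	// Heartbeat: touch the liveness file on every poll so the healthcheck
	// ("file /tmp/runner.alive 120") can tell the loop is still spinning.
	const aliveFile = "/tmp/runner.alive"

	for {
		if err := os.WriteFile(aliveFile, []byte(time.Now().Format(time.RFC3339)), 0o644); err != nil {
			slog.Warn("runner: write alive file", "err", err)
		}
		// pollOnce() — hypothetical: claim and process the next task here.
		time.Sleep(30 * time.Second) // default 30 s poll interval per the comment above
	}
}
```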
@@ -259,6 +295,7 @@ services:
      POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
      POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
      AUTH_SECRET: "${AUTH_SECRET}"
      DEBUG_LOGIN_TOKEN: "${DEBUG_LOGIN_TOKEN}"
      PUBLIC_MINIO_PUBLIC_URL: "${MINIO_PUBLIC_ENDPOINT}"
      # Valkey
      VALKEY_ADDR: "valkey:6379"
@@ -267,11 +304,21 @@ services:
      PUBLIC_UMAMI_SCRIPT_URL: "${PUBLIC_UMAMI_SCRIPT_URL}"
      # GlitchTip client + server-side error tracking
      PUBLIC_GLITCHTIP_DSN: "${PUBLIC_GLITCHTIP_DSN}"
      # Grafana Faro RUM (browser Web Vitals, traces, errors)
      PUBLIC_FARO_COLLECTOR_URL: "${PUBLIC_FARO_COLLECTOR_URL}"
      # OpenTelemetry tracing
      OTEL_EXPORTER_OTLP_ENDPOINT: "${OTEL_EXPORTER_OTLP_ENDPOINT}"
      OTEL_SERVICE_NAME: "ui"
      # Allow large PDF/EPUB uploads (adapter-node default is 512KB)
      BODY_SIZE_LIMIT: "52428800"
      # OAuth2 providers
      GOOGLE_CLIENT_ID: "${GOOGLE_CLIENT_ID}"
      GOOGLE_CLIENT_SECRET: "${GOOGLE_CLIENT_SECRET}"
      GITHUB_CLIENT_ID: "${GITHUB_CLIENT_ID}"
      GITHUB_CLIENT_SECRET: "${GITHUB_CLIENT_SECRET}"
      # Polar (subscriptions)
      POLAR_API_TOKEN: "${POLAR_API_TOKEN}"
      POLAR_WEBHOOK_SECRET: "${POLAR_WEBHOOK_SECRET}"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://127.0.0.1:3000/health"]
      interval: 15s
@@ -299,6 +346,19 @@ services:
      timeout: 10s
      retries: 5

  # ─── Dozzle agent ────────────────────────────────────────────────────────────
  # Exposes prod container logs to the Dozzle instance on the homelab.
  # The homelab Dozzle connects here via DOZZLE_REMOTE_AGENT.
  # Port 7007 is bound to localhost only — not reachable from the internet.
  dozzle-agent:
    image: amir20/dozzle:latest
    restart: unless-stopped
    command: agent
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
    ports:
      - "127.0.0.1:7007:7007"

  # ─── CrowdSec bouncer registration ───────────────────────────────────────────
  # One-shot: registers the Caddy bouncer with the CrowdSec LAPI and writes the
  # generated API key to crowdsec/.crowdsec.env, which Caddy reads via env_file.
@@ -334,13 +394,16 @@ services:


  # ─── Caddy (reverse proxy + automatic HTTPS) ──────────────────────────────────
  # Custom build includes github.com/mholt/caddy-ratelimit and
  # github.com/hslatman/caddy-crowdsec-bouncer/http.
  # Custom build includes github.com/mholt/caddy-ratelimit,
  # github.com/hslatman/caddy-crowdsec-bouncer/http, and
  # github.com/mholt/caddy-l4 (TCP layer4 proxy for Redis).
  caddy:
    image: kalekber/libnovel-caddy:${GIT_TAG:-latest}
    build:
      context: ./caddy
      dockerfile: Dockerfile
    labels:
      com.centurylinklabs.watchtower.enable: "true"
    restart: unless-stopped
    depends_on:
      backend:
@@ -353,6 +416,7 @@ services:
      - "80:80"
      - "443:443"
      - "443:443/udp" # HTTP/3 (QUIC)
      - "6380:6380" # Redis TCP proxy (TLS) for homelab runner → Asynq
    environment:
      DOMAIN: "${DOMAIN}"
      CADDY_ACME_EMAIL: "${CADDY_ACME_EMAIL}"
@@ -369,223 +433,28 @@ services:
  # ─── Watchtower (auto-redeploy custom services on new images) ────────────────
  # Only watches services labelled com.centurylinklabs.watchtower.enable=true.
  # Third-party infra images (minio, pocketbase, meilisearch, etc.) are excluded.
  # doppler binary is mounted from the host so watchtower fetches fresh secrets
  # on every start (notification URL, credentials) without baking them in.
  watchtower:
    image: containrrr/watchtower:latest
    restart: unless-stopped
    entrypoint: ["/usr/bin/doppler", "run", "--project", "libnovel", "--config", "prd", "--"]
    command: ["/watchtower", "--label-enable", "--interval", "300", "--cleanup"]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    command: --label-enable --interval 300 --cleanup
      - /usr/bin/doppler:/usr/bin/doppler:ro
      - /root/.doppler:/root/.doppler:ro
    environment:
      WATCHTOWER_NOTIFICATIONS: "${WATCHTOWER_NOTIFICATIONS}"
      WATCHTOWER_NOTIFICATION_URL: "${WATCHTOWER_NOTIFICATION_URL}"
      HOME: "/root"
      DOCKER_API_VERSION: "1.44"

  # ─── Shared PostgreSQL (Fider + GlitchTip + Umami) ───────────────────────────
  # A single Postgres instance hosting three separate databases.
  # PocketBase uses its own embedded SQLite; this postgres is only for the
  # three new services below.
  postgres:
    image: postgres:16-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: "${POSTGRES_USER}"
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}"
      POSTGRES_DB: postgres
    expose:
      - "5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "${POSTGRES_USER}"]
      interval: 10s
      timeout: 5s
      retries: 5

  # ─── Postgres database initialisation ────────────────────────────────────────
  # One-shot: creates the fider, glitchtip, and umami databases if missing.
  postgres-init:
    image: postgres:16-alpine
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      PGPASSWORD: "${POSTGRES_PASSWORD}"
    entrypoint: >
      /bin/sh -c "
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='fider'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE fider\";
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='glitchtip'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE glitchtip\";
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='umami'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE umami\";
      echo 'postgres-init: databases ready';
      "
    restart: "no"

  # ─── Fider (user feedback & feature requests) ─────────────────────────────────
  fider:
    image: getfider/fider:stable
    restart: unless-stopped
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    expose:
      - "3000"
    environment:
      BASE_URL: "${FIDER_BASE_URL}"
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/fider?sslmode=disable"
      JWT_SECRET: "${FIDER_JWT_SECRET}"
      # Email: Resend SMTP
      EMAIL_NOREPLY: "noreply@libnovel.cc"
      EMAIL_SMTP_HOST: "${FIDER_SMTP_HOST}"
      EMAIL_SMTP_PORT: "${FIDER_SMTP_PORT}"
      EMAIL_SMTP_USERNAME: "${FIDER_SMTP_USER}"
      EMAIL_SMTP_PASSWORD: "${FIDER_SMTP_PASSWORD}"
      EMAIL_SMTP_ENABLE_STARTTLS: "false"

  # ─── GlitchTip DB migration (one-shot) ───────────────────────────────────────
  glitchtip-migrate:
    image: glitchtip/glitchtip:latest
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
    command: "./manage.py migrate"
    restart: "no"

  # ─── GlitchTip web (error tracking UI + API) ─────────────────────────────────
  glitchtip-web:
    image: glitchtip/glitchtip:latest
    restart: unless-stopped
    depends_on:
      glitchtip-migrate:
        condition: service_completed_successfully
      valkey:
        condition: service_healthy
    expose:
      - "8000"
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
      PORT: "8000"
      ENABLE_USER_REGISTRATION: "false"
    healthcheck:
      test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/0/')"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ─── GlitchTip worker (background task processor) ─────────────────────────────
  glitchtip-worker:
    image: glitchtip/glitchtip:latest
    restart: unless-stopped
    depends_on:
      glitchtip-migrate:
        condition: service_completed_successfully
      valkey:
        condition: service_healthy
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
      SERVER_ROLE: "worker"

  # ─── Umami (page analytics) ───────────────────────────────────────────────────
  umami:
    image: ghcr.io/umami-software/umami:postgresql-latest
    restart: unless-stopped
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    expose:
      - "3000"
    environment:
      DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/umami"
      APP_SECRET: "${UMAMI_APP_SECRET}"
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:3000/api/heartbeat"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ─── Dozzle (Docker log viewer) ───────────────────────────────────────────────
  dozzle:
    image: amir20/dozzle:latest
    restart: unless-stopped
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ./dozzle/users.yml:/data/users.yml:ro
    expose:
      - "8080"
    environment:
      DOZZLE_AUTH_PROVIDER: simple
      DOZZLE_HOSTNAME: "logs.libnovel.cc"
    healthcheck:
      test: ["CMD", "/dozzle", "healthcheck"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ─── Uptime Kuma (uptime monitoring) ──────────────────────────────────────────
  uptime-kuma:
    image: louislam/uptime-kuma:1
    restart: unless-stopped
    volumes:
      - uptime_kuma_data:/app/data
    expose:
      - "3001"
    healthcheck:
      test: ["CMD", "extra/healthcheck"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ─── Gotify (push notifications) ──────────────────────────────────────────────
  gotify:
    image: gotify/server:latest
    restart: unless-stopped
    volumes:
      - gotify_data:/app/data
    expose:
      - "80"
    environment:
      GOTIFY_DEFAULTUSER_NAME: "${GOTIFY_ADMIN_USER}"
      GOTIFY_DEFAULTUSER_PASS: "${GOTIFY_ADMIN_PASS}"
      GOTIFY_SERVER_PORT: "80"
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:80/health"]
      interval: 15s
      timeout: 5s
      retries: 5

volumes:
  minio_data:
  pb_data:
  meili_data:
  valkey_data:
  redis_data:
  caddy_data:
  caddy_config:
  caddy_logs:
  crowdsec_data:
  postgres_data:
  uptime_kuma_data:
  gotify_data:
@@ -35,11 +35,11 @@ client: Browser / iOS App {
caddy: Caddy :443 {
  shape: rectangle
  style.fill: "#f1f5f9"
  label: "Caddy :443\ncustom build · caddy-ratelimit\nsecurity headers · rate limiting\nstatic error pages"
  label: "Caddy :443\ncustom build · caddy-l4 · caddy-ratelimit\nCrowdSec bouncer · security headers\nrate limiting · static error pages\nRedis TCP proxy :6380"
}

# ─── SvelteKit UI ─────────────────────────────────────────────────────────────
# Handles: auth enforcement, session, all /api/* routes that have SK counterparts
# All routes here pass through SvelteKit — auth is enforced server-side.

sk: SvelteKit UI :3000 {
  style.fill: "#fef3c7"
@@ -53,7 +53,7 @@ sk: SvelteKit UI :3000 {
  catalogue_sk: Catalogue {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /api/catalogue-page\nGET /api/search"
    label: "GET /api/catalogue-page (infinite scroll)\nGET /api/search"
  }

  book_sk: Book {
@@ -65,7 +65,7 @@ sk: SvelteKit UI :3000 {
  scrape_sk: Scrape (admin) {
    style.fill: "#fff7ed"
    style.stroke: "#fdba74"
    label: "GET /api/scrape/status\nGET /api/scrape/tasks\nPOST /api/scrape\nPOST /api/scrape/range\nPOST /api/scrape/cancel/{id}"
    label: "GET /api/scrape/status\nGET /api/scrape/tasks\nPOST /api/scrape\nPOST /api/scrape/book\nPOST /api/scrape/book/range\nPOST /api/scrape/cancel/{id}"
  }

  audio_sk: Audio {
@@ -74,7 +74,7 @@ sk: SvelteKit UI :3000 {
    label: "POST /api/audio/{slug}/{n}\nGET /api/audio/status/{slug}/{n}\nGET /api/voices"
  }

  presign_sk: Presigned URLs {
  presign_sk: Presigned URLs (public) {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /api/presign/chapter/{slug}/{n}\nGET /api/presign/audio/{slug}/{n}\nGET /api/presign/voice-sample/{voice}"
@@ -106,12 +106,12 @@ sk: SvelteKit UI :3000 {
}

# ─── Go Backend ───────────────────────────────────────────────────────────────
# Caddy proxies these paths directly — no SvelteKit auth layer
# Caddy proxies these paths directly — bypasses SvelteKit entirely.

be: Backend API :8080 {
  style.fill: "#eef3ff"

  health_be: Health {
  health_be: Health / Version {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /health\nGET /api/version"
@@ -126,7 +126,7 @@ be: Backend API :8080 {
  catalogue_be: Catalogue {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /api/browse\nGET /api/catalogue\nGET /api/ranking\nGET /api/cover/{domain}/{slug}"
    label: "GET /api/catalogue (Meilisearch)\nGET /api/browse (legacy MinIO cache)\nGET /api/ranking\nGET /api/cover/{domain}/{slug}"
  }

  book_be: Book / Chapter {
@@ -138,7 +138,13 @@ be: Backend API :8080 {
  audio_be: Audio {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /api/audio-proxy/{slug}/{n}\nGET /api/voices"
    label: "POST /api/audio/{slug}/{n}\nGET /api/audio/status/{slug}/{n}\nGET /api/audio-proxy/{slug}/{n}\nGET /api/voices"
  }

  presign_be: Presigned URLs {
    style.fill: "#f0fdf4"
    style.stroke: "#86efac"
    label: "GET /api/presign/chapter/{slug}/{n}\nGET /api/presign/audio/{slug}/{n}\nGET /api/presign/voice-sample/{voice}\nGET /api/presign/avatar-upload/{userId}\nGET /api/presign/avatar/{userId}"
  }
}

@@ -149,19 +155,19 @@ storage: Storage {

  pb: PocketBase :8090 {
    shape: cylinder
    label: "auth · books · progress\ncomments · library\nscrape_jobs · audio_cache"
    label: "auth · books · progress\ncomments · library\nscrape_jobs · audio_cache\nranking"
  }
  mn: MinIO :9000 {
    shape: cylinder
    label: "chapters · audio\navatars · browse"
    label: "chapters · audio\navatars · catalogue (browse)"
  }
  ms: Meilisearch :7700 {
    shape: cylinder
    label: "index: books"
    label: "index: books\nfilterable: status · genres\nsortable: rank · rating\n total_chapters · meta_updated"
  }
  vk: Valkey :6379 {
    shape: cylinder
    label: "presign URL cache"
    label: "presign URL cache (TTL ~55 min)\nAsynq job queue (runner)"
  }
}

@@ -169,18 +175,17 @@ storage: Storage {

client -> caddy: HTTPS :443

caddy -> sk: "/* (catch-all)\n→ SvelteKit handles auth"
caddy -> be: "/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/* /api/chapter-markdown/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/* /api/catalogue /api/ranking"
caddy -> storage.mn: "/avatars/*\n/audio/*\n/chapters/*\n(presigned MinIO GETs)"
caddy -> sk: "/* (catch-all)\n→ SvelteKit enforces auth"
caddy -> be: "/health /scrape*\n/api/browse /api/catalogue /api/ranking\n/api/version /api/book-preview/*\n/api/chapter-text/* /api/chapter-markdown/*\n/api/reindex/* /api/cover/*\n/api/audio* /api/voices /api/presign/*"
caddy -> storage.mn: "/avatars/* /audio/* /chapters/*\n(presigned MinIO GETs)"

# ─── SvelteKit → Backend (server-side proxy) ──────────────────────────────────

sk.catalogue_sk -> be.catalogue_be: internal proxy
sk.book_sk -> be.book_be: internal proxy
sk.audio_sk -> be.audio_be: internal proxy
sk.presign_sk -> storage.vk: check cache
sk.presign_sk -> storage.mn: generate presign
sk.presign_user -> storage.mn: generate presign
sk.presign_sk -> be.presign_be: internal proxy
sk.presign_user -> be.presign_be: internal proxy

# ─── SvelteKit → Storage (direct) ────────────────────────────────────────────

@@ -192,10 +197,12 @@ sk.comments_sk -> storage.pb

# ─── Backend → Storage ────────────────────────────────────────────────────────

be.catalogue_be -> storage.ms: full-text search
be.catalogue_be -> storage.ms: full-text search + facets
be.catalogue_be -> storage.pb: ranking records
be.catalogue_be -> storage.mn: cover presign
be.book_be -> storage.mn: chapter objects
be.book_be -> storage.pb: book metadata
be.audio_be -> storage.mn: audio presign
be.audio_be -> storage.vk: presign cache
be.presign_be -> storage.vk: check / set presign cache
be.presign_be -> storage.mn: generate presigned URL
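The Valkey node above is labelled as a presign URL cache with a TTL of roughly 55 minutes. A minimal sketch of that cache-aside pattern, assuming go-redis and minio-go clients and a hypothetical key scheme ("presign:<bucket>/<object>"), neither confirmed by the diagram:

```go
package presign

import (
	"context"
	"time"

	"github.com/minio/minio-go/v7"
	"github.com/redis/go-redis/v9"
)

// presignedURL returns a cached presigned GET URL if one exists, otherwise
// generates a fresh 1 h presign and caches it slightly shorter than its
// real expiry so a cached URL never outlives the presign itself.
func presignedURL(ctx context.Context, rdb *redis.Client, mc *minio.Client, bucket, object string) (string, error) {
	key := "presign:" + bucket + "/" + object // assumed key scheme

	if cached, err := rdb.Get(ctx, key).Result(); err == nil {
		return cached, nil
	}

	u, err := mc.PresignedGetObject(ctx, bucket, object, time.Hour, nil)
	if err != nil {
		return "", err
	}
	rdb.Set(ctx, key, u.String(), 55*time.Minute) // TTL ~55 min, per the diagram label
	return u.String(), nil
}
```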
File diff suppressed because one or more lines are too long
[image diff: 57 KiB → 60 KiB]
@@ -5,16 +5,25 @@ direction: right
|
||||
novelfire: novelfire.net {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
label: "novelfire.net\n(scrape source)"
|
||||
}
|
||||
|
||||
kokoro: Kokoro-FastAPI TTS {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
label: "Kokoro-FastAPI TTS\n(self-hosted · homelab)\nchapter audio"
|
||||
}
|
||||
|
||||
pockettts: pocket-tts {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
label: "pocket-tts\n(self-hosted · homelab)\nvoice sample MP3s"
|
||||
}
|
||||
|
||||
letsencrypt: Let's Encrypt {
|
||||
shape: cloud
|
||||
style.fill: "#f0f4ff"
|
||||
label: "Let's Encrypt\n(ACME TLS-ALPN-01)"
|
||||
}
|
||||
|
||||
browser: Browser / iOS App {
|
||||
@@ -30,12 +39,12 @@ init: Init containers {
|
||||
|
||||
minio-init: minio-init {
|
||||
shape: rectangle
|
||||
label: "minio-init\n(mc: create buckets)"
|
||||
label: "minio-init\n(mc: create buckets\n chapters · audio\n avatars · catalogue)"
|
||||
}
|
||||
|
||||
pb-init: pb-init {
|
||||
shape: rectangle
|
||||
label: "pb-init\n(bootstrap collections)"
|
||||
label: "pb-init\n(bootstrap PocketBase\n collections + schema)"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,109 +55,126 @@ storage: Storage {
|
||||
|
||||
minio: MinIO {
|
||||
shape: cylinder
|
||||
label: "MinIO :9000\n\nbuckets:\n chapters\n audio\n avatars\n catalogue"
|
||||
label: "MinIO :9000\nbuckets:\n chapters · audio\n avatars · catalogue"
|
||||
}
|
||||
|
||||
pocketbase: PocketBase {
|
||||
shape: cylinder
|
||||
label: "PocketBase :8090\n\ncollections:\n books chapters_idx\n audio_cache progress\n scrape_jobs app_users\n ranking"
|
||||
label: "PocketBase :8090\ncollections:\n books · chapters_idx\n audio_cache · progress\n scrape_jobs · app_users\n ranking · library\n comments"
|
||||
}
|
||||
|
||||
valkey: Valkey {
|
||||
shape: cylinder
|
||||
label: "Valkey :6379\n\n(presign URL cache\nTTL-based, shared)"
|
||||
label: "Valkey :6379\npresign URL cache (TTL ~55 min)\nAsynq job queue (runner tasks)"
|
||||
}
|
||||
|
||||
meilisearch: Meilisearch {
|
||||
shape: cylinder
|
||||
label: "Meilisearch :7700\n\nindices:\n books"
|
||||
label: "Meilisearch :7700\nindex: books\n(filterable: status · genres\n sortable: rank · rating\n total_chapters · meta_updated)"
|
||||
}
|
||||
}
|
||||
|
||||
# ─── Application ──────────────────────────────────────────────────────────────
|
||||
# ─── Application — prod VPS (165.22.70.138) ───────────────────────────────────
|
||||
|
||||
app: Application {
|
||||
app: Application — prod (165.22.70.138) {
|
||||
style.fill: "#eef3ff"
|
||||
|
||||
caddy: caddy {
|
||||
shape: rectangle
|
||||
label: "Caddy :443 / :80\ncustom build + caddy-ratelimit\n\nfeatures:\n auto-HTTPS (Let's Encrypt)\n security headers\n rate limiting (per-IP)\n static error pages (502/503/504)"
|
||||
label: "Caddy :443 / :80 / :6380\ncustom build\n+ caddy-l4 (Redis TCP proxy)\n+ caddy-ratelimit\nauto-HTTPS · security headers\nrate limiting (per-IP)\nstatic error pages (404/502/503/504)\nCrowdSec bouncer"
|
||||
}
|
||||
|
||||
backend: backend {
|
||||
shape: rectangle
|
||||
label: "Backend API :8080\n(Go — HTTP API server)"
|
||||
}
|
||||
|
||||
runner: runner {
|
||||
shape: rectangle
|
||||
label: "Runner :9091\n(Go — background worker\nscraping + TTS jobs\n/metrics endpoint)"
|
||||
label: "Backend API :8080\n(Go)\nHTTP API server\nffmpeg (audio sample conv.)\nOpenTelemetry tracing\nSentry / GlitchTip errors"
|
||||
}
|
||||
|
||||
ui: ui {
|
    shape: rectangle
    label: "SvelteKit UI :3000\n(adapter-node)\nSSR · session auth\nserver-side API proxy"
  }

  crowdsec: CrowdSec {
    shape: rectangle
    label: "CrowdSec :8080\nsecurity engine\nreads Caddy JSON logs\nbouncer integrated in Caddy"
  }

  dozzle: Dozzle agent {
    shape: rectangle
    label: "Dozzle agent\n127.0.0.1:7007\nlog relay → homelab dashboard"
  }
}

# ─── Runner — homelab (192.168.0.109) ────────────────────────────────────────

homelab: Runner — homelab (192.168.0.109) {
  style.fill: "#fef9ec"

  runner: runner {
    shape: rectangle
    label: "Runner :9091\n(Go background worker)\nscrape pipeline\nTTS audio job queue\nPrometheus /metrics\ncron: catalogue refresh\nAsynq worker → Valkey"
  }
}

# ─── Ops ──────────────────────────────────────────────────────────────────────

ops: Ops {
  style.fill: "#f5f5f5"

  watchtower: Watchtower {
    shape: rectangle
    label: "Watchtower\n(containrrr/watchtower)\npolls Docker Hub every 5 min\nautopulls + redeploys:\n backend · ui\n(runner: label-disabled on prod)"
  }
}

# ─── CI / CD ──────────────────────────────────────────────────────────────────

cicd: CI / CD {
  style.fill: "#f0f9ff"

  gitea: Gitea Actions {
    shape: rectangle
    label: "Gitea Actions\n(homelab runner)\ntag v* trigger:\n test-backend\n check-ui (type-check + build)\n docker-backend\n docker-runner\n docker-ui (bakes releases.json)\n docker-caddy\n → push Docker Hub\n → Gitea Release"
  }
}

# ─── Init → Storage ───────────────────────────────────────────────────────────

init.minio-init -> storage.minio: create buckets {style.stroke-dash: 4}
init.pb-init -> storage.pocketbase: bootstrap schema {style.stroke-dash: 4}

# ─── Caddy routing ────────────────────────────────────────────────────────────
# Routes sent directly to backend (no SvelteKit counterpart):
#   /health /scrape*
#   /api/browse /api/book-preview/* /api/chapter-text/*
#   /api/reindex/* /api/cover/* /api/audio-proxy/*
# Routes sent to MinIO:
#   /avatars/*
# Everything else → SvelteKit UI (including /api/scrape/*, /api/chapter-text-preview/*)

app.ui -> app.backend: "internal REST proxy\n(server-side only)"
app.ui -> storage.pocketbase: "auth · sessions\nprogress · library\ncomments"

app.caddy -> app.ui: "/* (catch-all)\n/api/scrape/*\n/api/chapter-text-preview/*\n→ SvelteKit (auth enforced)"
app.caddy -> app.backend: "/health /scrape*\n/api/browse /api/book-preview/*\n/api/chapter-text/*\n/api/reindex/* /api/cover/*\n/api/audio-proxy/*"
app.caddy -> storage.minio: "/avatars/*\n/audio/*\n/chapters/*\n(presigned MinIO GETs)"
app.caddy -> app.crowdsec: bouncer check (15 s poll)
app.caddy -> letsencrypt: ACME certificate\n(TLS-ALPN-01)

app.backend -> storage.minio: "chapter objs · audio MP3s\navatars · browse cache"
app.backend -> storage.pocketbase: "books · scrape_jobs\naudio_cache · ranking"
app.backend -> storage.valkey: "presign URL cache\n(SET/GET TTL ~55 min)"
app.backend -> storage.meilisearch: "catalogue search\nfacets: genres · status"
app.backend -> pockettts: "voice sample gen.\n(on-demand · ffmpeg conv.)"

# ─── Runner → deps ────────────────────────────────────────────────────────────

homelab.runner -> novelfire: "HTTP scrape\nHTML → Markdown"
homelab.runner -> kokoro: "TTS generation\ntext → MP3"
homelab.runner -> storage.minio: "write chapters\n& audio MP3s"
homelab.runner -> storage.pocketbase: "read/update scrape_jobs\nwrite book records"
homelab.runner -> storage.meilisearch: "index books\n(on scrape completion)"
homelab.runner -> storage.valkey: "Asynq job queue\n(task consume)"

# ─── Client ───────────────────────────────────────────────────────────────────

browser -> app.caddy: HTTPS :443\n(single entry point)

# ─── Ops / CI ─────────────────────────────────────────────────────────────────

ops.watchtower -> app.backend: watch (label-enabled)
ops.watchtower -> app.ui: watch (label-enabled)
cicd.gitea -> ops.watchtower: push to Docker Hub\n→ Watchtower detects new tag
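# Render (sketch — the output file name and layout engine are assumptions):
#   d2 --layout elk architecture.d2 architecture.svg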
File diff suppressed because one or more lines are too long (image asset changed: 58 KiB → 65 KiB)
homelab/docker-compose.yml · 556 lines · Normal file
@@ -0,0 +1,556 @@
# LibNovel homelab
#
# Runs on 192.168.0.109. Hosts:
#   - libnovel runner (background task worker)
#   - tooling: GlitchTip, Umami, Fider, Dozzle, Uptime Kuma, Gotify
#   - observability: OTel Collector, Tempo, Loki, Prometheus, Grafana
#   - cloudflared tunnel (public subdomains via Cloudflare Zero Trust)
#   - shared Postgres for tooling DBs
#
# All secrets come from Doppler (project=libnovel, config=prd_homelab).
# Run with: doppler run -- docker compose up -d
#
# Public subdomains (via Cloudflare Tunnel — no ports exposed to internet):
#   errors.libnovel.cc    → glitchtip-web:8000
#   analytics.libnovel.cc → umami:3000
#   feedback.libnovel.cc  → fider:3000
#   logs.libnovel.cc      → dozzle:8080
#   uptime.libnovel.cc    → uptime-kuma:3001
#   push.libnovel.cc      → gotify:80
#   grafana.libnovel.cc   → grafana:3000
#   faro.libnovel.cc      → alloy:12347

services:

  # ── Cloudflare Tunnel ───────────────────────────────────────────────────────
  # Outbound-only encrypted tunnel to Cloudflare.
  # Routes all public subdomains to their respective containers on this network.
  # No inbound ports needed — cloudflared initiates all connections outward.
  cloudflared:
    image: cloudflare/cloudflared:latest
    restart: unless-stopped
    command: tunnel --no-autoupdate run --token ${CLOUDFLARE_TUNNEL_TOKEN}
    environment:
      CLOUDFLARE_TUNNEL_TOKEN: "${CLOUDFLARE_TUNNEL_TOKEN}"

  # ── LibNovel Runner ─────────────────────────────────────────────────────────
  # Background task worker. Connects to prod PocketBase, MinIO, Meilisearch
  # via their public subdomains (pb.libnovel.cc, storage.libnovel.cc, etc.)
  runner:
    image: kalekber/libnovel-runner:latest
    restart: unless-stopped
    stop_grace_period: 135s
    labels:
      com.centurylinklabs.watchtower.enable: "true"
    environment:
      POCKETBASE_URL: "https://pb.libnovel.cc"
      POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
      POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"

      MINIO_ENDPOINT: "storage.libnovel.cc"
      MINIO_ACCESS_KEY: "${MINIO_ROOT_USER}"
      MINIO_SECRET_KEY: "${MINIO_ROOT_PASSWORD}"
      MINIO_USE_SSL: "true"
      MINIO_PUBLIC_ENDPOINT: "${MINIO_PUBLIC_ENDPOINT}"
      MINIO_PUBLIC_USE_SSL: "${MINIO_PUBLIC_USE_SSL}"

      MEILI_URL: "${MEILI_URL}"
      MEILI_API_KEY: "${MEILI_API_KEY}"
      VALKEY_ADDR: ""
      GODEBUG: "preferIPv4=1"

      # ── LibreTranslate (internal Docker network) ──────────────────────────
      LIBRETRANSLATE_URL: "http://libretranslate:5000"
      LIBRETRANSLATE_API_KEY: "${LIBRETRANSLATE_API_KEY}"

      # ── Asynq / Redis ─────────────────────────────────────────────────────
      REDIS_ADDR: "${REDIS_ADDR}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"

      KOKORO_URL: "http://kokoro-fastapi:8880"
      KOKORO_VOICE: "${KOKORO_VOICE}"

      POCKET_TTS_URL: "http://pocket-tts:8000"

      RUNNER_WORKER_ID: "${RUNNER_WORKER_ID}"
      RUNNER_POLL_INTERVAL: "${RUNNER_POLL_INTERVAL}"
      RUNNER_MAX_CONCURRENT_SCRAPE: "${RUNNER_MAX_CONCURRENT_SCRAPE}"
      RUNNER_MAX_CONCURRENT_AUDIO: "${RUNNER_MAX_CONCURRENT_AUDIO}"
      RUNNER_MAX_CONCURRENT_TRANSLATION: "${RUNNER_MAX_CONCURRENT_TRANSLATION}"
      RUNNER_TIMEOUT: "${RUNNER_TIMEOUT}"
      RUNNER_METRICS_ADDR: "${RUNNER_METRICS_ADDR}"
      RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH: "true"

      LOG_LEVEL: "${LOG_LEVEL}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN_RUNNER}"

      # OTel — send runner traces/logs to Alloy (HTTP)
      #   Alloy forwards traces → OTel collector → Tempo
      #                  logs   → Loki
      OTEL_EXPORTER_OTLP_ENDPOINT: "http://alloy:4318"
      OTEL_SERVICE_NAME: "runner"

    healthcheck:
      test: ["CMD", "/healthcheck", "file", "/tmp/runner.alive", "120"]
      interval: 60s
      timeout: 5s
      retries: 3

  # ── Shared Postgres ─────────────────────────────────────────────────────────
  # Hosts glitchtip, umami, and fider databases.
  postgres:
    image: postgres:16-alpine
    restart: unless-stopped
    environment:
      POSTGRES_USER: "${POSTGRES_USER}"
      POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}"
      POSTGRES_DB: postgres
    expose:
      - "5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "${POSTGRES_USER}"]
      interval: 10s
      timeout: 5s
      retries: 5

  # ── Postgres database initialisation ────────────────────────────────────────
  postgres-init:
    image: postgres:16-alpine
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      PGPASSWORD: "${POSTGRES_PASSWORD}"
    entrypoint: >
      /bin/sh -c "
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='fider'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE fider\";
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='glitchtip'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE glitchtip\";
      psql -h postgres -U ${POSTGRES_USER} -d postgres -tc \"SELECT 1 FROM pg_database WHERE datname='umami'\" | grep -q 1 ||
      psql -h postgres -U ${POSTGRES_USER} -d postgres -c \"CREATE DATABASE umami\";
      echo 'postgres-init: databases ready';
      "
    restart: "no"

  # ── GlitchTip DB migration ──────────────────────────────────────────────────
  glitchtip-migrate:
    image: glitchtip/glitchtip:latest
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
    command: "./manage.py migrate"
    restart: "no"

  # ── GlitchTip web ───────────────────────────────────────────────────────────
  glitchtip-web:
    image: glitchtip/glitchtip:latest
    restart: unless-stopped
    depends_on:
      glitchtip-migrate:
        condition: service_completed_successfully
    expose:
      - "8000"
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
      PORT: "8000"
      ENABLE_USER_REGISTRATION: "false"
      MEDIA_ROOT: "/code/uploads"
    volumes:
      - glitchtip_uploads:/code/uploads
      # Patch: GzipChunk fallback for sentry-cli 3.x raw zip uploads (GlitchTip bug)
      - ./glitchtip/files_api.py:/code/apps/files/api.py:ro
    healthcheck:
      test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:8000/api/0/')"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── GlitchTip worker ────────────────────────────────────────────────────────
  glitchtip-worker:
    image: glitchtip/glitchtip:latest
    restart: unless-stopped
    depends_on:
      glitchtip-migrate:
        condition: service_completed_successfully
    environment:
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/glitchtip"
      SECRET_KEY: "${GLITCHTIP_SECRET_KEY}"
      GLITCHTIP_DOMAIN: "${GLITCHTIP_DOMAIN}"
      EMAIL_URL: "${GLITCHTIP_EMAIL_URL}"
      DEFAULT_FROM_EMAIL: "noreply@libnovel.cc"
      VALKEY_URL: "redis://valkey:6379/1"
      SERVER_ROLE: "worker"
      MEDIA_ROOT: "/code/uploads"
    volumes:
      - glitchtip_uploads:/code/uploads
      # Patch: GzipChunk fallback for sentry-cli 3.x raw zip uploads (GlitchTip bug)
      - ./glitchtip/files_api.py:/code/apps/files/api.py:ro

  # ── Umami ───────────────────────────────────────────────────────────────────
  umami:
    image: ghcr.io/umami-software/umami:postgresql-latest
    restart: unless-stopped
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    expose:
      - "3000"
    environment:
      DATABASE_URL: "postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/umami"
      APP_SECRET: "${UMAMI_APP_SECRET}"
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:3000/api/heartbeat"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Fider ───────────────────────────────────────────────────────────────────
  fider:
    image: getfider/fider:stable
    restart: unless-stopped
    depends_on:
      postgres-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    expose:
      - "3000"
    environment:
      BASE_URL: "${FIDER_BASE_URL}"
      DATABASE_URL: "postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/fider?sslmode=disable"
      JWT_SECRET: "${FIDER_JWT_SECRET}"
      EMAIL_NOREPLY: "noreply@libnovel.cc"
      EMAIL_SMTP_HOST: "${FIDER_SMTP_HOST}"
      EMAIL_SMTP_PORT: "${FIDER_SMTP_PORT}"
      EMAIL_SMTP_USERNAME: "${FIDER_SMTP_USER}"
      EMAIL_SMTP_PASSWORD: "${FIDER_SMTP_PASSWORD}"
      EMAIL_SMTP_ENABLE_STARTTLS: "${FIDER_SMTP_ENABLE_STARTTLS}"
      OAUTH_GOOGLE_CLIENTID: "${OAUTH_GOOGLE_CLIENTID}"
      OAUTH_GOOGLE_SECRET: "${OAUTH_GOOGLE_SECRET}"
      OAUTH_GITHUB_CLIENTID: "${OAUTH_GITHUB_CLIENTID}"
      OAUTH_GITHUB_SECRET: "${OAUTH_GITHUB_SECRET}"

  # ── Dozzle ──────────────────────────────────────────────────────────────────
  # Watches both homelab and prod containers.
  # Prod agent runs on 165.22.70.138:7007 (added separately to prod compose).
  dozzle:
    image: amir20/dozzle:latest
    restart: unless-stopped
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock:ro
      - ./dozzle/users.yml:/data/users.yml:ro
    expose:
      - "8080"
    environment:
      DOZZLE_AUTH_PROVIDER: simple
      DOZZLE_HOSTNAME: "logs.libnovel.cc"
      DOZZLE_REMOTE_AGENT: "prod@165.22.70.138:7007"
    healthcheck:
      test: ["CMD", "/dozzle", "healthcheck"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Uptime Kuma ─────────────────────────────────────────────────────────────
  uptime-kuma:
    image: louislam/uptime-kuma:1
    restart: unless-stopped
    volumes:
      - uptime_kuma_data:/app/data
    expose:
      - "3001"
    healthcheck:
      test: ["CMD", "extra/healthcheck"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Gotify ──────────────────────────────────────────────────────────────────
  gotify:
    image: gotify/server:latest
    restart: unless-stopped
    volumes:
      - gotify_data:/app/data
    expose:
      - "80"
    environment:
      GOTIFY_DEFAULTUSER_NAME: "${GOTIFY_ADMIN_USER}"
      GOTIFY_DEFAULTUSER_PASS: "${GOTIFY_ADMIN_PASS}"
      GOTIFY_SERVER_PORT: "80"
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:80/health"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Redis (Asynq task queue) ────────────────────────────────────────────────
  # Dedicated Redis instance for Asynq job dispatch.
  # The prod backend enqueues jobs via redis.libnovel.cc:6380 (Caddy TLS proxy →
  # host:6379). The runner reads from this instance directly on the Docker network.
  # Port is bound to 0.0.0.0:6379 so the Caddy layer4 proxy on prod can reach it.
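  # Quick connectivity check (sketch — the TLS hop mirrors the Caddy layer4
  # proxy described above; the exact client flags are an assumption):
  #   local:  docker compose exec redis redis-cli -a "$REDIS_PASSWORD" ping
  #   remote: redis-cli --tls -h redis.libnovel.cc -p 6380 -a "$REDIS_PASSWORD" ping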
  redis:
    image: redis:7-alpine
    restart: unless-stopped
    command: ["redis-server", "--appendonly", "yes", "--requirepass", "${REDIS_PASSWORD}"]
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "-a", "${REDIS_PASSWORD}", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # ── LibreTranslate ──────────────────────────────────────────────────────────
  # Self-hosted machine translation. Runner connects via http://libretranslate:5000.
  # Only English → configured target languages are loaded to save RAM.
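  # Example request (sketch of the LibreTranslate /translate API; an api_key is
  # required because LT_API_KEYS is enabled below):
  #   curl -s http://libretranslate:5000/translate \
  #     -H 'Content-Type: application/json' \
  #     -d '{"q":"Hello","source":"en","target":"ru","api_key":"<key>"}'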
  libretranslate:
    image: libretranslate/libretranslate:latest
    restart: unless-stopped
    environment:
      LT_API_KEYS: "true"
      LT_API_KEYS_DB_PATH: "/app/db/api_keys.db"
      LT_LOAD_ONLY: "en,ru,id,pt,fr"
      LT_DISABLE_WEB_UI: "true"
      LT_UPDATE_MODELS: "false"
    expose:
      - "5000"
    volumes:
      - libretranslate_data:/app/db
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:5000/languages"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s

  # ── Valkey ──────────────────────────────────────────────────────────────────
  # Used by GlitchTip for task queuing.
  valkey:
    image: valkey/valkey:7-alpine
    restart: unless-stopped
    expose:
      - "6379"
    volumes:
      - valkey_data:/data
    healthcheck:
      test: ["CMD", "valkey-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # ── Grafana Alloy (Faro RUM receiver) ───────────────────────────────────────
  # Receives browser telemetry from @grafana/faro-web-sdk (Web Vitals, traces,
  # errors). Exposes POST /collect at faro.libnovel.cc via cloudflared.
  # Forwards traces to otel-collector (→ Tempo) and logs to Loki directly.
  alloy:
    image: grafana/alloy:latest
    restart: unless-stopped
    command: ["run", "--server.http.listen-addr=0.0.0.0:12348", "/etc/alloy/alloy.river"]
    volumes:
      - ./otel/alloy.river:/etc/alloy/alloy.river:ro
    expose:
      - "12347"  # Faro HTTP receiver (POST /collect)
      - "12348"  # Alloy UI / health endpoint
      - "4318"   # OTLP receiver (HTTP) for backend/runner logs
    depends_on:
      - otel-collector
      - loki

  # ── OTel Collector ──────────────────────────────────────────────────────────
  # Receives OTLP from backend/ui/runner, fans out to Tempo + Prometheus + Loki.
  otel-collector:
    image: otel/opentelemetry-collector-contrib:latest
    restart: unless-stopped
    volumes:
      - ./otel/collector.yaml:/etc/otelcol-contrib/config.yaml:ro
    expose:
      - "14317"  # OTLP gRPC (Alloy forwards traces here)
      - "14318"  # OTLP HTTP (Alloy forwards traces here)
      - "8888"   # Collector self-metrics (scraped by Prometheus)
    depends_on:
      - tempo
      - prometheus
      - loki
    # No healthcheck — distroless image has no shell or curl

  # ── Tempo ───────────────────────────────────────────────────────────────────
  # Distributed trace storage. Receives OTLP from the collector.
  tempo:
    image: grafana/tempo:2.6.1
    restart: unless-stopped
    command: ["-config.file=/etc/tempo.yaml"]
    volumes:
      - ./otel/tempo.yaml:/etc/tempo.yaml:ro
      - tempo_data:/var/tempo
    expose:
      - "3200"  # Tempo query API (queried by Grafana)
      - "4317"  # OTLP gRPC ingest (collector → tempo)
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3200/ready"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Prometheus ──────────────────────────────────────────────────────────────
  # Scrapes metrics from backend (via prod), runner, and otel-collector.
  prometheus:
    image: prom/prometheus:latest
    restart: unless-stopped
    command:
      - "--config.file=/etc/prometheus/prometheus.yaml"
      - "--storage.tsdb.path=/prometheus"
      - "--storage.tsdb.retention.time=30d"
      - "--web.enable-remote-write-receiver"
    volumes:
      - ./otel/prometheus.yaml:/etc/prometheus/prometheus.yaml:ro
      - prometheus_data:/prometheus
    expose:
      - "9090"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:9090/-/healthy"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Loki ────────────────────────────────────────────────────────────────────
  # Log aggregation. Receives logs from OTel collector. Replaces manual Dozzle
  # tailing for structured log search.
  loki:
    image: grafana/loki:latest
    restart: unless-stopped
    command: ["-config.file=/etc/loki/loki.yaml"]
    volumes:
      - ./otel/loki.yaml:/etc/loki/loki.yaml:ro
      - loki_data:/loki
    expose:
      - "3100"
    # No healthcheck — distroless image has no shell or curl

  # ── Grafana ─────────────────────────────────────────────────────────────────
  # Single UI for traces (Tempo), metrics (Prometheus), and logs (Loki).
  # Accessible at grafana.libnovel.cc via Cloudflare Tunnel.
  grafana:
    image: grafana/grafana:latest
    restart: unless-stopped
    depends_on:
      - tempo
      - prometheus
      - loki
    expose:
      - "3000"
    volumes:
      - grafana_data:/var/lib/grafana
      - ./otel/grafana/provisioning:/etc/grafana/provisioning:ro
    environment:
      GF_SERVER_ROOT_URL: "https://grafana.libnovel.cc"
      GF_SECURITY_ADMIN_USER: "${GRAFANA_ADMIN_USER}"
      GF_SECURITY_ADMIN_PASSWORD: "${GRAFANA_ADMIN_PASSWORD}"
      GF_AUTH_ANONYMOUS_ENABLED: "false"
      GF_FEATURE_TOGGLES_ENABLE: "traceqlEditor"
    healthcheck:
      test: ["CMD", "wget", "-qO-", "http://localhost:3000/api/health"]
      interval: 15s
      timeout: 5s
      retries: 5

  # ── Kokoro-FastAPI (GPU TTS) ────────────────────────────────────────────────
  # OpenAI-compatible TTS service backed by the Kokoro model, running on the
  # homelab RTX 3050 (8 GB VRAM). Replaces the broken kokoro.kalekber.cc DNS.
  # Voices match existing IDs: af_bella, af_sky, af_heart, etc.
  # The runner reaches it at http://kokoro-fastapi:8880 via the Docker network.
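  # Example request (sketch — kokoro-fastapi advertises an OpenAI-compatible
  # speech endpoint; the exact path and payload shape are assumptions to verify):
  #   curl -s http://kokoro-fastapi:8880/v1/audio/speech \
  #     -H 'Content-Type: application/json' \
  #     -d '{"model":"kokoro","input":"Chapter one.","voice":"af_bella"}' -o out.mp3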
  kokoro-fastapi:
    image: kokoro-fastapi:latest
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    expose:
      - "8880"
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:8880/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 60s

  # ── pocket-tts (CPU TTS) ────────────────────────────────────────────────────
  # Lightweight CPU-only TTS using kyutai-labs/pocket-tts.
  # Image is built locally on homelab from https://github.com/kyutai-labs/pocket-tts
  # (no prebuilt image published):
  #   cd /tmp && git clone --depth=1 https://github.com/kyutai-labs/pocket-tts.git \
  #     && docker build -t pocket-tts:latest /tmp/pocket-tts
  # OpenAI-compatible: POST /tts (multipart form) on port 8000.
  # Voices: alba, marius, javert, jean, fantine, cosette, eponine, azelma, etc.
  # Not currently used by the runner (runner uses kokoro-fastapi), but available
  # for experimentation / fallback.
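  # Example request (sketch — multipart form per the note above; the field names
  # are assumptions to verify against the pocket-tts README):
  #   curl -s http://pocket-tts:8000/tts -F text='Hello there' -F voice=alba -o sample.wav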
  pocket-tts:
    image: pocket-tts:latest
    restart: unless-stopped
    command: ["uv", "run", "pocket-tts", "serve", "--host", "0.0.0.0"]
    expose:
      - "8000"
    volumes:
      - pocket_tts_cache:/root/.cache/pocket_tts
      - hf_cache:/root/.cache/huggingface
    healthcheck:
      test: ["CMD", "curl", "-sf", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 120s

  # ── Watchtower ──────────────────────────────────────────────────────────────
  # Auto-updates the runner image when CI pushes a new tag.
  # Only watches services with the watchtower label.
  # The doppler binary is mounted from the host so watchtower fetches fresh
  # secrets on every start (notification URL, credentials) without baking them in.
  watchtower:
    image: containrrr/watchtower:latest
    restart: unless-stopped
    entrypoint: ["/usr/bin/doppler", "run", "--project", "libnovel", "--config", "prd_homelab", "--"]
    command: ["/watchtower", "--label-enable", "--interval", "300", "--cleanup"]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - /usr/bin/doppler:/usr/bin/doppler:ro
      - /root/.doppler:/root/.doppler:ro
    environment:
      HOME: "/root"
      DOCKER_API_VERSION: "1.44"

volumes:
  postgres_data:
  redis_data:
  libretranslate_data:
  valkey_data:
  uptime_kuma_data:
  gotify_data:
  tempo_data:
  prometheus_data:
  loki_data:
  grafana_data:
  pocket_tts_cache:
  hf_cache:
  glitchtip_uploads:
homelab/dozzle/users.yml · 5 lines · Normal file
@@ -0,0 +1,5 @@
users:
  admin:
    name: admin
    email: admin@libnovel.cc
    password: "$2y$10$4jqLza2grpxnQn0EGux2C.UmlSxRmOvH/J1ySzOBxMZgW6cA2TnmK"
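# The password field is a bcrypt hash (Dozzle "simple" auth provider). One way
# to generate an entry (sketch — the subcommand and flags are assumptions to
# verify against the Dozzle docs):
#   docker run --rm amir20/dozzle generate admin --password '<secret>' \
#     --email admin@libnovel.cc --name admin > users.yml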
homelab/glitchtip/files_api.py · 127 lines · Normal file
@@ -0,0 +1,127 @@
"""Port of sentry.api.endpoints.chunk.ChunkUploadEndpoint"""

import logging
from gzip import GzipFile
from io import BytesIO

from django.conf import settings
from django.shortcuts import aget_object_or_404
from django.urls import reverse
from ninja import File, Router
from ninja.errors import HttpError
from ninja.files import UploadedFile

from apps.organizations_ext.models import Organization
from glitchtip.api.authentication import AuthHttpRequest
from glitchtip.api.decorators import optional_slash
from glitchtip.api.permissions import has_permission

from .models import FileBlob

# Force just one blob
CHUNK_UPLOAD_BLOB_SIZE = 32 * 1024 * 1024  # 32MB
MAX_CHUNKS_PER_REQUEST = 1
MAX_REQUEST_SIZE = CHUNK_UPLOAD_BLOB_SIZE
MAX_CONCURRENCY = 1
HASH_ALGORITHM = "sha1"

CHUNK_UPLOAD_ACCEPT = (
    "debug_files",  # DIF assemble
    "release_files",  # Release files assemble
    "pdbs",  # PDB upload and debug id override
    "sources",  # Source artifact bundle upload
    "artifact_bundles",  # Artifact bundles contain debug ids to link source to sourcemaps
    "proguard",
)


class GzipChunk(BytesIO):
    def __init__(self, file):
        raw = file.read()
        try:
            data = GzipFile(fileobj=BytesIO(raw), mode="rb").read()
        except Exception:
            # sentry-cli 3.x sends raw (uncompressed) zip data despite gzip being
            # advertised by the server — fall back to using the raw bytes as-is.
            data = raw
        self.size = len(data)
        self.name = file.name
        super().__init__(data)
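# Behaviour sketch (illustrative, not part of the upstream port): GzipChunk
# accepts both gzipped and raw payloads, e.g.
#
#   import gzip
#   class Upload(BytesIO):
#       name = "chunk"
#   GzipChunk(Upload(gzip.compress(b"hi"))).read()  # -> b"hi" (decompressed)
#   GzipChunk(Upload(b"PK\x03\x04raw-zip")).read()  # -> raw bytes (fallback path)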

router = Router()


@optional_slash(router, "get", "organizations/{slug:organization_slug}/chunk-upload/")
async def get_chunk_upload_info(request: AuthHttpRequest, organization_slug: str):
    """Get server settings for chunk file upload"""
    path = reverse("api:get_chunk_upload_info", args=[organization_slug])
    url = (
        path
        if settings.GLITCHTIP_CHUNK_UPLOAD_USE_RELATIVE_URL
        else settings.GLITCHTIP_URL.geturl() + path
    )
    return {
        "url": url,
        "chunkSize": CHUNK_UPLOAD_BLOB_SIZE,
        "chunksPerRequest": MAX_CHUNKS_PER_REQUEST,
        "maxFileSize": 2147483648,
        "maxRequestSize": MAX_REQUEST_SIZE,
        "concurrency": MAX_CONCURRENCY,
        "hashAlgorithm": HASH_ALGORITHM,
        "compression": ["gzip"],
        "accept": CHUNK_UPLOAD_ACCEPT,
    }


@optional_slash(router, "post", "organizations/{slug:organization_slug}/chunk-upload/")
@has_permission(["project:write", "project:admin", "project:releases"])
async def chunk_upload(
    request: AuthHttpRequest,
    organization_slug: str,
    file_gzip: list[UploadedFile] = File(...),
):
    """Upload one or more gzipped files to save"""
    logger = logging.getLogger("glitchtip.files")
    logger.info("chunkupload.start")

    organization = await aget_object_or_404(
        Organization, slug=organization_slug.lower(), users=request.auth.user_id
    )

    files = [GzipChunk(chunk) for chunk in file_gzip]

    if len(files) == 0:
        # No files uploaded is ok
        logger.info("chunkupload.end", extra={"status": 200})
        return

    logger.info("chunkupload.post.files", extra={"len": len(files)})

    # Validate file size
    checksums = []
    size = 0
    for chunk in files:
        size += chunk.size
        if chunk.size > CHUNK_UPLOAD_BLOB_SIZE:
            logger.info("chunkupload.end", extra={"status": 400})
            raise HttpError(400, "Chunk size too large")
        checksums.append(chunk.name)

    if size > MAX_REQUEST_SIZE:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, "Request too large")

    if len(files) > MAX_CHUNKS_PER_REQUEST:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, "Too many chunks")

    try:
        await FileBlob.from_files(
            zip(files, checksums), organization=organization, logger=logger
        )
    except IOError as err:
        logger.info("chunkupload.end", extra={"status": 400})
        raise HttpError(400, str(err)) from err

    logger.info("chunkupload.end", extra={"status": 200})
homelab/otel/alloy.river · 81 lines · Normal file
@@ -0,0 +1,81 @@
// Grafana Alloy — Faro RUM receiver + OTel log bridge
//
// Receives browser telemetry (Web Vitals, traces, logs, exceptions) from the
// LibNovel SvelteKit frontend via the @grafana/faro-web-sdk.
//
// Also receives OTLP logs from the backend and runner services, and forwards
// them to Loki in the native push format (solving the OTLP→Loki gap).
//
// Pipeline:
//   faro.receiver             → receives HTTP POST /collect from browsers
//   otelcol.receiver.otlp     → receives OTLP logs from backend/runner (HTTP :4318)
//   otelcol.exporter.otlphttp → forwards traces to OTel Collector → Tempo
//   loki.write                → forwards Faro logs/exceptions to Loki
//   otelcol.exporter.loki     → forwards OTel logs to Loki (native format)
//
// The Faro endpoint is exposed publicly at faro.libnovel.cc via cloudflared.
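// Browser side, for reference (sketch — options follow @grafana/faro-web-sdk
// conventions; the app name is hypothetical):
//   import { initializeFaro } from '@grafana/faro-web-sdk';
//   initializeFaro({ url: 'https://faro.libnovel.cc/collect', app: { name: 'libnovel-ui' } });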

faro.receiver "faro" {
  server {
    listen_address = "0.0.0.0"
    listen_port    = 12347

    cors_allowed_origins = ["https://libnovel.cc", "https://www.libnovel.cc"]
  }

  output {
    logs   = [loki.write.faro.receiver]
    traces = [otelcol.exporter.otlphttp.faro.input]
  }
}

// Receive OTLP traces and logs from backend/runner
otelcol.receiver.otlp "otel_logs" {
  http {
    endpoint = "0.0.0.0:4318"
  }

  output {
    logs   = [otelcol.exporter.loki.otel_logs.input]
    traces = [otelcol.exporter.otlphttp.otel_logs.input]
  }
}

// Convert OTel logs to Loki format and forward to loki.write
otelcol.exporter.loki "otel_logs" {
  forward_to = [loki.write.otel_logs.receiver]
}

// Send backend/runner traces to the OTel Collector → Tempo
otelcol.exporter.otlphttp "otel_logs" {
  client {
    endpoint = "http://otel-collector:4318"
    tls {
      insecure = true
    }
  }
}

// Push backend/runner logs to Loki (native push format)
loki.write "otel_logs" {
  endpoint {
    url = "http://loki:3100/loki/api/v1/push"
  }
}

// Forward Faro traces to the OTel Collector (which routes to Tempo)
otelcol.exporter.otlphttp "faro" {
  client {
    endpoint = "http://otel-collector:4318"
    tls {
      insecure = true
    }
  }
}

// Forward Faro logs/exceptions directly to Loki
loki.write "faro" {
  endpoint {
    url = "http://loki:3100/loki/api/v1/push"
  }
}
homelab/otel/collector.yaml · 79 lines · Normal file
@@ -0,0 +1,79 @@
# OTel Collector config
#
# Receivers:  OTLP (gRPC + HTTP) from backend, ui, runner
# Processors: batch for efficiency, resource detection for host metadata
# Exporters:  Tempo (traces), Prometheus (metrics), Loki (logs)

receivers:
  otlp:
    protocols:
      grpc:
        endpoint: 0.0.0.0:4317
      http:
        endpoint: 0.0.0.0:4318

processors:
  batch:
    timeout: 5s
    send_batch_size: 512

  # Attach host metadata to traces/metrics
  resourcedetection:
    detectors: [env, system]
    timeout: 5s

exporters:
  # Traces → Tempo
  otlp/tempo:
    endpoint: tempo:4317
    tls:
      insecure: true

  # Metrics → Prometheus (remote write)
  prometheusremotewrite:
    endpoint: "http://prometheus:9090/api/v1/write"
    tls:
      insecure_skip_verify: true

  # Logs → Loki (via OTLP HTTP endpoint)
  otlphttp/loki:
    endpoint: "http://loki:3100/otlp"
    tls:
      insecure: true

  # Collector self-observability (optional debug)
  debug:
    verbosity: basic

extensions:
  health_check:
    endpoint: 0.0.0.0:13133
  pprof:
    endpoint: 0.0.0.0:1777

service:
  extensions: [health_check, pprof]
  telemetry:
    metrics:
      # otel-collector v0.103+ replaced `address` with `readers`
      readers:
        - pull:
            exporter:
              prometheus:
                host: 0.0.0.0
                port: 8888
  pipelines:
    traces:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [otlp/tempo]
    metrics:
      receivers: [otlp]
      processors: [resourcedetection, batch]
      exporters: [prometheusremotewrite]
    logs:
      receivers: [otlp]
      # No resourcedetection — preserve service.name from OTel resource attributes
      # (backend=backend, runner=runner, Alloy/Faro=no service.name → unknown_service)
      processors: [batch]
      exporters: [otlphttp/loki]
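# Smoke test (sketch): the collector image is distroless, so probe the
# health_check extension from a neighbouring container, e.g.
#   docker compose exec grafana wget -qO- http://otel-collector:13133/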
@@ -0,0 +1,16 @@
# Grafana alerting provisioning — contact points
# Sends all alerts to Gotify (self-hosted push notifications).
apiVersion: 1

contactPoints:
  - orgId: 1
    name: Gotify
    receivers:
      - uid: gotify-webhook
        type: webhook
        settings:
          url: "http://gotify/message?token=ABZrZgCY-4ivcmt"
          httpMethod: POST
          title: "{{ .CommonLabels.alertname }}"
          message: "{{ range .Alerts }}{{ .Annotations.summary }}\n{{ .Annotations.description }}{{ end }}"
        disableResolveMessage: false
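# Manual test of the contact point (sketch — Gotify's /message endpoint accepts
# form fields; substitute a real application token):
#   curl -s "http://gotify/message?token=<app-token>" -F title=test -F message=hello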
@@ -0,0 +1,15 @@
# Grafana alerting provisioning — notification policies
# Routes all alerts to Gotify by default.
apiVersion: 1

policies:
  - orgId: 1
    receiver: Gotify
    group_by: ["alertname", "service"]
    group_wait: 30s
    group_interval: 5m
    repeat_interval: 4h
    routes:
      - receiver: Gotify
        matchers:
          - severity =~ "critical|warning"
homelab/otel/grafana/provisioning/alerting/rules.yaml · 214 lines · Normal file
@@ -0,0 +1,214 @@
# Grafana alerting provisioning — alert rules
# Covers: runner down, high task failure rate, stalled tasks, backend error
# rate, backend p95 latency, and OTel collector down.
apiVersion: 1

groups:
  - orgId: 1
    name: LibNovel Runner
    folder: LibNovel
    interval: 1m
    rules:

      - uid: runner-down
        title: Runner Down
        condition: C
        for: 2m
        annotations:
          summary: "LibNovel runner is not reachable"
          description: "The Prometheus scrape of runner:9091 has been failing for >2 minutes. Tasks are not being processed."
        labels:
          severity: critical
          service: runner
        data:
          - refId: A
            datasourceUid: prometheus
            relativeTimeRange: { from: 300, to: 0 }
            model:
              expr: "up{job=\"libnovel-runner\"}"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 300, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [1], type: lt }
                  operator: { type: and }
                  query: { params: [A] }
                  reducer: { params: [], type: last }

      - uid: runner-high-failure-rate
        title: Runner High Task Failure Rate
        condition: C
        for: 5m
        annotations:
          summary: "Runner task failure rate is above 20%"
          description: "More than 20% of runner tasks have been failing for the last 5 minutes. Check runner logs."
        labels:
          severity: warning
          service: runner
        data:
          - refId: A
            datasourceUid: prometheus
            relativeTimeRange: { from: 600, to: 0 }
            model:
              expr: "rate(libnovel_runner_tasks_failed_total[5m]) / clamp_min(rate(libnovel_runner_tasks_completed_total[5m]) + rate(libnovel_runner_tasks_failed_total[5m]), 0.001)"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 600, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [0.2], type: gt }
                  operator: { type: and }
                  query: { params: [A] }
                  reducer: { params: [], type: last }
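      # Note on the expression above: clamp_min(denominator, 0.001) keeps the
      # ratio defined when no tasks ran in the window — failed=0, completed=0
      # yields 0 / 0.001 = 0 rather than NaN, so the alert stays quiet.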

      - uid: runner-tasks-stalled
        title: Runner Tasks Stalled
        condition: C
        for: 10m
        annotations:
          summary: "Runner has tasks running for >10 minutes with no completions"
          description: "tasks_running > 0 but rate(tasks_completed) is 0. Tasks may be stuck or the runner is in a crash loop."
        labels:
          severity: warning
          service: runner
        data:
          - refId: Running
            datasourceUid: prometheus
            relativeTimeRange: { from: 900, to: 0 }
            model:
              expr: "libnovel_runner_tasks_running"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: Rate
            datasourceUid: prometheus
            relativeTimeRange: { from: 900, to: 0 }
            model:
              expr: "rate(libnovel_runner_tasks_completed_total[10m])"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 900, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [0], type: gt }
                  operator: { type: and }
                  query: { params: [Running] }
                  reducer: { params: [], type: last }
                - evaluator: { params: [0.001], type: lt }
                  operator: { type: and }
                  query: { params: [Rate] }
                  reducer: { params: [], type: last }

  - orgId: 1
    name: LibNovel Backend
    folder: LibNovel
    interval: 1m
    rules:

      - uid: backend-high-error-rate
        title: Backend High Error Rate
        condition: C
        for: 5m
        annotations:
          summary: "Backend API error rate above 5%"
          description: "More than 5% of backend HTTP requests are returning 5xx status codes (as seen from UI OTel instrumentation)."
        labels:
          severity: warning
          service: backend
        data:
          - refId: A
            datasourceUid: prometheus
            relativeTimeRange: { from: 600, to: 0 }
            model:
              expr: "sum(rate(http_client_request_duration_seconds_count{job=\"ui\", server_address=\"backend\", http_response_status_code=~\"5..\"}[5m])) / clamp_min(sum(rate(http_client_request_duration_seconds_count{job=\"ui\", server_address=\"backend\"}[5m])), 0.001)"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 600, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [0.05], type: gt }
                  operator: { type: and }
                  query: { params: [A] }
                  reducer: { params: [], type: last }

      - uid: backend-high-p95-latency
        title: Backend High p95 Latency
        condition: C
        for: 5m
        annotations:
          summary: "Backend p95 latency above 2s"
          description: "95th percentile latency of backend spans has exceeded 2 seconds for >5 minutes."
        labels:
          severity: warning
          service: backend
        data:
          - refId: A
            datasourceUid: prometheus
            relativeTimeRange: { from: 600, to: 0 }
            model:
              expr: "histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 600, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [2], type: gt }
                  operator: { type: and }
                  query: { params: [A] }
                  reducer: { params: [], type: last }

  - orgId: 1
    name: LibNovel OTel Pipeline
    folder: LibNovel
    interval: 2m
    rules:

      - uid: otel-collector-down
        title: OTel Collector Down
        condition: C
        for: 3m
        annotations:
          summary: "OTel collector is not reachable"
          description: "Prometheus cannot scrape otel-collector:8888. Traces and logs may be dropping."
        labels:
          severity: warning
          service: otel-collector
        data:
          - refId: A
            datasourceUid: prometheus
            relativeTimeRange: { from: 600, to: 0 }
            model:
              expr: "up{job=\"otel-collector\"}"
              instant: true
              intervalMs: 1000
              maxDataPoints: 43200
          - refId: C
            datasourceUid: __expr__
            relativeTimeRange: { from: 600, to: 0 }
            model:
              type: classic_conditions
              conditions:
                - evaluator: { params: [1], type: lt }
                  operator: { type: and }
                  query: { params: [A] }
                  reducer: { params: [], type: last }
homelab/otel/grafana/provisioning/dashboards/backend.json · 307 lines · Normal file
@@ -0,0 +1,307 @@
{
  "uid": "libnovel-backend",
  "title": "Backend API",
  "description": "Request rate, error rate, and latency for the LibNovel backend. Powered by Tempo span metrics.",
  "tags": ["libnovel", "backend", "api"],
  "timezone": "browser",
  "refresh": "30s",
  "time": { "from": "now-3h", "to": "now" },
  "schemaVersion": 39,
  "panels": [
    {
      "id": 1,
      "type": "stat",
      "title": "Request Rate (RPS)",
      "gridPos": { "x": 0, "y": 0, "w": 4, "h": 4 },
      "options": {
        "reduceOptions": { "calcs": ["lastNotNull"] },
        "colorMode": "value",
        "graphMode": "area",
        "textMode": "auto"
      },
      "fieldConfig": {
        "defaults": {
          "unit": "reqps",
          "color": { "mode": "thresholds" },
          "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"backend\"}[5m]))",
          "legendFormat": "rps",
          "instant": true
        }
      ]
    },
    {
      "id": 2,
      "type": "stat",
      "title": "Error Rate",
      "gridPos": { "x": 4, "y": 0, "w": 4, "h": 4 },
      "options": {
        "reduceOptions": { "calcs": ["lastNotNull"] },
        "colorMode": "background",
        "graphMode": "none"
      },
      "fieldConfig": {
        "defaults": {
          "unit": "percentunit",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 0.01 },
              { "color": "red", "value": 0.05 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"backend\", status_code=\"STATUS_CODE_ERROR\"}[5m])) / clamp_min(sum(rate(traces_spanmetrics_calls_total{service=\"backend\"}[5m])), 0.001)",
          "legendFormat": "error rate",
          "instant": true
        }
      ]
    },
    {
      "id": 3,
      "type": "stat",
      "title": "p50 Latency",
      "gridPos": { "x": 8, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
      "fieldConfig": {
        "defaults": {
          "unit": "s",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 0.2 },
              { "color": "red", "value": 1 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.50, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p50",
          "instant": true
        }
      ]
    },
    {
      "id": 4,
      "type": "stat",
      "title": "p95 Latency",
      "gridPos": { "x": 12, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
      "fieldConfig": {
        "defaults": {
          "unit": "s",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 0.5 },
              { "color": "red", "value": 2 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p95",
          "instant": true
        }
      ]
    },
    {
      "id": 5,
      "type": "stat",
      "title": "p99 Latency",
      "gridPos": { "x": 16, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
      "fieldConfig": {
        "defaults": {
          "unit": "s",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 5 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.99, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p99",
          "instant": true
        }
      ]
    },
    {
      "id": 6,
      "type": "stat",
      "title": "5xx Errors / min",
      "gridPos": { "x": 20, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "short",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 5 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"backend\", status_code=\"STATUS_CODE_ERROR\"}[5m])) * 60",
          "legendFormat": "5xx/min",
          "instant": true
        }
      ]
    },
    {
      "id": 10,
      "type": "timeseries",
      "title": "Request Rate (total vs errors)",
      "gridPos": { "x": 0, "y": 4, "w": 12, "h": 8 },
      "options": {
        "tooltip": { "mode": "multi" },
        "legend": { "displayMode": "list", "placement": "bottom" }
      },
      "fieldConfig": {
        "defaults": { "unit": "reqps", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": [
          { "matcher": { "id": "byName", "options": "errors" }, "properties": [{ "id": "color", "value": { "fixedColor": "red", "mode": "fixed" } }] }
        ]
      },
      "targets": [
        {
          "refId": "total",
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"backend\"}[5m]))",
          "legendFormat": "total"
        },
        {
          "refId": "errors",
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"backend\", status_code=\"STATUS_CODE_ERROR\"}[5m]))",
          "legendFormat": "errors"
        }
      ]
    },
    {
      "id": 11,
      "type": "timeseries",
      "title": "Latency Percentiles (backend spans)",
      "gridPos": { "x": 12, "y": 4, "w": 12, "h": 8 },
      "options": {
        "tooltip": { "mode": "multi" },
        "legend": { "displayMode": "list", "placement": "bottom" }
      },
      "fieldConfig": {
        "defaults": { "unit": "s", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.50, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p50"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p95"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.99, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le))",
          "legendFormat": "p99"
        }
      ]
    },
    {
      "id": 12,
      "type": "timeseries",
      "title": "Request Rate by Span Name (top operations)",
      "gridPos": { "x": 0, "y": 12, "w": 12, "h": 8 },
      "description": "Throughput broken down by HTTP route / span name from Tempo span metrics.",
      "options": {
        "tooltip": { "mode": "multi" },
        "legend": { "displayMode": "list", "placement": "bottom" }
      },
      "fieldConfig": {
        "defaults": { "unit": "reqps", "custom": { "lineWidth": 2, "fillOpacity": 5 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "topk(10, sum(rate(traces_spanmetrics_calls_total{service=\"backend\"}[5m])) by (span_name))",
          "legendFormat": "{{span_name}}"
        }
      ]
    },
    {
      "id": 13,
      "type": "timeseries",
      "title": "Latency by Span Name (p95)",
      "gridPos": { "x": 12, "y": 12, "w": 12, "h": 8 },
      "description": "p95 latency per operation — helps identify slow endpoints.",
      "options": {
        "tooltip": { "mode": "multi" },
        "legend": { "displayMode": "list", "placement": "bottom" }
      },
      "fieldConfig": {
        "defaults": { "unit": "s", "custom": { "lineWidth": 2, "fillOpacity": 5 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "topk(10, histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"backend\"}[5m])) by (le, span_name)))",
          "legendFormat": "{{span_name}}"
        }
      ]
    },
    {
      "id": 20,
      "type": "logs",
      "title": "Backend Errors",
      "gridPos": { "x": 0, "y": 20, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": false,
        "wrapLogMessage": true,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"backend\"}",
          "legendFormat": ""
        }
      ]
    }
  ]
}
homelab/otel/grafana/provisioning/dashboards/catalogue.json · 225 lines · Normal file
@@ -0,0 +1,225 @@
{
  "uid": "libnovel-catalogue",
  "title": "Catalogue & Content Progress",
  "description": "Scraping progress from runner OTel logs in Loki. Logs are JSON: body=message, attributes.slug/chapters/page=fields.",
  "tags": ["libnovel", "catalogue", "content"],
  "timezone": "browser",
  "refresh": "1m",
  "time": { "from": "now-24h", "to": "now" },
  "schemaVersion": 39,
  "panels": [
    {
      "id": 1,
      "type": "stat",
      "title": "Books Scraped (last 24h)",
      "description": "Count of unique slugs from 'chapter list fetched' messages.",
      "gridPos": { "x": 0, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "color": { "fixedColor": "blue", "mode": "fixed" },
          "thresholds": { "mode": "absolute", "steps": [] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "count(sum by (attributes_slug) (count_over_time({service_name=\"runner\"} | json | body=\"chapter list fetched\" [24h])))",
          "legendFormat": "books scraped"
        }
      ]
    },
    {
      "id": 2,
      "type": "stat",
      "title": "Chapter Lists Fetched (last 24h)",
      "description": "Count of 'chapter list fetched' events.",
      "gridPos": { "x": 4, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "color": { "fixedColor": "blue", "mode": "fixed" },
          "thresholds": { "mode": "absolute", "steps": [] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"runner\"} | json | body=\"chapter list fetched\" [24h]))",
          "legendFormat": "chapter lists fetched"
        }
      ]
    },
    {
      "id": 3,
      "type": "stat",
      "title": "Metadata Saved (last 24h)",
      "description": "Count of 'metadata saved' events.",
      "gridPos": { "x": 8, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "color": { "fixedColor": "green", "mode": "fixed" },
          "thresholds": { "mode": "absolute", "steps": [] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"runner\"} | json | body=\"metadata saved\" [24h]))",
          "legendFormat": "metadata saved"
        }
      ]
    },
    {
      "id": 4,
      "type": "stat",
      "title": "Scrape Errors (last 24h)",
      "description": "Count of error severity logs from the runner.",
      "gridPos": { "x": 12, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 10 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"runner\"} | json | severity=\"ERROR\" [24h]))",
          "legendFormat": "errors"
        }
      ]
    },
    {
      "id": 5,
      "type": "stat",
      "title": "Rate Limited (last 24h)",
      "description": "Count of rate limiting events from Novelfire.",
      "gridPos": { "x": 16, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 5 },
              { "color": "red", "value": 50 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"runner\"} | json | body=~\".*rate limit.*\" [24h]))",
          "legendFormat": "rate limited"
        }
      ]
    },
    {
      "id": 10,
      "type": "timeseries",
      "title": "Scrape Rate (books/min)",
      "description": "Rate of events per minute.",
      "gridPos": { "x": 0, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "short", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(rate({service_name=\"runner\"} | json | body=\"chapter list fetched\" [5m])) * 60",
          "legendFormat": "books/min"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(rate({service_name=\"runner\"} | json | body=\"metadata saved\" [5m])) * 60",
          "legendFormat": "metadata/min"
        }
      ]
    },
    {
      "id": 11,
      "type": "timeseries",
      "title": "Error Rate (errors/min)",
      "description": "Rate of error and rate-limit messages over time.",
      "gridPos": { "x": 12, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "short", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": [
          { "matcher": { "id": "byName", "options": "errors/min" }, "properties": [{ "id": "color", "value": { "fixedColor": "red", "mode": "fixed" } }] },
          { "matcher": { "id": "byName", "options": "rate-limit/min" }, "properties": [{ "id": "color", "value": { "fixedColor": "orange", "mode": "fixed" } }] }
        ]
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(rate({service_name=\"runner\"} | json | severity=\"ERROR\" [5m])) * 60",
          "legendFormat": "errors/min"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(rate({service_name=\"runner\"} | json | body=~\".*rate limit.*\" [5m])) * 60",
          "legendFormat": "rate-limit/min"
        }
      ]
    },
    {
      "id": 20,
      "type": "logs",
      "title": "Runner Logs (errors & warnings)",
      "description": "Runner log lines containing errors or warnings.",
      "gridPos": { "x": 0, "y": 12, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": false,
        "wrapLogMessage": false,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"runner\"} | json | severity=~\"ERROR|WARN\"",
          "legendFormat": ""
        }
      ]
    },
    {
      "id": 21,
      "type": "logs",
      "title": "Runner Logs (all)",
      "description": "All runner log entries.",
      "gridPos": { "x": 0, "y": 22, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": true,
        "wrapLogMessage": false,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"runner\"}",
          "legendFormat": ""
        }
      ]
    }
  ]
}
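A quick way to sanity-check these LogQL expressions outside Grafana is Loki's HTTP API. A minimal sketch, assuming Loki is published on localhost:3100 and that the runner really logs body="chapter list fetched":

# Replay the stat-panel query as an instant query
curl -sG http://localhost:3100/loki/api/v1/query \
  --data-urlencode 'query=sum(count_over_time({service_name="runner"} | json | body="chapter list fetched" [24h]))' \
  | python3 -m json.tool

# Inspect a few raw lines to confirm the JSON shape (body, attributes_slug, ...)
curl -sG http://localhost:3100/loki/api/v1/query_range \
  --data-urlencode 'query={service_name="runner"} | json' \
  --data-urlencode 'limit=5' | python3 -m json.tool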
homelab/otel/grafana/provisioning/dashboards/dashboards.yaml
Normal file
@@ -0,0 +1,13 @@
# Grafana dashboard provisioning
# Points Grafana at the local dashboards directory.
# Drop any .json dashboard file into homelab/otel/grafana/provisioning/dashboards/
# and it will appear in Grafana automatically on restart.

apiVersion: 1

providers:
  - name: libnovel
    folder: LibNovel
    type: file
    options:
      path: /etc/grafana/provisioning/dashboards
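Because the provider only re-reads this directory on restart, a malformed dashboard JSON fails silently. A small pre-flight sketch (the compose service name grafana is an assumption, not shown in this diff):

for f in homelab/otel/grafana/provisioning/dashboards/*.json; do
  python3 -m json.tool "$f" > /dev/null && echo "ok: $f" || echo "INVALID: $f"
done
docker compose restart grafana   # pick up new or changed dashboards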
homelab/otel/grafana/provisioning/dashboards/runner.json
Normal file
@@ -0,0 +1,377 @@
{
  "uid": "libnovel-runner",
  "title": "Runner Operations",
  "description": "Task queue health, throughput, TTS routing, and live logs for the homelab runner.",
  "tags": ["libnovel", "runner"],
  "timezone": "browser",
  "refresh": "30s",
  "time": { "from": "now-3h", "to": "now" },
  "schemaVersion": 39,
  "panels": [
    {
      "id": 1,
      "type": "stat",
      "title": "Tasks Running",
      "gridPos": { "x": 0, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 3 }
            ]
          },
          "mappings": []
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_tasks_running",
          "legendFormat": "running",
          "instant": true
        }
      ]
    },
    {
      "id": 2,
      "type": "stat",
      "title": "Tasks Completed (total)",
      "gridPos": { "x": 4, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "area", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "color": { "fixedColor": "green", "mode": "fixed" },
          "thresholds": { "mode": "absolute", "steps": [] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_tasks_completed_total",
          "legendFormat": "completed",
          "instant": true
        }
      ]
    },
    {
      "id": 3,
      "type": "stat",
      "title": "Tasks Failed (total)",
      "gridPos": { "x": 8, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1 },
              { "color": "red", "value": 5 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_tasks_failed_total",
          "legendFormat": "failed",
          "instant": true
        }
      ]
    },
    {
      "id": 4,
      "type": "stat",
      "title": "Runner Uptime",
      "gridPos": { "x": 12, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "none", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "unit": "s",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "red", "value": null },
              { "color": "yellow", "value": 60 },
              { "color": "green", "value": 300 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_uptime_seconds",
          "legendFormat": "uptime",
          "instant": true
        }
      ]
    },
    {
      "id": 5,
      "type": "stat",
      "title": "Task Failure Rate",
      "gridPos": { "x": 16, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "unit": "percentunit",
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 0.05 },
              { "color": "red", "value": 0.2 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_tasks_failed_total / clamp_min(runner_tasks_completed_total + runner_tasks_failed_total, 1)",
          "legendFormat": "failure rate",
          "instant": true
        }
      ]
    },
    {
      "id": 6,
      "type": "stat",
      "title": "Runner Alive",
      "gridPos": { "x": 20, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none", "textMode": "auto" },
      "fieldConfig": {
        "defaults": {
          "mappings": [
            { "type": "value", "options": { "1": { "text": "UP", "color": "green" }, "0": { "text": "DOWN", "color": "red" } } }
          ],
          "thresholds": { "mode": "absolute", "steps": [] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "up{job=\"libnovel-runner\"}",
          "legendFormat": "runner",
          "instant": true
        }
      ]
    },
    {
      "id": 10,
      "type": "timeseries",
      "title": "Task Throughput (per minute)",
      "gridPos": { "x": 0, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "ops", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": [
          { "matcher": { "id": "byName", "options": "failed" }, "properties": [{ "id": "color", "value": { "fixedColor": "red", "mode": "fixed" } }] },
          { "matcher": { "id": "byName", "options": "completed" }, "properties": [{ "id": "color", "value": { "fixedColor": "green", "mode": "fixed" } }] }
        ]
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "rate(runner_tasks_completed_total[5m]) * 60",
          "legendFormat": "completed"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "rate(runner_tasks_failed_total[5m]) * 60",
          "legendFormat": "failed"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "runner_tasks_running",
          "legendFormat": "running"
        }
      ]
    },
    {
      "id": 11,
      "type": "timeseries",
      "title": "Audio Task Span Latency (p50 / p95 / p99)",
      "description": "End-to-end latency of runner.audio_task spans from Tempo span metrics.",
      "gridPos": { "x": 12, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "s", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.50, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.audio_task\"}[5m])) by (le))",
          "legendFormat": "p50"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.audio_task\"}[5m])) by (le))",
          "legendFormat": "p95"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.99, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.audio_task\"}[5m])) by (le))",
          "legendFormat": "p99"
        }
      ]
    },
    {
      "id": 20,
      "type": "timeseries",
      "title": "Scrape Task Span Latency (p50 / p95 / p99)",
      "description": "End-to-end latency of runner.scrape_task spans from Tempo span metrics.",
      "gridPos": { "x": 0, "y": 12, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "s", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.50, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.scrape_task\"}[5m])) by (le))",
          "legendFormat": "p50"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.95, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.scrape_task\"}[5m])) by (le))",
          "legendFormat": "p95"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "histogram_quantile(0.99, sum(rate(traces_spanmetrics_latency_bucket{service=\"runner\", span_name=\"runner.scrape_task\"}[5m])) by (le))",
          "legendFormat": "p99"
        }
      ]
    },
    {
      "id": 21,
      "type": "timeseries",
      "title": "Audio vs Scrape Task Rate",
      "description": "Relative throughput of audio generation vs book scraping.",
      "gridPos": { "x": 12, "y": 12, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "ops", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"runner\", span_name=\"runner.audio_task\"}[5m]))",
          "legendFormat": "audio tasks/s"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "sum(rate(traces_spanmetrics_calls_total{service=\"runner\", span_name=\"runner.scrape_task\"}[5m]))",
          "legendFormat": "scrape tasks/s"
        }
      ]
    },
    {
      "id": 30,
      "type": "logs",
      "title": "Runner Logs (errors & warnings)",
      "gridPos": { "x": 0, "y": 20, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": false,
        "showCommonLabels": false,
        "wrapLogMessage": true,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"runner\"} | json | severity=~\"ERROR|WARN\"",
          "legendFormat": ""
        }
      ]
    },
    {
      "id": 31,
      "type": "logs",
      "title": "Runner Logs (all)",
      "gridPos": { "x": 0, "y": 30, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": false,
        "showCommonLabels": false,
        "wrapLogMessage": true,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"runner\"}",
          "legendFormat": ""
        }
      ]
    }
  ]
}
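The failure-rate panel divides by clamp_min(completed + failed, 1), so a fresh runner with zero tasks reads 0 instead of NaN. The same expression can be spot-checked against the Prometheus HTTP API; a sketch assuming Prometheus on localhost:9090:

curl -sG http://localhost:9090/api/v1/query \
  --data-urlencode 'query=runner_tasks_failed_total / clamp_min(runner_tasks_completed_total + runner_tasks_failed_total, 1)' \
  | python3 -m json.tool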
homelab/otel/grafana/provisioning/dashboards/web-vitals.json
Normal file
@@ -0,0 +1,804 @@
{
  "uid": "libnovel-web-vitals",
  "title": "Web Vitals (RUM)",
  "description": "Core Web Vitals from @grafana/faro-web-sdk. Data: browser \u2192 Alloy faro.receiver \u2192 Loki ({service_name=unknown_service}). Log format: key=value pairs, e.g. lcp=767.000000 fcp=767.000000. Use | regexp to extract.",
  "tags": ["libnovel", "frontend", "rum", "web-vitals"],
  "timezone": "browser",
  "refresh": "1m",
  "time": { "from": "now-24h", "to": "now" },
  "schemaVersion": 39,
  "panels": [
    {
      "id": 1,
      "type": "stat",
      "title": "LCP \u2014 p75 (Largest Contentful Paint)",
      "description": "Good < 2.5s, needs improvement < 4s, poor >= 4s. Source: Loki {service_name=unknown_service} Faro measurements.",
      "gridPos": { "x": 0, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "ms",
          "decimals": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 2500 },
              { "color": "red", "value": 4000 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `lcp=(?P<lcp>\\d+\\.?\\d*)` | unwrap lcp [1h])",
          "legendFormat": "LCP p75",
          "instant": true
        }
      ]
    },
    {
      "id": 2,
      "type": "stat",
      "title": "INP \u2014 p75 (Interaction to Next Paint)",
      "description": "Good < 200ms, needs improvement < 500ms, poor >= 500ms.",
      "gridPos": { "x": 4, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "ms",
          "decimals": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 200 },
              { "color": "red", "value": 500 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `inp=(?P<inp>\\d+\\.?\\d*)` | unwrap inp [1h])",
          "legendFormat": "INP p75",
          "instant": true
        }
      ]
    },
    {
      "id": 3,
      "type": "stat",
      "title": "CLS \u2014 p75 (Cumulative Layout Shift)",
      "description": "Good < 0.1, needs improvement < 0.25, poor >= 0.25.",
      "gridPos": { "x": 8, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "short",
          "decimals": 3,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 0.1 },
              { "color": "red", "value": 0.25 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `cls=(?P<cls>\\d+\\.?\\d*)` | unwrap cls [1h])",
          "legendFormat": "CLS p75",
          "instant": true
        }
      ]
    },
    {
      "id": 4,
      "type": "stat",
      "title": "TTFB \u2014 p75 (Time to First Byte)",
      "description": "Good < 800ms, needs improvement < 1800ms, poor >= 1800ms.",
      "gridPos": { "x": 12, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "ms",
          "decimals": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 800 },
              { "color": "red", "value": 1800 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `ttfb=(?P<ttfb>\\d+\\.?\\d*)` | unwrap ttfb [1h])",
          "legendFormat": "TTFB p75",
          "instant": true
        }
      ]
    },
    {
      "id": 5,
      "type": "stat",
      "title": "FCP \u2014 p75 (First Contentful Paint)",
      "description": "Good < 1.8s, needs improvement < 3s, poor >= 3s.",
      "gridPos": { "x": 16, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "ms",
          "decimals": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 1800 },
              { "color": "red", "value": 3000 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `fcp=(?P<fcp>\\d+\\.?\\d*)` | unwrap fcp [1h])",
          "legendFormat": "FCP p75",
          "instant": true
        }
      ]
    },
    {
      "id": 6,
      "type": "stat",
      "title": "Measurements / min",
      "description": "Faro measurement events per minute, averaged over the last 5 minutes (activity indicator).",
      "gridPos": { "x": 20, "y": 0, "w": 4, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
      "fieldConfig": {
        "defaults": {
          "unit": "short",
          "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" [5m])) / 5",
          "legendFormat": "measurements",
          "instant": true
        }
      ]
    },
    {
      "id": 10,
      "type": "timeseries",
      "title": "LCP over time (p50 / p75 / p95)",
      "gridPos": { "x": 0, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "ms", "custom": { "lineWidth": 2, "fillOpacity": 10 } },
        "overrides": [
          { "matcher": { "id": "byName", "options": "Good (2.5s)" }, "properties": [{ "id": "color", "value": { "fixedColor": "green", "mode": "fixed" } }, { "id": "custom.lineStyle", "value": { "fill": "dash", "dash": [4, 4] } }] },
          { "matcher": { "id": "byName", "options": "Poor (4s)" }, "properties": [{ "id": "color", "value": { "fixedColor": "red", "mode": "fixed" } }, { "id": "custom.lineStyle", "value": { "fill": "dash", "dash": [4, 4] } }] }
        ]
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `lcp=(?P<lcp>\\d+\\.?\\d*)` | unwrap lcp [5m])",
          "legendFormat": "p50"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `lcp=(?P<lcp>\\d+\\.?\\d*)` | unwrap lcp [5m])",
          "legendFormat": "p75"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `lcp=(?P<lcp>\\d+\\.?\\d*)` | unwrap lcp [5m])",
          "legendFormat": "p95"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "2500",
          "legendFormat": "Good (2.5s)"
        },
        {
          "datasource": { "type": "prometheus", "uid": "prometheus" },
          "expr": "4000",
          "legendFormat": "Poor (4s)"
        }
      ]
    },
    {
      "id": 11,
      "type": "timeseries",
      "title": "TTFB over time (p50 / p75 / p95)",
      "gridPos": { "x": 12, "y": 4, "w": 12, "h": 8 },
      "options": { "tooltip": { "mode": "multi" }, "legend": { "displayMode": "list", "placement": "bottom" } },
      "fieldConfig": {
        "defaults": { "unit": "ms", "custom": { "lineWidth": 2, "fillOpacity": 10 } }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `ttfb=(?P<ttfb>\\d+\\.?\\d*)` | unwrap ttfb [5m])",
          "legendFormat": "p50"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.75, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `ttfb=(?P<ttfb>\\d+\\.?\\d*)` | unwrap ttfb [5m])",
          "legendFormat": "p75"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"kind=measurement\" |= \"type=web-vitals\" | regexp `ttfb=(?P<ttfb>\\d+\\.?\\d*)` | unwrap ttfb [5m])",
          "legendFormat": "p95"
        }
      ]
    },
    {
      "id": 20,
      "type": "logs",
      "title": "Frontend Errors & Exceptions",
      "description": "JS exceptions captured by Faro. kind=exception events.",
      "gridPos": { "x": 0, "y": 12, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": true,
        "wrapLogMessage": true,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"unknown_service\"} | regexp `kind=(?P<kind>\\w+)` | kind = \"exception\"",
          "legendFormat": ""
        }
      ]
    },
    {
      "id": 21,
      "type": "logs",
      "title": "Web Vitals Measurements",
      "description": "All Faro measurement events.",
      "gridPos": { "x": 0, "y": 22, "w": 24, "h": 10 },
      "options": {
        "showTime": true,
        "showLabels": true,
        "wrapLogMessage": false,
        "prettifyLogMessage": true,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"unknown_service\"} | regexp `kind=(?P<kind>\\w+)` | kind = \"measurement\"",
          "legendFormat": ""
        }
      ]
    },
    {
      "id": 30,
      "type": "row",
      "title": "API Performance (Upstream Requests)",
      "gridPos": { "x": 0, "y": 32, "w": 24, "h": 1 },
      "collapsed": false
    },
    {
      "id": 31,
      "type": "timeseries",
      "title": "API Request Duration — p50 / p95 by endpoint",
      "description": "Duration of all libnovel.cc/api/* fetch requests captured by Faro faro.performance.resource events. Values in ms.",
      "gridPos": { "x": 0, "y": 33, "w": 24, "h": 10 },
      "options": {
        "tooltip": { "mode": "multi" },
        "legend": { "displayMode": "table", "placement": "bottom", "calcs": ["mean", "max", "lastNotNull"] }
      },
      "fieldConfig": {
        "defaults": { "unit": "ms", "custom": { "lineWidth": 2, "fillOpacity": 5 } }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress/audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/progress/audio-time"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress/audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/progress/audio-time"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/presign/audio\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/presign/audio"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/presign/audio\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/presign/audio"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress\" !~ \"audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/progress"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress\" !~ \"audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/progress"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/comments\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/comments"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/comments\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/comments"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/settings\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/settings"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/settings\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/settings"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.50, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/catalogue-page\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p50 /api/catalogue-page"
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/catalogue-page\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [5m])",
          "legendFormat": "p95 /api/catalogue-page"
        }
      ]
    },
    {
      "id": 32,
      "type": "barchart",
      "title": "API Avg Duration — last 1h",
      "description": "Average duration per endpoint over the last hour. Useful for spotting the slowest APIs at a glance.",
      "gridPos": { "x": 0, "y": 43, "w": 12, "h": 8 },
      "options": {
        "orientation": "horizontal",
        "legend": { "displayMode": "list", "placement": "bottom" },
        "tooltip": { "mode": "single" },
        "xTickLabelRotation": 0
      },
      "fieldConfig": {
        "defaults": { "unit": "ms", "color": { "mode": "palette-classic" } }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress/audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/progress/audio-time",
          "instant": true
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/presign/audio\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/presign/audio",
          "instant": true
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/progress\" !~ \"audio-time\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/progress",
          "instant": true
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/comments\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/comments",
          "instant": true
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/settings\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/settings",
          "instant": true
        },
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "avg_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api/catalogue-page\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h])",
          "legendFormat": "/api/catalogue-page",
          "instant": true
        }
      ]
    },
    {
      "id": 33,
      "type": "stat",
      "title": "Slowest API call — p95 last 1h",
      "description": "p95 duration of the single slowest endpoint in the last hour.",
      "gridPos": { "x": 12, "y": 43, "w": 6, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "background", "graphMode": "none" },
      "fieldConfig": {
        "defaults": {
          "unit": "ms",
          "decimals": 0,
          "thresholds": {
            "mode": "absolute",
            "steps": [
              { "color": "green", "value": null },
              { "color": "yellow", "value": 500 },
              { "color": "red", "value": 1000 }
            ]
          }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "max(quantile_over_time(0.95, {service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | unwrap dur [1h]))",
          "legendFormat": "p95 max",
          "instant": true
        }
      ]
    },
    {
      "id": 34,
      "type": "stat",
      "title": "API Requests / min",
      "description": "Rate of libnovel.cc API requests captured by Faro, averaged over the last 5 minutes.",
      "gridPos": { "x": 18, "y": 43, "w": 6, "h": 4 },
      "options": { "reduceOptions": { "calcs": ["lastNotNull"] }, "colorMode": "value", "graphMode": "area" },
      "fieldConfig": {
        "defaults": {
          "unit": "short",
          "thresholds": { "mode": "absolute", "steps": [{ "color": "green", "value": null }] }
        }
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "sum(count_over_time({service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api\" [5m])) / 5",
          "legendFormat": "req/min",
          "instant": true
        }
      ]
    },
    {
      "id": 35,
      "type": "logs",
      "title": "Slow API Requests (>500ms)",
      "description": "Individual faro.performance.resource events where duration > 500ms. Useful for debugging outliers.",
      "gridPos": { "x": 0, "y": 47, "w": 24, "h": 8 },
      "options": {
        "showTime": true,
        "showLabels": false,
        "wrapLogMessage": false,
        "prettifyLogMessage": false,
        "enableLogDetails": true,
        "sortOrder": "Descending",
        "dedupStrategy": "none"
      },
      "targets": [
        {
          "datasource": { "type": "loki", "uid": "loki" },
          "expr": "{service_name=\"unknown_service\"} |= \"faro.performance.resource\" |= \"libnovel.cc/api\" | regexp `event_data_duration=(?P<dur>[0-9.]+)` | dur > 500",
          "legendFormat": ""
        }
      ]
    }
  ]
}
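The regexp/unwrap pattern above is easy to get subtly wrong (anchoring, number format), so it is worth replaying one quantile by hand before trusting the stat panels. A sketch, assuming Loki on localhost:3100 and Faro logs in the unknown_service stream:

curl -sG http://localhost:3100/loki/api/v1/query \
  --data-urlencode 'query=quantile_over_time(0.75, {service_name="unknown_service"} |= "kind=measurement" |= "type=web-vitals" | regexp `lcp=(?P<lcp>\d+\.?\d*)` | unwrap lcp [1h])' \
  | python3 -m json.tool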
@@ -0,0 +1,53 @@
# Grafana datasource provisioning
# Auto-configures Tempo, Prometheus, and Loki on first start.
# No manual setup needed in the UI.

apiVersion: 1

datasources:
  - name: Tempo
    type: tempo
    uid: tempo
    url: http://tempo:3200
    access: proxy
    isDefault: false
    jsonData:
      httpMethod: GET
      serviceMap:
        datasourceUid: prometheus
      nodeGraph:
        enabled: true
      traceQuery:
        timeShiftEnabled: true
        spanStartTimeShift: "1h"
        spanEndTimeShift: "-1h"
      spanBar:
        type: "Tag"
        tag: "http.url"
      lokiSearch:
        datasourceUid: loki

  - name: Prometheus
    type: prometheus
    uid: prometheus
    url: http://prometheus:9090
    access: proxy
    isDefault: true
    jsonData:
      httpMethod: POST
      exemplarTraceIdDestinations:
        - name: traceID
          datasourceUid: tempo

  - name: Loki
    type: loki
    uid: loki
    url: http://loki:3100
    access: proxy
    isDefault: false
    jsonData:
      derivedFields:
        - datasourceUid: tempo
          matcherRegex: '"traceID":"(\w+)"'
          name: TraceID
          url: "$${__value.raw}"
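The Loki-to-Tempo derived field only renders a link when matcherRegex matches the log line, so it helps to confirm the logs carry a literal "traceID":"..." token. A hypothetical example line, checked with an equivalent POSIX regex (the \w class from the Grafana regex becomes [[:alnum:]_] for grep):

echo '{"level":"info","msg":"task done","traceID":"4bf92f3577b34da6a3ce929d0e0e4736"}' \
  | grep -oE '"traceID":"[[:alnum:]_]+"'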
homelab/otel/loki.yaml
Normal file
@@ -0,0 +1,38 @@
# Loki config — minimal single-node setup
# Receives logs from OTel Collector. 30-day retention.

auth_enabled: false

server:
  http_listen_port: 3100
  grpc_listen_port: 9096

common:
  instance_addr: 127.0.0.1
  path_prefix: /loki
  storage:
    filesystem:
      chunks_directory: /loki/chunks
      rules_directory: /loki/rules
  replication_factor: 1
  ring:
    kvstore:
      store: inmemory

schema_config:
  configs:
    - from: 2024-01-01
      store: tsdb
      object_store: filesystem
      schema: v13
      index:
        prefix: index_
        period: 24h

limits_config:
  retention_period: 720h # 30 days

compactor:
  working_directory: /loki/compactor
  delete_request_store: filesystem
  retention_enabled: true
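Retention here is enforced by the compactor, so a typo in this block silently keeps logs forever. Loki exposes its effective merged config over HTTP; a sketch assuming the port is published locally:

curl -s http://localhost:3100/ready
curl -s http://localhost:3100/config | grep -nE 'retention_enabled|retention_period'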
homelab/otel/prometheus.yaml
Normal file
@@ -0,0 +1,22 @@
# Prometheus config
# Scrapes OTel collector self-metrics and runner metrics endpoint.
# Backend metrics come in via OTel remote-write — no direct scrape needed.

global:
  scrape_interval: 15s
  evaluation_interval: 15s
  external_labels:
    environment: production

scrape_configs:
  # OTel Collector self-metrics
  - job_name: otel-collector
    static_configs:
      - targets: ["otel-collector:8888"]

  # Runner metrics endpoint (hand-rolled exposition, no Prometheus client yet)
  # Will be replaced by OTLP once runner is instrumented with OTel SDK.
  - job_name: libnovel-runner
    metrics_path: /metrics
    static_configs:
      - targets: ["runner:9091"]
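Once the stack is up, both jobs should appear as healthy targets. A sketch against the Prometheus API (localhost:9090 assumed):

curl -s http://localhost:9090/api/v1/targets \
  | python3 -c 'import sys, json; [print(t["labels"]["job"], t["health"]) for t in json.load(sys.stdin)["data"]["activeTargets"]]'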
homelab/otel/tempo.yaml
Normal file
@@ -0,0 +1,45 @@
# Tempo config — minimal single-node setup
# Stores traces locally. Grafana queries via the HTTP API on port 3200.

server:
  http_listen_port: 3200

distributor:
  receivers:
    otlp:
      protocols:
        grpc:
          endpoint: 0.0.0.0:4317

ingester:
  trace_idle_period: 10s
  max_block_bytes: 104857600 # 100MB
  max_block_duration: 30m

compactor:
  compaction:
    block_retention: 720h # 30 days

storage:
  trace:
    backend: local
    local:
      path: /var/tempo/blocks
    wal:
      path: /var/tempo/wal

metrics_generator:
  registry:
    external_labels:
      source: tempo
  storage:
    path: /var/tempo/generator/wal
    remote_write:
      - url: http://prometheus:9090/api/v1/write
        send_exemplars: true

overrides:
  defaults:
    metrics_generator:
      processors: [service-graphs, span-metrics]
      generate_native_histograms: both
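If the metrics generator is wired correctly, Tempo-produced series reach Prometheus shortly after the first traces arrive; note that the remote_write target normally needs Prometheus started with --web.enable-remote-write-receiver, which is configured outside this file. A sketch to confirm the span metrics backing the runner dashboard exist:

curl -sG http://localhost:9090/api/v1/query \
  --data-urlencode 'query=sum by (span_name) (rate(traces_spanmetrics_calls_total{service="runner"}[5m]))' \
  | python3 -m json.tool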
@@ -1,21 +1,53 @@
# LibNovel homelab runner
#
# Connects to production PocketBase and MinIO via public subdomains.
# All secrets come from Doppler (project=libnovel, config=prd).
# All secrets come from Doppler (project=libnovel, config=prd_homelab).
# Run with: doppler run -- docker compose up -d
#
# Differs from prod runner:
# - RUNNER_WORKER_ID=homelab-runner-1 (unique, avoids task claiming conflicts)
# - MINIO_ENDPOINT/USE_SSL → storage.libnovel.cc over HTTPS
# - POCKETBASE_URL → https://pb.libnovel.cc
# - MEILI_URL/VALKEY_ADDR → unset (not exposed publicly; not needed by runner)
# - MEILI_URL → https://search.libnovel.cc (Caddy-proxied)
# - VALKEY_ADDR → unset (not exposed publicly)
# - RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true
# - REDIS_ADDR → rediss://redis.libnovel.cc:6380 (prod Redis via Caddy TLS proxy)
# - LibreTranslate service for machine translation (internal network only)
#
# extra_hosts pins storage.libnovel.cc and pb.libnovel.cc to the prod server IP
# (165.22.70.138) so that large PutObject uploads and PocketBase writes bypass
# Cloudflare's 100-second proxy timeout entirely. TLS still terminates at Caddy
# on prod; the TLS certificate is valid for the domain names so SNI works fine.

services:
  libretranslate:
    image: libretranslate/libretranslate:latest
    restart: unless-stopped
    environment:
      LT_API_KEYS: "true"
      LT_API_KEYS_DB_PATH: "/app/db/api_keys.db"
      # Limit to source→target pairs the runner actually uses
      LT_LOAD_ONLY: "en,ru,id,pt,fr"
      LT_DISABLE_WEB_UI: "true"
      LT_UPDATE_MODELS: "false"
    volumes:
      - libretranslate_models:/home/libretranslate/.local/share/argos-translate
      - libretranslate_db:/app/db

  runner:
    image: kalekber/libnovel-runner:latest
    restart: unless-stopped
    stop_grace_period: 135s
    labels:
      - "com.centurylinklabs.watchtower.enable=true"
    depends_on:
      - libretranslate
    # Pin prod subdomains to the prod server IP to bypass Cloudflare's 100s
    # proxy timeout. Large MP3 PutObject uploads and PocketBase writes go
    # directly to Caddy on prod; TLS and SNI still work normally.
    extra_hosts:
      - "storage.libnovel.cc:165.22.70.138"
      - "pb.libnovel.cc:165.22.70.138"
    environment:
      # ── PocketBase ──────────────────────────────────────────────────────────
      POCKETBASE_URL: "https://pb.libnovel.cc"
@@ -30,29 +62,54 @@ services:
      MINIO_PUBLIC_ENDPOINT: "${MINIO_PUBLIC_ENDPOINT}"
      MINIO_PUBLIC_USE_SSL: "${MINIO_PUBLIC_USE_SSL}"

      # ── Meilisearch / Valkey — not exposed, disabled ────────────────────────
      MEILI_URL: ""
      # ── Meilisearch (via search.libnovel.cc Caddy proxy) ────────────────────
      MEILI_URL: "${MEILI_URL}"
      MEILI_API_KEY: "${MEILI_API_KEY}"
      VALKEY_ADDR: ""
      # Force IPv4 DNS resolution — homelab has no IPv6 route to search.libnovel.cc
      GODEBUG: "preferIPv4=1"

      # ── Kokoro TTS ──────────────────────────────────────────────────────────
      KOKORO_URL: "${KOKORO_URL}"
      KOKORO_VOICE: "${KOKORO_VOICE}"

      # ── Pocket TTS ──────────────────────────────────────────────────────────
      POCKET_TTS_URL: "${POCKET_TTS_URL}"

      # ── Cloudflare Workers AI TTS ────────────────────────────────────────────
      CFAI_ACCOUNT_ID: "${CFAI_ACCOUNT_ID}"
      CFAI_API_TOKEN: "${CFAI_API_TOKEN}"

      # ── LibreTranslate (internal Docker network) ────────────────────────────
      LIBRETRANSLATE_URL: "http://libretranslate:5000"
      LIBRETRANSLATE_API_KEY: "${LIBRETRANSLATE_API_KEY}"

      # ── Asynq / Redis (prod Redis via Caddy TLS proxy) ──────────────────────
      # The runner connects to prod Redis over TLS: rediss://redis.libnovel.cc:6380.
      # Caddy on prod terminates TLS and proxies to the local redis:6379 sidecar.
      REDIS_ADDR: "${REDIS_ADDR}"
      REDIS_PASSWORD: "${REDIS_PASSWORD}"

      # ── Runner tuning ───────────────────────────────────────────────────────
      RUNNER_WORKER_ID: "${RUNNER_WORKER_ID}"
      RUNNER_POLL_INTERVAL: "${RUNNER_POLL_INTERVAL}"
      RUNNER_MAX_CONCURRENT_SCRAPE: "${RUNNER_MAX_CONCURRENT_SCRAPE}"
      RUNNER_MAX_CONCURRENT_AUDIO: "${RUNNER_MAX_CONCURRENT_AUDIO}"
      RUNNER_MAX_CONCURRENT_TRANSLATION: "${RUNNER_MAX_CONCURRENT_TRANSLATION}"
      RUNNER_TIMEOUT: "${RUNNER_TIMEOUT}"
      RUNNER_METRICS_ADDR: "${RUNNER_METRICS_ADDR}"
      RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH: "true"

      # ── Observability ───────────────────────────────────────────────────────
      LOG_LEVEL: "${LOG_LEVEL}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN}"
      GLITCHTIP_DSN: "${GLITCHTIP_DSN_RUNNER}"

    healthcheck:
      test: ["CMD", "/healthcheck", "file", "/tmp/runner.alive", "120"]
      interval: 60s
      timeout: 5s
      retries: 3

volumes:
  libretranslate_models:
  libretranslate_db:
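To confirm the extra_hosts pin is actually in effect inside the container (rather than Cloudflare still answering), resolve the names from within the runner. A sketch, assuming the image ships getent:

docker compose exec runner getent hosts storage.libnovel.cc pb.libnovel.cc
# Both should resolve to 165.22.70.138; any other answer means the pin is ignored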
justfile
@@ -122,6 +122,13 @@ secrets-env:
secrets-dashboard:
    doppler open dashboard

# ── Developer setup ───────────────────────────────────────────────────────────

# One-time dev setup: configure git to use committed hooks in .githooks/
setup:
    git config core.hooksPath .githooks
    @echo "Git hooks configured (.githooks/pre-commit active)."

# ── Gitea CI ──────────────────────────────────────────────────────────────────

# Validate workflow files
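A quick check that the hook path took effect and the committed hook is executable (paths as in this repo):

git config core.hooksPath                 # should print .githooks
test -x .githooks/pre-commit && echo "pre-commit hook is executable"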
@@ -62,6 +62,39 @@ create() {
  esac
}

# add_index COLLECTION INDEX_NAME SQL_EXPR
# Fetches current schema, adds index if absent by name, PATCHes collection.
add_index() {
  COLL="$1"; INAME="$2"; ISQL="$3"
  SCHEMA=$(curl -sf -H "Authorization: Bearer $TOK" "$PB/api/collections/$COLL" 2>/dev/null)
  PARSED=$(echo "$SCHEMA" | python3 -c "
import sys, json
d = json.load(sys.stdin)
indexes = d.get('indexes', [])
exists = any('$INAME' in idx for idx in indexes)
print('exists=' + str(exists))
print('id=' + d.get('id', ''))
if not exists:
    indexes.append('$ISQL')
    print('indexes=' + json.dumps(indexes))
" 2>/dev/null)
  if echo "$PARSED" | grep -q "^exists=True"; then
    log "index exists (skip): $COLL.$INAME"; return
  fi
  COLL_ID=$(echo "$PARSED" | grep "^id=" | sed 's/^id=//')
  [ -z "$COLL_ID" ] && { log "WARNING: cannot resolve id for $COLL"; return; }
  NEW_INDEXES=$(echo "$PARSED" | grep "^indexes=" | sed 's/^indexes=//')
  STATUS=$(curl -s -o /dev/null -w "%{http_code}" \
    -X PATCH "$PB/api/collections/$COLL_ID" \
    -H "Content-Type: application/json" \
    -H "Authorization: Bearer $TOK" \
    -d "{\"indexes\":${NEW_INDEXES}}")
  case "$STATUS" in
    200|201) log "added index: $COLL.$INAME" ;;
    *) log "WARNING: add_index $COLL.$INAME returned $STATUS" ;;
  esac
}

# add_field COLLECTION FIELD_NAME FIELD_TYPE
# Fetches current schema, appends field if absent, PATCHes collection.
# Requires python3 for safe JSON manipulation.
@@ -111,14 +144,16 @@ create "books" '{
  {"name":"total_chapters","type":"number"},
  {"name":"source_url", "type":"text"},
  {"name":"ranking", "type":"number"},
  {"name":"meta_updated", "type":"text"}
  {"name":"meta_updated", "type":"text"},
  {"name":"archived", "type":"bool"}
]}'

create "chapters_idx" '{
  "name":"chapters_idx","type":"base","fields":[
  {"name":"slug", "type":"text", "required":true},
  {"name":"number","type":"number", "required":true},
  {"name":"title", "type":"text"}
  {"name":"slug", "type":"text", "required":true},
  {"name":"number", "type":"number", "required":true},
  {"name":"title", "type":"text"},
  {"name":"created", "type":"date"}
]}'

create "ranking" '{
@@ -190,14 +225,15 @@ create "app_users" '{
  {"name":"oauth_id", "type":"text"}
]}'

create "user_sessions" '{
create "user_sessions" '{
  "name":"user_sessions","type":"base","fields":[
  {"name":"user_id", "type":"text","required":true},
  {"name":"session_id","type":"text","required":true},
  {"name":"user_agent","type":"text"},
  {"name":"ip", "type":"text"},
  {"name":"created_at","type":"text"},
  {"name":"last_seen", "type":"text"}
  {"name":"user_id", "type":"text","required":true},
  {"name":"session_id", "type":"text","required":true},
  {"name":"user_agent", "type":"text"},
  {"name":"ip", "type":"text"},
  {"name":"device_fingerprint", "type":"text"},
  {"name":"created_at", "type":"text"},
  {"name":"last_seen", "type":"text"}
]}'

create "user_library" '{
@@ -210,12 +246,18 @@ create "user_library" '{

create "user_settings" '{
  "name":"user_settings","type":"base","fields":[
  {"name":"session_id","type":"text","required":true},
  {"name":"user_id", "type":"text"},
  {"name":"auto_next","type":"bool"},
  {"name":"voice", "type":"text"},
  {"name":"speed", "type":"number"},
  {"name":"updated", "type":"text"}
  {"name":"session_id", "type":"text", "required":true},
  {"name":"user_id", "type":"text"},
  {"name":"auto_next", "type":"bool"},
  {"name":"voice", "type":"text"},
  {"name":"speed", "type":"number"},
  {"name":"theme", "type":"text"},
  {"name":"locale", "type":"text"},
  {"name":"font_family", "type":"text"},
  {"name":"font_size", "type":"number"},
  {"name":"announce_chapter","type":"bool"},
  {"name":"audio_mode", "type":"text"},
  {"name":"updated", "type":"text"}
]}'

create "user_subscriptions" '{
@@ -245,6 +287,87 @@ create "comment_votes" '{
  {"name":"vote", "type":"text"}
]}'

create "translation_jobs" '{
  "name":"translation_jobs","type":"base","fields":[
  {"name":"cache_key", "type":"text", "required":true},
  {"name":"slug", "type":"text", "required":true},
  {"name":"chapter", "type":"number","required":true},
  {"name":"lang", "type":"text", "required":true},
  {"name":"worker_id", "type":"text"},
  {"name":"status", "type":"text", "required":true},
  {"name":"error_message","type":"text"},
  {"name":"started", "type":"date"},
  {"name":"finished", "type":"date"},
  {"name":"heartbeat_at", "type":"date"}
]}'

create "import_tasks" '{
  "name":"import_tasks","type":"base","fields":[
  {"name":"slug", "type":"text", "required":true},
  {"name":"title", "type":"text", "required":true},
  {"name":"file_name", "type":"text"},
  {"name":"file_type", "type":"text"},
  {"name":"object_key", "type":"text"},
  {"name":"chapters_key", "type":"text"},
  {"name":"author", "type":"text"},
  {"name":"cover_url", "type":"text"},
  {"name":"genres", "type":"text"},
  {"name":"summary", "type":"text"},
  {"name":"book_status", "type":"text"},
  {"name":"worker_id", "type":"text"},
  {"name":"initiator_user_id", "type":"text"},
  {"name":"status", "type":"text", "required":true},
  {"name":"chapters_done", "type":"number"},
  {"name":"chapters_total", "type":"number"},
  {"name":"error_message", "type":"text"},
  {"name":"started", "type":"date"},
  {"name":"finished", "type":"date"},
  {"name":"heartbeat_at", "type":"date"}
]}'

create "notifications" '{
  "name":"notifications","type":"base","fields":[
  {"name":"user_id", "type":"text","required":true},
  {"name":"title", "type":"text","required":true},
  {"name":"message", "type":"text"},
  {"name":"link", "type":"text"},
  {"name":"read", "type":"bool"},
  {"name":"created", "type":"date"}
]}'

create "ai_jobs" '{
  "name":"ai_jobs","type":"base","fields":[
  {"name":"kind", "type":"text", "required":true},
  {"name":"slug", "type":"text"},
  {"name":"status", "type":"text", "required":true},
  {"name":"from_item", "type":"number"},
  {"name":"to_item", "type":"number"},
  {"name":"items_done", "type":"number"},
  {"name":"items_total", "type":"number"},
  {"name":"model", "type":"text"},
  {"name":"payload", "type":"text"},
  {"name":"error_message", "type":"text"},
  {"name":"started", "type":"date"},
  {"name":"finished", "type":"date"},
  {"name":"heartbeat_at", "type":"date"}
]}'

create "discovery_votes" '{
  "name":"discovery_votes","type":"base","fields":[
  {"name":"session_id","type":"text","required":true},
  {"name":"user_id", "type":"text"},
  {"name":"slug", "type":"text","required":true},
  {"name":"action", "type":"text","required":true}
]}'

create "book_ratings" '{
  "name":"book_ratings","type":"base","fields":[
  {"name":"session_id","type":"text", "required":true},
  {"name":"user_id", "type":"text"},
  {"name":"slug", "type":"text", "required":true},
  {"name":"rating", "type":"number", "required":true}
]}'

# ── 5. Field migrations (idempotent — adds fields missing from older installs) ─
add_field "scraping_tasks" "heartbeat_at" "date"
add_field "audio_jobs" "heartbeat_at" "date"
@@ -258,5 +381,23 @@ add_field "app_users" "verification_token" "text"
add_field "app_users" "verification_token_exp" "text"
add_field "app_users" "oauth_provider" "text"
add_field "app_users" "oauth_id" "text"
add_field "app_users" "polar_customer_id" "text"
add_field "app_users" "polar_subscription_id" "text"
add_field "user_library" "shelf" "text"
add_field "user_sessions" "device_fingerprint" "text"
add_field "chapters_idx" "created" "date"
add_field "user_settings" "theme" "text"
add_field "user_settings" "locale" "text"
add_field "user_settings" "font_family" "text"
add_field "user_settings" "font_size" "number"
add_field "user_settings" "announce_chapter" "bool"
add_field "user_settings" "audio_mode" "text"
add_field "books" "archived" "bool"

# ── 6. Indexes ────────────────────────────────────────────────────────────────
add_index "chapters_idx" "idx_chapters_idx_slug_number" \
  "CREATE UNIQUE INDEX idx_chapters_idx_slug_number ON chapters_idx (slug, number)"
"CREATE UNIQUE INDEX idx_chapters_idx_slug_number ON chapters_idx (slug, number)"
|
||||
add_index "chapters_idx" "idx_chapters_idx_created" \
|
||||
"CREATE INDEX idx_chapters_idx_created ON chapters_idx (created)"
|
||||
|
||||
log "done"
|
||||
|
||||
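Steps 5 and 6 lean on helper functions (`create`, `add_field`, `add_index`) that are defined earlier in the script and therefore sit outside these hunks. For readers following along, here is a minimal sketch of how an idempotent `add_field` could be written against PocketBase's collections API; the `$PB_URL` and `$TOKEN` variables, the endpoint shape, and the body itself are assumptions, not code from this commit:

```sh
# Hypothetical add_field: fetch the collection, skip if the field exists,
# otherwise append it and PATCH the collection back. PB_URL, TOKEN, and the
# /api/collections endpoint are assumptions about the script's earlier setup.
add_field() {
  local coll="$1" name="$2" type="$3" cur
  cur="$(curl -sf -H "Authorization: $TOKEN" "$PB_URL/api/collections/$coll")" || return 1
  # Already migrated? Then re-running the whole script is a no-op for this field.
  if echo "$cur" | jq -e --arg n "$name" '.fields[] | select(.name == $n)' >/dev/null 2>&1; then
    return 0
  fi
  echo "$cur" |
    jq --arg n "$name" --arg t "$type" '{fields: (.fields + [{name: $n, type: $t}])}' |
    curl -sf -X PATCH "$PB_URL/api/collections/$coll" \
      -H "Authorization: $TOKEN" -H "Content-Type: application/json" -d @- >/dev/null
}
```

`add_index` presumably guards the same way, issuing its `CREATE INDEX` statement only when the named index is absent, which is what lets the script re-run safely on older installs.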
ui/.gitignore (vendored, 2 changes)
@@ -21,3 +21,5 @@ Thumbs.db

# Vite
vite.config.js.timestamp-*
vite.config.ts.timestamp-*


@@ -14,12 +14,18 @@ COPY . .
# Build-time version info — injected by docker-compose or CI via --build-arg.
ARG BUILD_VERSION=dev
ARG BUILD_COMMIT=unknown
ARG BUILD_TIME=unknown

# Expose as PUBLIC_ env vars so SvelteKit's $env/dynamic/public can read them.
ENV PUBLIC_BUILD_VERSION=$BUILD_VERSION
ENV PUBLIC_BUILD_COMMIT=$BUILD_COMMIT
ENV PUBLIC_BUILD_TIME=$BUILD_TIME

RUN npm run build
# PREBUILT=1 skips npm run build — used in CI when the build/ directory has
# already been compiled (and debug IDs injected) by a prior job. The caller
# must copy the pre-built build/ into the Docker context before building.
ARG PREBUILT=0
RUN [ "$PREBUILT" = "1" ] || npm run build

# ── Runtime image ──────────────────────────────────────────────────────────────
# adapter-node bundles most server-side code, but packages with dynamic
@@ -40,5 +46,16 @@ ENV NODE_ENV=production
ENV PORT=3000
ENV HOST=0.0.0.0

# Carry build-time metadata into the runtime image so the UI footer can
# display the version, commit SHA, and build timestamp.
# These must be re-declared after the second FROM — ARG values do not
# cross stage boundaries, but ENV values set here persist at runtime.
ARG BUILD_VERSION=dev
ARG BUILD_COMMIT=unknown
ARG BUILD_TIME=unknown
ENV PUBLIC_BUILD_VERSION=$BUILD_VERSION
ENV PUBLIC_BUILD_COMMIT=$BUILD_COMMIT
ENV PUBLIC_BUILD_TIME=$BUILD_TIME

EXPOSE $PORT
CMD ["node", "build"]
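The runtime-stage hunk above re-declares the three ARGs because ARG values do not cross the second FROM, while the ENV lines persist into the final image. As a hedged illustration of how a caller might feed them; the image tag, context path, and git/date commands are illustrative, not taken from this repo's CI:

```sh
# Pass build metadata in at docker build time; the ARGs default to
# dev/unknown when omitted, as declared in the Dockerfile above.
docker build \
  --build-arg BUILD_VERSION="$(git describe --tags --always)" \
  --build-arg BUILD_COMMIT="$(git rev-parse --short HEAD)" \
  --build-arg BUILD_TIME="$(date -u +%Y-%m-%dT%H:%M:%SZ)" \
  -t libnovel-ui ui/

# The prebuilt path: copy an already-compiled build/ into the context first,
# then PREBUILT=1 makes the in-image "npm run build" a no-op.
docker build --build-arg PREBUILT=1 -t libnovel-ui ui/
```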
ui/messages/en.json (new file, 499 lines)
@@ -0,0 +1,499 @@
{
  "$schema": "https://inlang.com/schema/inlang-message-format",

  "nav_library": "Library",
  "nav_catalogue": "Catalogue",
  "nav_feed": "Feed",
  "nav_feedback": "Feedback",
  "nav_admin": "Admin",
  "nav_profile": "Profile",
  "nav_sign_in": "Sign in",
  "nav_sign_out": "Sign out",
  "nav_toggle_menu": "Toggle menu",
  "nav_admin_panel": "Admin panel",

  "footer_library": "Library",
  "footer_catalogue": "Catalogue",
  "footer_feedback": "Feedback",
  "footer_disclaimer": "Disclaimer",
  "footer_privacy": "Privacy",
  "footer_dmca": "DMCA",
  "footer_copyright": "© {year} libnovel",
  "footer_dev": "dev",

  "home_title": "libnovel",
  "home_stat_books": "Books",
  "home_stat_chapters": "Chapters",
  "home_stat_in_progress": "In progress",
  "home_continue_reading": "Continue Reading",
  "home_view_all": "View all",
  "home_recently_updated": "Recently Updated",
  "home_from_following": "From People You Follow",
  "home_empty_title": "Your library is empty",
  "home_empty_body": "Discover novels and scrape them into your library.",
  "home_discover_novels": "Discover Novels",
  "home_via_reader": "via {username}",
  "home_chapter_badge": "ch.{n}",

  "player_generating": "Generating… {percent}%",
  "player_loading": "Loading…",
  "player_chapters": "Chapters",
  "player_chapter_n": "Chapter {n}",
  "player_toggle_chapter_list": "Toggle chapter list",
  "player_chapter_list_label": "Chapter list",
  "player_close_chapter_list": "Close chapter list",
  "player_rewind_15": "Rewind 15 seconds",
  "player_skip_30": "Skip 30 seconds",
  "player_back_15": "Back 15s",
  "player_forward_30": "Forward 30s",
  "player_play": "Play",
  "player_pause": "Pause",
  "player_speed_label": "Playback speed {speed}x",
  "player_seek_label": "Chapter progress",
  "player_change_speed": "Change playback speed",
  "player_auto_next_on": "Auto-next on",
  "player_auto_next_off": "Auto-next off",
  "player_auto_next_ready": "Auto-next on — Ch.{n} ready",
  "player_auto_next_preparing": "Auto-next on — preparing Ch.{n}…",
  "player_auto_next_aria": "Auto-next {state}",
  "player_go_to_chapter": "Go to chapter",
  "player_close": "Close player",

  "login_page_title": "Sign in — libnovel",
  "login_heading": "Sign in to libnovel",
  "login_subheading": "Choose a provider to continue",
  "login_continue_google": "Continue with Google",
  "login_continue_github": "Continue with GitHub",
  "login_terms_notice": "By signing in you agree to our terms of service.",
  "login_error_oauth_state": "Sign-in was cancelled or expired. Please try again.",
  "login_error_oauth_failed": "Could not connect to the provider. Please try again.",
  "login_error_oauth_no_email": "Your account has no verified email address. Please add one and retry.",

  "books_page_title": "Library — libnovel",
  "books_heading": "Your Library",
  "books_empty_title": "No books yet",
  "books_empty_body": "Add books to your library by visiting a book page.",
  "books_browse_catalogue": "Browse Catalogue",
  "books_chapter_count": "{n} chapters",
  "books_last_read": "Last read: Ch.{n}",
  "books_reading_progress": "Ch.{current} / {total}",
  "books_remove": "Remove",

  "catalogue_page_title": "Catalogue — libnovel",
  "catalogue_heading": "Catalogue",
  "catalogue_search_placeholder": "Search novels…",
  "catalogue_filter_genre": "Genre",
  "catalogue_filter_status": "Status",
  "catalogue_filter_sort": "Sort",
  "catalogue_sort_popular": "Popular",
  "catalogue_sort_new": "New",
  "catalogue_sort_top_rated": "Top Rated",
  "catalogue_sort_rank": "Rank",
  "catalogue_status_all": "All",
  "catalogue_status_ongoing": "Ongoing",
  "catalogue_status_completed": "Completed",
  "catalogue_genre_all": "All genres",
  "catalogue_clear_filters": "Clear",
  "catalogue_reset": "Reset",
  "catalogue_no_results": "No novels found.",
  "catalogue_loading": "Loading…",
  "catalogue_load_more": "Load more",
  "catalogue_results_count": "{n} results",

  "book_detail_page_title": "{title} — libnovel",
  "book_detail_signin_to_save": "Sign in to save",
  "book_detail_add_to_library": "Add to Library",
  "book_detail_remove_from_library": "Remove from Library",
  "book_detail_read_now": "Read Now",
  "book_detail_continue_reading": "Continue Reading",
  "book_detail_start_reading": "Start Reading",
  "book_detail_chapters": "{n} Chapters",
  "book_detail_status": "Status",
  "book_detail_author": "Author",
  "book_detail_genres": "Genres",
  "book_detail_description": "Description",
  "book_detail_source": "Source",
  "book_detail_rescrape": "Re-scrape",
  "book_detail_scraping": "Scraping…",
  "book_detail_in_library": "In Library",

  "chapters_page_title": "Chapters — {title}",
  "chapters_heading": "Chapters",
  "chapters_back_to_book": "Back to book",
  "chapters_reading_now": "Reading",
  "chapters_empty": "No chapters scraped yet.",

  "reader_page_title": "{title} — Ch.{n} — libnovel",
  "reader_play_narration": "Play narration",
  "reader_generating_audio": "Generating audio…",
  "reader_signin_for_audio": "Audio narration available",
  "reader_signin_audio_desc": "Sign in to listen to this chapter narrated by AI.",
  "reader_audio_error": "Audio generation failed.",
  "reader_prev_chapter": "Previous chapter",
  "reader_next_chapter": "Next chapter",
  "reader_back_to_chapters": "Back to chapters",
  "reader_chapter_n": "Chapter {n}",
  "reader_change_voice": "Change voice",
  "reader_voice_panel_title": "Select voice",
  "reader_voice_kokoro": "Kokoro voices",
  "reader_voice_pocket": "Pocket-TTS voices",
  "reader_voice_play_sample": "Play sample",
  "reader_voice_stop_sample": "Stop sample",
  "reader_voice_selected": "Selected",
  "reader_close_voice_panel": "Close voice panel",
  "reader_auto_next": "Auto-next",
  "reader_speed": "Speed",
  "reader_preview_notice": "Preview — this chapter has not been fully scraped.",

  "profile_page_title": "Profile — libnovel",
  "profile_heading": "Profile",
  "profile_avatar_label": "Avatar",
  "profile_change_avatar": "Change avatar",
  "profile_username": "Username",
  "profile_email": "Email",
  "profile_change_password": "Change password",
  "profile_current_password": "Current password",
  "profile_new_password": "New password",
  "profile_confirm_password": "Confirm password",
  "profile_save_password": "Save password",
  "profile_appearance_heading": "Appearance",
  "profile_theme_label": "Theme",
  "profile_theme_amber": "Amber",
  "profile_theme_slate": "Slate",
  "profile_theme_rose": "Rose",
  "profile_theme_forest": "Forest",
  "profile_theme_mono": "Mono",
  "profile_theme_cyber": "Cyberpunk",
  "profile_theme_light": "Light",
  "profile_theme_light_slate": "Light Blue",
  "profile_theme_light_rose": "Light Rose",
  "profile_reading_heading": "Reading settings",
  "profile_voice_label": "Default voice",
  "profile_speed_label": "Playback speed",
  "profile_auto_next_label": "Auto-next chapter",
  "profile_save_settings": "Save settings",
  "profile_settings_saved": "Settings saved.",
  "profile_settings_error": "Failed to save settings.",
  "profile_password_saved": "Password changed.",
  "profile_password_error": "Failed to change password.",
  "profile_sessions_heading": "Active sessions",
  "profile_sign_out_all": "Sign out all other devices",
  "profile_joined": "Joined {date}",

  "user_page_title": "{username} — libnovel",
  "user_library_heading": "{username}'s Library",
  "user_follow": "Follow",
  "user_unfollow": "Unfollow",
  "user_followers": "{n} followers",
  "user_following": "{n} following",
  "user_library_empty": "No books in library.",

  "error_not_found_title": "Page not found",
  "error_not_found_body": "The page you're looking for doesn't exist.",
  "error_generic_title": "Something went wrong",
  "error_go_home": "Go home",
  "error_status": "Error {status}",

  "admin_scrape_page_title": "Scrape — Admin",
  "admin_scrape_heading": "Scrape",
  "admin_scrape_catalogue": "Scrape Catalogue",
  "admin_scrape_book": "Scrape Book",
  "admin_scrape_url_placeholder": "novelfire.net book URL",
  "admin_scrape_range": "Chapter range",
  "admin_scrape_from": "From",
  "admin_scrape_to": "To",
  "admin_scrape_submit": "Scrape",
  "admin_scrape_cancel": "Cancel",
  "admin_scrape_status_pending": "Pending",
  "admin_scrape_status_running": "Running",
  "admin_scrape_status_done": "Done",
  "admin_scrape_status_failed": "Failed",
  "admin_scrape_status_cancelled": "Cancelled",
  "admin_tasks_heading": "Recent tasks",
  "admin_tasks_empty": "No tasks yet.",

  "admin_audio_page_title": "Audio — Admin",
  "admin_audio_heading": "Audio Jobs",
  "admin_audio_empty": "No audio jobs.",

  "admin_changelog_page_title": "Changelog — Admin",
  "admin_changelog_heading": "Changelog",

  "comments_heading": "Comments",
  "comments_empty": "No comments yet. Be the first!",
  "comments_placeholder": "Write a comment…",
  "comments_submit": "Post",
  "comments_login_prompt": "Sign in to comment.",
  "comments_vote_up": "Upvote",
  "comments_vote_down": "Downvote",
  "comments_delete": "Delete",
  "comments_reply": "Reply",
  "comments_show_replies": "Show {n} replies",
  "comments_hide_replies": "Hide replies",
  "comments_edited": "edited",
  "comments_deleted": "[deleted]",

  "disclaimer_page_title": "Disclaimer — libnovel",
  "privacy_page_title": "Privacy Policy — libnovel",
  "dmca_page_title": "DMCA — libnovel",
  "terms_page_title": "Terms of Service — libnovel",

  "common_loading": "Loading…",
  "common_error": "Error",
  "common_save": "Save",
  "common_cancel": "Cancel",
  "common_close": "Close",
  "common_search": "Search",
  "common_back": "Back",
  "common_next": "Next",
  "common_previous": "Previous",
  "common_yes": "Yes",
  "common_no": "No",
  "common_on": "on",
  "common_off": "off",

  "locale_switcher_label": "Language",

  "books_empty_library": "Your library is empty.",
  "books_empty_discover": "Books you start reading or save from",
  "books_empty_discover_link": "Discover",
  "books_empty_discover_suffix": "will appear here.",
  "books_count": "{n} book{s}",

  "catalogue_sort_updated": "Updated",
  "catalogue_search_button": "Search",
  "catalogue_refresh": "Refresh",
  "catalogue_refreshing": "Queuing\u2026",
  "catalogue_refresh_mobile": "Refresh catalogue",
  "catalogue_all_loaded": "All novels loaded",
  "catalogue_scroll_top": "Back to top",
  "catalogue_view_grid": "Grid view",
  "catalogue_view_list": "List view",
  "catalogue_browse_source": "Browse novels from novelfire.net",
  "catalogue_search_results": "{n} result{s} for \"{q}\"",
  "catalogue_search_local_count": "({local} local, {remote} from novelfire)",
  "catalogue_rank_ranked": "{n} novels ranked from last catalogue scrape",
  "catalogue_rank_no_data": "No ranking data.",
  "catalogue_rank_no_data_body": "No ranking data \u2014 run a full catalogue scrape to populate",
  "catalogue_rank_run_scrape_admin": "Click Refresh catalogue above to trigger a full catalogue scrape.",
  "catalogue_rank_run_scrape_user": "Ask an admin to run a catalogue scrape.",
  "catalogue_scrape_queued_flash": "Full catalogue scrape queued. Library and ranking will update as books are processed.",
  "catalogue_scrape_busy_flash": "A scrape job is already running. Check back once it finishes.",
  "catalogue_scrape_error_flash": "Failed to queue scrape. Check that the scraper service is reachable.",
  "catalogue_filters_label": "Filters",
  "catalogue_apply": "Apply",
  "catalogue_filter_rank_note": "Genre & status filters apply to Browse only",
  "catalogue_no_results_search": "No results found.",
  "catalogue_no_results_try": "Try a different search term.",
  "catalogue_no_results_filters": "Try different filters or check back later.",
  "catalogue_scrape_queued_badge": "Queued",
  "catalogue_scrape_busy_badge": "Scraper busy",
  "catalogue_scrape_busy_list": "Busy",
  "catalogue_scrape_forbidden_badge": "Forbidden",
  "catalogue_scrape_novel_button": "Scrape",
  "catalogue_scraping_novel": "Scraping\u2026",

  "book_detail_not_in_library": "not in library",
  "book_detail_continue_ch": "Continue ch.{n}",
  "book_detail_start_ch1": "Start from ch.1",
  "book_detail_preview_ch1": "Preview ch.1",
  "book_detail_reading_ch": "Reading ch.{n} of {total}",
  "book_detail_n_chapters": "{n} chapters",
  "book_detail_rescraping": "Queuing\u2026",
  "book_detail_from_chapter": "From chapter",
  "book_detail_to_chapter": "To chapter (optional)",
  "book_detail_range_queuing": "Queuing\u2026",
  "book_detail_scrape_range": "Scrape range",
  "book_detail_admin": "Admin",
  "book_detail_admin_book_cover": "Book Cover",
  "book_detail_admin_chapter_cover": "Chapter Cover",
  "book_detail_admin_chapter_n": "Chapter #",
  "book_detail_admin_description": "Description",
  "book_detail_admin_chapter_names": "Chapter Names",
  "book_detail_admin_audio_tts": "Audio TTS",
  "book_detail_admin_voice": "Voice",
  "book_detail_admin_generate": "Generate",
  "book_detail_admin_save_cover": "Save Cover",
  "book_detail_admin_saving": "Saving…",
  "book_detail_admin_saved": "Saved",
  "book_detail_admin_apply": "Apply",
  "book_detail_admin_applying": "Applying…",
  "book_detail_admin_applied": "Applied",
  "book_detail_admin_discard": "Discard",
  "book_detail_admin_enqueue_audio": "Enqueue Audio",
  "book_detail_admin_cancel_audio": "Cancel",
  "book_detail_admin_enqueued": "Enqueued {enqueued}, skipped {skipped}",
  "book_detail_scraping_progress": "Fetching the first 20 chapters. This page will refresh automatically.",
  "book_detail_scraping_home": "\u2190 Home",
  "book_detail_rescrape_book": "Rescrape book",
  "book_detail_less": "Less",
  "book_detail_more": "More",

  "chapters_search_placeholder": "Search chapters\u2026",
  "chapters_jump_to": "Jump to Ch.{n}",
  "chapters_no_match": "No chapters match \"{q}\"",
  "chapters_none_available": "No chapters available yet.",
  "chapters_reading_indicator": "reading",
  "chapters_result_count": "{n} results",

  "reader_fetching_chapter": "Fetching chapter\u2026",
  "reader_words": "{n} words",
  "reader_preview_audio_notice": "Preview chapter \u2014 audio not available for books outside the library.",

  "profile_click_to_change": "Click avatar to change photo",
  "profile_tts_voice": "TTS voice",
  "profile_auto_advance": "Auto-advance to next chapter",
  "profile_saving": "Saving\u2026",
  "profile_saved": "Saved!",
  "profile_session_this": "This session",
  "profile_session_signed_in": "Signed in {date}",
  "profile_session_last_seen": "\u00b7 Last seen {date}",
  "profile_session_sign_out": "Sign out",
  "profile_session_end": "End",
  "profile_session_unrecognised": "These are all devices currently signed into your account. End any session you don\u2019t recognise.",
  "profile_no_sessions": "No session records found. Sessions are tracked from the next login.",
  "profile_change_password_heading": "Change password",
  "profile_update_password": "Update password",
  "profile_updating": "Updating\u2026",
  "profile_password_changed_ok": "Password changed successfully.",
  "profile_playback_speed": "Playback speed \u2014 {speed}x",

  "profile_subscription_heading": "Subscription",
  "profile_plan_pro": "Pro",
  "profile_plan_free": "Free",
  "profile_pro_active": "Your Pro subscription is active.",
  "profile_pro_perks": "Unlimited audio, all translation languages, and voice selection are enabled.",
  "profile_manage_subscription": "Manage subscription",
  "profile_upgrade_heading": "Upgrade to Pro",
  "profile_upgrade_desc": "Unlock unlimited audio, translations in 4 languages, and voice selection.",
  "profile_upgrade_monthly": "Monthly \u2014 $6 / mo",
  "profile_upgrade_annual": "Annual \u2014 $48 / yr",
  "profile_free_limits": "Free plan: 3 audio chapters per day, English reading only.",
  "subscribe_page_title": "Go Pro \u2014 libnovel",
  "subscribe_heading": "Read more. Listen more.",
  "subscribe_subheading": "Upgrade to Pro and unlock the full libnovel experience.",
  "subscribe_monthly_label": "Monthly",
  "subscribe_monthly_price": "$6",
  "subscribe_monthly_period": "per month",
  "subscribe_annual_label": "Annual",
  "subscribe_annual_price": "$48",
  "subscribe_annual_period": "per year",
  "subscribe_annual_save": "Save 33%",
  "subscribe_cta_monthly": "Start monthly plan",
  "subscribe_cta_annual": "Start annual plan",
  "subscribe_already_pro": "You already have a Pro subscription.",
  "subscribe_manage": "Manage subscription",
  "subscribe_benefit_audio": "Unlimited audio chapters per day",
  "subscribe_benefit_voices": "Voice selection across all TTS engines",
  "subscribe_benefit_translation": "Read in French, Indonesian, Portuguese, and Russian",
  "subscribe_benefit_downloads": "Download chapters for offline listening",
  "subscribe_login_prompt": "Sign in to subscribe",
  "subscribe_login_cta": "Sign in",

  "user_currently_reading": "Currently Reading",
  "user_library_count": "Library ({n})",
  "user_joined": "Joined {date}",
  "user_followers_label": "followers",
  "user_following_label": "following",
  "user_no_books": "No books in library yet.",

  "admin_pages_label": "Pages",
  "admin_tools_label": "Tools",
  "admin_nav_scrape": "Scrape",
  "admin_nav_audio": "Audio",
  "admin_nav_translation": "Translation",
  "admin_nav_import": "Import",
  "admin_nav_changelog": "Changelog",
  "admin_nav_image_gen": "Image Gen",
  "admin_nav_text_gen": "Text Gen",
  "admin_nav_catalogue_tools": "Catalogue Tools",
  "admin_nav_ai_jobs": "AI Jobs",
  "admin_nav_notifications": "Notifications",
  "admin_nav_feedback": "Feedback",
  "admin_nav_errors": "Errors",
  "admin_nav_analytics": "Analytics",
  "admin_nav_logs": "Logs",
  "admin_nav_uptime": "Uptime",
  "admin_nav_push": "Push",
  "admin_nav_gitea": "Gitea",
  "admin_nav_grafana": "Grafana",

  "admin_scrape_status_idle": "Idle",
  "admin_scrape_status_running": "Running",
  "admin_scrape_full_catalogue": "Full catalogue",
  "admin_scrape_single_book": "Single book",
  "admin_scrape_quick_genres": "Quick genres",
  "admin_scrape_task_history": "Task history",
  "admin_scrape_filter_placeholder": "Filter by kind, status or URL\u2026",
  "admin_scrape_no_matching": "No matching tasks.",
  "admin_scrape_start": "Start scrape",
  "admin_scrape_queuing": "Queuing\u2026",
  "admin_scrape_running": "Running\u2026",

  "admin_audio_filter_jobs": "Filter by slug, voice or status\u2026",
  "admin_audio_filter_cache": "Filter by slug, chapter or voice\u2026",
  "admin_audio_no_matching_jobs": "No matching jobs.",
  "admin_audio_no_jobs": "No audio jobs yet.",
  "admin_audio_cache_empty": "Audio cache is empty.",
  "admin_audio_no_cache_results": "No results.",

  "admin_changelog_gitea": "Gitea releases",
  "admin_changelog_no_releases": "No releases found.",
  "admin_changelog_load_error": "Could not load releases: {error}",

  "comments_top": "Top",
  "comments_new": "New",
  "comments_posting": "Posting\u2026",
  "comments_login_link": "Log in",
  "comments_login_suffix": "to leave a comment.",
  "comments_anonymous": "Anonymous",

  "reader_audio_narration": "Audio Narration",
  "reader_playing": "Playing \u2014 controls below",
  "reader_paused": "Paused \u2014 controls below",
  "reader_ch_ready": "Ch.{n} ready",
  "reader_ch_preparing": "Preparing Ch.{n}\u2026 {percent}%",
  "reader_ch_generate_on_nav": "Ch.{n} will generate on navigate",
  "reader_now_playing": "Now playing: {title}",
  "reader_load_this_chapter": "Load this chapter",
  "reader_generate_samples": "Generate missing samples",
  "reader_voice_applies_next": "New voice applies on next \u201cPlay narration\u201d.",
  "reader_choose_voice": "Choose Voice",
  "reader_generating_narration": "Generating narration\u2026",

  "profile_font_family": "Font Family",
  "profile_font_system": "System",
  "profile_font_serif": "Serif",
  "profile_font_mono": "Monospace",
  "profile_text_size": "Text Size",
  "profile_text_size_sm": "Small",
  "profile_text_size_md": "Normal",
  "profile_text_size_lg": "Large",
  "profile_text_size_xl": "X-Large",

  "feed_page_title": "Feed — LibNovel",
  "feed_heading": "Following Feed",
  "feed_subheading": "Books your followed users are reading",
  "feed_empty_heading": "Nothing here yet",
  "feed_empty_body": "Follow other readers to see what they're reading.",
  "feed_not_logged_in": "Sign in to see your feed.",
  "feed_reader_label": "reading",
  "feed_chapters_label": "{n} chapters",
  "feed_browse_cta": "Browse catalogue",
  "feed_find_users_cta": "Discover readers",

  "admin_translation_page_title": "Translation \u2014 Admin",
  "admin_translation_heading": "Machine Translation",
  "admin_translation_tab_enqueue": "Enqueue",
  "admin_translation_tab_jobs": "Jobs",
  "admin_translation_filter_placeholder": "Filter by slug, lang, or status\u2026",
  "admin_translation_no_matching": "No matching jobs.",
  "admin_translation_no_jobs": "No translation jobs yet.",

  "admin_ai_jobs_page_title": "AI Jobs \u2014 Admin",
  "admin_ai_jobs_heading": "AI Jobs",
  "admin_ai_jobs_subheading": "Background AI generation tasks",

  "admin_text_gen_page_title": "Text Gen \u2014 Admin",
  "admin_text_gen_heading": "Text Generation"
}
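Per the diff stats, en.json lands at 499 lines while the fr.json and id.json translations below are 458 each; the gap is largely the blank separator lines en.json uses between groups, so raw line counts say little about coverage. A key-level comparison is the reliable check (at a glance, fr.json has no admin_nav_feedback, for instance). A sketch using only jq and comm, with paths as in the diff headers:

```sh
# List message ids present in en.json but absent from fr.json.
# comm -23 keeps lines unique to the first (sorted) input.
comm -23 <(jq -r 'keys_unsorted[]' ui/messages/en.json | sort) \
         <(jq -r 'keys_unsorted[]' ui/messages/fr.json | sort)
```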
ui/messages/fr.json (new file, 458 lines)
@@ -0,0 +1,458 @@
{
  "$schema": "https://inlang.com/schema/inlang-message-format",
  "nav_library": "Bibliothèque",
  "nav_catalogue": "Catalogue",
  "nav_feed": "Fil",
  "nav_feedback": "Retour",
  "nav_admin": "Admin",
  "nav_profile": "Profil",
  "nav_sign_in": "Connexion",
  "nav_sign_out": "Déconnexion",
  "nav_toggle_menu": "Menu",
  "nav_admin_panel": "Panneau admin",
  "footer_library": "Bibliothèque",
  "footer_catalogue": "Catalogue",
  "footer_feedback": "Retour",
  "footer_disclaimer": "Avertissement",
  "footer_privacy": "Confidentialité",
  "footer_dmca": "DMCA",
  "footer_copyright": "© {year} libnovel",
  "footer_dev": "dev",
  "home_title": "libnovel",
  "home_stat_books": "Livres",
  "home_stat_chapters": "Chapitres",
  "home_stat_in_progress": "En cours",
  "home_continue_reading": "Continuer la lecture",
  "home_view_all": "Voir tout",
  "home_recently_updated": "Récemment mis à jour",
  "home_from_following": "Des personnes que vous suivez",
  "home_empty_title": "Votre bibliothèque est vide",
  "home_empty_body": "Découvrez des romans et ajoutez-les à votre bibliothèque.",
  "home_discover_novels": "Découvrir des romans",
  "home_via_reader": "via {username}",
  "home_chapter_badge": "ch.{n}",
  "player_generating": "Génération… {percent}%",
  "player_loading": "Chargement…",
  "player_chapters": "Chapitres",
  "player_chapter_n": "Chapitre {n}",
  "player_toggle_chapter_list": "Liste des chapitres",
  "player_chapter_list_label": "Liste des chapitres",
  "player_close_chapter_list": "Fermer la liste des chapitres",
  "player_rewind_15": "Reculer de 15 secondes",
  "player_skip_30": "Avancer de 30 secondes",
  "player_back_15": "−15 s",
  "player_forward_30": "+30 s",
  "player_play": "Lecture",
  "player_pause": "Pause",
  "player_speed_label": "Vitesse {speed}x",
  "player_seek_label": "Progression du chapitre",
  "player_change_speed": "Changer la vitesse",
  "player_auto_next_on": "Suivant auto activé",
  "player_auto_next_off": "Suivant auto désactivé",
  "player_auto_next_ready": "Suivant auto — Ch.{n} prêt",
  "player_auto_next_preparing": "Suivant auto — préparation Ch.{n}…",
  "player_auto_next_aria": "Suivant auto {state}",
  "player_go_to_chapter": "Aller au chapitre",
  "player_close": "Fermer le lecteur",
  "login_page_title": "Connexion — libnovel",
  "login_heading": "Se connecter à libnovel",
  "login_subheading": "Choisissez un fournisseur pour continuer",
  "login_continue_google": "Continuer avec Google",
  "login_continue_github": "Continuer avec GitHub",
  "login_terms_notice": "En vous connectant, vous acceptez nos conditions d'utilisation.",
  "login_error_oauth_state": "Connexion annulée ou expirée. Veuillez réessayer.",
  "login_error_oauth_failed": "Impossible de se connecter au fournisseur. Veuillez réessayer.",
  "login_error_oauth_no_email": "Votre compte n'a pas d'adresse e-mail vérifiée. Ajoutez-en une et réessayez.",
  "books_page_title": "Bibliothèque — libnovel",
  "books_heading": "Votre bibliothèque",
  "books_empty_title": "Aucun livre pour l'instant",
  "books_empty_body": "Ajoutez des livres à votre bibliothèque en visitant une page de livre.",
  "books_browse_catalogue": "Parcourir le catalogue",
  "books_chapter_count": "{n} chapitres",
  "books_last_read": "Dernier lu : Ch.{n}",
  "books_reading_progress": "Ch.{current} / {total}",
  "books_remove": "Supprimer",
  "catalogue_page_title": "Catalogue — libnovel",
  "catalogue_heading": "Catalogue",
  "catalogue_search_placeholder": "Rechercher des romans…",
  "catalogue_filter_genre": "Genre",
  "catalogue_filter_status": "Statut",
  "catalogue_filter_sort": "Trier",
  "catalogue_sort_popular": "Populaire",
  "catalogue_sort_new": "Nouveau",
  "catalogue_sort_top_rated": "Mieux notés",
  "catalogue_sort_rank": "Rang",
  "catalogue_status_all": "Tous",
  "catalogue_status_ongoing": "En cours",
  "catalogue_status_completed": "Terminé",
  "catalogue_genre_all": "Tous les genres",
  "catalogue_clear_filters": "Effacer",
  "catalogue_reset": "Réinitialiser",
  "catalogue_no_results": "Aucun roman trouvé.",
  "catalogue_loading": "Chargement…",
  "catalogue_load_more": "Charger plus",
  "catalogue_results_count": "{n} résultats",
  "book_detail_page_title": "{title} — libnovel",
  "book_detail_signin_to_save": "Connectez-vous pour sauvegarder",
  "book_detail_add_to_library": "Ajouter à la bibliothèque",
  "book_detail_remove_from_library": "Retirer de la bibliothèque",
  "book_detail_read_now": "Lire maintenant",
  "book_detail_continue_reading": "Continuer la lecture",
  "book_detail_start_reading": "Commencer la lecture",
  "book_detail_chapters": "{n} chapitres",
  "book_detail_status": "Statut",
  "book_detail_author": "Auteur",
  "book_detail_genres": "Genres",
  "book_detail_description": "Description",
  "book_detail_source": "Source",
  "book_detail_rescrape": "Réextraire",
  "book_detail_scraping": "Extraction en cours…",
  "book_detail_in_library": "Dans la bibliothèque",
  "chapters_page_title": "Chapitres — {title}",
  "chapters_heading": "Chapitres",
  "chapters_back_to_book": "Retour au livre",
  "chapters_reading_now": "En cours de lecture",
  "chapters_empty": "Aucun chapitre extrait pour l'instant.",
  "reader_page_title": "{title} — Ch.{n} — libnovel",
  "reader_play_narration": "Lire la narration",
  "reader_generating_audio": "Génération audio…",
  "reader_signin_for_audio": "Narration audio disponible",
  "reader_signin_audio_desc": "Connectez-vous pour écouter ce chapitre narré par l'IA.",
  "reader_audio_error": "Échec de la génération audio.",
  "reader_prev_chapter": "Chapitre précédent",
  "reader_next_chapter": "Chapitre suivant",
  "reader_back_to_chapters": "Retour aux chapitres",
  "reader_chapter_n": "Chapitre {n}",
  "reader_change_voice": "Changer de voix",
  "reader_voice_panel_title": "Sélectionner une voix",
  "reader_voice_kokoro": "Voix Kokoro",
  "reader_voice_pocket": "Voix Pocket-TTS",
  "reader_voice_play_sample": "Écouter un extrait",
  "reader_voice_stop_sample": "Arrêter l'extrait",
  "reader_voice_selected": "Sélectionné",
  "reader_close_voice_panel": "Fermer le panneau vocal",
  "reader_auto_next": "Suivant auto",
  "reader_speed": "Vitesse",
  "reader_preview_notice": "Aperçu — ce chapitre n'a pas été entièrement extrait.",
  "profile_page_title": "Profil — libnovel",
  "profile_heading": "Profil",
  "profile_avatar_label": "Avatar",
  "profile_change_avatar": "Changer l'avatar",
  "profile_username": "Nom d'utilisateur",
  "profile_email": "E-mail",
  "profile_change_password": "Changer le mot de passe",
  "profile_current_password": "Mot de passe actuel",
  "profile_new_password": "Nouveau mot de passe",
  "profile_confirm_password": "Confirmer le mot de passe",
  "profile_save_password": "Enregistrer le mot de passe",
  "profile_appearance_heading": "Apparence",
  "profile_theme_label": "Thème",
  "profile_theme_amber": "Ambre",
  "profile_theme_slate": "Ardoise",
  "profile_theme_rose": "Rose",
  "profile_theme_forest": "Forêt",
  "profile_theme_mono": "Mono",
  "profile_theme_cyber": "Cyberpunk",
  "profile_theme_light": "Light",
  "profile_theme_light_slate": "Light Blue",
  "profile_theme_light_rose": "Light Rose",
  "profile_reading_heading": "Paramètres de lecture",
  "profile_voice_label": "Voix par défaut",
  "profile_speed_label": "Vitesse de lecture",
  "profile_auto_next_label": "Chapitre suivant automatique",
  "profile_save_settings": "Enregistrer les paramètres",
  "profile_settings_saved": "Paramètres enregistrés.",
  "profile_settings_error": "Impossible d'enregistrer les paramètres.",
  "profile_password_saved": "Mot de passe modifié.",
  "profile_password_error": "Impossible de modifier le mot de passe.",
  "profile_sessions_heading": "Sessions actives",
  "profile_sign_out_all": "Se déconnecter de tous les autres appareils",
  "profile_joined": "Inscrit le {date}",
  "user_page_title": "{username} — libnovel",
  "user_library_heading": "Bibliothèque de {username}",
  "user_follow": "Suivre",
  "user_unfollow": "Ne plus suivre",
  "user_followers": "{n} abonnés",
  "user_following": "{n} abonnements",
  "user_library_empty": "Aucun livre dans la bibliothèque.",
  "error_not_found_title": "Page introuvable",
  "error_not_found_body": "La page que vous cherchez n'existe pas.",
  "error_generic_title": "Une erreur s'est produite",
  "error_go_home": "Accueil",
  "error_status": "Erreur {status}",
  "admin_scrape_page_title": "Extraction — Admin",
  "admin_scrape_heading": "Extraction",
  "admin_scrape_catalogue": "Extraire le catalogue",
  "admin_scrape_book": "Extraire un livre",
  "admin_scrape_url_placeholder": "URL du livre sur novelfire.net",
  "admin_scrape_range": "Plage de chapitres",
  "admin_scrape_from": "De",
  "admin_scrape_to": "À",
  "admin_scrape_submit": "Extraire",
  "admin_scrape_cancel": "Annuler",
  "admin_scrape_status_pending": "En attente",
  "admin_scrape_status_running": "En cours",
  "admin_scrape_status_done": "Terminé",
  "admin_scrape_status_failed": "Échoué",
  "admin_scrape_status_cancelled": "Annulé",
  "admin_tasks_heading": "Tâches récentes",
  "admin_tasks_empty": "Aucune tâche pour l'instant.",
  "admin_audio_page_title": "Audio — Admin",
  "admin_audio_heading": "Tâches audio",
  "admin_audio_empty": "Aucune tâche audio.",
  "admin_changelog_page_title": "Changelog — Admin",
  "admin_changelog_heading": "Changelog",
  "comments_heading": "Commentaires",
  "comments_empty": "Aucun commentaire pour l'instant. Soyez le premier !",
  "comments_placeholder": "Écrire un commentaire…",
  "comments_submit": "Publier",
  "comments_login_prompt": "Connectez-vous pour commenter.",
  "comments_vote_up": "Vote positif",
  "comments_vote_down": "Vote négatif",
  "comments_delete": "Supprimer",
  "comments_reply": "Répondre",
  "comments_show_replies": "Afficher {n} réponses",
  "comments_hide_replies": "Masquer les réponses",
  "comments_edited": "modifié",
  "comments_deleted": "[supprimé]",
  "disclaimer_page_title": "Avertissement — libnovel",
  "privacy_page_title": "Politique de confidentialité — libnovel",
  "dmca_page_title": "DMCA — libnovel",
  "terms_page_title": "Conditions d'utilisation — libnovel",
  "common_loading": "Chargement…",
  "common_error": "Erreur",
  "common_save": "Enregistrer",
  "common_cancel": "Annuler",
  "common_close": "Fermer",
  "common_search": "Rechercher",
  "common_back": "Retour",
  "common_next": "Suivant",
  "common_previous": "Précédent",
  "common_yes": "Oui",
  "common_no": "Non",
  "common_on": "activé",
  "common_off": "désactivé",
  "locale_switcher_label": "Langue",
  "books_empty_library": "Votre bibliothèque est vide.",
  "books_empty_discover": "Les livres que vous commencez à lire ou enregistrez depuis",
  "books_empty_discover_link": "Découvrir",
  "books_empty_discover_suffix": "apparaîtront ici.",
  "books_count": "{n} livre{s}",
  "catalogue_sort_updated": "Mis à jour",
  "catalogue_search_button": "Rechercher",
  "catalogue_refresh": "Actualiser",
  "catalogue_refreshing": "En file d'attente…",
  "catalogue_refresh_mobile": "Actualiser le catalogue",
  "catalogue_all_loaded": "Tous les romans chargés",
  "catalogue_scroll_top": "Retour en haut",
  "catalogue_view_grid": "Vue grille",
  "catalogue_view_list": "Vue liste",
  "catalogue_browse_source": "Parcourir les romans de novelfire.net",
  "catalogue_search_results": "{n} résultat{s} pour « {q} »",
  "catalogue_search_local_count": "({local} local, {remote} depuis novelfire)",
  "catalogue_rank_ranked": "{n} romans classés depuis le dernier scrape du catalogue",
  "catalogue_rank_no_data": "Aucune donnée de classement.",
  "catalogue_rank_no_data_body": "Aucune donnée de classement — lancez un scrape complet du catalogue pour remplir",
  "catalogue_rank_run_scrape_admin": "Cliquez sur Actualiser le catalogue ci-dessus pour déclencher un scrape complet.",
  "catalogue_rank_run_scrape_user": "Demandez à un administrateur d'effectuer un scrape du catalogue.",
  "catalogue_scrape_queued_flash": "Scrape complet du catalogue en file d'attente. La bibliothèque et le classement seront mis à jour au fur et à mesure du traitement des livres.",
  "catalogue_scrape_busy_flash": "Un job de scrape est déjà en cours. Revenez une fois terminé.",
  "catalogue_scrape_error_flash": "Échec de la mise en file d'attente du scrape. Vérifiez que le service de scraper est accessible.",
  "catalogue_filters_label": "Filtres",
  "catalogue_apply": "Appliquer",
  "catalogue_filter_rank_note": "Les filtres genre et statut s'appliquent uniquement à Parcourir",
  "catalogue_no_results_search": "Aucun résultat trouvé.",
  "catalogue_no_results_try": "Essayez un autre terme de recherche.",
  "catalogue_no_results_filters": "Essayez d'autres filtres ou revenez plus tard.",
  "catalogue_scrape_queued_badge": "En file",
  "catalogue_scrape_busy_badge": "Scraper occupé",
  "catalogue_scrape_busy_list": "Occupé",
  "catalogue_scrape_forbidden_badge": "Interdit",
  "catalogue_scrape_novel_button": "Extraire",
  "catalogue_scraping_novel": "Extraction…",
  "book_detail_not_in_library": "pas dans la bibliothèque",
  "book_detail_continue_ch": "Continuer ch.{n}",
  "book_detail_start_ch1": "Commencer au ch.1",
  "book_detail_preview_ch1": "Aperçu ch.1",
  "book_detail_reading_ch": "Lecture ch.{n} sur {total}",
  "book_detail_n_chapters": "{n} chapitres",
  "book_detail_rescraping": "En file d'attente…",
  "book_detail_from_chapter": "À partir du chapitre",
  "book_detail_to_chapter": "Jusqu'au chapitre (optionnel)",
  "book_detail_range_queuing": "En file d'attente…",
  "book_detail_scrape_range": "Plage d'extraction",
  "book_detail_admin": "Admin",
  "book_detail_admin_book_cover": "Couverture du livre",
  "book_detail_admin_chapter_cover": "Couverture du chapitre",
  "book_detail_admin_chapter_n": "Chapitre n°",
  "book_detail_admin_description": "Description",
  "book_detail_admin_chapter_names": "Noms des chapitres",
  "book_detail_admin_audio_tts": "Audio TTS",
  "book_detail_admin_voice": "Voix",
  "book_detail_admin_generate": "Générer",
  "book_detail_admin_save_cover": "Enregistrer la couverture",
  "book_detail_admin_saving": "Enregistrement…",
  "book_detail_admin_saved": "Enregistré",
  "book_detail_admin_apply": "Appliquer",
  "book_detail_admin_applying": "Application…",
  "book_detail_admin_applied": "Appliqué",
  "book_detail_admin_discard": "Ignorer",
  "book_detail_admin_enqueue_audio": "Mettre en file audio",
  "book_detail_admin_cancel_audio": "Annuler",
  "book_detail_admin_enqueued": "{enqueued} en file, {skipped} ignorés",
  "book_detail_scraping_progress": "Récupération des 20 premiers chapitres. Cette page sera actualisée automatiquement.",
  "book_detail_scraping_home": "← Accueil",
  "book_detail_rescrape_book": "Réextraire le livre",
  "book_detail_less": "Moins",
  "book_detail_more": "Plus",
  "chapters_search_placeholder": "Rechercher des chapitres…",
  "chapters_jump_to": "Aller au Ch.{n}",
  "chapters_no_match": "Aucun chapitre ne correspond à « {q} »",
  "chapters_none_available": "Aucun chapitre disponible pour l'instant.",
  "chapters_reading_indicator": "en cours",
  "chapters_result_count": "{n} résultats",
  "reader_fetching_chapter": "Récupération du chapitre…",
  "reader_words": "{n} mots",
  "reader_preview_audio_notice": "Aperçu — audio non disponible pour les livres hors bibliothèque.",
  "profile_click_to_change": "Cliquez sur l'avatar pour changer la photo",
  "profile_tts_voice": "Voix TTS",
  "profile_auto_advance": "Avancer automatiquement au chapitre suivant",
  "profile_saving": "Enregistrement…",
  "profile_saved": "Enregistré !",
  "profile_session_this": "Cette session",
  "profile_session_signed_in": "Connecté le {date}",
  "profile_session_last_seen": "· Dernière activité {date}",
  "profile_session_sign_out": "Se déconnecter",
  "profile_session_end": "Terminer",
  "profile_session_unrecognised": "Ce sont tous les appareils connectés à votre compte. Terminez toute session que vous ne reconnaissez pas.",
  "profile_no_sessions": "Aucun enregistrement de session trouvé. Les sessions sont suivies dès la prochaine connexion.",
  "profile_change_password_heading": "Changer le mot de passe",
  "profile_update_password": "Mettre à jour le mot de passe",
  "profile_updating": "Mise à jour…",
  "profile_password_changed_ok": "Mot de passe modifié avec succès.",
  "profile_playback_speed": "Vitesse de lecture — {speed}x",
  "profile_subscription_heading": "Abonnement",
  "profile_plan_pro": "Pro",
  "profile_plan_free": "Gratuit",
  "profile_pro_active": "Votre abonnement Pro est actif.",
  "profile_pro_perks": "Audio illimité, toutes les langues de traduction et la sélection de voix sont activées.",
  "profile_manage_subscription": "Gérer l'abonnement",
  "profile_upgrade_heading": "Passer au Pro",
  "profile_upgrade_desc": "Débloquez l'audio illimité, les traductions en 4 langues et la sélection de voix.",
  "profile_upgrade_monthly": "Mensuel — 6 $ / mois",
  "profile_upgrade_annual": "Annuel — 48 $ / an",
  "profile_free_limits": "Plan gratuit : 3 chapitres audio par jour, lecture en anglais uniquement.",
  "subscribe_page_title": "Passer Pro — libnovel",
  "subscribe_heading": "Lisez plus. Écoutez plus.",
  "subscribe_subheading": "Passez Pro et débloquez l'expérience libnovel complète.",
  "subscribe_monthly_label": "Mensuel",
  "subscribe_monthly_price": "6 $",
  "subscribe_monthly_period": "par mois",
  "subscribe_annual_label": "Annuel",
  "subscribe_annual_price": "48 $",
  "subscribe_annual_period": "par an",
  "subscribe_annual_save": "Économisez 33 %",
  "subscribe_cta_monthly": "Commencer le plan mensuel",
  "subscribe_cta_annual": "Commencer le plan annuel",
  "subscribe_already_pro": "Vous avez déjà un abonnement Pro.",
  "subscribe_manage": "Gérer l'abonnement",
  "subscribe_benefit_audio": "Chapitres audio illimités par jour",
  "subscribe_benefit_voices": "Sélection de voix pour tous les moteurs TTS",
  "subscribe_benefit_translation": "Lire en français, indonésien, portugais et russe",
  "subscribe_benefit_downloads": "Télécharger des chapitres pour une écoute hors ligne",
  "subscribe_login_prompt": "Connectez-vous pour vous abonner",
  "subscribe_login_cta": "Se connecter",
  "user_currently_reading": "En cours de lecture",
  "user_library_count": "Bibliothèque ({n})",
  "user_joined": "Inscrit le {date}",
  "user_followers_label": "abonnés",
  "user_following_label": "abonnements",
  "user_no_books": "Aucun livre dans la bibliothèque pour l'instant.",
  "admin_pages_label": "Pages",
  "admin_tools_label": "Outils",
  "admin_nav_scrape": "Scrape",
  "admin_nav_audio": "Audio",
  "admin_nav_translation": "Traduction",
  "admin_nav_changelog": "Modifications",
  "admin_nav_image_gen": "Image Gen",
  "admin_nav_text_gen": "Text Gen",
  "admin_nav_catalogue_tools": "Catalogue Tools",
  "admin_nav_ai_jobs": "Tâches IA",
  "admin_nav_notifications": "Notifications",
  "admin_nav_errors": "Erreurs",
  "admin_nav_analytics": "Analytique",
  "admin_nav_logs": "Journaux",
  "admin_nav_uptime": "Disponibilité",
  "admin_nav_push": "Notifications",
  "admin_scrape_status_idle": "Inactif",
  "admin_scrape_full_catalogue": "Catalogue complet",
  "admin_scrape_single_book": "Livre unique",
  "admin_scrape_quick_genres": "Genres rapides",
  "admin_scrape_task_history": "Historique des tâches",
  "admin_scrape_filter_placeholder": "Filtrer par type, statut ou URL…",
  "admin_scrape_no_matching": "Aucune tâche correspondante.",
  "admin_scrape_start": "Démarrer l'extraction",
  "admin_scrape_queuing": "En file d'attente…",
  "admin_scrape_running": "En cours…",
  "admin_audio_filter_jobs": "Filtrer par slug, voix ou statut…",
  "admin_audio_filter_cache": "Filtrer par slug, chapitre ou voix…",
  "admin_audio_no_matching_jobs": "Aucun job correspondant.",
  "admin_audio_no_jobs": "Aucun job audio pour l'instant.",
  "admin_audio_cache_empty": "Cache audio vide.",
  "admin_audio_no_cache_results": "Aucun résultat.",
  "admin_changelog_gitea": "Releases Gitea",
  "admin_changelog_no_releases": "Aucune release trouvée.",
  "admin_changelog_load_error": "Impossible de charger les releases : {error}",
  "comments_top": "Les meilleures",
  "comments_new": "Nouvelles",
  "comments_posting": "Publication…",
  "comments_login_link": "Connectez-vous",
  "comments_login_suffix": "pour laisser un commentaire.",
  "comments_anonymous": "Anonyme",
  "reader_audio_narration": "Narration Audio",
  "reader_playing": "Lecture en cours — contrôles ci-dessous",
  "reader_paused": "En pause — contrôles ci-dessous",
  "reader_ch_ready": "Ch.{n} prêt",
  "reader_ch_preparing": "Préparation Ch.{n}… {percent}%",
  "reader_ch_generate_on_nav": "Ch.{n} sera généré lors de la navigation",
  "reader_now_playing": "En cours : {title}",
  "reader_load_this_chapter": "Charger ce chapitre",
  "reader_generate_samples": "Générer les échantillons manquants",
  "reader_voice_applies_next": "La nouvelle voix s'appliquera au prochain « Lire la narration ».",
  "reader_choose_voice": "Choisir une voix",
  "reader_generating_narration": "Génération de la narration…",
  "profile_font_family": "Police",
  "profile_font_system": "Système",
  "profile_font_serif": "Serif",
  "profile_font_mono": "Mono",
  "profile_text_size": "Taille du texte",
  "profile_text_size_sm": "Petit",
  "profile_text_size_md": "Normal",
  "profile_text_size_lg": "Grand",
  "profile_text_size_xl": "Très grand",
  "feed_page_title": "Fil — LibNovel",
  "feed_heading": "Fil d'abonnements",
  "feed_subheading": "Livres lus par vos abonnements",
  "feed_empty_heading": "Rien encore",
  "feed_empty_body": "Suivez d'autres lecteurs pour voir ce qu'ils lisent.",
  "feed_not_logged_in": "Connectez-vous pour voir votre fil.",
  "feed_reader_label": "lit",
  "feed_chapters_label": "{n} chapitres",
  "feed_browse_cta": "Parcourir le catalogue",
  "feed_find_users_cta": "Trouver des lecteurs",
  "admin_nav_gitea": "Gitea",
  "admin_nav_grafana": "Grafana",
  "admin_translation_page_title": "Translation — Admin",
  "admin_translation_heading": "Machine Translation",
  "admin_translation_tab_enqueue": "Enqueue",
  "admin_translation_tab_jobs": "Jobs",
  "admin_translation_filter_placeholder": "Filter by slug, lang, or status…",
  "admin_translation_no_matching": "No matching jobs.",
  "admin_translation_no_jobs": "No translation jobs yet.",
  "admin_ai_jobs_page_title": "AI Jobs — Admin",
  "admin_ai_jobs_heading": "AI Jobs",
  "admin_ai_jobs_subheading": "Background AI generation tasks",
  "admin_text_gen_page_title": "Text Gen — Admin",
  "admin_text_gen_heading": "Text Generation",
  "admin_nav_import": "Import"
}
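Because the inlang message format keeps `{placeholder}` tokens inline in each translated string, a translation that drops or renames a token silently breaks interpolation at runtime. A small spot check for one key follows; the key choice is arbitrary and an empty diff means the tokens match:

```sh
# Extract every {placeholder} token from one message in both locales and
# diff the sorted lists; any output flags a mismatched translation.
diff <(jq -r '.catalogue_search_results' ui/messages/en.json | grep -o '{[a-z_]*}' | sort) \
     <(jq -r '.catalogue_search_results' ui/messages/fr.json | grep -o '{[a-z_]*}' | sort)
```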
458
ui/messages/id.json
Normal file
458
ui/messages/id.json
Normal file
@@ -0,0 +1,458 @@
{
"$schema": "https://inlang.com/schema/inlang-message-format",
"nav_library": "Perpustakaan",
"nav_catalogue": "Katalog",
"nav_feed": "Umpan",
"nav_feedback": "Masukan",
"nav_admin": "Admin",
"nav_profile": "Profil",
"nav_sign_in": "Masuk",
"nav_sign_out": "Keluar",
"nav_toggle_menu": "Menu",
"nav_admin_panel": "Panel admin",
"footer_library": "Perpustakaan",
"footer_catalogue": "Katalog",
"footer_feedback": "Masukan",
"footer_disclaimer": "Penyangkalan",
"footer_privacy": "Privasi",
"footer_dmca": "DMCA",
"footer_copyright": "© {year} libnovel",
"footer_dev": "dev",
"home_title": "libnovel",
"home_stat_books": "Buku",
"home_stat_chapters": "Bab",
"home_stat_in_progress": "Sedang dibaca",
"home_continue_reading": "Lanjutkan Membaca",
"home_view_all": "Lihat semua",
"home_recently_updated": "Baru Diperbarui",
"home_from_following": "Dari Orang yang Kamu Ikuti",
"home_empty_title": "Perpustakaanmu kosong",
"home_empty_body": "Temukan novel dan tambahkan ke perpustakaanmu.",
"home_discover_novels": "Temukan Novel",
"home_via_reader": "via {username}",
"home_chapter_badge": "bab.{n}",
"player_generating": "Membuat… {percent}%",
"player_loading": "Memuat…",
"player_chapters": "Bab",
"player_chapter_n": "Bab {n}",
"player_toggle_chapter_list": "Daftar bab",
"player_chapter_list_label": "Daftar bab",
"player_close_chapter_list": "Tutup daftar bab",
"player_rewind_15": "Mundur 15 detik",
"player_skip_30": "Maju 30 detik",
"player_back_15": "−15 dtk",
"player_forward_30": "+30 dtk",
"player_play": "Putar",
"player_pause": "Jeda",
"player_speed_label": "Kecepatan {speed}x",
"player_seek_label": "Kemajuan bab",
"player_change_speed": "Ubah kecepatan",
"player_auto_next_on": "Auto-lanjut aktif",
"player_auto_next_off": "Auto-lanjut nonaktif",
"player_auto_next_ready": "Auto-lanjut — Bab.{n} siap",
"player_auto_next_preparing": "Auto-lanjut — menyiapkan Bab.{n}…",
"player_auto_next_aria": "Auto-lanjut {state}",
"player_go_to_chapter": "Pergi ke bab",
"player_close": "Tutup pemutar",
"login_page_title": "Masuk — libnovel",
"login_heading": "Masuk ke libnovel",
"login_subheading": "Pilih penyedia untuk melanjutkan",
"login_continue_google": "Lanjutkan dengan Google",
"login_continue_github": "Lanjutkan dengan GitHub",
"login_terms_notice": "Dengan masuk, kamu menyetujui syarat layanan kami.",
"login_error_oauth_state": "Masuk dibatalkan atau kedaluwarsa. Coba lagi.",
"login_error_oauth_failed": "Tidak dapat terhubung ke penyedia. Coba lagi.",
"login_error_oauth_no_email": "Akunmu tidak memiliki alamat email terverifikasi. Tambahkan dan coba lagi.",
"books_page_title": "Perpustakaan — libnovel",
"books_heading": "Perpustakaanmu",
"books_empty_title": "Belum ada buku",
"books_empty_body": "Tambahkan buku ke perpustakaanmu dengan mengunjungi halaman buku.",
"books_browse_catalogue": "Jelajahi Katalog",
"books_chapter_count": "{n} bab",
"books_last_read": "Terakhir: Bab.{n}",
"books_reading_progress": "Bab.{current} / {total}",
"books_remove": "Hapus",
"catalogue_page_title": "Katalog — libnovel",
"catalogue_heading": "Katalog",
"catalogue_search_placeholder": "Cari novel…",
"catalogue_filter_genre": "Genre",
"catalogue_filter_status": "Status",
"catalogue_filter_sort": "Urutkan",
"catalogue_sort_popular": "Populer",
"catalogue_sort_new": "Terbaru",
"catalogue_sort_top_rated": "Nilai Tertinggi",
"catalogue_sort_rank": "Peringkat",
"catalogue_status_all": "Semua",
"catalogue_status_ongoing": "Berlangsung",
"catalogue_status_completed": "Selesai",
"catalogue_genre_all": "Semua genre",
"catalogue_clear_filters": "Hapus",
"catalogue_reset": "Atur ulang",
"catalogue_no_results": "Novel tidak ditemukan.",
"catalogue_loading": "Memuat…",
"catalogue_load_more": "Muat lebih banyak",
"catalogue_results_count": "{n} hasil",
"book_detail_page_title": "{title} — libnovel",
"book_detail_signin_to_save": "Masuk untuk menyimpan",
"book_detail_add_to_library": "Tambah ke Perpustakaan",
"book_detail_remove_from_library": "Hapus dari Perpustakaan",
"book_detail_read_now": "Baca Sekarang",
"book_detail_continue_reading": "Lanjutkan Membaca",
"book_detail_start_reading": "Mulai Membaca",
"book_detail_chapters": "{n} Bab",
"book_detail_status": "Status",
"book_detail_author": "Penulis",
"book_detail_genres": "Genre",
"book_detail_description": "Deskripsi",
"book_detail_source": "Sumber",
"book_detail_rescrape": "Perbarui",
"book_detail_scraping": "Memperbarui…",
"book_detail_in_library": "Ada di Perpustakaan",
"chapters_page_title": "Bab — {title}",
"chapters_heading": "Bab",
"chapters_back_to_book": "Kembali ke buku",
"chapters_reading_now": "Sedang dibaca",
"chapters_empty": "Belum ada bab yang diambil.",
"reader_page_title": "{title} — Bab.{n} — libnovel",
"reader_play_narration": "Putar narasi",
"reader_generating_audio": "Membuat audio…",
"reader_signin_for_audio": "Narasi audio tersedia",
"reader_signin_audio_desc": "Masuk untuk mendengarkan bab ini yang dinarasikan oleh AI.",
"reader_audio_error": "Pembuatan audio gagal.",
"reader_prev_chapter": "Bab sebelumnya",
"reader_next_chapter": "Bab berikutnya",
"reader_back_to_chapters": "Kembali ke daftar bab",
"reader_chapter_n": "Bab {n}",
"reader_change_voice": "Ganti suara",
"reader_voice_panel_title": "Pilih suara",
"reader_voice_kokoro": "Suara Kokoro",
"reader_voice_pocket": "Suara Pocket-TTS",
"reader_voice_play_sample": "Putar sampel",
"reader_voice_stop_sample": "Hentikan sampel",
"reader_voice_selected": "Dipilih",
"reader_close_voice_panel": "Tutup panel suara",
"reader_auto_next": "Auto-lanjut",
"reader_speed": "Kecepatan",
"reader_preview_notice": "Pratinjau — bab ini belum sepenuhnya diambil.",
"profile_page_title": "Profil — libnovel",
"profile_heading": "Profil",
"profile_avatar_label": "Avatar",
"profile_change_avatar": "Ubah avatar",
"profile_username": "Nama pengguna",
"profile_email": "Email",
"profile_change_password": "Ubah kata sandi",
"profile_current_password": "Kata sandi saat ini",
"profile_new_password": "Kata sandi baru",
"profile_confirm_password": "Konfirmasi kata sandi",
"profile_save_password": "Simpan kata sandi",
"profile_appearance_heading": "Tampilan",
"profile_theme_label": "Tema",
"profile_theme_amber": "Amber",
"profile_theme_slate": "Abu-abu",
"profile_theme_rose": "Mawar",
"profile_theme_forest": "Hutan",
"profile_theme_mono": "Mono",
"profile_theme_cyber": "Cyberpunk",
"profile_theme_light": "Light",
"profile_theme_light_slate": "Light Blue",
"profile_theme_light_rose": "Light Rose",
"profile_reading_heading": "Pengaturan membaca",
"profile_voice_label": "Suara default",
"profile_speed_label": "Kecepatan pemutaran",
"profile_auto_next_label": "Auto-lanjut bab",
"profile_save_settings": "Simpan pengaturan",
"profile_settings_saved": "Pengaturan disimpan.",
"profile_settings_error": "Gagal menyimpan pengaturan.",
"profile_password_saved": "Kata sandi diubah.",
"profile_password_error": "Gagal mengubah kata sandi.",
"profile_sessions_heading": "Sesi aktif",
"profile_sign_out_all": "Keluar dari semua perangkat lain",
"profile_joined": "Bergabung {date}",
"user_page_title": "{username} — libnovel",
"user_library_heading": "Perpustakaan {username}",
"user_follow": "Ikuti",
"user_unfollow": "Berhenti mengikuti",
"user_followers": "{n} pengikut",
"user_following": "{n} mengikuti",
"user_library_empty": "Tidak ada buku di perpustakaan.",
"error_not_found_title": "Halaman tidak ditemukan",
"error_not_found_body": "Halaman yang kamu cari tidak ada.",
"error_generic_title": "Terjadi kesalahan",
"error_go_home": "Ke beranda",
"error_status": "Error {status}",
"admin_scrape_page_title": "Scrape — Admin",
"admin_scrape_heading": "Scrape",
"admin_scrape_catalogue": "Scrape Katalog",
"admin_scrape_book": "Scrape Buku",
"admin_scrape_url_placeholder": "URL buku di novelfire.net",
"admin_scrape_range": "Rentang bab",
"admin_scrape_from": "Dari",
"admin_scrape_to": "Sampai",
"admin_scrape_submit": "Scrape",
"admin_scrape_cancel": "Batal",
"admin_scrape_status_pending": "Menunggu",
"admin_scrape_status_running": "Berjalan",
"admin_scrape_status_done": "Selesai",
"admin_scrape_status_failed": "Gagal",
"admin_scrape_status_cancelled": "Dibatalkan",
"admin_tasks_heading": "Tugas terbaru",
"admin_tasks_empty": "Belum ada tugas.",
"admin_audio_page_title": "Audio — Admin",
"admin_audio_heading": "Tugas Audio",
"admin_audio_empty": "Tidak ada tugas audio.",
"admin_changelog_page_title": "Changelog — Admin",
"admin_changelog_heading": "Changelog",
"comments_heading": "Komentar",
"comments_empty": "Belum ada komentar. Jadilah yang pertama!",
"comments_placeholder": "Tulis komentar…",
"comments_submit": "Kirim",
"comments_login_prompt": "Masuk untuk berkomentar.",
"comments_vote_up": "Suka",
"comments_vote_down": "Tidak suka",
"comments_delete": "Hapus",
"comments_reply": "Balas",
"comments_show_replies": "Tampilkan {n} balasan",
"comments_hide_replies": "Sembunyikan balasan",
"comments_edited": "diedit",
"comments_deleted": "[dihapus]",
"disclaimer_page_title": "Penyangkalan — libnovel",
"privacy_page_title": "Kebijakan Privasi — libnovel",
"dmca_page_title": "DMCA — libnovel",
"terms_page_title": "Syarat Layanan — libnovel",
"common_loading": "Memuat…",
"common_error": "Error",
"common_save": "Simpan",
"common_cancel": "Batal",
"common_close": "Tutup",
"common_search": "Cari",
"common_back": "Kembali",
"common_next": "Berikutnya",
"common_previous": "Sebelumnya",
"common_yes": "Ya",
"common_no": "Tidak",
"common_on": "aktif",
"common_off": "nonaktif",
"locale_switcher_label": "Bahasa",
"books_empty_library": "Perpustakaanmu kosong.",
"books_empty_discover": "Buku yang mulai kamu baca atau simpan dari",
"books_empty_discover_link": "Temukan",
"books_empty_discover_suffix": "akan muncul di sini.",
"books_count": "{n} buku",
"catalogue_sort_updated": "Diperbarui",
"catalogue_search_button": "Cari",
"catalogue_refresh": "Segarkan",
"catalogue_refreshing": "Mengantri…",
"catalogue_refresh_mobile": "Segarkan katalog",
"catalogue_all_loaded": "Semua novel telah dimuat",
"catalogue_scroll_top": "Kembali ke atas",
"catalogue_view_grid": "Tampilan kisi",
"catalogue_view_list": "Tampilan daftar",
"catalogue_browse_source": "Jelajahi novel dari novelfire.net",
"catalogue_search_results": "{n} hasil untuk \"{q}\"",
"catalogue_search_local_count": "({local} lokal, {remote} dari novelfire)",
"catalogue_rank_ranked": "{n} novel diurutkan dari scrape katalog terakhir",
"catalogue_rank_no_data": "Tidak ada data peringkat.",
"catalogue_rank_no_data_body": "Tidak ada data peringkat — jalankan scrape katalog penuh untuk mengisi",
"catalogue_rank_run_scrape_admin": "Klik Segarkan katalog di atas untuk memicu scrape katalog penuh.",
"catalogue_rank_run_scrape_user": "Minta admin untuk menjalankan scrape katalog.",
"catalogue_scrape_queued_flash": "Scrape katalog penuh diantrekan. Perpustakaan dan peringkat akan diperbarui saat buku diproses.",
"catalogue_scrape_busy_flash": "Pekerjaan scrape sedang berjalan. Periksa kembali setelah selesai.",
"catalogue_scrape_error_flash": "Gagal mengantrekan scrape. Pastikan layanan scraper dapat dijangkau.",
"catalogue_filters_label": "Filter",
"catalogue_apply": "Terapkan",
"catalogue_filter_rank_note": "Filter genre & status hanya berlaku untuk Jelajahi",
"catalogue_no_results_search": "Tidak ada hasil.",
"catalogue_no_results_try": "Coba kata kunci lain.",
"catalogue_no_results_filters": "Coba filter lain atau periksa kembali nanti.",
"catalogue_scrape_queued_badge": "Diantrekan",
"catalogue_scrape_busy_badge": "Scraper sibuk",
"catalogue_scrape_busy_list": "Sibuk",
"catalogue_scrape_forbidden_badge": "Terlarang",
"catalogue_scrape_novel_button": "Scrape",
"catalogue_scraping_novel": "Scraping…",
"book_detail_not_in_library": "tidak di perpustakaan",
"book_detail_continue_ch": "Lanjutkan bab.{n}",
"book_detail_start_ch1": "Mulai dari bab.1",
"book_detail_preview_ch1": "Pratinjau bab.1",
"book_detail_reading_ch": "Membaca bab.{n} dari {total}",
"book_detail_n_chapters": "{n} bab",
"book_detail_rescraping": "Mengantri…",
"book_detail_from_chapter": "Dari bab",
"book_detail_to_chapter": "Sampai bab (opsional)",
"book_detail_range_queuing": "Mengantri…",
"book_detail_scrape_range": "Rentang scrape",
"book_detail_admin": "Admin",
"book_detail_admin_book_cover": "Sampul Buku",
"book_detail_admin_chapter_cover": "Sampul Bab",
"book_detail_admin_chapter_n": "Bab #",
"book_detail_admin_description": "Deskripsi",
"book_detail_admin_chapter_names": "Nama Bab",
"book_detail_admin_audio_tts": "Audio TTS",
"book_detail_admin_voice": "Suara",
"book_detail_admin_generate": "Buat",
"book_detail_admin_save_cover": "Simpan Sampul",
"book_detail_admin_saving": "Menyimpan…",
"book_detail_admin_saved": "Tersimpan",
"book_detail_admin_apply": "Terapkan",
"book_detail_admin_applying": "Menerapkan…",
"book_detail_admin_applied": "Diterapkan",
"book_detail_admin_discard": "Buang",
"book_detail_admin_enqueue_audio": "Antre Audio",
"book_detail_admin_cancel_audio": "Batal",
"book_detail_admin_enqueued": "Diantre {enqueued}, dilewati {skipped}",
"book_detail_scraping_progress": "Mengambil 20 bab pertama. Halaman ini akan dimuat ulang otomatis.",
"book_detail_scraping_home": "← Beranda",
"book_detail_rescrape_book": "Scrape ulang buku",
"book_detail_less": "Lebih sedikit",
"book_detail_more": "Selengkapnya",
"chapters_search_placeholder": "Cari bab…",
"chapters_jump_to": "Loncat ke Bab.{n}",
"chapters_no_match": "Tidak ada bab yang cocok dengan \"{q}\"",
"chapters_none_available": "Belum ada bab tersedia.",
"chapters_reading_indicator": "sedang dibaca",
"chapters_result_count": "{n} hasil",
"reader_fetching_chapter": "Mengambil bab…",
"reader_words": "{n} kata",
"reader_preview_audio_notice": "Pratinjau — audio tidak tersedia untuk buku di luar perpustakaan.",
"profile_click_to_change": "Klik avatar untuk mengganti foto",
"profile_tts_voice": "Suara TTS",
"profile_auto_advance": "Otomatis lanjut ke bab berikutnya",
"profile_saving": "Menyimpan…",
"profile_saved": "Tersimpan!",
"profile_session_this": "Sesi ini",
"profile_session_signed_in": "Masuk {date}",
"profile_session_last_seen": "· Terakhir dilihat {date}",
"profile_session_sign_out": "Keluar",
"profile_session_end": "Akhiri",
"profile_session_unrecognised": "Ini semua perangkat yang masuk ke akunmu. Akhiri sesi yang tidak kamu kenali.",
"profile_no_sessions": "Tidak ada catatan sesi. Sesi dilacak mulai login berikutnya.",
"profile_change_password_heading": "Ubah kata sandi",
"profile_update_password": "Perbarui kata sandi",
"profile_updating": "Memperbarui…",
"profile_password_changed_ok": "Kata sandi berhasil diubah.",
"profile_playback_speed": "Kecepatan pemutaran — {speed}x",
"profile_subscription_heading": "Langganan",
"profile_plan_pro": "Pro",
"profile_plan_free": "Gratis",
"profile_pro_active": "Langganan Pro kamu aktif.",
"profile_pro_perks": "Audio tanpa batas, semua bahasa terjemahan, dan pilihan suara tersedia.",
"profile_manage_subscription": "Kelola langganan",
"profile_upgrade_heading": "Tingkatkan ke Pro",
"profile_upgrade_desc": "Buka audio tanpa batas, terjemahan dalam 4 bahasa, dan pilihan suara.",
"profile_upgrade_monthly": "Bulanan — $6 / bln",
"profile_upgrade_annual": "Tahunan — $48 / thn",
"profile_free_limits": "Paket gratis: 3 bab audio per hari, hanya bahasa Inggris.",
"subscribe_page_title": "Jadi Pro — libnovel",
"subscribe_heading": "Baca lebih. Dengarkan lebih.",
"subscribe_subheading": "Tingkatkan ke Pro dan buka pengalaman libnovel sepenuhnya.",
"subscribe_monthly_label": "Bulanan",
"subscribe_monthly_price": "$6",
"subscribe_monthly_period": "per bulan",
"subscribe_annual_label": "Tahunan",
"subscribe_annual_price": "$48",
"subscribe_annual_period": "per tahun",
"subscribe_annual_save": "Hemat 33%",
"subscribe_cta_monthly": "Mulai paket bulanan",
"subscribe_cta_annual": "Mulai paket tahunan",
"subscribe_already_pro": "Anda sudah berlangganan Pro.",
"subscribe_manage": "Kelola langganan",
"subscribe_benefit_audio": "Bab audio tak terbatas per hari",
"subscribe_benefit_voices": "Pilihan suara untuk semua mesin TTS",
"subscribe_benefit_translation": "Baca dalam bahasa Prancis, Indonesia, Portugis, dan Rusia",
"subscribe_benefit_downloads": "Unduh bab untuk didengarkan secara offline",
"subscribe_login_prompt": "Masuk untuk berlangganan",
"subscribe_login_cta": "Masuk",
"user_currently_reading": "Sedang Dibaca",
"user_library_count": "Perpustakaan ({n})",
"user_joined": "Bergabung {date}",
"user_followers_label": "pengikut",
"user_following_label": "mengikuti",
"user_no_books": "Belum ada buku di perpustakaan.",
"admin_pages_label": "Halaman",
"admin_tools_label": "Alat",
"admin_nav_scrape": "Scrape",
"admin_nav_audio": "Audio",
"admin_nav_translation": "Terjemahan",
"admin_nav_changelog": "Perubahan",
"admin_nav_image_gen": "Image Gen",
"admin_nav_text_gen": "Text Gen",
"admin_nav_catalogue_tools": "Catalogue Tools",
"admin_nav_ai_jobs": "Tugas AI",
"admin_nav_notifications": "Notifikasi",
"admin_nav_errors": "Kesalahan",
"admin_nav_analytics": "Analitik",
"admin_nav_logs": "Log",
"admin_nav_uptime": "Uptime",
"admin_nav_push": "Notifikasi",
"admin_scrape_status_idle": "Menunggu",
"admin_scrape_full_catalogue": "Katalog penuh",
"admin_scrape_single_book": "Satu buku",
"admin_scrape_quick_genres": "Genre cepat",
"admin_scrape_task_history": "Riwayat tugas",
"admin_scrape_filter_placeholder": "Filter berdasarkan jenis, status, atau URL…",
"admin_scrape_no_matching": "Tidak ada tugas yang cocok.",
"admin_scrape_start": "Mulai scrape",
"admin_scrape_queuing": "Mengantri…",
"admin_scrape_running": "Berjalan…",
"admin_audio_filter_jobs": "Filter berdasarkan slug, suara, atau status…",
"admin_audio_filter_cache": "Filter berdasarkan slug, bab, atau suara…",
"admin_audio_no_matching_jobs": "Tidak ada pekerjaan yang cocok.",
"admin_audio_no_jobs": "Belum ada pekerjaan audio.",
"admin_audio_cache_empty": "Cache audio kosong.",
"admin_audio_no_cache_results": "Tidak ada hasil.",
"admin_changelog_gitea": "Rilis Gitea",
"admin_changelog_no_releases": "Tidak ada rilis.",
"admin_changelog_load_error": "Gagal memuat rilis: {error}",
"comments_top": "Teratas",
"comments_new": "Terbaru",
"comments_posting": "Mengirim…",
"comments_login_link": "Masuk",
"comments_login_suffix": "untuk meninggalkan komentar.",
"comments_anonymous": "Anonim",
"reader_audio_narration": "Narasi Audio",
"reader_playing": "Memutar — kontrol di bawah",
"reader_paused": "Dijeda — kontrol di bawah",
"reader_ch_ready": "Bab.{n} siap",
"reader_ch_preparing": "Menyiapkan Bab.{n}… {percent}%",
"reader_ch_generate_on_nav": "Bab.{n} akan dihasilkan saat navigasi",
"reader_now_playing": "Sedang diputar: {title}",
"reader_load_this_chapter": "Muat bab ini",
"reader_generate_samples": "Hasilkan sampel yang hilang",
"reader_voice_applies_next": "Suara baru berlaku pada \"Putar narasi\" berikutnya.",
"reader_choose_voice": "Pilih Suara",
"reader_generating_narration": "Membuat narasi…",
"profile_font_family": "Jenis Font",
"profile_font_system": "Sistem",
"profile_font_serif": "Serif",
"profile_font_mono": "Mono",
"profile_text_size": "Ukuran Teks",
"profile_text_size_sm": "Kecil",
"profile_text_size_md": "Normal",
"profile_text_size_lg": "Besar",
"profile_text_size_xl": "Sangat Besar",
"feed_page_title": "Umpan — LibNovel",
"feed_heading": "Umpan Ikutan",
"feed_subheading": "Buku yang sedang dibaca oleh pengguna yang Anda ikuti",
"feed_empty_heading": "Belum ada apa-apa",
"feed_empty_body": "Ikuti pembaca lain untuk melihat apa yang mereka baca.",
"feed_not_logged_in": "Masuk untuk melihat umpan Anda.",
"feed_reader_label": "membaca",
"feed_chapters_label": "{n} bab",
"feed_browse_cta": "Jelajahi katalog",
"feed_find_users_cta": "Temukan pembaca",
"admin_nav_gitea": "Gitea",
"admin_nav_grafana": "Grafana",
"admin_translation_page_title": "Translation — Admin",
"admin_translation_heading": "Machine Translation",
"admin_translation_tab_enqueue": "Enqueue",
"admin_translation_tab_jobs": "Jobs",
"admin_translation_filter_placeholder": "Filter by slug, lang, or status…",
"admin_translation_no_matching": "No matching jobs.",
"admin_translation_no_jobs": "No translation jobs yet.",
"admin_ai_jobs_page_title": "AI Jobs — Admin",
"admin_ai_jobs_heading": "AI Jobs",
"admin_ai_jobs_subheading": "Background AI generation tasks",
"admin_text_gen_page_title": "Text Gen — Admin",
"admin_text_gen_heading": "Text Generation",
"admin_nav_import": "Import"
}
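One pattern visible in this diff: the admin keys appended near the end of each catalogue (`admin_translation_*`, `admin_ai_jobs_*`, `admin_text_gen_*`, `admin_nav_import`) carry the same English strings in the fr, id, and pt files shown here, so those locales resolve them without gaps even though they were never translated. When a key is missing entirely, the usual safety net is a fallback chain. A hedged sketch, assuming one flat `Record<string, string>` per locale as in these files; the `en` base catalogue is an assumption and does not appear in this diff:

```ts
// Hedged sketch: message lookup with an English fallback chain.
// Catalogues are truncated here for illustration; the real files above
// each hold 458 keys.
const catalogues: Record<string, Record<string, string>> = {
  en: { admin_nav_import: "Import" /* … assumed base catalogue … */ },
  id: { nav_library: "Perpustakaan" /* … keys as in id.json above … */ },
};

// Try the requested locale, then en, then the raw key, so the UI never
// renders an empty label for a missing message.
function resolve(locale: string, key: string): string {
  return catalogues[locale]?.[key] ?? catalogues["en"]?.[key] ?? key;
}

resolve("id", "nav_library");      // "Perpustakaan"
resolve("id", "admin_nav_import"); // falls back to en in this truncated sketch
```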
458 ui/messages/pt.json Normal file
@@ -0,0 +1,458 @@
{
"$schema": "https://inlang.com/schema/inlang-message-format",
"nav_library": "Biblioteca",
"nav_catalogue": "Catálogo",
"nav_feed": "Feed",
"nav_feedback": "Feedback",
"nav_admin": "Admin",
"nav_profile": "Perfil",
"nav_sign_in": "Entrar",
"nav_sign_out": "Sair",
"nav_toggle_menu": "Menu",
"nav_admin_panel": "Painel admin",
"footer_library": "Biblioteca",
"footer_catalogue": "Catálogo",
"footer_feedback": "Feedback",
"footer_disclaimer": "Aviso legal",
"footer_privacy": "Privacidade",
"footer_dmca": "DMCA",
"footer_copyright": "© {year} libnovel",
"footer_dev": "dev",
"home_title": "libnovel",
"home_stat_books": "Livros",
"home_stat_chapters": "Capítulos",
"home_stat_in_progress": "Em andamento",
"home_continue_reading": "Continuar Lendo",
"home_view_all": "Ver tudo",
"home_recently_updated": "Atualizados Recentemente",
"home_from_following": "De Quem Você Segue",
"home_empty_title": "Sua biblioteca está vazia",
"home_empty_body": "Descubra romances e adicione à sua biblioteca.",
"home_discover_novels": "Descobrir Romances",
"home_via_reader": "via {username}",
"home_chapter_badge": "cap.{n}",
"player_generating": "Gerando… {percent}%",
"player_loading": "Carregando…",
"player_chapters": "Capítulos",
"player_chapter_n": "Capítulo {n}",
"player_toggle_chapter_list": "Lista de capítulos",
"player_chapter_list_label": "Lista de capítulos",
"player_close_chapter_list": "Fechar lista de capítulos",
"player_rewind_15": "Voltar 15 segundos",
"player_skip_30": "Avançar 30 segundos",
"player_back_15": "−15 s",
"player_forward_30": "+30 s",
"player_play": "Reproduzir",
"player_pause": "Pausar",
"player_speed_label": "Velocidade {speed}x",
"player_seek_label": "Progresso do capítulo",
"player_change_speed": "Mudar velocidade",
"player_auto_next_on": "Próximo automático ativado",
"player_auto_next_off": "Próximo automático desativado",
"player_auto_next_ready": "Próximo automático — Cap.{n} pronto",
"player_auto_next_preparing": "Próximo automático — preparando Cap.{n}…",
"player_auto_next_aria": "Próximo automático {state}",
"player_go_to_chapter": "Ir para capítulo",
"player_close": "Fechar player",
"login_page_title": "Entrar — libnovel",
"login_heading": "Entrar no libnovel",
"login_subheading": "Escolha um provedor para continuar",
"login_continue_google": "Continuar com Google",
"login_continue_github": "Continuar com GitHub",
"login_terms_notice": "Ao entrar, você concorda com nossos termos de serviço.",
"login_error_oauth_state": "Login cancelado ou expirado. Tente novamente.",
"login_error_oauth_failed": "Não foi possível conectar ao provedor. Tente novamente.",
"login_error_oauth_no_email": "Sua conta não tem endereço de email verificado. Adicione um e tente novamente.",
"books_page_title": "Biblioteca — libnovel",
"books_heading": "Sua Biblioteca",
"books_empty_title": "Nenhum livro ainda",
"books_empty_body": "Adicione livros à sua biblioteca visitando a página de um livro.",
"books_browse_catalogue": "Explorar Catálogo",
"books_chapter_count": "{n} capítulos",
"books_last_read": "Último: Cap.{n}",
"books_reading_progress": "Cap.{current} / {total}",
"books_remove": "Remover",
"catalogue_page_title": "Catálogo — libnovel",
"catalogue_heading": "Catálogo",
"catalogue_search_placeholder": "Pesquisar romances…",
"catalogue_filter_genre": "Gênero",
"catalogue_filter_status": "Status",
"catalogue_filter_sort": "Ordenar",
"catalogue_sort_popular": "Popular",
"catalogue_sort_new": "Novo",
"catalogue_sort_top_rated": "Mais Bem Avaliados",
"catalogue_sort_rank": "Ranking",
"catalogue_status_all": "Todos",
"catalogue_status_ongoing": "Em andamento",
"catalogue_status_completed": "Concluído",
"catalogue_genre_all": "Todos os gêneros",
"catalogue_clear_filters": "Limpar",
"catalogue_reset": "Redefinir",
"catalogue_no_results": "Nenhum romance encontrado.",
"catalogue_loading": "Carregando…",
"catalogue_load_more": "Carregar mais",
"catalogue_results_count": "{n} resultados",
"book_detail_page_title": "{title} — libnovel",
"book_detail_signin_to_save": "Entre para salvar",
"book_detail_add_to_library": "Adicionar à Biblioteca",
"book_detail_remove_from_library": "Remover da Biblioteca",
"book_detail_read_now": "Ler Agora",
"book_detail_continue_reading": "Continuar Lendo",
"book_detail_start_reading": "Começar a Ler",
"book_detail_chapters": "{n} Capítulos",
"book_detail_status": "Status",
"book_detail_author": "Autor",
"book_detail_genres": "Gêneros",
"book_detail_description": "Descrição",
"book_detail_source": "Fonte",
"book_detail_rescrape": "Atualizar",
"book_detail_scraping": "Atualizando…",
"book_detail_in_library": "Na Biblioteca",
"chapters_page_title": "Capítulos — {title}",
"chapters_heading": "Capítulos",
"chapters_back_to_book": "Voltar ao livro",
"chapters_reading_now": "Lendo",
"chapters_empty": "Nenhum capítulo extraído ainda.",
"reader_page_title": "{title} — Cap.{n} — libnovel",
"reader_play_narration": "Reproduzir narração",
"reader_generating_audio": "Gerando áudio…",
"reader_signin_for_audio": "Narração de áudio disponível",
"reader_signin_audio_desc": "Entre para ouvir este capítulo narrado por IA.",
"reader_audio_error": "Falha na geração de áudio.",
"reader_prev_chapter": "Capítulo anterior",
"reader_next_chapter": "Próximo capítulo",
"reader_back_to_chapters": "Voltar aos capítulos",
"reader_chapter_n": "Capítulo {n}",
"reader_change_voice": "Mudar voz",
"reader_voice_panel_title": "Selecionar voz",
"reader_voice_kokoro": "Vozes Kokoro",
"reader_voice_pocket": "Vozes Pocket-TTS",
"reader_voice_play_sample": "Reproduzir amostra",
"reader_voice_stop_sample": "Parar amostra",
"reader_voice_selected": "Selecionado",
"reader_close_voice_panel": "Fechar painel de voz",
"reader_auto_next": "Próximo automático",
"reader_speed": "Velocidade",
"reader_preview_notice": "Prévia — este capítulo não foi totalmente extraído.",
"profile_page_title": "Perfil — libnovel",
"profile_heading": "Perfil",
"profile_avatar_label": "Avatar",
"profile_change_avatar": "Mudar avatar",
"profile_username": "Nome de usuário",
"profile_email": "Email",
"profile_change_password": "Mudar senha",
"profile_current_password": "Senha atual",
"profile_new_password": "Nova senha",
"profile_confirm_password": "Confirmar senha",
"profile_save_password": "Salvar senha",
"profile_appearance_heading": "Aparência",
"profile_theme_label": "Tema",
"profile_theme_amber": "Âmbar",
"profile_theme_slate": "Ardósia",
"profile_theme_rose": "Rosa",
"profile_theme_forest": "Floresta",
"profile_theme_mono": "Mono",
"profile_theme_cyber": "Cyberpunk",
"profile_theme_light": "Light",
"profile_theme_light_slate": "Light Blue",
"profile_theme_light_rose": "Light Rose",
"profile_reading_heading": "Configurações de leitura",
"profile_voice_label": "Voz padrão",
"profile_speed_label": "Velocidade de reprodução",
"profile_auto_next_label": "Próximo capítulo automático",
"profile_save_settings": "Salvar configurações",
"profile_settings_saved": "Configurações salvas.",
"profile_settings_error": "Falha ao salvar configurações.",
"profile_password_saved": "Senha alterada.",
"profile_password_error": "Falha ao alterar a senha.",
"profile_sessions_heading": "Sessões ativas",
"profile_sign_out_all": "Sair de todos os outros dispositivos",
"profile_joined": "Entrou em {date}",
"user_page_title": "{username} — libnovel",
"user_library_heading": "Biblioteca de {username}",
"user_follow": "Seguir",
"user_unfollow": "Deixar de seguir",
"user_followers": "{n} seguidores",
"user_following": "{n} seguindo",
"user_library_empty": "Nenhum livro na biblioteca.",
"error_not_found_title": "Página não encontrada",
"error_not_found_body": "A página que você procura não existe.",
"error_generic_title": "Algo deu errado",
"error_go_home": "Ir para início",
"error_status": "Erro {status}",
"admin_scrape_page_title": "Extração — Admin",
"admin_scrape_heading": "Extração",
"admin_scrape_catalogue": "Extrair Catálogo",
"admin_scrape_book": "Extrair Livro",
"admin_scrape_url_placeholder": "URL do livro em novelfire.net",
"admin_scrape_range": "Intervalo de capítulos",
"admin_scrape_from": "De",
"admin_scrape_to": "Até",
"admin_scrape_submit": "Extrair",
"admin_scrape_cancel": "Cancelar",
"admin_scrape_status_pending": "Pendente",
"admin_scrape_status_running": "Em execução",
"admin_scrape_status_done": "Concluído",
"admin_scrape_status_failed": "Falhou",
"admin_scrape_status_cancelled": "Cancelado",
"admin_tasks_heading": "Tarefas recentes",
"admin_tasks_empty": "Nenhuma tarefa ainda.",
"admin_audio_page_title": "Áudio — Admin",
"admin_audio_heading": "Tarefas de Áudio",
"admin_audio_empty": "Nenhuma tarefa de áudio.",
"admin_changelog_page_title": "Changelog — Admin",
"admin_changelog_heading": "Changelog",
"comments_heading": "Comentários",
"comments_empty": "Nenhum comentário ainda. Seja o primeiro!",
"comments_placeholder": "Escreva um comentário…",
"comments_submit": "Publicar",
"comments_login_prompt": "Entre para comentar.",
"comments_vote_up": "Votar positivo",
"comments_vote_down": "Votar negativo",
"comments_delete": "Excluir",
"comments_reply": "Responder",
"comments_show_replies": "Mostrar {n} respostas",
"comments_hide_replies": "Ocultar respostas",
"comments_edited": "editado",
"comments_deleted": "[excluído]",
"disclaimer_page_title": "Aviso Legal — libnovel",
"privacy_page_title": "Política de Privacidade — libnovel",
"dmca_page_title": "DMCA — libnovel",
"terms_page_title": "Termos de Serviço — libnovel",
"common_loading": "Carregando…",
"common_error": "Erro",
"common_save": "Salvar",
"common_cancel": "Cancelar",
"common_close": "Fechar",
"common_search": "Pesquisar",
"common_back": "Voltar",
"common_next": "Próximo",
"common_previous": "Anterior",
"common_yes": "Sim",
"common_no": "Não",
"common_on": "ativado",
"common_off": "desativado",
"locale_switcher_label": "Idioma",
"books_empty_library": "Sua biblioteca está vazia.",
"books_empty_discover": "Livros que você começar a ler ou salvar de",
"books_empty_discover_link": "Descobrir",
"books_empty_discover_suffix": "aparecerão aqui.",
"books_count": "{n} livro{s}",
"catalogue_sort_updated": "Atualizado",
"catalogue_search_button": "Pesquisar",
"catalogue_refresh": "Atualizar",
"catalogue_refreshing": "Na fila…",
"catalogue_refresh_mobile": "Atualizar catálogo",
"catalogue_all_loaded": "Todos os romances carregados",
"catalogue_scroll_top": "Voltar ao topo",
"catalogue_view_grid": "Visualização em grade",
"catalogue_view_list": "Visualização em lista",
"catalogue_browse_source": "Explorar romances do novelfire.net",
"catalogue_search_results": "{n} resultado{s} para \"{q}\"",
"catalogue_search_local_count": "({local} local, {remote} do novelfire)",
"catalogue_rank_ranked": "{n} romances classificados do último scrape do catálogo",
"catalogue_rank_no_data": "Sem dados de classificação.",
"catalogue_rank_no_data_body": "Sem dados de classificação — execute um scrape completo do catálogo para preencher",
"catalogue_rank_run_scrape_admin": "Clique em Atualizar catálogo acima para acionar um scrape completo.",
"catalogue_rank_run_scrape_user": "Peça a um administrador para executar um scrape do catálogo.",
"catalogue_scrape_queued_flash": "Scrape completo do catálogo na fila. A biblioteca e a classificação serão atualizadas conforme os livros forem processados.",
"catalogue_scrape_busy_flash": "Um job de scrape já está em execução. Volte quando terminar.",
"catalogue_scrape_error_flash": "Falha ao enfileirar o scrape. Verifique se o serviço de scraper está acessível.",
"catalogue_filters_label": "Filtros",
"catalogue_apply": "Aplicar",
"catalogue_filter_rank_note": "Filtros de gênero e status se aplicam apenas a Explorar",
"catalogue_no_results_search": "Nenhum resultado encontrado.",
"catalogue_no_results_try": "Tente um termo de pesquisa diferente.",
"catalogue_no_results_filters": "Tente filtros diferentes ou volte mais tarde.",
"catalogue_scrape_queued_badge": "Na fila",
"catalogue_scrape_busy_badge": "Scraper ocupado",
"catalogue_scrape_busy_list": "Ocupado",
"catalogue_scrape_forbidden_badge": "Proibido",
"catalogue_scrape_novel_button": "Extrair",
"catalogue_scraping_novel": "Extraindo…",
"book_detail_not_in_library": "não está na biblioteca",
"book_detail_continue_ch": "Continuar cap.{n}",
"book_detail_start_ch1": "Começar pelo cap.1",
"book_detail_preview_ch1": "Prévia do cap.1",
"book_detail_reading_ch": "Lendo cap.{n} de {total}",
"book_detail_n_chapters": "{n} capítulos",
"book_detail_rescraping": "Na fila…",
"book_detail_from_chapter": "A partir do capítulo",
"book_detail_to_chapter": "Até o capítulo (opcional)",
"book_detail_range_queuing": "Na fila…",
"book_detail_scrape_range": "Intervalo de extração",
"book_detail_admin": "Admin",
"book_detail_admin_book_cover": "Capa do Livro",
"book_detail_admin_chapter_cover": "Capa do Capítulo",
"book_detail_admin_chapter_n": "Capítulo nº",
"book_detail_admin_description": "Descrição",
"book_detail_admin_chapter_names": "Nomes dos Capítulos",
"book_detail_admin_audio_tts": "Áudio TTS",
"book_detail_admin_voice": "Voz",
"book_detail_admin_generate": "Gerar",
"book_detail_admin_save_cover": "Salvar Capa",
"book_detail_admin_saving": "Salvando…",
"book_detail_admin_saved": "Salvo",
"book_detail_admin_apply": "Aplicar",
"book_detail_admin_applying": "Aplicando…",
"book_detail_admin_applied": "Aplicado",
"book_detail_admin_discard": "Descartar",
"book_detail_admin_enqueue_audio": "Enfileirar Áudio",
"book_detail_admin_cancel_audio": "Cancelar",
"book_detail_admin_enqueued": "{enqueued} enfileirados, {skipped} ignorados",
"book_detail_scraping_progress": "Buscando os primeiros 20 capítulos. Esta página será atualizada automaticamente.",
"book_detail_scraping_home": "← Início",
"book_detail_rescrape_book": "Reextrair livro",
"book_detail_less": "Menos",
"book_detail_more": "Mais",
"chapters_search_placeholder": "Pesquisar capítulos…",
"chapters_jump_to": "Ir para Cap.{n}",
"chapters_no_match": "Nenhum capítulo encontrado para \"{q}\"",
"chapters_none_available": "Nenhum capítulo disponível ainda.",
"chapters_reading_indicator": "lendo",
"chapters_result_count": "{n} resultados",
"reader_fetching_chapter": "Buscando capítulo…",
"reader_words": "{n} palavras",
"reader_preview_audio_notice": "Prévia — áudio não disponível para livros fora da biblioteca.",
"profile_click_to_change": "Clique no avatar para mudar a foto",
"profile_tts_voice": "Voz TTS",
"profile_auto_advance": "Avançar automaticamente para o próximo capítulo",
"profile_saving": "Salvando…",
"profile_saved": "Salvo!",
"profile_session_this": "Esta sessão",
"profile_session_signed_in": "Entrou em {date}",
"profile_session_last_seen": "· Visto por último em {date}",
"profile_session_sign_out": "Sair",
"profile_session_end": "Encerrar",
"profile_session_unrecognised": "Estes são todos os dispositivos conectados à sua conta. Encerre qualquer sessão que não reconhecer.",
"profile_no_sessions": "Nenhum registro de sessão encontrado. As sessões são rastreadas a partir do próximo login.",
"profile_change_password_heading": "Mudar senha",
"profile_update_password": "Atualizar senha",
"profile_updating": "Atualizando…",
"profile_password_changed_ok": "Senha alterada com sucesso.",
"profile_playback_speed": "Velocidade de reprodução — {speed}x",
"profile_subscription_heading": "Assinatura",
"profile_plan_pro": "Pro",
"profile_plan_free": "Gratuito",
"profile_pro_active": "Sua assinatura Pro está ativa.",
"profile_pro_perks": "Áudio ilimitado, todos os idiomas de tradução e seleção de voz estão habilitados.",
"profile_manage_subscription": "Gerenciar assinatura",
"profile_upgrade_heading": "Assinar o Pro",
"profile_upgrade_desc": "Desbloqueie áudio ilimitado, traduções em 4 idiomas e seleção de voz.",
"profile_upgrade_monthly": "Mensal — $6 / mês",
"profile_upgrade_annual": "Anual — $48 / ano",
"profile_free_limits": "Plano gratuito: 3 capítulos de áudio por dia, somente inglês.",
"subscribe_page_title": "Seja Pro — libnovel",
"subscribe_heading": "Leia mais. Ouça mais.",
"subscribe_subheading": "Torne-se Pro e desbloqueie a experiência completa do libnovel.",
"subscribe_monthly_label": "Mensal",
"subscribe_monthly_price": "$6",
"subscribe_monthly_period": "por mês",
"subscribe_annual_label": "Anual",
"subscribe_annual_price": "$48",
"subscribe_annual_period": "por ano",
"subscribe_annual_save": "Economize 33%",
"subscribe_cta_monthly": "Começar plano mensal",
"subscribe_cta_annual": "Começar plano anual",
"subscribe_already_pro": "Você já tem uma assinatura Pro.",
"subscribe_manage": "Gerenciar assinatura",
"subscribe_benefit_audio": "Capítulos de áudio ilimitados por dia",
"subscribe_benefit_voices": "Seleção de voz para todos os mecanismos TTS",
"subscribe_benefit_translation": "Leia em francês, indonésio, português e russo",
"subscribe_benefit_downloads": "Baixe capítulos para ouvir offline",
"subscribe_login_prompt": "Entre para assinar",
"subscribe_login_cta": "Entrar",
"user_currently_reading": "Lendo Agora",
"user_library_count": "Biblioteca ({n})",
"user_joined": "Entrou em {date}",
"user_followers_label": "seguidores",
"user_following_label": "seguindo",
"user_no_books": "Nenhum livro na biblioteca ainda.",
"admin_pages_label": "Páginas",
"admin_tools_label": "Ferramentas",
"admin_nav_scrape": "Scrape",
"admin_nav_audio": "Áudio",
"admin_nav_translation": "Tradução",
"admin_nav_changelog": "Alterações",
"admin_nav_image_gen": "Image Gen",
"admin_nav_text_gen": "Text Gen",
"admin_nav_catalogue_tools": "Catalogue Tools",
"admin_nav_ai_jobs": "Tarefas de IA",
"admin_nav_notifications": "Notificações",
"admin_nav_errors": "Erros",
"admin_nav_analytics": "Análise",
"admin_nav_logs": "Logs",
"admin_nav_uptime": "Uptime",
"admin_nav_push": "Notificações",
"admin_scrape_status_idle": "Ocioso",
"admin_scrape_full_catalogue": "Catálogo completo",
"admin_scrape_single_book": "Livro único",
"admin_scrape_quick_genres": "Gêneros rápidos",
"admin_scrape_task_history": "Histórico de tarefas",
"admin_scrape_filter_placeholder": "Filtrar por tipo, status ou URL…",
"admin_scrape_no_matching": "Nenhuma tarefa correspondente.",
"admin_scrape_start": "Iniciar extração",
"admin_scrape_queuing": "Na fila…",
"admin_scrape_running": "Executando…",
"admin_audio_filter_jobs": "Filtrar por slug, voz ou status…",
"admin_audio_filter_cache": "Filtrar por slug, capítulo ou voz…",
"admin_audio_no_matching_jobs": "Nenhum job correspondente.",
"admin_audio_no_jobs": "Nenhum job de áudio ainda.",
"admin_audio_cache_empty": "Cache de áudio vazio.",
"admin_audio_no_cache_results": "Sem resultados.",
"admin_changelog_gitea": "Releases do Gitea",
"admin_changelog_no_releases": "Nenhum release encontrado.",
"admin_changelog_load_error": "Não foi possível carregar os releases: {error}",
"comments_top": "Mais votados",
"comments_new": "Novos",
"comments_posting": "Publicando…",
"comments_login_link": "Entre",
"comments_login_suffix": "para deixar um comentário.",
"comments_anonymous": "Anônimo",
"reader_audio_narration": "Narração em Áudio",
"reader_playing": "Reproduzindo — controles abaixo",
"reader_paused": "Pausado — controles abaixo",
"reader_ch_ready": "Cap.{n} pronto",
"reader_ch_preparing": "Preparando Cap.{n}… {percent}%",
"reader_ch_generate_on_nav": "Cap.{n} será gerado ao navegar",
"reader_now_playing": "Reproduzindo: {title}",
"reader_load_this_chapter": "Carregar este capítulo",
"reader_generate_samples": "Gerar amostras ausentes",
"reader_voice_applies_next": "A nova voz será aplicada no próximo \"Reproduzir narração\".",
"reader_choose_voice": "Escolher Voz",
"reader_generating_narration": "Gerando narração…",
"profile_font_family": "Fonte",
"profile_font_system": "Sistema",
"profile_font_serif": "Serif",
"profile_font_mono": "Mono",
"profile_text_size": "Tamanho do texto",
"profile_text_size_sm": "Pequeno",
"profile_text_size_md": "Normal",
"profile_text_size_lg": "Grande",
"profile_text_size_xl": "Muito grande",
"feed_page_title": "Feed — LibNovel",
"feed_heading": "Feed de seguidos",
"feed_subheading": "Livros que seus seguidos estão lendo",
"feed_empty_heading": "Nada aqui ainda",
"feed_empty_body": "Siga outros leitores para ver o que estão lendo.",
"feed_not_logged_in": "Faça login para ver seu feed.",
"feed_reader_label": "lendo",
"feed_chapters_label": "{n} capítulos",
"feed_browse_cta": "Ver catálogo",
"feed_find_users_cta": "Encontrar leitores",
"admin_nav_gitea": "Gitea",
"admin_nav_grafana": "Grafana",
"admin_translation_page_title": "Translation — Admin",
"admin_translation_heading": "Machine Translation",
"admin_translation_tab_enqueue": "Enqueue",
"admin_translation_tab_jobs": "Jobs",
"admin_translation_filter_placeholder": "Filter by slug, lang, or status…",
"admin_translation_no_matching": "No matching jobs.",
"admin_translation_no_jobs": "No translation jobs yet.",
"admin_ai_jobs_page_title": "AI Jobs — Admin",
"admin_ai_jobs_heading": "AI Jobs",
"admin_ai_jobs_subheading": "Background AI generation tasks",
"admin_text_gen_page_title": "Text Gen — Admin",
"admin_text_gen_heading": "Text Generation",
"admin_nav_import": "Import"
}
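The pt catalogue above and the ru file that follows thread plural agreement through a second placeholder, `{s}`, as in `"books_count": "{n} livro{s}"` and ru's `"{n} книг{s}"`; the id catalogue omits it because Indonesian does not inflect the noun (`"{n} buku"`). A caller-side sketch of how that slot might be filled, using `Intl.PluralRules` to pick the plural category; `suffixFor` is a hypothetical helper and the suffix tables are illustrative assumptions, not taken from this repo:

```ts
// Fill the {s} suffix slot used by the pt/ru catalogues.
// The ru stem "книг" happens to take bare endings (книга / книги / книг),
// which is what makes a suffix-only placeholder workable there at all.
function suffixFor(locale: string, n: number): string {
  const cat = new Intl.PluralRules(locale).select(n);
  if (locale.startsWith("pt")) return cat === "one" ? "" : "s";
  if (locale.startsWith("ru")) {
    // one → книга, few → книги, many → книг (integers only; fractions
    // fall into "other" and would need real message-level plural forms)
    return cat === "one" ? "а" : cat === "few" ? "и" : "";
  }
  return n === 1 ? "" : "s"; // naive default for other locales
}

// "{n} livro{s}" with n = 2 → "2 livros"; "{n} книг{s}" with n = 3 → "3 книги"
const pt = "{n} livro{s}".replace("{n}", "2").replace("{s}", suffixFor("pt", 2));
const ru = "{n} книг{s}".replace("{n}", "3").replace("{s}", suffixFor("ru", 3));
```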
458 ui/messages/ru.json Normal file
@@ -0,0 +1,458 @@
{
"$schema": "https://inlang.com/schema/inlang-message-format",
"nav_library": "Библиотека",
"nav_catalogue": "Каталог",
"nav_feed": "Лента",
"nav_feedback": "Обратная связь",
"nav_admin": "Админ",
"nav_profile": "Профиль",
"nav_sign_in": "Войти",
"nav_sign_out": "Выйти",
"nav_toggle_menu": "Меню",
"nav_admin_panel": "Панель администратора",
"footer_library": "Библиотека",
"footer_catalogue": "Каталог",
"footer_feedback": "Обратная связь",
"footer_disclaimer": "Отказ от ответственности",
"footer_privacy": "Конфиденциальность",
"footer_dmca": "DMCA",
"footer_copyright": "© {year} libnovel",
"footer_dev": "dev",
"home_title": "libnovel",
"home_stat_books": "Книги",
"home_stat_chapters": "Главы",
"home_stat_in_progress": "В процессе",
"home_continue_reading": "Продолжить чтение",
"home_view_all": "Смотреть все",
"home_recently_updated": "Недавно обновлённые",
"home_from_following": "От авторов, на которых вы подписаны",
"home_empty_title": "Ваша библиотека пуста",
"home_empty_body": "Откройте для себя новеллы и добавьте их в библиотеку.",
"home_discover_novels": "Открыть новеллы",
"home_via_reader": "от {username}",
"home_chapter_badge": "гл.{n}",
"player_generating": "Генерация… {percent}%",
"player_loading": "Загрузка…",
"player_chapters": "Главы",
"player_chapter_n": "Глава {n}",
"player_toggle_chapter_list": "Список глав",
"player_chapter_list_label": "Список глав",
"player_close_chapter_list": "Закрыть список глав",
"player_rewind_15": "Назад 15 секунд",
"player_skip_30": "Вперёд 30 секунд",
"player_back_15": "−15 сек",
"player_forward_30": "+30 сек",
"player_play": "Воспроизвести",
"player_pause": "Пауза",
"player_speed_label": "Скорость {speed}x",
"player_seek_label": "Прогресс главы",
"player_change_speed": "Изменить скорость",
"player_auto_next_on": "Автопереход вкл.",
"player_auto_next_off": "Автопереход выкл.",
"player_auto_next_ready": "Автопереход — гл.{n} готова",
"player_auto_next_preparing": "Автопереход — подготовка гл.{n}…",
"player_auto_next_aria": "Автопереход {state}",
"player_go_to_chapter": "Перейти к главе",
"player_close": "Закрыть плеер",
"login_page_title": "Вход — libnovel",
"login_heading": "Войти в libnovel",
"login_subheading": "Выберите провайдера для входа",
"login_continue_google": "Продолжить с Google",
"login_continue_github": "Продолжить с GitHub",
"login_terms_notice": "Входя, вы принимаете наши условия использования.",
"login_error_oauth_state": "Вход отменён или истёк срок действия. Попробуйте снова.",
"login_error_oauth_failed": "Не удалось подключиться к провайдеру. Попробуйте снова.",
"login_error_oauth_no_email": "У вашего аккаунта нет подтверждённого email. Добавьте его и повторите попытку.",
"books_page_title": "Библиотека — libnovel",
"books_heading": "Ваша библиотека",
"books_empty_title": "Книг пока нет",
"books_empty_body": "Добавляйте книги в библиотеку, посещая страницы книг.",
"books_browse_catalogue": "Обзор каталога",
"books_chapter_count": "{n} глав",
"books_last_read": "Последнее: гл.{n}",
"books_reading_progress": "Гл.{current} / {total}",
"books_remove": "Удалить",
"catalogue_page_title": "Каталог — libnovel",
"catalogue_heading": "Каталог",
"catalogue_search_placeholder": "Поиск новелл…",
"catalogue_filter_genre": "Жанр",
"catalogue_filter_status": "Статус",
"catalogue_filter_sort": "Сортировка",
"catalogue_sort_popular": "Популярные",
"catalogue_sort_new": "Новые",
"catalogue_sort_top_rated": "Топ по рейтингу",
"catalogue_sort_rank": "По рангу",
"catalogue_status_all": "Все",
"catalogue_status_ongoing": "Продолжаются",
"catalogue_status_completed": "Завершены",
"catalogue_genre_all": "Все жанры",
"catalogue_clear_filters": "Сбросить",
"catalogue_reset": "Сброс",
"catalogue_no_results": "Новеллы не найдены.",
"catalogue_loading": "Загрузка…",
"catalogue_load_more": "Загрузить ещё",
"catalogue_results_count": "{n} результатов",
"book_detail_page_title": "{title} — libnovel",
"book_detail_signin_to_save": "Войдите, чтобы сохранить",
"book_detail_add_to_library": "В библиотеку",
"book_detail_remove_from_library": "Удалить из библиотеки",
"book_detail_read_now": "Читать",
"book_detail_continue_reading": "Продолжить чтение",
"book_detail_start_reading": "Начать чтение",
"book_detail_chapters": "{n} глав",
"book_detail_status": "Статус",
"book_detail_author": "Автор",
"book_detail_genres": "Жанры",
"book_detail_description": "Описание",
"book_detail_source": "Источник",
"book_detail_rescrape": "Обновить",
"book_detail_scraping": "Обновление…",
"book_detail_in_library": "В библиотеке",
"chapters_page_title": "Главы — {title}",
"chapters_heading": "Главы",
"chapters_back_to_book": "К книге",
"chapters_reading_now": "Читается",
"chapters_empty": "Главы ещё не загружены.",
"reader_page_title": "{title} — Гл.{n} — libnovel",
"reader_play_narration": "Воспроизвести озвучку",
"reader_generating_audio": "Генерация аудио…",
"reader_signin_for_audio": "Доступна аудионарративация",
"reader_signin_audio_desc": "Войдите, чтобы слушать эту главу в озвучке ИИ.",
"reader_audio_error": "Ошибка генерации аудио.",
"reader_prev_chapter": "Предыдущая глава",
"reader_next_chapter": "Следующая глава",
"reader_back_to_chapters": "К главам",
"reader_chapter_n": "Глава {n}",
"reader_change_voice": "Сменить голос",
"reader_voice_panel_title": "Выбрать голос",
"reader_voice_kokoro": "Голоса Kokoro",
"reader_voice_pocket": "Голоса Pocket-TTS",
"reader_voice_play_sample": "Прослушать образец",
"reader_voice_stop_sample": "Остановить образец",
"reader_voice_selected": "Выбран",
"reader_close_voice_panel": "Закрыть панель голоса",
"reader_auto_next": "Автопереход",
"reader_speed": "Скорость",
"reader_preview_notice": "Предпросмотр — эта глава не полностью загружена.",
"profile_page_title": "Профиль — libnovel",
"profile_heading": "Профиль",
"profile_avatar_label": "Аватар",
"profile_change_avatar": "Изменить аватар",
"profile_username": "Имя пользователя",
"profile_email": "Email",
"profile_change_password": "Изменить пароль",
"profile_current_password": "Текущий пароль",
"profile_new_password": "Новый пароль",
"profile_confirm_password": "Подтвердить пароль",
"profile_save_password": "Сохранить пароль",
"profile_appearance_heading": "Внешний вид",
"profile_theme_label": "Тема",
"profile_theme_amber": "Янтарь",
"profile_theme_slate": "Сланец",
"profile_theme_rose": "Роза",
"profile_theme_forest": "Лес",
"profile_theme_mono": "Моно",
"profile_theme_cyber": "Киберпанк",
"profile_theme_light": "Light",
"profile_theme_light_slate": "Light Blue",
"profile_theme_light_rose": "Light Rose",
"profile_reading_heading": "Настройки чтения",
"profile_voice_label": "Голос по умолчанию",
"profile_speed_label": "Скорость воспроизведения",
"profile_auto_next_label": "Автопереход к следующей главе",
"profile_save_settings": "Сохранить настройки",
"profile_settings_saved": "Настройки сохранены.",
"profile_settings_error": "Не удалось сохранить настройки.",
"profile_password_saved": "Пароль изменён.",
"profile_password_error": "Не удалось изменить пароль.",
"profile_sessions_heading": "Активные сессии",
"profile_sign_out_all": "Выйти на всех других устройствах",
"profile_joined": "Зарегистрирован {date}",
"user_page_title": "{username} — libnovel",
"user_library_heading": "Библиотека {username}",
"user_follow": "Подписаться",
"user_unfollow": "Отписаться",
"user_followers": "{n} подписчиков",
"user_following": "{n} подписок",
"user_library_empty": "В библиотеке нет книг.",
"error_not_found_title": "Страница не найдена",
"error_not_found_body": "Запрошенная страница не существует.",
"error_generic_title": "Что-то пошло не так",
"error_go_home": "На главную",
"error_status": "Ошибка {status}",
"admin_scrape_page_title": "Парсинг — Админ",
"admin_scrape_heading": "Парсинг",
"admin_scrape_catalogue": "Парсинг каталога",
"admin_scrape_book": "Парсинг книги",
"admin_scrape_url_placeholder": "URL книги на novelfire.net",
"admin_scrape_range": "Диапазон глав",
"admin_scrape_from": "От",
"admin_scrape_to": "До",
"admin_scrape_submit": "Парсить",
"admin_scrape_cancel": "Отмена",
"admin_scrape_status_pending": "Ожидание",
"admin_scrape_status_running": "Выполняется",
"admin_scrape_status_done": "Готово",
"admin_scrape_status_failed": "Ошибка",
"admin_scrape_status_cancelled": "Отменено",
"admin_tasks_heading": "Последние задачи",
"admin_tasks_empty": "Задач пока нет.",
"admin_audio_page_title": "Аудио — Админ",
"admin_audio_heading": "Аудио задачи",
"admin_audio_empty": "Аудио задач нет.",
"admin_changelog_page_title": "Changelog — Админ",
"admin_changelog_heading": "Changelog",
"comments_heading": "Комментарии",
"comments_empty": "Комментариев пока нет. Будьте первым!",
"comments_placeholder": "Написать комментарий…",
"comments_submit": "Отправить",
"comments_login_prompt": "Войдите, чтобы комментировать.",
"comments_vote_up": "Плюс",
"comments_vote_down": "Минус",
"comments_delete": "Удалить",
"comments_reply": "Ответить",
"comments_show_replies": "Показать {n} ответов",
"comments_hide_replies": "Скрыть ответы",
"comments_edited": "изменено",
"comments_deleted": "[удалено]",
"disclaimer_page_title": "Отказ от ответственности — libnovel",
"privacy_page_title": "Политика конфиденциальности — libnovel",
"dmca_page_title": "DMCA — libnovel",
"terms_page_title": "Условия использования — libnovel",
"common_loading": "Загрузка…",
"common_error": "Ошибка",
"common_save": "Сохранить",
"common_cancel": "Отмена",
"common_close": "Закрыть",
"common_search": "Поиск",
"common_back": "Назад",
"common_next": "Далее",
"common_previous": "Назад",
"common_yes": "Да",
"common_no": "Нет",
"common_on": "вкл.",
"common_off": "выкл.",
"locale_switcher_label": "Язык",
"books_empty_library": "Ваша библиотека пуста.",
"books_empty_discover": "Книги, которые вы начнёте читать или сохраните из",
"books_empty_discover_link": "Каталога",
"books_empty_discover_suffix": "появятся здесь.",
"books_count": "{n} книг{s}",
"catalogue_sort_updated": "По дате обновления",
"catalogue_search_button": "Поиск",
"catalogue_refresh": "Обновить",
"catalogue_refreshing": "В очереди…",
"catalogue_refresh_mobile": "Обновить каталог",
"catalogue_all_loaded": "Все новеллы загружены",
"catalogue_scroll_top": "Вверх",
"catalogue_view_grid": "Сетка",
"catalogue_view_list": "Список",
"catalogue_browse_source": "Смотреть новеллы с novelfire.net",
"catalogue_search_results": "{n} результат{s} по запросу «{q}»",
"catalogue_search_local_count": "({local} локальных, {remote} с novelfire)",
"catalogue_rank_ranked": "{n} новелл отсортированы по последнему парсингу каталога",
"catalogue_rank_no_data": "Нет данных рейтинга.",
"catalogue_rank_no_data_body": "Нет данных рейтинга — запустите полный парсинг каталога для заполнения",
"catalogue_rank_run_scrape_admin": "Нажмите «Обновить каталог» выше, чтобы запустить полный парсинг.",
"catalogue_rank_run_scrape_user": "Попросите администратора запустить парсинг каталога.",
"catalogue_scrape_queued_flash": "Полный парсинг каталога поставлен в очередь. Библиотека и рейтинг обновятся по мере обработки.",
"catalogue_scrape_busy_flash": "Парсинг уже запущен. Проверьте позже.",
"catalogue_scrape_error_flash": "Не удалось поставить парсинг в очередь. Проверьте доступность сервиса.",
"catalogue_filters_label": "Фильтры",
"catalogue_apply": "Применить",
"catalogue_filter_rank_note": "Фильтры по жанру и статусу применяются только к разделу «Обзор»",
"catalogue_no_results_search": "Ничего не найдено.",
"catalogue_no_results_try": "Попробуйте другой запрос.",
"catalogue_no_results_filters": "Попробуйте другие фильтры или проверьте позже.",
"catalogue_scrape_queued_badge": "В очереди",
"catalogue_scrape_busy_badge": "Парсер занят",
"catalogue_scrape_busy_list": "Занят",
"catalogue_scrape_forbidden_badge": "Запрещено",
"catalogue_scrape_novel_button": "Парсить",
"catalogue_scraping_novel": "Парсинг…",
"book_detail_not_in_library": "не в библиотеке",
"book_detail_continue_ch": "Продолжить гл.{n}",
"book_detail_start_ch1": "Начать с гл.1",
"book_detail_preview_ch1": "Предпросмотр гл.1",
"book_detail_reading_ch": "Читается гл.{n} из {total}",
"book_detail_n_chapters": "{n} глав",
"book_detail_rescraping": "В очереди…",
"book_detail_from_chapter": "С главы",
"book_detail_to_chapter": "До главы (необязательно)",
"book_detail_range_queuing": "В очереди…",
"book_detail_scrape_range": "Диапазон глав",
"book_detail_admin": "Администрирование",
"book_detail_admin_book_cover": "Обложка книги",
"book_detail_admin_chapter_cover": "Обложка главы",
"book_detail_admin_chapter_n": "Глава №",
"book_detail_admin_description": "Описание",
"book_detail_admin_chapter_names": "Названия глав",
"book_detail_admin_audio_tts": "Аудио TTS",
"book_detail_admin_voice": "Голос",
"book_detail_admin_generate": "Сгенерировать",
"book_detail_admin_save_cover": "Сохранить обложку",
"book_detail_admin_saving": "Сохранение…",
|
||||
"book_detail_admin_saved": "Сохранено",
|
||||
"book_detail_admin_apply": "Применить",
|
||||
"book_detail_admin_applying": "Применение…",
|
||||
"book_detail_admin_applied": "Применено",
|
||||
"book_detail_admin_discard": "Отменить",
|
||||
"book_detail_admin_enqueue_audio": "Поставить в очередь",
|
||||
"book_detail_admin_cancel_audio": "Отмена",
|
||||
"book_detail_admin_enqueued": "В очереди {enqueued}, пропущено {skipped}",
|
||||
"book_detail_scraping_progress": "Загружаются первые 20 глав. Страница обновится автоматически.",
|
||||
"book_detail_scraping_home": "← На главную",
|
||||
"book_detail_rescrape_book": "Перепарсить книгу",
|
||||
"book_detail_less": "Скрыть",
|
||||
"book_detail_more": "Ещё",
|
||||
"chapters_search_placeholder": "Поиск глав…",
|
||||
"chapters_jump_to": "Перейти к гл.{n}",
|
||||
"chapters_no_match": "Главы по запросу «{q}» не найдены",
|
||||
"chapters_none_available": "Глав пока нет.",
|
||||
"chapters_reading_indicator": "читается",
|
||||
"chapters_result_count": "{n} результатов",
|
||||
"reader_fetching_chapter": "Загрузка главы…",
|
||||
"reader_words": "{n} слов",
|
||||
"reader_preview_audio_notice": "Предпросмотр — аудио недоступно для книг вне библиотеки.",
|
||||
"profile_click_to_change": "Нажмите на аватар для смены фото",
|
||||
"profile_tts_voice": "Голос TTS",
|
||||
"profile_auto_advance": "Автопереход к следующей главе",
|
||||
"profile_saving": "Сохранение…",
|
||||
"profile_saved": "Сохранено!",
|
||||
"profile_session_this": "Текущая сессия",
|
||||
"profile_session_signed_in": "Вход {date}",
|
||||
"profile_session_last_seen": "· Последний визит {date}",
|
||||
"profile_session_sign_out": "Выйти",
|
||||
"profile_session_end": "Завершить",
|
||||
"profile_session_unrecognised": "Это все устройства, авторизованные в вашем аккаунте. Завершите любую сессию, которую не узнаёте.",
|
||||
"profile_no_sessions": "Записей сессий нет. Отслеживание начнётся со следующего входа.",
|
||||
"profile_change_password_heading": "Изменить пароль",
|
||||
"profile_update_password": "Обновить пароль",
|
||||
"profile_updating": "Обновление…",
|
||||
"profile_password_changed_ok": "Пароль успешно изменён.",
|
||||
"profile_playback_speed": "Скорость воспроизведения — {speed}x",
|
||||
"profile_subscription_heading": "Подписка",
|
||||
"profile_plan_pro": "Pro",
|
||||
"profile_plan_free": "Бесплатно",
|
||||
"profile_pro_active": "Ваша подписка Pro активна.",
|
||||
"profile_pro_perks": "Безлимитное аудио, все языки перевода и выбор голоса доступны.",
|
||||
"profile_manage_subscription": "Управление подпиской",
|
||||
"profile_upgrade_heading": "Перейти на Pro",
|
||||
"profile_upgrade_desc": "Разблокируйте безлимитное аудио, переводы на 4 языка и выбор голоса.",
|
||||
"profile_upgrade_monthly": "Ежемесячно — $6 / мес",
|
||||
"profile_upgrade_annual": "Ежегодно — $48 / год",
|
||||
"profile_free_limits": "Бесплатный план: 3 аудиоглавы в день, только английский.",
|
||||
"subscribe_page_title": "Перейти на Pro — libnovel",
|
||||
"subscribe_heading": "Читайте больше. Слушайте больше.",
|
||||
"subscribe_subheading": "Перейдите на Pro и откройте полный опыт libnovel.",
|
||||
"subscribe_monthly_label": "Ежемесячно",
|
||||
"subscribe_monthly_price": "$6",
|
||||
"subscribe_monthly_period": "в месяц",
|
||||
"subscribe_annual_label": "Ежегодно",
|
||||
"subscribe_annual_price": "$48",
|
||||
"subscribe_annual_period": "в год",
|
||||
"subscribe_annual_save": "Сэкономьте 33%",
|
||||
"subscribe_cta_monthly": "Начать месячный план",
|
||||
"subscribe_cta_annual": "Начать годовой план",
|
||||
"subscribe_already_pro": "У вас уже есть подписка Pro.",
|
||||
"subscribe_manage": "Управление подпиской",
|
||||
"subscribe_benefit_audio": "Неограниченные аудиоглавы в день",
|
||||
"subscribe_benefit_voices": "Выбор голоса для всех TTS-движков",
|
||||
"subscribe_benefit_translation": "Читайте на французском, индонезийском, португальском и русском",
|
||||
"subscribe_benefit_downloads": "Скачивайте главы для прослушивания офлайн",
|
||||
"subscribe_login_prompt": "Войдите, чтобы оформить подписку",
|
||||
"subscribe_login_cta": "Войти",
|
||||
"user_currently_reading": "Сейчас читает",
|
||||
"user_library_count": "Библиотека ({n})",
|
||||
"user_joined": "Зарегистрирован {date}",
|
||||
"user_followers_label": "подписчиков",
|
||||
"user_following_label": "подписок",
|
||||
"user_no_books": "Книг в библиотеке пока нет.",
|
||||
"admin_pages_label": "Страницы",
|
||||
"admin_tools_label": "Инструменты",
|
||||
"admin_nav_scrape": "Скрейпинг",
|
||||
"admin_nav_audio": "Аудио",
|
||||
"admin_nav_translation": "Перевод",
|
||||
"admin_nav_changelog": "Изменения",
|
||||
"admin_nav_image_gen": "Image Gen",
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "Задачи ИИ",
|
||||
"admin_nav_notifications": "Уведомления",
|
||||
"admin_nav_errors": "Ошибки",
|
||||
"admin_nav_analytics": "Аналитика",
|
||||
"admin_nav_logs": "Логи",
|
||||
"admin_nav_uptime": "Мониторинг",
|
||||
"admin_nav_push": "Уведомления",
|
||||
"admin_scrape_status_idle": "Ожидание",
|
||||
"admin_scrape_full_catalogue": "Полный каталог",
|
||||
"admin_scrape_single_book": "Одна книга",
|
||||
"admin_scrape_quick_genres": "Быстрые жанры",
|
||||
"admin_scrape_task_history": "История задач",
|
||||
"admin_scrape_filter_placeholder": "Фильтр по типу, статусу или URL…",
|
||||
"admin_scrape_no_matching": "Задач не найдено.",
|
||||
"admin_scrape_start": "Начать парсинг",
|
||||
"admin_scrape_queuing": "В очереди…",
|
||||
"admin_scrape_running": "Выполняется…",
|
||||
"admin_audio_filter_jobs": "Фильтр по slug, голосу или статусу…",
|
||||
"admin_audio_filter_cache": "Фильтр по slug, главе или голосу…",
|
||||
"admin_audio_no_matching_jobs": "Заданий не найдено.",
|
||||
"admin_audio_no_jobs": "Аудиозаданий пока нет.",
|
||||
"admin_audio_cache_empty": "Аудиокэш пуст.",
|
||||
"admin_audio_no_cache_results": "Результатов нет.",
|
||||
"admin_changelog_gitea": "Релизы Gitea",
|
||||
"admin_changelog_no_releases": "Релизов не найдено.",
|
||||
"admin_changelog_load_error": "Не удалось загрузить релизы: {error}",
|
||||
"comments_top": "Лучшие",
|
||||
"comments_new": "Новые",
|
||||
"comments_posting": "Отправка…",
|
||||
"comments_login_link": "Войдите",
|
||||
"comments_login_suffix": "чтобы оставить комментарий.",
|
||||
"comments_anonymous": "Аноним",
|
||||
"reader_audio_narration": "Аудионарратив",
|
||||
"reader_playing": "Воспроизводится — управление ниже",
|
||||
"reader_paused": "Пауза — управление ниже",
|
||||
"reader_ch_ready": "Гл.{n} готова",
|
||||
"reader_ch_preparing": "Подготовка гл.{n}… {percent}%",
|
||||
"reader_ch_generate_on_nav": "Гл.{n} сгенерируется при переходе",
|
||||
"reader_now_playing": "Сейчас играет: {title}",
|
||||
"reader_load_this_chapter": "Загрузить эту главу",
|
||||
"reader_generate_samples": "Сгенерировать недостающие образцы",
|
||||
"reader_voice_applies_next": "Новый голос применится при следующем нажатии «Воспроизвести».",
|
||||
"reader_choose_voice": "Выбрать голос",
|
||||
"reader_generating_narration": "Генерация озвучки…",
|
||||
"profile_font_family": "Шрифт",
|
||||
"profile_font_system": "Системный",
|
||||
"profile_font_serif": "Serif",
|
||||
"profile_font_mono": "Моноширинный",
|
||||
"profile_text_size": "Размер текста",
|
||||
"profile_text_size_sm": "Маленький",
|
||||
"profile_text_size_md": "Нормальный",
|
||||
"profile_text_size_lg": "Большой",
|
||||
"profile_text_size_xl": "Очень большой",
|
||||
"feed_page_title": "Лента — LibNovel",
  "feed_heading": "Лента подписок",
  "feed_subheading": "Книги, которые читают ваши подписки",
  "feed_empty_heading": "Пока ничего нет",
  "feed_empty_body": "Подпишитесь на других читателей, чтобы видеть, что они читают.",
  "feed_not_logged_in": "Войдите, чтобы видеть свою ленту.",
  "feed_reader_label": "читает",
  "feed_chapters_label": "{n} глав",
  "feed_browse_cta": "Каталог",
  "feed_find_users_cta": "Найти читателей",
  "admin_nav_gitea": "Gitea",
  "admin_nav_grafana": "Grafana",
  "admin_translation_page_title": "Translation — Admin",
  "admin_translation_heading": "Machine Translation",
  "admin_translation_tab_enqueue": "Enqueue",
  "admin_translation_tab_jobs": "Jobs",
  "admin_translation_filter_placeholder": "Filter by slug, lang, or status…",
  "admin_translation_no_matching": "No matching jobs.",
  "admin_translation_no_jobs": "No translation jobs yet.",
  "admin_ai_jobs_page_title": "AI Jobs — Admin",
  "admin_ai_jobs_heading": "AI Jobs",
  "admin_ai_jobs_subheading": "Background AI generation tasks",
  "admin_text_gen_page_title": "Text Gen — Admin",
  "admin_text_gen_heading": "Text Generation",
  "admin_nav_import": "Import"
}
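Editor's note: each key above compiles into a typed message function via the "paraglide" script in ui/package.json below. A minimal usage sketch, assuming the compiled output lands in src/lib/paraglide as configured; the parameter values are illustrative:

import * as m from '$lib/paraglide/messages';

// Placeholders like {n}, {s}, {q} become named parameters of the
// generated function for each key (values here are made up).
const count = m.books_count({ n: 12, s: '' });                  // "12 книг"
const found = m.catalogue_search_results({ n: 3, s: 'а', q: 'меч' });
const title = m.feed_page_title();                              // "Лента — libnovel"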
1548 ui/package-lock.json (generated)
File diff suppressed because it is too large
@@ -8,6 +8,7 @@
    "build": "vite build",
    "preview": "vite preview",
    "prepare": "svelte-kit sync || echo ''",
    "paraglide": "paraglide-js compile --project ./project.inlang --outdir ./src/lib/paraglide && node -e \"const fs=require('fs'),f='./src/lib/paraglide/messages.js',c=fs.readFileSync(f,'utf8').split('\\n').filter(l=>!l.includes('export * as m')&&!l.includes('enabling auto-import')).join('\\n');fs.writeFileSync(f,c)\"",
    "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json",
    "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch"
  },
@@ -30,6 +31,13 @@
  "dependencies": {
    "@aws-sdk/client-s3": "^3.1005.0",
    "@aws-sdk/s3-request-presigner": "^3.1005.0",
    "@grafana/faro-web-sdk": "^2.3.1",
    "@inlang/paraglide-js": "^2.15.1",
    "@opentelemetry/exporter-logs-otlp-http": "^0.214.0",
    "@opentelemetry/exporter-trace-otlp-http": "^0.214.0",
    "@opentelemetry/resources": "^2.6.1",
    "@opentelemetry/sdk-node": "^0.214.0",
    "@opentelemetry/semantic-conventions": "^1.40.0",
    "@sentry/sveltekit": "^10.45.0",
    "cropperjs": "^1.6.2",
    "ioredis": "^5.3.2",
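Editor's note: the inline `node -e` step in the "paraglide" script above is hard to read in one line. Expanded, it is equivalent to the following standalone script (same filter, same file path; it strips the `export * as m` re-export and its auto-import hint from the compiled messages.js):

import fs from 'node:fs';

// Same post-processing as the inline node -e in the "paraglide" script.
const f = './src/lib/paraglide/messages.js';
const c = fs.readFileSync(f, 'utf8')
  .split('\n')
  .filter((l) => !l.includes('export * as m') && !l.includes('enabling auto-import'))
  .join('\n');
fs.writeFileSync(f, c);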
11 ui/project.inlang/settings.json (Normal file)
@@ -0,0 +1,11 @@
{
  "$schema": "https://inlang.com/schema/project-settings",
  "baseLocale": "en",
  "locales": ["en", "ru", "id", "pt", "fr"],
  "modules": [
    "https://cdn.jsdelivr.net/npm/@inlang/plugin-message-format/dist/index.js"
  ],
  "plugin.inlang.messageFormat": {
    "pathPattern": "./messages/{locale}.json"
  }
}
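Editor's note: the message-format plugin resolves one JSON file per locale from pathPattern, with baseLocale "en" as the fallback. A rough illustration; the helper below is hypothetical, not the plugin's API:

// Hypothetical helper showing how pathPattern maps locales to files.
const pathPattern = './messages/{locale}.json';
const messageFileFor = (locale: string): string =>
  pathPattern.replace('{locale}', locale);

messageFileFor('ru'); // → './messages/ru.json'
messageFileFor('en'); // → './messages/en.json' (baseLocale / fallback)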
221 ui/src/app.css
@@ -8,6 +8,135 @@
  --color-surface-3: #3f3f46; /* zinc-700 */
  --color-muted: #a1a1aa; /* zinc-400 */
  --color-text: #f4f4f5; /* zinc-100 */
  --color-border: #3f3f46; /* zinc-700 */
  --color-danger: #f87171; /* red-400 */
  --color-success: #4ade80; /* green-400 */
}

/* ── Amber theme (default) — same as @theme above, explicit for clarity ── */
[data-theme="amber"] {
  --color-brand: #f59e0b;
  --color-brand-dim: #d97706;
  --color-surface: #18181b;
  --color-surface-2: #27272a;
  --color-surface-3: #3f3f46;
  --color-muted: #a1a1aa;
  --color-text: #f4f4f5;
  --color-border: #3f3f46;
  --color-danger: #f87171;
  --color-success: #4ade80;
}

/* ── Slate theme — indigo/slate dark ─────────────────────────────────── */
[data-theme="slate"] {
  --color-brand: #818cf8; /* indigo-400 */
  --color-brand-dim: #4f46e5; /* indigo-600 */
  --color-surface: #0f172a; /* slate-900 */
  --color-surface-2: #1e293b; /* slate-800 */
  --color-surface-3: #334155; /* slate-700 */
  --color-muted: #94a3b8; /* slate-400 */
  --color-text: #f1f5f9; /* slate-100 */
  --color-border: #334155; /* slate-700 */
  --color-danger: #f87171; /* red-400 */
  --color-success: #4ade80; /* green-400 */
}

/* ── Rose theme — dark pink ───────────────────────────────────────────── */
[data-theme="rose"] {
  --color-brand: #fb7185; /* rose-400 */
  --color-brand-dim: #e11d48; /* rose-600 */
  --color-surface: #18181b; /* zinc-900 */
  --color-surface-2: #1c1318; /* custom dark rose */
  --color-surface-3: #2d1f26; /* custom dark rose-2 */
  --color-muted: #a1a1aa; /* zinc-400 */
  --color-text: #f4f4f5; /* zinc-100 */
  --color-border: #3f2d36; /* custom rose border */
  --color-danger: #f87171; /* red-400 */
  --color-success: #4ade80; /* green-400 */
}

/* ── Light amber theme ────────────────────────────────────────────────── */
[data-theme="light"] {
  --color-brand: #d97706; /* amber-600 */
  --color-brand-dim: #b45309; /* amber-700 */
  --color-surface: #ffffff;
  --color-surface-2: #f4f4f5; /* zinc-100 */
  --color-surface-3: #e4e4e7; /* zinc-200 */
  --color-muted: #71717a; /* zinc-500 */
  --color-text: #18181b; /* zinc-900 */
  --color-border: #d4d4d8; /* zinc-300 */
  --color-danger: #dc2626; /* red-600 */
  --color-success: #16a34a; /* green-600 */
}

/* ── Light slate theme ────────────────────────────────────────────────── */
[data-theme="light-slate"] {
  --color-brand: #4f46e5; /* indigo-600 */
  --color-brand-dim: #4338ca; /* indigo-700 */
  --color-surface: #f8fafc; /* slate-50 */
  --color-surface-2: #f1f5f9; /* slate-100 */
  --color-surface-3: #e2e8f0; /* slate-200 */
  --color-muted: #64748b; /* slate-500 */
  --color-text: #0f172a; /* slate-900 */
  --color-border: #cbd5e1; /* slate-300 */
  --color-danger: #dc2626; /* red-600 */
  --color-success: #16a34a; /* green-600 */
}

/* ── Light rose theme ─────────────────────────────────────────────────── */
[data-theme="light-rose"] {
  --color-brand: #e11d48; /* rose-600 */
  --color-brand-dim: #be123c; /* rose-700 */
  --color-surface: #fff1f2; /* rose-50 */
  --color-surface-2: #ffe4e6; /* rose-100 */
  --color-surface-3: #fecdd3; /* rose-200 */
  --color-muted: #9f1239; /* rose-800 at 60% */
  --color-text: #0f0a0b; /* near black */
  --color-border: #fda4af; /* rose-300 */
  --color-danger: #dc2626; /* red-600 */
  --color-success: #16a34a; /* green-600 */
}

/* ── Forest theme — dark green ────────────────────────────────────────── */
[data-theme="forest"] {
  --color-brand: #4ade80; /* green-400 */
  --color-brand-dim: #16a34a; /* green-600 */
  --color-surface: #0a130d; /* custom near-black green */
  --color-surface-2: #111c14; /* custom dark green */
  --color-surface-3: #1a2e1e; /* custom mid green */
  --color-muted: #6b9a77; /* custom muted green */
  --color-text: #e8f5e9; /* custom light green-tinted white */
  --color-border: #1e3a24; /* custom green border */
  --color-danger: #f87171; /* red-400 */
  --color-success: #4ade80; /* green-400 */
}

/* ── Mono theme — pure dark with white accent ─────────────────────────── */
[data-theme="mono"] {
  --color-brand: #f4f4f5; /* zinc-100 — white accent */
  --color-brand-dim: #a1a1aa; /* zinc-400 */
  --color-surface: #09090b; /* zinc-950 */
  --color-surface-2: #18181b; /* zinc-900 */
  --color-surface-3: #27272a; /* zinc-800 */
  --color-muted: #71717a; /* zinc-500 */
  --color-text: #f4f4f5; /* zinc-100 */
  --color-border: #27272a; /* zinc-800 */
  --color-danger: #f87171; /* red-400 */
  --color-success: #4ade80; /* green-400 */
}

/* ── Cyberpunk theme — dark with neon cyan/magenta accents ────────────── */
[data-theme="cyber"] {
  --color-brand: #22d3ee; /* cyan-400 — neon cyan */
  --color-brand-dim: #06b6d4; /* cyan-500 */
  --color-surface: #050712; /* custom near-black blue */
  --color-surface-2: #0d1117; /* custom dark blue-black */
  --color-surface-3: #161b27; /* custom dark blue */
  --color-muted: #6272a4; /* dracula comment blue */
  --color-text: #e2e8f0; /* slate-200 */
  --color-border: #1e2d45; /* custom dark border */
  --color-danger: #ff5555; /* dracula red */
  --color-success: #50fa7b; /* dracula green */
}

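Editor's note: a minimal sketch of how these themes are presumably switched at runtime. The helper name and localStorage key are hypothetical; only the data-theme attribute comes from the selectors above:

export function setTheme(theme: string): void {
  // Matches the [data-theme="…"] selectors above; removing the
  // attribute falls back to the default amber values.
  document.documentElement.dataset.theme = theme;
  localStorage.setItem('theme', theme); // persistence key is assumed
}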
html {
@@ -15,18 +144,27 @@ html {
  color: var(--color-text);
}

/* ── Reading typography custom properties ──────────────────────────── */
:root {
  --reading-font: system-ui, -apple-system, sans-serif;
  --reading-size: 1.05rem;
  --reading-line-height: 1.85;
  --reading-max-width: 72ch;
}
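Editor's note: these custom properties pair with the profile_font_family / profile_text_size strings in ru.json above. A hypothetical bridge from those settings to the variables; the size scale is assumed, only the 1.05rem default comes from the CSS:

// Hypothetical mapping from profile settings to --reading-* properties.
const sizeScale: Record<string, string> = {
  sm: '0.95rem',
  md: '1.05rem', // default from :root above
  lg: '1.2rem',
  xl: '1.35rem',
};

export function applyReadingPrefs(font: string, size: string): void {
  const root = document.documentElement.style;
  root.setProperty('--reading-font', font);
  root.setProperty('--reading-size', sizeScale[size] ?? '1.05rem');
}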
/* ── Chapter prose ─────────────────────────────────────────────────── */
.prose-chapter {
-  max-width: 72ch;
-  line-height: 1.85;
-  font-size: 1.05rem;
-  color: #d4d4d8; /* zinc-300 */
+  max-width: var(--reading-max-width, 72ch);
+  line-height: var(--reading-line-height, 1.85);
+  font-family: var(--reading-font);
+  font-size: var(--reading-size);
+  color: var(--color-muted);
}

.prose-chapter h1,
.prose-chapter h2,
.prose-chapter h3 {
-  color: #f4f4f5;
+  color: var(--color-text);
  font-weight: 700;
  margin-top: 1.5em;
  margin-bottom: 0.5em;
@@ -40,19 +178,68 @@
  margin-bottom: 1.2em;
}

/* Indented paragraph style — book-like, no gap, indent instead */
.prose-chapter.para-indented p {
  text-indent: 2em;
  margin-bottom: 0.35em;
}

.prose-chapter em {
-  color: #a1a1aa;
+  color: var(--color-muted);
}

.prose-chapter strong {
-  color: #f4f4f5;
+  color: var(--color-text);
}

.prose-chapter hr {
-  border-color: #3f3f46;
+  border-color: var(--color-border);
  margin: 2em 0;
}

/* ── Reading progress bar ───────────────────────────────────────────── */
.reading-progress {
  position: fixed;
  top: 0;
  left: 0;
  height: 2px;
  z-index: 100;
  background: var(--color-brand);
  pointer-events: none;
  transition: width 0.1s linear;
}

/* ── Paginated reader ───────────────────────────────────────────────── */
.paginated-container {
  overflow: hidden;
  cursor: pointer;
  user-select: none;
  -webkit-user-select: none;
}

.paginated-container .prose-chapter {
  transition: transform 0.25s cubic-bezier(0.4, 0, 0.2, 1);
  will-change: transform;
}

/* ── Hide scrollbars (used on horizontal carousels) ────────────────── */
.scrollbar-none {
  scrollbar-width: none; /* Firefox */
  -ms-overflow-style: none; /* IE / Edge legacy */
}
.scrollbar-none::-webkit-scrollbar {
  display: none; /* Chrome / Safari / WebKit */
}

/* ── Hero carousel fade ─────────────────────────────────────────────── */
@keyframes fade-in {
  from { opacity: 0; }
  to { opacity: 1; }
}
.animate-fade-in {
  animation: fade-in 0.4s ease-out forwards;
}

/* ── Navigation progress bar ───────────────────────────────────────── */
@keyframes progress-bar {
  0% { width: 0%; opacity: 1; }
@@ -60,6 +247,22 @@
  100% { width: 100%; opacity: 0; }
}
.animate-progress-bar {
-  animation: progress-bar 8s cubic-bezier(0.1, 0.05, 0.1, 1) forwards;
+  animation: progress-bar 4s cubic-bezier(0.1, 0.05, 0.1, 1) forwards;
}

/* ── Respect reduced motion — disable all decorative animations ─────── */
@media (prefers-reduced-motion: reduce) {
  *,
  *::before,
  *::after {
    animation-duration: 0.01ms !important;
    animation-iteration-count: 1 !important;
    transition-duration: 0.01ms !important;
  }
}

/* ── Footer content-visibility — skip paint for off-screen footer ───── */
footer {
  content-visibility: auto;
  contain-intrinsic-size: auto 80px;
}
2 ui/src/app.d.ts (vendored)
@@ -6,9 +6,11 @@ declare global {
interface Locals {
  sessionId: string;
  user: { id: string; username: string; role: string; authSessionId: string } | null;
+ isPro: boolean;
}
interface PageData {
  user?: { id: string; username: string; role: string; authSessionId: string } | null;
+ isPro?: boolean;
}
// interface PageState {}
// interface Platform {}
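Editor's note: a sketch of where the new locals.isPro would plausibly be populated, in a SvelteKit server hook. The subscription lookup is hypothetical; only the Locals shape comes from app.d.ts above:

import type { Handle } from '@sveltejs/kit';

// Hypothetical subscription check, not part of this diff.
declare function hasActiveSubscription(userId: string): Promise<boolean>;

export const handle: Handle = async ({ event, resolve }) => {
  // locals.user is assumed to be set by an earlier auth step.
  const user = event.locals.user;
  event.locals.isPro = user ? await hasActiveSubscription(user.id) : false;
  return resolve(event);
};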
Some files were not shown because too many files have changed in this diff.