Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
| | b363c151a5 | |
| | aef9e04419 | |
| | 58e78cd34d | |
| | c5c167035d | |
| | 4a00d953bb | |
@@ -136,52 +136,51 @@ jobs:
       cache-to: type=inline

-  # ── ui: source map upload ─────────────────────────────────────────────────────
-  # Builds the UI with source maps and uploads them to GlitchTip so that error
-  # stack traces resolve to original .svelte/.ts file names and line numbers.
-  # Runs in parallel with docker-ui (both need check-ui to pass first).
-  upload-sourcemaps:
-    name: Upload source maps
-    runs-on: ubuntu-latest
-    needs: [check-ui]
-    defaults:
-      run:
-        working-directory: ui
-    steps:
-      - uses: actions/checkout@v4
-
-      - uses: actions/setup-node@v4
-        with:
-          node-version: "22"
-          cache: npm
-          cache-dependency-path: ui/package-lock.json
-
-      - name: Install dependencies
-        run: npm ci
-
-      - name: Build with source maps
-        run: npm run build
-
-      - name: Download glitchtip-cli
-        run: |
-          curl -L "https://gitlab.com/glitchtip/glitchtip-cli/-/jobs/artifacts/v0.1.0/raw/artifacts/glitchtip-cli-linux-x86_64?job=build-linux-x86_64" \
-            -o /usr/local/bin/glitchtip-cli
-          chmod +x /usr/local/bin/glitchtip-cli
-
-      - name: Inject debug IDs into build artifacts
-        run: glitchtip-cli sourcemaps inject ./build
-        env:
-          SENTRY_URL: https://errors.libnovel.cc/
-          SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
-          SENTRY_ORG: libnovel
-          SENTRY_PROJECT: libnovel-ui
-
-      - name: Upload source maps to GlitchTip
-        run: glitchtip-cli sourcemaps upload ./build --release ${{ gitea.ref_name }}
-        env:
-          SENTRY_URL: https://errors.libnovel.cc/
-          SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
-          SENTRY_ORG: libnovel
-          SENTRY_PROJECT: libnovel-ui
+  # Commented out: GlitchTip project/auth token needs to be recreated after
+  # the GlitchTip DB wipe. Re-enable once GLITCHTIP_AUTH_TOKEN is updated.
+  # upload-sourcemaps:
+  #   name: Upload source maps
+  #   runs-on: ubuntu-latest
+  #   needs: [check-ui]
+  #   defaults:
+  #     run:
+  #       working-directory: ui
+  #   steps:
+  #     - uses: actions/checkout@v4
+  #
+  #     - uses: actions/setup-node@v4
+  #       with:
+  #         node-version: "22"
+  #         cache: npm
+  #         cache-dependency-path: ui/package-lock.json
+  #
+  #     - name: Install dependencies
+  #       run: npm ci
+  #
+  #     - name: Build with source maps
+  #       run: npm run build
+  #
+  #     - name: Download glitchtip-cli
+  #       run: |
+  #         curl -L "https://gitlab.com/glitchtip/glitchtip-cli/-/jobs/artifacts/v0.1.0/raw/artifacts/glitchtip-cli-linux-x86_64?job=build-linux-x86_64" \
+  #           -o /usr/local/bin/glitchtip-cli
+  #         chmod +x /usr/local/bin/glitchtip-cli
+  #
+  #     - name: Inject debug IDs into build artifacts
+  #       run: glitchtip-cli sourcemaps inject ./build
+  #       env:
+  #         SENTRY_URL: https://errors.libnovel.cc/
+  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
+  #         SENTRY_ORG: libnovel
+  #         SENTRY_PROJECT: libnovel-ui
+  #
+  #     - name: Upload source maps to GlitchTip
+  #       run: glitchtip-cli sourcemaps upload ./build --release ${{ gitea.ref_name }}
+  #       env:
+  #         SENTRY_URL: https://errors.libnovel.cc/
+  #         SENTRY_AUTH_TOKEN: ${{ secrets.GLITCHTIP_AUTH_TOKEN }}
+  #         SENTRY_ORG: libnovel
+  #         SENTRY_PROJECT: libnovel-ui

   # ── docker: ui ────────────────────────────────────────────────────────────────
   docker-ui:
@@ -261,7 +260,7 @@ jobs:
   release:
     name: Gitea Release
     runs-on: ubuntu-latest
-    needs: [docker-backend, docker-runner, docker-ui, docker-caddy, upload-sourcemaps]
+    needs: [docker-backend, docker-runner, docker-ui, docker-caddy]
    steps:
       - uses: actions/checkout@v4
         with:
@@ -152,6 +152,7 @@ func run() error {
 		OrchestratorWorkers:         workers,
 		MetricsAddr:                 cfg.Runner.MetricsAddr,
 		CatalogueRefreshInterval:    cfg.Runner.CatalogueRefreshInterval,
+		CatalogueRequestDelay:       cfg.Runner.CatalogueRequestDelay,
 		SkipInitialCatalogueRefresh: cfg.Runner.SkipInitialCatalogueRefresh,
 		RedisAddr:                   cfg.Redis.Addr,
 		RedisPassword:               cfg.Redis.Password,
@@ -126,6 +126,11 @@ type Runner struct {
 	// is already indexed and a 24h walk would be wasteful.
 	// Controlled by RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true.
 	SkipInitialCatalogueRefresh bool
+	// CatalogueRequestDelay is the base delay inserted between per-book metadata
+	// requests during a catalogue refresh. A random jitter of up to 50% is added
+	// on top. Defaults to 2s. Increase to reduce 429 pressure on novelfire.net.
+	// Controlled by RUNNER_CATALOGUE_REQUEST_DELAY (e.g. "3s", "500ms").
+	CatalogueRequestDelay time.Duration
 }

 // Config is the top-level configuration struct consumed by both binaries.
@@ -196,6 +201,7 @@ func Load() Config {
 		MetricsAddr:                 envOr("RUNNER_METRICS_ADDR", ":9091"),
 		CatalogueRefreshInterval:    envDuration("RUNNER_CATALOGUE_REFRESH_INTERVAL", 0),
 		SkipInitialCatalogueRefresh: envBool("RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH", false),
+		CatalogueRequestDelay:       envDuration("RUNNER_CATALOGUE_REQUEST_DELAY", 2*time.Second),
 	},

 	Meilisearch: Meilisearch{
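The Load() hunk above leans on small env helpers (envOr, envDuration, envBool) whose definitions sit outside this diff. A minimal sketch of what such helpers conventionally look like; the names and call shapes come from the call sites above, while the bodies and the package name are assumptions:

```go
// Sketch only: reconstructed from the call sites above, not the repo's code.
package config

import (
	"os"
	"strconv"
	"time"
)

// envOr returns the value of the environment variable key, or def if unset.
func envOr(key, def string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return def
}

// envDuration parses key as a time.Duration ("3s", "500ms"), falling back to def.
func envDuration(key string, def time.Duration) time.Duration {
	if v := os.Getenv(key); v != "" {
		if d, err := time.ParseDuration(v); err == nil {
			return d
		}
	}
	return def
}

// envBool parses key as a boolean ("true", "1", "false", ...), falling back to def.
func envBool(key string, def bool) bool {
	if v := os.Getenv(key); v != "" {
		if b, err := strconv.ParseBool(v); err == nil {
			return b
		}
	}
	return def
}
```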
@@ -13,6 +13,7 @@ import (
 	"errors"
 	"fmt"
 	"log/slog"
+	"math/rand"
 	"net/url"
 	"path"
 	"strconv"
@@ -55,6 +56,9 @@ func (s *Scraper) SourceName() string { return "novelfire.net" }
 // ── CatalogueProvider ─────────────────────────────────────────────────────────

 // ScrapeCatalogue streams all CatalogueEntry values across all catalogue pages.
+// Each page fetch uses retryGet with 429-aware exponential backoff.
+// A small inter-page delay (cataloguePageDelay) is inserted between requests to
+// avoid hammering the server when paging through hundreds of catalogue pages.
 func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueEntry, <-chan error) {
 	entries := make(chan domain.CatalogueEntry, 64)
 	errs := make(chan error, 16)
@@ -73,8 +77,18 @@ func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueE
 		default:
 		}

+		// Polite inter-page delay — skipped on the very first page.
+		if page > 1 {
+			jitter := time.Duration(500+rand.Intn(1000)) * time.Millisecond
+			select {
+			case <-ctx.Done():
+				return
+			case <-time.After(jitter):
+			}
+		}
+
 		s.log.Info("scraping catalogue page", "page", page, "url", pageURL)
-		raw, err := s.client.GetContent(ctx, pageURL)
+		raw, err := retryGet(ctx, s.log, s.client, pageURL, 9, 10*time.Second)
 		if err != nil {
 			errs <- fmt.Errorf("catalogue page %d: %w", page, err)
 			return
@@ -139,10 +153,11 @@ func (s *Scraper) ScrapeCatalogue(ctx context.Context) (<-chan domain.CatalogueE
 // ── MetadataProvider ──────────────────────────────────────────────────────────

 // ScrapeMetadata fetches and parses book metadata from the book's landing page.
+// Uses retryGet with 429-aware exponential backoff (up to 9 attempts).
 func (s *Scraper) ScrapeMetadata(ctx context.Context, bookURL string) (domain.BookMeta, error) {
 	s.log.Debug("metadata fetch starting", "url", bookURL)

-	raw, err := s.client.GetContent(ctx, bookURL)
+	raw, err := retryGet(ctx, s.log, s.client, bookURL, 9, 10*time.Second)
 	if err != nil {
 		return domain.BookMeta{}, fmt.Errorf("metadata fetch %s: %w", bookURL, err)
 	}
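retryGet does the heavy lifting in both hunks above, but its definition is outside this diff. Below is a rough sketch of a 429-aware exponential-backoff helper matching the call sites (9 attempts, 10s base); the contentGetter interface, the error sentinel, the return type, and the backoff cap are all assumptions, not the repo's actual code:

```go
// Sketch only: signature reconstructed from the call sites above.
package scraper

import (
	"context"
	"errors"
	"fmt"
	"log/slog"
	"time"
)

// contentGetter is assumed from the replaced s.client.GetContent calls.
type contentGetter interface {
	GetContent(ctx context.Context, url string) (string, error)
}

var errTooManyRequests = errors.New("http 429") // hypothetical sentinel

func retryGet(ctx context.Context, log *slog.Logger, client contentGetter,
	pageURL string, maxAttempts int, baseDelay time.Duration) (string, error) {

	var lastErr error
	for attempt := 1; attempt <= maxAttempts; attempt++ {
		raw, err := client.GetContent(ctx, pageURL)
		if err == nil {
			return raw, nil
		}
		lastErr = err
		// Only 429s are worth retrying; anything else fails fast.
		if !errors.Is(err, errTooManyRequests) {
			return "", err
		}
		if attempt == maxAttempts {
			break // no point sleeping after the final attempt
		}
		// Exponential backoff: baseDelay, 2x, 4x, ... capped at 5 minutes.
		wait := baseDelay << (attempt - 1)
		if wait > 5*time.Minute {
			wait = 5 * time.Minute
		}
		log.Warn("429 received, backing off",
			"url", pageURL, "attempt", attempt, "wait", wait)
		select {
		case <-ctx.Done():
			return "", ctx.Err()
		case <-time.After(wait):
		}
	}
	return "", fmt.Errorf("giving up after %d attempts: %w", maxAttempts, lastErr)
}
```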
@@ -6,17 +6,20 @@ package runner
 //
 // Design:
 // - Runs on its own ticker (CatalogueRefreshInterval, default 24h) inside Run().
-// - Also fires once on startup.
-// - ScrapeCatalogue streams CatalogueEntry values over a channel — we iterate
-//   and call ScrapeMetadata for each entry.
-// - Per-request random jitter (1–3s) prevents hammering novelfire.net.
-// - Cover images are fetched from the URL embedded in BookMeta.Cover and
-//   stored in MinIO (browse bucket, key: covers/{slug}.jpg).
-// - WriteMetadata + UpsertBook are called for every successfully scraped book.
-// - Errors for individual books are logged and skipped; the loop continues.
-// - The cover URL stored in BookMeta.Cover is rewritten to the internal proxy
-//   path (/api/cover/novelfire.net/{slug}) so the UI always fetches via the
-//   backend, which will serve from MinIO.
+// - Also fires once on startup (unless SkipInitialCatalogueRefresh is set).
+// - ScrapeCatalogue streams CatalogueEntry values over a channel — already has
+//   its own inter-page jitter + retryGet (see scraper.go).
+// - Per-book: only metadata is scraped here (not chapters). Chapters are scraped
+//   on-demand when a user opens a book or via an explicit scrape task.
+// - Between each metadata request a configurable base delay plus up to 50%
+//   random jitter is applied (CatalogueRequestDelay, default 2s). This keeps
+//   the request rate well below novelfire.net's rate limit even for ~15k books.
+// - ScrapeMetadata itself uses retryGet with 429-aware exponential backoff
+//   (up to 9 attempts), so transient rate limits are handled gracefully.
+// - Cover images are fetched and stored in MinIO on first sight; subsequent
+//   refreshes skip covers that already exist (CoverExists check).
+// - Books already present in Meilisearch are skipped entirely (fast path).
+// - Errors for individual books are logged and skipped; the loop never aborts.

 import (
 	"context"
@@ -29,7 +32,7 @@ import (

 // runCatalogueRefresh performs one full catalogue walk: scrapes metadata for
 // every book on novelfire.net, downloads covers to MinIO, and upserts to
-// Meilisearch. Errors for individual books are logged and skipped.
+// Meilisearch. Individual book failures are logged and skipped.
 func (r *Runner) runCatalogueRefresh(ctx context.Context) {
 	if r.deps.Novel == nil {
 		r.deps.Log.Warn("runner: catalogue refresh skipped — Novel scraper not configured")
@@ -40,8 +43,9 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {
 		return
 	}

+	delay := r.cfg.CatalogueRequestDelay
 	log := r.deps.Log.With("op", "catalogue_refresh")
-	log.Info("runner: catalogue refresh starting")
+	log.Info("runner: catalogue refresh starting", "request_delay", delay)

 	entries, errCh := r.deps.Novel.ScrapeCatalogue(ctx)

@@ -51,26 +55,26 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {
 			break
 		}

-		// Skip books already present in Meilisearch — they were indexed on a
-		// previous run. Re-indexing only happens when a scrape task is
-		// explicitly enqueued (e.g. via the admin UI or API).
+		// Fast path: skip books already indexed in Meilisearch.
 		if r.deps.SearchIndex.BookExists(ctx, entry.Slug) {
 			skipped++
 			continue
 		}

-		// Random jitter between books to avoid rate-limiting.
-		jitter := time.Duration(1000+rand.Intn(2000)) * time.Millisecond
+		// Polite delay between metadata requests: base + up to 50% jitter.
+		// This applies before every fetch so we never fire bursts.
+		jitter := time.Duration(rand.Int63n(int64(delay / 2)))
 		select {
 		case <-ctx.Done():
 			break
-		case <-time.After(jitter):
+		case <-time.After(delay + jitter):
 		}

+		// ScrapeMetadata internally retries on 429 with exponential back-off.
 		meta, err := r.deps.Novel.ScrapeMetadata(ctx, entry.URL)
 		if err != nil {
-			log.Warn("runner: catalogue refresh: metadata scrape failed",
-				"url", entry.URL, "err", err)
+			log.Warn("runner: catalogue refresh: metadata scrape failed — skipping book",
+				"slug", entry.Slug, "url", entry.URL, "err", err)
 			errCount++
 			continue
 		}
@@ -81,35 +85,32 @@ func (r *Runner) runCatalogueRefresh(ctx context.Context) {

 		// Persist to PocketBase.
 		if err := r.deps.BookWriter.WriteMetadata(ctx, meta); err != nil {
-			log.Warn("runner: catalogue refresh: WriteMetadata failed",
+			log.Warn("runner: catalogue refresh: WriteMetadata failed — skipping book",
 				"slug", meta.Slug, "err", err)
 			errCount++
 			continue
 		}

-		// Index in Meilisearch.
+		// Index in Meilisearch (non-fatal).
 		if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
 			log.Warn("runner: catalogue refresh: UpsertBook failed",
 				"slug", meta.Slug, "err", err)
-			// non-fatal — continue
 		}

-		// Download and store cover image in MinIO if we have a cover URL
-		// and a CoverStore is wired in.
+		// Download cover to MinIO if not already cached (non-fatal).
 		if r.deps.CoverStore != nil && originalCover != "" {
 			if !r.deps.CoverStore.CoverExists(ctx, meta.Slug) {
 				if err := r.downloadCover(ctx, meta.Slug, originalCover); err != nil {
 					log.Warn("runner: catalogue refresh: cover download failed",
 						"slug", meta.Slug, "url", originalCover, "err", err)
-					// non-fatal
 				}
 			}
 		}

 		ok++
-		if ok%100 == 0 {
+		if ok%50 == 0 {
 			log.Info("runner: catalogue refresh progress",
-				"scraped", ok, "errors", errCount)
+				"scraped", ok, "skipped", skipped, "errors", errCount)
 		}
 	}
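A quick sanity check on the pacing introduced above: rand.Int63n(int64(delay / 2)) draws jitter uniformly from [0, delay/2), so each sleep lands in [delay, 1.5*delay). With the 2s default that is 2 to 3 seconds per request, or roughly 10.5 hours for a full walk of ~15k books. A self-contained sketch of the same arithmetic (the numbers are illustrative, not the repo's code):

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	delay := 2 * time.Second // CatalogueRequestDelay default

	// Same formula as the runner: uniform jitter in [0, delay/2).
	jitter := time.Duration(rand.Int63n(int64(delay / 2)))
	fmt.Println("per-request sleep:", delay+jitter) // somewhere in [2s, 3s)

	// Expected walk time for ~15k books at the mean sleep of 1.25*delay.
	books := 15000
	mean := delay + delay/4
	fmt.Println("expected full walk:", time.Duration(books)*mean) // 10h25m0s
}
```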
@@ -62,6 +62,10 @@ type Config struct {
 	// scrapes per-book metadata, downloads covers, and re-indexes everything in
 	// Meilisearch. Defaults to 24h (expensive — full catalogue walk).
 	CatalogueRefreshInterval time.Duration
+	// CatalogueRequestDelay is the base inter-request pause during a catalogue
+	// refresh metadata walk. Jitter of up to 50% is added on top.
+	// Defaults to 2s. Set via RUNNER_CATALOGUE_REQUEST_DELAY.
+	CatalogueRequestDelay time.Duration
 	// SkipInitialCatalogueRefresh suppresses the immediate catalogue walk that
 	// otherwise fires at startup. The periodic ticker (CatalogueRefreshInterval)
 	// still fires normally. Set RUNNER_SKIP_INITIAL_CATALOGUE_REFRESH=true for
@@ -145,6 +149,9 @@ func New(cfg Config, deps Dependencies) *Runner {
 	if cfg.CatalogueRefreshInterval <= 0 {
 		cfg.CatalogueRefreshInterval = 24 * time.Hour
 	}
+	if cfg.CatalogueRequestDelay <= 0 {
+		cfg.CatalogueRequestDelay = 2 * time.Second
+	}
 	if cfg.MetricsAddr == "" {
 		cfg.MetricsAddr = ":9091"
 	}
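One non-obvious reason the `<= 0` clamp in New() matters: rand.Int63n panics when its argument is not positive, so an unset (zero) CatalogueRequestDelay would make the jitter expression in runCatalogueRefresh, rand.Int63n(int64(delay / 2)), crash the walk at the first non-skipped book. The default here is a correctness guard, not just a tuning knob.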
@@ -706,7 +706,7 @@ func (s *Store) ListAudioTasks(ctx context.Context) ([]domain.AudioTask, error)
 }

 func (s *Store) GetAudioTask(ctx context.Context, cacheKey string) (domain.AudioTask, bool, error) {
-	filter := fmt.Sprintf(`cache_key=%q`, cacheKey)
+	filter := fmt.Sprintf(`cache_key='%s'`, cacheKey)
 	items, err := s.pb.listAll(ctx, "audio_jobs", filter, "-started")
 	if err != nil || len(items) == 0 {
 		return domain.AudioTask{}, false, err
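The change above is a quoting fix: Go's %q verb wraps the value in double quotes (and escapes it), while the PocketBase filter string evidently needs a single-quoted literal here; the diff only shows the swap, so the "why" is inferred. A tiny illustration of the two outputs (the cache key is hypothetical):

```go
package main

import "fmt"

func main() {
	cacheKey := "some-book-12-af_bella" // hypothetical key shape

	before := fmt.Sprintf(`cache_key=%q`, cacheKey)   // %q: double-quoted, escaped
	after := fmt.Sprintf(`cache_key='%s'`, cacheKey)  // explicit single quotes

	fmt.Println(before) // cache_key="some-book-12-af_bella"
	fmt.Println(after)  // cache_key='some-book-12-af_bella'
}
```

Note the trade-off: %q also escaped embedded quotes, so the new form quietly assumes cache keys never contain a single quote.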
@@ -15,7 +15,7 @@ x-infra-env: &infra-env
   POCKETBASE_ADMIN_EMAIL: "${POCKETBASE_ADMIN_EMAIL}"
   POCKETBASE_ADMIN_PASSWORD: "${POCKETBASE_ADMIN_PASSWORD}"
   # Meilisearch
-  MEILI_URL: "http://meilisearch:7700"
+  MEILI_URL: "${MEILI_URL:-http://meilisearch:7700}"
   MEILI_API_KEY: "${MEILI_MASTER_KEY}"
   # Valkey
   VALKEY_ADDR: "valkey:6379"
@@ -154,12 +154,13 @@ services:
     # No public port — all traffic is routed via Caddy.
     expose:
       - "8080"
     environment:
+      <<: *infra-env
       BACKEND_HTTP_ADDR: ":8080"
       LOG_LEVEL: "${LOG_LEVEL}"
       KOKORO_URL: "${KOKORO_URL}"
       KOKORO_VOICE: "${KOKORO_VOICE}"
       POCKET_TTS_URL: "${POCKET_TTS_URL}"
       GLITCHTIP_DSN: "${GLITCHTIP_DSN}"
       OTEL_EXPORTER_OTLP_ENDPOINT: "${OTEL_EXPORTER_OTLP_ENDPOINT}"
       OTEL_SERVICE_NAME: "backend"
@@ -366,6 +367,8 @@ services:
     build:
       context: ./caddy
       dockerfile: Dockerfile
+    labels:
+      com.centurylinklabs.watchtower.enable: "true"
     restart: unless-stopped
     depends_on:
       backend:
@@ -394,7 +394,7 @@ services:
   # Voices match existing IDs: af_bella, af_sky, af_heart, etc.
   # The runner reaches it at http://kokoro-fastapi:8880 via the Docker network.
   kokoro-fastapi:
-    image: ghcr.io/remsky/kokoro-fastapi-gpu:latest
+    image: kokoro-fastapi:latest
     restart: unless-stopped
     deploy:
       resources:
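The `<<: *infra-env` line added above is YAML's merge key: it splices every entry of the shared x-infra-env mapping into the backend's environment, and any key listed explicitly below it wins on collision. The backend therefore inherits the PocketBase, Meilisearch, and Valkey settings without repeating them per service.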
@@ -343,23 +343,28 @@

 	// ── API helpers ────────────────────────────────────────────────────────────

+	type PresignResult =
+		| { ready: true; url: string }
+		| { ready: false; enqueued: boolean }; // enqueued=true → presign already POSTed
+
 	async function tryPresign(
 		targetSlug: string,
 		targetChapter: number,
 		targetVoice: string
-	): Promise<string | null> {
+	): Promise<PresignResult> {
 		const params = new URLSearchParams({
 			slug: targetSlug,
 			n: String(targetChapter),
 			voice: targetVoice
 		});
 		const res = await fetch(`/api/presign/audio?${params}`);
-		// 202: TTS was just enqueued by the presign endpoint — audio not ready yet.
+		// 202: presign endpoint already triggered TTS — skip the POST, go straight to polling.
 		// 404: legacy fallback (should no longer occur after endpoint change).
-		if (res.status === 202 || res.status === 404) return null;
+		if (res.status === 202) return { ready: false, enqueued: true };
+		if (res.status === 404) return { ready: false, enqueued: false };
 		if (!res.ok) throw new Error(`presign HTTP ${res.status}`);
 		const data = (await res.json()) as { url: string };
-		return data.url;
+		return { ready: true, url: data.url };
 	}

 	type AudioStatusResponse =
@@ -421,50 +426,52 @@

 		try {
 			// Fast path: already generated
-			const url = await tryPresign(slug, nextChapter, voice);
-			if (url) {
+			const presignResult = await tryPresign(slug, nextChapter, voice);
+			if (presignResult.ready) {
 				stopNextProgress();
 				audioStore.nextProgress = 100;
-				audioStore.nextAudioUrl = url;
+				audioStore.nextAudioUrl = presignResult.url;
 				audioStore.nextStatus = 'prefetched';
 				return;
 			}

-			// Slow path: trigger Kokoro generation (non-blocking POST), then poll.
-			const res = await fetch(`/api/audio/${slug}/${nextChapter}`, {
-				method: 'POST',
-				headers: { 'Content-Type': 'application/json' },
-				body: JSON.stringify({ voice })
-			});
-			if (!res.ok) throw new Error(`Prefetch generation failed: HTTP ${res.status}`);
-
-			// Whether the server returned 200 (already cached) or 202 (enqueued),
-			// always presign — the status endpoint no longer returns a proxy URL.
-			if (res.status === 200) {
-				// Body is { status: 'done' } — audio confirmed in MinIO. Presign it.
-				await res.body?.cancel();
-			}
-			// else 202: generation enqueued — fall through to poll.
-
-			if (res.status !== 200) {
-				// 202: poll until done.
-				const final = await pollAudioStatus(slug, nextChapter, voice);
-				stopNextProgress();
-				audioStore.nextProgress = 100;
-
-				if (final.status === 'failed') {
-					throw new Error(`Prefetch failed: ${(final as { error?: string }).error ?? 'unknown'}`);
-				}
-			}
+			// Slow path: trigger generation (or skip POST if presign already enqueued).
+			if (!presignResult.enqueued) {
+				const res = await fetch(`/api/audio/${slug}/${nextChapter}`, {
+					method: 'POST',
+					headers: { 'Content-Type': 'application/json' },
+					body: JSON.stringify({ voice })
+				});
+				if (!res.ok) throw new Error(`Prefetch generation failed: HTTP ${res.status}`);
+
+				if (res.status === 200) {
+					// Body is { status: 'done' } — audio confirmed in MinIO. Presign it.
+					await res.body?.cancel();
+					stopNextProgress();
+					audioStore.nextProgress = 100;
+					const doneUrl = await tryPresign(slug, nextChapter, voice);
+					if (!doneUrl.ready) throw new Error('Prefetch: audio done but presign returned 404');
+					audioStore.nextAudioUrl = doneUrl.url;
+					audioStore.nextStatus = 'prefetched';
+					return;
+				}
+			} else {
+				stopNextProgress();
+				audioStore.nextProgress = 100;
+				// 202: generation enqueued — fall through to poll.
+			}
+
+			// Poll until done (covers both: presign-enqueued and POST-enqueued paths).
+			const final = await pollAudioStatus(slug, nextChapter, voice);
+			stopNextProgress();
+			audioStore.nextProgress = 100;
+
+			if (final.status === 'failed') {
+				throw new Error(`Prefetch failed: ${(final as { error?: string }).error ?? 'unknown'}`);
+			}

 			// Audio is ready in MinIO — get a direct presigned URL.
 			const doneUrl = await tryPresign(slug, nextChapter, voice);
-			if (!doneUrl) throw new Error('Prefetch: audio done but presign returned 404');
+			if (!doneUrl.ready) throw new Error('Prefetch: audio done but presign returned 404');

-			audioStore.nextAudioUrl = doneUrl;
+			audioStore.nextAudioUrl = doneUrl.url;
 			audioStore.nextStatus = 'prefetched';
 		} catch {
 			stopNextProgress();
@@ -532,9 +539,9 @@
 		}

 		// Fast path B: audio already in MinIO (presign check).
-		const url = await tryPresign(slug, chapter, voice);
-		if (url) {
-			audioStore.audioUrl = url;
+		const presignResult = await tryPresign(slug, chapter, voice);
+		if (presignResult.ready) {
+			audioStore.audioUrl = presignResult.url;
 			audioStore.status = 'ready';
 			// Restore last saved position after the audio element loads
 			restoreSavedAudioTime();
@@ -547,33 +554,44 @@
 		audioStore.status = 'generating';
 		startProgress();

-		const res = await fetch(`/api/audio/${slug}/${chapter}`, {
-			method: 'POST',
-			headers: { 'Content-Type': 'application/json' },
-			body: JSON.stringify({ voice })
-		});
-		if (!res.ok) throw new Error(`Generation failed: HTTP ${res.status}`);
-
-		if (res.status !== 200) {
-			// 202: generation enqueued — poll until done.
-			const final = await pollAudioStatus(slug, chapter, voice);
-
-			if (final.status === 'failed') {
-				throw new Error(
-					`Generation failed: ${(final as { error?: string }).error ?? 'unknown error'}`
-				);
-			}
-		} else {
-			// 200: already cached — body is { status: 'done' }, no url needed.
-			await res.body?.cancel();
-		}
+		// presignResult.enqueued=true means /api/presign/audio already POSTed on our
+		// behalf — skip the duplicate POST and go straight to polling.
+		if (!presignResult.enqueued) {
+			const res = await fetch(`/api/audio/${slug}/${chapter}`, {
+				method: 'POST',
+				headers: { 'Content-Type': 'application/json' },
+				body: JSON.stringify({ voice })
+			});
+			if (!res.ok) throw new Error(`Generation failed: HTTP ${res.status}`);
+
+			if (res.status === 200) {
+				// Already cached — body is { status: 'done' }, no url needed.
+				await res.body?.cancel();
+				await finishProgress();
+				const doneUrl = await tryPresign(slug, chapter, voice);
+				if (!doneUrl.ready) throw new Error('Audio generated but presign returned 404');
+				audioStore.audioUrl = doneUrl.url;
+				audioStore.status = 'ready';
+				maybeStartPrefetch();
+				return;
+			}
+			// 202: fall through to polling below.
+		}
+
+		// Poll until the runner finishes generating.
+		const final = await pollAudioStatus(slug, chapter, voice);
+		if (final.status === 'failed') {
+			throw new Error(
+				`Generation failed: ${(final as { error?: string }).error ?? 'unknown error'}`
+			);
+		}

 		await finishProgress();

 		// Audio is ready in MinIO — always use a presigned URL for direct playback.
 		const doneUrl = await tryPresign(slug, chapter, voice);
-		if (!doneUrl) throw new Error('Audio generated but presign returned 404');
-		audioStore.audioUrl = doneUrl;
+		if (!doneUrl.ready) throw new Error('Audio generated but presign returned 404');
+		audioStore.audioUrl = doneUrl.url;
 		audioStore.status = 'ready';
 		// Don't restore time for freshly generated audio — position is 0
 		// Immediately start pre-generating the next chapter in background.
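Net effect of the PresignResult change across both playback paths: a 202 from /api/presign/audio previously collapsed into the same null as a 404, so the client would POST /api/audio/{slug}/{n} anyway and could trigger the same TTS job twice. The enqueued flag records which side already triggered generation, and the prefetch path and the main path now converge on a single poll-then-presign tail.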
@@ -244,7 +244,7 @@
 					href="/catalogue"
 					class="hidden sm:block text-sm transition-colors {page.url.pathname.startsWith('/catalogue') ? 'text-zinc-100 font-medium' : 'text-zinc-400 hover:text-zinc-100'}"
 				>
-					Discover
+					Catalogue
 				</a>
 				<a
 					href="https://feedback.libnovel.cc"
@@ -332,7 +332,7 @@
 					onclick={() => (menuOpen = false)}
 					class="px-3 py-2.5 rounded-lg text-sm font-medium transition-colors {page.url.pathname.startsWith('/catalogue') ? 'bg-zinc-800 text-zinc-100' : 'text-zinc-400 hover:bg-zinc-800 hover:text-zinc-100'}"
 				>
-					Discover
+					Catalogue
 				</a>
 				<a
 					href="https://feedback.libnovel.cc"
@@ -400,7 +400,7 @@
 			<!-- Top row: site links -->
 			<nav class="flex flex-wrap items-center justify-center gap-x-5 gap-y-2">
 				<a href="/books" class="hover:text-zinc-400 transition-colors">Library</a>
-				<a href="/catalogue" class="hover:text-zinc-400 transition-colors">Discover</a>
+				<a href="/catalogue" class="hover:text-zinc-400 transition-colors">Catalogue</a>
 				<a
 					href="https://feedback.libnovel.cc"
 					target="_blank"
@@ -426,16 +426,26 @@
 				</svg>
 			</a>
 		</nav>
-		<!-- Bottom row: legal links + copyright -->
-		<div class="flex flex-wrap items-center justify-center gap-x-5 gap-y-2 text-zinc-700">
-			<a href="/disclaimer" class="hover:text-zinc-500 transition-colors">Disclaimer</a>
-			<a href="/privacy" class="hover:text-zinc-500 transition-colors">Privacy</a>
-			<a href="/dmca" class="hover:text-zinc-500 transition-colors">DMCA</a>
-			<span>© {new Date().getFullYear()} libnovel</span>
-			{#if env.PUBLIC_BUILD_VERSION && env.PUBLIC_BUILD_VERSION !== 'dev'}
-				<span class="text-zinc-800">{env.PUBLIC_BUILD_VERSION}+{env.PUBLIC_BUILD_COMMIT?.slice(0, 7)}</span>
+		<!-- Bottom row: legal links + copyright -->
+		<div class="flex flex-wrap items-center justify-center gap-x-5 gap-y-2 text-zinc-700">
+			<a href="/disclaimer" class="hover:text-zinc-500 transition-colors">Disclaimer</a>
+			<a href="/privacy" class="hover:text-zinc-500 transition-colors">Privacy</a>
+			<a href="/dmca" class="hover:text-zinc-500 transition-colors">DMCA</a>
+			<span>© {new Date().getFullYear()} libnovel</span>
+		</div>
+		<!-- Build version / commit SHA -->
+		<div class="text-xs tabular-nums font-mono px-2 py-0.5 rounded bg-zinc-800 border border-zinc-700">
+			{#if env.PUBLIC_BUILD_VERSION && env.PUBLIC_BUILD_VERSION !== 'dev'}
+				<span class="text-zinc-300" title="Build version">{env.PUBLIC_BUILD_VERSION}</span>
+				{#if env.PUBLIC_BUILD_COMMIT && env.PUBLIC_BUILD_COMMIT !== 'unknown'}
+					<span class="text-zinc-500 select-all" title="Commit SHA"
+						>+{env.PUBLIC_BUILD_COMMIT.slice(0, 7)}</span
+					>
+				{/if}
+			{:else}
+				<span class="text-zinc-400">dev</span>
 			{/if}
 		</div>
 	</div>
 </footer>
@@ -1,5 +1,6 @@
 <script lang="ts">
 	import { enhance } from '$app/forms';
+	import { goto } from '$app/navigation';
 	import { navigating } from '$app/state';
 	import { untrack } from 'svelte';
 	import type { PageData, ActionData } from './$types';
@@ -7,6 +8,29 @@

 	let { data, form }: { data: PageData; form: ActionData } = $props();

+	// ── Local filter state (mirrors URL params) ──────────────────────────────
+	// These are separate from data.* so we can bind them to selects and keep
+	// the DOM in sync. They sync back from data whenever a navigation completes.
+	let filterSort = $state(untrack(() => data.sort));
+	let filterGenre = $state(untrack(() => data.genre));
+	let filterStatus = $state(untrack(() => data.status));
+
+	// Keep local state in sync whenever SvelteKit re-runs the load (URL changed).
+	$effect(() => {
+		filterSort = data.sort;
+		filterGenre = data.genre;
+		filterStatus = data.status;
+	});
+
+	function navigateWithFilters(overrides: { sort?: string; genre?: string; status?: string }) {
+		const params = new URLSearchParams();
+		params.set('sort', overrides.sort ?? filterSort);
+		params.set('genre', overrides.genre ?? filterGenre);
+		params.set('status', overrides.status ?? filterStatus);
+		params.set('page', '1');
+		goto(`/catalogue?${params.toString()}`);
+	}
+
 	// Track which novel card is currently being navigated to
 	let loadingSlug = $state<string | null>(null);

@@ -389,11 +413,12 @@
 					<select
 						id="filter-sort"
 						name="sort"
-						value={data.sort}
+						bind:value={filterSort}
+						onchange={() => navigateWithFilters({ sort: filterSort })}
 						class="bg-zinc-900 border border-zinc-700 text-zinc-200 text-sm rounded px-3 py-2 focus:outline-none focus:border-amber-400 w-full"
 					>
 						{#each sorts as s}
-							<option value={s.value}>{s.label}</option>
+							<option value={s.value} selected={s.value === filterSort}>{s.label}</option>
 						{/each}
 					</select>
 				</div>
@@ -403,12 +428,13 @@
 					<select
 						id="filter-genre"
 						name="genre"
-						value={data.genre}
+						bind:value={filterGenre}
+						onchange={() => navigateWithFilters({ genre: filterGenre })}
 						disabled={isRankView}
 						class="bg-zinc-900 border border-zinc-700 text-zinc-200 text-sm rounded px-3 py-2 focus:outline-none focus:border-amber-400 disabled:opacity-40 disabled:cursor-not-allowed w-full"
 					>
 						{#each genres as g}
-							<option value={g.value}>{g.label}</option>
+							<option value={g.value} selected={g.value === filterGenre}>{g.label}</option>
 						{/each}
 					</select>
 				</div>
@@ -418,12 +444,13 @@
 					<select
 						id="filter-status"
 						name="status"
-						value={data.status}
+						bind:value={filterStatus}
+						onchange={() => navigateWithFilters({ status: filterStatus })}
 						disabled={isRankView}
 						class="bg-zinc-900 border border-zinc-700 text-zinc-200 text-sm rounded px-3 py-2 focus:outline-none focus:border-amber-400 disabled:opacity-40 disabled:cursor-not-allowed w-full"
 					>
 						{#each statuses as st}
-							<option value={st.value}>{st.label}</option>
+							<option value={st.value} selected={st.value === filterStatus}>{st.label}</option>
 						{/each}
 					</select>
 				</div>
@@ -437,13 +464,6 @@
 				<a href="/catalogue" class="px-4 py-2 rounded bg-zinc-700 text-zinc-300 text-sm hover:bg-zinc-600 transition-colors">
 					Reset
 				</a>
-				<button
-					type="submit"
-					onclick={() => (filtersOpen = false)}
-					class="px-4 py-2 rounded bg-amber-400 text-zinc-900 text-sm font-semibold hover:bg-amber-300 transition-colors"
-				>
-					Apply
-				</button>
 			</div>
 		</form>
 	{/if}