Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ffcdf5ee10 | ||
|
|
899c504d1f | ||
|
|
d82aa9d4b4 | ||
|
|
ae08382b81 | ||
|
|
b9f8008c2c | ||
|
|
d25cee3d8c | ||
|
|
48714cd98b | ||
|
|
1a2bf580cd | ||
|
|
2ca1ab2250 | ||
|
|
2571c243c9 |
14
AGENTS.md
14
AGENTS.md
@@ -47,11 +47,21 @@ Sub-directories have their own `AGENTS.md` with deeper context (e.g. `ios/AGENTS
|
||||
- `release.yaml` — runs on `v*` tags (build Docker images, upload source maps, create Gitea release)
|
||||
- Secrets: `DOCKER_USER`, `DOCKER_TOKEN`, `GITEA_TOKEN`, `GLITCHTIP_AUTH_TOKEN`
|
||||
|
||||
### Git credentials
|
||||
|
||||
Credentials are embedded in the remote URL — no `HOME=/root` or credential helper needed for push:
|
||||
|
||||
```
|
||||
https://kamil:95782641Apple%24@gitea.kalekber.cc/kamil/libnovel.git
|
||||
```
|
||||
|
||||
All git commands still use `HOME=/root` prefix for consistency (picks up `/root/.gitconfig` for user name/email), but push auth works without it.
|
||||
|
||||
### Releasing a new version
|
||||
|
||||
```bash
|
||||
git tag v2.5.X -m "Short title\n\nOptional longer body"
|
||||
git push origin v2.5.X
|
||||
HOME=/root git tag v2.6.X -m "Short title"
|
||||
HOME=/root git push origin v3-cleanup --tags
|
||||
```
|
||||
|
||||
CI will build all Docker images, upload source maps to GlitchTip, and create a Gitea release automatically.
|
||||
|
||||
@@ -189,6 +189,7 @@ func run() error {
|
||||
ChapterImageStore: store,
|
||||
Producer: producer,
|
||||
TaskReader: store,
|
||||
ImportFileStore: store,
|
||||
SearchIndex: searchIndex,
|
||||
Kokoro: kokoroClient,
|
||||
PocketTTS: pocketTTSClient,
|
||||
|
||||
@@ -192,21 +192,22 @@ func run() error {
|
||||
|
||||
deps := runner.Dependencies{
|
||||
Consumer: consumer,
|
||||
BookWriter: store,
|
||||
BookReader: store,
|
||||
AudioStore: store,
|
||||
CoverStore: store,
|
||||
TranslationStore: store,
|
||||
BookImport: storage.NewBookImporter(store),
|
||||
ChapterIngester: store,
|
||||
SearchIndex: searchIndex,
|
||||
Novel: novel,
|
||||
Kokoro: kokoroClient,
|
||||
PocketTTS: pocketTTSClient,
|
||||
CFAI: cfaiClient,
|
||||
LibreTranslate: ltClient,
|
||||
Notifier: store,
|
||||
Log: log,
|
||||
BookWriter: store,
|
||||
BookReader: store,
|
||||
AudioStore: store,
|
||||
CoverStore: store,
|
||||
TranslationStore: store,
|
||||
BookImport: storage.NewBookImporter(store),
|
||||
ImportChapterStore: store,
|
||||
ChapterIngester: store,
|
||||
SearchIndex: searchIndex,
|
||||
Novel: novel,
|
||||
Kokoro: kokoroClient,
|
||||
PocketTTS: pocketTTSClient,
|
||||
CFAI: cfaiClient,
|
||||
LibreTranslate: ltClient,
|
||||
Notifier: store,
|
||||
Log: log,
|
||||
}
|
||||
r := runner.New(rCfg, deps)
|
||||
|
||||
|
||||
@@ -3,7 +3,23 @@ module github.com/libnovel/backend
|
||||
go 1.26.1
|
||||
|
||||
require (
|
||||
github.com/getsentry/sentry-go v0.43.0
|
||||
github.com/hibiken/asynq v0.26.0
|
||||
github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d
|
||||
github.com/meilisearch/meilisearch-go v0.36.1
|
||||
github.com/minio/minio-go/v7 v7.0.98
|
||||
github.com/pdfcpu/pdfcpu v0.11.1
|
||||
github.com/prometheus/client_golang v1.23.2
|
||||
github.com/redis/go-redis/v9 v9.18.0
|
||||
github.com/yuin/goldmark v1.8.2
|
||||
go.opentelemetry.io/contrib/bridges/otelslog v0.17.0
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0
|
||||
go.opentelemetry.io/otel v1.42.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.18.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0
|
||||
go.opentelemetry.io/otel/log v0.18.0
|
||||
go.opentelemetry.io/otel/sdk v1.42.0
|
||||
go.opentelemetry.io/otel/sdk/log v0.18.0
|
||||
golang.org/x/net v0.51.0
|
||||
)
|
||||
|
||||
@@ -12,55 +28,45 @@ require (
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/clipperhouse/uax29/v2 v2.2.0 // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/felixge/httpsnoop v1.0.4 // indirect
|
||||
github.com/getsentry/sentry-go v0.43.0 // indirect
|
||||
github.com/go-ini/ini v1.67.0 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 // indirect
|
||||
github.com/hibiken/asynq v0.26.0 // indirect
|
||||
github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d // indirect
|
||||
github.com/hhrutter/lzw v1.0.0 // indirect
|
||||
github.com/hhrutter/pkcs7 v0.2.0 // indirect
|
||||
github.com/hhrutter/tiff v1.0.2 // indirect
|
||||
github.com/klauspost/compress v1.18.2 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
|
||||
github.com/klauspost/crc32 v1.3.0 // indirect
|
||||
github.com/meilisearch/meilisearch-go v0.36.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.19 // indirect
|
||||
github.com/minio/crc64nvme v1.1.1 // indirect
|
||||
github.com/minio/md5-simd v1.1.2 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/philhofer/fwd v1.2.0 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/prometheus/client_golang v1.23.2 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.66.1 // indirect
|
||||
github.com/prometheus/procfs v0.16.1 // indirect
|
||||
github.com/redis/go-redis/v9 v9.18.0 // indirect
|
||||
github.com/robfig/cron/v3 v3.0.1 // indirect
|
||||
github.com/rs/xid v1.6.0 // indirect
|
||||
github.com/spf13/cast v1.10.0 // indirect
|
||||
github.com/tinylib/msgp v1.6.1 // indirect
|
||||
github.com/yuin/goldmark v1.8.2 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/bridges/otelslog v0.17.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.67.0 // indirect
|
||||
go.opentelemetry.io/otel v1.42.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlplog/otlploghttp v0.18.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.42.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.42.0 // indirect
|
||||
go.opentelemetry.io/otel/log v0.18.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.42.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk v1.42.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/log v0.18.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.42.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.2 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.48.0 // indirect
|
||||
golang.org/x/image v0.32.0 // indirect
|
||||
golang.org/x/sys v0.41.0 // indirect
|
||||
golang.org/x/text v0.34.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
@@ -68,5 +74,5 @@ require (
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 // indirect
|
||||
google.golang.org/grpc v1.79.2 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
)
|
||||
|
||||
@@ -2,10 +2,16 @@ github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7X
|
||||
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
|
||||
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs=
|
||||
github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c=
|
||||
github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA=
|
||||
github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
|
||||
github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs=
|
||||
github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/clipperhouse/uax29/v2 v2.2.0 h1:ChwIKnQN3kcZteTXMgb1wztSgaU+ZemkgWdohwgs8tY=
|
||||
github.com/clipperhouse/uax29/v2 v2.2.0/go.mod h1:EFJ2TJMRUaplDxHKj1qAEhCtQPW2tJSwu5BF98AuoVM=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
@@ -14,8 +20,12 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/getsentry/sentry-go v0.43.0 h1:XbXLpFicpo8HmBDaInk7dum18G9KSLcjZiyUKS+hLW4=
|
||||
github.com/getsentry/sentry-go v0.43.0/go.mod h1:XDotiNZbgf5U8bPDUAfvcFmOnMQQceESxyKaObSssW0=
|
||||
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||
github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A=
|
||||
github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
|
||||
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
|
||||
@@ -25,10 +35,20 @@ github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1 h1:kYf81DTWFe7t+1VvL7eS+jKFVWaUnK9cB1qbwn63YCY=
|
||||
github.com/golang-jwt/jwt/v5 v5.3.1/go.mod h1:fxCRLWMO43lRc8nhHWY6LGqRcf+1gQWArsqaEUEa5bE=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
|
||||
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0 h1:HWRh5R2+9EifMyIHV7ZV+MIZqgz+PMpZ14Jynv3O2Zs=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.28.0/go.mod h1:JfhWUomR1baixubs02l85lZYYOm7LV6om4ceouMv45c=
|
||||
github.com/hhrutter/lzw v1.0.0 h1:laL89Llp86W3rRs83LvKbwYRx6INE8gDn0XNb1oXtm0=
|
||||
github.com/hhrutter/lzw v1.0.0/go.mod h1:2HC6DJSn/n6iAZfgM3Pg+cP1KxeWc3ezG8bBqW5+WEo=
|
||||
github.com/hhrutter/pkcs7 v0.2.0 h1:i4HN2XMbGQpZRnKBLsUwO3dSckzgX142TNqY/KfXg+I=
|
||||
github.com/hhrutter/pkcs7 v0.2.0/go.mod h1:aEzKz0+ZAlz7YaEMY47jDHL14hVWD6iXt0AgqgAvWgE=
|
||||
github.com/hhrutter/tiff v1.0.2 h1:7H3FQQpKu/i5WaSChoD1nnJbGx4MxU5TlNqqpxw55z8=
|
||||
github.com/hhrutter/tiff v1.0.2/go.mod h1:pcOeuK5loFUE7Y/WnzGw20YxUdnqjY1P0Jlcieb/cCw=
|
||||
github.com/hibiken/asynq v0.26.0 h1:1Zxr92MlDnb1Zt/QR5g2vSCqUS03i95lUfqx5X7/wrw=
|
||||
github.com/hibiken/asynq v0.26.0/go.mod h1:Qk4e57bTnWDoyJ67VkchuV6VzSM9IQW2nPvAGuDyw58=
|
||||
github.com/hibiken/asynq/x v0.0.0-20260203063626-d704b68a426d h1:Ld5m8EIK5QVOq/owOexKIbETij3skACg4eU1pArHsrw=
|
||||
@@ -40,6 +60,14 @@ github.com/klauspost/cpuid/v2 v2.2.11 h1:0OwqZRYI2rFrjS4kvkDnqJkKHdHaRnCm68/DY4O
|
||||
github.com/klauspost/cpuid/v2 v2.2.11/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0=
|
||||
github.com/klauspost/crc32 v1.3.0 h1:sSmTt3gUt81RP655XGZPElI0PelVTZ6YwCRnPSupoFM=
|
||||
github.com/klauspost/crc32 v1.3.0/go.mod h1:D7kQaZhnkX/Y0tstFGf8VUzv2UofNGqCjnC3zdHB0Hw=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
|
||||
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
|
||||
github.com/mattn/go-runewidth v0.0.19 h1:v++JhqYnZuu5jSKrk9RbgF5v4CGUjqRfBm05byFGLdw=
|
||||
github.com/mattn/go-runewidth v0.0.19/go.mod h1:XBkDxAl56ILZc9knddidhrOlY5R/pDhgLpndooCuJAs=
|
||||
github.com/meilisearch/meilisearch-go v0.36.1 h1:mJTCJE5g7tRvaqKco6DfqOuJEjX+rRltDEnkEC02Y0M=
|
||||
github.com/meilisearch/meilisearch-go v0.36.1/go.mod h1:hWcR0MuWLSzHfbz9GGzIr3s9rnXLm1jqkmHkJPbUSvM=
|
||||
github.com/minio/crc64nvme v1.1.1 h1:8dwx/Pz49suywbO+auHCBpCtlW1OfpcLN7wYgVR6wAI=
|
||||
@@ -50,42 +78,44 @@ github.com/minio/minio-go/v7 v7.0.98 h1:MeAVKjLVz+XJ28zFcuYyImNSAh8Mq725uNW4beRi
|
||||
github.com/minio/minio-go/v7 v7.0.98/go.mod h1:cY0Y+W7yozf0mdIclrttzo1Iiu7mEf9y7nk2uXqMOvM=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
|
||||
github.com/pdfcpu/pdfcpu v0.11.1 h1:htHBSkGH5jMKWC6e0sihBFbcKZ8vG1M67c8/dJxhjas=
|
||||
github.com/pdfcpu/pdfcpu v0.11.1/go.mod h1:pP3aGga7pRvwFWAm9WwFvo+V68DfANi9kxSQYioNYcw=
|
||||
github.com/philhofer/fwd v1.2.0 h1:e6DnBTl7vGY+Gz322/ASL4Gyp1FspeMvx1RNDoToZuM=
|
||||
github.com/philhofer/fwd v1.2.0/go.mod h1:RqIHx9QI14HlwKwm98g9Re5prTQ6LdeRQn+gXJFxsJM=
|
||||
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
|
||||
github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y=
|
||||
github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE=
|
||||
github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o=
|
||||
github.com/prometheus/client_golang v1.23.2/go.mod h1:Tb1a6LWHB3/SPIzCoaDXI4I8UHKeFTEQ1YCr+0Gyqmg=
|
||||
github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
|
||||
github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
|
||||
github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk=
|
||||
github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE=
|
||||
github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc=
|
||||
github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8=
|
||||
github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9ZoGs=
|
||||
github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA=
|
||||
github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc=
|
||||
github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk=
|
||||
github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg=
|
||||
github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is=
|
||||
github.com/redis/go-redis/v9 v9.18.0 h1:pMkxYPkEbMPwRdenAzUNyFNrDgHx9U+DrBabWNfSRQs=
|
||||
github.com/redis/go-redis/v9 v9.18.0/go.mod h1:k3ufPphLU5YXwNTUcCRXGxUoF1fqxnhFQmscfkCoDA0=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.14.1 h1:UQB4HGPB6osV0SQTLymcB4TgvyWu6ZyliaW0tI/otEQ=
|
||||
github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7so1lCWt35ZSgc=
|
||||
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
||||
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
|
||||
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/tinylib/msgp v1.6.1 h1:ESRv8eL3u+DNHUoSAAQRE50Hm162zqAnBoGv9PzScPY=
|
||||
github.com/tinylib/msgp v1.6.1/go.mod h1:RSp0LW9oSxFut3KzESt5Voq4GVWyS+PSulT77roAqEA=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/yuin/goldmark v1.8.2 h1:kEGpgqJXdgbkhcOgBxkC0X0PmoPG1ZyoZ117rDVp4zE=
|
||||
github.com/yuin/goldmark v1.8.2/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
|
||||
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 h1:jXsnJ4Lmnqd11kwkBV2LgLoFMZKizbCi5fNZ/ipaZ64=
|
||||
go.opentelemetry.io/auto/sdk v1.2.1/go.mod h1:KRTj+aOaElaLi+wW1kO/DZRXwkF4C5xPbEe3ZiIhN7Y=
|
||||
go.opentelemetry.io/contrib/bridges/otelslog v0.17.0 h1:NFIS6x7wyObQ7cR84x7bt1sr8nYBx89s3x3GwRjw40k=
|
||||
@@ -108,18 +138,26 @@ go.opentelemetry.io/otel/sdk v1.42.0 h1:LyC8+jqk6UJwdrI/8VydAq/hvkFKNHZVIWuslJXY
|
||||
go.opentelemetry.io/otel/sdk v1.42.0/go.mod h1:rGHCAxd9DAph0joO4W6OPwxjNTYWghRWmkHuGbayMts=
|
||||
go.opentelemetry.io/otel/sdk/log v0.18.0 h1:n8OyZr7t7otkeTnPTbDNom6rW16TBYGtvyy2Gk6buQw=
|
||||
go.opentelemetry.io/otel/sdk/log v0.18.0/go.mod h1:C0+wxkTwKpOCZLrlJ3pewPiiQwpzycPI/u6W0Z9fuYk=
|
||||
go.opentelemetry.io/otel/sdk/log/logtest v0.18.0 h1:l3mYuPsuBx6UKE47BVcPrZoZ0q/KER57vbj2qkgDLXA=
|
||||
go.opentelemetry.io/otel/sdk/log/logtest v0.18.0/go.mod h1:7cHtiVJpZebB3wybTa4NG+FUo5NPe3PROz1FqB0+qdw=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.42.0 h1:D/1QR46Clz6ajyZ3G8SgNlTJKBdGp84q9RKCAZ3YGuA=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.42.0/go.mod h1:Ua6AAlDKdZ7tdvaQKfSmnFTdHx37+J4ba8MwVCYM5hc=
|
||||
go.opentelemetry.io/otel/trace v1.42.0 h1:OUCgIPt+mzOnaUTpOQcBiM/PLQ/Op7oq6g4LenLmOYY=
|
||||
go.opentelemetry.io/otel/trace v1.42.0/go.mod h1:f3K9S+IFqnumBkKhRJMeaZeNk9epyhnCmQh/EysQCdc=
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 h1:l706jCMITVouPOqEnii2fIAuO3IVGBRPV5ICjceRb/A=
|
||||
go.opentelemetry.io/proto/otlp v1.9.0/go.mod h1:xE+Cx5E/eEHw+ISFkwPLwCZefwVjY+pqKg1qcK03+/4=
|
||||
go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE=
|
||||
go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI=
|
||||
go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU=
|
||||
go.yaml.in/yaml/v3 v3.0.4 h1:tfq32ie2Jv2UxXFdLJdh3jXuOzWiL1fo0bu/FbuKpbc=
|
||||
go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg=
|
||||
golang.org/x/crypto v0.48.0 h1:/VRzVqiRSggnhY7gNRxPauEQ5Drw9haKdM0jqfcCFts=
|
||||
golang.org/x/crypto v0.48.0/go.mod h1:r0kV5h3qnFPlQnBSrULhlsRfryS2pmewsg+XfMgkVos=
|
||||
golang.org/x/image v0.32.0 h1:6lZQWq75h7L5IWNk0r+SCpUJ6tUVd3v4ZHnbRKLkUDQ=
|
||||
golang.org/x/image v0.32.0/go.mod h1:/R37rrQmKXtO6tYXAjtDLwQgFLHmhW+V6ayXlxzP2Pc=
|
||||
golang.org/x/net v0.51.0 h1:94R/GTO7mt3/4wIKpcR5gkGmRLOuE/2hNGeWq/GBIFo=
|
||||
golang.org/x/net v0.51.0/go.mod h1:aamm+2QF5ogm02fjy5Bb7CQ0WMt1/WVM7FtyaTLlA9Y=
|
||||
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
|
||||
@@ -128,6 +166,8 @@ golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
|
||||
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
|
||||
golang.org/x/time v0.14.0 h1:MRx4UaLrDotUKUdCIqzPC48t1Y9hANFKIRpNx+Te8PI=
|
||||
golang.org/x/time v0.14.0/go.mod h1:eL/Oa2bBBK0TkX57Fyni+NgnyQQN4LitPmob2Hjnqw4=
|
||||
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
|
||||
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57 h1:JLQynH/LBHfCTSbDWl+py8C+Rg/k1OVH3xfcaiANuF0=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20260209200024-4cfbd4190f57/go.mod h1:kSJwQxqmFXeo79zOmbrALdflXQeAYcUbgS7PbpMknCY=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20260209200024-4cfbd4190f57 h1:mWPCjDEyshlQYzBpMNHaEof6UX1PmHcaUODUywQ0uac=
|
||||
@@ -136,8 +176,10 @@ google.golang.org/grpc v1.79.2 h1:fRMD94s2tITpyJGtBBn7MkMseNpOZU8ZxgC3MMBaXRU=
|
||||
google.golang.org/grpc v1.79.2/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
|
||||
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
|
||||
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
||||
@@ -7,6 +7,7 @@ import (
|
||||
"log/slog"
|
||||
|
||||
"github.com/hibiken/asynq"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/taskqueue"
|
||||
)
|
||||
|
||||
@@ -88,18 +89,19 @@ func (p *Producer) CreateTranslationTask(ctx context.Context, slug string, chapt
|
||||
}
|
||||
|
||||
// CreateImportTask creates a PocketBase record then enqueues an Asynq job for PDF/EPUB import.
|
||||
func (p *Producer) CreateImportTask(ctx context.Context, slug, title, fileType, objectKey, initiatorUserID string) (string, error) {
|
||||
id, err := p.pb.CreateImportTask(ctx, slug, title, fileType, objectKey, initiatorUserID)
|
||||
func (p *Producer) CreateImportTask(ctx context.Context, task domain.ImportTask) (string, error) {
|
||||
id, err := p.pb.CreateImportTask(ctx, task)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
payload := ImportPayload{
|
||||
PBTaskID: id,
|
||||
Slug: slug,
|
||||
Title: title,
|
||||
FileType: fileType,
|
||||
ObjectKey: objectKey,
|
||||
PBTaskID: id,
|
||||
Slug: task.Slug,
|
||||
Title: task.Title,
|
||||
FileType: task.FileType,
|
||||
ObjectKey: task.ObjectKey,
|
||||
ChaptersKey: task.ChaptersKey,
|
||||
}
|
||||
if err := p.enqueue(ctx, TypeImportBook, payload); err != nil {
|
||||
// Non-fatal: PB record exists; runner will pick it up on next poll.
|
||||
|
||||
@@ -48,9 +48,10 @@ type ScrapePayload struct {
|
||||
|
||||
// ImportPayload is the Asynq job payload for PDF/EPUB import tasks.
|
||||
type ImportPayload struct {
|
||||
PBTaskID string `json:"pb_task_id"`
|
||||
Slug string `json:"slug"`
|
||||
Title string `json:"title"`
|
||||
FileType string `json:"file_type"` // "pdf" or "epub"
|
||||
ObjectKey string `json:"object_key"` // MinIO path to uploaded file
|
||||
PBTaskID string `json:"pb_task_id"`
|
||||
Slug string `json:"slug"`
|
||||
Title string `json:"title"`
|
||||
FileType string `json:"file_type"` // "pdf" or "epub"
|
||||
ObjectKey string `json:"object_key"` // MinIO path to uploaded file
|
||||
ChaptersKey string `json:"chapters_key"` // MinIO path to pre-parsed chapters JSON
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
@@ -9,14 +10,20 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
"github.com/libnovel/backend/internal/storage"
|
||||
)
|
||||
|
||||
type importRequest struct {
|
||||
Title string `json:"title"`
|
||||
FileName string `json:"file_name"`
|
||||
FileType string `json:"file_type"` // "pdf" or "epub"
|
||||
ObjectKey string `json:"object_key"` // MinIO path to uploaded file
|
||||
Title string `json:"title"`
|
||||
Author string `json:"author"`
|
||||
CoverURL string `json:"cover_url"`
|
||||
Genres []string `json:"genres"`
|
||||
Summary string `json:"summary"`
|
||||
BookStatus string `json:"book_status"` // "ongoing" | "completed" | "hiatus"
|
||||
FileName string `json:"file_name"`
|
||||
FileType string `json:"file_type"` // "pdf" or "epub"
|
||||
ObjectKey string `json:"object_key"` // MinIO path to uploaded file
|
||||
}
|
||||
|
||||
type importResponse struct {
|
||||
@@ -39,6 +46,8 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
|
||||
ct := r.Header.Get("Content-Type")
|
||||
var req importRequest
|
||||
var objectKey string
|
||||
var chaptersKey string
|
||||
var chapterCount int
|
||||
|
||||
if strings.HasPrefix(ct, "multipart/form-data") {
|
||||
if err := r.ParseMultipartForm(32 << 20); err != nil {
|
||||
@@ -46,6 +55,17 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
req.Title = r.FormValue("title")
|
||||
req.Author = r.FormValue("author")
|
||||
req.CoverURL = r.FormValue("cover_url")
|
||||
req.Summary = r.FormValue("summary")
|
||||
req.BookStatus = r.FormValue("book_status")
|
||||
if g := r.FormValue("genres"); g != "" {
|
||||
for _, s := range strings.Split(g, ",") {
|
||||
if s = strings.TrimSpace(s); s != "" {
|
||||
req.Genres = append(req.Genres, s)
|
||||
}
|
||||
}
|
||||
}
|
||||
req.FileName = r.FormValue("file_name")
|
||||
req.FileType = r.FormValue("file_type")
|
||||
analyzeOnly := r.FormValue("analyze") == "true"
|
||||
@@ -79,17 +99,38 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
// Upload to MinIO for actual import
|
||||
// Parse PDF/EPUB on the backend (with timeout) and store chapters as JSON.
|
||||
// The runner only needs to ingest pre-parsed chapters — no PDF parsing on runner.
|
||||
parseCtx, parseCancel := context.WithTimeout(r.Context(), 3*time.Minute)
|
||||
defer parseCancel()
|
||||
chapters, parseErr := storage.ParseImportFile(parseCtx, data, req.FileType)
|
||||
if parseErr != nil || len(chapters) == 0 {
|
||||
jsonError(w, http.StatusUnprocessableEntity, "could not parse file: "+func() string {
|
||||
if parseErr != nil { return parseErr.Error() }
|
||||
return "no chapters found"
|
||||
}())
|
||||
return
|
||||
}
|
||||
|
||||
// Store raw file in MinIO (for reference/re-import).
|
||||
objectKey = fmt.Sprintf("imports/%d_%s", time.Now().Unix(), header.Filename)
|
||||
store, ok := s.deps.Producer.(*storage.Store)
|
||||
if !ok {
|
||||
if s.deps.ImportFileStore == nil {
|
||||
jsonError(w, http.StatusInternalServerError, "storage not available")
|
||||
return
|
||||
}
|
||||
if err := store.PutImportFile(r.Context(), objectKey, data); err != nil {
|
||||
if err := s.deps.ImportFileStore.PutImportFile(r.Context(), objectKey, data); err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "upload file: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
// Store pre-parsed chapters JSON in MinIO so runner can ingest without re-parsing.
|
||||
chaptersJSON, _ := json.Marshal(chapters)
|
||||
chaptersKey = fmt.Sprintf("imports/%d_%s_chapters.json", time.Now().Unix(), strings.TrimSuffix(header.Filename, filepath.Ext(header.Filename)))
|
||||
if err := s.deps.ImportFileStore.PutImportChapters(r.Context(), chaptersKey, chaptersJSON); err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "store chapters: "+err.Error())
|
||||
return
|
||||
}
|
||||
chapterCount = len(chapters)
|
||||
} else {
|
||||
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
||||
jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
|
||||
@@ -115,28 +156,43 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
|
||||
return -1
|
||||
}, slug)
|
||||
|
||||
taskID, err := s.deps.Producer.CreateImportTask(r.Context(), slug, req.Title, req.FileType, objectKey, "")
|
||||
taskID, err := s.deps.Producer.CreateImportTask(r.Context(), domain.ImportTask{
|
||||
Slug: slug,
|
||||
Title: req.Title,
|
||||
Author: req.Author,
|
||||
CoverURL: req.CoverURL,
|
||||
Genres: req.Genres,
|
||||
Summary: req.Summary,
|
||||
BookStatus: req.BookStatus,
|
||||
FileType: req.FileType,
|
||||
ObjectKey: objectKey,
|
||||
ChaptersKey: chaptersKey,
|
||||
ChaptersTotal: chapterCount,
|
||||
InitiatorUserID: "",
|
||||
})
|
||||
if err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "create import task: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
writeJSON(w, 0, importResponse{
|
||||
TaskID: taskID,
|
||||
Slug: slug,
|
||||
TaskID: taskID,
|
||||
Slug: slug,
|
||||
Preview: &importPreview{Chapters: chapterCount},
|
||||
})
|
||||
}
|
||||
|
||||
// analyzeImportFile does a quick scan of the file to count chapters.
|
||||
// This is a placeholder - real implementation would parse PDF/EPUB properly.
|
||||
// analyzeImportFile parses the file to count chapters and extract preview lines.
|
||||
func analyzeImportFile(data []byte, fileType string) *importPreview {
|
||||
// TODO: Implement actual PDF/EPUB parsing to count chapters
|
||||
// For now, estimate based on file size
|
||||
preview := &importPreview{
|
||||
Chapters: estimateChapters(data, fileType),
|
||||
FirstLines: []string{},
|
||||
count, firstLines, err := storage.AnalyzeFile(data, fileType)
|
||||
if err != nil || count == 0 {
|
||||
// Fall back to rough size estimate so the UI still shows something
|
||||
count = estimateChapters(data, fileType)
|
||||
}
|
||||
return &importPreview{
|
||||
Chapters: count,
|
||||
FirstLines: firstLines,
|
||||
}
|
||||
return preview
|
||||
}
|
||||
|
||||
func estimateChapters(data []byte, fileType string) int {
|
||||
|
||||
@@ -7,6 +7,63 @@ import (
|
||||
"github.com/libnovel/backend/internal/storage"
|
||||
)
|
||||
|
||||
// handleDismissNotification handles DELETE /api/notifications/{id}.
|
||||
func (s *Server) handleDismissNotification(w http.ResponseWriter, r *http.Request) {
|
||||
id := r.PathValue("id")
|
||||
if id == "" {
|
||||
jsonError(w, http.StatusBadRequest, "notification id required")
|
||||
return
|
||||
}
|
||||
store, ok := s.deps.Producer.(*storage.Store)
|
||||
if !ok {
|
||||
jsonError(w, http.StatusInternalServerError, "storage not available")
|
||||
return
|
||||
}
|
||||
if err := store.DeleteNotification(r.Context(), id); err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "dismiss notification: "+err.Error())
|
||||
return
|
||||
}
|
||||
writeJSON(w, 0, map[string]any{"success": true})
|
||||
}
|
||||
|
||||
// handleClearAllNotifications handles DELETE /api/notifications?user_id=...
|
||||
func (s *Server) handleClearAllNotifications(w http.ResponseWriter, r *http.Request) {
|
||||
userID := r.URL.Query().Get("user_id")
|
||||
if userID == "" {
|
||||
jsonError(w, http.StatusBadRequest, "user_id required")
|
||||
return
|
||||
}
|
||||
store, ok := s.deps.Producer.(*storage.Store)
|
||||
if !ok {
|
||||
jsonError(w, http.StatusInternalServerError, "storage not available")
|
||||
return
|
||||
}
|
||||
if err := store.ClearAllNotifications(r.Context(), userID); err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "clear notifications: "+err.Error())
|
||||
return
|
||||
}
|
||||
writeJSON(w, 0, map[string]any{"success": true})
|
||||
}
|
||||
|
||||
// handleMarkAllNotificationsRead handles PATCH /api/notifications?user_id=...
|
||||
func (s *Server) handleMarkAllNotificationsRead(w http.ResponseWriter, r *http.Request) {
|
||||
userID := r.URL.Query().Get("user_id")
|
||||
if userID == "" {
|
||||
jsonError(w, http.StatusBadRequest, "user_id required")
|
||||
return
|
||||
}
|
||||
store, ok := s.deps.Producer.(*storage.Store)
|
||||
if !ok {
|
||||
jsonError(w, http.StatusInternalServerError, "storage not available")
|
||||
return
|
||||
}
|
||||
if err := store.MarkAllNotificationsRead(r.Context(), userID); err != nil {
|
||||
jsonError(w, http.StatusInternalServerError, "mark all read: "+err.Error())
|
||||
return
|
||||
}
|
||||
writeJSON(w, 0, map[string]any{"success": true})
|
||||
}
|
||||
|
||||
type notification struct {
|
||||
ID string `json:"id"`
|
||||
UserID string `json:"user_id"`
|
||||
|
||||
@@ -85,6 +85,9 @@ type Dependencies struct {
|
||||
// BookWriter writes book metadata and chapter refs to PocketBase.
|
||||
// Used by admin text-gen apply endpoints.
|
||||
BookWriter bookstore.BookWriter
|
||||
// ImportFileStore uploads raw PDF/EPUB files to MinIO for the runner to process.
|
||||
// Always wired to the concrete *storage.Store (not the Asynq wrapper).
|
||||
ImportFileStore bookstore.ImportFileStore
|
||||
// AIJobStore tracks long-running AI generation jobs in PocketBase.
|
||||
// If nil, job persistence is disabled (jobs still run but are not recorded).
|
||||
AIJobStore bookstore.AIJobStore
|
||||
@@ -251,7 +254,10 @@ func (s *Server) ListenAndServe(ctx context.Context) error {
|
||||
|
||||
// Notifications
|
||||
mux.HandleFunc("GET /api/notifications", s.handleListNotifications)
|
||||
mux.HandleFunc("PATCH /api/notifications", s.handleMarkAllNotificationsRead)
|
||||
mux.HandleFunc("PATCH /api/notifications/{id}", s.handleMarkNotificationRead)
|
||||
mux.HandleFunc("DELETE /api/notifications", s.handleClearAllNotifications)
|
||||
mux.HandleFunc("DELETE /api/notifications/{id}", s.handleDismissNotification)
|
||||
|
||||
// Voices list
|
||||
mux.HandleFunc("GET /api/voices", s.handleVoices)
|
||||
|
||||
@@ -215,3 +215,14 @@ type BookImporter interface {
|
||||
// Returns the extracted chapters or an error.
|
||||
Import(ctx context.Context, objectKey, fileType string) ([]Chapter, error)
|
||||
}
|
||||
|
||||
// ImportFileStore uploads raw import files to object storage.
|
||||
// Kept separate from BookImporter so the HTTP handler can upload the file
|
||||
// without a concrete type assertion, regardless of which Producer is wired.
|
||||
type ImportFileStore interface {
|
||||
PutImportFile(ctx context.Context, objectKey string, data []byte) error
|
||||
// PutImportChapters stores the pre-parsed chapters JSON under the given key.
|
||||
PutImportChapters(ctx context.Context, key string, data []byte) error
|
||||
// GetImportChapters retrieves the pre-parsed chapters JSON.
|
||||
GetImportChapters(ctx context.Context, key string) ([]byte, error)
|
||||
}
|
||||
|
||||
@@ -177,6 +177,13 @@ type ImportTask struct {
|
||||
Title string `json:"title"`
|
||||
FileName string `json:"file_name"`
|
||||
FileType string `json:"file_type"` // "pdf" or "epub"
|
||||
ObjectKey string `json:"object_key,omitempty"` // MinIO path to uploaded file
|
||||
ChaptersKey string `json:"chapters_key,omitempty"` // MinIO path to pre-parsed chapters JSON
|
||||
Author string `json:"author,omitempty"`
|
||||
CoverURL string `json:"cover_url,omitempty"`
|
||||
Genres []string `json:"genres,omitempty"`
|
||||
Summary string `json:"summary,omitempty"`
|
||||
BookStatus string `json:"book_status,omitempty"` // "ongoing" | "completed" | "hiatus"
|
||||
WorkerID string `json:"worker_id,omitempty"`
|
||||
InitiatorUserID string `json:"initiator_user_id,omitempty"` // PocketBase user ID who submitted the import
|
||||
Status TaskStatus `json:"status"`
|
||||
|
||||
@@ -199,10 +199,11 @@ func (r *Runner) handleImportTask(ctx context.Context, t *asynq.Task) error {
|
||||
return fmt.Errorf("unmarshal import payload: %w", err)
|
||||
}
|
||||
task := domain.ImportTask{
|
||||
ID: p.PBTaskID,
|
||||
Slug: p.Slug,
|
||||
Title: p.Title,
|
||||
FileType: p.FileType,
|
||||
ID: p.PBTaskID,
|
||||
Slug: p.Slug,
|
||||
Title: p.Title,
|
||||
FileType: p.FileType,
|
||||
ChaptersKey: p.ChaptersKey,
|
||||
}
|
||||
r.tasksRunning.Add(1)
|
||||
defer r.tasksRunning.Add(-1)
|
||||
|
||||
@@ -15,6 +15,7 @@ package runner
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"log/slog"
|
||||
"os"
|
||||
@@ -49,6 +50,11 @@ type ChapterIngester interface {
|
||||
IngestChapters(ctx context.Context, slug string, chapters []bookstore.Chapter) error
|
||||
}
|
||||
|
||||
// ImportChapterStore retrieves pre-parsed chapter JSON blobs from object storage.
|
||||
type ImportChapterStore interface {
|
||||
GetImportChapters(ctx context.Context, key string) ([]byte, error)
|
||||
}
|
||||
|
||||
// Config tunes the runner behaviour.
|
||||
type Config struct {
|
||||
// WorkerID uniquely identifies this runner instance in PocketBase records.
|
||||
@@ -114,7 +120,12 @@ type Dependencies struct {
|
||||
// CoverStore stores book cover images in MinIO.
|
||||
CoverStore bookstore.CoverStore
|
||||
// BookImport handles PDF/EPUB file parsing and chapter extraction.
|
||||
// Kept for backward compatibility when ChaptersKey is not set.
|
||||
BookImport bookstore.BookImporter
|
||||
// ImportChapterStore retrieves pre-parsed chapter JSON blobs from MinIO.
|
||||
// When set and the task has a ChaptersKey, the runner reads from here
|
||||
// instead of calling BookImport.Import() (the new preferred path).
|
||||
ImportChapterStore ImportChapterStore
|
||||
// ChapterIngester persists extracted chapters into MinIO/PocketBase.
|
||||
ChapterIngester ChapterIngester
|
||||
// Notifier creates notifications for users.
|
||||
@@ -432,9 +443,7 @@ importLoop:
|
||||
defer wg.Done()
|
||||
defer func() { <-importSem }()
|
||||
defer r.tasksRunning.Add(-1)
|
||||
// Import tasks need object key - we'll need to fetch it from the task record
|
||||
// For now, assume it's stored in a field or we need to add it
|
||||
r.runImportTask(ctx, t, "")
|
||||
r.runImportTask(ctx, t, t.ObjectKey)
|
||||
}(task)
|
||||
}
|
||||
}
|
||||
@@ -503,9 +512,21 @@ func (r *Runner) runScrapeTask(ctx context.Context, task domain.ScrapeTask) {
|
||||
if result.ErrorMessage != "" {
|
||||
r.tasksFailed.Add(1)
|
||||
span.SetStatus(codes.Error, result.ErrorMessage)
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Scrape Failed",
|
||||
fmt.Sprintf("Scrape task (%s) failed: %s", task.Kind, result.ErrorMessage),
|
||||
"/admin/tasks")
|
||||
}
|
||||
} else {
|
||||
r.tasksCompleted.Add(1)
|
||||
span.SetStatus(codes.Ok, "")
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Scrape Complete",
|
||||
fmt.Sprintf("Scraped %d chapters, skipped %d (%s)", result.ChaptersScraped, result.ChaptersSkipped, task.Kind),
|
||||
"/admin/tasks")
|
||||
}
|
||||
}
|
||||
|
||||
log.Info("runner: scrape task finished",
|
||||
@@ -585,6 +606,12 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
|
||||
if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
|
||||
log.Error("runner: FinishAudioTask failed", "err", err)
|
||||
}
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Audio Failed",
|
||||
fmt.Sprintf("Ch.%d of %s (%s): %s", task.Chapter, task.Slug, task.Voice, msg),
|
||||
fmt.Sprintf("/books/%s", task.Slug))
|
||||
}
|
||||
}
|
||||
|
||||
raw, err := r.deps.BookReader.ReadChapter(ctx, task.Slug, task.Chapter)
|
||||
@@ -649,10 +676,20 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
|
||||
if err := r.deps.Consumer.FinishAudioTask(ctx, task.ID, result); err != nil {
|
||||
log.Error("runner: FinishAudioTask failed", "err", err)
|
||||
}
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Audio Ready",
|
||||
fmt.Sprintf("Ch.%d of %s (%s) is ready", task.Chapter, task.Slug, task.Voice),
|
||||
fmt.Sprintf("/books/%s", task.Slug))
|
||||
}
|
||||
log.Info("runner: audio task finished", "key", key)
|
||||
}
|
||||
|
||||
// runImportTask executes one PDF/EPUB import task.
|
||||
// Preferred path: when task.ChaptersKey is set, it reads pre-parsed chapters
|
||||
// JSON from MinIO (written by the backend at upload time) and ingests them.
|
||||
// Fallback path: when ChaptersKey is empty, calls BookImport.Import() to
|
||||
// parse the raw file on the runner (legacy behaviour, not used for new tasks).
|
||||
func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, objectKey string) {
|
||||
ctx, span := otel.Tracer("runner").Start(ctx, "runner.import_task")
|
||||
defer span.End()
|
||||
@@ -660,10 +697,11 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
|
||||
attribute.String("task.id", task.ID),
|
||||
attribute.String("book.slug", task.Slug),
|
||||
attribute.String("file.type", task.FileType),
|
||||
attribute.String("chapters_key", task.ChaptersKey),
|
||||
)
|
||||
|
||||
log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "file_type", task.FileType)
|
||||
log.Info("runner: import task starting")
|
||||
log.Info("runner: import task starting", "chapters_key", task.ChaptersKey)
|
||||
|
||||
hbCtx, hbCancel := context.WithCancel(ctx)
|
||||
defer hbCancel()
|
||||
@@ -692,15 +730,33 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
|
||||
}
|
||||
}
|
||||
|
||||
if r.deps.BookImport == nil {
|
||||
fail("book import not configured (BookImport dependency missing)")
|
||||
return
|
||||
}
|
||||
var chapters []bookstore.Chapter
|
||||
|
||||
chapters, err := r.deps.BookImport.Import(ctx, objectKey, task.FileType)
|
||||
if err != nil {
|
||||
fail(fmt.Sprintf("import file: %v", err))
|
||||
return
|
||||
if task.ChaptersKey != "" && r.deps.ImportChapterStore != nil {
|
||||
// New path: read pre-parsed chapters JSON uploaded by the backend.
|
||||
raw, err := r.deps.ImportChapterStore.GetImportChapters(ctx, task.ChaptersKey)
|
||||
if err != nil {
|
||||
fail(fmt.Sprintf("get chapters JSON: %v", err))
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(raw, &chapters); err != nil {
|
||||
fail(fmt.Sprintf("unmarshal chapters JSON: %v", err))
|
||||
return
|
||||
}
|
||||
log.Info("runner: loaded pre-parsed chapters", "count", len(chapters))
|
||||
} else {
|
||||
// Legacy path: parse the raw file on the runner.
|
||||
if r.deps.BookImport == nil {
|
||||
fail("book import not configured (BookImport dependency missing)")
|
||||
return
|
||||
}
|
||||
var err error
|
||||
chapters, err = r.deps.BookImport.Import(ctx, objectKey, task.FileType)
|
||||
if err != nil {
|
||||
fail(fmt.Sprintf("import file: %v", err))
|
||||
return
|
||||
}
|
||||
log.Info("runner: parsed chapters from file (legacy path)", "count", len(chapters))
|
||||
}
|
||||
|
||||
if len(chapters) == 0 {
|
||||
@@ -708,27 +764,41 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
|
||||
return
|
||||
}
|
||||
|
||||
// Store chapters via BookWriter
|
||||
// Note: BookWriter.WriteChapters expects domain.Chapter, need conversion
|
||||
var domainChapters []bookstore.Chapter
|
||||
for _, ch := range chapters {
|
||||
domainChapters = append(domainChapters, bookstore.Chapter{
|
||||
Number: ch.Number,
|
||||
Title: ch.Title,
|
||||
Content: ch.Content,
|
||||
})
|
||||
}
|
||||
|
||||
// Store chapters via ChapterIngester
|
||||
// Persist chapters via ChapterIngester.
|
||||
if r.deps.ChapterIngester == nil {
|
||||
fail("chapter ingester not configured")
|
||||
return
|
||||
}
|
||||
if err := r.deps.ChapterIngester.IngestChapters(ctx, task.Slug, domainChapters); err != nil {
|
||||
if err := r.deps.ChapterIngester.IngestChapters(ctx, task.Slug, chapters); err != nil {
|
||||
fail(fmt.Sprintf("store chapters: %v", err))
|
||||
return
|
||||
}
|
||||
|
||||
// Write book metadata so the book appears in PocketBase catalogue.
|
||||
if r.deps.BookWriter != nil {
|
||||
meta := domain.BookMeta{
|
||||
Slug: task.Slug,
|
||||
Title: task.Title,
|
||||
Author: task.Author,
|
||||
Cover: task.CoverURL,
|
||||
Status: task.BookStatus,
|
||||
Genres: task.Genres,
|
||||
Summary: task.Summary,
|
||||
TotalChapters: len(chapters),
|
||||
}
|
||||
if meta.Status == "" {
|
||||
meta.Status = "completed"
|
||||
}
|
||||
if err := r.deps.BookWriter.WriteMetadata(ctx, meta); err != nil {
|
||||
log.Warn("runner: import task WriteMetadata failed (non-fatal)", "err", err)
|
||||
} else {
|
||||
// Index in Meilisearch so the book is searchable.
|
||||
if err := r.deps.SearchIndex.UpsertBook(ctx, meta); err != nil {
|
||||
log.Warn("runner: import task meilisearch upsert failed (non-fatal)", "err", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
r.tasksCompleted.Add(1)
|
||||
span.SetStatus(codes.Ok, "")
|
||||
result := domain.ImportResult{
|
||||
@@ -739,7 +809,7 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
|
||||
log.Error("runner: FinishImportTask failed", "err", err)
|
||||
}
|
||||
|
||||
// Create notification for the user who initiated the import
|
||||
// Notify the user who initiated the import.
|
||||
if r.deps.Notifier != nil {
|
||||
msg := fmt.Sprintf("Import completed: %d chapters from %s", len(chapters), task.Title)
|
||||
targetUser := task.InitiatorUserID
|
||||
|
||||
@@ -53,6 +53,12 @@ func (r *Runner) runTranslationTask(ctx context.Context, task domain.Translation
|
||||
if err := r.deps.Consumer.FinishTranslationTask(ctx, task.ID, result); err != nil {
|
||||
log.Error("runner: FinishTranslationTask failed", "err", err)
|
||||
}
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Translation Failed",
|
||||
fmt.Sprintf("Ch.%d of %s (%s): %s", task.Chapter, task.Slug, task.Lang, msg),
|
||||
fmt.Sprintf("/books/%s", task.Slug))
|
||||
}
|
||||
}
|
||||
|
||||
// Guard: LibreTranslate must be configured.
|
||||
@@ -93,5 +99,11 @@ func (r *Runner) runTranslationTask(ctx context.Context, task domain.Translation
|
||||
if err := r.deps.Consumer.FinishTranslationTask(ctx, task.ID, result); err != nil {
|
||||
log.Error("runner: FinishTranslationTask failed", "err", err)
|
||||
}
|
||||
if r.deps.Notifier != nil {
|
||||
_ = r.deps.Notifier.CreateNotification(ctx, "admin",
|
||||
"Translation Ready",
|
||||
fmt.Sprintf("Ch.%d of %s translated to %s", task.Chapter, task.Slug, task.Lang),
|
||||
fmt.Sprintf("/books/%s", task.Slug))
|
||||
}
|
||||
log.Info("runner: translation task finished", "key", key)
|
||||
}
|
||||
|
||||
@@ -1,20 +1,30 @@
|
||||
package storage
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/libnovel/backend/internal/bookstore"
|
||||
"github.com/libnovel/backend/internal/domain"
|
||||
minio "github.com/minio/minio-go/v7"
|
||||
"github.com/pdfcpu/pdfcpu/pkg/api"
|
||||
pdfcpu "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
|
||||
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu/model"
|
||||
"golang.org/x/net/html"
|
||||
)
|
||||
|
||||
var (
|
||||
chapterPattern = regexp.MustCompile(`(?i)chapter\s+(\d+)|The\s+Eminence\s+in\s+Shadow\s+(\d+)\s*-\s*(\d+)`)
|
||||
)
|
||||
// chapterHeadingRE matches common chapter heading patterns:
|
||||
// "Chapter 1", "Chapter 1:", "Chapter 1 -", "CHAPTER ONE", "1.", "Part 1", etc.
|
||||
var chapterHeadingRE = regexp.MustCompile(
|
||||
`(?i)^(?:chapter|ch\.?|part|episode|book)\s+(\d+|[ivxlcdm]+)\b|^\d{1,4}[\.\)]\s+\S`)
|
||||
|
||||
type importer struct {
|
||||
mc *minioClient
|
||||
@@ -42,73 +52,808 @@ func (i *importer) Import(ctx context.Context, objectKey, fileType string) ([]bo
|
||||
}
|
||||
|
||||
if fileType == "pdf" {
|
||||
return i.parsePDF(data)
|
||||
return parsePDF(data)
|
||||
}
|
||||
return i.parseEPUB(data)
|
||||
return parseEPUB(data)
|
||||
}
|
||||
|
||||
func (i *importer) parsePDF(data []byte) ([]bookstore.Chapter, error) {
|
||||
return nil, errors.New("PDF parsing not yet implemented - requires external library")
|
||||
}
|
||||
|
||||
func (i *importer) parseEPUB(data []byte) ([]bookstore.Chapter, error) {
|
||||
return nil, errors.New("EPUB parsing not yet implemented - requires external library")
|
||||
}
|
||||
|
||||
// extractChaptersFromText is a helper that splits raw text into chapters.
|
||||
// Used as a fallback when the PDF parser library returns raw text.
|
||||
func extractChaptersFromText(text string) []bookstore.Chapter {
|
||||
// AnalyzeFile parses the given PDF or EPUB data and returns the detected
|
||||
// chapter count and up to 3 preview lines (first non-empty line of each of
|
||||
// the first 3 chapters). It is used by the analyze-only endpoint so users
|
||||
// can preview chapter count before committing the import.
|
||||
// Note: uses parsePDF which is backed by pdfcpu ExtractContent — fast, no hang risk.
|
||||
func AnalyzeFile(data []byte, fileType string) (chapterCount int, firstLines []string, err error) {
|
||||
var chapters []bookstore.Chapter
|
||||
var currentChapter *bookstore.Chapter
|
||||
switch fileType {
|
||||
case "pdf":
|
||||
chapters, err = parsePDF(data)
|
||||
case "epub":
|
||||
chapters, err = parseEPUB(data)
|
||||
default:
|
||||
return 0, nil, fmt.Errorf("unsupported file type: %s", fileType)
|
||||
}
|
||||
if err != nil {
|
||||
return 0, nil, err
|
||||
}
|
||||
chapterCount = len(chapters)
|
||||
for i, ch := range chapters {
|
||||
if i >= 3 {
|
||||
break
|
||||
}
|
||||
line := strings.TrimSpace(ch.Content)
|
||||
if nl := strings.Index(line, "\n"); nl > 0 {
|
||||
line = line[:nl]
|
||||
}
|
||||
if len(line) > 120 {
|
||||
line = line[:120] + "…"
|
||||
}
|
||||
firstLines = append(firstLines, line)
|
||||
}
|
||||
return chapterCount, firstLines, nil
|
||||
}
|
||||
|
||||
|
||||
|
||||
// decryptPDF strips encryption from a PDF using an empty user password.
|
||||
// Returns the decrypted bytes, or an error if decryption is not possible.
|
||||
// This handles the common case of "owner-only" encrypted PDFs (copy/print
|
||||
// restrictions) which use an empty user password and open normally in readers.
|
||||
func decryptPDF(data []byte) ([]byte, error) {
|
||||
conf := model.NewDefaultConfiguration()
|
||||
conf.UserPW = ""
|
||||
conf.OwnerPW = ""
|
||||
|
||||
var out bytes.Buffer
|
||||
err := api.Decrypt(bytes.NewReader(data), &out, conf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out.Bytes(), nil
|
||||
}
|
||||
|
||||
// ParseImportFile parses a PDF or EPUB and returns chapters.
|
||||
// Unlike AnalyzeFile it respects ctx cancellation so callers can apply a timeout.
|
||||
// For PDFs it first attempts to strip encryption with an empty password.
|
||||
func ParseImportFile(ctx context.Context, data []byte, fileType string) ([]bookstore.Chapter, error) {
|
||||
type result struct {
|
||||
chapters []bookstore.Chapter
|
||||
err error
|
||||
}
|
||||
ch := make(chan result, 1)
|
||||
go func() {
|
||||
var chapters []bookstore.Chapter
|
||||
var err error
|
||||
switch fileType {
|
||||
case "pdf":
|
||||
chapters, err = parsePDF(data)
|
||||
case "epub":
|
||||
chapters, err = parseEPUB(data)
|
||||
default:
|
||||
err = fmt.Errorf("unsupported file type: %s", fileType)
|
||||
}
|
||||
ch <- result{chapters, err}
|
||||
}()
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return nil, fmt.Errorf("parse timed out: %w", ctx.Err())
|
||||
case r := <-ch:
|
||||
return r.chapters, r.err
|
||||
}
|
||||
}
|
||||
|
||||
// pdfSkipBookmarks lists bookmark titles that are front/back matter, not story chapters.
|
||||
// These are skipped when building the chapter list.
|
||||
var pdfSkipBookmarks = map[string]bool{
|
||||
"cover": true, "insert": true, "title page": true, "copyright": true,
|
||||
"appendix": true, "color insert": true, "color illustrations": true,
|
||||
}
|
||||
|
||||
// parsePDF extracts chapters from PDF bytes.
|
||||
//
|
||||
// Strategy:
|
||||
// 1. Decrypt owner-protected PDFs (empty user password).
|
||||
// 2. Read the PDF outline (bookmarks) — these give chapter titles and page ranges.
|
||||
// 3. Extract raw content streams for every page using pdfcpu ExtractContent.
|
||||
// 4. For each story bookmark, concatenate the extracted text of its pages.
|
||||
//
|
||||
// Falls back to paragraph-splitting when no bookmarks are found.
|
||||
// This is fast (~100ms for a 250-page PDF) because it avoids font-glyph
|
||||
// resolution which causes older PDF libraries to hang on publisher PDFs.
|
||||
func parsePDF(data []byte) ([]bookstore.Chapter, error) {
|
||||
// Decrypt owner-protected PDFs (empty user password).
|
||||
decrypted, err := decryptPDF(data)
|
||||
if err == nil {
|
||||
data = decrypted
|
||||
}
|
||||
|
||||
conf := model.NewDefaultConfiguration()
|
||||
conf.UserPW = ""
|
||||
conf.OwnerPW = ""
|
||||
|
||||
// Extract all page content streams to a temp directory.
|
||||
tmpDir, err := os.MkdirTemp("", "pdf-extract-*")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("create temp dir: %w", err)
|
||||
}
|
||||
defer os.RemoveAll(tmpDir)
|
||||
|
||||
if err := api.ExtractContent(bytes.NewReader(data), tmpDir, "out", nil, conf); err != nil {
|
||||
return nil, fmt.Errorf("extract PDF content: %w", err)
|
||||
}
|
||||
|
||||
entries, err := os.ReadDir(tmpDir)
|
||||
if err != nil || len(entries) == 0 {
|
||||
return nil, fmt.Errorf("PDF has no content pages")
|
||||
}
|
||||
|
||||
// Sort entries by filename so index == page number - 1.
|
||||
sort.Slice(entries, func(i, j int) bool { return entries[i].Name() < entries[j].Name() })
|
||||
|
||||
// Build page-index → extracted text map.
|
||||
pageTexts := make(map[int]string, len(entries))
|
||||
for idx, e := range entries {
|
||||
raw, readErr := os.ReadFile(tmpDir + "/" + e.Name())
|
||||
if readErr != nil {
|
||||
continue
|
||||
}
|
||||
pageTexts[idx+1] = extractTextFromContentStream(raw)
|
||||
}
|
||||
|
||||
// Try to use bookmarks (outline) for chapter structure.
|
||||
bookmarks, bmErr := api.Bookmarks(bytes.NewReader(data), conf)
|
||||
if bmErr == nil && len(bookmarks) > 0 {
|
||||
chapters := chaptersFromBookmarks(bookmarks, pageTexts)
|
||||
if len(chapters) > 0 {
|
||||
return chapters, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: concatenate all page texts and split by heading patterns.
|
||||
var sb strings.Builder
|
||||
for p := 1; p <= len(entries); p++ {
|
||||
sb.WriteString(pageTexts[p])
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
chapters := extractChaptersFromText(sb.String())
|
||||
if len(chapters) == 0 {
|
||||
return nil, fmt.Errorf("could not extract any chapters from PDF")
|
||||
}
|
||||
return chapters, nil
|
||||
}
|
||||
|
||||
// chaptersFromBookmarks builds a chapter list from PDF bookmarks + per-page text.
|
||||
// It flattens the bookmark tree, skips front/back matter entries, and assigns
|
||||
// page ranges so each chapter spans from its own start page to the next
|
||||
// bookmark's start page minus one.
|
||||
func chaptersFromBookmarks(bookmarks []pdfcpu.Bookmark, pageTexts map[int]string) []bookstore.Chapter {
|
||||
// Flatten bookmark tree.
|
||||
var flat []pdfcpu.Bookmark
|
||||
var flatten func([]pdfcpu.Bookmark)
|
||||
flatten = func(bms []pdfcpu.Bookmark) {
|
||||
for _, bm := range bms {
|
||||
flat = append(flat, bm)
|
||||
flatten(bm.Kids)
|
||||
}
|
||||
}
|
||||
flatten(bookmarks)
|
||||
|
||||
// Sort by page number.
|
||||
sort.Slice(flat, func(i, j int) bool { return flat[i].PageFrom < flat[j].PageFrom })
|
||||
|
||||
// Assign PageThru for entries where it's 0 (last bookmark or missing).
|
||||
maxPage := 0
|
||||
for p := range pageTexts {
|
||||
if p > maxPage {
|
||||
maxPage = p
|
||||
}
|
||||
}
|
||||
for i := range flat {
|
||||
if flat[i].PageThru == 0 {
|
||||
if i+1 < len(flat) {
|
||||
flat[i].PageThru = flat[i+1].PageFrom - 1
|
||||
} else {
|
||||
flat[i].PageThru = maxPage
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var chapters []bookstore.Chapter
|
||||
chNum := 0
|
||||
for _, bm := range flat {
|
||||
if pdfSkipBookmarks[strings.ToLower(strings.TrimSpace(bm.Title))] {
|
||||
continue
|
||||
}
|
||||
// Gather text for all pages in this bookmark's range.
|
||||
var sb strings.Builder
|
||||
for p := bm.PageFrom; p <= bm.PageThru; p++ {
|
||||
if t, ok := pageTexts[p]; ok {
|
||||
sb.WriteString(t)
|
||||
sb.WriteByte('\n')
|
||||
}
|
||||
}
|
||||
text := strings.TrimSpace(sb.String())
|
||||
if len(text) < 50 {
|
||||
continue // skip nearly-empty sections
|
||||
}
|
||||
chNum++
|
||||
chapters = append(chapters, bookstore.Chapter{
|
||||
Number: chNum,
|
||||
Title: bm.Title,
|
||||
Content: text,
|
||||
})
|
||||
}
|
||||
return chapters
|
||||
}
|
||||
|
||||
// extractTextFromContentStream parses a raw PDF content stream and extracts
|
||||
// readable text from Tj and TJ operators.
|
||||
//
|
||||
// TJ arrays may contain a mix of literal strings (parenthesised) and hex glyph
|
||||
// arrays. Only the literal strings are decoded — hex arrays require per-font
|
||||
// ToUnicode CMaps and are skipped. Kerning adjustment numbers inside TJ arrays
|
||||
// are also ignored (they're just spacing hints).
|
||||
//
|
||||
// Line breaks are inserted on ET / Td / TD / T* operators.
|
||||
func extractTextFromContentStream(stream []byte) string {
|
||||
s := string(stream)
|
||||
var sb strings.Builder
|
||||
i := 0
|
||||
n := len(s)
|
||||
for i < n {
|
||||
// TJ array: [ ... ]TJ — collect all literal strings, skip hex & numbers.
|
||||
if s[i] == '[' {
|
||||
j := i + 1
|
||||
for j < n && s[j] != ']' {
|
||||
if s[j] == '(' {
|
||||
// Literal string inside TJ array.
|
||||
k := j + 1
|
||||
depth := 1
|
||||
for k < n && depth > 0 {
|
||||
if s[k] == '\\' {
|
||||
k += 2
|
||||
continue
|
||||
}
|
||||
if s[k] == '(' {
|
||||
depth++
|
||||
} else if s[k] == ')' {
|
||||
depth--
|
||||
}
|
||||
k++
|
||||
}
|
||||
lit := pdfUnescapeString(s[j+1 : k-1])
|
||||
if hasPrintableASCII(lit) {
|
||||
sb.WriteString(lit)
|
||||
}
|
||||
j = k
|
||||
continue
|
||||
}
|
||||
j++
|
||||
}
|
||||
// Check if this is a TJ operator (skip whitespace after ']').
|
||||
end := j + 1
|
||||
for end < n && (s[end] == ' ' || s[end] == '\t' || s[end] == '\r' || s[end] == '\n') {
|
||||
end++
|
||||
}
|
||||
if end+2 <= n && s[end:end+2] == "TJ" && (end+2 == n || !isAlphaNum(s[end+2])) {
|
||||
i = end + 2
|
||||
continue
|
||||
}
|
||||
i = j + 1
|
||||
continue
|
||||
}
|
||||
// Single string: (string) Tj
|
||||
if s[i] == '(' {
|
||||
j := i + 1
|
||||
depth := 1
|
||||
for j < n && depth > 0 {
|
||||
if s[j] == '\\' {
|
||||
j += 2
|
||||
continue
|
||||
}
|
||||
if s[j] == '(' {
|
||||
depth++
|
||||
} else if s[j] == ')' {
|
||||
depth--
|
||||
}
|
||||
j++
|
||||
}
|
||||
lit := pdfUnescapeString(s[i+1 : j-1])
|
||||
if hasPrintableASCII(lit) {
|
||||
// Check for Tj operator.
|
||||
end := j
|
||||
for end < n && (s[end] == ' ' || s[end] == '\t') {
|
||||
end++
|
||||
}
|
||||
if end+2 <= n && s[end:end+2] == "Tj" && (end+2 == n || !isAlphaNum(s[end+2])) {
|
||||
sb.WriteString(lit)
|
||||
i = end + 2
|
||||
continue
|
||||
}
|
||||
}
|
||||
i = j
|
||||
continue
|
||||
}
|
||||
// Detect end of text object (ET) — add a newline.
|
||||
if i+2 <= n && s[i:i+2] == "ET" && (i+2 == n || !isAlphaNum(s[i+2])) {
|
||||
sb.WriteByte('\n')
|
||||
i += 2
|
||||
continue
|
||||
}
|
||||
// Detect Td / TD / T* — newline within text block.
|
||||
if i+2 <= n && (s[i:i+2] == "Td" || s[i:i+2] == "TD" || s[i:i+2] == "T*") &&
|
||||
(i+2 == n || !isAlphaNum(s[i+2])) {
|
||||
sb.WriteByte('\n')
|
||||
i += 2
|
||||
continue
|
||||
}
|
||||
i++
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func isAlphaNum(b byte) bool {
|
||||
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') || b == '_'
|
||||
}
|
||||
|
||||
func hasPrintableASCII(s string) bool {
|
||||
for _, c := range s {
|
||||
if c >= 0x20 && c < 0x7F {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// pdfUnescapeString handles PDF string escape sequences.
|
||||
func pdfUnescapeString(s string) string {
|
||||
if !strings.ContainsRune(s, '\\') {
|
||||
return s
|
||||
}
|
||||
var sb strings.Builder
|
||||
i := 0
|
||||
for i < len(s) {
|
||||
if s[i] == '\\' && i+1 < len(s) {
|
||||
switch s[i+1] {
|
||||
case 'n':
|
||||
sb.WriteByte('\n')
|
||||
case 'r':
|
||||
sb.WriteByte('\r')
|
||||
case 't':
|
||||
sb.WriteByte('\t')
|
||||
case '(', ')', '\\':
|
||||
sb.WriteByte(s[i+1])
|
||||
default:
|
||||
// Octal escape \ddd
|
||||
if s[i+1] >= '0' && s[i+1] <= '7' {
|
||||
end := i + 2
|
||||
for end < i+5 && end < len(s) && s[end] >= '0' && s[end] <= '7' {
|
||||
end++
|
||||
}
|
||||
val, _ := strconv.ParseInt(s[i+1:end], 8, 16)
|
||||
sb.WriteByte(byte(val))
|
||||
i = end
|
||||
continue
|
||||
}
|
||||
sb.WriteByte(s[i+1])
|
||||
}
|
||||
i += 2
|
||||
} else {
|
||||
sb.WriteByte(s[i])
|
||||
i++
|
||||
}
|
||||
}
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
// ── EPUB parsing ──────────────────────────────────────────────────────────────
|
||||
|
||||
// parseEPUB extracts readable chapters from an in-memory EPUB archive.
// It follows the standard EPUB layout: META-INF/container.xml names the
// OPF package, the OPF spine gives the reading order, and each spine
// entry is an (X)HTML file converted to plain text via htmlToText.
// Spine entries that fail to open or render to no visible text (covers,
// nav docs) are skipped. Returns an error when the zip is unreadable,
// the spine is empty, or no chapter yields any text.
func parseEPUB(data []byte) ([]bookstore.Chapter, error) {
	zr, err := zip.NewReader(bytes.NewReader(data), int64(len(data)))
	if err != nil {
		return nil, fmt.Errorf("open EPUB zip: %w", err)
	}

	// 1. Read META-INF/container.xml → find rootfile (content.opf path).
	opfPath, err := epubRootfilePath(zr)
	if err != nil {
		return nil, fmt.Errorf("epub container: %w", err)
	}

	// 2. Parse content.opf → spine order of chapter files.
	spineFiles, titleMap, err := epubSpine(zr, opfPath)
	if err != nil {
		return nil, fmt.Errorf("epub spine: %w", err)
	}

	if len(spineFiles) == 0 {
		return nil, fmt.Errorf("EPUB spine is empty")
	}

	// Base directory of the OPF file for resolving relative hrefs.
	opfDir := ""
	if idx := strings.LastIndex(opfPath, "/"); idx >= 0 {
		opfDir = opfPath[:idx+1]
	}

	var chapters []bookstore.Chapter
	for i, href := range spineFiles {
		fullPath := opfDir + href
		content, err := epubFileContent(zr, fullPath)
		if err != nil {
			// Missing/unreadable spine entry — skip rather than abort the import.
			continue
		}
		text := htmlToText(content)
		if strings.TrimSpace(text) == "" {
			// Nothing visible after stripping markup (cover page, nav doc, ...).
			continue
		}
		// Prefer the NCX/NAV title; fall back to a positional name.
		// NOTE(review): titleMap is keyed by manifest href — assumes the NCX
		// src uses the same relative form; verify for EPUBs with odd paths.
		title := titleMap[href]
		if title == "" {
			title = fmt.Sprintf("Chapter %d", i+1)
		}
		chapters = append(chapters, bookstore.Chapter{
			Number:  i + 1,
			Title:   title,
			Content: text,
		})
	}

	if len(chapters) == 0 {
		return nil, fmt.Errorf("no readable chapters found in EPUB")
	}
	return chapters, nil
}
|
||||
|
||||
// epubRootfilePath parses META-INF/container.xml and returns the full-path
// of the OPF package document. The container is parsed with html.Parse so
// slightly malformed XML still works; if several <rootfile> elements carry
// a full-path, the last one encountered in document order wins.
func epubRootfilePath(zr *zip.Reader) (string, error) {
	entry := zipFile(zr, "META-INF/container.xml")
	if entry == nil {
		return "", fmt.Errorf("META-INF/container.xml not found")
	}
	rc, err := entry.Open()
	if err != nil {
		return "", err
	}
	defer rc.Close()

	doc, err := html.Parse(rc)
	if err != nil {
		return "", err
	}

	var found string
	var visit func(*html.Node)
	visit = func(n *html.Node) {
		if n.Type == html.ElementNode && strings.EqualFold(n.Data, "rootfile") {
			for _, attr := range n.Attr {
				if strings.EqualFold(attr.Key, "full-path") {
					found = attr.Val
					return
				}
			}
		}
		for child := n.FirstChild; child != nil; child = child.NextSibling {
			visit(child)
		}
	}
	visit(doc)

	if found == "" {
		return "", fmt.Errorf("rootfile full-path not found in container.xml")
	}
	return found, nil
}
|
||||
|
||||
// epubSpine parses the OPF document and returns the spine item hrefs in order,
// plus a map from href → nav title (if available from NCX/NAV).
//
// The OPF is parsed with html.Parse instead of encoding/xml so malformed
// packages still yield a usable manifest and spine. When the <spine> has no
// usable itemrefs, every (X)HTML manifest item is used instead, sorted by
// href for a deterministic order.
func epubSpine(zr *zip.Reader, opfPath string) ([]string, map[string]string, error) {
	f := zipFile(zr, opfPath)
	if f == nil {
		return nil, nil, fmt.Errorf("OPF file %q not found in EPUB", opfPath)
	}
	rc, err := f.Open()
	if err != nil {
		return nil, nil, err
	}
	defer rc.Close()

	opfData, err := io.ReadAll(rc)
	if err != nil {
		return nil, nil, err
	}

	// Build id→href map from <manifest>.
	idToHref := make(map[string]string)
	// Also keep a href→navTitle map (populated from NCX later).
	hrefTitle := make(map[string]string)

	// Parse OPF XML with html.Parse (handles malformed XML too).
	doc, _ := html.Parse(bytes.NewReader(opfData))

	var manifestItems []struct{ id, href, mediaType string }
	var spineIdrefs []string
	var ncxID string // manifest id of the NCX ToC, from the spine's toc attr

	var walk func(*html.Node)
	walk = func(n *html.Node) {
		if n.Type == html.ElementNode {
			tag := strings.ToLower(n.Data)
			switch tag {
			case "item":
				// <manifest><item id=… href=… media-type=…/> — one per file.
				var id, href, mt string
				for _, a := range n.Attr {
					switch strings.ToLower(a.Key) {
					case "id":
						id = a.Val
					case "href":
						href = a.Val
					case "media-type":
						mt = a.Val
					}
				}
				if id != "" && href != "" {
					manifestItems = append(manifestItems, struct{ id, href, mediaType string }{id, href, mt})
					idToHref[id] = href
				}
			case "itemref":
				// <spine><itemref idref=…/> — reading order, by manifest id.
				for _, a := range n.Attr {
					if strings.ToLower(a.Key) == "idref" {
						spineIdrefs = append(spineIdrefs, a.Val)
					}
				}
			case "spine":
				// The spine's toc attribute names the NCX manifest item.
				for _, a := range n.Attr {
					if strings.ToLower(a.Key) == "toc" {
						ncxID = a.Val
					}
				}
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
	}
	walk(doc)

	// Build ordered spine href list.
	var spineHrefs []string
	for _, idref := range spineIdrefs {
		if href, ok := idToHref[idref]; ok {
			spineHrefs = append(spineHrefs, href)
		}
	}

	// If no explicit spine, fall back to every (X)HTML manifest item,
	// sorted by href so the fallback order is deterministic.
	if len(spineHrefs) == 0 {
		sort.Slice(manifestItems, func(i, j int) bool {
			return manifestItems[i].href < manifestItems[j].href
		})
		for _, it := range manifestItems {
			mt := strings.ToLower(it.mediaType)
			if strings.Contains(mt, "html") || strings.HasSuffix(strings.ToLower(it.href), ".html") || strings.HasSuffix(strings.ToLower(it.href), ".xhtml") {
				spineHrefs = append(spineHrefs, it.href)
			}
		}
	}

	// Try to get chapter titles from NCX (toc.ncx), resolved relative to
	// the OPF's directory. Title extraction is best-effort: a missing or
	// unreadable NCX just leaves hrefTitle empty.
	opfDir := ""
	if idx := strings.LastIndex(opfPath, "/"); idx >= 0 {
		opfDir = opfPath[:idx+1]
	}
	if ncxHref, ok := idToHref[ncxID]; ok {
		ncxPath := opfDir + ncxHref
		if ncxFile := zipFile(zr, ncxPath); ncxFile != nil {
			if ncxRC, err := ncxFile.Open(); err == nil {
				defer ncxRC.Close()
				parseNCXTitles(ncxRC, hrefTitle)
			}
		}
	}

	return spineHrefs, hrefTitle, nil
}
|
||||
|
||||
// parseNCXTitles extracts navPoint label→src mappings from a toc.ncx
// document into out, keyed by src with any #fragment stripped.
// Parsing is best-effort: an unparseable NCX simply leaves out untouched.
//
// Fix: the inner collector no longer descends into nested <navPoint>
// elements. Previously the LAST <content> anywhere in the subtree won, so
// a parent navPoint was recorded under its deepest child's src (and that
// entry was then overwritten when the child was visited), losing the
// parent's own title. Now each navPoint contributes only its own first
// <navLabel><text> and first <content src>.
func parseNCXTitles(r io.Reader, out map[string]string) {
	doc, err := html.Parse(r)
	if err != nil {
		return
	}

	var walk func(*html.Node)
	walk = func(n *html.Node) {
		if n.Type == html.ElementNode && strings.EqualFold(n.Data, "navpoint") {
			var label, src string
			var inner func(*html.Node)
			inner = func(c *html.Node) {
				if c.Type == html.ElementNode {
					// Nested navPoints belong to child entries; the outer
					// walk visits them separately.
					if c != n && strings.EqualFold(c.Data, "navpoint") {
						return
					}
					// First <text> in document order is this navPoint's label.
					if strings.EqualFold(c.Data, "text") && label == "" {
						if c.FirstChild != nil && c.FirstChild.Type == html.TextNode {
							label = strings.TrimSpace(c.FirstChild.Data)
						}
					}
					// First <content src="…"> is this navPoint's target;
					// strip the fragment identifier (#...).
					if strings.EqualFold(c.Data, "content") && src == "" {
						for _, a := range c.Attr {
							if strings.EqualFold(a.Key, "src") {
								src = strings.SplitN(a.Val, "#", 2)[0]
							}
						}
					}
				}
				for child := c.FirstChild; child != nil; child = child.NextSibling {
					inner(child)
				}
			}
			inner(n)
			if label != "" && src != "" {
				out[src] = label
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
	}
	walk(doc)
}
|
||||
|
||||
// epubFileContent returns the raw bytes of a single file stored inside the
// EPUB zip archive, located case-insensitively via zipFile.
func epubFileContent(zr *zip.Reader, path string) ([]byte, error) {
	entry := zipFile(zr, path)
	if entry == nil {
		return nil, fmt.Errorf("file %q not in EPUB", path)
	}
	rc, err := entry.Open()
	if err != nil {
		return nil, err
	}
	defer rc.Close()
	return io.ReadAll(rc)
}
|
||||
|
||||
// zipFile returns the archive entry whose name matches the given name
// case-insensitively (via lowercase comparison), or nil when absent.
// The first matching entry in archive order wins.
func zipFile(zr *zip.Reader, name string) *zip.File {
	want := strings.ToLower(name)
	for _, entry := range zr.File {
		if strings.ToLower(entry.Name) == want {
			return entry
		}
	}
	return nil
}
|
||||
|
||||
// htmlToText converts HTML/XHTML content to plain text suitable for storage.
// Text nodes are trimmed and joined with single spaces; block-level elements
// (p, div, headings, li, tr, br) contribute line breaks; <script>, <style>
// and <head> subtrees are dropped entirely. Runs of blank lines are
// collapsed to a single blank line. If the input cannot be parsed the raw
// bytes are returned as a string (html.Parse rarely fails — it
// error-corrects aggressively).
func htmlToText(data []byte) string {
	doc, err := html.Parse(bytes.NewReader(data))
	if err != nil {
		return string(data)
	}

	var sb strings.Builder
	var walk func(*html.Node)
	walk = func(n *html.Node) {
		if n.Type == html.TextNode {
			text := strings.TrimSpace(n.Data)
			if text != "" {
				sb.WriteString(text)
				sb.WriteByte(' ')
			}
		}
		if n.Type == html.ElementNode {
			switch strings.ToLower(n.Data) {
			case "p", "div", "br", "h1", "h2", "h3", "h4", "h5", "h6", "li", "tr":
				// Block-level: ensure newline before content.
				if sb.Len() > 0 {
					s := sb.String()
					if s[len(s)-1] != '\n' {
						sb.WriteByte('\n')
					}
				}
			case "script", "style", "head":
				// Skip entirely — these subtrees carry no visible text.
				return
			}
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			walk(c)
		}
		// After the children: close block elements with a newline.
		// (br is omitted here — it is void and already handled above.)
		if n.Type == html.ElementNode {
			switch strings.ToLower(n.Data) {
			case "p", "div", "h1", "h2", "h3", "h4", "h5", "h6", "li", "tr":
				sb.WriteByte('\n')
			}
		}
	}
	walk(doc)

	// Collapse multiple blank lines, trimming each line's whitespace.
	lines := strings.Split(sb.String(), "\n")
	var out []string
	blanks := 0
	for _, l := range lines {
		l = strings.TrimSpace(l)
		if l == "" {
			blanks++
			if blanks <= 1 {
				out = append(out, "")
			}
		} else {
			blanks = 0
			out = append(out, l)
		}
	}
	return strings.TrimSpace(strings.Join(out, "\n"))
}
|
||||
|
||||
// ── Chapter segmentation (shared by PDF and plain-text paths) ─────────────────
|
||||
|
||||
// extractChaptersFromText splits a block of plain text into chapters by
|
||||
// detecting heading lines that match chapterHeadingRE.
|
||||
// Falls back to paragraph-splitting when no headings are found.
|
||||
func extractChaptersFromText(text string) []bookstore.Chapter {
|
||||
lines := strings.Split(text, "\n")
|
||||
chapterNum := 0
|
||||
|
||||
type segment struct {
|
||||
title string
|
||||
number int
|
||||
lines []string
|
||||
}
|
||||
|
||||
var segments []segment
|
||||
var cur *segment
|
||||
chNum := 0
|
||||
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if len(line) < 3 {
|
||||
continue
|
||||
}
|
||||
|
||||
matches := chapterPattern.FindStringSubmatch(line)
|
||||
if matches != nil {
|
||||
if currentChapter != nil && currentChapter.Content != "" {
|
||||
chapters = append(chapters, *currentChapter)
|
||||
if chapterHeadingRE.MatchString(line) {
|
||||
if cur != nil {
|
||||
segments = append(segments, *cur)
|
||||
}
|
||||
chapterNum++
|
||||
if matches[1] != "" {
|
||||
chapterNum, _ = fmt.Sscanf(matches[1], "%d", &chapterNum)
|
||||
chNum++
|
||||
// Try to parse the explicit chapter number from the heading.
|
||||
if m := regexp.MustCompile(`\d+`).FindString(line); m != "" {
|
||||
if n, err := strconv.Atoi(m); err == nil && n > 0 && n < 100000 {
|
||||
chNum = n
|
||||
}
|
||||
}
|
||||
currentChapter = &bookstore.Chapter{
|
||||
Number: chapterNum,
|
||||
Title: line,
|
||||
Content: "",
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if currentChapter != nil {
|
||||
if currentChapter.Content != "" {
|
||||
currentChapter.Content += " "
|
||||
}
|
||||
currentChapter.Content += line
|
||||
cur = &segment{title: line, number: chNum}
|
||||
} else if cur != nil && line != "" {
|
||||
cur.lines = append(cur.lines, line)
|
||||
}
|
||||
}
|
||||
|
||||
if currentChapter != nil && currentChapter.Content != "" {
|
||||
chapters = append(chapters, *currentChapter)
|
||||
if cur != nil {
|
||||
segments = append(segments, *cur)
|
||||
}
|
||||
|
||||
// If no chapters found via regex, try splitting by double newlines
|
||||
// Require segments to have meaningful content (>= 100 chars).
|
||||
var chapters []bookstore.Chapter
|
||||
for _, seg := range segments {
|
||||
content := strings.Join(seg.lines, "\n")
|
||||
if len(strings.TrimSpace(content)) < 50 {
|
||||
continue
|
||||
}
|
||||
chapters = append(chapters, bookstore.Chapter{
|
||||
Number: seg.number,
|
||||
Title: seg.title,
|
||||
Content: content,
|
||||
})
|
||||
}
|
||||
|
||||
// Fallback: no headings found — split by double newlines (paragraph blocks).
|
||||
if len(chapters) == 0 {
|
||||
paragraphs := strings.Split(text, "\n\n")
|
||||
for i, para := range paragraphs {
|
||||
n := 0
|
||||
for _, para := range paragraphs {
|
||||
para = strings.TrimSpace(para)
|
||||
if len(para) > 50 {
|
||||
if len(para) > 100 {
|
||||
n++
|
||||
chapters = append(chapters, bookstore.Chapter{
|
||||
Number: i + 1,
|
||||
Title: fmt.Sprintf("Chapter %d", i+1),
|
||||
Content: para,
|
||||
Number: n,
|
||||
Title: fmt.Sprintf("Chapter %d", n),
|
||||
Content: para,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -117,28 +862,31 @@ func extractChaptersFromText(text string) []bookstore.Chapter {
|
||||
return chapters
|
||||
}
|
||||
|
||||
// ── Chapter ingestion ─────────────────────────────────────────────────────────

// IngestChapters stores extracted chapters for a book.
// Each chapter is rendered as a small markdown document — an H1 heading
// followed by the chapter body — and persisted via WriteChapter.
// The real title is used as the heading only when it carries information
// beyond the generic positional "Chapter N" fallback.
// Stops and returns on the first failed chapter so the import can be retried.
//
// (Reconstructed: the scraped diff interleaved the removed MinIO-direct
// implementation with the added WriteChapter-based one; this is the
// post-change side.)
func (s *Store) IngestChapters(ctx context.Context, slug string, chapters []bookstore.Chapter) error {
	for _, ch := range chapters {
		var mdContent string
		if ch.Title != "" && ch.Title != fmt.Sprintf("Chapter %d", ch.Number) {
			mdContent = fmt.Sprintf("# %s\n\n%s", ch.Title, ch.Content)
		} else {
			mdContent = fmt.Sprintf("# Chapter %d\n\n%s", ch.Number, ch.Content)
		}
		domainCh := domain.Chapter{
			Ref:  domain.ChapterRef{Number: ch.Number, Title: ch.Title},
			Text: mdContent,
		}
		if err := s.WriteChapter(ctx, slug, domainCh); err != nil {
			return fmt.Errorf("ingest chapter %d: %w", ch.Number, err)
		}
	}
	return nil
}
|
||||
|
||||
// GetImportObjectKey returns the MinIO object key under which an uploaded
// import file is stored: "imports/<filename>". Keeping the prefix in one
// place keeps upload and worker lookups in sync.
func GetImportObjectKey(filename string) string {
	return fmt.Sprintf("imports/%s", filename)
}
|
||||
}
|
||||
|
||||
@@ -647,17 +647,26 @@ func (s *Store) CreateTranslationTask(ctx context.Context, slug string, chapter
|
||||
return rec.ID, nil
|
||||
}
|
||||
|
||||
func (s *Store) CreateImportTask(ctx context.Context, slug, title, fileType, objectKey, initiatorUserID string) (string, error) {
|
||||
func (s *Store) CreateImportTask(ctx context.Context, task domain.ImportTask) (string, error) {
|
||||
payload := map[string]any{
|
||||
"slug": slug,
|
||||
"title": title,
|
||||
"file_name": slug + "." + fileType,
|
||||
"file_type": fileType,
|
||||
"slug": task.Slug,
|
||||
"title": task.Title,
|
||||
"file_name": task.Slug + "." + task.FileType,
|
||||
"file_type": task.FileType,
|
||||
"object_key": task.ObjectKey,
|
||||
"chapters_key": task.ChaptersKey,
|
||||
"author": task.Author,
|
||||
"cover_url": task.CoverURL,
|
||||
"summary": task.Summary,
|
||||
"book_status": task.BookStatus,
|
||||
"status": string(domain.TaskStatusPending),
|
||||
"chapters_done": 0,
|
||||
"chapters_total": 0,
|
||||
"chapters_total": task.ChaptersTotal,
|
||||
"started": time.Now().UTC().Format(time.RFC3339),
|
||||
"initiator_user_id": initiatorUserID,
|
||||
"initiator_user_id": task.InitiatorUserID,
|
||||
}
|
||||
if len(task.Genres) > 0 {
|
||||
payload["genres"] = strings.Join(task.Genres, ",")
|
||||
}
|
||||
var rec struct {
|
||||
ID string `json:"id"`
|
||||
@@ -708,6 +717,48 @@ func (s *Store) MarkNotificationRead(ctx context.Context, id string) error {
|
||||
map[string]any{"read": true})
|
||||
}
|
||||
|
||||
// DeleteNotification deletes a single notification record from PocketBase
// by its record ID. Returns whatever error the underlying DELETE produces.
func (s *Store) DeleteNotification(ctx context.Context, id string) error {
	return s.pb.delete(ctx, fmt.Sprintf("/api/collections/notifications/records/%s", id))
}
|
||||
|
||||
// ClearAllNotifications deletes all notifications belonging to userID.
// Only the initial listing can fail the call; individual record deletes
// are best-effort and their errors are intentionally discarded.
func (s *Store) ClearAllNotifications(ctx context.Context, userID string) error {
	items, err := s.pb.listAll(ctx, "notifications", fmt.Sprintf("user_id='%s'", userID), "")
	if err != nil {
		return fmt.Errorf("ClearAllNotifications list: %w", err)
	}
	for _, raw := range items {
		var rec struct {
			ID string `json:"id"`
		}
		if err := json.Unmarshal(raw, &rec); err != nil || rec.ID == "" {
			continue
		}
		_ = s.pb.delete(ctx, fmt.Sprintf("/api/collections/notifications/records/%s", rec.ID))
	}
	return nil
}
|
||||
|
||||
// MarkAllNotificationsRead flags every unread notification of userID as
// read. Only the initial listing can fail the call; per-record patches are
// best-effort and their errors are intentionally discarded.
func (s *Store) MarkAllNotificationsRead(ctx context.Context, userID string) error {
	items, err := s.pb.listAll(ctx, "notifications", fmt.Sprintf("user_id='%s'&&read=false", userID), "")
	if err != nil {
		return fmt.Errorf("MarkAllNotificationsRead list: %w", err)
	}
	for _, raw := range items {
		var rec struct {
			ID string `json:"id"`
		}
		if err := json.Unmarshal(raw, &rec); err != nil || rec.ID == "" {
			continue
		}
		_ = s.pb.patch(ctx, fmt.Sprintf("/api/collections/notifications/records/%s", rec.ID),
			map[string]any{"read": true})
	}
	return nil
}
|
||||
|
||||
func (s *Store) CancelTask(ctx context.Context, id string) error {
|
||||
// Try scraping_tasks first, then audio_jobs, then translation_jobs.
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/scraping_tasks/records/%s", id),
|
||||
@@ -864,7 +915,7 @@ func (s *Store) FailTask(ctx context.Context, id, errMsg string) error {
|
||||
}
|
||||
|
||||
// HeartbeatTask updates the heartbeat_at field on a running task.
|
||||
// Tries scraping_tasks first, then audio_jobs, then translation_jobs.
|
||||
// Tries scraping_tasks, audio_jobs, translation_jobs, then import_tasks.
|
||||
func (s *Store) HeartbeatTask(ctx context.Context, id string) error {
|
||||
payload := map[string]any{
|
||||
"heartbeat_at": time.Now().UTC().Format(time.RFC3339),
|
||||
@@ -875,7 +926,10 @@ func (s *Store) HeartbeatTask(ctx context.Context, id string) error {
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id), payload); err == nil {
|
||||
return nil
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/translation_jobs/records/%s", id), payload)
|
||||
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/translation_jobs/records/%s", id), payload); err == nil {
|
||||
return nil
|
||||
}
|
||||
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/import_tasks/records/%s", id), payload)
|
||||
}
|
||||
|
||||
// ReapStaleTasks finds all running tasks whose heartbeat_at is either missing
|
||||
@@ -893,7 +947,7 @@ func (s *Store) ReapStaleTasks(ctx context.Context, staleAfter time.Duration) (i
|
||||
}
|
||||
|
||||
total := 0
|
||||
for _, collection := range []string{"scraping_tasks", "audio_jobs", "translation_jobs"} {
|
||||
for _, collection := range []string{"scraping_tasks", "audio_jobs", "translation_jobs", "import_tasks"} {
|
||||
items, err := s.pb.listAll(ctx, collection, filter, "")
|
||||
if err != nil {
|
||||
return total, fmt.Errorf("ReapStaleTasks list %s: %w", collection, err)
|
||||
@@ -1134,6 +1188,13 @@ func parseImportTask(raw json.RawMessage) (domain.ImportTask, error) {
|
||||
Title string `json:"title"`
|
||||
FileName string `json:"file_name"`
|
||||
FileType string `json:"file_type"`
|
||||
ObjectKey string `json:"object_key"`
|
||||
ChaptersKey string `json:"chapters_key"`
|
||||
Author string `json:"author"`
|
||||
CoverURL string `json:"cover_url"`
|
||||
Genres string `json:"genres"` // stored as comma-separated
|
||||
Summary string `json:"summary"`
|
||||
BookStatus string `json:"book_status"`
|
||||
WorkerID string `json:"worker_id"`
|
||||
InitiatorUserID string `json:"initiator_user_id"`
|
||||
Status string `json:"status"`
|
||||
@@ -1148,12 +1209,27 @@ func parseImportTask(raw json.RawMessage) (domain.ImportTask, error) {
|
||||
}
|
||||
started, _ := time.Parse(time.RFC3339, rec.Started)
|
||||
finished, _ := time.Parse(time.RFC3339, rec.Finished)
|
||||
var genres []string
|
||||
if rec.Genres != "" {
|
||||
for _, g := range strings.Split(rec.Genres, ",") {
|
||||
if g = strings.TrimSpace(g); g != "" {
|
||||
genres = append(genres, g)
|
||||
}
|
||||
}
|
||||
}
|
||||
return domain.ImportTask{
|
||||
ID: rec.ID,
|
||||
Slug: rec.Slug,
|
||||
Title: rec.Title,
|
||||
FileName: rec.FileName,
|
||||
FileType: rec.FileType,
|
||||
ObjectKey: rec.ObjectKey,
|
||||
ChaptersKey: rec.ChaptersKey,
|
||||
Author: rec.Author,
|
||||
CoverURL: rec.CoverURL,
|
||||
Genres: genres,
|
||||
Summary: rec.Summary,
|
||||
BookStatus: rec.BookStatus,
|
||||
WorkerID: rec.WorkerID,
|
||||
InitiatorUserID: rec.InitiatorUserID,
|
||||
Status: domain.TaskStatus(rec.Status),
|
||||
@@ -1196,6 +1272,20 @@ func (s *Store) PutImportFile(ctx context.Context, key string, data []byte) erro
|
||||
return s.mc.putObject(ctx, "imports", key, "application/octet-stream", data)
|
||||
}
|
||||
|
||||
// PutImportChapters stores a pre-parsed chapters JSON blob in the MinIO
// "imports" bucket under key, with content type application/json.
func (s *Store) PutImportChapters(ctx context.Context, key string, data []byte) error {
	return s.mc.putObject(ctx, "imports", key, "application/json", data)
}
|
||||
|
||||
// GetImportChapters retrieves the pre-parsed chapters JSON blob from the
// MinIO "imports" bucket, wrapping any fetch failure with context.
func (s *Store) GetImportChapters(ctx context.Context, key string) ([]byte, error) {
	blob, err := s.mc.getObject(ctx, "imports", key)
	if err != nil {
		return nil, fmt.Errorf("get chapters object: %w", err)
	}
	return blob, nil
}
|
||||
|
||||
// CoverExists reports whether a cover image object is already present in
// MinIO for the book identified by slug (keyed via CoverObjectKey).
func (s *Store) CoverExists(ctx context.Context, slug string) bool {
	return s.mc.coverExists(ctx, CoverObjectKey(slug))
}
|
||||
|
||||
@@ -35,8 +35,8 @@ type Producer interface {
|
||||
|
||||
// CreateImportTask inserts a new import task with status=pending and
|
||||
// returns the assigned PocketBase record ID.
|
||||
// initiatorUserID is the PocketBase user ID who submitted the import (may be empty).
|
||||
CreateImportTask(ctx context.Context, slug, title, fileType, objectKey, initiatorUserID string) (string, error)
|
||||
// The task struct must have at minimum Slug, Title, FileType, and ObjectKey set.
|
||||
CreateImportTask(ctx context.Context, task domain.ImportTask) (string, error)
|
||||
|
||||
// CancelTask transitions a pending task to status=cancelled.
|
||||
// Returns ErrNotFound if the task does not exist.
|
||||
|
||||
@@ -26,7 +26,7 @@ func (s *stubStore) CreateAudioTask(_ context.Context, _ string, _ int, _ string
|
||||
func (s *stubStore) CreateTranslationTask(_ context.Context, _ string, _ int, _ string) (string, error) {
|
||||
return "translation-1", nil
|
||||
}
|
||||
// CreateImportTask satisfies the producer interface with a fixed record ID;
// the stub never touches storage. (Reconstructed: the scraped diff left both
// the removed positional-argument signature and the added domain.ImportTask
// signature — this keeps the post-change one.)
func (s *stubStore) CreateImportTask(_ context.Context, _ domain.ImportTask) (string, error) {
	return "import-1", nil
}
|
||||
func (s *stubStore) CancelTask(_ context.Context, _ string) error { return nil }
|
||||
|
||||
@@ -58,6 +58,8 @@ services:
|
||||
mc mb --ignore-existing local/audio;
|
||||
mc mb --ignore-existing local/avatars;
|
||||
mc mb --ignore-existing local/catalogue;
|
||||
mc mb --ignore-existing local/translations;
|
||||
mc mb --ignore-existing local/imports;
|
||||
echo 'buckets ready';
|
||||
"
|
||||
environment:
|
||||
@@ -307,6 +309,8 @@ services:
|
||||
# OpenTelemetry tracing
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT: "${OTEL_EXPORTER_OTLP_ENDPOINT}"
|
||||
OTEL_SERVICE_NAME: "ui"
|
||||
# Allow large PDF/EPUB uploads (adapter-node default is 512KB)
|
||||
BODY_SIZE_LIMIT: "52428800"
|
||||
# OAuth2 providers
|
||||
GOOGLE_CLIENT_ID: "${GOOGLE_CLIENT_ID}"
|
||||
GOOGLE_CLIENT_SECRET: "${GOOGLE_CLIENT_SECRET}"
|
||||
|
||||
@@ -299,6 +299,40 @@ create "translation_jobs" '{
|
||||
{"name":"heartbeat_at", "type":"date"}
|
||||
]}'
|
||||
|
||||
create "import_tasks" '{
|
||||
"name":"import_tasks","type":"base","fields":[
|
||||
{"name":"slug", "type":"text", "required":true},
|
||||
{"name":"title", "type":"text", "required":true},
|
||||
{"name":"file_name", "type":"text"},
|
||||
{"name":"file_type", "type":"text"},
|
||||
{"name":"object_key", "type":"text"},
|
||||
{"name":"chapters_key", "type":"text"},
|
||||
{"name":"author", "type":"text"},
|
||||
{"name":"cover_url", "type":"text"},
|
||||
{"name":"genres", "type":"text"},
|
||||
{"name":"summary", "type":"text"},
|
||||
{"name":"book_status", "type":"text"},
|
||||
{"name":"worker_id", "type":"text"},
|
||||
{"name":"initiator_user_id", "type":"text"},
|
||||
{"name":"status", "type":"text", "required":true},
|
||||
{"name":"chapters_done", "type":"number"},
|
||||
{"name":"chapters_total", "type":"number"},
|
||||
{"name":"error_message", "type":"text"},
|
||||
{"name":"started", "type":"date"},
|
||||
{"name":"finished", "type":"date"},
|
||||
{"name":"heartbeat_at", "type":"date"}
|
||||
]}'
|
||||
|
||||
create "notifications" '{
|
||||
"name":"notifications","type":"base","fields":[
|
||||
{"name":"user_id", "type":"text","required":true},
|
||||
{"name":"title", "type":"text","required":true},
|
||||
{"name":"message", "type":"text"},
|
||||
{"name":"link", "type":"text"},
|
||||
{"name":"read", "type":"bool"},
|
||||
{"name":"created", "type":"date"}
|
||||
]}'
|
||||
|
||||
create "ai_jobs" '{
|
||||
"name":"ai_jobs","type":"base","fields":[
|
||||
{"name":"kind", "type":"text", "required":true},
|
||||
|
||||
@@ -408,6 +408,7 @@
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "AI Jobs",
|
||||
"admin_nav_notifications": "Notifications",
|
||||
"admin_nav_feedback": "Feedback",
|
||||
"admin_nav_errors": "Errors",
|
||||
"admin_nav_analytics": "Analytics",
|
||||
|
||||
@@ -378,6 +378,7 @@
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "Tâches IA",
|
||||
"admin_nav_notifications": "Notifications",
|
||||
"admin_nav_errors": "Erreurs",
|
||||
"admin_nav_analytics": "Analytique",
|
||||
"admin_nav_logs": "Journaux",
|
||||
|
||||
@@ -378,6 +378,7 @@
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "Tugas AI",
|
||||
"admin_nav_notifications": "Notifikasi",
|
||||
"admin_nav_errors": "Kesalahan",
|
||||
"admin_nav_analytics": "Analitik",
|
||||
"admin_nav_logs": "Log",
|
||||
|
||||
@@ -378,6 +378,7 @@
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "Tarefas de IA",
|
||||
"admin_nav_notifications": "Notificações",
|
||||
"admin_nav_errors": "Erros",
|
||||
"admin_nav_analytics": "Análise",
|
||||
"admin_nav_logs": "Logs",
|
||||
|
||||
@@ -378,6 +378,7 @@
|
||||
"admin_nav_text_gen": "Text Gen",
|
||||
"admin_nav_catalogue_tools": "Catalogue Tools",
|
||||
"admin_nav_ai_jobs": "Задачи ИИ",
|
||||
"admin_nav_notifications": "Уведомления",
|
||||
"admin_nav_errors": "Ошибки",
|
||||
"admin_nav_analytics": "Аналитика",
|
||||
"admin_nav_logs": "Логи",
|
||||
|
||||
@@ -379,6 +379,7 @@ export * from './admin_nav_image_gen.js'
|
||||
export * from './admin_nav_text_gen.js'
|
||||
export * from './admin_nav_catalogue_tools.js'
|
||||
export * from './admin_nav_ai_jobs.js'
|
||||
export * from './admin_nav_notifications.js'
|
||||
export * from './admin_nav_feedback.js'
|
||||
export * from './admin_nav_errors.js'
|
||||
export * from './admin_nav_analytics.js'
|
||||
|
||||
44
ui/src/lib/paraglide/messages/admin_nav_notifications.js
Normal file
44
ui/src/lib/paraglide/messages/admin_nav_notifications.js
Normal file
@@ -0,0 +1,44 @@
|
||||
/* eslint-disable */
// NOTE(review): this appears to be Paraglide-compiled message output (one
// function per locale, dispatched on the active locale) — regenerate via the
// inlang compiler rather than editing by hand.
import { getLocale, experimentalStaticLocale } from '../runtime.js';

/** @typedef {import('../runtime.js').LocalizedString} LocalizedString */

/** @typedef {{}} Admin_Nav_NotificationsInputs */

const en_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
	return /** @type {LocalizedString} */ (`Notifications`)
};

const ru_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
	return /** @type {LocalizedString} */ (`Уведомления`)
};

const id_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
	return /** @type {LocalizedString} */ (`Notifikasi`)
};

const pt_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
	return /** @type {LocalizedString} */ (`Notificações`)
};

const fr_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
	return /** @type {LocalizedString} */ (`Notifications`)
};

/**
 * | output |
 * | --- |
 * | "Notifications" |
 *
 * @param {Admin_Nav_NotificationsInputs} inputs
 * @param {{ locale?: "en" | "ru" | "id" | "pt" | "fr" }} options
 * @returns {LocalizedString}
 */
export const admin_nav_notifications = /** @type {((inputs?: Admin_Nav_NotificationsInputs, options?: { locale?: "en" | "ru" | "id" | "pt" | "fr" }) => LocalizedString) & import('../runtime.js').MessageMetadata<Admin_Nav_NotificationsInputs, { locale?: "en" | "ru" | "id" | "pt" | "fr" }, {}>} */ ((inputs = {}, options = {}) => {
	const locale = experimentalStaticLocale ?? options.locale ?? getLocale()
	if (locale === "en") return en_admin_nav_notifications(inputs)
	if (locale === "ru") return ru_admin_nav_notifications(inputs)
	if (locale === "id") return id_admin_nav_notifications(inputs)
	if (locale === "pt") return pt_admin_nav_notifications(inputs)
	return fr_admin_nav_notifications(inputs)
});
||||
@@ -1383,6 +1383,20 @@ export async function revokeUserSession(recordId: string, userId: string): Promi
|
||||
return del.ok || del.status === 204;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a session by its auth session ID (the value stored in the cookie).
|
||||
* Used on logout so the row doesn't linger as a phantom active session.
|
||||
*/
|
||||
export async function deleteSessionByAuthId(authSessionId: string): Promise<void> {
|
||||
const row = await listOne<UserSession>('user_sessions', `session_id="${authSessionId}"`);
|
||||
if (!row) return;
|
||||
const token = await getToken();
|
||||
await fetch(`${PB_URL}/api/collections/user_sessions/records/${row.id}`, {
|
||||
method: 'DELETE',
|
||||
headers: { Authorization: `Bearer ${token}` }
|
||||
}).catch(() => {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Revoke all sessions for a user (used on password change etc).
|
||||
*/
|
||||
@@ -2287,10 +2301,17 @@ export async function getUserStats(
|
||||
|
||||
// ─── AI Jobs ──────────────────────────────────────────────────────────────────
|
||||
|
||||
const AI_JOBS_CACHE_KEY = 'admin:ai_jobs';
|
||||
const AI_JOBS_CACHE_TTL = 30; // 30 seconds — same as other admin job lists
|
||||
|
||||
/**
|
||||
* List all AI jobs from PocketBase, sorted by started descending.
|
||||
* No caching — admin views always want fresh data.
|
||||
* Short-lived cache (30s) to avoid hammering PocketBase on every navigation.
|
||||
*/
|
||||
export async function listAIJobs(): Promise<AIJob[]> {
|
||||
return listAll<AIJob>('ai_jobs', '', '-started');
|
||||
const cached = await cache.get<AIJob[]>(AI_JOBS_CACHE_KEY);
|
||||
if (cached) return cached;
|
||||
const jobs = await listAll<AIJob>('ai_jobs', '', '-started');
|
||||
await cache.set(AI_JOBS_CACHE_KEY, jobs, AI_JOBS_CACHE_TTL);
|
||||
return jobs;
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@
|
||||
// Notifications
|
||||
let notificationsOpen = $state(false);
|
||||
let notifications = $state<{id: string; title: string; message: string; link: string; read: boolean}[]>([]);
|
||||
let notifFilter = $state<'all' | 'unread'>('all');
|
||||
async function loadNotifications() {
|
||||
if (!data.user) return;
|
||||
try {
|
||||
@@ -42,8 +43,31 @@
|
||||
notifications = notifications.map(n => n.id === id ? {...n, read: true} : n);
|
||||
} catch (e) { console.error('mark read:', e); }
|
||||
}
|
||||
async function markAllRead() {
|
||||
if (!data.user) return;
|
||||
try {
|
||||
await fetch('/api/notifications?user_id=' + data.user.id, { method: 'PATCH' });
|
||||
notifications = notifications.map(n => ({ ...n, read: true }));
|
||||
} catch (e) { console.error('mark all read:', e); }
|
||||
}
|
||||
async function dismissNotification(id: string) {
|
||||
try {
|
||||
await fetch('/api/notifications/' + id, { method: 'DELETE' });
|
||||
notifications = notifications.filter(n => n.id !== id);
|
||||
} catch (e) { console.error('dismiss notification:', e); }
|
||||
}
|
||||
async function clearAllNotifications() {
|
||||
if (!data.user) return;
|
||||
try {
|
||||
await fetch('/api/notifications?user_id=' + data.user.id, { method: 'DELETE' });
|
||||
notifications = [];
|
||||
} catch (e) { console.error('clear notifications:', e); }
|
||||
}
|
||||
$effect(() => { if (data.user) loadNotifications(); });
|
||||
const unreadCount = $derived(notifications.filter(n => !n.read).length);
|
||||
const filteredNotifications = $derived(
|
||||
notifFilter === 'unread' ? notifications.filter(n => !n.read) : notifications
|
||||
);
|
||||
|
||||
// Close search on navigation
|
||||
$effect(() => {
|
||||
@@ -579,21 +603,84 @@
|
||||
{/if}
|
||||
</button>
|
||||
{#if notificationsOpen}
|
||||
<div class="absolute right-0 top-full mt-1 w-80 bg-(--color-surface-2) border border-(--color-border) rounded-lg shadow-xl z-50 max-h-96 overflow-y-auto">
|
||||
{#if notifications.length === 0}
|
||||
<div class="p-4 text-center text-(--color-muted) text-sm">No notifications</div>
|
||||
{:else}
|
||||
{#each notifications as n}
|
||||
<a
|
||||
href={n.link || '/admin/import'}
|
||||
onclick={() => { markRead(n.id); notificationsOpen = false; }}
|
||||
class="block p-3 border-b border-(--color-border)/50 hover:bg-(--color-surface-3) {n.read ? 'opacity-60' : ''}"
|
||||
>
|
||||
<div class="text-sm font-medium">{n.title}</div>
|
||||
<div class="text-xs text-(--color-muted) mt-0.5">{n.message}</div>
|
||||
</a>
|
||||
{/each}
|
||||
{/if}
|
||||
<div class="absolute right-0 top-full mt-1 w-80 bg-(--color-surface-2) border border-(--color-border) rounded-lg shadow-xl z-50 flex flex-col max-h-[28rem]">
|
||||
<!-- Header -->
|
||||
<div class="flex items-center justify-between px-3 pt-3 pb-2 shrink-0">
|
||||
<span class="text-sm font-semibold">Notifications</span>
|
||||
<div class="flex items-center gap-1">
|
||||
{#if unreadCount > 0}
|
||||
<button
|
||||
type="button"
|
||||
onclick={markAllRead}
|
||||
class="text-xs text-(--color-muted) hover:text-(--color-text) transition-colors px-1.5 py-0.5 rounded hover:bg-(--color-surface-3)"
|
||||
>Mark all read</button>
|
||||
{/if}
|
||||
{#if notifications.length > 0}
|
||||
<button
|
||||
type="button"
|
||||
onclick={clearAllNotifications}
|
||||
class="text-xs text-(--color-muted) hover:text-red-400 transition-colors px-1.5 py-0.5 rounded hover:bg-(--color-surface-3)"
|
||||
>Clear all</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
<!-- Filter tabs -->
|
||||
<div class="flex gap-0 px-3 pb-2 shrink-0">
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => notifFilter = 'all'}
|
||||
class="text-xs px-2.5 py-1 rounded-l border border-(--color-border) transition-colors {notifFilter === 'all' ? 'bg-(--color-brand) text-black border-(--color-brand)' : 'text-(--color-muted) hover:text-(--color-text)'}"
|
||||
>All ({notifications.length})</button>
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => notifFilter = 'unread'}
|
||||
class="text-xs px-2.5 py-1 rounded-r border border-l-0 border-(--color-border) transition-colors {notifFilter === 'unread' ? 'bg-(--color-brand) text-black border-(--color-brand)' : 'text-(--color-muted) hover:text-(--color-text)'}"
|
||||
>Unread ({unreadCount})</button>
|
||||
</div>
|
||||
<!-- List -->
|
||||
<div class="overflow-y-auto flex-1 min-h-0">
|
||||
{#if filteredNotifications.length === 0}
|
||||
<div class="p-4 text-center text-(--color-muted) text-sm">
|
||||
{notifFilter === 'unread' ? 'No unread notifications' : 'No notifications'}
|
||||
</div>
|
||||
{:else}
|
||||
{#each filteredNotifications as n (n.id)}
|
||||
<div class="flex items-start gap-1 border-b border-(--color-border)/40 hover:bg-(--color-surface-3) group {n.read ? 'opacity-60' : ''}">
|
||||
<a
|
||||
href={n.link || '/admin'}
|
||||
onclick={() => { markRead(n.id); notificationsOpen = false; }}
|
||||
class="flex-1 p-3 min-w-0"
|
||||
>
|
||||
<div class="flex items-center gap-1.5">
|
||||
{#if !n.read}
|
||||
<span class="w-1.5 h-1.5 rounded-full bg-(--color-brand) shrink-0"></span>
|
||||
{/if}
|
||||
<span class="text-sm font-medium truncate">{n.title}</span>
|
||||
</div>
|
||||
<div class="text-xs text-(--color-muted) mt-0.5 line-clamp-2">{n.message}</div>
|
||||
</a>
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => dismissNotification(n.id)}
|
||||
class="shrink-0 p-2.5 text-(--color-muted) hover:text-red-400 opacity-0 group-hover:opacity-100 transition-all"
|
||||
title="Dismiss"
|
||||
>
|
||||
<svg class="w-3 h-3" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
{/each}
|
||||
{/if}
|
||||
</div>
|
||||
<!-- Footer -->
|
||||
<div class="px-3 py-2 border-t border-(--color-border)/40 shrink-0">
|
||||
<a
|
||||
href="/admin/notifications"
|
||||
onclick={() => notificationsOpen = false}
|
||||
class="block text-center text-xs text-(--color-muted) hover:text-(--color-brand) transition-colors"
|
||||
>View all notifications</a>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
@@ -28,21 +28,6 @@
|
||||
label: () => m.admin_nav_image_gen(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/audio',
|
||||
label: () => m.admin_nav_audio(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 19V6l12-3v13M9 19c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zm12-3c0 1.105-1.343 2-3 2s-3-.895-3-2 1.343-2 3-2 3 .895 3 2zM9 10l12-3" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/translation',
|
||||
label: () => m.admin_nav_translation(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M3 5h12M9 3v2m1.048 9.5A18.022 18.022 0 016.412 9m6.088 9h7M11 21l5-10 5 10M12.751 5C11.783 10.77 8.07 15.61 3 18.129" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/image-gen',
|
||||
label: () => m.admin_nav_image_gen(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/text-gen',
|
||||
label: () => m.admin_nav_text_gen(),
|
||||
@@ -53,6 +38,11 @@
|
||||
label: () => m.admin_nav_ai_jobs(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5H7a2 2 0 00-2 2v12a2 2 0 002 2h10a2 2 0 002-2V7a2 2 0 00-2-2h-2M9 5a2 2 0 002 2h2a2 2 0 002-2M9 5a2 2 0 012-2h2a2 2 0 012 2m-6 9l2 2 4-4" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/notifications',
|
||||
label: () => m.admin_nav_notifications(),
|
||||
icon: `<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9" />`
|
||||
},
|
||||
{
|
||||
href: '/admin/catalogue-tools',
|
||||
label: () => m.admin_nav_catalogue_tools(),
|
||||
|
||||
@@ -6,7 +6,8 @@ export type { AIJob };
|
||||
|
||||
export const load: PageServerLoad = async () => {
|
||||
// Parent layout already guards admin role.
|
||||
const jobs = await listAIJobs().catch((e): AIJob[] => {
|
||||
// Stream jobs so navigation is instant; list populates a moment later.
|
||||
const jobs = listAIJobs().catch((e): AIJob[] => {
|
||||
log.warn('admin/ai-jobs', 'failed to load ai jobs', { err: String(e) });
|
||||
return [];
|
||||
});
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
<script lang="ts">
|
||||
import { untrack } from 'svelte';
|
||||
import { invalidateAll } from '$app/navigation';
|
||||
import type { PageData } from './$types';
|
||||
import type { AIJob } from '$lib/server/pocketbase';
|
||||
@@ -8,11 +7,11 @@
|
||||
|
||||
let { data }: { data: PageData } = $props();
|
||||
|
||||
let jobs = $state<AIJob[]>(untrack(() => data.jobs));
|
||||
let jobs = $state<AIJob[]>([]);
|
||||
|
||||
// Keep in sync on server reloads
|
||||
// Resolve streamed promise on load and on server reloads (invalidateAll)
|
||||
$effect(() => {
|
||||
jobs = data.jobs;
|
||||
data.jobs.then((resolved) => { jobs = resolved; });
|
||||
});
|
||||
|
||||
// ── Live-poll while any job is in-flight ─────────────────────────────────────
|
||||
@@ -156,22 +155,24 @@
|
||||
};
|
||||
review = r;
|
||||
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
|
||||
let payload: { pattern?: string; slug?: string; results?: ProposedTitle[] } = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
let payload: { pattern?: string; slug?: string; results?: ProposedTitle[] } = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
|
||||
r.pattern = payload.pattern ?? '';
|
||||
r.titles = (payload.results ?? []).map((t: ProposedTitle) => ({ ...t }));
|
||||
r.loading = false;
|
||||
} catch (e) {
|
||||
r.loading = false;
|
||||
r.error = String(e);
|
||||
}
|
||||
} else if (job.kind === 'image-gen') {
|
||||
review = {
|
||||
...r,
|
||||
pattern: payload.pattern ?? '',
|
||||
titles: (payload.results ?? []).map((t: ProposedTitle) => ({ ...t })),
|
||||
loading: false
|
||||
};
|
||||
} catch (e) {
|
||||
review = { ...r, loading: false, error: String(e) };
|
||||
}
|
||||
} else if (job.kind === 'image-gen') {
|
||||
const r: ImageGenReview = {
|
||||
kind: 'image-gen',
|
||||
jobId: job.id,
|
||||
@@ -190,38 +191,40 @@
|
||||
};
|
||||
review = r;
|
||||
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
|
||||
let payload: {
|
||||
prompt?: string;
|
||||
type?: string;
|
||||
chapter?: number;
|
||||
content_type?: string;
|
||||
image_b64?: string;
|
||||
bytes?: number;
|
||||
} = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
let payload: {
|
||||
prompt?: string;
|
||||
type?: string;
|
||||
chapter?: number;
|
||||
content_type?: string;
|
||||
image_b64?: string;
|
||||
bytes?: number;
|
||||
} = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
|
||||
if (!payload.image_b64) {
|
||||
r.error = 'No image in job payload.';
|
||||
r.loading = false;
|
||||
return;
|
||||
}
|
||||
r.imageType = payload.type ?? 'cover';
|
||||
r.chapter = payload.chapter ?? 0;
|
||||
r.prompt = payload.prompt ?? '';
|
||||
r.contentType = payload.content_type ?? 'image/png';
|
||||
r.bytes = payload.bytes ?? 0;
|
||||
r.imageSrc = `data:${r.contentType};base64,${payload.image_b64}`;
|
||||
r.loading = false;
|
||||
} catch (e) {
|
||||
r.loading = false;
|
||||
r.error = String(e);
|
||||
if (!payload.image_b64) {
|
||||
review = { ...r, error: 'No image in job payload.', loading: false };
|
||||
return;
|
||||
}
|
||||
} else if (job.kind === 'description') {
|
||||
const contentType = payload.content_type ?? 'image/png';
|
||||
review = {
|
||||
...r,
|
||||
imageType: payload.type ?? 'cover',
|
||||
chapter: payload.chapter ?? 0,
|
||||
prompt: payload.prompt ?? '',
|
||||
contentType,
|
||||
bytes: payload.bytes ?? 0,
|
||||
imageSrc: `data:${contentType};base64,${payload.image_b64}`,
|
||||
loading: false
|
||||
};
|
||||
} catch (e) {
|
||||
review = { ...r, loading: false, error: String(e) };
|
||||
}
|
||||
} else if (job.kind === 'description') {
|
||||
const r: DescriptionReview = {
|
||||
kind: 'description',
|
||||
jobId: job.id,
|
||||
@@ -237,28 +240,30 @@
|
||||
};
|
||||
review = r;
|
||||
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
try {
|
||||
const res = await fetch(`/api/admin/ai-jobs/${job.id}`);
|
||||
if (!res.ok) throw new Error(`HTTP ${res.status}`);
|
||||
const data = await res.json();
|
||||
|
||||
let payload: {
|
||||
instructions?: string;
|
||||
old_description?: string;
|
||||
new_description?: string;
|
||||
} = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
let payload: {
|
||||
instructions?: string;
|
||||
old_description?: string;
|
||||
new_description?: string;
|
||||
} = {};
|
||||
try { payload = JSON.parse(data.payload ?? '{}'); } catch { /* ignore */ }
|
||||
|
||||
r.instructions = payload.instructions ?? '';
|
||||
r.oldDescription = payload.old_description ?? '';
|
||||
r.newDescription = payload.new_description ?? '';
|
||||
r.loading = false;
|
||||
} catch (e) {
|
||||
r.loading = false;
|
||||
r.error = String(e);
|
||||
}
|
||||
review = {
|
||||
...r,
|
||||
instructions: payload.instructions ?? '',
|
||||
oldDescription: payload.old_description ?? '',
|
||||
newDescription: payload.new_description ?? '',
|
||||
loading: false
|
||||
};
|
||||
} catch (e) {
|
||||
review = { ...r, loading: false, error: String(e) };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function closeReview() {
|
||||
review = null;
|
||||
|
||||
@@ -18,23 +18,27 @@ const CACHE_KEY = 'admin:changelog:releases';
|
||||
const CACHE_TTL = 5 * 60; // 5 minutes
|
||||
|
||||
export const load: PageServerLoad = async ({ fetch }) => {
|
||||
// Return cached data synchronously (no streaming needed — already fast).
|
||||
const cached = await cache.get<Release[]>(CACHE_KEY);
|
||||
if (cached) {
|
||||
return { releases: cached };
|
||||
return { releases: cached, error: undefined as string | undefined };
|
||||
}
|
||||
|
||||
try {
|
||||
const res = await fetch(GITEA_RELEASES_URL, {
|
||||
headers: { Accept: 'application/json' }
|
||||
});
|
||||
if (!res.ok) {
|
||||
return { releases: [], error: `Gitea API returned ${res.status}` };
|
||||
// Cache miss: stream the external Gitea request so navigation isn't blocked.
|
||||
const releasesPromise = (async () => {
|
||||
try {
|
||||
const res = await fetch(GITEA_RELEASES_URL, {
|
||||
headers: { Accept: 'application/json' }
|
||||
});
|
||||
if (!res.ok) return [] as Release[];
|
||||
const releases: Release[] = await res.json();
|
||||
const filtered = releases.filter((r) => !r.draft);
|
||||
await cache.set(CACHE_KEY, filtered, CACHE_TTL);
|
||||
return filtered;
|
||||
} catch {
|
||||
return [] as Release[];
|
||||
}
|
||||
const releases: Release[] = await res.json();
|
||||
const filtered = releases.filter((r) => !r.draft);
|
||||
await cache.set(CACHE_KEY, filtered, CACHE_TTL);
|
||||
return { releases: filtered };
|
||||
} catch (e) {
|
||||
return { releases: [], error: String(e) };
|
||||
}
|
||||
})();
|
||||
|
||||
return { releases: releasesPromise, error: undefined as string | undefined };
|
||||
};
|
||||
|
||||
@@ -32,29 +32,33 @@
|
||||
</a>
|
||||
</div>
|
||||
|
||||
{#if data.error}
|
||||
<p class="text-sm text-(--color-danger)">{m.admin_changelog_load_error({ error: data.error })}</p>
|
||||
{:else if data.releases.length === 0}
|
||||
<p class="text-sm text-(--color-muted) py-8 text-center">{m.admin_changelog_no_releases()}</p>
|
||||
{:else}
|
||||
<div class="space-y-0 divide-y divide-(--color-border) border border-(--color-border) rounded-xl overflow-hidden">
|
||||
{#each data.releases as release}
|
||||
<div class="px-5 py-4 bg-(--color-surface) space-y-2">
|
||||
<div class="flex items-baseline gap-3 flex-wrap">
|
||||
<span class="font-mono text-sm font-semibold text-(--color-brand)">{release.tag_name}</span>
|
||||
{#if release.name && release.name !== release.tag_name}
|
||||
<span class="text-sm text-(--color-text)">{release.name}</span>
|
||||
{#await data.releases}
|
||||
<p class="text-sm text-(--color-muted) py-8 text-center">Loading releases…</p>
|
||||
{:then releases}
|
||||
{#if releases.length === 0}
|
||||
<p class="text-sm text-(--color-muted) py-8 text-center">{m.admin_changelog_no_releases()}</p>
|
||||
{:else}
|
||||
<div class="space-y-0 divide-y divide-(--color-border) border border-(--color-border) rounded-xl overflow-hidden">
|
||||
{#each releases as release}
|
||||
<div class="px-5 py-4 bg-(--color-surface) space-y-2">
|
||||
<div class="flex items-baseline gap-3 flex-wrap">
|
||||
<span class="font-mono text-sm font-semibold text-(--color-brand)">{release.tag_name}</span>
|
||||
{#if release.name && release.name !== release.tag_name}
|
||||
<span class="text-sm text-(--color-text)">{release.name}</span>
|
||||
{/if}
|
||||
{#if release.prerelease}
|
||||
<span class="text-xs px-1.5 py-0.5 rounded bg-(--color-surface-3) text-(--color-muted)">pre-release</span>
|
||||
{/if}
|
||||
<span class="text-xs text-(--color-muted) ml-auto">{fmtDate(release.published_at)}</span>
|
||||
</div>
|
||||
{#if release.body.trim()}
|
||||
<p class="text-sm text-(--color-muted) leading-relaxed whitespace-pre-wrap">{release.body.trim()}</p>
|
||||
{/if}
|
||||
{#if release.prerelease}
|
||||
<span class="text-xs px-1.5 py-0.5 rounded bg-(--color-surface-3) text-(--color-muted)">pre-release</span>
|
||||
{/if}
|
||||
<span class="text-xs text-(--color-muted) ml-auto">{fmtDate(release.published_at)}</span>
|
||||
</div>
|
||||
{#if release.body.trim()}
|
||||
<p class="text-sm text-(--color-muted) leading-relaxed whitespace-pre-wrap">{release.body.trim()}</p>
|
||||
{/if}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
{:catch}
|
||||
<p class="text-sm text-(--color-danger)">{m.admin_changelog_load_error({ error: 'Failed to load releases' })}</p>
|
||||
{/await}
|
||||
</div>
|
||||
|
||||
@@ -20,23 +20,29 @@ export interface BookSummary {
|
||||
}
|
||||
|
||||
export const load: PageServerLoad = async () => {
|
||||
// parent layout already guards admin role
|
||||
const [models, booksResult] = await Promise.allSettled([
|
||||
listImageModels<ImageModelInfo>(),
|
||||
listBooks()
|
||||
]);
|
||||
// Await models immediately — the page is unusable without them and the
|
||||
// backend returns this list instantly (in-memory, no I/O).
|
||||
// Books are streamed: the page renders at once and the book selector
|
||||
// populates a moment later without blocking navigation.
|
||||
const modelsResult = await listImageModels<ImageModelInfo>().catch((e) => {
|
||||
log.warn('admin/image-gen', 'failed to load models', { err: String(e) });
|
||||
return [] as ImageModelInfo[];
|
||||
});
|
||||
|
||||
if (models.status === 'rejected') {
|
||||
log.warn('admin/image-gen', 'failed to load models', { err: String(models.reason) });
|
||||
}
|
||||
const booksPromise = listBooks()
|
||||
.then((all) =>
|
||||
all.map((b) => ({
|
||||
slug: b.slug,
|
||||
title: b.title,
|
||||
summary: b.summary ?? '',
|
||||
cover: b.cover ?? ''
|
||||
})) as BookSummary[]
|
||||
)
|
||||
.catch(() => [] as BookSummary[]);
|
||||
|
||||
return {
|
||||
models: models.status === 'fulfilled' ? models.value : ([] as ImageModelInfo[]),
|
||||
books: (booksResult.status === 'fulfilled' ? booksResult.value : []).map((b) => ({
|
||||
slug: b.slug,
|
||||
title: b.title,
|
||||
summary: b.summary ?? '',
|
||||
cover: b.cover ?? ''
|
||||
})) as BookSummary[]
|
||||
models: modelsResult,
|
||||
// Streamed — SvelteKit resolves this after the initial HTML is sent.
|
||||
books: booksPromise
|
||||
};
|
||||
};
|
||||
|
||||
@@ -62,8 +62,11 @@
|
||||
});
|
||||
|
||||
// ── Book autocomplete ────────────────────────────────────────────────────────
|
||||
// svelte-ignore state_referenced_locally
|
||||
const books: BookSummary[] = data.books ?? [];
|
||||
// Books arrive as a streamed promise — start empty and populate on resolve.
|
||||
let books = $state<BookSummary[]>([]);
|
||||
$effect(() => {
|
||||
data.books.then((resolved) => { books = resolved; });
|
||||
});
|
||||
let slugInput = $state('');
|
||||
let slugFocused = $state(false);
|
||||
let selectedBook = $state<BookSummary | null>(null);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
<script lang="ts">
|
||||
import { onMount } from 'svelte';
|
||||
import { goto } from '$app/navigation';
|
||||
|
||||
interface ImportTask {
|
||||
id: string;
|
||||
@@ -7,6 +8,11 @@
|
||||
title: string;
|
||||
file_name: string;
|
||||
file_type: string;
|
||||
author: string;
|
||||
cover_url: string;
|
||||
genres: string[];
|
||||
summary: string;
|
||||
book_status: string;
|
||||
status: string;
|
||||
chapters_done: number;
|
||||
chapters_total: number;
|
||||
@@ -18,17 +24,35 @@
|
||||
interface PendingImport {
|
||||
file: File;
|
||||
title: string;
|
||||
author: string;
|
||||
coverUrl: string;
|
||||
genres: string;
|
||||
summary: string;
|
||||
bookStatus: string;
|
||||
preview: { chapters: number; firstLines: string[] };
|
||||
}
|
||||
|
||||
let tasks = $state<ImportTask[]>([]);
|
||||
let loading = $state(true);
|
||||
let uploading = $state(false);
|
||||
let title = $state('');
|
||||
let error = $state('');
|
||||
let selectedFile = $state<File | null>(null);
|
||||
let pendingImport = $state<PendingImport | null>(null);
|
||||
let analyzing = $state(false);
|
||||
let error = $state('');
|
||||
|
||||
// Form fields
|
||||
let selectedFile = $state<File | null>(null);
|
||||
let title = $state('');
|
||||
let author = $state('');
|
||||
let coverUrl = $state('');
|
||||
let genres = $state('');
|
||||
let summary = $state('');
|
||||
let bookStatus = $state('completed');
|
||||
|
||||
let pendingImport = $state<PendingImport | null>(null);
|
||||
|
||||
// AI panel: slug of recently completed import
|
||||
let aiSlug = $state('');
|
||||
let aiTitle = $state('');
|
||||
let showAiPanel = $state(false);
|
||||
|
||||
async function loadTasks() {
|
||||
loading = true;
|
||||
@@ -45,7 +69,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
async function handleFileSelect(e: Event) {
|
||||
function handleFileSelect(e: Event) {
|
||||
const input = e.target as HTMLInputElement;
|
||||
if (!input.files?.length) return;
|
||||
const file = input.files[0];
|
||||
@@ -54,8 +78,12 @@
|
||||
error = 'Please select a PDF or EPUB file';
|
||||
return;
|
||||
}
|
||||
error = '';
|
||||
selectedFile = file;
|
||||
title = file.name.replace(/\.(pdf|epub)$/i, '').replace(/[-_]/g, ' ');
|
||||
// Auto-fill title from filename if empty
|
||||
if (!title.trim()) {
|
||||
title = file.name.replace(/\.(pdf|epub)$/i, '').replace(/[-_]/g, ' ');
|
||||
}
|
||||
}
|
||||
|
||||
async function analyzeFile() {
|
||||
@@ -65,24 +93,26 @@
|
||||
try {
|
||||
const formData = new FormData();
|
||||
formData.append('file', selectedFile);
|
||||
formData.append('title', title);
|
||||
formData.append('title', title.trim());
|
||||
formData.append('analyze', 'true');
|
||||
const res = await fetch('/api/admin/import', {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
const res = await fetch('/api/admin/import', { method: 'POST', body: formData });
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
pendingImport = {
|
||||
file: selectedFile,
|
||||
title: title,
|
||||
title: title.trim(),
|
||||
author: author.trim(),
|
||||
coverUrl: coverUrl.trim(),
|
||||
genres: genres.trim(),
|
||||
summary: summary.trim(),
|
||||
bookStatus,
|
||||
preview: data.preview || { chapters: 0, firstLines: [] }
|
||||
};
|
||||
} else {
|
||||
const d = await res.json().catch(() => ({}));
|
||||
error = d.error || 'Failed to analyze file';
|
||||
}
|
||||
} catch (e) {
|
||||
} catch {
|
||||
error = 'Failed to analyze file';
|
||||
} finally {
|
||||
analyzing = false;
|
||||
@@ -97,20 +127,36 @@
|
||||
const formData = new FormData();
|
||||
formData.append('file', pendingImport.file);
|
||||
formData.append('title', pendingImport.title);
|
||||
const res = await fetch('/api/admin/import', {
|
||||
method: 'POST',
|
||||
body: formData
|
||||
});
|
||||
formData.append('author', pendingImport.author);
|
||||
formData.append('cover_url', pendingImport.coverUrl);
|
||||
formData.append('genres', pendingImport.genres);
|
||||
formData.append('summary', pendingImport.summary);
|
||||
formData.append('book_status', pendingImport.bookStatus);
|
||||
const res = await fetch('/api/admin/import', { method: 'POST', body: formData });
|
||||
if (res.ok) {
|
||||
const data = await res.json();
|
||||
// Save for AI panel before clearing state
|
||||
const importedSlug = data.slug || '';
|
||||
const importedTitle = pendingImport.title;
|
||||
// Reset form
|
||||
pendingImport = null;
|
||||
selectedFile = null;
|
||||
title = '';
|
||||
author = '';
|
||||
coverUrl = '';
|
||||
genres = '';
|
||||
summary = '';
|
||||
bookStatus = 'completed';
|
||||
// Show AI panel for this slug
|
||||
aiSlug = importedSlug;
|
||||
aiTitle = importedTitle;
|
||||
showAiPanel = !!aiSlug;
|
||||
await loadTasks();
|
||||
} else {
|
||||
const d = await res.json().catch(() => ({}));
|
||||
error = d.error || 'Import failed';
|
||||
}
|
||||
} catch (e) {
|
||||
} catch {
|
||||
error = 'Import failed';
|
||||
} finally {
|
||||
uploading = false;
|
||||
@@ -126,149 +172,308 @@
|
||||
return new Date(dateStr).toLocaleString();
|
||||
}
|
||||
|
||||
function getStatusColor(status: string) {
|
||||
function statusColor(status: string) {
|
||||
switch (status) {
|
||||
case 'pending': return 'text-yellow-400';
|
||||
case 'running': return 'text-blue-400';
|
||||
case 'done': return 'text-green-400';
|
||||
case 'failed': return 'text-red-400';
|
||||
default: return 'text-gray-400';
|
||||
default: return 'text-(--color-muted)';
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
loadTasks();
|
||||
onMount(() => { loadTasks(); });
|
||||
|
||||
// Poll every 3s while any task is active
|
||||
$effect(() => {
|
||||
const hasActive = tasks.some((t) => t.status === 'running' || t.status === 'pending');
|
||||
if (!hasActive) return;
|
||||
const timer = setInterval(() => { loadTasks(); }, 3000);
|
||||
return () => clearInterval(timer);
|
||||
});
|
||||
|
||||
// Poll every 3s while any task is running
|
||||
// When a running task finishes, surface the AI panel for it
|
||||
$effect(() => {
|
||||
const hasRunning = tasks.some((t) => t.status === 'running' || t.status === 'pending');
|
||||
if (!hasRunning) return;
|
||||
const timer = setInterval(() => {
|
||||
loadTasks();
|
||||
}, 3000);
|
||||
return () => clearInterval(timer);
|
||||
if (!showAiPanel) {
|
||||
const done = tasks.find((t) => t.status === 'done');
|
||||
if (done && !aiSlug) {
|
||||
aiSlug = done.slug;
|
||||
aiTitle = done.title;
|
||||
showAiPanel = true;
|
||||
}
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<div class="max-w-4xl">
|
||||
<h1 class="text-2xl font-bold mb-6">Import PDF/EPUB</h1>
|
||||
<div class="max-w-3xl space-y-8">
|
||||
<h1 class="text-2xl font-bold">Import PDF/EPUB</h1>
|
||||
|
||||
{#if pendingImport}
|
||||
<!-- Review Step -->
|
||||
<div class="mb-8 p-6 bg-(--color-surface-2) rounded-lg border border-(--color-brand)/30">
|
||||
<h2 class="text-lg font-semibold mb-4">Review Import</h2>
|
||||
<div class="space-y-3 mb-6">
|
||||
<div class="flex justify-between">
|
||||
<span class="text-(--color-muted)">Title:</span>
|
||||
<span class="font-medium">{pendingImport.title}</span>
|
||||
<!-- ── Review step ── -->
|
||||
<div class="p-6 bg-(--color-surface-2) rounded-lg border border-(--color-brand)/30 space-y-4">
|
||||
<h2 class="text-lg font-semibold">Review Import</h2>
|
||||
<dl class="space-y-2 text-sm">
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Title</dt>
|
||||
<dd class="font-medium text-right">{pendingImport.title}</dd>
|
||||
</div>
|
||||
<div class="flex justify-between">
|
||||
<span class="text-(--color-muted)">File:</span>
|
||||
<span>{pendingImport.file.name}</span>
|
||||
</div>
|
||||
<div class="flex justify-between">
|
||||
<span class="text-(--color-muted)">Size:</span>
|
||||
<span>{(pendingImport.file.size / 1024 / 1024).toFixed(2)} MB</span>
|
||||
</div>
|
||||
{#if pendingImport.preview.chapters > 0}
|
||||
<div class="flex justify-between">
|
||||
<span class="text-(--color-muted)">Detected chapters:</span>
|
||||
<span class="text-green-400">{pendingImport.preview.chapters}</span>
|
||||
{#if pendingImport.author}
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Author</dt>
|
||||
<dd class="text-right">{pendingImport.author}</dd>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="flex gap-3">
|
||||
{#if pendingImport.genres}
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Genres</dt>
|
||||
<dd class="text-right">{pendingImport.genres}</dd>
|
||||
</div>
|
||||
{/if}
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Status</dt>
|
||||
<dd class="capitalize text-right">{pendingImport.bookStatus}</dd>
|
||||
</div>
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">File</dt>
|
||||
<dd class="text-right truncate max-w-xs">{pendingImport.file.name}</dd>
|
||||
</div>
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Size</dt>
|
||||
<dd>{(pendingImport.file.size / 1024 / 1024).toFixed(2)} MB</dd>
|
||||
</div>
|
||||
{#if pendingImport.preview.chapters > 0}
|
||||
<div class="flex justify-between gap-4">
|
||||
<dt class="text-(--color-muted) shrink-0">Detected chapters</dt>
|
||||
<dd class="text-green-400 font-semibold">{pendingImport.preview.chapters}</dd>
|
||||
</div>
|
||||
{/if}
|
||||
</dl>
|
||||
{#if pendingImport.preview.firstLines?.length}
|
||||
<div class="mt-2 space-y-1">
|
||||
<p class="text-xs text-(--color-muted) mb-1">First lines preview:</p>
|
||||
{#each pendingImport.preview.firstLines as line}
|
||||
<p class="text-xs text-(--color-muted) italic truncate">{line}</p>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
<div class="flex gap-3 pt-2">
|
||||
<button
|
||||
onclick={startImport}
|
||||
disabled={uploading}
|
||||
class="px-4 py-2 bg-green-600 text-white rounded font-medium disabled:opacity-50"
|
||||
class="px-4 py-2 bg-green-600 hover:bg-green-500 text-white rounded font-medium disabled:opacity-50 transition-colors"
|
||||
>
|
||||
{uploading ? 'Starting...' : 'Start Import'}
|
||||
{uploading ? 'Starting…' : 'Start Import'}
|
||||
</button>
|
||||
<button
|
||||
onclick={cancelReview}
|
||||
class="px-4 py-2 border border-(--color-border) rounded font-medium"
|
||||
class="px-4 py-2 border border-(--color-border) rounded font-medium hover:bg-(--color-surface-3) transition-colors"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{:else}
|
||||
<!-- Upload Form -->
|
||||
<form onsubmit={(e) => { e.preventDefault(); analyzeFile(); }} class="mb-8 p-4 bg-(--color-surface-2) rounded-lg">
|
||||
<div class="mb-4">
|
||||
<label for="import-file" class="block text-sm font-medium mb-2">Select File (PDF or EPUB)</label>
|
||||
<!-- ── Upload form ── -->
|
||||
<form
|
||||
onsubmit={(e) => { e.preventDefault(); analyzeFile(); }}
|
||||
class="p-6 bg-(--color-surface-2) rounded-lg space-y-4"
|
||||
>
|
||||
<!-- File picker -->
|
||||
<div>
|
||||
<label for="import-file" class="block text-sm font-medium mb-1">File (PDF or EPUB)</label>
|
||||
<input
|
||||
id="import-file"
|
||||
type="file"
|
||||
accept=".pdf,.epub"
|
||||
onchange={handleFileSelect}
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text)"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
/>
|
||||
</div>
|
||||
<div class="mb-4">
|
||||
<label for="import-title" class="block text-sm font-medium mb-2">Book Title</label>
|
||||
|
||||
<!-- Title -->
|
||||
<div>
|
||||
<label for="import-title" class="block text-sm font-medium mb-1">Title <span class="text-red-400">*</span></label>
|
||||
<input
|
||||
id="import-title"
|
||||
type="text"
|
||||
bind:value={title}
|
||||
placeholder="Enter book title"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text)"
|
||||
placeholder="Book title"
|
||||
required
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Author -->
|
||||
<div>
|
||||
<label for="import-author" class="block text-sm font-medium mb-1">Author</label>
|
||||
<input
|
||||
id="import-author"
|
||||
type="text"
|
||||
bind:value={author}
|
||||
placeholder="Author name"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Cover URL -->
|
||||
<div>
|
||||
<label for="import-cover" class="block text-sm font-medium mb-1">Cover image URL</label>
|
||||
<input
|
||||
id="import-cover"
|
||||
type="url"
|
||||
bind:value={coverUrl}
|
||||
placeholder="https://…"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Genres -->
|
||||
<div>
|
||||
<label for="import-genres" class="block text-sm font-medium mb-1">Genres <span class="text-xs text-(--color-muted)">(comma-separated)</span></label>
|
||||
<input
|
||||
id="import-genres"
|
||||
type="text"
|
||||
bind:value={genres}
|
||||
placeholder="Fantasy, Action, Romance"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<!-- Summary -->
|
||||
<div>
|
||||
<label for="import-summary" class="block text-sm font-medium mb-1">Summary</label>
|
||||
<textarea
|
||||
id="import-summary"
|
||||
bind:value={summary}
|
||||
rows={3}
|
||||
placeholder="Short description of the book…"
|
||||
class="w-full px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm resize-y"
|
||||
></textarea>
|
||||
</div>
|
||||
|
||||
<!-- Status -->
|
||||
<div>
|
||||
<label for="import-status" class="block text-sm font-medium mb-1">Book status</label>
|
||||
<select
|
||||
id="import-status"
|
||||
bind:value={bookStatus}
|
||||
class="px-3 py-2 rounded bg-(--color-surface) border border-(--color-border) text-(--color-text) text-sm"
|
||||
>
|
||||
<option value="completed">Completed</option>
|
||||
<option value="ongoing">Ongoing</option>
|
||||
<option value="hiatus">Hiatus</option>
|
||||
</select>
|
||||
</div>
|
||||
|
||||
{#if error}
|
||||
<p class="mb-4 text-sm text-red-400">{error}</p>
|
||||
<p class="text-sm text-red-400">{error}</p>
|
||||
{/if}
|
||||
|
||||
<button
|
||||
type="submit"
|
||||
disabled={analyzing || !selectedFile || !title.trim()}
|
||||
class="px-4 py-2 bg-(--color-brand) text-(--color-surface) rounded font-medium disabled:opacity-50"
|
||||
class="px-5 py-2 bg-(--color-brand) text-(--color-surface) rounded font-semibold disabled:opacity-50 hover:brightness-110 transition-all"
|
||||
>
|
||||
{analyzing ? 'Analyzing...' : 'Review & Import'}
|
||||
{analyzing ? 'Analyzing…' : 'Review & Import'}
|
||||
</button>
|
||||
<p class="mt-2 text-xs text-(--color-muted)">
|
||||
Select a file to preview chapter count before importing.
|
||||
</p>
|
||||
<p class="text-xs text-(--color-muted)">Detects chapter structure before committing.</p>
|
||||
</form>
|
||||
{/if}
|
||||
|
||||
<!-- Task List -->
|
||||
<h2 class="text-lg font-semibold mb-4">Import Tasks</h2>
|
||||
|
||||
{#if loading}
|
||||
<p class="text-(--color-muted)">Loading...</p>
|
||||
{:else if tasks.length === 0}
|
||||
<p class="text-(--color-muted)">No import tasks yet.</p>
|
||||
{:else}
|
||||
<div class="overflow-x-auto">
|
||||
<table class="w-full text-sm">
|
||||
<thead>
|
||||
<tr class="text-left text-(--color-muted) border-b border-(--color-border)">
|
||||
<th class="pb-2">Title</th>
|
||||
<th class="pb-2">Type</th>
|
||||
<th class="pb-2">Status</th>
|
||||
<th class="pb-2">Chapters</th>
|
||||
<th class="pb-2">Started</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each tasks as task}
|
||||
<tr class="border-b border-(--color-border)/50">
|
||||
<td class="py-2">
|
||||
<div class="font-medium">{task.title}</div>
|
||||
<div class="text-xs text-(--color-muted)">{task.slug}</div>
|
||||
</td>
|
||||
<td class="py-2 uppercase text-xs">{task.file_type}</td>
|
||||
<td class="py-2 {getStatusColor(task.status)}">{task.status}</td>
|
||||
<td class="py-2 text-(--color-muted)">
|
||||
{task.chapters_done}/{task.chapters_total}
|
||||
</td>
|
||||
<td class="py-2 text-(--color-muted)">{formatDate(task.started)}</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
<!-- ── AI Tasks panel (shown after successful import) ── -->
|
||||
{#if showAiPanel && aiSlug}
|
||||
<div class="p-5 bg-(--color-surface-2) rounded-lg border border-(--color-brand)/20 space-y-3">
|
||||
<div class="flex items-center justify-between">
|
||||
<h2 class="text-base font-semibold">AI Tasks for <span class="text-(--color-brand)">{aiTitle || aiSlug}</span></h2>
|
||||
<button
|
||||
onclick={() => { showAiPanel = false; }}
|
||||
class="text-(--color-muted) hover:text-(--color-text) text-lg leading-none"
|
||||
aria-label="Dismiss"
|
||||
>×</button>
|
||||
</div>
|
||||
<p class="text-sm text-(--color-muted)">Run AI tasks on the imported book to enrich it:</p>
|
||||
<div class="flex flex-wrap gap-2">
|
||||
<a
|
||||
href="/admin/text-gen?slug={aiSlug}&tab=chapters"
|
||||
class="px-3 py-1.5 text-sm rounded bg-(--color-surface-3) hover:bg-(--color-brand)/20 border border-(--color-border) transition-colors"
|
||||
>
|
||||
Generate chapter names
|
||||
</a>
|
||||
<a
|
||||
href="/admin/text-gen?slug={aiSlug}&tab=description"
|
||||
class="px-3 py-1.5 text-sm rounded bg-(--color-surface-3) hover:bg-(--color-brand)/20 border border-(--color-border) transition-colors"
|
||||
>
|
||||
Generate description
|
||||
</a>
|
||||
<a
|
||||
href="/admin/image-gen?slug={aiSlug}"
|
||||
class="px-3 py-1.5 text-sm rounded bg-(--color-surface-3) hover:bg-(--color-brand)/20 border border-(--color-border) transition-colors"
|
||||
>
|
||||
Generate cover image
|
||||
</a>
|
||||
<a
|
||||
href="/admin/text-gen?slug={aiSlug}&tab=tagline"
|
||||
class="px-3 py-1.5 text-sm rounded bg-(--color-surface-3) hover:bg-(--color-brand)/20 border border-(--color-border) transition-colors"
|
||||
>
|
||||
Generate tagline
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<!-- ── Task list ── -->
|
||||
<div>
|
||||
<h2 class="text-lg font-semibold mb-3">Import Tasks</h2>
|
||||
|
||||
{#if loading}
|
||||
<p class="text-(--color-muted) text-sm">Loading…</p>
|
||||
{:else if tasks.length === 0}
|
||||
<p class="text-(--color-muted) text-sm">No import tasks yet.</p>
|
||||
{:else}
|
||||
<div class="overflow-x-auto rounded-lg border border-(--color-border)">
|
||||
<table class="w-full text-sm">
|
||||
<thead>
|
||||
<tr class="text-left text-(--color-muted) border-b border-(--color-border) bg-(--color-surface-2)">
|
||||
<th class="px-3 py-2 font-medium">Title</th>
|
||||
<th class="px-3 py-2 font-medium">Type</th>
|
||||
<th class="px-3 py-2 font-medium">Status</th>
|
||||
<th class="px-3 py-2 font-medium">Chapters</th>
|
||||
<th class="px-3 py-2 font-medium">Started</th>
|
||||
<th class="px-3 py-2 font-medium">AI</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{#each tasks as task}
|
||||
<tr class="border-b border-(--color-border)/50 hover:bg-(--color-surface-2)/50">
|
||||
<td class="px-3 py-2">
|
||||
<div class="font-medium">{task.title}</div>
|
||||
<div class="text-xs text-(--color-muted)">{task.slug}</div>
|
||||
{#if task.error_message}
|
||||
<div class="text-xs text-red-400 mt-0.5 truncate max-w-xs" title={task.error_message}>{task.error_message}</div>
|
||||
{/if}
|
||||
</td>
|
||||
<td class="px-3 py-2 uppercase text-xs">{task.file_type}</td>
|
||||
<td class="px-3 py-2 {statusColor(task.status)} font-medium">{task.status}</td>
|
||||
<td class="px-3 py-2 text-(--color-muted)">
|
||||
{task.chapters_done}/{task.chapters_total}
|
||||
</td>
|
||||
<td class="px-3 py-2 text-(--color-muted) text-xs whitespace-nowrap">{formatDate(task.started)}</td>
|
||||
<td class="px-3 py-2">
|
||||
{#if task.status === 'done'}
|
||||
<button
|
||||
onclick={() => { aiSlug = task.slug; aiTitle = task.title; showAiPanel = true; }}
|
||||
class="text-xs px-2 py-1 rounded bg-(--color-brand)/20 hover:bg-(--color-brand)/40 text-(--color-brand) transition-colors"
|
||||
>
|
||||
AI tasks
|
||||
</button>
|
||||
{/if}
|
||||
</td>
|
||||
</tr>
|
||||
{/each}
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
16
ui/src/routes/admin/notifications/+page.server.ts
Normal file
16
ui/src/routes/admin/notifications/+page.server.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import type { PageServerLoad } from './$types';
|
||||
import { backendFetch } from '$lib/server/scraper';
|
||||
|
||||
export const load: PageServerLoad = async ({ locals }) => {
|
||||
const userId = locals.user!.id;
|
||||
try {
|
||||
const res = await backendFetch('/api/notifications?user_id=' + userId);
|
||||
const data = await res.json().catch(() => ({ notifications: [] }));
|
||||
return {
|
||||
userId,
|
||||
notifications: (data.notifications ?? []) as Array<{id: string; title: string; message: string; link: string; read: boolean}>
|
||||
};
|
||||
} catch {
|
||||
return { userId, notifications: [] };
|
||||
}
|
||||
};
|
||||
127
ui/src/routes/admin/notifications/+page.svelte
Normal file
127
ui/src/routes/admin/notifications/+page.svelte
Normal file
@@ -0,0 +1,127 @@
|
||||
<script lang="ts">
|
||||
import type { PageData } from './$types';
|
||||
|
||||
let { data }: { data: PageData } = $props();
|
||||
|
||||
type Notification = { id: string; title: string; message: string; link: string; read: boolean };
|
||||
|
||||
let notifications = $state<Notification[]>(data.notifications);
|
||||
let filter = $state<'all' | 'unread'>('all');
|
||||
let busy = $state(false);
|
||||
|
||||
const filtered = $derived(
|
||||
filter === 'unread' ? notifications.filter(n => !n.read) : notifications
|
||||
);
|
||||
const unreadCount = $derived(notifications.filter(n => !n.read).length);
|
||||
|
||||
async function markRead(id: string) {
|
||||
await fetch('/api/notifications/' + id, { method: 'PATCH' }).catch(() => {});
|
||||
notifications = notifications.map(n => n.id === id ? { ...n, read: true } : n);
|
||||
}
|
||||
|
||||
async function dismiss(id: string) {
|
||||
await fetch('/api/notifications/' + id, { method: 'DELETE' }).catch(() => {});
|
||||
notifications = notifications.filter(n => n.id !== id);
|
||||
}
|
||||
|
||||
async function markAllRead() {
|
||||
busy = true;
|
||||
try {
|
||||
await fetch('/api/notifications?user_id=' + data.userId, { method: 'PATCH' });
|
||||
notifications = notifications.map(n => ({ ...n, read: true }));
|
||||
} finally { busy = false; }
|
||||
}
|
||||
|
||||
async function clearAll() {
|
||||
if (!confirm('Clear all notifications?')) return;
|
||||
busy = true;
|
||||
try {
|
||||
await fetch('/api/notifications?user_id=' + data.userId, { method: 'DELETE' });
|
||||
notifications = [];
|
||||
} finally { busy = false; }
|
||||
}
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>Notifications — Admin</title>
|
||||
</svelte:head>
|
||||
|
||||
<div class="max-w-2xl mx-auto px-4 py-8">
|
||||
<div class="flex items-center justify-between mb-6">
|
||||
<div>
|
||||
<h1 class="text-xl font-semibold">Notifications</h1>
|
||||
{#if unreadCount > 0}
|
||||
<p class="text-sm text-(--color-muted) mt-0.5">{unreadCount} unread</p>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="flex gap-2">
|
||||
{#if unreadCount > 0}
|
||||
<button
|
||||
type="button"
|
||||
onclick={markAllRead}
|
||||
disabled={busy}
|
||||
class="text-sm px-3 py-1.5 rounded border border-(--color-border) text-(--color-muted) hover:text-(--color-text) hover:bg-(--color-surface-2) transition-colors disabled:opacity-50"
|
||||
>Mark all read</button>
|
||||
{/if}
|
||||
{#if notifications.length > 0}
|
||||
<button
|
||||
type="button"
|
||||
onclick={clearAll}
|
||||
disabled={busy}
|
||||
class="text-sm px-3 py-1.5 rounded border border-(--color-border) text-red-400 hover:bg-(--color-surface-2) transition-colors disabled:opacity-50"
|
||||
>Clear all</button>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Filter tabs -->
|
||||
<div class="flex gap-0 mb-4">
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => filter = 'all'}
|
||||
class="text-sm px-4 py-1.5 rounded-l border border-(--color-border) transition-colors {filter === 'all' ? 'bg-(--color-brand) text-black border-(--color-brand) font-medium' : 'text-(--color-muted) hover:text-(--color-text) hover:bg-(--color-surface-2)'}"
|
||||
>All ({notifications.length})</button>
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => filter = 'unread'}
|
||||
class="text-sm px-4 py-1.5 rounded-r border border-l-0 border-(--color-border) transition-colors {filter === 'unread' ? 'bg-(--color-brand) text-black border-(--color-brand) font-medium' : 'text-(--color-muted) hover:text-(--color-text) hover:bg-(--color-surface-2)'}"
|
||||
>Unread ({unreadCount})</button>
|
||||
</div>
|
||||
|
||||
<!-- List -->
|
||||
{#if filtered.length === 0}
|
||||
<div class="py-16 text-center text-(--color-muted)">
|
||||
{filter === 'unread' ? 'No unread notifications' : 'No notifications'}
|
||||
</div>
|
||||
{:else}
|
||||
<div class="rounded-lg border border-(--color-border) overflow-hidden">
|
||||
{#each filtered as n (n.id)}
|
||||
<div class="flex items-start gap-2 border-b border-(--color-border)/40 last:border-b-0 hover:bg-(--color-surface-2) group transition-colors {n.read ? 'opacity-60' : ''}">
|
||||
<a
|
||||
href={n.link || '/admin'}
|
||||
onclick={() => markRead(n.id)}
|
||||
class="flex-1 p-4 min-w-0"
|
||||
>
|
||||
<div class="flex items-center gap-2">
|
||||
{#if !n.read}
|
||||
<span class="w-2 h-2 rounded-full bg-(--color-brand) shrink-0"></span>
|
||||
{/if}
|
||||
<span class="font-medium text-sm">{n.title}</span>
|
||||
</div>
|
||||
<p class="text-sm text-(--color-muted) mt-1">{n.message}</p>
|
||||
</a>
|
||||
<button
|
||||
type="button"
|
||||
onclick={() => dismiss(n.id)}
|
||||
class="shrink-0 p-3 text-(--color-muted) hover:text-red-400 opacity-0 group-hover:opacity-100 transition-all"
|
||||
title="Dismiss"
|
||||
>
|
||||
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M6 18L18 6M6 6l12 12"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
@@ -17,21 +17,23 @@ export interface TextModelInfo {
|
||||
}
|
||||
|
||||
export const load: PageServerLoad = async () => {
|
||||
// Parent layout already guards admin role.
|
||||
const [models, booksResult] = await Promise.allSettled([
|
||||
listTextModels<TextModelInfo>(),
|
||||
listBooks()
|
||||
]);
|
||||
// Await models immediately — in-memory list, no I/O, returns instantly.
|
||||
// Books are streamed so the page renders at once and the selector
|
||||
// populates a moment later without blocking navigation.
|
||||
const modelsResult = await listTextModels<TextModelInfo>().catch((e) => {
|
||||
log.warn('admin/text-gen', 'failed to load models', { err: String(e) });
|
||||
return [] as TextModelInfo[];
|
||||
});
|
||||
|
||||
if (models.status === 'rejected') {
|
||||
log.warn('admin/text-gen', 'failed to load models', { err: String(models.reason) });
|
||||
}
|
||||
const booksPromise = listBooks()
|
||||
.then((all) =>
|
||||
all.map((b) => ({ slug: b.slug, title: b.title })) as BookSummary[]
|
||||
)
|
||||
.catch(() => [] as BookSummary[]);
|
||||
|
||||
return {
|
||||
models: models.status === 'fulfilled' ? models.value : ([] as TextModelInfo[]),
|
||||
books: (booksResult.status === 'fulfilled' ? booksResult.value : []).map((b) => ({
|
||||
slug: b.slug,
|
||||
title: b.title
|
||||
})) as BookSummary[]
|
||||
models: modelsResult,
|
||||
// Streamed — SvelteKit resolves this after the initial HTML is sent.
|
||||
books: booksPromise
|
||||
};
|
||||
};
|
||||
|
||||
@@ -9,8 +9,11 @@
|
||||
// Server data is static per page load — intentional one-time snapshot.
|
||||
// svelte-ignore state_referenced_locally
|
||||
const models: TextModelInfo[] = data.models ?? [];
|
||||
// svelte-ignore state_referenced_locally
|
||||
const books: BookSummary[] = data.books ?? [];
|
||||
// Books arrive as a streamed promise — start empty and populate on resolve.
|
||||
let books = $state<BookSummary[]>([]);
|
||||
$effect(() => {
|
||||
data.books.then((resolved) => { books = resolved; });
|
||||
});
|
||||
|
||||
// ── Config persistence ───────────────────────────────────────────────────────
|
||||
const CONFIG_KEY = 'admin_text_gen_config_v2';
|
||||
|
||||
@@ -9,16 +9,18 @@ export const load: PageServerLoad = async ({ locals }) => {
|
||||
redirect(302, '/');
|
||||
}
|
||||
|
||||
const [books, jobs] = await Promise.all([
|
||||
listBookSlugs().catch((e): Awaited<ReturnType<typeof listBookSlugs>> => {
|
||||
log.warn('admin/translation', 'failed to load book slugs', { err: String(e) });
|
||||
return [];
|
||||
}),
|
||||
listTranslationJobs().catch((e): TranslationJob[] => {
|
||||
log.warn('admin/translation', 'failed to load translation jobs', { err: String(e) });
|
||||
return [];
|
||||
})
|
||||
]);
|
||||
// Stream jobs — navigation is instant, list populates shortly after.
|
||||
const jobs = listTranslationJobs().catch((e): TranslationJob[] => {
|
||||
log.warn('admin/translation', 'failed to load translation jobs', { err: String(e) });
|
||||
return [];
|
||||
});
|
||||
|
||||
// Books list is needed immediately for the enqueue form, but use cache so
|
||||
// it's fast on repeat visits.
|
||||
const books = await listBookSlugs().catch((e): Awaited<ReturnType<typeof listBookSlugs>> => {
|
||||
log.warn('admin/translation', 'failed to load book slugs', { err: String(e) });
|
||||
return [];
|
||||
});
|
||||
|
||||
return { books, jobs };
|
||||
};
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
<script lang="ts">
|
||||
import { untrack } from 'svelte';
|
||||
import { enhance } from '$app/forms';
|
||||
import type { PageData, ActionData } from './$types';
|
||||
import type { TranslationJob } from '$lib/server/pocketbase';
|
||||
@@ -7,11 +6,11 @@
|
||||
|
||||
let { data, form }: { data: PageData; form: ActionData } = $props();
|
||||
|
||||
let jobs = $state<TranslationJob[]>(untrack(() => data.jobs));
|
||||
let jobs = $state<TranslationJob[]>([]);
|
||||
|
||||
// Keep in sync on server reloads
|
||||
// Resolve streamed promise; re-runs on server reloads (invalidateAll)
|
||||
$effect(() => {
|
||||
jobs = data.jobs;
|
||||
Promise.resolve(data.jobs).then((resolved) => { jobs = resolved; });
|
||||
});
|
||||
|
||||
// ── Live-poll while any job is in-flight ─────────────────────────────────────
|
||||
|
||||
@@ -1,15 +1,24 @@
|
||||
import { json } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { parseAuthToken } from '../../../../hooks.server.js';
|
||||
import { deleteSessionByAuthId } from '$lib/server/pocketbase';
|
||||
|
||||
const AUTH_COOKIE = 'libnovel_auth';
|
||||
|
||||
/**
|
||||
* POST /api/auth/logout
|
||||
* Clears the auth cookie and returns { ok: true }.
|
||||
* Does not revoke the session record from PocketBase —
|
||||
* for full revocation use DELETE /api/sessions/[id] first.
|
||||
* Deletes the session row from PocketBase AND clears the auth cookie, so the
|
||||
* session doesn't linger as a phantom "active session" after sign-out.
|
||||
*/
|
||||
export const POST: RequestHandler = async ({ cookies }) => {
|
||||
const token = cookies.get(AUTH_COOKIE);
|
||||
if (token) {
|
||||
const user = parseAuthToken(token);
|
||||
if (user?.authSessionId) {
|
||||
// Best-effort — non-fatal if PocketBase is unreachable.
|
||||
deleteSessionByAuthId(user.authSessionId).catch(() => {});
|
||||
}
|
||||
}
|
||||
cookies.delete(AUTH_COOKIE, { path: '/' });
|
||||
return json({ ok: true });
|
||||
};
|
||||
|
||||
@@ -8,4 +8,22 @@ export const GET: RequestHandler = async ({ url }) => {
|
||||
const res = await backendFetch('/api/notifications?user_id=' + userId);
|
||||
const data = await res.json().catch(() => ({ notifications: [] }));
|
||||
return json(data);
|
||||
};
|
||||
};
|
||||
|
||||
// PATCH /api/notifications?user_id=<id> — mark all read
|
||||
export const PATCH: RequestHandler = async ({ url }) => {
|
||||
const userId = url.searchParams.get('user_id');
|
||||
if (!userId) throw error(400, 'user_id required');
|
||||
const res = await backendFetch('/api/notifications?user_id=' + userId, { method: 'PATCH' });
|
||||
const data = await res.json().catch(() => ({}));
|
||||
return json(data);
|
||||
};
|
||||
|
||||
// DELETE /api/notifications?user_id=<id> — clear all
|
||||
export const DELETE: RequestHandler = async ({ url }) => {
|
||||
const userId = url.searchParams.get('user_id');
|
||||
if (!userId) throw error(400, 'user_id required');
|
||||
const res = await backendFetch('/api/notifications?user_id=' + userId, { method: 'DELETE' });
|
||||
const data = await res.json().catch(() => ({}));
|
||||
return json(data);
|
||||
};
|
||||
|
||||
@@ -2,18 +2,19 @@ import { json, error } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { backendFetch } from '$lib/server/scraper';
|
||||
|
||||
export const GET: RequestHandler = async ({ url }) => {
|
||||
const userId = url.searchParams.get('user_id');
|
||||
if (!userId) throw error(400, 'user_id required');
|
||||
const res = await backendFetch('/api/notifications?user_id=' + userId);
|
||||
const data = await res.json().catch(() => ({ notifications: [] }));
|
||||
return json(data);
|
||||
};
|
||||
|
||||
export const PATCH: RequestHandler = async ({ params }) => {
|
||||
const id = params.id;
|
||||
if (!id) throw error(400, 'id required');
|
||||
const res = await backendFetch('/api/notifications/' + id, { method: 'PATCH' });
|
||||
const data = await res.json().catch(() => ({}));
|
||||
return json(data);
|
||||
};
|
||||
};
|
||||
|
||||
// DELETE /api/notifications/[id] — dismiss a single notification
|
||||
export const DELETE: RequestHandler = async ({ params }) => {
|
||||
const id = params.id;
|
||||
if (!id) throw error(400, 'id required');
|
||||
const res = await backendFetch('/api/notifications/' + id, { method: 'DELETE' });
|
||||
const data = await res.json().catch(() => ({}));
|
||||
return json(data);
|
||||
};
|
||||
|
||||
@@ -3,7 +3,10 @@ import adapter from '@sveltejs/adapter-node';
|
||||
/** @type {import('@sveltejs/kit').Config} */
|
||||
const config = {
|
||||
kit: {
|
||||
adapter: adapter()
|
||||
adapter: adapter(),
|
||||
paths: {
|
||||
relative: false
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
Reference in New Issue
Block a user