Compare commits

...

5 Commits

Author SHA1 Message Date
root
899c504d1f feat(import): move PDF parsing to backend; fix heartbeat/reap for import_tasks
All checks were successful
Release / Test backend (push) Successful in 51s
Release / Check ui (push) Successful in 2m2s
Release / Docker (push) Successful in 7m32s
Release / Gitea Release (push) Successful in 1m0s
- parsePDF function restored in import.go (body was orphaned outside function)
- ParseImportFile() called at upload time with 3-min timeout; chapters stored as JSON in MinIO
- runner.go: prefer ChaptersKey path (read pre-parsed JSON) over BookImport.Import()
- ImportChapterStore interface added; store wired in runner/main.go
- HeartbeatTask and ReapStaleTasks now include import_tasks collection
- parseImportTask now returns ChaptersKey in domain.ImportTask
- asynq_runner.go handleImportTask passes ChaptersKey
- pb-init-v3.sh: chapters_key field added to import_tasks schema
2026-04-09 21:19:43 +05:00
root
d82aa9d4b4 fix(import): decrypt owner-encrypted PDFs with pdfcpu; add imports bucket to minio-init
All checks were successful
Release / Test backend (push) Successful in 2m10s
Release / Check ui (push) Successful in 1m55s
Release / Docker (push) Successful in 7m17s
Release / Gitea Release (push) Successful in 48s
- parsePDF now attempts to strip encryption via pdfcpu (empty user password)
  before handing bytes to dslipak/pdf — fixes '256-bit encryption key' error
  on publisher PDFs that use owner-only encryption (copy/print restrictions)
- Add pdfcpu v0.11.1 as direct dependency (was already indirect)
- docker-compose.yml minio-init: add 'imports' and 'translations' buckets
  so a fresh deploy creates all required buckets
2026-04-09 20:08:12 +05:00
root
ae08382b81 fix(import): wire ImportFileStore to bypass Asynq type assertion; add pb-init collections
All checks were successful
Release / Test backend (push) Successful in 44s
Release / Check ui (push) Successful in 1m56s
Release / Docker (push) Successful in 6m10s
Release / Gitea Release (push) Successful in 37s
- Add ImportFileStore interface to bookstore package
- Add ImportFileStore field to backend.Dependencies
- Wire ImportFileStore: store in cmd/backend/main.go
- handlers_import.go: use s.deps.ImportFileStore.PutImportFile instead of
  broken s.deps.Producer.(*storage.Store) type assertion (fails when Asynq active)
- pb-init-v3.sh: add import_tasks and notifications collection definitions
2026-04-09 19:05:11 +05:00
root
b9f8008c2c chore: embed git credentials in remote URL; update AGENTS.md 2026-04-09 17:09:53 +05:00
root
d25cee3d8c fix(ci): track generated admin_nav_notifications.js to avoid CDN-dependent paraglide failure
All checks were successful
Release / Test backend (push) Successful in 37s
Release / Check ui (push) Successful in 1m54s
Release / Docker (push) Successful in 5m45s
Release / Gitea Release (push) Successful in 35s
2026-04-09 17:03:30 +05:00
17 changed files with 307 additions and 64 deletions

View File

@@ -47,11 +47,21 @@ Sub-directories have their own `AGENTS.md` with deeper context (e.g. `ios/AGENTS
- `release.yaml` — runs on `v*` tags (build Docker images, upload source maps, create Gitea release)
- Secrets: `DOCKER_USER`, `DOCKER_TOKEN`, `GITEA_TOKEN`, `GLITCHTIP_AUTH_TOKEN`
+### Git credentials
+Credentials are embedded in the remote URL — no `HOME=/root` or credential helper needed for push:
+```
+https://kamil:95782641Apple%24@gitea.kalekber.cc/kamil/libnovel.git
+```
+All git commands still use the `HOME=/root` prefix for consistency (it picks up `/root/.gitconfig` for user name/email), but push auth works without it.
### Releasing a new version
```bash
-git tag v2.5.X -m "Short title\n\nOptional longer body"
-git push origin v2.5.X
+HOME=/root git tag v2.6.X -m "Short title"
+HOME=/root git push origin v3-cleanup --tags
```
CI will build all Docker images, upload source maps to GlitchTip, and create a Gitea release automatically.

View File

@@ -189,6 +189,7 @@ func run() error {
ChapterImageStore: store,
Producer: producer,
TaskReader: store,
+ImportFileStore: store,
SearchIndex: searchIndex,
Kokoro: kokoroClient,
PocketTTS: pocketTTSClient,

View File

@@ -192,21 +192,22 @@ func run() error {
deps := runner.Dependencies{
Consumer: consumer,
-BookWriter: store,
-BookReader: store,
-AudioStore: store,
-CoverStore: store,
-TranslationStore: store,
-BookImport: storage.NewBookImporter(store),
-ChapterIngester: store,
-SearchIndex: searchIndex,
-Novel: novel,
-Kokoro: kokoroClient,
-PocketTTS: pocketTTSClient,
-CFAI: cfaiClient,
-LibreTranslate: ltClient,
-Notifier: store,
-Log: log,
+BookWriter: store,
+BookReader: store,
+AudioStore: store,
+CoverStore: store,
+TranslationStore: store,
+BookImport: storage.NewBookImporter(store),
+ImportChapterStore: store,
+ChapterIngester: store,
+SearchIndex: searchIndex,
+Novel: novel,
+Kokoro: kokoroClient,
+PocketTTS: pocketTTSClient,
+CFAI: cfaiClient,
+LibreTranslate: ltClient,
+Notifier: store,
+Log: log,
}
r := runner.New(rCfg, deps)

View File

@@ -38,6 +38,7 @@ require (
github.com/minio/crc64nvme v1.1.1 // indirect
github.com/minio/md5-simd v1.1.2 // indirect
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
+github.com/pdfcpu/pdfcpu v0.11.1 // indirect
github.com/philhofer/fwd v1.2.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect

View File

@@ -96,11 +96,12 @@ func (p *Producer) CreateImportTask(ctx context.Context, task domain.ImportTask)
}
payload := ImportPayload{
-PBTaskID: id,
-Slug: task.Slug,
-Title: task.Title,
-FileType: task.FileType,
-ObjectKey: task.ObjectKey,
+PBTaskID: id,
+Slug: task.Slug,
+Title: task.Title,
+FileType: task.FileType,
+ObjectKey: task.ObjectKey,
+ChaptersKey: task.ChaptersKey,
}
if err := p.enqueue(ctx, TypeImportBook, payload); err != nil {
// Non-fatal: PB record exists; runner will pick it up on next poll.

View File

@@ -48,9 +48,10 @@ type ScrapePayload struct {
// ImportPayload is the Asynq job payload for PDF/EPUB import tasks.
type ImportPayload struct {
-PBTaskID string `json:"pb_task_id"`
-Slug string `json:"slug"`
-Title string `json:"title"`
-FileType string `json:"file_type"` // "pdf" or "epub"
-ObjectKey string `json:"object_key"` // MinIO path to uploaded file
+PBTaskID string `json:"pb_task_id"`
+Slug string `json:"slug"`
+Title string `json:"title"`
+FileType string `json:"file_type"` // "pdf" or "epub"
+ObjectKey string `json:"object_key"` // MinIO path to uploaded file
+ChaptersKey string `json:"chapters_key"` // MinIO path to pre-parsed chapters JSON
}
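For reference, the payload now carries the extra `chapters_key` field on the wire. A minimal sketch of the resulting JSON, with the struct copied from the diff above and made-up values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// ImportPayload is copied verbatim from the diff above.
type ImportPayload struct {
	PBTaskID    string `json:"pb_task_id"`
	Slug        string `json:"slug"`
	Title       string `json:"title"`
	FileType    string `json:"file_type"`
	ObjectKey   string `json:"object_key"`
	ChaptersKey string `json:"chapters_key"`
}

func main() {
	// Illustrative values only; real keys are built by the upload handler.
	b, _ := json.Marshal(ImportPayload{
		PBTaskID:    "abc123",
		Slug:        "my-book",
		Title:       "My Book",
		FileType:    "pdf",
		ObjectKey:   "imports/1712668783_MyBook.pdf",
		ChaptersKey: "imports/1712668783_MyBook_chapters.json",
	})
	fmt.Println(string(b))
	// {"pb_task_id":"abc123","slug":"my-book","title":"My Book","file_type":"pdf",
	//  "object_key":"imports/1712668783_MyBook.pdf","chapters_key":"imports/1712668783_MyBook_chapters.json"}
}
```

Note there is no `omitempty` on `chapters_key`, so a legacy task serializes it as an empty string, which the runner treats as "no pre-parsed chapters".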

View File

@@ -1,6 +1,7 @@
package backend
import (
"context"
"encoding/json"
"fmt"
"io"
@@ -45,6 +46,8 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
ct := r.Header.Get("Content-Type")
var req importRequest
var objectKey string
+var chaptersKey string
+var chapterCount int
if strings.HasPrefix(ct, "multipart/form-data") {
if err := r.ParseMultipartForm(32 << 20); err != nil {
@@ -96,17 +99,38 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
return
}
-// Upload to MinIO for actual import
+// Parse PDF/EPUB on the backend (with timeout) and store chapters as JSON.
+// The runner only needs to ingest pre-parsed chapters — no PDF parsing on the runner.
+parseCtx, parseCancel := context.WithTimeout(r.Context(), 3*time.Minute)
+defer parseCancel()
+chapters, parseErr := storage.ParseImportFile(parseCtx, data, req.FileType)
+if parseErr != nil || len(chapters) == 0 {
+jsonError(w, http.StatusUnprocessableEntity, "could not parse file: "+func() string {
+if parseErr != nil { return parseErr.Error() }
+return "no chapters found"
+}())
+return
+}
+// Store raw file in MinIO (for reference/re-import).
objectKey = fmt.Sprintf("imports/%d_%s", time.Now().Unix(), header.Filename)
-store, ok := s.deps.Producer.(*storage.Store)
-if !ok {
+if s.deps.ImportFileStore == nil {
jsonError(w, http.StatusInternalServerError, "storage not available")
return
}
-if err := store.PutImportFile(r.Context(), objectKey, data); err != nil {
+if err := s.deps.ImportFileStore.PutImportFile(r.Context(), objectKey, data); err != nil {
jsonError(w, http.StatusInternalServerError, "upload file: "+err.Error())
return
}
+// Store pre-parsed chapters JSON in MinIO so runner can ingest without re-parsing.
+chaptersJSON, _ := json.Marshal(chapters)
+chaptersKey = fmt.Sprintf("imports/%d_%s_chapters.json", time.Now().Unix(), strings.TrimSuffix(header.Filename, filepath.Ext(header.Filename)))
+if err := s.deps.ImportFileStore.PutImportChapters(r.Context(), chaptersKey, chaptersJSON); err != nil {
+jsonError(w, http.StatusInternalServerError, "store chapters: "+err.Error())
+return
+}
+chapterCount = len(chapters)
} else {
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
@@ -142,6 +166,8 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
BookStatus: req.BookStatus,
FileType: req.FileType,
ObjectKey: objectKey,
+ChaptersKey: chaptersKey,
+ChaptersTotal: chapterCount,
InitiatorUserID: "",
})
if err != nil {
@@ -150,8 +176,9 @@ func (s *Server) handleAdminImport(w http.ResponseWriter, r *http.Request) {
}
writeJSON(w, 0, importResponse{
-TaskID: taskID,
-Slug: slug,
+TaskID: taskID,
+Slug: slug,
+Preview: &importPreview{Chapters: chapterCount},
})
}
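A worked example of the two MinIO keys built above, for an upload named `MyBook.pdf` (a sketch; only the key construction from the handler is reproduced):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
	"time"
)

func main() {
	filename := "MyBook.pdf"
	now := time.Now().Unix() // e.g. 1712668783

	// Raw file key keeps the extension; chapters key drops it.
	objectKey := fmt.Sprintf("imports/%d_%s", now, filename)
	chaptersKey := fmt.Sprintf("imports/%d_%s_chapters.json",
		now, strings.TrimSuffix(filename, filepath.Ext(filename)))

	fmt.Println(objectKey)   // imports/1712668783_MyBook.pdf
	fmt.Println(chaptersKey) // imports/1712668783_MyBook_chapters.json
}
```

One nuance in the handler itself: it calls `time.Now().Unix()` once per key, so the two prefixes can differ by a second when an upload straddles a second boundary. Harmless, since both keys are persisted on the task record, but worth knowing when browsing the bucket.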

View File

@@ -85,6 +85,9 @@ type Dependencies struct {
// BookWriter writes book metadata and chapter refs to PocketBase.
// Used by admin text-gen apply endpoints.
BookWriter bookstore.BookWriter
+// ImportFileStore uploads raw PDF/EPUB files to MinIO for the runner to process.
+// Always wired to the concrete *storage.Store (not the Asynq wrapper).
+ImportFileStore bookstore.ImportFileStore
// AIJobStore tracks long-running AI generation jobs in PocketBase.
// If nil, job persistence is disabled (jobs still run but are not recorded).
AIJobStore bookstore.AIJobStore

View File

@@ -215,3 +215,14 @@ type BookImporter interface {
// Returns the extracted chapters or an error.
Import(ctx context.Context, objectKey, fileType string) ([]Chapter, error)
}
+// ImportFileStore uploads raw import files to object storage.
+// Kept separate from BookImporter so the HTTP handler can upload the file
+// without a concrete type assertion, regardless of which Producer is wired.
+type ImportFileStore interface {
+PutImportFile(ctx context.Context, objectKey string, data []byte) error
+// PutImportChapters stores the pre-parsed chapters JSON under the given key.
+PutImportChapters(ctx context.Context, key string, data []byte) error
+// GetImportChapters retrieves the pre-parsed chapters JSON.
+GetImportChapters(ctx context.Context, key string) ([]byte, error)
+}
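The interface is small enough that a test double is cheap. A hypothetical in-memory implementation (not part of this diff) that also satisfies the runner's `ImportChapterStore` interface defined further down:

```go
package storetest

import (
	"context"
	"fmt"
	"sync"
)

// MemImportStore is a hypothetical in-memory stand-in for *storage.Store in
// tests. It implements ImportFileStore and the runner's ImportChapterStore.
type MemImportStore struct {
	mu      sync.Mutex
	objects map[string][]byte
}

func NewMemImportStore() *MemImportStore {
	return &MemImportStore{objects: map[string][]byte{}}
}

func (m *MemImportStore) PutImportFile(ctx context.Context, objectKey string, data []byte) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.objects[objectKey] = append([]byte(nil), data...) // copy to avoid aliasing
	return nil
}

func (m *MemImportStore) PutImportChapters(ctx context.Context, key string, data []byte) error {
	return m.PutImportFile(ctx, key, data)
}

func (m *MemImportStore) GetImportChapters(ctx context.Context, key string) ([]byte, error) {
	m.mu.Lock()
	defer m.mu.Unlock()
	data, ok := m.objects[key]
	if !ok {
		return nil, fmt.Errorf("no object under key %q", key)
	}
	return data, nil
}
```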

View File

@@ -178,6 +178,7 @@ type ImportTask struct {
FileName string `json:"file_name"`
FileType string `json:"file_type"` // "pdf" or "epub"
ObjectKey string `json:"object_key,omitempty"` // MinIO path to uploaded file
+ChaptersKey string `json:"chapters_key,omitempty"` // MinIO path to pre-parsed chapters JSON
Author string `json:"author,omitempty"`
CoverURL string `json:"cover_url,omitempty"`
Genres []string `json:"genres,omitempty"`

View File

@@ -199,10 +199,11 @@ func (r *Runner) handleImportTask(ctx context.Context, t *asynq.Task) error {
return fmt.Errorf("unmarshal import payload: %w", err)
}
task := domain.ImportTask{
-ID: p.PBTaskID,
-Slug: p.Slug,
-Title: p.Title,
-FileType: p.FileType,
+ID: p.PBTaskID,
+Slug: p.Slug,
+Title: p.Title,
+FileType: p.FileType,
+ChaptersKey: p.ChaptersKey,
}
r.tasksRunning.Add(1)
defer r.tasksRunning.Add(-1)

View File

@@ -15,6 +15,7 @@ package runner
import (
"context"
"encoding/json"
"fmt"
"log/slog"
"os"
@@ -49,6 +50,11 @@ type ChapterIngester interface {
IngestChapters(ctx context.Context, slug string, chapters []bookstore.Chapter) error
}
+// ImportChapterStore retrieves pre-parsed chapter JSON blobs from object storage.
+type ImportChapterStore interface {
+GetImportChapters(ctx context.Context, key string) ([]byte, error)
+}
// Config tunes the runner behaviour.
type Config struct {
// WorkerID uniquely identifies this runner instance in PocketBase records.
@@ -114,7 +120,12 @@ type Dependencies struct {
// CoverStore stores book cover images in MinIO.
CoverStore bookstore.CoverStore
// BookImport handles PDF/EPUB file parsing and chapter extraction.
+// Kept for backward compatibility when ChaptersKey is not set.
BookImport bookstore.BookImporter
+// ImportChapterStore retrieves pre-parsed chapter JSON blobs from MinIO.
+// When set and the task has a ChaptersKey, the runner reads from here
+// instead of calling BookImport.Import() (the new preferred path).
+ImportChapterStore ImportChapterStore
// ChapterIngester persists extracted chapters into MinIO/PocketBase.
ChapterIngester ChapterIngester
// Notifier creates notifications for users.
@@ -675,6 +686,10 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
}
// runImportTask executes one PDF/EPUB import task.
+// Preferred path: when task.ChaptersKey is set, it reads pre-parsed chapters
+// JSON from MinIO (written by the backend at upload time) and ingests them.
+// Fallback path: when ChaptersKey is empty, calls BookImport.Import() to
+// parse the raw file on the runner (legacy behaviour, not used for new tasks).
func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, objectKey string) {
ctx, span := otel.Tracer("runner").Start(ctx, "runner.import_task")
defer span.End()
@@ -682,10 +697,11 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
attribute.String("task.id", task.ID),
attribute.String("book.slug", task.Slug),
attribute.String("file.type", task.FileType),
attribute.String("chapters_key", task.ChaptersKey),
)
log := r.deps.Log.With("task_id", task.ID, "slug", task.Slug, "file_type", task.FileType)
log.Info("runner: import task starting")
log.Info("runner: import task starting", "chapters_key", task.ChaptersKey)
hbCtx, hbCancel := context.WithCancel(ctx)
defer hbCancel()
@@ -714,15 +730,33 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
}
}
-if r.deps.BookImport == nil {
-fail("book import not configured (BookImport dependency missing)")
-return
-}
+var chapters []bookstore.Chapter
-chapters, err := r.deps.BookImport.Import(ctx, objectKey, task.FileType)
-if err != nil {
-fail(fmt.Sprintf("import file: %v", err))
-return
+if task.ChaptersKey != "" && r.deps.ImportChapterStore != nil {
+// New path: read pre-parsed chapters JSON uploaded by the backend.
+raw, err := r.deps.ImportChapterStore.GetImportChapters(ctx, task.ChaptersKey)
+if err != nil {
+fail(fmt.Sprintf("get chapters JSON: %v", err))
+return
+}
+if err := json.Unmarshal(raw, &chapters); err != nil {
+fail(fmt.Sprintf("unmarshal chapters JSON: %v", err))
+return
+}
+log.Info("runner: loaded pre-parsed chapters", "count", len(chapters))
+} else {
+// Legacy path: parse the raw file on the runner.
+if r.deps.BookImport == nil {
+fail("book import not configured (BookImport dependency missing)")
+return
+}
+var err error
+chapters, err = r.deps.BookImport.Import(ctx, objectKey, task.FileType)
+if err != nil {
+fail(fmt.Sprintf("import file: %v", err))
+return
+}
+log.Info("runner: parsed chapters from file (legacy path)", "count", len(chapters))
+}
}
if len(chapters) == 0 {
@@ -730,23 +764,12 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
return
}
-// Store chapters via BookWriter
-// Note: BookWriter.WriteChapters expects domain.Chapter, need conversion
-var domainChapters []bookstore.Chapter
-for _, ch := range chapters {
-domainChapters = append(domainChapters, bookstore.Chapter{
-Number: ch.Number,
-Title: ch.Title,
-Content: ch.Content,
-})
-}
-// Store chapters via ChapterIngester
+// Persist chapters via ChapterIngester.
if r.deps.ChapterIngester == nil {
fail("chapter ingester not configured")
return
}
-if err := r.deps.ChapterIngester.IngestChapters(ctx, task.Slug, domainChapters); err != nil {
+if err := r.deps.ChapterIngester.IngestChapters(ctx, task.Slug, chapters); err != nil {
fail(fmt.Sprintf("store chapters: %v", err))
return
}
@@ -786,7 +809,7 @@ func (r *Runner) runImportTask(ctx context.Context, task domain.ImportTask, obje
log.Error("runner: FinishImportTask failed", "err", err)
}
-// Create notification for the user who initiated the import
+// Notify the user who initiated the import.
if r.deps.Notifier != nil {
msg := fmt.Sprintf("Import completed: %d chapters from %s", len(chapters), task.Title)
targetUser := task.InitiatorUserID

View File

@@ -15,6 +15,8 @@ import (
"github.com/libnovel/backend/internal/bookstore"
"github.com/libnovel/backend/internal/domain"
minio "github.com/minio/minio-go/v7"
"github.com/pdfcpu/pdfcpu/pkg/api"
"github.com/pdfcpu/pdfcpu/pkg/pdfcpu/model"
"golang.org/x/net/html"
)
@@ -90,7 +92,67 @@ func AnalyzeFile(data []byte, fileType string) (chapterCount int, firstLines []s
+// decryptPDF strips encryption from a PDF using an empty user password.
+// Returns the decrypted bytes, or an error if decryption is not possible.
+// This handles the common case of "owner-only" encrypted PDFs (copy/print
+// restrictions) which use an empty user password and open normally in readers.
+func decryptPDF(data []byte) ([]byte, error) {
+conf := model.NewDefaultConfiguration()
+conf.UserPW = ""
+conf.OwnerPW = ""
+var out bytes.Buffer
+err := api.Decrypt(bytes.NewReader(data), &out, conf)
+if err != nil {
+return nil, err
+}
+return out.Bytes(), nil
+}
+// ParseImportFile parses a PDF or EPUB and returns chapters.
+// Unlike AnalyzeFile it respects ctx cancellation so callers can apply a timeout.
+// For PDFs it first attempts to strip encryption with an empty password.
+func ParseImportFile(ctx context.Context, data []byte, fileType string) ([]bookstore.Chapter, error) {
+type result struct {
+chapters []bookstore.Chapter
+err error
+}
+ch := make(chan result, 1)
+go func() {
+var chapters []bookstore.Chapter
+var err error
+switch fileType {
+case "pdf":
+chapters, err = parsePDF(data)
+case "epub":
+chapters, err = parseEPUB(data)
+default:
+err = fmt.Errorf("unsupported file type: %s", fileType)
+}
+ch <- result{chapters, err}
+}()
+select {
+case <-ctx.Done():
+return nil, fmt.Errorf("parse timed out: %w", ctx.Err())
+case r := <-ch:
+return r.chapters, r.err
+}
+}
+// parsePDF extracts chapters from PDF bytes using dslipak/pdf.
+// It first attempts to decrypt the PDF with an empty password in case the file
+// uses owner-only encryption (copy/print restrictions), which is common for
+// publisher PDFs that open normally in readers but confuse raw parsers.
+func parsePDF(data []byte) ([]bookstore.Chapter, error) {
+// If the PDF is encrypted, try to decrypt it with an empty password.
+// Many publisher PDFs use owner-only encryption (copy/print restrictions)
+// with an empty user password, so they open normally but confuse parsers.
+decrypted, err := decryptPDF(data)
+if err == nil {
+data = decrypted
+}
+// (if decryption fails we still attempt to parse — maybe it works anyway)
r, err := pdf.NewReader(bytes.NewReader(data), int64(len(data)))
if err != nil {
return nil, fmt.Errorf("open PDF: %w", err)
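Because `ParseImportFile` wraps `ctx.Err()` with `%w`, callers can distinguish a timeout from a genuinely malformed file. A sketch (`slowParse` below is a hypothetical stand-in; only the error-wrapping behaviour from the diff is reproduced):

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// slowParse mimics ParseImportFile's timeout behaviour for illustration.
func slowParse(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return fmt.Errorf("parse timed out: %w", ctx.Err())
	case <-time.After(10 * time.Second): // pretend parsing takes 10s
		return nil
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	err := slowParse(ctx)
	switch {
	case errors.Is(err, context.DeadlineExceeded):
		fmt.Println("timeout: surface a 'file too slow to parse' error")
	case err != nil:
		fmt.Println("parse error:", err)
	default:
		fmt.Println("parsed OK")
	}
}
```

One caveat visible in the real function: `parsePDF`/`parseEPUB` do not take the context, so on timeout the parsing goroutine runs to completion in the background. The result channel is buffered (capacity 1) precisely so that goroutine can finish its send and exit rather than leak.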

View File

@@ -654,13 +654,14 @@ func (s *Store) CreateImportTask(ctx context.Context, task domain.ImportTask) (s
"file_name": task.Slug + "." + task.FileType,
"file_type": task.FileType,
"object_key": task.ObjectKey,
"chapters_key": task.ChaptersKey,
"author": task.Author,
"cover_url": task.CoverURL,
"summary": task.Summary,
"book_status": task.BookStatus,
"status": string(domain.TaskStatusPending),
"chapters_done": 0,
"chapters_total": 0,
"chapters_total": task.ChaptersTotal,
"started": time.Now().UTC().Format(time.RFC3339),
"initiator_user_id": task.InitiatorUserID,
}
@@ -914,7 +915,7 @@ func (s *Store) FailTask(ctx context.Context, id, errMsg string) error {
}
// HeartbeatTask updates the heartbeat_at field on a running task.
-// Tries scraping_tasks first, then audio_jobs, then translation_jobs.
+// Tries scraping_tasks, audio_jobs, translation_jobs, then import_tasks.
func (s *Store) HeartbeatTask(ctx context.Context, id string) error {
payload := map[string]any{
"heartbeat_at": time.Now().UTC().Format(time.RFC3339),
@@ -925,7 +926,10 @@ func (s *Store) HeartbeatTask(ctx context.Context, id string) error {
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/audio_jobs/records/%s", id), payload); err == nil {
return nil
}
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/translation_jobs/records/%s", id), payload)
if err := s.pb.patch(ctx, fmt.Sprintf("/api/collections/translation_jobs/records/%s", id), payload); err == nil {
return nil
}
return s.pb.patch(ctx, fmt.Sprintf("/api/collections/import_tasks/records/%s", id), payload)
}
// ReapStaleTasks finds all running tasks whose heartbeat_at is either missing
@@ -943,7 +947,7 @@ func (s *Store) ReapStaleTasks(ctx context.Context, staleAfter time.Duration) (i
}
total := 0
-for _, collection := range []string{"scraping_tasks", "audio_jobs", "translation_jobs"} {
+for _, collection := range []string{"scraping_tasks", "audio_jobs", "translation_jobs", "import_tasks"} {
items, err := s.pb.listAll(ctx, collection, filter, "")
if err != nil {
return total, fmt.Errorf("ReapStaleTasks list %s: %w", collection, err)
@@ -1185,6 +1189,7 @@ func parseImportTask(raw json.RawMessage) (domain.ImportTask, error) {
FileName string `json:"file_name"`
FileType string `json:"file_type"`
ObjectKey string `json:"object_key"`
ChaptersKey string `json:"chapters_key"`
Author string `json:"author"`
CoverURL string `json:"cover_url"`
Genres string `json:"genres"` // stored as comma-separated
@@ -1219,6 +1224,7 @@ func parseImportTask(raw json.RawMessage) (domain.ImportTask, error) {
FileName: rec.FileName,
FileType: rec.FileType,
ObjectKey: rec.ObjectKey,
+ChaptersKey: rec.ChaptersKey,
Author: rec.Author,
CoverURL: rec.CoverURL,
Genres: genres,
@@ -1266,6 +1272,20 @@ func (s *Store) PutImportFile(ctx context.Context, key string, data []byte) erro
return s.mc.putObject(ctx, "imports", key, "application/octet-stream", data)
}
+// PutImportChapters stores a pre-parsed chapters JSON blob in MinIO.
+func (s *Store) PutImportChapters(ctx context.Context, key string, data []byte) error {
+return s.mc.putObject(ctx, "imports", key, "application/json", data)
+}
+// GetImportChapters retrieves the pre-parsed chapters JSON from MinIO.
+func (s *Store) GetImportChapters(ctx context.Context, key string) ([]byte, error) {
+data, err := s.mc.getObject(ctx, "imports", key)
+if err != nil {
+return nil, fmt.Errorf("get chapters object: %w", err)
+}
+return data, nil
+}
func (s *Store) CoverExists(ctx context.Context, slug string) bool {
return s.mc.coverExists(ctx, CoverObjectKey(slug))
}
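With `import_tasks` added, the heartbeat fallback chain is now four collections deep. If more task types appear, a table-driven loop would keep each addition to a single line. A refactor sketch, not part of this diff (`patchFn` is an assumed stand-in for the `s.pb.patch` signature used above):

```go
package store

import (
	"context"
	"fmt"
)

// patchFn mirrors how s.pb.patch is invoked above (assumed signature).
type patchFn func(ctx context.Context, path string, payload map[string]any) error

// taskCollections lists every collection that can own a running task,
// in the same probe order HeartbeatTask and ReapStaleTasks use.
var taskCollections = []string{
	"scraping_tasks", "audio_jobs", "translation_jobs", "import_tasks",
}

// heartbeatAny patches the first collection that accepts the record ID,
// returning the last error if none do.
func heartbeatAny(ctx context.Context, patch patchFn, id string, payload map[string]any) error {
	var lastErr error
	for _, c := range taskCollections {
		err := patch(ctx, fmt.Sprintf("/api/collections/%s/records/%s", c, id), payload)
		if err == nil {
			return nil
		}
		lastErr = err
	}
	return lastErr
}
```

Sharing the slice with `ReapStaleTasks` would also keep the two probe lists from drifting apart.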

View File

@@ -58,6 +58,8 @@ services:
mc mb --ignore-existing local/audio;
mc mb --ignore-existing local/avatars;
mc mb --ignore-existing local/catalogue;
+mc mb --ignore-existing local/translations;
+mc mb --ignore-existing local/imports;
echo 'buckets ready';
"
environment:

View File

@@ -299,6 +299,40 @@ create "translation_jobs" '{
{"name":"heartbeat_at", "type":"date"}
]}'
create "import_tasks" '{
"name":"import_tasks","type":"base","fields":[
{"name":"slug", "type":"text", "required":true},
{"name":"title", "type":"text", "required":true},
{"name":"file_name", "type":"text"},
{"name":"file_type", "type":"text"},
{"name":"object_key", "type":"text"},
{"name":"chapters_key", "type":"text"},
{"name":"author", "type":"text"},
{"name":"cover_url", "type":"text"},
{"name":"genres", "type":"text"},
{"name":"summary", "type":"text"},
{"name":"book_status", "type":"text"},
{"name":"worker_id", "type":"text"},
{"name":"initiator_user_id", "type":"text"},
{"name":"status", "type":"text", "required":true},
{"name":"chapters_done", "type":"number"},
{"name":"chapters_total", "type":"number"},
{"name":"error_message", "type":"text"},
{"name":"started", "type":"date"},
{"name":"finished", "type":"date"},
{"name":"heartbeat_at", "type":"date"}
]}'
create "notifications" '{
"name":"notifications","type":"base","fields":[
{"name":"user_id", "type":"text","required":true},
{"name":"title", "type":"text","required":true},
{"name":"message", "type":"text"},
{"name":"link", "type":"text"},
{"name":"read", "type":"bool"},
{"name":"created", "type":"date"}
]}'
create "ai_jobs" '{
"name":"ai_jobs","type":"base","fields":[
{"name":"kind", "type":"text", "required":true},

View File

@@ -0,0 +1,44 @@
/* eslint-disable */
import { getLocale, experimentalStaticLocale } from '../runtime.js';
/** @typedef {import('../runtime.js').LocalizedString} LocalizedString */
/** @typedef {{}} Admin_Nav_NotificationsInputs */
const en_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
return /** @type {LocalizedString} */ (`Notifications`)
};
const ru_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
return /** @type {LocalizedString} */ (`Уведомления`)
};
const id_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
return /** @type {LocalizedString} */ (`Notifikasi`)
};
const pt_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
return /** @type {LocalizedString} */ (`Notificações`)
};
const fr_admin_nav_notifications = /** @type {(inputs: Admin_Nav_NotificationsInputs) => LocalizedString} */ () => {
return /** @type {LocalizedString} */ (`Notifications`)
};
/**
* | output |
* | --- |
* | "Notifications" |
*
* @param {Admin_Nav_NotificationsInputs} inputs
* @param {{ locale?: "en" | "ru" | "id" | "pt" | "fr" }} options
* @returns {LocalizedString}
*/
export const admin_nav_notifications = /** @type {((inputs?: Admin_Nav_NotificationsInputs, options?: { locale?: "en" | "ru" | "id" | "pt" | "fr" }) => LocalizedString) & import('../runtime.js').MessageMetadata<Admin_Nav_NotificationsInputs, { locale?: "en" | "ru" | "id" | "pt" | "fr" }, {}>} */ ((inputs = {}, options = {}) => {
const locale = experimentalStaticLocale ?? options.locale ?? getLocale()
if (locale === "en") return en_admin_nav_notifications(inputs)
if (locale === "ru") return ru_admin_nav_notifications(inputs)
if (locale === "id") return id_admin_nav_notifications(inputs)
if (locale === "pt") return pt_admin_nav_notifications(inputs)
return fr_admin_nav_notifications(inputs)
});