Compare commits

4 Commits

| Author | SHA1 | Date |
|---|---|---|
| | f8c66fcf63 | |
| | a1def0f0f8 | |
| | e0dec05885 | |
| | 8662aed565 | |

@@ -198,6 +198,7 @@ func run() error {
 			TextGen:        textGenClient,
 			BookWriter:     store,
 			AIJobStore:     store,
+			BookAdminStore: store,
 			Log:            log,
 		},
 	)

117	backend/internal/backend/handlers_books_admin.go	Normal file
@@ -0,0 +1,117 @@
package backend

import (
	"errors"
	"net/http"

	"github.com/libnovel/backend/internal/storage"
)

// handleAdminArchiveBook handles PATCH /api/admin/books/{slug}/archive.
// Soft-deletes a book by setting archived=true in PocketBase and updating the
// Meilisearch document so it is excluded from all public search results.
// The book data is preserved and can be restored with the unarchive endpoint.
func (s *Server) handleAdminArchiveBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.ArchiveBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("archive book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Update the Meilisearch document so the archived flag takes effect
	// immediately in search/catalogue results.
	if meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), slug); err == nil && ok {
		if upsertErr := s.deps.SearchIndex.UpsertBook(r.Context(), meta); upsertErr != nil {
			s.deps.Log.Warn("archive book: meili upsert failed", "slug", slug, "err", upsertErr)
		}
	}

	s.deps.Log.Info("book archived", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "archived"})
}

// handleAdminUnarchiveBook handles PATCH /api/admin/books/{slug}/unarchive.
// Restores a previously archived book by clearing the archived flag, making it
// publicly visible in search and catalogue results again.
func (s *Server) handleAdminUnarchiveBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.UnarchiveBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("unarchive book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Sync the updated archived=false state back to Meilisearch.
	if meta, ok, err := s.deps.BookReader.ReadMetadata(r.Context(), slug); err == nil && ok {
		if upsertErr := s.deps.SearchIndex.UpsertBook(r.Context(), meta); upsertErr != nil {
			s.deps.Log.Warn("unarchive book: meili upsert failed", "slug", slug, "err", upsertErr)
		}
	}

	s.deps.Log.Info("book unarchived", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "active"})
}

// handleAdminDeleteBook handles DELETE /api/admin/books/{slug}.
// Permanently removes all data for a book:
//   - PocketBase books record and all chapters_idx records
//   - All MinIO chapter markdown objects and the cover image
//   - Meilisearch document
//
// This operation is irreversible. Use the archive endpoint for soft-deletion.
func (s *Server) handleAdminDeleteBook(w http.ResponseWriter, r *http.Request) {
	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "missing slug")
		return
	}
	if s.deps.BookAdminStore == nil {
		jsonError(w, http.StatusServiceUnavailable, "book admin store not configured")
		return
	}

	if err := s.deps.BookAdminStore.DeleteBook(r.Context(), slug); err != nil {
		if errors.Is(err, storage.ErrNotFound) {
			jsonError(w, http.StatusNotFound, "book not found")
			return
		}
		s.deps.Log.Error("delete book failed", "slug", slug, "err", err)
		jsonError(w, http.StatusInternalServerError, err.Error())
		return
	}

	// Remove from Meilisearch — best-effort (log on failure, don't fail the request).
	if err := s.deps.SearchIndex.DeleteBook(r.Context(), slug); err != nil {
		s.deps.Log.Warn("delete book: meili delete failed", "slug", slug, "err", err)
	}

	s.deps.Log.Info("book deleted", "slug", slug)
	writeJSON(w, http.StatusOK, map[string]string{"slug": slug, "status": "deleted"})
}

141	backend/internal/backend/handlers_split.go	Normal file
@@ -0,0 +1,141 @@
package backend

import (
	"encoding/json"
	"fmt"
	"net/http"
	"strings"

	"github.com/libnovel/backend/internal/bookstore"
	"github.com/libnovel/backend/internal/domain"
)

// handleAdminSplitChapters handles POST /api/admin/books/{slug}/split-chapters.
//
// Request body (JSON):
//
//	{ "text": "<full text with --- dividers and optional ## Title lines>" }
//
// The text is split on lines containing only "---". Each segment may start with
// a "## Title" line which becomes the chapter title; remaining lines are the
// chapter content. Sequential chapter numbers 1..N are assigned.
//
// All existing chapters for the book are replaced: WriteChapter is called for
// each new chapter (upsert by number), so chapters beyond N are not deleted —
// use the dedup endpoint afterwards if needed.
func (s *Server) handleAdminSplitChapters(w http.ResponseWriter, r *http.Request) {
	if s.deps.BookWriter == nil {
		jsonError(w, http.StatusServiceUnavailable, "book writer not configured")
		return
	}

	slug := r.PathValue("slug")
	if slug == "" {
		jsonError(w, http.StatusBadRequest, "slug is required")
		return
	}

	var req struct {
		Text string `json:"text"`
	}
	if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
		jsonError(w, http.StatusBadRequest, "parse body: "+err.Error())
		return
	}
	if strings.TrimSpace(req.Text) == "" {
		jsonError(w, http.StatusBadRequest, "text is required")
		return
	}

	chapters := splitChapterText(req.Text)
	if len(chapters) == 0 {
		jsonError(w, http.StatusUnprocessableEntity, "no chapters produced from text")
		return
	}

	for _, ch := range chapters {
		var mdContent string
		if ch.Title != "" && ch.Title != fmt.Sprintf("Chapter %d", ch.Number) {
			mdContent = fmt.Sprintf("# %s\n\n%s", ch.Title, ch.Content)
		} else {
			mdContent = fmt.Sprintf("# Chapter %d\n\n%s", ch.Number, ch.Content)
		}
		domainCh := domain.Chapter{
			Ref:  domain.ChapterRef{Number: ch.Number, Title: ch.Title},
			Text: mdContent,
		}
		if err := s.deps.BookWriter.WriteChapter(r.Context(), slug, domainCh); err != nil {
			jsonError(w, http.StatusInternalServerError, fmt.Sprintf("write chapter %d: %s", ch.Number, err.Error()))
			return
		}
	}

	writeJSON(w, http.StatusOK, map[string]any{
		"chapters": len(chapters),
		"slug":     slug,
	})
}
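
A hypothetical round-trip against this endpoint (slug and text illustrative; admin auth not shown in this diff), with the response shape taken from the handler above:

	// POST /api/admin/books/my-novel/split-chapters
	// {"text": "## Prologue\nFirst scene...\n---\nSecond chapter text..."}
	//
	// → 200 {"chapters": 2, "slug": "my-novel"}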

// splitChapterText splits text on "---" divider lines into bookstore.Chapter
// slices. Each segment may optionally start with a "## Title" header line.
func splitChapterText(text string) []bookstore.Chapter {
	lines := strings.Split(text, "\n")

	// Collect raw segments split on "---" dividers.
	var segments [][]string
	cur := []string{}
	for _, line := range lines {
		if strings.TrimSpace(line) == "---" {
			segments = append(segments, cur)
			cur = []string{}
		} else {
			cur = append(cur, line)
		}
	}
	segments = append(segments, cur) // last segment

	var chapters []bookstore.Chapter
	chNum := 0
	for _, seg := range segments {
		// Trim leading/trailing blank lines from the segment.
		start, end := 0, len(seg)
		for start < end && strings.TrimSpace(seg[start]) == "" {
			start++
		}
		for end > start && strings.TrimSpace(seg[end-1]) == "" {
			end--
		}
		seg = seg[start:end]
		if len(seg) == 0 {
			continue
		}

		// Check for a "## Title" header on the first line.
		title := ""
		contentStart := 0
		if strings.HasPrefix(strings.TrimSpace(seg[0]), "## ") {
			title = strings.TrimSpace(strings.TrimPrefix(strings.TrimSpace(seg[0]), "## "))
			contentStart = 1
			// Skip blank lines after the title.
			for contentStart < len(seg) && strings.TrimSpace(seg[contentStart]) == "" {
				contentStart++
			}
		}

		content := strings.TrimSpace(strings.Join(seg[contentStart:], "\n"))
		if content == "" {
			continue
		}

		chNum++
		if title == "" {
			title = fmt.Sprintf("Chapter %d", chNum)
		}
		chapters = append(chapters, bookstore.Chapter{
			Number:  chNum,
			Title:   title,
			Content: content,
		})
	}
	return chapters
}
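
A minimal sketch of the split behaviour (input hypothetical):

	input := "## The Journey Begins\nIt was a quiet morning.\n---\nThe road stretched on.\n"
	chapters := splitChapterText(input)
	// chapters[0] → Number: 1, Title: "The Journey Begins", Content: "It was a quiet morning."
	// chapters[1] → Number: 2, Title: "Chapter 2",          Content: "The road stretched on."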

@@ -91,6 +91,9 @@ type Dependencies struct {
 	// AIJobStore tracks long-running AI generation jobs in PocketBase.
 	// If nil, job persistence is disabled (jobs still run but are not recorded).
 	AIJobStore bookstore.AIJobStore
+	// BookAdminStore provides admin-only operations: archive, unarchive, hard-delete.
+	// If nil, the admin book management endpoints return 503.
+	BookAdminStore bookstore.BookAdminStore
 	// Log is the structured logger.
 	Log *slog.Logger
 }

@@ -247,6 +250,14 @@ func (s *Server) ListenAndServe(ctx context.Context) error {
 	// Admin data repair endpoints
 	mux.HandleFunc("POST /api/admin/dedup-chapters/{slug}", s.handleDedupChapters)

+	// Admin book management (soft-delete / hard-delete)
+	mux.HandleFunc("PATCH /api/admin/books/{slug}/archive", s.handleAdminArchiveBook)
+	mux.HandleFunc("PATCH /api/admin/books/{slug}/unarchive", s.handleAdminUnarchiveBook)
+	mux.HandleFunc("DELETE /api/admin/books/{slug}", s.handleAdminDeleteBook)
+
+	// Admin chapter split (imported books)
+	mux.HandleFunc("POST /api/admin/books/{slug}/split-chapters", s.handleAdminSplitChapters)
+
 	// Import (PDF/EPUB)
 	mux.HandleFunc("POST /api/admin/import", s.handleAdminImport)
 	mux.HandleFunc("GET /api/admin/import", s.handleAdminImportList)
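
Taken together, the new admin surface wired above (response shapes from the handlers in this diff; admin authorisation is assumed to sit in front of these routes):

	// PATCH  /api/admin/books/{slug}/archive        → {"slug": ..., "status": "archived"}
	// PATCH  /api/admin/books/{slug}/unarchive      → {"slug": ..., "status": "active"}
	// DELETE /api/admin/books/{slug}                → {"slug": ..., "status": "deleted"}
	// POST   /api/admin/books/{slug}/split-chapters → {"chapters": N, "slug": ...}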

@@ -216,6 +216,27 @@ type BookImporter interface {
 	Import(ctx context.Context, objectKey, fileType string) ([]Chapter, error)
 }

+// BookAdminStore covers admin-only operations for managing books in the catalogue.
+// All methods require admin authorisation at the HTTP handler level.
+type BookAdminStore interface {
+	// ArchiveBook sets archived=true on a book record, hiding it from all
+	// public search and catalogue responses. Returns ErrNotFound when the
+	// slug does not exist.
+	ArchiveBook(ctx context.Context, slug string) error
+
+	// UnarchiveBook clears archived on a book record, making it publicly
+	// visible again. Returns ErrNotFound when the slug does not exist.
+	UnarchiveBook(ctx context.Context, slug string) error
+
+	// DeleteBook permanently removes all data for a book:
+	//   - PocketBase books record
+	//   - All PocketBase chapters_idx records
+	//   - All MinIO chapter markdown objects ({slug}/chapter-*.md)
+	//   - MinIO cover image (covers/{slug}.jpg)
+	// The caller is responsible for also deleting the Meilisearch document.
+	DeleteBook(ctx context.Context, slug string) error
+}
+
 // ImportFileStore uploads raw import files to object storage.
 // Kept separate from BookImporter so the HTTP handler can upload the file
 // without a concrete type assertion, regardless of which Producer is wired.

@@ -24,6 +24,9 @@ type BookMeta struct {
 	// updated in PocketBase. Populated on read; not sent on write (PocketBase
 	// manages its own updated field).
 	MetaUpdated int64 `json:"meta_updated,omitempty"`
+	// Archived is true when the book has been soft-deleted by an admin.
+	// Archived books are excluded from all public search and catalogue responses.
+	Archived bool `json:"archived,omitempty"`
 }

 // CatalogueEntry is a lightweight book reference returned by catalogue pages.

@@ -32,11 +32,15 @@ type Client interface {
 	// BookExists reports whether a book with the given slug is already in the
 	// index. Used by the catalogue refresh to skip re-indexing known books.
 	BookExists(ctx context.Context, slug string) bool
+	// DeleteBook removes a book document from the search index by slug.
+	DeleteBook(ctx context.Context, slug string) error
 	// Search returns up to limit books matching query.
+	// Archived books are always excluded.
 	Search(ctx context.Context, query string, limit int) ([]domain.BookMeta, error)
 	// Catalogue queries books with optional filters, sort, and pagination.
 	// Returns books, the total hit count for pagination, and a FacetResult
 	// with available genre and status values from the index.
+	// Archived books are always excluded.
 	Catalogue(ctx context.Context, q CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error)
 }

@@ -99,7 +103,7 @@ func Configure(host, apiKey string) error {
 		return fmt.Errorf("meili: update searchable attributes: %w", err)
 	}

-	filterable := []interface{}{"status", "genres"}
+	filterable := []interface{}{"status", "genres", "archived"}
 	if _, err := idx.UpdateFilterableAttributes(&filterable); err != nil {
 		return fmt.Errorf("meili: update filterable attributes: %w", err)
 	}

@@ -128,6 +132,9 @@ type bookDoc struct {
 	// MetaUpdated is the Unix timestamp (seconds) of the last PocketBase update.
 	// Used for sort=update ("recently updated" ordering).
 	MetaUpdated int64 `json:"meta_updated"`
+	// Archived is true when the book has been soft-deleted by an admin.
+	// Used as a filter to exclude archived books from all search results.
+	Archived bool `json:"archived"`
 }

@@ -144,6 +151,7 @@ func toDoc(b domain.BookMeta) bookDoc {
 		Rank:        b.Ranking,
 		Rating:      b.Rating,
 		MetaUpdated: b.MetaUpdated,
+		Archived:    b.Archived,
 	}
 }

@@ -161,6 +169,7 @@ func fromDoc(d bookDoc) domain.BookMeta {
 		Ranking:     d.Rank,
 		Rating:      d.Rating,
 		MetaUpdated: d.MetaUpdated,
+		Archived:    d.Archived,
 	}
 }

@@ -184,13 +193,24 @@ func (c *MeiliClient) BookExists(_ context.Context, slug string) bool {
 	return err == nil && doc.Slug != ""
 }

+// DeleteBook removes a book document from the index by slug.
+// The operation is fire-and-forget (Meilisearch processes tasks asynchronously).
+func (c *MeiliClient) DeleteBook(_ context.Context, slug string) error {
+	if _, err := c.idx.DeleteDocument(slug, nil); err != nil {
+		return fmt.Errorf("meili: delete book %q: %w", slug, err)
+	}
+	return nil
+}
+
 // Search returns books matching query, up to limit results.
+// Archived books are always excluded.
 func (c *MeiliClient) Search(_ context.Context, query string, limit int) ([]domain.BookMeta, error) {
 	if limit <= 0 {
 		limit = 20
 	}
 	res, err := c.idx.Search(query, &meilisearch.SearchRequest{
-		Limit: int64(limit),
+		Limit:  int64(limit),
+		Filter: "archived = false",
 	})
 	if err != nil {
 		return nil, fmt.Errorf("meili: search %q: %w", query, err)

@@ -231,17 +251,15 @@ func (c *MeiliClient) Catalogue(_ context.Context, q CatalogueQuery) ([]domain.BookMeta, int64, FacetResult, error) {
 		Facets: []string{"genres", "status"},
 	}

-	// Build filter
-	var filters []string
+	// Build filter — always exclude archived books
+	filters := []string{"archived = false"}
 	if q.Genre != "" && q.Genre != "all" {
 		filters = append(filters, fmt.Sprintf("genres = %q", q.Genre))
 	}
 	if q.Status != "" && q.Status != "all" {
 		filters = append(filters, fmt.Sprintf("status = %q", q.Status))
 	}
-	if len(filters) > 0 {
-		req.Filter = strings.Join(filters, " AND ")
-	}
+	req.Filter = strings.Join(filters, " AND ")

 	// Map UI sort tokens to Meilisearch sort expressions.
 	switch q.Sort {
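
For concreteness, the filter strings this builds (example values hypothetical):

	// q = CatalogueQuery{Genre: "fantasy", Status: "ongoing"} →
	//   archived = false AND genres = "fantasy" AND status = "ongoing"
	// q = CatalogueQuery{} →
	//   archived = false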

@@ -318,7 +336,8 @@ func sortStrings(s []string) {
 type NoopClient struct{}

 func (NoopClient) UpsertBook(_ context.Context, _ domain.BookMeta) error { return nil }
-func (NoopClient) BookExists(_ context.Context, _ string) bool           { return false }
+func (NoopClient) BookExists(_ context.Context, _ string) bool  { return false }
+func (NoopClient) DeleteBook(_ context.Context, _ string) error { return nil }
 func (NoopClient) Search(_ context.Context, _ string, _ int) ([]domain.BookMeta, error) {
 	return nil, nil
 }

@@ -19,3 +19,53 @@ func stripMarkdown(src string) string {
 	src = regexp.MustCompile(`\n{3,}`).ReplaceAllString(src, "\n\n")
 	return strings.TrimSpace(src)
 }
+
+// chunkText splits text into chunks of at most maxChars characters, breaking
+// at sentence boundaries (". ", "! ", "? ", "\n") so that the TTS service
+// receives natural prose fragments rather than mid-sentence cuts.
+//
+// If a single sentence exceeds maxChars it is included as its own chunk —
+// never silently truncated.
+func chunkText(text string, maxChars int) []string {
+	if len(text) <= maxChars {
+		return []string{text}
+	}
+
+	// Sentence-boundary delimiters — we split AFTER these sequences.
+	// Order matters: longer sequences first.
+	delimiters := []string{".\n", "!\n", "?\n", ". ", "! ", "? ", "\n\n", "\n"}
+
+	var chunks []string
+	remaining := text
+
+	for len(remaining) > 0 {
+		if len(remaining) <= maxChars {
+			chunks = append(chunks, strings.TrimSpace(remaining))
+			break
+		}
+
+		// Find the last sentence boundary within the maxChars window.
+		window := remaining[:maxChars]
+		cutAt := -1
+		for _, delim := range delimiters {
+			idx := strings.LastIndex(window, delim)
+			if idx > 0 && idx+len(delim) > cutAt {
+				cutAt = idx + len(delim)
+			}
+		}
+
+		if cutAt <= 0 {
+			// No boundary found — hard-break at maxChars to avoid an infinite loop.
+			cutAt = maxChars
+		}
+
+		chunk := strings.TrimSpace(remaining[:cutAt])
+		if chunk != "" {
+			chunks = append(chunks, chunk)
+		}
+		remaining = strings.TrimSpace(remaining[cutAt:])
+	}
+
+	return chunks
+}
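
A quick sketch of the chunking contract (input and maxChars hypothetical):

	chunks := chunkText("One two. Three four. Five six.", 20)
	// → []string{"One two.", "Three four.", "Five six."}
	// Each cut lands just after a ". " boundary inside the 20-char window.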

@@ -656,7 +656,7 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
 		return
 	}
 	var genErr error
-	audioData, genErr = r.deps.Kokoro.GenerateAudio(ctx, text, task.Voice)
+	audioData, genErr = kokoroGenerateChunked(ctx, r.deps.Kokoro, text, task.Voice, log)
 	if genErr != nil {
 		fail(fmt.Sprintf("kokoro generate: %v", genErr))
 		return

@@ -685,6 +685,31 @@ func (r *Runner) runAudioTask(ctx context.Context, task domain.AudioTask) {
 	log.Info("runner: audio task finished", "key", key)
 }

+// kokoroGenerateChunked splits text into ~1 000-character sentence-boundary
+// chunks, calls Kokoro.GenerateAudio for each, and concatenates the raw MP3
+// bytes. This avoids EOF / timeout failures that occur when the Kokoro
+// FastAPI server receives very large inputs (e.g. a full imported PDF chapter).
+//
+// Concatenating raw MP3 frames is valid — MP3 is a frame-based format and
+// standard players handle multi-segment files correctly.
+func kokoroGenerateChunked(ctx context.Context, k kokoro.Client, text, voice string, log *slog.Logger) ([]byte, error) {
+	const chunkSize = 1000
+
+	chunks := chunkText(text, chunkSize)
+	log.Info("runner: kokoro chunked generation", "chunks", len(chunks), "total_chars", len(text))
+
+	var combined []byte
+	for i, chunk := range chunks {
+		data, err := k.GenerateAudio(ctx, chunk, voice)
+		if err != nil {
+			return nil, fmt.Errorf("chunk %d/%d: %w", i+1, len(chunks), err)
+		}
+		combined = append(combined, data...)
+		log.Info("runner: kokoro chunk done", "chunk", i+1, "of", len(chunks), "bytes", len(data))
+	}
+	return combined, nil
+}
+
 // runImportTask executes one PDF/EPUB import task.
 // Preferred path: when task.ChaptersKey is set, it reads pre-parsed chapters
 // JSON from MinIO (written by the backend at upload time) and ingests them.
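
For intuition, a rough sketch of the resulting call pattern (sizes hypothetical):

	// A 4,500-char chapter with chunkSize = 1000 → chunkText returns ~5
	// sentence-aligned chunks → 5 sequential k.GenerateAudio calls whose
	// MP3 outputs are appended into a single playable byte slice.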

@@ -7,7 +7,6 @@ import (
 	"fmt"
 	"io"
 	"os"
 	"regexp"
-	"sort"
 	"strconv"
 	"strings"

@@ -16,16 +15,10 @@ import (
 	"github.com/libnovel/backend/internal/domain"
 	minio "github.com/minio/minio-go/v7"
 	"github.com/pdfcpu/pdfcpu/pkg/api"
-	pdfcpu "github.com/pdfcpu/pdfcpu/pkg/pdfcpu"
 	"github.com/pdfcpu/pdfcpu/pkg/pdfcpu/model"
 	"golang.org/x/net/html"
 )

-// chapterHeadingRE matches common chapter heading patterns:
-// "Chapter 1", "Chapter 1:", "Chapter 1 -", "CHAPTER ONE", "1.", "Part 1", etc.
-var chapterHeadingRE = regexp.MustCompile(
-	`(?i)^(?:chapter|ch\.?|part|episode|book)\s+(\d+|[ivxlcdm]+)\b|^\d{1,4}[\.\)]\s+\S`)
-
 type importer struct {
 	mc *minioClient
 }

@@ -148,17 +141,16 @@ var pdfSkipBookmarks = map[string]bool{
 	"appendix": true, "color insert": true, "color illustrations": true,
 }

-// parsePDF extracts chapters from PDF bytes.
+// parsePDF extracts text from PDF bytes and returns it as a single chapter.
 //
+// The full readable text is returned as one chapter so the admin can manually
+// split it into chapters via the UI using --- markers.
+//
 // Strategy:
 //  1. Decrypt owner-protected PDFs (empty user password).
-//  2. Read the PDF outline (bookmarks) — these give chapter titles and page ranges.
-//  3. Extract raw content streams for every page using pdfcpu ExtractContent.
-//  4. For each story bookmark, concatenate the extracted text of its pages.
-//
-// Falls back to paragraph-splitting when no bookmarks are found.
-// This is fast (~100ms for a 250-page PDF) because it avoids font-glyph
-// resolution which causes older PDF libraries to hang on publisher PDFs.
+//  2. Extract raw content streams for every page using pdfcpu ExtractContent.
+//  3. Concatenate text from all pages in order, skipping front matter
+//     (cover, title page, copyright — typically the first 10 pages).
 func parsePDF(data []byte) ([]bookstore.Chapter, error) {
 	// Decrypt owner-protected PDFs (empty user password).
 	decrypted, err := decryptPDF(data)

@@ -186,9 +178,9 @@ func parsePDF(data []byte) ([]bookstore.Chapter, error) {
 		return nil, fmt.Errorf("PDF has no content pages")
 	}

-	// pdfcpu names files "out_Content_page_N.txt" — parse the page number
-	// from the filename so the map is correct regardless of lexicographic order.
+	// Parse page number from filename and build ordered text map.
 	pageTexts := make(map[int]string, len(entries))
+	maxPage := 0
 	for _, e := range entries {
 		pageNum := pageNumFromFilename(e.Name())
 		if pageNum <= 0 {

@@ -199,166 +191,49 @@ func parsePDF(data []byte) ([]bookstore.Chapter, error) {
 			continue
 		}
 		pageTexts[pageNum] = fixWin1252(extractTextFromContentStream(raw))
+		if pageNum > maxPage {
+			maxPage = pageNum
+		}
 	}

-	// Try to use bookmarks (outline) for chapter structure.
+	// Determine front-matter cutoff using bookmarks if available,
+	// otherwise skip the first 10 pages (cover/title/copyright).
+	bodyStart := 1
 	bookmarks, bmErr := api.Bookmarks(bytes.NewReader(data), conf)
-	if bmErr == nil && len(bookmarks) > 0 {
-		chapters := chaptersFromBookmarks(bookmarks, pageTexts)
-		if len(chapters) > 0 {
-			return chapters, nil
-		}
-	}
-
-	// Fallback: concatenate all page texts in page order and split by heading patterns.
-	var sb strings.Builder
-	maxPage := 0
-	for p := range pageTexts {
-		if p > maxPage {
-			maxPage = p
-		}
-	}
-	for p := 1; p <= maxPage; p++ {
-		sb.WriteString(pageTexts[p])
-		sb.WriteByte('\n')
-	}
-	chapters := extractChaptersFromText(sb.String())
-	if len(chapters) == 0 {
-		return nil, fmt.Errorf("could not extract any chapters from PDF")
-	}
-	return chapters, nil
-}
-
-// chaptersFromBookmarks builds a chapter list from PDF bookmarks + per-page text.
-// It flattens the bookmark tree, skips front/back matter entries, and assigns
-// page ranges so each chapter spans from its own start page to the next
-// bookmark's start page minus one.
-func chaptersFromBookmarks(bookmarks []pdfcpu.Bookmark, pageTexts map[int]string) []bookstore.Chapter {
-	// Flatten bookmark tree.
-	var flat []pdfcpu.Bookmark
-	var flatten func([]pdfcpu.Bookmark)
-	flatten = func(bms []pdfcpu.Bookmark) {
-		for _, bm := range bms {
-			flat = append(flat, bm)
-			flatten(bm.Kids)
-		}
-	}
-	flatten(bookmarks)
-
-	// Sort by page number.
-	sort.Slice(flat, func(i, j int) bool { return flat[i].PageFrom < flat[j].PageFrom })
-
-	// Assign PageThru for entries where it's 0 (last bookmark or missing).
-	maxPage := 0
-	for p := range pageTexts {
-		if p > maxPage {
-			maxPage = p
-		}
-	}
-	for i := range flat {
-		if flat[i].PageThru == 0 {
-			if i+1 < len(flat) {
-				flat[i].PageThru = flat[i+1].PageFrom - 1
-			} else {
-				flat[i].PageThru = maxPage
-			}
-		}
-	}
-
-	var chapters []bookstore.Chapter
-	chNum := 0
-	for _, bm := range flat {
-		if pdfSkipBookmarks[strings.ToLower(strings.TrimSpace(bm.Title))] {
-			continue
-		}
-		// Gather text for all pages in this bookmark's range.
-		// The first page of each chapter is typically a decorative title page
-		// (chapter number, subtitle art, series title) — skip it and start
-		// from PageFrom+1 so the content begins with actual story text.
-		bodyStart := bm.PageFrom + 1
-		if bodyStart > bm.PageThru {
-			bodyStart = bm.PageFrom // single-page section, use it
-		}
-		var sb strings.Builder
-		for p := bodyStart; p <= bm.PageThru; p++ {
-			if t, ok := pageTexts[p]; ok {
-				sb.WriteString(t)
-				sb.WriteByte('\n')
-			}
-		}
-		text := cleanChapterText(strings.TrimSpace(sb.String()))
-		if len(text) < 50 {
-			continue // skip nearly-empty sections
-		}
-		chNum++
-		chapters = append(chapters, bookstore.Chapter{
-			Number:  chNum,
-			Title:   bm.Title,
-			Content: text,
-		})
-	}
-	return chapters
-}
-
-// cleanChapterText removes decorative header fragments that sometimes appear
-// at the start of the first body page when the chapter subtitle is printed
-// at the top of that page (e.g. "for New Journeys!I stood atop the roof...").
-//
-// It strips any prefix text up to and including the last '!' or '?' that is
-// immediately followed by a capital letter on the same line (a run-on from the
-// title art), and removes short leading lines (< 40 chars) that look like
-// title/header text rather than story content.
-func cleanChapterText(text string) string {
-	lines := strings.Split(text, "\n")
-
-	// Find first line that is substantive story content.
-	// Strategy: skip short lines at the top. The first line >= 40 chars
-	// OR starting with an opening quote is the start of the story.
-	start := 0
-	for i, raw := range lines {
-		line := strings.TrimSpace(raw)
-		if line == "" {
-			start = i + 1
-			continue
-		}
-		// Long enough to be a real sentence fragment from a body page.
-		if len(line) >= 40 || strings.HasPrefix(line, "\u201C") || strings.HasPrefix(line, "\"") {
-			start = i
-			break
-		}
-		// Short line — if it ends with '!' or '?' and the NEXT non-empty
-		// token on the SAME line (run-on) starts a sentence, strip it.
-		// This catches "for New Journeys!I stood atop..." on one line.
-		start = i + 1 // tentatively skip this short line
-	}
-
-	result := strings.TrimSpace(strings.Join(lines[start:], "\n"))
-
-	// Strip any run-on title fragment at the very start of the first line.
-	// Pattern: something ending with '!' or '?' immediately before a capital letter.
-	// e.g. "for New Journeys!I stood..." → "I stood..."
-	if len(result) > 0 {
-		// Find last '!' or '?' in the first 80 bytes that is followed by [A-Z"].
-		firstLine := result
-		if nl := strings.Index(firstLine, "\n"); nl >= 0 {
-			firstLine = firstLine[:nl]
-		}
-		for i, c := range firstLine {
-			if (c == '!' || c == '?') && i+1 < len(firstLine) {
-				next := rune(firstLine[i+1])
-				if (next >= 'A' && next <= 'Z') || next == '\u201C' || next == '"' {
-					// Strip up to and including this '!'/'?'
-					result = strings.TrimSpace(result[i+1:])
-					break
-				}
-			}
-		}
-	}
-
-	if result == "" {
-		return text
-	}
-	return result
-}
+	if bmErr == nil {
+		for _, bm := range bookmarks {
+			title := strings.ToLower(strings.TrimSpace(bm.Title))
+			if !pdfSkipBookmarks[title] && bm.PageFrom > 0 {
+				// First non-front-matter bookmark — body starts here.
+				bodyStart = bm.PageFrom
+				break
+			}
+		}
+	} else if maxPage > 10 {
+		bodyStart = 11
+	}
+
+	// Concatenate all body pages.
+	var sb strings.Builder
+	for p := bodyStart; p <= maxPage; p++ {
+		t := strings.TrimSpace(pageTexts[p])
+		if t == "" {
+			continue
+		}
+		sb.WriteString(t)
+		sb.WriteString("\n\n")
+	}
+
+	text := strings.TrimSpace(sb.String())
+	if text == "" {
+		return nil, fmt.Errorf("could not extract any text from PDF")
+	}
+
+	return []bookstore.Chapter{{
+		Number:  1,
+		Title:   "Full Text",
+		Content: text,
+	}}, nil
+}

 // pageNumFromFilename extracts the page number from a pdfcpu content-stream

@@ -637,6 +512,7 @@ func parseEPUB(data []byte) ([]bookstore.Chapter, error) {
 	}

 	var chapters []bookstore.Chapter
+	chNum := 0
 	for i, href := range spineFiles {
 		fullPath := opfDir + href
 		content, err := epubFileContent(zr, fullPath)

@@ -647,12 +523,14 @@ func parseEPUB(data []byte) ([]bookstore.Chapter, error) {
 		if strings.TrimSpace(text) == "" {
 			continue
 		}
+		chNum++
 		title := titleMap[href]
 		if title == "" {
-			title = fmt.Sprintf("Chapter %d", i+1)
+			title = fmt.Sprintf("Chapter %d", chNum)
 		}
+		_ = i // spine index unused for numbering
 		chapters = append(chapters, bookstore.Chapter{
-			Number: i + 1,
+			Number:  chNum,
 			Title:   title,
 			Content: text,
 		})

@@ -949,80 +827,6 @@ func htmlToText(data []byte) string {
 	return strings.TrimSpace(strings.Join(out, "\n"))
 }

-// ── Chapter segmentation (shared by PDF and plain-text paths) ─────────────────
-
-// extractChaptersFromText splits a block of plain text into chapters by
-// detecting heading lines that match chapterHeadingRE.
-// Falls back to paragraph-splitting when no headings are found.
-func extractChaptersFromText(text string) []bookstore.Chapter {
-	lines := strings.Split(text, "\n")
-
-	type segment struct {
-		title  string
-		number int
-		lines  []string
-	}
-
-	var segments []segment
-	var cur *segment
-	chNum := 0
-
-	for _, line := range lines {
-		line = strings.TrimSpace(line)
-		if chapterHeadingRE.MatchString(line) {
-			if cur != nil {
-				segments = append(segments, *cur)
-			}
-			chNum++
-			// Try to parse the explicit chapter number from the heading.
-			if m := regexp.MustCompile(`\d+`).FindString(line); m != "" {
-				if n, err := strconv.Atoi(m); err == nil && n > 0 && n < 100000 {
-					chNum = n
-				}
-			}
-			cur = &segment{title: line, number: chNum}
-		} else if cur != nil && line != "" {
-			cur.lines = append(cur.lines, line)
-		}
-	}
-	if cur != nil {
-		segments = append(segments, *cur)
-	}
-
-	// Require segments to have meaningful content (>= 100 chars).
-	var chapters []bookstore.Chapter
-	for _, seg := range segments {
-		content := strings.Join(seg.lines, "\n")
-		if len(strings.TrimSpace(content)) < 50 {
-			continue
-		}
-		chapters = append(chapters, bookstore.Chapter{
-			Number:  seg.number,
-			Title:   seg.title,
-			Content: content,
-		})
-	}
-
-	// Fallback: no headings found — split by double newlines (paragraph blocks).
-	if len(chapters) == 0 {
-		paragraphs := strings.Split(text, "\n\n")
-		n := 0
-		for _, para := range paragraphs {
-			para = strings.TrimSpace(para)
-			if len(para) > 100 {
-				n++
-				chapters = append(chapters, bookstore.Chapter{
-					Number:  n,
-					Title:   fmt.Sprintf("Chapter %d", n),
-					Content: para,
-				})
-			}
-		}
-	}
-
-	return chapters
-}
-
 // ── Chapter ingestion ─────────────────────────────────────────────────────────

 // IngestChapters stores extracted chapters for a book.

@@ -55,6 +55,7 @@ var _ bookstore.CoverStore = (*Store)(nil)
 var _ bookstore.TranslationStore = (*Store)(nil)
 var _ bookstore.AIJobStore = (*Store)(nil)
 var _ bookstore.ChapterImageStore = (*Store)(nil)
+var _ bookstore.BookAdminStore = (*Store)(nil)
 var _ taskqueue.Producer = (*Store)(nil)
 var _ taskqueue.Consumer = (*Store)(nil)
 var _ taskqueue.Reader = (*Store)(nil)

@@ -226,6 +227,7 @@ type pbBook struct {
 	Ranking  int     `json:"ranking"`
 	Rating   float64 `json:"rating"`
 	Updated  string  `json:"updated"`
+	Archived bool    `json:"archived"`
 }

@@ -246,6 +248,7 @@ func (b pbBook) toDomain() domain.BookMeta {
 		Ranking:     b.Ranking,
 		Rating:      b.Rating,
 		MetaUpdated: metaUpdated,
+		Archived:    b.Archived,
 	}
 }

@@ -275,7 +278,7 @@ func (s *Store) ReadMetadata(ctx context.Context, slug string) (domain.BookMeta, bool, error) {
 }

 func (s *Store) ListBooks(ctx context.Context) ([]domain.BookMeta, error) {
-	items, err := s.pb.listAll(ctx, "books", "", "title")
+	items, err := s.pb.listAll(ctx, "books", "archived=false", "title")
 	if err != nil {
 		return nil, err
 	}

@@ -376,6 +379,84 @@ func (s *Store) ReindexChapters(ctx context.Context, slug string) (int, error) {
 	return count, nil
 }

+// ── BookAdminStore ────────────────────────────────────────────────────────────
+
+// ArchiveBook sets archived=true on the book record for slug.
+func (s *Store) ArchiveBook(ctx context.Context, slug string) error {
+	book, err := s.getBookBySlug(ctx, slug)
+	if err == ErrNotFound {
+		return ErrNotFound
+	}
+	if err != nil {
+		return fmt.Errorf("ArchiveBook: %w", err)
+	}
+	return s.pb.patch(ctx, fmt.Sprintf("/api/collections/books/records/%s", book.ID),
+		map[string]any{"archived": true})
+}
+
+// UnarchiveBook clears archived on the book record for slug.
+func (s *Store) UnarchiveBook(ctx context.Context, slug string) error {
+	book, err := s.getBookBySlug(ctx, slug)
+	if err == ErrNotFound {
+		return ErrNotFound
+	}
+	if err != nil {
+		return fmt.Errorf("UnarchiveBook: %w", err)
+	}
+	return s.pb.patch(ctx, fmt.Sprintf("/api/collections/books/records/%s", book.ID),
+		map[string]any{"archived": false})
+}
+
+// DeleteBook permanently removes all data for a book:
+//   - PocketBase books record
+//   - All PocketBase chapters_idx records for the slug
+//   - All MinIO chapter markdown objects ({slug}/chapter-*.md)
+//   - MinIO cover image (covers/{slug}.jpg)
+func (s *Store) DeleteBook(ctx context.Context, slug string) error {
+	// 1. Fetch the book record to get its PocketBase ID.
+	book, err := s.getBookBySlug(ctx, slug)
+	if err == ErrNotFound {
+		return ErrNotFound
+	}
+	if err != nil {
+		return fmt.Errorf("DeleteBook: fetch: %w", err)
+	}
+
+	// 2. Delete all chapters_idx records.
+	filter := fmt.Sprintf(`slug=%q`, slug)
+	items, err := s.pb.listAll(ctx, "chapters_idx", filter, "")
+	if err != nil && err != ErrNotFound {
+		return fmt.Errorf("DeleteBook: list chapters_idx: %w", err)
+	}
+	for _, raw := range items {
+		var rec struct {
+			ID string `json:"id"`
+		}
+		if json.Unmarshal(raw, &rec) == nil && rec.ID != "" {
+			if delErr := s.pb.delete(ctx, fmt.Sprintf("/api/collections/chapters_idx/records/%s", rec.ID)); delErr != nil {
+				s.log.Warn("DeleteBook: delete chapters_idx record failed", "slug", slug, "id", rec.ID, "err", delErr)
+			}
+		}
+	}
+
+	// 3. Delete MinIO chapter objects.
+	if err := s.mc.deleteObjects(ctx, s.mc.bucketChapters, slug+"/"); err != nil {
+		s.log.Warn("DeleteBook: delete chapter objects failed", "slug", slug, "err", err)
+	}
+
+	// 4. Delete MinIO cover image.
+	if err := s.mc.deleteObjects(ctx, s.mc.bucketBrowse, CoverObjectKey(slug)); err != nil {
+		s.log.Warn("DeleteBook: delete cover failed", "slug", slug, "err", err)
+	}
+
+	// 5. Delete the PocketBase books record.
+	if err := s.pb.delete(ctx, fmt.Sprintf("/api/collections/books/records/%s", book.ID)); err != nil {
+		return fmt.Errorf("DeleteBook: delete books record: %w", err)
+	}
+
+	return nil
+}
+
 // ── RankingStore ──────────────────────────────────────────────────────────────

 func (s *Store) WriteRankingItem(ctx context.Context, item domain.RankingItem) error {

@@ -144,7 +144,8 @@ create "books" '{
     {"name":"total_chapters","type":"number"},
     {"name":"source_url",    "type":"text"},
     {"name":"ranking",       "type":"number"},
-    {"name":"meta_updated",  "type":"text"}
+    {"name":"meta_updated",  "type":"text"},
+    {"name":"archived",      "type":"bool"}
   ]}'

 create "chapters_idx" '{

@@ -255,6 +256,7 @@ create "user_settings" '{
     {"name":"font_family",     "type":"text"},
     {"name":"font_size",       "type":"number"},
     {"name":"announce_chapter","type":"bool"},
+    {"name":"audio_mode",      "type":"text"},
     {"name":"updated",         "type":"text"}
   ]}'

@@ -389,6 +391,8 @@ add_field "user_settings" "locale" "text"
 add_field "user_settings" "font_family" "text"
 add_field "user_settings" "font_size" "number"
 add_field "user_settings" "announce_chapter" "bool"
+add_field "user_settings" "audio_mode" "text"
+add_field "books" "archived" "bool"

 # ── 6. Indexes ────────────────────────────────────────────────────────────────
 add_index "chapters_idx" "idx_chapters_idx_slug_number" \

@@ -36,6 +36,14 @@ import type { Voice } from '$lib/types';

 export type AudioStatus = 'idle' | 'loading' | 'generating' | 'ready' | 'error';
 export type NextStatus = 'none' | 'prefetching' | 'prefetched' | 'failed';
+/**
+ * 'stream'   – Use /api/audio-stream: audio starts playing within seconds,
+ *              stream is saved to MinIO concurrently. No runner task needed.
+ * 'generate' – Legacy mode: queue a runner task, poll until done, then play
+ *              from the presigned MinIO URL. Needed for CF AI voices which
+ *              do not support native streaming.
+ */
+export type AudioMode = 'stream' | 'generate';

 class AudioStore {
 	// ── What is loaded ──────────────────────────────────────────────────────

@@ -46,6 +54,13 @@ class AudioStore {
 	voice = $state('af_bella');
 	speed = $state(1.0);

+	/**
+	 * Playback mode:
+	 * 'stream'   – pipe from /api/audio-stream (low latency, saves concurrently)
+	 * 'generate' – queue runner task, poll, then play presigned URL (CF AI / legacy)
+	 */
+	audioMode = $state<AudioMode>('stream');
+
 	/** Cover image URL for the currently loaded book. */
 	cover = $state('');

@@ -613,41 +613,95 @@
 		return;
 	}

 	// Slow path: audio not yet in MinIO.
 	//
-	// For Kokoro / PocketTTS: always use the streaming endpoint so audio
-	// starts playing within seconds. The stream handler checks MinIO first
-	// (fast redirect if already cached) and otherwise generates + uploads
-	// concurrently. Even if the async runner is already working on this
-	// chapter, the stream will redirect to MinIO the moment the runner
-	// finishes — no harmful double-generation occurs because the backend
-	// deduplicates via AudioExists on the next request.
-	if (!voice.startsWith('cfai:')) {
+	// For Kokoro / PocketTTS in 'stream' mode: use the streaming endpoint so
+	// audio starts playing within seconds. The stream handler checks MinIO
+	// first (fast redirect if already cached) and otherwise generates +
+	// uploads concurrently.
+	//
+	// In 'generate' mode (user preference): queue a runner task and poll,
+	// same as CF AI — audio plays only after the full file is ready in MinIO.
+	if (!voice.startsWith('cfai:') && audioStore.audioMode === 'stream') {
 		// PocketTTS outputs raw WAV — skip the ffmpeg transcode entirely.
 		// WAV (PCM) is natively supported on all platforms including iOS Safari.
 		// Kokoro and CF AI output MP3 natively, so keep mp3 for those.
 		const isPocketTTS = voices.some((v) => v.id === voice && v.engine === 'pocket-tts');
 		const format = isPocketTTS ? 'wav' : 'mp3';
 		const qs = new URLSearchParams({ voice, format });
 		const streamUrl = `/api/audio-stream/${slug}/${chapter}?${qs}`;
 		// HEAD probe: check paywall without triggering generation.
 		const headRes = await fetch(streamUrl, { method: 'HEAD' }).catch(() => null);
 		if (headRes?.status === 402) {
 			audioStore.status = 'idle';
 			onProRequired?.();
 			return;
 		}
 		audioStore.audioUrl = streamUrl;
 		audioStore.status = 'ready';
 		maybeStartPrefetch();
 		return;
 	}

+	// Non-CF AI voices in 'generate' mode: queue runner task, show progress,
+	// wait for full audio in MinIO before playing (same as CF AI but no preview).
+	if (!voice.startsWith('cfai:')) {
+		audioStore.status = 'generating';
+		audioStore.isPreview = false;
+		startProgress();
+
+		if (!presignResult.enqueued) {
+			const res = await fetch(`/api/audio/${slug}/${chapter}`, {
+				method: 'POST',
+				headers: { 'Content-Type': 'application/json' },
+				body: JSON.stringify({ voice })
+			});
+
+			if (res.status === 402) {
+				audioStore.status = 'idle';
+				stopProgress();
+				onProRequired?.();
+				return;
+			}
+
+			if (!res.ok) throw new Error(`Generation failed: HTTP ${res.status}`);
+
+			if (res.status === 200) {
+				await res.body?.cancel();
+				await finishProgress();
+				const doneUrl = await tryPresign(slug, chapter, voice);
+				if (!doneUrl.ready) throw new Error('Audio generated but presign returned 404');
+				audioStore.audioUrl = doneUrl.url;
+				audioStore.status = 'ready';
+				restoreSavedAudioTime();
+				maybeStartPrefetch();
+				return;
+			}
+			// 202 — runner task enqueued, fall through to poll.
+		}
+
+		const final = await pollAudioStatus(slug, chapter, voice);
+		if (final.status === 'failed') {
+			throw new Error(
+				`Generation failed: ${(final as { error?: string }).error ?? 'unknown error'}`
+			);
+		}
+
+		await finishProgress();
+		const doneUrl = await tryPresign(slug, chapter, voice);
+		if (!doneUrl.ready) throw new Error('Audio generated but presign returned 404');
+		audioStore.audioUrl = doneUrl.url;
+		audioStore.status = 'ready';
+		restoreSavedAudioTime();
+		maybeStartPrefetch();
+		return;
+	}
+
 	// CF AI voices: use preview/swap strategy.
 	// 1. Fetch a short ~1-2 min preview clip from the first text chunk
 	//    so playback starts immediately — no more waiting behind a spinner.
 	// 2. Meanwhile keep polling the full audio job; when it finishes,
 	//    swap the <audio> src to the full URL preserving currentTime.
 	audioStore.status = 'generating';
 	audioStore.isPreview = false;
 	startProgress();

@@ -1019,6 +1073,33 @@
 	{#if voices.length > 0}<span class="text-(--color-border) text-xs leading-none">·</span>{/if}
 	<span class="text-xs text-(--color-muted) leading-none tabular-nums">~{estimatedMinutes} min</span>
 {/if}
+<!-- Stream / Generate mode toggle -->
+{#if !audioStore.voice.startsWith('cfai:')}
+	<span class="text-(--color-border) text-xs leading-none">·</span>
+	<button
+		type="button"
+		onclick={() => { audioStore.audioMode = audioStore.audioMode === 'stream' ? 'generate' : 'stream'; }}
+		class={cn(
+			'flex items-center gap-0.5 text-xs leading-none transition-colors',
+			audioStore.audioMode === 'stream'
+				? 'text-(--color-brand)'
+				: 'text-(--color-muted) hover:text-(--color-text)'
+		)}
+		title={audioStore.audioMode === 'stream' ? 'Stream mode — click to switch to generate' : 'Generate mode — click to switch to stream'}
+	>
+		{#if audioStore.audioMode === 'stream'}
+			<svg class="w-3 h-3 flex-shrink-0" fill="currentColor" viewBox="0 0 24 24">
+				<path d="M8 5v14l11-7z"/>
+			</svg>
+			Stream
+		{:else}
+			<svg class="w-3 h-3 flex-shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
+				<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"/>
+			</svg>
+			Generate
+		{/if}
+	</button>
+{/if}
 </div>
 </div>

@@ -664,24 +664,57 @@
 	{/if}
 </button>

 <!-- Announce chapter pill (only meaningful when auto-next is on) -->
 <button
 	type="button"
 	onclick={() => (audioStore.announceChapter = !audioStore.announceChapter)}
 	class={cn(
 		'flex items-center gap-1.5 px-3 py-1.5 rounded-full text-xs font-semibold border transition-colors',
 		audioStore.announceChapter
 			? 'border-(--color-brand) bg-(--color-brand)/15 text-(--color-brand)'
 			: 'border-(--color-border) bg-(--color-surface-2) text-(--color-muted) hover:text-(--color-text)'
 	)}
 	aria-pressed={audioStore.announceChapter}
 	title={audioStore.announceChapter ? 'Chapter announcing on' : 'Chapter announcing off'}
 >
 	<svg class="w-3.5 h-3.5" fill="currentColor" viewBox="0 0 24 24">
 		<path d="M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02z"/>
 	</svg>
 	Announce
 </button>

+<!-- Stream / Generate mode toggle -->
+<!-- CF AI voices are batch-only and always use generate mode regardless of this setting -->
+<button
+	type="button"
+	onclick={() => {
+		if (!audioStore.voice.startsWith('cfai:')) {
+			audioStore.audioMode = audioStore.audioMode === 'stream' ? 'generate' : 'stream';
+		}
+	}}
+	disabled={audioStore.voice.startsWith('cfai:')}
+	class={cn(
+		'flex items-center gap-1.5 px-3 py-1.5 rounded-full text-xs font-semibold border transition-colors',
+		audioStore.voice.startsWith('cfai:')
+			? 'border-(--color-border) bg-(--color-surface-2) text-(--color-border) cursor-not-allowed opacity-50'
+			: audioStore.audioMode === 'stream'
+				? 'border-(--color-brand) bg-(--color-brand)/15 text-(--color-brand)'
+				: 'border-(--color-border) bg-(--color-surface-2) text-(--color-muted) hover:text-(--color-text)'
+	)}
+	aria-pressed={audioStore.audioMode === 'stream'}
+	title={audioStore.voice.startsWith('cfai:') ? 'CF AI voices always use generate mode' : audioStore.audioMode === 'stream' ? 'Stream mode — audio starts instantly' : 'Generate mode — wait for full audio before playing'}
+>
+	{#if audioStore.audioMode === 'stream' && !audioStore.voice.startsWith('cfai:')}
+		<svg class="w-3.5 h-3.5" fill="currentColor" viewBox="0 0 24 24">
+			<path d="M8 5v14l11-7z"/>
+		</svg>
+	{:else}
+		<svg class="w-3.5 h-3.5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
+			<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"/>
+		</svg>
+	{/if}
+	{audioStore.audioMode === 'stream' && !audioStore.voice.startsWith('cfai:') ? 'Stream' : 'Generate'}
+</button>
+
 <!-- Sleep timer pill -->
 <button

@@ -76,6 +76,7 @@ export interface PBUserSettings {
 	font_family?: string;
 	font_size?: number;
 	announce_chapter?: boolean;
+	audio_mode?: string;
 	updated?: string;
 }

@@ -1013,7 +1014,7 @@ export async function getSettings(

 export async function saveSettings(
 	sessionId: string,
-	settings: { autoNext: boolean; voice: string; speed: number; theme?: string; locale?: string; fontFamily?: string; fontSize?: number; announceChapter?: boolean },
+	settings: { autoNext: boolean; voice: string; speed: number; theme?: string; locale?: string; fontFamily?: string; fontSize?: number; announceChapter?: boolean; audioMode?: string },
 	userId?: string
 ): Promise<void> {
 	const existing = await listOne<PBUserSettings & { id: string }>(

@@ -1033,6 +1034,7 @@ export async function saveSettings(
 	if (settings.fontFamily !== undefined) payload.font_family = settings.fontFamily;
 	if (settings.fontSize !== undefined) payload.font_size = settings.fontSize;
 	if (settings.announceChapter !== undefined) payload.announce_chapter = settings.announceChapter;
+	if (settings.audioMode !== undefined) payload.audio_mode = settings.audioMode;
 	if (userId) payload.user_id = userId;

 	if (existing) {

@@ -17,7 +17,7 @@ export const load: LayoutServerLoad = async ({ locals, url, cookies }) => {
 		redirect(302, `/login`);
 	}

-	let settings = { autoNext: false, voice: 'af_bella', speed: 1.0, theme: 'amber', locale: 'en', fontFamily: 'system', fontSize: 1.0, announceChapter: false };
+	let settings = { autoNext: false, voice: 'af_bella', speed: 1.0, theme: 'amber', locale: 'en', fontFamily: 'system', fontSize: 1.0, announceChapter: false, audioMode: 'stream' };
 	try {
 		const row = await getSettings(locals.sessionId, locals.user?.id);
 		if (row) {

@@ -29,7 +29,8 @@ export const load: LayoutServerLoad = async ({ locals, url, cookies }) => {
 				locale: row.locale ?? 'en',
 				fontFamily: row.font_family ?? 'system',
 				fontSize: row.font_size || 1.0,
-				announceChapter: row.announce_chapter ?? false
+				announceChapter: row.announce_chapter ?? false,
+				audioMode: row.audio_mode ?? 'stream'
 			};
 		}
 	} catch (e) {

@@ -157,6 +157,7 @@
 	audioStore.voice = data.settings.voice;
 	audioStore.speed = data.settings.speed;
 	audioStore.announceChapter = data.settings.announceChapter ?? false;
+	audioStore.audioMode = (data.settings.audioMode === 'generate' ? 'generate' : 'stream');
 }
 // Always sync theme + font (profile page calls invalidateAll after saving)
 currentTheme = data.settings.theme ?? 'amber';

@@ -179,6 +180,7 @@
 	const fontFamily = currentFontFamily;
 	const fontSize = currentFontSize;
 	const announceChapter = audioStore.announceChapter;
+	const audioMode = audioStore.audioMode;

 	// Skip saving until settings have been applied from the server AND
 	// at least one user-driven change has occurred after that.
@@ -189,7 +191,7 @@
|
||||
fetch('/api/settings', {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ autoNext, voice, speed, theme, fontFamily, fontSize, announceChapter })
|
||||
body: JSON.stringify({ autoNext, voice, speed, theme, fontFamily, fontSize, announceChapter, audioMode })
|
||||
}).catch(() => {});
|
||||
}, 800) as unknown as number;
|
||||
});
|
||||
|
||||
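The inline ternary that adopts the server value acts as a normalizer: anything other than 'generate' collapses to 'stream', so a stale or unknown stored value can never put the player into an undefined mode. Extracted as a standalone helper for clarity (a sketch; the type alias does not appear in the diff):

```ts
// Mirrors the adoption logic in the layout effect above: any value that is not
// exactly 'generate' falls back to the 'stream' default.
type AudioMode = 'stream' | 'generate';

function normalizeAudioMode(value: string | undefined): AudioMode {
  return value === 'generate' ? 'generate' : 'stream';
}
```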
@@ -5,7 +5,7 @@ import { log } from '$lib/server/logger';

/**
* GET /api/settings
* Returns the current user's settings (auto_next, voice, speed, theme, locale, fontFamily, fontSize, announceChapter).
* Returns the current user's settings (auto_next, voice, speed, theme, locale, fontFamily, fontSize, announceChapter, audioMode).
* Returns defaults if no settings record exists yet.
*/
export const GET: RequestHandler = async ({ locals }) => {
@@ -19,7 +19,8 @@ export const GET: RequestHandler = async ({ locals }) => {
locale: settings?.locale ?? 'en',
fontFamily: settings?.font_family ?? 'system',
fontSize: settings?.font_size || 1.0,
announceChapter: settings?.announce_chapter ?? false
announceChapter: settings?.announce_chapter ?? false,
audioMode: settings?.audio_mode ?? 'stream'
});
} catch (e) {
log.error('settings', 'GET failed', { err: String(e) });
@@ -29,7 +30,7 @@ export const GET: RequestHandler = async ({ locals }) => {

/**
* PUT /api/settings
* Body: { autoNext: boolean, voice: string, speed: number, theme?: string, locale?: string, fontFamily?: string, fontSize?: number, announceChapter?: boolean }
* Body: { autoNext: boolean, voice: string, speed: number, theme?: string, locale?: string, fontFamily?: string, fontSize?: number, announceChapter?: boolean, audioMode?: string }
* Saves user preferences.
*/
export const PUT: RequestHandler = async ({ request, locals }) => {
@@ -73,6 +74,12 @@ export const PUT: RequestHandler = async ({ request, locals }) => {
error(400, 'Invalid announceChapter — must be boolean');
}

// audioMode is optional — if provided it must be a known value
const validAudioModes = ['stream', 'generate'];
if (body.audioMode !== undefined && !validAudioModes.includes(body.audioMode)) {
error(400, `Invalid audioMode — must be one of: ${validAudioModes.join(', ')}`);
}

try {
await saveSettings(locals.sessionId, body, locals.user?.id);
} catch (e) {

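Taken together, the GET defaults and the PUT validation define the contract for the new field. A quick way to exercise it from the browser console (a sketch; the session cookie is assumed to be sent automatically by the browser):

```ts
// Valid request: audioMode is optional and must be 'stream' or 'generate'.
await fetch('/api/settings', {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ autoNext: true, voice: 'af_bella', speed: 1.0, audioMode: 'generate' })
});

// Invalid value: per the validation above, this is rejected with a 400 and
// "Invalid audioMode — must be one of: stream, generate".
await fetch('/api/settings', {
  method: 'PUT',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ autoNext: true, voice: 'af_bella', speed: 1.0, audioMode: 'download' })
});
```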
@@ -100,6 +100,58 @@
const genres = $derived(parseGenres(data.book?.genres ?? []));
const chapterList = $derived(data.chapters ?? []);

// ── Admin: split chapters (imported PDF/EPUB books) ──────────────────────
const isFullTextBook = $derived(
chapterList.length === 1 && chapterList[0].title === 'Full Text'
);
let splitText = $state('');
let splitSaving = $state(false);
let splitResult = $state<'saved' | 'error' | ''>('');
let splitError = $state('');
let splitOpen = $state(false);

$effect(() => {
// Pre-fill the textarea with chapter 1 content when the panel is opened.
if (splitOpen && !splitText && data.book?.slug && isFullTextBook) {
fetch(`/api/chapter-markdown/${encodeURIComponent(data.book.slug)}/1`)
.then((r) => r.ok ? r.text() : '')
.then((t) => {
// Strip leading "# Full Text\n\n" header if present.
splitText = t.replace(/^# Full Text\n\n/, '').trim();
})
.catch(() => {});
}
});

async function splitChapters() {
const slug = data.book?.slug;
if (splitSaving || !slug) return;
splitSaving = true;
splitResult = '';
splitError = '';
try {
const res = await fetch(`/api/admin/books/${encodeURIComponent(slug)}/split-chapters`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ text: splitText })
});
if (res.ok) {
splitResult = 'saved';
splitOpen = false;
await invalidateAll();
} else {
const d = await res.json().catch(() => ({}));
splitError = (d as any).error ?? 'Unknown error';
splitResult = 'error';
}
} catch (e: any) {
splitError = e?.message ?? '';
splitResult = 'error';
} finally {
splitSaving = false;
}
}

// ── Admin: rescrape ───────────────────────────────────────────────────────
let scraping = $state(false);
let scrapeResult = $state<'queued' | 'busy' | 'error' | ''>('');

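The splitChapters handler posts the edited text to the admin endpoint as a single { text } field. A sketch of the request it produces, with a made-up slug and chapter text; the '---' separator and optional '## Title' headings follow the format the panel explains in the template hunk further down:

```ts
// Hypothetical payload: chapters are divided by '---' on its own line, and a
// segment may open with an optional '## Chapter Title' heading.
const text = [
  '## Prologue',
  'Opening text of the first chapter…',
  '---',
  '## The Journey Begins',
  'Text of the second chapter…'
].join('\n');

await fetch(`/api/admin/books/${encodeURIComponent('imported-example-book')}/split-chapters`, {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ text })
});
```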
@@ -979,7 +1031,7 @@
</a>

<!-- Admin panel (collapsed by default, admin only) -->
{#if data.isAdmin && book.source_url}
{#if data.isAdmin}
<div>
<button
onclick={() => (adminOpen = !adminOpen)}

@@ -997,6 +1049,62 @@

{#if adminOpen}
<div class="px-4 py-3 border-t border-(--color-border) flex flex-col gap-5">

<!-- Chapter split tool (only for imported books with single "Full Text" chapter) -->
{#if isFullTextBook}
<div class="flex flex-col gap-2">
<div class="flex items-center justify-between">
<p class="text-xs font-medium text-(--color-muted) uppercase tracking-wide">Split Chapters</p>
<button
onclick={() => { splitOpen = !splitOpen; splitResult = ''; splitError = ''; }}
class="text-xs text-(--color-muted) hover:text-(--color-text) transition-colors"
>
{splitOpen ? 'Hide' : 'Edit'}
</button>
</div>
{#if !splitOpen}
<p class="text-xs text-(--color-muted)">
This book has a single "Full Text" chapter. Use this tool to split it into chapters.
</p>
{/if}
{#if splitOpen}
<p class="text-xs text-(--color-muted)">
Insert <code class="bg-(--color-surface-3) px-1 rounded">---</code> on its own line to divide chapters.
Optionally start a segment with <code class="bg-(--color-surface-3) px-1 rounded">## Chapter Title</code>.
</p>
<textarea
bind:value={splitText}
rows="16"
class="w-full px-2 py-1.5 rounded bg-(--color-surface-3) border border-(--color-border) text-(--color-text) text-xs font-mono focus:outline-none focus:border-(--color-brand) resize-y"
placeholder="Paste or edit the full text here. Use --- to split chapters."
></textarea>
<div class="flex items-center gap-3 flex-wrap">
<button
onclick={splitChapters}
disabled={splitSaving || !splitText.trim()}
class="flex items-center gap-1.5 px-3 py-1.5 rounded text-xs font-medium transition-colors
{splitSaving || !splitText.trim() ? 'bg-(--color-surface-3) text-(--color-muted) cursor-not-allowed' : 'bg-(--color-brand)/20 text-(--color-brand-dim) hover:bg-(--color-brand)/40 border border-(--color-brand)/30'}"
>
{#if splitSaving}
<svg class="w-3 h-3 animate-spin" fill="none" viewBox="0 0 24 24"><circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"/><path class="opacity-75" fill="currentColor" d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4z"/></svg>
Saving…
{:else}
Save chapters
{/if}
</button>
{#if splitResult === 'saved'}
<span class="text-xs text-green-400">Saved.</span>
{:else if splitResult === 'error'}
<span class="text-xs text-(--color-danger)">{splitError || 'Error.'}</span>
{/if}
</div>
{/if}
</div>
<hr class="border-(--color-border)" />
{/if}

<!-- Rescrape / range-scrape (only for scraped books with a source URL) -->
{#if book.source_url}
<!-- Rescrape -->
<div class="flex items-center gap-3 flex-wrap">
<button

@@ -1065,6 +1173,7 @@
</span>
{/if}
</div>
{/if}

<hr class="border-(--color-border)" />