Refactor gateway into modular architecture
Split 1878-line server.py into 15 focused modules:
- config.py: all env vars and constants
- database.py: schema, init, seed logic
- sessions.py: session/token CRUD
- proxy.py: proxy_request, SERVICE_MAP, resolve_service
- responses.py: ResponseMixin for handler helpers
- auth.py: login/logout/register handlers
- dashboard.py: dashboard, apps, connections, pinning
- command.py: AI command bar
- integrations/booklore.py: auth, books, cover, import
- integrations/kindle.py: send-to-kindle, file finder
- integrations/karakeep.py: save/delete bookmarks
- integrations/qbittorrent.py: download status
- integrations/image_proxy.py: external image proxy
server.py is now thin routing only (~344 lines).
All routes, methods, status codes, and responses preserved exactly.
Added PYTHONUNBUFFERED=1 to Dockerfile for live logging.
2026-03-29 00:14:46 -05:00
|
|
|
"""
|
|
|
|
|
Platform Gateway — Booklore integration (book library manager).
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
import json
|
|
|
|
|
import time
|
|
|
|
|
|
2026-03-29 07:02:09 -05:00
|
|
|
from config import BOOKLORE_URL, BOOKLORE_USER, BOOKLORE_PASS, BOOKLORE_BOOKS_DIR
|
Refactor gateway into modular architecture
Split 1878-line server.py into 15 focused modules:
- config.py: all env vars and constants
- database.py: schema, init, seed logic
- sessions.py: session/token CRUD
- proxy.py: proxy_request, SERVICE_MAP, resolve_service
- responses.py: ResponseMixin for handler helpers
- auth.py: login/logout/register handlers
- dashboard.py: dashboard, apps, connections, pinning
- command.py: AI command bar
- integrations/booklore.py: auth, books, cover, import
- integrations/kindle.py: send-to-kindle, file finder
- integrations/karakeep.py: save/delete bookmarks
- integrations/qbittorrent.py: download status
- integrations/image_proxy.py: external image proxy
server.py is now thin routing only (~344 lines).
All routes, methods, status codes, and responses preserved exactly.
Added PYTHONUNBUFFERED=1 to Dockerfile for live logging.
2026-03-29 00:14:46 -05:00
|
|
|
from proxy import proxy_request
|
|
|
|
|
|
2026-03-29 07:02:09 -05:00
|
|
|
# Mutable auth token state (not in config — config is for immutable values)
# "access"/"refresh": JWT strings returned by Booklore's login endpoint
# ("" when not yet authenticated); "expires": absolute epoch seconds after
# which the access token is considered stale (0 forces a fresh login).
_booklore_token = {"access": "", "refresh": "", "expires": 0}
|
|
|
|
|
|
Refactor gateway into modular architecture
Split 1878-line server.py into 15 focused modules:
- config.py: all env vars and constants
- database.py: schema, init, seed logic
- sessions.py: session/token CRUD
- proxy.py: proxy_request, SERVICE_MAP, resolve_service
- responses.py: ResponseMixin for handler helpers
- auth.py: login/logout/register handlers
- dashboard.py: dashboard, apps, connections, pinning
- command.py: AI command bar
- integrations/booklore.py: auth, books, cover, import
- integrations/kindle.py: send-to-kindle, file finder
- integrations/karakeep.py: save/delete bookmarks
- integrations/qbittorrent.py: download status
- integrations/image_proxy.py: external image proxy
server.py is now thin routing only (~344 lines).
All routes, methods, status codes, and responses preserved exactly.
Added PYTHONUNBUFFERED=1 to Dockerfile for live logging.
2026-03-29 00:14:46 -05:00
|
|
|
|
|
|
|
|
def booklore_auth():
    """Return a valid Booklore JWT access token, refreshing if needed.

    Reuses the cached token while it is at least 60 seconds from expiry;
    otherwise logs in with the configured credentials.

    Returns:
        The access token string, or None when credentials are missing or
        the login attempt fails.
    """
    # Reuse the cached token with a 60s safety margin before expiry.
    if _booklore_token["access"] and time.time() < _booklore_token["expires"] - 60:
        return _booklore_token["access"]
    if not BOOKLORE_USER or not BOOKLORE_PASS:
        return None
    try:
        body = json.dumps({"username": BOOKLORE_USER, "password": BOOKLORE_PASS}).encode()
        status, _, resp = proxy_request(
            f"{BOOKLORE_URL}/api/v1/auth/login", "POST",
            {"Content-Type": "application/json"}, body, timeout=10
        )
        if status == 200:
            data = json.loads(resp)
            _booklore_token["access"] = data["accessToken"]
            _booklore_token["refresh"] = data.get("refreshToken", "")
            # Booklore's login response carries no expiry here; assume 1 hour.
            _booklore_token["expires"] = time.time() + 3600
            return _booklore_token["access"]
        # Fix: a non-200 login previously fell through silently; log it so
        # auth failures are as visible as auth exceptions.
        print(f"[Booklore] Auth failed: HTTP {status}")
    except Exception as e:
        print(f"[Booklore] Auth failed: {e}")
    return None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def handle_booklore_libraries(handler):
    """Return Booklore libraries with their paths.

    Responds with {"libraries": [{"id", "name", "paths": [...]}]} on
    success, 502 when Booklore auth fails, the upstream status when the
    fetch fails, or 500 on unexpected errors.
    """
    token = booklore_auth()
    if not token:
        handler._send_json({"error": "Booklore auth failed"}, 502)
        return
    try:
        status, _, resp = proxy_request(
            f"{BOOKLORE_URL}/api/v1/libraries", "GET",
            {"Authorization": f"Bearer {token}"}, timeout=10
        )
        if status == 200:
            libs = json.loads(resp)
            result = []
            for lib in libs:
                paths = [{"id": p["id"], "path": p.get("path", "")} for p in lib.get("paths", [])]
                result.append({"id": lib["id"], "name": lib["name"], "paths": paths})
            handler._send_json({"libraries": result})
        else:
            handler._send_json({"error": "Failed to fetch libraries"}, status)
    except Exception as e:
        # Fix: consistent with handle_booklore_books — report unexpected
        # errors (network failure, malformed JSON) as a 500 JSON response
        # instead of letting the HTTP handler crash.
        handler._send_json({"error": str(e)}, 500)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def handle_booklore_import(handler, body):
    """Auto-import a file from bookdrop into a Booklore library.

    Expects: {"fileName": "...", "libraryId": N, "pathId": N}
    Flow: rescan bookdrop -> poll for the file -> finalize import.

    Responds 400 on bad input, 502 when Booklore auth fails, 404 if the
    file never appears after the rescan, otherwise relays Booklore's
    finalize status/response.
    """
    try:
        data = json.loads(body)
    except Exception:  # fix: exception value was bound but never used
        handler._send_json({"error": "Invalid JSON"}, 400)
        return

    file_name = data.get("fileName", "")
    library_id = data.get("libraryId")
    path_id = data.get("pathId")
    if not file_name or not library_id or not path_id:
        handler._send_json({"error": "Missing fileName, libraryId, or pathId"}, 400)
        return

    token = booklore_auth()
    if not token:
        handler._send_json({"error": "Booklore auth failed"}, 502)
        return

    headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}

    # 1. Trigger bookdrop rescan (fire-and-forget; the poll below confirms).
    proxy_request(f"{BOOKLORE_URL}/api/v1/bookdrop/rescan", "POST", headers, b"{}", timeout=15)

    # 2. Poll for the file to appear (6 tries x 2.5s = up to ~15 seconds)
    file_id = None
    file_meta = None
    for _ in range(6):
        time.sleep(2.5)
        s, _, r = proxy_request(
            f"{BOOKLORE_URL}/api/v1/bookdrop/files?status=pending&page=0&size=100",
            "GET", {"Authorization": f"Bearer {token}"}, timeout=10
        )
        if s == 200:
            files_data = json.loads(r)
            for f in files_data.get("content", []):
                if f.get("fileName", "") == file_name:
                    file_id = f["id"]
                    file_meta = f.get("originalMetadata") or f.get("fetchedMetadata")
                    break
        if file_id:
            break

    if not file_id:
        handler._send_json({"error": "File not found in bookdrop after rescan", "fileName": file_name}, 404)
        return

    # 3. Build metadata with thumbnailUrl (required by Booklore)
    meta = file_meta or {}  # fix: bind once instead of re-evaluating per field
    metadata = {
        "title": meta.get("title", file_name),
        "subtitle": "",
        "authors": meta.get("authors", []),
        "categories": meta.get("categories", []),
        "moods": [],
        "tags": [],
        "publisher": meta.get("publisher", ""),
        "publishedDate": meta.get("publishedDate", ""),
        "description": meta.get("description", ""),
        "isbn": meta.get("isbn13", meta.get("isbn10", "")),
        "language": meta.get("language", ""),
        "seriesName": meta.get("seriesName", ""),
        "seriesNumber": meta.get("seriesNumber"),
        "seriesTotal": meta.get("seriesTotal"),
        "thumbnailUrl": meta.get("thumbnailUrl", ""),
    }

    # 4. Finalize import
    payload = json.dumps({"files": [{"fileId": file_id, "libraryId": library_id, "pathId": path_id, "metadata": metadata}]}).encode()
    s, _, r = proxy_request(
        f"{BOOKLORE_URL}/api/v1/bookdrop/imports/finalize", "POST",
        headers, payload, timeout=30
    )
    if s == 200:
        result = json.loads(r)
        handler._send_json(result)
    else:
        print(f"[Booklore] Finalize failed ({s}): {r[:200]}")
        handler._send_json({"error": "Finalize import failed", "status": s}, s)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _book_file_index():
    """Index book files on disk: lowercase filename stem -> Path."""
    index = {}
    for ext in ["epub", "pdf", "mobi", "azw3"]:
        for fp in BOOKLORE_BOOKS_DIR.rglob(f"*.{ext}"):
            index[fp.stem.lower()] = fp
    return index


def _best_format(title, file_index):
    """Fuzzy-match *title* against file stems; return file format or None.

    Scores each file by the fraction of the title's first 4 (lowercased)
    words that appear as substrings in the stem; a match needs >= 0.5.
    """
    title_words = set(title.lower().split()[:4])
    best_match = None
    best_score = 0
    for fname, fp in file_index.items():
        matches = sum(1 for w in title_words if w in fname)
        score = matches / len(title_words) if title_words else 0
        if score > best_score:
            best_score = score
            best_match = fp
    if best_match and best_score >= 0.5:
        return best_match.suffix.lstrip(".").upper()
    return None


def handle_booklore_books(handler):
    """Return all books from Booklore, with on-disk file formats resolved.

    Responds with {"books": [...], "total": N} on success, 502 when auth
    fails, the upstream status on fetch failure, or 500 on unexpected errors.
    """
    token = booklore_auth()
    if not token:
        handler._send_json({"error": "Booklore auth failed"}, 502)
        return
    try:
        s, _, r = proxy_request(
            f"{BOOKLORE_URL}/api/v1/books", "GET",
            {"Authorization": f"Bearer {token}"}, timeout=15
        )
        if s == 200:
            books_raw = json.loads(r)
            books = []
            for b in books_raw:
                m = b.get("metadata") or {}
                books.append({
                    "id": b["id"],
                    "title": m.get("title") or "Untitled",
                    "authors": m.get("authors") or [],
                    "libraryId": b.get("libraryId"),
                    "libraryName": b.get("libraryName"),
                    "categories": m.get("categories") or [],
                    "pageCount": m.get("pageCount"),
                    "publisher": m.get("publisher"),
                    "isbn13": m.get("isbn13"),
                    "isbn10": m.get("isbn10"),
                    "googleId": m.get("googleId"),
                    "addedOn": b.get("addedOn"),
                })
            # Resolve file formats from disk (Booklore's API doesn't expose
            # them here, so we fuzzy-match titles to filenames).
            if BOOKLORE_BOOKS_DIR.exists():
                file_index = _book_file_index()
                for book in books:
                    book["format"] = _best_format(book["title"], file_index)

            handler._send_json({"books": books, "total": len(books)})
        else:
            handler._send_json({"error": "Failed to fetch books"}, s)
    except Exception as e:
        handler._send_json({"error": str(e)}, 500)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def handle_booklore_cover(handler, book_id):
    """Proxy a book's cover image from Booklore.

    On success, streams the image bytes with the upstream Content-Type
    (falling back to image/jpeg) and a 1-day public cache header.
    Responds 502 when auth fails, 404 when no cover is available, and
    500 on unexpected errors.
    """
    token = booklore_auth()
    if not token:
        handler._send_json({"error": "Booklore auth failed"}, 502)
        return
    try:
        s, headers_raw, body = proxy_request(
            f"{BOOKLORE_URL}/api/v1/books/{book_id}/cover", "GET",
            {"Authorization": f"Bearer {token}"}, timeout=10
        )
        if s == 200 and isinstance(body, bytes):
            ct = "image/jpeg"  # fallback when upstream omits Content-Type
            if isinstance(headers_raw, dict):
                ct = headers_raw.get("Content-Type", ct)
            handler.send_response(200)
            handler.send_header("Content-Type", ct)
            handler.send_header("Cache-Control", "public, max-age=86400")
            handler.end_headers()
            # Fix: the guard above already guarantees bytes, so the old
            # isinstance(body, str) re-encode branch was unreachable.
            handler.wfile.write(body)
            return
        handler._send_json({"error": "Cover not found"}, 404)
    except Exception as e:
        handler._send_json({"error": str(e)}, 500)
|