Refactor gateway into modular architecture
Split 1878-line server.py into 15 focused modules: - config.py: all env vars and constants - database.py: schema, init, seed logic - sessions.py: session/token CRUD - proxy.py: proxy_request, SERVICE_MAP, resolve_service - responses.py: ResponseMixin for handler helpers - auth.py: login/logout/register handlers - dashboard.py: dashboard, apps, connections, pinning - command.py: AI command bar - integrations/booklore.py: auth, books, cover, import - integrations/kindle.py: send-to-kindle, file finder - integrations/karakeep.py: save/delete bookmarks - integrations/qbittorrent.py: download status - integrations/image_proxy.py: external image proxy server.py is now thin routing only (~344 lines). All routes, methods, status codes, and responses preserved exactly. Added PYTHONUNBUFFERED=1 to Dockerfile for live logging.
This commit is contained in:
0
gateway/integrations/__init__.py
Normal file
0
gateway/integrations/__init__.py
Normal file
230
gateway/integrations/booklore.py
Normal file
230
gateway/integrations/booklore.py
Normal file
@@ -0,0 +1,230 @@
|
||||
"""
|
||||
Platform Gateway — Booklore integration (book library manager).
|
||||
"""
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
from config import (
|
||||
BOOKLORE_URL, BOOKLORE_USER, BOOKLORE_PASS,
|
||||
BOOKLORE_BOOKS_DIR, _booklore_token,
|
||||
)
|
||||
from proxy import proxy_request
|
||||
|
||||
|
||||
def booklore_auth():
|
||||
"""Get a valid Booklore JWT token, refreshing if needed."""
|
||||
global _booklore_token
|
||||
if _booklore_token["access"] and time.time() < _booklore_token["expires"] - 60:
|
||||
return _booklore_token["access"]
|
||||
if not BOOKLORE_USER or not BOOKLORE_PASS:
|
||||
return None
|
||||
try:
|
||||
body = json.dumps({"username": BOOKLORE_USER, "password": BOOKLORE_PASS}).encode()
|
||||
status, _, resp = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/auth/login", "POST",
|
||||
{"Content-Type": "application/json"}, body, timeout=10
|
||||
)
|
||||
if status == 200:
|
||||
data = json.loads(resp)
|
||||
_booklore_token["access"] = data["accessToken"]
|
||||
_booklore_token["refresh"] = data.get("refreshToken", "")
|
||||
_booklore_token["expires"] = time.time() + 3600 # 1hr
|
||||
return _booklore_token["access"]
|
||||
except Exception as e:
|
||||
print(f"[Booklore] Auth failed: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def handle_booklore_libraries(handler):
|
||||
"""Return Booklore libraries with their paths."""
|
||||
token = booklore_auth()
|
||||
if not token:
|
||||
handler._send_json({"error": "Booklore auth failed"}, 502)
|
||||
return
|
||||
status, _, resp = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/libraries", "GET",
|
||||
{"Authorization": f"Bearer {token}"}, timeout=10
|
||||
)
|
||||
if status == 200:
|
||||
libs = json.loads(resp)
|
||||
result = []
|
||||
for lib in libs:
|
||||
paths = [{"id": p["id"], "path": p.get("path", "")} for p in lib.get("paths", [])]
|
||||
result.append({"id": lib["id"], "name": lib["name"], "paths": paths})
|
||||
handler._send_json({"libraries": result})
|
||||
else:
|
||||
handler._send_json({"error": "Failed to fetch libraries"}, status)
|
||||
|
||||
|
||||
def handle_booklore_import(handler, body):
|
||||
"""Auto-import a file from bookdrop into a Booklore library.
|
||||
|
||||
Expects: {"fileName": "...", "libraryId": N, "pathId": N}
|
||||
Flow: rescan bookdrop -> find file -> finalize import
|
||||
"""
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": "Invalid JSON"}, 400)
|
||||
return
|
||||
|
||||
file_name = data.get("fileName", "")
|
||||
library_id = data.get("libraryId")
|
||||
path_id = data.get("pathId")
|
||||
if not file_name or not library_id or not path_id:
|
||||
handler._send_json({"error": "Missing fileName, libraryId, or pathId"}, 400)
|
||||
return
|
||||
|
||||
token = booklore_auth()
|
||||
if not token:
|
||||
handler._send_json({"error": "Booklore auth failed"}, 502)
|
||||
return
|
||||
|
||||
headers = {"Authorization": f"Bearer {token}", "Content-Type": "application/json"}
|
||||
|
||||
# 1. Trigger bookdrop rescan
|
||||
proxy_request(f"{BOOKLORE_URL}/api/v1/bookdrop/rescan", "POST", headers, b"{}", timeout=15)
|
||||
|
||||
# 2. Poll for the file to appear (up to 15 seconds)
|
||||
file_id = None
|
||||
file_meta = None
|
||||
for _ in range(6):
|
||||
time.sleep(2.5)
|
||||
s, _, r = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/bookdrop/files?status=pending&page=0&size=100",
|
||||
"GET", {"Authorization": f"Bearer {token}"}, timeout=10
|
||||
)
|
||||
if s == 200:
|
||||
files_data = json.loads(r)
|
||||
for f in files_data.get("content", []):
|
||||
if f.get("fileName", "") == file_name:
|
||||
file_id = f["id"]
|
||||
file_meta = f.get("originalMetadata") or f.get("fetchedMetadata")
|
||||
break
|
||||
if file_id:
|
||||
break
|
||||
|
||||
if not file_id:
|
||||
handler._send_json({"error": "File not found in bookdrop after rescan", "fileName": file_name}, 404)
|
||||
return
|
||||
|
||||
# 3. Build metadata with thumbnailUrl (required by Booklore)
|
||||
metadata = {
|
||||
"title": (file_meta or {}).get("title", file_name),
|
||||
"subtitle": "",
|
||||
"authors": (file_meta or {}).get("authors", []),
|
||||
"categories": (file_meta or {}).get("categories", []),
|
||||
"moods": [],
|
||||
"tags": [],
|
||||
"publisher": (file_meta or {}).get("publisher", ""),
|
||||
"publishedDate": (file_meta or {}).get("publishedDate", ""),
|
||||
"description": (file_meta or {}).get("description", ""),
|
||||
"isbn": (file_meta or {}).get("isbn13", (file_meta or {}).get("isbn10", "")),
|
||||
"language": (file_meta or {}).get("language", ""),
|
||||
"seriesName": (file_meta or {}).get("seriesName", ""),
|
||||
"seriesNumber": (file_meta or {}).get("seriesNumber"),
|
||||
"seriesTotal": (file_meta or {}).get("seriesTotal"),
|
||||
"thumbnailUrl": (file_meta or {}).get("thumbnailUrl", ""),
|
||||
}
|
||||
|
||||
# 4. Finalize import
|
||||
payload = json.dumps({"files": [{"fileId": file_id, "libraryId": library_id, "pathId": path_id, "metadata": metadata}]}).encode()
|
||||
s, _, r = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/bookdrop/imports/finalize", "POST",
|
||||
headers, payload, timeout=30
|
||||
)
|
||||
if s == 200:
|
||||
result = json.loads(r)
|
||||
handler._send_json(result)
|
||||
else:
|
||||
print(f"[Booklore] Finalize failed ({s}): {r[:200]}")
|
||||
handler._send_json({"error": "Finalize import failed", "status": s}, s)
|
||||
|
||||
|
||||
def handle_booklore_books(handler):
|
||||
"""Return all books from Booklore."""
|
||||
token = booklore_auth()
|
||||
if not token:
|
||||
handler._send_json({"error": "Booklore auth failed"}, 502)
|
||||
return
|
||||
try:
|
||||
s, _, r = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/books", "GET",
|
||||
{"Authorization": f"Bearer {token}"}, timeout=15
|
||||
)
|
||||
if s == 200:
|
||||
books_raw = json.loads(r)
|
||||
books = []
|
||||
for b in books_raw:
|
||||
m = b.get("metadata") or {}
|
||||
books.append({
|
||||
"id": b["id"],
|
||||
"title": m.get("title") or "Untitled",
|
||||
"authors": m.get("authors") or [],
|
||||
"libraryId": b.get("libraryId"),
|
||||
"libraryName": b.get("libraryName"),
|
||||
"categories": m.get("categories") or [],
|
||||
"pageCount": m.get("pageCount"),
|
||||
"publisher": m.get("publisher"),
|
||||
"isbn13": m.get("isbn13"),
|
||||
"isbn10": m.get("isbn10"),
|
||||
"googleId": m.get("googleId"),
|
||||
"addedOn": b.get("addedOn"),
|
||||
})
|
||||
# Resolve file formats from disk
|
||||
if BOOKLORE_BOOKS_DIR.exists():
|
||||
# Build index: lowercase title words -> file path
|
||||
file_index = {}
|
||||
for ext in ["epub", "pdf", "mobi", "azw3"]:
|
||||
for fp in BOOKLORE_BOOKS_DIR.rglob(f"*.{ext}"):
|
||||
file_index[fp.stem.lower()] = fp
|
||||
for book in books:
|
||||
title_words = set(book["title"].lower().split()[:4])
|
||||
best_match = None
|
||||
best_score = 0
|
||||
for fname, fp in file_index.items():
|
||||
matches = sum(1 for w in title_words if w in fname)
|
||||
score = matches / len(title_words) if title_words else 0
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = fp
|
||||
if best_match and best_score >= 0.5:
|
||||
book["format"] = best_match.suffix.lstrip(".").upper()
|
||||
else:
|
||||
book["format"] = None
|
||||
|
||||
handler._send_json({"books": books, "total": len(books)})
|
||||
else:
|
||||
handler._send_json({"error": "Failed to fetch books"}, s)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": str(e)}, 500)
|
||||
|
||||
|
||||
def handle_booklore_cover(handler, book_id):
|
||||
"""Proxy book cover image from Booklore."""
|
||||
token = booklore_auth()
|
||||
if not token:
|
||||
handler._send_json({"error": "Booklore auth failed"}, 502)
|
||||
return
|
||||
try:
|
||||
s, headers_raw, body = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/books/{book_id}/cover", "GET",
|
||||
{"Authorization": f"Bearer {token}"}, timeout=10
|
||||
)
|
||||
if s == 200 and isinstance(body, bytes):
|
||||
ct = "image/jpeg"
|
||||
if isinstance(headers_raw, dict):
|
||||
ct = headers_raw.get("Content-Type", ct)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", ct)
|
||||
handler.send_header("Cache-Control", "public, max-age=86400")
|
||||
handler.end_headers()
|
||||
if isinstance(body, str):
|
||||
handler.wfile.write(body.encode())
|
||||
else:
|
||||
handler.wfile.write(body)
|
||||
return
|
||||
handler._send_json({"error": "Cover not found"}, 404)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": str(e)}, 500)
|
||||
36
gateway/integrations/image_proxy.py
Normal file
36
gateway/integrations/image_proxy.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""
|
||||
Platform Gateway — Image proxy (bypass hotlink protection).
|
||||
"""
|
||||
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
|
||||
from config import _ssl_ctx
|
||||
|
||||
|
||||
def handle_image_proxy(handler):
|
||||
"""Proxy external images to bypass hotlink protection (e.g. Reddit)."""
|
||||
qs = urllib.parse.urlparse(handler.path).query
|
||||
params = urllib.parse.parse_qs(qs)
|
||||
url = params.get("url", [None])[0]
|
||||
if not url:
|
||||
handler._send_json({"error": "Missing url parameter"}, 400)
|
||||
return
|
||||
try:
|
||||
req = urllib.request.Request(url, headers={
|
||||
"User-Agent": "Mozilla/5.0 (compatible; PlatformProxy/1.0)",
|
||||
"Accept": "image/*,*/*",
|
||||
"Referer": urllib.parse.urlparse(url).scheme + "://" + urllib.parse.urlparse(url).netloc + "/",
|
||||
})
|
||||
resp = urllib.request.urlopen(req, timeout=10, context=_ssl_ctx)
|
||||
body = resp.read()
|
||||
ct = resp.headers.get("Content-Type", "image/jpeg")
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", ct)
|
||||
handler.send_header("Content-Length", len(body))
|
||||
handler.send_header("Cache-Control", "public, max-age=86400")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(body)
|
||||
except Exception as e:
|
||||
print(f"[ImageProxy] Error fetching {url}: {e}")
|
||||
handler._send_json({"error": "Failed to fetch image"}, 502)
|
||||
76
gateway/integrations/karakeep.py
Normal file
76
gateway/integrations/karakeep.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""
|
||||
Platform Gateway — Karakeep integration (bookmarking).
|
||||
"""
|
||||
|
||||
import json
|
||||
|
||||
from config import KARAKEEP_URL, KARAKEEP_API_KEY
|
||||
from proxy import proxy_request
|
||||
|
||||
|
||||
def handle_karakeep_save(handler, body):
|
||||
"""Save a URL to Karakeep as a bookmark."""
|
||||
if not KARAKEEP_API_KEY:
|
||||
handler._send_json({"error": "Karakeep not configured"}, 502)
|
||||
return
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": "Invalid JSON"}, 400)
|
||||
return
|
||||
|
||||
url = data.get("url", "")
|
||||
if not url:
|
||||
handler._send_json({"error": "Missing url"}, 400)
|
||||
return
|
||||
|
||||
payload = json.dumps({"type": "link", "url": url}).encode()
|
||||
headers = {
|
||||
"Authorization": f"Bearer {KARAKEEP_API_KEY}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
try:
|
||||
status, _, resp = proxy_request(
|
||||
f"{KARAKEEP_URL}/api/v1/bookmarks", "POST",
|
||||
headers, payload, timeout=15
|
||||
)
|
||||
if status in (200, 201):
|
||||
result = json.loads(resp)
|
||||
handler._send_json({"ok": True, "id": result.get("id", "")})
|
||||
else:
|
||||
print(f"[Karakeep] Save failed ({status}): {resp[:200]}")
|
||||
handler._send_json({"error": "Failed to save", "status": status}, status)
|
||||
except Exception as e:
|
||||
print(f"[Karakeep] Error: {e}")
|
||||
handler._send_json({"error": str(e)}, 500)
|
||||
|
||||
|
||||
def handle_karakeep_delete(handler, body):
|
||||
"""Delete a bookmark from Karakeep."""
|
||||
if not KARAKEEP_API_KEY:
|
||||
handler._send_json({"error": "Karakeep not configured"}, 502)
|
||||
return
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": "Invalid JSON"}, 400)
|
||||
return
|
||||
bookmark_id = data.get("id", "")
|
||||
if not bookmark_id:
|
||||
handler._send_json({"error": "Missing id"}, 400)
|
||||
return
|
||||
headers = {
|
||||
"Authorization": f"Bearer {KARAKEEP_API_KEY}",
|
||||
}
|
||||
try:
|
||||
status, _, resp = proxy_request(
|
||||
f"{KARAKEEP_URL}/api/v1/bookmarks/{bookmark_id}", "DELETE",
|
||||
headers, timeout=10
|
||||
)
|
||||
if status in (200, 204):
|
||||
handler._send_json({"ok": True})
|
||||
else:
|
||||
handler._send_json({"error": "Delete failed", "status": status}, status)
|
||||
except Exception as e:
|
||||
print(f"[Karakeep] Delete error: {e}")
|
||||
handler._send_json({"error": str(e)}, 500)
|
||||
202
gateway/integrations/kindle.py
Normal file
202
gateway/integrations/kindle.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
Platform Gateway — Kindle integration (send books via SMTP2GO).
|
||||
"""
|
||||
|
||||
import json
|
||||
import base64
|
||||
import urllib.request
|
||||
from pathlib import Path
|
||||
|
||||
from config import (
|
||||
BOOKLORE_URL, BOOKLORE_BOOKS_DIR, BOOKDROP_DIR,
|
||||
SMTP2GO_API_KEY, SMTP2GO_FROM_EMAIL, SMTP2GO_FROM_NAME,
|
||||
KINDLE_EMAIL_1, KINDLE_EMAIL_2,
|
||||
)
|
||||
from proxy import proxy_request
|
||||
from integrations.booklore import booklore_auth
|
||||
|
||||
|
||||
def _find_book_file(book_id: str) -> tuple:
|
||||
"""Find the actual ebook file for a Booklore book ID.
|
||||
Returns (file_path, book_metadata) or (None, None)."""
|
||||
token = booklore_auth()
|
||||
if not token:
|
||||
return None, None
|
||||
|
||||
# Get book metadata from Booklore
|
||||
s, _, r = proxy_request(
|
||||
f"{BOOKLORE_URL}/api/v1/books/{book_id}", "GET",
|
||||
{"Authorization": f"Bearer {token}"}, timeout=10
|
||||
)
|
||||
if s != 200:
|
||||
return None, None
|
||||
|
||||
book = json.loads(r)
|
||||
meta = book.get("metadata", {})
|
||||
title = meta.get("title", "")
|
||||
|
||||
if not title or not BOOKLORE_BOOKS_DIR.exists():
|
||||
return None, meta
|
||||
|
||||
# Search for the file in the library directory
|
||||
title_lower = title.lower()
|
||||
title_words = set(title_lower.split()[:4]) # First 4 words for matching
|
||||
|
||||
best_match = None
|
||||
best_score = 0
|
||||
|
||||
for ext in ["epub", "pdf", "mobi", "azw3"]:
|
||||
for filepath in BOOKLORE_BOOKS_DIR.rglob(f"*.{ext}"):
|
||||
fname = filepath.stem.lower()
|
||||
# Check if title words appear in filename
|
||||
matches = sum(1 for w in title_words if w in fname)
|
||||
score = matches / len(title_words) if title_words else 0
|
||||
# Prefer epub > pdf > mobi > azw3
|
||||
ext_bonus = {"epub": 0.1, "pdf": 0.05, "mobi": 0.03, "azw3": 0.02}.get(ext, 0)
|
||||
score += ext_bonus
|
||||
if score > best_score:
|
||||
best_score = score
|
||||
best_match = filepath
|
||||
|
||||
if best_match and best_score >= 0.5:
|
||||
return best_match, meta
|
||||
return None, meta
|
||||
|
||||
|
||||
def handle_send_to_kindle(handler, book_id: str, body: bytes):
|
||||
"""Send a book file to a Kindle email via SMTP2GO API."""
|
||||
if not SMTP2GO_API_KEY or not SMTP2GO_FROM_EMAIL:
|
||||
handler._send_json({"error": "Email not configured"}, 502)
|
||||
return
|
||||
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": "Invalid JSON"}, 400)
|
||||
return
|
||||
|
||||
target = data.get("target", "1")
|
||||
kindle_email = KINDLE_EMAIL_1 if target == "1" else KINDLE_EMAIL_2
|
||||
if not kindle_email:
|
||||
handler._send_json({"error": f"Kindle target {target} not configured"}, 400)
|
||||
return
|
||||
|
||||
# Find the book file
|
||||
file_path, meta = _find_book_file(book_id)
|
||||
if not file_path or not file_path.exists():
|
||||
handler._send_json({"error": "Book file not found on disk"}, 404)
|
||||
return
|
||||
|
||||
title = meta.get("title", "Book") if meta else "Book"
|
||||
author = ", ".join(meta.get("authors", [])) if meta else ""
|
||||
|
||||
# Read file and encode as base64
|
||||
file_data = file_path.read_bytes()
|
||||
file_b64 = base64.b64encode(file_data).decode("ascii")
|
||||
filename = file_path.name
|
||||
|
||||
# Determine MIME type
|
||||
ext = file_path.suffix.lower()
|
||||
mime_map = {".epub": "application/epub+zip", ".pdf": "application/pdf", ".mobi": "application/x-mobipocket-ebook", ".azw3": "application/x-mobi8-ebook"}
|
||||
mime_type = mime_map.get(ext, "application/octet-stream")
|
||||
|
||||
# Send via SMTP2GO API
|
||||
email_payload = {
|
||||
"api_key": SMTP2GO_API_KEY,
|
||||
"sender": f"{SMTP2GO_FROM_NAME} <{SMTP2GO_FROM_EMAIL}>",
|
||||
"to": [kindle_email],
|
||||
"subject": f"{title}" + (f" - {author}" if author else ""),
|
||||
"text_body": f"Sent from Platform: {title}" + (f" by {author}" if author else ""),
|
||||
"attachments": [{
|
||||
"filename": filename,
|
||||
"fileblob": file_b64,
|
||||
"mimetype": mime_type,
|
||||
}]
|
||||
}
|
||||
|
||||
try:
|
||||
req_body = json.dumps(email_payload).encode("utf-8")
|
||||
req = urllib.request.Request(
|
||||
"https://api.smtp2go.com/v3/email/send",
|
||||
data=req_body,
|
||||
headers={"Content-Type": "application/json"},
|
||||
)
|
||||
with urllib.request.urlopen(req, timeout=30) as resp:
|
||||
result = json.loads(resp.read())
|
||||
|
||||
if result.get("data", {}).get("succeeded", 0) > 0:
|
||||
handler._send_json({
|
||||
"success": True,
|
||||
"title": title,
|
||||
"sentTo": kindle_email,
|
||||
"format": ext.lstrip(".").upper(),
|
||||
"size": len(file_data),
|
||||
})
|
||||
else:
|
||||
handler._send_json({"error": "Email send failed", "detail": result}, 500)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": f"SMTP2GO error: {str(e)}"}, 500)
|
||||
|
||||
|
||||
def handle_send_file_to_kindle(handler, body: bytes):
|
||||
"""Send a downloaded file to Kindle by filename from bookdrop directory."""
|
||||
if not SMTP2GO_API_KEY or not SMTP2GO_FROM_EMAIL:
|
||||
handler._send_json({"error": "Email not configured"}, 502)
|
||||
return
|
||||
try:
|
||||
data = json.loads(body)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": "Invalid JSON"}, 400)
|
||||
return
|
||||
|
||||
filename = data.get("filename", "")
|
||||
target = data.get("target", "1")
|
||||
title = data.get("title", filename)
|
||||
|
||||
kindle_email = KINDLE_EMAIL_1 if target == "1" else KINDLE_EMAIL_2
|
||||
if not kindle_email:
|
||||
handler._send_json({"error": f"Kindle target {target} not configured"}, 400)
|
||||
return
|
||||
|
||||
# Find file in bookdrop or booklore-books
|
||||
file_path = None
|
||||
for search_dir in [BOOKDROP_DIR, BOOKLORE_BOOKS_DIR]:
|
||||
if not search_dir.exists():
|
||||
continue
|
||||
for fp in search_dir.rglob("*"):
|
||||
if fp.is_file() and fp.name == filename:
|
||||
file_path = fp
|
||||
break
|
||||
if file_path:
|
||||
break
|
||||
|
||||
if not file_path or not file_path.exists():
|
||||
handler._send_json({"error": f"File not found: {filename}"}, 404)
|
||||
return
|
||||
|
||||
file_data = file_path.read_bytes()
|
||||
file_b64 = base64.b64encode(file_data).decode("ascii")
|
||||
|
||||
ext = file_path.suffix.lower()
|
||||
mime_map = {".epub": "application/epub+zip", ".pdf": "application/pdf", ".mobi": "application/x-mobipocket-ebook", ".azw3": "application/x-mobi8-ebook"}
|
||||
mime_type = mime_map.get(ext, "application/octet-stream")
|
||||
|
||||
email_payload = {
|
||||
"api_key": SMTP2GO_API_KEY,
|
||||
"sender": f"{SMTP2GO_FROM_NAME} <{SMTP2GO_FROM_EMAIL}>",
|
||||
"to": [kindle_email],
|
||||
"subject": title,
|
||||
"text_body": f"Sent from Platform: {title}",
|
||||
"attachments": [{"filename": filename, "fileblob": file_b64, "mimetype": mime_type}]
|
||||
}
|
||||
try:
|
||||
req_body = json.dumps(email_payload).encode("utf-8")
|
||||
req = urllib.request.Request("https://api.smtp2go.com/v3/email/send", data=req_body, headers={"Content-Type": "application/json"})
|
||||
with urllib.request.urlopen(req, timeout=30) as resp:
|
||||
result = json.loads(resp.read())
|
||||
if result.get("data", {}).get("succeeded", 0) > 0:
|
||||
handler._send_json({"success": True, "title": title, "sentTo": kindle_email, "format": ext.lstrip(".").upper()})
|
||||
else:
|
||||
handler._send_json({"error": "Email send failed", "detail": result}, 500)
|
||||
except Exception as e:
|
||||
handler._send_json({"error": f"SMTP2GO error: {str(e)}"}, 500)
|
||||
48
gateway/integrations/qbittorrent.py
Normal file
48
gateway/integrations/qbittorrent.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""
|
||||
Platform Gateway — qBittorrent integration (download status).
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
|
||||
|
||||
def handle_downloads_status(handler):
|
||||
"""Get active downloads from qBittorrent."""
|
||||
qbt_host = os.environ.get("QBITTORRENT_HOST", "192.168.1.42")
|
||||
qbt_port = os.environ.get("QBITTORRENT_PORT", "8080")
|
||||
qbt_user = os.environ.get("QBITTORRENT_USERNAME", "admin")
|
||||
qbt_pass = os.environ.get("QBITTORRENT_PASSWORD", "")
|
||||
base = f"http://{qbt_host}:{qbt_port}"
|
||||
try:
|
||||
# Login
|
||||
login_data = urllib.parse.urlencode({"username": qbt_user, "password": qbt_pass}).encode()
|
||||
req = urllib.request.Request(f"{base}/api/v2/auth/login", data=login_data)
|
||||
with urllib.request.urlopen(req, timeout=5) as resp:
|
||||
cookie = resp.headers.get("Set-Cookie", "").split(";")[0]
|
||||
|
||||
# Get torrents
|
||||
req2 = urllib.request.Request(f"{base}/api/v2/torrents/info?filter=all&sort=added_on&reverse=true&limit=20",
|
||||
headers={"Cookie": cookie})
|
||||
with urllib.request.urlopen(req2, timeout=5) as resp2:
|
||||
torrents_raw = json.loads(resp2.read())
|
||||
|
||||
torrents = []
|
||||
for t in torrents_raw:
|
||||
torrents.append({
|
||||
"hash": t["hash"],
|
||||
"name": t["name"],
|
||||
"progress": round(t["progress"] * 100, 1),
|
||||
"state": t["state"],
|
||||
"size": t["total_size"],
|
||||
"downloaded": t["downloaded"],
|
||||
"dlSpeed": t["dlspeed"],
|
||||
"upSpeed": t["upspeed"],
|
||||
"eta": t.get("eta", 0),
|
||||
"addedOn": t.get("added_on", 0),
|
||||
"category": t.get("category", ""),
|
||||
})
|
||||
handler._send_json({"torrents": torrents, "total": len(torrents)})
|
||||
except Exception as e:
|
||||
handler._send_json({"torrents": [], "total": 0, "error": str(e)})
|
||||
Reference in New Issue
Block a user