#!/usr/bin/env python3
"""
Trips - Self-hosted trip planner with SQLite backend
"""
import os
import json
import sqlite3
import uuid
import hashlib
import secrets
import shutil
import urllib.request
import urllib.parse
import urllib.error
import ssl
import base64
import io
from http.server import HTTPServer, BaseHTTPRequestHandler
from http.cookies import SimpleCookie
from datetime import datetime, date, timedelta
from pathlib import Path
import mimetypes
import re
import html
import threading
import time
# PDF support (optional) -- PyPDF2 is only needed by the legacy
# extract_pdf_text() fallback; every other feature works without it.
try:
    import PyPDF2
    PDF_SUPPORT = True
except ImportError:
    PDF_SUPPORT = False
# Configuration -- every setting comes from the environment, with a default.
PORT = int(os.environ.get("PORT", 8086))
DATA_DIR = Path(os.environ.get("DATA_DIR", "/app/data"))
DB_PATH = DATA_DIR / "trips.db"
IMAGES_DIR = DATA_DIR / "images"
# NOTE(review): default credentials are admin/admin -- ensure these are
# overridden via environment variables in any real deployment.
USERNAME = os.environ.get("USERNAME", "admin")
PASSWORD = os.environ.get("PASSWORD", "admin")
GOOGLE_API_KEY = os.environ.get("GOOGLE_API_KEY", "")  # Places / Geocoding APIs
GOOGLE_CX = os.environ.get("GOOGLE_CX", "")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "")
OPENAI_MODEL = os.environ.get("OPENAI_MODEL", "gpt-5.2")
GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY", "")
EMAIL_API_KEY = os.environ.get("EMAIL_API_KEY", "")  # API key for email worker
TRIPS_API_KEY = os.environ.get("TRIPS_API_KEY", "")  # Bearer token for service-to-service API access
TELEGRAM_BOT_TOKEN = os.environ.get("TELEGRAM_BOT_TOKEN", "")
TELEGRAM_CHAT_ID = os.environ.get("TELEGRAM_CHAT_ID", "")
GOOGLE_CLIENT_ID = os.environ.get("GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("GOOGLE_CLIENT_SECRET", "")
IMMICH_URL = os.environ.get("IMMICH_URL", "")  # Immich photo server (slideshows)
IMMICH_API_KEY = os.environ.get("IMMICH_API_KEY", "")
# OIDC Configuration (Pocket ID)
OIDC_ISSUER = os.environ.get("OIDC_ISSUER", "")  # e.g., https://pocket.quadjourney.com
OIDC_CLIENT_ID = os.environ.get("OIDC_CLIENT_ID", "")
OIDC_CLIENT_SECRET = os.environ.get("OIDC_CLIENT_SECRET", "")
OIDC_REDIRECT_URI = os.environ.get("OIDC_REDIRECT_URI", "")  # e.g., https://trips.quadjourney.com/auth/callback
# Ensure directories exist before anything touches the DB or uploads
DATA_DIR.mkdir(parents=True, exist_ok=True)
IMAGES_DIR.mkdir(parents=True, exist_ok=True)
DOCS_DIR = DATA_DIR / "documents"
DOCS_DIR.mkdir(parents=True, exist_ok=True)
# Session storage (now uses database for persistence)
def get_db():
    """Open a SQLite connection with named-column rows and FK enforcement.

    Every caller gets its own connection; sqlite3.Row lets results be read
    by column name, and the PRAGMA turns on foreign-key cascades.
    """
    connection = sqlite3.connect(str(DB_PATH))
    connection.row_factory = sqlite3.Row
    connection.execute("PRAGMA foreign_keys = ON")
    return connection
def _add_column(cursor, table, column, col_type):
    """Idempotently add a column to an existing table.

    SQLite raises sqlite3.OperationalError ("duplicate column name") when
    the column already exists; that is the expected no-op case on restart,
    so only that exception is swallowed (the old code used bare `except:`,
    which also hid genuine failures and KeyboardInterrupt).
    """
    try:
        cursor.execute(f"ALTER TABLE {table} ADD COLUMN {column} {col_type}")
    except sqlite3.OperationalError:
        pass  # Column already exists
def init_db():
    """Initialize the database schema.

    Creates every table with CREATE TABLE IF NOT EXISTS, then applies
    column migrations via _add_column(), so the function is safe to run
    against both fresh and pre-existing databases.
    """
    conn = get_db()
    cursor = conn.cursor()
    # Trips table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS trips (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            description TEXT,
            start_date TEXT,
            end_date TEXT,
            image_path TEXT,
            share_token TEXT UNIQUE,
            ai_suggestions TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    # Columns added to trips after the initial release (migrations)
    for col, col_type in [('ai_suggestions', 'TEXT'),
                          ('ai_suggestions_openai', 'TEXT'),
                          ('immich_album_id', 'TEXT'),
                          ('share_password', 'TEXT')]:
        _add_column(cursor, 'trips', col, col_type)
    # Transportations table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS transportations (
            id TEXT PRIMARY KEY,
            trip_id TEXT NOT NULL,
            name TEXT,
            type TEXT DEFAULT 'plane',
            flight_number TEXT,
            from_location TEXT,
            to_location TEXT,
            date TEXT,
            end_date TEXT,
            timezone TEXT,
            description TEXT,
            link TEXT,
            cost_points REAL DEFAULT 0,
            cost_cash REAL DEFAULT 0,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (trip_id) REFERENCES trips(id) ON DELETE CASCADE
        )
    ''')
    # Lodging table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS lodging (
            id TEXT PRIMARY KEY,
            trip_id TEXT NOT NULL,
            name TEXT,
            type TEXT DEFAULT 'hotel',
            location TEXT,
            check_in TEXT,
            check_out TEXT,
            timezone TEXT,
            reservation_number TEXT,
            description TEXT,
            link TEXT,
            cost_points REAL DEFAULT 0,
            cost_cash REAL DEFAULT 0,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (trip_id) REFERENCES trips(id) ON DELETE CASCADE
        )
    ''')
    # Notes table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS notes (
            id TEXT PRIMARY KEY,
            trip_id TEXT NOT NULL,
            name TEXT,
            content TEXT,
            date TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (trip_id) REFERENCES trips(id) ON DELETE CASCADE
        )
    ''')
    # Locations table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS locations (
            id TEXT PRIMARY KEY,
            trip_id TEXT NOT NULL,
            name TEXT,
            description TEXT,
            latitude REAL,
            longitude REAL,
            category TEXT,
            visit_date TEXT,
            start_time TEXT,
            end_time TEXT,
            link TEXT,
            cost_points REAL DEFAULT 0,
            cost_cash REAL DEFAULT 0,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (trip_id) REFERENCES trips(id) ON DELETE CASCADE
        )
    ''')
    # Shared columns added after release to all three itinerary tables
    for table in ['locations', 'transportations', 'lodging']:
        for col, col_type in [('start_time', 'TEXT'), ('end_time', 'TEXT'), ('link', 'TEXT'),
                              ('cost_points', 'REAL DEFAULT 0'), ('cost_cash', 'REAL DEFAULT 0')]:
            _add_column(cursor, table, col, col_type)
    # Hike-specific fields, plus Google Places integration, on locations
    for col, col_type in [('hike_distance', 'TEXT'), ('hike_difficulty', 'TEXT'),
                          ('hike_time', 'TEXT'), ('place_id', 'TEXT'), ('address', 'TEXT')]:
        _add_column(cursor, 'locations', col, col_type)
    # Lat/lng (for map display) and place_id on lodging
    for col, col_type in [('latitude', 'REAL'), ('longitude', 'REAL'), ('place_id', 'TEXT')]:
        _add_column(cursor, 'lodging', col, col_type)
    # Lat/lng and place_id for transportation from/to endpoints
    for col, col_type in [('from_lat', 'REAL'), ('from_lng', 'REAL'),
                          ('to_lat', 'REAL'), ('to_lng', 'REAL'),
                          ('from_place_id', 'TEXT'), ('to_place_id', 'TEXT')]:
        _add_column(cursor, 'transportations', col, col_type)
    # Images table
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS images (
            id TEXT PRIMARY KEY,
            entity_type TEXT NOT NULL,
            entity_id TEXT NOT NULL,
            file_path TEXT NOT NULL,
            is_primary INTEGER DEFAULT 0,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    # Documents table (for confirmation docs, PDFs, etc.)
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS documents (
            id TEXT PRIMARY KEY,
            entity_type TEXT NOT NULL,
            entity_id TEXT NOT NULL,
            file_path TEXT NOT NULL,
            file_name TEXT NOT NULL,
            mime_type TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    # Sessions table (for persistent login sessions)
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS sessions (
            token TEXT PRIMARY KEY,
            username TEXT NOT NULL,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    # Pending imports table (for email-parsed bookings awaiting review)
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS pending_imports (
            id TEXT PRIMARY KEY,
            entry_type TEXT NOT NULL,
            parsed_data TEXT NOT NULL,
            source TEXT DEFAULT 'email',
            email_subject TEXT,
            email_from TEXT,
            status TEXT DEFAULT 'pending',
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
    ''')
    # Quick adds table (for quick capture entries)
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS quick_adds (
            id TEXT PRIMARY KEY,
            trip_id TEXT NOT NULL,
            name TEXT NOT NULL,
            category TEXT NOT NULL,
            place_id TEXT,
            address TEXT,
            latitude REAL,
            longitude REAL,
            photo_path TEXT,
            note TEXT,
            captured_at TEXT NOT NULL,
            status TEXT DEFAULT 'pending',
            attached_to_id TEXT,
            attached_to_type TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (trip_id) REFERENCES trips(id)
        )
    ''')
    conn.commit()
    conn.close()
# ==================== OpenAI Integration ====================
# System prompt shared by the text / image / PDF parsers. The model must
# answer with a single JSON object in the schema embedded below; callers
# strip optional markdown code fences before json.loads()-ing the reply.
PARSE_SYSTEM_PROMPT = """You are a travel trip planner assistant. Parse user input about ANY travel-related item and extract structured data.
The user may type naturally like "hiking at Garden of the Gods on Monday" or paste formal booking confirmations. Handle both.
Return ONLY valid JSON with this structure:
{
"type": "flight" | "car" | "train" | "hotel" | "location" | "note",
"confidence": 0.0-1.0,
"data": {
// For flights/trains/cars (type = "flight", "car", or "train"):
"name": "Description",
"flight_number": "AA1234", // or empty for cars/trains
"from_location": "Dallas, TX (DFW)",
"to_location": "New York, NY (LGA)",
"date": "2025-01-15T08:50:00",
"end_date": "2025-01-15T14:30:00",
"timezone": "America/Chicago",
"type": "plane", // or "car" or "train"
"description": ""
// For hotels/lodging (type = "hotel"):
"name": "Specific property name like 'Alila Jabal Akhdar'",
"location": "Address or city",
"check_in": "2025-01-15T15:00:00",
"check_out": "2025-01-18T11:00:00",
"timezone": "America/New_York",
"reservation_number": "ABC123",
"type": "hotel",
"description": ""
// For locations/activities/attractions/restaurants (type = "location"):
"name": "Place or activity name",
"category": "attraction" | "restaurant" | "cafe" | "bar" | "hike" | "shopping" | "beach",
"visit_date": "2025-01-15",
"start_time": "2025-01-15T10:00:00",
"end_time": "2025-01-15T12:00:00",
"address": "",
"description": "Any details about the activity"
// For notes (type = "note") - ONLY use this for actual notes/reminders, NOT activities:
"name": "Note title",
"content": "Note content",
"date": "2025-01-15"
}
}
Guidelines:
- IMPORTANT: Activities like hiking, dining, sightseeing, visiting attractions = type "location", NOT "note"
- "note" is ONLY for actual notes, reminders, or text that doesn't describe an activity/place
- For flights, infer timezone from airport codes
- For hotels, check-in defaults to 3PM, check-out 11AM
- For hotels, use the SPECIFIC property name, not the brand
- For activities without a specific time, estimate a reasonable time
- Car rentals and rides = type "car"
- Parse dates in any format. "Monday" = next Monday relative to trip dates
- Extract as much information as possible"""
def call_openai(messages, max_completion_tokens=2000):
    """Send a chat-completion request to OpenAI.

    Returns the assistant reply text on success, or a dict with an
    "error" key when the API key is missing or the request fails.
    """
    if not OPENAI_API_KEY:
        return {"error": "OpenAI API key not configured"}
    # TLS verification is disabled here, matching the rest of this file.
    ctx = ssl.create_default_context()
    ctx.check_hostname = False
    ctx.verify_mode = ssl.CERT_NONE
    payload = json.dumps({
        "model": OPENAI_MODEL,
        "messages": messages,
        "max_completion_tokens": max_completion_tokens,
        "temperature": 0.1
    }).encode("utf-8")
    request = urllib.request.Request(
        "https://api.openai.com/v1/chat/completions",
        data=payload,
        headers={
            "Authorization": f"Bearer {OPENAI_API_KEY}",
            "Content-Type": "application/json"
        },
        method="POST"
    )
    try:
        with urllib.request.urlopen(request, context=ctx, timeout=120) as resp:
            reply = json.loads(resp.read().decode())
            return reply["choices"][0]["message"]["content"]
    except Exception as e:
        return {"error": f"OpenAI API error: {str(e)}"}
def call_gemini(prompt, use_search_grounding=True):
    """Call Gemini 3 Pro API with optional Google Search grounding.

    Args:
        prompt: Plain-text prompt, sent as a single user part.
        use_search_grounding: When True, attaches the google_search tool so
            the model can ground its answer in live search results.
    Returns:
        The generated text (all text parts joined) on success, or a dict
        with an "error" key on failure or an empty/blocked response.
    """
    if not GEMINI_API_KEY:
        return {"error": "Gemini API key not configured"}
    # NOTE(review): TLS certificate verification is disabled, as in the
    # other HTTP calls in this file -- confirm this is intentional.
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE
    # Build request body
    request_body = {
        "contents": [{
            "parts": [{"text": prompt}]
        }],
        "generationConfig": {
            "temperature": 0.7,
            "maxOutputTokens": 4096
        }
    }
    # Add Google Search grounding for real-time research
    if use_search_grounding:
        request_body["tools"] = [{
            "google_search": {}
        }]
    data = json.dumps(request_body).encode("utf-8")
    # Use Gemini 3 Pro (latest Pro model)
    url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-3-pro-preview:generateContent?key={GEMINI_API_KEY}"
    req = urllib.request.Request(
        url,
        data=data,
        headers={"Content-Type": "application/json"},
        method="POST"
    )
    try:
        with urllib.request.urlopen(req, context=ssl_context, timeout=120) as response:
            result = json.loads(response.read().decode())
            # Extract text from response
            if "candidates" in result and result["candidates"]:
                candidate = result["candidates"][0]
                if "content" in candidate and "parts" in candidate["content"]:
                    text_parts = [p.get("text", "") for p in candidate["content"]["parts"] if "text" in p]
                    return "".join(text_parts)
            # Candidate list missing/empty (e.g. safety-blocked response)
            return {"error": "No response from Gemini"}
    except urllib.error.HTTPError as e:
        error_body = e.read().decode() if e.fp else ""
        return {"error": f"Gemini API error: {e.code} - {error_body[:500]}"}
    except Exception as e:
        return {"error": f"Gemini API error: {str(e)}"}
def is_airport_code(text):
    """Return True when *text* (after trimming) is a 3-letter uppercase
    code, IATA-style (e.g. "DFW")."""
    if not text:
        return False
    candidate = text.strip()
    if len(candidate) != 3:
        return False
    return candidate.isalpha() and candidate.isupper()
def format_location_for_geocoding(location, is_plane=False):
    """Format a location string for geocoding.

    A bare 3-letter airport code (e.g. "MCT") gets " international airport"
    appended so the geocoder resolves the airport rather than the city.
    For plane legs, any location not already mentioning an airport (e.g.
    "Muscat (MCT)") gets the same suffix. Everything else passes through.

    Improvements over the original: the redundant outer conditional is
    flattened and the non-idiomatic `not "x" in y` is now `"x" not in y`;
    behavior is unchanged.
    """
    if not location:
        return location
    location = location.strip()
    if is_airport_code(location):
        return f"{location} international airport"
    if is_plane and "airport" not in location.lower():
        return f"{location} international airport"
    return location
def get_place_details(place_id):
    """Fetch details for a place from the Google Places API (New).

    Returns a dict with latitude, longitude, address, name, types and
    primary_type, or an empty dict when the key/id is missing or the
    request fails.
    """
    if not GOOGLE_API_KEY or not place_id:
        return {}
    try:
        endpoint = f"https://places.googleapis.com/v1/places/{place_id}"
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(endpoint)
        request.add_header('X-Goog-Api-Key', GOOGLE_API_KEY)
        request.add_header('X-Goog-FieldMask', 'displayName,formattedAddress,location,types,primaryType')
        with urllib.request.urlopen(request, context=ctx, timeout=10) as resp:
            place = json.loads(resp.read().decode())
        coords = place.get("location", {})
        return {
            "latitude": coords.get("latitude"),
            "longitude": coords.get("longitude"),
            "address": place.get("formattedAddress", ""),
            "name": place.get("displayName", {}).get("text", ""),
            "types": place.get("types", []),
            "primary_type": place.get("primaryType", ""),
        }
    except Exception as e:
        print(f"Place details error for '{place_id}': {e}")
        return {}
def auto_resolve_place(name, address=""):
    """Resolve a free-text place name via Places Autocomplete.

    Queries autocomplete with "name address", then fetches details for the
    top suggestion. Returns (place_id, lat, lng, address) on success, or
    (None, None, None, None) when nothing could be resolved.
    """
    if not GOOGLE_API_KEY or not name:
        return None, None, None, None
    try:
        search_text = f"{name} {address}".strip() if address else name
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        body = json.dumps({"input": search_text}).encode()
        request = urllib.request.Request(
            "https://places.googleapis.com/v1/places:autocomplete",
            data=body,
            method="POST",
        )
        request.add_header("Content-Type", "application/json")
        request.add_header("X-Goog-Api-Key", GOOGLE_API_KEY)
        with urllib.request.urlopen(request, context=ctx, timeout=10) as resp:
            reply = json.loads(resp.read().decode())
        suggestions = reply.get("suggestions")
        if suggestions:
            prediction = suggestions[0].get("placePrediction", {})
            # Newer responses carry "place" ("places/<id>"); older, "placeId".
            pid = prediction.get("place", "").replace("places/", "") or prediction.get("placeId", "")
            if pid:
                details = get_place_details(pid)
                return pid, details.get("latitude"), details.get("longitude"), details.get("address", "")
    except Exception as e:
        print(f"Auto-resolve place error for '{name}': {e}")
    return None, None, None, None
def geocode_address(address):
    """Geocode *address* via the Google Maps Geocoding API.

    Returns (lat, lng) on success, (None, None) otherwise. Fallback for
    entries without a place_id - prefer get_place_details() when possible.
    """
    if not GOOGLE_API_KEY or not address:
        return None, None
    try:
        endpoint = (
            "https://maps.googleapis.com/maps/api/geocode/json"
            f"?address={urllib.parse.quote(address)}&key={GOOGLE_API_KEY}"
        )
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(endpoint)
        with urllib.request.urlopen(request, context=ctx, timeout=10) as resp:
            reply = json.loads(resp.read().decode())
        if reply.get("status") == "OK" and reply.get("results"):
            point = reply["results"][0]["geometry"]["location"]
            return point["lat"], point["lng"]
    except Exception as e:
        print(f"Geocoding error for '{address}': {e}")
    return None, None
def extract_pdf_text(pdf_data):
    """Extract plain text from PDF bytes using PyPDF2 (legacy fallback).

    Returns the newline-joined text of all pages that yielded any text,
    or None when PyPDF2 is unavailable or the data cannot be parsed.
    """
    if not PDF_SUPPORT:
        return None
    try:
        reader = PyPDF2.PdfReader(io.BytesIO(pdf_data))
        extracted = (page.extract_text() for page in reader.pages)
        return "\n".join(text for text in extracted if text)
    except Exception:
        return None
def parse_pdf_input(pdf_base64, filename="document.pdf", trip_start_date=None, trip_end_date=None):
    """Parse a PDF file using OpenAI Responses API (required for PDF support).

    Args:
        pdf_base64: Base64-encoded PDF bytes.
        filename: File name reported to the API alongside the data URL.
        trip_start_date / trip_end_date: Optional trip bounds injected into
            the prompt so the model can resolve relative dates.
    Returns:
        Parsed dict in the PARSE_SYSTEM_PROMPT schema, or a dict with an
        "error" key (plus the raw model text under "raw" when JSON
        decoding fails).
    """
    if not OPENAI_API_KEY:
        return {"error": "OpenAI API key not configured"}
    context = ""
    if trip_start_date and trip_end_date:
        context = f"\nContext: This is for a trip from {trip_start_date} to {trip_end_date}."
    # Build the prompt with system instructions included
    prompt_text = f"""{PARSE_SYSTEM_PROMPT}{context}
Extract booking information from this PDF. Return structured JSON only, no markdown formatting."""
    # Use Responses API format for PDF files (chat/completions cannot take
    # an input_file part, hence the separate endpoint below).
    data = json.dumps({
        "model": OPENAI_MODEL,
        "input": [
            {
                "role": "user",
                "content": [
                    {
                        "type": "input_file",
                        "filename": filename,
                        "file_data": f"data:application/pdf;base64,{pdf_base64}"
                    },
                    {
                        "type": "input_text",
                        "text": prompt_text
                    }
                ]
            }
        ]
    }).encode("utf-8")
    # NOTE(review): TLS certificate verification disabled, as elsewhere here.
    ssl_context = ssl.create_default_context()
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE
    req = urllib.request.Request(
        "https://api.openai.com/v1/responses",
        data=data,
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {OPENAI_API_KEY}"
        },
        method="POST"
    )
    try:
        with urllib.request.urlopen(req, context=ssl_context, timeout=120) as response:
            result_data = json.loads(response.read().decode())
            # Responses API returns: output[0].content[0].text
            result = result_data["output"][0]["content"][0]["text"]
    except urllib.error.HTTPError as e:
        error_body = e.read().decode() if e.fp else ""
        print(f"OpenAI API HTTP Error: {e.code} - {error_body}", flush=True)
        return {"error": f"OpenAI API error: HTTP {e.code} - {error_body}"}
    except Exception as e:
        return {"error": f"OpenAI API error: {str(e)}"}
    try:
        # Strip optional markdown code fences before JSON parsing
        result = result.strip()
        if result.startswith("```json"):
            result = result[7:]
        if result.startswith("```"):
            result = result[3:]
        if result.endswith("```"):
            result = result[:-3]
        return json.loads(result.strip())
    except json.JSONDecodeError as e:
        return {"error": f"Failed to parse AI response: {str(e)}", "raw": result}
def parse_text_input(text, trip_start_date=None, trip_end_date=None):
    """Parse natural-language text into structured booking data.

    Sends *text* to OpenAI with PARSE_SYSTEM_PROMPT (plus optional trip-date
    context) and decodes the JSON reply. Returns the parsed dict, or a dict
    with an "error" key on API or decode failure.
    """
    context = ""
    if trip_start_date and trip_end_date:
        context = f"\nContext: This is for a trip from {trip_start_date} to {trip_end_date}. If the user mentions a day number (e.g., 'day 3'), calculate the actual date."
    reply = call_openai([
        {"role": "system", "content": PARSE_SYSTEM_PROMPT + context},
        {"role": "user", "content": text}
    ])
    if isinstance(reply, dict) and "error" in reply:
        return reply
    try:
        # The model sometimes wraps its JSON in markdown fences - remove them.
        cleaned = reply.strip()
        if cleaned.startswith("```json"):
            cleaned = cleaned[7:]
        if cleaned.startswith("```"):
            cleaned = cleaned[3:]
        if cleaned.endswith("```"):
            cleaned = cleaned[:-3]
        return json.loads(cleaned.strip())
    except json.JSONDecodeError as e:
        return {"error": f"Failed to parse AI response: {str(e)}", "raw": cleaned}
def parse_image_input(image_base64, mime_type="image/jpeg", trip_start_date=None, trip_end_date=None):
    """Parse an image (screenshot of a booking) using OpenAI Vision.

    Args:
        image_base64: Base64-encoded image bytes.
        mime_type: Image MIME type used in the data URL.
        trip_start_date / trip_end_date: Optional trip bounds for the prompt.
    Returns:
        Parsed dict in the PARSE_SYSTEM_PROMPT schema, or a dict with an
        "error" key (plus "raw" model text when JSON decoding fails).
    """
    context = ""
    if trip_start_date and trip_end_date:
        context = f"\nContext: This is for a trip from {trip_start_date} to {trip_end_date}."
    messages = [
        {"role": "system", "content": PARSE_SYSTEM_PROMPT + context},
        {
            "role": "user",
            "content": [
                {
                    "type": "text",
                    "text": "Extract booking information from this image. Return structured JSON."
                },
                {
                    "type": "image_url",
                    "image_url": {
                        "url": f"data:{mime_type};base64,{image_base64}"
                    }
                }
            ]
        }
    ]
    # BUG FIX: call_openai() takes `max_completion_tokens`, not `max_tokens`;
    # the old keyword raised TypeError on every image parse.
    result = call_openai(messages, max_completion_tokens=3000)
    if isinstance(result, dict) and "error" in result:
        return result
    try:
        # Strip optional markdown code fences before JSON parsing
        result = result.strip()
        if result.startswith("```json"):
            result = result[7:]
        if result.startswith("```"):
            result = result[3:]
        if result.endswith("```"):
            result = result[:-3]
        return json.loads(result.strip())
    except json.JSONDecodeError as e:
        return {"error": f"Failed to parse AI response: {str(e)}", "raw": result}
# ==================== Trail Info ====================
def fetch_trail_info(query, hints=""):
    """Fetch trail info using GPT's training data.

    Args:
        query: Trail name, free-text description, or an AllTrails URL
            (the trail slug is extracted from "/trail/" URLs).
        hints: Optional alternative names/clues supplied by the user.
    Returns:
        dict with name/distance/difficulty/estimated_time (and optionally
        elevation_gain/trail_type) plus "found": true; {"found": false}
        when unknown; or a dict with an "error" key on API/decode failure.
    """
    # Extract trail name from URL for better prompting
    trail_name = query
    if "/trail/" in query:
        trail_name = query.split("/trail/")[-1].replace("-", " ").replace("/", " ").strip()
    # Build context with hints
    hint_context = ""
    if hints:
        hint_context = f"\n\nAlternative names/hints provided by user: {hints}"
    messages = [
        {"role": "system", "content": """You are a hiking trail information assistant. Given a trail name, URL, or description, provide accurate trail information based on your training data.
You should recognize trails by:
- AllTrails URLs
- Trail names (exact or partial)
- Alternative names (e.g., "Cave of Hira" = "Jabal Al-Nour")
- Famous landmarks or pilgrimage routes
- Location descriptions
Return ONLY a JSON object with these fields:
{
"name": "Trail Name",
"distance": "X.X mi",
"difficulty": "easy|moderate|hard|expert",
"estimated_time": "X-X hours",
"elevation_gain": "XXX ft (optional)",
"trail_type": "Out & Back|Loop|Point to Point (optional)",
"found": true
}
ONLY return {"found": false} if you truly have NO information about this trail or location.
Important:
- Distance should be the TOTAL distance (round-trip for out-and-back trails)
- Use miles for distance, feet for elevation
- Difficulty: easy (flat, paved), moderate (some elevation), hard (steep/challenging), expert (technical)
- If you know the location but not exact AllTrails data, provide your best estimate based on known information about the hike/climb"""},
        {"role": "user", "content": f"Get trail information for: {query}\n\nExtracted name: {trail_name}{hint_context}"}
    ]
    result = call_openai(messages, max_completion_tokens=500)
    if isinstance(result, dict) and "error" in result:
        return result
    try:
        # Strip optional markdown code fences before JSON parsing
        result = result.strip()
        if result.startswith("```json"):
            result = result[7:]
        if result.startswith("```"):
            result = result[3:]
        if result.endswith("```"):
            result = result[:-3]
        return json.loads(result.strip())
    except json.JSONDecodeError as e:
        return {"error": f"Failed to parse trail info: {str(e)}", "raw": result}
def generate_attraction_description(name, category="attraction", location=""):
    """Ask GPT for a short travel-guide blurb about an attraction.

    Returns {"description": text} on success, or a dict with an "error"
    key when the API key is missing or the call fails.
    """
    if not OPENAI_API_KEY:
        return {"error": "OpenAI API key not configured"}
    location_context = f" in {location}" if location else ""
    system_prompt = """You are a travel guide writer. Generate a concise, engaging description for tourist attractions.
Guidelines:
- Write 2-3 sentences (50-80 words)
- Mention what makes it special or notable
- Include practical info if relevant (best time to visit, what to expect)
- Keep it informative but engaging
- Don't include opening hours or prices (they change)
- Write in present tense"""
    user_prompt = f"Write a short description for: {name}{location_context} (Category: {category})"
    reply = call_openai(
        [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
        max_completion_tokens=200,
    )
    if isinstance(reply, dict) and "error" in reply:
        return reply
    return {"description": reply.strip()}
# ==================== Telegram Notifications ====================
def send_telegram_notification(message):
    """Push *message* (HTML parse mode) to the configured Telegram chat.

    Returns True when Telegram acknowledges the message; False when the
    bot token/chat id are not configured or the API call fails.
    """
    if not TELEGRAM_BOT_TOKEN or not TELEGRAM_CHAT_ID:
        print("[TELEGRAM] Not configured, skipping notification")
        return False
    try:
        payload = json.dumps({
            "chat_id": TELEGRAM_CHAT_ID,
            "text": message,
            "parse_mode": "HTML"
        }).encode("utf-8")
        request = urllib.request.Request(
            f"https://api.telegram.org/bot{TELEGRAM_BOT_TOKEN}/sendMessage",
            data=payload,
            headers={"Content-Type": "application/json"},
            method="POST"
        )
        with urllib.request.urlopen(request, timeout=10) as resp:
            reply = json.loads(resp.read().decode())
        return reply.get("ok", False)
    except Exception as e:
        print(f"[TELEGRAM] Error sending notification: {e}")
        return False
# ==================== Flight Status ====================
def get_flight_status(airline_code, flight_number, date_str=None):
    """
    Fetch flight status from FlightAware by parsing embedded JSON data.
    Scrapes https://www.flightaware.com/live/flight/<ident> and reads the
    `trackpollBootstrap` JSON blob embedded in the page (no official API).
    Args:
        airline_code: IATA airline code (e.g., 'SV', 'AA', 'DL')
        flight_number: Flight number (e.g., '20', '1234')
        date_str: Optional date in YYYY-MM-DD format; when given, the leg
            whose scheduled gate departure falls on that date is selected
            from the activity log (otherwise the most recent leg is used)
    Returns:
        dict with flight status info, or {"error": ...} on any failure
    """
    try:
        # FlightAware uses ICAO-style codes: SVA20 for Saudia 20
        # Common IATA to ICAO prefixes - add 'A' for most airlines
        flight_ident = f"{airline_code}A{flight_number}" if len(airline_code) == 2 else f"{airline_code}{flight_number}"
        # Some airlines use different patterns; known mappings override the
        # "append A" heuristic above.
        iata_to_icao = {
            'AA': 'AAL', 'DL': 'DAL', 'UA': 'UAL', 'WN': 'SWA', 'AS': 'ASA',
            'B6': 'JBU', 'NK': 'NKS', 'F9': 'FFT', 'SV': 'SVA', 'EK': 'UAE',
            'QR': 'QTR', 'BA': 'BAW', 'LH': 'DLH', 'AF': 'AFR', 'KL': 'KLM',
        }
        if airline_code.upper() in iata_to_icao:
            flight_ident = f"{iata_to_icao[airline_code.upper()]}{flight_number}"
        url = f"https://www.flightaware.com/live/flight/{flight_ident}"
        # Browser-like headers to avoid being served a bot/challenge page.
        req = urllib.request.Request(url, headers={
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.9',
        })
        # NOTE(review): TLS certificate verification disabled, as elsewhere.
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        with urllib.request.urlopen(req, context=ssl_context, timeout=15) as response:
            html_content = response.read().decode('utf-8')
        # Extract trackpollBootstrap JSON embedded in the page source
        match = re.search(r'var trackpollBootstrap\s*=\s*(\{.*?\});\s*(?:var|)', html_content, re.DOTALL)
        if not match:
            return {"error": "Could not find flight data in response"}
        data = json.loads(match.group(1))
        # Get flights from the data
        flights = data.get('flights', {})
        if not flights:
            return {"error": "No flight data found"}
        # Get the first (most recent) flight
        flight_key = list(flights.keys())[0]
        flight_data = flights[flight_key]
        # Get activity log with detailed flight info
        activity = flight_data.get('activityLog', {}).get('flights', [])
        if not activity:
            return {"error": "No flight activity found"}
        # Find the flight matching the requested date
        flight_info = activity[0]  # Default to first/most recent
        if date_str:
            try:
                target_date = datetime.strptime(date_str, "%Y-%m-%d").date()
                for flight in activity:
                    gate_dep = flight.get('gateDepartureTimes', {})
                    scheduled_ts = gate_dep.get('scheduled')
                    if scheduled_ts:
                        flight_date = datetime.fromtimestamp(scheduled_ts).date()
                        if flight_date == target_date:
                            flight_info = flight
                            break
            except (ValueError, TypeError):
                pass  # Use default if date parsing fails
        origin = flight_info.get('origin', {})
        destination = flight_info.get('destination', {})
        # Get times (epoch seconds; each dict may carry scheduled/estimated/actual)
        takeoff_times = flight_info.get('takeoffTimes', {})
        landing_times = flight_info.get('landingTimes', {})
        gate_dep_times = flight_info.get('gateDepartureTimes', {})
        gate_arr_times = flight_info.get('gateArrivalTimes', {})
        # Calculate delays in minutes (check estimated vs scheduled for
        # pre-departure delays; actual takes precedence once known)
        dep_delay = 0
        arr_delay = 0
        # Check gate departure delay (estimated vs scheduled)
        if gate_dep_times.get('scheduled'):
            if gate_dep_times.get('actual'):
                dep_delay = (gate_dep_times['actual'] - gate_dep_times['scheduled']) // 60
            elif gate_dep_times.get('estimated'):
                dep_delay = (gate_dep_times['estimated'] - gate_dep_times['scheduled']) // 60
        # Check arrival delay
        if gate_arr_times.get('scheduled'):
            if gate_arr_times.get('actual'):
                arr_delay = (gate_arr_times['actual'] - gate_arr_times['scheduled']) // 60
            elif gate_arr_times.get('estimated'):
                arr_delay = (gate_arr_times['estimated'] - gate_arr_times['scheduled']) // 60
        # Determine status (order matters: cancelled > landed > en route > delayed)
        status = "On Time"
        if flight_info.get('cancelled'):
            status = "Cancelled"
        elif landing_times.get('actual') or gate_arr_times.get('actual'):
            status = "Landed"
        elif takeoff_times.get('actual'):
            status = "En Route"
        elif dep_delay >= 15 or arr_delay >= 15:
            status = "Delayed"
        # Format times (12-hour format in airport local timezone)
        def format_time(ts, tz_str=None):
            if not ts:
                return ''
            try:
                from zoneinfo import ZoneInfo
                dt = datetime.fromtimestamp(ts, tz=ZoneInfo('UTC'))
                if tz_str:
                    # FlightAware uses format like ":America/New_York"
                    tz_name = tz_str.lstrip(':')
                    dt = dt.astimezone(ZoneInfo(tz_name))
                return dt.strftime('%I:%M %p').lstrip('0')
            except:
                return ''
        # Get timezones for origin/destination
        origin_tz = origin.get('TZ', '')
        dest_tz = destination.get('TZ', '')
        result = {
            "airline": airline_code.upper(),
            "airline_code": airline_code.upper(),
            "flight_number": str(flight_number),
            "status": status,
            # First letter of the status string ('O', 'C', 'L', 'E', 'D')
            "status_code": status[0],
            "delay_departure_minutes": max(0, dep_delay),
            "delay_arrival_minutes": max(0, arr_delay),
            "departure": {
                "airport": origin.get('iata', ''),
                "airport_name": origin.get('friendlyName', ''),
                "terminal": origin.get('terminal', ''),
                "gate": origin.get('gate', ''),
                "scheduled": format_time(gate_dep_times.get('scheduled') or takeoff_times.get('scheduled'), origin_tz),
                "estimated": format_time(gate_dep_times.get('estimated'), origin_tz),
                "actual": format_time(gate_dep_times.get('actual') or takeoff_times.get('actual'), origin_tz),
            },
            "arrival": {
                "airport": destination.get('iata', ''),
                "airport_name": destination.get('friendlyName', ''),
                "terminal": destination.get('terminal', ''),
                "gate": destination.get('gate', ''),
                "scheduled": format_time(gate_arr_times.get('scheduled') or landing_times.get('scheduled'), dest_tz),
                "estimated": format_time(gate_arr_times.get('estimated'), dest_tz),
                "actual": format_time(gate_arr_times.get('actual') or landing_times.get('actual'), dest_tz),
            },
            "aircraft": flight_info.get('aircraftTypeFriendly', ''),
            "duration": '',
            "flightaware_url": url,
        }
        return result
    except urllib.error.HTTPError as e:
        return {"error": f"HTTP error: {e.code}"}
    except urllib.error.URLError as e:
        return {"error": f"URL error: {str(e)}"}
    except json.JSONDecodeError as e:
        return {"error": f"Failed to parse flight data: {str(e)}"}
    except Exception as e:
        return {"error": f"Failed to fetch flight status: {str(e)}"}
def parse_flight_number(flight_str):
    """
    Split a flight designator into (airline_code, number).
    Examples: 'SV20', 'SV 20', 'AA1234', 'DL 456'
    Returns (None, None) for empty input or anything that does not match
    a 2-3 letter code followed by a 1-4 digit number.
    """
    if not flight_str:
        return None, None
    # Normalize: drop spaces, force uppercase
    normalized = flight_str.strip().upper().replace(' ', '')
    parsed = re.fullmatch(r'([A-Z]{2,3})(\d{1,4})', normalized)
    if parsed is None:
        return None, None
    return parsed.group(1), parsed.group(2)
def dict_from_row(row):
    """Return *row* (e.g. a sqlite3.Row) as a plain dict, or None for None."""
    return None if row is None else dict(row)
def generate_id():
    """Return a freshly generated random UUID (version 4) as a string."""
    return f"{uuid.uuid4()}"
def parse_location_string(loc):
    """Extract a geocodable place name from a free-form location string.

    For street addresses such as
    "142-30 135th Ave, Jamaica, New York USA, 11436" the state/major-city
    component ("New York") is preferred, since neighborhood names often
    fail to geocode. Returns None for empty input.
    """
    if not loc:
        return None
    parts = [piece.strip() for piece in loc.split(',')]
    # A digit in the first component marks it as a street address.
    first_is_address = any(ch.isdigit() for ch in parts[0])
    if len(parts) >= 3 and first_is_address:
        # Drop country abbreviations and zip codes from the state component.
        kept = [word for word in parts[2].split()
                if not word.isdigit() and word.upper() not in ('USA', 'US', 'UK')]
        # Fall back to the city component when the state part is empty.
        return ' '.join(kept) if kept else parts[1]
    if len(parts) == 2 and first_is_address:
        return parts[1]
    return parts[0]
def get_locations_for_date(trip, date_str):
    """Get locations for a specific date, including travel days with two cities.

    Returns a list of location dicts:
    [{"location": "geocodable string", "city": "Display Name", "type": "lodging|departure|arrival"}]
    For travel days (flight departing), returns both origin and destination.
    Falls back to the first lodging, first transport destination, then the
    trip name when nothing matches the date.
    """
    try:
        target_date = datetime.strptime(date_str, '%Y-%m-%d').date()
    except (ValueError, TypeError):  # narrowed from bare except
        loc = get_trip_location(trip)
        return [{"location": loc, "city": loc, "type": "fallback"}]
    locations = []
    found_lodging = None
    found_transport = None
    # Lodging whose [check_in, check_out) interval spans the target date.
    for lodging in trip.get('lodging', []):
        check_in = lodging.get('check_in', '')
        check_out = lodging.get('check_out', '')
        if not (check_in and check_out):
            continue
        try:
            # Handle datetime with timezone like "2025-12-24T15:00|America/New_York"
            check_in_date = datetime.fromisoformat(check_in.split('|')[0].replace('Z', '')).date()
            check_out_date = datetime.fromisoformat(check_out.split('|')[0].replace('Z', '')).date()
        except Exception:  # narrowed from bare except; skip malformed rows
            continue
        if check_in_date <= target_date < check_out_date:
            loc = parse_location_string(lodging.get('location', ''))
            if loc:
                found_lodging = {"location": loc, "city": loc, "type": "lodging"}
                break
    # Transportation on this date marks a travel day (departure + arrival).
    for transport in trip.get('transportations', []):
        transport_date = transport.get('date', '')
        if not transport_date:
            continue
        try:
            t_date = datetime.fromisoformat(transport_date.split('|')[0].replace('Z', '')).date()
        except Exception:  # narrowed from bare except
            continue
        if t_date == target_date:
            from_loc = transport.get('from_location', '').strip()
            to_loc = transport.get('to_location', '').strip()
            if from_loc and to_loc:
                found_transport = {
                    "from": from_loc.split(',')[0].strip(),
                    "to": to_loc.split(',')[0].strip()
                }
                break
    # Build result based on what we found (transport wins over lodging).
    if found_transport:
        locations.append({
            "location": found_transport["from"],
            "city": found_transport["from"],
            "type": "departure"
        })
        locations.append({
            "location": found_transport["to"],
            "city": found_transport["to"],
            "type": "arrival"
        })
    elif found_lodging:
        locations.append(found_lodging)
    else:
        # Fallback: first lodging, then first transport destination, then name.
        for lodging in trip.get('lodging', []):
            loc = parse_location_string(lodging.get('location', ''))
            if loc:
                locations.append({"location": loc, "city": loc, "type": "lodging"})
                break
        if not locations:
            for transport in trip.get('transportations', []):
                to_loc = transport.get('to_location', '').strip()
                if to_loc:
                    city = to_loc.split(',')[0].strip()
                    locations.append({"location": city, "city": city, "type": "fallback"})
                    break
        if not locations:
            name = trip.get('name', '').strip()
            locations.append({"location": name, "city": name, "type": "fallback"})
    return locations
def get_location_for_date(trip, date_str):
    """Return a single geocodable location for *date_str* (legacy wrapper).

    Delegates to get_locations_for_date and keeps only the first entry;
    falls back to the trip-level location when nothing is found.
    """
    candidates = get_locations_for_date(trip, date_str)
    return candidates[0]["location"] if candidates else get_trip_location(trip)
def get_trip_location(trip):
    """Pick the primary location for a trip (used for weather lookup).

    Preference order: first parseable lodging address, then the first
    transportation destination (airport codes geocode fine), then the
    trip name.
    """
    for lodging in trip.get('lodging', []):
        parsed = parse_location_string(lodging.get('location', ''))
        if parsed:
            return parsed
    for transport in trip.get('transportations', []):
        dest = transport.get('to_location', '').strip()
        if dest:
            return dest.split(',')[0].strip()
    return trip.get('name', '').strip()
def hash_password(password):
    """Return the hex SHA-256 digest of *password*.

    NOTE(review): unsalted SHA-256 is weak for password storage; kept
    as-is for compatibility with any previously stored hashes.
    """
    digest = hashlib.sha256(password.encode())
    return digest.hexdigest()
def create_session(username):
    """Persist a new random session token for *username* and return it.

    The connection is closed in a finally block; previously it leaked if
    the INSERT raised.
    """
    token = secrets.token_hex(32)
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute(
            "INSERT INTO sessions (token, username) VALUES (?, ?)",
            (token, username)
        )
        conn.commit()
    finally:
        conn.close()
    return token
def verify_session(token):
    """Return True when *token* matches a stored session row.

    The connection is closed in a finally block; previously it leaked if
    the SELECT raised.
    """
    if not token:
        return False
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT token FROM sessions WHERE token = ?", (token,))
        result = cursor.fetchone()
    finally:
        conn.close()
    return result is not None
def delete_session(token):
    """Delete a session row; no-op for falsy tokens.

    The connection is closed in a finally block; previously it leaked if
    the DELETE raised.
    """
    if not token:
        return
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute("DELETE FROM sessions WHERE token = ?", (token,))
        conn.commit()
    finally:
        conn.close()
# ==================== OIDC Functions ====================
def get_oidc_config():
    """Fetch the OIDC discovery document from the configured issuer.

    Returns the parsed JSON dict, or None when OIDC is unconfigured or
    the request fails (failure is logged, not raised).
    """
    if not OIDC_ISSUER:
        return None
    discovery_url = f"{OIDC_ISSUER}/.well-known/openid-configuration"
    try:
        request = urllib.request.Request(discovery_url)
        with urllib.request.urlopen(request, timeout=10) as resp:
            return json.loads(resp.read().decode())
    except Exception as exc:
        print(f"Failed to fetch OIDC config: {exc}")
        return None
def get_oidc_authorization_url(state):
    """Build the provider's authorization URL for the code flow.

    Returns None when discovery fails or no authorization endpoint is
    advertised.
    """
    config = get_oidc_config()
    endpoint = config.get("authorization_endpoint") if config else None
    if not endpoint:
        return None
    query = urllib.parse.urlencode({
        "client_id": OIDC_CLIENT_ID,
        "redirect_uri": OIDC_REDIRECT_URI,
        "response_type": "code",
        "scope": "openid email profile",
        "state": state,
    })
    return f"{endpoint}?{query}"
def exchange_oidc_code(code):
    """Exchange an authorization code for tokens at the token endpoint.

    Returns the token response dict, or None on any failure (logged, not
    raised).
    """
    config = get_oidc_config()
    endpoint = config.get("token_endpoint") if config else None
    if not endpoint:
        return None
    payload = urllib.parse.urlencode({
        "grant_type": "authorization_code",
        "code": code,
        "redirect_uri": OIDC_REDIRECT_URI,
        "client_id": OIDC_CLIENT_ID,
        "client_secret": OIDC_CLIENT_SECRET,
    }).encode()
    try:
        request = urllib.request.Request(endpoint, data=payload, method="POST")
        request.add_header("Content-Type", "application/x-www-form-urlencoded")
        with urllib.request.urlopen(request, timeout=10) as resp:
            return json.loads(resp.read().decode())
    except Exception as exc:
        print(f"Failed to exchange OIDC code: {exc}")
        return None
def get_oidc_userinfo(access_token):
    """Fetch the user's claims from the provider's userinfo endpoint.

    Returns the userinfo dict, or None on any failure (logged, not raised).
    """
    config = get_oidc_config()
    endpoint = config.get("userinfo_endpoint") if config else None
    if not endpoint:
        return None
    try:
        request = urllib.request.Request(endpoint)
        request.add_header("Authorization", f"Bearer {access_token}")
        with urllib.request.urlopen(request, timeout=10) as resp:
            return json.loads(resp.read().decode())
    except Exception as exc:
        print(f"Failed to fetch OIDC userinfo: {exc}")
        return None
def get_images_for_entity(entity_type, entity_id):
    """Return all image rows for an entity, primary image first.

    The connection is closed in a finally block; previously it leaked if
    the query raised.
    """
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT * FROM images WHERE entity_type = ? AND entity_id = ? ORDER BY is_primary DESC, created_at",
            (entity_type, entity_id)
        )
        return [dict_from_row(row) for row in cursor.fetchall()]
    finally:
        conn.close()
def get_documents_for_entity(entity_type, entity_id):
    """Return all document rows for an entity, oldest first.

    The connection is closed in a finally block; previously it leaked if
    the query raised.
    """
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT * FROM documents WHERE entity_type = ? AND entity_id = ? ORDER BY created_at",
            (entity_type, entity_id)
        )
        return [dict_from_row(row) for row in cursor.fetchall()]
    finally:
        conn.close()
def get_primary_image(entity_type, entity_id):
    """Return the primary (or oldest) image row for an entity, or None.

    The connection is closed in a finally block; previously it leaked if
    the query raised.
    """
    conn = get_db()
    try:
        cursor = conn.cursor()
        cursor.execute(
            "SELECT * FROM images WHERE entity_type = ? AND entity_id = ? ORDER BY is_primary DESC, created_at LIMIT 1",
            (entity_type, entity_id)
        )
        row = cursor.fetchone()
    finally:
        conn.close()
    return dict_from_row(row) if row else None
def parse_datetime(dt_str):
    """Parse a datetime string, returning (datetime_obj, display_str).

    Accepts "2026-01-20T08:00|Asia/Muscat" (ISO datetime plus an IANA
    zone after a pipe) or a bare ISO datetime. A trailing "Z" is
    stripped, so the result is naive. On parse failure returns
    (None, original_string); empty input returns (None, "").
    """
    if not dt_str:
        return None, ""
    try:
        # Format: 2026-01-20T08:00|Asia/Muscat or just 2026-01-20T08:00
        if "|" in dt_str:
            dt_part, tz = dt_str.split("|", 1)
        else:
            dt_part, tz = dt_str, ""
        dt = datetime.fromisoformat(dt_part.replace("Z", ""))
        display = dt.strftime("%b %d, %Y, %I:%M %p")
    except (ValueError, TypeError):  # narrowed from bare except
        return None, dt_str
    if tz:
        display += f" ({tz})"
    return dt, display
def get_date_from_datetime(dt_str):
    """Extract the date from a "<iso>|<tz>" or plain ISO datetime string.

    Returns a datetime.date, or None for empty or unparseable input.
    """
    if not dt_str:
        return None
    try:
        dt_part = dt_str.split("|")[0]
        return datetime.fromisoformat(dt_part.replace("Z", "")).date()
    except (ValueError, TypeError, AttributeError):  # narrowed from bare except
        return None
def render_docs_html(docs):
    """Generate HTML for a documents list in card view.

    Each document gets an icon based on its file extension; names are
    HTML-escaped. Returns an empty string when there are no documents.
    """
    if not docs:
        return ""
    icon_by_ext = {
        "pdf": "📄",
        "doc": "📃", "docx": "📃",
        "jpg": "📷", "jpeg": "📷", "png": "📷", "gif": "📷", "webp": "📷",
    }
    doc_items = []
    for doc in docs:
        name = doc["file_name"]
        ext = name.rsplit(".", 1)[-1].lower() if "." in name else ""
        icon = icon_by_ext.get(ext, "📄")
        doc_items.append(f'{icon} {html.escape(name)}')
    # NOTE(review): the original return statement was an unterminated
    # multi-line f-string (a syntax error; its HTML wrapper appears to have
    # been lost). Reconstructed as a minimal valid string preserving the
    # visible "Documents:" text — restore the original markup if known.
    return f'Documents: {"".join(doc_items)}'
def hotel_names_match(name1, name2):
    """Heuristic: do two hotel names refer to the same property?

    Compares the sets of significant words (filler words and single
    characters removed); a match needs at least two shared words covering
    60% of the smaller set. Falls back to exact case-insensitive
    comparison when either name has no significant words.
    """
    fillers = {'by', 'the', 'a', 'an', 'hotel', 'hotels', 'resort', 'resorts',
               'suites', 'suite', '&', 'and', '-', 'inn'}

    def significant(name):
        return {w for w in name.lower().split() if w not in fillers and len(w) > 1}

    set1 = significant(name1)
    set2 = significant(name2)
    if not set1 or not set2:
        return name1.lower().strip() == name2.lower().strip()
    shared = set1 & set2
    smallest = min(len(set1), len(set2))
    ratio = len(shared) / smallest if smallest > 0 else 0
    return len(shared) >= 2 and ratio >= 0.6
def find_duplicate_flight(trip_id, flight_number, flight_date):
    """Return an existing transportation row matching flight number + date.

    Flight numbers are compared with spaces removed, case-insensitively;
    dates are reduced to their YYYY-MM-DD prefix. Returns the matching
    row dict, or None.
    """
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM transportations WHERE trip_id = ?", (trip_id,))
    existing = [dict_from_row(r) for r in cursor.fetchall()]
    conn.close()
    wanted_num = flight_number.replace(" ", "").upper() if flight_number else ""
    # Normalize the candidate date down to a bare YYYY-MM-DD string.
    wanted_date = None
    if flight_date:
        stripped = flight_date.split("|")[0]
        wanted_date = stripped.split("T")[0] if "T" in stripped else stripped[:10]
    for entry in existing:
        entry_num = (entry.get("flight_number") or "").replace(" ", "").upper()
        entry_date = entry.get("date", "")
        if entry_date:
            entry_date = entry_date.split("|")[0]
            if "T" in entry_date:
                entry_date = entry_date.split("T")[0]
        if wanted_num and entry_num == wanted_num:
            if wanted_date and entry_date and wanted_date == entry_date:
                return entry
    return None
def find_duplicate_hotel(trip_id, hotel_name, check_in_date, reservation_number=None):
    """Check if a similar hotel already exists in the trip.

    Matches first on exact reservation number (when both sides have one),
    then on fuzzy hotel name (hotel_names_match) combined with the same
    check-in date. Returns the matching lodging row dict, or None.
    """
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM lodging WHERE trip_id = ?", (trip_id,))
    lodging = [dict_from_row(row) for row in cursor.fetchall()]
    conn.close()
    hotel_name_normalized = hotel_name.lower().strip() if hotel_name else ""
    # Reduce the candidate check-in (e.g. "2025-12-24T15:00|TZ") to YYYY-MM-DD.
    target_date = None
    if check_in_date:
        try:
            if "|" in check_in_date:
                check_in_date = check_in_date.split("|")[0]
            if "T" in check_in_date:
                target_date = check_in_date.split("T")[0]
            else:
                target_date = check_in_date[:10]
        except:
            pass
    for l in lodging:
        existing_name = (l.get("name") or "").lower().strip()
        existing_date = l.get("check_in", "")
        existing_res_num = l.get("reservation_number", "")
        # Same normalization for the stored check-in date.
        if existing_date:
            if "|" in existing_date:
                existing_date = existing_date.split("|")[0]
            if "T" in existing_date:
                existing_date = existing_date.split("T")[0]
        # Match by reservation number if provided
        if reservation_number and existing_res_num:
            if reservation_number.strip() == existing_res_num.strip():
                return l
        # Match by hotel name + check-in date
        if existing_name and hotel_name_normalized:
            name_match = hotel_names_match(existing_name, hotel_name_normalized)
            date_match = (target_date and existing_date and target_date == existing_date)
            if name_match and date_match:
                return l
    return None
# Weather cache (in-memory, clears on restart)
# {location_name: {"coords": (lat, lon, timezone), "expires": timestamp}}
# A cached "coords": None entry records a geocoding failure so the same
# location is not retried until the entry expires.
GEOCODE_CACHE = {}
# {(lat, lon): {"data": weather_data, "expires": timestamp}}
# Keys are (lat, lon) rounded to 2 decimals to improve cache hit rate.
WEATHER_CACHE = {}
CACHE_TTL = 3600  # 1 hour (weather-cache lifetime; geocode entries last 24h)
# Weather code to icon/description mapping (WMO codes)
WEATHER_CODES = {
    0: ("☀️", "Clear"),
    1: ("🌤️", "Mostly Clear"),
    2: ("⛅", "Partly Cloudy"),
    3: ("☁️", "Cloudy"),
    45: ("🌫️", "Foggy"),
    48: ("🌫️", "Icy Fog"),
    51: ("🌧️", "Light Drizzle"),
    53: ("🌧️", "Drizzle"),
    55: ("🌧️", "Heavy Drizzle"),
    61: ("🌧️", "Light Rain"),
    63: ("🌧️", "Rain"),
    65: ("🌧️", "Heavy Rain"),
    71: ("🌨️", "Light Snow"),
    73: ("🌨️", "Snow"),
    75: ("❄️", "Heavy Snow"),
    77: ("🌨️", "Snow Grains"),
    80: ("🌦️", "Light Showers"),
    81: ("🌦️", "Showers"),
    82: ("⛈️", "Heavy Showers"),
    85: ("🌨️", "Snow Showers"),
    86: ("🌨️", "Heavy Snow Showers"),
    95: ("⛈️", "Thunderstorm"),
    96: ("⛈️", "Thunderstorm + Hail"),
    99: ("⛈️", "Severe Thunderstorm"),
}
def get_weather_forecast(location_name, dates):
    """
    Fetch weather forecast for a location and list of dates using Open-Meteo API.
    Uses caching to avoid redundant API calls.
    Args:
        location_name: City/location name to geocode
        dates: List of date strings in YYYY-MM-DD format
    Returns:
        {"location": ..., "forecasts": {date: {...}}} on success, or
        {"error": "..."} on failure.
    """
    # Uses the module-level `time` import; the previous function-local
    # `import time` was redundant.
    now = time.time()
    try:
        # Step 1: Geocode the location (24h cache; failures cached too)
        cache_key = location_name.lower().strip()
        cached_geo = GEOCODE_CACHE.get(cache_key)
        if cached_geo and cached_geo.get("expires", 0) > now:
            if cached_geo["coords"] is None:
                # Cached failure - skip this location
                return {"error": f"Location not found (cached): {location_name}"}
            lat, lon, timezone = cached_geo["coords"]
        else:
            geo_url = f"https://geocoding-api.open-meteo.com/v1/search?name={urllib.parse.quote(location_name)}&count=1"
            req = urllib.request.Request(geo_url, headers={
                'User-Agent': 'Mozilla/5.0 (compatible; TripPlanner/1.0)'
            })
            with urllib.request.urlopen(req, timeout=10) as response:
                geo_data = json.loads(response.read().decode('utf-8'))
            if not geo_data.get('results'):
                # Cache the failure to avoid retrying
                GEOCODE_CACHE[cache_key] = {"coords": None, "expires": now + 86400}
                return {"error": f"Location not found: {location_name}"}
            lat = geo_data['results'][0]['latitude']
            lon = geo_data['results'][0]['longitude']
            timezone = geo_data['results'][0].get('timezone', 'auto')
            # Cache for 24 hours (geocode data doesn't change)
            GEOCODE_CACHE[cache_key] = {
                "coords": (lat, lon, timezone),
                "expires": now + 86400
            }
        # Step 2: Fetch weather forecast (1h cache, keyed by rounded coords)
        weather_cache_key = (round(lat, 2), round(lon, 2))  # round to reduce cache misses
        cached_weather = WEATHER_CACHE.get(weather_cache_key)
        if cached_weather and cached_weather.get("expires", 0) > now:
            daily = cached_weather["data"]
        else:
            weather_url = (
                f"https://api.open-meteo.com/v1/forecast?"
                f"latitude={lat}&longitude={lon}"
                f"&daily=weather_code,temperature_2m_max,temperature_2m_min"
                f"&temperature_unit=fahrenheit"
                f"&timezone={urllib.parse.quote(timezone)}"
                f"&forecast_days=16"
            )
            req = urllib.request.Request(weather_url, headers={
                'User-Agent': 'Mozilla/5.0 (compatible; TripPlanner/1.0)'
            })
            with urllib.request.urlopen(req, timeout=10) as response:
                weather_data = json.loads(response.read().decode('utf-8'))
            if not weather_data.get('daily'):
                return {"error": "No weather data available"}
            daily = weather_data['daily']
            # Cache weather for CACHE_TTL (1 hour)
            WEATHER_CACHE[weather_cache_key] = {
                "data": daily,
                "expires": now + CACHE_TTL
            }
        # Set for O(1) membership; loop variable renamed from `date`, which
        # shadowed the module-level `datetime.date` import.
        wanted = set(dates)
        result = {}
        for i, day in enumerate(daily.get('time', [])):
            if day in wanted:
                code = daily['weather_code'][i]
                icon, desc = WEATHER_CODES.get(code, ("❓", "Unknown"))
                result[day] = {
                    "icon": icon,
                    "description": desc,
                    "high": round(daily['temperature_2m_max'][i]),
                    "low": round(daily['temperature_2m_min'][i]),
                    "code": code
                }
        return {"location": location_name, "forecasts": result}
    except urllib.error.URLError as e:
        return {"error": f"Failed to fetch weather: {str(e)}"}
    except Exception as e:
        return {"error": f"Weather error: {str(e)}"}
def prefetch_weather_for_trips():
    """Background job to prefetch weather for upcoming trips.

    Warms the geocode/weather caches for every trip overlapping the next
    16 days (the forecast horizon used by get_weather_forecast).
    Best-effort: any error is logged, never raised.
    """
    try:
        conn = get_db()
        try:
            cursor = conn.cursor()
            # Trips already in progress, or starting within the window.
            today = date.today()
            future = today + timedelta(days=16)
            today_str = today.strftime('%Y-%m-%d')
            future_str = future.strftime('%Y-%m-%d')
            cursor.execute("""
                SELECT * FROM trips
                WHERE (start_date <= ? AND end_date >= ?)
                OR (start_date >= ? AND start_date <= ?)
                ORDER BY start_date
            """, (future_str, today_str, today_str, future_str))
            trips = [dict_from_row(row) for row in cursor.fetchall()]
            for trip in trips:
                trip_id = trip['id']
                # Load lodging and transportations
                cursor.execute("SELECT * FROM lodging WHERE trip_id = ? ORDER BY check_in", (trip_id,))
                trip["lodging"] = [dict_from_row(row) for row in cursor.fetchall()]
                cursor.execute("SELECT * FROM transportations WHERE trip_id = ? ORDER BY date", (trip_id,))
                trip["transportations"] = [dict_from_row(row) for row in cursor.fetchall()]
                # Clamp the trip's date range to the forecast window.
                start = max(today, datetime.strptime(trip['start_date'], '%Y-%m-%d').date() if trip.get('start_date') else today)
                end = min(future, datetime.strptime(trip['end_date'], '%Y-%m-%d').date() if trip.get('end_date') else future)
                dates = []
                current = start
                while current <= end:
                    dates.append(current.strftime('%Y-%m-%d'))
                    current += timedelta(days=1)
                if not dates:
                    continue
                # Get unique locations for all dates
                all_locations = set()
                for date_str in dates:
                    for loc_info in get_locations_for_date(trip, date_str):
                        all_locations.add(loc_info["location"])
                # Prefetch weather for each location (populates the caches)
                for loc in all_locations:
                    get_weather_forecast(loc, dates)
                    time.sleep(0.5)  # Be nice to the API
        finally:
            # Previously the connection leaked when an exception escaped the
            # loop (the outer except skipped conn.close()).
            conn.close()
        print(f"[Weather] Prefetched weather for {len(trips)} upcoming trips", flush=True)
    except Exception as e:
        print(f"[Weather] Prefetch error: {e}", flush=True)
def weather_prefetch_loop():
    """Background loop: refresh the weather caches every 30 minutes."""
    time.sleep(10)  # initial delay so the HTTP server can start first
    while True:
        try:
            prefetch_weather_for_trips()
        except Exception as exc:
            print(f"[Weather] Background job error: {exc}", flush=True)
        time.sleep(1800)  # run every 30 minutes
def start_weather_prefetch_thread():
    """Spawn the daemon thread that keeps the weather caches warm."""
    worker = threading.Thread(target=weather_prefetch_loop, daemon=True)
    worker.start()
    print("[Weather] Background prefetch thread started", flush=True)
class TripHandler(BaseHTTPRequestHandler):
"""HTTP request handler."""
def log_message(self, format, *args):
"""Log HTTP requests."""
print(f"[{datetime.now().strftime('%Y-%m-%d %H:%M:%S')}] {args[0]}")
def get_session(self):
"""Get session from cookie."""
cookie = SimpleCookie(self.headers.get("Cookie", ""))
if "session" in cookie:
return cookie["session"].value
return None
def parse_cookies(self):
"""Parse all cookies into a dictionary."""
cookie = SimpleCookie(self.headers.get("Cookie", ""))
return {key: morsel.value for key, morsel in cookie.items()}
def is_authenticated(self):
"""Check if request is authenticated via session cookie or Bearer token."""
# Check Bearer token first (service-to-service API access)
auth_header = self.headers.get("Authorization", "")
if auth_header.startswith("Bearer ") and TRIPS_API_KEY:
token = auth_header[7:].strip()
if token and token == TRIPS_API_KEY:
return True
# Fall back to session cookie (web app)
session = self.get_session()
return session and verify_session(session)
def send_html(self, content, status=200):
"""Send HTML response."""
self.send_response(status)
self.send_header("Content-Type", "text/html; charset=utf-8")
self.end_headers()
self.wfile.write(content.encode())
def send_json(self, data, status=200):
"""Send JSON response."""
self.send_response(status)
self.send_header("Content-Type", "application/json")
self.end_headers()
self.wfile.write(json.dumps(data).encode())
def send_redirect(self, location):
"""Send redirect response."""
self.send_response(302)
self.send_header("Location", location)
self.end_headers()
def serve_file(self, file_path):
"""Serve a static file."""
if not file_path.exists():
self.send_error(404)
return
mime_type, _ = mimetypes.guess_type(str(file_path))
if not mime_type:
mime_type = "application/octet-stream"
self.send_response(200)
self.send_header("Content-Type", mime_type)
self.send_header("Content-Length", file_path.stat().st_size)
self.end_headers()
with open(file_path, "rb") as f:
shutil.copyfileobj(f, self.wfile)
def serve_pwa_icon(self, size):
"""Generate a simple PWA icon with airplane emoji."""
# Create a simple SVG icon and convert to PNG-like response
# For simplicity, serve an SVG that browsers will accept
svg = f''''''
self.send_response(200)
self.send_header("Content-Type", "image/svg+xml")
self.end_headers()
self.wfile.write(svg.encode())
    def do_GET(self):
        """Route GET requests.

        Order matters: public assets and share endpoints are served first,
        then login/logout/OIDC, then the authentication gate, and finally
        the authenticated API routes.
        """
        path = self.path.split("?")[0]  # strip the query string for routing
        # Static files (images)
        if path.startswith("/images/"):
            file_path = IMAGES_DIR / path[8:]
            self.serve_file(file_path)
            return
        # Static files (documents)
        # NOTE(review): DOCS_DIR is defined elsewhere in this module (not
        # visible in this section) — confirm it exists alongside IMAGES_DIR.
        if path.startswith("/documents/"):
            file_path = DOCS_DIR / path[11:]
            self.serve_file(file_path)
            return
        # Service worker (no-cache so updates roll out immediately)
        if path == "/sw.js":
            sw_path = Path(__file__).parent / "sw.js"
            self.send_response(200)
            self.send_header("Content-Type", "application/javascript")
            self.send_header("Cache-Control", "no-cache")
            self.end_headers()
            with open(sw_path, "rb") as f:
                self.wfile.write(f.read())
            return
        # PWA manifest
        if path == "/manifest.json":
            manifest_path = Path(__file__).parent / "manifest.json"
            self.send_response(200)
            self.send_header("Content-Type", "application/manifest+json")
            self.end_headers()
            with open(manifest_path, "rb") as f:
                self.wfile.write(f.read())
            return
        # PWA icons (generate simple colored icons)
        if path == "/icon-192.png" or path == "/icon-512.png":
            size = 192 if "192" in path else 512
            self.serve_pwa_icon(size)
            return
        # Public share view — redirect to SvelteKit frontend
        if path.startswith("/share/"):
            share_token = path[7:]
            self.send_json({"redirect": f"/view/{share_token}"})
            return
        # Public share API (returns JSON for SvelteKit frontend)
        if path.startswith("/api/share/trip/"):
            share_token = path[16:]
            self.handle_share_api(share_token)
            return
        # Login page
        if path == "/login":
            if self.is_authenticated():
                self.send_redirect("/")
            elif OIDC_ISSUER and OIDC_CLIENT_ID:
                # Redirect to OIDC provider.
                # Reuse existing state if present to avoid race condition
                # with multiple redirects
                cookies = self.parse_cookies()
                existing_state = cookies.get("oidc_state")
                if existing_state:
                    state = existing_state
                else:
                    state = secrets.token_hex(16)
                auth_url = get_oidc_authorization_url(state)
                if auth_url:
                    # Store state in a cookie for CSRF protection
                    self.send_response(302)
                    self.send_header("Location", auth_url)
                    if not existing_state:
                        self.send_header("Set-Cookie", f"oidc_state={state}; Path=/; HttpOnly; SameSite=Lax; Secure; Max-Age=600")
                    self.end_headers()
                else:
                    self.send_json({"error": "OIDC configuration error"}, 500)
            else:
                self.send_json({"error": "Login via API token or OIDC"}, 200)
            return
        # Logout
        if path == "/logout":
            session = self.get_session()
            if session:
                delete_session(session)
            # Get id_token for OIDC logout before clearing cookies
            cookies = self.parse_cookies()
            id_token = cookies.get("id_token", "")
            # Clear local session and id_token cookies
            self.send_response(302)
            self.send_header("Set-Cookie", "session=; Path=/; Max-Age=0")
            self.send_header("Set-Cookie", "id_token=; Path=/; Max-Age=0")
            # If OIDC is configured, redirect to IdP logout
            if OIDC_ISSUER:
                config = get_oidc_config()
                if config and config.get("end_session_endpoint"):
                    # Extract base URL (scheme + host) from redirect URI
                    parsed = urllib.parse.urlparse(OIDC_REDIRECT_URI)
                    base_url = f"{parsed.scheme}://{parsed.netloc}"
                    redirect_uri = urllib.parse.quote(f"{base_url}/login")
                    logout_url = f"{config['end_session_endpoint']}?client_id={OIDC_CLIENT_ID}&post_logout_redirect_uri={redirect_uri}"
                    if id_token:
                        logout_url += f"&id_token_hint={id_token}"
                    self.send_header("Location", logout_url)
                    self.end_headers()
                    return
            self.send_header("Location", "/login")
            self.end_headers()
            return
        # OIDC callback
        if path.startswith("/auth/callback"):
            self.handle_oidc_callback()
            return
        # Protected routes — everything below requires authentication.
        if not self.is_authenticated():
            # Return JSON 401 for API requests, redirect for browser
            if path.startswith("/api/"):
                self.send_json({"error": "Unauthorized"}, 401)
            else:
                self.send_redirect("/login")
            return
        # Home — API only, frontend is SvelteKit
        if path == "/" or path == "":
            self.send_json({"status": "Trips API server", "endpoints": "/api/trips, /api/trip/{id}, /api/stats"})
            return
        # API routes
        if path == "/api/trips":
            self.handle_get_trips()
            return
        if path.startswith("/api/trip/"):
            trip_id = path[10:]
            self.handle_get_trip(trip_id)
            return
        if path == "/api/pending-imports":
            self.handle_get_pending_imports()
            return
        if path.startswith("/api/quick-adds"):
            # Parse query params for trip_id
            query_string = self.path.split("?")[1] if "?" in self.path else ""
            params = urllib.parse.parse_qs(query_string)
            trip_id = params.get("trip_id", [None])[0]
            self.handle_get_quick_adds(trip_id)
            return
        if path.startswith("/api/immich/thumb/"):
            asset_id = path.split("/")[-1]
            self.handle_immich_thumbnail(asset_id)
            return
        if path.startswith("/api/places/details"):
            query_string = self.path.split("?")[1] if "?" in self.path else ""
            params = urllib.parse.parse_qs(query_string)
            place_id = params.get("place_id", [None])[0]
            self.handle_places_details(place_id)
            return
        if path == "/api/active-trip":
            self.handle_get_active_trip()
            return
        if path == "/api/stats":
            self.handle_get_stats()
            return
        if path.startswith("/api/search"):
            query_string = self.path.split("?")[1] if "?" in self.path else ""
            params = urllib.parse.parse_qs(query_string)
            q = params.get("q", [""])[0]
            self.handle_search(q)
            return
        self.send_error(404)
    def do_POST(self):
        """Route POST requests.

        /login, /api/parse-email, and /api/share/verify are handled before
        the authentication gate; everything else requires authentication.
        """
        path = self.path
        content_length = int(self.headers.get("Content-Length", 0))
        # Login
        if path == "/login":
            self.handle_login(content_length)
            return
        # Email parsing (API key auth)
        if path == "/api/parse-email":
            body = self.rfile.read(content_length) if content_length > 0 else b""
            self.handle_parse_email(body)
            return
        # Share password verification (public - no auth required)
        if path == "/api/share/verify":
            body = self.rfile.read(content_length) if content_length > 0 else b""
            self.handle_share_verify(body)
            return
        # Protected routes
        if not self.is_authenticated():
            self.send_json({"error": "Unauthorized"}, 401)
            return
        # Read body
        body = self.rfile.read(content_length) if content_length > 0 else b""
        # API routes
        if path == "/api/trip":
            self.handle_create_trip(body)
        elif path == "/api/trip/update":
            self.handle_update_trip(body)
        elif path == "/api/trip/delete":
            self.handle_delete_trip(body)
        elif path == "/api/transportation":
            self.handle_create_transportation(body)
        elif path == "/api/transportation/update":
            self.handle_update_transportation(body)
        elif path == "/api/transportation/delete":
            self.handle_delete_transportation(body)
        elif path == "/api/lodging":
            self.handle_create_lodging(body)
        elif path == "/api/lodging/update":
            self.handle_update_lodging(body)
        elif path == "/api/lodging/delete":
            self.handle_delete_lodging(body)
        elif path == "/api/note":
            self.handle_create_note(body)
        elif path == "/api/note/update":
            self.handle_update_note(body)
        elif path == "/api/note/delete":
            self.handle_delete_note(body)
        elif path == "/api/location":
            self.handle_create_location(body)
        elif path == "/api/location/update":
            self.handle_update_location(body)
        elif path == "/api/location/delete":
            self.handle_delete_location(body)
        elif path == "/api/move-item":
            self.handle_move_item(body)
        elif path == "/api/image/upload":
            self.handle_image_upload(body)
        elif path == "/api/image/delete":
            self.handle_image_delete(body)
        elif path == "/api/image/search":
            self.handle_image_search(body)
        elif path == "/api/image/upload-from-url":
            self.handle_image_upload_from_url(body)
        elif path == "/api/share/create":
            self.handle_create_share(body)
        elif path == "/api/share/delete":
            self.handle_delete_share(body)
        elif path == "/api/share/verify":
            # NOTE(review): unreachable — this route is already handled above,
            # before the authentication gate.
            self.handle_share_verify(body)
        elif path == "/api/parse":
            self.handle_parse(body)
        elif path == "/api/document/upload":
            self.handle_document_upload(body)
        elif path == "/api/document/delete":
            self.handle_document_delete(body)
        elif path == "/api/check-duplicate":
            self.handle_check_duplicate(body)
        elif path == "/api/merge-entry":
            self.handle_merge_entry(body)
        elif path == "/api/flight-status":
            self.handle_flight_status(body)
        elif path == "/api/weather":
            self.handle_weather(body)
        elif path == "/api/trail-info":
            self.handle_trail_info(body)
        elif path == "/api/generate-description":
            self.handle_generate_description(body)
        elif path == "/api/pending-imports/approve":
            self.handle_approve_import(body)
        elif path == "/api/pending-imports/delete":
            self.handle_delete_import(body)
        elif path == "/api/geocode-all":
            self.handle_geocode_all()
        elif path == "/api/ai-guide":
            self.handle_ai_guide(body)
        elif path == "/api/quick-add":
            self.handle_quick_add(body)
        elif path == "/api/quick-add/approve":
            self.handle_quick_add_approve(body)
        elif path == "/api/quick-add/delete":
            self.handle_quick_add_delete(body)
        elif path == "/api/quick-add/attach":
            self.handle_quick_add_attach(body)
        elif path == "/api/places/autocomplete":
            self.handle_places_autocomplete(body)
        elif path == "/api/immich/photos":
            self.handle_immich_photos(body)
        elif path == "/api/immich/download":
            self.handle_immich_download(body)
        elif path.startswith("/api/immich/thumbnail/"):
            asset_id = path.split("/")[-1]
            self.handle_immich_thumbnail(asset_id)
        elif path == "/api/immich/albums":
            self.handle_immich_albums()
        elif path == "/api/immich/album-photos":
            self.handle_immich_album_photos(body)
        elif path == "/api/google-photos/picker-config":
            self.handle_google_photos_picker_config()
        elif path == "/api/google-photos/create-session":
            self.handle_google_photos_create_session(body)
        elif path == "/api/google-photos/check-session":
            self.handle_google_photos_check_session(body)
        elif path == "/api/google-photos/get-media-items":
            self.handle_google_photos_get_media_items(body)
        elif path == "/api/google-photos/download":
            self.handle_google_photos_download(body)
        else:
            self.send_error(404)
# ==================== Authentication ====================
def handle_login(self, content_length):
"""Handle login form submission."""
body = self.rfile.read(content_length).decode()
params = urllib.parse.parse_qs(body)
username = params.get("username", [""])[0]
password = params.get("password", [""])[0]
if username == USERNAME and password == PASSWORD:
token = create_session(username)
self.send_response(302)
self.send_header("Location", "/")
self.send_header("Set-Cookie", f"session={token}; Path=/; HttpOnly; SameSite=Lax; Secure")
self.end_headers()
else:
self.send_json({"error": "Invalid credentials"}, 401)
def handle_oidc_callback(self):
    """Handle OIDC callback with authorization code.

    Flow: parse the provider's redirect query, verify the CSRF `state`
    cookie, exchange the code for tokens, fetch userinfo, then create a
    local session and 302-redirect to "/". Every failure path returns a
    JSON error body (NOTE(review): with the default 200 status — confirm
    that is intentional for browser-facing errors).
    """
    # Parse query parameters
    parsed = urllib.parse.urlparse(self.path)
    params = urllib.parse.parse_qs(parsed.query)
    code = params.get("code", [None])[0]
    state = params.get("state", [None])[0]
    error = params.get("error", [None])[0]
    if error:
        # Provider signalled an error (e.g. user denied consent).
        error_desc = params.get("error_description", ["Authentication failed"])[0]
        self.send_json({"error": error_desc})
        return
    if not code:
        self.send_json({"error": "No authorization code received"})
        return
    # Verify state (CSRF protection)
    cookies = self.parse_cookies()
    stored_state = cookies.get("oidc_state")
    if not stored_state or stored_state != state:
        self.send_json({"error": "Invalid state parameter"})
        return
    # Exchange code for tokens
    tokens = exchange_oidc_code(code)
    if not tokens:
        self.send_json({"error": "Failed to exchange authorization code"})
        return
    access_token = tokens.get("access_token")
    if not access_token:
        self.send_json({"error": "No access token received"})
        return
    # Get user info
    userinfo = get_oidc_userinfo(access_token)
    if not userinfo:
        self.send_json({"error": "Failed to get user info"})
        return
    # Create session with username from OIDC (email or preferred_username)
    username = userinfo.get("email") or userinfo.get("preferred_username") or userinfo.get("sub")
    session_token = create_session(username)
    # Get id_token for logout
    id_token = tokens.get("id_token", "")
    # Clear state cookie and set session cookie.
    # Multiple send_header("Set-Cookie", ...) calls emit one header each,
    # which is the correct way to set several cookies in one response.
    self.send_response(302)
    self.send_header("Location", "/")
    self.send_header("Set-Cookie", f"session={session_token}; Path=/; HttpOnly; SameSite=Lax; Secure")
    self.send_header("Set-Cookie", "oidc_state=; Path=/; Max-Age=0; SameSite=Lax; Secure")
    if id_token:
        self.send_header("Set-Cookie", f"id_token={id_token}; Path=/; HttpOnly; SameSite=Lax; Secure")
    self.end_headers()
# ==================== Trip CRUD ====================
def handle_get_trips(self):
    """Return all trips, newest first, each annotated with a cover image URL."""
    conn = get_db()
    cur = conn.cursor()
    cur.execute("SELECT * FROM trips ORDER BY start_date DESC")
    all_trips = [dict_from_row(r) for r in cur.fetchall()]
    # Prefer a dedicated primary image; fall back to the legacy image_path column.
    for entry in all_trips:
        primary = get_primary_image("trip", entry["id"])
        if primary:
            entry["cover_image"] = f'/images/{primary["file_path"]}'
        elif entry.get("image_path"):
            entry["cover_image"] = f'/images/{entry["image_path"]}'
        else:
            entry["cover_image"] = None
    conn.close()
    self.send_json({"trips": all_trips})
def handle_get_trip(self, trip_id):
    """Get single trip with all data.

    Returns the trip row plus its transportations, lodging, notes and
    locations, with images and documents attached to each entity and a
    deduplicated `hero_images` list (trip images first, then all child
    entity images).
    """
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM trips WHERE id = ?", (trip_id,))
    # NOTE(review): relies on dict_from_row tolerating a None row — the
    # `if not trip` guard below suggests it returns a falsy value then.
    trip = dict_from_row(cursor.fetchone())
    if not trip:
        conn.close()
        self.send_json({"error": "Trip not found"}, 404)
        return
    cursor.execute("SELECT * FROM transportations WHERE trip_id = ? ORDER BY date", (trip_id,))
    trip["transportations"] = [dict_from_row(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM lodging WHERE trip_id = ? ORDER BY check_in", (trip_id,))
    trip["lodging"] = [dict_from_row(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM notes WHERE trip_id = ? ORDER BY date", (trip_id,))
    trip["notes"] = [dict_from_row(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM locations WHERE trip_id = ? ORDER BY visit_date", (trip_id,))
    trip["locations"] = [dict_from_row(row) for row in cursor.fetchall()]
    # Collect all entity IDs to fetch images
    entity_ids = [trip_id]  # trip itself
    for t in trip["transportations"]:
        entity_ids.append(t["id"])
    for l in trip["lodging"]:
        entity_ids.append(l["id"])
    for l in trip["locations"]:
        entity_ids.append(l["id"])
    for n in trip["notes"]:
        entity_ids.append(n["id"])
    # Fetch all images for this trip's entities in one query
    # (entity_ids is never empty: it always contains trip_id).
    placeholders = ','.join('?' * len(entity_ids))
    cursor.execute(f"SELECT * FROM images WHERE entity_id IN ({placeholders}) ORDER BY is_primary DESC, created_at", entity_ids)
    all_images = [dict_from_row(row) for row in cursor.fetchall()]
    # Build image lookup by entity_id
    images_by_entity = {}
    for img in all_images:
        img["url"] = f'/images/{img["file_path"]}'
        images_by_entity.setdefault(img["entity_id"], []).append(img)
    # Attach images to each entity
    trip["images"] = images_by_entity.get(trip_id, [])
    for t in trip["transportations"]:
        t["images"] = images_by_entity.get(t["id"], [])
    for l in trip["lodging"]:
        l["images"] = images_by_entity.get(l["id"], [])
    for l in trip["locations"]:
        l["images"] = images_by_entity.get(l["id"], [])
    for n in trip["notes"]:
        n["images"] = images_by_entity.get(n["id"], [])
    # Hero images: trip primary + all entity images
    hero_images = []
    trip_img = images_by_entity.get(trip_id, [])
    if trip_img:
        hero_images.extend(trip_img)
    # O(n^2) dedup via `in`; acceptable for per-trip image counts.
    for img in all_images:
        if img["entity_id"] != trip_id and img not in hero_images:
            hero_images.append(img)
    trip["hero_images"] = hero_images
    # Fetch all documents for this trip's entities (reuses the same placeholders)
    cursor.execute(f"SELECT * FROM documents WHERE entity_id IN ({placeholders}) ORDER BY created_at", entity_ids)
    all_docs = [dict_from_row(row) for row in cursor.fetchall()]
    docs_by_entity = {}
    for doc in all_docs:
        doc["url"] = f'/api/document/{doc["file_path"]}'
        docs_by_entity.setdefault(doc["entity_id"], []).append(doc)
    for t in trip["transportations"]:
        t["documents"] = docs_by_entity.get(t["id"], [])
    for l in trip["lodging"]:
        l["documents"] = docs_by_entity.get(l["id"], [])
    for l in trip["locations"]:
        l["documents"] = docs_by_entity.get(l["id"], [])
    for n in trip["notes"]:
        n["documents"] = docs_by_entity.get(n["id"], [])
    trip["documents"] = docs_by_entity.get(trip_id, [])
    conn.close()
    self.send_json(trip)
def handle_create_trip(self, body):
    """Insert a new trip from the JSON payload and return its generated id."""
    payload = json.loads(body)
    new_id = generate_id()
    conn = get_db()
    conn.cursor().execute(
        '''
        INSERT INTO trips (id, name, description, start_date, end_date)
        VALUES (?, ?, ?, ?, ?)
        ''',
        (
            new_id,
            payload.get("name", ""),
            payload.get("description", ""),
            payload.get("start_date", ""),
            payload.get("end_date", ""),
        ),
    )
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": new_id})
def handle_update_trip(self, body):
    """Persist edits to an existing trip's core fields and share settings."""
    payload = json.loads(body)
    # Empty share_password / immich_album_id are stored as NULL, not "".
    values = (
        payload.get("name", ""),
        payload.get("description", ""),
        payload.get("start_date", ""),
        payload.get("end_date", ""),
        payload.get("share_password", "") or None,
        payload.get("immich_album_id") or None,
        payload.get("id"),
    )
    conn = get_db()
    conn.cursor().execute(
        '''
        UPDATE trips SET name = ?, description = ?, start_date = ?, end_date = ?,
        share_password = ?, immich_album_id = ?
        WHERE id = ?
        ''',
        values,
    )
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_delete_trip(self, body):
    """Delete a trip and all related data.

    Unlinks image files on disk for the trip itself and for each child
    entity (transportations, lodging, notes, locations), then deletes the
    trip row. Related DB rows are expected to be removed by foreign-key
    cascades (assumption — verify ON DELETE behavior in the schema).

    Fix: the original only removed child-entity image files; the trip's
    own cover/hero images (entity_type "trip") were left on disk forever.
    """
    data = json.loads(body)
    trip_id = data.get("id")
    conn = get_db()
    cursor = conn.cursor()
    # Remove image files belonging to child entities.
    for table in ["transportations", "lodging", "notes", "locations"]:
        # images.entity_type is the singular table name; "lodging" is already singular.
        entity_type = "lodging" if table == "lodging" else table[:-1]
        cursor.execute(f"SELECT id FROM {table} WHERE trip_id = ?", (trip_id,))
        for row in cursor.fetchall():
            cursor.execute("SELECT file_path FROM images WHERE entity_type = ? AND entity_id = ?",
                           (entity_type, row["id"]))
            for img_row in cursor.fetchall():
                img_path = IMAGES_DIR / img_row["file_path"]
                if img_path.exists():
                    img_path.unlink()
    # Also remove the trip's own image files (previously leaked on disk).
    cursor.execute("SELECT file_path FROM images WHERE entity_type = ? AND entity_id = ?",
                   ("trip", trip_id))
    for img_row in cursor.fetchall():
        img_path = IMAGES_DIR / img_row["file_path"]
        if img_path.exists():
            img_path.unlink()
    # Delete trip (cascades to related tables)
    cursor.execute("DELETE FROM trips WHERE id = ?", (trip_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
# ==================== Transportation CRUD ====================
def handle_create_transportation(self, body):
    """Create transportation.

    Accepts a JSON body with from/to locations, optional Google place ids
    and coordinates. Missing coordinates are resolved from the place ids;
    when neither a place id nor coordinates are given, both are
    auto-resolved from the free-text location name.
    """
    data = json.loads(body)
    trans_id = generate_id()
    from_loc = data.get("from_location", "")
    to_loc = data.get("to_location", "")
    trans_type = data.get("type", "plane")
    from_place_id = data.get("from_place_id", "")
    to_place_id = data.get("to_place_id", "")
    from_lat = data.get("from_lat")
    from_lng = data.get("from_lng")
    to_lat = data.get("to_lat")
    to_lng = data.get("to_lng")
    # Resolve coordinates from place_ids if not provided.
    # NOTE(review): `not (lat and lng)` treats 0.0 as "missing", so a stop
    # exactly on the equator/prime meridian would be re-resolved — confirm
    # this is acceptable before tightening to `is None` checks.
    if from_place_id and not (from_lat and from_lng):
        details = get_place_details(from_place_id)
        from_lat = details.get("latitude") or from_lat
        from_lng = details.get("longitude") or from_lng
    if to_place_id and not (to_lat and to_lng):
        details = get_place_details(to_place_id)
        to_lat = details.get("latitude") or to_lat
        to_lng = details.get("longitude") or to_lng
    # Auto-resolve from/to place_ids from location names
    if not from_place_id and not (from_lat and from_lng) and from_loc:
        pid, plat, plng, _ = auto_resolve_place(from_loc)
        if pid:
            from_place_id = pid
            from_lat = plat or from_lat
            from_lng = plng or from_lng
    if not to_place_id and not (to_lat and to_lng) and to_loc:
        pid, plat, plng, _ = auto_resolve_place(to_loc)
        if pid:
            to_place_id = pid
            to_lat = plat or to_lat
            to_lng = plng or to_lng
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        INSERT INTO transportations (id, trip_id, name, type, flight_number, from_location,
        to_location, date, end_date, timezone, description, link, cost_points, cost_cash,
        from_place_id, to_place_id, from_lat, from_lng, to_lat, to_lng)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ''', (trans_id, data.get("trip_id"), data.get("name", ""), trans_type,
          data.get("flight_number", ""), from_loc, to_loc,
          data.get("date", ""), data.get("end_date", ""), data.get("timezone", ""),
          data.get("description", ""), data.get("link", ""),
          data.get("cost_points", 0), data.get("cost_cash", 0),
          from_place_id, to_place_id, from_lat, from_lng, to_lat, to_lng))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": trans_id})
def handle_update_transportation(self, body):
    """Update transportation.

    Mirrors handle_create_transportation's coordinate resolution from
    place ids, but (unlike create) does not auto-resolve place ids from
    location names.
    """
    data = json.loads(body)
    from_loc = data.get("from_location", "")
    to_loc = data.get("to_location", "")
    trans_type = data.get("type", "plane")
    from_place_id = data.get("from_place_id", "")
    to_place_id = data.get("to_place_id", "")
    from_lat = data.get("from_lat")
    from_lng = data.get("from_lng")
    to_lat = data.get("to_lat")
    to_lng = data.get("to_lng")
    # Resolve coordinates from place_ids if not provided
    # (0.0 coordinates are treated as missing — same caveat as on create).
    if from_place_id and not (from_lat and from_lng):
        details = get_place_details(from_place_id)
        from_lat = details.get("latitude") or from_lat
        from_lng = details.get("longitude") or from_lng
    if to_place_id and not (to_lat and to_lng):
        details = get_place_details(to_place_id)
        to_lat = details.get("latitude") or to_lat
        to_lng = details.get("longitude") or to_lng
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        UPDATE transportations SET name = ?, type = ?, flight_number = ?, from_location = ?,
        to_location = ?, date = ?, end_date = ?, timezone = ?,
        description = ?, link = ?, cost_points = ?, cost_cash = ?,
        from_place_id = ?, to_place_id = ?, from_lat = ?, from_lng = ?, to_lat = ?, to_lng = ?
        WHERE id = ?
    ''', (data.get("name", ""), trans_type, data.get("flight_number", ""),
          from_loc, to_loc, data.get("date", ""),
          data.get("end_date", ""), data.get("timezone", ""), data.get("description", ""),
          data.get("link", ""), data.get("cost_points", 0), data.get("cost_cash", 0),
          from_place_id, to_place_id, from_lat, from_lng, to_lat, to_lng,
          data.get("id")))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_delete_transportation(self, body):
    """Remove a transportation record along with its stored images."""
    target_id = json.loads(body).get("id")
    # Clean up image files/rows first, then the entity row itself.
    self._delete_entity_images("transportation", target_id)
    conn = get_db()
    conn.cursor().execute("DELETE FROM transportations WHERE id = ?", (target_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
# ==================== Lodging CRUD ====================
def handle_create_lodging(self, body):
    """Create lodging.

    Resolves missing coordinates from a supplied place_id, or
    auto-resolves both place_id and coordinates from the lodging name
    (plus location hint) when neither is given.
    """
    data = json.loads(body)
    lodging_id = generate_id()
    place_id = data.get("place_id")
    lat = data.get("latitude")
    lng = data.get("longitude")
    # Resolve coordinates from place_id if not provided
    # (0.0 coordinates are treated as missing — see transportation handlers).
    if place_id and not (lat and lng):
        details = get_place_details(place_id)
        lat = details.get("latitude") or lat
        lng = details.get("longitude") or lng
    # Auto-resolve place_id from name if not provided
    if not place_id and not (lat and lng) and data.get("name"):
        pid, plat, plng, paddr = auto_resolve_place(data.get("name", ""), data.get("location", ""))
        if pid:
            place_id = pid
            lat = plat or lat
            lng = plng or lng
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        INSERT INTO lodging (id, trip_id, name, type, location, check_in, check_out,
        timezone, reservation_number, description, link, cost_points, cost_cash, place_id, latitude, longitude)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ''', (lodging_id, data.get("trip_id"), data.get("name", ""), data.get("type", "hotel"),
          data.get("location", ""), data.get("check_in", ""), data.get("check_out", ""),
          data.get("timezone", ""), data.get("reservation_number", ""),
          data.get("description", ""), data.get("link", ""),
          data.get("cost_points", 0), data.get("cost_cash", 0), place_id, lat, lng))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": lodging_id})
def handle_update_lodging(self, body):
    """Update lodging.

    Resolves missing coordinates from place_id; unlike create, it does
    not auto-resolve a place_id from the lodging name.
    """
    data = json.loads(body)
    place_id = data.get("place_id")
    lat = data.get("latitude")
    lng = data.get("longitude")
    # Resolve coordinates from place_id if not provided
    if place_id and not (lat and lng):
        details = get_place_details(place_id)
        lat = details.get("latitude") or lat
        lng = details.get("longitude") or lng
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        UPDATE lodging SET name = ?, type = ?, location = ?, check_in = ?, check_out = ?,
        timezone = ?, reservation_number = ?, description = ?, link = ?,
        cost_points = ?, cost_cash = ?, place_id = ?, latitude = ?, longitude = ?
        WHERE id = ?
    ''', (data.get("name", ""), data.get("type", "hotel"), data.get("location", ""),
          data.get("check_in", ""), data.get("check_out", ""), data.get("timezone", ""),
          data.get("reservation_number", ""), data.get("description", ""),
          data.get("link", ""), data.get("cost_points", 0), data.get("cost_cash", 0),
          place_id, lat, lng, data.get("id")))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_delete_lodging(self, body):
    """Remove a lodging record along with its stored images."""
    target_id = json.loads(body).get("id")
    self._delete_entity_images("lodging", target_id)
    conn = get_db()
    conn.cursor().execute("DELETE FROM lodging WHERE id = ?", (target_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
# ==================== Notes CRUD ====================
def handle_create_note(self, body):
    """Insert a note row for a trip and return the new id."""
    payload = json.loads(body)
    new_id = generate_id()
    record = (
        new_id,
        payload.get("trip_id"),
        payload.get("name", ""),
        payload.get("content", ""),
        payload.get("date", ""),
    )
    conn = get_db()
    conn.cursor().execute(
        '''
        INSERT INTO notes (id, trip_id, name, content, date)
        VALUES (?, ?, ?, ?, ?)
        ''',
        record,
    )
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": new_id})
def handle_update_note(self, body):
    """Overwrite a note's name, content and date."""
    payload = json.loads(body)
    conn = get_db()
    conn.cursor().execute(
        '''
        UPDATE notes SET name = ?, content = ?, date = ?
        WHERE id = ?
        ''',
        (
            payload.get("name", ""),
            payload.get("content", ""),
            payload.get("date", ""),
            payload.get("id"),
        ),
    )
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_delete_note(self, body):
    """Remove a note record along with its stored images."""
    target_id = json.loads(body).get("id")
    self._delete_entity_images("note", target_id)
    conn = get_db()
    conn.cursor().execute("DELETE FROM notes WHERE id = ?", (target_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
# ==================== Locations CRUD ====================
def handle_create_location(self, body):
    """Create location.

    Resolves missing coordinates/address from a supplied place_id, or
    auto-resolves place_id, coordinates and address from the location
    name when neither is given.
    """
    data = json.loads(body)
    loc_id = generate_id()
    place_id = data.get("place_id")
    address = data.get("address")
    lat = data.get("latitude")
    lng = data.get("longitude")
    # Resolve coordinates from place_id if not provided
    # (0.0 coordinates are treated as missing — see transportation handlers).
    if place_id and not (lat and lng):
        details = get_place_details(place_id)
        lat = details.get("latitude") or lat
        lng = details.get("longitude") or lng
        if not address:
            address = details.get("address", "")
    # Auto-resolve place_id from name if not provided
    if not place_id and not (lat and lng) and data.get("name"):
        pid, plat, plng, paddr = auto_resolve_place(data.get("name", ""), address or "")
        if pid:
            place_id = pid
            lat = plat or lat
            lng = plng or lng
            if not address:
                address = paddr or ""
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        INSERT INTO locations (id, trip_id, name, description, category, visit_date, start_time, end_time, link, cost_points, cost_cash, hike_distance, hike_difficulty, hike_time, place_id, address, latitude, longitude)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ''', (loc_id, data.get("trip_id"), data.get("name", ""), data.get("description", ""),
          data.get("category", ""),
          data.get("visit_date", ""), data.get("start_time", ""), data.get("end_time", ""),
          data.get("link", ""), data.get("cost_points", 0), data.get("cost_cash", 0),
          data.get("hike_distance", ""), data.get("hike_difficulty", ""), data.get("hike_time", ""),
          place_id, address, lat, lng))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": loc_id})
def handle_update_location(self, body):
    """Update location.

    Resolves missing coordinates/address from place_id; unlike create,
    it does not auto-resolve a place_id from the name.
    """
    data = json.loads(body)
    place_id = data.get("place_id")
    address = data.get("address")
    lat = data.get("latitude")
    lng = data.get("longitude")
    # Resolve coordinates from place_id if not provided
    if place_id and not (lat and lng):
        details = get_place_details(place_id)
        lat = details.get("latitude") or lat
        lng = details.get("longitude") or lng
        if not address:
            address = details.get("address", "")
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        UPDATE locations SET name = ?, description = ?,
        category = ?, visit_date = ?, start_time = ?, end_time = ?, link = ?,
        cost_points = ?, cost_cash = ?, hike_distance = ?, hike_difficulty = ?, hike_time = ?,
        place_id = ?, address = ?, latitude = ?, longitude = ?
        WHERE id = ?
    ''', (data.get("name", ""), data.get("description", ""),
          data.get("category", ""),
          data.get("visit_date", ""), data.get("start_time", ""), data.get("end_time", ""),
          data.get("link", ""), data.get("cost_points", 0), data.get("cost_cash", 0),
          data.get("hike_distance", ""), data.get("hike_difficulty", ""), data.get("hike_time", ""),
          place_id, address, lat, lng,
          data.get("id")))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_delete_location(self, body):
    """Remove a location record along with its stored images."""
    target_id = json.loads(body).get("id")
    self._delete_entity_images("location", target_id)
    conn = get_db()
    conn.cursor().execute("DELETE FROM locations WHERE id = ?", (target_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_move_item(self, body):
    """Move an item to a different date (drag and drop).

    Expects JSON with `type` (transportation/lodging/note/location), `id`,
    and `new_date` (YYYY-MM-DD). For datetime fields the date portion is
    replaced while any time-of-day suffix (chars 10+) is preserved; lodging
    check-out keeps its original day offset from check-in. Afterwards a
    best-effort PATCH mirrors the change to AdventureLog.

    Fixes vs. original: the date-math fallback used a bare `except:` —
    narrowed to ValueError (the only error strptime raises here); the
    redundant function-local `from datetime import ...` that shadowed the
    module-level imports was removed.
    """
    data = json.loads(body)
    item_type = data.get("type")  # transportation, lodging, note, location
    item_id = data.get("id")
    new_date = data.get("new_date")  # YYYY-MM-DD format
    if not all([item_type, item_id, new_date]):
        self.send_json({"error": "Missing required fields"}, 400)
        return
    conn = get_db()
    cursor = conn.cursor()
    if item_type == "transportation":
        # Get current datetime, update date portion
        cursor.execute("SELECT date, end_date FROM transportations WHERE id = ?", (item_id,))
        row = cursor.fetchone()
        if row:
            old_date = row["date"] or ""
            old_end = row["end_date"] or ""
            # Replace date portion, keep time
            new_datetime = new_date + old_date[10:] if len(old_date) > 10 else new_date
            new_end = new_date + old_end[10:] if len(old_end) > 10 else new_date
            cursor.execute("UPDATE transportations SET date = ?, end_date = ? WHERE id = ?",
                           (new_datetime, new_end, item_id))
    elif item_type == "lodging":
        # Get current datetime, update date portion
        cursor.execute("SELECT check_in, check_out FROM lodging WHERE id = ?", (item_id,))
        row = cursor.fetchone()
        if row:
            old_checkin = row["check_in"] or ""
            old_checkout = row["check_out"] or ""
            new_checkin = new_date + old_checkin[10:] if len(old_checkin) > 10 else new_date
            # Calculate checkout offset if exists
            if old_checkin and old_checkout and len(old_checkin) >= 10 and len(old_checkout) >= 10:
                try:
                    old_in_date = datetime.strptime(old_checkin[:10], "%Y-%m-%d")
                    old_out_date = datetime.strptime(old_checkout[:10], "%Y-%m-%d")
                    delta = old_out_date - old_in_date
                    new_in_date = datetime.strptime(new_date, "%Y-%m-%d")
                    new_out_date = new_in_date + delta
                    new_checkout = new_out_date.strftime("%Y-%m-%d") + old_checkout[10:]
                except ValueError:
                    # Malformed date strings: fall back to a same-day checkout.
                    new_checkout = new_date + old_checkout[10:] if len(old_checkout) > 10 else new_date
            else:
                new_checkout = new_date + old_checkout[10:] if len(old_checkout) > 10 else new_date
            cursor.execute("UPDATE lodging SET check_in = ?, check_out = ? WHERE id = ?",
                           (new_checkin, new_checkout, item_id))
    elif item_type == "note":
        cursor.execute("UPDATE notes SET date = ? WHERE id = ?", (new_date, item_id))
    elif item_type == "location":
        # Update visit_date AND start_time/end_time
        cursor.execute("SELECT start_time, end_time FROM locations WHERE id = ?", (item_id,))
        row = cursor.fetchone()
        if row:
            old_start = row["start_time"] or ""
            old_end = row["end_time"] or ""
            new_start = new_date + old_start[10:] if len(old_start) > 10 else new_date
            new_end = new_date + old_end[10:] if len(old_end) > 10 else new_date
            cursor.execute("UPDATE locations SET visit_date = ?, start_time = ?, end_time = ? WHERE id = ?",
                           (new_date, new_start, new_end, item_id))
        else:
            cursor.execute("UPDATE locations SET visit_date = ? WHERE id = ?", (new_date, item_id))
    else:
        conn.close()
        self.send_json({"error": "Invalid item type"}, 400)
        return
    conn.commit()
    conn.close()
    # Also update in AdventureLog if possible (best-effort; failures only logged)
    try:
        adventurelog_endpoints = {
            "transportation": f"{ADVENTURELOG_URL}/api/transportations/{item_id}/",
            "lodging": f"{ADVENTURELOG_URL}/api/lodging/{item_id}/",
            "note": f"{ADVENTURELOG_URL}/api/notes/{item_id}/",
            "location": f"{ADVENTURELOG_URL}/api/adventures/{item_id}/"
        }
        adventurelog_fields = {
            "transportation": "date",
            "lodging": "check_in",
            "note": "date",
            "location": "date"
        }
        url = adventurelog_endpoints.get(item_type)
        field = adventurelog_fields.get(item_type)
        if url and field:
            req = urllib.request.Request(url, method='PATCH')
            req.add_header('Content-Type', 'application/json')
            req.add_header('Cookie', f'sessionid={ADVENTURELOG_SESSION}')
            req.data = json.dumps({field: new_date}).encode()
            urllib.request.urlopen(req, timeout=10)
    except Exception as e:
        print(f"[MOVE] Could not update AdventureLog: {e}")
    self.send_json({"success": True})
# ==================== Image Handling ====================
def _delete_entity_images(self, entity_type, entity_id):
    """Remove an entity's image files from disk and its rows from `images`."""
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT file_path FROM images WHERE entity_type = ? AND entity_id = ?",
                   (entity_type, entity_id))
    # Materialize the paths first, then unlink whatever actually exists.
    stored = [IMAGES_DIR / rec["file_path"] for rec in cursor.fetchall()]
    for path in stored:
        if path.exists():
            path.unlink()
    cursor.execute("DELETE FROM images WHERE entity_type = ? AND entity_id = ?",
                   (entity_type, entity_id))
    conn.commit()
    conn.close()
def handle_image_upload(self, body):
    """Handle image upload (base64 JSON or multipart form-data).

    Two payload shapes:
      * multipart/form-data with `entity_type`, `entity_id` and `file` parts;
      * JSON with `entity_type`, `entity_id`, base64 `image_data` (optionally
        a "data:...;base64," URL) and an optional `filename`.
    The image is written under IMAGES_DIR with a generated name and a row is
    inserted into `images` (is_primary=0).

    Fixes vs. original: the base64 decode used a bare `except:` (narrowed to
    Exception, still mapped to a 400) and the redundant function-local
    `import base64` (already imported at module level) was removed.
    """
    content_type = self.headers.get("Content-Type", "")
    # Handle multipart file upload
    if "multipart/form-data" in content_type:
        # Hand-rolled multipart parse: split on the boundary, then carve each
        # part's payload between the blank header line and the trailing CRLF.
        boundary = content_type.split("boundary=")[-1].encode()
        parts = body.split(b"--" + boundary)
        entity_type = ""
        entity_id = ""
        file_data = None
        file_name = "image.jpg"
        for part in parts:
            if b'name="entity_type"' in part:
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                entity_type = part[data_start:data_end].decode()
            elif b'name="entity_id"' in part:
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                entity_id = part[data_start:data_end].decode()
            elif b'name="file"' in part:
                if b'filename="' in part:
                    fn_start = part.find(b'filename="') + 10
                    fn_end = part.find(b'"', fn_start)
                    file_name = part[fn_start:fn_end].decode()
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                file_data = part[data_start:data_end]
        if not all([entity_type, entity_id, file_data]):
            self.send_json({"error": "Missing required fields (entity_type, entity_id, file)"}, 400)
            return
        image_bytes = file_data
    else:
        # Handle JSON with base64 image
        data = json.loads(body)
        entity_type = data.get("entity_type")
        entity_id = data.get("entity_id")
        image_data = data.get("image_data")  # base64
        file_name = data.get("filename", "image.jpg")
        if not all([entity_type, entity_id, image_data]):
            self.send_json({"error": "Missing required fields"}, 400)
            return
        try:
            # split(",")[-1] drops any "data:image/...;base64," prefix.
            image_bytes = base64.b64decode(image_data.split(",")[-1])
        except Exception:
            self.send_json({"error": "Invalid image data"}, 400)
            return
    # Save file under a generated name, preserving the original extension.
    ext = Path(file_name).suffix or ".jpg"
    file_id = generate_id()
    saved_name = f"{file_id}{ext}"
    file_path = IMAGES_DIR / saved_name
    with open(file_path, "wb") as f:
        f.write(image_bytes)
    # Save to database
    image_id = generate_id()
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        INSERT INTO images (id, entity_type, entity_id, file_path, is_primary)
        VALUES (?, ?, ?, ?, ?)
    ''', (image_id, entity_type, entity_id, saved_name, 0))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "id": image_id, "path": f"/images/{saved_name}"})
def handle_image_delete(self, body):
    """Delete a single image row and its backing file, if present."""
    image_id = json.loads(body).get("id")
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT file_path FROM images WHERE id = ?", (image_id,))
    record = cursor.fetchone()
    if record:
        stored = IMAGES_DIR / record["file_path"]
        if stored.exists():
            stored.unlink()
        cursor.execute("DELETE FROM images WHERE id = ?", (image_id,))
        conn.commit()
    conn.close()
    # Always reports success, even when the id was unknown (matches original).
    self.send_json({"success": True})
def handle_image_search(self, body):
    """Proxy a Google Custom Search image query and return simplified hits."""
    if not GOOGLE_API_KEY or not GOOGLE_CX:
        self.send_json({"error": "Google API not configured"}, 400)
        return
    query = json.loads(body).get("query", "")
    url = f"https://www.googleapis.com/customsearch/v1?key={GOOGLE_API_KEY}&cx={GOOGLE_CX}&searchType=image&q={urllib.parse.quote(query)}&num=10"
    try:
        request = urllib.request.Request(url, headers={"User-Agent": "Mozilla/5.0"})
        with urllib.request.urlopen(request, timeout=10) as resp:
            payload = json.loads(resp.read().decode())
        hits = [
            {
                "url": item.get("link"),
                "thumbnail": item.get("image", {}).get("thumbnailLink"),
                "title": item.get("title"),
            }
            for item in payload.get("items", [])
        ]
        self.send_json({"images": hits})
    except Exception as e:
        self.send_json({"error": str(e)}, 500)
def handle_image_upload_from_url(self, body):
    """Download image from URL and save.

    Fetches `url` from the JSON body, infers the extension from the
    response Content-Type, stores the file under IMAGES_DIR and records
    it in `images`.

    NOTE(review): the URL is fetched as-is, so an authenticated caller can
    make the server request arbitrary internal/external hosts (SSRF
    surface) — consider scheme/host validation if this is ever exposed
    beyond trusted users.
    """
    data = json.loads(body)
    entity_type = data.get("entity_type")
    entity_id = data.get("entity_id")
    image_url = data.get("url")
    if not all([entity_type, entity_id, image_url]):
        self.send_json({"error": "Missing required fields"}, 400)
        return
    try:
        req = urllib.request.Request(image_url, headers={"User-Agent": "Mozilla/5.0"})
        with urllib.request.urlopen(req, timeout=15) as response:
            image_bytes = response.read()
            # Determine extension from Content-Type; default to .jpg
            content_type = response.headers.get("Content-Type", "image/jpeg")
            ext_map = {"image/jpeg": ".jpg", "image/png": ".png", "image/gif": ".gif", "image/webp": ".webp"}
            ext = ext_map.get(content_type.split(";")[0], ".jpg")
        # Save file
        file_id = generate_id()
        file_name = f"{file_id}{ext}"
        file_path = IMAGES_DIR / file_name
        with open(file_path, "wb") as f:
            f.write(image_bytes)
        # Save to database
        image_id = generate_id()
        conn = get_db()
        cursor = conn.cursor()
        cursor.execute('''
            INSERT INTO images (id, entity_type, entity_id, file_path, is_primary)
            VALUES (?, ?, ?, ?, ?)
        ''', (image_id, entity_type, entity_id, file_name, 0))
        conn.commit()
        conn.close()
        self.send_json({"success": True, "id": image_id, "path": f"/images/{file_name}"})
    except Exception as e:
        self.send_json({"error": str(e)}, 500)
# ==================== Share Handling ====================
def handle_create_share(self, body):
    """Generate and store a fresh share token for a trip."""
    trip_id = json.loads(body).get("trip_id")
    token = secrets.token_urlsafe(16)
    conn = get_db()
    conn.cursor().execute("UPDATE trips SET share_token = ? WHERE id = ?", (token, trip_id))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "share_token": token})
def handle_delete_share(self, body):
    """Revoke a trip's share link by clearing its token."""
    trip_id = json.loads(body).get("trip_id")
    conn = get_db()
    conn.cursor().execute("UPDATE trips SET share_token = NULL WHERE id = ?", (trip_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_share_verify(self, body):
    """Verify password for shared trip.

    Looks up the trip by its share token and compares the supplied
    password against the stored one.

    Fixes vs. original: the handler printed both the entered and the
    stored password (plus the request body) to the logs — a credential
    leak — and compared them with `==`. Passwords are no longer logged,
    and the comparison uses secrets.compare_digest to avoid a timing
    side channel.
    """
    try:
        data = json.loads(body)
        share_token = data.get("share_token")
        password = str(data.get("password", ""))
        print(f"[Share] Verifying token: {share_token}", flush=True)
        conn = get_db()
        cursor = conn.cursor()
        cursor.execute("SELECT share_password FROM trips WHERE share_token = ?", (share_token,))
        row = cursor.fetchone()
        conn.close()
        if not row:
            print(f"[Share] Trip not found for token", flush=True)
            self.send_json({"success": False, "error": "Trip not found"})
            return
        stored_password = row[0] or ""
        # Constant-time comparison; never log either password.
        if secrets.compare_digest(password.encode(), stored_password.encode()):
            self.send_json({"success": True})
        else:
            self.send_json({"success": False, "error": "Incorrect password"})
    except Exception as e:
        print(f"[Share] ERROR: {e}", flush=True)
        import traceback
        traceback.print_exc()
        self.send_json({"success": False, "error": str(e)})
# ==================== AI Parsing ====================
def handle_parse(self, body):
    """Parse text/image/PDF with AI.

    Accepts either multipart/form-data (optional `text`, `file`,
    `trip_start`, `trip_end` parts) or a JSON body with `text` and trip
    dates. Files are base64-encoded and routed to parse_pdf_input or
    parse_image_input; plain text goes to parse_text_input. On a
    successful file parse, the raw file is echoed back base64-encoded
    under `attachment` so the client can attach it as a document.
    """
    print(f"[PARSE] Starting parse, body size: {len(body)} bytes", flush=True)
    content_type = self.headers.get("Content-Type", "")
    print(f"[PARSE] Content-Type: {content_type}", flush=True)
    if "multipart/form-data" in content_type:
        # Handle file upload: hand-rolled multipart parse — split on the
        # boundary, then carve each part between the blank header line
        # ("\r\n\r\n") and the trailing "\r\n".
        boundary = content_type.split("boundary=")[-1].encode()
        parts = body.split(b"--" + boundary)
        text_input = ""
        file_data = None
        file_name = ""
        mime_type = ""
        trip_start = None
        trip_end = None
        for part in parts:
            if b'name="text"' in part:
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                text_input = part[data_start:data_end].decode()
            elif b'name="file"' in part:
                # Extract filename
                if b'filename="' in part:
                    fn_start = part.find(b'filename="') + 10
                    fn_end = part.find(b'"', fn_start)
                    file_name = part[fn_start:fn_end].decode()
                # Extract content type (offset 14 = len(b"Content-Type:") + 1 space)
                if b"Content-Type:" in part:
                    ct_start = part.find(b"Content-Type:") + 14
                    ct_end = part.find(b"\r\n", ct_start)
                    mime_type = part[ct_start:ct_end].decode().strip()
                # Extract file data
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                if data_end > data_start:
                    file_data = part[data_start:data_end]
                else:
                    file_data = part[data_start:]
            elif b'name="trip_start"' in part:
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                trip_start = part[data_start:data_end].decode().strip()
            elif b'name="trip_end"' in part:
                data_start = part.find(b"\r\n\r\n") + 4
                data_end = part.rfind(b"\r\n")
                trip_end = part[data_start:data_end].decode().strip()
        result = None
        if file_data and len(file_data) > 0:
            print(f"[PARSE] File data size: {len(file_data)} bytes, name: {file_name}, mime: {mime_type}", flush=True)
            file_base64 = base64.b64encode(file_data).decode()
            print(f"[PARSE] Base64 size: {len(file_base64)} chars", flush=True)
            # Check if it's a PDF - send directly to OpenAI Vision
            if mime_type == "application/pdf" or file_name.lower().endswith(".pdf"):
                print(f"[PARSE] Calling parse_pdf_input...", flush=True)
                result = parse_pdf_input(file_base64, file_name, trip_start, trip_end)
                print(f"[PARSE] PDF result: {str(result)[:200]}", flush=True)
            else:
                # It's an image
                print(f"[PARSE] Calling parse_image_input...", flush=True)
                result = parse_image_input(file_base64, mime_type, trip_start, trip_end)
            # Include file data for document attachment if parsing succeeded
            if result and "error" not in result:
                result["attachment"] = {
                    "data": base64.b64encode(file_data).decode(),
                    "name": file_name,
                    "mime_type": mime_type
                }
        elif text_input:
            result = parse_text_input(text_input, trip_start, trip_end)
        else:
            result = {"error": "No input provided"}
        self.send_json(result)
    else:
        # JSON request (text only)
        data = json.loads(body)
        text = data.get("text", "")
        trip_start = data.get("trip_start")
        trip_end = data.get("trip_end")
        if text:
            result = parse_text_input(text, trip_start, trip_end)
            self.send_json(result)
        else:
            self.send_json({"error": "No text provided"})
def handle_parse_email(self, body):
    """Parse email content forwarded by the Cloudflare Email Worker.

    Auth is a shared secret in the X-API-Key header (EMAIL_API_KEY).
    Accepts either multipart/form-data (a "text" part plus optional
    "file" part) or a JSON body with a "text" field.  Successfully
    parsed results are stored in the pending_imports table for later
    review and the parse result (with "import_id") is returned.
    """
    # Verify API key before doing any work.
    api_key = self.headers.get("X-API-Key", "")
    if not EMAIL_API_KEY:
        self.send_json({"error": "Email API not configured"}, 503)
        return
    if api_key != EMAIL_API_KEY:
        self.send_json({"error": "Invalid API key"}, 401)
        return
    print("[EMAIL] Received email for parsing", flush=True)
    content_type = self.headers.get("Content-Type", "")
    text_input = ""  # Initialize for email metadata extraction
    try:
        if "multipart/form-data" in content_type:
            # Hand-rolled multipart parsing (no external deps): split on
            # the boundary and inspect each part's headers.
            boundary = content_type.split("boundary=")[-1].encode()
            parts = body.split(b"--" + boundary)
            text_input = ""
            file_data = None
            file_name = ""
            mime_type = ""
            for part in parts:
                if b'name="text"' in part:
                    data_start = part.find(b"\r\n\r\n") + 4
                    data_end = part.rfind(b"\r\n")
                    text_input = part[data_start:data_end].decode()
                elif b'name="file"' in part:
                    if b'filename="' in part:
                        fn_start = part.find(b'filename="') + 10
                        fn_end = part.find(b'"', fn_start)
                        file_name = part[fn_start:fn_end].decode()
                    if b"Content-Type:" in part:
                        # +14 skips b"Content-Type:" (13 bytes) plus the space.
                        ct_start = part.find(b"Content-Type:") + 14
                        ct_end = part.find(b"\r\n", ct_start)
                        mime_type = part[ct_start:ct_end].decode().strip()
                    data_start = part.find(b"\r\n\r\n") + 4
                    data_end = part.rfind(b"\r\n")
                    file_data = part[data_start:data_end] if data_end > data_start else part[data_start:]
            # Parse based on content
            if file_data:
                if mime_type.startswith("image/"):
                    image_b64 = base64.b64encode(file_data).decode()
                    result = parse_image_input(image_b64, mime_type)
                elif mime_type == "application/pdf":
                    # BUG FIX: parse_pdf_input expects a base64 string
                    # (see the document-upload parse path), not raw bytes.
                    result = parse_pdf_input(base64.b64encode(file_data).decode(), file_name)
                else:
                    result = parse_text_input(text_input) if text_input else {"error": "Unsupported file type"}
            elif text_input:
                result = parse_text_input(text_input)
            else:
                result = {"error": "No content provided"}
        else:
            # JSON request (text only)
            data = json.loads(body)
            text = data.get("text", "")
            text_input = text  # For email metadata extraction
            if text:
                result = parse_text_input(text)
            else:
                result = {"error": "No text provided"}
        # Save to pending imports if successfully parsed
        if result and "error" not in result:
            entry_type = result.get("type", "unknown")
            # Extract email metadata (Subject:/From: header lines embedded
            # in the forwarded text) for display in the review queue.
            email_subject = ""
            email_from = ""
            if text_input:
                for line in text_input.split('\n'):
                    if line.startswith('Subject:'):
                        email_subject = line[8:].strip()[:200]
                    elif line.startswith('From:'):
                        email_from = line[5:].strip()[:200]
            # Save to pending_imports table for manual approval.
            import_id = str(uuid.uuid4())
            conn = get_db()
            cursor = conn.cursor()
            cursor.execute('''
                INSERT INTO pending_imports (id, entry_type, parsed_data, source, email_subject, email_from)
                VALUES (?, ?, ?, ?, ?, ?)
            ''', (import_id, entry_type, json.dumps(result), 'email', email_subject, email_from))
            conn.commit()
            conn.close()
            print(f"[EMAIL] Saved to pending imports: {import_id} ({entry_type})", flush=True)
            result["import_id"] = import_id
        self.send_json(result)
    except Exception as e:
        print(f"[EMAIL] Error parsing: {e}", flush=True)
        self.send_json({"error": str(e)}, 500)
# ==================== Document Management ====================
def handle_document_upload(self, body):
    """Upload a document (multipart/form-data) and attach it to an entity.

    Expected form fields: entity_type, entity_id, file.  The file bytes
    are stored under DOCS_DIR with a generated name and indexed in the
    documents table.  Responds with the new document id and URL.
    """
    content_type = self.headers.get("Content-Type", "")
    if "multipart/form-data" not in content_type:
        self.send_json({"error": "Must be multipart/form-data"}, 400)
        return
    boundary = content_type.split("boundary=")[-1].encode()
    parts = body.split(b"--" + boundary)
    entity_type = ""
    entity_id = ""
    file_data = None
    file_name = ""
    mime_type = ""
    for part in parts:
        if b'name="entity_type"' in part:
            data_start = part.find(b"\r\n\r\n") + 4
            data_end = part.rfind(b"\r\n")
            entity_type = part[data_start:data_end].decode()
        elif b'name="entity_id"' in part:
            data_start = part.find(b"\r\n\r\n") + 4
            data_end = part.rfind(b"\r\n")
            entity_id = part[data_start:data_end].decode()
        elif b'name="file"' in part:
            # Extract filename
            if b'filename="' in part:
                fn_start = part.find(b'filename="') + 10
                fn_end = part.find(b'"', fn_start)
                file_name = part[fn_start:fn_end].decode()
            # Extract content type (+14 skips b"Content-Type:" plus space)
            if b"Content-Type:" in part:
                ct_start = part.find(b"Content-Type:") + 14
                ct_end = part.find(b"\r\n", ct_start)
                mime_type = part[ct_start:ct_end].decode().strip()
            # Extract file data (between header blank line and trailing CRLF)
            data_start = part.find(b"\r\n\r\n") + 4
            data_end = part.rfind(b"\r\n")
            if data_end > data_start:
                file_data = part[data_start:data_end]
            else:
                file_data = part[data_start:]
    if not all([entity_type, entity_id, file_data, file_name]):
        self.send_json({"error": "Missing required fields"}, 400)
        return
    # Save file under a generated name.  SECURITY: the extension comes
    # from an untrusted client filename — strip everything that is not
    # alphanumeric so names like "doc./../../x" cannot escape DOCS_DIR.
    doc_id = generate_id()
    ext = file_name.rsplit(".", 1)[-1] if "." in file_name else "dat"
    ext = re.sub(r"[^A-Za-z0-9]", "", ext)[:10] or "dat"
    stored_name = f"{doc_id}.{ext}"
    file_path = DOCS_DIR / stored_name
    with open(file_path, "wb") as f:
        f.write(file_data)
    # Save to database
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute(
        "INSERT INTO documents (id, entity_type, entity_id, file_path, file_name, mime_type) VALUES (?, ?, ?, ?, ?, ?)",
        (doc_id, entity_type, entity_id, stored_name, file_name, mime_type)
    )
    conn.commit()
    conn.close()
    self.send_json({
        "success": True,
        "document_id": doc_id,
        "file_name": file_name,
        "url": f"/documents/{stored_name}"
    })
def handle_document_delete(self, body):
    """Delete a document: remove its file from disk (if present) and its DB row."""
    payload = json.loads(body)
    doc_id = payload.get("document_id")
    conn = get_db()
    cur = conn.cursor()
    # Look up the stored file before the row goes away.
    cur.execute("SELECT file_path FROM documents WHERE id = ?", (doc_id,))
    record = cur.fetchone()
    if record:
        stored = DOCS_DIR / record["file_path"]
        if stored.exists():
            stored.unlink()
    cur.execute("DELETE FROM documents WHERE id = ?", (doc_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_check_duplicate(self, body):
    """Check whether an incoming flight/hotel already exists in the trip.

    Responds with {"found": True, "existing": {...}} describing the
    matching entry, or {"found": False} when nothing matches.
    """
    payload = json.loads(body)
    trip_id = payload.get("trip_id")
    kind = payload.get("type", "flight")
    match = None
    summary = None
    if kind == "flight":
        match = find_duplicate_flight(trip_id, payload.get("flight_number"), payload.get("date"))
        if match:
            summary = {
                "id": match.get("id"),
                "type": "flight",
                "name": match.get("name"),
                "flight_number": match.get("flight_number"),
                "from_location": match.get("from_location"),
                "to_location": match.get("to_location"),
                "date": match.get("date"),
                "description": match.get("description", ""),
            }
    elif kind == "hotel":
        match = find_duplicate_hotel(
            trip_id,
            payload.get("hotel_name"),
            payload.get("check_in"),
            payload.get("reservation_number"),
        )
        if match:
            summary = {
                "id": match.get("id"),
                "type": "hotel",
                "name": match.get("name"),
                "location": match.get("location"),
                "check_in": match.get("check_in"),
                "check_out": match.get("check_out"),
                "reservation_number": match.get("reservation_number"),
                "description": match.get("description", ""),
            }
    if match:
        self.send_json({"found": True, "existing": summary})
    else:
        self.send_json({"found": False})
def handle_merge_entry(self, body):
    """Merge new booking info into an existing flight or hotel entry.

    Request body (JSON):
        entry_type: "flight" or "hotel" (default "flight")
        entry_id:   id of the existing transportations/lodging row
        new_info:   doc-upload payload (flight: passengers with confirmation
                    codes; hotel: reservation_number)
        new_data:   email-import payload with full field values
        attachment: optional {data: base64, name, mime_type} stored as a
                    document linked to the merged entry

    Only empty fields on the existing row are overwritten from new_data;
    the description is appended to rather than replaced.  Responds with
    {"success": True, "entry_id": ...} plus attachment info, or {"error": ...}.
    """
    data = json.loads(body)
    entry_type = data.get("entry_type", "flight")
    entry_id = data.get("entry_id")
    existing_description = data.get("existing_description", "")  # NOTE(review): read but never used below
    new_info = data.get("new_info", {})
    new_data = data.get("new_data", {})  # Full data from email imports
    attachment_data = data.get("attachment")
    def merge_field(existing, new):
        """Merge two field values - fill empty or append (newline-joined) if different."""
        if not existing:
            return new or ""
        if not new:
            return existing
        if existing.strip() == new.strip():
            return existing
        # Append new info if different
        return f"{existing}\n{new}"
    try:
        conn = get_db()
        cursor = conn.cursor()
        if entry_type == "flight":
            # entity_type names the document-attachment target table category.
            entity_type = "transportation"
            # Get existing entry
            cursor.execute("SELECT * FROM transportations WHERE id = ?", (entry_id,))
            existing = dict_from_row(cursor.fetchone())
            if not existing:
                conn.close()
                self.send_json({"error": "Entry not found"})
                return
            # Handle doc upload format (new_info with passengers): append
            # each confirmation code not already in the description.
            new_description = existing.get("description", "")
            passengers = new_info.get("passengers", [])
            for p in passengers:
                conf = p.get("confirmation", "")
                if conf and conf not in new_description:
                    new_description = merge_field(new_description, conf)
            # Handle email import format (new_data with full fields)
            if new_data:
                new_description = merge_field(new_description, new_data.get("description", ""))
                # Fill in empty fields only — never clobber user-entered data.
                updates = []
                params = []
                if not existing.get("from_location") and new_data.get("from_location"):
                    updates.append("from_location = ?")
                    params.append(new_data["from_location"])
                if not existing.get("to_location") and new_data.get("to_location"):
                    updates.append("to_location = ?")
                    params.append(new_data["to_location"])
                if not existing.get("date") and new_data.get("date"):
                    updates.append("date = ?")
                    params.append(new_data["date"])
                if not existing.get("end_date") and new_data.get("end_date"):
                    updates.append("end_date = ?")
                    params.append(new_data["end_date"])
                updates.append("description = ?")
                params.append(new_description)
                params.append(entry_id)
                # SET clause is built from the fixed column list above, not user input.
                cursor.execute(f"UPDATE transportations SET {', '.join(updates)} WHERE id = ?", params)
            else:
                cursor.execute(
                    "UPDATE transportations SET description = ? WHERE id = ?",
                    (new_description, entry_id)
                )
        elif entry_type == "hotel":
            entity_type = "lodging"
            # Get existing entry
            cursor.execute("SELECT * FROM lodging WHERE id = ?", (entry_id,))
            existing = dict_from_row(cursor.fetchone())
            if not existing:
                conn.close()
                self.send_json({"error": "Entry not found"})
                return
            # Handle doc upload format (new_info with reservation_number)
            new_description = existing.get("description", "")
            reservation_number = new_info.get("reservation_number", "")
            if reservation_number and reservation_number not in new_description:
                new_description = merge_field(new_description, f"Reservation: {reservation_number}")
            # Handle email import format (new_data with full fields)
            if new_data:
                new_description = merge_field(new_description, new_data.get("description", ""))
                # Add reservation number if not already present
                new_res = new_data.get("reservation_number", "")
                if new_res and new_res not in new_description:
                    new_description = merge_field(new_description, f"Reservation: {new_res}")
                # Fill in empty fields only
                updates = []
                params = []
                if not existing.get("location") and new_data.get("location"):
                    updates.append("location = ?")
                    params.append(new_data["location"])
                if not existing.get("check_in") and new_data.get("check_in"):
                    updates.append("check_in = ?")
                    params.append(new_data["check_in"])
                if not existing.get("check_out") and new_data.get("check_out"):
                    updates.append("check_out = ?")
                    params.append(new_data["check_out"])
                if not existing.get("reservation_number") and new_data.get("reservation_number"):
                    updates.append("reservation_number = ?")
                    params.append(new_data["reservation_number"])
                updates.append("description = ?")
                params.append(new_description)
                params.append(entry_id)
                cursor.execute(f"UPDATE lodging SET {', '.join(updates)} WHERE id = ?", params)
            else:
                cursor.execute(
                    "UPDATE lodging SET description = ? WHERE id = ?",
                    (new_description, entry_id)
                )
        else:
            conn.close()
            self.send_json({"error": f"Unknown entry type: {entry_type}"})
            return
        conn.commit()
        # Upload attachment if provided (stored like handle_document_upload does)
        attachment_result = None
        if attachment_data and attachment_data.get("data"):
            file_data = base64.b64decode(attachment_data["data"])
            file_name = attachment_data.get("name", "attachment")
            mime_type = attachment_data.get("mime_type", "application/octet-stream")
            doc_id = generate_id()
            ext = file_name.rsplit(".", 1)[-1] if "." in file_name else "dat"
            stored_name = f"{doc_id}.{ext}"
            file_path = DOCS_DIR / stored_name
            with open(file_path, "wb") as f:
                f.write(file_data)
            cursor.execute(
                "INSERT INTO documents (id, entity_type, entity_id, file_path, file_name, mime_type) VALUES (?, ?, ?, ?, ?, ?)",
                (doc_id, entity_type, entry_id, stored_name, file_name, mime_type)
            )
            conn.commit()
            attachment_result = {"id": doc_id, "file_name": file_name}
        conn.close()
        response_data = {"success": True, "entry_id": entry_id}
        if attachment_result:
            response_data["attachment"] = attachment_result
        self.send_json(response_data)
    except Exception as e:
        # NOTE(review): the connection is not closed on this path — a
        # failure after get_db() leaks the handle; consider try/finally.
        self.send_json({"error": str(e)})
def handle_flight_status(self, body):
    """Return live flight status (via FlightStats) for a flight number and optional date."""
    payload = json.loads(body)
    flight_number = payload.get("flight_number", "")
    date_str = payload.get("date")  # Optional: YYYY-MM-DD
    if not flight_number:
        self.send_json({"error": "Flight number required"})
        return
    # Split e.g. "AA1234" into carrier code and numeric part.
    airline_code, flight_num = parse_flight_number(flight_number)
    if not airline_code or not flight_num:
        self.send_json({"error": f"Could not parse flight number: {flight_number}. Expected format like 'SV20' or 'AA1234'"})
        return
    self.send_json(get_flight_status(airline_code, flight_num, date_str))
def handle_weather(self, body):
    """Get weather forecast for locations and dates.
    Accepts either:
    - trip_id + dates: Will compute location for each date based on lodging/transportation
    - location + dates: Uses single location for all dates (legacy)

    Response shape (trip mode): {"forecasts": {date: [{city, type, icon,
    description, high, low}, ...]}} — dates with no data are omitted.
    """
    data = json.loads(body)
    trip_id = data.get("trip_id", "")
    location = data.get("location", "")
    dates = data.get("dates", [])  # List of YYYY-MM-DD strings
    if not dates:
        self.send_json({"error": "Dates required"})
        return
    # If trip_id provided, compute location(s) per date
    if trip_id:
        conn = get_db()
        cursor = conn.cursor()
        cursor.execute("SELECT * FROM trips WHERE id = ?", (trip_id,))
        trip = dict_from_row(cursor.fetchone())
        if not trip:
            conn.close()
            self.send_json({"error": "Trip not found"})
            return
        # Load lodging and transportations for location lookup
        cursor.execute("SELECT * FROM lodging WHERE trip_id = ? ORDER BY check_in", (trip_id,))
        trip["lodging"] = [dict_from_row(row) for row in cursor.fetchall()]
        cursor.execute("SELECT * FROM transportations WHERE trip_id = ? ORDER BY date", (trip_id,))
        trip["transportations"] = [dict_from_row(row) for row in cursor.fetchall()]
        conn.close()
        # Get locations for each date (may have multiple for travel days)
        date_locations = {}  # {date: [{location, city, type}, ...]}
        all_unique_locations = set()
        for date_str in dates:
            locs = get_locations_for_date(trip, date_str)
            date_locations[date_str] = locs
            for loc_info in locs:
                all_unique_locations.add(loc_info["location"])
        # Fetch weather for each unique location (in parallel) so multi-city
        # trips don't pay one sequential API round-trip per city.
        location_weather = {}  # {location: {date: weather_data}}
        from concurrent.futures import ThreadPoolExecutor, as_completed
        def fetch_loc_weather(loc):
            # Return (location, result) so completed futures can be matched back.
            return loc, get_weather_forecast(loc, dates)
        with ThreadPoolExecutor(max_workers=5) as executor:
            futures = {executor.submit(fetch_loc_weather, loc): loc for loc in all_unique_locations}
            for future in as_completed(futures):
                try:
                    loc, result = future.result()
                    if result.get("forecasts"):
                        location_weather[loc] = result["forecasts"]
                except Exception as e:
                    # Best-effort: a failed location simply has no forecast entry.
                    print(f"[Weather] Error fetching {futures[future]}: {e}", flush=True)
        # Build response with weather per date, including city names
        forecasts = {}
        for date_str, locs in date_locations.items():
            date_weather = []
            for loc_info in locs:
                loc = loc_info["location"]
                weather = location_weather.get(loc, {}).get(date_str)
                if weather:
                    date_weather.append({
                        "city": loc_info["city"],
                        "type": loc_info["type"],
                        "icon": weather.get("icon", ""),
                        "description": weather.get("description", ""),
                        "high": weather.get("high"),
                        "low": weather.get("low")
                    })
            if date_weather:
                forecasts[date_str] = date_weather
        self.send_json({"forecasts": forecasts})
        return
    # Legacy: single location for all dates
    if not location:
        self.send_json({"error": "Location or trip_id required"})
        return
    result = get_weather_forecast(location, dates)
    self.send_json(result)
def handle_trail_info(self, body):
    """Fetch trail details via GPT for a trail name or URL (optional hints)."""
    payload = json.loads(body)
    query = payload.get("query", "").strip()
    hints = payload.get("hints", "").strip()
    if not query:
        self.send_json({"error": "Trail name or URL required"})
        return
    self.send_json(fetch_trail_info(query, hints))
def handle_generate_description(self, body):
    """Generate a short GPT-written description for an attraction."""
    payload = json.loads(body)
    name = payload.get("name", "").strip()
    category = payload.get("category", "attraction").strip()
    location = payload.get("location", "").strip()
    if not name:
        self.send_json({"error": "Attraction name required"})
        return
    self.send_json(generate_attraction_description(name, category, location))
def handle_geocode_all(self):
    """Backfill place_ids for all locations, lodging, and transportation that don't have them.

    For every row missing a place_id (or coordinates), resolves the name
    via the Google Places autocomplete API and writes ids/coordinates
    back.  Responds with per-table update counts.
    """
    if not self.is_authenticated():
        self.send_json({"error": "Unauthorized"}, 401)
        return
    def lookup_place_id(query):
        """Look up place_id using Google Places API.

        Returns (place_id, secondary-text address), or (None, None) when
        the API key/query is missing, the API errors, or no suggestion
        is returned.
        """
        if not GOOGLE_API_KEY or not query:
            return None, None
        try:
            url = "https://places.googleapis.com/v1/places:autocomplete"
            headers = {
                "Content-Type": "application/json",
                "X-Goog-Api-Key": GOOGLE_API_KEY
            }
            payload = json.dumps({"input": query, "languageCode": "en"}).encode()
            req = urllib.request.Request(url, data=payload, headers=headers, method="POST")
            # NOTE(review): TLS certificate verification is disabled below
            # (check_hostname=False, CERT_NONE), which exposes the Google
            # API call to man-in-the-middle — presumably a workaround for a
            # broken CA bundle in the container; confirm and prefer a
            # verified default context.
            ssl_context = ssl.create_default_context()
            ssl_context.check_hostname = False
            ssl_context.verify_mode = ssl.CERT_NONE
            with urllib.request.urlopen(req, context=ssl_context, timeout=10) as response:
                data = json.loads(response.read().decode())
                if data.get("suggestions"):
                    # Take the top autocomplete suggestion as the match.
                    first = data["suggestions"][0].get("placePrediction", {})
                    place_id = first.get("placeId")
                    address = first.get("structuredFormat", {}).get("secondaryText", {}).get("text", "")
                    return place_id, address
        except Exception as e:
            print(f"Place lookup error for '{query}': {e}")
        return None, None
    conn = get_db()
    cursor = conn.cursor()
    updated = {"locations": 0, "lodging": 0, "transportations": 0}
    # Backfill locations (named rows missing place_id or coordinates)
    cursor.execute("SELECT id, name FROM locations WHERE (place_id IS NULL OR place_id = '' OR latitude IS NULL) AND name != ''")
    for row in cursor.fetchall():
        place_id, _ = lookup_place_id(row[1])
        if place_id:
            details = get_place_details(place_id)
            lat = details.get("latitude")
            lng = details.get("longitude")
            address = details.get("address", "")
            if lat and lng:
                cursor.execute("UPDATE locations SET place_id = ?, address = ?, latitude = ?, longitude = ? WHERE id = ?",
                               (place_id, address, lat, lng, row[0]))
            else:
                cursor.execute("UPDATE locations SET place_id = ?, address = ? WHERE id = ?", (place_id, address, row[0]))
            updated["locations"] += 1
            print(f" Location: {row[1]} -> {place_id} ({lat}, {lng})")
    # Backfill lodging
    cursor.execute("SELECT id, name, location FROM lodging WHERE (place_id IS NULL OR place_id = '' OR latitude IS NULL)")
    for row in cursor.fetchall():
        # Query on "<name> <location>" to disambiguate chain hotels.
        query = f"{row[1] or ''} {row[2] or ''}".strip()
        if query:
            place_id, _ = lookup_place_id(query)
            if place_id:
                details = get_place_details(place_id)
                lat = details.get("latitude")
                lng = details.get("longitude")
                if lat and lng:
                    cursor.execute("UPDATE lodging SET place_id = ?, latitude = ?, longitude = ? WHERE id = ?",
                                   (place_id, lat, lng, row[0]))
                else:
                    cursor.execute("UPDATE lodging SET place_id = ? WHERE id = ?", (place_id, row[0]))
                updated["lodging"] += 1
                print(f" Lodging: {query} -> {place_id} ({lat}, {lng})")
    # Backfill transportation: both endpoints, with airport-aware query
    # formatting for flights (is_plane).
    cursor.execute("SELECT id, type, from_location, to_location, from_place_id, to_place_id FROM transportations")
    for row in cursor.fetchall():
        trans_id, trans_type, from_loc, to_loc, from_pid, to_pid = row
        updated_trans = False
        if from_loc and not from_pid:
            query = format_location_for_geocoding(from_loc, is_plane=(trans_type == "plane"))
            place_id, _ = lookup_place_id(query)
            if place_id:
                cursor.execute("UPDATE transportations SET from_place_id = ? WHERE id = ?", (place_id, trans_id))
                updated_trans = True
                print(f" Transport from: {from_loc} -> {place_id}")
        if to_loc and not to_pid:
            query = format_location_for_geocoding(to_loc, is_plane=(trans_type == "plane"))
            place_id, _ = lookup_place_id(query)
            if place_id:
                cursor.execute("UPDATE transportations SET to_place_id = ? WHERE id = ?", (place_id, trans_id))
                updated_trans = True
                print(f" Transport to: {to_loc} -> {place_id}")
        if updated_trans:
            updated["transportations"] += 1
    conn.commit()
    conn.close()
    self.send_json({"success": True, "updated": updated})
def handle_ai_guide(self, body):
    """Generate AI tour guide suggestions using Gemini or OpenAI.

    Builds a plain-text summary of the whole trip (transport, lodging,
    activities, notes), sends it to the selected provider with a
    curator-style prompt, persists the result on the trips row
    (ai_suggestions / ai_suggestions_openai), and returns it.
    """
    data = json.loads(body)
    trip_id = data.get("trip_id")
    provider = data.get("provider", "gemini")  # "gemini" or "openai"
    if not trip_id:
        self.send_json({"error": "trip_id required"}, 400)
        return
    if provider == "gemini" and not GEMINI_API_KEY:
        self.send_json({"error": "Gemini API key not configured"}, 500)
        return
    if provider == "openai" and not OPENAI_API_KEY:
        self.send_json({"error": "OpenAI API key not configured"}, 500)
        return
    # Get trip data
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM trips WHERE id = ?", (trip_id,))
    trip_row = cursor.fetchone()
    if not trip_row:
        conn.close()
        self.send_json({"error": "Trip not found"}, 404)
        return
    trip = dict(trip_row)
    # Get all trip items
    cursor.execute("SELECT * FROM transportations WHERE trip_id = ?", (trip_id,))
    transportations = [dict(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM lodging WHERE trip_id = ?", (trip_id,))
    lodging = [dict(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM locations WHERE trip_id = ?", (trip_id,))
    locations = [dict(row) for row in cursor.fetchall()]
    cursor.execute("SELECT * FROM notes WHERE trip_id = ?", (trip_id,))
    notes = [dict(row) for row in cursor.fetchall()]
    conn.close()
    # Build trip summary for AI (plain text, one bullet per item)
    trip_summary = f"""
TRIP: {trip.get('name', 'Unnamed Trip')}
DATES: {trip.get('start_date', 'Unknown')} to {trip.get('end_date', 'Unknown')}
DESCRIPTION: {trip.get('description', 'No description')}
TRANSPORTATION ({len(transportations)} items):
"""
    for t in transportations:
        trip_summary += f"- {t.get('type', 'transport')}: {t.get('name', '')} from {t.get('from_location', '?')} to {t.get('to_location', '?')} on {t.get('date', '?')}\n"
    trip_summary += f"\nLODGING ({len(lodging)} items):\n"
    for l in lodging:
        trip_summary += f"- {l.get('type', 'hotel')}: {l.get('name', '')} at {l.get('location', '?')}, check-in {l.get('check_in', '?')}, check-out {l.get('check_out', '?')}\n"
    trip_summary += f"\nPLANNED ACTIVITIES/LOCATIONS ({len(locations)} items):\n"
    for loc in locations:
        trip_summary += f"- {loc.get('category', 'activity')}: {loc.get('name', '')} on {loc.get('visit_date', loc.get('start_time', '?'))}\n"
    if notes:
        trip_summary += f"\nNOTES ({len(notes)} items):\n"
        for n in notes:
            # Truncate note bodies so long notes don't dominate the prompt.
            trip_summary += f"- {n.get('name', '')}: {n.get('content', '')[:100]}...\n"
    # Build the prompt (Gemini: free-form "trip curator" persona)
    prompt = f"""You are an elite AI travel agent with deep local knowledge of every destination.
You act as a trip curator who elevates journeys—not just plans them. You make confident recommendations, knowing when to suggest more and when to hold back.
Your goal is to make each trip as memorable as possible without overwhelming the traveler.
You think like a well-traveled friend who knows every city inside out and genuinely wants the traveler to have an incredible experience.
You analyze the full itinerary holistically, including dates, destinations, flights, hotels, transport, transit days, planned activities, pace, and open time.
You understand travel rhythm, including arrival fatigue, recovery needs, consecutive high-effort days, and natural rest opportunities.
For each destination, you:
• Identify experiences that would be genuinely regrettable to miss
• Enhance what's already planned with better timing, nearby additions, or meaningful upgrades
• Spot scheduling issues, inefficiencies, or unrealistic pacing
• Adapt recommendations to the trip type (religious, adventure, family, solo, relaxation)
• Consider seasonality, weather, and relevant local events
You identify gaps and missed opportunities, but only suggest changes when they clearly improve flow, cultural depth, or memorability.
You optimize for experience quality, proximity, and narrative flow rather than maximizing the number of activities.
---
Here is the traveler's itinerary:
{trip_summary}
---
Structure your response as follows:
## 1. Destination Intelligence
For each destination, provide curated insights including must-do experiences, hidden gems, food highlights, and local tips. Focus on what meaningfully elevates the experience.
## 2. Day-by-Day Enhancements
Review the itinerary day by day. Enhance existing plans, identify light or overloaded days, and suggest 1–2 optional improvements only when they clearly add value.
## 3. Trip-Wide Guidance
Share advice that applies across the entire trip, including pacing, packing, cultural etiquette, timing considerations, and one high-impact surprise recommendation.
Keep recommendations selective, actionable, and easy to apply. Prioritize clarity, flow, and memorability over completeness.
---
Follow these rules at all times:
• Never repeat what is already obvious from the itinerary unless adding clear, new value.
• Do not suggest activities that conflict with existing bookings, transport, or fixed commitments.
• If a day is already full or demanding, do not add more activities; focus on optimizations instead.
• Keep recommendations intentionally limited and high-quality.
• Prioritize experiences that are culturally meaningful, locally respected, or uniquely memorable.
• Be specific and actionable with real names, precise locations, and ideal timing.
• Avoid repeating similar recommendations across destinations or days.
• Allow space for rest, spontaneity, and unplanned discovery.
• If nothing would meaningfully improve the itinerary, clearly say so.
---
Act as if you have one opportunity to make this the most memorable trip of the traveler's life.
Be thoughtful, selective, and intentional rather than comprehensive.
Every recommendation should earn its place."""
    # OpenAI prompt (structured with emoji sections)
    openai_prompt = f"""You are an expert travel advisor and tour guide. Analyze this trip itinerary and provide helpful, actionable suggestions.
{trip_summary}
Based on this trip, please provide:
## 🎯 Missing Must-See Attractions
List 3-5 highly-rated attractions or experiences in the destinations that are NOT already in the itinerary. Include why each is worth visiting.
## 🍽️ Restaurant Recommendations
Suggest 2-3 well-reviewed local restaurants near their lodging locations. Include cuisine type and price range.
## ⚡ Schedule Optimization
Identify any gaps, inefficiencies, or timing issues in their itinerary. Suggest improvements.
## 💎 Hidden Gems
Share 2-3 lesser-known local experiences, viewpoints, or activities that tourists often miss.
## 💡 Practical Tips
Provide 3-5 specific, actionable tips for this trip (booking requirements, best times to visit, local customs, what to bring, etc.)
Format your response with clear headers and bullet points. Be specific with names, locations, and practical details. Use your knowledge of these destinations to give genuinely helpful advice."""
    # Call the appropriate AI provider
    if provider == "openai":
        messages = [
            {"role": "system", "content": "You are an expert travel advisor who provides specific, actionable travel recommendations."},
            {"role": "user", "content": openai_prompt}
        ]
        result = call_openai(messages, max_completion_tokens=4000)
    else:
        # Gemini with search grounding
        result = call_gemini(prompt, use_search_grounding=True)
    if isinstance(result, dict) and "error" in result:
        self.send_json(result, 500)
        return
    # Save suggestions to database (separate columns per provider)
    conn = get_db()
    cursor = conn.cursor()
    if provider == "openai":
        cursor.execute("UPDATE trips SET ai_suggestions_openai = ? WHERE id = ?", (result, trip_id))
    else:
        cursor.execute("UPDATE trips SET ai_suggestions = ? WHERE id = ?", (result, trip_id))
    conn.commit()
    conn.close()
    self.send_json({"suggestions": result})
# ==================== Quick Add Handlers ====================
def handle_quick_add(self, body):
    """Create a new quick add entry.

    Quick adds are lightweight on-the-go captures (a name and/or photo
    plus optional place metadata).  When attached_to_id/attached_to_type
    are given, the entry is linked to an existing itinerary item right
    away; otherwise it is stored as "pending" for later review.
    """
    data = json.loads(body)
    trip_id = data.get("trip_id")
    name = data.get("name", "").strip()
    category = data.get("category", "attraction")
    place_id = data.get("place_id")
    address = data.get("address")
    latitude = data.get("latitude")
    longitude = data.get("longitude")
    photo_data = data.get("photo")  # Base64 encoded (or "immich:"/"google:" path reference)
    note = data.get("note", "")
    captured_at = data.get("captured_at")  # ISO format from client
    attached_to_id = data.get("attached_to_id")
    attached_to_type = data.get("attached_to_type")
    if not trip_id:
        self.send_json({"error": "trip_id required"}, 400)
        return
    if not name and not photo_data:
        self.send_json({"error": "name or photo required"}, 400)
        return
    # Generate ID
    quick_add_id = str(uuid.uuid4())
    # Save photo if provided
    photo_path = None
    if photo_data:
        try:
            # Check if photo is already saved (from Immich or Google Photos)
            if photo_data.startswith("immich:") or photo_data.startswith("google:"):
                # Photo already saved, just use the path
                photo_path = photo_data.split(":", 1)[1]
            else:
                # Base64 encoded photo from camera
                # Remove data URL prefix if present
                if "," in photo_data:
                    photo_data = photo_data.split(",")[1]
                photo_bytes = base64.b64decode(photo_data)
                photo_id = str(uuid.uuid4())
                photo_path = f"{photo_id}.jpg"
                with open(IMAGES_DIR / photo_path, "wb") as f:
                    f.write(photo_bytes)
        except Exception as e:
            # Best-effort: a bad photo payload does not fail the quick add itself.
            print(f"Error saving photo: {e}")
    # Use current time if not provided
    if not captured_at:
        captured_at = datetime.now().isoformat()
    # Determine status
    status = "attached" if attached_to_id else "pending"
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute('''
        INSERT INTO quick_adds (id, trip_id, name, category, place_id, address,
                                latitude, longitude, photo_path, note, captured_at,
                                status, attached_to_id, attached_to_type)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ''', (quick_add_id, trip_id, name, category, place_id, address,
          latitude, longitude, photo_path, note, captured_at,
          status, attached_to_id, attached_to_type))
    # If attaching to existing item, add photo to images table
    if attached_to_id and attached_to_type and photo_path:
        image_id = str(uuid.uuid4())
        cursor.execute('''
            INSERT INTO images (id, entity_type, entity_id, file_path, is_primary)
            VALUES (?, ?, ?, ?, ?)
        ''', (image_id, attached_to_type, attached_to_id, photo_path, 0))
    conn.commit()
    conn.close()
    self.send_json({
        "success": True,
        "id": quick_add_id,
        "status": status,
        "photo_path": photo_path
    })
def handle_quick_add_approve(self, body):
    """Approve a quick add entry and add it to the itinerary.

    Creates a locations row from the quick add (deriving visit_date and a
    one-hour start/end window from captured_at), re-links any photo into
    the images table as the primary image, and marks the quick add as
    'approved'.  Responds with the new location id.
    """
    data = json.loads(body)
    quick_add_id = data.get("id")
    if not quick_add_id:
        self.send_json({"error": "id required"}, 400)
        return
    conn = get_db()
    cursor = conn.cursor()
    # Get the quick add entry
    cursor.execute("SELECT * FROM quick_adds WHERE id = ?", (quick_add_id,))
    row = cursor.fetchone()
    if not row:
        conn.close()
        self.send_json({"error": "Quick add not found"}, 404)
        return
    qa = dict(row)
    # Create a location entry from the quick add
    location_id = str(uuid.uuid4())
    # Parse captured_at for date and time
    visit_date = None
    start_time = None
    end_time = None
    if qa['captured_at']:
        try:
            # Format: 2025-12-25T13:43:24.198Z or 2025-12-25T13:43:24 —
            # strip the Z suffix and fractional seconds before parsing.
            captured = qa['captured_at'].replace('Z', '').split('.')[0]
            visit_date = captured[:10]  # YYYY-MM-DD
            start_time = captured  # Full datetime for start
            # End time = start + 1 hour (datetime/timedelta are imported
            # at module level; no local re-import needed).
            dt = datetime.fromisoformat(captured)
            end_dt = dt + timedelta(hours=1)
            end_time = end_dt.strftime('%Y-%m-%dT%H:%M:%S')
        except (ValueError, TypeError):
            # Narrowed from a bare except: only parse failures fall back
            # to using the raw date prefix.
            visit_date = qa['captured_at'][:10] if len(qa['captured_at']) >= 10 else None
    cursor.execute('''
        INSERT INTO locations (id, trip_id, name, category, description,
                               latitude, longitude, visit_date, start_time, end_time)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ''', (location_id, qa['trip_id'], qa['name'], qa['category'],
          qa['note'] or '', qa['latitude'], qa['longitude'],
          visit_date, start_time, end_time))
    # If there's a photo, add it to the images table as the primary image
    if qa['photo_path']:
        image_id = str(uuid.uuid4())
        cursor.execute('''
            INSERT INTO images (id, entity_type, entity_id, file_path, is_primary)
            VALUES (?, ?, ?, ?, ?)
        ''', (image_id, 'location', location_id, qa['photo_path'], 1))
    # Update quick add status
    cursor.execute("UPDATE quick_adds SET status = 'approved' WHERE id = ?", (quick_add_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "location_id": location_id})
def handle_quick_add_delete(self, body):
    """Delete a quick add entry, removing its photo file if one exists."""
    payload = json.loads(body)
    qa_id = payload.get("id")
    if not qa_id:
        self.send_json({"error": "id required"}, 400)
        return
    conn = get_db()
    cur = conn.cursor()
    # Look up the photo path first so the file on disk can be removed too.
    cur.execute("SELECT photo_path FROM quick_adds WHERE id = ?", (qa_id,))
    found = cur.fetchone()
    if found and found['photo_path']:
        photo = IMAGES_DIR / found['photo_path']
        if photo.exists():
            photo.unlink()
    cur.execute("DELETE FROM quick_adds WHERE id = ?", (qa_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_quick_add_attach(self, body):
    """Attach a quick add photo to an existing itinerary item."""
    payload = json.loads(body)
    qa_id = payload.get("id")
    target_id = payload.get("target_id")
    target_type = payload.get("target_type")  # transportation, lodging, location
    if not (qa_id and target_id and target_type):
        self.send_json({"error": "id, target_id, and target_type required"}, 400)
        return
    conn = get_db()
    cur = conn.cursor()
    # Fetch the quick add being attached.
    cur.execute("SELECT * FROM quick_adds WHERE id = ?", (qa_id,))
    found = cur.fetchone()
    if not found:
        conn.close()
        self.send_json({"error": "Quick add not found"}, 404)
        return
    qa = dict(found)
    # Whitelist of attachable tables; target_type must be one of these keys.
    valid_tables = {
        "transportation": "transportations",
        "lodging": "lodging",
        "location": "locations",
    }
    table_name = valid_tables.get(target_type)
    if table_name is None:
        conn.close()
        self.send_json({"error": "Invalid target_type"}, 400)
        return
    if qa['photo_path']:
        # f-string is safe here: table_name comes from the whitelist above.
        cur.execute(f"UPDATE {table_name} SET image_path = ? WHERE id = ?",
                    (qa['photo_path'], target_id))
    # Record where the quick add ended up.
    cur.execute('''
        UPDATE quick_adds
        SET status = 'attached', attached_to_id = ?, attached_to_type = ?
        WHERE id = ?
    ''', (target_id, target_type, qa_id))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_places_autocomplete(self, body):
    """Get Google Places autocomplete suggestions using Places API (New).

    Request body: {"query": str, "latitude": optional, "longitude": optional}.
    Responds with {"predictions": [...]}, each carrying place_id, name,
    address, description and types.
    """
    data = json.loads(body)
    query = data.get("query", "").strip()
    latitude = data.get("latitude")
    longitude = data.get("longitude")
    # Require at least 2 characters before calling the paid API.
    if not query or len(query) < 2:
        self.send_json({"predictions": []})
        return
    if not GOOGLE_API_KEY:
        self.send_json({"error": "Google API key not configured"}, 500)
        return
    # Build request body for Places API (New)
    request_body = {
        "input": query
    }
    # Add location bias if provided
    if latitude and longitude:
        request_body["locationBias"] = {
            "circle": {
                "center": {
                    "latitude": float(latitude),
                    "longitude": float(longitude)
                },
                "radius": 10000.0  # 10km radius
            }
        }
    url = "https://places.googleapis.com/v1/places:autocomplete"
    try:
        # NOTE(review): TLS certificate verification is disabled here (and
        # in the other outbound calls in this file) — confirm intentional.
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        req_data = json.dumps(request_body).encode('utf-8')
        req = urllib.request.Request(url, data=req_data, method='POST')
        req.add_header('Content-Type', 'application/json')
        req.add_header('X-Goog-Api-Key', GOOGLE_API_KEY)
        with urllib.request.urlopen(req, context=ssl_context, timeout=10) as response:
            result = json.loads(response.read().decode())
        predictions = []
        for suggestion in result.get("suggestions", []):
            place_pred = suggestion.get("placePrediction", {})
            if place_pred:
                # Extract place ID from the place resource name ("places/<id>")
                place_name = place_pred.get("place", "")
                place_id = place_name.replace("places/", "") if place_name else ""
                main_text = place_pred.get("structuredFormat", {}).get("mainText", {}).get("text", "")
                secondary_text = place_pred.get("structuredFormat", {}).get("secondaryText", {}).get("text", "")
                predictions.append({
                    "place_id": place_id,
                    "name": main_text,
                    "address": secondary_text,
                    "description": place_pred.get("text", {}).get("text", ""),
                    "types": place_pred.get("types", [])
                })
        self.send_json({"predictions": predictions})
    except Exception as e:
        print(f"Places autocomplete error: {e}")
        self.send_json({"error": str(e)}, 500)
# ==================== Immich Integration ====================
def handle_immich_photos(self, body):
    """Get photos from Immich server, optionally filtered by date range.

    Request body (all optional): start_date / end_date (YYYY-MM-DD),
    page (default 1), per_page (default 50). Responds with
    {"photos": [...], "total": ..., "page": ...}.
    """
    if not IMMICH_URL or not IMMICH_API_KEY:
        self.send_json({"error": "Immich not configured", "photos": []})
        return
    data = json.loads(body) if body else {}
    start_date = data.get("start_date")  # Optional: filter by trip dates
    end_date = data.get("end_date")
    page = data.get("page", 1)
    per_page = data.get("per_page", 50)
    try:
        # NOTE(review): TLS verification disabled — presumably to allow a
        # self-signed Immich certificate; confirm.
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        # Use search/metadata endpoint for date filtering
        search_url = f"{IMMICH_URL}/api/search/metadata"
        search_body = {
            "type": "IMAGE",
            "size": per_page,
            "page": page,
            "order": "desc"
        }
        # Date filters are expanded to inclusive full-day bounds.
        if start_date:
            search_body["takenAfter"] = f"{start_date}T00:00:00.000Z"
        if end_date:
            search_body["takenBefore"] = f"{end_date}T23:59:59.999Z"
        req_data = json.dumps(search_body).encode('utf-8')
        req = urllib.request.Request(search_url, data=req_data, method='POST')
        req.add_header('Content-Type', 'application/json')
        req.add_header('x-api-key', IMMICH_API_KEY)
        with urllib.request.urlopen(req, context=ssl_context, timeout=30) as response:
            result = json.loads(response.read().decode())
        photos = []
        assets = result.get("assets", {}).get("items", [])
        for asset in assets:
            photos.append({
                "id": asset.get("id"),
                # NOTE(review): these URLs point directly at Immich, unlike
                # handle_immich_album_photos which proxies — verify the
                # client can reach/authenticate them.
                "thumbnail_url": f"{IMMICH_URL}/api/assets/{asset.get('id')}/thumbnail",
                "original_url": f"{IMMICH_URL}/api/assets/{asset.get('id')}/original",
                "filename": asset.get("originalFileName", ""),
                "date": asset.get("fileCreatedAt", ""),
                "type": asset.get("type", "IMAGE")
            })
        self.send_json({
            "photos": photos,
            "total": result.get("assets", {}).get("total", len(photos)),
            "page": page
        })
    except Exception as e:
        print(f"Immich error: {e}")
        self.send_json({"error": str(e), "photos": []})
def handle_immich_download(self, body):
    """Download a photo from Immich and save it locally.

    When entity_type/entity_id are supplied, an images row is also
    recorded linking the saved file to that entity.
    """
    if not IMMICH_URL or not IMMICH_API_KEY:
        self.send_json({"error": "Immich not configured"}, 500)
        return
    payload = json.loads(body)
    asset_id = payload.get("asset_id")
    if not asset_id:
        self.send_json({"error": "Missing asset_id"}, 400)
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        # Fetch the original-resolution asset from Immich.
        request = urllib.request.Request(f"{IMMICH_URL}/api/assets/{asset_id}/original")
        request.add_header('x-api-key', IMMICH_API_KEY)
        with urllib.request.urlopen(request, context=ctx, timeout=60) as resp:
            image_data = resp.read()
            content_type = resp.headers.get('Content-Type', 'image/jpeg')
        # Pick a file extension from the content type, defaulting to .jpg.
        ext = '.jpg'
        for marker, candidate in (('png', '.png'), ('webp', '.webp'), ('heic', '.heic')):
            if marker in content_type:
                ext = candidate
                break
        # Save under a random name in the images directory.
        filename = f"{uuid.uuid4()}{ext}"
        (IMAGES_DIR / filename).write_bytes(image_data)
        # Optionally link the saved file to an entity in the DB.
        entity_type = payload.get("entity_type")
        entity_id = payload.get("entity_id")
        image_id = None
        if entity_type and entity_id:
            image_id = str(uuid.uuid4())
            conn = get_db()
            cur = conn.cursor()
            cur.execute('''
                INSERT INTO images (id, entity_type, entity_id, file_path, is_primary)
                VALUES (?, ?, ?, ?, ?)
            ''', (image_id, entity_type, entity_id, filename, 0))
            conn.commit()
            conn.close()
        self.send_json({
            "success": True,
            "file_path": filename,
            "image_id": image_id,
            "size": len(image_data)
        })
    except Exception as e:
        print(f"Immich download error: {e}")
        self.send_json({"error": str(e)}, 500)
def handle_immich_thumbnail(self, asset_id):
    """Proxy thumbnail requests to Immich with authentication."""
    if not IMMICH_URL or not IMMICH_API_KEY:
        self.send_error(404)
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(f"{IMMICH_URL}/api/assets/{asset_id}/thumbnail")
        request.add_header('x-api-key', IMMICH_API_KEY)
        with urllib.request.urlopen(request, context=ctx, timeout=10) as resp:
            thumb_bytes = resp.read()
            content_type = resp.headers.get('Content-Type', 'image/jpeg')
        # Relay the image bytes with the upstream content type.
        self.send_response(200)
        self.send_header('Content-Type', content_type)
        self.send_header('Content-Length', len(thumb_bytes))
        self.send_header('Cache-Control', 'max-age=3600')  # Cache for 1 hour
        self.end_headers()
        self.wfile.write(thumb_bytes)
    except Exception as e:
        print(f"Immich thumbnail error: {e}")
        self.send_error(404)
def handle_immich_albums(self):
    """Get list of albums from Immich."""
    if not IMMICH_URL or not IMMICH_API_KEY:
        self.send_json({"error": "Immich not configured", "albums": []})
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(f"{IMMICH_URL}/api/albums")
        request.add_header('x-api-key', IMMICH_API_KEY)
        with urllib.request.urlopen(request, context=ctx, timeout=30) as resp:
            albums = json.loads(resp.read().decode())
        # Reduce each album to the fields the UI needs.
        simplified = [
            {
                "id": album.get("id"),
                "name": album.get("albumName", "Untitled"),
                "asset_count": album.get("assetCount", 0),
                "thumbnail_id": album.get("albumThumbnailAssetId"),
                "created_at": album.get("createdAt"),
                "updated_at": album.get("updatedAt"),
            }
            for album in albums
        ]
        # Present albums alphabetically, case-insensitive.
        simplified.sort(key=lambda entry: entry["name"].lower())
        self.send_json({"albums": simplified})
    except Exception as e:
        print(f"Immich albums error: {e}")
        self.send_json({"error": str(e), "albums": []})
def handle_immich_album_photos(self, body):
    """Get photos from a specific Immich album."""
    if not IMMICH_URL or not IMMICH_API_KEY:
        self.send_json({"error": "Immich not configured", "photos": []})
        return
    payload = json.loads(body) if body else {}
    album_id = payload.get("album_id")
    if not album_id:
        self.send_json({"error": "Missing album_id", "photos": []}, 400)
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(f"{IMMICH_URL}/api/albums/{album_id}")
        request.add_header('x-api-key', IMMICH_API_KEY)
        with urllib.request.urlopen(request, context=ctx, timeout=30) as resp:
            album = json.loads(resp.read().decode())
        # Keep only images; thumbnails go through our authenticated proxy.
        photos = [
            {
                "id": asset.get("id"),
                "thumbnail_url": f"/api/immich/thumb/{asset.get('id')}",
                "filename": asset.get("originalFileName", ""),
                "date": asset.get("fileCreatedAt", ""),
            }
            for asset in album.get("assets", [])
            if asset.get("type") == "IMAGE"
        ]
        self.send_json({
            "album_name": album.get("albumName", ""),
            "photos": photos,
            "total": len(photos)
        })
    except Exception as e:
        print(f"Immich album photos error: {e}")
        self.send_json({"error": str(e), "photos": []})
# ==================== Google Photos Integration ====================
def handle_google_photos_picker_config(self):
    """Return config for the Google Photos Picker frontend."""
    if not GOOGLE_CLIENT_ID:
        self.send_json({"error": "Google Photos not configured"})
        return
    # The numeric project id is the hyphen-separated prefix of the client id.
    project_number = GOOGLE_CLIENT_ID.split('-')[0]
    self.send_json({
        "client_id": GOOGLE_CLIENT_ID,
        "api_key": GOOGLE_API_KEY,
        "app_id": project_number
    })
def handle_google_photos_download(self, body):
    """Download a photo from Google Photos using the provided access token and URL.

    Saves the bytes under IMAGES_DIR with a random name and responds
    with the stored file name and size.
    """
    data = json.loads(body)
    photo_url = data.get("url")
    access_token = data.get("access_token")
    if not photo_url:
        self.send_json({"error": "Missing photo URL"}, 400)
        return
    try:
        # NOTE(review): photo_url is client-supplied and fetched server-side
        # (SSRF exposure), and TLS verification is disabled — confirm both
        # are acceptable for this self-hosted deployment.
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        # Download the photo
        req = urllib.request.Request(photo_url)
        if access_token:
            req.add_header('Authorization', f'Bearer {access_token}')
        with urllib.request.urlopen(req, context=ssl_context, timeout=60) as response:
            image_data = response.read()
            content_type = response.headers.get('Content-Type', 'image/jpeg')
        # Determine file extension from the content type (default .jpg)
        ext = '.jpg'
        if 'png' in content_type:
            ext = '.png'
        elif 'webp' in content_type:
            ext = '.webp'
        # Save to images directory under a random name
        filename = f"{uuid.uuid4()}{ext}"
        filepath = IMAGES_DIR / filename
        with open(filepath, 'wb') as f:
            f.write(image_data)
        self.send_json({
            "success": True,
            "file_path": filename,
            "size": len(image_data)
        })
    except Exception as e:
        print(f"Google Photos download error: {e}")
        self.send_json({"error": str(e)}, 500)
def handle_google_photos_create_session(self, body):
    """Create a Google Photos Picker session."""
    payload = json.loads(body)
    access_token = payload.get("access_token")
    if not access_token:
        self.send_json({"error": "Missing access token"}, 400)
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        # POST an empty JSON object to create the picker session.
        request = urllib.request.Request(
            "https://photospicker.googleapis.com/v1/sessions", method='POST')
        request.add_header('Authorization', f'Bearer {access_token}')
        request.add_header('Content-Type', 'application/json')
        request.data = b'{}'
        with urllib.request.urlopen(request, context=ctx, timeout=30) as resp:
            session = json.loads(resp.read().decode())
        self.send_json(session)
    except urllib.error.HTTPError as e:
        # Surface the upstream HTTP status to the client.
        error_body = e.read().decode() if e.fp else str(e)
        print(f"Google Photos create session error: {e.code} - {error_body}")
        self.send_json({"error": f"API error: {e.code}"}, e.code)
    except Exception as e:
        print(f"Google Photos create session error: {e}")
        self.send_json({"error": str(e)}, 500)
def handle_google_photos_check_session(self, body):
    """Check Google Photos Picker session status."""
    payload = json.loads(body)
    access_token = payload.get("access_token")
    session_id = payload.get("session_id")
    if not (access_token and session_id):
        self.send_json({"error": "Missing token or session_id"}, 400)
        return
    try:
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        request = urllib.request.Request(
            f"https://photospicker.googleapis.com/v1/sessions/{session_id}")
        request.add_header('Authorization', f'Bearer {access_token}')
        with urllib.request.urlopen(request, context=ctx, timeout=30) as resp:
            status = json.loads(resp.read().decode())
        # Relay the upstream session state verbatim.
        self.send_json(status)
    except Exception as e:
        print(f"Google Photos check session error: {e}")
        self.send_json({"error": str(e)}, 500)
def handle_google_photos_get_media_items(self, body):
    """Get selected media items from a Google Photos Picker session.

    Proxies the Picker API mediaItems listing (first 50 items) and
    returns the upstream JSON unchanged.
    """
    data = json.loads(body)
    access_token = data.get("access_token")
    session_id = data.get("session_id")
    if not access_token or not session_id:
        self.send_json({"error": "Missing token or session_id"}, 400)
        return
    try:
        ssl_context = ssl.create_default_context()
        ssl_context.check_hostname = False
        ssl_context.verify_mode = ssl.CERT_NONE
        # URL-encode the client-supplied session id so reserved characters
        # (&, =, /, ...) can't break or alter the query string.
        sid = urllib.parse.quote(session_id, safe='')
        url = f"https://photospicker.googleapis.com/v1/mediaItems?sessionId={sid}&pageSize=50"
        req = urllib.request.Request(url)
        req.add_header('Authorization', f'Bearer {access_token}')
        with urllib.request.urlopen(req, context=ssl_context, timeout=30) as response:
            result = json.loads(response.read().decode())
        self.send_json(result)
    except Exception as e:
        print(f"Google Photos get media items error: {e}")
        self.send_json({"error": str(e)}, 500)
def handle_get_active_trip(self):
    """Get the currently active trip based on today's date."""
    today = datetime.now().strftime("%Y-%m-%d")
    conn = get_db()
    cur = conn.cursor()
    # A trip is "active" when today falls inside its date range; when
    # ranges overlap, prefer the most recently started trip.
    cur.execute('''
        SELECT id, name, start_date, end_date, image_path
        FROM trips
        WHERE start_date <= ? AND end_date >= ?
        ORDER BY start_date DESC
        LIMIT 1
    ''', (today, today))
    match = cur.fetchone()
    conn.close()
    if match is None:
        self.send_json({"active": False, "trip": None})
    else:
        self.send_json({"active": True, "trip": dict(match)})
def handle_get_stats(self):
    """Get aggregate stats across all trips.

    Computes totals (trips, unique cities/countries, points redeemed,
    activities) plus breakdowns for the stats UI: trips by year, cities
    grouped by country, points by year, and points by category.
    City/country names are heuristically extracted from free-form
    Google Places addresses, so all cleanup below is best-effort.

    Fix vs. previous version: removed an unused `import math`.
    """
    def parse_city_country(address):
        """Extract (city, country) from a Google Places formatted address.

        Format: 'Street, City, State ZIP, Country' or 'City, Country' etc.
        Returns (None, None) when nothing usable can be extracted.
        """
        if not address:
            return None, None
        parts = [p.strip() for p in address.split(',')]
        # Filter out parts that look like streets, zip codes, P.O. boxes
        parts = [p for p in parts if p and not re.match(r'^\d', p)
                 and not p.upper().startswith('P.O.')
                 and len(p) > 2]
        if not parts:
            return None, None
        country = parts[-1] if len(parts) >= 2 else None
        # City is typically second-to-last, or the part before "State ZIP"
        city = None
        if len(parts) >= 3:
            candidate = parts[-2]
            # If it looks like "CO 80904" or "TX" (a state), go one more back
            if re.match(r'^[A-Z]{2}\s*\d', candidate) or (len(candidate) == 2 and candidate.isupper()):
                city = parts[-3] if len(parts) >= 3 else None
            else:
                city = candidate
        elif len(parts) == 2:
            city = parts[0]
        return city, country
    conn = get_db()
    cursor = conn.cursor()
    # Total trips
    cursor.execute("SELECT COUNT(*) FROM trips")
    total_trips = cursor.fetchone()[0]
    # Collect cities and countries from all addresses
    cities = set()
    countries = set()
    # From locations
    cursor.execute("SELECT address FROM locations WHERE address IS NOT NULL AND address != ''")
    for row in cursor.fetchall():
        city, country = parse_city_country(row[0])
        if city:
            cities.add(city)
        if country:
            countries.add(country)
    # From lodging
    cursor.execute("SELECT location FROM lodging WHERE location IS NOT NULL AND location != ''")
    for row in cursor.fetchall():
        city, country = parse_city_country(row[0])
        if city:
            cities.add(city)
        if country:
            countries.add(country)
    # From transportation (departure/arrival cities)
    cursor.execute("SELECT from_location, to_location FROM transportations WHERE from_location != '' OR to_location != ''")
    for row in cursor.fetchall():
        for loc in row:
            if loc:
                # Transport locations are often "City (CODE)" — strip the
                # parenthesized code and skip bare 3-letter IATA codes.
                clean = re.sub(r'\s*\([^)]*\)\s*', '', loc).strip()
                if clean and len(clean) > 2 and not re.match(r'^[A-Z]{3}$', clean):
                    cities.add(clean)
    # Clean up cities: remove airport names, country suffixes, postal codes
    city_blacklist_patterns = [r'airport', r'international', r'\b[A-Z]{3}\b airport']
    cleaned_cities = set()
    for c in cities:
        if any(re.search(p, c, re.IGNORECASE) for p in city_blacklist_patterns):
            continue
        # Remove country suffix like "Muscat, Oman" -> "Muscat"
        if ',' in c:
            c = c.split(',')[0].strip()
        # Skip postal codes like "Madinah 41419"
        c = re.sub(r'\s+\d{4,}$', '', c).strip()
        if c and len(c) > 2:
            cleaned_cities.add(c)
    cities = cleaned_cities
    # Normalize: merge "USA"/"United States", etc.
    country_aliases = {
        'USA': 'United States', 'US': 'United States', 'U.S.A.': 'United States',
        'UK': 'United Kingdom', 'UAE': 'United Arab Emirates',
    }
    normalized_countries = set()
    for c in countries:
        # Skip postal codes or numeric-heavy entries
        if re.match(r'^[A-Z]{2}\s+\d', c) or re.match(r'^\d', c):
            continue
        normalized_countries.add(country_aliases.get(c, c))
    # Total points/miles redeemed across all bookings
    total_points = 0
    for table in ['transportations', 'lodging', 'locations']:
        cursor.execute(f"SELECT COALESCE(SUM(cost_points), 0) FROM {table}")
        total_points += cursor.fetchone()[0]
    # Total activities
    cursor.execute("SELECT COUNT(*) FROM locations")
    total_activities = cursor.fetchone()[0]
    # --- Breakdown data for stat detail modals ---
    # Trips by year (keyed by the first 4 chars of start_date)
    trips_by_year = {}
    cursor.execute("SELECT id, name, start_date, end_date FROM trips ORDER BY start_date DESC")
    for row in cursor.fetchall():
        r = dict(row)
        year = r['start_date'][:4] if r.get('start_date') else 'Unknown'
        trips_by_year.setdefault(year, []).append({
            "id": r['id'], "name": r['name'],
            "start_date": r.get('start_date', ''),
            "end_date": r.get('end_date', '')
        })
    # Cities by country - re-parse addresses to build the mapping,
    # applying the same cleanup rules as the flat city/country sets above.
    cities_by_country = {}
    # From locations
    cursor.execute("SELECT l.address, t.id as trip_id, t.name as trip_name FROM locations l JOIN trips t ON l.trip_id = t.id WHERE l.address IS NOT NULL AND l.address != ''")
    for row in cursor.fetchall():
        city, country = parse_city_country(row[0])
        if city and country:
            if any(re.search(p, city, re.IGNORECASE) for p in city_blacklist_patterns):
                continue
            if ',' in city:
                city = city.split(',')[0].strip()
            city = re.sub(r'\s+\d{4,}$', '', city).strip()
            if not city or len(city) <= 2:
                continue
            country = country_aliases.get(country, country)
            if re.match(r'^[A-Z]{2}\s+\d', country) or re.match(r'^\d', country):
                continue
            cities_by_country.setdefault(country, set()).add(city)
    # From lodging
    cursor.execute("SELECT l.location, t.id as trip_id, t.name as trip_name FROM lodging l JOIN trips t ON l.trip_id = t.id WHERE l.location IS NOT NULL AND l.location != ''")
    for row in cursor.fetchall():
        city, country = parse_city_country(row[0])
        if city and country:
            if any(re.search(p, city, re.IGNORECASE) for p in city_blacklist_patterns):
                continue
            if ',' in city:
                city = city.split(',')[0].strip()
            city = re.sub(r'\s+\d{4,}$', '', city).strip()
            if not city or len(city) <= 2:
                continue
            country = country_aliases.get(country, country)
            if re.match(r'^[A-Z]{2}\s+\d', country) or re.match(r'^\d', country):
                continue
            cities_by_country.setdefault(country, set()).add(city)
    # Convert sets to sorted lists
    cities_by_country = {k: sorted(v) for k, v in sorted(cities_by_country.items())}
    # Points by year (based on trip start_date)
    points_by_year = {}
    for table in ['transportations', 'lodging', 'locations']:
        cursor.execute(f"""
            SELECT substr(t.start_date, 1, 4) as year, COALESCE(SUM(b.cost_points), 0) as pts
            FROM {table} b JOIN trips t ON b.trip_id = t.id
            WHERE t.start_date IS NOT NULL
            GROUP BY year
        """)
        for row in cursor.fetchall():
            yr = row[0] or 'Unknown'
            points_by_year[yr] = points_by_year.get(yr, 0) + row[1]
    # Points by category
    points_by_category = {}
    cursor.execute("SELECT COALESCE(SUM(cost_points), 0) FROM transportations")
    points_by_category['flights'] = cursor.fetchone()[0]
    cursor.execute("SELECT COALESCE(SUM(cost_points), 0) FROM lodging")
    points_by_category['hotels'] = cursor.fetchone()[0]
    cursor.execute("SELECT COALESCE(SUM(cost_points), 0) FROM locations")
    points_by_category['activities'] = cursor.fetchone()[0]
    conn.close()
    self.send_json({
        "total_trips": total_trips,
        "cities_visited": len(cities),
        "countries_visited": len(normalized_countries),
        "total_points_redeemed": round(total_points),
        "total_activities": total_activities,
        "cities": sorted(cities),
        "countries": sorted(normalized_countries),
        "trips_by_year": trips_by_year,
        "cities_by_country": cities_by_country,
        "points_by_year": {k: round(v) for k, v in sorted(points_by_year.items(), reverse=True)},
        "points_by_category": {k: round(v) for k, v in points_by_category.items()}
    })
def handle_search(self, query):
    """Search across all trips, locations, lodging, transportations, and notes."""
    if not query or len(query) < 2:
        self.send_json({"results": []})
        return
    conn = get_db()
    cur = conn.cursor()
    pattern = f"%{query}%"
    hits = []
    # Trips: match on name or description.
    cur.execute("SELECT id, name, start_date, end_date FROM trips WHERE name LIKE ? OR description LIKE ?", (pattern, pattern))
    for row in cur.fetchall():
        rec = dict(row)
        hits.append({
            "type": "trip", "id": rec["id"], "trip_id": rec["id"],
            "name": rec["name"],
            "detail": f'{rec.get("start_date", "")} - {rec.get("end_date", "")}',
            "trip_name": rec["name"],
        })
    # Locations: match on name, address or description.
    cur.execute("""SELECT l.id, l.name, l.address, l.category, l.trip_id, t.name as trip_name
        FROM locations l JOIN trips t ON l.trip_id = t.id
        WHERE l.name LIKE ? OR l.address LIKE ? OR l.description LIKE ?""",
        (pattern, pattern, pattern))
    for row in cur.fetchall():
        rec = dict(row)
        hits.append({
            "type": "location", "id": rec["id"], "trip_id": rec["trip_id"],
            "name": rec["name"],
            "detail": rec.get("address") or rec.get("category") or "",
            "trip_name": rec["trip_name"],
        })
    # Lodging: also matches reservation numbers.
    cur.execute("""SELECT l.id, l.name, l.location, l.trip_id, t.name as trip_name
        FROM lodging l JOIN trips t ON l.trip_id = t.id
        WHERE l.name LIKE ? OR l.location LIKE ? OR l.reservation_number LIKE ? OR l.description LIKE ?""",
        (pattern, pattern, pattern, pattern))
    for row in cur.fetchall():
        rec = dict(row)
        hits.append({
            "type": "lodging", "id": rec["id"], "trip_id": rec["trip_id"],
            "name": rec["name"], "detail": rec.get("location", ""),
            "trip_name": rec["trip_name"],
        })
    # Transportations: also matches flight numbers and endpoints.
    cur.execute("""SELECT tr.id, tr.name, tr.flight_number, tr.from_location, tr.to_location, tr.trip_id, t.name as trip_name
        FROM transportations tr JOIN trips t ON tr.trip_id = t.id
        WHERE tr.name LIKE ? OR tr.flight_number LIKE ? OR tr.from_location LIKE ? OR tr.to_location LIKE ? OR tr.description LIKE ?""",
        (pattern, pattern, pattern, pattern, pattern))
    for row in cur.fetchall():
        rec = dict(row)
        hits.append({
            "type": "transportation", "id": rec["id"], "trip_id": rec["trip_id"],
            "name": rec["name"] or rec.get("flight_number") or "",
            "detail": f'{rec.get("from_location", "")} → {rec.get("to_location", "")}',
            "trip_name": rec["trip_name"],
        })
    # Notes: match on title or content.
    cur.execute("""SELECT n.id, n.name, n.content, n.trip_id, t.name as trip_name
        FROM notes n JOIN trips t ON n.trip_id = t.id
        WHERE n.name LIKE ? OR n.content LIKE ?""", (pattern, pattern))
    for row in cur.fetchall():
        rec = dict(row)
        hits.append({
            "type": "note", "id": rec["id"], "trip_id": rec["trip_id"],
            "name": rec["name"], "detail": "", "trip_name": rec["trip_name"],
        })
    conn.close()
    self.send_json({"results": hits, "count": len(hits)})
def handle_get_quick_adds(self, trip_id=None):
    """Get pending quick adds, optionally filtered by trip."""
    conn = get_db()
    cur = conn.cursor()
    if trip_id:
        # Scoped to one trip — no join needed for the trip name.
        cur.execute('''
            SELECT * FROM quick_adds
            WHERE trip_id = ? AND status = 'pending'
            ORDER BY captured_at DESC
        ''', (trip_id,))
    else:
        # Global view: include each entry's trip name via a join.
        cur.execute('''
            SELECT qa.*, t.name as trip_name FROM quick_adds qa
            JOIN trips t ON qa.trip_id = t.id
            WHERE qa.status = 'pending'
            ORDER BY qa.captured_at DESC
        ''')
    pending = [dict(entry) for entry in cur.fetchall()]
    conn.close()
    self.send_json({"quick_adds": pending, "count": len(pending)})
def handle_places_details(self, place_id):
    """Get place details including lat/lng and types using Places API (New)."""
    if not place_id:
        self.send_json({"error": "place_id required"}, 400)
        return
    if not GOOGLE_API_KEY:
        self.send_json({"error": "Google API key not configured"}, 500)
        return
    details = get_place_details(place_id)
    if not details:
        self.send_json({"error": "Could not fetch place details"}, 500)
        return
    types = details.get("types", [])
    primary = details.get("primary_type", "")
    # Include the primary type (when present) so it can drive category detection.
    candidate_types = types + [primary] if primary else types
    category = self.detect_category_from_types(candidate_types)
    self.send_json({
        "name": details.get("name", ""),
        "address": details.get("address", ""),
        "latitude": details.get("latitude"),
        "longitude": details.get("longitude"),
        "types": types,
        "category": category
    })
def detect_category_from_types(self, types):
    """Map Google Places types to one of the app's location categories.

    Returns the category for the first recognized type in *types*,
    falling back to "attraction" when nothing matches.
    """
    type_mapping = {
        # Food & Drink
        "restaurant": "restaurant",
        "cafe": "cafe",
        "bar": "bar",
        "bakery": "restaurant",
        "food": "restaurant",
        "meal_delivery": "restaurant",
        "meal_takeaway": "restaurant",
        # Attractions
        "tourist_attraction": "attraction",
        "museum": "museum",
        "art_gallery": "museum",
        "amusement_park": "attraction",
        "aquarium": "attraction",
        "zoo": "attraction",
        "stadium": "attraction",
        # Nature
        "park": "park",
        "natural_feature": "park",
        "campground": "park",
        # Shopping
        "shopping_mall": "shopping",
        "store": "shopping",
        "supermarket": "shopping",
        # Lodging
        "lodging": "lodging",
        "hotel": "lodging",
        # Transportation
        "airport": "airport",
        "train_station": "train_station",
        "transit_station": "transit",
        "bus_station": "transit",
        "car_rental": "car_rental",
        "gas_station": "gas_station",
    }
    # First recognized type wins; default to "attraction".
    return next((type_mapping[t] for t in types if t in type_mapping), "attraction")
def handle_get_pending_imports(self):
    """Get all pending imports."""
    conn = get_db()
    cur = conn.cursor()
    cur.execute('''
        SELECT id, entry_type, parsed_data, source, email_subject, email_from, status, created_at
        FROM pending_imports
        WHERE status = 'pending'
        ORDER BY created_at DESC
    ''')
    rows = cur.fetchall()
    conn.close()
    # parsed_data is stored as a JSON string; decode it for the client.
    imports = [
        {
            "id": rec[0],
            "entry_type": rec[1],
            "parsed_data": json.loads(rec[2]),
            "source": rec[3],
            "email_subject": rec[4],
            "email_from": rec[5],
            "status": rec[6],
            "created_at": rec[7],
        }
        for rec in rows
    ]
    self.send_json({"imports": imports, "count": len(imports)})
def handle_approve_import(self, body):
    """Approve a pending import and add it to a trip.

    Request body: {"import_id": ..., "trip_id": ...}. Depending on the
    import's entry_type ("flight", "hotel", or "note") a row is inserted
    into transportations, lodging, or notes respectively, then the
    pending import is marked 'approved'.
    """
    data = json.loads(body)
    import_id = data.get("import_id")
    trip_id = data.get("trip_id")
    if not import_id or not trip_id:
        self.send_json({"error": "import_id and trip_id required"}, 400)
        return
    # Get the pending import
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT entry_type, parsed_data FROM pending_imports WHERE id = ?", (import_id,))
    row = cursor.fetchone()
    if not row:
        conn.close()
        self.send_json({"error": "Import not found"}, 404)
        return
    entry_type = row[0]
    parsed_raw = json.loads(row[1])
    # The parsed data has a nested "data" field with the actual values;
    # fall back to the top-level dict for older/flat payloads.
    parsed_data = parsed_raw.get("data", parsed_raw)
    # Create the entry based on type
    entry_id = str(uuid.uuid4())
    if entry_type == "flight":
        # Flights become transportations of type "plane".
        cursor.execute('''
            INSERT INTO transportations (id, trip_id, name, type, flight_number, from_location, to_location, date, end_date, timezone, description)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ''', (entry_id, trip_id, parsed_data.get("name", ""), "plane",
              parsed_data.get("flight_number", ""), parsed_data.get("from_location", ""),
              parsed_data.get("to_location", ""), parsed_data.get("date", ""),
              parsed_data.get("end_date", ""), parsed_data.get("timezone", ""),
              parsed_data.get("description", "")))
    elif entry_type == "hotel":
        # Hotels become lodging rows (type defaults to "hotel").
        cursor.execute('''
            INSERT INTO lodging (id, trip_id, name, type, location, check_in, check_out, timezone, reservation_number, description)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
        ''', (entry_id, trip_id, parsed_data.get("name", ""), parsed_data.get("type", "hotel"),
              parsed_data.get("location", ""), parsed_data.get("check_in", ""),
              parsed_data.get("check_out", ""), parsed_data.get("timezone", ""),
              parsed_data.get("reservation_number", ""), parsed_data.get("description", "")))
    elif entry_type == "note":
        cursor.execute('''
            INSERT INTO notes (id, trip_id, name, content, date)
            VALUES (?, ?, ?, ?, ?)
        ''', (entry_id, trip_id, parsed_data.get("name", "Imported Note"),
              parsed_data.get("content", ""), parsed_data.get("date", "")))
    else:
        # Unrecognized entry types are rejected without modifying anything.
        conn.close()
        self.send_json({"error": f"Unknown entry type: {entry_type}"}, 400)
        return
    # Mark import as approved
    cursor.execute("UPDATE pending_imports SET status = 'approved' WHERE id = ?", (import_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True, "entry_id": entry_id, "entry_type": entry_type})
def handle_delete_import(self, body):
    """Delete a pending import."""
    payload = json.loads(body)
    import_id = payload.get("import_id")
    if not import_id:
        self.send_json({"error": "import_id required"}, 400)
        return
    conn = get_db()
    # Deleting a nonexistent id is a no-op; still report success.
    conn.cursor().execute("DELETE FROM pending_imports WHERE id = ?", (import_id,))
    conn.commit()
    conn.close()
    self.send_json({"success": True})
def handle_share_api(self, share_token):
    """Return trip data as JSON for a public share token.

    Looks up the trip by its share token (404 JSON error if absent), then
    attaches its transportations, lodging, notes, and locations, each entity's
    images, and a ``hero_images`` list (trip-level images first, then every
    other image, deduplicated).
    """
    conn = get_db()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM trips WHERE share_token = ?", (share_token,))
    trip = dict_from_row(cursor.fetchone())
    if not trip:
        conn.close()
        self.send_json({"error": "Trip not found"}, 404)
        return
    trip_id = trip["id"]
    # Child collections, each ordered by its natural date column.
    child_queries = (
        ("transportations", "SELECT * FROM transportations WHERE trip_id = ? ORDER BY date"),
        ("lodging", "SELECT * FROM lodging WHERE trip_id = ? ORDER BY check_in"),
        ("notes", "SELECT * FROM notes WHERE trip_id = ? ORDER BY date"),
        ("locations", "SELECT * FROM locations WHERE trip_id = ? ORDER BY visit_date"),
    )
    for key, sql in child_queries:
        cursor.execute(sql, (trip_id,))
        trip[key] = [dict_from_row(row) for row in cursor.fetchall()]
    # One query for all images belonging to the trip or any child entity.
    entity_ids = [trip_id] + [e["id"] for key, _ in child_queries for e in trip[key]]
    placeholders = ','.join('?' * len(entity_ids))
    cursor.execute(f"SELECT * FROM images WHERE entity_id IN ({placeholders}) ORDER BY is_primary DESC, created_at", entity_ids)
    all_images = [dict_from_row(row) for row in cursor.fetchall()]
    images_by_entity = {}
    for img in all_images:
        img["url"] = f'/images/{img["file_path"]}'
        images_by_entity.setdefault(img["entity_id"], []).append(img)
    trip["images"] = images_by_entity.get(trip_id, [])
    # Attach images to every child entity. Fix: notes' images were fetched by
    # the query above but never attached to the note dicts.
    for key, _ in child_queries:
        for entity in trip[key]:
            entity["images"] = images_by_entity.get(entity["id"], [])
    # Hero images: trip-level images first, then all remaining images.
    # Deduplicate by image id (primary key) in O(n) instead of the previous
    # O(n^2) dict-equality membership scan.
    hero_images = list(images_by_entity.get(trip_id, []))
    seen_ids = {img["id"] for img in hero_images}
    for img in all_images:
        if img["id"] not in seen_ids:
            seen_ids.add(img["id"])
            hero_images.append(img)
    trip["hero_images"] = hero_images
    conn.close()
    self.send_json(trip)
# handle_share_view removed — use /api/share/trip/{token} or SvelteKit /view/{token}
# HTML rendering removed — frontend is now SvelteKit (frontend/ directory)
def main():
    """Main entry point: init the DB, start background jobs, serve forever.

    Fix: the original never closed the listening socket and let Ctrl-C
    escape as a KeyboardInterrupt traceback; shutdown is now graceful and
    ``server_close()`` always runs.
    """
    init_db()
    # Start background weather prefetch
    start_weather_prefetch_thread()
    server = HTTPServer(("0.0.0.0", PORT), TripHandler)
    print(f"Trips server running on port {PORT}")
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        # Graceful Ctrl-C / SIGINT shutdown instead of a traceback.
        print("Shutting down")
    finally:
        # Release the listening socket even if serve_forever raises.
        server.server_close()
if __name__ == "__main__":
    main()