Refactor: Replace Immich integration with local file storage and admin auth

Co-authored-by: aider (gemini/gemini-2.5-pro) <aider@aider.chat>
This commit is contained in:
2025-11-22 20:15:12 -07:00
parent 2275774a46
commit 78452b93ef
2 changed files with 211 additions and 385 deletions

View File

@@ -19,8 +19,6 @@ import sqlite3
from datetime import datetime
from typing import Dict, List, Optional
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder, MultipartEncoderMonitor
import logging
from fastapi import FastAPI, UploadFile, WebSocket, WebSocketDisconnect, Request, Form
from fastapi.responses import HTMLResponse, JSONResponse, FileResponse, RedirectResponse, Response
@@ -66,13 +64,6 @@ try:
except Exception:
pass
# Album cache: memoized Immich album id, looked up lazily on first use.
ALBUM_ID: Optional[str] = None


def reset_album_cache() -> None:
    """Drop the memoized album id so the next access re-resolves it from Immich."""
    global ALBUM_ID
    ALBUM_ID = None
# ---------- DB (local dedupe cache) ----------
@@ -399,13 +390,7 @@ async def favicon() -> Response:
@app.post("/api/ping")
async def api_ping() -> dict:
    """Connectivity test endpoint used by the UI to display a temporary banner.

    Returns:
        dict: always ``ok=True`` with ``base_url`` set to the fixed string
        "Local Save Mode" and ``album_name`` set to None — after the removal
    of the remote Immich backend there is no server to ping, so the check
        unconditionally succeeds.
    """
    # NOTE: the old Immich branch (immich_ping()/normalized_base_url) was left
    # interleaved here by the refactor; only the local-save response survives.
    return { "ok": True, "base_url": "Local Save Mode", "album_name": None }
@app.get("/api/config")
async def api_config() -> dict:
@@ -426,10 +411,6 @@ async def ws_endpoint(ws: WebSocket) -> None:
session_id = data.get("session_id") or "default"
except Exception:
session_id = "default"
# If this is the first socket for a (possibly new) session, reset album cache
# so a freshly opened page can rotate the drop album by renaming the old one.
if session_id not in hub.sessions:
reset_album_cache()
await hub.connect(session_id, ws)
# keepalive to avoid proxy idle timeouts
@@ -477,54 +458,8 @@ async def api_upload(
await send_progress(session_id, item_id, "duplicate", 100, "Already uploaded from this device (local cache)")
return JSONResponse({"status": "duplicate", "id": None}, status_code=200)
if SETTINGS.local_save_only:
try:
save_dir = "./data/uploads"
os.makedirs(save_dir, exist_ok=True)
safe_name = sanitize_filename(file.filename)
save_path = os.path.join(save_dir, safe_name)
# Avoid overwriting when filenames collide (not same as duplicate)
if os.path.exists(save_path):
base, ext = os.path.splitext(safe_name)
i = 1
while os.path.exists(save_path):
save_path = os.path.join(save_dir, f"{base}_{i}{ext}")
i += 1
with open(save_path, "wb") as f:
f.write(raw)
db_insert_upload(checksum, file.filename, size, device_asset_id, None, created_iso)
await send_progress(session_id, item_id, "done", 100, f"Saved locally to {os.path.basename(save_path)}")
return JSONResponse({"status": "done", "id": None}, status_code=200)
except Exception as e:
logger.exception("Local save failed: %s", e)
await send_progress(session_id, item_id, "error", 100, "Failed to save file locally")
return JSONResponse({"error": "local_save_failed"}, status_code=500)
await send_progress(session_id, item_id, "checking", 2, "Checking duplicates…")
bulk = immich_bulk_check([{"id": item_id, "checksum": checksum}])
if bulk.get(item_id, {}).get("action") == "reject" and bulk[item_id].get("reason") == "duplicate":
asset_id = bulk[item_id].get("assetId")
db_insert_upload(checksum, file.filename, size, device_asset_id, asset_id, created_iso)
await send_progress(session_id, item_id, "duplicate", 100, "Duplicate (server)", asset_id)
return JSONResponse({"status": "duplicate", "id": asset_id}, status_code=200)
safe_name = sanitize_filename(file.filename)
def gen_encoder() -> MultipartEncoder:
return MultipartEncoder(fields={
"assetData": (safe_name, io.BytesIO(raw), file.content_type or "application/octet-stream"),
"deviceAssetId": device_asset_id,
"deviceId": f"python-{session_id}",
"fileCreatedAt": created_iso,
"fileModifiedAt": modified_iso,
"isFavorite": "false",
"filename": safe_name,
"originalFileName": safe_name,
})
encoder = gen_encoder()
# Invite token validation (if provided)
target_album_id: Optional[str] = None
target_album_name: Optional[str] = None
if invite_token:
try:
@@ -612,111 +547,91 @@ async def api_upload(
if (used_count or 0) >= (max_uses_int if max_uses_int >= 0 else 10**9):
await send_progress(session_id, item_id, "error", 100, "Invite already used up")
return JSONResponse({"error": "invite_exhausted"}, status_code=403)
target_album_id = album_id
target_album_name = album_name
async def do_upload():
await send_progress(session_id, item_id, "uploading", 0, "Uploading…")
sent = {"pct": 0}
def cb(monitor: MultipartEncoderMonitor) -> None:
if monitor.len:
pct = int(monitor.bytes_read * 100 / monitor.len)
if pct != sent["pct"]:
sent["pct"] = pct
asyncio.create_task(send_progress(session_id, item_id, "uploading", pct))
monitor = MultipartEncoderMonitor(encoder, cb)
headers = {"Accept": "application/json", "Content-Type": monitor.content_type, "x-immich-checksum": checksum, **immich_headers(request)}
album_for_saving = target_album_name if invite_token else "public"
if not invite_token and not SETTINGS.public_upload_page_enabled:
await send_progress(session_id, item_id, "error", 100, "Public uploads disabled")
return JSONResponse({"error": "public_upload_disabled"}, status_code=403)
try:
save_dir = get_or_create_album_dir(album_for_saving)
safe_name = sanitize_filename(file.filename)
save_path = os.path.join(save_dir, safe_name)
# Avoid overwriting
if os.path.exists(save_path):
base, ext = os.path.splitext(safe_name)
i = 1
while os.path.exists(save_path):
save_path = os.path.join(save_dir, f"{base}_{i}{ext}")
i += 1
with open(save_path, "wb") as f:
f.write(raw)
db_insert_upload(checksum, file.filename, size, device_asset_id, None, created_iso)
msg = f"Saved to {album_for_saving}/{os.path.basename(save_path)}"
await send_progress(session_id, item_id, "done", 100, msg)
# Increment invite usage on success
if invite_token:
try:
conn2 = sqlite3.connect(SETTINGS.state_db)
cur2 = conn2.cursor()
# Keep one-time used_count at 1; multi-use increments per asset
cur2.execute("SELECT max_uses FROM invites WHERE token = ?", (invite_token,))
row_mu = cur2.fetchone()
mx = None
try:
mx = int(row_mu[0]) if row_mu and row_mu[0] is not None else None
except Exception:
mx = None
if mx == 1:
cur2.execute("UPDATE invites SET used_count = 1 WHERE token = ?", (invite_token,))
else:
cur2.execute("UPDATE invites SET used_count = used_count + 1 WHERE token = ?", (invite_token,))
conn2.commit()
conn2.close()
except Exception as e:
logger.exception("Failed to increment invite usage: %s", e)
# Log uploader identity and file metadata
try:
r = requests.post(f"{SETTINGS.normalized_base_url}/assets", headers=headers, data=monitor, timeout=120)
if r.status_code in (200, 201):
data = r.json()
asset_id = data.get("id")
db_insert_upload(checksum, file.filename, size, device_asset_id, asset_id, created_iso)
status = data.get("status", "created")
# Add to album if configured (invite overrides .env)
if asset_id:
added = False
if invite_token:
# Only add if invite specified an album; do not fallback to env default
if target_album_id or target_album_name:
added = add_asset_to_album(asset_id, request=request, album_id_override=target_album_id, album_name_override=target_album_name)
if added:
status += f" (added to album '{target_album_name or target_album_id}')"
elif SETTINGS.album_name:
if add_asset_to_album(asset_id, request=request):
status += f" (added to album '{SETTINGS.album_name}')"
connlg = sqlite3.connect(SETTINGS.state_db)
curlg = connlg.cursor()
curlg.execute(
"""
CREATE TABLE IF NOT EXISTS upload_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
token TEXT,
uploaded_at TEXT DEFAULT CURRENT_TIMESTAMP,
ip TEXT,
user_agent TEXT,
fingerprint TEXT,
filename TEXT,
size INTEGER,
checksum TEXT,
immich_asset_id TEXT
);
"""
)
ip = None
try:
ip = (request.client.host if request and request.client else None) or request.headers.get('x-forwarded-for')
except Exception:
ip = None
ua = request.headers.get('user-agent', '') if request else ''
curlg.execute(
"INSERT INTO upload_events (token, ip, user_agent, fingerprint, filename, size, checksum, immich_asset_id) VALUES (?,?,?,?,?,?,?,?)",
(invite_token or '', ip, ua, fingerprint or '', file.filename, size, checksum, None)
)
connlg.commit()
connlg.close()
except Exception:
pass
return JSONResponse({"id": None, "status": "done"}, status_code=200)
await send_progress(session_id, item_id, "duplicate" if status == "duplicate" else "done", 100, status, asset_id)
# Increment invite usage on success
if invite_token:
try:
conn2 = sqlite3.connect(SETTINGS.state_db)
cur2 = conn2.cursor()
# Keep one-time used_count at 1; multi-use increments per asset
cur2.execute("SELECT max_uses FROM invites WHERE token = ?", (invite_token,))
row_mu = cur2.fetchone()
mx = None
try:
mx = int(row_mu[0]) if row_mu and row_mu[0] is not None else None
except Exception:
mx = None
if mx == 1:
cur2.execute("UPDATE invites SET used_count = 1 WHERE token = ?", (invite_token,))
else:
cur2.execute("UPDATE invites SET used_count = used_count + 1 WHERE token = ?", (invite_token,))
conn2.commit()
conn2.close()
except Exception as e:
logger.exception("Failed to increment invite usage: %s", e)
# Log uploader identity and file metadata
try:
connlg = sqlite3.connect(SETTINGS.state_db)
curlg = connlg.cursor()
curlg.execute(
"""
CREATE TABLE IF NOT EXISTS upload_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
token TEXT,
uploaded_at TEXT DEFAULT CURRENT_TIMESTAMP,
ip TEXT,
user_agent TEXT,
fingerprint TEXT,
filename TEXT,
size INTEGER,
checksum TEXT,
immich_asset_id TEXT
);
"""
)
ip = None
try:
ip = (request.client.host if request and request.client else None) or request.headers.get('x-forwarded-for')
except Exception:
ip = None
ua = request.headers.get('user-agent', '') if request else ''
curlg.execute(
"INSERT INTO upload_events (token, ip, user_agent, fingerprint, filename, size, checksum, immich_asset_id) VALUES (?,?,?,?,?,?,?,?)",
(invite_token or '', ip, ua, fingerprint or '', file.filename, size, checksum, asset_id or None)
)
connlg.commit()
connlg.close()
except Exception:
pass
return JSONResponse({"id": asset_id, "status": status}, status_code=200)
else:
try:
msg = r.json().get("message", r.text)
except Exception:
msg = r.text
await send_progress(session_id, item_id, "error", 100, msg)
return JSONResponse({"error": msg}, status_code=400)
except Exception as e:
await send_progress(session_id, item_id, "error", 100, str(e))
return JSONResponse({"error": str(e)}, status_code=500)
return await do_upload()
except Exception as e:
logger.exception("Local save failed: %s", e)
await send_progress(session_id, item_id, "error", 100, "Failed to save file locally")
return JSONResponse({"error": "local_save_failed"}, status_code=500)
# --------- Chunked upload endpoints ---------
@@ -880,52 +795,8 @@ async def api_upload_chunk_complete(request: Request) -> JSONResponse:
await send_progress(session_id_local, item_id_local, "duplicate", 100, "Already uploaded from this device (local cache)")
return JSONResponse({"status": "duplicate", "id": None}, status_code=200)
if SETTINGS.local_save_only:
try:
save_dir = "./data/uploads"
os.makedirs(save_dir, exist_ok=True)
safe_name = sanitize_filename(file_like_name)
save_path = os.path.join(save_dir, safe_name)
# Avoid overwriting when filenames collide (not same as duplicate)
if os.path.exists(save_path):
base, ext = os.path.splitext(safe_name)
i = 1
while os.path.exists(save_path):
save_path = os.path.join(save_dir, f"{base}_{i}{ext}")
i += 1
with open(save_path, "wb") as f:
f.write(raw)
db_insert_upload(checksum, file_like_name, file_size, device_asset_id, None, created_iso)
await send_progress(session_id_local, item_id_local, "done", 100, f"Saved locally to {os.path.basename(save_path)}")
return JSONResponse({"status": "done", "id": None}, status_code=200)
except Exception as e:
logger.exception("Local save failed: %s", e)
await send_progress(session_id_local, item_id_local, "error", 100, "Failed to save file locally")
return JSONResponse({"error": "local_save_failed"}, status_code=500)
await send_progress(session_id_local, item_id_local, "checking", 2, "Checking duplicates…")
bulk = immich_bulk_check([{ "id": item_id_local, "checksum": checksum }])
if bulk.get(item_id_local, {}).get("action") == "reject" and bulk[item_id_local].get("reason") == "duplicate":
asset_id = bulk[item_id_local].get("assetId")
db_insert_upload(checksum, file_like_name, file_size, device_asset_id, asset_id, created_iso)
await send_progress(session_id_local, item_id_local, "duplicate", 100, "Duplicate (server)", asset_id)
return JSONResponse({"status": "duplicate", "id": asset_id}, status_code=200)
safe_name2 = sanitize_filename(file_like_name)
def gen_encoder2() -> MultipartEncoder:
return MultipartEncoder(fields={
"assetData": (safe_name2, io.BytesIO(raw), content_type or "application/octet-stream"),
"deviceAssetId": device_asset_id,
"deviceId": f"python-{session_id_local}",
"fileCreatedAt": created_iso,
"fileModifiedAt": modified_iso,
"isFavorite": "false",
"filename": safe_name2,
"originalFileName": safe_name2,
})
# Invite validation/gating mirrors api_upload
target_album_id: Optional[str] = None
target_album_name: Optional[str] = None
if invite_token:
try:
@@ -1005,115 +876,94 @@ async def api_upload_chunk_complete(request: Request) -> JSONResponse:
if (used_count or 0) >= (max_uses_int if max_uses_int >= 0 else 10**9):
await send_progress(session_id_local, item_id_local, "error", 100, "Invite already used up")
return JSONResponse({"error": "invite_exhausted"}, status_code=403)
target_album_id = album_id
target_album_name = album_name
await send_progress(session_id_local, item_id_local, "uploading", 0, "Uploading…")
sent = {"pct": 0}
def cb2(monitor: MultipartEncoderMonitor) -> None:
if monitor.len:
pct = int(monitor.bytes_read * 100 / monitor.len)
if pct != sent["pct"]:
sent["pct"] = pct
asyncio.create_task(send_progress(session_id_local, item_id_local, "uploading", pct))
encoder2 = gen_encoder2()
monitor2 = MultipartEncoderMonitor(encoder2, cb2)
headers = {"Accept": "application/json", "Content-Type": monitor2.content_type, "x-immich-checksum": checksum, **immich_headers(request)}
try:
r = requests.post(f"{SETTINGS.normalized_base_url}/assets", headers=headers, data=monitor2, timeout=120)
if r.status_code in (200, 201):
data_r = r.json()
asset_id = data_r.get("id")
db_insert_upload(checksum, file_like_name, file_size, device_asset_id, asset_id, created_iso)
status = data_r.get("status", "created")
if asset_id:
added = False
if invite_token:
# Only add if invite specified an album; do not fallback to env default
if target_album_id or target_album_name:
added = add_asset_to_album(asset_id, request=request, album_id_override=target_album_id, album_name_override=target_album_name)
if added:
status += f" (added to album '{target_album_name or target_album_id}')"
elif SETTINGS.album_name:
if add_asset_to_album(asset_id, request=request):
status += f" (added to album '{SETTINGS.album_name}')"
await send_progress(session_id_local, item_id_local, "duplicate" if status == "duplicate" else "done", 100, status, asset_id)
if invite_token:
try:
conn2 = sqlite3.connect(SETTINGS.state_db)
cur2 = conn2.cursor()
cur2.execute("SELECT max_uses FROM invites WHERE token = ?", (invite_token,))
row_mu = cur2.fetchone()
mx = None
try:
mx = int(row_mu[0]) if row_mu and row_mu[0] is not None else None
except Exception:
mx = None
if mx == 1:
cur2.execute("UPDATE invites SET used_count = 1 WHERE token = ?", (invite_token,))
else:
cur2.execute("UPDATE invites SET used_count = used_count + 1 WHERE token = ?", (invite_token,))
conn2.commit()
conn2.close()
except Exception as e:
logger.exception("Failed to increment invite usage: %s", e)
# Log uploader identity and file metadata
try:
connlg = sqlite3.connect(SETTINGS.state_db)
curlg = connlg.cursor()
curlg.execute(
"""
CREATE TABLE IF NOT EXISTS upload_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
token TEXT,
uploaded_at TEXT DEFAULT CURRENT_TIMESTAMP,
ip TEXT,
user_agent TEXT,
fingerprint TEXT,
filename TEXT,
size INTEGER,
checksum TEXT,
immich_asset_id TEXT
);
"""
)
ip = None
try:
ip = (request.client.host if request and request.client else None) or request.headers.get('x-forwarded-for')
except Exception:
ip = None
ua = request.headers.get('user-agent', '') if request else ''
curlg.execute(
"INSERT INTO upload_events (token, ip, user_agent, fingerprint, filename, size, checksum, immich_asset_id) VALUES (?,?,?,?,?,?,?,?)",
(invite_token or '', ip, ua, fingerprint or '', file_like_name, file_size, checksum, asset_id or None)
)
connlg.commit()
connlg.close()
except Exception:
pass
return JSONResponse({"id": asset_id, "status": status}, status_code=200)
else:
try:
msg = r.json().get("message", r.text)
except Exception:
msg = r.text
await send_progress(session_id_local, item_id_local, "error", 100, msg)
return JSONResponse({"error": msg}, status_code=400)
except Exception as e:
await send_progress(session_id_local, item_id_local, "error", 100, str(e))
return JSONResponse({"error": str(e)}, status_code=500)
album_for_saving = target_album_name if invite_token else "public"
if not invite_token and not SETTINGS.public_upload_page_enabled:
await send_progress(session_id_local, item_id_local, "error", 100, "Public uploads disabled")
return JSONResponse({"error": "public_upload_disabled"}, status_code=403)
try:
save_dir = get_or_create_album_dir(album_for_saving)
safe_name = sanitize_filename(file_like_name)
save_path = os.path.join(save_dir, safe_name)
if os.path.exists(save_path):
base, ext = os.path.splitext(safe_name)
i = 1
while os.path.exists(save_path):
save_path = os.path.join(save_dir, f"{base}_{i}{ext}")
i += 1
with open(save_path, "wb") as f:
f.write(raw)
db_insert_upload(checksum, file_like_name, file_size, device_asset_id, None, created_iso)
msg = f"Saved to {album_for_saving}/{os.path.basename(save_path)}"
await send_progress(session_id_local, item_id_local, "done", 100, msg)
if invite_token:
try:
conn2 = sqlite3.connect(SETTINGS.state_db)
cur2 = conn2.cursor()
cur2.execute("SELECT max_uses FROM invites WHERE token = ?", (invite_token,))
row_mu = cur2.fetchone()
mx = None
try:
mx = int(row_mu[0]) if row_mu and row_mu[0] is not None else None
except Exception:
mx = None
if mx == 1:
cur2.execute("UPDATE invites SET used_count = 1 WHERE token = ?", (invite_token,))
else:
cur2.execute("UPDATE invites SET used_count = used_count + 1 WHERE token = ?", (invite_token,))
conn2.commit()
conn2.close()
except Exception as e:
logger.exception("Failed to increment invite usage: %s", e)
# Log uploader identity and file metadata
try:
connlg = sqlite3.connect(SETTINGS.state_db)
curlg = connlg.cursor()
curlg.execute(
"""
CREATE TABLE IF NOT EXISTS upload_events (
id INTEGER PRIMARY KEY AUTOINCREMENT,
token TEXT,
uploaded_at TEXT DEFAULT CURRENT_TIMESTAMP,
ip TEXT,
user_agent TEXT,
fingerprint TEXT,
filename TEXT,
size INTEGER,
checksum TEXT,
immich_asset_id TEXT
);
"""
)
ip = None
try:
ip = (request.client.host if request and request.client else None) or request.headers.get('x-forwarded-for')
except Exception:
ip = None
ua = request.headers.get('user-agent', '') if request else ''
curlg.execute(
"INSERT INTO upload_events (token, ip, user_agent, fingerprint, filename, size, checksum, immich_asset_id) VALUES (?,?,?,?,?,?,?,?)",
(invite_token or '', ip, ua, fingerprint or '', file_like_name, file_size, checksum, None)
)
connlg.commit()
connlg.close()
except Exception:
pass
return JSONResponse({"id": None, "status": "done"}, status_code=200)
except Exception as e:
await send_progress(session_id_local, item_id_local, "error", 100, "Failed to save file locally")
return JSONResponse({"error": "local_save_failed"}, status_code=500)
@app.post("/api/album/reset")
async def api_album_reset() -> dict:
    """UI-triggered endpoint that invalidates the cached album id."""
    # Delegate to the shared cache-reset helper, then acknowledge.
    reset_album_cache()
    return dict(ok=True)
# ---------- Auth & Albums & Invites APIs ----------
@app.post("/api/login")
async def api_login(request: Request) -> JSONResponse:
"""Authenticate against Immich using email/password; store token in session."""
"""Authenticate against the local admin password."""
try:
body = await request.json()
except Exception:
@@ -1122,29 +972,21 @@ async def api_login(request: Request) -> JSONResponse:
password = (body or {}).get("password")
if not email or not password:
return JSONResponse({"error": "missing_credentials"}, status_code=400)
try:
r = requests.post(f"{SETTINGS.normalized_base_url}/auth/login", headers={"Content-Type": "application/json", "Accept": "application/json"}, json={"email": email, "password": password}, timeout=15)
except Exception as e:
logger.exception("Login request failed: %s", e)
return JSONResponse({"error": "login_failed"}, status_code=502)
if r.status_code not in (200, 201):
logger.warning("Auth rejected: %s - %s", r.status_code, r.text)
return JSONResponse({"error": "unauthorized"}, status_code=401)
data = r.json() if r.content else {}
token = data.get("accessToken")
if not token:
logger.warning("Auth response missing accessToken")
return JSONResponse({"error": "invalid_response"}, status_code=502)
# Store only token and basic info in cookie session
request.session.update({
"accessToken": token,
"userEmail": data.get("userEmail"),
"userId": data.get("userId"),
"name": data.get("name"),
"isAdmin": data.get("isAdmin", False),
})
logger.info("User %s logged in", data.get("userEmail"))
return JSONResponse({"ok": True, **{k: data.get(k) for k in ("userEmail","userId","name","isAdmin")}})
if email == "admin" and password == SETTINGS.admin_password:
user_info = {
"accessToken": "local_admin_session", # dummy value
"userEmail": "admin",
"userId": "admin",
"name": "Admin",
"isAdmin": True,
}
request.session.update(user_info)
logger.info("Admin user logged in")
return JSONResponse({"ok": True, **{k: user_info.get(k) for k in ("userEmail","userId","name","isAdmin")}})
logger.warning("Failed login attempt for user %s", email)
return JSONResponse({"error": "unauthorized"}, status_code=401)
@app.post("/api/logout")
async def api_logout(request: Request) -> dict:
@@ -1158,21 +1000,28 @@ async def logout_get(request: Request) -> RedirectResponse:
@app.get("/api/albums")
async def api_albums(request: Request) -> JSONResponse:
    """Return list of albums (directories) if authorized.

    After the Immich removal an "album" is simply a subdirectory of the
    upload root; each is reported as ``{"id": name, "albumName": name}``.

    Returns:
        JSONResponse: the album list on success, ``401`` when the session
        carries no accessToken, or ``500`` if the directory scan fails.
    """
    if not request.session.get("accessToken"):
        return JSONResponse({"error": "unauthorized"}, status_code=401)
    upload_root = "/data/uploads"
    try:
        os.makedirs(upload_root, exist_ok=True)
        # also make public dir so the default album always appears
        os.makedirs(os.path.join(upload_root, "public"), exist_ok=True)
        albums = []
        for name in os.listdir(upload_root):
            if os.path.isdir(os.path.join(upload_root, name)):
                albums.append({"id": name, "albumName": name})
        return JSONResponse(albums)
    except Exception as e:
        logger.exception("Failed to list album directories: %s", e)
        return JSONResponse({"error": "list_albums_failed"}, status_code=500)
@app.post("/api/albums")
async def api_albums_create(request: Request) -> JSONResponse:
if not request.session.get("accessToken"):
return JSONResponse({"error": "unauthorized"}, status_code=401)
try:
body = await request.json()
except Exception:
@@ -1181,16 +1030,11 @@ async def api_albums_create(request: Request) -> JSONResponse:
if not name:
return JSONResponse({"error": "missing_name"}, status_code=400)
try:
r = requests.post(f"{SETTINGS.normalized_base_url}/albums", headers={**immich_headers(request), "Content-Type": "application/json"}, json={"albumName": name}, timeout=10)
get_or_create_album_dir(name)
return JSONResponse({"id": name, "albumName": name}, status_code=201)
except Exception as e:
logger.exception("Create album failed: %s", e)
return JSONResponse({"error": "request_failed"}, status_code=502)
if r.status_code in (200, 201):
return JSONResponse(r.json(), status_code=201)
if r.status_code in (401, 403):
logger.warning("Create album forbidden: %s - %s", r.status_code, r.text)
return JSONResponse({"error": "forbidden"}, status_code=403)
return JSONResponse({"error": "unexpected_status", "status": r.status_code, "body": r.text}, status_code=502)
logger.exception("Create album directory failed: %s", e)
return JSONResponse({"error": "create_album_failed"}, status_code=500)
# ---------- Invites (one-time/expiring links) ----------
@@ -1277,15 +1121,13 @@ async def api_invites_create(request: Request) -> JSONResponse:
max_uses = int(max_uses)
except Exception:
max_uses = 1
# Allow blank album for invites (no album association)
if not album_name and SETTINGS.album_name and not album_id and album_name is not None:
album_name = SETTINGS.album_name
# If only album_name provided, resolve or create now to fix to an ID
resolved_album_id = None
if not album_id and album_name:
resolved_album_id = get_or_create_album(request=request, album_name_override=album_name)
else:
resolved_album_id = album_id
# Allow blank album for invites (will default to public)
if not album_name:
album_name = "public"
# Ensure album directory exists
get_or_create_album_dir(album_name)
resolved_album_id = None # not used
# Compute expiry
expires_at = None
if expires_days is not None:
@@ -1328,12 +1170,12 @@ async def api_invites_create(request: Request) -> JSONResponse:
if pw_hash:
cur.execute(
"INSERT INTO invites (token, album_id, album_name, max_uses, expires_at, password_hash, owner_user_id, owner_email, owner_name, name) VALUES (?,?,?,?,?,?,?,?,?,?)",
(token, resolved_album_id, album_name, max_uses, expires_at, pw_hash, owner_user_id, owner_email, owner_name, default_link_name)
(token, None, album_name, max_uses, expires_at, pw_hash, owner_user_id, owner_email, owner_name, default_link_name)
)
else:
cur.execute(
"INSERT INTO invites (token, album_id, album_name, max_uses, expires_at, owner_user_id, owner_email, owner_name, name) VALUES (?,?,?,?,?,?,?,?,?)",
(token, resolved_album_id, album_name, max_uses, expires_at, owner_user_id, owner_email, owner_name, default_link_name)
(token, None, album_name, max_uses, expires_at, owner_user_id, owner_email, owner_name, default_link_name)
)
conn.commit()
conn.close()
@@ -1351,7 +1193,7 @@ async def api_invites_create(request: Request) -> JSONResponse:
"token": token,
"url": f"/invite/{token}",
"absoluteUrl": absolute,
"albumId": resolved_album_id,
"albumId": None,
"albumName": album_name,
"maxUses": max_uses,
"expiresAt": expires_at,

View File

@@ -13,10 +13,8 @@ from dotenv import load_dotenv
@dataclass
class Settings:
"""App settings loaded from environment variables (.env)."""
immich_base_url: str
immich_api_key: str
admin_password: str
max_concurrent: int
album_name: str = ""
public_upload_page_enabled: bool = False
public_base_url: str = ""
state_db: str = ""
@@ -25,16 +23,6 @@ class Settings:
chunked_uploads_enabled: bool = False
chunk_size_mb: int = 95
@property
def normalized_base_url(self) -> str:
"""Return the base URL without a trailing slash for clean joining and display."""
return self.immich_base_url.rstrip("/")
@property
def local_save_only(self) -> bool:
"""True if configured to save locally instead of uploading to Immich."""
return str(self.immich_base_url).lower() == "false"
def load_settings() -> Settings:
"""Load settings from .env, applying defaults when absent."""
# Load environment variables from .env once here so importers dont have to
@@ -42,9 +30,7 @@ def load_settings() -> Settings:
load_dotenv()
except Exception:
pass
base = os.getenv("IMMICH_BASE_URL", "http://127.0.0.1:2283/api")
api_key = os.getenv("IMMICH_API_KEY", "")
album_name = os.getenv("IMMICH_ALBUM_NAME", "")
admin_password = os.getenv("ADMIN_PASSWORD", "admin") # Default for convenience, should be changed
# Safe defaults: disable public uploader and invites unless explicitly enabled
def as_bool(v: str, default: bool = False) -> bool:
if v is None:
@@ -64,10 +50,8 @@ def load_settings() -> Settings:
except ValueError:
chunk_size_mb = 95
return Settings(
immich_base_url=base,
immich_api_key=api_key,
admin_password=admin_password,
max_concurrent=maxc,
album_name=album_name,
public_upload_page_enabled=public_upload,
public_base_url=os.getenv("PUBLIC_BASE_URL", ""),
state_db=state_db,