Added cleanup of the page cache used for streaming
This commit is contained in:
147
app/main.py
147
app/main.py
@@ -38,6 +38,16 @@ app_logger.handlers.clear()
|
||||
app_logger.addHandler(_handler)
|
||||
app_logger.propagate = False
|
||||
|
||||
def _truthy(v: str | None) -> bool:
|
||||
return str(v or "").strip().lower() in ("1", "true", "yes", "on")
|
||||
|
||||
# ---- Page-cache configuration (all env-tunable) ----
PAGE_CACHE_DIR = Path("/data/pages")  # on-disk root holding one subdir per book
# Book caches idle longer than this many days are deleted by the cleaner.
PAGE_CACHE_TTL_DAYS = int(os.getenv("PAGE_CACHE_TTL_DAYS", "14"))
# Overall size cap for the whole page cache, in bytes (default 10 GiB).
PAGE_CACHE_MAX_BYTES = int(os.getenv("PAGE_CACHE_MAX_BYTES", str(10 * 1024 * 1024 * 1024)))
# Whether the background cleaner thread is started at all.
PAGE_CACHE_AUTOCLEAN = _truthy(os.getenv("PAGE_CACHE_AUTOCLEAN", "true"))
# Wake-up cadence of the background cleaner, in minutes (default 6 h).
PAGE_CACHE_CLEAN_INTERVAL_MIN = int(os.getenv("PAGE_CACHE_CLEAN_INTERVAL_MIN", "360"))
|
||||
|
||||
|
||||
def _mask_headers(h: dict) -> dict:
|
||||
masked = {}
|
||||
for k, v in h.items():
|
||||
@@ -295,6 +305,14 @@ def startup():
|
||||
t = threading.Thread(target=_run_precache_thumbs, args=(THUMB_WORKERS,), daemon=True)
|
||||
t.start()
|
||||
|
||||
# Start pages auto-clean thread
|
||||
if PAGE_CACHE_AUTOCLEAN:
|
||||
t = threading.Thread(target=_autoclean_loop, daemon=True)
|
||||
t.start()
|
||||
app_logger.info(f"Page cache auto-clean enabled: every {PAGE_CACHE_CLEAN_INTERVAL_MIN} min, "
|
||||
f"ttl={PAGE_CACHE_TTL_DAYS}d, cap={PAGE_CACHE_MAX_BYTES} bytes")
|
||||
|
||||
|
||||
conn = db.connect()
|
||||
try:
|
||||
has_any = conn.execute("SELECT EXISTS(SELECT 1 FROM items LIMIT 1)").fetchone()[0] == 1
|
||||
@@ -724,8 +742,123 @@ def pse_page(path: str = Query(...), page: int = Query(0, ge=0), _=Depends(requi
|
||||
cache_dir = _book_cache_dir(path)
|
||||
dest = cache_dir / f"{page+1:04d}.jpg"
|
||||
out = _ensure_page_jpeg(abs_cbz, inner, dest)
|
||||
# --- heartbeat: mark this book cache as recently used ---
|
||||
try:
|
||||
(cache_dir / ".last").touch()
|
||||
except Exception:
|
||||
pass
|
||||
return FileResponse(out, media_type="image/jpeg")
|
||||
|
||||
# -------- Page cache cleanup --------
|
||||
_LAST_CACHE_CLEAN = {"ts": 0.0, "deleted_dirs": 0, "deleted_bytes": 0, "reason": ""}
|
||||
|
||||
def _dir_size(p: Path) -> int:
|
||||
total = 0
|
||||
for root, _, files in os.walk(p):
|
||||
for fn in files:
|
||||
try:
|
||||
total += (Path(root) / fn).stat().st_size
|
||||
except Exception:
|
||||
pass
|
||||
return total
|
||||
|
||||
def _book_cache_entries() -> list[tuple[Path, float, int]]:
    """
    Returns list of (dir_path, last_mtime, size_bytes) for each book cache dir.
    last_mtime prefers the .last heartbeat file; falls back to dir mtime,
    and to 0.0 when neither can be stat'ed.
    """
    if not PAGE_CACHE_DIR.exists():
        return []
    result: list[tuple[Path, float, int]] = []
    for child in PAGE_CACHE_DIR.iterdir():
        if not child.is_dir():
            continue
        heartbeat = child / ".last"
        try:
            source = heartbeat if heartbeat.exists() else child
            last_used = source.stat().st_mtime
        except Exception:
            last_used = 0.0
        try:
            nbytes = _dir_size(child)
        except Exception:
            nbytes = 0
        result.append((child, last_used, nbytes))
    return result
|
||||
|
||||
def _remove_dir(p: Path) -> int:
    """Remove directory tree *p*, returning bytes freed (best-effort).

    Never raises: the size probe swallows errors, and the removal ignores
    per-entry failures so a partially-unreadable cache directory is deleted
    as far as possible — same contract as the original hand-rolled walk.
    """
    import shutil  # stdlib; imported locally to keep this helper self-contained

    try:
        size = _dir_size(p)
    except Exception:
        size = 0
    # shutil.rmtree(ignore_errors=True) replaces the manual bottom-up
    # os.walk unlink/rmdir loop; like it, it skips entries it cannot delete
    # and never raises.
    shutil.rmtree(p, ignore_errors=True)
    return size
|
||||
|
||||
def _clean_page_cache(ttl_days: int, max_bytes: int) -> dict:
    """Evict book page caches, first by idle TTL, then by total-size cap.

    Pass 1 deletes caches idle longer than *ttl_days* (0 disables TTL).
    Pass 2 deletes least-recently-used caches until the total is under
    *max_bytes* (<=0 disables the cap). Updates and returns a snapshot of
    the module-level _LAST_CACHE_CLEAN stats.
    """
    started = time.time()
    cutoff_secs = max(0, int(ttl_days)) * 86400
    removed_dirs = 0
    freed_bytes = 0

    # Pass 1: TTL eviction.
    if cutoff_secs > 0:
        for cache_dir, last_used, _size in _book_cache_entries():
            if (started - last_used) > cutoff_secs:
                freed_bytes += _remove_dir(cache_dir)
                removed_dirs += 1

    # Re-scan so the size pass works on what actually survived pass 1.
    remaining = _book_cache_entries()

    # Pass 2: size-cap eviction, oldest (smallest last_used) first.
    usage = sum(size for _d, _t, size in remaining)
    if max_bytes > 0 and usage > max_bytes:
        for cache_dir, _last_used, size in sorted(remaining, key=lambda e: e[1]):
            if usage <= max_bytes:
                break
            usage -= size
            freed_bytes += _remove_dir(cache_dir)
            removed_dirs += 1

    _LAST_CACHE_CLEAN.update({"ts": started, "deleted_dirs": removed_dirs, "deleted_bytes": freed_bytes, "reason": "manual/auto"})
    return dict(_LAST_CACHE_CLEAN)
|
||||
|
||||
def _page_cache_status() -> dict:
    """Summarize current page-cache usage for the status endpoint.

    Returns directory/byte totals, the configured limits, and a snapshot
    of the last cleanup stats.
    """
    entries = _book_cache_entries()
    return {
        "dir_count": len(entries),
        "total_bytes": sum(sz for _d, _last, sz in entries),
        # Copy so callers cannot mutate the shared module-level stats dict;
        # consistent with _clean_page_cache(), which also returns a snapshot.
        "last_clean": dict(_LAST_CACHE_CLEAN),
        "ttl_days": PAGE_CACHE_TTL_DAYS,
        "max_bytes": PAGE_CACHE_MAX_BYTES,
    }
|
||||
|
||||
def _autoclean_loop():
    """Daemon-thread body: run _clean_page_cache forever at a fixed cadence.

    Errors are logged and swallowed so a single failed pass never kills
    the cleaner thread.
    """
    while True:
        try:
            _clean_page_cache(PAGE_CACHE_TTL_DAYS, PAGE_CACHE_MAX_BYTES)
        except Exception as exc:
            app_logger.error(f"page cache autoclean error: {exc}")
        # Clamp to at least one minute between passes.
        time.sleep(max(1, PAGE_CACHE_CLEAN_INTERVAL_MIN) * 60)
|
||||
|
||||
|
||||
# -------------------- Dashboard & stats --------------------
|
||||
@app.get("/dashboard", response_class=HTMLResponse)
|
||||
def dashboard(_=Depends(require_basic)):
|
||||
@@ -860,10 +993,7 @@ def _smartlists_load():
|
||||
return [] # default
|
||||
|
||||
def _smartlists_save(lists):
    """Persist smart lists to SMARTLISTS_PATH as JSON.

    Keeps a timestamped .bak copy of the previous file (note: with_suffix
    replaces the original extension), and writes the new content to a
    sibling temp file that is atomically swapped into place, so a crash
    mid-write cannot leave a truncated JSON file behind.
    """
    # Back up the previous file so a bad save can be recovered manually.
    if SMARTLISTS_PATH.exists():
        ts = time.strftime("%Y%m%d-%H%M%S")
        SMARTLISTS_PATH.rename(SMARTLISTS_PATH.with_suffix(f".{ts}.bak"))
    SMARTLISTS_PATH.parent.mkdir(parents=True, exist_ok=True)
    # Atomic write: temp file in the same directory, then os.replace.
    tmp = SMARTLISTS_PATH.with_suffix(".tmp")
    with tmp.open("w", encoding="utf-8") as f:
        json.dump(lists, f, ensure_ascii=False, indent=2)
    os.replace(tmp, SMARTLISTS_PATH)
|
||||
|
||||
@@ -960,3 +1090,12 @@ def thumbs_errors_log(_=Depends(require_basic)):
|
||||
filename="thumbs_errors.log",
|
||||
headers={"Cache-Control": "no-store"}
|
||||
)
|
||||
|
||||
@app.get("/pages/cache/status", response_class=JSONResponse)
def pages_cache_status(_=Depends(require_basic)):
    """Report current page-cache usage and last-clean stats as JSON."""
    status = _page_cache_status()
    return JSONResponse(status)
|
||||
|
||||
@app.post("/admin/pages/cleanup", response_class=JSONResponse)
def admin_pages_cleanup(_=Depends(require_basic)):
    """Run a page-cache cleanup immediately and report what was removed."""
    result = _clean_page_cache(PAGE_CACHE_TTL_DAYS, PAGE_CACHE_MAX_BYTES)
    return JSONResponse({"ok": True, **result})
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
.footer-note { color: var(--bs-secondary-color); }
|
||||
.kpis .card { transition: transform .15s ease; }
|
||||
.kpis .card:hover { transform: translateY(-2px); }
|
||||
.cache-pill { font-variant-numeric: tabular-nums; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
@@ -32,8 +33,9 @@
|
||||
<div class="ms-auto d-flex align-items-center gap-2">
|
||||
<span class="navbar-text small text-secondary me-2">
|
||||
<span id="lastUpdated">—</span> • Covers: <span id="covers">—</span>
|
||||
<!-- errors badge + download link -->
|
||||
• Errors: <a id="errLink" href="#" class="link-danger text-decoration-none"><span id="errCount">0</span></a>
|
||||
<!-- NEW: live page cache status -->
|
||||
• Cache: <span id="cacheStatus" class="badge text-bg-light cache-pill">—</span>
|
||||
</span>
|
||||
<button id="thumbsBtn" class="btn btn-sm btn-outline-primary">
|
||||
<i class="bi bi-images me-1"></i> Pre-cache Thumbnails
|
||||
@@ -41,6 +43,10 @@
|
||||
<button id="reindexBtn" class="btn btn-sm btn-outline-secondary">
|
||||
<i class="bi bi-arrow-repeat me-1"></i> Reindex
|
||||
</button>
|
||||
<!-- NEW: Clean Page Cache -->
|
||||
<button id="cleanCacheBtn" class="btn btn-sm btn-outline-danger">
|
||||
<i class="bi bi-trash3 me-1"></i> Clean Page Cache
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
@@ -149,6 +155,16 @@
|
||||
</div>
|
||||
</main>
|
||||
|
||||
<!-- Toast (bottom-right) for one-off messages, e.g. the cache-cleanup result -->
|
||||
<div class="position-fixed bottom-0 end-0 p-3" style="z-index:1080">
|
||||
<div id="toast" class="toast align-items-center text-bg-dark border-0" role="alert" aria-live="assertive" aria-atomic="true">
|
||||
<div class="d-flex">
|
||||
<div id="toastBody" class="toast-body">Done.</div>
|
||||
<button type="button" class="btn-close btn-close-white me-2 m-auto" data-bs-dismiss="toast" aria-label="Close"></button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<footer class="container my-4 small footer-note">
|
||||
<div class="d-flex justify-content-between">
|
||||
<span>ComicOPDS Dashboard</span>
|
||||
@@ -160,7 +176,7 @@
|
||||
<script>
|
||||
const baseOptions = {
|
||||
responsive: true, maintainAspectRatio: false,
|
||||
plugins: { legend: { position: 'bottom', labels:{usePointStyle:true, boxWidth:8} }, tooltip:{mode:'index', intersect:true} },
|
||||
plugins: { legend: { position: 'bottom', labels:{usePointStyle:true, boxWidth:8} }, tooltip:{mode:'index', intersect:false} },
|
||||
interaction:{ mode:'nearest', axis:'x', intersect:false },
|
||||
scales:{ x:{ ticks:{ maxRotation:0, autoSkip:true } }, y:{ beginAtZero:true, ticks:{ precision:0 } } }
|
||||
};
|
||||
@@ -172,7 +188,7 @@
|
||||
function mapTimeline(d){ const l=(d.timeline_by_year||[]).filter(x=>x.year!=null).sort((a,b)=>(+a.year)-(+b.year)); return {labels:l.map(x=>String(x.year)),values:l.map(x=>x.count??0)}; }
|
||||
function mapWriters(d){ const a=Array.isArray(d.top_writers)?d.top_writers:[]; return {labels:a.map(x=>x.writer??'(Unknown)'),values:a.map(x=>x.count??0)}; }
|
||||
|
||||
// NEW: Formats from stats.formats_breakdown (array of {format, count})
|
||||
// Formats from stats.formats_breakdown (array of {format, count})
|
||||
function mapFormats(d){
|
||||
const arr = Array.isArray(d.formats_breakdown) ? d.formats_breakdown : [];
|
||||
const labels = arr.map(x => x.format || '(unknown)');
|
||||
@@ -180,6 +196,13 @@
|
||||
return { labels, values, kinds: arr.length };
|
||||
}
|
||||
|
||||
function fmtBytes(n){
|
||||
if(!Number.isFinite(n)) return "—";
|
||||
const u=['B','KB','MB','GB','TB']; let i=0; let v=Number(n);
|
||||
while(v>=1024 && i<u.length-1){ v/=1024; i++; }
|
||||
return (v>=10? v.toFixed(0): v.toFixed(1))+" "+u[i];
|
||||
}
|
||||
|
||||
async function load(){
|
||||
const d = await jget("/stats.json");
|
||||
document.getElementById("lastUpdated").textContent = d.last_updated? new Date(d.last_updated*1000).toLocaleString() : "—";
|
||||
@@ -257,29 +280,6 @@
|
||||
setTimeout(pollIndex, delay);
|
||||
}
|
||||
|
||||
// ----- Errors counter + download -----
// Fetch the thumbnail error log and hand it to the browser as a download,
// via a temporary object URL and a synthetic <a> click.
async function downloadErrors() {
  try {
    const resp = await fetch("/thumbs/errors/log", { credentials: "include" });
    if (!resp.ok) throw new Error("HTTP " + resp.status);
    const blob = await resp.blob();
    const url = URL.createObjectURL(blob);
    const link = document.createElement("a");
    link.href = url;
    link.download = "thumbs_errors.log";
    document.body.appendChild(link);
    link.click();
    link.remove();
    // Revoke slightly later so the download has started first.
    setTimeout(() => URL.revokeObjectURL(url), 1000);
  } catch (e) {
    alert("Download failed: " + (e?.message || e));
  }
}
document.getElementById("errLink").addEventListener("click", (ev) => {
  ev.preventDefault();
  downloadErrors();
});
|
||||
|
||||
// ----- Thumbnails pre-cache progress -----
|
||||
async function pollThumbs(){
|
||||
let delay=5000;
|
||||
@@ -307,6 +307,30 @@
|
||||
setTimeout(pollThumbs, delay);
|
||||
}
|
||||
|
||||
// ----- Errors counter + download -----
// Fetch the thumbnail error log and hand it to the browser as a download,
// via a temporary object URL and a synthetic <a> click.
async function downloadErrors() {
  try {
    const resp = await fetch("/thumbs/errors/log", { credentials: "include" });
    if (!resp.ok) throw new Error("HTTP " + resp.status);
    const blob = await resp.blob();
    const url = URL.createObjectURL(blob);
    const link = document.createElement("a");
    link.href = url;
    link.download = "thumbs_errors.log";
    document.body.appendChild(link);
    link.click();
    link.remove();
    // Revoke slightly later so the download has started first.
    setTimeout(() => URL.revokeObjectURL(url), 1000);
  } catch (e) {
    alert("Download failed: " + (e?.message || e));
  }
}
document.getElementById("errLink").addEventListener("click", (ev) => {
  ev.preventDefault();
  downloadErrors();
});
|
||||
|
||||
// ----- Buttons -----
|
||||
function showIndexPending() {
|
||||
const box = document.getElementById("indexProgress");
|
||||
box.classList.remove("d-none");
|
||||
@@ -317,7 +341,6 @@
|
||||
bar.style.width = "100%";
|
||||
bar.textContent = "Starting…";
|
||||
}
|
||||
|
||||
function showThumbsPending() {
|
||||
const box = document.getElementById("thumbsProgress");
|
||||
box.classList.remove("d-none");
|
||||
@@ -333,9 +356,9 @@
|
||||
try {
|
||||
btn.disabled = true;
|
||||
btn.innerHTML = '<span class="spinner-border spinner-border-sm me-1" role="status" aria-hidden="true"></span> Starting…';
|
||||
showThumbsPending(); // show immediately
|
||||
showThumbsPending();
|
||||
await fetch("/admin/thumbs/precache", { method: "POST", credentials: "include" });
|
||||
setTimeout(pollThumbs, 200); // kick the poll
|
||||
setTimeout(pollThumbs, 200);
|
||||
} catch (e) {
|
||||
alert("Failed to start thumbnails pre-cache: " + (e?.message || e));
|
||||
} finally {
|
||||
@@ -350,9 +373,9 @@
|
||||
try {
|
||||
btn.disabled = true;
|
||||
btn.innerHTML = '<span class="spinner-border spinner-border-sm me-1" role="status" aria-hidden="true"></span> Reindexing…';
|
||||
showIndexPending(); // show immediately
|
||||
showIndexPending();
|
||||
await fetch("/admin/reindex", { method: "POST", credentials: "include" });
|
||||
setTimeout(pollIndex, 200); // kick the poll
|
||||
setTimeout(pollIndex, 200);
|
||||
} catch (e) {
|
||||
alert("Reindex failed: " + (e?.message || e));
|
||||
} finally {
|
||||
@@ -360,10 +383,47 @@
|
||||
}
|
||||
});
|
||||
|
||||
// NEW: Clean page cache
// Refresh the navbar cache pill from /pages/cache/status; show an em dash
// when the endpoint is unreachable.
async function updateCacheStatus() {
  let label;
  try {
    const s = await jget("/pages/cache/status");
    label = `${s.dir_count ?? 0} dirs • ${fmtBytes(s.total_bytes ?? 0)}`;
  } catch {
    label = "—";
  }
  document.getElementById("cacheStatus").textContent = label;
}
|
||||
// POST the cleanup endpoint, surface the result in a toast, then refresh
// the cache pill and restore the button.
async function cleanCache() {
  const btn = document.getElementById("cleanCacheBtn");
  const savedLabel = btn.innerHTML;
  btn.disabled = true;
  btn.innerHTML = '<span class="spinner-border spinner-border-sm me-1" role="status" aria-hidden="true"></span> Cleaning…';
  try {
    const resp = await fetch("/admin/pages/cleanup", { method:"POST", credentials:"include" });
    const data = await resp.json().catch(() => ({}));
    const message = `Cache cleaned: ${data.deleted_dirs ?? 0} dirs, ${fmtBytes(data.deleted_bytes ?? 0)} freed.`;
    document.getElementById('toastBody').textContent = message;
    const toastEl = document.getElementById('toast');
    new bootstrap.Toast(toastEl, { delay: 4000 }).show();
  } catch (e) {
    alert("Cache cleanup failed: " + (e?.message || e));
  } finally {
    await updateCacheStatus();
    // Small delay so the spinner is visible even on an instant response.
    setTimeout(() => { btn.disabled = false; btn.innerHTML = savedLabel; }, 500);
  }
}
document.getElementById("cleanCacheBtn").addEventListener("click", cleanCache);
|
||||
|
||||
// Initial load & polls
load();
pollIndex();
pollThumbs();
updateCacheStatus();
// Keep the cache pill fresh without hammering the endpoint.
setInterval(updateCacheStatus, 120000); // every 2 min
|
||||
|
||||
// Errors counter
|
||||
(function pollErrors(){
|
||||
let delay=8000;
|
||||
|
||||
Reference in New Issue
Block a user