2023-01-15 10:37:05 +01:00
|
|
|
from flask import Flask, render_template, send_from_directory, request
|
2022-06-07 09:16:43 +02:00
|
|
|
from flask_httpauth import HTTPBasicAuth
|
|
|
|
from werkzeug.security import check_password_hash
|
|
|
|
from gevent.pywsgi import WSGIServer
|
|
|
|
import timeit
|
|
|
|
import sqlite3
|
|
|
|
import os
|
2023-01-20 15:17:50 +01:00
|
|
|
from PIL import Image
|
2022-06-07 09:16:43 +02:00
|
|
|
import zipfile
|
2022-06-07 21:07:28 +02:00
|
|
|
import gzip
|
2022-06-07 09:16:43 +02:00
|
|
|
from bs4 import BeautifulSoup
|
|
|
|
import re
|
|
|
|
import datetime
|
|
|
|
import sys
|
2022-06-07 21:07:28 +02:00
|
|
|
import time
|
2023-01-20 10:07:05 +01:00
|
|
|
from pathlib import Path
|
2023-01-20 15:17:50 +01:00
|
|
|
from io import BytesIO
|
2022-06-07 09:16:43 +02:00
|
|
|
|
|
|
|
from opds import fromdir
|
2023-01-19 22:33:11 +01:00
|
|
|
import config,extras
|
2022-06-07 09:16:43 +02:00
|
|
|
|
|
|
|
# Flask application; static assets in ./static are served from the site root.
app = Flask(__name__, static_url_path="", static_folder="static")

# HTTP Basic auth handler; credentials are checked by the
# @auth.verify_password callback defined below.
auth = HTTPBasicAuth()
|
|
|
|
|
|
|
|
@auth.verify_password
def verify_password(username, password):
    """Validate HTTP Basic credentials against the configured user table.

    Returns True (auth disabled) when no admin password is configured,
    the username on a successful check, and None otherwise.
    """
    # No admin password configured -> authentication is effectively off.
    if not config.TEENYOPDS_ADMIN_PASSWORD:
        return True
    # config.users is presumably a {username: password_hash} dict — verify.
    stored_hash = config.users.get(username) if username in config.users else None
    if stored_hash is not None and check_password_hash(stored_hash, password):
        return username
|
|
|
|
|
|
|
|
|
|
|
|
@app.route("/")
def startpage():
    """Render the landing page with a limited selection of comics.

    Reads up to ``config.DEFAULT_SEARCH_NUMBER`` rows from the ``comics``
    table and hands them to the ``start.html`` template as ``result``.
    """
    conn = sqlite3.connect('app.db')
    try:
        cursor = conn.cursor()
        # Parameterized LIMIT instead of string concatenation: safe against
        # injection and against a malformed (non-numeric) config value.
        cursor.execute(
            "select * from comics LIMIT ?;",
            (int(config.DEFAULT_SEARCH_NUMBER),),
        )
        result = cursor.fetchall()
    finally:
        # Close the connection even if the query raises (was leaked before).
        conn.close()
    return render_template("start.html", result=result)
|
|
|
|
|
2022-06-07 09:16:43 +02:00
|
|
|
@app.route("/healthz")
def healthz():
    # Liveness probe endpoint: always answers plain "ok" with HTTP 200.
    return "ok"
|
|
|
|
|
2023-01-20 11:38:38 +01:00
|
|
|
@app.route("/generate")
def generate():
    """Walk the comic library and (re)build cover thumbnails.

    Query args:
        force: when the literal string 'True', regenerate every thumbnail
               even if one already exists on disk.

    Returns:
        An HTML summary: comics seen, thumbnails generated, CBZ files
        lacking ComicInfo.xml, errors, and skipped files.
    """
    force = request.args.get('force')
    generated = 0
    comiccount = 0
    files_without_comicinfo = 0
    errorcount = 0
    skippedcount = 0

    def _write_thumbnail(archive, filelist, cvdb):
        # Assumes the second archive entry is the cover image — TODO confirm.
        cover = archive.open(filelist[1]).read()
        image = Image.open(BytesIO(cover))
        # LANCZOS is the modern name for ANTIALIAS (removed in Pillow 10);
        # it selects the identical resampling filter.
        image.thumbnail(config.MAXSIZE, Image.LANCZOS)
        image.save(config.THUMBNAIL_DIR + "/" + str(cvdb) + ".jpg")

    for root, dirs, files in os.walk(os.path.abspath(config.CONTENT_BASE_DIR)):
        for file in files:
            f = os.path.join(root, file)
            if not f.endswith('.cbz'):
                continue
            try:
                comiccount = comiccount + 1
                # Context manager closes the archive (was leaked before).
                with zipfile.ZipFile(f) as s:
                    filelist = s.namelist()
                    # Assumes ComicInfo.xml is the first archive entry.
                    if filelist[0] == 'ComicInfo.xml':
                        Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml")
                        CVDB = extras.get_cvdb(Bs_data.select('Notes'))
                        thumb = Path(config.THUMBNAIL_DIR + "/" + str(CVDB) + ".jpg")
                        if force == 'True' or not thumb.exists():
                            _write_thumbnail(s, filelist, CVDB)
                            generated = generated + 1
                        else:
                            skippedcount = skippedcount + 1
                    else:
                        # BUGFIX: previously incremented a misspelled variable
                        # ('files_withtout_comicinfo'), so this counter was
                        # always reported as 0.
                        files_without_comicinfo = files_without_comicinfo + 1
            except Exception as e:
                errorcount = errorcount + 1
                config._print("Error (/generate): " + str(e))
                config._print(f)
    return (
        "Forced generation: " + str(force)
        + "<br>Comics: " + str(comiccount)
        + "<br>Generated: " + str(generated)
        + "<br>CBZ files without ComicInfo.xml: " + str(files_without_comicinfo)
        + "<br>Errors: " + str(errorcount)
        + "<br>Skipped: " + str(skippedcount)
    )
|
|
|
|
|
|
|
|
|
2022-06-07 09:16:43 +02:00
|
|
|
@app.route('/import')
def import2sql():
    """Scan CONTENT_BASE_DIR for .cbz files and upsert their metadata
    into the COMICS table, writing a cover thumbnail when one is missing.

    A comic is (re)imported only when the file's mtime is newer than the
    stored UPDATED value. Returns an HTML summary of counts and timing.
    """
    conn = sqlite3.connect('app.db')
    comiccount = 0
    importcount = 0
    coverscount = 0
    skippedcount = 0
    errorcount = 0
    comics_with_errors = []
    start_time = timeit.default_timer()
    for root, dirs, files in os.walk(os.path.abspath(config.CONTENT_BASE_DIR)):
        for file in files:
            f = os.path.join(root, file)
            if not f.endswith('.cbz'):
                continue
            try:
                comiccount = comiccount + 1
                s = zipfile.ZipFile(f)
                filelist = s.namelist()
                # Assumes ComicInfo.xml is the first archive entry.
                if filelist[0] == 'ComicInfo.xml':
                    filemodtime = os.path.getmtime(f)
                    Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml")
                    CVDB = extras.get_cvdb(Bs_data.select('Notes'))
                    ISSUE = Bs_data.select('Number')[0].text
                    SERIES = Bs_data.select('Series')[0].text
                    VOLUME = Bs_data.select('Volume')[0].text
                    PUBLISHER = Bs_data.select('Publisher')[0].text
                    try:
                        TITLE = Bs_data.select('Title')[0].text
                    except IndexError:
                        TITLE = ""  # sometimes Title is absent from ComicInfo.xml
                    UPDATED = filemodtime
                    # Table schema (created out of band):
                    # create table COMICS (CVDB, ISSUE, SERIES, VOLUME, PUBLISHER,
                    #                      TITLE, FILE, PATH, UPDATED, PRIMARY KEY(CVDB))
                    try:
                        # Parameterized lookup (was string-concatenated SQL —
                        # an injection risk and quoting hazard).
                        row = conn.execute(
                            "SELECT UPDATED FROM COMICS WHERE CVDB = ?;",
                            (str(CVDB),),
                        ).fetchone()
                        # Missing row -> treat as never imported.
                        savedmodtime = row[0] if row else 0
                    except sqlite3.Error:
                        savedmodtime = 0
                    if savedmodtime < filemodtime:
                        conn.execute(
                            "INSERT OR REPLACE INTO COMICS "
                            "(CVDB,ISSUE,SERIES,VOLUME, PUBLISHER, TITLE, FILE,PATH,UPDATED) "
                            "VALUES (?,?,?,?,?,?,?,?,?)",
                            (CVDB, ISSUE, SERIES, VOLUME, PUBLISHER, TITLE, file, f, UPDATED),
                        )
                        conn.commit()
                        config._print("Adding: " + str(CVDB))
                        importcount = importcount + 1
                    elif Path(config.THUMBNAIL_DIR + "/" + str(CVDB) + ".jpg").exists() == False:
                        # Assumes the second archive entry is the cover image.
                        cover = s.open(filelist[1]).read()
                        # 'with' closes the handle even on a write error.
                        with open(config.THUMBNAIL_DIR + "/" + str(CVDB) + ".jpg", 'wb+') as c:
                            c.write(cover)
                        coverscount = coverscount + 1
                    else:
                        config._print("Skipping: " + f)
                        skippedcount = skippedcount + 1
            except Exception as e:
                errorcount = errorcount + 1
                comics_with_errors.append(f)
                config._print(e)
                config._print(comics_with_errors)
    conn.close()
    elapsed = timeit.default_timer() - start_time
    elapsed_time = "IMPORTED IN: " + str(round(elapsed,2)) + "s"
    import_stats = (
        elapsed_time
        + "<br>Comics: " + str(comiccount)
        + "<br>Imported: " + str(importcount)
        + "<br>Covers: " + str(coverscount)
        + "<br>Skipped: " + str(skippedcount)
        + "<br>Errors: " + str(errorcount)
    )
    return import_stats
|
2022-06-07 09:16:43 +02:00
|
|
|
|
|
|
|
@app.route("/content/<path:path>")
@auth.login_required
def send_content(path):
    # Serve a comic archive from the content directory (Basic auth required).
    # NOTE(review): leftover debug print — consider removing or routing
    # through config._print like the other endpoints.
    print('content')
    return send_from_directory(config.CONTENT_BASE_DIR, path)
|
|
|
|
|
2023-01-19 22:21:46 +01:00
|
|
|
@app.route("/image/<path:path>")
def image(path):
    # Serve a generated cover thumbnail. Unlike /content, this route is
    # unauthenticated.
    return send_from_directory(config.THUMBNAIL_DIR,path)
|
|
|
|
|
2022-06-07 09:16:43 +02:00
|
|
|
@app.route("/catalog")
@app.route("/catalog/")
@app.route("/catalog/<path:path>")
@auth.login_required
def catalog(path=""):
    """Build and render the OPDS catalog feed for *path* (auth required)."""
    # Debug trace of the request context.
    config._print("path: " + path)
    config._print("root_url: " + request.root_url)
    config._print("url: " + request.url)
    config._print("CONTENT_BASE_DIR: " + config.CONTENT_BASE_DIR)

    started = timeit.default_timer()
    feed = fromdir(request.root_url, request.url, config.CONTENT_BASE_DIR, path)
    elapsed = timeit.default_timer() - started

    print("-----------------------------------------------------------------------------------------------------------------------")
    print("RENDERED IN: " + str(round(elapsed,2))+"s")

    return feed.render()
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Development entry point.
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network — confirm this is
    # never used in production (the commented-out gevent WSGIServer below
    # looks like the intended production path).
    #http_server = WSGIServer(("", 5000), app)
    #http_server.serve_forever()
    app.run(debug=True,host='0.0.0.0')
|