Testing zip errors

This commit is contained in:
Frederik Baerentsen 2022-06-07 21:07:28 +02:00
parent e847d27809
commit cab1dc2dcb
5 changed files with 40 additions and 98 deletions

View File

@@ -7,5 +7,5 @@ services:
ports: ports:
- '5000:5000' - '5000:5000'
volumes: volumes:
- '/opt/data/Comics/ComicRack:/library:ro' - '/opt/data/Comics/ComicRack/Oni Press:/library:ro'
- '${PWD}/:/app' - '${PWD}/:/app'

14
main.py
View File

@@ -6,10 +6,12 @@ import timeit
import sqlite3 import sqlite3
import os import os
import zipfile import zipfile
import gzip
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
import re import re
import datetime import datetime
import sys import sys
import time
from opds import fromdir from opds import fromdir
import config import config
@@ -35,14 +37,18 @@ def healthz():
@app.route('/import') @app.route('/import')
def import2sql(): def import2sql():
conn = sqlite3.connect('app.db') conn = sqlite3.connect('/app/app.db')
list = [] list = []
start_time = timeit.default_timer() start_time = timeit.default_timer()
for root, dirs, files in os.walk(os.path.abspath(config.CONTENT_BASE_DIR)): for root, dirs, files in os.walk(os.path.abspath(config.CONTENT_BASE_DIR)):
for file in files: for file in files:
f = os.path.join(root, file) f = os.path.join(root, file)
#try:
print(f,file=sys.stdout)
try:
s = zipfile.ZipFile(f) s = zipfile.ZipFile(f)
#s = gzip.GzipFile(f)
Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml") Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml")
#print(Bs_data.select('Series')[0].text, file=sys.stderr) #print(Bs_data.select('Series')[0].text, file=sys.stderr)
#print(Bs_data.select('Title')[0].text, file=sys.stderr) #print(Bs_data.select('Title')[0].text, file=sys.stderr)
@@ -57,11 +63,13 @@ def import2sql():
TITLE=Bs_data.select('Title')[0].text TITLE=Bs_data.select('Title')[0].text
PATH=f PATH=f
UPDATED=str(datetime.datetime.now()) UPDATED=str(datetime.datetime.now())
print(UPDATED,file=sys.stdout) #print(UPDATED,file=sys.stdout)
sql="INSERT OR REPLACE INTO COMICS (CVDB,ISSUE,SERIES,VOLUME, PUBLISHER, TITLE, FILE,PATH,UPDATED) VALUES ("+CVDB[0]+",'"+ISSUE+"','"+SERIES+"','"+VOLUME+"','"+PUBLISHER+"','"+TITLE+"','"+file+"','" + f + "','" + UPDATED + "')" sql="INSERT OR REPLACE INTO COMICS (CVDB,ISSUE,SERIES,VOLUME, PUBLISHER, TITLE, FILE,PATH,UPDATED) VALUES ("+CVDB[0]+",'"+ISSUE+"','"+SERIES+"','"+VOLUME+"','"+PUBLISHER+"','"+TITLE+"','"+file+"','" + f + "','" + UPDATED + "')"
print(sql,file=sys.stdout) #print(sql,file=sys.stdout)
conn.execute(sql); conn.execute(sql);
conn.commit() conn.commit()
except:
print(f,file=sys.stdout)
conn.close() conn.close()
elapsed = timeit.default_timer() - start_time elapsed = timeit.default_timer() - start_time

View File

@@ -1,77 +0,0 @@
[
{
"SQL TEST": [
{
"SQL": "(series like '%Aqua%' or series like '%girl%') and issue in ('1','2','5','10') and title not like '%Annual%'"
}
]
},{
"Man 2020,2019": [
{
"title": "Man",
"volume": [
"2020",
"2019"
],
"publisher": "",
"series": "",
"issue": ""
}
]
},
{
"DC (BAT)": [
{
"title": "",
"volume": "",
"publisher": "DC Comics",
"series": "Bat",
"issue": ""
}
]
},{
"Marvel": [
{
"title": "",
"volume": "",
"publisher": "marvel",
"series": "",
"issue": ""
}
]
},
{
"Girl": [
{
"title": ["girl","man","World"],
"volume": "",
"publisher": "",
"series": "girl",
"issue": ""
}
]
},
{
"Aquaman": [
{
"title": "",
"volume": "",
"publisher": "",
"series": "aquaman",
"issue": ["2","3","5","10","22"]
}
]
}
,
{
"Girl series": [
{
"title": "",
"volume": "",
"publisher": "",
"series": "girl",
"issue": "2"
}
]
}
]

View File

@@ -5,3 +5,4 @@ Flask-HTTPAuth==4.5.0
gevent==21.8.0 gevent==21.8.0
bs4 bs4
lxml lxml
gzip

View File

@@ -5,6 +5,16 @@
"SQL": "(series like '%Aqua%' or series like '%girl%') and issue in ('1','2','5','10') and title not like '%Annual%'" "SQL": "(series like '%Aqua%' or series like '%girl%') and issue in ('1','2','5','10') and title not like '%Annual%'"
} }
] ]
},{
"Letter 44": [
{
"title": "",
"volume": "",
"publisher": "",
"series": "Letter 44",
"issue": ""
}
]
},{ },{
"Man 2020,2019": [ "Man 2020,2019": [
{ {