Testing zip errors

Frederik Baerentsen 2022-06-07 21:07:28 +02:00
parent e847d27809
commit cab1dc2dcb
5 changed files with 40 additions and 98 deletions


@@ -7,5 +7,5 @@ services:
     ports:
       - '5000:5000'
     volumes:
-      - '/opt/data/Comics/ComicRack:/library:ro'
+      - '/opt/data/Comics/ComicRack/Oni Press:/library:ro'
      - '${PWD}/:/app'

main.py

@@ -6,10 +6,12 @@ import timeit
 import sqlite3
 import os
 import zipfile
+import gzip
 from bs4 import BeautifulSoup
 import re
 import datetime
 import sys
+import time
 from opds import fromdir
 import config
@@ -35,33 +37,39 @@ def healthz():
 @app.route('/import')
 def import2sql():
-    conn = sqlite3.connect('app.db')
+    conn = sqlite3.connect('/app/app.db')
     list = []
     start_time = timeit.default_timer()
     for root, dirs, files in os.walk(os.path.abspath(config.CONTENT_BASE_DIR)):
         for file in files:
             f = os.path.join(root, file)
-            s = zipfile.ZipFile(f)
-            Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml")
-            #print(Bs_data.select('Series')[0].text, file=sys.stderr)
-            #print(Bs_data.select('Title')[0].text, file=sys.stderr)
-            CVDB=re.findall('(?<=\[CVDB)(.*)(?=].)', Bs_data.select('Notes')[0].text)
-            #list.append('CVDB'+CVDB[0] + ': ' + Bs_data.select('Series')[0].text + "(" + Bs_data.select('Volume')[0].text + ") : " + Bs_data.select('Number')[0].text )
-            #print(list, file=sys.stdout)
-            ISSUE=Bs_data.select('Number')[0].text
-            SERIES=Bs_data.select('Series')[0].text
-            VOLUME=Bs_data.select('Volume')[0].text
-            PUBLISHER=Bs_data.select('Publisher')[0].text
-            TITLE=Bs_data.select('Title')[0].text
-            PATH=f
-            UPDATED=str(datetime.datetime.now())
-            print(UPDATED,file=sys.stdout)
-            sql="INSERT OR REPLACE INTO COMICS (CVDB,ISSUE,SERIES,VOLUME, PUBLISHER, TITLE, FILE,PATH,UPDATED) VALUES ("+CVDB[0]+",'"+ISSUE+"','"+SERIES+"','"+VOLUME+"','"+PUBLISHER+"','"+TITLE+"','"+file+"','" + f + "','" + UPDATED + "')"
-            print(sql,file=sys.stdout)
-            conn.execute(sql);
-            conn.commit()
+            #try:
+            print(f,file=sys.stdout)
+            try:
+                s = zipfile.ZipFile(f)
+                #s = gzip.GzipFile(f)
+                Bs_data = BeautifulSoup(s.open('ComicInfo.xml').read(), "xml")
+                #print(Bs_data.select('Series')[0].text, file=sys.stderr)
+                #print(Bs_data.select('Title')[0].text, file=sys.stderr)
+                CVDB=re.findall('(?<=\[CVDB)(.*)(?=].)', Bs_data.select('Notes')[0].text)
+                #list.append('CVDB'+CVDB[0] + ': ' + Bs_data.select('Series')[0].text + "(" + Bs_data.select('Volume')[0].text + ") : " + Bs_data.select('Number')[0].text )
+                #print(list, file=sys.stdout)
+                ISSUE=Bs_data.select('Number')[0].text
+                SERIES=Bs_data.select('Series')[0].text
+                VOLUME=Bs_data.select('Volume')[0].text
+                PUBLISHER=Bs_data.select('Publisher')[0].text
+                TITLE=Bs_data.select('Title')[0].text
+                PATH=f
+                UPDATED=str(datetime.datetime.now())
+                #print(UPDATED,file=sys.stdout)
+                sql="INSERT OR REPLACE INTO COMICS (CVDB,ISSUE,SERIES,VOLUME, PUBLISHER, TITLE, FILE,PATH,UPDATED) VALUES ("+CVDB[0]+",'"+ISSUE+"','"+SERIES+"','"+VOLUME+"','"+PUBLISHER+"','"+TITLE+"','"+file+"','" + f + "','" + UPDATED + "')"
+                #print(sql,file=sys.stdout)
+                conn.execute(sql);
+                conn.commit()
+            except:
+                print(f,file=sys.stdout)
     conn.close()
     elapsed = timeit.default_timer() - start_time
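
The second main.py hunk is the "testing zip errors" part of the commit: archive handling moves inside try/except so one corrupt file no longer aborts the whole /import run, and the bare except: just prints the offending path. Below is a minimal sketch of the same pattern, with the assumptions called out: the import_issue helper name, catching zipfile.BadZipFile and KeyError specifically, and the parameterized INSERT over a reduced column set are illustrative additions, not code from this commit.

# Illustrative sketch only, not the committed code.
import re
import sqlite3
import zipfile
from bs4 import BeautifulSoup

def import_issue(conn, path):
    try:
        archive = zipfile.ZipFile(path)            # raises zipfile.BadZipFile on corrupt archives
        meta = BeautifulSoup(archive.open('ComicInfo.xml').read(), 'xml')
    except (zipfile.BadZipFile, KeyError) as exc:  # KeyError: archive has no ComicInfo.xml member
        print(f'skipping {path}: {exc}')
        return
    cvdb = re.findall(r'(?<=\[CVDB)(.*)(?=].)', meta.select('Notes')[0].text)
    # Parameterized SQL avoids the quoting problems of the string-built INSERT
    # (a title containing an apostrophe would break the concatenated version).
    conn.execute(
        'INSERT OR REPLACE INTO COMICS (CVDB, ISSUE, SERIES, PATH) VALUES (?, ?, ?, ?)',
        (cvdb[0], meta.select('Number')[0].text, meta.select('Series')[0].text, path),
    )
    conn.commit()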


@@ -1,77 +0,0 @@
[
{
"SQL TEST": [
{
"SQL": "(series like '%Aqua%' or series like '%girl%') and issue in ('1','2','5','10') and title not like '%Annual%'"
}
]
},{
"Man 2020,2019": [
{
"title": "Man",
"volume": [
"2020",
"2019"
],
"publisher": "",
"series": "",
"issue": ""
}
]
},
{
"DC (BAT)": [
{
"title": "",
"volume": "",
"publisher": "DC Comics",
"series": "Bat",
"issue": ""
}
]
},{
"Marvel": [
{
"title": "",
"volume": "",
"publisher": "marvel",
"series": "",
"issue": ""
}
]
},
{
"Girl": [
{
"title": ["girl","man","World"],
"volume": "",
"publisher": "",
"series": "girl",
"issue": ""
}
]
},
{
"Aquaman": [
{
"title": "",
"volume": "",
"publisher": "",
"series": "aquaman",
"issue": ["2","3","5","10","22"]
}
]
}
,
{
"Girl series": [
{
"title": "",
"volume": "",
"publisher": "",
"series": "girl",
"issue": "2"
}
]
}
]


@@ -5,3 +5,4 @@ Flask-HTTPAuth==4.5.0
 gevent==21.8.0
 bs4
 lxml
+gzip
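
For reference: gzip is part of the Python standard library, so pip will most likely not find a distribution by that name, and the import added in main.py works without any requirements entry. The commented-out #s = gzip.GzipFile(f) alternative in main.py also behaves differently from zipfile: a .gz file is a single compressed stream with no named members, so there is no equivalent of open('ComicInfo.xml'). A small comparison sketch, where issue.cbz and page.gz are placeholder paths:

import gzip
import zipfile

# A zip/cbz archive holds named members, so one specific file can be read out.
with zipfile.ZipFile('issue.cbz') as z:
    xml = z.open('ComicInfo.xml').read()

# A gzip file is a single compressed payload; GzipFile only decompresses the whole stream.
with gzip.GzipFile('page.gz') as g:
    data = g.read()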


@@ -5,6 +5,16 @@
         "SQL": "(series like '%Aqua%' or series like '%girl%') and issue in ('1','2','5','10') and title not like '%Annual%'"
       }
     ]
+  },{
+    "Letter 44": [
+      {
+        "title": "",
+        "volume": "",
+        "publisher": "",
+        "series": "Letter 44",
+        "issue": ""
+      }
+    ]
   },{
     "Man 2020,2019": [
       {
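
The added "Letter 44" entry follows the same shape as the existing filters: empty strings mean no constraint, and series carries the match text. The code that consumes these entries is not part of this diff, so the following is a purely hypothetical sketch of how one entry might be turned into a WHERE clause; the build_where name and the field-to-LIKE mapping are assumptions.

# Hypothetical sketch of consuming one filter entry; not code from this repository.
def build_where(entry):
    clauses = []
    for column in ('title', 'publisher', 'series'):
        value = entry.get(column, '')
        if value:
            clauses.append(f"{column} like '%{value}%'")
    issue = entry.get('issue', '')
    if isinstance(issue, list):
        clauses.append("issue in (%s)" % ','.join(f"'{i}'" for i in issue))
    elif issue:
        clauses.append(f"issue = '{issue}'")
    return ' and '.join(clauses) or '1=1'

print(build_where({"title": "", "volume": "", "publisher": "", "series": "Letter 44", "issue": ""}))
# series like '%Letter 44%'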