Compare commits: be552a5580 ... e3f51d18c9

2 Commits:
  e3f51d18c9
  10a4662545
.gitignore (vendored): 1 addition
@@ -1,2 +1,3 @@
 ~*
 .apikey
+.fuse*
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Update Missing/log/20220715.log (new file): 26 additions
@@ -0,0 +1,26 @@
+log: 20220715
+sh: Starting update.sh
+py: Reading in current database
+py: Querying ComicVine for new issues
+py:0/1330 Since 2022-07-14
+py:100/1330 Since 2022-07-14
+py:200/1330 Since 2022-07-14
+py:300/1330 Since 2022-07-14
+py:400/1330 Since 2022-07-14
+py:500/1330 Since 2022-07-14
+py:600/1330 Since 2022-07-14
+py:700/1330 Since 2022-07-14
+py:800/1330 Since 2022-07-14
+py:900/1330 Since 2022-07-14
+py:1000/1330 Since 2022-07-14
+py:1100/1330 Since 2022-07-14
+py:1200/1330 Since 2022-07-14
+py:1300/1330 Since 2022-07-14
+py: Writing missings to file
+py: Writing database to file
+py: Done! 200 comics added to database! (0 skipped and 1130 comics already in database)
+py: 882746 comics in databased not retrieved in this round.
+py: 0 comics updated in database.
+py: Ids with error in server:
+py: 0
+sh: Error: cannot git push
update.sh

@@ -1,39 +1,33 @@
 #!/bin/bash
 
+d=`date +%Y%m%d`
+
+log2file() {
+    echo "sh: $1" >> "log/$d.log"
+}
+
 error_exit() {
-    echo "Error: $1"
+    log2file "Error: $1"
     exit 1
 }
 
-#source ../.apikey || error_exit "Cannot find apikey"
+touch "log/$d.log"
 
-d=`date +%Y%m%d`
-#mv "missing.mcl" "missing_old.mcl"
+echo "log: $d" >> "log/$d.log"
+log2file "Starting update.sh"
 
 fromdate=$(ls -1 *_latest.mcl |grep -Eo '[[:digit:]]{8}') || error_exit "cannot latest mcl file"
 
-#fromdate2=$(date -d $fromdate +%Y-%m-%d)
-
-#d2=`date +%Y-%m-%d`
-
-#TEMP="${fromdate}"
-#TEMP+="_latest.mcl "
-#TEMP+="${d}"
-#TEMP+=".mcl "
-#TEMP+="${api_key} "
-#TEMP+="${fromdate2} "
-#TEMP+="${d2}"
 
 
 python2 update_missing.py
 
-mv "$d"".mcl" "$d""_latest.mcl" || error_exit "cannot cp $d to latest"
+#mv "$d"".mcl" "$d""_latest.mcl" || error_exit "cannot cp $d to latest"
 mv "$fromdate""_latest.mcl" "archive/""$fromdate"".mcl" || error_exit "cannot mv to archive"
 
-git add "$d""_latest.mcl" "archive/" || error_exit "cannot git add"
+git add "$d""_latest.mcl" "archive/" "*" >> "log/$d.log" || error_exit "cannot git add"
 
-git commit -m "Updated $d" || error_exit "cannot git commit"
+git commit -m "Updated $d" >> "log/$d.log" || error_exit "cannot git commit"
 
-git push origin master || error_exit "cannot git push"
+git push origin master >> "log/$d.log" || error_exit "cannot git push"
update_missing.py

@@ -61,7 +61,9 @@ with open("../.apikey", "r") as f:
 api_key = str(data[0].strip('\n'))
 start_date = str(datetime.strptime(re.search('[0-9]{8}',temp).group(),"%Y%m%d").strftime("%Y-%m-%d"))
 end_date = str(date.today().strftime("%Y-%m-%d"))
+today_date = str(date.today().strftime("%Y%m%d"))
 
+f1=open('log/'+today_date+'.log', 'a')
 
 if len(sys.argv) > 5:
     print "Using argvs"
@@ -77,7 +79,8 @@ issues_volume = {}
 skip_header = True
 cont = 0
 
-print ("Reading in current database")
+print >>f1, "py: Reading in current database"
+
 for line in comiclist:
     if skip_header:
         skip_header = False
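
A note on the recurring change in this file: `print >>f1, "py: ..."` is the Python 2 chevron form of print, which writes the line to the file object f1 opened above instead of stdout; that is how the "py:"-prefixed entries end up in the committed log file. A minimal sketch of the same append-to-log pattern in Python 3 syntax (the path and message here are illustrative only, not taken from the repository):

from datetime import date

today_date = date.today().strftime("%Y%m%d")
log_path = "example_" + today_date + ".log"            # hypothetical path for this sketch
f1 = open(log_path, "a")                               # append mode, as in the script
print("py: Reading in current database", file=f1)      # Python 3 equivalent of: print >>f1, "py: ..."
f1.close()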
@@ -100,7 +103,7 @@ for line in comiclist:
 
 comiclist.close()
 
-print ("Querying ComicVine for new issues")
+print >>f1, "py: Querying ComicVine for new issues"
 headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36'}
 new_comics_cont = 0
 old_comics_cont = 0
@@ -120,13 +123,13 @@ while offset < max:
     try:
         request_url = "https://comicvine.gamespot.com/api/issues/?api_key=" + api_key +"&limit=" + str(limit) + "&format=json&offset=" + str(offset) + "&field_list=id,issue_number,volume&filter=date_last_updated:" + start_date + "|" + end_date + "&sort=id"
 
-        print request_url
+        #print request_url
         r = requests.get(request_url, headers=headers)
         json_obj = r.json()
 
         max = json_obj['number_of_total_results']
 
-        print (str(min(offset,max)) + "/" + str(max) + " Since " + start_date)
+        print >>f1, "py:" + str(min(offset,max)) + "/" + str(max) + " Since " + start_date
 
         for i in json_obj['results']:
             volume_id = i['volume']['id']
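
The "py:0/1330 ... py:1300/1330" progress lines in the committed log come from this offset/limit loop. Below is a simplified, self-contained sketch of the same paging pattern against the ComicVine issues endpoint; the function name, the plain offset += limit step, and the omission of the script's retry and limit-shrinking error recovery are my own simplifications, not code from the repository:

import requests

def fetch_updated_issues(api_key, start_date, end_date, limit=100):
    """Page through ComicVine issues updated between start_date and end_date."""
    headers = {'User-Agent': 'Mozilla/5.0'}  # browser-style User-Agent, as the script sends
    offset = 0
    total = 1                                # replaced by the real total after the first response
    results = []
    while offset < total:
        url = ("https://comicvine.gamespot.com/api/issues/?api_key=" + api_key
               + "&limit=" + str(limit)
               + "&format=json&offset=" + str(offset)
               + "&field_list=id,issue_number,volume"
               + "&filter=date_last_updated:" + start_date + "|" + end_date
               + "&sort=id")
        json_obj = requests.get(url, headers=headers).json()
        total = json_obj['number_of_total_results']
        # Same progress format as the log entries above.
        print("py:" + str(min(offset, total)) + "/" + str(total) + " Since " + start_date)
        results.extend(json_obj['results'])
        offset += limit
    return results

With a 2022-07-14 to 2022-07-15 window, a loop like this would report the same 0/1330 ... 1300/1330 progression seen in the log before the results are merged into the database.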
@@ -150,10 +153,10 @@ while offset < max:
         FindingError = False
 
         if skip == 1:
-            print ("Comic with error found, id= " + str(issue_id+1))
+            print >>f1, "py: Comic with error found, id= " + str(issue_id+1)
             ErrorIds += ";"+ str(issue_id+1)
             comic_skip_cont += 1
-            print ("Continue loading comics now...")
+            print >>f1, "py: Continue loading comics now..."
             FindingError = True
 
         skip = 0
@@ -162,18 +165,18 @@ while offset < max:
 
     except:
         if retry < 4 and not FindingError:
-            print ("Error. Trying Again...")
+            print >>f1, "py: Error. Trying Again..."
             retry += 1
         else:
 
             if not FindingError:
 
-                print ("Finding Error in comic list: " + str(100-limit) + "%")
+                print >>f1, "py: Finding Error in comic list: " + str(100-limit) + "%"
                 skip = 1
                 limit -= 1
 
             if limit == 0 or FindingError:
-                print ("Comic with error found, id= " + str(issue_id+offset))
+                print >>f1, "py: Comic with error found, id= " + str(issue_id+offset)
                 FindingError = True
                 limit = 1
                 offset += 1
@@ -186,7 +189,7 @@ for issue_id in issues_number.keys():
         comics[issues_volume[issue_id]] = {}
     comics[issues_volume[issue_id]][issue_id]=issues_number[issue_id]
 
-print ("Writing missings to file")
+print >>f1, "py: Writing missings to file"
 
 deleted_file = open("Deleted_Comics.txt", "wb")
 
@@ -196,7 +199,7 @@ for issue_id in non_retrieved_comics.keys():
 
 deleted_file.close()
 
-print ("Writing database to file")
+print >>f1, "py: Writing database to file"
 
 outfile = open(out_file,"wb")
 outfile.write("Missing;" + end_date + "\n")
@@ -212,9 +215,9 @@ for volume_id in sorted(comics.iterkeys()):
 
 outfile.close()
 
-print ("Done! " + str(new_comics_cont) + " comics added to database! (" + str(comic_skip_cont)+ " skipped and " + str(old_comics_cont) + " comics already in database)" )
-print (str(deleted_comics_cont) + " comics in databased not retrieved in this round.")
-print (str(updated_comics_cont) + " comics updated in database.")
-print ("Ids with error in server: " + ErrorIds[1:])
-print (cont)
+print >>f1, "py: Done! " + str(new_comics_cont) + " comics added to database! (" + str(comic_skip_cont)+ " skipped and " + str(old_comics_cont) + " comics already in database)"
+print >>f1, "py: " + str(deleted_comics_cont) + " comics in databased not retrieved in this round."
+print >>f1, "py: " + str(updated_comics_cont) + " comics updated in database."
+print >>f1, "py: Ids with error in server: " + ErrorIds[1:]
+print >>f1, "py: " + str(cont)
 #raw_input("Press Enter to continue...")