add except for parsing

Valentin CZERYBA 2023-03-09 20:35:47 +01:00
parent bf5a5b7eb3
commit 699cdc350e


@@ -33,8 +33,12 @@ def getScriptCss(url, js, css, logger):
     for anchor in script:
         src = anchor.get("src", "/")
         if src != "/":
+            try:
                 u = urlparse(url)
                 o = urlparse(src)
+            except Exception as err:
+                logger.error("parsing error : {0}".format(err))
+                exit(1)
             if o.netloc == "":
                 o = o._replace(netloc=u.netloc)
                 o = o._replace(scheme=u.scheme)
@@ -46,8 +50,12 @@ def getScriptCss(url, js, css, logger):
         if rel[0] == "stylesheet":
             href = anchor.get("href", "/")
             if href != "/":
+                try:
                     u = urlparse(url)
                     o = urlparse(href)
+                except Exception as err:
+                    logger.error("parsing error : {0}".format(err))
+                    exit(1)
                 if o.netloc == "":
                     o = o._replace(netloc=u.netloc)
                     o = o._replace(scheme=u.scheme)
@@ -129,8 +137,12 @@ def getUrlPage(url, logger):
     for title in h2:
         href = title.find_all("a")[0].get("href", "/")
         if href not in webpage:
+            try:
                 o = urlparse(href)
                 o = o._replace(scheme="https").geturl()
+            except Exception as err:
+                logger.error("parsing error : {0}".format(err))
+                exit(1)
             webpage.append(o)
     return webpage
@@ -138,7 +150,11 @@ def getUrlPage(url, logger):
 
 def downloadPage(webpage, backup_dir, logger):
     for i in range(0, len(webpage)):
+        try:
             o = urlparse(webpage[i])
+        except Exception as err:
+            logger.error("parsing error : {0}".format(err))
+            exit(1)
         path_web = o.path.split("/")
         filePageWeb = path_web[len(path_web)-1]
         path_web.pop(len(path_web)-1)
@@ -154,7 +170,11 @@ def downloadPage(webpage, backup_dir, logger):
         if len(dir_page_web) > 0 and len(filePageWeb) > 0:
             fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
             logger.info("{0}/{1} : {2}".format(i+1, len(webpage), fileDownload))
+            try:
                 open(fileDownload, "wb").write(r.content)
+            except Exception as err:
+                logger.error("file error : {0}".format(err))
+                exit(1)
 
 
 if __name__ == '__main__':
@@ -188,7 +208,7 @@ if __name__ == '__main__':
         o = o._replace(scheme="https")
         url = o.geturl().replace(":///", "://")
     except Exception as err:
-        logger.error("Error : {0}".format(err))
+        logger.error("parsing error : {0}".format(err))
     if args.js is False:
         script = getScriptCss(url, True, False, logger)
         downloadPage(script, "{0}/{1}/{2}".format(args.dir, o.path, "dists/js"), logger)
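
The pattern applied in every hunk is the same: wrap the urlparse() calls (and, in downloadPage(), the file write) in try/except, log the error through the module logger, and stop the run with exit(1). A minimal standalone sketch of that pattern is shown below; the function names (safe_parse, save_content) are illustrative rather than taken from the repository, the logger setup is assumed, and requests.get() is assumed from the surrounding code (the hunks only show r.content).

import sys
import logging
from urllib.parse import urlparse

import requests

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

def safe_parse(href, base_url):
    # Parse the link and, as in getScriptCss()/getUrlPage(), abort the whole
    # run if parsing fails (the commit calls the bare exit(1) builtin).
    try:
        u = urlparse(base_url)
        o = urlparse(href)
    except Exception as err:
        logger.error("parsing error : {0}".format(err))
        sys.exit(1)
    if o.netloc == "":
        o = o._replace(netloc=u.netloc, scheme=u.scheme)
    return o.geturl()

def save_content(page_url, file_download):
    # Guard the write the same way downloadPage() does after this commit.
    r = requests.get(page_url)  # assumed helper call; the hunks only use r.content
    try:
        open(file_download, "wb").write(r.content)
    except Exception as err:
        logger.error("file error : {0}".format(err))
        sys.exit(1)

if __name__ == "__main__":
    print(safe_parse("/wp-content/style.css", "https://www.example.org/blog"))

Because the handlers call exit(1) from inside the helper functions, the first unparsable URL or failed write ends the whole backup run instead of skipping that entry.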