diff --git a/web_scrap.py b/web_scrap.py
index d11f7f3..ee92912 100644
--- a/web_scrap.py
+++ b/web_scrap.py
@@ -12,11 +12,19 @@ def mkdirPath(path_dir, logger):
         repath = "/".join(makedir)
         if not os.path.exists(repath):
             logger.debug("Dossier crée : {0}".format(repath))
-            os.mkdir(repath)
+            try:
+                os.mkdir(repath)
+            except Exception as err:
+                logger.error("Directory error : {0}".format(err))
+                exit(1)
 
 
 def getScriptCss(url, js, css, logger):
-    page = requests.get(url)
+    try:
+        page = requests.get(url)
+    except Exception as err:
+        logger.error("Connection error : {0}".format(err))
+        exit(1)
     page_url = []
     if page.status_code == 200:
         soup = BeautifulSoup(page.text, 'html.parser')
@@ -25,8 +33,12 @@ def getScriptCss(url, js, css, logger):
         for anchor in script:
             src = anchor.get("src", "/")
             if src != "/":
-                u = urlparse(url)
-                o = urlparse(src)
+                try:
+                    u = urlparse(url)
+                    o = urlparse(src)
+                except Exception as err:
+                    logger.error("parsing error : {0}".format(err))
+                    exit(1)
                 if o.netloc == "":
                     o = o._replace(netloc=u.netloc)
                     o = o._replace(scheme=u.scheme)
@@ -38,8 +50,12 @@ def getScriptCss(url, js, css, logger):
             if rel[0] == "stylesheet":
                 href = anchor.get("href", "/")
                 if href != "/":
-                    u = urlparse(url)
-                    o = urlparse(href)
+                    try:
+                        u = urlparse(url)
+                        o = urlparse(href)
+                    except Exception as err:
+                        logger.error("parsing error : {0}".format(err))
+                        exit(1)
                     if o.netloc == "":
                         o = o._replace(netloc=u.netloc)
                         o = o._replace(scheme=u.scheme)
@@ -51,7 +67,11 @@ def getScriptCss(url, js, css, logger):
 def getImg(webpage, logger):
     page_img = []
     for i in webpage:
-        page = requests.get(i)
+        try:
+            page = requests.get(i)
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
         if page.status_code == 200:
             soup = BeautifulSoup(page.text, 'html.parser')
             img = soup.find_all("img")
@@ -67,7 +87,11 @@ def getImg(webpage, logger):
     return page_img
 
 def getUrlPage(url, logger):
-    page = requests.get(url)
+    try:
+        page = requests.get(url)
+    except Exception as err:
+        logger.error("Connection error : {0}".format(err))
+        exit(1)
     page_url = []
     if page.status_code == 200:
         soup = BeautifulSoup(page.text, 'html.parser')
@@ -79,7 +103,11 @@ def getUrlPage(url, logger):
 
     webpage = []
     for i in page_url:
-        page = requests.get(i)
+        try:
+            page = requests.get(i)
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
         if page.status_code == 200:
             logger.info("page : {0}".format(i))
             if i not in webpage:
@@ -109,8 +137,12 @@ def getUrlPage(url, logger):
             for title in h2:
                 href = title.find_all("a")[0].get("href", "/")
                 if href not in webpage:
-                    o = urlparse(href)
-                    o = o._replace(scheme="https").geturl()
+                    try:
+                        o = urlparse(href)
+                        o = o._replace(scheme="https").geturl()
+                    except Exception as err:
+                        logger.error("parsing error : {0}".format(err))
+                        exit(1)
                     webpage.append(o)
     return webpage
 
@@ -118,19 +150,31 @@ def getUrlPage(url, logger):
 
 def downloadPage(webpage, backup_dir, logger):
     for i in range(0, len(webpage)):
-        o = urlparse(webpage[i])
+        try:
+            o = urlparse(webpage[i])
+        except Exception as err:
+            logger.error("parsing error : {0}".format(err))
+            exit(1)
         path_web = o.path.split("/")
         filePageWeb = path_web[len(path_web)-1]
         path_web.pop(len(path_web)-1)
         dir_page_web = "/".join(path_web)
         mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web), logger)
-        r = requests.get(webpage[i])
+        try:
+            r = requests.get(webpage[i])
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
         if r.status_code == 200:
             fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
             if len(dir_page_web) > 0 and len(filePageWeb) > 0:
                 fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
             logger.info("{0}/{1} : {2}".format(i+1, len(webpage), fileDownload))
-            open(fileDownload, "wb").write(r.content)
+            try:
+                open(fileDownload, "wb").write(r.content)
+            except Exception as err:
+                logger.error("file error : {0}".format(err))
+                exit(1)
 
 
 if __name__ == '__main__':
@@ -159,10 +203,12 @@ if __name__ == '__main__':
     formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     ch.setFormatter(formatter)
     logger.addHandler(ch)
-
-    o = urlparse(args.url)
-    o = o._replace(scheme="https")
-    url = o.geturl().replace(":///", "://")
+    try:
+        o = urlparse(args.url)
+        o = o._replace(scheme="https")
+        url = o.geturl().replace(":///", "://")
+    except Exception as err:
+        logger.error("parsing error : {0}".format(err))
     if args.js is False:
         script = getScriptCss(url, True, False, logger)
         downloadPage(script, "{0}/{1}/{2}".format(args.dir, o.path, "dists/js"), logger)