Function reorganization and renaming
parent e809e376e5
commit 559f5f1e83
web_scrap.py (20 changed lines)
@@ -3,7 +3,7 @@ from bs4 import BeautifulSoup
 from urllib.parse import urlparse
 import requests, os, argparse, logging
 
-def mkdir_path(path_dir, logger):
+def mkdirPath(path_dir, logger):
     if not os.path.exists(path_dir):
         makedir = []
         pathh = path_dir.split("/")
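
Reviewer note: the rename is cosmetic, and the visible body (existence check, then building the directory piece by piece from the split path) is what os.makedirs already does natively. A minimal equivalent sketch, assuming the helper has no side effects beyond directory creation and logging, which this hunk does not fully show:

import logging
import os


def mkdirPath(path_dir, logger):
    # os.makedirs creates every missing intermediate directory in
    # one call; exist_ok=True replaces the manual existence check.
    logger.debug("ensuring directory %s exists", path_dir)
    os.makedirs(path_dir, exist_ok=True)
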
@@ -64,22 +64,20 @@ def getUrlPage(url, logger):
     return webpage
 
 
-def downloadPage(url, backup_dir, logger):
-    o = urlparse(url)
-    o = o._replace(scheme="https")
-    webpage = getUrlPage(o.geturl().replace(":///", "://"), logger)
+def downloadPageHTML(webpage, backup_dir, logger):
     for i in range(0, len(webpage)):
         o = urlparse(webpage[i])
         path_web = o.path.split("/")
         filePageWeb = path_web[len(path_web)-1]
         path_web.pop(len(path_web)-1)
         dir_page_web = "/".join(path_web)
-        mkdir_path("{0}/{1}".format(backup_dir, dir_page_web), logger)
+        mkdirPath("{0}/{1}".format(backup_dir, dir_page_web), logger)
         r = requests.get(webpage[i])
         if r.status_code == 200:
             fileDownload = "{0}/index.html".format(backup_dir)
             if len(dir_page_web) > 0 and len(filePageWeb) > 0:
-                fileDownload = "{0}/{1}/{2}".format(backup_dir, dir_page_web, filePageWeb)
+                fileDownload = "{0}{1}/{2}".format(backup_dir, dir_page_web, filePageWeb)
             logger.info("{0}/{1} : {2}".format(i, len(webpage), fileDownload))
             open(fileDownload, "wb").write(r.content)
 
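
Reviewer note: the function now receives the already-fetched page list instead of fetching it itself, and the format string drops a slash. That slash matters because urlparse paths keep their leading "/", so the old pattern produced a doubled slash. A worked example with a placeholder URL and backup directory (neither is from the commit):

from urllib.parse import urlparse

o = urlparse("https://example.org/2023/01/post.html")
path_web = o.path.split("/")        # ['', '2023', '01', 'post.html']
filePageWeb = path_web[-1]          # 'post.html'
path_web.pop()                      # drop the filename
dir_page_web = "/".join(path_web)   # '/2023/01' -- leading slash kept

# New pattern yields 'backup/2023/01/post.html':
print("{0}{1}/{2}".format("backup", dir_page_web, filePageWeb))
# Old pattern gave 'backup//2023/01/post.html' (doubled slash):
print("{0}/{1}/{2}".format("backup", dir_page_web, filePageWeb))
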
@@ -95,7 +93,7 @@ if __name__ == '__main__':
     logger = logging.getLogger('web_scrap')
     ch = logging.StreamHandler()
 
-    if args.debug is not None:
+    if args.debug is True:
         logger.setLevel(logging.DEBUG)
         ch.setLevel(logging.DEBUG)
     else:
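
Reviewer note: this is a real behavior fix, assuming --debug is declared as a store_true flag (the parser setup is outside this hunk). With store_true the attribute is always a bool, so it is never None and the old test enabled DEBUG unconditionally. A quick sketch of that assumption:

import argparse

parser = argparse.ArgumentParser()
# Assumed declaration, not shown in this diff: store_true defaults
# to False, so args.debug is a bool either way.
parser.add_argument("--debug", action="store_true")

print(parser.parse_args([]).debug is not None)       # True  -- old test fired even without --debug
print(parser.parse_args([]).debug is True)           # False -- new test stays off
print(parser.parse_args(["--debug"]).debug is True)  # True  -- new test fires only with --debug
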
@@ -106,4 +104,8 @@ if __name__ == '__main__':
     ch.setFormatter(formatter)
     logger.addHandler(ch)
 
-    downloadPage(args.url, args.dir, logger)
+    o = urlparse(args.url)
+    o = o._replace(scheme="https")
+    webpage = getUrlPage(o.geturl().replace(":///", "://"), logger)
+
+    downloadPageHTML(webpage, args.dir, logger)
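
Reviewer note: the URL normalization and the single getUrlPage fetch moved out of downloadPage and into the entry point, so the page list is fetched once and then handed to downloadPageHTML. A sketch of the normalization step with a placeholder URL (example.org is not from the commit):

from urllib.parse import urlparse

# Whatever scheme the user passes is forced to https.
o = urlparse("http://example.org/blog/")
o = o._replace(scheme="https")
print(o.geturl())   # https://example.org/blog/

# The script's extra .replace(":///", "://") is a defensive fix-up:
# for some scheme-less inputs, geturl() can emit a stray third slash
# after the scheme, and the substitution collapses it back to "://".
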