logger web_scrap

parent c7dc2d626f
commit 3ccebbac36

web_scrap.py: 37 changed lines (26 additions, 11 deletions)
@@ -1,9 +1,9 @@
 #!/usr/bin/python3
 from bs4 import BeautifulSoup
 from urllib.parse import urlparse
-import requests, os, argparse
+import requests, os, argparse, logging
 
-def mkdir_path(path_dir):
+def mkdir_path(path_dir, logger):
     if not os.path.exists(path_dir):
         makedir = []
         pathh = path_dir.split("/")
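Review note: the commit threads a `logger` parameter through every function signature. A common alternative, sketched here only for comparison (it is not what this commit does), is a module-level logger: `getLogger` returns one singleton per name, so the handlers that the `__main__` block attaches to `'web_scrap'` also apply to records emitted from module scope, and the signatures stay unchanged.

    import logging

    # One logger per name, shared process-wide; handler setup in
    # __main__ applies to records emitted here as well.
    logger = logging.getLogger('web_scrap')

    def mkdir_path(path_dir):
        # ... directory-creation loop as in the commit ...
        logger.debug("Dossier crée : {0}".format(path_dir))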
@@ -11,10 +11,11 @@ def mkdir_path(path_dir):
             makedir.append(i)
             repath = "/".join(makedir)
             if not os.path.exists(repath):
+                logger.debug("Dossier crée : {0}".format(repath))
                 os.mkdir(repath)
 
 
-def getUrlPage(url):
+def getUrlPage(url, logger):
     page = requests.get(url)
     page_url = []
     if page.status_code == 200:
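The hunk above adds a debug line for each intermediate directory while mkdir_path rebuilds the path segment by segment. For comparison only (the commit keeps its manual loop), the standard library collapses the whole loop into one call:

    import os

    def mkdir_path(path_dir, logger):
        # Creates every missing parent in one call; exist_ok=True
        # makes it a no-op when the directory already exists.
        os.makedirs(path_dir, exist_ok=True)
        logger.debug("Dossier crée : {0}".format(path_dir))

The trade-off is a single debug line for the whole path instead of one per intermediate directory.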
@@ -29,7 +30,7 @@ def getUrlPage(url):
     for i in page_url:
         page = requests.get(i)
         if page.status_code == 200:
-            print("page : {0}".format(i))
+            logger.info("page : {0}".format(i))
             if i not in webpage:
                 webpage.append(i)
             soup = BeautifulSoup(page.text, 'html.parser')
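A side note on the print-to-logger swaps in this and the next hunk: `logging` also accepts lazy %-style arguments, which defer building the message string until a handler actually emits the record. A drop-in variant of the line above, reusing the loop variable `i` from the hunk:

    logger.info("page : %s", i)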
@@ -47,7 +48,7 @@ def getUrlPage(url):
             url_paging = "{0}/archives/p{1}-10.html".format(url, paging)
             if len(categorie) > 2:
                 url_paging = "{0}/archives/{1}/p{2}-10.html".format(url, categorie[2], paging)
-            print(url_paging)
+            logger.info(url_paging)
             if url_paging not in webpage:
                 webpage.append(url_paging)
                 page = requests.get(url_paging)
@@ -63,23 +64,23 @@ def getUrlPage(url):
     return webpage
 
 
-def downloadPage(url, backup_dir):
+def downloadPage(url, backup_dir, logger):
     o = urlparse(url)
     o = o._replace(scheme="https")
-    webpage = getUrlPage(o.geturl().replace(":///", "://"))
+    webpage = getUrlPage(o.geturl().replace(":///", "://"), logger)
     for i in range(0, len(webpage)):
         o = urlparse(webpage[i])
         path_web = o.path.split("/")
         filePageWeb = path_web[len(path_web)-1]
         path_web.pop(len(path_web)-1)
         dir_page_web = "/".join(path_web)
-        mkdir_path("{0}/{1}".format(backup_dir, dir_page_web))
+        mkdir_path("{0}/{1}".format(backup_dir, dir_page_web), logger)
         r = requests.get(webpage[i])
         if r.status_code == 200:
             fileDownload = "{0}/index.html".format(backup_dir)
             if len(dir_page_web) > 0 and len(filePageWeb) > 0:
                 fileDownload = "{0}/{1}/{2}".format(backup_dir, dir_page_web, filePageWeb)
-            print("{0}/{1} : {2}".format(i, len(webpage), fileDownload))
+            logger.info("{0}/{1} : {2}".format(i, len(webpage), fileDownload))
             open(fileDownload, "wb").write(r.content)
 
 
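Review note on a line the commit leaves untouched: `open(fileDownload, "wb").write(r.content)` never explicitly closes the file and relies on the handle being garbage-collected. A minimal self-contained sketch of the usual idiom (the URL and filename are placeholders, not taken from the script):

    import requests

    r = requests.get("https://example.com/index.html")  # placeholder URL
    if r.status_code == 200:
        # The context manager closes (and flushes) the handle even if
        # the write raises, unlike the bare open(...).write(...) form.
        with open("index.html", "wb") as f:
            f.write(r.content)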
@@ -89,6 +90,20 @@ if __name__ == '__main__':
     parser.add_argument("--dir",
                         default="backup",
                         help="backup file path")
-    parser.add_argument("--verbosity", help="Verbosity", action="store_false")
+    parser.add_argument("--debug", help="Verbosity", action="store_true")
     args = parser.parse_args()
-    downloadPage(args.url, args.dir)
+    logger = logging.getLogger('web_scrap')
+    ch = logging.StreamHandler()
+
+    if args.debug is not None:
+        logger.setLevel(logging.DEBUG)
+        ch.setLevel(logging.DEBUG)
+    else:
+        logger.setLevel(logging.INFO)
+        ch.setLevel(logging.INFO)
+
+    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+    ch.setFormatter(formatter)
+    logger.addHandler(ch)
+
+    downloadPage(args.url, args.dir, logger)
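Review note on the new `__main__` block: with `action="store_true"`, argparse defaults `args.debug` to `False`, never `None`, so `if args.debug is not None:` is always true and both the logger and the handler always end up at DEBUG. Truth-testing the flag gives the behaviour the branch seems to intend:

    if args.debug:  # False unless --debug was passed
        logger.setLevel(logging.DEBUG)
        ch.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)
        ch.setLevel(logging.INFO)

Assuming the script also defines a `--url` option (it reads `args.url` in the last added line), a typical invocation would look like:

    python3 web_scrap.py --url https://example.com --dir backup --debug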