Compare commits
17 Commits
e809e376e5 ... 1.0.0

4b9a790f8a
a6d642811e
dfc9c4277b
89ea5720e5
c62e3e6646
a04baa4dca
36bd30bd5a
699cdc350e
bf5a5b7eb3
2b3729a7bc
4d073e0254
77e61ef571
21d24d638d
896cfa0d52
06599d99fa
1a67ab7dbf
559f5f1e83
backup_canalblog.sh · 47 · Executable file
@@ -0,0 +1,47 @@
+#!/bin/bash
+
+TAR=/usr/bin/tar
+PYTHON=/usr/bin/python3
+GZIP=/usr/bin/gzip
+SCRIPTDIR=/home/valentin/script
+WEBSCRAP=${SCRIPTDIR}/web_scrap.py
+URL=www.clarissariviere.com
+DATE=$(date +%Y%m%d)
+DIRECTORY=/home/valentin/backup
+BACKUPDIR=/home/valentin/backup_clarissa
+LIST=${BACKUPDIR}/backup.list
+fileBackup="backup-clarissa-${DATE}"
+LOGFILE=web_scrap.txt
+SENDER="valczebackup@gmail.com"
+if [ $(date +%u) -eq 5 ]; then
+    echo > ${LIST}
+    rm -rf "${BACKUPDIR}/*-incr.tar.gz"
+    fileBackup="${fileBackup}-full"
+    subject="Sauvegarde full"
+else
+    fileBackup="${fileBackup}-incr"
+    subject="Sauvegarde incremental"
+fi
+subject="${subject} ${URL} ${DATE}"
+echo > ${BACKUPDIR}/${LOGFILE}
+${PYTHON} ${WEBSCRAP} --url ${URL} --dir ${DIRECTORY} --quiet --logfile ${BACKUPDIR}/${LOGFILE}
+if [ ${?} -ne 0 ]; then
+    subject="${subject} echoue : recuperation page"
+    echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
+    exit 1
+fi
+${TAR} --create --file="${BACKUPDIR}/${fileBackup}.tar" --listed-incremental=${LIST} ${DIRECTORY}
+if [ ${?} -ne 0 ]; then
+    subject="${subject} echoue : archivage page "
+    echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
+    exit 1
+fi
+${GZIP} -f -9 "${BACKUPDIR}/${fileBackup}.tar"
+if [ ${?} -ne 0 ]; then
+    subject="${subject} echoue : compression archive "
+    echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
+    exit 1
+fi
+subject="${subject} OK"
+echo ${subject}| mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
+exit 0
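
Note: the wrapper above takes a full backup when date +%u returns 5 (Friday), resetting tar's --listed-incremental snapshot file, and an incremental one on other days; the French mail subjects ("Sauvegarde" = backup, "echoue" = failed) report success or the failing step. A minimal sketch of the same run-and-check flow in Python, using only paths hard-coded in the script; the schedule that triggers the wrapper is not part of this commit:

    # Hedged sketch, not part of the commit: invoke web_scrap.py the way
    # backup_canalblog.sh does, then test the exit code.
    import subprocess

    cmd = ["/usr/bin/python3", "/home/valentin/script/web_scrap.py",
           "--url", "www.clarissariviere.com",
           "--dir", "/home/valentin/backup",
           "--quiet",
           "--logfile", "/home/valentin/backup_clarissa/web_scrap.txt"]
    if subprocess.run(cmd).returncode != 0:  # mirrors `if [ ${?} -ne 0 ]`
        raise SystemExit(1)                  # the script mails the log, then exits 1
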
web_scrap.py · 190
@@ -3,7 +3,7 @@ from bs4 import BeautifulSoup
 from urllib.parse import urlparse
 import requests, os, argparse, logging
 
-def mkdir_path(path_dir, logger):
+def mkdirPath(path_dir, logger):
     if not os.path.exists(path_dir):
         makedir = []
         pathh = path_dir.split("/")
@@ -12,11 +12,88 @@ def mkdir_path(path_dir, logger):
             repath = "/".join(makedir)
             if not os.path.exists(repath):
-                logger.debug("Dossier crée : {0}".format(repath))
-                os.mkdir(repath)
+                try:
+                    if len(repath) > 0:
+                        os.mkdir(repath)
+                except Exception as err:
+                    logger.error("Directory error : {0}".format(err))
+                    logger.debug("Directory error : {0} {1} {2} {3} {4}".format(err, path_dir, repath, pathh, makedir))
+                    exit(1)
 
 
+def getScriptCss(url, js, css, logger):
+    try:
+        page = requests.get(url)
+    except Exception as err:
+        logger.error("Connection error : {0}".format(err))
+        exit(1)
+    page_url = []
+    if page.status_code == 200:
+        soup = BeautifulSoup(page.text, 'html.parser')
+        if js is True:
+            script = soup.find_all("script")
+            for anchor in script:
+                src = anchor.get("src", "/")
+                if src != "/":
+                    try:
+                        u = urlparse(url)
+                        o = urlparse(src)
+                    except Exception as err:
+                        logger.error("parsing error : {0}".format(err))
+                        exit(1)
+                    if o.netloc == "":
+                        o = o._replace(netloc=u.netloc)
+                    o = o._replace(scheme=u.scheme)
+                    page_url.append(o.geturl())
+        if css is True:
+            link = soup.find_all("link")
+            for anchor in link:
+                rel = anchor.get("rel")
+                if rel[0] == "stylesheet":
+                    href = anchor.get("href", "/")
+                    if href != "/":
+                        try:
+                            u = urlparse(url)
+                            o = urlparse(href)
+                        except Exception as err:
+                            logger.error("parsing error : {0}".format(err))
+                            exit(1)
+                        if o.netloc == "":
+                            o = o._replace(netloc=u.netloc)
+                        o = o._replace(scheme=u.scheme)
+                        page_url.append(o.geturl())
+
+
+    return page_url
+
+def getImg(webpage, logger):
+    page_img = []
+    for i in webpage:
+        try:
+            page = requests.get(i)
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
+        if page.status_code == 200:
+            soup = BeautifulSoup(page.text, 'html.parser')
+            img = soup.find_all("img")
+            logger.info("image from page: {0} : ".format(i))
+            for anchor in img:
+                src = anchor.get("src", "/")
+                if src != "/":
+                    if src not in page_img:
+                        logger.info("image: {0} : ".format(src))
+                        page_img.append(src)
+
+
+    return page_img
+
 def getUrlPage(url, logger):
-    page = requests.get(url)
+    try:
+        page = requests.get(url)
+    except Exception as err:
+        logger.error("Connection error : {0}".format(err))
+        exit(1)
     page_url = []
     if page.status_code == 200:
         soup = BeautifulSoup(page.text, 'html.parser')
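
Note: the new getScriptCss absolutizes relative script and stylesheet URLs by copying the page's netloc and scheme into the parsed asset URL via _replace. A minimal standalone sketch of that normalization; the asset path here is hypothetical:

    from urllib.parse import urlparse

    page = urlparse("https://www.clarissariviere.com/")
    asset = urlparse("/dists/js/app.js")  # hypothetical relative src attribute
    if asset.netloc == "":
        asset = asset._replace(netloc=page.netloc)
    asset = asset._replace(scheme=page.scheme)
    print(asset.geturl())  # https://www.clarissariviere.com/dists/js/app.js
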
@@ -28,7 +105,11 @@ def getUrlPage(url, logger):
 
     webpage = []
     for i in page_url:
-        page = requests.get(i)
+        try:
+            page = requests.get(i)
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
         if page.status_code == 200:
             logger.info("page : {0}".format(i))
             if i not in webpage:
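
Note: this hunk adds the same try/except around requests.get that the commit adds everywhere else. A hedged sketch of how that recurring pattern could be factored into one helper; the commit itself inlines the block at each call site:

    import requests

    def fetch(url, logger):
        # single place for the "log the connection error, then exit(1)" policy
        try:
            return requests.get(url)
        except Exception as err:
            logger.error("Connection error : {0}".format(err))
            exit(1)
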
@@ -58,30 +139,44 @@ def getUrlPage(url, logger):
             for title in h2:
                 href = title.find_all("a")[0].get("href", "/")
                 if href not in webpage:
-                    o = urlparse(href)
-                    o = o._replace(scheme="https").geturl()
+                    try:
+                        o = urlparse(href)
+                        o = o._replace(scheme="https").geturl()
+                    except Exception as err:
+                        logger.error("parsing error : {0}".format(err))
+                        exit(1)
                     webpage.append(o)
     return webpage
 
 
-def downloadPage(url, backup_dir, logger):
-    o = urlparse(url)
-    o = o._replace(scheme="https")
-    webpage = getUrlPage(o.geturl().replace(":///", "://"), logger)
+def downloadPage(webpage, backup_dir, logger):
 
     for i in range(0, len(webpage)):
-        o = urlparse(webpage[i])
+        try:
+            o = urlparse(webpage[i])
+        except Exception as err:
+            logger.error("parsing error : {0}".format(err))
+            exit(1)
         path_web = o.path.split("/")
         filePageWeb = path_web[len(path_web)-1]
         path_web.pop(len(path_web)-1)
         dir_page_web = "/".join(path_web)
-        mkdir_path("{0}/{1}".format(backup_dir, dir_page_web), logger)
-        r = requests.get(webpage[i])
+        mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web), logger)
+        try:
+            r = requests.get(webpage[i])
+        except Exception as err:
+            logger.error("Connection error : {0}".format(err))
+            exit(1)
         if r.status_code == 200:
-            fileDownload = "{0}/index.html".format(backup_dir)
+            fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
             if len(dir_page_web) > 0 and len(filePageWeb) > 0:
-                fileDownload = "{0}/{1}/{2}".format(backup_dir, dir_page_web, filePageWeb)
-            logger.info("{0}/{1} : {2}".format(i, len(webpage), fileDownload))
-            open(fileDownload, "wb").write(r.content)
+                fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
+            logger.info("{0}/{1} : {2}".format(i+1, len(webpage), fileDownload))
+            try:
+                open(fileDownload, "wb").write(r.content)
+            except Exception as err:
+                logger.error("file error : {0}".format(err))
+                exit(1)
 
 
 if __name__ == '__main__':
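
Note: downloadPage now receives a ready-made URL list instead of building one itself, and every output path is prefixed with o.netloc, so each host gets its own directory under backup_dir. A small sketch of the resulting layout, reusing the path arithmetic from the hunk; the page URL is hypothetical:

    from urllib.parse import urlparse

    backup_dir = "backup"  # the argparse default
    o = urlparse("https://www.clarissariviere.com/archives/index.html")
    path_web = o.path.split("/")
    filePageWeb = path_web.pop(len(path_web) - 1)
    dir_page_web = "/".join(path_web)
    print("{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb))
    # -> backup/www.clarissariviere.com/archives/index.html
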
@@ -91,19 +186,56 @@ if __name__ == '__main__':
         default="backup",
         help="backup file path")
     parser.add_argument("--debug", help="Verbosity", action="store_true")
     parser.add_argument("--logfile", help="Log file", default="")
+    parser.add_argument("--no-css", help="No CSS", dest="css", action="store_true")
+    parser.add_argument("--no-js", help="No JS", dest="js", action="store_true")
+    parser.add_argument("--no-img", help="No img", dest="img", action="store_true")
+    parser.add_argument("--no-html", help="No HTML", dest="html", action="store_true")
+    parser.add_argument("--quiet", help="No console output", action="store_true")
     args = parser.parse_args()
     logger = logging.getLogger('web_scrap')
-    ch = logging.StreamHandler()
-
-    if args.debug is not None:
-        logger.setLevel(logging.DEBUG)
-        ch.setLevel(logging.DEBUG)
-    else:
-        logger.setLevel(logging.INFO)
-        ch.setLevel(logging.INFO)
-
     formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-    ch.setFormatter(formatter)
-    logger.addHandler(ch)
-
-    downloadPage(args.url, args.dir, logger)
+    if args.quiet is False:
+        ch = logging.StreamHandler()
+        if args.debug is True:
+            logger.setLevel(logging.DEBUG)
+            ch.setLevel(logging.DEBUG)
+        else:
+            logger.setLevel(logging.INFO)
+            ch.setLevel(logging.INFO)
+        ch.setFormatter(formatter)
+        logger.addHandler(ch)
+
+
+    if len(args.logfile) > 0:
+        fileHandler = logging.FileHandler(args.logfile)
+        if args.debug is True:
+            fileHandler.setLevel(logging.DEBUG)
+        else:
+            fileHandler.setLevel(logging.INFO)
+        fileHandler.setFormatter(formatter)
+        logger.addHandler(fileHandler)
+
+    try:
+        o = urlparse(args.url)
+        o = o._replace(scheme="https")
+        url = o.geturl().replace(":///", "://")
+    except Exception as err:
+        logger.error("parsing error : {0}".format(err))
+    if args.js is False:
+        script = getScriptCss(url, True, False, logger)
+        downloadPage(script, "{0}/{1}/{2}".format(args.dir, o.path, "dists/js"), logger)
+
+    if args.css is False:
+        css = getScriptCss(url, False, True, logger)
+        downloadPage(css, "{0}/{1}/{2}".format(args.dir, o.path, "dists/css"), logger)
+
+    if args.html is False or args.img is False:
+        webpage = getUrlPage(url, logger)
+        if args.html is False:
+            downloadPage(webpage, args.dir, logger)
+
+    if args.img is False:
+        page_src = getImg(webpage, logger)
+        downloadPage(page_src, "{0}/{1}/{2}".format(args.dir, o.path, "img"), logger)
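
Note: the new --no-css, --no-js, --no-img and --no-html options rely on argparse dest inversion: passing --no-css stores True into args.css, and the main block downloads CSS only while args.css is False. A minimal sketch of that flag semantics:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--no-css", help="No CSS", dest="css", action="store_true")
    print(parser.parse_args([]).css)            # False -> CSS is downloaded
    print(parser.parse_args(["--no-css"]).css)  # True  -> CSS download is skipped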