Compare commits
234 Commits
Author | SHA1 | Date | |
---|---|---|---|
f8e8239f40 | |||
19f96abb2d | |||
a936857eb9 | |||
9ab484da8d | |||
c275f10fb0 | |||
fa1854052b | |||
0e5afe60a7 | |||
d2cfb949f8 | |||
bce1643135 | |||
0c94783852 | |||
6f78a48e57 | |||
dbab60c7dc | |||
41e192f903 | |||
dce2c2dfa5 | |||
eaec1ba9d4 | |||
3059f785c2 | |||
279a9f2786 | |||
963f83ae81 | |||
7b154e3a1d | |||
e5109204aa | |||
2279e4b0b6 | |||
2e21040196 | |||
b4d0fe8aa0 | |||
6401692d0d | |||
1fc9c48d2c | |||
d9c20cedcb | |||
9d41e57379 | |||
d88ae7ed44 | |||
50bf31d334 | |||
9b58b45ae8 | |||
418bea3778 | |||
5959ab5b2e | |||
e4eb1b6b68 | |||
9ed5ffe399 | |||
28b513e1b2 | |||
02f0c20bd0 | |||
1655217050 | |||
49c1552062 | |||
7f800c8f7b | |||
5399b12133 | |||
b7493206a2 | |||
8de7485775 | |||
eee14e08f1 | |||
cf2c1aa617 | |||
e17cace820 | |||
941776a7c1 | |||
72fbe0a364 | |||
1f8ea70b40 | |||
c6894648d1 | |||
d5ddf1601b | |||
aa8ac9cfcb | |||
82f9acd658 | |||
7593b68b6c | |||
5fe4b1f786 | |||
0445054dc8 | |||
be9ad9b934 | |||
e6328135da | |||
dded126926 | |||
bca529f3c3 | |||
5059a15826 | |||
20c4adb3cf | |||
74fa87ea73 | |||
03f833a2c3 | |||
9acb620f93 | |||
c6ccf98b1b | |||
95f5203727 | |||
9bbf769b40 | |||
f0b4ba5e27 | |||
47f504beb5 | |||
4b6b06aade | |||
d10867a983 | |||
6fba5f009a | |||
699cecad4f | |||
9f87f38347 | |||
55d62cebfb | |||
193b0e6ef7 | |||
b88917127d | |||
781d8959c4 | |||
a67ff868f3 | |||
8e0abc40bd | |||
9149a6c5cb | |||
d1b6e8048a | |||
0eab1d885b | |||
35ff22d463 | |||
7dace5bdb7 | |||
703cc8922a | |||
ff3ee301fb | |||
04da5bc5f6 | |||
f01a69a1e7 | |||
da4db0277a | |||
7228911e68 | |||
9e7e1b27fd | |||
16368c13bb | |||
c631909cb6 | |||
3e76892676 | |||
3e75f05340 | |||
e48b262d7e | |||
2f1c081823 | |||
4bd6f5c038 | |||
d3a03e1cb3 | |||
f507efce60 | |||
75c9fa0ad3 | |||
110ccc4bb1 | |||
269a9e9ccd | |||
4c0ec09d91 | |||
42cfb30583 | |||
c76b20e64a | |||
aff69bfcbc | |||
fd426f150d | |||
e21721cac1 | |||
69504687ef | |||
fb59746fc0 | |||
5916cbff00 | |||
cd2fbd5372 | |||
f3b04f9459 | |||
a400375e01 | |||
351cb10f01 | |||
5c5dc707f5 | |||
f69298179a | |||
d3ec7d147d | |||
0fc6e78a18 | |||
3718b807ba | |||
75772ba7f0 | |||
769b7f43fc | |||
ba42d56be1 | |||
d18f4e1579 | |||
8bdaea3910 | |||
f3cb5c4069 | |||
cfb24bed0e | |||
ee8674fd59 | |||
ece4d78dd8 | |||
3d7aa19441 | |||
3c2f1cc017 | |||
f9be6770e3 | |||
21d2f35e6e | |||
4789fe80aa | |||
3161a06459 | |||
1f6bd96a8e | |||
b359521001 | |||
73c0998ae0 | |||
939e744d1d | |||
0029898e6e | |||
ab3720fbbc | |||
7a1286c4e2 | |||
5a4bdbb420 | |||
bf4c2480f8 | |||
a0b816fe18 | |||
08ff16527d | |||
0acd5067cb | |||
aaac2385a3 | |||
88f258ffba | |||
a39e2200bd | |||
5a5658d955 | |||
4e6ae92217 | |||
34d6cc39d2 | |||
c44ffc5a86 | |||
ca39826a11 | |||
f8d103ff61 | |||
1c252c9a14 | |||
84cc204007 | |||
edb9442b1c | |||
d64aed6240 | |||
a5e7cb89f7 | |||
ae7cb1e4e0 | |||
4cf301b216 | |||
581b6941a6 | |||
bd8ac241c1 | |||
0e15e88f31 | |||
b54785c455 | |||
1600a17383 | |||
74e7f1d74b | |||
225c7ecabb | |||
1311ef2ff2 | |||
f5e82fe4c4 | |||
76d2771886 | |||
335266e1ad | |||
a856311f04 | |||
7848968fa1 | |||
05a3a28c6f | |||
7c75116c5b | |||
48e77084e8 | |||
aa5c8893ec | |||
4ddc4a7cd3 | |||
ed78f22f2e | |||
e74dfc2b73 | |||
cd50e45493 | |||
19c62f38d4 | |||
bba6cd1ca7 | |||
9ed08ea964 | |||
cd6b03b0ff | |||
7e484fa308 | |||
ebc6206ec9 | |||
b3f623cbd5 | |||
8384dcb2b6 | |||
2289066dd5 | |||
481fc40929 | |||
6f7504e669 | |||
d58ead52b2 | |||
9ab33c169e | |||
34115a3a7d | |||
1f7e442d04 | |||
5768b37cd1 | |||
ba511bc6c4 | |||
665f1474f2 | |||
404ad5dd6c | |||
501876dac2 | |||
c9b1264153 | |||
f77274f00e | |||
1e162662e6 | |||
ec4135c5d0 | |||
cb64dd47ab | |||
42b7e7e408 | |||
cc33ab34df | |||
f07f8c040f | |||
4054f41e9b | |||
faa22f1438 | |||
bcb3abce01 | |||
e0b4895b62 | |||
066d8cae52 | |||
90881eb037 | |||
c92f24e6af | |||
301f1e2d4b | |||
e1b0c0cba8 | |||
f250637912 | |||
19229bc65b | |||
d96d38e508 | |||
dc0fd0c781 | |||
e3b9e92c23 | |||
82ce3d1a2b | |||
605bd06e51 | |||
491f15ae3c | |||
0c41dc3e65 | |||
3622e37942 | |||
eae95d5671 |
5
.gitignore
vendored
Normal file
5
.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
backup*/
|
||||
wp-navigation
|
||||
*.log
|
||||
__pycache__/
|
||||
wp-gallery
|
@@ -3,8 +3,8 @@
|
||||
TAR=/usr/bin/tar
|
||||
PYTHON=/usr/bin/python3
|
||||
GZIP=/usr/bin/gzip
|
||||
SCRIPTDIR=/home/valentin/script
|
||||
WEBSCRAP=${SCRIPTDIR}/web_scrap.py
|
||||
SCRIPTDIR=/home/valentin/script/webscrap
|
||||
WEBSCRAP=${SCRIPTDIR}/import_export_canalblog.py
|
||||
URL=www.clarissariviere.com
|
||||
DATE=$(date +%Y%m%d)
|
||||
DIRECTORY=/home/valentin/backup
|
||||
@@ -24,8 +24,8 @@ else
|
||||
fi
|
||||
subject="${subject} ${URL} ${DATE}"
|
||||
echo > ${BACKUPDIR}/${LOGFILE}
|
||||
${PYTHON} ${WEBSCRAP} --url ${URL} --dir ${DIRECTORY} --quiet --logfile ${BACKUPDIR}/${LOGFILE}
|
||||
if [ ${?} -ne 0 ]; then
|
||||
${PYTHON} ${WEBSCRAP} --quiet --logfile ${BACKUPDIR}/${LOGFILE} --parallel 20 export --url ${URL} --directory ${DIRECTORY}
|
||||
if [ ${?} -ne 0 ]; then
|
||||
subject="${subject} echoue : recuperation page"
|
||||
echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}
|
||||
exit 1
|
||||
|
374
import_export_canalblog.py
Normal file
374
import_export_canalblog.py
Normal file
@@ -0,0 +1,374 @@
|
||||
#!/usr/bin/python3
|
||||
from requests.auth import HTTPBasicAuth
|
||||
from getpass import getpass
|
||||
from urllib.parse import urlparse
|
||||
from concurrent import futures
|
||||
from concurrent.futures import as_completed, wait, ALL_COMPLETED
|
||||
|
||||
import argparse, logging, threading, os, glob
|
||||
from lib.WPImport import WPimport
|
||||
from lib.WPExport import WPExport
|
||||
from lib.WPRemove import WPRemove
|
||||
from lib.WPChange import WPChange
|
||||
from lib.WPMenu import WPMenu
|
||||
|
||||
def errorRevert(logger, revert, tmp, parallel=None):
    """Validate the per-thread resume state before a --revert run.

    logger   -- logging.Logger used for error reporting.
    revert   -- when True, check that ``tmp`` holds usable state files.
    parallel -- expected number of per-thread ``*.json`` state files; when
                None, falls back to the module-global ``args.parallel``
                (kept for backward compatibility with existing callers,
                which pass only three arguments).

    Exits the process with status 1 when no state files exist or when their
    count does not match the requested thread count (stale state from a run
    with a different --parallel value is deleted first).
    """
    if revert is True:
        if parallel is None:
            # Historical behavior: the original body read the CLI option
            # straight from the ``args`` global parsed in __main__.
            parallel = args.parallel
        files_tmp = glob.glob("{0}/*.json".format(tmp))
        if len(files_tmp) == 0:
            logger.error("Error revert, because files not found")
            exit(1)
        if len(files_tmp) != int(parallel):
            # Incompatible state: remove it so the user can restart cleanly.
            for file_r in files_tmp:
                os.remove(file_r)
            logger.error("Error revert, because number files tmp is incompatible with parallel number")
            exit(1)
|
||||
|
||||
def change(index, number, args, logger, tmp, revert):
    """Worker entry point for the "change" sub-command.

    Builds a WPChange worker identified by *index* (out of *number*
    threads) and rewrites asset URLs in every exported file found under
    ``args.directory``; *revert* resumes from the tmp state instead of
    re-slicing the file list.
    """
    wp_change = WPChange(
        logger=logger,
        index_name=index,
        number_thread=number,
        tmp=tmp,
    )
    wp_change.fromDirectory(args.directory, revert)
    del wp_change
|
||||
|
||||
def remove(index, number, args, basic, logger, ssl_wordpress):
    """Worker entry point: delete content from every configured WordPress.

    With ``--remove-all`` (args.remove) every category of content is wiped
    from each comma-separated site in args.wordpress; otherwise only the
    categories enabled by the individual flags (posts / categories / tags /
    media) are deleted.
    """
    cleaner = WPRemove(basic=basic, wordpress="", logger=logger, ssl_wordpress=ssl_wordpress, index_name=index, number_thread=number)
    for site in args.wordpress.split(","):
        cleaner.setUrl(site)
        if args.remove == True:
            # Full wipe — same order as the selective branch below would
            # use if every flag were set, except tags before categories.
            cleaner.cleanPosts()
            cleaner.cleanTags()
            cleaner.cleanCategories()
            cleaner.cleanMedia()
        else:
            if args.posts == True:
                cleaner.cleanPosts()
            if args.categories == True:
                cleaner.cleanCategories()
            if args.tags == True:
                cleaner.cleanTags()
            if args.media == True:
                cleaner.cleanMedia()
    del cleaner
|
||||
|
||||
|
||||
def download(name_thread, max_thread, url, logger, parser, directory, html, img, ssl_canalblog, revert, tmp):
    """Worker entry point for the "export" sub-command.

    *html* and *img* carry the ``--no-html`` / ``--no-img`` flags, so False
    means "do download".  When *revert* is True the URL-discovery step is
    skipped and the exporter relies on previously saved per-thread state.
    """
    thread_label = "Thread-{0}".format(int(name_thread) + 1)
    exporter = WPExport(name=thread_label, url=url, logger=logger, parser=parser, directory=directory, ssl_canalblog=ssl_canalblog)
    if revert is False:
        exporter.getUrlPage(name_thread, max_thread)
    for page_type in ["article", "page"]:
        for section in ["publications", "principal"]:
            if html is False:
                exporter.downloadHTML(section, page_type)
            if img is False:
                exporter.downloadImg(section, page_type)
    del exporter
|
||||
|
||||
|
||||
def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, basic, serial, ssl_wordpress, ssl_canalblog, create, update, image, revert, tmp, author):
    """Worker entry point: export page URLs from canalblog site(s) and import
    the content into WordPress site(s).

    canalblog / wordpress -- comma-separated URL lists.
    serial False: every canalblog URL is imported into every WordPress site.
    serial True : URL i is paired with site i (lists must be same length).
    Exits with status 1 on URL parsing errors or mismatched list lengths.
    """
    canalblog = canalblog.split(",")
    wordpress = wordpress.split(",")
    name = "Thread-{0}".format(int(name_thread) + 1)
    protocol = "https"
    if ssl_canalblog is False:
        protocol = "http"
    if serial is False:
        for canal in canalblog:
            try:
                # Force the chosen scheme onto whatever the user typed.
                o = urlparse(canal)
                o = o._replace(scheme=protocol)
                url = o.geturl().replace(":///", "://")
            except Exception as err:
                logger.error("{0} : parsing error : {1}".format(name, err))
                exit(1)
            exportWp = WPExport(name=name, url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog, tmp=tmp)
            if not revert:
                exportWp.getUrlPage(name_thread, max_thread)
            del exportWp
            for j in wordpress:
                importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp, author=author)
                for k in ["article", "page"]:
                    for l in ["publications", "principal"]:
                        importWp.fromUrl(l, k)
                del importWp
    else:
        if len(canalblog) != len(wordpress):
            logger.error("{0} : ERREUR : Le nombre de dossier n'est pas equivalent au nombre d'URL wordpress".format(name))
            exit(1)
        # BUG FIX: was range(0, len(canalblog) - 1), which silently skipped
        # the last canalblog/wordpress pair.
        for i in range(len(canalblog)):
            try:
                o = urlparse(canalblog[i])
                o = o._replace(scheme=protocol)
                url = o.geturl().replace(":///", "://")
            except Exception as err:
                logger.error("parsing error : {0}".format(err))
                exit(1)
            # Consistency fix: pass tmp here too, so --tmp is honored in
            # serial mode the same way as in the parallel branch above.
            exportWp = WPExport(name=name, url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog, tmp=tmp)
            if not revert:
                exportWp.getUrlPage(name_thread, max_thread)
            del exportWp
            importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp, author=author)

            for k in ["article", "page"]:
                for l in ["publications", "principal"]:
                    # BUG FIX: was importWp.fromUrl(webpage[l][k]) — "webpage"
                    # is undefined here (NameError). Mirror the non-serial
                    # branch, which calls fromUrl(l, k).
                    importWp.fromUrl(l, k)

            del importWp
|
||||
|
||||
|
||||
def importDirectory(name_thread, max_thread, directory, logger, parser, wordpress, basic, serial, ssl_wordpress, create, update, image, revert, author):
    """Worker entry point: import previously exported HTML directories into
    WordPress site(s).

    directory / wordpress -- comma-separated lists.
    serial False: every directory is imported into every WordPress site.
    serial True : directory i is paired with site i (same lengths required).
    Exits with status 1 when the serial lists differ in length.
    """
    name = "Thread-{0}".format(int(name_thread) + 1)
    directory = directory.split(",")
    wordpress = wordpress.split(",")
    if serial is False:
        for i in wordpress:
            importWp = WPimport(name=name, basic=basic, wordpress=i, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, author=author)
            for j in directory:
                importWp.fromDirectory(j, name_thread, max_thread, revert)
            del importWp

    else:
        if len(directory) != len(wordpress):
            logger.error("{0} : Error : Number directory is different than wordpress".format(name))
            exit(1)
        # BUG FIX: was range(0, len(wordpress) - 1), which silently skipped
        # the last directory/wordpress pair.
        for i in range(len(wordpress)):
            importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, author=author)
            importWp.fromDirectory(directory[i], name_thread, max_thread, revert)
            del importWp
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI front-end with five sub-commands (import / remove / export /
    # change / menu) for mirroring a canalblog site into WordPress.
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug", help="Verbosity", action="store_true")
    parser.add_argument("--logfile", help="Log file", default="")
    parser.add_argument("--quiet", help="No console output", action="store_true")
    parser.add_argument("--parser", help="Parser content", default="html.parser")
    parser.add_argument("--parallel", help="Define number thread (default : 1)", default=1)
    parser.add_argument("--no-ssl", help="No ssl for canalblog and/or wordpress (example wordpress,canalblog)", dest="ssl", default="")
    parser.add_argument("--revert", help="Restart a work from stopping work", action="store_true")
    parser.add_argument("--tmp", help="directory tmp", default="/tmp/import_export_canablog")

    subparsers = parser.add_subparsers(dest="command")

    # "import": push exported content into WordPress (optionally wiping first).
    import_parser = subparsers.add_parser("import")
    import_parser.add_argument("--user", help="wordpress user", required=True)
    import_parser.add_argument("--password", help="password wordpress's user", default="")
    import_parser.add_argument("--file", help="HTML file", default="")
    import_parser.add_argument("--directory", help="HTML directory", default="")
    import_parser.add_argument("--canalblog", help="URL Canalblog", default="")
    import_parser.add_argument("--wordpress", help="URL Wordpress", required=True)
    import_parser.add_argument("--serial", help="Serial execution", action="store_true")
    import_parser.add_argument("--remove-all", dest="remove", help="Remove all", action="store_true")
    import_parser.add_argument("--remove-posts", help="Remove all posts", dest="posts", action="store_true")
    import_parser.add_argument("--remove-categories", help="Remove all categories", dest="categories", action="store_true")
    import_parser.add_argument("--remove-tags", help="Remove all tags", dest="tags", action="store_true")
    import_parser.add_argument("--remove-media", help="Remove all media", dest="media", action="store_true")
    # NOTE(review): default="store_false" makes args.create/update/image/menu
    # the truthy *string* "store_false" when the flag is absent and True when
    # passed. It looks like default=False was intended — confirm before
    # changing, since the lib classes receive these values as no_create etc.
    import_parser.add_argument("--no-create", help="No create post", dest="create", default="store_false", action="store_true")
    import_parser.add_argument("--no-update", help="No update post", dest="update", default="store_false", action="store_true")
    import_parser.add_argument("--no-image", help="No image add or update", dest="image", default="store_false", action="store_true")
    import_parser.add_argument("--no-menu", help="No menu add or update", dest="menu", default="store_false", action="store_true")

    import_parser.add_argument("--author", dest="author", help="Define author", default="")

    # "remove": delete content from WordPress sites.
    remove_parser = subparsers.add_parser("remove")
    remove_parser.add_argument("--user", help="wordpress user", required=True)
    remove_parser.add_argument("--password", help="password wordpress's user", default="")
    remove_parser.add_argument("--wordpress", help="URL Wordpress", required=True)
    remove_parser.add_argument("--all", dest="remove", help="Remove all (posts, media, tags, categories)", action="store_true")
    remove_parser.add_argument("--posts", help="Remove all posts", action="store_true")
    remove_parser.add_argument("--categories", help="Remove all categories", action="store_true")
    remove_parser.add_argument("--tags", help="Remove all tags", action="store_true")
    remove_parser.add_argument("--media", help="Remove all media", action="store_true")

    # "export": scrape a canalblog site to local files.
    export_parser = subparsers.add_parser("export")

    export_parser.add_argument("--url", help="canblog URL to be scraping", required=True)
    export_parser.add_argument("--directory",
        default="backup",
        help="backup file path")
    export_parser.add_argument("--no-css", help="No CSS", dest="css", action="store_true")
    export_parser.add_argument("--no-js", help="No JS", dest="js", action="store_true")
    export_parser.add_argument("--no-img", help="No img", dest="img", action="store_true")
    export_parser.add_argument("--no-html", help="No HTML", dest="html", action="store_true")

    # "change": rewrite asset URLs inside already-exported HTML files.
    change_parser = subparsers.add_parser("change")
    change_parser.add_argument("--directory",
        default="",
        help="Directory")
    change_parser.add_argument("--file",
        default="",
        help="File")

    # "menu": rebuild the WordPress menu from an exported page.
    menu_parser = subparsers.add_parser("menu")
    menu_parser.add_argument("--user", help="wordpress user", required=True)
    menu_parser.add_argument("--password", help="password wordpress's user", default="")
    menu_parser.add_argument("--file", help="HTML file", default="")
    menu_parser.add_argument("--canalblog", help="URL Canalblog", default="")
    menu_parser.add_argument("--wordpress", help="URL Wordpress", required=True)

    args = parser.parse_args()

    # Logging: console handler unless --quiet, optional file handler via
    # --logfile; --debug raises both to DEBUG level.
    logger = logging.getLogger('import export canalblog')
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    ssl_canalblog = True
    ssl_wordpress = True

    # --no-ssl takes a comma list naming which side(s) use plain http.
    for i in args.ssl.split(","):
        if i == "canalblog":
            ssl_canalblog = False
        if i == "wordpress":
            ssl_wordpress = False

    if args.quiet is False:
        ch = logging.StreamHandler()
        if args.debug is True:
            logger.setLevel(logging.DEBUG)
            ch.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)
            ch.setLevel(logging.INFO)
        ch.setFormatter(formatter)
        logger.addHandler(ch)

    if len(args.logfile) > 0:
        fileHandler = logging.FileHandler(args.logfile)
        if args.debug is True:
            fileHandler.setLevel(logging.DEBUG)
        else:
            fileHandler.setLevel(logging.INFO)
        fileHandler.setFormatter(formatter)
        logger.addHandler(fileHandler)

    # Per-thread resume state is stored here as <Thread-N>.json files.
    os.makedirs(args.tmp, exist_ok=True)

    if args.command == "import" or args.command == "remove" or args.command == "menu":
        # These sub-commands call the WordPress REST API and need credentials;
        # prompt when --password was not given on the command line.
        password = args.password
        if len(args.password) == 0:
            password = getpass()
            if len(password) == 0:
                logger.error("No password error !!! ")
                exit(1)

        basic = HTTPBasicAuth(args.user, password)
        if args.command == "import":
            wordpress = args.wordpress.split(",")
            importWp = WPimport(basic=basic, wordpress="", logger=logger, parser=args.parser, ssl_wordpress=ssl_wordpress, author=args.author, ssl_canalblog=ssl_canalblog)
            if len(args.file) > 0:
                # Import explicit HTML file(s) into every listed site.
                for i in wordpress:
                    importWp.setUrl(i)
                    importWp.fromFile(files=args.file.split(","))
                if args.menu is False:
                    menuWp = WPMenu(name="Thread-1", basic=basic, wordpress=args.wordpress, logger=logger, parser=args.parser, ssl_canalblog=ssl_canalblog, ssl_wordpress=ssl_wordpress)
                    menuWp.fromFile("{0}".format(args.file.split(",")[0]))
            if len(args.directory) > 0:
                # Directory import: wipe first (remove workers), then fan the
                # import out over --parallel threads.
                try:
                    with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                        wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
                        wait(wait_for, return_when=ALL_COMPLETED)
                        errorRevert(logger, args.revert, args.tmp)
                        wait_for = [
                            ex.submit(importDirectory, i, int(args.parallel), args.directory, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, args.create, args.update, args.image, args.revert, args.author)
                            for i in range(0, int(args.parallel))
                        ]
                        if args.menu is False:
                            wait(wait_for, return_when=ALL_COMPLETED)
                            menuWp = WPMenu(name="Thread-1", basic=basic, wordpress=args.wordpress, logger=logger, parser=args.parser, ssl_canalblog=ssl_canalblog, ssl_wordpress=ssl_wordpress)
                            menuWp.fromFile("{0}/index.html".format(args.directory))
                except Exception as err:
                    logger.error("Threading error : {0}".format(err))
            if len(args.canalblog) > 0:
                # Live import straight from the canalblog site(s).
                try:
                    with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                        wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
                        wait(wait_for, return_when=ALL_COMPLETED)
                        errorRevert(logger, args.revert, args.tmp)
                        wait_for = [
                            ex.submit(importUrl, i, int(args.parallel), args.canalblog, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, ssl_canalblog, args.create, args.update, args.image, args.revert, args.tmp, args.author)
                            for i in range(0, int(args.parallel))
                        ]
                        if args.menu is False:
                            wait(wait_for, return_when=ALL_COMPLETED)
                            menuWp = WPMenu(name="Thread-1", basic=basic, wordpress=args.wordpress, logger=logger, parser=args.parser, ssl_canalblog=ssl_canalblog, ssl_wordpress=ssl_wordpress)
                            menuWp.fromUrl(args.canalblog)
                except Exception as err:
                    logger.error("Threading error : {0}".format(err))
            exit(0)

    if args.command == "export":
        canalblog = args.url.split(",")
        protocol = "https"
        if ssl_canalblog is False:
            protocol = "http"
        exportWp = WPExport(logger=logger, parser=args.parser, directory=args.directory, ssl_canalblog=ssl_canalblog)
        for canal in canalblog:
            try:
                # Force the chosen scheme onto whatever the user typed.
                o = urlparse(canal)
                o = o._replace(scheme=protocol)
                url = o.geturl().replace(":///", "://")
            except Exception as err:
                logger.error("parsing error : {0}".format(err))
                exit(1)
            exportWp.setUrl(url)
            if args.js is False:
                exportWp.downloadJs()

            if args.css is False:
                exportWp.downloadCss()
        del exportWp

        if args.html is False or args.img is False:
            try:
                with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                    # NOTE(review): `url` here is whatever the *last* loop
                    # iteration produced — with several --url values all the
                    # HTML/img workers scrape only that final site; confirm
                    # whether that is intended.
                    wait_for = [
                        ex.submit(download, i, int(args.parallel), url, logger, args.parser, args.directory, args.html, args.img, ssl_canalblog, args.revert, args.tmp)
                        for i in range(0, int(args.parallel))
                    ]
            except Exception as err:
                logger.error("Threading error : {0}".format(err))
        exit(0)

    if args.command == "remove":
        try:
            with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
        except Exception as err:
            logger.error("Thread error for remove : {0}".format(err))
        exit(0)

    if args.command == "change":
        if len(args.directory) > 0:
            try:
                with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                    errorRevert(logger, args.revert, args.tmp)
                    wait_for = [ ex.submit(change, i, args.parallel, args, logger, args.tmp, args.revert) for i in range(0, int(args.parallel)) ]
            except Exception as err:
                logger.error("Thread error for remove : {0}".format(err))
        if len(args.file) > 0:
            changeWp = WPChange(logger=logger)
            for filei in args.file.split(","):
                # NOTE(review): WPChange.fromFile iterates a *list* of paths
                # but receives a single string here (so it iterates characters
                # and skips them all) — verify against lib/WPChange.
                changeWp.fromFile(filei)
        exit(0)

    if args.command == "menu":
        menuWp = WPMenu(name="Thread-1", basic=basic, wordpress=args.wordpress, logger=logger, parser=args.parser, ssl_canalblog=ssl_canalblog, ssl_wordpress=ssl_wordpress)
        if len(args.file) > 0:
            menuWp.fromFile(args.file)
        if len(args.canalblog) > 0:
            menuWp.fromUrl(args.canalblog)
        exit(0)
|
173
lib/WPChange.py
Normal file
173
lib/WPChange.py
Normal file
@@ -0,0 +1,173 @@
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, logging, re, json
|
||||
|
||||
class WPChange:
    """Rewrite exported HTML files so that external asset URLs (img, js,
    css, and image links) point at the local mirror paths
    (/img/..., /dists/js/..., /dists/css/...).
    """

    # Constructor
    def __init__(self, index_name=1, number_thread=1, logger=None, parser="html.parser", tmp="/tmp/import_export_canablog"):
        """index_name / number_thread identify this worker among the
        --parallel threads; tmp is where the per-thread resume state lives.
        """
        self._name = "Thread-{0}".format(index_name)
        self._logger = logger
        self._number_thread = number_thread
        self._parser = parser
        self._tmp = tmp
        self._index_name = index_name

    # Destructor
    def __del__(self):
        print("{0} : Import finished".format(self._name))

    # Public method

    ## From directory

    def fromDirectory(self, directory="", revert=False):
        """Process every file found under <directory>/archives.

        When revert is False, this worker's slice of the file list is first
        written to the tmp state file; when True the previous state file is
        reused so an interrupted run can resume where it stopped.
        """
        self._directory = directory
        directory = "{0}/archives".format(directory)
        directories = self._getDirectories([], "{0}".format(directory))
        if len(directories) > 0:
            files = self._getFiles(directories)
            if revert is False:
                self._tmpFiles(files=files, number_thread=self._index_name, max_thread=self._number_thread)
            self._fromFileTmp()
        else:
            self._logger.error("{0} : No files for {1}".format(self._name, directory))

    ## From file
    # BUG FIX: the original class defined fromFile twice; the first
    # definition (with number_thread/max_thread slicing) was dead code,
    # silently shadowed by this one. The dead definition is removed.

    def fromFile(self, files=[]):
        """Rewrite an explicit list of file paths (missing paths skipped)."""
        for i in range(0, len(files)):
            if os.path.exists(files[i]):
                self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, len(files), files[i]))
                self._change(files[i])

    # Private method

    def _fromFileTmp(self):
        """Replay this thread's tmp JSON state: rewrite every listed file."""
        try:
            with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
                files = json.loads(file.read())
                self._logger.debug("{0} : size of webpage : {1}".format(self._name, len(files)))
                for i in range(0, len(files)):
                    if os.path.exists(files[i]):
                        self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, len(files), files[i]))
                        self._change(files[i])
        except Exception as ex:
            self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))

    def _tmpFiles(self, files=[], number_thread=1, max_thread=1):
        """Persist this worker's slice of *files* to <tmp>/<name>.json.

        The slice is len(files)/max_thread entries starting at this worker's
        index, mirroring the work split used by the other entry points.
        """
        # BUG FIX: removed a stray debugging print() that opened every call.
        divFiles = int(len(files) / int(max_thread))
        currentRangeFiles = int(divFiles * (int(number_thread)+1))
        firstRange = int(currentRangeFiles - divFiles)
        self._logger.debug("{0} : index : {1}".format(self._name,number_thread))
        self._logger.debug("{0} : first range : {1}".format(self._name,firstRange))
        self._logger.debug("{0} : last range : {1}".format(self._name,currentRangeFiles))
        webpage = []
        for i in range(firstRange, currentRangeFiles):
            webpage.append(files[i])

        try:
            string_webpage = json.dumps(webpage)
            # BUG FIX: use a context manager so the handle is always closed
            # (the original open(...).write(...) leaked the file object).
            with open("{0}/{1}.json".format(self._tmp, self._name), "wt") as state:
                state.write(string_webpage)
        except Exception as ex:
            self._logger.error("{0} : Error for writing webpage : {1}".format(self._name, ex))

    ## Get all files

    def _getFiles(self, item):
        """Return every regular file directly inside each directory of *item*."""
        files = []
        for i in item:
            for j in os.listdir(i):
                if os.path.isfile("{0}/{1}".format(i, j)):
                    files.append("{0}/{1}".format(i, j))
        return files

    ## Get directories

    def _getDirectories(self, subdirectory, item):
        """Recursively collect every sub-directory of *item* into
        *subdirectory* (which is mutated and returned)."""
        sub = subdirectory
        for i in os.listdir(item):
            if os.path.isdir("{0}/{1}".format(item, i)):
                sub.append("{0}/{1}".format(item, i))
                subdirectory = self._getDirectories(sub, "{0}/{1}".format(item, i))
        return subdirectory

    ## Change path img file

    def _change(self, file):
        """Rewrite one HTML file in place: absolute img/js/css (and image
        link) URLs are replaced by local mirror paths."""
        ext_img = ["png", "svg", "gif", "jpg", "jpeg"]
        try:
            with open(file, 'r') as f:
                content = f.read()
                soup = BeautifulSoup(content, self._parser)
                img = soup.find_all("img")
                for i in img:
                    src = i.get("src")
                    o = urlparse(src)
                    if len(o.netloc) > 0:
                        self._logger.info("{0} : Change source image {1} /img/{2}/{3}".format(self._name, src, o.netloc, o.path))
                        content = content.replace(src, "/img/{0}/{1}".format(o.netloc, o.path))
                script = soup.find_all("script", {"type": "text/javascript"})
                for i in script:
                    src = i.get("src")
                    if src is not None:
                        o = urlparse(src)
                        if len(o.netloc) > 0:
                            self._logger.info("{0} : Change source js {1} /dists/js/{2}/{3}".format(self._name, src, o.netloc, o.path))
                            content = content.replace(src, "/dists/js/{0}/{1}".format(o.netloc, o.path))
                link = soup.find_all("link", {"rel": "stylesheet"})
                for i in link:
                    href = i.get("href")
                    if href is not None:
                        o = urlparse(href)
                        if len(o.netloc) > 0:
                            self._logger.info("{0} : Change source css {1} /dists/css/{2}/{3}".format(self._name, href, o.netloc, o.path))
                            content = content.replace(href, "/dists/css/{0}/{1}".format(o.netloc, o.path))

                a = soup.find_all("a", {"target": "_blank"})
                for i in a:
                    href = i.get("href")
                    if href is not None:
                        o = urlparse(href)
                        if len(o.netloc) > 0:
                            ext = o.path.split(".")[len(o.path.split("."))-1]
                            if ext in ext_img:
                                self._logger.info("{0} : Change a img {1} /img/{2}/{3}".format(self._name, href, o.netloc, o.path))
                                content = content.replace(href, "/img/{0}/{1}".format(o.netloc, o.path))
                try:
                    with open(file, "w") as f:
                        self._logger.info("{0} : File write : {1}".format(self._name, file))
                        f.write(content)
                except Exception as ex:
                    self._logger.error("{0} : Error for write file {1} : {2}".format(self._name, file, ex))

        except Exception as ex:
            self._logger.error("{0} : Error for read file {1} : {2}".format(self._name, file, ex))
|
||||
|
||||
|
||||
|
||||
|
326
lib/WPExport.py
Normal file
326
lib/WPExport.py
Normal file
@@ -0,0 +1,326 @@
|
||||
#!/usr/bin/python3
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, argparse, logging, json
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
class WPExport:
|
||||
    def __init__(self, name = "Thread-0", url = "", logger = None, parser = "html.parser", directory = "backup", ssl_canalblog=True, tmp="/tmp/import_export_canablog"):
        """Site exporter for one canalblog URL.

        name      -- worker label used in log messages.
        url       -- site to scrape (can be changed later via setUrl).
        parser    -- BeautifulSoup parser name.
        directory -- local backup root for downloaded files.
        tmp       -- directory holding per-thread resume-state JSON files.
        """
        self._url = url
        self._logger = logger
        self._parser = parser
        self._dir = directory
        self._name = name
        # --no-ssl canalblog downgrades every request to plain http.
        self._protocol = "https"
        if ssl_canalblog is False:
            self._protocol = "http"

        self._request = requests.Session()

        # Retry transient HTTP failures (429 / 5xx) up to 10 times with
        # exponential backoff, mounted for the protocol actually in use.
        retries = Retry(total=10,
                status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)

        self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))
        self._tmp = tmp
|
||||
|
||||
# Destructor
|
||||
def __del__(self):
|
||||
self._logger.info("{0} : Export finished for {1}".format(self._name, self._url))
|
||||
|
||||
# Public method
|
||||
|
||||
|
||||
# Set name
|
||||
|
||||
def setName(self, name):
|
||||
self._name = "Thread-{0}".format(int(name) + 1)
|
||||
|
||||
# Set URL
|
||||
|
||||
def setUrl(self, url):
|
||||
self._url = url
|
||||
|
||||
# Download JS
|
||||
|
||||
def downloadJs(self):
|
||||
script = self._getScriptCss(True, False)
|
||||
o = urlparse(self._url)
|
||||
self._downloadPage(script, "{0}/{1}/{2}".format(self._dir, o.path, "dists/js"))
|
||||
|
||||
# Download CSS
|
||||
|
||||
def downloadCss(self):
|
||||
css = self._getScriptCss(False, True)
|
||||
o = urlparse(self._url)
|
||||
self._downloadPage(css, "{0}/{1}/{2}".format(self._dir, o.path, "dists/css"))
|
||||
|
||||
# Download HTML
|
||||
|
||||
def downloadHTML(self, first, second):
    """Download the HTML pages recorded at [first][second] in this
    thread's temporary JSON state file into the backup directory."""
    try:
        state_path = "{0}/{1}.json".format(self._tmp, self._name)
        with open(state_path) as state_file:
            state = json.loads(state_file.read())
            self._downloadPage(state[first][second], self._dir)
    except Exception as ex:
        self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))
|
||||
|
||||
# Download Image
|
||||
|
||||
def downloadImg(self, first, second):
    """Scrape <img> sources from the pages recorded at [first][second]
    in the thread's JSON state file and mirror them under img/."""
    try:
        state_path = "{0}/{1}.json".format(self._tmp, self._name)
        with open(state_path) as state_file:
            state = json.loads(state_file.read())
            image_urls = self._getImg(state[first][second])
            parsed = urlparse(self._url)
            destination = "{0}/{1}/{2}".format(self._dir, parsed.path, "img")
            self._downloadPage(image_urls, destination)
    except Exception as ex:
        self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))
|
||||
|
||||
|
||||
|
||||
|
||||
# Get URL
|
||||
def getUrlPage(self, index_thread, max_thread):
    # Crawl the site's menu ("listsmooth") to enumerate category pages,
    # then walk each category's archive pagination to collect article URLs.
    # The slice of pagination pages handled by this worker is derived from
    # (index_thread, max_thread). The result is persisted as
    # {tmp}/{thread-name}.json with the structure
    # {"principal"|"publications": {"page": [...], "article": [...]}}.
    try:
        page = self._request.get(self._url)

        page_url = []
        if page.status_code == 200:
            soup = BeautifulSoup(page.text, self._parser)
            ul = soup.find_all("ul", id="listsmooth")
            # Menu anchors; "#" entries are decorative and skipped.
            for anchor in ul[0].find_all("a"):
                href = anchor.get('href', '/')
                if href != "#":
                    page_url.append(href)
        else:
            self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
    except ConnectionError as err:
        self._logger.error("{0} : Connection error : {1}".format(self._name, err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error : {1}".format(self._name, err))

    webpage = {"principal": {"page":[], "article":[]}, "publications": {"page":[], "article":[]}}
    for i in page_url:
        # The site root maps to the "principal" section, everything else
        # to "publications".
        section = "publications"
        o = urlparse(i)
        # Force the configured protocol onto the menu URL.
        o = o._replace(scheme=self._protocol)
        i = o.geturl().replace(":///", "://")
        if i == "{0}/".format(self._url):
            section = "principal"
        try:
            page = self._request.get(i)

            if page.status_code == 200:
                self._logger.info("{0} : page : {1}".format(self._name, i))
                if i not in webpage[section]["page"]:
                    webpage[section]["page"].append(i)
                soup = BeautifulSoup(page.text, self._parser)
                class_div = soup.find_all("div", class_="pagingfirstline")
                if len(class_div) > 0:
                    pagingfirstline = class_div[0].find_all("a")
                    if len(pagingfirstline) > 1:
                        # The last pagination anchor points at the final
                        # archive page, e.g. ".../p120-10.html".
                        lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
                        self._logger.debug("{0} : Last page {1}".format(self._name, lastpage))

                        element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
                        # "p120-10.html" -> "120"; archives paginate in steps of 10.
                        number_page = element_lastpage.split("-")[0].split("p")[1]
                        number_lastpage = int(number_page) / 10

                        # Split the page range evenly across max_thread
                        # workers; this worker takes slice index_thread.
                        setPageDivided = int(number_lastpage) / max_thread
                        if setPageDivided > int(setPageDivided):
                            setPageDivided = setPageDivided + 1
                        setPagePart = setPageDivided * (index_thread + 1)
                        firstPagePart = (setPagePart - setPageDivided)

                        self._logger.debug("{0} : Total page : {1}".format(self._name,int(number_lastpage)))
                        self._logger.debug("{0} : First range : {1}".format(self._name, int(firstPagePart)))
                        self._logger.debug("{0} : Last range : {1}".format(self._name, int(setPagePart)))

                        for j in range(int(firstPagePart),int(setPagePart)+1):
                            paging = j * 10
                            categorie = urlparse(i).path.split("/")
                            url_paging = "{0}/archives/p{1}-10.html".format(self._url, paging)
                            if len(categorie) > 2:
                                url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
                            self._logger.info("{0} : {1}".format(self._name, url_paging))
                            if url_paging not in webpage[section]["page"]:
                                webpage[section]["page"].append(url_paging)
                            page = self._request.get(url_paging)
                            if page.status_code == 200:
                                soup = BeautifulSoup(page.text, self._parser)
                                # Article titles are <h2><a href=...> on archive pages.
                                h2 = soup.find_all("h2")
                                self._logger.debug("{0} : {1} H2 : {2}".format(self._name, url_paging, h2))
                                for title in h2:
                                    self._logger.debug("{0} : {1} a : {2}".format(self._name, url_paging, title.find_all("a")))
                                    href = title.find_all("a")[0].get("href", "/")
                                    if href not in webpage[section]["article"]:
                                        try:
                                            o = urlparse(href)
                                            # NOTE(review): hard-codes https here
                                            # rather than self._protocol — confirm.
                                            o = o._replace(scheme="https").geturl()
                                            webpage[section]["article"].append(o)
                                        except Exception as err:
                                            self._logger.error("parsing error : {0}".format(err))
                                            exit(1)
            else:
                self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error : {1}".format(self._name, err))
            exit(1)
    # Persist the collected URL map for the download/import phases.
    try:
        string_webpage = json.dumps(webpage)
        open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(string_webpage)
    except Exception as ex:
        self._logger.error("{0} : Error for writing webpage : {1}".format(self._name, ex))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# Private method
|
||||
#
|
||||
# Create path
|
||||
def _mkdirPath(self, path_dir):
|
||||
if not os.path.exists(path_dir):
|
||||
makedir = []
|
||||
pathh = path_dir.split("/")
|
||||
for i in pathh:
|
||||
makedir.append(i)
|
||||
repath = "/".join(makedir)
|
||||
if not os.path.exists(repath):
|
||||
self._logger.debug("{0} : Dossier crée : {1}".format(self._name, repath))
|
||||
try:
|
||||
if len(repath) > 0:
|
||||
os.mkdir(repath)
|
||||
except Exception as err:
|
||||
self._logger.error("Directory error : {0}".format(err))
|
||||
self._logger.debug("Directory error : {0} {1} {2} {3} {4}".format(err, path_dir, repath, pathh, makedir))
|
||||
exit(1)
|
||||
|
||||
|
||||
# Get Css and JS
|
||||
def _getScriptCss(self, js, css):
    """Collect absolute URLs of the front page's <script src> (if *js*)
    and stylesheet <link href> (if *css*) resources.

    Fixes:
    - page_url is now initialized before the try block; previously a
      request failure before the assignment left it unbound and the
      final `return page_url` raised NameError.
    - <link> tags without a rel attribute returned None from .get("rel"),
      so rel[0] raised TypeError; now guarded.
    Returns the (possibly empty) list of resource URLs.
    """
    page_url = []
    try:
        page = self._request.get(self._url)

        if page.status_code == 200:
            soup = BeautifulSoup(page.text, self._parser)
            if js is True:
                script = soup.find_all("script")
                for anchor in script:
                    src = anchor.get("src", "/")
                    if src != "/":
                        try:
                            u = urlparse(self._url)
                            o = urlparse(src)
                            # Relative src: borrow host from the site URL.
                            if o.netloc == "":
                                o = o._replace(netloc=u.netloc)
                            o = o._replace(scheme=u.scheme)
                            page_url.append(o.geturl())
                        except Exception as err:
                            self._logger.error("parsing error : {0}".format(err))
                            exit(1)

            if css is True:
                link = soup.find_all("link")
                for anchor in link:
                    rel = anchor.get("rel")
                    # rel may be absent entirely; only stylesheets are kept.
                    if rel and rel[0] == "stylesheet":
                        href = anchor.get("href", "/")
                        if href != "/":
                            try:
                                u = urlparse(self._url)
                                o = urlparse(href)
                                if o.netloc == "":
                                    o = o._replace(netloc=u.netloc)
                                o = o._replace(scheme=u.scheme)
                                page_url.append(o.geturl())
                            except Exception as err:
                                self._logger.error("parsing error : {0}".format(err))
                                exit(1)

        else:
            self._logger.error("JS or CSS did not get due status code : {0}".format(page.status_code))
            self._logger.debug(page.content)
    except ConnectionError as err:
        self._logger.error("Connection error : {0}".format(err))
        exit(1)
    except Exception as err:
        self._logger.error("Exception error : {0}".format(err))

    return page_url
|
||||
|
||||
# Get image
|
||||
|
||||
def _getImg(self, webpage):
    # Fetch every page URL in *webpage* and collect the de-duplicated
    # list of <img src> values found across them. Pages that answer with
    # a non-200 status are logged and skipped; a ConnectionError aborts
    # the process.
    page_img = []
    for i in webpage:
        try:
            page = self._request.get(i)

            if page.status_code == 200:
                soup = BeautifulSoup(page.text, self._parser)
                img = soup.find_all("img")
                self._logger.info("{0} : image from page: {1} : ".format(self._name,i))
                for anchor in img:
                    # "/" doubles as the sentinel for a missing src attribute.
                    src = anchor.get("src", "/")
                    if src != "/":
                        if src not in page_img:
                            self._logger.info("{0} : image: {1} : ".format(self._name, src))
                            page_img.append(src)
            else:
                self._logger.error("{0} : Image did not get due status code : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error : {1}".format(self._name, err))

    return page_img
|
||||
|
||||
|
||||
# Download page
|
||||
def _downloadPage(self, webpage, backup_dir):
    """Download every URL in *webpage* into *backup_dir*, mirroring the
    URL's host/path structure on disk.

    A URL whose path ends in "/" (no file component) is saved as the
    host's index.html. Fix: the output file is now written via a
    with-statement — the original `open(...).write(...)` leaked the file
    handle.
    """
    for i in range(0, len(webpage)):
        try:
            o = urlparse(webpage[i])

            # Split the URL path into directory part + file name.
            path_web = o.path.split("/")
            filePageWeb = path_web[len(path_web)-1]
            path_web.pop(len(path_web)-1)
            dir_page_web = "/".join(path_web)
            self._mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web))
            try:
                r = self._request.get(webpage[i])

                if r.status_code == 200:
                    fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
                    if len(dir_page_web) > 0 and len(filePageWeb) > 0:
                        fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
                    self._logger.info("{0} : {1}/{2} : {3}".format(self._name, i+1, len(webpage), fileDownload))
                    try:
                        # Binary write; ensures the handle is closed even on error.
                        with open(fileDownload, "wb") as fh:
                            fh.write(r.content)
                    except Exception as err:
                        self._logger.error("file error : {0}".format(err))
                        exit(1)
                else:
                    self._logger.error("Not download due status code : {0}".format(r.status_code))
                    self._logger.debug(r.content)
            except ConnectionError as err:
                self._logger.error("{0} : Connection error : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} Exception error : {1}".format(self._name, err))
        except Exception as err:
            self._logger.error("parsing error : {0}".format(err))
            exit(1)
|
996
lib/WPImport.py
Normal file
996
lib/WPImport.py
Normal file
@@ -0,0 +1,996 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, logging, re, json
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
class WPimport:
|
||||
# Constructor
|
||||
def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False, tmp="/tmp/import_export_canablog", author="", ssl_canalblog=True):
    # Import worker that pushes exported content into a WordPress site
    # through its REST API.
    #
    # name: thread label used in log lines and tmp-file names.
    # basic: HTTP auth object passed to every wp-json request.
    # wordpress: target WordPress host.
    # logger: pre-configured logging.Logger.
    # parser: BeautifulSoup backend name.
    # ssl_wordpress: False downgrades wp-json calls to plain http.
    # no_create / no_update / no_image: feature switches consulted by the
    #   add-or-update methods.
    # tmp: directory for the per-thread JSON state file.
    # author: if non-empty, forces this author name on imported content.
    # ssl_canalblog: controls http->https rewriting of source-site links.
    self._name = name
    self._basic = basic
    self._wordpress = wordpress
    self._logger = logger
    self._parser = parser
    self._headers_json = {'Content-Type': 'application/json; charset=utf-8', 'Accept':'application/json'}
    self._protocol = "https"
    self._directory = "backup"
    if ssl_wordpress is False:
        self._protocol = "http"
    self._request = requests.Session()
    self._ssl_canalblog = ssl_canalblog
    # Retry transient failures (rate limiting and 5xx) with exponential backoff.
    retries = Retry(connect=10, read=10, redirect=5,
            status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)

    self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))
    self._no_create = no_create
    self._no_update = no_update
    self._no_image = no_image
    self._tmp = tmp
    self._author = author
|
||||
|
||||
# Destructor
|
||||
def __del__(self):
    # Destructor: announce end of the import run. Uses print() rather
    # than the logger (unlike WPExport.__del__) — presumably to survive
    # logger teardown at interpreter shutdown; TODO confirm intent.
    print("{0} : Import finished for {1}".format(self._name, self._wordpress))
|
||||
|
||||
# Public method
|
||||
|
||||
def setUrl(self, wordpress):
    """Retarget this worker at a different WordPress host."""
    self._wordpress = wordpress
|
||||
|
||||
def fromUrl(self, first, second):
    """Import every URL recorded at [first][second] in the thread's JSON
    state file, removing each successfully processed URL from the file
    so an interrupted run can resume.

    Fixes two resume-state bugs in the original:
    - `del webpage_content[first][second][i]` while indexing the same
      list by a range computed up-front shifted every later index;
    - `webpage_content = json.dumps(webpage_content)` rebound the dict
      to a string, so every iteration after the first success raised
      TypeError (swallowed by the broad handler).
    Now the loop iterates a snapshot, removes processed URLs by value,
    and dumps to a separate string.
    """
    try:
        with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
            webpage_content = json.loads(file.read())
        self._logger.debug("{0} : size of webpage : {1}".format(self._name, len(webpage_content)))
        # Snapshot: the live list is mutated as pages complete.
        webpage = list(webpage_content[first][second])
        for i in range(0, len(webpage)):
            try:
                r = self._request.get(webpage[i])
                if r.status_code == 200:
                    self._logger.info("{0} : ({1}/{2}) : Page is importing : {3}".format(self._name, i+1, len(webpage), webpage[i]))
                    soup = BeautifulSoup(r.content, self._parser)
                    # Dispatch on page type: article, photo album, or
                    # a listing page that only feeds featured media.
                    articlebody = soup.find_all("div", class_="articlebody")
                    if len(articlebody) > 0:
                        self._addOrUpdatePost(soup)
                    else:
                        albumbody = soup.find_all("div", class_="albumbody")
                        if len(albumbody) > 0:
                            self._addOrUpdateAlbum(soup)
                        else:
                            self._addOrUpdateFeaturedMedia(soup)
                    # Persist progress: drop the processed URL and rewrite
                    # the state file.
                    webpage_content[first][second].remove(webpage[i])
                    with open("{0}/{1}.json".format(self._tmp, self._name), "wt") as out:
                        out.write(json.dumps(webpage_content))
                else:
                    self._logger.error("{0} : Connection error for get url {1} with status code : {2}".format(self._name, webpage[i], r.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, r.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, webpage[i], err))
                exit(1)
            except IOError as err:
                self._logger.error("{0} : Connection error for IO url {1} : {2}".format(self._name, webpage[i], err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, webpage[i], err))
    except Exception as ex:
        self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))
|
||||
|
||||
|
||||
def fromDirectory(self, directory="", number_thread=1, max_thread=1, revert=False):
    # Import from a local backup tree: enumerate {directory}/archives
    # recursively, partition the file list for this thread (unless
    # *revert*, which reuses the existing tmp state file), then process
    # the thread's slice.
    self._directory = directory
    directory = "{0}/archives".format(directory)
    directories = self._getDirectories([], "{0}".format(directory))
    if len(directories) > 0:
        files = self._getFiles(directories)
        if revert is False:
            # Write this thread's slice of the file list to tmp.
            self._tmpFiles(files=files, number_thread=number_thread, max_thread=max_thread)
        self._fromFileTmp()
    else:
        self._logger.error("{0} : No files for {1}".format(self._name, directory))
|
||||
|
||||
|
||||
def fromFile(self, files=[]):
    # Import each existing local HTML file in *files*, dispatching on
    # page type: article body -> post, album body -> album page,
    # otherwise a listing page feeding featured media.
    # NOTE(review): mutable default argument; harmless here since the
    # list is never mutated, but worth cleaning up.
    for i in range(0, len(files)):
        if os.path.exists(files[i]):
            self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, len(files), files[i]))
            with open(files[i], 'r') as f:
                content = f.read()
                self._logger.debug("{0} : Size of article : {1}".format(self._name, len(content)))
                soup = BeautifulSoup(content, self._parser)
                articlebody = soup.find_all("div", class_="articlebody")
                self._logger.debug("{0} : Number of article : {1}".format(self._name, len(articlebody)))
                if len(articlebody) > 0:
                    self._addOrUpdatePost(soup)
                else:
                    albumbody = soup.find_all("div", class_="albumbody")
                    if len(albumbody) > 0:
                        self._addOrUpdateAlbum(soup)
                    else:
                        self._addOrUpdateFeaturedMedia(soup)
|
||||
|
||||
|
||||
|
||||
# Private method
|
||||
|
||||
|
||||
def _getAuthor(self, author):
    """Resolve a WordPress user id from the display name *author*.

    Queries /wp-json/wp/v2/users with a search filter and returns the id
    of the last match, or 0 when no user matches or the request fails.

    Fixes:
    - the error path logged `"{0} : {1}".format(page.content)` with a
      single argument, which itself raised IndexError;
    - the parameter `author` was overwritten with the numeric result,
      so log lines after the request reported the id instead of the
      searched name; a separate `author_id` is now used.
    """
    params = {"search":author, "per_page":100}
    author_id = 0
    try:
        self._logger.info("{0} : Search author : {1}".format(self._name, author))
        page = self._request.get("{1}://{0}/wp-json/wp/v2/users".format(self._wordpress, self._protocol), auth=self._basic, headers=self._headers_json, params=params)
        self._logger.debug("{0} : End Search author : {1}".format(self._name, author))
        self._logger.debug("{0} : Debug requests : {1}".format(self._name, page.content))
        if page.status_code == 200:
            self._logger.info("{0} : Get author : {1}".format(self._name, author))
            result = page.json()
            # Keep the id of the last match (original behavior).
            for a in result:
                author_id = a["id"]
        else:
            self._logger.error("{0} : Connection error with status code for get author : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
    except ConnectionError as err:
        self._logger.error("{0} : Connection error for get author : {1}".format(self._name, err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error for get author : {1}".format(self._name, err))
    return author_id
|
||||
|
||||
def _getInfoAlbum(self, link):
    # Extract the album author's name from an album page, fetched either
    # from the web (absolute link) or from the local backup tree
    # (relative link). The author is taken from the first albumbody
    # paragraph of the form "...<br>Author : Name...". Returns the
    # lower-cased, space-stripped name, or None implicitly when no
    # paragraph matches.
    if self._ssl_canalblog:
        # Force https; the double replace repairs "httpss" created when
        # the link was already https.
        link = link.replace("http", "https").replace("httpss", "https")
    self._logger.info("{0} : Info album : {1}".format(self._name, link))
    link_o = urlparse(link)
    if len(link_o.netloc) > 0:
        self._logger.info("{0} : get album info from web : {1}".format(self._name, link_o))
        try:
            response = self._request.get(link)
            if response.status_code == 200:
                self._logger.info("{0} : get content info from web : {1}".format(self._name, link))
                page_img = response.content
            # NOTE(review): on a non-200 response page_img stays unbound
            # and the BeautifulSoup call below raises NameError — confirm.
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for get album info : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for get album info : {1}".format(self._name, err))
            exit(1)
    else:
        self._logger.info("{0} : get album info from file : {1}".format(self._name, link_o))
        # NOTE(review): formats the ParseResult object (link_o) into the
        # path instead of link_o.path — looks wrong; verify against the
        # backup tree layout.
        if os.path.exists("{0}/..{1}".format(self._directory, link_o)):
            page_img = open("{0}/..{1}".format(self._directory, link_o), "r")
    soup = BeautifulSoup(page_img, self._parser)
    paragraphs = soup.find("div", class_="albumbody").find_all("p")
    for paragraph in paragraphs:
        self._logger.info("{0} get paragraph : {1}".format(self._name, paragraph))
        split_paragraph = str(paragraph).split("<br>")
        self._logger.info("{0} length paragraph splitted : {1}".format(self._name, len(split_paragraph)))
        if len(split_paragraph) == 1:
            # Fall back to the self-closing variant emitted by some parsers.
            split_paragraph = str(paragraph).split("<br/>")
            self._logger.info("{0} get paragraph splitted : {1}".format(self._name, split_paragraph))
        if len(split_paragraph) > 1:
            if len(split_paragraph[1].split(":")) > 1:
                # "Author : Name" -> "name"
                author = split_paragraph[1].split(":")[1].replace(" ", "").lower()
                return author
|
||||
|
||||
|
||||
def _addOrUpdateAlbum(self, soup):
    # Create or update a WordPress *page* representing a photo album.
    # Steps: (1) upload every album image as media (unless --no-image),
    # (2) build Gutenberg block HTML from the album description plus one
    # wp:image block per uploaded image, (3) resolve the author, (4)
    # search existing pages by title, updating/deduplicating matches,
    # and (5) create the page when no match exists (unless --no-create).
    self._logger.info("{0} : Add/Update Album".format(self._name))
    albumbody = soup.find("div", class_="albumbody")
    albumtitle = albumbody.find("h2").get_text()
    self._logger.debug("{0} : Title of the album : {1}".format(self._name, albumtitle))
    albumdesc = albumbody.find("div", class_="albumdesc").find("p")
    img_a = albumbody.find_all("img")
    list_img = []
    page_is_exist = False

    if self._no_image is False:
        self._logger.debug("{0} : Number of image's tag : {1}".format(self._name, len(img_a)))

        for i in img_a:
            new_img = {}
            href_img = i.get("src")
            href_img_o = urlparse(href_img)
            try:
                if len(href_img_o.netloc) > 0:
                    img_ok = False
                    # canalblog thumbnails end in "_q"; "_o" is the original.
                    href_img = href_img.replace("_q", "_o")
                    page_img = self._request.get(href_img)
                    if page_img.status_code == 200:
                        img_ok = True
                    else:
                        # Fall back to the local backup copy of the image.
                        if os.path.exists("{0}/..{1}".format(self._directory, href_img)):
                            page_img = open("{0}/..{1}".format(self._directory, href_img), "r")
                            img_ok = True
                    self._logger.debug("{0} : Status code for image {1} : {2}".format(self._name, href_img, page_img.status_code))
                    if img_ok is True:
                        media=self._addOrUpdateMedia(href_img, page_img)
                        new_img["id"]=media["id"]
                        new_img["new_src"]=media["rendered"]
                        list_img.append(new_img)
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for get image : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for get image : {1}".format(self._name, err))
                exit(1)
    self._logger.debug("{0} content img : {1}".format(self._name, list_img))
    content_html = ""
    if len(list_img) > 0:

        # Gutenberg paragraph block for the album description, followed
        # by one image block per uploaded image.
        content_html = "<!-- wp:paragraph -->\n{0}\n<!-- /wp:paragraph -->\n\n".format(albumdesc)

        for i in range(0, len(list_img)):
            content_html = content_html + "<!-- wp:image {\"id\":id-image,\"sizeSlug\":\"large\",\"linkDestination\":\"none\"} --><figure class=\"wp-block-image size-large\"><img src=\"src-image\" alt=\"\" class=\"wp-image-id-image\"/></figure><!-- /wp:image -->\n\n".replace("id-image", str(list_img[i]["id"])).replace("src-image", list_img[i]["new_src"])

    self._logger.info("{0} : content html : {1}".format(self._name, content_html))
    if len(content_html) > 0:
        data = {"title":albumtitle, "content":content_html, "status":"publish"}
        if len(self._author) > 0:
            # Forced author name from the CLI wins.
            author = self._getAuthor(self._author)
        else:
            # Otherwise pull the author from the album's own page.
            link_a = albumbody.find_all("a")
            # NOTE(review): href_a stays unbound if no /albums/ link
            # exists — the _getInfoAlbum call below would raise NameError.
            for i in link_a:
                if re.search(r"/albums/", i.get("href", "/")):
                    href_a = i.get("href", "/")
                    break
            author = self._getInfoAlbum(href_a)
            self._logger.info("{0} : author : {1}".format(self._name, author))
            author = self._getAuthor(author)
        data = {"title":albumtitle, "content":content_html, "status":"publish"}

        # 0 means "author not resolved"; omit the field in that case.
        if author != 0:
            data = {"title":albumtitle, "content":content_html, "status":"publish", "author":author}
        self._logger.debug("{0} : data for album page : {1}".format(self._name, data))
        # Walk search result pages (max 9) looking for an existing page
        # with the same title.
        for index in range(1,10):
            params = {"search": albumtitle, "per_page":100, "page": index}
            try:
                self._logger.info("{0} : Search post with index {2} : {1}".format(self._name, albumtitle, index))
                page = self._request.get("{1}://{0}/wp-json/wp/v2/pages".format(self._wordpress, self._protocol), auth=self._basic, params=params, headers=self._headers_json)
                if page.status_code == 200:
                    self._logger.debug("{0} : Encoding : {1}".format(self._name, page.encoding))
                    page.encoding = "utf-8"
                    result = page.json()
                    if len(result) == 0:
                        break
                    self._logger.info("{0} : Number result posts : {1}".format(self._name, len(result)))
                    count = 0
                    for i in result:
                        title_rendered = i["title"]["rendered"]
                        self._logger.info("{0} : Search title pages for |{2}| : |{1}|".format(self._name, title_rendered, albumtitle))
                        if len(title_rendered) != len(albumtitle):
                            # WordPress renders typographic entities; normalize
                            # before comparing.
                            title_rendered = self._replaceCaracter(title_rendered)
                            self._logger.debug("{0} : Search title pages for |{2}| : |{1}|".format(self._name, title_rendered, albumtitle))
                            self._logger.debug("{0} : SIze of title : {1} - {2}".format(self._name, len(albumtitle), len(title_rendered)))
                        if title_rendered == albumtitle:
                            if self._no_update is False:
                                page_is_exist = True
                                post_id = i["id"]
                                count = count + 1
                                if count > 1:
                                    # Duplicate page with the same title: delete it.
                                    self._logger.info("{0} : Page {1} is double and going to delete".format(self._name, albumtitle))
                                    try:
                                        params = {"force":1}
                                        page = self._request.delete("{2}://{0}/wp-json/wp/v2/pages/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, params=params)
                                        if page.status_code == 200:
                                            self._logger.info("{0} : Page deleted : {1}".format(self._name, albumtitle))
                                        else:
                                            self._logger.error("{0} : Page not updated due status code : {1}".format(self._name, page.status_code))
                                            self._logger.debug("{0} : {1}".format(self._name, page.content))
                                    except ConnectionError as err:
                                        self._logger.error("{0} : Connection error for deleted page : {1}".format(self._name, err))
                                        exit(1)
                                    except Exception as err:
                                        self._logger.error("{0} : Exception error for deleted page : {1}".format(self._name, err))

                                else:
                                    # First match: update it in place.
                                    self._logger.debug("{0} : Data for page to update : {1}".format(self._name, i))
                                    self._logger.info("{0} : Page {1} already exist and going to update".format(self._name, albumtitle))

                                    try:
                                        page = self._request.post("{2}://{0}/wp-json/wp/v2/pages/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))

                                        if page.status_code == 200:
                                            result = page.json()
                                            self._logger.info("{0} : page updated : {1}".format(self._name, albumtitle))
                                        else:
                                            self._logger.error("{0} : page not updated due status code : {1}".format(self._name, page.status_code))
                                            self._logger.debug("{0} : {1}".format(self._name, page.content))
                                    except ConnectionError as err:
                                        self._logger.error("{0} : Connection error for update page : {1}".format(self._name, err))
                                        exit(1)
                                    except Exception as err:
                                        self._logger.error("{0} : Exception error for update page : {1}".format(self._name, err))
                elif page.status_code == 400:
                    # Past the last result page; the API answers 400.
                    self._logger.error("{0} : Connection for update post unauthorized : {1}".format(self._name, page.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
                    break
                else:
                    self._logger.error("{0} : Connection for update page error with status code : {1}".format(self._name, page.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for search page : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for search page : {1}".format(self._name, err))

        if page_is_exist is False and self._no_create is False:
            try:
                self._logger.info("{0} : Creating page : {1}".format(self._name, data["title"]))
                page = self._request.post("{1}://{0}/wp-json/wp/v2/pages".format(self._wordpress, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))

                if page.status_code == 201:
                    result = page.json()
                    self._logger.info("{0} : page added : {1}".format(self._name, result["title"]["raw"]))

                else:
                    # NOTE(review): logs `r` here instead of `page` — `r` is
                    # undefined in this scope; confirm and fix upstream.
                    self._logger.error("{0} : page not added due status code : {1}".format(self._name, r.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, r.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for create page : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for create page : {1}".format(self._name, err))
|
||||
|
||||
|
||||
|
||||
|
||||
def _fromFileTmp(self):
    # Process the list of local HTML files recorded in this thread's
    # JSON state file, dispatching each to post or featured-media import.
    # NOTE(review): unlike fromFile, the albumbody branch is missing here,
    # so albums in the tmp list fall through to featured-media handling —
    # confirm whether intentional.
    try:
        with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
            files = json.loads(file.read())
            self._logger.debug("{0} : size of webpage : {1}".format(self._name, len(files)))
            for i in range(0, len(files)):
                if os.path.exists(files[i]):
                    self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, len(files), files[i]))
                    with open(files[i], 'r') as f:
                        content = f.read()
                        self._logger.debug("{0} : Size of article : {1}".format(self._name, len(content)))
                        soup = BeautifulSoup(content, self._parser)
                        articlebody = soup.find_all("div", class_="articlebody")
                        self._logger.debug("{0} : Number of article : {1}".format(self._name, len(articlebody)))
                        if len(articlebody) > 0:
                            self._addOrUpdatePost(soup)
                        else:
                            self._addOrUpdateFeaturedMedia(soup)
    except Exception as ex:
        self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))
|
||||
|
||||
|
||||
def _tmpFiles(self, files=[], number_thread=1, max_thread=1):
|
||||
divFiles = int(len(files) / max_thread)
|
||||
currentRangeFiles = int(divFiles * (number_thread+1))
|
||||
firstRange = int(currentRangeFiles - divFiles)
|
||||
self._logger.debug("{0} : index : {1}".format(self._name,number_thread))
|
||||
|
||||
self._logger.debug("{0} : first range : {1}".format(self._name,firstRange))
|
||||
self._logger.debug("{0} : last range : {1}".format(self._name,currentRangeFiles))
|
||||
webpage = []
|
||||
for i in range(firstRange, currentRangeFiles):
|
||||
webpage.append(files[i])
|
||||
|
||||
try:
|
||||
string_webpage = json.dumps(webpage)
|
||||
open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(string_webpage)
|
||||
except Exception as ex:
|
||||
self._logger.error("{0} : Error for writing webpage : {1}".format(self._name, ex))
|
||||
|
||||
|
||||
## replace caracter
|
||||
|
||||
def _replaceCaracter(self, title_rendered):
|
||||
list_replace = {'’': "'", '–': '-', '…': '...', '« ': '"', ' »': '"', '« ': '"', ' »': '"', '’': "'", '"‘': "'"}
|
||||
for old, new in list_replace.items():
|
||||
title_rendered = title_rendered.replace(old, new)
|
||||
return title_rendered
|
||||
|
||||
## remove space
|
||||
|
||||
def _removeSpace(self, title):
|
||||
if title[len(title)-1] == " ":
|
||||
title = title[:-1]
|
||||
if title[0] == " ":
|
||||
title = title[1:]
|
||||
return title
|
||||
|
||||
## Get all files
|
||||
|
||||
def _getFiles(self, item):
|
||||
files = []
|
||||
for i in item:
|
||||
for j in os.listdir(i):
|
||||
if os.path.isfile("{0}/{1}".format(i, j)):
|
||||
files.append("{0}/{1}".format(i, j))
|
||||
return files
|
||||
|
||||
## Get directories
|
||||
|
||||
def _getDirectories(self, subdirectory, item):
|
||||
sub = subdirectory
|
||||
for i in os.listdir(item):
|
||||
if os.path.isdir("{0}/{1}".format(item, i)):
|
||||
sub.append("{0}/{1}".format(item, i))
|
||||
subdirectory = self._getDirectories(sub, "{0}/{1}".format(item, i))
|
||||
return subdirectory
|
||||
|
||||
|
||||
def _linkFeaturedMedia(self, img_src, h2, result):
    """Attach a featured image to the WordPress post in result[0].

    Looks up the full-size image (thumbnail suffix "_q" stripped) in the
    media library; if found, sets it as the post's featured_media. If
    not found, uploads the full-size image and retries once.

    Fix: the retry call omitted the required *result* argument, raising
    TypeError instead of linking the freshly uploaded media; it now
    passes *result* through.
    """
    try:
        page = self._request.get(img_src)
        if page.status_code == 200:
            # Media library entries are named after the full-size file.
            name_img = img_src.replace("_q", "")
            name_img = name_img.split("/")[len(name_img.split("/"))-1]
            params = {"search": name_img}
            try:
                page = self._request.get("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, params=params)
                if page.status_code == 200:
                    res = page.json()
                    if len(res) > 0:
                        id_media = res[0]["id"]
                        data = {"featured_media": id_media}
                        try:
                            r = self._request.post("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, result[0]["id"], self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
                            if r.status_code == 200:
                                self._logger.info("{0} : Add media featured : {1}".format(self._name, r.json()["title"]["raw"]))
                            else:
                                self._logger.error("{0} : Connection error with status code for featured media : {1}".format(self._name, r.status_code))
                                self._logger.debug("{0} : {1}".format(self._name, r.content))
                        except ConnectionError as err:
                            self._logger.error("{0} : Connection error for post media featured : {1}".format(self._name, err))
                            exit(1)
                        except Exception as err:
                            self._logger.error("{0} : Exception error for post media featured : {1}".format(self._name, err))
                    else:
                        # No library match: upload the image and retry the link.
                        self._logger.info("{0} : No media found for {1}".format(self._name, h2))
                        try:
                            page = self._request.get(img_src.replace("_q", ""))
                            if page.status_code == 200:
                                self._logger.info("{0} : Get image for media featured : {1}".format(self._name, img_src.replace("_q", "")))
                                media = self._addOrUpdateMedia(img_src.replace("_q", ""), page)
                                if media["id"] != "":
                                    # Fix: pass result through to the retry.
                                    self._linkFeaturedMedia(img_src.replace("_q", ""), h2, result)
                            else:
                                self._logger.error("{0} : Connection error with status code for get image for featured media : {1}".format(self._name, page.status_code))
                                self._logger.debug("{0} : {1}".format(self._name, page.content))
                        except ConnectionError as err:
                            self._logger.error("{0} : Connection error for get to image for featured media : {1}".format(self._name, err))
                            exit(1)
                        except Exception as err:
                            self._logger.error("{0} : Exception error for get to image media featured : {1}".format(self._name, err))
                else:
                    self._logger.error("{0} : Connection error with status code for search featured media: {1}".format(self._name, page.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error search featured media : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error search featured media : {1}".format(self._name, err))
        else:
            self._logger.error("{0} : Connection error for get featured media with status code : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
    except ConnectionError as err:
        self._logger.error("{0} : Connection error for get featured media : {1}".format(self._name, err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error for get featured media : {1}".format(self._name, err))
|
||||
|
||||
## Add or update featured media
|
||||
|
||||
def _addOrUpdateFeaturedMedia(self, soup):
    """Find each exported post preview in *soup* and attach its image as featured media.

    For every ``div[data-edittype=post]`` the post title (first ``h2``) is
    searched through the WordPress REST API; when the first search result's
    title matches exactly, the preview's first ``img`` src is handed to
    ``_linkFeaturedMedia``.

    :param soup: BeautifulSoup document of the exported blog page.

    Fix: the generic ``except Exception`` handler previously logged
    "Connection error", duplicating the ConnectionError message; it now logs
    "Exception error" like every other handler in this file.
    """
    item_div = soup.find_all("div", {"data-edittype": "post"})
    for i in item_div:
        h2 = i.find_all("h2")[0].text
        params = {"search":h2, "type":"post"}
        try:
            page = self._request.get("{1}://{0}/wp-json/wp/v2/search".format(self._wordpress, self._protocol), auth=self._basic, params=params)
            if page.status_code == 200:
                result = page.json()
                if len(result) > 0:
                    # Only trust an exact title match (after normalising the
                    # typographic characters WordPress rewrites).
                    if h2 == self._replaceCaracter(result[0]["title"]):
                        img = i.find_all("img")
                        if len(img) > 0:
                            img_src = img[0].get("src")
                            # NOTE(review): called with 3 args here but with 2
                            # elsewhere — presumably the 3rd parameter has a
                            # default; confirm against _linkFeaturedMedia.
                            self._linkFeaturedMedia(img_src, h2, result)
            else:
                self._logger.error("{0} : Connection error with status code for featured media : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error : {1}".format(self._name, err))
|
||||
|
||||
## Association image to post
|
||||
|
||||
def _linkImgPost(self, title, list_img, post_id):
    """Attach every uploaded media item in *list_img* to the post *post_id*.

    :param title: post title, used for log messages only.
    :param list_img: dicts carrying at least an "id" key (WordPress media id).
    :param post_id: id of the WordPress post the images belong to.
    """
    # The payload is identical for every image; build it once.
    payload = {"post": post_id}
    for img in list_img:
        endpoint = "{2}://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, img["id"], self._protocol)
        try:
            response = self._request.post(endpoint, auth=self._basic, data=payload)
            if response.status_code == 200:
                self._logger.info("{0} : Link image to post {1}".format(self._name, title))
            else:
                self._logger.error("{0} Connection error with status code for link image to post : {1}".format(self._name, response.status_code))
                self._logger.debug("{0} : {1}".format(self._name, response.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for link image to post : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for link image to post : {1}".format(self._name, err))
|
||||
|
||||
|
||||
## Add or update img
|
||||
|
||||
def _addOrUpdateMedia(self, href_img, page):
    """Upload one image into the WordPress media library, replacing any namesake.

    :param href_img: source URL/path of the image; only its basename is used
                     for the search and the uploaded filename.
    :param page: response-like object whose ``.content`` holds the image bytes
                 (a ``requests`` response, or — per the caller — possibly an
                 open file object; assumes ``.content`` exists, TODO confirm).
    :return: dict ``{"id": ..., "rendered": ...}`` of the created attachment;
             both values stay empty strings when nothing was uploaded.
    """
    # Whitelist of accepted extensions; anything else is logged and skipped.
    media_authorized = ["png", "jpg", "jpeg", "svg", "gif"]
    media = {"id":"", "rendered":""}
    # Basename and extension are derived purely from the URL string.
    split_fileimg = href_img.split("/")
    img_name = split_fileimg[len(split_fileimg)-1]
    img_type_file = img_name.split(".")[len(img_name.split("."))-1]
    is_img = True
    if img_type_file not in media_authorized:
        self._logger.error("{0} : Element {1} is not image".format(self._name,img_name))
        is_img = False
    if is_img is True:
        self._logger.debug("{0} : Search for image {1} with URL {2}".format(self._name, img_name, "{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol)))
        params = { "search": img_name}
        try:
            # Look for an existing attachment with the same filename.
            r = self._request.get("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, params=params)

            self._logger.debug("{0} : Search for image {1} and his status code {2}".format(self._name, img_name, r.status_code))
            if r.status_code == 200:
                res = r.json()
                self._logger.debug("{0} : Number of image in search : {1}".format(self._name, len(res)))
                if len(res) > 0:
                    # A namesake exists: force-delete it before re-uploading,
                    # so the upload below always creates a fresh attachment.
                    params = {"force":1}
                    try:
                        r = self._request.delete("{2}://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, res[0]["id"], self._protocol), auth=self._basic, params=params)
                        if r.status_code == 200:
                            self._logger.info("{0} : Image removed {1}".format(self._name, img_name))
                        else:
                            self._logger.error("{0} : Image {1} not removed due status code : {2}".format(self._name, img_name, r.status_code))
                            self._logger.debug("{0} : {1}".format(self._name, r.content))
                    except ConnectionError as err:
                        self._logger.error("{0} Connection error for delete image : {1}".format(self._name, err))
                        exit(1)
                    except Exception as err:
                        self._logger.error("{0} Exception error for delete image : {1}".format(self._name, err))

                # Upload the raw bytes with an explicit MIME type; "jpg" has
                # no registered MIME type of its own, so it maps to jpeg.
                data = page.content
                img_type = "image/{0}".format(img_type_file)
                if img_type_file == "jpg":
                    img_type = "image/jpeg"
                headers={ 'Content-Type': img_type,'Content-Disposition' : 'attachment; filename={0}'.format(img_name)}
                try:
                    r = self._request.post("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, headers=headers, data=data)

                    # 201 Created: remember the new attachment id and its
                    # public URL so the caller can rewrite links.
                    if r.status_code == 201:
                        self._logger.info("{0} : Image added {1}".format(self._name, img_name))
                        res = r.json()
                        media["id"] = res["id"]
                        media["rendered"] = res["guid"]["rendered"]
                    else:
                        self._logger.error("{0} : Image {1}.{2} not added due status code : {3}".format(self._name, img_name, img_type, r.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, r.content))
                except ConnectionError as err:
                    self._logger.error("{0} : Connection error for add image : {1}".format(self._name, err))
                    exit(1)
                except Exception as err:
                    # NOTE(review): unlike the other generic handlers in this
                    # file, this one aborts the process — confirm intentional.
                    self._logger.error("{0} : Exception error for add image : {1}".format(self._name, err))
                    exit(1)

            else:
                self._logger.error("{0} : Connection error for search image with status code : {1}".format(self._name, r.status_code))
                self._logger.debug("{0} : {1}".format(self._name, r.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for search media : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for search media : {1}".format(self._name, err))
    return media
|
||||
|
||||
## Add or update comment
|
||||
|
||||
def _addOrUpdateComment(self, post, comment, title):
    """Replace the comments of a WordPress post with the imported ones.

    Two passes over *comment*:
    1. delete every existing WordPress comment that matches (post, author,
       date) for an imported comment, so re-runs do not duplicate;
    2. re-create each comment, resolving ``parent_id`` (an index into the
       *comment* list, as produced by ``_getComment``) into the WordPress id
       of the parent comment just created.

    :param post: WordPress post id the comments belong to.
    :param comment: list of dicts with "author", "date", "content", "parent_id".
    :param title: post title, used for log messages only.
    """
    # --- pass 1: delete matching existing comments -----------------------
    for i in comment:

        try:
            params = {"post": post, "author_name":i["author"], "date":i["date"]}
            page = self._request.get("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, params=params)

            if page.status_code == 200:
                result = page.json()
                # Every match is force-deleted (skips the trash).
                for j in result:
                    try:
                        params = {"force":1}
                        page = self._request.delete("{2}://{0}/wp-json/wp/v2/comments/{1}".format(self._wordpress, j["id"], self._protocol), params=params, auth=self._basic)

                        if page.status_code == 200:
                            self._logger.info("{0} : Comment deleted for {1}".format(self._name, title))
                            self._logger.debug("{0} : Comment deleted : {1}".format(self._name, j))
                        else:
                            self._logger.error("{0} : Comment not deleted for {1} due status code : {2}".format(self._name, title, page.status_code))
                            self._logger.debug("{0} : {1}".format(self._name, page.content))
                    except ConnectionError as err:
                        self._logger.error("{0} : Connection error for delete comment : {1}".format(self._name, err))
                        exit(1)
                    except Exception as err:
                        self._logger.error("{0} : Exception error for delete comment : {1}".format(self._name, err))

            else:
                self._logger.error("{0} : Comment not listed for {1} due status code : {2}".format(self._name, title, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for search comment : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for search comment : {1}".format(self._name, err))

    # --- pass 2: re-create the comments ----------------------------------
    for i in comment:
        data = {"post": post, "content": i["content"], "date": i["date"], "author_name": i["author"], "status": "approved"}

        # parent_id == -1 marks a top-level comment; otherwise it indexes the
        # parent inside *comment*, whose WordPress id must be looked up by
        # (post, author, date) since it was just re-created above.
        if i["parent_id"] != -1:
            parent_id = int(i["parent_id"])
            params = {"post": post, "author_name":comment[parent_id]["author"], "date":comment[parent_id]["date"]}
            try:
                page = self._request.get("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, params=params)

                if page.status_code == 200:
                    result = page.json()
                    if len(result) > 0:
                        data["parent"]=result[0]["id"]
                else:
                    self._logger.error("{0} : Connection error for parent comment with status code : {1}".format(self._name, page.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for parent comment : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for parent comment : {1}".format(self._name, err))

        try:
            page = self._request.post("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, data=data)

            if page.status_code == 201:
                self._logger.info("{0} : Comment added for {1}".format(self._name, title))
                self._logger.debug("{0} : Data : {1}".format(self._name, data))
            else:
                self._logger.error("{0} : Comment not added for {1} due status code : {2}".format(self._name, title, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for add comment : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for add comment : {1}".format(self._name, err))
            exit(1)
|
||||
|
||||
## Check class name
|
||||
|
||||
def _hasClassName(self, tag, className):
|
||||
for i in tag["class"]:
|
||||
if i == className:
|
||||
return True
|
||||
return False
|
||||
|
||||
## Get class name
|
||||
|
||||
def _getClassName(self, tag, className):
|
||||
for i in tag["class"]:
|
||||
if re.match(className, i):
|
||||
return i
|
||||
return ""
|
||||
|
||||
## Get all comments
|
||||
|
||||
def _getComment(self, comment):
    """Extract author, date, HTML content and threading from scraped comments.

    :param comment: list of BeautifulSoup ``li.comment`` tags.
    :return: list of dicts {"author", "date", "content", "parent_id"}, where
             "parent_id" is the index of the parent comment in this same
             list, or -1 for a top-level comment.
    """
    comment_post = []
    for i in range(0, len(comment)):
        comment_div = comment[i].find("div", class_="comment_item")
        comment_item = comment_div.text.split("\n")
        footer = comment_div.find_all("div", class_="itemfooter")
        # Author is the text before the first comma, stripped of the French
        # "Posté par " prefix; the date sits in the <abbr title="..."> tag.
        comment_author = footer[0].text.split(",")[0].replace("Posté par ", "")
        comment_date = footer[0].find_all("abbr")[0].get("title")
        # Rebuild the body as one <p>, joining non-empty lines with <br />.
        # The last two lines are skipped — presumably the footer text; TODO confirm.
        comment_content = "<p>"
        for j in range(0, len(comment_item)-2):
            if len(comment_item[j]) > 0:
                comment_content = comment_content + comment_item[j] + "<br />"
        comment_content = comment_content + "</p>"
        parent = -1
        # Threading: a comment not at "level-1" looks backwards for the
        # nearest earlier comment one nesting level up, which is its parent.
        if self._hasClassName(comment[i], "level-1") is False:
            block = False
            className = self._getClassName(comment[i], "level-").split("-")
            level = 1
            if len(className) > 0:
                level = int(className[1])
            # NOTE(review): range(i-1, 0, -1) never inspects index 0, so the
            # very first comment can never be matched as a parent — confirm
            # whether that is intentional.
            for j in range(i-1, 0, -1):
                if block is False:
                    levelName = "level-{0}".format(level - 1)
                    if self._hasClassName(comment[j], levelName) is True:
                        parent = j
                        block = True

        comment_post.append({"author": comment_author, "date": comment_date, "content": comment_content, "parent_id":parent})
    return comment_post
|
||||
## Add or Update post
|
||||
|
||||
def _addOrUpdatePost(self, soup):
    """Import one scraped article into WordPress: media, terms, post, comments.

    Pipeline over the parsed article page *soup*:
    1. upload every linked image (unless ``self._no_image``);
    2. ensure every tag/category exists, collecting their WordPress ids;
    3. rebuild the post body, rewriting image links to the uploaded media;
    4. update an existing post with the same title (deleting duplicates),
       or create it (unless ``self._no_create`` / ``self._no_update``);
    5. re-import comments and link images to the resulting post.

    Fixes: the create-post error branch referenced an undefined name ``r``
    (now ``page``); unused locals ``tags``/``elements`` removed; the 400
    check after the post search is an ``elif`` so a successful 200 response
    no longer falls through to the error branch (matching the identical
    categories/tags search loop above).
    """
    # French month names -> two-digit numbers for the post date.
    month = {"janvier":"01", "février": "02", "mars": "03", "avril":"04", "mai": "05", "juin": "06", "juillet": "07", "août": "08", "septembre": "09", "octobre": "10", "novembre": "11", "décembre": "12"}
    liste = ["categories", "tags"]
    element = {}
    listelement = {}

    for i in liste:
        element[i] = []        # names scraped from the page
        listelement[i] = []    # WordPress term ids resolved/created below

    articletitle = soup.find_all("h2", class_="articletitle")
    self._logger.debug("{0} : Title of the article : {1}".format(self._name, articletitle))
    articlebody = soup.find_all("div", class_="articlebody")
    articledate = soup.find_all("span", class_="articledate")
    articleacreator = soup.find_all("span", class_="articlecreator")
    dateheader = soup.find_all("div", class_="dateheader")
    itemfooter = soup.find_all("div", class_="itemfooter")
    comment = soup.find_all("li", class_="comment")
    img_a = articlebody[0].find_all("a", {"target": "_blank"})
    self._logger.debug("{0} : Number of image's link : {1}".format(self._name, len(img_a)))
    list_img = []
    if self._no_image is False:
        # Fetch every linked image (remote URL or local backup file) and
        # push it to the media library, remembering old->new URLs.
        for i in img_a:
            new_img = {}
            img = i.find_all("img")
            self._logger.debug("{0} : Number of image's tag : {1}".format(self._name, len(img)))
            if len(img) > 0:
                href_a = i.get("href")
                href_img = img[0].get("src")
                href_a_o = urlparse(href_a)
                href_img_o = urlparse(href_img)
                new_img["old_src"]=href_img
                new_img["old_href"]=href_a
                try:
                    if len(href_img_o.netloc) > 0:
                        img_ok = False
                        page_img = self._request.get(href_img)

                        # Thumbnail gone: fall back to the full-size link.
                        if page_img.status_code == 404:
                            href_img = href_a
                            try:
                                page_img = self._request.get(href_a)
                                if page_img.status_code == 200:
                                    img_ok = True
                            except ConnectionError as err:
                                self._logger.error("{0} : Connection error for get image : {1}".format(self._name, err))
                                exit(1)
                            except Exception as err:
                                self._logger.error("{0} : Exception error for get image : {1}".format(self._name, err))
                                exit(1)

                    else:
                        # Relative URL: look for the file in the local backup.
                        # NOTE(review): page_img becomes a file object here,
                        # which has no .status_code — the debug line below
                        # then raises and is swallowed by the generic
                        # handler; confirm intended.
                        if os.path.exists("{0}/..{1}".format(self._directory, href_img)):
                            page_img = open("{0}/..{1}".format(self._directory, href_img), "r")
                            img_ok = True
                        else:
                            if os.path.exists("{0}/..{1}".format(self._directory, href_a)):
                                page_img = open("{0}/..{1}".format(self._directory, href_a), "r")
                                img_ok = True
                    self._logger.debug("{0} : Status code for image {1} : {2}".format(self._name, href_img, page_img.status_code))
                    if img_ok is True:
                        media=self._addOrUpdateMedia(href_img, page_img)
                        new_img["id"]=media["id"]
                        new_img["new_src"]=media["rendered"]
                        list_img.append(new_img)
                        if href_img != href_a:
                            media=self._addOrUpdateMedia(href_a, page_img)
                            new_img["id"]=media["id"]
                            new_img["new_src"]=media["rendered"]
                            list_img.append(new_img)
                    if page_img.status_code not in [200, 404]:
                        self._logger.error("{0} : Connection error with status code for get image : {1}".format(self._name, page_img.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page_img.content))
                except ConnectionError as err:
                    self._logger.error("{0} : Connection error for get image : {1}".format(self._name, err))
                    exit(1)
                except Exception as err:
                    self._logger.error("{0} : Exception error for get image : {1}".format(self._name, err))
                    exit(1)

    self._logger.debug("{0} : Number of image : {1}".format(self._name, len(list_img)))
    comment_post = self._getComment(comment)

    # Footer links tagged rel="tag" carry both tags (/tag/) and categories
    # (/archives/).
    a = itemfooter[0].find_all("a", {"rel": True})
    for i in a:
        rel = i.get("rel")
        if rel[0] == 'tag':
            href = i.get("href")
            if re.search(r'/tag/', href):
                element["tags"].append(i.text)
            if re.search(r'/archives/', href):
                element["categories"].append(i.text)
    # Resolve each scraped term to a WordPress id, paging through search
    # results; create the term when no exact match was found.
    for i in liste:
        for j in element[i]:
            element_exist = False
            title_element = self._removeSpace(j)
            for index in range(1,10):
                self._logger.info("{0} : search {1} with index {2} : {3}".format(self._name, i, index, title_element))
                try:
                    params = {"search":title_element, "per_page":"100", "page":index}
                    page = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol), auth=self._basic, params=params)

                    if page.status_code == 200:
                        result = page.json()
                        self._logger.debug("{0} : content {3} {2} : {1}".format(self._name, result, title_element, i))
                        if len(result) > 0:
                            for k in result:
                                title_rendered = k["name"]
                                self._logger.debug("{0} : content {2} : {1}".format(self._name, title_rendered, i))
                                self._logger.debug("{0} : size of content {3} : {2} - {1}".format(self._name, len(title_rendered), len(title_element), i))
                                if len(title_element) != len(title_rendered):
                                    title_rendered = self._replaceCaracter(title_rendered)

                                if title_element == title_rendered:
                                    self._logger.info("{0} : {1} found : {2}".format(self._name, i, title_rendered))
                                    element_exist = True
                                    listelement[i].append(k["id"])
                        else:
                            break
                    elif page.status_code == 400:
                        self._logger.error("{0} : {1} not found due status code : {2}".format(self._name, i, page.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                        break
                    else:
                        self._logger.error("{0} : {1} not found due status code : {2}".format(self._name, i, page.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                except ConnectionError as err:
                    self._logger.error("{0} : Connection error for {1} : {2}".format(self._name, i, err))
                    exit(1)
                except Exception as err:
                    self._logger.error("{0} : Exception error for {1} : {2}".format(self._name, i, err))
            self._logger.debug("{0} : Element {3} {2} is {1}".format(self._name, element_exist, title_element, i))
            if element_exist == False:
                data = {"name": title_element}
                self._logger.info("{0} : Create {1} : {2}".format(self._name, i, title_element))
                self._logger.debug("{0} : Data : {1}".format(self._name, data))
                try:
                    page = self._request.post("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))

                    if page.status_code == 201:
                        self._logger.info("{0} : {1} created : {2}".format(self._name, i, j))
                        result = page.json()
                        listelement[i].append(result["id"])
                    else:
                        self._logger.error("{0} : {1} not added due status code : {2}".format(self._name, i, page.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                except ConnectionError as err:
                    self._logger.error("{0} : Connection error for post {1} : {2}".format(self._name, i, err))
                    exit(1)
                except Exception as err:
                    self._logger.error("{0} : Exception error for post {1} : {2}".format(self._name, i, err))

    title = articletitle[0].text
    author = articleacreator[0].text.lower()
    if len(self._author) > 0:
        author = self._author
    # Rebuild the body: single-character paragraphs become plain breaks,
    # everything else is inlined with XHTML-style <br />.
    body = articlebody[0].find_all("p")
    bodyhtml = "<p>"
    for i in body:
        if len(i.text) == 1:
            bodyhtml = bodyhtml + "<br />"
        else:
            bodyhtml = bodyhtml + str(i).replace("<p>", "").replace("</p>", "").replace("<br>", "<br />") + "<br />"
    bodyhtml = bodyhtml + "</p>"
    # Rewrite every old image URL to the path of the uploaded media.
    for i in list_img:
        o = urlparse(i["new_src"])
        bodyhtml = bodyhtml.replace(i["old_href"], o.path)
        bodyhtml = bodyhtml.replace(i["old_src"], o.path)
    hour = articledate[0].text
    time = ""
    if len(dateheader) > 0:
        time = dateheader[0].text.split(" ")
    self._logger.debug("{0} : Title post : |{1}|".format(self._name, title))
    title = self._removeSpace(title)
    self._logger.debug("{0} : Rendered Title post : |{1}|".format(self._name, title))
    data = {"title":title, "content":bodyhtml, "status":"publish", "tags": listelement["tags"], "categories": listelement["categories"]}
    if len(dateheader) > 0:
        # dateheader text is "<day> <french month> <year>"; combined with the
        # article hour into an ISO-like timestamp.
        data = {"title":title, "content":bodyhtml, "status":"publish", "date": "{0}-{1}-{2}T{3}:00".format(time[2],month[time[1]],time[0], hour), "tags": listelement["tags"], "categories": listelement["categories"]}
    self._logger.debug("{0} : Data for post : |{1}| : {2}" .format(self._name, title, data))

    data["author"] = self._getAuthor(author)
    page_is_exist = False

    # Page through existing posts searching for an exact title match.
    for index in range(1,10):
        params = {"search": title, "per_page":100, "page": index}
        try:
            self._logger.info("{0} : Search post with index {2} : {1}".format(self._name, title, index))
            page = self._request.get("{1}://{0}/wp-json/wp/v2/posts".format(self._wordpress, self._protocol), auth=self._basic, params=params, headers=self._headers_json)
            if page.status_code == 200:
                self._logger.debug("{0} : Encoding : {1}".format(self._name, page.encoding))
                page.encoding = "utf-8"
                result = page.json()
                if len(result) == 0:
                    break
                self._logger.info("{0} : Number result posts : {1}".format(self._name, len(result)))
                count = 0
                for i in result:
                    title_rendered = i["title"]["rendered"]
                    self._logger.info("{0} : Search title posts for |{2}| : |{1}|".format(self._name, title_rendered, title))
                    if len(title_rendered) != len(title):
                        title_rendered = self._replaceCaracter(title_rendered)
                    self._logger.debug("{0} : Search title posts for |{2}| : |{1}|".format(self._name, title_rendered, title))
                    self._logger.debug("{0} : SIze of title : {1} - {2}".format(self._name, len(title), len(title_rendered)))
                    if title_rendered == title:
                        if self._no_update is False:
                            page_is_exist = True
                            post_id = i["id"]
                            count = count + 1
                            if count > 1:
                                # Second (and later) match with the same
                                # title: a duplicate — delete it.
                                self._logger.info("{0} : Page {1} is double and going to delete".format(self._name, title))
                                try:
                                    params = {"force":1}
                                    page = self._request.delete("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, params=params)
                                    if page.status_code == 200:
                                        self._logger.info("{0} : Post deleted : {1}".format(self._name, title))
                                    else:
                                        self._logger.error("{0} : Post not updated due status code : {1}".format(self._name, page.status_code))
                                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                                except ConnectionError as err:
                                    self._logger.error("{0} : Connection error for deleted post : {1}".format(self._name, err))
                                    exit(1)
                                except Exception as err:
                                    self._logger.error("{0} : Exception error for deleted post : {1}".format(self._name, err))

                            else:
                                # First match: update it in place, then
                                # refresh its comments and image links.
                                self._logger.debug("{0} : Data for post to update : {1}".format(self._name, i))
                                self._logger.info("{0} : Page {1} already exist and going to update".format(self._name, title))

                                try:
                                    page = self._request.post("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))

                                    if page.status_code == 200:
                                        result = page.json()
                                        self._logger.info("{0} : Post updated : {1}".format(self._name, title))
                                        self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"])
                                        self._linkImgPost(result["title"]["raw"], list_img, result["id"])
                                    else:
                                        self._logger.error("{0} : Post not updated due status code : {1}".format(self._name, page.status_code))
                                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                                except ConnectionError as err:
                                    self._logger.error("{0} : Connection error for update post : {1}".format(self._name, err))
                                    exit(1)
                                except Exception as err:
                                    self._logger.error("{0} : Exception error for update post : {1}".format(self._name, err))
            elif page.status_code == 400:
                self._logger.error("{0} : Connection for update post unauthorized : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
                break
            else:
                self._logger.error("{0} : Connection for update post error with status code : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for search post : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for search post : {1}".format(self._name, err))

    # No matching post found (and creation allowed): create it.
    if page_is_exist is False and self._no_create is False:
        try:
            self._logger.info("{0} : Creating posts : {1}".format(self._name, data["title"]))
            page = self._request.post("{1}://{0}/wp-json/wp/v2/posts".format(self._wordpress, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))

            if page.status_code == 201:
                result = page.json()
                self._logger.info("{0} : Post added : {1}".format(self._name, result["title"]["raw"]))
                self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"])
                self._linkImgPost(result["title"]["raw"], list_img, result["id"])
            else:
                # Fix: previously referenced undefined name `r` here.
                self._logger.error("{0} : Post not added due status code : {1}".format(self._name, page.status_code))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for create post : {1}".format(self._name, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for create post : {1}".format(self._name, err))
|
394
lib/WPMenu.py
Normal file
394
lib/WPMenu.py
Normal file
@@ -0,0 +1,394 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, logging, re, json
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
class WPMenu:
|
||||
# Constructor
|
||||
def __init__(self, name="Thread-0", basic=None, canalblog="", wordpress="", logger=None, parser="html.parser", ssl_canalblog=True, ssl_wordpress=True):
    """Build a menu importer bound to one canalblog source and one WordPress target.

    Keeps two independent HTTP sessions (source and target), each mounted
    with a retry policy for transient 429/5xx responses.
    """
    self._name = name
    self._basic = basic
    self._canalblog = canalblog
    self._wordpress = wordpress
    self._logger = logger
    self._parser = parser
    self._headers_json = {'Content-Type': 'application/json; charset=utf-8', 'Accept':'application/json'}
    self._directory = "backup"
    # Only an explicit ssl_*=False downgrades the scheme to plain http.
    self._protocol_wordpress = "http" if ssl_wordpress is False else "https"
    self._protocol_canalblog = "http" if ssl_canalblog is False else "https"

    self._request_canalblog = requests.Session()
    self._request_wordpress = requests.Session()

    retry_policy = Retry(connect=10, read=10, redirect=5,
        status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)

    # Mount the same retry policy on each session for its own scheme.
    for session, scheme in ((self._request_canalblog, self._protocol_canalblog),
                            (self._request_wordpress, self._protocol_wordpress)):
        session.mount('{0}://'.format(scheme), HTTPAdapter(max_retries=retry_policy))
|
||||
|
||||
|
||||
# Destructor
|
||||
def __del__(self):
    # Announce completion for this worker when the object is reclaimed.
    farewell = "{0} : Import finished for {1}".format(self._name, self._wordpress)
    print(farewell)
|
||||
|
||||
# Public method
|
||||
|
||||
## From file
|
||||
|
||||
def fromFile(self, files):
    """Feed the menu builder from a local backup file.

    :param files: path of the saved HTML page; logged and skipped when absent.
    """
    # Guard clause: nothing to do for a missing file.
    if not os.path.exists(files):
        self._logger.error("{0} : File isn't exist : {1}".format(self._name, files))
        return
    with open(files, 'r') as f:
        self._logger.info("{0} : File is being processed : {1}".format(self._name, files))
        self._menu(f.read())
|
||||
|
||||
|
||||
|
||||
## Get from URL
|
||||
|
||||
def fromUrl(self, canalblog):
    """Feed the menu builder from a live canalblog URL.

    The URL's scheme is forced to the configured canalblog protocol before
    fetching; on success the page body is handed to ``self._menu``.
    """
    self._canalblog = canalblog
    try:
        # Rewrite the scheme, then fix the triple-slash urlparse artefact.
        parts = urlparse(canalblog)._replace(scheme=self._protocol_canalblog)
        target = parts.geturl().replace(":///", "://")
        page = self._request_canalblog.get(target)
        if page.status_code != 200:
            self._logger.error("{0} : index didn't get due status code : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
        else:
            self._logger.info("{0} : Page web is being processed : {1}".format(self._name, target))
            self._menu(page.content)
    except ConnectionError as err:
        self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, canalblog, err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, canalblog, err))
|
||||
|
||||
## replace caracter
|
||||
|
||||
def _replaceCaracter(self, title_rendered):
|
||||
list_replace = {'’': "'", '–': '-', '…': '...', '« ': '"', ' »': '"', '« ': '"', ' »': '"', '’': "'", '"‘': "'"}
|
||||
for old, new in list_replace.items():
|
||||
title_rendered = title_rendered.replace(old, new)
|
||||
return title_rendered
|
||||
|
||||
def _getIdfromTitlePost(self, content):
    """Resolve a scraped page's title to the matching WordPress post or page.

    Extracts the title from *content* (an article ``h2.articletitle``, or a
    photo album's ``div.albumbody`` heading), then pages through the
    WordPress search results for an exact title match.

    :param content: raw HTML of the scraped page.
    :return: dict ``{"id", "type", "link"}``; id 0 and empty strings when no
             match was found.
    """
    idMenu = {"id":0, "type":"", "link":""}
    soup = BeautifulSoup(content, self._parser)
    articletitle = soup.find_all("h2", class_="articletitle")
    if len(articletitle) > 0:
        articletitle = articletitle[0].get_text()
        search = "posts"
        post_type = "post"
    # No article title (or an empty one): treat the page as a photo album,
    # whose title lives in div.albumbody > h2 and maps to a WordPress page.
    if len(articletitle) == 0:
        articletitle = soup.find_all("div", class_="albumbody")
        if len(articletitle) > 0:
            articletitle = articletitle[0].find("h2").get_text()
            search = "pages"
            post_type = "page"

    # NOTE(review): if neither title source matches, `search`/`post_type`
    # stay unbound and the loop below raises (swallowed by the generic
    # handler) — confirm whether such pages can reach this method.
    exist = False
    for index in range(1,10):
        if exist is False:
            params = {"search":articletitle, "per_page":100, "page":index}
            try:
                self._logger.debug("{0} : Get Url for {3} : {1} {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/{2}".format(self._wordpress, self._protocol_wordpress, search), params, search))
                page = self._request_wordpress.get("{1}://{0}/wp-json/wp/v2/{2}".format(self._wordpress, self._protocol_wordpress, search), auth=self._basic, params=params)
                if page.status_code == 200:
                    result = page.json()
                    self._logger.info("{0} : Get content {2} : {1}".format(self._name, len(result), search))
                    if len(result) > 0:
                        for i in result:
                            title_rendered = i["title"]["rendered"]
                            # Differing lengths hint at typographic
                            # substitutions; normalise before comparing.
                            if len(articletitle) != len(title_rendered):
                                title_rendered = self._replaceCaracter(title_rendered)
                            self._logger.debug("{0} : comparaison debug {1} {2}".format(self._name, articletitle, title_rendered))
                            if articletitle == title_rendered:
                                self._logger.debug("{0} : get {2} id : {1}".format(self._name, i, search))
                                idMenu = {"id":i["id"], "type":post_type, "link": i["link"]}
                                exist = True
                    else:
                        # Empty result page: no further pages to scan.
                        self._logger.debug("{0} : {2} {1}".format(self._name, result, len(result)))
                        break
                elif page.status_code == 400:
                    # WordPress returns 400 for a page index past the end.
                    self._logger.debug("{0} : {2} {1}".format(self._name, page.content, page.status_code))
                    break
                else:
                    self._logger.error("{0} : Post didn't get due status code : {1}".format(self._name, page.status_code))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for get content : {1}".format(self._name, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for get content : {1} ".format(self._name, err))
    return idMenu
|
||||
|
||||
def _getIdFromPost(self, href):
    """Resolve a WordPress id for the post behind *href*.

    If *href* is absolute (has a netloc) the page is fetched from the
    canalblog session; otherwise a local backup file under
    ``self._directory`` is read.  Either way the HTML is handed to
    ``_getIdfromTitlePost`` for title-based lookup.

    Returns a dict ``{"id": ..., "type": ..., "link": ...}``; ``id`` is 0
    when nothing was resolved.  Exits the process on connection errors.
    """
    idMenu = {"id":0, "type":"", "link":""}
    o = urlparse(href)
    if len(o.netloc) > 0:
        try:
            page = self._request_canalblog.get(href)
            if page.status_code == 200:
                self._logger.info("{0} : Get content : {1}".format(self._name, href))
                idMenu = self._getIdfromTitlePost(page.content)
            else:
                self._logger.error("{0} : {2} didn't get due status code : {1}".format(self._name, page.status_code, href))
                self._logger.debug("{0} : {1}".format(self._name, page.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, href, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, href, err))
    else:
        # Relative link: look it up in the local backup tree instead.
        if os.path.exists("{0}/..{1}".format(self._directory, o.path)):
            try:
                # FIX: use a context manager so the file handle is closed
                # deterministically (the original leaked the open handle).
                with open("{0}/..{1}".format(self._directory, o.path), "r") as content:
                    idMenu = self._getIdfromTitlePost(content)
            except Exception as err:
                self._logger.error("{0} : Exception error for get file content {1} : {2}".format(self._name, href, err))
    return idMenu
|
||||
|
||||
|
||||
def _getIdFromReverse(self, title, href):
    """Resolve a menu id from the last path components of *href*.

    The last segment decides the strategy: ``index.html`` under an
    ``albums`` directory (or any non-index page) is resolved as a post,
    while other ``index.html`` pages are resolved by category/tag title.
    Returns ``{"id": 0, "type": "", "link": ""}`` for the placeholder
    href ``"#"`` or when nothing matches.
    """
    self._logger.info("{0} : get title {1} from href {2}".format(self._name, title, href))
    idMenu = {"id":0, "type":"", "link":""}
    if href != "#":
        # Take the last three path segments directly instead of the
        # original reverse-string gymnastics: same values, same IndexError
        # on hrefs with fewer than three segments.
        segments = href.split("/")
        link = segments[-1]
        title = segments[-2]
        second_title = segments[-3]
        self._logger.info("{0} link {1} title {2}".format(self._name, link, title))
        if link == "index.html":
            if second_title == "albums":
                idMenu = self._getIdFromPost(href)
            else:
                idMenu = self._getId(title)
        else:
            idMenu = self._getIdFromPost(href)
    return idMenu
|
||||
|
||||
def _getId(self, title):
    """Search WordPress categories then tags for an entry named *title*.

    Queries ``/wp-json/wp/v2/categories`` and ``.../tags`` page by page
    (up to 9 pages of 100) and matches *title* against each item's
    ``name`` or ``slug``.  The LAST match wins (a tag match overrides an
    earlier category match) — preserved from the original behaviour.

    Returns ``{"id": ..., "type": "category"|"tag", "link": ...}`` with
    ``id`` 0 when nothing matched.  Exits the process on connection
    errors.
    """
    idMenu = {"id": 0, "type":"", "link":""}
    # FIX: removed the dead `exist` flag — it was initialised to False and
    # immediately tested (`if exist is False:` was always true), and the
    # value set inside the loop was never read afterwards.
    for i in ["categories", "tags"]:
        typeId = "category"
        if i == "tags":
            typeId = "tag"
        for index in range(1,10):
            try:
                params = {"search":title, "per_page":"100", "page":index}
                self._logger.info("{0} Get menu {1} {2} {3}".format(self._name, "{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol_wordpress), index, title))
                page = self._request_wordpress.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol_wordpress), auth=self._basic, params=params)
                if page.status_code == 200:
                    result = page.json()
                    if len(result) > 0:
                        for j in result:
                            self._logger.info("{0} info : {1} {2} {3}".format(self._name, j["name"], j["slug"], title))
                            if j["name"] == title or j["slug"] == title:
                                self._logger.info("{0} : comparaison ok : {1} {2}".format(self._name, j["id"], i))
                                idMenu = {"id": j["id"], "type": typeId, "link": j["link"]}
                    else:
                        # Empty page: no further results for this endpoint.
                        break
                elif page.status_code == 400:
                    # Past the last page: the API answers 400.
                    break
                else:
                    self._logger.error("{0} : {2} didn't get due status code : {1}".format(self._name, page.status_code, i))
                    self._logger.debug("{0} : {1}".format(self._name, page.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, "{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol_wordpress), err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, "{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol_wordpress), err))
    return idMenu
|
||||
|
||||
def _menu(self, content):
    """Build the navigation menu structure from the canalblog HTML.

    Parses the ``ul#listsmooth`` navigation list out of *content*, builds
    a flat list of parent entries plus a separate list of their children,
    reparents the children, resolves WordPress ids for every entry, and
    finally hands the result to ``_createMenu``.
    """
    soup = BeautifulSoup(content, self._parser)
    ul = soup.find("ul", id="listsmooth")
    menu = list()      # top-level entries
    children = list()  # flat list of sub-entries, each tagged with its parent title
    for anchor in ul.find_all("li"):
        parent = anchor.find("a").get_text().replace(" \xa0", "")
        href = anchor.find("a").get("href")
        # The entry linking to the blog root is the "home" item.
        if href == "{0}://{1}/".format(self._protocol_canalblog, self._canalblog):
            parent = "home"
        itemMenu = {"id":"", "type":"", "title": parent, "link":"", "href":href, "children":list()}
        if href == "#":
            # A "#" href marks a dropdown: collect its nested <li> items.
            li = anchor.find("ul").find_all("li")
            for child in li:
                a = child.find("a")
                href = a.get("href")
                self._logger.info("{0} Parent {1} : Child {2}".format(self._name, parent, a.get_text()))
                children.append({"title": a.get_text(), "parent": parent, "href":href, "link":""})
        menu.append(itemMenu)
    for i in range(0, len(children)):
        self._logger.info("{0} : Child {1} {2}".format(self._name, children[i], i))
        # Remove top-level entries that are actually children of a dropdown.
        # NOTE(review): the range is computed before `del menu[j]` shrinks
        # the list — the `j < len(menu)` guard avoids IndexError, but a
        # deletion shifts later indices so the following element is skipped
        # for this child; presumably duplicates never occur twice in a row.
        for j in range(0, len(menu)):
            if j < len(menu):
                if menu[j]["title"] == children[i]["title"]:
                    self._logger.info("{0} : Parent {1} {2}".format(self._name, menu[j], j))
                    del menu[j]
        # Attach the child under its parent entry.
        for j in range(0, len(menu)):
            self._logger.info("{0} : Children for : {1}".format(self._name, menu[j]["title"]))
            if menu[j]["title"] == children[i]["parent"]:
                menu[j]["children"].append({"id":"", "type":"", "title":children[i]["title"], "parent": children[i]["parent"], "link":"", "href":children[i]["href"]})
    # Resolve WordPress ids for every entry except "home".
    for i in range(0, len(menu)):
        self._logger.info("{0} : Menu {1} {2}".format(self._name, menu[i]["title"], len(menu[i]["children"])))
        if menu[i]["title"] != "home":
            for j in range(0, len(menu[i]["children"])):
                # Try title lookup first, then fall back to href parsing.
                idMenu = self._getId(menu[i]["children"][j]["title"])
                if idMenu["id"] == 0:
                    self._logger.debug("{0} : content children {1}".format(self._name, menu[i]["children"][j]))
                    idMenu = self._getIdFromReverse(menu[i]["children"][j]["title"], menu[i]["children"][j]["href"])
                if idMenu["id"] != 0:
                    menu[i]["children"][j] = {"id":idMenu["id"], "type": idMenu["type"], "link": idMenu["link"], "title": menu[i]["children"][j]["title"], "parent": menu[i]["children"][j]["parent"]}
            idMenu = self._getId(menu[i]["title"])
            self._logger.debug("{0} : content parent {1}".format(self._name, menu[i]))
            self._logger.debug("{0} : content idMenu {1}".format(self._name, idMenu))
            if idMenu["id"] == 0:
                idMenu = self._getIdFromReverse(menu[i]["title"], menu[i]["href"])
            if idMenu["id"] != 0:
                menu[i] = {"id":idMenu["id"], "type": idMenu["type"], "title":menu[i]["title"], "link":idMenu["link"], "children": menu[i]["children"]}
    self._createMenu(menu)
|
||||
|
||||
|
||||
|
||||
|
||||
def _createItemMenu(self, idMenu, itemMenu, parent):
    """Create or update one WordPress menu item and return its id.

    First searches ``/wp-json/wp/v2/menu-items`` for an existing item with
    the same rendered title inside menu *idMenu*; if found, the POST goes
    to the item's own URL (update), otherwise to the collection URL
    (create).  *parent* is the id of the parent menu item (0 for a
    top-level entry).

    Returns the item's id, or 0 when nothing was created/updated.  Exits
    the process on connection errors.
    """
    idItemMenu = 0
    self._logger.info("{0} : Create item menu from API Wordpress : {1}".format(self._name, self._wordpress))
    try:
        params = {"search": itemMenu["title"], "menus": idMenu}
        page = self._request_wordpress.get("{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress), auth=self._basic, params=params)
        if page.status_code == 200:
            result = page.json()
            # Look for an already-existing item with the same title
            # (titles are normalised through _replaceCaracter first).
            for i in result:
                if self._replaceCaracter(i["title"]["rendered"]) == itemMenu["title"]:
                    idItemMenu = int(i["id"])
            self._logger.info("{0} : Length of result for item menus : {1}".format(self._name, len(result)))
            url = "{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress)
            if idItemMenu != 0:
                # Existing item: POST to its own endpoint to update it.
                url = "{1}://{0}/wp-json/wp/v2/menu-items/{2}".format(self._wordpress, self._protocol_wordpress, idItemMenu)
            try:
                # WordPress calls the tag taxonomy "post_tag" in menu items.
                objectt = itemMenu["type"]
                if objectt == "tag":
                    objectt = "post_tag"
                # Default payload: a plain "#" link; refined below when the
                # item is "home" or carries a resolved object id.
                data = {"title": itemMenu["title"], "status": "publish", "parent":parent, "menus":idMenu, "url":"#"}
                if itemMenu["title"] == "home":
                    data = {"title": itemMenu["title"], "status": "publish", "parent":parent, "menus":idMenu, "url":"{0}://{1}".format(self._protocol_wordpress, self._wordpress)}
                # itemMenu["id"] may be "" (unresolved), a numeric string,
                # or an int depending on how the entry was built upstream.
                if type(itemMenu["id"]) is str:
                    if len(itemMenu["id"]) > 0:
                        data = {"title": itemMenu["title"], "status": "publish", "parent":parent, "url": itemMenu["link"], "menus":idMenu, "object":objectt, "object_id":int(itemMenu["id"])}
                elif type(itemMenu["id"]) is int:
                    data = {"title": itemMenu["title"], "status": "publish", "parent":parent, "url": itemMenu["link"], "menus":idMenu, "object":objectt, "object_id":itemMenu["id"]}
                self._logger.debug("{0} : data for create/update : {1}".format(self._name, data))
                page = self._request_wordpress.post(url, auth=self._basic, headers=self._headers_json, data=json.dumps(data))
                # 201 = created, 200 = updated.
                if page.status_code in [201, 200]:
                    result = page.json()
                    idItemMenu = int(result["id"])
                    self._logger.info("{0} : create/update item menu : {1}".format(self._name, itemMenu["title"]))
                else:
                    self._logger.error("{0} : Create menu items for {2} didn't get due status code : {1}".format(self._name, page.status_code, itemMenu["title"]))
                    self._logger.debug("{0} : {1} {2}".format(self._name, page.content, itemMenu))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for create item menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress), err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for create item menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress), err))
        else:
            self._logger.error("{0} : Get menu items didn't get due status code : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
    except ConnectionError as err:
        self._logger.error("{0} : Connection error for get item menus {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress), err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error for get item menus {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menu-items".format(self._wordpress, self._protocol_wordpress), err))
    return idItemMenu
|
||||
|
||||
def _createMenu(self, menu):
    """Ensure the WordPress menu exists, then populate it with *menu*.

    Searches ``/wp-json/wp/v2/menus`` for a menu named
    ``"Menu <wordpress-host>"``; creates it when absent, reuses its id
    when present, and finally delegates the item creation to
    ``_addItemMenu``.  Exits the process on connection errors.
    """
    title = "Menu {0}".format(self._wordpress)
    self._logger.info("{0} : Create menu from API Wordpress : {1}".format(self._name, title))
    try:
        params = {"search": title}
        page = self._request_wordpress.get("{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), auth=self._basic, params=params)
        if page.status_code == 200:
            result = page.json()
            self._logger.info("{0} : Get content menus : {1}".format(self._name, len(result)))
            idMenu = 0
            if len(result) == 0:
                # No menu with that name yet: create it.
                self._logger.info("{0} : Create menu : {1}".format(self._name, title))
                data = {"name": title}
                try:
                    page = self._request_wordpress.post("{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
                    if page.status_code == 201:
                        result = page.json()
                        self._logger.debug("{0} : Get menus : {1}".format(self._name, result))
                        if len(result) > 0:
                            idMenu = result["id"]
                    else:
                        self._logger.error("{0} : Post menu didn't get due status code : {1}".format(self._name, page.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page.content))
                except ConnectionError as err:
                    self._logger.error("{0} : Connection error for create menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), err))
                    exit(1)
                except Exception as err:
                    self._logger.error("{0} : Exception error for get menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), err))
            else:
                # Menu already exists: pick the id of the exact-name match.
                self._logger.debug("{0} : Get menus : {1}".format(self._name, result))
                for i in result:
                    self._logger.debug("{0} : List menus : {1}".format(self._name, i))
                    if i["name"] == title:
                        idMenu = i["id"]
            self._logger.info("{0} : Get ID menus : {1}".format(self._name, idMenu))
            self._addItemMenu(menu, idMenu)
        else:
            self._logger.error("{0} : Get menu didn't get due status code : {1}".format(self._name, page.status_code))
            self._logger.debug("{0} : {1}".format(self._name, page.content))
    except ConnectionError as err:
        self._logger.error("{0} : Connection error for get menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), err))
        exit(1)
    except Exception as err:
        self._logger.error("{0} : Exception error for get menu {1} : {2}".format(self._name, "{1}://{0}/wp-json/wp/v2/menus".format(self._wordpress, self._protocol_wordpress), err))
|
||||
|
||||
|
||||
def _addItemMenu(self, menu, idMenu):
    """Create every top-level entry of *menu* in WordPress menu *idMenu*,
    then create each entry's children under the id the parent received."""
    self._logger.info("{0} : add item to menu : {1}".format(self._name, idMenu))
    for entry in menu:
        self._logger.debug("{0} : debug create item menu : {1}".format(self._name, entry))
        # Top-level entries hang off the menu root (parent id 0); the call
        # returns the id the children must attach to.
        parent_id = self._createItemMenu(idMenu, entry, 0)
        for child in entry["children"]:
            self._createItemMenu(idMenu, child, parent_id)
|
128
lib/WPRemove.py
Normal file
128
lib/WPRemove.py
Normal file
@@ -0,0 +1,128 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, logging, re, json
|
||||
from requests.adapters import HTTPAdapter
|
||||
from requests.packages.urllib3.util.retry import Retry
|
||||
|
||||
class WPRemove:
    """Bulk-delete WordPress content (posts, tags, categories, media)
    through the REST API, sharing the work across threads by page index.
    """

    # Constructor
    def __init__(self, index_name=1, number_thread=1, basic=None, wordpress="", logger=None, ssl_wordpress=True):
        # basic: requests auth object; wordpress: target host name.
        self._basic = basic
        self._wordpress = wordpress
        self._logger = logger
        self._headers_json = {'Content-Type': 'application/json', 'Accept':'application/json'}
        self._name = "Thread-{0}".format(index_name)
        # index_name doubles as the page number this thread processes.
        self._index_thread = index_name
        self._protocol = "https"
        self._number_thread = number_thread
        if ssl_wordpress is False:
            self._protocol = "http"
        self._request = requests.Session()
        # Retry transient failures (rate limit / 5xx) with backoff.
        retries = Retry(connect=10, read=10, redirect=5,
                status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
        self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))

    # Destructor
    def __del__(self):
        print("{0} : Import finished for {1}".format(self._name, self._wordpress))

    # Private helpers

    def _getCount(self, composant):
        """Return the total number of *composant* items (posts, tags, ...)
        reported by the API's X-WP-Total header; 0 on error."""
        count = 0
        try:
            params = {"per_page":1}
            self._logger.info("{0} : Get count {2} to remove for url : {1}".format(self._name, self._wordpress, composant))
            r = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, composant, self._protocol), params=params, auth=self._basic, headers=self._headers_json)
            if r.status_code == 200:
                count = int(r.headers["X-WP-Total"])
            else:
                self._logger.error("{0} : Error for list to remove {1} due status code {2}".format(self._name, composant, r.status_code))
                self._logger.debug("{0} : Content error for {1} : {2}".format(self._name, composant, r.content))
        except ConnectionError as err:
            self._logger.error("{0} : Connection error for list {1} to remove : {2}".format(self._name, composant, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for list {1} to remove : {2}".format(self._name, composant, err))
        return count

    # Public methods

    def setUrl(self, wordpress):
        """Point this remover at another WordPress host."""
        self._wordpress = wordpress

    def cleanPosts(self):
        """Delete all posts."""
        self._removeAll("posts")

    def cleanTags(self):
        """Delete all tags."""
        self._removeAll("tags")

    def cleanCategories(self):
        """Delete all categories (except the default one)."""
        self._removeAll("categories")

    def cleanMedia(self):
        """Delete all media attachments."""
        self._removeAll("media")

    # Private method

    def _removeAll(self, composant):
        """Delete every *composant* item this thread is responsible for.

        Fetches one page of items (page size = total / thread count,
        capped at 100; page index = this thread's index), deletes each
        item except the default "non-classe" category, then recurses
        until no items remain.  Exits the process on connection errors.
        """
        count = self._getCount(composant)
        self._logger.debug("{0} : Count for {1} : {2}".format(self._name, composant, count))
        if count > 0:
            self._logger.debug("{0} : Number thread for {1} : {2}".format(self._name, composant, self._number_thread))
            page = count / int(self._number_thread)
            self._logger.debug("{0} : Page for {1} : {2}".format(self._name, composant, page))
            # Round the float division up to cover the remainder.
            if page > int(page):
                page = int(page) + 1
            # The API caps per_page at 100.
            if page > 100:
                page = 100
            params = {"per_page":page, "page":self._index_thread}
            self._logger.info("{0} : Params for {1} : {2}".format(self._name, composant, params))
            try:
                self._logger.info("{0} : List {2} to remove for url : {1}".format(self._name, self._wordpress, composant))
                r = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, composant, self._protocol), auth=self._basic, params=params, headers=self._headers_json)
                if r.status_code == 200:
                    result = r.json()
                    if len(result) > 0:
                        for i in result:
                            is_delete = True
                            self._logger.info(i["slug"])
                            # Never delete WordPress's default category.
                            if i["slug"] == "non-classe":
                                is_delete = False
                            if is_delete is True:
                                # Taxonomies expose "name"; posts/media expose a rendered title.
                                if composant == "tags" or composant == "categories":
                                    title = i["name"]
                                else:
                                    title = i["title"]["rendered"]
                                self._logger.info("{0} : Remove {2} for url {1} : {3}".format(self._name, self._wordpress, composant, title))
                                # force=1 bypasses the trash and deletes permanently.
                                params = {"force":1}
                                try:
                                    r = self._request.delete("{3}://{0}/wp-json/wp/v2/{1}/{2}".format(self._wordpress, composant, i["id"], self._protocol), auth=self._basic, headers=self._headers_json , params=params)
                                    if r.status_code == 200:
                                        self._logger.info("{0} : Post removed for URL {1} {2} : {3}".format(self._name, self._wordpress, composant, title))
                                    else:
                                        self._logger.error("{0} : Connection error for post {1} {2} {3} with status code {4}".format(self._name, self._wordpress, composant, title, r.status_code))
                                except ConnectionError as err:
                                    self._logger.error("{0} : Connection error for {1} remove : {2}".format(self._name, composant, err))
                                    exit(1)
                                except Exception as err:
                                    self._logger.error("{0} : Exception error for {1} remove : {2}".format(self._name, composant, err))
                        # Recurse to process the items that remain.
                        self._removeAll(composant)
                    # NOTE(review): r was rebound by the delete calls above;
                    # this checks the LAST response seen — confirm intent.
                    if r.status_code == 400:
                        self._logger.error("{0} : No content for {1} to remove : {2}".format(self._name, composant, r.status_code))
                else:
                    self._logger.error("{0} : Error for list to remove {1} due status code {2}".format(self._name, composant, r.status_code))
                    self._logger.debug("{0} : Content error for {1} : {2}".format(self._name, composant, r.content))
            except ConnectionError as err:
                self._logger.error("{0} : Connection error for list {1} to remove : {2}".format(self._name, composant, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for list {1} to remove : {2}".format(self._name, composant, err))
|
0
lib/__init__.py
Normal file
0
lib/__init__.py
Normal file
241
web_scrap.py
241
web_scrap.py
@@ -1,241 +0,0 @@
|
||||
#!/usr/bin/python3
|
||||
from bs4 import BeautifulSoup
|
||||
from urllib.parse import urlparse
|
||||
import requests, os, argparse, logging
|
||||
|
||||
def mkdirPath(path_dir, logger):
    """Create *path_dir* and any missing parent directories.

    A no-op when the directory already exists; logs and exits the
    process when creation fails.

    FIX: replaces a hand-rolled segment-by-segment mkdir loop with the
    stdlib `os.makedirs` (the original also logged "created" before the
    mkdir actually succeeded).
    """
    if not os.path.exists(path_dir):
        try:
            os.makedirs(path_dir, exist_ok=True)
            logger.debug("Dossier crée : {0}".format(path_dir))
        except Exception as err:
            logger.error("Directory error : {0}".format(err))
            logger.debug("Directory error : {0} {1}".format(err, path_dir))
            exit(1)
|
||||
|
||||
|
||||
def getScriptCss(url, js, css, logger):
    """Return absolute URLs of script and/or stylesheet resources of *url*.

    Fetches the page and collects <script src=...> URLs when *js* is
    True and <link rel="stylesheet" href=...> URLs when *css* is True.
    Relative URLs are made absolute with the page's scheme and host.
    Exits the process on connection or parse errors.
    """
    try:
        page = requests.get(url)
    except Exception as err:
        logger.error("Connection error : {0}".format(err))
        exit(1)
    page_url = []
    if page.status_code == 200:
        soup = BeautifulSoup(page.text, 'html.parser')
        if js is True:
            script = soup.find_all("script")
            for anchor in script:
                src = anchor.get("src", "/")
                if src != "/":
                    try:
                        u = urlparse(url)
                        o = urlparse(src)
                    except Exception as err:
                        logger.error("parsing error : {0}".format(err))
                        exit(1)
                    # Relative src: inherit host and scheme from the page.
                    if o.netloc == "":
                        o = o._replace(netloc=u.netloc)
                        o = o._replace(scheme=u.scheme)
                    page_url.append(o.geturl())
        if css is True:
            link = soup.find_all("link")
            for anchor in link:
                # FIX: a <link> without a rel attribute made .get("rel")
                # return None and rel[0] raised TypeError; default to an
                # empty list and skip such tags instead.
                rel = anchor.get("rel", [])
                if rel and rel[0] == "stylesheet":
                    href = anchor.get("href", "/")
                    if href != "/":
                        try:
                            u = urlparse(url)
                            o = urlparse(href)
                        except Exception as err:
                            logger.error("parsing error : {0}".format(err))
                            exit(1)
                        # Relative href: inherit host and scheme from the page.
                        if o.netloc == "":
                            o = o._replace(netloc=u.netloc)
                            o = o._replace(scheme=u.scheme)
                        page_url.append(o.geturl())
    return page_url
|
||||
|
||||
def getImg(webpage, logger):
    """Collect the unique <img> src attributes found across the pages
    listed in *webpage*.  Exits the process on connection errors."""
    page_img = []
    for page_address in webpage:
        try:
            response = requests.get(page_address)
        except Exception as err:
            logger.error("Connection error : {0}".format(err))
            exit(1)
        if response.status_code != 200:
            continue
        document = BeautifulSoup(response.text, 'html.parser')
        logger.info("image from page: {0} : ".format(page_address))
        for tag in document.find_all("img"):
            src = tag.get("src", "/")
            # Skip tags without a real src and de-duplicate across pages.
            if src != "/" and src not in page_img:
                logger.info("image: {0} : ".format(src))
                page_img.append(src)
    return page_img
|
||||
|
||||
def getUrlPage(url, logger):
    """Discover every article page of the canalblog site at *url*.

    Reads the ``ul#listsmooth`` navigation menu for category entry
    points, then for each entry follows the "pagingfirstline" pagination
    block to enumerate archive pages (``p10-10.html``, ``p20-10.html``,
    ...) and collects each article link found in the ``<h2>`` headings.
    Returns the de-duplicated list of page URLs; exits the process on
    connection or parse errors.
    """
    try:
        page = requests.get(url)
    except Exception as err:
        logger.error("Connection error : {0}".format(err))
        exit(1)
    page_url = []
    if page.status_code == 200:
        soup = BeautifulSoup(page.text, 'html.parser')
        # Menu entries are the crawl's entry points; "#" items are dropdowns.
        ul = soup.find_all("ul", id="listsmooth")
        for anchor in ul[0].find_all("a"):
            href = anchor.get('href', '/')
            if href != "#":
                page_url.append(href)
    webpage = []
    for i in page_url:
        try:
            page = requests.get(i)
        except Exception as err:
            logger.error("Connection error : {0}".format(err))
            exit(1)
        if page.status_code == 200:
            logger.info("page : {0}".format(i))
            if i not in webpage:
                webpage.append(i)
            soup = BeautifulSoup(page.text, 'html.parser')
            # NOTE(review): double assignment kept from the original; only
            # class_div is meaningful here, pagingfirstline is reassigned below.
            class_div = pagingfirstline = soup.find_all("div", class_="pagingfirstline")
            if len(class_div) > 0:
                pagingfirstline = class_div[0].find_all("a")
                if len(pagingfirstline) > 1:
                    # Last pagination link looks like .../pN-10.html where N
                    # encodes the offset of the last page.
                    lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
                    element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
                    number_page = element_lastpage.split("-")[0].split("p")[1]
                    number_lastpage = int(number_page) / 10
                    # Build each intermediate archive page (10 posts per page).
                    for j in range(1,int(number_lastpage)):
                        paging = j * 10
                        categorie = urlparse(i).path.split("/")
                        url_paging = "{0}/archives/p{1}-10.html".format(url, paging)
                        if len(categorie) > 2:
                            url_paging = "{0}/archives/{1}/p{2}-10.html".format(url, categorie[2], paging)
                        logger.info(url_paging)
                        if url_paging not in webpage:
                            webpage.append(url_paging)
                        page = requests.get(url_paging)
                        if page.status_code == 200:
                            soup = BeautifulSoup(page.text, 'html.parser')
                            # Article links live in the <h2> headings.
                            h2 = soup.find_all("h2")
                            for title in h2:
                                href = title.find_all("a")[0].get("href", "/")
                                if href not in webpage:
                                    try:
                                        o = urlparse(href)
                                        o = o._replace(scheme="https").geturl()
                                    except Exception as err:
                                        logger.error("parsing error : {0}".format(err))
                                        exit(1)
                                    webpage.append(o)
    return webpage
|
||||
|
||||
|
||||
def downloadPage(webpage, backup_dir, logger):
    """Download every URL in *webpage* under *backup_dir*, mirroring the
    URL's host and path as directories.

    A URL whose path has no directory/file component is saved as
    ``<backup_dir>/<host>/index.html``.  Exits the process on parse,
    connection, or file errors.
    """
    for i in range(0, len(webpage)):
        try:
            o = urlparse(webpage[i])
        except Exception as err:
            logger.error("parsing error : {0}".format(err))
            exit(1)
        # Split the URL path into its directory part and file name.
        path_web = o.path.split("/")
        filePageWeb = path_web[len(path_web)-1]
        path_web.pop(len(path_web)-1)
        dir_page_web = "/".join(path_web)
        mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web), logger)
        try:
            r = requests.get(webpage[i])
        except Exception as err:
            logger.error("Connection error : {0}".format(err))
            exit(1)
        if r.status_code == 200:
            fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
            if len(dir_page_web) > 0 and len(filePageWeb) > 0:
                fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
            logger.info("{0}/{1} : {2}".format(i+1, len(webpage), fileDownload))
            try:
                # FIX: use a context manager so the file handle is flushed
                # and closed deterministically (the original relied on GC).
                with open(fileDownload, "wb") as f:
                    f.write(r.content)
            except Exception as err:
                logger.error("file error : {0}".format(err))
                exit(1)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Command line: which resource kinds to back up and where.
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="canblog URL to be scraping", required=True)
    parser.add_argument("--dir",
        default="backup",
        help="backup file path")
    parser.add_argument("--debug", help="Verbosity", action="store_true")
    parser.add_argument("--logfile", help="Log file", default="")
    parser.add_argument("--no-css", help="No CSS", dest="css", action="store_true")
    parser.add_argument("--no-js", help="No JS", dest="js", action="store_true")
    parser.add_argument("--no-img", help="No img", dest="img", action="store_true")
    parser.add_argument("--no-html", help="No HTML", dest="html", action="store_true")
    parser.add_argument("--quiet", help="No console output", action="store_true")
    args = parser.parse_args()

    # Logging: console handler unless --quiet, optional file handler.
    logger = logging.getLogger('web_scrap')
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    if args.quiet is False:
        ch = logging.StreamHandler()
        if args.debug is True:
            logger.setLevel(logging.DEBUG)
            ch.setLevel(logging.DEBUG)
        else:
            logger.setLevel(logging.INFO)
            ch.setLevel(logging.INFO)
        ch.setFormatter(formatter)
        logger.addHandler(ch)
    if len(args.logfile) > 0:
        fileHandler = logging.FileHandler(args.logfile)
        if args.debug is True:
            fileHandler.setLevel(logging.DEBUG)
        else:
            fileHandler.setLevel(logging.INFO)
        fileHandler.setFormatter(formatter)
        logger.addHandler(fileHandler)

    # Normalise the target URL to https.
    try:
        o = urlparse(args.url)
        o = o._replace(scheme="https")
        url = o.geturl().replace(":///", "://")
    except Exception as err:
        logger.error("parsing error : {0}".format(err))
        # FIX: the original fell through here, leaving `url` and `o`
        # unbound and crashing with a NameError below; exit cleanly.
        exit(1)

    # JS and CSS assets (unless suppressed).
    if args.js is False:
        script = getScriptCss(url, True, False, logger)
        downloadPage(script, "{0}/{1}/{2}".format(args.dir, o.path, "dists/js"), logger)
    if args.css is False:
        css = getScriptCss(url, False, True, logger)
        downloadPage(css, "{0}/{1}/{2}".format(args.dir, o.path, "dists/css"), logger)

    # HTML pages and their images (unless suppressed); the page list is
    # crawled once and shared by both steps.
    if args.html is False or args.img is False:
        webpage = getUrlPage(url, logger)
        if args.html is False:
            downloadPage(webpage, args.dir, logger)
        if args.img is False:
            page_src = getImg(webpage, logger)
            downloadPage(page_src, "{0}/{1}/{2}".format(args.dir, o.path, "img"), logger)
|
Reference in New Issue
Block a user