76 Commits

Author SHA1 Message Date
b88917127d Merge pull request 'webpage-file' (#20) from webpage-file into master
Reviewed-on: #20
2023-06-26 22:28:26 +00:00
781d8959c4 fix tmp directory parameter 2023-06-27 00:25:23 +02:00
a67ff868f3 fix json read file 2023-06-26 23:52:03 +02:00
8e0abc40bd check files tmp 2023-06-26 23:09:54 +02:00
9149a6c5cb rollback webpage 2023-06-26 22:44:42 +02:00
d1b6e8048a add revert files json 2023-06-25 21:16:05 +02:00
0eab1d885b add open file tmp 2023-06-20 21:38:39 +02:00
35ff22d463 change parameter for webpage 2023-06-20 00:17:38 +02:00
7dace5bdb7 add file tmp 2023-06-19 23:58:59 +02:00
703cc8922a Merge pull request 'diff-img' (#19) from diff-img into master
Reviewed-on: #19
2023-06-16 22:08:50 +00:00
ff3ee301fb diff img path done 2023-06-15 00:10:44 +02:00
04da5bc5f6 diff path network 2023-06-13 22:00:51 +02:00
f01a69a1e7 Merge pull request 'wpchange' (#18) from wpchange into master
Reviewed-on: #18
2023-06-12 22:48:57 +00:00
da4db0277a add img a change 2023-06-13 00:46:18 +02:00
7228911e68 add js and css 2023-06-13 00:38:34 +02:00
9e7e1b27fd change WIP test 2023-06-11 20:24:22 +02:00
16368c13bb add WPChange 2023-06-10 01:58:08 +02:00
c631909cb6 WPchange wip 2023-06-06 00:22:16 +02:00
3e76892676 add wpchange 2023-06-05 23:46:57 +02:00
3e75f05340 Merge pull request 'add-parameter' (#17) from add-parameter into master
Reviewed-on: #17
2023-06-05 20:58:51 +00:00
e48b262d7e add parameter no-image 2023-06-03 09:07:33 +02:00
2f1c081823 add parameter 2023-06-01 15:28:48 +02:00
4bd6f5c038 Merge pull request 'add wait' (#16) from wait_remove into master
Reviewed-on: #16
2023-05-29 21:36:38 +00:00
d3a03e1cb3 add wait 2023-05-29 23:36:11 +02:00
f507efce60 Merge pull request 'replace-exception' (#15) from replace-exception into master
Reviewed-on: #15
2023-05-29 21:29:18 +00:00
75c9fa0ad3 fix if 2023-05-28 22:42:38 +02:00
110ccc4bb1 replace exception for wpexport 2023-05-28 22:42:04 +02:00
269a9e9ccd add replace exception import 2023-05-28 22:31:46 +02:00
4c0ec09d91 move exception 2023-05-28 22:07:43 +02:00
42cfb30583 Merge pull request 'remove-thread' (#14) from remove-thread into master
Reviewed-on: #14
2023-05-26 22:18:19 +00:00
c76b20e64a add remove multithread 2023-05-27 00:16:41 +02:00
aff69bfcbc add multithread for remove 2023-05-27 00:06:11 +02:00
fd426f150d add variable 2023-05-26 17:50:57 +02:00
e21721cac1 move exception 2023-05-26 17:44:28 +02:00
69504687ef add count 2023-05-26 16:38:19 +02:00
fb59746fc0 Merge pull request 'https' (#13) from https into master
Reviewed-on: #13
2023-05-26 09:24:21 +00:00
5916cbff00 fix parameter 2023-05-26 10:04:36 +02:00
cd2fbd5372 add protocol https/http 2023-05-25 00:31:34 +02:00
f3b04f9459 update script backup 2023-05-24 23:34:03 +02:00
a400375e01 remove slugify import 2023-05-24 23:30:23 +02:00
351cb10f01 Merge pull request 'fix-media' (#12) from fix-media into master
Reviewed-on: #12
2023-05-23 14:47:07 +00:00
5c5dc707f5 fix headers search author 2023-05-23 16:46:07 +02:00
f69298179a reduce line code and add private method 2023-05-23 13:45:59 +02:00
d3ec7d147d loop replace 2023-05-23 11:22:37 +02:00
0fc6e78a18 fix title rendered 2023-05-23 00:02:51 +02:00
3718b807ba more message debug 2023-05-21 21:14:36 +02:00
75772ba7f0 remove doublon 2023-05-21 21:12:00 +02:00
769b7f43fc fix add or update post 2023-05-18 00:24:41 +02:00
ba42d56be1 fix webpage 2023-05-16 00:15:16 +02:00
d18f4e1579 Add clean 2023-05-15 23:51:45 +02:00
8bdaea3910 add remove command 2023-05-15 23:42:18 +02:00
f3cb5c4069 fix parameters 2023-05-15 23:22:41 +02:00
cfb24bed0e add remove parameters 2023-05-15 23:21:25 +02:00
ee8674fd59 add remove class 2023-05-15 23:13:55 +02:00
ece4d78dd8 add remove all 2023-05-14 18:35:36 +02:00
3d7aa19441 add update 2023-05-12 00:16:58 +02:00
3c2f1cc017 separate publication and principal 2023-05-07 17:38:44 +02:00
f9be6770e3 separate article and page 2023-05-07 09:26:48 +02:00
21d2f35e6e add password parameter and fix post to delete 75% 2023-05-04 00:47:06 +02:00
4789fe80aa fix import 50% 2023-05-02 16:59:31 +02:00
3161a06459 Merge pull request 'thread' (#9) from thread into master
Reviewed-on: #9
2023-05-01 20:05:02 +00:00
1f6bd96a8e add del 2023-05-01 21:58:47 +02:00
b359521001 fix from directory import 2023-05-01 21:44:33 +02:00
73c0998ae0 fix thread fromDirectory and fromUrl 2023-05-01 21:18:57 +02:00
939e744d1d remove draft file 2023-05-01 15:45:59 +02:00
0029898e6e add debug message + fix error directory list 2023-05-01 15:45:34 +02:00
ab3720fbbc fix directory in thread 2023-04-29 22:26:47 +02:00
7a1286c4e2 add thread for directory import 2023-04-28 23:37:13 +02:00
5a4bdbb420 add name thread in message logger 2023-04-28 23:14:57 +02:00
bf4c2480f8 import threading for directory WIP 2023-04-27 00:00:53 +02:00
a0b816fe18 add debug thread 2023-04-26 23:03:43 +02:00
08ff16527d fix thread in parallelism 2023-04-25 16:15:45 +02:00
0acd5067cb thread 50% 2023-04-25 00:34:25 +02:00
aaac2385a3 fix previos commit 2023-04-24 23:16:53 +02:00
88f258ffba Add parallelism 2023-04-24 23:15:29 +02:00
a39e2200bd add function 2023-04-22 00:07:54 +02:00
7 changed files with 1206 additions and 503 deletions

6
.gitignore vendored
View File

@@ -1,4 +1,4 @@
backup/ backup*/
backup1/ wp-navigation
web_scrap.log *.log
__pycache__/ __pycache__/

View File

@@ -3,8 +3,8 @@
TAR=/usr/bin/tar TAR=/usr/bin/tar
PYTHON=/usr/bin/python3 PYTHON=/usr/bin/python3
GZIP=/usr/bin/gzip GZIP=/usr/bin/gzip
SCRIPTDIR=/home/valentin/script SCRIPTDIR=/home/valentin/script/webscrap
WEBSCRAP=${SCRIPTDIR}/web_scrap.py WEBSCRAP=${SCRIPTDIR}/import_export_canalblog.py
URL=www.clarissariviere.com URL=www.clarissariviere.com
DATE=$(date +%Y%m%d) DATE=$(date +%Y%m%d)
DIRECTORY=/home/valentin/backup DIRECTORY=/home/valentin/backup
@@ -24,7 +24,7 @@ else
fi fi
subject="${subject} ${URL} ${DATE}" subject="${subject} ${URL} ${DATE}"
echo > ${BACKUPDIR}/${LOGFILE} echo > ${BACKUPDIR}/${LOGFILE}
${PYTHON} ${WEBSCRAP} --url ${URL} --dir ${DIRECTORY} --quiet --logfile ${BACKUPDIR}/${LOGFILE} ${PYTHON} ${WEBSCRAP} --quiet --logfile ${BACKUPDIR}/${LOGFILE} --parallel 20 export --url ${URL} --directory ${DIRECTORY}
if [ ${?} -ne 0 ]; then if [ ${?} -ne 0 ]; then
subject="${subject} echoue : recuperation page" subject="${subject} echoue : recuperation page"
echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER} echo ${subject} | mail -s "${subject}" -A ${BACKUPDIR}/${LOGFILE} ${SENDER}

View File

@@ -2,9 +2,130 @@
from requests.auth import HTTPBasicAuth from requests.auth import HTTPBasicAuth
from getpass import getpass from getpass import getpass
from urllib.parse import urlparse from urllib.parse import urlparse
import argparse, logging from concurrent import futures
from concurrent.futures import as_completed, wait, ALL_COMPLETED
import argparse, logging, threading, os, glob
from lib.WPImport import WPimport from lib.WPImport import WPimport
from lib.WPExport import WPExport from lib.WPExport import WPExport
from lib.WPRemove import WPRemove
from lib.WPChange import WPChange
def change(index, number, args, logger):
changeWp = WPChange(logger=logger, index_name=index, number_thread=number)
changeWp.fromDirectory(args.directory)
del changeWp
def remove(index, number, args, basic, logger, ssl_wordpress):
removeWp = WPRemove(basic=basic, wordpress="", logger=logger, ssl_wordpress=ssl_wordpress, index_name=index, number_thread=number)
if args.remove == True:
for i in args.wordpress.split(","):
removeWp.setUrl(i)
removeWp.cleanPosts()
removeWp.cleanTags()
removeWp.cleanCategories()
removeWp.cleanMedia()
else:
for i in args.wordpress.split(","):
removeWp.setUrl(i)
if args.posts == True:
removeWp.cleanPosts()
if args.categories == True:
removeWp.cleanCategories()
if args.tags == True:
removeWp.cleanTags()
if args.media == True:
removeWp.cleanMedia()
del removeWp
def download(name_thread, max_thread, url, logger, parser, directory, html, img, ssl_canalblog, revert, tmp):
exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory, ssl_canalblog=ssl_canalblog)
if not revert:
exportWp.getUrlPage(name_thread, max_thread)
for i in ["article", "page"]:
for j in ["publications", "principal"]:
if html is False:
exportWp.downloadHTML(j, i)
if img is False:
exportWp.downloadImg(j, i)
del exportWp
def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, basic, serial, ssl_wordpress, ssl_canalblog, create, update, image, revert, tmp):
canalblog = canalblog.split(",")
wordpress = wordpress.split(",")
name = "Thread-{0}".format(int(name_thread) + 1)
protocol = "https"
if ssl_canalblog is False:
protocol = "http"
if serial is False:
for canal in canalblog:
try:
o = urlparse(canal)
o = o._replace(scheme=protocol)
url = o.geturl().replace(":///", "://")
except Exception as err:
logger.error("{0} : parsing error : {1}".format(name, err))
exit(1)
exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog, tmp=tmp)
if not revert:
exportWp.getUrlPage(name_thread, max_thread)
del exportWp
for j in wordpress:
importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp)
for k in ["article", "page"]:
for l in ["publications", "principal"]:
importWp.fromUrl(l, k)
del importWp
else:
if len(canalblog) != len(wordpress):
logger.error("{0} : ERREUR : Le nombre de dossier n'est pas equivalent au nombre d'URL wordpress".format(name))
exit(1)
for i in range(0, len(canalblog)-1):
try:
o = urlparse(canalblog[i])
o = o._replace(scheme=protocol)
url = o.geturl().replace(":///", "://")
except Exception as err:
logger.error("parsing error : {0}".format(err))
exit(1)
exportWp = WPExport(name=name, url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog)
if not revert:
exportWp.getUrlPage(name_thread, max_thread)
del exportWp
importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp)
for k in ["article", "page"]:
for l in ["publications", "principal"]:
importWp.fromUrl(webpage[l][k])
del importWp
def importDirectory(name_thread, max_thread, directory, logger, parser, wordpress, basic, serial, ssl_wordpress, create, update, image):
name = "Thread-{0}".format(int(name_thread) + 1)
directory = directory.split(",")
wordpress = wordpress.split(",")
if serial is False:
for i in wordpress:
importWp = WPimport(name=name, basic=basic, wordpress=i, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image)
for j in directory:
importWp.fromDirectory(j, name_thread, max_thread)
del importWp
else:
if len(directory) != len(wordpress):
logger.error("{0} : Error : Number directory is different than wordpress".format(name))
exit(1)
for i in range(0, len(wordpress)-1):
importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image)
importWp.fromDirectory(directory[i])
del importWp
if __name__ == '__main__': if __name__ == '__main__':
@@ -13,16 +134,42 @@ if __name__ == '__main__':
parser.add_argument("--logfile", help="Log file", default="") parser.add_argument("--logfile", help="Log file", default="")
parser.add_argument("--quiet", help="No console output", action="store_true") parser.add_argument("--quiet", help="No console output", action="store_true")
parser.add_argument("--parser", help="Parser content", default="html.parser") parser.add_argument("--parser", help="Parser content", default="html.parser")
parser.add_argument("--parallel", help="Define number thread (default : 1)", default=1)
parser.add_argument("--no-ssl", help="No ssl for canalblog and/or wordpress (example wordpress,canalblog)", dest="ssl", default="")
parser.add_argument("--revert", help="Restart a work from stopping work", action="store_true")
parser.add_argument("--tmp", help="directory tmp", default="/tmp/import_export_canablog")
subparsers = parser.add_subparsers(dest="command") subparsers = parser.add_subparsers(dest="command")
import_parser = subparsers.add_parser("import") import_parser = subparsers.add_parser("import")
import_parser.add_argument("--user", help="wordpress user", required=True) import_parser.add_argument("--user", help="wordpress user", required=True)
import_parser.add_argument("--password", help="password wordpress's user", default="")
import_parser.add_argument("--file", help="HTML file", default="") import_parser.add_argument("--file", help="HTML file", default="")
import_parser.add_argument("--directory", help="HTML directory", default="") import_parser.add_argument("--directory", help="HTML directory", default="")
import_parser.add_argument("--canalblog", help="URL Canalblog", default="") import_parser.add_argument("--canalblog", help="URL Canalblog", default="")
import_parser.add_argument("--wordpress", help="URL Wordpress", required=True) import_parser.add_argument("--wordpress", help="URL Wordpress", required=True)
import_parser.add_argument("--serial", help="Serial execution", action="store_true") import_parser.add_argument("--serial", help="Serial execution", action="store_true")
import_parser.add_argument("--remove-all", dest="remove", help="Remove all", action="store_true")
import_parser.add_argument("--remove-posts", help="Remove all posts", dest="posts", action="store_true")
import_parser.add_argument("--remove-categories", help="Remove all categories", dest="categories", action="store_true")
import_parser.add_argument("--remove-tags", help="Remove all tags", dest="tags", action="store_true")
import_parser.add_argument("--remove-media", help="Remove all media", dest="media", action="store_true")
import_parser.add_argument("--no-create", help="No create post", dest="create", default="store_false", action="store_true")
import_parser.add_argument("--no-update", help="No update post", dest="update", default="store_false", action="store_true")
import_parser.add_argument("--no-image", help="No image add or update", dest="image", default="store_false", action="store_true")
remove_parser = subparsers.add_parser("remove")
remove_parser.add_argument("--user", help="wordpress user", required=True)
remove_parser.add_argument("--password", help="password wordpress's user", default="")
remove_parser.add_argument("--wordpress", help="URL Wordpress", required=True)
remove_parser.add_argument("--all", dest="remove", help="Remove all (posts, media, tags, categories)", action="store_true")
remove_parser.add_argument("--posts", help="Remove all posts", action="store_true")
remove_parser.add_argument("--categories", help="Remove all categories", action="store_true")
remove_parser.add_argument("--tags", help="Remove all tags", action="store_true")
remove_parser.add_argument("--media", help="Remove all media", action="store_true")
export_parser = subparsers.add_parser("export") export_parser = subparsers.add_parser("export")
@@ -36,12 +183,27 @@ if __name__ == '__main__':
export_parser.add_argument("--no-img", help="No img", dest="img", action="store_true") export_parser.add_argument("--no-img", help="No img", dest="img", action="store_true")
export_parser.add_argument("--no-html", help="No HTML", dest="html", action="store_true") export_parser.add_argument("--no-html", help="No HTML", dest="html", action="store_true")
change_parser = subparsers.add_parser("change")
change_parser.add_argument("--directory",
default="",
help="Directory")
change_parser.add_argument("--file",
default="",
help="File")
args = parser.parse_args() args = parser.parse_args()
logger = logging.getLogger('import export canalblog') logger = logging.getLogger('import export canalblog')
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ssl_canalblog = True
ssl_wordpress = True
for i in args.ssl.split(","):
if i == "canalblog":
ssl_canalblog = False
if i == "wordpress":
ssl_wordpress = False
if args.quiet is False: if args.quiet is False:
ch = logging.StreamHandler() ch = logging.StreamHandler()
@@ -64,80 +226,70 @@ if __name__ == '__main__':
fileHandler.setFormatter(formatter) fileHandler.setFormatter(formatter)
logger.addHandler(fileHandler) logger.addHandler(fileHandler)
if args.command == "import": os.makedirs(args.tmp, exist_ok=True)
password = getpass()
if len(password) == 0: if args.command == "import" or args.command == "remove":
logger.error("No password error !!! ") password = args.password
exit(1) if len(args.password) == 0:
password = getpass()
if len(password) == 0:
logger.error("No password error !!! ")
exit(1)
basic = HTTPBasicAuth(args.user, password) basic = HTTPBasicAuth(args.user, password)
if args.command == "import":
wordpress = args.wordpress.split(",") wordpress = args.wordpress.split(",")
importWp = WPimport(basic, "", logger, args.parser) importWp = WPimport(basic=basic, wordpress="", logger=logger, parser=args.parser, ssl_wordpress=ssl_wordpress)
if len(args.file) > 0: if len(args.file) > 0:
for i in wordpress: for i in wordpress:
importWp.setUrl(i) importWp.setUrl(i)
importWp.fromFile(args.file.split(",")) importWp.fromFile(files=args.file.split(","))
exit(0)
if len(args.directory) > 0: if len(args.directory) > 0:
directory = args.directory.split(",") try:
if args.serial is False: with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
for i in wordpress: wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
importWp.setUrl(i) wait(wait_for, return_when=ALL_COMPLETED)
for j in directory: wait_for = [
importWp.fromDirectory(j) ex.submit(importDirectory, i, int(args.parallel), args.directory, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, args.create, args.update, args.image)
else: for i in range(0, int(args.parallel))
if len(directory) != len(wordpress): ]
logger.error("ERREUR : Le nombre de dossier n'est pas equivalent au nombre d'URL wordpress") except Exception as err:
exit(1) logger.error("Threading error : {0}".format(err))
for i in range(0, len(wordpress)-1):
importWp.setUrl(wordpress[i])
importWp.fromDirectory(directory[i])
exit(0)
if len(args.canalblog) > 0: if len(args.canalblog) > 0:
exportWp = WPExport("", logger, args.parser, args.directory) try:
canalblog = args.canalblog.split(",") with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
wordpress = args.wordpress.split(",") wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
wait(wait_for, return_when=ALL_COMPLETED)
if args.revert is True:
files_tmp = glob.glob("{0}/*.json".format(args.tmp))
if len(files_tmp) == 0:
logger.error("Error revert, because files not found")
exit(1)
if len(files_tmp) != int(args.parallel):
for file_r in files_tmp:
os.remove(file_r)
if args.serial is False: wait_for = [
for canal in canalblog: ex.submit(importUrl, i, int(args.parallel), args.canalblog, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, ssl_canalblog, args.create, args.update, args.image, args.revert, args.tmp)
try: for i in range(0, int(args.parallel))
o = urlparse(canal) ]
o = o._replace(scheme="https")
url = o.geturl().replace(":///", "://")
except Exception as err:
logger.error("parsing error : {0}".format(err))
exit(1)
exportWp.setUrl(url)
webpage = exportWp.getUrlPage()
for j in wordpress:
importWp.setUrl(j)
importWp.fromUrl(webpage)
else:
if len(canalblog) != len(wordpress):
logger.error("ERREUR : Le nombre de dossier n'est pas equivalent au nombre d'URL wordpress")
exit(1)
for i in range(0, len(canalblog)-1):
try:
o = urlparse(canalblog[i])
o = o._replace(scheme="https")
url = o.geturl().replace(":///", "://")
except Exception as err:
logger.error("parsing error : {0}".format(err))
exit(1)
exportWp.setUrl(url)
webpage = exportWp.getUrlPage()
importWp.setUrl(wordpress[i])
importWp.fromUrl(webpage)
except Exception as err:
logger.error("Threading error : {0}".format(err))
exit(0)
if args.command == "export": if args.command == "export":
canalblog = args.url.split(",") canalblog = args.url.split(",")
exportWp = WPExport("", logger, args.parser, args.directory) protocol = "https"
if ssl_canalblog is False:
protocol = "http"
exportWp = WPExport(logger=logger, parser=args.parser, directory=args.directory, ssl_canalblog=ssl_canalblog)
for canal in canalblog: for canal in canalblog:
try: try:
o = urlparse(canal) o = urlparse(canal)
o = o._replace(scheme="https") o = o._replace(scheme=protocol)
url = o.geturl().replace(":///", "://") url = o.geturl().replace(":///", "://")
except Exception as err: except Exception as err:
logger.error("parsing error : {0}".format(err)) logger.error("parsing error : {0}".format(err))
@@ -148,12 +300,39 @@ if __name__ == '__main__':
if args.css is False: if args.css is False:
exportWp.downloadCss() exportWp.downloadCss()
del exportWp
if args.html is False or args.img is False: if args.html is False or args.img is False:
webpage = exportWp.getUrlPage() try:
if args.html is False: with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
exportWp.downloadHTML(webpage) wait_for = [
ex.submit(download, i, int(args.parallel), url, logger, args.parser, args.directory, args.html, args.img, ssl_canalblog, args.revert, args.tmp)
if args.img is False: for i in range(0, int(args.parallel))
exportWp.downloadImg(webpage) ]
except Exception as err:
logger.error("Threading error : {0}".format(err))
exit(0)
if args.command == "remove":
try:
with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
except Exception as err:
logger.error("Thread error for remove : {0}".format(err))
exit(0)
if args.command == "change":
if len(args.directory) > 0:
try:
with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
wait_for = [ ex.submit(change, i, args.parallel, args, logger) for i in range(0, int(args.parallel)) ]
except Exception as err:
logger.error("Thread error for remove : {0}".format(err))
if len(args.file) > 0:
changeWp = WPChange(logger=logger)
for filei in args.file.split(","):
changeWp.fromFile(filei)
exit(0) exit(0)

128
lib/WPChange.py Normal file
View File

@@ -0,0 +1,128 @@
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import requests, os, logging, re, json
class WPChange:
# Constructor
def __init__(self, index_name=1, number_thread=1, logger=None, parser="html.parser"):
self._name = "Thread-{0}".format(index_name)
self._logger = logger
self._number_thread = number_thread
self._parser = parser
# Destructor
def __del__(self):
print("{0} : Import finished".format(self._name))
# Public method
## from file
def fromFile(self, files=[], number_thread=1, max_thread=1):
divFiles = int(len(files) / max_thread)
currentRangeFiles = int(divFiles * (number_thread))
firstRange = int(currentRangeFiles - divFiles)
self._logger.debug("{0} : index : {1}".format(self._name,number_thread))
self._logger.debug("{0} : first range : {1}".format(self._name,firstRange))
self._logger.debug("{0} : last range : {1}".format(self._name,currentRangeFiles))
for i in range(firstRange, currentRangeFiles):
if os.path.exists(files[i]):
self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, currentRangeFiles + 1, files[i]))
self._change(files[i])
## From directory
def fromDirectory(self, directory="", number_thread=1, max_thread=1):
directory = "{0}/archives".format(directory)
directories = self._getDirectories([], "{0}".format(directory))
if len(directories) > 0:
files = self._getFiles(directories)
self.fromFile(files, number_thread, max_thread)
else:
self._logger.error("{0} : No files for {1}".format(self._name, directory))
# Private method
## Get all files
def _getFiles(self, item):
files = []
for i in item:
for j in os.listdir(i):
if os.path.isfile("{0}/{1}".format(i, j)):
files.append("{0}/{1}".format(i, j))
return files
## Get directories
def _getDirectories(self, subdirectory, item):
sub = subdirectory
for i in os.listdir(item):
if os.path.isdir("{0}/{1}".format(item, i)):
sub.append("{0}/{1}".format(item, i))
subdirectory = self._getDirectories(sub, "{0}/{1}".format(item, i))
return subdirectory
## Change path img file
def _change(self, file):
ext_img = ["png", "svg", "gif", "jpg", "jpeg"]
try:
with open(file, 'r') as f:
content = f.read()
soup = BeautifulSoup(content, self._parser)
img = soup.find_all("img")
for i in img:
src = i.get("src")
o = urlparse(src)
if len(o.netloc) > 0:
self._logger.info("{0} : Change source image {1} /img/{2}/{3}".format(self._name, src, o.netloc, o.path))
content = content.replace(src, "/img/{0}/{1}".format(o.netloc, o.path))
script = soup.find_all("script", {"type": "text/javascript"})
for i in script:
src = i.get("src")
if src is not None:
o = urlparse(src)
if len(o.netloc) > 0:
self._logger.info("{0} : Change source js {1} /dists/js/{2}/{3}".format(self._name, src, o.netloc, o.path))
content = content.replace(src, "/dists/js/{0}/{1}".format(o.netloc, o.path))
link = soup.find_all("link", {"rel": "stylesheet"})
for i in link:
href = i.get("href")
if href is not None:
o = urlparse(href)
if len(o.netloc) > 0:
self._logger.info("{0} : Change source css {1} /dists/css/{2}/{3}".format(self._name, href, o.netloc, o.path))
content = content.replace(href, "/dists/css/{0}/{1}".format(o.netloc, o.path))
a = soup.find_all("a", {"target": "_blank"})
for i in a:
href = i.get("href")
if href is not None:
o = urlparse(href)
if len(o.netloc) > 0:
ext = o.path.split(".")[len(o.path.split("."))-1]
if ext in ext_img:
self._logger.info("{0} : Change a img {1} /img/{2}/{3}".format(self._name, href, o.netloc, o.path))
content = content.replace(href, "/img/{0}/{1}".format(o.netloc, o.path))
try:
with open(file, "w") as f:
self._logger.info("{0} : File write : {1}".format(self._name, file))
f.write(content)
except Exception as ex:
self._logger.error("{0} : Error for write file {1} : {2}".format(self._name, file, ex))
except Exception as ex:
self._logger.error("{0} : Error for read file {1} : {2}".format(self._name, file, ex))

View File

@@ -1,27 +1,41 @@
#!/usr/bin/python3 #!/usr/bin/python3
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from urllib.parse import urlparse from urllib.parse import urlparse
import requests, os, argparse, logging import requests, os, argparse, logging, json
from requests.adapters import HTTPAdapter from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry from requests.packages.urllib3.util.retry import Retry
class WPExport: class WPExport:
def __init__(self, url, logger, parser, directory): def __init__(self, name = "Thread-0", url = "", logger = None, parser = "html.parser", directory = "backup", ssl_canalblog=True, tmp="/tmp/import_export_canablog"):
self._url = url self._url = url
self._logger = logger self._logger = logger
self._parser = parser self._parser = parser
self._dir = directory self._dir = directory
self._name = name
self._protocol = "https"
if ssl_canalblog is False:
self._protocol = "http"
self._request = requests.Session() self._request = requests.Session()
retries = Retry(total=10, retries = Retry(total=10,
status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2) status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
self._request.mount('http://', HTTPAdapter(max_retries=retries)) self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))
self._tmp = tmp
# Destructor
def __del__(self):
self._logger.info("{0} : Export finished for {1}".format(self._name, self._url))
# Public method # Public method
# Set name
def setName(self, name):
self._name = "Thread-{0}".format(int(name) + 1)
# Set URL # Set URL
def setUrl(self, url): def setUrl(self, url):
@@ -43,85 +57,119 @@ class WPExport:
# Download HTML # Download HTML
def downloadHTML(self, webpage): def downloadHTML(self, first, second):
self._downloadPage(webpage, self._dir) self._downloadPage(webpage[first][second], self._dir)
# Download Image # Download Image
def downloadImg(self, webpage): def downloadImg(self, first, second):
page_src = self._getImg(webpage) page_src = self._getImg(webpage[first][second])
o = urlparse(self._url) o = urlparse(self._url)
self._downloadPage(page_src, "{0}/{1}/{2}".format(self._dir, o.path, "img")) self._downloadPage(page_src, "{0}/{1}/{2}".format(self._dir, o.path, "img"))
# Get URL # Get URL
def getUrlPage(self): def getUrlPage(self, index_thread, max_thread):
try: try:
page = self._request.get(self._url) page = self._request.get(self._url)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) page_url = []
if page.status_code == 200:
soup = BeautifulSoup(page.text, self._parser)
ul = soup.find_all("ul", id="listsmooth")
for anchor in ul[0].find_all("a"):
href = anchor.get('href', '/')
if href != "#":
page_url.append(href)
else:
self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error : {1}".format(self._name, err))
exit(1) exit(1)
page_url = [] except Exception as err:
if page.status_code == 200: self._logger.error("{0} : Exception error : {1}".format(self._name, err))
soup = BeautifulSoup(page.text, self._parser)
ul = soup.find_all("ul", id="listsmooth")
for anchor in ul[0].find_all("a"):
href = anchor.get('href', '/')
if href != "#":
page_url.append(href)
else:
self._logger.error("Url did not get due status code : {0}".format(page.status_code))
self._logger.debug(page.content)
webpage = {"principal": {"page":[], "article":[]}, "publications": {"page":[], "article":[]}}
webpage = []
for i in page_url: for i in page_url:
section = "publications"
o = urlparse(i)
o = o._replace(scheme=self._protocol)
i = o.geturl().replace(":///", "://")
if i == "{0}/".format(self._url):
section = "principal"
try: try:
page = self._request.get(i) page = self._request.get(i)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
self._logger.info("page : {0}".format(i))
if i not in webpage:
webpage.append(i)
soup = BeautifulSoup(page.text, self._parser)
class_div = pagingfirstline = soup.find_all("div", class_="pagingfirstline")
if len(class_div) > 0:
pagingfirstline = class_div[0].find_all("a")
if len(pagingfirstline) > 1:
lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
number_page = element_lastpage.split("-")[0].split("p")[1]
number_lastpage = int(number_page) / 10
for j in range(1,int(number_lastpage)):
paging = j * 10
categorie = urlparse(i).path.split("/")
url_paging = "{0}/archives/p{1}-10.html".format(self._url, paging)
if len(categorie) > 2:
url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
self._logger.info(url_paging)
if url_paging not in webpage:
webpage.append(url_paging)
page = self._request.get(url_paging)
if page.status_code == 200:
soup = BeautifulSoup(page.text, self._parser)
h2 = soup.find_all("h2")
for title in h2:
href = title.find_all("a")[0].get("href", "/")
if href not in webpage:
try:
o = urlparse(href)
o = o._replace(scheme="https").geturl()
except Exception as err:
self._logger.error("parsing error : {0}".format(err))
exit(1)
webpage.append(o)
else:
self._logger.error("web didn't get due status code : {0}".format(page.status_code))
self._logger.debug(page.content)
return webpage if page.status_code == 200:
self._logger.info("{0} : page : {1}".format(self._name, i))
if i not in webpage[section]["page"]:
webpage[section]["page"].append(i)
soup = BeautifulSoup(page.text, self._parser)
class_div = soup.find_all("div", class_="pagingfirstline")
if len(class_div) > 0:
pagingfirstline = class_div[0].find_all("a")
if len(pagingfirstline) > 1:
lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
self._logger.debug("{0} : Last page {1}".format(self._name, lastpage))
element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
number_page = element_lastpage.split("-")[0].split("p")[1]
number_lastpage = int(number_page) / 10
setPageDivided = int(number_lastpage) / max_thread
if setPageDivided > int(setPageDivided):
setPageDivided = setPageDivided + 1
setPagePart = setPageDivided * (index_thread + 1)
firstPagePart = (setPagePart - setPageDivided)
self._logger.debug("{0} : Total page : {1}".format(self._name,int(number_lastpage)))
self._logger.debug("{0} : First range : {1}".format(self._name, int(firstPagePart)))
self._logger.debug("{0} : Last range : {1}".format(self._name, int(setPagePart)))
for j in range(int(firstPagePart),int(setPagePart)+1):
paging = j * 10
categorie = urlparse(i).path.split("/")
url_paging = "{0}/archives/p{1}-10.html".format(self._url, paging)
if len(categorie) > 2:
url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
self._logger.info("{0} : {1}".format(self._name, url_paging))
if url_paging not in webpage[section]["page"]:
webpage[section]["page"].append(url_paging)
page = self._request.get(url_paging)
if page.status_code == 200:
soup = BeautifulSoup(page.text, self._parser)
h2 = soup.find_all("h2")
self._logger.debug("{0} : {1} H2 : {2}".format(self._name, url_paging, h2))
for title in h2:
self._logger.debug("{0} : {1} a : {2}".format(self._name, url_paging, title.find_all("a")))
href = title.find_all("a")[0].get("href", "/")
if href not in webpage[section]["article"]:
try:
o = urlparse(href)
o = o._replace(scheme="https").geturl()
webpage[section]["article"].append(o)
except Exception as err:
self._logger.error("parsing error : {0}".format(err))
exit(1)
else:
self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error : {1}".format(self._name, err))
exit(1)
try:
string_webpage = json.dumps(webpage)
open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(string_webpage)
except Exception as ex:
self._logger.error("{0} : Error for writing webpage : {1}".format(self._name, ex))
# Private method # Private method
@@ -135,7 +183,7 @@ class WPExport:
makedir.append(i) makedir.append(i)
repath = "/".join(makedir) repath = "/".join(makedir)
if not os.path.exists(repath): if not os.path.exists(repath):
self._logger.debug("Dossier crée : {0}".format(repath)) self._logger.debug("{0} : Dossier crée : {1}".format(self._name, repath))
try: try:
if len(repath) > 0: if len(repath) > 0:
os.mkdir(repath) os.mkdir(repath)
@@ -149,47 +197,52 @@ class WPExport:
def _getScriptCss(self, js, css): def _getScriptCss(self, js, css):
try: try:
page = self._request.get(self._url) page = self._request.get(self._url)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) page_url = []
exit(1) if page.status_code == 200:
page_url = [] soup = BeautifulSoup(page.text, self._parser)
if page.status_code == 200: if js is True:
soup = BeautifulSoup(page.text, self._parser) script = soup.find_all("script")
if js is True: for anchor in script:
script = soup.find_all("script") src = anchor.get("src", "/")
for anchor in script: if src != "/":
src = anchor.get("src", "/")
if src != "/":
try:
u = urlparse(self._url)
o = urlparse(src)
except Exception as err:
self._logger.error("parsing error : {0}".format(err))
exit(1)
if o.netloc == "":
o = o._replace(netloc=u.netloc)
o = o._replace(scheme=u.scheme)
page_url.append(o.geturl())
if css is True:
link = soup.find_all("link")
for anchor in link:
rel = anchor.get("rel")
if rel[0] == "stylesheet":
href = anchor.get("href", "/")
if href != "/":
try: try:
u = urlparse(self._url) u = urlparse(self._url)
o = urlparse(href) o = urlparse(src)
if o.netloc == "":
o = o._replace(netloc=u.netloc)
o = o._replace(scheme=u.scheme)
page_url.append(o.geturl())
except Exception as err: except Exception as err:
self._logger.error("parsing error : {0}".format(err)) self._logger.error("parsing error : {0}".format(err))
exit(1) exit(1)
if o.netloc == "":
o = o._replace(netloc=u.netloc) if css is True:
o = o._replace(scheme=u.scheme) link = soup.find_all("link")
page_url.append(o.geturl()) for anchor in link:
else: rel = anchor.get("rel")
self._logger.error("JS or CSS did not get due status code : {0}".format(page.status_code)) if rel[0] == "stylesheet":
self._logger.debug(page.content) href = anchor.get("href", "/")
if href != "/":
try:
u = urlparse(self._url)
o = urlparse(href)
if o.netloc == "":
o = o._replace(netloc=u.netloc)
o = o._replace(scheme=u.scheme)
page_url.append(o.geturl())
except Exception as err:
self._logger.error("parsing error : {0}".format(err))
exit(1)
else:
self._logger.error("JS or CSS did not get due status code : {0}".format(page.status_code))
self._logger.debug(page.content)
except ConnectionError as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
except Exception as err:
self._logger.error("Exception error : {0}".format(err))
return page_url return page_url
@@ -200,22 +253,25 @@ class WPExport:
for i in webpage: for i in webpage:
try: try:
page = self._request.get(i) page = self._request.get(i)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) if page.status_code == 200:
soup = BeautifulSoup(page.text, self._parser)
img = soup.find_all("img")
self._logger.info("{0} : image from page: {1} : ".format(self._name,i))
for anchor in img:
src = anchor.get("src", "/")
if src != "/":
if src not in page_img:
self._logger.info("{0} : image: {1} : ".format(self._name, src))
page_img.append(src)
else:
self._logger.error("{0} : Image did not get due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error : {1}".format(self._name, err))
exit(1) exit(1)
if page.status_code == 200: except Exception as err:
soup = BeautifulSoup(page.text, self._parser) self._logger.error("{0} : Exception error : {1}".format(self._name, err))
img = soup.find_all("img")
self._logger.info("image from page: {0} : ".format(i))
for anchor in img:
src = anchor.get("src", "/")
if src != "/":
if src not in page_img:
self._logger.info("image: {0} : ".format(src))
page_img.append(src)
else:
self._logger.error("Image did not get due status code : {0}".format(page.status_code))
self._logger.debug(page.content)
return page_img return page_img
@@ -226,29 +282,33 @@ class WPExport:
for i in range(0, len(webpage)): for i in range(0, len(webpage)):
try: try:
o = urlparse(webpage[i]) o = urlparse(webpage[i])
path_web = o.path.split("/")
filePageWeb = path_web[len(path_web)-1]
path_web.pop(len(path_web)-1)
dir_page_web = "/".join(path_web)
self._mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web))
try:
r = self._request.get(webpage[i])
if r.status_code == 200:
fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
if len(dir_page_web) > 0 and len(filePageWeb) > 0:
fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
self._logger.info("{0} : {1}/{2} : {3}".format(self._name, i+1, len(webpage), fileDownload))
try:
open(fileDownload, "wb").write(r.content)
except Exception as err:
self._logger.error("file error : {0}".format(err))
exit(1)
else:
self._logger.error("Not download due status code : {0}".format(r.status_code))
self._logger.debug(r.content)
except ConnectionError as err:
self._logger.error("{0} : Connection error : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} Exception error : {1}".format(self._name, err))
except Exception as err: except Exception as err:
self._logger.error("parsing error : {0}".format(err)) self._logger.error("parsing error : {0}".format(err))
exit(1) exit(1)
path_web = o.path.split("/")
filePageWeb = path_web[len(path_web)-1]
path_web.pop(len(path_web)-1)
dir_page_web = "/".join(path_web)
self._mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web))
try:
r = self._request.get(webpage[i])
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if r.status_code == 200:
fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
if len(dir_page_web) > 0 and len(filePageWeb) > 0:
fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
self._logger.info("{0}/{1} : {2}".format(i+1, len(webpage), fileDownload))
try:
open(fileDownload, "wb").write(r.content)
except Exception as err:
self._logger.error("file error : {0}".format(err))
exit(1)
else:
self._logger.error("Not download due status code : {0}".format(r.status_code))
self._logger.debug(r.content)

View File

@@ -8,67 +8,130 @@ from requests.packages.urllib3.util.retry import Retry
class WPimport: class WPimport:
# Constructor # Constructor
def __init__(self, basic, wordpress, logger, parser): def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False, tmp="/tmp/import_export_canablog"):
self._name = name
self._basic = basic self._basic = basic
self._wordpress = wordpress self._wordpress = wordpress
self._logger = logger self._logger = logger
self._parser = parser self._parser = parser
self._headers_json = {'Content-Type': 'application/json', 'Accept':'application/json'} self._headers_json = {'Content-Type': 'application/json; charset=utf-8', 'Accept':'application/json'}
self._protocol = "https"
self._directory = "backup"
if ssl_wordpress is False:
self._protocol = "http"
self._request = requests.Session() self._request = requests.Session()
retries = Retry(total=10, retries = Retry(connect=10, read=10, redirect=5,
status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2) status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
self._request.mount('http://', HTTPAdapter(max_retries=retries)) self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))
self._no_create = no_create
self._no_update = no_update
self._no_image = no_image
self._tmp = tmp
# Destructor
def __del__(self):
print("{0} : Import finished for {1}".format(self._name, self._wordpress))
# Public method # Public method
def setUrl(self, wordpress): def setUrl(self, wordpress):
self._wordpress = wordpress self._wordpress = wordpress
def fromUrl(self, webpage): def fromUrl(self, first, second):
for i in range(0, len(webpage)): try:
try: with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
r = self._request.get(webpage[i]) webpage_content = json.loads(file.read())
except Exception as err: self._logger.debug("{0} : size of webpage : {1}".format(self._name, len(webpage_content)))
self._logger.error("Connection error : {0}".format(err)) webpage = webpage_content[first][second]
exit(1) for i in range(0, len(webpage)):
if r.status_code == 200: try:
self._logger.info("({0}/{1} : Page en cours d'import : {2}".format(i+1, len(webpage), webpage[i])) r = self._request.get(webpage[i])
soup = BeautifulSoup(r.content, self._parser) if r.status_code == 200:
articlebody = soup.find_all("div", class_="articlebody") self._logger.info("{0} : ({1}/{2}) : Page is importing : {3}".format(self._name, i+1, len(webpage), webpage[i]))
if len(articlebody) > 0: soup = BeautifulSoup(r.content, self._parser)
self._addOrUpdatePost(soup) articlebody = soup.find_all("div", class_="articlebody")
else: if len(articlebody) > 0:
self._addOrUpdateFeaturedMedia(soup) self._addOrUpdatePost(soup)
else: else:
self._logger.error("Connection error with status code : {0}".format(r.status_code)) self._addOrUpdateFeaturedMedia(soup)
self._logger.debug(r.content) del webpage_content[first][second][i]
webpage_content = json.dumps(webpage_content)
open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(webpage_content)
else:
self._logger.error("{0} : Connection error for get url {1} with status code : {2}".format(self._name, webpage[i], r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, webpage[i], err))
exit(1)
except IOError as err:
self._logger.error("{0} : Connection error for IO url {1} : {2}".format(self._name, webpage[i], err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, webpage[i], err))
except Exception as ex:
self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))
def fromDirectory(self, directory): def fromDirectory(self, directory="", number_thread=1, max_thread=1):
self._directory = directory
directory = "{0}/archives".format(directory) directory = "{0}/archives".format(directory)
directories = self._getDirectories([], "{0}".format(directory)) directories = self._getDirectories([], "{0}".format(directory))
files = self._getFiles(directories) if len(directories) > 0:
self.fromFile(files) files = self._getFiles(directories)
self.fromFile(files=files, number_thread=number_thread, max_thread=max_thread)
else:
self._logger.error("{0} : No files for {1}".format(self._name, directory))
def fromFile(self, files):
for file in files:
if os.path.exists(file):
self._logger.info("Fichier en cours de traitement : {0}".format(file))
with open(file, 'r') as f: def fromFile(self, files=[], number_thread=1, max_thread=1):
divFiles = int(len(files) / max_thread)
currentRangeFiles = int(divFiles * (number_thread+1))
firstRange = int(currentRangeFiles - divFiles)
self._logger.debug("{0} : index : {1}".format(self._name,number_thread))
self._logger.debug("{0} : first range : {1}".format(self._name,firstRange))
self._logger.debug("{0} : last range : {1}".format(self._name,currentRangeFiles))
for i in range(firstRange, currentRangeFiles):
if os.path.exists(files[i]):
self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i+1, currentRangeFiles + 1, files[i]))
with open(files[i], 'r') as f:
content = f.read() content = f.read()
self._logger.debug("{0} : Size of article : {1}".format(self._name, len(content)))
soup = BeautifulSoup(content, self._parser) soup = BeautifulSoup(content, self._parser)
articlebody = soup.find_all("div", class_="articlebody") articlebody = soup.find_all("div", class_="articlebody")
self._logger.debug("{0} : Number of article : {1}".format(self._name, len(articlebody)))
if len(articlebody) > 0: if len(articlebody) > 0:
self._addOrUpdatePost(soup) self._addOrUpdatePost(soup)
else: else:
self._addOrUpdateFeaturedMedia(soup) self._addOrUpdateFeaturedMedia(soup)
# Private method # Private method
## replace caracter
def _replaceCaracter(self, title_rendered):
list_replace = {'’': "'", '–': '-', '…': '...', '« ': '"', ' »': '"', '« ': '"', ' »': '"', '’': "'", '"‘': "'"}
for old, new in list_replace.items():
title_rendered = title_rendered.replace(old, new)
return title_rendered
## remove space
def _removeSpace(self, title):
if title[len(title)-1] == " ":
title = title[:-1]
if title[0] == " ":
title = title[1:]
return title
## Get all files ## Get all files
def _getFiles(self, item): def _getFiles(self, item):
@@ -97,61 +160,66 @@ class WPimport:
h2 = i.find_all("h2")[0].text h2 = i.find_all("h2")[0].text
params = {"search":h2, "type":"post"} params = {"search":h2, "type":"post"}
try: try:
page = self._request.get("http://{0}/wp-json/wp/v2/search".format(self._wordpress), auth=self._basic, params=params) page = self._request.get("{1}://{0}/wp-json/wp/v2/search".format(self._wordpress, self._protocol), auth=self._basic, params=params)
except Exception as err: if page.status_code == 200:
self._logger.error("Connection error : {0}".format(err)) result = page.json()
exit(1) if len(result) > 0:
if page.status_code == 200: if h2 == result[0]["title"]:
result = page.json() img = i.find_all("img")
if len(result) > 0: if len(img) > 0:
if h2 == result[0]["title"]: img_src = img[0].get("src")
img = i.find_all("img")
if len(img) > 0:
img_src = img[0].get("src")
try:
page = self._request.get(img_src)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
name_img = img_src.replace("_q", "")
name_img = name_img.split("/")[len(name_img.split("/"))-1]
params = {"search": name_img}
try: try:
page = self._request.get("http://{0}/wp-json/wp/v2/media".format(self._wordpress), auth=self._basic, params=params) page = self._request.get(img_src)
except Exception as err: if page.status_code == 200:
self._logger.error("Connection error : {0}".format(err)) name_img = img_src.replace("_q", "")
exit(1) name_img = name_img.split("/")[len(name_img.split("/"))-1]
if page.status_code == 200: params = {"search": name_img}
res = page.json()
if len(res) > 0:
id_media = res[0]["id"]
data = {"featured_media": id_media}
try: try:
r = self._request.post("http://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, result[0]["id"]), auth=self._basic, headers=self._headers_json, data=json.dumps(data)) page = self._request.get("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) if page.status_code == 200:
res = page.json()
if len(res) > 0:
id_media = res[0]["id"]
data = {"featured_media": id_media}
try:
r = self._request.post("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, result[0]["id"], self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
if r.status_code == 200:
self._logger.info("{0} : Add media featured : {1}".format(self._name, r.json()["title"]["raw"]))
else:
self._logger.error("{0} : Connection error with status code for featured media : {1}".format(self._name, r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for post media featured : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for post media featured : {1}".format(self._name, err))
else:
self._logger.info("{0} : No media found for {1}".format(self._name, h2))
else:
self._logger.error("{0} : Connection error with status code for search featured media: {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error search featured media : {1}".format(self._name, err))
exit(1) exit(1)
if r.status_code == 200: except Exception as err:
self._logger.info("Ajout media featured : {0}".format(r.json()["title"]["raw"])) self._logger.error("{0} : Exception error search featured media : {1}".format(self._name, err))
else:
self._logger.error("Connection error with status code : {0}".format(r.status_code))
self._logger.debug(r.content)
else: else:
self._logger.info("Aucun media trouvé pour {0}".format(h2)) self._logger.error("{0} : Connection error for get featured media with status code : {1}".format(self._name, page.status_code))
else: self._logger.debug("{0} : {1}".format(self._name, page.content))
self._logger.error("Connection error with status code : {0}".format(page.status_code)) except ConnectionError as err:
self._logger.debug(page.content) self._logger.error("{0} : Connection error for get featured media : {1}".format(self._name, err))
exit(1)
else: except Exception as err:
self._logger.error("Connection error with status code : {0}".format(page.status_code)) self._logger.error("{0} : Exception error for get featured media : {1}".format(self._name, err))
self._logger.debug(page.content) else:
self._logger.error("{0} : Connection error with status code for featured media : {1}".format(self._name, page.status_code))
else: self._logger.debug("{0} : {1}".format(self._name, page.content))
self._logger.error("Connection error with status code : {0}".format(page.status_code)) except ConnectionError as err:
self._logger.debug(page.content) self._logger.error("{0} : Connection error : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Connection error : {1}".format(self._name, err))
## Association image to post ## Association image to post
@@ -159,67 +227,87 @@ class WPimport:
for i in list_img: for i in list_img:
data = {"post": post_id} data = {"post": post_id}
try: try:
r = self._request.post("http://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, i["id"]), auth=self._basic, data=data) r = self._request.post("{2}://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, i["id"], self._protocol), auth=self._basic, data=data)
except Exception as err: if r.status_code == 200:
self._logger.error("Connection error : {0}".format(err)) self._logger.info("{0} : Link image to post {1}".format(self._name, title))
else:
self._logger.error("{0} Connection error with status code for link image to post : {1}".format(self._name, r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for link image to post : {1}".format(self._name, err))
exit(1) exit(1)
if r.status_code == 200: except Exception as err:
self._logger.info("Association d'une image à l'article {0}".format(title)) self._logger.error("{0} : Exception error for link image to post : {1}".format(self._name, err))
else:
self._logger.error("Connection error with status code : {0}".format(r.status_code))
self._logger.debug(r.content)
## Add or update img ## Add or update img
def _addOrUpdateMedia(self, href_img, page): def _addOrUpdateMedia(self, href_img, page):
media_authorized = ["png", "jpg", "jpeg", "svg", "gif"]
media = {"id":"", "rendered":""} media = {"id":"", "rendered":""}
split_fileimg = href_img.split("/") split_fileimg = href_img.split("/")
img_name = split_fileimg[len(split_fileimg)-1] img_name = split_fileimg[len(split_fileimg)-1]
params = { "search": img_name} img_type_file = img_name.split(".")[len(img_name.split("."))-1]
try: is_img = True
r = self._request.get("http://{0}/wp-json/wp/v2/media".format(self._wordpress), auth=self._basic, params=params) if img_type_file not in media_authorized:
except Exception as err: self._logger.error("{0} : Element {1} is not image".format(self._name,img_name))
self._logger.error("Connection error : {0}".format(err)) is_img = False
exit(1) if is_img is True:
if r.status_code == 200: self._logger.debug("{0} : Search for image {1} with URL {2}".format(self._name, img_name, "{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol)))
res = r.json() params = { "search": img_name}
if len(res) > 0:
params = {"force":1}
try:
r = self._request.delete("http://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, res[0]["id"]), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if r.status_code == 200:
self._logger.info("Image removed {0}".format(img_name))
else:
self._logger.error("Image not removed due status code : {0}".format(r.status_code))
self._logger.debug(r.content)
data = page.content
img_type = "image/png"
if img_name.split(".")[1] == "jpg" or img_name.split(".")[1] == "jpeg":
img_type = "image/jpg"
headers={ 'Content-Type': img_type,'Content-Disposition' : 'attachment; filename={0}'.format(img_name)}
try: try:
r = self._request.post("http://{0}/wp-json/wp/v2/media".format(self._wordpress), auth=self._basic, headers=headers, data=data) r = self._request.get("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) self._logger.debug("{0} : Search for image {1} and his status code {2}".format(self._name, img_name, r.status_code))
if r.status_code == 200:
res = r.json()
self._logger.debug("{0} : Number of image in search : {1}".format(self._name, len(res)))
if len(res) > 0:
params = {"force":1}
try:
r = self._request.delete("{2}://{0}/wp-json/wp/v2/media/{1}".format(self._wordpress, res[0]["id"], self._protocol), auth=self._basic, params=params)
if r.status_code == 200:
self._logger.info("{0} : Image removed {1}".format(self._name, img_name))
else:
self._logger.error("{0} : Image {1} not removed due status code : {2}".format(self._name, img_name, r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} Connection error for delete image : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} Exception error for delete image : {1}".format(self._name, err))
data = page.content
img_type = "image/{0}".format(img_type_file)
if img_type_file == "jpg":
img_type = "image/jpeg"
headers={ 'Content-Type': img_type,'Content-Disposition' : 'attachment; filename={0}'.format(img_name)}
try:
r = self._request.post("{1}://{0}/wp-json/wp/v2/media".format(self._wordpress, self._protocol), auth=self._basic, headers=headers, data=data)
if r.status_code == 201:
self._logger.info("{0} : Image added {1}".format(self._name, img_name))
res = r.json()
media["id"] = res["id"]
media["rendered"] = res["guid"]["rendered"]
else:
self._logger.error("{0} : Image {1}.{2} not added due status code : {3}".format(self._name, img_name, img_type, r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for add image : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for add image : {1}".format(self._name, err))
exit(1)
else:
self._logger.error("{0} : Connection error for search image with status code : {1}".format(self._name, r.status_code))
self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for search media : {1}".format(self._name, err))
exit(1) exit(1)
if r.status_code == 201: except Exception as err:
self._logger.info("Image added {0}".format(img_name)) self._logger.error("{0} : Exception error for search media : {1}".format(self._name, err))
res = r.json()
media["id"] = res["id"]
media["rendered"] = res["guid"]["rendered"]
else:
self._logger.error("Image not added due status code : {0}".format(r.status_code))
self._logger.debug(r.content)
else:
self._logger.error("Connection error with status code : {0}".format(r.status_code))
self._logger.debug(r.content)
return media return media
## Add or update comment ## Add or update comment
@@ -229,29 +317,35 @@ class WPimport:
try: try:
params = {"post": post, "author_name":i["author"], "date":i["date"]} params = {"post": post, "author_name":i["author"], "date":i["date"]}
page = self._request.get("http://{0}/wp-json/wp/v2/comments".format(self._wordpress), auth=self._basic, params=params) page = self._request.get("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
result = page.json()
for j in result:
try:
params = {"force":1}
page = self._request.delete("http://{0}/wp-json/wp/v2/comments/{1}".format(self._wordpress, j["id"]), params=params, auth=self._basic)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
self._logger.info("Comment deleted for {0}".format(title))
self._logger.debug("Comment deleted : {0}".format(j))
else:
self._logger.error("Comment not deleted for {0} due status code : {1}".format(title, page.status_code))
self._logger.debug(page.content)
else: if page.status_code == 200:
self._logger.error("Comment not listed for {0} due status code : {1}".format(title, page.status_code)) result = page.json()
self._logger.debug(page.content) for j in result:
try:
params = {"force":1}
page = self._request.delete("{2}://{0}/wp-json/wp/v2/comments/{1}".format(self._wordpress, j["id"], self._protocol), params=params, auth=self._basic)
if page.status_code == 200:
self._logger.info("{0} : Comment deleted for {1}".format(self._name, title))
self._logger.debug("{0} : Comment deleted : {1}".format(self._name, j))
else:
self._logger.error("{0} : Comment not deleted for {1} due status code : {2}".format(self._name, title, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for delete comment : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for delete comment : {1}".format(self._name, err))
else:
self._logger.error("{0} : Comment not listed for {1} due status code : {2}".format(self._name, title, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for search comment : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for search comment : {1}".format(self._name, err))
for i in comment: for i in comment:
data = {"post": post, "content": i["content"], "date": i["date"], "author_name": i["author"], "status": "approved"} data = {"post": post, "content": i["content"], "date": i["date"], "author_name": i["author"], "status": "approved"}
@@ -260,29 +354,36 @@ class WPimport:
parent_id = int(i["parent_id"]) parent_id = int(i["parent_id"])
params = {"post": post, "author_name":comment[parent_id]["author"], "date":comment[parent_id]["date"]} params = {"post": post, "author_name":comment[parent_id]["author"], "date":comment[parent_id]["date"]}
try: try:
page = self._request.get("http://{0}/wp-json/wp/v2/comments".format(self._wordpress), auth=self._basic, params=params) page = self._request.get("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) if page.status_code == 200:
result = page.json()
if len(result) > 0:
data["parent"]=result[0]["id"]
else:
self._logger.error("{0} : Connection error for parent comment with status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for parent comment : {1}".format(self._name, err))
exit(1) exit(1)
if page.status_code == 200: except Exception as err:
result = page.json() self._logger.error("{0} : Exception error for parent comment : {1}".format(self._name, err))
if len(result) > 0:
data["parent"]=result[0]["id"]
else:
self._logger.error("Connection error for parent comment with status code : {0}".format(page.status_code))
self._logger.debug(page.content)
try: try:
page = self._request.post("http://{0}/wp-json/wp/v2/comments".format(self._wordpress), auth=self._basic, data=data) page = self._request.post("{1}://{0}/wp-json/wp/v2/comments".format(self._wordpress, self._protocol), auth=self._basic, data=data)
except Exception as err:
self._logger.error("Connection error : {0}".format(err)) if page.status_code == 201:
self._logger.info("{0} : Comment added for {1}".format(self._name, title))
self._logger.debug("{0} : Data : {1}".format(self._name, data))
else:
self._logger.error("{0} : Comment not added for {1} due status code : {2}".format(self._name, title, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for add comment : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for add comment : {1}".format(self._name, err))
exit(1) exit(1)
if page.status_code == 201:
self._logger.info("Comment added for {0}".format(title))
self._logger.debug("Data : {0}".format(data))
else:
self._logger.error("Comment not added for {0} due status code : {1}".format(title, page.status_code))
self._logger.debug(page.content)
## Check class name ## Check class name
@@ -346,6 +447,7 @@ class WPimport:
listelement[i] = [] listelement[i] = []
articletitle = soup.find_all("h2", class_="articletitle") articletitle = soup.find_all("h2", class_="articletitle")
self._logger.debug("{0} : Title of the article : {1}".format(self._name, articletitle))
articlebody = soup.find_all("div", class_="articlebody") articlebody = soup.find_all("div", class_="articlebody")
articledate = soup.find_all("span", class_="articledate") articledate = soup.find_all("span", class_="articledate")
articleacreator = soup.find_all("span", class_="articlecreator") articleacreator = soup.find_all("span", class_="articlecreator")
@@ -353,43 +455,68 @@ class WPimport:
itemfooter = soup.find_all("div", class_="itemfooter") itemfooter = soup.find_all("div", class_="itemfooter")
comment = soup.find_all("li", class_="comment") comment = soup.find_all("li", class_="comment")
img_a = articlebody[0].find_all("a", {"target": "_blank"}) img_a = articlebody[0].find_all("a", {"target": "_blank"})
self._logger.debug("{0} : Number of image's link : {1}".format(self._name, len(img_a)))
list_img = [] list_img = []
for i in img_a: if self._no_image is False:
new_img = {} for i in img_a:
img = i.find_all("img") new_img = {}
if len(img) > 0: img = i.find_all("img")
href_a = i.get("href") self._logger.debug("{0} : Number of image's tag : {1}".format(self._name, len(img)))
href_img = img[0].get("src") if len(img) > 0:
new_img["old_src"]=href_img href_a = i.get("href")
new_img["old_href"]=href_a href_img = img[0].get("src")
try: href_a_o = urlparse(href_a)
page_img = self._request.get(href_img) href_img_o = urlparse(href_img)
except Exception as err: new_img["old_src"]=href_img
self._logger.error("Connection error : {0}".format(err)) new_img["old_href"]=href_a
exit(1)
if page_img.status_code == 404:
href_img = href_a
try: try:
page_img = self._request.get(href_a) if len(href_img_o.netloc) > 0:
img_ok = False
page_img = self._request.get(href_img)
if page_img.status_code == 404:
href_img = href_a
try:
page_img = self._request.get(href_a)
if page_img.status_code == 200:
img_ok = True
except ConnectionError as err:
self._logger.error("{0} : Connection error for get image : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for get image : {1}".format(self._name, err))
exit(1)
else:
if os.path.exists("{0}/..{1}".format(self._directory, href_img)):
page_img = open("{0}/..{1}".format(self._directory, href_img), "r")
img_ok = True
else:
if os.path.exists("{0}/..{1}".format(self._directory, href_a)):
page_img = open("{0}/..{1}".format(self._directory, href_a), "r")
img_ok = True
self._logger.debug("{0} : Status code for image {1} : {2}".format(self._name, href_img, page_img.status_code))
if img_ok is True:
media=self._addOrUpdateMedia(href_img, page_img)
new_img["id"]=media["id"]
new_img["new_src"]=media["rendered"]
list_img.append(new_img)
if href_img != href_a:
media=self._addOrUpdateMedia(href_a, page_img)
new_img["id"]=media["id"]
new_img["new_src"]=media["rendered"]
list_img.append(new_img)
if page_img.status_code not in [200, 404]:
self._logger.error("{0} : Connection error with status code for get image : {1}".format(self._name, page_img.status_code))
self._logger.debug("{0} : {1}".format(self._name, page_img.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for get image : {1}".format(self._name, err))
exit(1)
except Exception as err: except Exception as err:
self._logger.error("Connection error : {0}".format(err)) self._logger.error("{0} : Exception error for get image : {1}".format(self._name, err))
exit(1) exit(1)
if page_img.status_code == 200: self._logger.debug("{0} : Number of image : {1}".format(self._name, len(list_img)))
media=self._addOrUpdateMedia(href_img, page_img)
new_img["id"]=media["id"]
new_img["new_src"]=media["rendered"]
list_img.append(new_img)
if href_img != href_a:
media=self._addOrUpdateMedia(href_a, page_img)
new_img["id"]=media["id"]
new_img["new_src"]=media["rendered"]
list_img.append(new_img)
if page_img.status_code not in [200, 404]:
self._logger.error("Connection error with status code : {0}".format(page_img.status_code))
self._logger.debug(page_img.content)
comment_post = self._getComment(comment) comment_post = self._getComment(comment)
a = itemfooter[0].find_all("a", {"rel": True}) a = itemfooter[0].find_all("a", {"rel": True})
@@ -404,38 +531,62 @@ class WPimport:
for i in liste: for i in liste:
for j in element[i]: for j in element[i]:
element_exist = False element_exist = False
try: title_element = self._removeSpace(j)
params = {"params":j} for index in range(1,10):
page = self._request.get("http://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i), auth=self._basic, params=params) self._logger.info("{0} : search {1} with index {2} : {3}".format(self._name, i, index, title_element))
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
element_exist = True
result = page.json()
listelement[i].append(result[0]["id"])
else:
self._logger.error("{0} not found due status code : {1}".format(i, page.status_code))
self._logger.debug(page.content)
if element_exist is False:
data = {"name": j}
self._logger.debug("URL : {0} ".format("http://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i)))
self._logger.debug("data : {0}".format(data))
self._logger.debug("headers : {0}".format(self._headers_form))
try: try:
page = self._request.post("http://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i), auth=self._basic, headers=self._headers_json, data=data) params = {"search":title_element, "per_page":"100", "page":index}
except Exception as err: page = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol), auth=self._basic, params=params)
self._logger.error("Connection error : {0}".format(err))
if page.status_code == 200:
result = page.json()
self._logger.debug("{0} : content {3} {2} : {1}".format(self._name, result, title_element, i))
if len(result) > 0:
for k in result:
title_rendered = k["name"]
self._logger.debug("{0} : content {2} : {1}".format(self._name, title_rendered, i))
self._logger.debug("{0} : size of content {3} : {2} - {1}".format(self._name, len(title_rendered), len(title_element), i))
if len(title_element) != len(title_rendered):
title_rendered = self._replaceCaracter(title_rendered)
if title_element == title_rendered:
self._logger.info("{0} : {1} found : {2}".format(self._name, i, title_rendered))
element_exist = True
listelement[i].append(k["id"])
else:
break
if page.status_code == 400:
self._logger.error("{0} : {1} not found due status code : {2}".format(self._name, i, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
break
else:
self._logger.error("{0} : {1} not found due status code : {2}".format(self._name, i, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for {1} : {2}".format(self._name, i, err))
exit(1) exit(1)
if page.status_code == 201: except Exception as err:
result = page.json() self._logger.error("{0} : Exception error for {1} : {2}".format(self._name, i, err))
listelement[i].append(result["id"]) self._logger.debug("{0} : Element {3} {2} is {1}".format(self._name, element_exist, title_element, i))
else: if element_exist == False:
self._logger.error("{0} not added due status code : {1}".format(i, page.status_code)) data = {"name": title_element}
self._logger.debug(page.content) self._logger.info("{0} : Create {1} : {2}".format(self._name, i, title_element))
self._logger.debug("{0} : Data : {1}".format(self._name, data))
try:
page = self._request.post("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, i, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
if page.status_code == 201:
self._logger.info("{0} : {1} created : {2}".format(self._name, i, j))
result = page.json()
listelement[i].append(result["id"])
else:
self._logger.error("{0} : {1} not added due status code : {2}".format(self._name, i, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for post {1} : {2}".format(self._name, i, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for post {1} : {2}".format(self._name, i, err))
title = articletitle[0].text title = articletitle[0].text
author = articleacreator[0].text.lower() author = articleacreator[0].text.lower()
@@ -453,66 +604,123 @@ class WPimport:
bodyhtml = bodyhtml.replace(i["old_src"], o.path) bodyhtml = bodyhtml.replace(i["old_src"], o.path)
hour = articledate[0].text hour = articledate[0].text
time = dateheader[0].text.split(" ") time = dateheader[0].text.split(" ")
self._logger.debug("{0} : Title post : |{1}|".format(self._name, title))
title = self._removeSpace(title)
self._logger.debug("{0} : Rendered Title post : |{1}|".format(self._name, title))
data = {"title":title, "content":bodyhtml, "status":"publish", "date": "{0}-{1}-{2}T{3}:00".format(time[2],month[time[1]],time[0], hour), "tags": listelement["tags"], "categories": listelement["categories"]} data = {"title":title, "content":bodyhtml, "status":"publish", "date": "{0}-{1}-{2}T{3}:00".format(time[2],month[time[1]],time[0], hour), "tags": listelement["tags"], "categories": listelement["categories"]}
params = {"search":author} self._logger.debug("{0} : Data for post : |{1}| : {2}" .format(self._name, title, data))
try:
page = self._request.get("http://{0}/wp-json/wp/v2/users".format(self._wordpress), auth=self._basic, params=params)
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1)
if page.status_code == 200:
result = page.json()
data["author"] = result[0]["id"]
else:
self._logger.error("Connection error with status code : {0}".format(page.status_code))
self._logger.debug(page.content)
params = {"search":author, "per_page":100}
params = {"search":title}
try: try:
page = self._request.get("http://{0}/wp-json/wp/v2/posts".format(self._wordpress), auth=self._basic, params=params) self._logger.info("{0} : Search author : {1}".format(self._name, author))
except Exception as err: page = self._request.get("{1}://{0}/wp-json/wp/v2/users".format(self._wordpress, self._protocol), auth=self._basic, headers=self._headers_json, params=params)
self._logger.error("Connection error : {0}".format(err)) self._logger.debug("{0} : End Search author : {1}".format(self._name, author))
exit(1) self._logger.debug("{0} : Debug requests : {1}".format(self._name, page.content))
page_exist = True if page.status_code == 200:
headers = {'Content-Type': 'application/json', 'Accept':'application/json'} self._logger.info("{0} : Get author id : {1}".format(self._name, result))
if page.status_code == 200: result = page.json()
result = page.json() for a in result:
if len(result) == 0: data["author"] = a["id"]
page_exist = False
else: else:
self._logger.info("La page {0} existe deja et mis à jour".format(title)) self._logger.error("{0} : Connection error with status code for get author : {1}".format(self._name, page.status_code))
post_id = result[0]["id"] self._logger.debug("{0} : {1}".format(page.content))
try: except ConnectionError as err:
page = self._request.post("http://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, post_id), auth=self._basic, headers=headers, data=json.dumps(data)) self._logger.error("{0} : Connection error for get author : {1}".format(self._name, err))
except Exception as err: exit(1)
self._logger.error("Connection error : {0}".format(err)) except Exception as err:
exit(1) self._logger.error("{0} : Exception error for get author : {1}".format(self._name, err))
page_is_exist = False
for index in range(1,10):
params = {"search": title, "per_page":100, "page": index}
try:
self._logger.info("{0} : Search post with index {2} : {1}".format(self._name, title, index))
page = self._request.get("{1}://{0}/wp-json/wp/v2/posts".format(self._wordpress, self._protocol), auth=self._basic, params=params, headers=self._headers_json)
if page.status_code == 200: if page.status_code == 200:
self._logger.debug("{0} : Encoding : {1}".format(self._name, page.encoding))
page.encoding = "utf-8"
result = page.json() result = page.json()
self._logger.info("Article mis à jour : {0}".format(result["title"]["raw"])) if len(result) == 0:
break
self._logger.info("{0} : Number result posts : {1}".format(self._name, len(result)))
count = 0
for i in result:
title_rendered = i["title"]["rendered"]
self._logger.info("{0} : Search title posts for |{2}| : |{1}|".format(self._name, title_rendered, title))
if len(title_rendered) != len(title):
title_rendered = self._replaceCaracter(title_rendered)
self._logger.debug("{0} : Search title posts for |{2}| : |{1}|".format(self._name, title_rendered, title))
self._logger.debug("{0} : SIze of title : {1} - {2}".format(self._name, len(title), len(title_rendered)))
if title_rendered == title:
if self._no_update is False:
page_is_exist = True
post_id = i["id"]
count = count + 1
if count > 1:
self._logger.info("{0} : Page {1} is double and going to delete".format(self._name, title))
try:
params = {"force":1}
page = self._request.delete("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, params=params)
if page.status_code == 200:
self._logger.info("{0} : Post deleted : {1}".format(self._name, title))
else:
self._logger.error("{0} : Post not updated due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for deleted post : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for deleted post : {1}".format(self._name, err))
else:
self._logger.debug("{0} : Data for post to update : {1}".format(self._name, i))
self._logger.info("{0} : Page {1} already exist and going to update".format(self._name, title))
try:
page = self._request.post("{2}://{0}/wp-json/wp/v2/posts/{1}".format(self._wordpress, post_id, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
if page.status_code == 200:
result = page.json()
self._logger.info("{0} : Post updated : {1}".format(self._name, title))
self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"])
self._linkImgPost(result["title"]["raw"], list_img, result["id"])
else:
self._logger.error("{0} : Post not updated due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for update post : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for update post : {1}".format(self._name, err))
if page.status_code == 400:
self._logger.error("{0} : Connection for update post unauthorized : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
break
else:
self._logger.error("{0} : Connection for update post error with status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content))
except ConnectionError as err:
self._logger.error("{0} : Connection error for search post : {1}".format(self._name, err))
exit(1)
except Exception as err:
self._logger.error("{0} : Exception error for search post : {1}".format(self._name, err))
if page_is_exist is False and self._no_create is False:
try:
self._logger.info("{0} : Creating posts : {1}".format(self._name, data["title"]))
page = self._request.post("{1}://{0}/wp-json/wp/v2/posts".format(self._wordpress, self._protocol), auth=self._basic, headers=self._headers_json, data=json.dumps(data))
if page.status_code == 201:
result = page.json()
self._logger.info("{0} : Post added : {1}".format(self._name, result["title"]["raw"]))
self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"]) self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"])
self._linkImgPost(result["title"]["raw"], list_img, result["id"]) self._linkImgPost(result["title"]["raw"], list_img, result["id"])
else: else:
self._logger.error("Post not updated due status code : {0}".format(page.status_code)) self._logger.error("{0} : Post not added due status code : {1}".format(self._name, r.status_code))
self._logger.debug(page.content) self._logger.debug("{0} : {1}".format(self._name, r.content))
except ConnectionError as err:
else: self._logger.error("{0} : Connection error for create post : {1}".format(self._name, err))
self._logger.error("Connection for update post error with status code : {0}".format(page.status_code))
self._logger.debug(page.content)
if page_exist == False:
try:
page = self._request.post("http://{0}/wp-json/wp/v2/posts".format(self._wordpress), auth=self._basic, headers=headers, data=json.dumps(data))
except Exception as err:
self._logger.error("Connection error : {0}".format(err))
exit(1) exit(1)
if page.status_code == 201: except Exception as err:
result = page.json() self._logger.error("{0} : Exception error for create post : {1}".format(self._name, err))
self._logger.info("Article ajoute : {0}".format(result["title"]["raw"]))
self._addOrUpdateComment(result["id"], comment_post, result["title"]["raw"])
self._linkImgPost(result["title"]["raw"], list_img, result["id"])
else:
self._logger.error("Post not added due status code : {0}".format(r.status_code))
self._logger.debug(r.content)

128
lib/WPRemove.py Normal file
View File

@@ -0,0 +1,128 @@
#!/usr/bin/python3
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import requests, os, logging, re, json
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
class WPRemove:
    """Bulk-remove WordPress content (posts, tags, categories, media) via the REST API.

    Deletion work is sharded across workers: each instance requests only the
    results page matching its thread index, so several WPRemove instances can
    run in parallel against the same site.
    """

    # Constructor
    def __init__(self, index_name=1, number_thread=1, basic=None, wordpress="", logger=None, ssl_wordpress=True):
        # index_name: 1-based index of this worker (also used as the results page number).
        # number_thread: total number of workers sharing the deletion work.
        # basic: auth object handed to requests (may be None).
        # wordpress: host name of the target WordPress instance.
        # logger: logging.Logger used for all progress/error output.
        # ssl_wordpress: use https when True, plain http otherwise.
        self._basic = basic
        self._wordpress = wordpress
        self._logger = logger
        self._headers_json = {'Content-Type': 'application/json', 'Accept':'application/json'}
        self._name = "Thread-{0}".format(index_name)
        self._index_thread = index_name
        self._protocol = "https"
        self._number_thread = number_thread
        if ssl_wordpress is False:
            self._protocol = "http"
        # Session with automatic retries on transient errors (429/5xx), exponential backoff.
        self._request = requests.Session()
        retries = Retry(connect=10, read=10, redirect=5,
                        status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
        self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))

    # Destructor
    def __del__(self):
        print("{0} : Import finished for {1}".format(self._name, self._wordpress))

    # Public method

    def _getCount(self, composant):
        """Return the total number of *composant* items (posts/tags/...) on the site.

        Reads the X-WP-Total header from a one-item listing request.
        Returns 0 when the request fails or the status code is not 200.
        Exits the process on a connection error.
        """
        count = 0
        try:
            params = {"per_page":1}
            self._logger.info("{0} : Get count {2} to remove for url : {1}".format(self._name, self._wordpress, composant))
            r = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, composant, self._protocol), params=params, auth=self._basic, headers=self._headers_json)
            if r.status_code == 200:
                count = int(r.headers["X-WP-Total"])
            else:
                self._logger.error("{0} : Error for list to remove {1} due status code {2}".format(self._name, composant, r.status_code))
                self._logger.debug("{0} : Content error for {1} : {2}".format(self._name, composant, r.content))
        # Fix: the builtin ConnectionError never matches requests' failures, so the
        # exit(1) path was unreachable; catch the requests exception explicitly.
        except requests.exceptions.ConnectionError as err:
            self._logger.error("{0} : Connection error for list {1} to remove : {2}".format(self._name, composant, err))
            exit(1)
        except Exception as err:
            self._logger.error("{0} : Exception error for list {1} to remove : {2}".format(self._name, composant, err))
        return count

    def setUrl(self, wordpress):
        """Point this worker at another WordPress host."""
        self._wordpress = wordpress

    def cleanPosts(self):
        """Delete all posts (except protected slugs)."""
        self._removeAll("posts")

    def cleanTags(self):
        """Delete all tags (except protected slugs)."""
        self._removeAll("tags")

    def cleanCategories(self):
        """Delete all categories (except the default 'non-classe')."""
        self._removeAll("categories")

    def cleanMedia(self):
        """Delete all media items (except protected slugs)."""
        self._removeAll("media")

    # Private method

    def _pageSize(self, count):
        """Per-page size for this worker: ceil(count / number_thread), capped at 100.

        Always returns an int. The original computed ``count / number_thread`` and
        only rounded up when the division was inexact, leaving a float (e.g. 10.0)
        to be sent as the ``per_page`` query parameter on exact division; 100 is
        the WP REST API maximum for per_page.
        """
        nb_thread = int(self._number_thread)
        page = -(-count // nb_thread)  # integer ceil division, no math import needed
        return min(page, 100)

    def _removeAll(self, composant):
        """Delete every *composant* item assigned to this worker, recursing until none remain.

        Skips items whose slug is 'non-classe' (the WordPress default category,
        which cannot be deleted). Exits the process on connection errors.
        """
        count = self._getCount(composant)
        self._logger.debug("{0} : Count for {1} : {2}".format(self._name, composant, count))
        if count > 0:
            self._logger.debug("{0} : Number thread for {1} : {2}".format(self._name, composant, self._number_thread))
            page = self._pageSize(count)
            self._logger.debug("{0} : Page for {1} : {2}".format(self._name, composant, page))
            params = {"per_page":page, "page":self._index_thread}
            self._logger.info("{0} : Params for {1} : {2}".format(self._name, composant, params))
            try:
                self._logger.info("{0} : List {2} to remove for url : {1}".format(self._name, self._wordpress, composant))
                r = self._request.get("{2}://{0}/wp-json/wp/v2/{1}".format(self._wordpress, composant, self._protocol), auth=self._basic, params=params, headers=self._headers_json)
                if r.status_code == 200:
                    result = r.json()
                    if len(result) > 0:
                        for i in result:
                            is_delete = True
                            self._logger.info(i["slug"])
                            if i["slug"] == "non-classe":
                                is_delete = False
                            if is_delete is True:
                                # Tags/categories expose the label as "name"; posts/media nest it.
                                if composant == "tags" or composant == "categories":
                                    title = i["name"]
                                else:
                                    title = i["title"]["rendered"]
                                self._logger.info("{0} : Remove {2} for url {1} : {3}".format(self._name, self._wordpress, composant, title))
                                params = {"force":1}  # force=1 skips the trash and deletes permanently
                                try:
                                    r = self._request.delete("{3}://{0}/wp-json/wp/v2/{1}/{2}".format(self._wordpress, composant, i["id"], self._protocol), auth=self._basic, headers=self._headers_json , params=params)
                                    if r.status_code == 200:
                                        self._logger.info("{0} : Post removed for URL {1} {2} : {3}".format(self._name, self._wordpress, composant, title))
                                    else:
                                        self._logger.error("{0} : Connection error for post {1} {2} {3} with status code {4}".format(self._name, self._wordpress, composant, title, r.status_code))
                                except requests.exceptions.ConnectionError as err:
                                    self._logger.error("{0} : Connection error for {1} remove : {2}".format(self._name, composant, err))
                                    exit(1)
                                except Exception as err:
                                    self._logger.error("{0} : Exception error for {1} remove : {2}".format(self._name, composant, err))
                        # Recurse until the listing comes back empty.
                        self._removeAll(composant)
                    # NOTE(review): r is the LAST delete response here when items were
                    # processed — presumably 400 signals nothing left to remove; confirm.
                    if r.status_code == 400:
                        self._logger.error("{0} : No content for {1} to remove : {2}".format(self._name, composant, r.status_code))
                else:
                    self._logger.error("{0} : Error for list to remove {1} due status code {2}".format(self._name, composant, r.status_code))
                    self._logger.debug("{0} : Content error for {1} : {2}".format(self._name, composant, r.content))
            # Fix: catch requests' ConnectionError (the builtin one never matches it).
            except requests.exceptions.ConnectionError as err:
                self._logger.error("{0} : Connection error for list {1} to remove : {2}".format(self._name, composant, err))
                exit(1)
            except Exception as err:
                self._logger.error("{0} : Exception error for list {1} to remove : {2}".format(self._name, composant, err))