diff --git a/import_export_canalblog.py b/import_export_canalblog.py
index 418df05..86ffde7 100644
--- a/import_export_canalblog.py
+++ b/import_export_canalblog.py
@@ -5,7 +5,7 @@
 from urllib.parse import urlparse
 from concurrent import futures
 from concurrent.futures import as_completed, wait, ALL_COMPLETED
-import argparse, logging, threading
+import argparse, logging, threading, os, glob
 from lib.WPImport import WPimport
 from lib.WPExport import WPExport
 from lib.WPRemove import WPRemove
@@ -40,21 +40,21 @@ def remove(index, number, args, basic, logger, ssl_wordpress):
     del removeWp
 
 
-def download(name_thread, max_thread, url, logger, parser, directory, html, img, ssl_canalblog):
+def download(name_thread, max_thread, url, logger, parser, directory, html, img, ssl_canalblog, revert, tmp):
     exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory, ssl_canalblog=ssl_canalblog)
-    webpage = exportWp.getUrlPage(name_thread, max_thread)
+    if not revert:
+        exportWp.getUrlPage(name_thread, max_thread)
     for i in ["article", "page"]:
         for j in ["publications", "principal"]:
             if html is False:
-                exportWp.downloadHTML(webpage[j][i])
+                exportWp.downloadHTML(j, i)
             if img is False:
-                exportWp.downloadImg(webpage[j][i])
+                exportWp.downloadImg(j, i)
+    del exportWp
 
-
-
-def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, basic, serial, ssl_wordpress, ssl_canalblog, create, update, image):
+def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, basic, serial, ssl_wordpress, ssl_canalblog, create, update, image, revert, tmp):
     canalblog = canalblog.split(",")
     wordpress = wordpress.split(",")
     name = "Thread-{0}".format(int(name_thread) + 1)
@@ -70,14 +70,15 @@ def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, bas
             except Exception as err:
                 logger.error("{0} : parsing error : {1}".format(name, err))
                 exit(1)
-            exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog)
-            webpage = exportWp.getUrlPage(name_thread, max_thread)
+            exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog, tmp=tmp)
+            if not revert:
+                exportWp.getUrlPage(name_thread, max_thread)
             del exportWp
             for j in wordpress:
-                importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image)
+                importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp)
                 for k in ["article", "page"]:
                     for l in ["publications", "principal"]:
-                        importWp.fromUrl(webpage[l][k])
+                        importWp.fromUrl(l, k)
                 del importWp
 
     else:
@@ -93,9 +94,10 @@
                 logger.error("parsing error : {0}".format(err))
                 exit(1)
             exportWp = WPExport(name=name, url=url, logger=logger, parser=parser, ssl_canalblog=ssl_canalblog)
-            webpage = exportWp.getUrlPage(name_thread, max_thread)
+            if not revert:
+                exportWp.getUrlPage(name_thread, max_thread)
             del exportWp
-            importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image)
+            importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image, tmp=tmp)
             for k in ["article", "page"]:
                 for l in ["publications", "principal"]:
@@ -117,7 +119,7 @@ def importDirectory(name_thread, max_thread, directory, logger, parser, wordpres
     else:
         if len(directory) != len(wordpress):
-            logger.error("{0} : Error : Number directory is differant than wordpress".format(name))
+            logger.error("{0} : Error : Number directory is different than wordpress".format(name))
             exit(1)
         for i in range(0, len(wordpress)-1):
             importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser, ssl_wordpress=ssl_wordpress, no_create=create, no_update=update, no_image=image)
@@ -134,6 +136,8 @@ if __name__ == '__main__':
     parser.add_argument("--parser", help="Parser content", default="html.parser")
     parser.add_argument("--parallel", help="Define number thread (default : 1)", default=1)
     parser.add_argument("--no-ssl", help="No ssl for canalblog and/or wordpress (example wordpress,canalblog)", dest="ssl", default="")
+    parser.add_argument("--revert", help="Resume an interrupted run from the saved progress files", action="store_true")
+    parser.add_argument("--tmp", help="Temporary directory for progress files", default="/tmp/import_export_canablog")
 
     subparsers = parser.add_subparsers(dest="command")
 
@@ -187,8 +191,7 @@ if __name__ == '__main__':
         default="",
         help="File")
 
-
-
+
     args = parser.parse_args()
 
     logger = logging.getLogger('import export canalblog')
@@ -223,6 +226,8 @@ if __name__ == '__main__':
         fileHandler.setFormatter(formatter)
         logger.addHandler(fileHandler)
 
+    os.makedirs(args.tmp, exist_ok=True)
+
     if args.command == "import" or args.command == "remove":
         password = args.password
         if len(args.password) == 0:
@@ -255,8 +260,17 @@ if __name__ == '__main__':
             with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                 wait_for = [ ex.submit(remove, i, args.parallel, args, basic, logger, ssl_wordpress) for i in range(0, int(args.parallel)) ]
                 wait(wait_for, return_when=ALL_COMPLETED)
+                if args.revert is True:
+                    files_tmp = glob.glob("{0}/*.json".format(args.tmp))
+                    if len(files_tmp) == 0:
+                        logger.error("Error revert, because files not found")
+                        exit(1)
+                    if len(files_tmp) != int(args.parallel):
+                        for file_r in files_tmp:
+                            os.remove(file_r)
+
                 wait_for = [
-                    ex.submit(importUrl, i, int(args.parallel), args.canalblog, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, ssl_canalblog, args.create, args.update, args.image)
+                    ex.submit(importUrl, i, int(args.parallel), args.canalblog, logger, args.parser, args.wordpress, basic, args.serial, ssl_wordpress, ssl_canalblog, args.create, args.update, args.image, args.revert, args.tmp)
                     for i in range(0, int(args.parallel))
                 ]
@@ -294,7 +308,7 @@ if __name__ == '__main__':
         try:
             with futures.ThreadPoolExecutor(max_workers=int(args.parallel)) as ex:
                 wait_for = [
-                    ex.submit(download, i, int(args.parallel), url, logger, args.parser, args.directory, args.html, args.img, ssl_canalblog)
+                    ex.submit(download, i, int(args.parallel), url, logger, args.parser, args.directory, args.html, args.img, ssl_canalblog, args.revert, args.tmp)
                     for i in range(0, int(args.parallel))
                 ]
         except Exception as err:
diff --git a/lib/WPExport.py b/lib/WPExport.py
index f0775e7..53dac23 100644
--- a/lib/WPExport.py
+++ b/lib/WPExport.py
@@ -1,12 +1,12 @@
 #!/usr/bin/python3
 from bs4 import BeautifulSoup
 from urllib.parse import urlparse
-import requests, os, argparse, logging
+import requests, os, argparse, logging, json
 from requests.adapters import HTTPAdapter
 from requests.packages.urllib3.util.retry import Retry
 
 class WPExport:
-    def __init__(self, name = "Thread-0", url = "", logger = None, parser = "html.parser", directory = "backup", ssl_canalblog=True):
+    def __init__(self, name = "Thread-0", url = "", logger = None, parser = "html.parser", directory = "backup", ssl_canalblog=True, tmp="/tmp/import_export_canablog"):
         self._url = url
         self._logger = logger
         self._parser = parser
@@ -22,7 +22,7 @@ class WPExport:
                 status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
 
         self._request.mount('{0}://'.format(self._protocol), HTTPAdapter(max_retries=retries))
-
+        self._tmp = tmp
 
     # Destructor
     def __del__(self):
@@ -57,13 +57,15 @@ class WPExport:
 
 
     # Download HTML
-    def downloadHTML(self, webpage):
-        self._downloadPage(webpage, self._dir)
+    def downloadHTML(self, first, second):
+        webpage = json.loads(open("{0}/{1}.json".format(self._tmp, self._name)).read())
+        self._downloadPage(webpage[first][second], self._dir)
 
 
     # Download Image
-    def downloadImg(self, webpage):
-        page_src = self._getImg(webpage)
+    def downloadImg(self, first, second):
+        webpage = json.loads(open("{0}/{1}.json".format(self._tmp, self._name)).read())
+        page_src = self._getImg(webpage[first][second])
         o = urlparse(self._url)
         self._downloadPage(page_src, "{0}/{1}/{2}".format(self._dir, o.path, "img"))
@@ -161,7 +161,14 @@
         except Exception as err:
             self._logger.error("{0} : Exception error : {1}".format(self._name, err))
             exit(1)
-        return webpage
+        try:
+            string_webpage = json.dumps(webpage)
+            open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(string_webpage)
+        except Exception as ex:
+            self._logger.error("{0} : Error for writing webpage : {1}".format(self._name, ex))
+
+
+
diff --git a/lib/WPImport.py b/lib/WPImport.py
index 42d6596..432d50d 100644
--- a/lib/WPImport.py
+++ b/lib/WPImport.py
@@ -8,7 +8,7 @@ from requests.packages.urllib3.util.retry import Retry
 
 class WPimport:
     # Constructor
-    def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False):
+    def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False, tmp="/tmp/import_export_canablog"):
         self._name = name
         self._basic = basic
         self._wordpress = wordpress
@@ -28,6 +28,7 @@ class WPimport:
         self._no_create = no_create
         self._no_update = no_update
         self._no_image = no_image
+        self._tmp = tmp
 
     # Destructor
     def __del__(self):
@@ -38,26 +39,39 @@ class WPimport:
     def setUrl(self, wordpress):
         self._wordpress = wordpress
 
-    def fromUrl(self, webpage):
-        for i in range(0, len(webpage)):
-            try:
-                r = self._request.get(webpage[i])
-                if r.status_code == 200:
-                    self._logger.info("{0} : ({1}/{2}) : Page is importing : {3}".format(self._name, i+1, len(webpage), webpage[i]))
-                    soup = BeautifulSoup(r.content, self._parser)
-                    articlebody = soup.find_all("div", class_="articlebody")
-                    if len(articlebody) > 0:
-                        self._addOrUpdatePost(soup)
-                    else:
-                        self._addOrUpdateFeaturedMedia(soup)
-                else:
-                    self._logger.error("{0} : Connection error for get url {1} with status code : {2}".format(self._name, webpage[i], r.status_code))
-                    self._logger.debug("{0} : {1}".format(self._name, r.content))
-            except ConnectionError as err:
-                self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, webpage[i], err))
-                exit(1)
-            except Exception as err:
-                self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, webpage[i], err))
+    def fromUrl(self, first, second):
+        try:
+            with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
+                webpage_content = json.loads(file.read())
+            self._logger.debug("{0} : size of webpage : {1}".format(self._name, len(webpage_content)))
+            webpage = list(webpage_content[first][second])
+            for i in range(0, len(webpage)):
+                try:
+                    r = self._request.get(webpage[i])
+                    if r.status_code == 200:
+                        self._logger.info("{0} : ({1}/{2}) : Page is importing : {3}".format(self._name, i+1, len(webpage), webpage[i]))
+                        soup = BeautifulSoup(r.content, self._parser)
+                        articlebody = soup.find_all("div", class_="articlebody")
+                        if len(articlebody) > 0:
+                            self._addOrUpdatePost(soup)
+                        else:
+                            self._addOrUpdateFeaturedMedia(soup)
+                        # drop the imported page from the checkpoint file and persist the remaining work
+                        webpage_content[first][second].remove(webpage[i])
+                        open("{0}/{1}.json".format(self._tmp, self._name), "wt").write(json.dumps(webpage_content))
+                    else:
+                        self._logger.error("{0} : Connection error for get url {1} with status code : {2}".format(self._name, webpage[i], r.status_code))
+                        self._logger.debug("{0} : {1}".format(self._name, r.content))
+                except ConnectionError as err:
+                    self._logger.error("{0} : Connection error for get url {1} : {2}".format(self._name, webpage[i], err))
+                    exit(1)
+                except IOError as err:
+                    self._logger.error("{0} : IO error for url {1} : {2}".format(self._name, webpage[i], err))
+                    exit(1)
+                except Exception as err:
+                    self._logger.error("{0} : Exception error for get url {1} : {2}".format(self._name, webpage[i], err))
+        except Exception as ex:
+            self._logger.error("{0} : Error reading the json file from tmp : {1}".format(self._name, ex))
 
 
     def fromDirectory(self, directory="", number_thread=1, max_thread=1):
@@ -66,9 +80,13 @@ class WPimport:
         directories = self._getDirectories([], "{0}".format(directory))
         if len(directories) > 0:
            files = self._getFiles(directories)
-            self.fromFile(files, number_thread, max_thread)
+            self.fromFile(files=files, number_thread=number_thread, max_thread=max_thread)
         else:
             self._logger.error("{0} : No files for {1}".format(self._name, directory))
+
+
+
+
 
 
     def fromFile(self, files=[], number_thread=1, max_thread=1):
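
Reviewer note, not part of the patch: a minimal sketch of the checkpoint round-trip this change introduces, assuming the webpage structure implied by the loops in download()/importUrl() ("publications"/"principal", each holding "article"/"page" url lists); the helper names save_checkpoint/resume are illustrative only, not functions from the patch.

import json, os

tmp = "/tmp/import_export_canablog"      # default of the new --tmp option
name = "Thread-1"                        # one checkpoint file per worker thread
path = "{0}/{1}.json".format(tmp, name)

def save_checkpoint(webpage):
    # what WPExport.getUrlPage now does after crawling: dump the url lists to tmp
    os.makedirs(tmp, exist_ok=True)
    open(path, "wt").write(json.dumps(webpage))

def resume(first="publications", second="article"):
    # what WPimport.fromUrl now does: reload the file, import one url at a time,
    # then rewrite the file without it so --revert can restart from the leftovers
    webpage = json.loads(open(path).read())
    for url in list(webpage[first][second]):
        # ... import url into WordPress here ...
        webpage[first][second].remove(url)
        open(path, "wt").write(json.dumps(webpage))

save_checkpoint({"publications": {"article": [], "page": []}, "principal": {"article": [], "page": []}})
resume()

With --revert the crawl (getUrlPage) is skipped entirely and the per-thread json left by a previous run is consumed directly, which is why the main script now refuses to revert when no *.json checkpoints exist in --tmp and clears them when their count does not match --parallel.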