from bs4 import BeautifulSoup
from urllib.parse import urlparse
import json
import os


class WPChange:
    # Constructor
    def __init__(self, index_name=1, number_thread=1, logger=None, parser="html.parser", tmp="/tmp/import_export_canablog"):
        self._name = "Thread-{0}".format(index_name)
        self._logger = logger
        self._number_thread = number_thread
        self._parser = parser
        self._tmp = tmp
        self._index_name = index_name

    # Destructor
    def __del__(self):
        print("{0} : Import finished".format(self._name))

    # Public methods

    ## Process the slice of files assigned to this thread. With the default
    ## number_thread=1 and max_thread=1 the whole list is processed, so this
    ## also covers the single-threaded case. Here number_thread is 1-indexed.
    def fromFile(self, files=[], number_thread=1, max_thread=1):
        divFiles = int(len(files) / max_thread)
        currentRangeFiles = int(divFiles * number_thread)
        firstRange = int(currentRangeFiles - divFiles)
        self._logger.debug("{0} : index : {1}".format(self._name, number_thread))
        self._logger.debug("{0} : first range : {1}".format(self._name, firstRange))
        self._logger.debug("{0} : last range : {1}".format(self._name, currentRangeFiles))

        for i in range(firstRange, currentRangeFiles):
            if os.path.exists(files[i]):
                self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i + 1, currentRangeFiles, files[i]))
                self._change(files[i])

    ## Walk <directory>/archives, write this thread's share of the file list
    ## to a temporary JSON file, then process that list.
    def fromDirectory(self, directory="", revert=False):
        self._directory = directory
        directory = "{0}/archives".format(directory)
        directories = self._getDirectories([], directory)
        if len(directories) > 0:
            files = self._getFiles(directories)
            if revert is False:
                self._tmpFiles(files=files, number_thread=self._index_name, max_thread=self._number_thread)
                self._fromFileTmp()
        else:
            self._logger.error("{0} : No files found for {1}".format(self._name, directory))

    # Private methods

    ## Read back the temporary JSON file written by _tmpFiles and process
    ## every page it lists.
    def _fromFileTmp(self):
        try:
            with open("{0}/{1}.json".format(self._tmp, self._name)) as f:
                files = json.loads(f.read())
            self._logger.debug("{0} : number of webpages : {1}".format(self._name, len(files)))
            for i in range(0, len(files)):
                if os.path.exists(files[i]):
                    self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i + 1, len(files), files[i]))
                    self._change(files[i])
        except Exception as ex:
            self._logger.error("{0} : Error reading JSON file from tmp : {1}".format(self._name, ex))

    ## Write this thread's slice of the file list to <tmp>/<thread name>.json.
    ## Unlike fromFile, number_thread is 0-indexed here.
    def _tmpFiles(self, files=[], number_thread=1, max_thread=1):
        divFiles = int(len(files) / int(max_thread))
        currentRangeFiles = int(divFiles * (int(number_thread) + 1))
        firstRange = int(currentRangeFiles - divFiles)
        self._logger.debug("{0} : index : {1}".format(self._name, number_thread))
        self._logger.debug("{0} : first range : {1}".format(self._name, firstRange))
        self._logger.debug("{0} : last range : {1}".format(self._name, currentRangeFiles))
        webpage = []
        for i in range(firstRange, currentRangeFiles):
            webpage.append(files[i])
        try:
            string_webpage = json.dumps(webpage)
            with open("{0}/{1}.json".format(self._tmp, self._name), "wt") as f:
                f.write(string_webpage)
        except Exception as ex:
            self._logger.error("{0} : Error writing webpage list : {1}".format(self._name, ex))

    ## List every regular file in the given directories
    def _getFiles(self, item):
        files = []
        for i in item:
            for j in os.listdir(i):
                if os.path.isfile("{0}/{1}".format(i, j)):
                    files.append("{0}/{1}".format(i, j))
        return files

    ## Recursively collect every subdirectory below item
    def _getDirectories(self, subdirectory, item):
        sub = subdirectory
        for i in os.listdir(item):
            if os.path.isdir("{0}/{1}".format(item, i)):
                sub.append("{0}/{1}".format(item, i))
                subdirectory = self._getDirectories(sub, "{0}/{1}".format(item, i))
        return subdirectory
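    ## Sketch for comparison, not part of the original class: the same
    ## traversal expressed with the standard-library os.walk, which visits
    ## each directory once and removes the need for explicit recursion.
    ## The name _getDirectoriesWalk is hypothetical and unused elsewhere.
    def _getDirectoriesWalk(self, item):
        subdirectory = []
        for root, dirs, _files in os.walk(item):
            for d in dirs:
                subdirectory.append("{0}/{1}".format(root, d))
        return subdirectory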
sub.append("{0}/{1}".format(item, i)) subdirectory = self._getDirectories(sub, "{0}/{1}".format(item, i)) return subdirectory ## Change path img file def _change(self, file): ext_img = ["png", "svg", "gif", "jpg", "jpeg"] try: with open(file, 'r') as f: content = f.read() soup = BeautifulSoup(content, self._parser) img = soup.find_all("img") for i in img: src = i.get("src") o = urlparse(src) if len(o.netloc) > 0: self._logger.info("{0} : Change source image {1} /img/{2}/{3}".format(self._name, src, o.netloc, o.path)) content = content.replace(src, "/img/{0}/{1}".format(o.netloc, o.path)) script = soup.find_all("script", {"type": "text/javascript"}) for i in script: src = i.get("src") if src is not None: o = urlparse(src) if len(o.netloc) > 0: self._logger.info("{0} : Change source js {1} /dists/js/{2}/{3}".format(self._name, src, o.netloc, o.path)) content = content.replace(src, "/dists/js/{0}/{1}".format(o.netloc, o.path)) link = soup.find_all("link", {"rel": "stylesheet"}) for i in link: href = i.get("href") if href is not None: o = urlparse(href) if len(o.netloc) > 0: self._logger.info("{0} : Change source css {1} /dists/css/{2}/{3}".format(self._name, href, o.netloc, o.path)) content = content.replace(href, "/dists/css/{0}/{1}".format(o.netloc, o.path)) a = soup.find_all("a", {"target": "_blank"}) for i in a: href = i.get("href") if href is not None: o = urlparse(href) if len(o.netloc) > 0: ext = o.path.split(".")[len(o.path.split("."))-1] if ext in ext_img: self._logger.info("{0} : Change a img {1} /img/{2}/{3}".format(self._name, href, o.netloc, o.path)) content = content.replace(href, "/img/{0}/{1}".format(o.netloc, o.path)) try: with open(file, "w") as f: self._logger.info("{0} : File write : {1}".format(self._name, file)) f.write(content) except Exception as ex: self._logger.error("{0} : Error for write file {1} : {2}".format(self._name, file, ex)) except Exception as ex: self._logger.error("{0} : Error for read file {1} : {2}".format(self._name, file, ex))