from bs4 import BeautifulSoup
from urllib.parse import urlparse
import requests, os, logging, re, json

class WPChange:
    # Constructor
    def __init__(self, index_name=1, number_thread=1, logger=None, parser="html.parser"):
        self._name = "Thread-{0}".format(index_name)
        # Fall back to a named logger so the methods below never call into a None logger
        self._logger = logger or logging.getLogger(self._name)
        self._number_thread = number_thread
        self._parser = parser

    # Destructor
    def __del__(self):
        print("{0} : Import finished".format(self._name))

    # Public methods

    ## Process the slice of the file list assigned to this thread
    ## (number_thread is the 0-based index of the current thread)
    def fromFile(self, files=None, number_thread=0, max_thread=1):
        files = files or []
        divFiles = len(files) // max_thread
        currentRangeFiles = divFiles * (number_thread + 1)
        firstRange = currentRangeFiles - divFiles
        # The last thread also takes any remainder so no file is skipped
        if number_thread == max_thread - 1:
            currentRangeFiles = len(files)
        self._logger.debug("{0} : index : {1}".format(self._name, number_thread))
        self._logger.debug("{0} : first range : {1}".format(self._name, firstRange))
        self._logger.debug("{0} : last range : {1}".format(self._name, currentRangeFiles))
        for i in range(firstRange, currentRangeFiles):
            if os.path.exists(files[i]):
                self._logger.info("{0} : ({1}/{2}) File is being processed : {3}".format(self._name, i + 1, currentRangeFiles, files[i]))
                self._change(files[i])

    ## Walk the "archives" subfolder of a directory and process every file found
    def fromDirectory(self, directory="", number_thread=0, max_thread=1):
        directory = "{0}/archives".format(directory)
        directories = self._getDirectories([], directory)
        if len(directories) > 0:
            files = self._getFiles(directories)
            self.fromFile(files, number_thread, max_thread)
        else:
            self._logger.error("{0} : No files for {1}".format(self._name, directory))

    # Private methods

    ## Collect every file stored directly inside the given directories
    def _getFiles(self, item):
        files = []
        for i in item:
            for j in os.listdir(i):
                path = "{0}/{1}".format(i, j)
                if os.path.isfile(path):
                    files.append(path)
        return files

    ## Recursively collect every subdirectory of item
    def _getDirectories(self, subdirectory, item):
        for i in os.listdir(item):
            path = "{0}/{1}".format(item, i)
            if os.path.isdir(path):
                subdirectory.append(path)
                self._getDirectories(subdirectory, path)
        return subdirectory

    ## Change img file paths: parse the HTML and inspect every <img> src (currently only logged)
    def _change(self, file):
        with open(file, 'r') as f:
            content = f.read()
            soup = BeautifulSoup(content, self._parser)
            img = soup.find_all("img")
            for i in img:
                src = i.get("src")
                self._logger.info(src)
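
# Minimal usage sketch (illustration only, not part of the original module).
# The "./backup" path and the logger name are assumptions; fromDirectory()
# expects an "archives/" subfolder inside the directory it is given.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    importer = WPChange(index_name=1, logger=logging.getLogger("wp-change"))
    # Single-threaded run: thread index 0 out of 1 processes every file
    importer.fromDirectory(directory="./backup", number_thread=0, max_thread=1)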