Separate publication and principal

This commit is contained in:
Valentin CZERYBA 2023-05-07 17:38:44 +02:00
parent f9be6770e3
commit 3c2f1cc017
3 changed files with 42 additions and 32 deletions

View File

@@ -14,13 +14,13 @@ from lib.WPExport import WPExport
def download(name_thread, max_thread, url, logger, parser, directory, html, img): def download(name_thread, max_thread, url, logger, parser, directory, html, img):
exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory) exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory)
webpage = exportWp.getUrlPage(name_thread, max_thread) webpage = exportWp.getUrlPage(name_thread, max_thread)
for i in ["article", "page"]:
for j in ["publications", "principal"]:
if html is False: if html is False:
exportWp.downloadHTML(webpage["article"]) exportWp.downloadHTML(webpage[j][i])
exportWp.downloadHTML(webpage["page"])
if img is False: if img is False:
exportWp.downloadImg(webpage["article"]) exportWp.downloadImg(webpage[j][i])
exportWp.downloadImg(webpage["page"])
@@ -44,7 +44,10 @@ def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, bas
del exportWp del exportWp
for j in wordpress: for j in wordpress:
importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser) importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser)
importWp.fromUrl(webpage) for k in ["article", "page"]:
for l in ["publications", "principal"]:
importWp.fromUrl(webpage[l][k])
del importWp del importWp
else: else:
if len(canalblog) != len(wordpress): if len(canalblog) != len(wordpress):
@@ -62,7 +65,11 @@ def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, bas
webpage = exportWp.getUrlPage(name_thread, max_thread) webpage = exportWp.getUrlPage(name_thread, max_thread)
del exportWp del exportWp
importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser) importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser)
importWp.fromUrl(webpage)
for k in ["article", "page"]:
for l in ["publications", "principal"]:
importWp.fromUrl(webpage[l][k])
del importWp del importWp

View File

@@ -85,9 +85,14 @@ class WPExport:
self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code)) self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content)) self._logger.debug("{0} : {1}".format(self._name, page.content))
webpage = {"page":[], "article":[]} webpage = {"principal": {"page":[], "article":[]}, "publications": {"page":[], "article":[]}}
for i in page_url: for i in page_url:
section = "publications"
o = urlparse(i)
o = o._replace(scheme="https")
i = o.geturl().replace(":///", "://")
if i == "{0}/".format(self._url):
section = "principal"
try: try:
page = self._request.get(i) page = self._request.get(i)
except Exception as err: except Exception as err:
@@ -95,8 +100,8 @@ class WPExport:
exit(1) exit(1)
if page.status_code == 200: if page.status_code == 200:
self._logger.info("{0} : page : {1}".format(self._name, i)) self._logger.info("{0} : page : {1}".format(self._name, i))
if i not in webpage["page"]: if i not in webpage[section]["page"]:
webpage["page"].append(i) webpage[section]["page"].append(i)
soup = BeautifulSoup(page.text, self._parser) soup = BeautifulSoup(page.text, self._parser)
class_div = pagingfirstline = soup.find_all("div", class_="pagingfirstline") class_div = pagingfirstline = soup.find_all("div", class_="pagingfirstline")
if len(class_div) > 0: if len(class_div) > 0:
@@ -122,22 +127,22 @@ class WPExport:
if len(categorie) > 2: if len(categorie) > 2:
url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging) url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
self._logger.info("{0} : {1}".format(self._name, url_paging)) self._logger.info("{0} : {1}".format(self._name, url_paging))
if url_paging not in webpage["page"]: if url_paging not in webpage[section]["page"]:
webpage["page"].append(url_paging) webpage[section]["page"].append(url_paging)
page = self._request.get(url_paging) page = self._request.get(url_paging)
if page.status_code == 200: if page.status_code == 200:
soup = BeautifulSoup(page.text, self._parser) soup = BeautifulSoup(page.text, self._parser)
h2 = soup.find_all("h2") h2 = soup.find_all("h2")
for title in h2: for title in h2:
href = title.find_all("a")[0].get("href", "/") href = title.find_all("a")[0].get("href", "/")
if href not in webpage["article"]: if href not in webpage[section]["article"]:
try: try:
o = urlparse(href) o = urlparse(href)
o = o._replace(scheme="https").geturl() o = o._replace(scheme="https").geturl()
except Exception as err: except Exception as err:
self._logger.error("parsing error : {0}".format(err)) self._logger.error("parsing error : {0}".format(err))
exit(1) exit(1)
webpage["article"].append(o) webpage[section]["article"].append(o)
else: else:
self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code)) self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content)) self._logger.debug("{0} : {1}".format(self._name, page.content))

View File

@@ -512,7 +512,6 @@ class WPimport:
if len(result) > 0: if len(result) > 0:
for i in result: for i in result:
self._logger.debug("{0} : Data for post to delete : {1}".format(self._name, i)) self._logger.debug("{0} : Data for post to delete : {1}".format(self._name, i))
if i["title"]["rendered"] == title:
self._logger.info("{0} : Page {1} already exist and going to delete".format(self._name, title)) self._logger.info("{0} : Page {1} already exist and going to delete".format(self._name, title))
post_id = i["id"] post_id = i["id"]
try: try:
@@ -524,7 +523,6 @@ class WPimport:
if page.status_code == 200: if page.status_code == 200:
result = page.json() result = page.json()
self._logger.info("{0} : Post deleted : {1}".format(self._name, title)) self._logger.info("{0} : Post deleted : {1}".format(self._name, title))
else: else:
self._logger.error("{0} : Post not deleted due status code : {1}".format(self._name, page.status_code)) self._logger.error("{0} : Post not deleted due status code : {1}".format(self._name, page.status_code))
self._logger.debug("{0} : {1}".format(self._name, page.content)) self._logger.debug("{0} : {1}".format(self._name, page.content))