separate publication and principal

Valentin CZERYBA 2023-05-07 17:38:44 +02:00
parent f9be6770e3
commit 3c2f1cc017
3 changed files with 42 additions and 32 deletions


@@ -14,13 +14,13 @@ from lib.WPExport import WPExport
 def download(name_thread, max_thread, url, logger, parser, directory, html, img):
     exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory)
     webpage = exportWp.getUrlPage(name_thread, max_thread)
+    for i in ["article", "page"]:
+        for j in ["publications", "principal"]:
     if html is False:
-        exportWp.downloadHTML(webpage["article"])
-        exportWp.downloadHTML(webpage["page"])
+                exportWp.downloadHTML(webpage[j][i])
     if img is False:
-        exportWp.downloadImg(webpage["article"])
-        exportWp.downloadImg(webpage["page"])
+                exportWp.downloadImg(webpage[j][i])
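As the WPExport diff further below initialises it, getUrlPage() now returns the collected URLs keyed first by section ("principal" for the site root, "publications" for everything else) and then by content type. The following is a minimal runnable sketch of that nested layout and of the double loop now used in download(); the WPExportStub class and the example.com URLs are hypothetical stand-ins, not part of the project:

# Hedged sketch of the nested webpage layout consumed by download().
# WPExportStub and the example.com URLs are made up for illustration.
webpage = {
    "principal":    {"page": ["https://example.com/"], "article": []},
    "publications": {"page": ["https://example.com/archives/p1-10.html"],
                     "article": ["https://example.com/archives/2023/05/07/post.html"]},
}

class WPExportStub:
    def downloadHTML(self, urls):
        print("HTML :", urls)

    def downloadImg(self, urls):
        print("IMG :", urls)

exportWp = WPExportStub()
html = img = False
for i in ["article", "page"]:
    for j in ["publications", "principal"]:
        if html is False:
            exportWp.downloadHTML(webpage[j][i])
        if img is False:
            exportWp.downloadImg(webpage[j][i])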
@@ -44,7 +44,10 @@ def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, bas
         del exportWp
         for j in wordpress:
             importWp = WPimport(name=name, basic=basic, wordpress=j, logger=logger, parser=parser)
-            importWp.fromUrl(webpage)
+            for k in ["article", "page"]:
+                for l in ["publications", "principal"]:
+                    importWp.fromUrl(webpage[l][k])
             del importWp
else:
if len(canalblog) != len(wordpress):
@@ -62,7 +65,11 @@ def importUrl(name_thread, max_thread, canalblog, logger, parser, wordpress, bas
             webpage = exportWp.getUrlPage(name_thread, max_thread)
             del exportWp
             importWp = WPimport(name=name, basic=basic, wordpress=wordpress[i], logger=logger, parser=parser)
-            importWp.fromUrl(webpage)
+            for k in ["article", "page"]:
+                for l in ["publications", "principal"]:
+                    importWp.fromUrl(webpage[l][k])
             del importWp
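On the import side the same nested structure is walked in the same order, so each (section, content type) bucket is handed to fromUrl() separately. A small sketch of that traversal; iter_buckets() is a hypothetical helper used only to illustrate the iteration order, not a function of the project:

# Hedged sketch of the traversal order used before each importWp.fromUrl() call.
def iter_buckets(webpage):
    for k in ["article", "page"]:                  # content type
        for l in ["publications", "principal"]:    # section
            yield l, k, webpage[l][k]

demo = {"principal": {"page": ["p1"], "article": []},
        "publications": {"page": [], "article": ["a1", "a2"]}}
for section, kind, urls in iter_buckets(demo):
    print(section, kind, urls)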


@@ -85,9 +85,14 @@ class WPExport:
             self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
             self._logger.debug("{0} : {1}".format(self._name, page.content))
-        webpage = {"page":[], "article":[]}
+        webpage = {"principal": {"page":[], "article":[]}, "publications": {"page":[], "article":[]}}
         for i in page_url:
+            section = "publications"
             o = urlparse(i)
             o = o._replace(scheme="https")
             i = o.geturl().replace(":///", "://")
+            if i == "{0}/".format(self._url):
+                section = "principal"
             try:
                 page = self._request.get(i)
             except Exception as err:
@@ -95,8 +100,8 @@
                 exit(1)
             if page.status_code == 200:
                 self._logger.info("{0} : page : {1}".format(self._name, i))
-                if i not in webpage["page"]:
-                    webpage["page"].append(i)
+                if i not in webpage[section]["page"]:
+                    webpage[section]["page"].append(i)
                 soup = BeautifulSoup(page.text, self._parser)
                 class_div = pagingfirstline = soup.find_all("div", class_="pagingfirstline")
                 if len(class_div) > 0:
@@ -122,22 +127,22 @@
                 if len(categorie) > 2:
                     url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
                     self._logger.info("{0} : {1}".format(self._name, url_paging))
-                    if url_paging not in webpage["page"]:
-                        webpage["page"].append(url_paging)
+                    if url_paging not in webpage[section]["page"]:
+                        webpage[section]["page"].append(url_paging)
                     page = self._request.get(url_paging)
                     if page.status_code == 200:
                         soup = BeautifulSoup(page.text, self._parser)
                         h2 = soup.find_all("h2")
                         for title in h2:
                             href = title.find_all("a")[0].get("href", "/")
-                            if href not in webpage["article"]:
+                            if href not in webpage[section]["article"]:
                                 try:
                                     o = urlparse(href)
                                     o = o._replace(scheme="https").geturl()
                                 except Exception as err:
                                     self._logger.error("parsing error : {0}".format(err))
                                     exit(1)
-                                webpage["article"].append(o)
+                                webpage[section]["article"].append(o)
         else:
             self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
             self._logger.debug("{0} : {1}".format(self._name, page.content))
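The classification rule added to WPExport.getUrlPage() is the heart of the commit: after the scheme is forced to https, the bare site root ("<url>/") is filed under "principal" and every other collected page or article under "publications". Below is a standalone sketch of that rule, assuming a placeholder base URL (example.com) in place of the instance's self._url:

# Hedged sketch of the new section classification in getUrlPage().
# base_url and the looped URLs are placeholders for illustration.
from urllib.parse import urlparse

base_url = "https://example.com"
webpage = {"principal": {"page": [], "article": []},
           "publications": {"page": [], "article": []}}

for i in ["http://example.com/", "http://example.com/archives/p1-10.html"]:
    section = "publications"
    o = urlparse(i)
    o = o._replace(scheme="https")
    i = o.geturl().replace(":///", "://")
    if i == "{0}/".format(base_url):
        section = "principal"
    webpage[section]["page"].append(i)

print(webpage)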


@@ -512,7 +512,6 @@ class WPimport:
        if len(result) > 0:
            for i in result:
                self._logger.debug("{0} : Data for post to delete : {1}".format(self._name, i))
                if i["title"]["rendered"] == title:
                    self._logger.info("{0} : Page {1} already exist and going to delete".format(self._name, title))
                    post_id = i["id"]
                    try:
@@ -524,7 +523,6 @@
                    if page.status_code == 200:
                        result = page.json()
                        self._logger.info("{0} : Post deleted : {1}".format(self._name, title))
                    else:
                        self._logger.error("{0} : Post not deleted due status code : {1}".format(self._name, page.status_code))
                        self._logger.debug("{0} : {1}".format(self._name, page.content))
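The context around these two WPimport hunks shows the delete-if-present logic the commit touches: the WordPress REST API is queried for an item whose rendered title matches, and when one is found it is deleted by id before re-import. A hedged, standalone sketch of that pattern using requests; the host, credentials, the pages endpoint and the title are placeholder assumptions, an approximation of the class's behaviour rather than its exact code:

# Hedged sketch: delete an existing WordPress page whose rendered title matches,
# mirroring the pattern visible in the WPimport context lines. All values are placeholders.
import requests
from requests.auth import HTTPBasicAuth

wordpress = "blog.example.com"                          # placeholder host
basic = HTTPBasicAuth("user", "application-password")   # placeholder credentials
title = "Some imported page"                            # placeholder title

r = requests.get("https://{0}/wp-json/wp/v2/pages".format(wordpress),
                 auth=basic, params={"search": title})
if r.status_code == 200:
    for i in r.json():
        if i["title"]["rendered"] == title:
            post_id = i["id"]
            d = requests.delete("https://{0}/wp-json/wp/v2/pages/{1}".format(wordpress, post_id),
                                auth=basic, params={"force": 1})
            print("delete status :", d.status_code)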