replace exception for wpexport

parent 269a9e9ccd, commit 110ccc4bb1

lib/WPExport.py (301 changed lines)
@@ -72,20 +72,23 @@ class WPExport:
     def getUrlPage(self, index_thread, max_thread):
         try:
             page = self._request.get(self._url)
-        except Exception as err:
+
+            page_url = []
+            if page.status_code == 200:
+                soup = BeautifulSoup(page.text, self._parser)
+                ul = soup.find_all("ul", id="listsmooth")
+                for anchor in ul[0].find_all("a"):
+                    href = anchor.get('href', '/')
+                    if href != "#":
+                        page_url.append(href)
+            else:
+                self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
+                self._logger.debug("{0} : {1}".format(self._name, page.content))
+        except ConnectionError as err:
             self._logger.error("{0} : Connection error : {1}".format(self._name, err))
             exit(1)
-
-        page_url = []
-        if page.status_code == 200:
-            soup = BeautifulSoup(page.text, self._parser)
-            ul = soup.find_all("ul", id="listsmooth")
-            for anchor in ul[0].find_all("a"):
-                href = anchor.get('href', '/')
-                if href != "#":
-                    page_url.append(href)
-        else:
-            self._logger.error("{0} : URL did not get due status code : {1}".format(self._name, page.status_code))
-            self._logger.debug("{0} : {1}".format(self._name, page.content))
+        except Exception as err:
+            self._logger.error("{0} : Exception error : {1}".format(self._name, err))
+
         webpage = {"principal": {"page":[], "article":[]}, "publications": {"page":[], "article":[]}}
         for i in page_url:
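Note on the new handler layout, since it recurs in every hunk below: the old code caught a broad Exception immediately after the request and exited; the new code moves the whole body inside the try and splits the handler in two, ConnectionError first, a generic Exception fallback after it. A minimal sketch of the pattern, assuming requests and its ConnectionError as the surrounding code suggests (fetch, url and logger are illustrative names, not part of WPExport):

import requests
from requests.exceptions import ConnectionError  # shadows the builtin on purpose

def fetch(url, logger):
    try:
        page = requests.get(url)
        if page.status_code != 200:
            logger.error("URL did not get due status code : {0}".format(page.status_code))
            return []
        return [page.url]  # parsing would happen here
    except ConnectionError as err:
        logger.error("Connection error : {0}".format(err))  # network failure: fatal
        raise SystemExit(1)
    except Exception as err:
        logger.error("Exception error : {0}".format(err))  # anything else: logged only
        return []

The order matters: a leading except Exception would swallow ConnectionError and leave the narrow handler unreachable. Also, requests raises requests.exceptions.ConnectionError, which is not a subclass of the builtin ConnectionError, so an import like the one above (presumably present in WPExport.py, but not shown in this diff) is what lets the narrow handler fire at all.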
@@ -97,63 +100,67 @@ class WPExport:
             section = "principal"
             try:
                 page = self._request.get(i)
-            except Exception as err:
+
+                if page.status_code == 200:
+                    self._logger.info("{0} : page : {1}".format(self._name, i))
+                    if i not in webpage[section]["page"]:
+                        webpage[section]["page"].append(i)
+                    soup = BeautifulSoup(page.text, self._parser)
+                    class_div = soup.find_all("div", class_="pagingfirstline")
+                    if len(class_div) > 0:
+                        pagingfirstline = class_div[0].find_all("a")
+                        if len(pagingfirstline) > 1:
+                            lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
+                            self._logger.debug("{0} : Last page {1}".format(self._name, lastpage))
+
+                            element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
+                            number_page = element_lastpage.split("-")[0].split("p")[1]
+                            number_lastpage = int(number_page) / 10
+
+                            setPageDivided = int(number_lastpage) / max_thread
+                            if setPageDivided > int(setPageDivided):
+                                setPageDivided = setPageDivided + 1
+                            setPagePart = setPageDivided * (index_thread + 1)
+                            firstPagePart = (setPagePart - setPageDivided)
+
+                            self._logger.debug("{0} : Total page : {1}".format(self._name,int(number_lastpage)))
+                            self._logger.debug("{0} : First range : {1}".format(self._name, int(firstPagePart)))
+                            self._logger.debug("{0} : Last range : {1}".format(self._name, int(setPagePart)))
+
+                            for j in range(int(firstPagePart),int(setPagePart)+1):
+                                paging = j * 10
+                                categorie = urlparse(i).path.split("/")
+                                url_paging = "{0}/archives/p{1}-10.html".format(self._url, paging)
+                                if len(categorie) > 2:
+                                    url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
+                                self._logger.info("{0} : {1}".format(self._name, url_paging))
+                                if url_paging not in webpage[section]["page"]:
+                                    webpage[section]["page"].append(url_paging)
+                                    page = self._request.get(url_paging)
+                                    if page.status_code == 200:
+                                        soup = BeautifulSoup(page.text, self._parser)
+                                        h2 = soup.find_all("h2")
+                                        self._logger.debug("{0} : {1} H2 : {2}".format(self._name, url_paging, h2))
+                                        for title in h2:
+                                            self._logger.debug("{0} : {1} a : {2}".format(self._name, url_paging, title.find_all("a")))
+                                            href = title.find_all("a")[0].get("href", "/")
+                                            if href not in webpage[section]["article"]:
+                                                try:
+                                                    o = urlparse(href)
+                                                    o = o._replace(scheme="https").geturl()
+                                                    webpage[section]["article"].append(o)
+                                                except Exception as err:
+                                                    self._logger.error("parsing error : {0}".format(err))
+                                                    exit(1)
+                else:
+                    self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
+                    self._logger.debug("{0} : {1}".format(self._name, page.content))
+            except ConnectionError as err:
                 self._logger.error("{0} : Connection error : {1}".format(self._name, err))
                 exit(1)
-            if page.status_code == 200:
-                self._logger.info("{0} : page : {1}".format(self._name, i))
-                if i not in webpage[section]["page"]:
-                    webpage[section]["page"].append(i)
-                soup = BeautifulSoup(page.text, self._parser)
-                class_div = soup.find_all("div", class_="pagingfirstline")
-                if len(class_div) > 0:
-                    pagingfirstline = class_div[0].find_all("a")
-                    if len(pagingfirstline) > 1:
-                        lastpage = pagingfirstline[len(pagingfirstline)-1].get("href", "/")
-                        self._logger.debug("{0} : Last page {1}".format(self._name, lastpage))
-
-                        element_lastpage = lastpage.split("/")[len(lastpage.split("/"))-1]
-                        number_page = element_lastpage.split("-")[0].split("p")[1]
-                        number_lastpage = int(number_page) / 10
-
-                        setPageDivided = int(number_lastpage) / max_thread
-                        if setPageDivided > int(setPageDivided):
-                            setPageDivided = setPageDivided + 1
-                        setPagePart = setPageDivided * (index_thread + 1)
-                        firstPagePart = (setPagePart - setPageDivided)
-
-                        self._logger.debug("{0} : Total page : {1}".format(self._name,int(number_lastpage)))
-                        self._logger.debug("{0} : First range : {1}".format(self._name, int(firstPagePart)))
-                        self._logger.debug("{0} : Last range : {1}".format(self._name, int(setPagePart)))
-
-                        for j in range(int(firstPagePart),int(setPagePart)+1):
-                            paging = j * 10
-                            categorie = urlparse(i).path.split("/")
-                            url_paging = "{0}/archives/p{1}-10.html".format(self._url, paging)
-                            if len(categorie) > 2:
-                                url_paging = "{0}/archives/{1}/p{2}-10.html".format(self._url, categorie[2], paging)
-                            self._logger.info("{0} : {1}".format(self._name, url_paging))
-                            if url_paging not in webpage[section]["page"]:
-                                webpage[section]["page"].append(url_paging)
-                                page = self._request.get(url_paging)
-                                if page.status_code == 200:
-                                    soup = BeautifulSoup(page.text, self._parser)
-                                    h2 = soup.find_all("h2")
-                                    self._logger.debug("{0} : {1} H2 : {2}".format(self._name, url_paging, h2))
-                                    for title in h2:
-                                        self._logger.debug("{0} : {1} a : {2}".format(self._name, url_paging, title.find_all("a")))
-                                        href = title.find_all("a")[0].get("href", "/")
-                                        if href not in webpage[section]["article"]:
-                                            try:
-                                                o = urlparse(href)
-                                                o = o._replace(scheme="https").geturl()
-                                            except Exception as err:
-                                                self._logger.error("parsing error : {0}".format(err))
-                                                exit(1)
-                                            webpage[section]["article"].append(o)
-            else:
-                self._logger.error("{0} : web didn't get due status code : {1}".format(self._name, page.status_code))
-                self._logger.debug("{0} : {1}".format(self._name, page.content))
+            except Exception as err:
+                self._logger.error("{0} : Exception error : {1}".format(self._name, err))
+                exit(1)
         return webpage
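The arithmetic in this hunk is unchanged by the commit (only re-indented), but worth tracing: it splits the archive paging across threads using float division plus a manual ceiling. A worked example with hypothetical values (250 posts, 4 threads):

number_page = "250"                                 # from a link like .../archives/p250-10.html
number_lastpage = int(number_page) / 10             # 25.0 archive pages of 10 posts each
max_thread = 4

setPageDivided = int(number_lastpage) / max_thread  # 6.25 pages per thread
if setPageDivided > int(setPageDivided):            # fractional part left over?
    setPageDivided = setPageDivided + 1             # -> 7.25, a rough ceiling

for index_thread in range(max_thread):
    setPagePart = setPageDivided * (index_thread + 1)
    firstPagePart = setPagePart - setPageDivided
    print(index_thread, int(firstPagePart), int(setPagePart))
# 0 0 7 / 1 7 14 / 2 14 21 / 3 21 29

Since getUrlPage then iterates range(int(firstPagePart), int(setPagePart)+1), adjacent threads share a boundary page; the "not in" checks only deduplicate within a single thread's webpage dict.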
@@ -183,47 +190,52 @@ class WPExport:
     def _getScriptCss(self, js, css):
         try:
             page = self._request.get(self._url)
-        except Exception as err:
-            self._logger.error("Connection error : {0}".format(err))
-            exit(1)
-        page_url = []
-        if page.status_code == 200:
-            soup = BeautifulSoup(page.text, self._parser)
-            if js is True:
-                script = soup.find_all("script")
-                for anchor in script:
-                    src = anchor.get("src", "/")
-                    if src != "/":
-                        try:
-                            u = urlparse(self._url)
-                            o = urlparse(src)
-                        except Exception as err:
-                            self._logger.error("parsing error : {0}".format(err))
-                            exit(1)
-                        if o.netloc == "":
-                            o = o._replace(netloc=u.netloc)
-                            o = o._replace(scheme=u.scheme)
-                        page_url.append(o.geturl())
-            if css is True:
-                link = soup.find_all("link")
-                for anchor in link:
-                    rel = anchor.get("rel")
-                    if rel[0] == "stylesheet":
-                        href = anchor.get("href", "/")
-                        if href != "/":
-                            try:
-                                u = urlparse(self._url)
-                                o = urlparse(href)
-                            except Exception as err:
-                                self._logger.error("parsing error : {0}".format(err))
-                                exit(1)
-                            if o.netloc == "":
-                                o = o._replace(netloc=u.netloc)
-                                o = o._replace(scheme=u.scheme)
-                            page_url.append(o.geturl())
-        else:
-            self._logger.error("JS or CSS did not get due status code : {0}".format(page.status_code))
-            self._logger.debug(page.content)
+
+            page_url = []
+            if page.status_code == 200:
+                soup = BeautifulSoup(page.text, self._parser)
+                if js is True:
+                    script = soup.find_all("script")
+                    for anchor in script:
+                        src = anchor.get("src", "/")
+                        if src != "/":
+                            try:
+                                u = urlparse(self._url)
+                                o = urlparse(src)
+                                if o.netloc == "":
+                                    o = o._replace(netloc=u.netloc)
+                                    o = o._replace(scheme=u.scheme)
+                                page_url.append(o.geturl())
+                            except Exception as err:
+                                self._logger.error("parsing error : {0}".format(err))
+                                exit(1)
+                if css is True:
+                    link = soup.find_all("link")
+                    for anchor in link:
+                        rel = anchor.get("rel")
+                        if rel[0] == "stylesheet":
+                            href = anchor.get("href", "/")
+                            if href != "/":
+                                try:
+                                    u = urlparse(self._url)
+                                    o = urlparse(href)
+                                    if o.netloc == "":
+                                        o = o._replace(netloc=u.netloc)
+                                        o = o._replace(scheme=u.scheme)
+                                    page_url.append(o.geturl())
+                                except Exception as err:
+                                    self._logger.error("parsing error : {0}".format(err))
+                                    exit(1)
+            else:
+                self._logger.error("JS or CSS did not get due status code : {0}".format(page.status_code))
+                self._logger.debug(page.content)
+        except ConnectionError as err:
+            self._logger.error("Connection error : {0}".format(err))
+            exit(1)
+        except Exception as err:
+            self._logger.error("Exception error : {0}".format(err))
 
         return page_url
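Both branches of _getScriptCss normalize relative asset URLs the same way before collecting them (the commit also moves the netloc fixup and the append inside the inner try). The step in isolation, with a hypothetical blog URL; ParseResult._replace is the documented namedtuple method despite the leading underscore:

from urllib.parse import urlparse

u = urlparse("https://example.canalblog.com")   # self._url (hypothetical)
o = urlparse("/static/theme.css")               # relative href from a <link> tag
if o.netloc == "":                              # no host part: the URL is relative
    o = o._replace(netloc=u.netloc)
    o = o._replace(scheme=u.scheme)
print(o.geturl())                               # https://example.canalblog.com/static/theme.css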
@@ -234,22 +246,25 @@ class WPExport:
         for i in webpage:
             try:
                 page = self._request.get(i)
-            except Exception as err:
+
+                if page.status_code == 200:
+                    soup = BeautifulSoup(page.text, self._parser)
+                    img = soup.find_all("img")
+                    self._logger.info("{0} : image from page: {1} : ".format(self._name,i))
+                    for anchor in img:
+                        src = anchor.get("src", "/")
+                        if src != "/":
+                            if src not in page_img:
+                                self._logger.info("{0} : image: {1} : ".format(self._name, src))
+                                page_img.append(src)
+                else:
+                    self._logger.error("{0} : Image did not get due status code : {1}".format(self._name, page.status_code))
+                    self._logger.debug("{0} : {1}".format(self._name, page.content))
+            except ConnectionError as err:
                 self._logger.error("{0} : Connection error : {1}".format(self._name, err))
                 exit(1)
-            if page.status_code == 200:
-                soup = BeautifulSoup(page.text, self._parser)
-                img = soup.find_all("img")
-                self._logger.info("{0} : image from page: {1} : ".format(self._name,i))
-                for anchor in img:
-                    src = anchor.get("src", "/")
-                    if src != "/":
-                        if src not in page_img:
-                            self._logger.info("{0} : image: {1} : ".format(self._name, src))
-                            page_img.append(src)
-            else:
-                self._logger.error("{0} : Image did not get due status code : {1}".format(self._name, page.status_code))
-                self._logger.debug("{0} : {1}".format(self._name, page.content))
+            except Exception as err:
+                self._logger.error("{0} : Exception error : {1}".format(self._name, err))
 
         return page_img
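A behavioral consequence of the reshuffle in getImg: only ConnectionError still reaches exit(1); any other failure on one page is logged by the generic handler and the loop moves on, so a single malformed page no longer aborts the image inventory. Sketched with illustrative names:

import requests
from requests.exceptions import ConnectionError

session = requests.Session()
page_img = []
for i in ["https://example.canalblog.com/", "https://example.canalblog.com/archives/index.html"]:
    try:
        page = session.get(i)
        # ... collect <img src="..."> values into page_img here ...
    except ConnectionError:
        raise SystemExit(1)        # network down: stop the whole export
    except Exception as err:
        print("Exception error : {0}".format(err))
        # no exit: the bad page is skipped, the loop continues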
@@ -260,31 +275,33 @@ class WPExport:
         for i in range(0, len(webpage)):
             try:
                 o = urlparse(webpage[i])
+
+                path_web = o.path.split("/")
+                filePageWeb = path_web[len(path_web)-1]
+                path_web.pop(len(path_web)-1)
+                dir_page_web = "/".join(path_web)
+                self._mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web))
+                try:
+                    r = self._request.get(webpage[i])
+
+                    if r.status_code == 200:
+                        fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
+                        if len(dir_page_web) > 0 and len(filePageWeb) > 0:
+                            fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
+                        self._logger.info("{0} : {1}/{2} : {3}".format(self._name, i+1, len(webpage), fileDownload))
+                        try:
+                            open(fileDownload, "wb").write(r.content)
+                        except Exception as err:
+                            self._logger.error("file error : {0}".format(err))
+                            exit(1)
+                    else:
+                        self._logger.error("Not download due status code : {0}".format(r.status_code))
+                        self._logger.debug(r.content)
+                except ConnectionError as err:
+                    self._logger.error("{0} : Connection error : {1}".format(self._name, err))
+                    exit(1)
+                except Exception as err:
+                    self._logger.error("{0} Exception error : {1}".format(self._name, err))
             except Exception as err:
                 self._logger.error("parsing error : {0}".format(err))
                 exit(1)
-            path_web = o.path.split("/")
-            filePageWeb = path_web[len(path_web)-1]
-            path_web.pop(len(path_web)-1)
-            dir_page_web = "/".join(path_web)
-            self._mkdirPath("{0}/{1}/{2}".format(backup_dir, o.netloc, dir_page_web))
-            try:
-                r = self._request.get(webpage[i])
-            except ConnectionError as err:
-                self._logger.error("{0} : Connection error : {1}".format(self._name, err))
-                exit(1)
-            except Exception as err:
-                self._logger.error("{0} Exception error : {1}".format(self._name, err))
-            if r.status_code == 200:
-                fileDownload = "{0}/{1}/index.html".format(backup_dir, o.netloc)
-                if len(dir_page_web) > 0 and len(filePageWeb) > 0:
-                    fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
-                self._logger.info("{0} : {1}/{2} : {3}".format(self._name, i+1, len(webpage), fileDownload))
-                try:
-                    open(fileDownload, "wb").write(r.content)
-                except Exception as err:
-                    self._logger.error("file error : {0}".format(err))
-                    exit(1)
-            else:
-                self._logger.error("Not download due status code : {0}".format(r.status_code))
-                self._logger.debug(r.content)
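The path handling at the top of this hunk, unchanged in substance and only re-indented into the outer try, maps each page URL onto the backup tree. Traced with a hypothetical canalblog-style URL:

from urllib.parse import urlparse

backup_dir = "backup"  # illustrative
o = urlparse("https://example.canalblog.com/archives/2023/01/05/39764121.html")

path_web = o.path.split("/")             # ['', 'archives', '2023', '01', '05', '39764121.html']
filePageWeb = path_web[len(path_web)-1]  # '39764121.html'
path_web.pop(len(path_web)-1)
dir_page_web = "/".join(path_web)        # '/archives/2023/01/05'

fileDownload = "{0}/{1}{2}/{3}".format(backup_dir, o.netloc, dir_page_web, filePageWeb)
print(fileDownload)  # backup/example.canalblog.com/archives/2023/01/05/39764121.html

When the URL has no path, dir_page_web and filePageWeb come out empty and the fallback index.html name is used instead.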