Compare commits
19 Commits
| SHA1 |
|---|
| 9ab484da8d |
| c275f10fb0 |
| fa1854052b |
| 0e5afe60a7 |
| d2cfb949f8 |
| bce1643135 |
| 0c94783852 |
| 6f78a48e57 |
| dbab60c7dc |
| 41e192f903 |
| dce2c2dfa5 |
| eaec1ba9d4 |
| 3059f785c2 |
| 279a9f2786 |
| 963f83ae81 |
| 7b154e3a1d |
| e5109204aa |
| 2279e4b0b6 |
| 2e21040196 |
@@ -55,7 +55,7 @@ def remove(index, number, args, basic, logger, ssl_wordpress):

 def download(name_thread, max_thread, url, logger, parser, directory, html, img, ssl_canalblog, revert, tmp):
     exportWp = WPExport(name="Thread-{0}".format(int(name_thread) + 1), url=url, logger=logger, parser=parser, directory=directory, ssl_canalblog=ssl_canalblog)
-    if not revert:
+    if revert is False:
         exportWp.getUrlPage(name_thread, max_thread)
     for i in ["article", "page"]:
         for j in ["publications", "principal"]:
@@ -170,7 +170,7 @@ if __name__ == '__main__':
     import_parser.add_argument("--no-create", help="No create post", dest="create", default="store_false", action="store_true")
     import_parser.add_argument("--no-update", help="No update post", dest="update", default="store_false", action="store_true")
     import_parser.add_argument("--no-image", help="No image add or update", dest="image", default="store_false", action="store_true")
-    import_parser.add_argument("--author-album", dest=author, help="Define author for page album", default="")
+    import_parser.add_argument("--author", dest="author", help="Define author", default="")
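A minimal standalone sketch of how flags like the ones above resolve into argparse attributes. Flag and dest names mirror the hunk; the boolean defaults and sample command line are assumptions made only so the snippet runs on its own:

```python
# Illustrative sketch only (not the project's CLI wiring): store_true flags
# with explicit dest names, plus the --author option added in this hunk.
import argparse

parser = argparse.ArgumentParser(prog="import")
parser.add_argument("--no-create", dest="create", action="store_true", default=False)
parser.add_argument("--no-update", dest="update", action="store_true", default=False)
parser.add_argument("--no-image", dest="image", action="store_true", default=False)
parser.add_argument("--author", dest="author", default="")

args = parser.parse_args(["--no-image", "--author", "jane"])
print(args.image, args.author)  # True jane
```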
@@ -259,7 +259,7 @@ if __name__ == '__main__':
     basic = HTTPBasicAuth(args.user, password)
     if args.command == "import":
         wordpress = args.wordpress.split(",")
-        importWp = WPimport(basic=basic, wordpress="", logger=logger, parser=args.parser, ssl_wordpress=ssl_wordpress, author=args.author)
+        importWp = WPimport(basic=basic, wordpress="", logger=logger, parser=args.parser, ssl_wordpress=ssl_wordpress, author=args.author, ssl_canalblog=ssl_canalblog)
         if len(args.file) > 0:
             for i in wordpress:
                 importWp.setUrl(i)
@@ -58,14 +58,26 @@ class WPExport:

     # Download HTML

     def downloadHTML(self, first, second):
-        self._downloadPage(webpage[first][second], self._dir)
+        try:
+            with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
+                webpage = json.loads(file.read())
+                self._downloadPage(webpage[first][second], self._dir)
+        except Exception as ex:
+            self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))

     # Download Image

     def downloadImg(self, first, second):
-        page_src = self._getImg(webpage[first][second])
-        o = urlparse(self._url)
-        self._downloadPage(page_src, "{0}/{1}/{2}".format(self._dir, o.path, "img"))
+        try:
+            with open("{0}/{1}.json".format(self._tmp, self._name)) as file:
+                webpage = json.loads(file.read())
+                page_src = self._getImg(webpage[first][second])
+                o = urlparse(self._url)
+                self._downloadPage(page_src, "{0}/{1}/{2}".format(self._dir, o.path, "img"))
+        except Exception as ex:
+            self._logger.error("{0} : Read file json from tmp : {1}".format(self._name, ex))

     # Get URL
@@ -8,7 +8,7 @@ from requests.packages.urllib3.util.retry import Retry

 class WPimport:
     # Constructor
-    def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False, tmp="/tmp/import_export_canablog", author=""):
+    def __init__(self, name="Thread-0", basic=None, wordpress="", logger=None, parser="html.parser", ssl_wordpress=True, no_create=False, no_update=False, no_image=False, tmp="/tmp/import_export_canablog", author="", ssl_canalblog=True):
         self._name = name
         self._basic = basic
         self._wordpress = wordpress
@@ -20,7 +20,7 @@ class WPimport:
         if ssl_wordpress is False:
             self._protocol = "http"
         self._request = requests.Session()
+        self._ssl_canalblog = ssl_canalblog
         retries = Retry(connect=10, read=10, redirect=5,
                         status_forcelist=[429, 500, 502, 503, 504], backoff_factor=2)
@@ -139,6 +139,43 @@ class WPimport:
             self._logger.error("{0} : Exception error for get author : {1}".format(self._name, err))
         return author

+    def _getInfoAlbum(self, link):
+        if self._ssl_canalblog:
+            link = link.replace("http", "https").replace("httpss", "https")
+        self._logger.info("{0} : Info album : {1}".format(self._name, link))
+        link_o = urlparse(link)
+        if len(link_o.netloc) > 0:
+            self._logger.info("{0} : get album info from web : {1}".format(self._name, link_o))
+            try:
+                response = self._request.get(link)
+                if response.status_code == 200:
+                    self._logger.info("{0} : get content info from web : {1}".format(self._name, link))
+                    page_img = response.content
+            except ConnectionError as err:
+                self._logger.error("{0} : Connection error for get album info : {1}".format(self._name, err))
+                exit(1)
+            except Exception as err:
+                self._logger.error("{0} : Exception error for get album info : {1}".format(self._name, err))
+                exit(1)
+        else:
+            self._logger.info("{0} : get album info from file : {1}".format(self._name, link_o))
+            if os.path.exists("{0}/..{1}".format(self._directory, link_o)):
+                page_img = open("{0}/..{1}".format(self._directory, link_o), "r")
+        soup = BeautifulSoup(page_img, self._parser)
+        paragraphs = soup.find("div", class_="albumbody").find_all("p")
+        for paragraph in paragraphs:
+            self._logger.info("{0} get paragraph : {1}".format(self._name, paragraph))
+            split_paragraph = str(paragraph).split("<br>")
+            self._logger.info("{0} length paragraph splitted : {1}".format(self._name, len(split_paragraph)))
+            if len(split_paragraph) == 1:
+                split_paragraph = str(paragraph).split("<br/>")
+                self._logger.info("{0} get paragraph splitted : {1}".format(self._name, split_paragraph))
+            if len(split_paragraph) > 1:
+                if len(split_paragraph[1].split(":")) > 1:
+                    author = split_paragraph[1].split(":")[1].replace(" ", "").lower()
+        return author

     def _addOrUpdateAlbum(self, soup):
         self._logger.info("{0} : Add/Update Album".format(self._name))
         albumbody = soup.find("div", class_="albumbody")
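A minimal standalone illustration of the parsing idea in the new _getInfoAlbum method: split an albumbody paragraph on the br tag and read the value after the colon in the second chunk. The sample markup and the "Auteur" label are invented for this example; the diff only shows the split logic itself:

```python
# Standalone sketch of the albumbody author extraction shown above.
from bs4 import BeautifulSoup

html = '<div class="albumbody"><p>Album: Vacances<br/>Auteur : Jane Doe<br/>2023</p></div>'
soup = BeautifulSoup(html, "html.parser")

author = ""
for paragraph in soup.find("div", class_="albumbody").find_all("p"):
    chunks = str(paragraph).split("<br/>")
    if len(chunks) > 1 and len(chunks[1].split(":")) > 1:
        author = chunks[1].split(":")[1].replace(" ", "").lower()

print(author)  # janedoe
```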
@@ -195,8 +232,19 @@ class WPimport:
         data = {"title":albumtitle, "content":content_html, "status":"publish"}
         if len(self._author) > 0:
             author = self._getAuthor(self._author)
-            if author != 0:
-                data = {"title":albumtitle, "content":content_html, "status":"publish", "author":author}
+        else:
+            link_a = albumbody.find_all("a")
+            for i in link_a:
+                if re.search(r"/albums/", i.get("href", "/")):
+                    href_a = i.get("href", "/")
+                    break
+            author = self._getInfoAlbum(href_a)
+            self._logger.info("{0} : author : {1}".format(self._name, author))
+            author = self._getAuthor(author)
+        data = {"title":albumtitle, "content":content_html, "status":"publish"}
+
+        if author != 0:
+            data = {"title":albumtitle, "content":content_html, "status":"publish", "author":author}
         self._logger.debug("{0} : data for album page : {1}".format(self._name, data))
         for index in range(1,10):
             params = {"search": albumtitle, "per_page":100, "page": index}
@@ -257,7 +305,7 @@ class WPimport:
                     exit(1)
                 except Exception as err:
                     self._logger.error("{0} : Exception error for update page : {1}".format(self._name, err))
-            if page.status_code == 400:
+            elif page.status_code == 400:
                 self._logger.error("{0} : Connection for update post unauthorized : {1}".format(self._name, page.status_code))
                 self._logger.debug("{0} : {1}".format(self._name, page.content))
                 break
@@ -413,6 +461,21 @@ class WPimport:
                         self._logger.error("{0} : Exception error for post media featured : {1}".format(self._name, err))
                 else:
                     self._logger.info("{0} : No media found for {1}".format(self._name, h2))
+                    try:
+                        page = self._request.get(img_src.replace("_q", ""))
+                        if page.status_code == 200:
+                            self._logger.info("{0} : Get image for media featured : {1}".format(self._name, img_src.replace("_q", "")))
+                            media = self._addOrUpdateMedia(img_src.replace("_q", ""), page)
+                            if media["id"] != "":
+                                self._addOrUpdateFeaturedMedia(soup)
+                        else:
+                            self._logger.error("{0} : Connection error with status code for get image for featured media : {1}".format(self._name, page.status_code))
+                            self._logger.debug("{0} : {1}".format(self._name, page.content))
+                    except ConnectionError as err:
+                        self._logger.error("{0} : Connection error for get to image for featured media : {1}".format(self._name, err))
+                        exit(1)
+                    except Exception as err:
+                        self._logger.error("{0} : Exception error for get to image media featured : {1}".format(self._name, err))
             else:
                 self._logger.error("{0} : Connection error with status code for search featured media: {1}".format(self._name, page.status_code))
                 self._logger.debug("{0} : {1}".format(self._name, page.content))
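The added block fetches the image with the "_q" suffix stripped from its URL and hands it to _addOrUpdateMedia / _addOrUpdateFeaturedMedia, whose bodies are not part of this diff. A rough standalone sketch of what such an upload plus featured-media assignment looks like against the standard WordPress REST API; host, credentials, post id and image URL below are placeholders, not values from the project:

```python
# Standalone sketch: upload an image to wp/v2/media, then set it as the
# featured image of an existing post. Not the project's helper code.
import requests
from requests.auth import HTTPBasicAuth

basic = HTTPBasicAuth("user", "application-password")
wordpress = "blog.example.invalid"

img = requests.get("https://storage.example.invalid/albums/photo123.jpg")
media = requests.post(
    "https://{0}/wp-json/wp/v2/media".format(wordpress),
    auth=basic,
    headers={"Content-Disposition": 'attachment; filename="photo123.jpg"',
             "Content-Type": "image/jpeg"},
    data=img.content,
).json()

# Attach the uploaded media as the featured image of post 42.
requests.post(
    "https://{0}/wp-json/wp/v2/posts/42".format(wordpress),
    auth=basic,
    json={"featured_media": media["id"]},
)
```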
@@ -772,7 +835,7 @@ class WPimport:
                             listelement[i].append(k["id"])
                     else:
                         break
-                if page.status_code == 400:
+                elif page.status_code == 400:
                     self._logger.error("{0} : {1} not found due status code : {2}".format(self._name, i, page.status_code))
                     self._logger.debug("{0} : {1}".format(self._name, page.content))
                     break
@@ -806,7 +869,9 @@ class WPimport:
                 self._logger.error("{0} : Exception error for post {1} : {2}".format(self._name, i, err))

         title = articletitle[0].text
         author = articleacreator[0].text.lower()
+        if len(self._author) > 0:
+            author = self._author
         body = articlebody[0].find_all("p")
         bodyhtml = "<p>"
         for i in body:
@@ -820,12 +885,15 @@ class WPimport:
                 bodyhtml = bodyhtml.replace(i["old_href"], o.path)
                 bodyhtml = bodyhtml.replace(i["old_src"], o.path)
         hour = articledate[0].text
-        time = dateheader[0].text.split(" ")
+        time = ""
+        if len(dateheader) > 0:
+            time = dateheader[0].text.split(" ")
         self._logger.debug("{0} : Title post : |{1}|".format(self._name, title))
         title = self._removeSpace(title)
         self._logger.debug("{0} : Rendered Title post : |{1}|".format(self._name, title))

-        data = {"title":title, "content":bodyhtml, "status":"publish", "date": "{0}-{1}-{2}T{3}:00".format(time[2],month[time[1]],time[0], hour), "tags": listelement["tags"], "categories": listelement["categories"]}
+        data = {"title":title, "content":bodyhtml, "status":"publish", "tags": listelement["tags"], "categories": listelement["categories"]}
+        if len(dateheader) > 0:
+            data = {"title":title, "content":bodyhtml, "status":"publish", "date": "{0}-{1}-{2}T{3}:00".format(time[2],month[time[1]],time[0], hour), "tags": listelement["tags"], "categories": listelement["categories"]}
         self._logger.debug("{0} : Data for post : |{1}| : {2}" .format(self._name, title, data))

         data["author"] = self._getAuthor(author)
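A standalone sketch of the payload shape built in the hunk above, where the "date" key is only included when a date header was found on the source page. The French month lookup and the sample values are assumptions made for illustration; WordPress expects an ISO-8601 timestamp for "date":

```python
# Standalone sketch of the conditional "date" field; not the project's code.
month = {"janvier": "01", "mars": "03"}   # assumed French month lookup
time = ["12", "mars", "2023"]             # e.g. "12 mars 2023" split on spaces
hour = "14:30"

data = {"title": "Sample", "content": "<p>…</p>", "status": "publish",
        "tags": [3], "categories": [7]}
if time:
    data["date"] = "{0}-{1}-{2}T{3}:00".format(time[2], month[time[1]], time[0], hour)
print(data["date"])  # 2023-03-12T14:30:00
```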