add urlparse
commit 3c76cab9a7
parent d21af4f60a
@@ -3,6 +3,7 @@
 # Python 3
 # Extraction des liens d'une page web
 from bs4 import BeautifulSoup
+from urllib.parse import urlparse
 import requests
 
 URL = "www.clarissariviere.com"
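Note (not part of the commit): urlparse splits a URL string into named components, and the rest of the diff relies on its .path attribute. A minimal sketch of that behaviour, using a made-up article URL on the blog's host:

from urllib.parse import urlparse

# Made-up example URL; only its shape matters here.
o = urlparse("https://www.clarissariviere.com/archives/2016/index.html")
print(o.scheme)  # https
print(o.netloc)  # www.clarissariviere.com
print(o.path)    # /archives/2016/index.html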
@@ -21,6 +22,8 @@ if page.status_code == 200:
 webpage = []
 for i in page_url:
     page = requests.get(i)
+    o = urlparse(i)
+    print(o.path)
     if page.status_code == 200:
         print("page : {0}".format(i))
         soup = BeautifulSoup(page.text, 'html.parser')
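Note (not part of the commit): the two added lines only print the path component of each crawled URL, presumably as a debugging aid. A sketch of the output for a hypothetical entry of page_url:

from urllib.parse import urlparse

i = "https://www.clarissariviere.com/archives/p20-10.html"  # hypothetical page_url entry
o = urlparse(i)
print(o.path)  # /archives/p20-10.html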
@@ -34,10 +37,10 @@ for i in page_url:
             number_lastpage = int(number_page) / 10
             for j in range(1,int(number_lastpage)):
                 paging = j * 10
-                categorie = i.split("/")
+                categorie = urlparse(i).path.split("/")
                 url_paging = "https://{0}/archives/p{1}-10.html".format(URL, paging)
-                if len(categorie) != 4:
-                    url_paging = "https://{0}/archives/{1}/p{2}-10.html".format(URL, categorie[4], paging)
+                if len(categorie) > 2:
+                    url_paging = "https://{0}/archives/{1}/p{2}-10.html".format(URL, categorie[2], paging)
                 print(url_paging)
                 page = requests.get(url_paging)
                 if page.status_code == 200:
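Note (not part of the commit): the switch from i.split("/") to urlparse(i).path.split("/") is why the index moves from 4 to 2 and the length test changes. Splitting the full URL keeps the scheme and host in the resulting list, while splitting only the path does not. A sketch with a made-up category URL:

from urllib.parse import urlparse

link = "https://www.clarissariviere.com/archives/maths/index.html"  # made-up category URL
print(link.split("/"))
# ['https:', '', 'www.clarissariviere.com', 'archives', 'maths', 'index.html']  -> category at [4]
print(urlparse(link).path.split("/"))
# ['', 'archives', 'maths', 'index.html']  -> category at [2], and len() > 2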