async_parser.py
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from datetime import datetime
import asyncio
import aiofiles
from aiocsv import AsyncWriter


async def parse_data():
    ua = UserAgent()
    # Example listing page: https://ek.ua/ua/list/122/apple/
    url = "YOUR LINK"
    headers = {
        "User-Agent": ua.random,
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    }
    cookies = {
        "holder": "1",
    }
    now = datetime.now()
    # cp1251 (Windows Cyrillic) keeps the Ukrainian column headers readable in Excel.
    async with aiofiles.open(f"{now.strftime('%d-%m-%Y %H-%M-%S')}.csv", "w", encoding="cp1251") as ex_file:
        writer = AsyncWriter(ex_file)
        await writer.writerow(["Товар", "Ціна"])  # header row: product, price
        while True:
            # Note: requests.get() is blocking and stalls the event loop;
            # it works here only because this is the sole running coroutine.
            response = requests.get(url=url, headers=headers, cookies=cookies)
            soup = BeautifulSoup(response.text, features="html.parser")
            # Product names and price ranges sit in parallel cells of the listing table.
            names = soup.find_all("td", class_="model-short-info")
            prices = soup.find_all("div", class_="model-price-range")
            for name, price in zip(names, prices):
                await writer.writerow([name.find("a")["title"], price.find("a").text])
            # Follow the "next page" link until there is none left.
            next_page = soup.select_one("a.select + a.ib")
            if next_page:
                url = "https://ek.ua" + next_page["href"]
            else:
                break


async def main():
    await parse_data()


if __name__ == "__main__":
    asyncio.run(main())
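
Because requests is synchronous, every page fetch above blocks the event loop. Below is a minimal sketch of a non-blocking fetch, assuming the aiohttp package is available (it is not a dependency of the original script); fetch_page and demo are illustrative names, and the parsing logic would stay the same as in parse_data.

# Sketch only: non-blocking page fetch with aiohttp (assumed extra dependency).
import asyncio

import aiohttp
from bs4 import BeautifulSoup


async def fetch_page(session: aiohttp.ClientSession, url: str) -> BeautifulSoup:
    # Download one listing page and return it parsed, without blocking the event loop.
    async with session.get(url) as response:
        html = await response.text()
    return BeautifulSoup(html, features="html.parser")


async def demo():
    headers = {"User-Agent": "Mozilla/5.0"}  # placeholder UA; the original script uses fake_useragent
    async with aiohttp.ClientSession(headers=headers, cookies={"holder": "1"}) as session:
        soup = await fetch_page(session, "https://ek.ua/ua/list/122/apple/")
        print(soup.title.text if soup.title else "no title")


if __name__ == "__main__":
    asyncio.run(demo())

With this approach several listing pages could also be fetched concurrently via asyncio.gather, which the blocking requests call cannot do.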