import trio
import httpx
from bs4 import BeautifulSoup
import pandas as pd
# pip install trio httpx beautifulsoup4 pandas lxml

mainurl = 'https://www.financialexpress.com/economy/'
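# slugs of the articles to scrape; each one is appended to mainurl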
news = [
    'finmin-asks-ministries-to-restrict-expenses-within-prescribed-limit/2410766/',
    'uk-inflation-hits-near-30-year-high-pressuring-boe-and-households/2410761/',
    'economic-recovery-yet-to-attain-durability-says-report/2410690/',
    'vagaries-of-weather-drive-near-13-lakh-maha-farmers-to-crop-insurance-scheme/2410030/'
]
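
# rows collected by the workers; one list per scraped article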
allin = []


async def get_soup(content):
    # parse the downloaded HTML with BeautifulSoup's lxml parser
    return BeautifulSoup(content, 'lxml')


async def worker(receiver):
    # each worker pulls (client, slug) pairs from the channel until it is closed
    async with receiver:
        async for client, new in receiver:
            r = await client.get(mainurl + new)
            soup = await get_soup(r.text)
            # every <p> in the article body except the last one
            prs = [x.text for x in soup.select(
                '.entry-content > p:not(:last-child)')]
            title = soup.select_one('.wp-block-post-title').text
            author = soup.select_one('div.author-link a').text
            # 'YYYY-MM-DDT...' -> ['YYYY', 'MM', 'DD']
            publish = soup.select_one(
                '[itemprop="article:published_time"]')['content'].split('T')[0].split('-')
            target = [title, author, *publish, prs]
            allin.append(target)


async def main():
    async with httpx.AsyncClient(timeout=None) as client, trio.open_nursery() as nurse:
        # zero-capacity memory channel: send() waits until a worker is ready to receive
        sender, receiver = trio.open_memory_channel(0)
        async with receiver:
            # start five concurrent workers, each with its own clone of the receiver
            for _ in range(5):
                nurse.start_soon(worker, receiver.clone())
            async with sender:
                for new in news:
                    await sender.send([client, new])


if __name__ == "__main__":
    trio.run(main)
    # once the crawl has finished, turn the collected rows into a DataFrame
    df = pd.DataFrame(
        allin, columns=['Title', 'Author', 'Year', 'Month', 'Day', 'Paragraphs'])
    print(df)
    df.to_csv('data.csv', index=False)