""" |
|
News source extractor: |
|
|
|
This script is designed to extract the content of news articles from various French media sources. |
|
The URLs of these articles are retrieved from the `base_news` table, where articles marked with |
|
a `step` value of '0' are pending extraction. |
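
The relevant part of `base_news` is assumed to look roughly like this
(an illustrative sketch, not the authoritative schema):

    CREATE TABLE base_news (
        id   INT AUTO_INCREMENT PRIMARY KEY,
        url  TEXT NOT NULL,
        step CHAR(1) NOT NULL DEFAULT '0'  -- '0' = pending, '1' = fetched
    );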

To install the necessary packages:
    pip install aiohttp mysql-connector-python

Once extracted, the content of each article is saved locally for further
processing. This separation of content fetching and processing is intentional,
to optimize resource management.

The script operates in batches, processing a defined number of entries
(`NB_BY_STEP`) at a time. After extraction, the `step` value of the processed
articles is updated to '1' to indicate completion.

For performance monitoring, the script prints the ID and URL of each processed
article, along with the average processing time per article.

Author     : Guillaume Eckendoerffer
Date       : 02-10-23
Repository : https://github.com/Eckendoerffer/TorchTrainerFlow/
             https://huggingface.co/datasets/eckendoerffer/news_fr
"""

import asyncio
import os
import time

import aiohttp

from config import DB_CONFIG
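# `DB_CONFIG` is expected to hold the keyword arguments accepted by
# mysql-connector-python's connect(), e.g. (illustrative values only):
#   DB_CONFIG = {"host": "localhost", "user": "news", "password": "...", "database": "news_fr"}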
from utils import create_connection, mysqli_return_number, save_to_file
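# Assumed helper behavior (from this repo's utils): `create_connection` opens a
# mysql-connector-python connection from DB_CONFIG, `mysqli_return_number`
# returns the single integer result of a COUNT query, and `save_to_file`
# writes text to the given path.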

NB_BY_STEP = 20      # number of URLs fetched per concurrent batch
path = os.getcwd()


async def fetch_and_save(url, id_source):
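    """Download `url` and save the raw HTML as sources/html_news/<id_source>.txt,
    printing the per-article fetch time."""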
    time_start_item = time.time()
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url, timeout=aiohttp.ClientTimeout(total=10)) as response:
                byte_content = await response.read()
                try:
                    text_content = byte_content.decode('utf-8')
                except UnicodeDecodeError:
                    # Fall back to Latin-1, still common on French news sites.
                    text_content = byte_content.decode('ISO-8859-1')

        save_to_file(f"{path}/sources/html_news/{id_source}.txt", text_content)
        time_end_item = time.time()
        print(f'{id_source}) {time_end_item - time_start_item:.5f} {url}')
    except aiohttp.TooManyRedirects:
        print(f"Too many redirects for URL: {url}")
    except aiohttp.ClientConnectorError:
        print(f"Failed to connect to URL: {url}")
    except asyncio.TimeoutError:
        # str(e) is empty for timeouts, so report them explicitly.
        print(f"Timeout for URL: {url}")
    except Exception as e:
        print(f"Unexpected error for URL {url}: {e}")


async def main():
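    """Fetch pending articles in batches of NB_BY_STEP until none remain,
    reporting the remaining backlog and the average time per URL after each batch."""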
    connection = create_connection(DB_CONFIG)
    while True:
        time_start = time.time()

        # Pull a random batch of pending articles.
        cursor = connection.cursor()
        cursor.execute(f"SELECT `id`, `url` FROM `base_news` WHERE `step`='0' ORDER BY RAND() LIMIT {NB_BY_STEP}")
        rows = cursor.fetchall()
        cursor.close()
        if not rows:
            break

        # Mark each article as taken, then fetch the whole batch concurrently.
        tasks = []
        for row in rows:
            id_source, url = row
            cursor = connection.cursor()
            cursor.execute("UPDATE `base_news` SET `step`='1' WHERE `id`=%s LIMIT 1", (id_source,))
            cursor.close()
            tasks.append(fetch_and_save(url.strip(), id_source))
        connection.commit()  # mysql-connector-python disables autocommit by default

        await asyncio.gather(*tasks)

        nb_base = mysqli_return_number(connection, "SELECT COUNT(`id`) FROM `base_news` WHERE `step`='0'")
        time_elapsed = time.time() - time_start
        time_per_item = time_elapsed / NB_BY_STEP
        print(f"Remaining: {nb_base} - Time: {time_per_item:.3f}s/url")

    connection.close()


if __name__ == "__main__":
    asyncio.run(main())