eckendoerffer committed
Commit 9a6d6eb
1 Parent(s): 5302b25

Delete extract_news/4_extract_news_url.py

Files changed (1)
  1. extract_news/4_extract_news_url.py +0 -128
extract_news/4_extract_news_url.py DELETED
@@ -1,128 +0,0 @@
- # -*- coding: utf-8 -*-
-
- """
- Random Line Fetcher for Large Datasets
-
- Extracts and stores relevant links from local French online news articles.
-
- pip install beautifulsoup4 mysql-connector-python colorama
-
- Author : Guillaume Eckendoerffer
- Date : 28-09-23
- Repository : https://github.com/Eckendoerffer/TorchTrainerFlow/
-              https://huggingface.co/datasets/eckendoerffer/news_fr
- """
-
- import os
- from bs4 import BeautifulSoup
- import mysql.connector
- import hashlib
- from colorama import Fore, init
-
- # Database configuration
- db_config = {
-     "host": "[host]",
-     "user": "[user]",
-     "password": "[passwd]",
-     "database": "[database]"
- }
-
- conn = mysql.connector.connect(**db_config)
- cursor = conn.cursor()
- query = "SELECT `key_media` FROM `base_news` WHERE `key_media` != ''"
- cursor.execute(query)
- keys = cursor.fetchall()
- formatted_keys = "|".join([key[0] for key in keys]) + "|"
-
- init(autoreset=True)
-
- def get_dom_path(url):
-     from urllib.parse import urlparse
-     parsed_url = urlparse(url)
-     return f"{parsed_url.scheme}://{parsed_url.netloc}"
-
- def get_html_content(file_path):
-     with open(file_path, 'r', encoding='utf8', errors='ignore') as file:
-         return file.read()
-
- def mysqli_return_number(conn, query, params=None):
-     cursor = conn.cursor()
-     cursor.execute(query)
-     result = cursor.fetchone()
-     cursor.close()
-     return result[0] if result else 0
-
- def process_news_source(id_source, url_source, id_media):
-     global formatted_keys
-
-     dom = get_dom_path(url_source)
-     cursor.execute(f"UPDATE `base_news` SET `link`='1' WHERE `id`='{id_source}' LIMIT 1")
-     conn.commit()
-
-     file_path = f"sources/html_news/{id_source}.txt"
-     if os.path.exists(file_path):
-         html_content = get_html_content(file_path)
-     else:
-         return
-
-     print(f"{id_source} {url_source} {id_media} ({len(html_content)})")
-
-     soup = BeautifulSoup(html_content, 'html.parser')
-     nb_add = 0
-     for link in soup.find_all('a'):
-         url = link.get('href')
-         if url is None:
-             continue
-         url = url.split("#")[0]
-         url = url.split("?")[0]
-
-         if not url:
-             continue
-         if not "//" in url:
-             url = f"{dom}/{url}" if url[0] != '/' else f"{dom}{url}"
-         elif "http" not in url:
-             url = 'https:' + url
-         if not url.startswith(("http://", "https://")) or url.count(' ') or url.count('%') or url.count('\''):
-             continue
-
-         key = hashlib.md5(url.encode()).hexdigest()
-         nb_base_news = formatted_keys.count(f'{key}|')
-
-         if url.startswith(dom):
-             if nb_base_news:
-                 #print(Fore.YELLOW + url)
-                 continue
-             elif (
-                 url.count("-") > 6 and
-                 not any(substring in url for substring in ['replay', 'video', 'login', '/inloggen', '?redirect', '.jpg', '.png', 'mailto'])
-             ):
-                 print(Fore.GREEN + url)
-                 insert_query = f"INSERT INTO `base_news` (`id`, `key_media`, `media`, `url`, `step`) VALUES (NULL, '{key}', '{id_media}', '{url}', '0');"
-                 cursor.execute(insert_query)
-                 conn.commit()
-                 formatted_keys = f'{formatted_keys}{key}|'
-                 nb_add += 1
-         else:
-             #print(Fore.RED + url)
-             continue
-
- def process():
-     global formatted_keys
-
-     cursor = conn.cursor()
-     query = ("SELECT `id`, `url`, `media` FROM `base_news` WHERE `link`='0' AND `step` > 0 ORDER BY Rand() LIMIT 1000")
-     cursor.execute(query)
-     rows = cursor.fetchall()
-
-     if not rows:
-         print('No unprocessed news source found.')
-
-     for row in rows:
-         id_source, url_source, id_media = row
-         process_news_source(id_source, url_source, id_media)
-
- while True:
-     process()
-
- conn.close()
-
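
For anyone restoring this script: the href handling above resolves relative and protocol-relative links by hand (the `//` and `http` branches). The standard library's `urllib.parse.urljoin` covers the same cases in one call; a minimal sketch, not part of this commit (the `normalize_url` helper is illustrative, not from the repository):

    from urllib.parse import urljoin

    def normalize_url(base_url, href):
        # Resolve relative, root-relative and protocol-relative hrefs,
        # then strip the fragment and query string as the deleted script does.
        url = urljoin(base_url, href)
        url = url.split("#")[0].split("?")[0]
        return url if url.startswith(("http://", "https://")) else None

    # Relative, root-relative and protocol-relative links all resolve:
    print(normalize_url("https://example.org/news/article.html", "b.html"))
    # -> https://example.org/news/b.html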
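
Duplicate detection keeps every MD5 key in one growing pipe-delimited string (`formatted_keys`) and scans it with `str.count`, which is linear in the number of stored keys per lookup. A Python set gives the same behaviour with constant-time membership; a sketch against the same startup query:

    # keys = cursor.fetchall() at startup, as in the deleted file
    seen_keys = {row[0] for row in keys}

    key = hashlib.md5(url.encode()).hexdigest()
    if key not in seen_keys:
        seen_keys.add(key)  # replaces formatted_keys = f'{formatted_keys}{key}|'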
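
The UPDATE and INSERT statements interpolate `id_source`, `key`, `id_media` and `url` directly into the SQL text, which is presumably why URLs containing a quote are skipped. mysql-connector-python accepts `%s` placeholders, so parameterized equivalents of the two statements would be:

    cursor.execute(
        "UPDATE `base_news` SET `link`='1' WHERE `id`=%s LIMIT 1",
        (id_source,),
    )
    cursor.execute(
        "INSERT INTO `base_news` (`id`, `key_media`, `media`, `url`, `step`) "
        "VALUES (NULL, %s, %s, %s, '0')",
        (key, id_media, url),
    )
    conn.commit()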