main.py
import argparse
import itertools
import string
import time
import urllib.request

from bs4 import BeautifulSoup

# Urban Dictionary's browse URL; pages are keyed by first character.
API = "https://www.urbandictionary.com/browse.php?character={0}"
MAX_ATTEMPTS = 10  # retries per page before giving up on a letter
DELAY = 10  # base back-off in seconds, multiplied by the attempt count
# The site appears to key its non-letter ("#") bucket as "*" in the URL.
NUMBER_SIGN = "*"


# https://stackoverflow.com/a/554580/306149
class NoRedirection(urllib.request.HTTPErrorProcessor):
    # Pass responses through unchanged: redirects are not followed and
    # non-2xx responses do not raise, so callers can inspect status codes.
    def http_response(self, request, response):
        return response

    https_response = http_response
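

# Pull the linked word out of every <li> in the browse page's word list.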
def extract_page_entries(html):
    soup = BeautifulSoup(html, "html.parser")
    # find word list element, this might change in the future
    ul = soup.find_all("ul", class_="mt-3 columns-2 md:columns-3")[0]
    for li in ul.find_all("li"):
        a = li.find("a").string
        if a:
            yield a
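

# Return the absolute URL of the next browse page, or None on the last page.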
def get_next(html):
    soup = BeautifulSoup(html, "html.parser")
    next_link = soup.find("a", {"rel": "next"})
    if next_link:
        href = next_link["href"]
        return "https://www.urbandictionary.com" + href
    return None
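

# Yield one list of entries per browse page for the given letter, retrying
# failed requests with a linearly growing delay (DELAY * attempt seconds).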
def extract_letter_entries(letter):
    # The non-letter bucket is labelled "#" locally, but the site's URL uses
    # "*" for it (hence NUMBER_SIGN above); map it before building the URL.
    url = API.format(NUMBER_SIGN if letter == "#" else letter)
    attempt = 0
    while url:
        print(url)
        response = urllib.request.urlopen(url)
        code = response.getcode()
        if code == 200:
            content = response.read()
            yield list(extract_page_entries(content))
            url = get_next(content)
            attempt = 0
        else:
            print(f"Trying again, expected response code: 200, got {code}")
            attempt += 1
            if attempt > MAX_ATTEMPTS:
                break
            time.sleep(DELAY * attempt)
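

# Install a global opener that keeps cookies but never raises on non-2xx
# responses and never follows redirects; extract_letter_entries inspects
# the status code itself.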
opener = urllib.request.build_opener(
    NoRedirection, urllib.request.HTTPCookieProcessor()
)
urllib.request.install_opener(opener)

# Full set of buckets: A-Z plus "#" for non-letter entries. The CLI below
# replaces this list with whatever the user asks for.
letters = list(string.ascii_uppercase) + ["#"]
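

# Download every entry for one letter and write them to the letter's file.
# Without --remove-dead, entries already on disk are kept and merged in.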
def download_letter_entries(letter, file, remove_dead):
    file = file.format(letter)
    # Fetch all pages up front; each page yields a list of words.
    entries = itertools.chain.from_iterable(list(extract_letter_entries(letter)))
    if remove_dead:
        # Start over with only the entries that still exist on the site.
        all_data = sorted(set(entries), key=str.casefold)
    else:
        # Merge with any previously downloaded entries. The file may not
        # exist yet on a first run, in which case there is nothing to merge.
        try:
            with open(file, "r", encoding="utf-8") as f:
                old_data = [line.strip() for line in f]
        except FileNotFoundError:
            old_data = []
        all_data = sorted(set(old_data).union(entries), key=str.casefold)
    with open(file, "w", encoding="utf-8") as f:
        f.write("\n".join(all_data) + "\n")
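

# Drive the per-letter download for every requested letter.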
def download_entries(letters, file, remove_dead):
    for letter in letters:
        print(f"======={letter}=======")
        download_letter_entries(letter, file, remove_dead)


parser = argparse.ArgumentParser(description="Download Urban Dictionary words.")
parser.add_argument(
    "letters", metavar="L", type=str, nargs="*", help="Letters to download."
)
parser.add_argument(
    "--ifile",
    dest="ifile",
    help="Input file name: a list of letters, one per line.",
    default="input.list",
)
parser.add_argument(
    "--out",
    dest="out",
    help="Output file name. May be a format string, e.g. data/{0}.data.",
    default="data/{0}.data",
)
parser.add_argument(
    "--remove-dead",
    action="store_true",
    help="Remove entries that no longer exist on the site.",
)

args = parser.parse_args()

# Letters given on the command line win; otherwise read them from the input file.
letters = [letter.upper() for letter in args.letters]
if not letters:
    with open(args.ifile, "r") as ifile:
        for row in ifile:
            letters.append(row.strip())

download_entries(letters, args.out, args.remove_dead)
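
# Example invocations (a sketch; the file name "main.py" and paths are
# assumptions, not fixed by the script):
#   python main.py A B               # download the A and B word lists
#   python main.py                   # read letters from input.list
#   python main.py --remove-dead A   # rebuild data/A.data with live entries only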