-
-
Notifications
You must be signed in to change notification settings - Fork 5
Expand file tree
/
Copy pathRouterScrape.py
More file actions
37 lines (26 loc) · 1.21 KB
/
RouterScrape.py
File metadata and controls
37 lines (26 loc) · 1.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
import requests
from bs4 import BeautifulSoup
# Base URL of the default-credentials database scraped below (note trailing slash).
ROUTER_URL = "http://www.routerpasswords.com/"
def get_router_names():
    """Scrape all router vendor names from routerpasswords.com.

    Returns:
        list[str]: the text of every ``<option>`` element on the page
        (the entries of the vendor drop-down).

    Raises:
        requests.HTTPError: if the site responds with a 4xx/5xx status.
    """
    response = requests.get(url=ROUTER_URL)
    # Fail loudly on HTTP errors instead of silently parsing an error page.
    response.raise_for_status()
    page = BeautifulSoup(response.text, "lxml")
    # find_all() is the current BeautifulSoup API; findAll is the legacy alias.
    return [option.text for option in page.find_all("option")]
def download_router_info(name, path):
    """Search routerpasswords.com for a vendor and append results to a file.

    Fetches the vendor's default-credentials table and appends every table
    cell to *path* as comma-separated values, five values per line.

    Args:
        name: vendor name (case-insensitive; upper-cased for the request URL).
        path: text file the results are appended to.

    Raises:
        requests.HTTPError: if the site responds with a 4xx/5xx status.
    """
    name = name.upper()
    # BUG FIX: ROUTER_URL already ends with "/"; the original prepended
    # another slash, producing ".com//router-password/NAME" in the URL.
    response = requests.post(url=ROUTER_URL + "router-password/" + name)
    response.raise_for_status()
    page = BeautifulSoup(response.text, "lxml")
    # Strip embedded line breaks once so each cell stays a single CSV field
    # (the original scrubbed the same characters twice).
    cells = [td.text.replace("\n", "").replace("\r", "")
             for td in page.find_all("td")]
    with open(path, "a", encoding="utf-8", newline="") as pass_write:
        for n, info in enumerate(cells, start=1):
            pass_write.write(info)
            # Every 5th value ends a row; otherwise separate with a comma.
            pass_write.write("\n" if n % 5 == 0 else ",")