"""
An exceptionally lousy site spider
Ken Kinder <[email protected]>
Updated for newparallel by Min Ragan-Kelley <[email protected]>
This module gives an example of how the task interface to the
IPython controller works. Before running this script start the IPython controller
and some engines using something like::

    ipcluster start -n 4
"""
import sys
import time

import bs4  # noqa this isn't necessary, but it helps throw the dependency error earlier

import ipyparallel as ipp
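

# fetchAndParse is shipped to the engines by the load-balanced view below, so
# it does its imports inside the function body: the engines run in separate
# processes (possibly on other hosts) and do not see this module's globals.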
def fetchAndParse(url, data=None):
    from urllib.parse import urljoin

    import bs4  # noqa
    import requests

    links = []
    r = requests.get(url, data=data)
    r.raise_for_status()
    # Only parse HTML responses; default to '' so a missing header doesn't raise.
    if 'text/html' in r.headers.get('content-type', ''):
        doc = bs4.BeautifulSoup(r.text, "html.parser")
        for node in doc.findAll('a'):
            href = node.get('href', None)
            if href:
                links.append(urljoin(url, href))
    return links
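

# DistributedSpider keeps a simple bookkeeping scheme: linksWorking maps each
# submitted URL to the AsyncResult returned by view.apply, and synchronize()
# polls those results until every outstanding fetch has finished or failed.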
class DistributedSpider:
    # Time to wait between polling for task results.
    pollingDelay = 0.5

    def __init__(self, site):
        self.client = ipp.Client()
        self.view = self.client.load_balanced_view()
        self.mux = self.client[:]

        self.allLinks = []
        self.linksWorking = {}
        self.linksDone = {}

        self.site = site

    def visitLink(self, url):
        if url not in self.allLinks:
            self.allLinks.append(url)
            if url.startswith(self.site):
                print(' ', url)
                self.linksWorking[url] = self.view.apply(fetchAndParse, url)

    def onVisitDone(self, links, url):
        print(url + ':')
        self.linksDone[url] = None
        del self.linksWorking[url]
        for link in links:
            self.visitLink(link)
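
    # run() seeds the crawl with the root site, then polls until no fetch
    # tasks remain outstanding, sleeping pollingDelay seconds between passes.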
    def run(self):
        self.visitLink(self.site)
        while self.linksWorking:
            print(len(self.linksWorking), 'pending...')
            self.synchronize()
            time.sleep(self.pollingDelay)

    def synchronize(self):
        for url, ar in list(self.linksWorking.items()):
            # ar.get(0) waits at most zero seconds for the result, so it raises
            # a TimeoutError whenever the task is not done yet. This provides a
            # simple way of polling without blocking the loop.
            try:
                links = ar.get(0)
            except ipp.error.TimeoutError:
                continue
            except Exception as e:
                self.linksDone[url] = None
                del self.linksWorking[url]
                print(f'{url}: {e}')
            else:
                self.onVisitDone(links, url)


def main():
    if len(sys.argv) > 1:
        site = sys.argv[1]
    else:
        site = input('Enter site to crawl: ')
    distributedSpider = DistributedSpider(site)
    distributedSpider.run()


if __name__ == '__main__':
    main()
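
# Example usage, assuming a cluster has been started as described in the
# module docstring (`ipcluster start -n 4`); the URL below is a placeholder:
#
#     python fetchparse.py https://example.com
#
# Fetch tasks are only submitted for URLs that start with the site prefix
# passed on the command line (or entered at the prompt).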