# mapper.py
from urllib.request import urlopen
from bs4 import BeautifulSoup
from urllib.parse import urlparse, urljoin
import threading
import queue
import time
import sys
import getopt
from util import *
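
# Maps a web site: starting from the URL passed with -l, the script crawls
# every internal (same-domain) link it can reach, builds a graph of the pages,
# and prints how many pages were found followed by their URLs. print_usage()
# and url_fix() are expected to come from the local util module imported above.
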
class GraphThread(threading.Thread):
    # Worker thread: pulls links from a shared work queue and expands the graph.
    # Note that Graph is not lock-protected, so concurrent workers may race;
    # mapper() below does not use this class.
    def __init__(self, p, graph, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.graph = graph
        self.p = p

    def run(self):
        while True:
            link = self.queue.get()
            # Reuse an already-seen page (live or dead) instead of re-fetching it.
            page = self.graph.findPage(link) or self.graph.findDeadPage(link)
            if not page:
                page = Page(link)
                if not page.dead_page:
                    self.graph.addPage(page)
                else:
                    self.graph.addDeadPage(page)
            if not page.dead_page:
                self.graph.addNext(self.p, page)
                self.graph.buildGraph(page)
            self.queue.task_done()
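
# A minimal sketch (not part of the script's own flow) of how GraphThread
# could be wired up for a threaded crawl; "https://example.com" is a
# placeholder start URL:
#
#     work = queue.Queue()
#     graph = Graph()
#     root = graph.addPage(Page("https://example.com"))
#     for _ in range(4):
#         t = GraphThread(root, graph, work)
#         t.daemon = True              # workers block on queue.get() forever
#         t.start()
#     for link in root.in_links:
#         work.put(link)
#     work.join()                      # wait until every queued link is handled
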
class Graph():
    def __init__(self):
        self.pages = []
        self.dead_pages = []

    def buildGraph(self, p):
        # Recursively crawl from page p, following every same-domain link.
        # The recursion depth grows with the site, so very deep sites may hit
        # Python's recursion limit.
        if not p.visited and not p.dead_page:
            p.visited = True
            if not self.findPage(p.url):
                self.addPage(p)
            for link in p.in_links:
                # Reuse an already-seen page (live or dead) instead of re-fetching it.
                page = self.findPage(link) or self.findDeadPage(link)
                if not page:
                    page = Page(link)
                    if not page.dead_page:
                        self.addPage(page)
                    else:
                        self.addDeadPage(page)
                if not page.dead_page:
                    self.addNext(p, page)
                    self.buildGraph(page)

    def findPage(self, url):
        for p in self.pages:
            if p.url == url:
                return p
        return None

    def findDeadPage(self, url):
        for p in self.dead_pages:
            if p.url == url:
                return p
        return None

    def addPage(self, p):
        if not self.findPage(p.url):
            self.pages.append(p)
        return p

    def addDeadPage(self, p):
        if not self.findDeadPage(p.url):
            self.dead_pages.append(p)
        return p

    def addNext(self, current, next_page):
        current.next.append(next_page)

class Page():
    def __init__(self, url):
        self.url = str(url)
        self.next = []
        self.visited = False
        self.dead_page = False
        self.in_links = set()
        parse_result = urlparse(url)
        self.domain = parse_result.netloc
        self.path = parse_result.path if parse_result.path != '' else '/'
        self.scheme = parse_result.scheme
        try:
            self.doc = urlopen(self.url)
            self.__findInternalLinks()
        except Exception:
            # Pages that cannot be fetched are marked dead and never expanded.
            self.dead_page = True

    def __eq__(self, other):
        return self.url == other.url

    def __hash__(self):
        return hash(self.url)

    def __findInternalLinks(self):
        # Collect the links on this page that point back into the same domain.
        if self.doc:
            content = self.doc.read()
            soup = BeautifulSoup(content, 'html.parser')
            links = []
            for a in soup.find_all('a', href=True):
                # Resolve relative and protocol-relative hrefs against this page's URL.
                link = urljoin(self.url, a['href'])
                if urlparse(link).netloc == self.domain:
                    links.append(link)
                    print(self.url + " has next: " + a['href'])
            self.in_links = set(links)

def mapper(argv):
    url = None
    try:
        opts, args = getopt.getopt(argv, "hl:")
    except getopt.GetoptError:
        print_usage()
        sys.exit(2)  # bail out: opts was never assigned
    for opt, arg in opts:
        if opt == '-h':
            print_usage()
            sys.exit()
        elif opt == '-l':
            url = url_fix(arg)
    if not url:
        print_usage()
        sys.exit(2)  # a start URL (-l) is required
    mp = Page(url)
    graph = Graph()
    graph.buildGraph(mp)
    print(len(graph.pages))
    for p in graph.pages:
        print(p.url)

if __name__ == "__main__":
start_time = time.time()
mapper(sys.argv[1:])
print("execution time: ", time.time() - start_time, "seconds")