include httpserver.repy

"""
<Program Name>
  viewproxy.repy

<Started>
  June 3rd, 2011

<Author>
  Evan Brynne

<Purpose>
  This program creates a proxy server on the current vessel at a user-specified
  port.  It derives some of its functionality from YoonSung Hong's
  proxyserver.repy, reusing his page-caching design, which returns a locally
  cached version of a page if the user enables that setting.
  This file relies on a slightly modified version of httpserver.repy, though
  the changes may later be merged with the trunk version.

<Exception>
  None
"""
def clearCookies(post_map):
  # Dispatched from the '/viewpoints/clearcookies' endpoint; the decoded POST
  # map is not needed here.  Drop any cookies saved for previously loaded pages.
  mycontext['cookies'] = {}
  return "Cookies cleared"

def useCaching(cache):
  # Dispatched from the '/viewpoints/setcache' endpoint; proxyServer passes the
  # decoded POST fields as this argument.
  mycontext['usecache'] = cache
def handlePing(srcip, srcport, mess, ch):
  # UDP ping reply handler; only used by the commented-out recvmess/sendmess
  # path in pingTest below.
  print "MESS: %s" % mess
  mycontext['ping_list'].append(getruntime() - mycontext['ping_start'])
  mycontext['ping_running'] = False
def pingTest(post_map):
  myIp = getmyip()
  mycontext['ping_list'] = []
  numTests = int(post_map['numTests'])
  print "NumTests: %s" % post_map['numTests']
  print "Page: %s" % post_map['page']
  try:
    for i in range(0, numTests):
      mycontext['ping_start'] = getruntime()
      print "Start: %s" % mycontext['ping_start']
      # Extract the host from a url of the form "http://www.host.tld/"; the
      # trailing slash is dropped with [:-1] below.
      url = post_map['page'].split("www.")[1]
      #ip = gethostbyname_ex(url)[2][0]
      print url[:-1]
      sockObj = timeout_openconn(url[:-1], 80, timeout=10)
      # Minimal HTTP/1.0 request; the extra blank line terminates the headers.
      sockObj.send("GET %s HTTP/1.0\r\n\r\n" % post_map['page'])
      headersstr = ""
      while not headersstr.endswith("\r\n\r\n"):
        try:
          headersstr += sockObj.recv(1)
        except Exception, e:
          if str(e) == "Socket closed":
            break
          else:
            raise
      sockObj.close()
      print "HeadSTR: %s" % headersstr
      # Record how long it took to get the response headers back.
      mycontext['ping_list'].append(getruntime() - mycontext['ping_start'])
      #recvmess(myIp, 63138, handlePing)
      #print "After RecvMess"
      #
      #sendmess(url[:-1], 63138, "Ping!", myIp, 63138)
      #print "Sent Message"
      #mycontext['ping_running'] = True
      #while mycontext['ping_running']:
      #  print "Looping!"
      #  sleep(.2)
  except Exception, e:
    print "Error pinging: %s" % str(e)
  return "%s" % mycontext['ping_list']
def loadPage(post_map):
  print post_map
  url = post_map['page']
  headers = {'User-Agent': post_map['useragent']}
  if url in mycontext['cookies']:
    headers['Cookie'] = mycontext['cookies'][url]
  try:
    loaded = httpretrieve_open(url, httpheaders=headers)
  except Exception, e:
    print "Error loading page: %s" % str(e)
    return str(e)
  # Remember any cookies the server set for this url so they can be replayed
  # on later requests.
  if 'Set-Cookie' in loaded.headers:
    if url in mycontext['cookies']:
      mycontext['cookies'][url] = list(set(mycontext['cookies'][url] + loaded.headers['Set-Cookie']))
    else:
      mycontext['cookies'][url] = loaded.headers['Set-Cookie']
  return loaded.read()
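
# Illustrative example (hypothetical values): loadPage expects a decoded POST
# map containing the target url and the browser's user-agent string, e.g.
#   {'page': 'http://www.example.com/', 'useragent': 'Mozilla/5.0 (...)'}
# Cookies previously recorded for that url are sent back via the Cookie header.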
funcs = {'/page' : loadPage, '/viewpoints/setcache' : useCaching, \
'/viewpoints/clearcookies' : clearCookies, '/latency' : pingTest}
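
# proxyServer (below) looks up the request path in this table and, for POST
# requests, passes the decoded form fields to the matching handler; unknown
# paths fall through to a simple debug response.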
def proxyServer(request):
  """
  <Purpose>
    Receive a request from the viewpoints server and, after decoding the posted
    data, load the content from the url or from the local cache.

  <Exception>
    None

  <Return>
    Dictionary that contains header information and the HTML string
    of the web page content.
  """
  print "Request: %s" % request

  # Interpret the client request
  httppath = request['path']
  query = request['querystr']

  # Bind path and query
  completeUrl = httppath
  if query:
    completeUrl += "?" + query

  # Check for posted data
  posted_data = None
  params = None
  if request['verb'] == 'POST':
    posted_data = request['datastream'].read()
    params = urllib_unquote_parameters(posted_data)

  htmlresponse = "None"
  print "CUrl: %s" % completeUrl
  if completeUrl in funcs:
    htmlresponse = funcs[completeUrl](params)
  else:
    htmlresponse = 'Header Text: %s <br /> Map: %s' % (request, params)
  #if mycontext['']
  #cachedata = getCache(completeUrl)

  # Header + content sent to the client (web browser)
  #print "HTML: %s" % htmlresponse
  res = {}
  res["version"] = "1.1"
  res["statuscode"] = 200
  res["statusmsg"] = "OK"
  res["headers"] = {}
  res["message"] = htmlresponse
  return res
def getCache(url):
  """
  <Purpose>
    Retrieves cached data for a specified url.  Speeds up content load times
    if the user enables this functionality.

  <Exception>
    None

  <Return>
    String of data if a cache entry for the url exists.  Otherwise, returns None.
  """
  # The cache dictionary stores page data keyed by url.
  cachelistbyurl = mycontext['cache']
  if url in cachelistbyurl:
    return cachelistbyurl[url]
  else:
    return None
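
# Hypothetical companion to getCache (not in the original file): a minimal
# sketch of how a fetched page could be stored in the cache when
# mycontext['usecache'] is enabled.  The name setCache is illustrative only.
def setCache(url, data):
  if mycontext['usecache']:
    mycontext['cache'][url] = data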
if callfunc == 'initialize':
  """
  <Purpose>
    Start the proxy server at the user's ip and a configurable port.

  <Arguments>
    port:
      The port the server should listen on.  If no port is provided it
      defaults to port 8008.

  <Exceptions>
    None

  <Returns>
    None.
  """
  # Proxy host/ip and port number
  port = 8008
  ip = getmyip()
  if len(callargs) > 1:
    raise Exception("Too many call arguments")
  elif len(callargs) == 1:
    port = int(callargs[0])

  # Cache setup
  mycontext['usecache'] = False
  mycontext['cache'] = dict([])

  # Server/port information
  mycontext['ip'] = ip
  mycontext['port'] = port
  mycontext['cookies'] = {}

  # Build proxy server
  viewProxy = httpserver_registercallback((ip, port), proxyServer)

  # Report
  print "##### Proxy running on " + ip + ":" + str(port)