-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathpicohttp.py
More file actions
189 lines (149 loc) · 6.23 KB
/
picohttp.py
File metadata and controls
189 lines (149 loc) · 6.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
from picopath import PicoPath, invalid_path
class PicoHTTP:
    """Minimal HTTP front-end that serves a gopher tree over HTTP.

    Files under /gopher/ are returned as text/plain; directories are
    rendered from their `gophermap` file as a bare-bones HTML page with
    clickable links. Designed for MicroPython's asyncio on a Pico W:
    `listener` is the coroutine handed to `asyncio.start_server`.
    """

    # Valid hexadecimal digits for percent-decoding.
    _hexdig = '0123456789ABCDEFabcdef'
    # Lazily-built map from two hex digits (as bytes) to the decoded byte.
    _hextobyte = None

    def __init__(self, ip, http_port):
        """Remember the address/port this server is reachable at.

        ip        -- IP address (informational; the socket is bound externally)
        http_port -- TCP port (informational)
        """
        self._ip = ip
        self._http_port = http_port

    def urldecode(self, string):
        """urldecode('abc%20def') -> b'abc def'.

        Accepts str or bytes; always returns bytes. Invalid escapes
        (e.g. '%zz') are passed through unchanged.
        """
        # Note: strings are encoded as UTF-8. This is only an issue if
        # the input contains unescaped non-ASCII characters, which URIs
        # should not.
        if not string:
            return b''
        if isinstance(string, str):
            string = string.encode('utf-8')
        bits = string.split(b'%')
        if len(bits) == 1:
            # No escapes at all: nothing to decode.
            return string
        res = [bits[0]]
        append = res.append
        # Delay the initialization of the table to not waste memory
        # if the function is never called.
        if self._hextobyte is None:
            self._hextobyte = {(a + b).encode(): bytes([int(a + b, 16)])
                               for a in self._hexdig for b in self._hexdig}
        for item in bits[1:]:
            try:
                append(self._hextobyte[item[:2]])
                append(item[2:])
            except KeyError:
                # Not a valid %XX escape: keep the literal '%'.
                append(b'%')
                append(item)
        return b''.join(res)

    async def _send_error(self, writer, status, error):
        """Send a plain-text HTTP error response and close the connection."""
        print(f"HTTP error: {error}")
        writer.write('HTTP/1.0 {}\r\nContent-type: text/plain\r\n\r\n'.format(status))
        writer.write(error)
        await writer.drain()
        await writer.wait_closed()
        print("Client disconnected")

    def _render_gophermap(self, gmap, selector):
        """Translate gophermap text into a minimal HTML page (returns str)."""
        response = '''
<html><head><title>gophermap</title></head>
<style type='text/css'> pre {display: inline;} </style>
<body>
<span style="font-family:Courier; white-space:pre">
'''
        outln = []
        for line in gmap.splitlines():
            # An empty line is treated as an informational (plain) row.
            cols = line.split('\t') if line else ['i']
            if len(cols) == 1:
                # Translate plain text rows verbatim.
                response_line = f"{line}"
            elif len(cols) == 2:
                ### SUPER EARLY IMPLEMENTATION!
                # - does not handle http URLs properly
                # - does not work with links to external gopherholes
                # [:1] instead of [0] so a row starting with a tab
                # (empty first column) does not raise IndexError.
                ident = cols[0][:1]
                text = cols[0][1:]
                ref = cols[1]
                # Parse HTTP links: strip the "URL:" prefix.
                if ident == 'h' and ref.startswith("URL:"):
                    ref = ref[4:]
                # Fix relative paths; guard against an empty ref column,
                # which used to raise IndexError on ref[0].
                if ref and ref[0] != '/' and selector != "/":
                    ref = '{}/{}'.format(selector, ref)
                response_line = f'<a href="{ref}">{text}</a>'
            else:
                # Rows with 3+ columns (full gopher items) are not
                # supported yet: drop them.
                response_line = ""
            outln.append(response_line)
        outln.append("</span></body></html>")
        return response + '\r\n'.join(outln)

    async def listener(self, reader, writer):
        """asyncio connection handler: answer one HTTP GET and close.

        Only GET is supported; anything else gets a 405. Paths are
        percent-decoded, mapped under /gopher/, and either streamed as
        a plain-text file or rendered from the directory's gophermap.
        """
        print("HTTP client connected")
        request_line = await reader.readline()
        # We are not interested in HTTP request headers, skip them
        while await reader.readline() != b"\r\n":
            pass
        request = request_line.decode().strip()
        print("Request:", request_line)
        if not request.startswith('GET'):
            await self._send_error(writer, '405 Method Not Allowed',
                                   "Only GET requests allowed\r\n")
            return
        parts = request.split()
        # Guard against a malformed request line (e.g. a bare "GET"),
        # which used to raise IndexError; fall back to the root.
        selector = self.urldecode(parts[1]).decode() if len(parts) > 1 else "/"
        print("Selector:", selector)
        if selector:
            if not selector.startswith('/'):
                selector = '/' + selector
            relative = selector[1:]
        else:
            relative = ""
        # this is a quick hack to fix Mac/IOS' requests
        # intercepted by the captive portal
        if relative == "hotspot-detect.html":
            relative = ""
            selector = "/"
        absolute_path = "/gopher/" + relative
        print("Absolute path: {}".format(absolute_path))
        if invalid_path(absolute_path):
            await self._send_error(writer, '404 Not Found',
                                   f"Invalid path: {absolute_path}\r\n")
            return
        elif PicoPath(absolute_path).is_file():
            print('Success: file: {}'.format(absolute_path))
            writer.write('HTTP/1.0 200 OK\r\nContent-type: text/plain\r\n\r\n')
            # I got some memory errors with 64KB blocks,
            # so I am keeping them tiny for now
            block_size = 1 * 1024
            # preallocate a buffer of block_size to load data into
            buf = bytearray(block_size)
            with open(absolute_path, 'rb') as inf:
                bytes_read = inf.readinto(buf)
                while bytes_read == block_size:
                    try:
                        writer.write(buf)
                    except MemoryError as e:
                        # happens (also) when out_buf gets too
                        # large, so let us drain and retry
                        print(f"[w] {e} => will drain and retry")
                        await writer.drain()
                        writer.write(buf)
                    # read next chunk into buf
                    bytes_read = inf.readinto(buf)
                # finally, write the remaining bytes
                # (note that there's a chance we'll get
                # a MemoryError here too...)
                writer.write(buf[:bytes_read])
            await writer.drain()
            await writer.wait_closed()
        else:
            absolute_path = absolute_path + "/gophermap"
            try:
                # Context manager so the handle is closed even if read()
                # fails (the original open()/close() pair leaked it).
                with open(absolute_path) as f:
                    gmap = f.read()
            except OSError:
                # A missing gophermap used to crash the handler without
                # closing the connection; answer 404 instead.
                await self._send_error(writer, '404 Not Found',
                                       f"Invalid path: {absolute_path}\r\n")
                return
            response = self._render_gophermap(gmap, selector)
            # print(response)
            writer.write('HTTP/1.0 200 OK\r\nContent-type: text/html\r\n\r\n')
            writer.write(response)
            writer.write('\r\n')
            await writer.drain()
            await writer.wait_closed()
        print("Client disconnected")