
Commit 985ee19

fixes #562
1 parent 954233c commit 985ee19

File tree

5 files changed: +139 -33 lines changed

fastcore/_modidx.py

Lines changed: 3 additions & 0 deletions

@@ -377,8 +377,11 @@
 'fastcore.net.Request.summary': ('net.html#request.summary', 'fastcore/net.py'),
 'fastcore.net._socket_det': ('net.html#_socket_det', 'fastcore/net.py'),
 'fastcore.net.do_request': ('net.html#do_request', 'fastcore/net.py'),
+'fastcore.net.http_response': ('net.html#http_response', 'fastcore/net.py'),
+'fastcore.net.recv_once': ('net.html#recv_once', 'fastcore/net.py'),
 'fastcore.net.start_client': ('net.html#start_client', 'fastcore/net.py'),
 'fastcore.net.start_server': ('net.html#start_server', 'fastcore/net.py'),
+'fastcore.net.tobytes': ('net.html#tobytes', 'fastcore/net.py'),
 'fastcore.net.urlcheck': ('net.html#urlcheck', 'fastcore/net.py'),
 'fastcore.net.urlclean': ('net.html#urlclean', 'fastcore/net.py'),
 'fastcore.net.urldest': ('net.html#urldest', 'fastcore/net.py'),

fastcore/net.py

Lines changed: 52 additions & 27 deletions

@@ -3,8 +3,9 @@
 # %% auto 0
 __all__ = ['url_default_headers', 'ExceptionsHTTP', 'urlquote', 'urlwrap', 'HTTP4xxClientError', 'HTTP5xxServerError',
            'urlopener', 'urlopen', 'urlread', 'urljson', 'urlcheck', 'urlclean', 'urlretrieve', 'urldest', 'urlsave',
-           'urlvalid', 'urlrequest', 'urlsend', 'do_request', 'start_server', 'start_client', 'HTTP400BadRequestError',
-           'HTTP401UnauthorizedError', 'HTTP402PaymentRequiredError', 'HTTP403ForbiddenError', 'HTTP404NotFoundError',
+           'urlvalid', 'urlrequest', 'urlsend', 'do_request', 'start_server', 'start_client', 'tobytes',
+           'http_response', 'recv_once', 'HTTP400BadRequestError', 'HTTP401UnauthorizedError',
+           'HTTP402PaymentRequiredError', 'HTTP403ForbiddenError', 'HTTP404NotFoundError',
            'HTTP405MethodNotAllowedError', 'HTTP406NotAcceptableError', 'HTTP407ProxyAuthRequiredError',
            'HTTP408RequestTimeoutError', 'HTTP409ConflictError', 'HTTP410GoneError', 'HTTP411LengthRequiredError',
            'HTTP412PreconditionFailedError', 'HTTP413PayloadTooLargeError', 'HTTP414URITooLongError',
@@ -27,7 +28,7 @@
 from urllib.parse import urlencode,urlparse,urlunparse
 from http.client import InvalidURL

-# %% ../nbs/03b_net.ipynb 4
+# %% ../nbs/03b_net.ipynb 5
 url_default_headers = {
     "Accept":
         "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
@@ -41,7 +42,7 @@
     "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36"
 }

-# %% ../nbs/03b_net.ipynb 5
+# %% ../nbs/03b_net.ipynb 6
 def urlquote(url):
     "Update url's path with `urllib.parse.quote`"
     subdelims = "!$&'()*+,;="
@@ -52,31 +53,31 @@ def urlquote(url):
     for i in range(3,6): p[i] = urllib.parse.quote(p[i], safe=safe)
     return urlunparse(p)

-# %% ../nbs/03b_net.ipynb 8
+# %% ../nbs/03b_net.ipynb 9
 def urlwrap(url, data=None, headers=None):
     "Wrap `url` in a urllib `Request` with `urlquote`"
     return url if isinstance(url,Request) else Request(urlquote(url), data=data, headers=headers or {})

-# %% ../nbs/03b_net.ipynb 9
+# %% ../nbs/03b_net.ipynb 10
 ExceptionsHTTP = {}

-# %% ../nbs/03b_net.ipynb 10
+# %% ../nbs/03b_net.ipynb 11
 class HTTP4xxClientError(HTTPError):
     "Base class for client exceptions (code 4xx) from `url*` functions"
     pass

-# %% ../nbs/03b_net.ipynb 11
+# %% ../nbs/03b_net.ipynb 12
 class HTTP5xxServerError(HTTPError):
     "Base class for server exceptions (code 5xx) from `url*` functions"
     pass

-# %% ../nbs/03b_net.ipynb 14
+# %% ../nbs/03b_net.ipynb 15
 def urlopener():
     _opener = urllib.request.build_opener()
     _opener.addheaders = list(url_default_headers.items())
     return _opener

-# %% ../nbs/03b_net.ipynb 15
+# %% ../nbs/03b_net.ipynb 16
 # install_opener(_opener)

 _httperrors = (
@@ -95,10 +96,10 @@ def _init(self, url, hdrs, fp, msg=msg, code=code): HTTP4xxClientError.__init__(
     cls = type(nm, (HTTP4xxClientError,), {'__init__':_init})
     globals()[nm] = ExceptionsHTTP[code] = cls

-# %% ../nbs/03b_net.ipynb 16
+# %% ../nbs/03b_net.ipynb 17
 _all_ = ['HTTP400BadRequestError', 'HTTP401UnauthorizedError', 'HTTP402PaymentRequiredError', 'HTTP403ForbiddenError', 'HTTP404NotFoundError', 'HTTP405MethodNotAllowedError', 'HTTP406NotAcceptableError', 'HTTP407ProxyAuthRequiredError', 'HTTP408RequestTimeoutError', 'HTTP409ConflictError', 'HTTP410GoneError', 'HTTP411LengthRequiredError', 'HTTP412PreconditionFailedError', 'HTTP413PayloadTooLargeError', 'HTTP414URITooLongError', 'HTTP415UnsupportedMediaTypeError', 'HTTP416RangeNotSatisfiableError', 'HTTP417ExpectationFailedError', 'HTTP418AmAteapotError', 'HTTP421MisdirectedRequestError', 'HTTP422UnprocessableEntityError', 'HTTP423LockedError', 'HTTP424FailedDependencyError', 'HTTP425TooEarlyError', 'HTTP426UpgradeRequiredError', 'HTTP428PreconditionRequiredError', 'HTTP429TooManyRequestsError', 'HTTP431HeaderFieldsTooLargeError', 'HTTP451LegalReasonsError']

-# %% ../nbs/03b_net.ipynb 17
+# %% ../nbs/03b_net.ipynb 18
 def urlopen(url, data=None, headers=None, timeout=None, **kwargs):
     "Like `urllib.request.urlopen`, but first `urlwrap` the `url`, and encode `data`"
     if kwargs and not data: data=kwargs
@@ -110,7 +111,7 @@ def urlopen(url, data=None, headers=None, timeout=None, **kwargs):
         e.msg += f"\n====Error Body====\n{e.read().decode(errors='ignore')}"
         raise

-# %% ../nbs/03b_net.ipynb 20
+# %% ../nbs/03b_net.ipynb 21
 def urlread(url, data=None, headers=None, decode=True, return_json=False, return_headers=False, timeout=None, **kwargs):
     "Retrieve `url`, using `data` dict or `kwargs` to `POST` if present"
     try:
@@ -123,13 +124,13 @@ def urlread(url, data=None, headers=None, decode=True, return_json=False, return
     if return_json: res = loads(res)
     return (res,dict(hdrs)) if return_headers else res

-# %% ../nbs/03b_net.ipynb 21
+# %% ../nbs/03b_net.ipynb 22
 def urljson(url, data=None, timeout=None):
     "Retrieve `url` and decode json"
     res = urlread(url, data=data, timeout=timeout)
     return json.loads(res) if res else {}

-# %% ../nbs/03b_net.ipynb 23
+# %% ../nbs/03b_net.ipynb 24
 def urlcheck(url, headers=None, timeout=10):
     if not url: return True
     try:
@@ -138,12 +139,12 @@ def urlcheck(url, headers=None, timeout=10):
     except socket.timeout: return False
     except InvalidURL: return False

-# %% ../nbs/03b_net.ipynb 24
+# %% ../nbs/03b_net.ipynb 25
 def urlclean(url):
     "Remove fragment, params, and querystring from `url` if present"
     return urlunparse(urlparse(str(url))[:3]+('','',''))

-# %% ../nbs/03b_net.ipynb 26
+# %% ../nbs/03b_net.ipynb 27
 def urlretrieve(url, filename=None, reporthook=None, data=None, headers=None, timeout=None):
     "Same as `urllib.request.urlretrieve` but also works with `Request` objects"
     with contextlib.closing(urlopen(url, data, headers=headers, timeout=timeout)) as fp:
@@ -169,43 +170,43 @@ def urlretrieve(url, filename=None, reporthook=None, data=None, headers=None, ti
         raise ContentTooShortError(f"retrieval incomplete: got only {read} out of {size} bytes", headers)
     return filename,headers

-# %% ../nbs/03b_net.ipynb 27
+# %% ../nbs/03b_net.ipynb 28
 def urldest(url, dest=None):
     name = urlclean(Path(url).name)
     if dest is None: dest = name
     dest = Path(dest)
     return dest/name if dest.is_dir() else dest

-# %% ../nbs/03b_net.ipynb 28
+# %% ../nbs/03b_net.ipynb 29
 def urlsave(url, dest=None, reporthook=None, headers=None, timeout=None):
     "Retrieve `url` and save based on its name"
     dest = urldest(url, dest)
     dest.parent.mkdir(parents=True, exist_ok=True)
     nm,msg = urlretrieve(url, dest, reporthook, headers=headers, timeout=timeout)
     return nm

-# %% ../nbs/03b_net.ipynb 30
+# %% ../nbs/03b_net.ipynb 31
 def urlvalid(x):
     "Test if `x` is a valid URL"
     return all (getattrs(urlparse(str(x)), 'scheme', 'netloc'))

-# %% ../nbs/03b_net.ipynb 32
+# %% ../nbs/03b_net.ipynb 33
 def urlrequest(url, verb, headers=None, route=None, query=None, data=None, json_data=True):
     "`Request` for `url` with optional route params replaced by `route`, plus `query` string, and post `data`"
     if route: url = url.format(**route)
     if query: url += '?' + urlencode(query)
     if isinstance(data,dict): data = (json.dumps if json_data else urlencode)(data).encode('ascii')
     return Request(url, headers=headers or {}, data=data or None, method=verb.upper())

-# %% ../nbs/03b_net.ipynb 35
+# %% ../nbs/03b_net.ipynb 36
 @patch
 def summary(self:Request, skip=None)->dict:
     "Summary containing full_url, headers, method, and data, removing `skip` from headers"
     res = L('full_url','method','data').map_dict(partial(getattr,self))
     res['headers'] = {k:v for k,v in self.headers.items() if k not in listify(skip)}
     return res

-# %% ../nbs/03b_net.ipynb 37
+# %% ../nbs/03b_net.ipynb 38
 def urlsend(url, verb, headers=None, route=None, query=None, data=None, json_data=True,
             return_json=True, return_headers=False, debug=None, timeout=None):
     "Send request with `urlrequest`, converting result to json if `return_json`"
@@ -217,7 +218,7 @@ def urlsend(url, verb, headers=None, route=None, query=None, data=None, json_dat

     return urlread(req, return_json=return_json, return_headers=return_headers, timeout=timeout)

-# %% ../nbs/03b_net.ipynb 38
+# %% ../nbs/03b_net.ipynb 39
 def do_request(url, post=False, headers=None, **data):
     "Call GET or json-encoded POST on `url`, depending on `post`"
     if data:
@@ -227,13 +228,13 @@ def do_request(url, post=False, headers=None, **data):
         data = None
     return urljson(Request(url, headers=headers, data=data or None))

-# %% ../nbs/03b_net.ipynb 39
+# %% ../nbs/03b_net.ipynb 41
 def _socket_det(port,host,dgram):
     if isinstance(port,int): family,addr = socket.AF_INET,(host or socket.gethostname(),port)
     else: family,addr = socket.AF_UNIX,port
     return family,addr,(socket.SOCK_STREAM,socket.SOCK_DGRAM)[dgram]

-# %% ../nbs/03b_net.ipynb 40
+# %% ../nbs/03b_net.ipynb 42
 def start_server(port, host=None, dgram=False, reuse_addr=True, n_queue=None):
     "Create a `socket` server on `port`, with optional `host`, of type `dgram`"
     listen_args = [n_queue] if n_queue else []
@@ -247,10 +248,34 @@ def start_server(port, host=None, dgram=False, reuse_addr=True, n_queue=None):
     s.listen(*listen_args)
     return s

-# %% ../nbs/03b_net.ipynb 42
+# %% ../nbs/03b_net.ipynb 44
 def start_client(port, host=None, dgram=False):
     "Create a `socket` client on `port`, with optional `host`, of type `dgram`"
     family,addr,typ = _socket_det(port,host,dgram)
     s = socket.socket(family, typ)
     s.connect(addr)
     return s
+
+# %% ../nbs/03b_net.ipynb 45
+def tobytes(s:str)->bytes:
+    "Convert `s` into HTTP-ready bytes format"
+    return s.replace('\n', '\r\n').encode('utf-8')
+
+# %% ../nbs/03b_net.ipynb 47
+def http_response(body=None, status=200, hdrs=None, **kwargs):
+    "Create an HTTP-ready response, adding `kwargs` to `hdrs`"
+    kwargs = {k.replace('_','-'):v for k,v in kwargs.items()}
+    hdrs = hdrs or {}
+    hdrs = {**hdrs, **kwargs}
+    status_line = f"HTTP/1.1 {status} OK\n"
+    if body: hdrs['Content-Length'] = len(body)
+    headers = ''.join([f"{k}: {v}\n" for k, v in hdrs.items()])
+    return tobytes(status_line+headers+"\n" + str(body))
+
+# %% ../nbs/03b_net.ipynb 49
+@threaded
+def recv_once(d:dict, host:str='localhost', port:int=8000):
+    "Spawn a thread to receive a single HTTP request and store in `d['r']`"
+    conn,addr = start_server(port,host).accept()
+    d['r'] = conn.recv(1024)
+    conn.sendall(http_response(d['r']))
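
For context, here is a minimal round-trip sketch of how the new helpers could fit together. It is not part of the commit: the port number, the sleep-based wait for the listener, and the request text are assumptions, and it relies on fastcore's `@threaded` returning an already-started thread.

# Hypothetical usage of the helpers added above (not from the commit)
import time
from fastcore.net import recv_once, start_client, tobytes

d = {}
recv_once(d, port=8001)      # spawns a server thread that stores the raw request in d['r']
time.sleep(0.5)              # crude wait for the listener to bind (timing is an assumption)
client = start_client(8001, 'localhost')
client.sendall(tobytes('GET / HTTP/1.1\nHost: localhost\n\n'))
resp = client.recv(1024)     # server replies with http_response(d['r']), echoing the request
client.close()
assert d['r'].startswith(b'GET / HTTP/1.1')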

fastcore/xml.py

Lines changed: 2 additions & 1 deletion

@@ -19,7 +19,8 @@

 # %% ../nbs/11_xml.ipynb 4
 def _attrmap(o):
-    o = dict(htmlClass='class', cls='class', klass='class', fr='for', htmlFor='for').get(o, o)
+    o = dict(htmlClass='class', cls='class', _class='class', klass='class',
+             _for='for', fr='for', htmlFor='for').get(o, o)
     return o.lstrip('_').replace('_', '-')

 # %% ../nbs/11_xml.ipynb 5
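
For reference, a quick illustration of what the widened alias table does. This is a hypothetical snippet, not part of the commit, and it pokes at the private `_attrmap` helper directly.

# _class and _for are the new aliases; cls/klass/fr/htmlFor already existed
from fastcore.xml import _attrmap

for name in ('_class', 'cls', 'klass', '_for', 'fr', 'htmlFor', 'data_foo'):
    print(f'{name} -> {_attrmap(name)}')
# _class, cls, klass  -> 'class'
# _for, fr, htmlFor   -> 'for'
# data_foo            -> 'data-foo'  (leading '_' stripped, remaining '_' become '-')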

nbs/03b_net.ipynb

Lines changed: 78 additions & 0 deletions

@@ -49,6 +49,13 @@
 "> Network, HTTP, and URL functions"
 ]
 },
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## URLs"
+]
+},
 {
 "cell_type": "code",
 "execution_count": null,
@@ -642,6 +649,13 @@
 " return urljson(Request(url, headers=headers, data=data or None))"
 ]
 },
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Basic client/server"
+]
+},
 {
 "cell_type": "code",
 "execution_count": null,
@@ -698,6 +712,70 @@
 " return s"
 ]
 },
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#|export\n",
+"def tobytes(s:str)->bytes:\n",
+" \"Convert `s` into HTTP-ready bytes format\"\n",
+" return s.replace('\\n', '\\r\\n').encode('utf-8')"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"test_eq(tobytes('foo\\nbar'), b'foo\\r\\nbar')"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#|export\n",
+"def http_response(body=None, status=200, hdrs=None, **kwargs):\n",
+" \"Create an HTTP-ready response, adding `kwargs` to `hdrs`\"\n",
+" kwargs = {k.replace('_','-'):v for k,v in kwargs.items()}\n",
+" hdrs = hdrs or {}\n",
+" hdrs = {**hdrs, **kwargs}\n",
+" status_line = f\"HTTP/1.1 {status} OK\\n\"\n",
+" if body: hdrs['Content-Length'] = len(body)\n",
+" headers = ''.join([f\"{k}: {v}\\n\" for k, v in hdrs.items()])\n",
+" return tobytes(status_line+headers+\"\\n\" + str(body))"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"exp = b'HTTP/1.1 200 OK\\r\\nUser-Agent: me\\r\\nContent-Length: 4\\r\\n\\r\\nbody'\n",
+"test_eq(http_response('body', 200, User_Agent='me'), exp)"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"#|export\n",
+"@threaded\n",
+"def recv_once(d:dict, host:str='localhost', port:int=8000):\n",
+" \"Spawn a thread to receive a single HTTP request and store in `d['r']`\"\n",
+" conn,addr = start_server(port,host).accept()\n",
+" d['r'] = conn.recv(1024)\n",
+" conn.sendall(http_response(d['r']))"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},

nbs/11_xml.ipynb

Lines changed: 4 additions & 5 deletions

@@ -56,7 +56,8 @@
 "source": [
 "#| export\n",
 "def _attrmap(o):\n",
-" o = dict(htmlClass='class', cls='class', klass='class', fr='for', htmlFor='for').get(o, o)\n",
+" o = dict(htmlClass='class', cls='class', _class='class', klass='class',\n",
+" _for='for', fr='for', htmlFor='for').get(o, o)\n",
 " return o.lstrip('_').replace('_', '-')"
 ]
 },
@@ -139,7 +140,7 @@
 " (['head', (['title', ('Some page',), {}],), {}],\n",
 " ['body',\n",
 " (['div',\n",
-" (['p', ('Some text',), {}],\n",
+" ('Some text',\n",
 " ['input', (), {'name': 'me'}],\n",
 " ['img', (), {'src': 'filename'}]),\n",
 " {'class': 'myclass'}],),\n",
@@ -151,7 +152,7 @@
 "source": [
 "samp = Html(\n",
 " Head(Title('Some page')),\n",
-" Body(Div(P('Some text'), Input(name='me'), Img(src=\"filename\"), klass='myclass'))\n",
+" Body(Div('Some text', Input(name='me'), Img(src=\"filename\"), klass='myclass'))\n",
 ")\n",
 "pprint(samp)"
@@ -261,9 +262,7 @@
 " </head>\n",
 " <body>\n",
 " <div class=\"myclass\">\n",
-" <p>\n",
 "Some text\n",
-" </p>\n",
 " <input name=\"me\">\n",
 " <img src=\"filename\">\n",
 " </div>\n",
