# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/03b_net.ipynb (unless otherwise specified).

__all__ = ['urlquote', 'urlwrap', 'ExceptionsHTTP', 'HTTP4xxClientError', 'HTTP5xxServerError',
           'HTTP400BadRequestError', 'HTTP401UnauthorizedError', 'HTTP402PaymentRequiredError', 'HTTP403ForbiddenError',
           'HTTP404NotFoundError', 'HTTP405MethodNotAllowedError', 'HTTP406NotAcceptableError',
           'HTTP407ProxyAuthRequiredError', 'HTTP408RequestTimeoutError', 'HTTP409ConflictError', 'HTTP410GoneError',
           'HTTP411LengthRequiredError', 'HTTP412PreconditionFailedError', 'HTTP413PayloadTooLargeError',
           'HTTP414URITooLongError', 'HTTP415UnsupportedMediaTypeError', 'HTTP416RangeNotSatisfiableError',
           'HTTP417ExpectationFailedError', 'HTTP418AmAteapotError', 'HTTP421MisdirectedRequestError',
           'HTTP422UnprocessableEntityError', 'HTTP423LockedError', 'HTTP424FailedDependencyError',
           'HTTP425TooEarlyError', 'HTTP426UpgradeRequiredError', 'HTTP428PreconditionRequiredError',
           'HTTP429TooManyRequestsError', 'HTTP431HeaderFieldsTooLargeError', 'HTTP451LegalReasonsError', 'urlopen',
           'urlread', 'urljson', 'urlcheck', 'urlclean', 'urlsave', 'urlvalid', 'urlrequest', 'urlsend', 'do_request',
           'start_server', 'start_client']

# Cell
from .imports import *
from .foundation import *
from .basics import *
from .xtras import *
from .parallel import *
from functools import wraps

import json,urllib
# import mimetypes,pickle,random,json,subprocess,shlex,bz2,gzip,zipfile,tarfile
import socket,urllib.request,http
from contextlib import contextmanager,ExitStack
from urllib.request import Request
from urllib.error import HTTPError,URLError
from urllib.parse import urlencode,urlparse,urlunparse
from http.client import InvalidURL

# Cell
_ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36'

# Cell
def urlquote(url):
    "Update url's path with `urllib.parse.quote`"
    subdelims = "!$&'()*+,;="
    gendelims = ":?#[]@"
    safe = subdelims+gendelims+"%/"
    p = list(urlparse(url))
    p[2] = urllib.parse.quote(p[2], safe=safe)
    for i in range(3,6): p[i] = urllib.parse.quote(p[i], safe=safe)
    return urlunparse(p)
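
# Usage sketch (illustrative; example.com is a placeholder host): unsafe
# characters in the path and query are percent-encoded, delimiters are kept:
#   urlquote('http://example.com/a b?x=y z')  # -> 'http://example.com/a%20b?x=y%20z'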

# Cell
def urlwrap(url, data=None, headers=None):
    "Wrap `url` in a urllib `Request` with `urlquote`"
    return url if isinstance(url,Request) else Request(urlquote(url), data=data, headers=headers or {})
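
# Usage sketch (illustrative): plain strings are quoted and wrapped; an existing
# `Request` passes through untouched:
#   urlwrap('http://example.com/a b').full_url  # -> 'http://example.com/a%20b'
#   req = Request('http://example.com'); urlwrap(req) is req  # -> True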

# Cell
ExceptionsHTTP = {}

# Cell
class HTTP4xxClientError(HTTPError):
    "Base class for client exceptions (code 4xx) from `url*` functions"
    pass

# Cell
class HTTP5xxServerError(HTTPError):
    "Base class for server exceptions (code 5xx) from `url*` functions"
    pass

# Cell
_opener = urllib.request.build_opener()
_opener.addheaders = [('User-agent', _ua)]

_httperrors = (
    (400,'Bad Request'),(401,'Unauthorized'),(402,'Payment Required'),(403,'Forbidden'),(404,'Not Found'),
    (405,'Method Not Allowed'),(406,'Not Acceptable'),(407,'Proxy Auth Required'),(408,'Request Timeout'),
    (409,'Conflict'),(410,'Gone'),(411,'Length Required'),(412,'Precondition Failed'),(413,'Payload Too Large'),
    (414,'URI Too Long'),(415,'Unsupported Media Type'),(416,'Range Not Satisfiable'),(417,'Expectation Failed'),
    (418,'Am A teapot'),(421,'Misdirected Request'),(422,'Unprocessable Entity'),(423,'Locked'),(424,'Failed Dependency'),
    (425,'Too Early'),(426,'Upgrade Required'),(428,'Precondition Required'),(429,'Too Many Requests'),
    (431,'Header Fields Too Large'),(451,'Legal Reasons')
)

for code,msg in _httperrors:
    nm = f'HTTP{code}{msg.replace(" ","")}Error'
    cls = get_class(nm, 'url', 'hdrs', 'fp', sup=HTTP4xxClientError, msg=msg, code=code)
    globals()[nm] = ExceptionsHTTP[code] = cls
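
# Illustrative check of the generated classes: each status code is registered in
# `ExceptionsHTTP` as a subclass of `HTTP4xxClientError`:
#   ExceptionsHTTP[404] is HTTP404NotFoundError            # -> True
#   issubclass(HTTP404NotFoundError, HTTP4xxClientError)   # -> True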

# Cell
#nbdev_comment _all_ = ['HTTP400BadRequestError', 'HTTP401UnauthorizedError', 'HTTP402PaymentRequiredError', 'HTTP403ForbiddenError', 'HTTP404NotFoundError', 'HTTP405MethodNotAllowedError', 'HTTP406NotAcceptableError', 'HTTP407ProxyAuthRequiredError', 'HTTP408RequestTimeoutError', 'HTTP409ConflictError', 'HTTP410GoneError', 'HTTP411LengthRequiredError', 'HTTP412PreconditionFailedError', 'HTTP413PayloadTooLargeError', 'HTTP414URITooLongError', 'HTTP415UnsupportedMediaTypeError', 'HTTP416RangeNotSatisfiableError', 'HTTP417ExpectationFailedError', 'HTTP418AmAteapotError', 'HTTP421MisdirectedRequestError', 'HTTP422UnprocessableEntityError', 'HTTP423LockedError', 'HTTP424FailedDependencyError', 'HTTP425TooEarlyError', 'HTTP426UpgradeRequiredError', 'HTTP428PreconditionRequiredError', 'HTTP429TooManyRequestsError', 'HTTP431HeaderFieldsTooLargeError', 'HTTP451LegalReasonsError']

# Cell
def urlopen(url, data=None, headers=None, timeout=None, **kwargs):
    "Like `urllib.request.urlopen`, but first `urlwrap` the `url`, and encode `data`"
    if kwargs and not data: data = kwargs
    if data is not None:
        if not isinstance(data, (str,bytes)): data = urlencode(data)
        if not isinstance(data, bytes): data = data.encode('ascii')
    # `timeout` is a named param so it is forwarded to the opener rather than
    # being swept into `kwargs` and sent as POST data (needed by `urlcheck`)
    return _opener.open(urlwrap(url, data=data, headers=headers), timeout=timeout)
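
# Usage sketch (illustrative; example.com is a placeholder): with no `data` or
# kwargs this is a GET; a dict (or kwargs) is urlencoded and sent as a POST body:
#   urlopen('http://example.com')             # GET
#   urlopen('http://example.com', foo='bar')  # POST with body b'foo=bar'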

# Cell
def urlread(url, data=None, headers=None, **kwargs):
    "Retrieve `url`, using `data` dict or `kwargs` to `POST` if present"
    try:
        with urlopen(url, data=data, headers=headers, **kwargs) as res: return res.read()
    except HTTPError as e:
        if 400 <= e.code < 500: raise ExceptionsHTTP[e.code](e.url, e.hdrs, e.fp)
        else: raise
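
# Illustrative error handling: 4xx responses are re-raised as their specific
# class from `ExceptionsHTTP`, so callers can catch exactly the code they expect:
#   try: urlread('http://example.com/missing')
#   except HTTP404NotFoundError as e: ...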

# Cell
def urljson(url, data=None):
    "Retrieve `url` and decode json"
    res = urlread(url, data=data)
    return json.loads(res) if res else {}
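
# Usage sketch (endpoint is a placeholder):
#   urljson('http://example.com/api')  # -> decoded dict/list, or {} for an empty body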

# Cell
def urlcheck(url, timeout=10):
    "Return whether `url` responds with status < 400, treating connection failures as False"
    if not url: return True
    try:
        with urlopen(url, timeout=timeout) as u: return u.status < 400
    except URLError: return False
    except socket.timeout: return False
    except InvalidURL: return False
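
# Usage sketch: falsy URLs count as OK; any connection failure returns False:
#   urlcheck('')                    # -> True
#   urlcheck('http://example.com')  # -> True if it responds with status < 400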

# Cell
def urlclean(url):
    "Remove fragment, params, and querystring from `url` if present"
    return urlunparse(urlparse(url)[:3]+('','',''))
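
# Usage sketch:
#   urlclean('http://example.com/a/b?q=1#frag')  # -> 'http://example.com/a/b'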

# Cell
def urlsave(url, dest=None):
    "Retrieve `url` and save based on its name"
    res = urlread(urlwrap(url))
    if dest is None: dest = Path(url).name
    name = urlclean(dest)
    Path(name).write_bytes(res)
    return dest
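
# Usage sketch (URL is a placeholder): with no `dest`, the file is named after
# the last path segment, with any querystring stripped by `urlclean`:
#   urlsave('http://example.com/files/data.txt')  # writes ./data.txt, returns 'data.txt'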

# Cell
def urlvalid(x):
    "Test if `x` is a valid URL"
    return all(getattrs(urlparse(str(x)), 'scheme', 'netloc'))
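
# Usage sketch: a URL needs both a scheme and a netloc to count as valid:
#   urlvalid('http://example.com')  # -> True
#   urlvalid('example.com')         # -> False (no scheme)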

# Cell
def urlrequest(url, verb, headers=None, route=None, query=None, data=None, json_data=True):
    "`Request` for `url` with optional route params replaced by `route`, plus `query` string, and post `data`"
    if route: url = url.format(**route)
    if query: url += '?' + urlencode(query)
    if isinstance(data,dict): data = (json.dumps if json_data else urlencode)(data).encode('ascii')
    return Request(url, headers=headers or {}, data=data or None, method=verb.upper())
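
# Usage sketch (illustrative names): `route` fills `{}` placeholders, `query`
# becomes the querystring, and dict `data` is JSON-encoded unless `json_data=False`:
#   req = urlrequest('http://example.com/users/{id}', 'put',
#                    route={'id':42}, query={'v':1}, data={'name':'a'})
#   req.full_url  # -> 'http://example.com/users/42?v=1'
#   req.data      # -> b'{"name": "a"}'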

# Cell
@patch
def summary(self:Request, skip=None)->dict:
    "Summary containing full_url, headers, method, and data, removing `skip` from headers"
    res = L('full_url','method','data').map_dict(partial(getattr,self))
    res['headers'] = {k:v for k,v in self.headers.items() if k not in listify(skip)}
    return res
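
# Usage sketch (illustrative): useful for logging a request without leaking
# credentials (note urllib capitalizes header keys when storing them):
#   req = urlrequest('http://example.com', 'get', headers={'Authorization':'token xyz'})
#   req.summary(skip='Authorization')
#   # -> {'full_url': 'http://example.com', 'method': 'GET', 'data': None, 'headers': {}}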

# Cell
def urlsend(url, verb, headers=None, route=None, query=None, data=None, json_data=True, return_json=True, debug=None):
    "Send request with `urlrequest`, converting result to json if `return_json`"
    req = urlrequest(url, verb, headers, route=route, query=query, data=data, json_data=json_data)
    if debug: debug(req)
    res = urlread(req)
    return loads(res) if return_json else res
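
# Usage sketch (endpoint is a placeholder): send a JSON POST and decode the JSON
# reply; pass e.g. `debug=print` to inspect the `Request` before it is sent:
#   urlsend('http://example.com/api/{obj}', 'post', route={'obj':'items'},
#           data={'name':'a'}, debug=print)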

# Cell
def do_request(url, post=False, headers=None, **data):
    "Call GET or json-encoded POST on `url`, depending on `post`"
    if data:
        if post: data = json.dumps(data).encode('ascii')
        else:
            url += "?" + urlencode(data)
            data = None
    return urljson(Request(url, headers=headers or {}, data=data or None))
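
# Usage sketch (endpoint is a placeholder): kwargs become the querystring for
# GET, or a JSON body for POST:
#   do_request('http://example.com/api', q=1)             # GET  .../api?q=1
#   do_request('http://example.com/api', post=True, q=1)  # POST, body b'{"q": 1}'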

# Cell
def _socket_det(port,host,dgram):
    "Resolve (family, address, type) for an int `port` (INET) or string path (UNIX)"
    if isinstance(port,int): family,addr = socket.AF_INET,(host or socket.gethostname(),port)
    else: family,addr = socket.AF_UNIX,port
    return family,addr,(socket.SOCK_STREAM,socket.SOCK_DGRAM)[dgram]

# Cell
def start_server(port, host=None, dgram=False, reuse_addr=True, n_queue=None):
    "Create a `socket` server on `port`, with optional `host`, of type `dgram`"
    listen_args = [n_queue] if n_queue else []
    family,addr,typ = _socket_det(port,host,dgram)
    if family==socket.AF_UNIX:
        # Remove a stale socket file left behind by a previous run
        if os.path.exists(addr): os.unlink(addr)
        assert not os.path.exists(addr), f"{addr} in use"
    s = socket.socket(family, typ)
    if reuse_addr and family==socket.AF_INET: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(addr)
    # Datagram sockets are connectionless, so only stream sockets listen
    if typ==socket.SOCK_STREAM: s.listen(*listen_args)
    return s
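
# Usage sketch: an int `port` gives an INET (TCP, or UDP with `dgram=True`)
# socket; a string gives a Unix-domain socket bound at that path:
#   srv = start_server(9999)        # TCP server on this host (port is illustrative)
#   srv = start_server('/tmp/srv')  # Unix socket (path is illustrative)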

# Cell
def start_client(port, host=None, dgram=False):
    "Create a `socket` client on `port`, with optional `host`, of type `dgram`"
    family,addr,typ = _socket_det(port,host,dgram)
    s = socket.socket(family, typ)
    s.connect(addr)
    return s
0 commit comments