# TCP port the anomaly-dashboard HTTP server listens on (bound by
# socketserver.TCPServer in run_server below).
PORT = 8888
1313
# Load the lookup_table: a pickled nested mapping produced by an earlier
# pipeline step. Usage later in this file reads it as
#   lookup_table[ticker][date] -> per-day dict with keys
#   "trades", "avg_trades", "std_trades", "close_price", "price_diff".
# NOTE(review): pickle.load executes arbitrary code from the file — this is
# only safe because lookup_table.pkl is assumed to be locally generated,
# trusted data; never point this at an untrusted file.
with open("lookup_table.pkl", "rb") as f:
    lookup_table = pickle.load(f)
1717
18+
1819class handler (http .server .SimpleHTTPRequestHandler ):
1920 def do_GET (self ):
2021 # Parse the path and query parameters
2122 parsed_path = urlparse (self .path )
2223 path = parsed_path .path
2324 query_params = parse_qs (parsed_path .query )
24-
25- if path == '/' :
25+
26+ if path == "/" :
2627 # Handle the root path
2728 # Get the date parameter if provided
28- date_param = query_params .get (' date' , [None ])[0 ]
29-
29+ date_param = query_params .get (" date" , [None ])[0 ]
30+
3031 # Get all dates from the lookup table
3132 all_dates = set ()
3233 for ticker_data in lookup_table .values ():
3334 all_dates .update (ticker_data .keys ())
3435 all_dates = sorted (all_dates )
35-
36+
3637 # If date is None, get the latest date from the lookup table
3738 if date_param is None :
3839 if all_dates :
@@ -41,109 +42,131 @@ def do_GET(self):
4142 self .send_response (200 )
4243 self .send_header ("Content-type" , "text/html" )
4344 self .end_headers ()
44- html_content = '<html><body><h1>No data available.</h1></body></html>'
45+ html_content = (
46+ "<html><body><h1>No data available.</h1></body></html>"
47+ )
4548 self .wfile .write (html_content .encode ())
4649 return
4750 else :
4851 latest_date = date_param
49-
52+
5053 # Ensure latest_date is in all_dates
5154 if latest_date not in all_dates :
5255 # Handle the case where the provided date is invalid
5356 self .send_response (400 )
5457 self .send_header ("Content-type" , "text/html" )
5558 self .end_headers ()
56- error_html = f' <html><body><h1>Error: No data available for date { latest_date } </h1></body></html>'
59+ error_html = f" <html><body><h1>Error: No data available for date { latest_date } </h1></body></html>"
5760 self .wfile .write (error_html .encode ())
5861 return
59-
62+
6063 # Now, get the anomalies for the latest_date
6164 anomalies = []
6265 for ticker , date_data in lookup_table .items ():
6366 if latest_date in date_data :
6467 data = date_data [latest_date ]
65- trades = data [' trades' ]
66- avg_trades = data [' avg_trades' ]
67- std_trades = data [' std_trades' ]
68+ trades = data [" trades" ]
69+ avg_trades = data [" avg_trades" ]
70+ std_trades = data [" std_trades" ]
6871 if (
69- avg_trades is not None and
70- std_trades is not None and
71- std_trades > 0
72+ avg_trades is not None
73+ and std_trades is not None
74+ and std_trades > 0
7275 ):
7376 z_score = (trades - avg_trades ) / std_trades
7477 threshold_multiplier = 3 # Adjust as needed
7578 if z_score > threshold_multiplier :
76- anomalies .append ({
77- 'ticker' : ticker ,
78- 'date' : latest_date ,
79- 'trades' : trades ,
80- 'avg_trades' : avg_trades ,
81- 'std_trades' : std_trades ,
82- 'z_score' : z_score ,
83- 'close_price' : data ['close_price' ],
84- 'price_diff' : data ['price_diff' ]
85- })
79+ anomalies .append (
80+ {
81+ "ticker" : ticker ,
82+ "date" : latest_date ,
83+ "trades" : trades ,
84+ "avg_trades" : avg_trades ,
85+ "std_trades" : std_trades ,
86+ "z_score" : z_score ,
87+ "close_price" : data ["close_price" ],
88+ "price_diff" : data ["price_diff" ],
89+ }
90+ )
8691 # Sort anomalies by trades in descending order
87- anomalies .sort (key = lambda x : x [' trades' ], reverse = True )
92+ anomalies .sort (key = lambda x : x [" trades" ], reverse = True )
8893 # Generate the HTML to display the anomalies
8994 self .send_response (200 )
9095 self .send_header ("Content-type" , "text/html" )
9196 self .end_headers ()
9297 # Build the HTML content
93- html_content = '<html><link href="https://cdn.jsdelivr.net/npm/[email protected] /dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous"><script src="https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js" integrity="sha512-F/gIMdDfda6OD2rnzt/Iyp2V9JLHlFQ+EUyixDg9+rkwjqgW1snpkpx7FD5FV1+gG2fmFj7I3r6ReQDUidHelA==" crossorigin="anonymous" referrerpolicy="no-referrer"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/sorts/tablesort.number.min.js" integrity="sha512-dRD755QRxlybm0h3LXXIGrFcjNakuxW3reZqnPtUkMv6YsSWoJf+slPjY5v4lZvx2ss+wBZQFegepmA7a2W9eA==" crossorigin="anonymous" referrerpolicy="no-referrer"></script><head><title>Anomalies for {}</title></head><body>' .
format (
latest_date )
94- html_content += '<div id="container" style="padding:4px;"><h1>Anomalies for {}</h1>' .format (latest_date )
98+ html_content = '<html><link href="https://cdn.jsdelivr.net/npm/[email protected] /dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-QWTKZyjpPEjISv5WaRU9OFeRpok6YctnYmDr5pNlyT2bRjXh0JMhjY6hW+ALEwIH" crossorigin="anonymous"><script src="https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js" integrity="sha512-F/gIMdDfda6OD2rnzt/Iyp2V9JLHlFQ+EUyixDg9+rkwjqgW1snpkpx7FD5FV1+gG2fmFj7I3r6ReQDUidHelA==" crossorigin="anonymous" referrerpolicy="no-referrer"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/sorts/tablesort.number.min.js" integrity="sha512-dRD755QRxlybm0h3LXXIGrFcjNakuxW3reZqnPtUkMv6YsSWoJf+slPjY5v4lZvx2ss+wBZQFegepmA7a2W9eA==" crossorigin="anonymous" referrerpolicy="no-referrer"></script><head><title>Anomalies for {}</title></head><body>' .
format (
99+ latest_date
100+ )
101+ html_content += '<div id="container" style="padding:4px;"><h1>Anomalies for {}</h1>' .format (
102+ latest_date
103+ )
95104 # Add navigation links (prev and next dates)
96105 current_index = all_dates .index (latest_date )
97106 prev_date = all_dates [current_index - 1 ] if current_index > 0 else None
98- next_date = all_dates [current_index + 1 ] if current_index < len (all_dates ) - 1 else None
99- html_content += '<p>'
107+ next_date = (
108+ all_dates [current_index + 1 ]
109+ if current_index < len (all_dates ) - 1
110+ else None
111+ )
112+ html_content += "<p>"
100113 if prev_date :
101- html_content += '<a href="/?date={}">Previous Date</a> ' .format (prev_date )
114+ html_content += '<a href="/?date={}">Previous Date</a> ' .format (
115+ prev_date
116+ )
102117 if next_date :
103118 html_content += '<a href="/?date={}">Next Date</a> ' .format (next_date )
104- html_content += ' </p>'
119+ html_content += " </p>"
105120 # Display the anomalies in a table
106- html_content += '<table id="anomalies" class="table table-striped table-hover">'
107- html_content += '<thead><tr>'
108- html_content += '<th>Ticker</th>'
109- html_content += '<th>Trades</th>'
110- html_content += '<th>Avg Trades</th>'
111- html_content += '<th>Std Dev</th>'
112- html_content += '<th>Z-score</th>'
113- html_content += '<th>Close Price</th>'
114- html_content += '<th>Price Diff</th>'
115- html_content += '<th>Chart</th>'
116- html_content += '</tr></thead><tbody>'
121+ html_content += (
122+ '<table id="anomalies" class="table table-striped table-hover">'
123+ )
124+ html_content += "<thead><tr>"
125+ html_content += "<th>Ticker</th>"
126+ html_content += "<th>Trades</th>"
127+ html_content += "<th>Avg Trades</th>"
128+ html_content += "<th>Std Dev</th>"
129+ html_content += "<th>Z-score</th>"
130+ html_content += "<th>Close Price</th>"
131+ html_content += "<th>Price Diff</th>"
132+ html_content += "<th>Chart</th>"
133+ html_content += "</tr></thead><tbody>"
117134 for anomaly in anomalies :
118- html_content += ' <tr>'
119- html_content += ' <td>{}</td>' .format (anomaly [' ticker' ])
120- html_content += ' <td>{}</td>' .format (anomaly [' trades' ])
121- html_content += ' <td>{:.2f}</td>' .format (anomaly [' avg_trades' ])
122- html_content += ' <td>{:.2f}</td>' .format (anomaly [' std_trades' ])
123- html_content += ' <td>{:.2f}</td>' .format (anomaly [' z_score' ])
124- html_content += ' <td>{:.2f}</td>' .format (anomaly [' close_price' ])
125- html_content += ' <td>{:.2f}</td>' .format (anomaly [' price_diff' ])
135+ html_content += " <tr>"
136+ html_content += " <td>{}</td>" .format (anomaly [" ticker" ])
137+ html_content += " <td>{}</td>" .format (anomaly [" trades" ])
138+ html_content += " <td>{:.2f}</td>" .format (anomaly [" avg_trades" ])
139+ html_content += " <td>{:.2f}</td>" .format (anomaly [" std_trades" ])
140+ html_content += " <td>{:.2f}</td>" .format (anomaly [" z_score" ])
141+ html_content += " <td>{:.2f}</td>" .format (anomaly [" close_price" ])
142+ html_content += " <td>{:.2f}</td>" .format (anomaly [" price_diff" ])
126143 # Add a link to the chart
127- html_content += '<td><a href="/chart?ticker={}&date={}">View Chart</a></td>' .format (anomaly ['ticker' ], latest_date )
128- html_content += '</tr>'
144+ html_content += (
145+ '<td><a href="/chart?ticker={}&date={}">View Chart</a></td>' .format (
146+ anomaly ["ticker" ], latest_date
147+ )
148+ )
149+ html_content += "</tr>"
129150 html_content += '</tbody></table><script>new Tablesort(document.getElementById("anomalies"));</script>'
130- html_content += ' </div></body></html>'
151+ html_content += " </div></body></html>"
131152 self .wfile .write (html_content .encode ())
132- elif path == ' /chart' :
153+ elif path == " /chart" :
133154 # Handle the chart page
134155 # Get 'ticker' and 'date' from query parameters
135- ticker = query_params .get (' ticker' , [None ])[0 ]
136- date = query_params .get (' date' , [None ])[0 ]
156+ ticker = query_params .get (" ticker" , [None ])[0 ]
157+ date = query_params .get (" date" , [None ])[0 ]
137158 if ticker is None or date is None :
138159 # Return an error page
139160 self .send_response (400 )
140161 self .send_header ("Content-type" , "text/html" )
141162 self .end_headers ()
142- error_html = ' <html><body><h1>Error: Missing ticker or date parameter</h1></body></html>'
163+ error_html = " <html><body><h1>Error: Missing ticker or date parameter</h1></body></html>"
143164 self .wfile .write (error_html .encode ())
144165 else :
145166 # Fetch minute aggregates for the ticker and date
146- client = RESTClient (trace = True ) # POLYGON_API_KEY environment variable is used
167+ client = RESTClient (
168+ trace = True
169+ ) # POLYGON_API_KEY environment variable is used
147170 try :
148171 aggs = []
149172 date_from = date
@@ -166,7 +189,7 @@ def do_GET(self):
166189 agg .open ,
167190 agg .high ,
168191 agg .low ,
169- agg .close
192+ agg .close ,
170193 ]
171194 data .append (new_record )
172195 # Generate the HTML for the chart page
@@ -239,23 +262,31 @@ def do_GET(self):
239262 </div>
240263 </body>
241264 </html>
242- """ % (json .dumps (data ), ticker , date , ticker )
265+ """ % (
266+ json .dumps (data ),
267+ ticker ,
268+ date ,
269+ ticker ,
270+ )
243271 self .send_response (200 )
244272 self .send_header ("Content-type" , "text/html" )
245- self .send_header (' Access-Control-Allow-Origin' , '*' )
273+ self .send_header (" Access-Control-Allow-Origin" , "*" )
246274 self .end_headers ()
247275 self .wfile .write (chart_html .encode ())
248276 except Exception as e :
249277 # Handle exceptions
250278 self .send_response (500 )
251279 self .send_header ("Content-type" , "text/html" )
252280 self .end_headers ()
253- error_html = '<html><body><h1>Error fetching data: {}</h1></body></html>' .format (str (e ))
281+ error_html = "<html><body><h1>Error fetching data: {}</h1></body></html>" .format (
282+ str (e )
283+ )
254284 self .wfile .write (error_html .encode ())
255285 else :
256286 # Serve files from the current directory
257287 super ().do_GET ()
258288
289+
259290def run_server ():
260291 with socketserver .TCPServer (("" , PORT ), handler ) as httpd :
261292 print ("serving at port" , PORT )
@@ -266,5 +297,6 @@ def run_server():
266297 httpd .shutdown ()
267298 httpd .server_close ()
268299
# Script entry point: start the HTTP server only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    run_server()
0 commit comments