from api.api import admin_api
+ import shutil
import os
from datetime import datetime
import json
from sqlalchemy.sql import text
+
+ from sqlalchemy.dialects.postgresql import insert
+ from sqlalchemy import Table, Column, Integer, String, MetaData, ForeignKey, exc, select
from pipeline import flow_script
from config import engine
from flask import request, redirect, jsonify, current_app, abort

ALLOWED_EXTENSIONS = {"csv", "xlsx"}

+ metadata = MetaData()

def __allowed_file(filename):
    return "." in filename and filename.rsplit(".", 1)[1].lower() in ALLOWED_EXTENSIONS

+ kvt = Table("kv_unique", metadata, autoload=True, autoload_with=engine)
+
+

# file upload tutorial
@admin_api.route("/api/file", methods=["POST"])
@@ -36,7 +44,7 @@ def uploadCSV():
    finally:
        file.close()

-     return jsonify({"success": "uploaded file"});
+     return redirect("/")


@admin_api.route("/api/listCurrentFiles", methods=["GET"])
@@ -55,38 +63,38 @@ def list_current_files():
@admin_api.route("/api/execute", methods=["GET"])
def execute():
    current_app.logger.info("Execute flow")
+     flow_script.start_flow()

-     try:
-         last_execution_file = open(LOGS_PATH + "last_execution.json", "r")
-         last_execution_details = json.loads(last_execution_file.read())
-
-         if last_execution_details != "Running":
-             last_execution_file = open(LOGS_PATH + "last_execution.json", "w")
-             last_execution_file.write(json.dumps("Running"))
-             last_execution_file.close()
-
-             flow_script.start_flow()
-
-             statistics = get_statistics()
+     current_time = datetime.now().ctime()
+     statistics = get_statistics()

-             last_execution_details = {"stats": statistics}
+     last_execution_details = {"executionTime": current_time, "stats": statistics}
+     last_ex_json = json.dumps(last_execution_details)

-     except Exception as e:
-         last_execution_details = {"stats": {"Execution Error": str(e)}}
-         return abort(500)
-
-     finally:
-         current_time = datetime.now().ctime()
-
-         last_execution_details["executionTime"] = current_time
-         last_execution_file = open(LOGS_PATH + "last_execution.json", "w")
-         last_execution_file.write(json.dumps(last_execution_details))
-         last_execution_file.close()
+     # Write Last Execution stats to DB
+     # See Alembic Revision ID: 05e0693f8cbb for table definition
+     with engine.connect() as connection:
+         ins_stmt = insert(kvt).values(  # Postgres-specific insert() supporting ON CONFLICT
+             keycol='last_execution_time',
+             valcol=last_ex_json,
+         )
+         # If key already present in DB, do update instead
+         upsert = ins_stmt.on_conflict_do_update(
+             constraint='kv_unique_keycol_key',
+             set_=dict(valcol=last_ex_json),
+         )
+
+         try:
+             connection.execute(upsert)
+         except Exception as e:
+             current_app.logger.error("Insert/Update failed on Last Execution stats")
+             current_app.logger.exception(e)

    return jsonify(success=True)


def get_statistics():
+
    with engine.connect() as connection:
        query_matches = text("SELECT count(*) FROM (SELECT distinct matching_id from pdp_contacts) as a;")
        query_total_count = text("SELECT count(*) FROM pdp_contacts;")
@@ -104,26 +112,22 @@ def get_statistics():

@admin_api.route("/api/statistics", methods=["GET"])
def list_statistics():
-     try:
-         last_execution_file = open(LOGS_PATH + "last_execution.json", "r")
-         last_execution_details = json.loads(last_execution_file.read())
-         last_execution_file.close()
+     """Pull Last Execution stats from DB."""
+     current_app.logger.info("list_statistics() request")
+     last_execution_details = '{}'  # Empty but valid JSON

-     except (FileNotFoundError):
-         current_app.logger.error("last_execution.json file was missing")
-         return abort(500)
+     try:  # See Alembic Revision ID: 05e0693f8cbb for table definition
+         with engine.connect() as connection:
+             s = text("select valcol from kv_unique where keycol = 'last_execution_time';")
+             result = connection.execute(s)
+             last_execution_details = result.fetchone()[0]

-     except (json.JSONDecodeError):
-         current_app.logger.error(
-             "last_execution.json could not be decoded - possible corruption"
-         )
-         return abort(500)

    except Exception as e:
-         current_app.logger.error("Failure reading last_execution.json: ", e)
-         return abort(500)
+         current_app.logger.error("Failure reading Last Execution stats from DB")
+         # return abort(500)  # Weird but not worth a 500

-     return jsonify(last_execution_details)
+     return last_execution_details

"""