 import pickle
 import pickletools
 import os
-from pipreqs import pipreqs
 
 # %%
 class JSONFiles:
@@ -58,17 +57,6 @@ def writeVarJSON(cls, inputData, isInput=True, jPath=Path.cwd()):
             else:
                 isStr = False
 
-            # loop through all predict variables to determine their name, length,
-            # type, and level; append each to outputJSON
-            for name in predictNames:
-                if isSeries:
-                    predict = inputDF
-                else:
-                    predict = inputDF[name]
-                firstRow = predict.loc[predict.first_valid_index()]
-                dType = predict.dtypes.name
-                isStr = type(firstRow) is str
-
             if isStr:
                 outputLevel = "nominal"
                 outputType = "string"
@@ -315,6 +303,7 @@ def writeFileMetadataJSON(cls, modelPrefix, jPath=Path.cwd(), isH2OModel=False):
                 )
             )
 
+    @classmethod
     def writeBaseFitStat(
         self, csvPath=None, jPath=Path.cwd(), userInput=False, tupleList=None
     ):
@@ -448,6 +437,7 @@ def writeBaseFitStat(
                 )
             )
 
+    @classmethod
     def calculateFitStat(
         self, validateData=None, trainData=None, testData=None, jPath=Path.cwd()
     ):
@@ -606,6 +596,7 @@ def calculateFitStat(
                 )
             )
 
+    @classmethod
     def generateROCLiftStat(
         self,
         targetName,
@@ -974,9 +965,9 @@ def convertDataRole(self, dataRole):
 
         return conversion
 
-    def get_imports(self):
+    def getCurrentScopedImports(self):
         """
-        Gets the Python modules from the current scope's global variables.
+        Gets the Python modules from the current scope's global variables.
 
         Yields
         -------
@@ -992,8 +983,8 @@ def get_imports(self):
             elif isinstance(val, type):
                 name = val.__module__.split(".")[0]
                 yield name
-
-    def get_pickle_file(self, pPath):
+
+    def getPickleFile(self, pPath):
         """
         Given a file path, retrieve the pickle file(s).
 
@@ -1002,21 +993,21 @@ def get_pickle_file(self, pPath):
         pPath : str
             File location for the input pickle file. Default is the current
             working directory.
-
+
         Returns
         -------
         list
             A list of pickle files.
         """
-
+
         fileNames = []
         fileNames.extend(sorted(Path(pPath).glob("*.pickle")))
         return fileNames
 
-    def get_modules_from_pickle_file(self, pickle_file):
+    def getDependenciesFromPickleFile(self, pickleFile):
         """
-        Reads the pickled byte stream from a file object, serializes the pickled byte
-        stream as a bytes object, and inspects the bytes object for all Python modules
+        Reads the pickled byte stream from a file object, serializes the pickled byte
+        stream as a bytes object, and inspects the bytes object for all Python modules
         and aggregates them in a set.
 
         Parameters
@@ -1030,13 +1021,14 @@ def get_modules_from_pickle_file(self, pickle_file):
             A set of modules obtained from the pickle stream.
         """
 
-        with (open(pickle_file, "rb")) as openfile:
+        with (open(pickleFile, "rb")) as openfile:
             obj = pickle.load(openfile)
             dumps = pickle.dumps(obj)
 
-        modules = {mod.split(".")[0] for mod, _ in self.get_names(dumps)}
+        modules = {mod.split(".")[0] for mod, _ in self.getNames(dumps)}
         return modules
 
+    @classmethod
     def createRequirementsJSON(self, jPath=Path.cwd()):
         """
         Searches the root of the project for all Python modules and writes them to a requirements.json file.
@@ -1047,32 +1039,12 @@ def createRequirementsJSON(self, jPath=Path.cwd()):
             The path to a Python project, by default Path.cwd().
         """
 
-        # imports = list(set(self.get_imports()))
-
-        # with open(os.path.join(jPath, "imports.py"), "w") as file:
-        #     for item in imports:
-        #         file.write("import %s\n" % item)
-
-        pipreqs.init(
-            {
-                "<path>": jPath,
-                "--savepath": None,
-                "--print": False,
-                "--use-local": None,
-                "--force": True,
-                "--proxy": None,
-                "--pypi-server": None,
-                "--diff": None,
-                "--clean": None,
-            }
-        )
-
         module_version_map = {}
         pickle_files = self.get_pickle_file(jPath)
         requirements_txt_file = os.path.join(jPath, "requirements.txt")
         with open(requirements_txt_file, "r") as f:
             modules_requirements_txt = set()
-            for pickle_file in pickle_files:
+            for pickle_file in pickle_files:
                 modules_pickle = self.get_modules_from_pickle_file(pickle_file)
             for line in f:
                 module_parts = line.rstrip().split("==")
@@ -1105,13 +1077,11 @@ def createRequirementsJSON(self, jPath=Path.cwd()):
         )
         with open(os.path.join(jPath, "requirements.json"), "w") as file:
             print(j, file=file)
-
-        # Delete requirements.txt file after requirements.json has been written.
-        os.remove(requirements_txt_file)
 
-    def get_names(self, stream):
+    def getNames(self, stream):
         """
-        Generates (module, class_name) tuples from a pickle stream. Extracts all class names referenced by GLOBAL and STACK_GLOBAL opcodes.
+        Generates (module, class_name) tuples from a pickle stream. Extracts all class names referenced
+        by GLOBAL and STACK_GLOBAL opcodes.
 
         Credit: https://stackoverflow.com/questions/64850179/inspecting-a-pickle-dump-for-dependencies
         More information here: https://github.com/python/cpython/blob/main/Lib/pickletools.py
@@ -1168,5 +1138,4 @@ def get_names(self, stream):
             if len(after) == 1 and op.arg is not None:
                 stack.append(arg)
             else:
-                stack.extend(after)
-
+                stack.extend(after)
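
Note: the getNames generator (credited to the Stack Overflow answer linked in its docstring) walks the pickle opcode stream with pickletools and reports classes referenced by GLOBAL and STACK_GLOBAL opcodes. The standalone sketch below illustrates the same idea in simplified form; the helper name modules_in_pickle and the "last two string constants" heuristic for STACK_GLOBAL are assumptions for illustration, not the implementation in this diff, which emulates the pickle stack more completely.

import pickle
import pickletools

def modules_in_pickle(pickle_bytes):
    # Collect top-level module names referenced by GLOBAL/STACK_GLOBAL opcodes.
    modules = set()
    recent_strings = []  # string constants seen so far; STACK_GLOBAL's operands are the last two
    for opcode, arg, _pos in pickletools.genops(pickle_bytes):
        if opcode.name == "GLOBAL":
            # arg is "module qualname", e.g. "sklearn.tree._classes DecisionTreeClassifier"
            modules.add(arg.split(" ", 1)[0].split(".")[0])
        elif opcode.name == "STACK_GLOBAL" and len(recent_strings) >= 2:
            # the module name was pushed two string constants back (simplified heuristic)
            modules.add(recent_strings[-2].split(".")[0])
        elif isinstance(arg, str):
            recent_strings.append(arg)
    return modules

# Hypothetical usage against a pickled model file:
# with open("model.pickle", "rb") as f:
#     obj = pickle.load(f)
# print(modules_in_pickle(pickle.dumps(obj)))

Because a scan like this only reads opcodes, it does not need the pickled object's dependencies to be importable; the diff's getDependenciesFromPickleFile instead loads the object and re-dumps it before handing the bytes to getNames.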