 
 # Operator internal names to call if first name is not available
 # Allows deprecating internal names associated to public Python operator modules
-operator_aliases = {
+OPERATOR_ALIASES = {
     "support_provider_cyclic": "mapdl::rst::support_provider_cyclic",
     "NMISC": "mapdl::nmisc",
     "SMISC": "mapdl::smisc",
     "MCF": "U",
 }
 
+BUILT_IN_TYPES = ("int", "double", "string", "bool", "float", "str", "dict")
+
+TYPES_WITHOUT_PYTHON_IMPLEMENTATION = (
+    "Materials",
+    "AnsDispatchHolder",
+    "Stream",
+    "AbstractFieldSupport",
+    "AnyCollection",
+    "CustomTypeFieldsContainer",
+    "MeshSelectionManager",
+    "Class Dataprocessing::Dpftypecollection<Class Dataprocessing::Cpropertyfield>",
+    "Struct Iansdispatch",
+    "PropertyFieldsContainer",
+    "Class Dataprocessing::Crstfilewrapper",
+    "Char",
+)
+
 
 def find_class_origin(class_name: str, package_name: str = "ansys.dpf.core") -> Optional[str]:
     """Find the fully qualified import path where a class is originally defined."""
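Note: find_class_origin appears above only as diff context, so its body is not part of this change. A minimal sketch of how such a helper could work, assuming it walks the package's submodules with pkgutil and keeps the module that actually defines the class rather than one that re-exports it (the name find_class_origin_sketch and this whole implementation are illustrative, not the repository's code):

import importlib
import pkgutil
from typing import Optional

def find_class_origin_sketch(class_name: str, package_name: str = "ansys.dpf.core") -> Optional[str]:
    # Walk every submodule of the package and report where the class is defined.
    package = importlib.import_module(package_name)
    for module_info in pkgutil.walk_packages(package.__path__, prefix=package_name + "."):
        try:
            module = importlib.import_module(module_info.name)
        except Exception:
            continue  # skip submodules that fail to import (optional dependencies, etc.)
        candidate = getattr(module, class_name, None)
        # Only accept the module that defines the class itself, not one that re-exports it.
        if isinstance(candidate, type) and candidate.__module__ == module_info.name:
            return module_info.name
    return None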
@@ -121,14 +138,22 @@ def build_pin_data(pins, output=False):
         docstring_types = map_types(type_names)
         parameter_types = " or ".join(docstring_types)
         parameter_types = "\n".join(wrap(parameter_types, subsequent_indent=" ", width=60))
-        type_list_for_annotation = " | ".join(docstring_types)
+
+        type_list_for_annotation = " | ".join(
+            docstring_type
+            for docstring_type in docstring_types
+            if docstring_type
+            not in TYPES_WITHOUT_PYTHON_IMPLEMENTATION  # Types without python implementations can't be typechecked
+        )
+
+        # if not type_list_for_annotation:
+        #     pass
 
         pin_name = specification.name
         pin_name = pin_name.replace("<", "_")
         pin_name = pin_name.replace(">", "_")
 
         main_type = docstring_types[0] if len(docstring_types) >= 1 else ""
-        built_in_types = ("int", "double", "string", "bool", "float", "str", "dict")
 
         # Case where output pin has multiple types.
         multiple_types = len(type_names) >= 2
@@ -139,6 +164,9 @@ def build_pin_data(pins, output=False):
         document = specification.document
         document_pin_docstring = document.replace("\n", "\n ")
 
+        # if output and multiple_types:
+        #     pass
+
         pin_data = {
             "id": id,
             "name": pin_name,
@@ -153,7 +181,7 @@ def build_pin_data(pins, output=False):
             "type_list_for_annotation": type_list_for_annotation,
             "types_for_docstring": parameter_types,
             "main_type": main_type,
-            "built_in_main_type": main_type in built_in_types,
+            "built_in_main_type": main_type in BUILT_IN_TYPES,
             "optional": specification.optional,
             "document": document,
             "document_pin_docstring": document_pin_docstring,
@@ -188,6 +216,7 @@ def build_operator(
     category,
     specification_description,
 ):
+    # global all_input_output_types, types_without_concrete_definiton
     input_pins = []
     if specification.inputs:
         input_pins = build_pin_data(specification.inputs)
@@ -203,22 +232,26 @@ def build_operator(
 
     date_and_time = datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
 
-    built_in_types = ("int", "double", "string", "bool", "float", "str", "dict")
     annotation_import_types = set()
     for input_pin in input_pins:
         annotation_import_types.update(input_pin["docstring_types"])
     for output_pin in output_pins:
         annotation_import_types.update(output_pin["docstring_types"])
     annotation_import_list = []
     for annotation_type in annotation_import_types:
-        if annotation_type in built_in_types:
+        if annotation_type in BUILT_IN_TYPES + TYPES_WITHOUT_PYTHON_IMPLEMENTATION:
             continue
+        definition_location = find_class_origin(annotation_type)
         annotation_import_list.append(
             {
                 "class_name": annotation_type,
-                "definition_location": find_class_origin(annotation_type),
+                "definition_location": definition_location,
             }
         )
+        # if not definition_location:
+        #     types_without_concrete_definiton.update([annotation_type])
+
+    # all_input_output_types.update(annotation_import_types)
 
     data = {
         "operator_name": operator_name,
@@ -236,15 +269,16 @@ def build_operator(
         "date_and_time": date_and_time,
         "has_input_aliases": has_input_aliases,
         "has_output_aliases": has_output_aliases,
-        "has_internal_name_alias": operator_name in operator_aliases.keys(),
-        "internal_name_alias": operator_aliases.get(operator_name),
+        "has_internal_name_alias": operator_name in OPERATOR_ALIASES.keys(),
+        "internal_name_alias": OPERATOR_ALIASES.get(operator_name),
     }
 
     this_path = os.path.dirname(os.path.abspath(__file__))
     mustache_file = os.path.join(this_path, "operator.mustache")
     with open(mustache_file, "r") as f:
         cls = chevron.render(f, data)
     try:
+        # return cls
         return black.format_str(cls, mode=black.FileMode())
     except Exception as e:
         print(f"{operator_name=}")
@@ -317,10 +351,10 @@ def build_operators():
         # Convert Markdown descriptions to RST
         specification_description = translator.convert(specification.description)
 
-        if "stress" != scripting_name:
+        if scripting_name not in ("stress", "propertyfield_get_attribute", "mesh_support_provider"):
             continue
-        if "stress" == scripting_name:
-            pass
+        # if "stress" == scripting_name:
+        #     pass
 
         # Write to operator file
         operator_file = os.path.join(category_path, scripting_name + ".py")
@@ -351,24 +385,24 @@ def build_operators():
     print(f"Generated {succeeded} out of {len(available_operators)} ({hidden} hidden)")
 
     # Create __init__.py files
-    print(f"Generating __init__.py files...")
-    with open(
-        os.path.join(this_path, "__init__.py"), "w", encoding="utf-8", newline="\u000a"
-    ) as main_init:
-        for category in sorted(categories):
-            # Add category to main init file imports
-            main_init.write(f"from . import {category}\n")
-            # Create category init file
-            category_operators = os.listdir(os.path.join(this_path, category.split(".")[0]))
-            with open(
-                os.path.join(this_path, category, "__init__.py"),
-                "w",
-                encoding="utf-8",
-                newline="\u000a",
-            ) as category_init:
-                for category_operator in sorted(category_operators):
-                    operator_name = category_operator.split(".")[0]
-                    category_init.write(f"from .{operator_name} import {operator_name}\n")
+    # print(f"Generating __init__.py files...")
+    # with open(
+    #     os.path.join(this_path, "__init__.py"), "w", encoding="utf-8", newline="\u000a"
+    # ) as main_init:
+    #     for category in sorted(categories):
+    #         # Add category to main init file imports
+    #         main_init.write(f"from . import {category}\n")
+    #         # Create category init file
+    #         category_operators = os.listdir(os.path.join(this_path, category.split(".")[0]))
+    #         with open(
+    #             os.path.join(this_path, category, "__init__.py"),
+    #             "w",
+    #             encoding="utf-8",
+    #             newline="\u000a",
+    #         ) as category_init:
+    #             for category_operator in sorted(category_operators):
+    #                 operator_name = category_operator.split(".")[0]
+    #                 category_init.write(f"from .{operator_name} import {operator_name}\n")
 
     if succeeded == len(available_operators) - hidden:
         print("Success")
@@ -380,7 +414,11 @@ def build_operators():
 
 
 if __name__ == "__main__":
+    # all_input_output_types = set()
+    # types_without_concrete_definiton = set()
     dpf.set_default_server_context(dpf.AvailableServerContexts.premium)
     dpf.start_local_server(config=dpf.AvailableServerConfigs.LegacyGrpcServer)
     build_operators()
     dpf.SERVER.shutdown()
+    # print(all_input_output_types)
+    # print(types_without_concrete_definiton)
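For reference, a tiny worked example of what the new filter in build_pin_data produces for the annotation string; the type names other than "Materials" are made up for illustration:

docstring_types = ["field", "Materials", "fields_container"]  # "field"/"fields_container" are illustrative
type_list_for_annotation = " | ".join(
    t for t in docstring_types if t not in TYPES_WITHOUT_PYTHON_IMPLEMENTATION
)
# type_list_for_annotation == "field | fields_container"; "Materials" is dropped
# because it has no Python implementation to import or type-check against.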