@@ -46,8 +46,6 @@ def AggregationSourceDiscovered(cls, event_handler: EventHandlerInterface, event
     response = response.json()

     ### Save agent registration
-    # connection_method_name = connectionMethodId.split('/')[-1]
-    # connection_method_name = connectionMethodId[:-len(connection_method_name)]
     event_handler.core.storage_backend.write(response)

     aggregation_source_id = str(uuid.uuid4())
@@ -85,8 +83,6 @@ def ResourceCreated(cls, event_handler: EventHandlerInterface, event: dict, cont
     # sunfishAliasDB contains renaming data, the alias xref array, the boundaryLink
     # data, and assorted flags that are used during upload renaming and final merge of
     # boundary components based on boundary links.
-
-    #
     #

     logger.info("New resource created")
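For orientation, one record in an alias store of that shape might look roughly like the sketch below. Every key name here is invented for illustration; the actual sunfishAliasDB layout is defined elsewhere in the Sunfish code.

```python
# Hypothetical example of a single alias-database entry; key names are guesses,
# chosen only to mirror the kinds of data the comment above describes.
sunfish_alias_entry = {
    "agent_uri": "/redfish/v1/Systems/agent-local-id",       # URI as uploaded by the agent
    "sunfish_uri": "/redfish/v1/Systems/renamed-id",          # URI after upload renaming
    "alias_xref": ["/redfish/v1/Fabrics/CXL/Switches/sw1"],   # cross-references to aliased objects
    "boundary_link": {
        "local_port": "/redfish/v1/Fabrics/CXL/Switches/sw1/Ports/1",
        "remote_port": None,                                   # filled in once the peer agent uploads
    },
    "flags": {"renamed": True, "merged": False},               # state consulted during the final merge
}
```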
@@ -149,7 +145,6 @@ def TriggerEvent(cls, event_handler: EventHandlerInterface, event: dict, context
     #
     logger.info("TriggerEvent method called")
     file_to_send = event['MessageArgs'][0]  # relative Resource Path
-    #file_path = os.path.join(self.conf['redfish_root'], file_to_send)
     hostname = event['MessageArgs'][1]  # target address
     destination = hostname + "/EventListener"  # may match a Subscription object's 'Destination' property
     logger.debug(f"path of file_to_send is {file_to_send}")
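A rough sketch of how the rest of this handler could forward the named resource, assuming the storage backend exposes the same read() used elsewhere in this file; the helper name and the bare POST are illustrative, not the actual Sunfish implementation.

```python
import requests

def send_test_event(storage_backend, file_to_send: str, destination: str) -> int:
    # Illustrative only: read the named resource and POST it to the listener URL.
    # The real TriggerEvent handler may wrap or filter the payload differently.
    payload = storage_backend.read(file_to_send)
    resp = requests.post(destination, json=payload, timeout=10)
    resp.raise_for_status()
    return resp.status_code
```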
@@ -299,7 +294,6 @@ def forward_event(self, list, payload):
     Returns:
         list: list of all the reachable subscribers for the event.
     """
-    # resp = 400

     for id in list:
         path = os.path.join(self.redfish_root, 'EventService', 'Subscriptions', id)
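Filled out under the same assumptions, the body of this loop might look like the sketch below: read each Subscription object, POST the payload to its 'Destination', and collect the ids that answered. The function name and the error handling are illustrative, not the Sunfish implementation.

```python
import os
import requests

def forward_to_subscribers(storage_backend, redfish_root: str, ids, payload) -> list:
    # Sketch of a per-subscriber forwarding loop; assumes each Subscription
    # object carries a 'Destination' URL, as the comment above suggests.
    reachable = []
    for sub_id in ids:
        path = os.path.join(redfish_root, 'EventService', 'Subscriptions', sub_id)
        subscription = storage_backend.read(path)
        destination = subscription.get('Destination')
        if not destination:
            continue
        try:
            requests.post(destination, json=payload, timeout=5)
            reachable.append(sub_id)
        except requests.RequestException:
            pass  # unreachable subscriber, skip it
    return reachable
```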
@@ -403,39 +397,24 @@ def handleNestedObject(self, obj):
     # this needs to be done on ALL agents, not just the one we just uploaded
     RedfishEventHandler.updateAllAgentsRedirectedLinks(self)

-    return visited  # why not the 'fetched' list?
+    return visited

 def create_uploaded_object(self, path: str, payload: dict):
     # before adding the ID and calling the methods, the JSON should be validated

     # generate a unique uuid if one is not present
     if '@odata.id' not in payload and 'Id' not in payload:
         pass
-        #id = str(uuid.uuid4())
-        #to_add = {
-        #    'Id': id,
-        #    '@odata.id': os.path.join(path, id)
-        #}
-        #payload.update(to_add)
         raise Exception("create_uploaded_object: no Redfish ID (@odata.id) found")

-    #object_type = self._get_type(payload)
     # we assume agents can upload collections, just not the root level collections
     # we will check for uploaded collections later
-    #if "Collection" in object_type:
-    #    raise CollectionNotSupported()

     payload_to_write = payload

     try:
-        # 1. check the path target of the operation exists
-        # self.storage_backend.read(path)
-        # 2. we don't check the manager; we assume the uploading agent is the manager unless it says otherwise
-        #agent_response = self.objects_manager.forward_to_manager(SunfishRequestType.CREATE, path, payload=payload)
-        #if agent_response:
-        #    payload_to_write = agent_response
-        # 3. should be no custom handler, this is not a POST, we upload the objects directly into the Redfish database
-        #self.objects_handler.dispatch(object_type, path, SunfishRequestType.CREATE, payload=payload)
+        # this would be another location to verify that the new object to be written
+        # meets Sunfish and Redfish requirements
         pass
     except ResourceNotFound:
         logger.error("The collection where the resource is to be created does not exist.")
@@ -445,7 +424,7 @@ def create_uploaded_object(self, path: str, payload: dict):
         # The object does not have a handler.
         logger.debug(f"The object {object_type} does not have a custom handler")
         pass
-    # 4. persist change in Sunfish tree
+    # persist change in Sunfish tree
     return self.storage_backend.write(payload_to_write)

 def get_aggregation_source(self, aggregation_source):
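The rewritten create_uploaded_object no longer invents an Id for the agent; a hypothetical standalone helper expressing that contract could look like the sketch below. The helper name, the ValueError, and the root-prefix check are all assumptions added for illustration.

```python
import os

def check_upload_identity(path: str, payload: dict) -> str:
    # The uploading agent must supply the Redfish identity; it is never generated here.
    if '@odata.id' not in payload and 'Id' not in payload:
        raise ValueError("no Redfish ID (@odata.id) found in uploaded object")
    # Fall back to composing the URI from the target path and the plain Id.
    odata_id = payload.get('@odata.id') or os.path.join(path, payload['Id'])
    if not odata_id.startswith('/redfish/v1'):
        raise ValueError(f"uploaded object has an unexpected @odata.id: {odata_id}")
    return odata_id
```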
@@ -500,7 +479,10 @@ def fetchResource(self, obj_id, aggregation_source):
     if response.status_code == 200:  # Agent must have returned this object
         redfish_obj = response.json()
         # however, it must be a minimally valid object
-        # the following test should really be more extensive, but for now:
+        # This would be a great spot to insert a call to a Redfish schema validation function
+        # that could return a grading of this new redfish_obj: [PASS, FAIL, CAUTIONS]
+        # However, we are debugging not just code, but also new Redfish schema,
+        # so for now we just test for two required Redfish Properties to help weed out obviously incorrect responses
         if '@odata.id' in redfish_obj and '@odata.type' in redfish_obj:

             # now rename if necessary and copy object into Sunfish inventory
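As a placeholder for the schema validation the new comment calls for, a grading helper might look like the sketch below. Only the two required properties match what the code actually tests today; the function, the Grade enum, and the extra @odata.type check are invented for illustration.

```python
from enum import Enum

class Grade(Enum):
    PASS = "PASS"
    FAIL = "FAIL"
    CAUTIONS = "CAUTIONS"

def grade_redfish_object(redfish_obj: dict) -> Grade:
    # Placeholder grading: FAIL on missing required properties, CAUTIONS on
    # suspicious but tolerable content, PASS otherwise. A real implementation
    # would validate against the published Redfish schema bundle.
    if any(prop not in redfish_obj for prop in ('@odata.id', '@odata.type')):
        return Grade.FAIL
    if not str(redfish_obj['@odata.type']).startswith('#'):
        return Grade.CAUTIONS  # Redfish types are normally written '#Namespace.vX_Y_Z.Type'
    return Grade.PASS
```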
@@ -608,7 +590,6 @@ def createInspectedObject(self,redfish_obj, aggregation_source):
     if redfish_obj["Oem"]["Sunfish_RM"]["BoundaryComponent"] == "BoundaryPort":
         RedfishEventHandler.track_boundary_port(self, redfish_obj, aggregation_source)
     # is this new object a new fabric object with the same fabric UUID as an existing fabric?
-    # RedfishEventHandler.checkForAliasedFabrics(self, redfish_obj, aggregation_source)
     RedfishEventHandler.create_uploaded_object(self, file_path, redfish_obj)

     return redfish_obj
@@ -863,7 +844,6 @@ def redirectUpstreamPortLinks(self,owning_agent_id, agent_bp_obj,uri_aliasDB):
     # extract the Endpoint URI associated with this parent object
     host_obj = self.storage_backend.read(host_link)
     redirected_endpoint = host_obj["Links"]["Endpoints"][0]["@odata.id"]
-    #redirected_endpoint = "None"  # for now, to test

     if "Links" not in agent_bp_obj:
         agent_bp_obj["Links"] = {}