 
 async def _setup_dsm(app: web.Application):
     cfg = app[APP_CONFIG_KEY]
-    main_cfg = cfg["main"]
 
     main_cfg = cfg["main"]
 
@@ -57,7 +56,8 @@ async def _setup_dsm(app: web.Application):
     s3_cfg = get_config_s3(app)
     bucket_name = s3_cfg["bucket_name"]
 
-    dsm = DataStorageManager(s3_client, engine, loop, pool, bucket_name)
+    testing = main_cfg["testing"]
+    dsm = DataStorageManager(s3_client, engine, loop, pool, bucket_name, not testing)
 
     app[APP_DSM_KEY] = dsm
 
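
For context on this hunk: the setup code now derives a project-DB flag from the `testing` entry of the main config and passes it as an extra positional argument to the DSM. Below is a minimal sketch of that wiring; the stand-in class and the literal config values are illustrative, only the `not testing` inversion and the argument order come from the diff.

```python
# Minimal sketch of the wiring above; the stand-in class and the literal
# config values are illustrative -- only the `not testing` inversion and the
# argument order come from the diff.
from concurrent.futures import ThreadPoolExecutor


class DataStorageManagerStub:
    """Stand-in: the sixth positional argument becomes ``has_project_db``."""

    def __init__(self, s3_client, engine, loop, pool, bucket_name, has_project_db):
        self.simcore_bucket_name = bucket_name
        self.has_project_db = has_project_db


main_cfg = {"testing": True}  # e.g. the config used by the test suite
dsm = DataStorageManagerStub(
    None, None, None, ThreadPoolExecutor(), "simcore", not main_cfg["testing"]
)
assert dsm.has_project_db is False  # project-DB lookups are skipped when testing
```

Presumably the inversion is there so that project-DB lookups are disabled exactly when storage runs without the projects tables, e.g. in isolated tests.
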
@@ -112,6 +112,8 @@ class DataStorageManager:
     loop: object
     pool: ThreadPoolExecutor
     simcore_bucket_name: str
+    has_project_db: bool
+
     datcore_tokens: Dict[str, DatCoreApiToken] = attr.Factory(dict)
     # TODO: perhaps can be used a cache? add a lifetime?
 
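
Since `DataStorageManager` looks like an `attrs` class (annotated fields plus an `attr.Factory` default), the new `has_project_db: bool` has to be declared before the defaulted `datcore_tokens`, and it becomes the sixth positional parameter of the generated `__init__`, matching the call site in `_setup_dsm`. A small self-contained sketch, assuming `@attr.s(auto_attribs=True)` and simplified field types:

```python
# Sketch of the attrs declaration, assuming @attr.s(auto_attribs=True);
# field types are simplified stand-ins for the real ones.
import attr


@attr.s(auto_attribs=True)
class DsmSketch:
    s3_client: object
    engine: object
    loop: object
    pool: object
    simcore_bucket_name: str
    has_project_db: bool                       # new mandatory field, no default
    datcore_tokens: dict = attr.Factory(dict)  # defaulted fields must come last


# attrs generates __init__ in declaration order, so has_project_db is the
# sixth positional argument -- matching the extra `not testing` at the call site.
dsm = DsmSketch(None, None, None, None, "simcore", True)
assert dsm.has_project_db is True
assert dsm.datcore_tokens == {}
```
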
@@ -185,25 +187,29 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re
             d = FileMetaData(**result_dict)
             data.append(d)
 
-            uuid_name_dict = {}
-            # now parse the project to search for node/project names
-            try:
-                async with self.engine.acquire() as conn:
-                    joint_table = user_to_projects.join(projects)
-                    query = sa.select([projects]).select_from(joint_table)\
-                        .where(user_to_projects.c.user_id == user_id)
-
-                    async for row in conn.execute(query):
-                        proj_data = {key:value for key,value in row.items()}
-
-                        uuid_name_dict[proj_data["uuid"]] = proj_data["name"]
-                        wb = proj_data['workbench']
-                        for node in wb.keys():
-                            uuid_name_dict[node] = wb[node]['label']
-            except DBAPIError as _err:
-                logger.exception("Error querying database for project names")
+            if self.has_project_db:
+                uuid_name_dict = {}
+                # now parse the project to search for node/project names
+                try:
+                    async with self.engine.acquire() as conn:
+                        joint_table = user_to_projects.join(projects)
+                        query = sa.select([projects]).select_from(joint_table)\
+                            .where(user_to_projects.c.user_id == user_id)
+
+                        async for row in conn.execute(query):
+                            proj_data = {key:value for key,value in row.items()}
+
+                            uuid_name_dict[proj_data["uuid"]] = proj_data["name"]
+                            wb = proj_data['workbench']
+                            for node in wb.keys():
+                                uuid_name_dict[node] = wb[node]['label']
+                except DBAPIError as _err:
+                    logger.exception("Error querying database for project names")
+
+                if not uuid_name_dict:
+                    # there seems to be no project whatsoever for user_id
+                    return []
 
-            if uuid_name_dict:
                 # only keep files from non-deleted project --> This needs to be fixed
                 clean_data = []
                 for d in data:
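
This hunk wraps the project-name lookup in the new guard and adds an early return: when `has_project_db` is false the whole enrichment is skipped, and when it is true but the user owns no projects the method now returns an empty list instead of falling through. A simplified, synchronous sketch of that control flow (`fetch_user_projects` is a hypothetical stand-in for the joined `user_to_projects`/`projects` query):

```python
# Simplified, synchronous sketch of the new control flow; fetch_user_projects
# is a hypothetical stand-in for the joined user_to_projects/projects query.
from typing import Callable, Dict, Iterable, Optional


def collect_uuid_names(
    has_project_db: bool, fetch_user_projects: Callable[[], Iterable[dict]]
) -> Optional[Dict[str, str]]:
    if not has_project_db:
        return None  # no project DB available (e.g. testing): skip enrichment
    uuid_name_dict = {}
    for proj in fetch_user_projects():
        uuid_name_dict[proj["uuid"]] = proj["name"]
        for node_id, node in proj["workbench"].items():
            uuid_name_dict[node_id] = node["label"]
    # an empty dict means the user owns no projects at all;
    # list_files would then return [] early
    return uuid_name_dict


assert collect_uuid_names(False, lambda: []) is None
assert collect_uuid_names(True, lambda: []) == {}
```

Note that the early `return []` is a behaviour change relative to the old `if uuid_name_dict:` guard, which merely skipped the project-based filtering.
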
@@ -228,16 +234,14 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re
                     clean_data.append(d)
 
             data = clean_data
-            for d in data:
-                logger.info(d)
 
             # same as above, make sure file is physically present on s3
             clean_data = []
             # MaG: This is inefficient: Do this automatically when file is modified
             _loop = asyncio.get_event_loop()
             session = aiobotocore.get_session(loop=_loop)
             async with session.create_client('s3', endpoint_url="http://" + self.s3_client.endpoint, aws_access_key_id=self.s3_client.access_key,
-                                             aws_secret_access_key=self.s3_client.secret_key) as client:
+                                              aws_secret_access_key=self.s3_client.secret_key) as client:
                 responses = await asyncio.gather(*[client.list_objects_v2(Bucket=d.bucket_name, Prefix=_d) for _d in [__d.object_name for __d in data]])
                 for d, resp in zip(data, responses):
                     if 'Contents' in resp:
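
The last hunk only re-aligns a continuation line and drops the per-file `logger.info`, but the surrounding context shows how the S3 presence check works: one `list_objects_v2` call per object key, gathered concurrently, with a `'Contents'` key in the response taken to mean the object exists. A standalone sketch of that pattern, assuming the same aiobotocore version the diff itself uses; the endpoint and credentials below are placeholders:

```python
# Standalone sketch of the S3 presence check shown in the context lines:
# one list_objects_v2 per object key, gathered concurrently; a response
# containing 'Contents' means at least one key matches the prefix.
# Endpoint and credentials are placeholders.
import asyncio

import aiobotocore


async def filter_existing(bucket_name: str, object_names: list) -> list:
    session = aiobotocore.get_session()
    async with session.create_client(
        "s3",
        endpoint_url="http://minio:9000",
        aws_access_key_id="access-key",
        aws_secret_access_key="secret-key",
    ) as client:
        responses = await asyncio.gather(
            *[
                client.list_objects_v2(Bucket=bucket_name, Prefix=name)
                for name in object_names
            ]
        )
    return [
        name for name, resp in zip(object_names, responses) if "Contents" in resp
    ]


# usage: existing = asyncio.get_event_loop().run_until_complete(
#     filter_existing("simcore", ["project-id/node-id/file.dat"]))
```
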