@@ -663,7 +663,7 @@ async def delete_freeze_dataset_version(
663663 if (
664664 frozen_dataset := await DatasetFreezeDB .find_one (
665665 DatasetFreezeDB .origin_id == PydanticObjectId (dataset_id ),
666- DatasetFreezeDB .frozen_version_num == frozen_version_num ,
666+ DatasetFreezeDB .frozen_version_num == frozen_version_num ,
667667 )
668668 ) is not None :
669669 return await _delete_frozen_dataset (frozen_dataset , fs , hard_delete = False )
@@ -837,8 +837,8 @@ async def get_dataset_folders_and_files(
837837 return page .dict ()
838838 raise HTTPException (status_code = 404 , detail = f"Dataset { dataset_id } not found" )
839839
840- @router .get ("/{dataset_id}/folders" , response_model = Paged )
841- async def get_dataset_folders (
840+ @router .get ("/{dataset_id}/all_folders" )
841+ async def get_dataset_folders_all (
842842 dataset_id : str ,
843843 authenticated : bool = Depends (CheckStatus ("AUTHENTICATED" )),
844844 public : bool = Depends (CheckStatus ("PUBLIC" )),
@@ -866,7 +866,31 @@ async def get_dataset_folders(
866866 FolderFileViewList .dataset_id == PydanticObjectId (dataset_id ),
867867 ]
868868
869- folders = (await FolderFileViewList .find (* query ).to_list ())
869+ folders = (await FolderFileViewList .find (* query ).aggregate (
870+ [
871+ _get_page_query (
872+ skip ,
873+ limit ,
874+ sort_clause = {
875+ "$sort" : {
876+ "object_type" : - 1 , # folder first
877+ "created" : - 1 , # then sort by created descendingly
878+ }
879+ },
880+ )
881+ ],
882+ ).to_list ())
883+ page_metadata = _construct_page_metadata (folders , skip , limit )
884+
885+ page = Paged (
886+ metadata = page_metadata ,
887+ data = [
888+ FolderOut (id = item .pop ("_id" ), ** item )
889+ for item in folders [0 ]["data" ]
890+ ],
891+ )
892+
893+ return page .dict ()
870894
871895 return folders .dict ()
872896 raise HTTPException (status_code = 404 , detail = f"Dataset { dataset_id } not found" )
0 commit comments