Skip to content

Commit 1533ef4

Browse files
committed
something works now
1 parent 802ee61 commit 1533ef4

File tree

1 file changed

+23

-16

lines changed

scripts/migration/migrate.py

Lines changed: 23 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -121,21 +121,24 @@ def get_clowder_v1_user_collections(headers, user_v1):
121121

122122
def get_clowder_v1_dataset_collections(headers, user_v1, dataset_id):
123123
matching_collections = []
124-
endpoint = f"{CLOWDER_V1}/api/collections"
124+
endpoint = f"{CLOWDER_V1}/api/collections/allCollections"
125125
response = requests.get(endpoint, headers=headers)
126-
user_collections = [
127-
col for col in response.json() if col["authorId"] == user_v1["id"]
128-
]
126+
user_collections = response.json()
129127
for collection in user_collections:
130128
collection_id = collection["id"]
129+
if collection['name'] == 'subchild':
130+
print("HERE")
131131
collection_dataset_endpoint = (
132132
f"{CLOWDER_V1}/api/collections/{collection_id}/datasets"
133133
)
134-
dataset_response = requests.get(collection_dataset_endpoint, headers)
135-
datasets = dataset_response.json()
136-
for ds in datasets:
137-
if ds["id"] == dataset_id:
138-
matching_collections.append(collection)
134+
try:
135+
dataset_response = requests.get(collection_dataset_endpoint, headers=headers)
136+
datasets = dataset_response.json()
137+
for ds in datasets:
138+
if ds["id"] == dataset_id:
139+
matching_collections.append(collection)
140+
except Exception as e:
141+
print('Exception', e)
139142
return matching_collections
140143

141144

@@ -485,12 +488,11 @@ def add_children(collection_hierarchy_json, remaining_collections):
485488

486489
def build_collection_hierarchy(collection_id, headers):
487490
self_and_ancestors = get_clowder_v1_collection_self_and_ancestors(
488-
collection_id=TEST_COL_ID, self_and_ancestors=[], headers=clowder_headers_v1
491+
collection_id=collection_id, self_and_ancestors=[], headers=headers
489492
)
490493
self_and_ancestors_collections = get_clowder_v1_collections(
491494
self_and_ancestors, headers=clowder_headers_v1
492495
)
493-
root_collections = []
494496
children = []
495497
remaining_collections = []
496498
for col in self_and_ancestors_collections:
@@ -501,12 +503,13 @@ def build_collection_hierarchy(collection_id, headers):
501503
parent_collection_ids = parent_collection_ids.rstrip(" ")
502504
if parent_collection_ids == "":
503505
root_col_entry = {"name": col["name"], "id": col["id"], "parents": []}
504-
root_collections.append(root_col_entry)
506+
children.append(root_col_entry)
505507
else:
506508
remaining_collections.append(col)
509+
507510
while len(remaining_collections) > 0:
508511
children, remaining_collections = add_children(
509-
root_collections, remaining_collections
512+
children, remaining_collections
510513
)
511514
print("Now we are done")
512515
return children
@@ -524,11 +527,15 @@ def build_collection_metadata_for_v1_dataset(dataset_id, user_v1, headers):
524527

525528
if __name__ == "__main__":
526529
# users_v1 = get_clowder_v1_users()
530+
endpoint = 'https://clowder.ncsa.illinois.edu/clowder/api/me'
531+
response = requests.get(endpoint, headers=base_headers_v1)
532+
user_v1 = response.json()
527533
TEST_COL_ID = "66d0a6c0e4b09db0f11b24e4"
528534
ROOT_COL_ID = "66d0a6aae4b09db0f11b24dd"
529-
result = build_collection_hierarchy(
530-
collection_id=TEST_COL_ID, headers=clowder_headers_v1
531-
)
535+
TEST_DATASET_ID = '66d0a604e4b09db0f11b2494'
536+
# ds_cols = get_clowder_v1_dataset_collections(headers=base_headers_v1, user_v1=user_v1, dataset_id=TEST_DATASET_ID)
537+
538+
result = build_collection_metadata_for_v1_dataset(dataset_id=TEST_DATASET_ID, user_v1=user_v1, headers=clowder_headers_v1)
532539
# parents = get_clowder_v1_parent_collection_ids(current_collection_id=TEST_COL_ID, headers=clowder_headers_v1)
533540
self_and_ancestors = get_clowder_v1_collection_self_and_ancestors(
534541
collection_id=TEST_COL_ID, self_and_ancestors=[], headers=clowder_headers_v1

0 commit comments

Comments (0)