Commit 909b8f6

Merge branch 'main' into 978-migrate-users
2 parents: 074154e + bb35e0b


42 files changed: +1748, -874 lines

.gitignore

Lines changed: 1 addition & 0 deletions
@@ -82,3 +82,4 @@ secrets.yaml
 # faker
 official.csv
 fact.png
+jupyterhub/.env-dev

README.md

Lines changed: 3 additions & 1 deletion
@@ -36,7 +36,9 @@ There is a few other documentation links available on the [website](https://clow
 ## Installation
 
 The easiest way of running Clowder v2 is checking out the [code](https://github.com/clowder-framework/clowder2)
-and running `docker compose up` in the main directory.
+and running `docker compose up` in the main directory. If you would like to run Clowder with JupyterHub,
+you can use our script `docker-prod.sh` to start the services. Run `./docker-prod.sh prod up` to start the services
+and `./docker-prod.sh prod down` to stop them.
 
 Helm charts are available for running Clowder v2 on Kubernetes. See the [helm](https://github.com/clowder-framework/clowder2/tree/main/deployments/kubernetes/charts) directory for more information.
 

backend/Pipfile

Lines changed: 1 addition & 1 deletion
@@ -5,6 +5,7 @@ name = "pypi"
 
 [packages]
 fastapi = "0.95.1"
+pydantic = "1.10.13"
 uvicorn = "0.21.1"
 motor = "3.1.2"
 mongoengine = "0.27.0"
@@ -21,7 +22,6 @@ pika = "1.3.1"
 aio-pika = "9.0.5"
 elasticsearch = "8.7.0"
 pipenv = "2023.4.20"
-install = "1.3.5"
 rocrate = "0.7.0"
 httpx = "0.24.0"
 packaging = "23.1"

backend/Pipfile.lock

Lines changed: 754 additions & 722 deletions
Some generated files are not rendered by default.

backend/app/routers/authentication.py

Lines changed: 8 additions & 9 deletions
@@ -1,14 +1,5 @@
 import json
 
-from beanie import PydanticObjectId
-from fastapi import APIRouter, Depends, HTTPException
-from keycloak.exceptions import (
-    KeycloakAuthenticationError,
-    KeycloakGetError,
-    KeycloakPostError,
-)
-from passlib.hash import bcrypt
-
 from app.keycloak_auth import (
     create_user,
     enable_disable_user,
@@ -17,6 +8,14 @@
 )
 from app.models.datasets import DatasetDBViewList
 from app.models.users import UserDB, UserIn, UserLogin, UserOut
+from beanie import PydanticObjectId
+from fastapi import APIRouter, Depends, HTTPException
+from keycloak.exceptions import (
+    KeycloakAuthenticationError,
+    KeycloakGetError,
+    KeycloakPostError,
+)
+from passlib.hash import bcrypt
 
 router = APIRouter()
 

backend/app/routers/authorization.py

Lines changed: 26 additions & 13 deletions
@@ -21,7 +21,6 @@
     GroupAndRole,
     UserAndRole,
 )
-from app.models.files import FileDB, FileOut
 from app.models.groups import GroupDB
 from app.models.users import UserDB
 from app.routers.authentication import get_admin, get_admin_mode
@@ -207,9 +206,9 @@ async def set_dataset_group_role(
                 auth_db.user_ids.append(u.user.email)
             await auth_db.replace()
             await index_dataset(
-                es, DatasetOut(**dataset.dict()), auth_db.user_ids
+                es, DatasetOut(**dataset.dict()), auth_db.user_ids, update=True
             )
-            await index_dataset_files(es, str(dataset_id))
+            await index_dataset_files(es, str(dataset_id), update=True)
             if len(readonly_user_ids) > 0:
                 readonly_auth_db = AuthorizationDB(
                     creator=user_id,
@@ -220,7 +219,10 @@ async def set_dataset_group_role(
                 )
                 await readonly_auth_db.insert()
                 await index_dataset(
-                    es, DatasetOut(**dataset.dict()), readonly_auth_db.user_ids
+                    es,
+                    DatasetOut(**dataset.dict()),
+                    readonly_auth_db.user_ids,
+                    update=True,
                 )
                 await index_dataset_files(es, str(dataset_id), update=True)
             return auth_db.dict()
@@ -244,9 +246,12 @@ async def set_dataset_group_role(
                 )
                 await readonly_auth_db.insert()
                 await index_dataset(
-                    es, DatasetOut(**dataset.dict()), readonly_auth_db.user_ids
+                    es,
+                    DatasetOut(**dataset.dict()),
+                    readonly_auth_db.user_ids,
+                    update=True,
                 )
-                await index_dataset_files(es, str(dataset_id))
+                await index_dataset_files(es, str(dataset_id), update=True)
             if len(user_ids) > 0:
                 auth_db = AuthorizationDB(
                     creator=user_id,
@@ -258,9 +263,9 @@ async def set_dataset_group_role(
                 # if there are read only users add them with the role of viewer
                 await auth_db.insert()
                 await index_dataset(
-                    es, DatasetOut(**dataset.dict()), auth_db.user_ids
+                    es, DatasetOut(**dataset.dict()), auth_db.user_ids, update=True
                 )
-                await index_dataset_files(es, str(dataset_id))
+                await index_dataset_files(es, str(dataset_id), update=True)
             return auth_db.dict()
     else:
         raise HTTPException(status_code=404, detail=f"Group {group_id} not found")
@@ -314,7 +319,9 @@ async def set_dataset_user_role(
             else:
                 auth_db.user_ids.append(username)
             await auth_db.save()
-            await index_dataset(es, DatasetOut(**dataset.dict()), auth_db.user_ids)
+            await index_dataset(
+                es, DatasetOut(**dataset.dict()), auth_db.user_ids, update=True
+            )
             await index_dataset_files(es, dataset_id, update=True)
             return auth_db.dict()
         else:
@@ -326,8 +333,10 @@ async def set_dataset_user_role(
                 user_ids=[username],
             )
             await auth_db.insert()
-            await index_dataset(es, DatasetOut(**dataset.dict()), [username])
-            await index_dataset_files(es, dataset_id)
+            await index_dataset(
+                es, DatasetOut(**dataset.dict()), [username], update=True
+            )
+            await index_dataset_files(es, dataset_id, update=True)
             return auth_db.dict()
     else:
         raise HTTPException(status_code=404, detail=f"User {username} not found")
@@ -364,7 +373,9 @@ async def remove_dataset_group_role(
                 auth_db.user_ids.remove(u.user.email)
             await auth_db.save()
             # Update elasticsearch index with new users
-            await index_dataset(es, DatasetOut(**dataset.dict()), auth_db.user_ids)
+            await index_dataset(
+                es, DatasetOut(**dataset.dict()), auth_db.user_ids, update=True
+            )
             await index_dataset_files(es, str(dataset_id), update=True)
             return auth_db.dict()
         else:
@@ -401,7 +412,9 @@ async def remove_dataset_user_role(
             auth_db.user_ids.remove(username)
            await auth_db.save()
             # Update elasticsearch index with updated users
-            await index_dataset(es, DatasetOut(**dataset.dict()), auth_db.user_ids)
+            await index_dataset(
+                es, DatasetOut(**dataset.dict()), auth_db.user_ids, update=True
+            )
             await index_dataset_files(es, dataset_id, update=True)
             return auth_db.dict()
         else:
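
The common thread in the authorization changes above is that every permission change now re-indexes the dataset and its files with `update=True`, so the existing Elasticsearch documents are refreshed rather than recreated. Below is a minimal, runnable sketch of that call pattern; the two index helpers are stand-in stubs for `app.search.index.index_dataset` and `index_dataset_files` (the real signatures and behavior live in the Clowder backend), and the dataset id and email are invented for illustration.

```python
import asyncio


async def index_dataset(es, dataset_out: dict, user_ids: list, update: bool = False):
    # Stub: the real helper writes the dataset document to Elasticsearch.
    print(f"{'update' if update else 'create'} dataset {dataset_out['id']} for {user_ids}")


async def index_dataset_files(es, dataset_id: str, update: bool = False):
    # Stub: the real helper refreshes every file document under the dataset.
    print(f"{'update' if update else 'create'} file entries for dataset {dataset_id}")


async def grant_role_and_reindex(es, dataset_out: dict, user_ids: list):
    # ...authorization document saved to the database here...
    # Re-index the dataset and its files so search reflects the new viewers.
    await index_dataset(es, dataset_out, user_ids, update=True)
    await index_dataset_files(es, str(dataset_out["id"]), update=True)


asyncio.run(grant_role_and_reindex(None, {"id": "652f0c1e"}, ["alice@example.com"]))
```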

backend/app/routers/files.py

Lines changed: 5 additions & 2 deletions
@@ -239,9 +239,12 @@ async def update_file(
 
         await new_version.insert()
         # Update entry to the file index
-        await index_file(es, FileOut(**updated_file.dict()))
+        await index_file(es, FileOut(**updated_file.dict()), update=True)
         await _resubmit_file_extractors(
-            updated_file, rabbitmq_client, user, credentials
+            FileOut(**updated_file.dict()),
+            rabbitmq_client=rabbitmq_client,
+            user=user,
+            credentials=credentials,
         )
 
         # updating metadata in elasticsearch
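
The `update_file` hunk also switches `_resubmit_file_extractors` to receive a `FileOut` built from the stored document and to take its remaining arguments by keyword. A hypothetical sketch of that conversion follows; the models, field names, and helper here are illustrative stand-ins, not the actual Clowder schemas.

```python
import asyncio

from pydantic import BaseModel


class FileDBSketch(BaseModel):
    # Stand-in for the stored document; field names are illustrative only.
    id: str
    name: str
    storage_path: str  # internal field that the Out schema drops


class FileOutSketch(BaseModel):
    # Stand-in for the API/index schema exposed to callers.
    id: str
    name: str


async def resubmit_file_extractors(file: FileOutSketch, *, rabbitmq_client, user, credentials):
    # Stub: the real helper re-queues extractor jobs over RabbitMQ.
    print(f"resubmitting extractors for {file.id} on behalf of {user}")


async def update_file(updated_file: FileDBSketch):
    # Convert the DB document to its Out schema, then pass the rest by keyword.
    await resubmit_file_extractors(
        FileOutSketch(**updated_file.dict()),
        rabbitmq_client=None,
        user="alice@example.com",
        credentials=None,
    )


asyncio.run(update_file(FileDBSketch(id="6521", name="report.pdf", storage_path="/tmp/report.pdf")))
```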

backend/app/routers/groups.py

Lines changed: 4 additions & 7 deletions
@@ -1,10 +1,6 @@
 from datetime import datetime
 from typing import Optional
 
-from beanie import PydanticObjectId
-from beanie.operators import Or, Push, RegEx
-from bson.objectid import ObjectId
-from fastapi import APIRouter, Depends, HTTPException
 from app import dependencies
 from app.deps.authorization_deps import AuthorizationDB, GroupAuthorization
 from app.keycloak_auth import get_current_user, get_user
@@ -14,7 +10,6 @@
 from app.models.pages import Paged, _construct_page_metadata, _get_page_query
 from app.models.users import UserDB, UserOut
 from app.routers.authentication import get_admin, get_admin_mode
-
 from app.search.index import index_dataset, index_dataset_files
 from beanie import PydanticObjectId
 from beanie.operators import Or, Push, RegEx
@@ -267,7 +262,7 @@ async def add_member(
                 )
             ) is not None:
                 await index_dataset(
-                    es, DatasetOut(**dataset.dict()), auth.user_ids
+                    es, DatasetOut(**dataset.dict()), auth.user_ids, update=True
                 )
                 await index_dataset_files(es, str(auth.dataset_id), update=True)
         return group.dict()
@@ -312,7 +307,9 @@ async def remove_member(
         if (
             dataset := await DatasetDB.get(PydanticObjectId(auth.dataset_id))
         ) is not None:
-            await index_dataset(es, DatasetOut(**dataset.dict()), auth.user_ids)
+            await index_dataset(
+                es, DatasetOut(**dataset.dict()), auth.user_ids, update=True
+            )
             await index_dataset_files(es, str(auth.dataset_id), update=True)
 
         return group.dict()

backend/app/routers/metadata_datasets.py

Lines changed: 5 additions & 4 deletions
@@ -111,7 +111,7 @@ async def add_dataset_metadata(
         await md.insert()
 
         # Add an entry to the metadata index
-        await index_dataset(es, dataset)
+        await index_dataset(es, DatasetOut(**dataset.dict()), update=True)
         return md.dict()
 
 
@@ -163,7 +163,7 @@ async def replace_dataset_metadata(
         await md.replace()
 
         # Update entry to the metadata index
-        await index_dataset(es, dataset, update=True)
+        await index_dataset(es, DatasetOut(**dataset.dict()), update=True)
         return md.dict()
     else:
         raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found")
@@ -229,8 +229,9 @@ async def update_dataset_metadata(
 
         md = await MetadataDB.find_one(*query)
         if md is not None:
-            await index_dataset(es, dataset, update=True)
-            return await patch_metadata(md, content, es)
+            patched_metadata = await patch_metadata(md, content, es)
+            await index_dataset(es, DatasetOut(**dataset.dict()), update=True)
+            return patched_metadata
         else:
             raise HTTPException(
                 status_code=404, detail="Metadata matching the query not found"
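
The last hunk reorders `update_dataset_metadata` so the metadata patch runs first, the dataset index is refreshed afterwards, and the patched result is what gets returned. A small runnable sketch of that ordering is below; the stub coroutines stand in for the real `patch_metadata` and `index_dataset` helpers, and the ids and values are made up.

```python
import asyncio


async def patch_metadata(md: dict, content: dict, es) -> dict:
    # Stub: the real helper validates the patch and persists it to MongoDB.
    md.update(content)
    return md


async def index_dataset(es, dataset_out: dict, update: bool = False):
    # Stub: the real helper pushes the dataset document to Elasticsearch.
    print(f"{'update' if update else 'create'} index entry for {dataset_out['id']}")


async def update_dataset_metadata(es, dataset_out: dict, md: dict, content: dict) -> dict:
    # Patch first and keep the result, re-index, then return the patched metadata.
    patched_metadata = await patch_metadata(md, content, es)
    await index_dataset(es, dataset_out, update=True)
    return patched_metadata


print(asyncio.run(update_dataset_metadata(None, {"id": "652f"}, {"creator": "alice"}, {"title": "Soil data"})))
```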

backend/app/routers/metadata_files.py

Lines changed: 1 addition & 1 deletion
@@ -148,7 +148,7 @@ async def add_file_metadata(
         await md.insert()
 
         # Add an entry to the metadata index
-        await index_file(es, FileOut(**file.dict()))
+        await index_file(es, FileOut(**file.dict()), update=True)
         return md.dict()
 
 
