
Commit 78ac621

Include parameters with extractor (#770)
* include parameters
* black format
* fix the content type guess
* revert
* black format
* return correct userout for api key too
* black
* add extractors yml for testing
* update the script
* use the latest tag
1 parent 3b0b152 commit 78ac621

File tree

5 files changed: +60 -6 lines changed

backend/app/keycloak_auth.py
backend/app/models/listeners.py
backend/app/rabbitmq/listeners.py
backend/app/routers/utils.py
docker-compose.extractors.yml


backend/app/keycloak_auth.py

Lines changed: 3 additions & 3 deletions
@@ -183,8 +183,8 @@ async def get_current_user(
                 ListenerAPIKeyDB.key == payload["key"],
             )
         ) is not None:
-            user_out = await UserDB.find_one(UserDB.email == key.user)
-            return user_out.dict()
+            user = await UserDB.find_one(UserDB.email == key.user)
+            return UserOut(**user.dict())
         elif (
             key := await UserAPIKeyDB.find_one(
                 UserAPIKeyDB.user == payload["user"],
@@ -203,7 +203,7 @@ async def get_current_user(
             )
         else:
             user = await UserDB.find_one(UserDB.email == key.user)
-            return user.dict()
+            return UserOut(**user.dict())
     else:
         raise HTTPException(
             status_code=401,
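The practical effect of this change is that the API-key branches now return the same response model as the rest of get_current_user: the stored UserDB document is re-validated through UserOut, so only the fields declared on the response model survive. A minimal sketch of the pattern, using illustrative field names rather than the project's actual models:

from pydantic import BaseModel


class UserDB(BaseModel):
    email: str
    full_name: str
    hashed_password: str  # internal field that should never leave the API


class UserOut(BaseModel):
    email: str
    full_name: str


db_user = UserDB(email="ada@example.org", full_name="Ada", hashed_password="$2b$...")

# Returning the raw dict leaks every stored field:
db_user.dict()             # includes 'hashed_password'

# Re-validating through the response model keeps only the public fields
# (Pydantic ignores unknown keyword arguments by default):
UserOut(**db_user.dict())  # UserOut(email='ada@example.org', full_name='Ada')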

backend/app/models/listeners.py

Lines changed: 1 addition & 0 deletions
@@ -160,6 +160,7 @@ class EventListenerJobMessage(BaseModel):
     id: str
     datasetId: str
     job_id: str
+    parameters: Optional[dict] = None


 class EventListenerDatasetJobMessage(BaseModel):
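Making parameters an Optional[dict] with a None default keeps the message model backward compatible: existing producers that never set it still validate, while new producers can attach arbitrary extractor parameters. A small stand-in sketch (not the full EventListenerJobMessage):

from typing import Optional

from pydantic import BaseModel


class JobMessage(BaseModel):
    id: str
    datasetId: str
    job_id: str
    parameters: Optional[dict] = None  # omitted by older callers


# Older callers keep working:
JobMessage(id="1", datasetId="d1", job_id="j1")

# Newer callers can pass extractor parameters along with the job:
JobMessage(id="1", datasetId="d1", job_id="j1", parameters={"threshold": 0.5})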

backend/app/rabbitmq/listeners.py

Lines changed: 1 addition & 2 deletions
@@ -7,13 +7,11 @@
 from pika.adapters.blocking_connection import BlockingChannel

 from app import dependencies
-from app.keycloak_auth import get_token
 from app.models.config import ConfigEntryDB
 from app.models.datasets import DatasetOut
 from app.models.files import FileOut
 from app.models.listeners import (
     EventListenerJobDB,
-    EventListenerDB,
     EventListenerJobMessage,
     EventListenerDatasetJobMessage,
 )
@@ -76,6 +74,7 @@ async def submit_file_job(
         datasetId=str(file_out.dataset_id),
         secretKey=current_secretKey,
         job_id=str(job.id),
+        parameters=parameters,
     )
     reply_to = await create_reply_queue()
     print("RABBITMQ_CLIENT: " + str(rabbitmq_client))

backend/app/routers/utils.py

Lines changed: 3 additions & 1 deletion
@@ -1,5 +1,4 @@
 import mimetypes
-import io
 from typing import Optional

 from app.models.files import ContentType
@@ -19,5 +18,8 @@ def get_content_type(
     if content_type is None:
         content_type = mimetypes.guess_type(filename)
         content_type = content_type[0] if len(content_type) > 1 else content_type
+    # If still cant guess the content_type, set it to default
+    if content_type is None:
+        content_type = "application/octet-stream"
     type_main = content_type.split("/")[0] if type(content_type) is str else "N/A"
     return ContentType(content_type=content_type, main_type=type_main)
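The fix here closes the case where mimetypes.guess_type cannot identify the file and the content type would otherwise stay None; it now falls back to the generic application/octet-stream. A self-contained sketch of the resulting behavior, with ContentType re-declared locally as a stand-in for app.models.files.ContentType:

import mimetypes
from typing import Optional

from pydantic import BaseModel


class ContentType(BaseModel):
    content_type: str
    main_type: str


def get_content_type(filename: str, content_type: Optional[str] = None) -> ContentType:
    if content_type is None:
        # guess_type returns a (type, encoding) tuple; keep the type part
        content_type = mimetypes.guess_type(filename)[0]
    # If the type still cannot be guessed, fall back to a generic binary type
    if content_type is None:
        content_type = "application/octet-stream"
    main_type = content_type.split("/")[0]
    return ContentType(content_type=content_type, main_type=main_type)


print(get_content_type("report.pdf"))       # content_type='application/pdf', main_type='application'
print(get_content_type("mystery.data123"))  # falls back to application/octet-stream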

docker-compose.extractors.yml

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
+version: '3.7'
+
+
+services:
+  name-entity-recognition:
+    image: socialmediamacroscope/name_entity_recognition_extractor:latest
+    environment:
+      CLOWDER_VERSION: 2
+      RABBITMQ_URI: amqp://guest:guest@rabbitmq:5672/%2F
+    networks:
+      - clowder2
+    restart: unless-stopped
+
+  network-analysis:
+    image: socialmediamacroscope/network_analysis_extractor:latest
+    environment:
+      CLOWDER_VERSION: 2
+      RABBITMQ_URI: amqp://guest:guest@rabbitmq:5672/%2F
+    networks:
+      - clowder2
+    restart: unless-stopped
+
+  topic-modeling:
+    image: socialmediamacroscope/topic_modeling_extractor:latest
+    environment:
+      CLOWDER_VERSION: 2
+      RABBITMQ_URI: amqp://guest:guest@rabbitmq:5672/%2F
+    networks:
+      - clowder2
+    restart: unless-stopped
+
+  natural-language-preprocessing:
+    image: socialmediamacroscope/preprocessing_extractor:latest
+    environment:
+      CLOWDER_VERSION: 2
+      RABBITMQ_URI: amqp://guest:guest@rabbitmq:5672/%2F
+    networks:
+      - clowder2
+    restart: unless-stopped
+
+  sentiment-analysis:
+    image: socialmediamacroscope/sentiment_analysis_extractor:latest
+    environment:
+      CLOWDER_VERSION: 2
+      RABBITMQ_URI: amqp://guest:guest@rabbitmq:5672/%2F
+    networks:
+      - clowder2
+    restart: unless-stopped
+
+
+networks:
+  clowder2:
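These extractor containers only need the clowder2 network and a RabbitMQ instance reachable with the default guest credentials; assuming the main stack lives in docker-compose.yml, they can typically be started alongside it with something like: docker compose -f docker-compose.yml -f docker-compose.extractors.yml up -d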
