Skip to content

Commit dbe7cf5

Browse files
authored
backend: remove file paths from /chat (#652)
* wip
* fix tests
* remove file path from factory
* fix tests
1 parent 2e3c971 commit dbe7cf5

File tree

9 files changed

+38
-27
lines changed

9 files changed

+38
-27
lines changed

Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ exec-db:
4444

4545
.PHONY: migration
4646
migration:
47-
docker compose run --build backend alembic -c src/backend/alembic.ini revision --autogenerate
47+
docker compose run --build backend alembic -c src/backend/alembic.ini revision --autogenerate -m "$(message)"
4848

4949
.PHONY: migrate
5050
migrate:
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
"""Remove file path

Revision ID: 08bcb9a24d9b
Revises: c301506b3676
Create Date: 2024-08-21 15:59:18.678457

"""
from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = '08bcb9a24d9b'
down_revision: Union[str, None] = 'c301506b3676'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Drop the unused ``file_path`` column from the ``files`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('files', 'file_path')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Re-create the ``file_path`` column on the ``files`` table.

    NOTE(review): the column is restored as NOT NULL with no server
    default, so downgrading a ``files`` table that already holds rows
    will fail the NOT NULL constraint — confirm whether a server
    default (e.g. ``''``) should be supplied here.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('files', sa.Column('file_path', sa.VARCHAR(), autoincrement=False, nullable=False))
    # ### end Alembic commands ###

src/backend/database_models/file.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@ class File(Base):
99

1010
user_id: Mapped[str] = mapped_column(String, nullable=True)
1111
file_name: Mapped[str]
12-
file_path: Mapped[str]
1312
file_size: Mapped[int] = mapped_column(default=0)
1413
file_content: Mapped[str] = mapped_column(default="")
1514

src/backend/schemas/file.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,6 @@ class File(BaseModel):
1212
conversation_id: str
1313
file_content: str
1414
file_name: str
15-
file_path: str
1615
file_size: int = Field(default=0, ge=0)
1716

1817
class Config:
@@ -26,7 +25,6 @@ class ConversationFilePublic(BaseModel):
2625

2726
conversation_id: str
2827
file_name: str
29-
file_path: str
3028
file_size: int = Field(default=0, ge=0)
3129

3230

@@ -37,7 +35,6 @@ class AgentFilePublic(BaseModel):
3735
updated_at: datetime.datetime
3836

3937
file_name: str
40-
file_path: str
4138
file_size: int = Field(default=0, ge=0)
4239

4340
class ListConversationFile(ConversationFilePublic):

src/backend/services/chat.py

Lines changed: 7 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,6 @@
4444
StreamToolResult,
4545
ToolInputType,
4646
)
47-
from backend.schemas.cohere_chat import CohereChatRequest
4847
from backend.schemas.context import Context
4948
from backend.schemas.conversation import UpdateConversationRequest
5049
from backend.schemas.search_query import SearchQuery
@@ -127,14 +126,13 @@ def process_chat(
127126
id=str(uuid4()),
128127
)
129128

130-
if isinstance(chat_request, CohereChatRequest):
131-
if should_store:
132-
attach_files_to_messages(
133-
session,
134-
user_id,
135-
user_message.id,
136-
chat_request.file_ids,
137-
)
129+
if should_store:
130+
attach_files_to_messages(
131+
session,
132+
user_id,
133+
user_message.id,
134+
chat_request.file_ids
135+
)
138136

139137
chat_history = create_chat_history(
140138
conversation, next_message_position, chat_request

src/backend/services/file.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -430,7 +430,6 @@ def get_files_in_compass(
430430
id=file_id,
431431
file_name=fetched_doc["file_name"],
432432
file_size=fetched_doc["file_size"],
433-
file_path=fetched_doc["file_path"],
434433
file_content=fetched_doc["text"],
435434
user_id=user_id,
436435
created_at=datetime.fromisoformat(fetched_doc["created_at"]),
@@ -491,7 +490,6 @@ async def consolidate_agent_files_in_compass(
491490
"custom_context": {
492491
"file_id": file_id,
493492
"file_name": fetched_doc["file_name"],
494-
"file_path": fetched_doc["file_path"],
495493
"file_size": fetched_doc["file_size"],
496494
"user_id": fetched_doc["user_id"],
497495
"created_at": fetched_doc["created_at"],
@@ -574,7 +572,6 @@ async def insert_files_in_compass(
574572
"custom_context": {
575573
"file_id": new_file_id,
576574
"file_name": filename,
577-
"file_path": filename,
578575
"file_size": file.size,
579576
"user_id": user_id,
580577
"created_at": datetime.now().isoformat(),
@@ -596,7 +593,6 @@ async def insert_files_in_compass(
596593
file_name=filename,
597594
id=new_file_id,
598595
file_size=file.size,
599-
file_path=filename,
600596
user_id=user_id,
601597
created_at=datetime.now(),
602598
updated_at=datetime.now(),
@@ -661,7 +657,6 @@ async def insert_files_in_db(
661657
FileModel(
662658
file_name=filename,
663659
file_size=file.size,
664-
file_path=filename,
665660
file_content=cleaned_content,
666661
user_id=user_id,
667662
)
@@ -682,7 +677,6 @@ def attach_conversation_id_to_files(
682677
conversation_id=conversation_id,
683678
file_name=file.file_name,
684679
file_size=file.file_size,
685-
file_path=file.file_path,
686680
user_id=file.user_id,
687681
created_at=file.created_at,
688682
updated_at=file.updated_at,

src/backend/tests/unit/crud/test_file.py

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,34 +13,29 @@ def conversation(session, user):
1313
def test_create_file(session, user):
1414
file_data = File(
1515
file_name="test.txt",
16-
file_path="/tmp/test.txt",
1716
file_size=100,
1817
user_id=user.id,
1918
)
2019

2120
file = file_crud.create_file(session, file_data)
2221
assert file.file_name == file_data.file_name
23-
assert file.file_path == file_data.file_path
2422
assert file.file_size == file_data.file_size
2523
assert file.user_id == file_data.user_id
2624

2725
file = file_crud.get_file(session, file.id, user.id)
2826
assert file.file_name == file_data.file_name
29-
assert file.file_path == file_data.file_path
3027
assert file.file_size == file_data.file_size
3128
assert file.user_id == file_data.user_id
3229

3330

3431
def test_batch_create_files(session, user):
3532
file_data = File(
3633
file_name="test.txt",
37-
file_path="/tmp/test.txt",
3834
file_size=100,
3935
user_id=user.id,
4036
)
4137
file_data2 = File(
4238
file_name="test2.txt",
43-
file_path="/tmp/test2.txt",
4439
file_size=100,
4540
user_id=user.id,
4641
)

src/backend/tests/unit/factories/file.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,5 +11,4 @@ class Meta:
1111

1212
user_id = factory.Faker("uuid4")
1313
file_name = factory.Faker("file_name")
14-
file_path = factory.Faker("file_path")
1514
file_size = factory.Faker("random_int", min=1, max=20000000)

src/backend/tools/tavily.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -140,7 +140,7 @@ def to_langchain_tool(self) -> TavilySearchResults:
140140
internet_search.name = "internet_search"
141141
internet_search.description = "Returns a list of relevant document snippets for a textual query retrieved from the internet."
142142

143-
# pydantic v1 base model
143+
# Pydantic v1 base model
144144
from langchain_core.pydantic_v1 import BaseModel, Field
145145

146146
class TavilySearchInput(BaseModel):

0 commit comments

Comments
 (0)