Skip to content

Commit 00a64ac

Browse files
densumeshcdxker
authored and committed
cleanup: create file object in handler
1 parent cff6b51 commit 00a64ac

File tree

2 files changed

+17
-19
lines changed

2 files changed

+17
-19
lines changed

server/src/bin/file-worker.rs

Lines changed: 4 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -14,9 +14,7 @@ use trieve_server::{
1414
operators::{
1515
clickhouse_operator::{ClickHouseEvent, EventQueue},
1616
dataset_operator::get_dataset_and_organization_from_dataset_id_query,
17-
file_operator::{
18-
create_file_chunks, create_file_query, get_aws_bucket, preprocess_file_to_chunks,
19-
},
17+
file_operator::{create_file_chunks, get_aws_bucket, preprocess_file_to_chunks},
2018
group_operator::{create_group_from_file_query, create_groups_query},
2119
},
2220
};
@@ -313,17 +311,6 @@ async fn upload_file(
313311
)
314312
.await?;
315313

316-
let file_size_mb = (file_data.len() as f64 / 1024.0 / 1024.0).round() as i64;
317-
318-
let created_file = create_file_query(
319-
file_id,
320-
file_size_mb,
321-
file_worker_message.upload_file_data.clone(),
322-
file_worker_message.dataset_id,
323-
web_pool.clone(),
324-
)
325-
.await?;
326-
327314
let group_id = if !file_worker_message
328315
.upload_file_data
329316
.pdf2md_options
@@ -365,7 +352,7 @@ async fn upload_file(
365352

366353
let group_id = chunk_group.id;
367354

368-
create_group_from_file_query(group_id, created_file.id, web_pool.clone())
355+
create_group_from_file_query(group_id, file_worker_message.file_id, web_pool.clone())
369356
.await
370357
.map_err(|e| {
371358
log::error!("Could not create group from file {:?}", e);
@@ -568,7 +555,7 @@ async fn upload_file(
568555

569556
if !new_chunks.is_empty() {
570557
create_file_chunks(
571-
created_file.id,
558+
file_worker_message.file_id,
572559
file_worker_message.upload_file_data.clone(),
573560
new_chunks.clone(),
574561
dataset_org_plan_sub.clone(),
@@ -724,7 +711,7 @@ async fn upload_file(
724711
.collect::<Vec<_>>();
725712

726713
create_file_chunks(
727-
created_file.id,
714+
file_worker_message.file_id,
728715
file_worker_message.upload_file_data,
729716
chunks,
730717
dataset_org_plan_sub,

server/src/handlers/file_handler.rs

Lines changed: 13 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -12,8 +12,8 @@ use crate::{
1212
operators::{
1313
crawl_operator::{process_crawl_doc, Document},
1414
file_operator::{
15-
delete_file_query, get_aws_bucket, get_csvjsonl_aws_bucket, get_dataset_file_query,
16-
get_file_query,
15+
create_file_query, delete_file_query, get_aws_bucket, get_csvjsonl_aws_bucket,
16+
get_dataset_file_query, get_file_query,
1717
},
1818
organization_operator::{get_file_size_sum_org, hash_function},
1919
},
@@ -189,6 +189,17 @@ pub async fn upload_file_handler(
189189
ServiceError::BadRequest("Could not upload file to S3".to_string())
190190
})?;
191191

192+
let file_size_mb = (decoded_file_data.len() as f64 / 1024.0 / 1024.0).round() as i64;
193+
194+
create_file_query(
195+
file_id,
196+
file_size_mb,
197+
upload_file_data.clone(),
198+
dataset_org_plan_sub.dataset.id,
199+
pool.clone(),
200+
)
201+
.await?;
202+
192203
let message = FileWorkerMessage {
193204
file_id,
194205
dataset_id: dataset_org_plan_sub.dataset.id,

0 commit comments

Comments (0)