diff --git a/backend/src/reports/reports.service.ts b/backend/src/reports/reports.service.ts
index 876f877e..2e841ea6 100644
--- a/backend/src/reports/reports.service.ts
+++ b/backend/src/reports/reports.service.ts
@@ -8,11 +8,11 @@ import {
import { ConfigService } from '@nestjs/config';
import {
DynamoDBClient,
- ScanCommand,
GetItemCommand,
UpdateItemCommand,
DynamoDBServiceException,
PutItemCommand,
+ QueryCommand,
} from '@aws-sdk/client-dynamodb';
import { marshall, unmarshall } from '@aws-sdk/util-dynamodb';
import { Report, ReportStatus } from './models/report.model';
@@ -60,10 +60,10 @@ export class ReportsService {
}
try {
- // If the table has a GSI for userId, use QueryCommand instead
- const command = new ScanCommand({
+ // Use QueryCommand instead of ScanCommand since userId is the partition key
+ const command = new QueryCommand({
TableName: this.tableName,
- FilterExpression: 'userId = :userId',
+ KeyConditionExpression: 'userId = :userId',
ExpressionAttributeValues: marshall({
':userId': userId,
}),
@@ -105,23 +105,21 @@ export class ReportsService {
typeof queryDto.limit === 'string' ? parseInt(queryDto.limit, 10) : queryDto.limit || 10;
try {
- // If the table has a GSI for userId, use QueryCommand instead
- const command = new ScanCommand({
+ // Query the userIdCreatedAtIndex GSI instead of scanning: a Scan would read
+ // and filter every item in the table, while this Query reads only this user's items
+ const command = new QueryCommand({
TableName: this.tableName,
- FilterExpression: 'userId = :userId',
+ IndexName: 'userIdCreatedAtIndex',
+ KeyConditionExpression: 'userId = :userId',
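+ // Assumes the GSI hashes on userId and uses createdAt as its range key, so
+ // results come back already sorted by creation time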
ExpressionAttributeValues: marshall({
':userId': userId,
}),
- Limit: limit * 5, // Fetch more items since we'll filter by userId
+ ScanIndexForward: false, // Get items in descending order (newest first)
+ Limit: limit, // Only fetch the number of items we need
});
const response = await this.dynamoClient.send(command);
- const reports = (response.Items || []).map(item => unmarshall(item) as Report);
-
- // Sort by createdAt in descending order
- return reports
- .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())
- .slice(0, limit);
+ return (response.Items || []).map(item => unmarshall(item) as Report);
} catch (error: unknown) {
this.logger.error(`Error fetching latest reports for user ${userId}:`);
this.logger.error(error);
@@ -131,6 +129,26 @@ export class ReportsService {
throw new InternalServerErrorException(
`Table "${this.tableName}" not found. Please check your database configuration.`,
);
+ } else if (error.name === 'ValidationException') {
+ // This could happen if the GSI doesn't exist
+ this.logger.warn('GSI validation error, falling back to standard query');
+
+ // Fallback to standard query and sort in memory if GSI has issues
+ const fallbackCommand = new QueryCommand({
+ TableName: this.tableName,
+ KeyConditionExpression: 'userId = :userId',
+ ExpressionAttributeValues: marshall({
+ ':userId': userId,
+ }),
+ });
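+ // Note: this fallback reads the user's whole partition in a single page; if a
+ // user's reports can exceed DynamoDB's 1 MB page limit, pagination via
+ // LastEvaluatedKey would be needed here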
+
+ const fallbackResponse = await this.dynamoClient.send(fallbackCommand);
+ const reports = (fallbackResponse.Items || []).map(item => unmarshall(item) as Report);
+
+ // Sort by createdAt in descending order
+ return reports
+ .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime())
+ .slice(0, limit);
}
}
@@ -330,25 +348,75 @@ export class ReportsService {
throw new ForbiddenException('User ID is required');
}
+ // Log the actual filePath being searched for debugging
+ this.logger.log(`Searching for report with filePath: "${filePath}" for user ${userId}`);
+
try {
- // Since filePath isn't a key attribute, we need to scan with filter
- const command = new ScanCommand({
+ const command = new QueryCommand({
TableName: this.tableName,
- FilterExpression: 'filePath = :filePath AND userId = :userId',
+ KeyConditionExpression: 'userId = :userId',
+ FilterExpression: 'filePath = :filePath',
ExpressionAttributeValues: marshall({
- ':filePath': filePath,
':userId': userId,
+ ':filePath': filePath,
}),
- Limit: 1, // We only want one record
+ // No Limit here: on a Query, Limit caps the number of items *read* before
+ // the FilterExpression is applied, so `Limit: 1` would miss a matching
+ // report that is not the first item in the user's partition.
});
+ this.logger.log('Executing QueryCommand with params:', {
+ TableName: this.tableName,
+ KeyConditionExpression: 'userId = :userId',
+ FilterExpression: 'filePath = :filePath',
+ Values: {
+ userId,
+ filePath,
+ },
+ });
+
const response = await this.dynamoClient.send(command);
+ this.logger.log(`Query response received, found ${response.Items?.length || 0} items`);
+
if (!response.Items || response.Items.length === 0) {
+ // If no exact match, try with case-insensitive comparison as a fallback
+ this.logger.log('No exact match found, trying with case-insensitive search');
+
+ // Get all items for the user and filter manually for case-insensitive match
+ const allUserItemsCommand = new QueryCommand({
+ TableName: this.tableName,
+ KeyConditionExpression: 'userId = :userId',
+ ExpressionAttributeValues: marshall({
+ ':userId': userId,
+ }),
+ });
+
+ const allUserResponse = await this.dynamoClient.send(allUserItemsCommand);
+
+ if (!allUserResponse.Items || allUserResponse.Items.length === 0) {
+ return null;
+ }
+
+ // Convert items and find case-insensitive match
+ const allReports = allUserResponse.Items.map(item => unmarshall(item) as Report);
+ const matchingReport = allReports.find(
+ report => report.filePath.toLowerCase() === filePath.toLowerCase(),
+ );
+
+ if (matchingReport) {
+ this.logger.log(
+ `Found case-insensitive match for ${filePath}: ${matchingReport.filePath}`,
+ );
+
+ return matchingReport;
+ }
+
return null;
}
- return unmarshall(response.Items[0]) as Report;
+ const result = unmarshall(response.Items[0]) as Report;
+ this.logger.log(`Found report with ID ${result.id}`);
+
+ return result;
} catch (error: unknown) {
this.logger.error(`Error finding report with filePath ${filePath}:`);
this.logger.error(error);
diff --git a/frontend/src/common/api/reportService.ts b/frontend/src/common/api/reportService.ts
index d16fc4e1..ec4f3d5f 100644
--- a/frontend/src/common/api/reportService.ts
+++ b/frontend/src/common/api/reportService.ts
@@ -1,9 +1,41 @@
import axios, { AxiosProgressEvent } from 'axios';
-import { MedicalReport } from '../models/medicalReport';
+import { MedicalReport, ReportCategory, ReportStatus } from '../models/medicalReport';
import { fetchAuthSession } from '@aws-amplify/auth';
// Get the API URL from environment variables
const API_URL = import.meta.env.VITE_BASE_URL_API || '';
+// Mock data for testing and development
+const mockReports: MedicalReport[] = [
+ {
+ id: '1',
+ userId: 'user1',
+ title: 'Blood Test Report',
+ category: ReportCategory.GENERAL,
+ bookmarked: false,
+ isProcessed: true,
+ labValues: [],
+ summary: 'Blood test results within normal range',
+ status: ReportStatus.UNREAD,
+ filePath: '/reports/blood-test.pdf',
+ createdAt: '2023-04-15T12:30:00Z',
+ updatedAt: '2023-04-15T12:30:00Z',
+ },
+ {
+ id: '2',
+ userId: 'user1',
+ title: 'Heart Checkup',
+ category: ReportCategory.HEART,
+ bookmarked: true,
+ isProcessed: true,
+ labValues: [],
+ summary: 'Heart functioning normally',
+ status: ReportStatus.READ,
+ filePath: '/reports/heart-checkup.pdf',
+ createdAt: '2023-04-10T10:15:00Z',
+ updatedAt: '2023-04-10T10:15:00Z',
+ },
+];
+
/**
* Interface for upload progress callback
*/
@@ -14,16 +46,22 @@ export interface UploadProgressCallback {
/**
* Creates an authenticated request config with bearer token
*/
-export const getAuthConfig = async (signal?: AbortSignal): Promise<{ headers: { Accept: string, 'Content-Type': string, Authorization: string }, signal?: AbortSignal, onUploadProgress?: (progressEvent: AxiosProgressEvent) => void }> => {
+export const getAuthConfig = async (
+ signal?: AbortSignal,
+): Promise<{
+ headers: { Accept: string; 'Content-Type': string; Authorization: string };
+ signal?: AbortSignal;
+ onUploadProgress?: (progressEvent: AxiosProgressEvent) => void;
+}> => {
const session = await fetchAuthSession();
const idToken = session.tokens?.idToken?.toString() || '';
return {
- headers: {
- Accept: 'application/json',
- 'Content-Type': 'application/json',
- Authorization: idToken ? `Bearer ${idToken}` : ''
- },
- signal
+ headers: {
+ Accept: 'application/json',
+ 'Content-Type': 'application/json',
+ Authorization: idToken ? `Bearer ${idToken}` : '',
+ },
+ signal,
};
};
@@ -47,7 +85,7 @@ export class ReportError extends Error {
export const uploadReport = async (
file: File,
onProgress?: UploadProgressCallback,
- signal?: AbortSignal
+ signal?: AbortSignal,
): Promise<MedicalReport> => {
try {
// Import s3StorageService dynamically to avoid circular dependency
@@ -58,7 +96,7 @@ export const uploadReport = async (
file,
'reports',
onProgress as (progress: number) => void,
- signal
+ signal,
);
// Then create the report record with the S3 key
@@ -70,7 +108,7 @@ export const uploadReport = async (
{
filePath: s3Key,
},
- config
+ config,
);
return response.data;
@@ -79,7 +117,7 @@ export const uploadReport = async (
if (signal?.aborted) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
+
if (axios.isAxiosError(error)) {
console.error('API Error Details:', error.response?.data, error.response?.headers);
throw new ReportError(`Failed to upload report: ${error.message}`);
@@ -95,7 +133,10 @@ export const uploadReport = async (
*/
export const fetchLatestReports = async (limit = 3): Promise<MedicalReport[]> => {
try {
- const response = await axios.get(`${API_URL}/api/reports/latest?limit=${limit}`, await getAuthConfig());
+ const response = await axios.get(
+ `${API_URL}/api/reports/latest?limit=${limit}`,
+ await getAuthConfig(),
+ );
console.log('response', response.data);
console.log('API_URL', API_URL);
return response.data;
@@ -113,7 +154,7 @@ export const fetchLatestReports = async (limit = 3): Promise<MedicalReport[]> =>
*/
export const fetchAllReports = async (): Promise<MedicalReport[]> => {
try {
- const response = await axios.get(`${API_URL}/api/reports`, await getAuthConfig() );
+ const response = await axios.get(`${API_URL}/api/reports`, await getAuthConfig());
return response.data;
} catch (error) {
if (axios.isAxiosError(error)) {
@@ -131,7 +172,7 @@ export const fetchAllReports = async (): Promise => {
export const markReportAsRead = async (reportId: string): Promise<MedicalReport> => {
try {
const response = await axios.patch(`${API_URL}/api/reports/${reportId}`, {
- status: 'READ'
+ status: 'READ',
});
return response.data;
@@ -149,17 +190,24 @@ export const markReportAsRead = async (reportId: string): Promise<MedicalReport>
* @param isBookmarked - Boolean indicating if the report should be bookmarked or not
* @returns Promise with the updated report
*/
-export const toggleReportBookmark = async (reportId: string, isBookmarked: boolean): Promise<MedicalReport> => {
+export const toggleReportBookmark = async (
+ reportId: string,
+ isBookmarked: boolean,
+): Promise<MedicalReport> => {
try {
- await axios.patch(`${API_URL}/api/reports/${reportId}/bookmark`, {
- bookmarked: isBookmarked
- }, await getAuthConfig());
+ await axios.patch(
+ `${API_URL}/api/reports/${reportId}/bookmark`,
+ {
+ bookmarked: isBookmarked,
+ },
+ await getAuthConfig(),
+ );
// In a real implementation, this would return the response from the API
// return response.data;
// For now, we'll mock the response
- const report = mockReports.find(r => r.id === reportId);
+ const report = mockReports.find((r) => r.id === reportId);
if (!report) {
throw new Error(`Report with ID ${reportId} not found`);
diff --git a/frontend/src/common/components/Router/TabNavigation.tsx b/frontend/src/common/components/Router/TabNavigation.tsx
index eb8536b4..27989d83 100644
--- a/frontend/src/common/components/Router/TabNavigation.tsx
+++ b/frontend/src/common/components/Router/TabNavigation.tsx
@@ -18,6 +18,7 @@ import ChatPage from 'pages/Chat/ChatPage';
import UploadPage from 'pages/Upload/UploadPage';
import ReportDetailPage from 'pages/Reports/ReportDetailPage';
import ReportsListPage from 'pages/Reports/ReportsListPage';
+import Processing from 'pages/Processing/Processing';
/**
* The `TabNavigation` component provides a router outlet for all of the
@@ -90,6 +91,9 @@ const TabNavigation = (): JSX.Element => {
+ <Route exact path="/tabs/processing">
+ <Processing />
+ </Route>
diff --git a/frontend/src/common/components/Upload/UploadModal.tsx b/frontend/src/common/components/Upload/UploadModal.tsx
index 40c46434..d187a0fc 100644
--- a/frontend/src/common/components/Upload/UploadModal.tsx
+++ b/frontend/src/common/components/Upload/UploadModal.tsx
@@ -6,15 +6,22 @@ import {
IonIcon,
IonProgressBar,
IonLabel,
- IonItem
+ IonItem,
} from '@ionic/react';
-import { closeOutline, cloudUploadOutline, documentOutline, checkmarkOutline } from 'ionicons/icons';
+import {
+ closeOutline,
+ cloudUploadOutline,
+ documentOutline,
+ checkmarkOutline,
+} from 'ionicons/icons';
import { useTranslation } from 'react-i18next';
import { UploadStatus, useFileUpload } from '../../hooks/useFileUpload';
import { MedicalReport } from '../../models/medicalReport';
import './UploadModal.scss';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faCircleXmark } from '@fortawesome/free-regular-svg-icons';
+import { useHistory } from 'react-router';
+import { useTimeout } from '../../hooks/useTimeout';
export interface UploadModalProps {
isOpen: boolean;
@@ -25,12 +32,14 @@ export interface UploadModalProps {
const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): JSX.Element => {
const { t } = useTranslation();
+ const history = useHistory();
+ const { setTimeout } = useTimeout();
const fileInputRef = useRef<HTMLInputElement>(null);
// Track the upload result to use when the user closes the success screen
const [uploadResult, setUploadResult] = useState<MedicalReport | null>(null);
// Track when to show the upload cancelled notice
const [showCancelNotice, setShowCancelNotice] = useState(false);
-
+
const {
file,
status,
@@ -40,12 +49,30 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
uploadFile,
reset,
formatFileSize,
- cancelUpload
- } = useFileUpload({
+ cancelUpload,
+ } = useFileUpload({
// Override onUploadComplete to store the result and not call the parent immediately
- onUploadComplete: (result) => {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ onUploadComplete: (result: any) => {
setUploadResult(result);
- }
+
+ // Automatically redirect to processing screen after 2 seconds
+ setTimeout(() => {
+ reset();
+ onClose();
+ if (onUploadComplete) {
+ onUploadComplete(result);
+ }
+ // Navigate to the processing tab with filePath in state
+ if (file) {
+ history.push('/tabs/processing', {
+ filePath: result.filePath,
+ });
+ } else {
+ history.push('/tabs/processing');
+ }
+ }, 2000);
+ },
});
// Effect to automatically start upload when a file is selected and validated
@@ -62,7 +89,7 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
if (!files || files.length === 0) {
return;
}
-
+
selectFile(files[0]);
// Upload will be triggered by the useEffect
};
@@ -89,12 +116,12 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
if (status === UploadStatus.SUCCESS && uploadResult && onUploadComplete) {
onUploadComplete(uploadResult);
}
-
+
// Reset state
reset();
setUploadResult(null);
setShowCancelNotice(false);
-
+
// Close modal
onClose();
};
@@ -114,7 +141,7 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
{t('upload.imageSizeLimit')} / {t('upload.pdfSizeLimit')}
-
+
{/* Show cancel notice */}
{showCancelNotice && (
@@ -124,15 +151,15 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
Upload cancelled.
)}
-
- {error && !showCancelNotice &&
-
-
-
+
+ {error && !showCancelNotice && (
+
+
+
{error}
- }
+ )}
-
+
{t('upload.selectFile')}
@@ -155,7 +178,7 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
-
+
{/* File display item */}
{file && (
@@ -168,19 +191,16 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
{formatFileSize(file.size)} • {Math.ceil((1 - progress) * 10)} seconds left
{/* Progress bar */}
-
+
)}
-
+
{/* Cancel button - updated to match the size of the upload button */}
-
{t('upload.addNewFile')}
{showCloseButton && (
-
+
)}
@@ -247,4 +264,4 @@ const UploadModal = ({ isOpen, onClose, onUploadComplete }: UploadModalProps): J
);
};
-export default UploadModal;
\ No newline at end of file
+export default UploadModal;
diff --git a/frontend/src/common/hooks/useTimeout.ts b/frontend/src/common/hooks/useTimeout.ts
new file mode 100644
index 00000000..59147401
--- /dev/null
+++ b/frontend/src/common/hooks/useTimeout.ts
@@ -0,0 +1,31 @@
+import { useCallback, useEffect, useRef } from 'react';
+
+/**
+ * Custom hook for handling setTimeout with cleanup
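+ *
+ * Illustrative usage (hypothetical handler):
+ *   const { setTimeout } = useTimeout();
+ *   setTimeout(() => handleDone(), 2000); // cleared automatically on unmount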
+ */
+export const useTimeout = () => {
+ const timeoutRef = useRef<number | null>(null);
+
+ // Clear the timeout when component unmounts or when called manually
+ const clearTimeout = useCallback(() => {
+ if (timeoutRef.current) {
+ window.clearTimeout(timeoutRef.current);
+ timeoutRef.current = null;
+ }
+ }, []);
+
+ // Set a new timeout
+ const setTimeout = useCallback((callback: () => void, delay: number) => {
+ // Clear any existing timeout first
+ clearTimeout();
+ // Set the new timeout
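+ // (window.setTimeout returns a number in browsers, unlike NodeJS.Timeout,
+ // which is why timeoutRef is typed as number | null)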
+ timeoutRef.current = window.setTimeout(callback, delay);
+ }, [clearTimeout]);
+
+ // Clean up on unmount
+ useEffect(() => {
+ return clearTimeout;
+ }, [clearTimeout]);
+
+ return { setTimeout, clearTimeout };
+};
\ No newline at end of file
diff --git a/frontend/src/common/services/storage/s3-storage-service.ts b/frontend/src/common/services/storage/s3-storage-service.ts
index 1de63ff4..35cd9afa 100644
--- a/frontend/src/common/services/storage/s3-storage-service.ts
+++ b/frontend/src/common/services/storage/s3-storage-service.ts
@@ -1,4 +1,13 @@
-import { S3Client, PutObjectCommand, GetObjectCommand } from '@aws-sdk/client-s3';
+import {
+ S3Client,
+ PutObjectCommand,
+ GetObjectCommand,
+ CreateMultipartUploadCommand,
+ UploadPartCommand,
+ CompleteMultipartUploadCommand,
+ AbortMultipartUploadCommand,
+ CompletedPart,
+} from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';
import { v4 as uuidv4 } from 'uuid';
import { fetchAuthSession } from '@aws-amplify/auth';
@@ -28,19 +37,23 @@ export class S3StorageService {
private s3Client: S3Client;
private readonly bucketName: string;
private readonly region: string;
-
+ // Use 5MB chunks: this is the minimum part size S3 accepts for multipart
+ // uploads (every part except the last must be at least 5MB)
+ private readonly CHUNK_SIZE = 5 * 1024 * 1024; // 5MB in bytes
+ // File size threshold for when to use multipart upload
+ private readonly MULTIPART_THRESHOLD = 10 * 1024 * 1024; // 10MB
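+ // Example: a 23MB file exceeds the threshold and is sent as 5 parts,
+ // four full 5MB chunks plus a final 3MB chunk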
+
constructor() {
// Get region and bucket name from configuration
this.region = S3_CONFIG.REGION;
this.bucketName = S3_CONFIG.BUCKET;
-
+
// Initialize S3 client
this.s3Client = new S3Client({
region: this.region,
// Credentials will be loaded dynamically when needed
});
}
-
+
/**
* Get temporary AWS credentials from Cognito identity pool
* @returns Promise with credentials
@@ -50,15 +63,27 @@ export class S3StorageService {
if (!session.credentials) {
throw new Error('No credentials available');
}
-
+
return {
accessKeyId: session.credentials.accessKeyId,
secretAccessKey: session.credentials.secretAccessKey,
sessionToken: session.credentials.sessionToken,
- expiration: session.credentials.expiration
+ expiration: session.credentials.expiration,
};
}
+ /**
+ * Updates the S3 client with fresh credentials
+ */
+ private async refreshCredentials() {
+ const credentials = await this.getCredentials();
+
+ this.s3Client = new S3Client({
+ region: this.region,
+ credentials,
+ });
+ }
+
/**
* Uploads a file to S3 storage
* @param file - The file to upload
@@ -68,133 +93,322 @@ export class S3StorageService {
* @returns Promise with the S3 key of the uploaded file
*/
public async uploadFile(
- file: File,
- folder: string = 'reports',
+ file: File,
+ folder: string = 'reports',
onProgress?: StorageProgressCallback,
- signal?: AbortSignal
+ signal?: AbortSignal,
): Promise<string> {
// Check if already aborted before starting
if (signal?.aborted) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
- // Set up progress simulation timers that we'll need to clear if aborted
- const progressTimers: NodeJS.Timeout[] = [];
-
+
+ // Generate a unique filename to prevent collisions
+ const uniqueFilename = `${uuidv4()}-${file.name}`;
+ const key = `${folder}/${uniqueFilename}`;
+
+ // Check if file size exceeds the multipart threshold
+ if (file.size > this.MULTIPART_THRESHOLD) {
+ return this.uploadLargeFile(file, key, onProgress, signal);
+ } else {
+ return this.uploadSmallFile(file, key, onProgress, signal);
+ }
+ }
+
+ /**
+ * Gets a pre-signed URL for uploading to S3
+ * @param key - S3 key (path) for the file
+ * @param contentType - Content type of the file
+ * @returns Promise with the signed URL
+ */
+ private async getPresignedUploadUrl(key: string, contentType: string): Promise<string> {
+ const command = new PutObjectCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ ContentType: contentType,
+ });
+
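+ // The ContentType above is folded into the signature, so the eventual PUT
+ // must send a matching Content-Type header (uploadSmallFile does this)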
+ return await getSignedUrl(this.s3Client, command, { expiresIn: 3600 });
+ }
+
+ /**
+ * Uploads a small file using XMLHttpRequest for better progress tracking
+ * @param file - The file to upload
+ * @param key - S3 key to use for the file
+ * @param onProgress - Optional callback for tracking upload progress
+ * @param signal - Optional abort signal for canceling the request
+ * @returns Promise with the S3 key of the uploaded file
+ */
+ private async uploadSmallFile(
+ file: File,
+ key: string,
+ onProgress?: StorageProgressCallback,
+ signal?: AbortSignal,
+ ): Promise<string> {
try {
- // Get credentials
- const credentials = await this.getCredentials();
-
+ // Refresh credentials
+ await this.refreshCredentials();
+
// Check if aborted after getting credentials
if (signal?.aborted) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
- // Update client with fresh credentials
- this.s3Client = new S3Client({
- region: this.region,
- credentials
- });
-
- // Generate a unique filename to prevent collisions
- const uniqueFilename = `${uuidv4()}-${file.name}`;
- const key = `${folder}/${uniqueFilename}`;
-
- // Check if aborted before reading file
+
+ // Get a pre-signed URL for putting the object
+ const presignedUrl = await this.getPresignedUploadUrl(key, file.type);
+
+ // Check if aborted after getting pre-signed URL
if (signal?.aborted) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
- // Upload file to S3
- const arrayBuffer = await file.arrayBuffer();
-
- // Check if aborted after reading file
+
+ // Use XMLHttpRequest for better progress tracking
+ return new Promise<string>((resolve, reject) => {
+ const xhr = new XMLHttpRequest();
+
+ // Set up progress tracking
+ if (onProgress) {
+ xhr.upload.onprogress = (event) => {
+ if (event.lengthComputable) {
+ onProgress(event.loaded / event.total);
+ }
+ };
+ }
+
+ // Forward the abort signal to the XHR, and remove the listener once the
+ // request settles so it does not leak
+ const abortHandler = () => xhr.abort();
+ if (signal) {
+ signal.addEventListener('abort', abortHandler);
+ }
+ const cleanupAbortHandler = () => {
+ signal?.removeEventListener('abort', abortHandler);
+ };
+
+ // Completion and error handlers (assigned once, each cleaning up the
+ // abort listener before settling the promise)
+ xhr.onload = () => {
+ cleanupAbortHandler();
+ if (xhr.status >= 200 && xhr.status < 300) {
+ // Ensure 100% progress is reported on success
+ if (onProgress) onProgress(1);
+ resolve(key);
+ } else {
+ reject(new StorageError(`Upload failed with status ${xhr.status}: ${xhr.statusText}`));
+ }
+ };
+
+ xhr.onerror = () => {
+ cleanupAbortHandler();
+ reject(new StorageError('Network error occurred during upload'));
+ };
+
+ xhr.onabort = () => {
+ cleanupAbortHandler();
+ reject(new DOMException('The operation was aborted', 'AbortError'));
+ };
+
+ // Open the request, set the content type to match the pre-signed URL's
+ // signature, and start the upload
+ xhr.open('PUT', presignedUrl);
+ xhr.setRequestHeader('Content-Type', file.type);
+ xhr.send(file);
+ });
+ } catch (error) {
+ // Check if this is an abort error
+ if (signal?.aborted || (error instanceof DOMException && error.name === 'AbortError')) {
+ throw new DOMException('The operation was aborted', 'AbortError');
+ }
+
+ console.error('Error uploading file to S3:', error);
+ throw new StorageError(
+ error instanceof Error
+ ? `Failed to upload file: ${error.message}`
+ : 'Failed to upload file',
+ );
+ }
+ }
+
+ /**
+ * Uploads a large file using S3 multipart upload
+ * @param file - The file to upload
+ * @param key - S3 key to use for the file
+ * @param onProgress - Optional callback for tracking upload progress
+ * @param signal - Optional abort signal for canceling the request
+ * @returns Promise with the S3 key of the uploaded file
+ */
+ private async uploadLargeFile(
+ file: File,
+ key: string,
+ onProgress?: StorageProgressCallback,
+ signal?: AbortSignal,
+ ): Promise<string> {
+ try {
+ // Refresh credentials
+ await this.refreshCredentials();
+
+ // Check if aborted after getting credentials
if (signal?.aborted) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
- const fileBuffer = new Uint8Array(arrayBuffer);
-
- // Create the upload command
- const uploadParams = {
- Bucket: this.bucketName,
- Key: key,
- Body: fileBuffer,
- ContentType: file.type
- };
-
- // For progress tracking, we'd need to use a more advanced approach
- // using the AWS SDK v3's send middleware, but for simplicity, we'll
- // simulate progress here
- if (onProgress && !signal?.aborted) {
- // Simulate progress update with ability to cancel
- onProgress(0.1);
-
- // Store timers so they can be cleared if aborted
- progressTimers.push(
- setTimeout(() => {
- if (!signal?.aborted) onProgress(0.3);
- }, 300),
- setTimeout(() => {
- if (!signal?.aborted) onProgress(0.6);
- }, 600),
- setTimeout(() => {
- if (!signal?.aborted) onProgress(0.8);
- }, 800)
- );
+
+ // Initialize multipart upload
+ const createMultipartUploadResponse = await this.s3Client.send(
+ new CreateMultipartUploadCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ ContentType: file.type,
+ }),
+ );
+
+ const uploadId = createMultipartUploadResponse.UploadId;
+ if (!uploadId) {
+ throw new StorageError('Failed to initialize multipart upload');
}
-
- // Setup abort handler for the signal
- let abortHandler: (() => void) | undefined;
-
- if (signal) {
- const abortPromise = new Promise((_, reject) => {
- abortHandler = () => {
- reject(new DOMException('The operation was aborted', 'AbortError'));
- };
- signal.addEventListener('abort', abortHandler);
- });
-
- try {
- // Create a race between the upload and abortion
- await Promise.race([
- this.s3Client.send(new PutObjectCommand(uploadParams)),
- abortPromise
- ]);
- } finally {
- // Clean up the event listener to prevent memory leaks
- if (abortHandler && signal) {
- signal.removeEventListener('abort', abortHandler);
+
+ try {
+ // Calculate total number of chunks
+ const totalChunks = Math.ceil(file.size / this.CHUNK_SIZE);
+ const uploadPartPromises = [];
+ const uploadedParts: CompletedPart[] = [];
+
+ // Track progress for multipart upload
+ let totalUploaded = 0;
+ const fileSize = file.size;
+
+ // Report initial progress
+ if (onProgress && !signal?.aborted) {
+ onProgress(0);
+ }
+
+ // Upload each chunk
+ for (let partNumber = 1; partNumber <= totalChunks; partNumber++) {
+ // Check if aborted before processing each chunk
+ if (signal?.aborted) {
+ throw new DOMException('The operation was aborted', 'AbortError');
}
+
+ // Calculate chunk boundaries
+ const start = (partNumber - 1) * this.CHUNK_SIZE;
+ const end = Math.min(partNumber * this.CHUNK_SIZE, file.size);
+ const chunkSize = end - start;
+
+ // Extract chunk from file
+ const chunk = file.slice(start, end);
+ const chunkBuffer = await chunk.arrayBuffer();
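+ // Blob.slice() is cheap (it only creates a view); the chunk's bytes are
+ // materialized in memory by arrayBuffer()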
+
+ // Create promise for uploading this part
+ const uploadPartPromise = this.s3Client
+ .send(
+ new UploadPartCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ PartNumber: partNumber,
+ UploadId: uploadId,
+ Body: new Uint8Array(chunkBuffer),
+ }),
+ )
+ .then((response) => {
+ // Check if aborted before updating progress
+ if (signal?.aborted) {
+ throw new DOMException('The operation was aborted', 'AbortError');
+ }
+
+ // Add to list of uploaded parts
+ uploadedParts.push({
+ ETag: response.ETag,
+ PartNumber: partNumber,
+ });
+
+ // Update progress
+ totalUploaded += chunkSize;
+ if (onProgress && !signal?.aborted) {
+ onProgress(totalUploaded / fileSize);
+ }
+
+ return response;
+ });
+
+ uploadPartPromises.push(uploadPartPromise);
}
- } else {
- // Upload to S3 without abort capability
- await this.s3Client.send(new PutObjectCommand(uploadParams));
- }
-
- // Clean up all progress timers before marking complete
- progressTimers.forEach(timer => clearTimeout(timer));
-
- // Complete the progress if needed
- if (onProgress && !signal?.aborted) {
- onProgress(1);
+
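+ // Parts are sent concurrently (the loop above does not await each send);
+ // the browser's per-host connection limit effectively bounds the parallelism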
+ // Wait for all parts to upload
+ await Promise.all(uploadPartPromises);
+
+ // Check if aborted before completing
+ if (signal?.aborted) {
+ throw new DOMException('The operation was aborted', 'AbortError');
+ }
+
+ // Complete the multipart upload
+ await this.s3Client.send(
+ new CompleteMultipartUploadCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ UploadId: uploadId,
+ MultipartUpload: {
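+ // S3 requires the part list in ascending PartNumber order, and concurrent
+ // uploads complete in arbitrary order, hence the sort below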
+ Parts: uploadedParts.sort((a, b) => a.PartNumber! - b.PartNumber!),
+ },
+ }),
+ );
+
+ // Ensure 100% progress is reported
+ if (onProgress && !signal?.aborted) {
+ onProgress(1);
+ }
+
+ return key;
+ } catch (error) {
+ // Abort the multipart upload if anything went wrong
+ if (uploadId) {
+ try {
+ await this.s3Client.send(
+ new AbortMultipartUploadCommand({
+ Bucket: this.bucketName,
+ Key: key,
+ UploadId: uploadId,
+ }),
+ );
+ } catch (abortError) {
+ console.error('Error aborting multipart upload:', abortError);
+ }
+ }
+ throw error;
}
-
- return key;
} catch (error) {
- // Clean up all progress timers
- progressTimers.forEach(timer => clearTimeout(timer));
-
// Check if this is an abort error
if (signal?.aborted || (error instanceof DOMException && error.name === 'AbortError')) {
throw new DOMException('The operation was aborted', 'AbortError');
}
-
+
console.error('Error uploading file to S3:', error);
throw new StorageError(
- error instanceof Error
- ? `Failed to upload file: ${error.message}`
- : 'Failed to upload file'
+ error instanceof Error
+ ? `Failed to upload file: ${error.message}`
+ : 'Failed to upload file',
);
}
}
@@ -207,34 +421,28 @@ export class S3StorageService {
*/
public async getSignedUrl(key: string, expiresIn: number = 3600): Promise<string> {
try {
- // Get credentials
- const credentials = await this.getCredentials();
-
- // Update client with fresh credentials
- this.s3Client = new S3Client({
- region: this.region,
- credentials
- });
-
+ // Refresh credentials
+ await this.refreshCredentials();
+
// Create the command to get the object
const command = new GetObjectCommand({
Bucket: this.bucketName,
- Key: key
+ Key: key,
});
-
+
// Generate signed URL
const url = await getSignedUrl(this.s3Client, command, { expiresIn });
return url;
} catch (error) {
console.error('Error getting signed URL from S3:', error);
throw new StorageError(
- error instanceof Error
- ? `Failed to get signed URL: ${error.message}`
- : 'Failed to get signed URL'
+ error instanceof Error
+ ? `Failed to get signed URL: ${error.message}`
+ : 'Failed to get signed URL',
);
}
}
}
// Export a singleton instance
-export const s3StorageService = new S3StorageService();
\ No newline at end of file
+export const s3StorageService = new S3StorageService();
diff --git a/frontend/src/pages/Processing/Processing.scss b/frontend/src/pages/Processing/Processing.scss
new file mode 100644
index 00000000..a6a4c546
--- /dev/null
+++ b/frontend/src/pages/Processing/Processing.scss
@@ -0,0 +1,102 @@
+.processing-page {
+ --background: #1c1c1e;
+
+ &__container {
+ background-color: white;
+ border-radius: 32px;
+ height: 100%;
+ margin: 0;
+ padding: 24px;
+ display: flex;
+ flex-direction: column;
+ position: relative;
+ }
+
+ &__header {
+ display: flex;
+ flex-direction: column;
+ margin-top: 20px;
+ }
+
+ &__avatar-wrapper {
+ margin-bottom: 20px;
+
+ // Styles for the Avatar component - adjusted size to match design
+ .ls-avatar {
+ --size: 60px !important;
+ margin: 0;
+
+ // This makes sure even round avatars are sized correctly
+ &.ls-avatar--round {
+ width: var(--size);
+ height: var(--size);
+ }
+ }
+ }
+
+ &__title {
+ margin-bottom: 40px;
+ }
+
+ &__subtitle {
+ font-size: 20px;
+ color: #9BA1AB;
+ margin: 0 0 5px 0;
+ font-weight: 400;
+ }
+
+ &__heading {
+ font-size: 36px;
+ font-weight: 600;
+ color: #394150;
+ margin: 0;
+ }
+
+ &__animation {
+ flex: 1;
+ display: flex;
+ align-items: center;
+ justify-content: center;
+
+ &-circle {
+ // Adjusted animation size to match design
+ width: 180px;
+ height: 180px;
+ border-radius: 50%;
+ background: radial-gradient(circle at 30% 30%, #4f7dff 10%, #c97eff 40%, #ff8afc 80%);
+ box-shadow:
+ 0 0 40px rgba(201, 126, 255, 0.5),
+ 0 0 80px rgba(201, 126, 255, 0.3);
+ position: relative;
+ overflow: hidden;
+ animation: pulse 3s infinite ease-in-out;
+
+ &::after {
+ content: "";
+ position: absolute;
+ top: 0;
+ left: 0;
+ right: 0;
+ bottom: 0;
+ background: linear-gradient(135deg, rgba(255,255,255,0.4) 0%, transparent 50%);
+ border-radius: 50%;
+ }
+ }
+ }
+}
+
+// Animation for the pulse effect
+@keyframes pulse {
+ 0% {
+ transform: scale(0.95);
+ opacity: 0.8;
+ }
+ 50% {
+ transform: scale(1);
+ opacity: 1;
+ }
+ 100% {
+ transform: scale(0.95);
+ opacity: 0.8;
+ }
+}
\ No newline at end of file
diff --git a/frontend/src/pages/Processing/Processing.tsx b/frontend/src/pages/Processing/Processing.tsx
new file mode 100644
index 00000000..27b0a152
--- /dev/null
+++ b/frontend/src/pages/Processing/Processing.tsx
@@ -0,0 +1,117 @@
+import { IonContent, IonPage } from '@ionic/react';
+import { useCurrentUser } from '../../common/hooks/useAuth';
+import Avatar from '../../common/components/Icon/Avatar';
+import { useLocation, useHistory } from 'react-router-dom';
+import { useEffect, useState } from 'react';
+import { useAxios } from '../../common/hooks/useAxios';
+import './Processing.scss';
+import { getAuthConfig } from 'common/api/reportService';
+const API_URL = import.meta.env.VITE_BASE_URL_API || '';
+
+/**
+ * Processing page that shows while the system analyzes uploaded documents
+ * This page automatically displays after a successful upload
+ */
+const Processing: React.FC = () => {
+ const currentUser = useCurrentUser();
+ const firstName = currentUser?.name?.split(' ')[0];
+ const axios = useAxios();
+ const history = useHistory();
+
+ // States to track processing
+ const [isProcessing, setIsProcessing] = useState(true);
+ const [processingError, setProcessingError] = useState<string | null>(null);
+
+ // Get the location state which may contain the filePath
+ const location = useLocation<{ filePath: string }>();
+ const filePath = location.state?.filePath;
+ const [reportId, setReportId] = useState<string | null>(null);
+ const [isFetching, setIsFetching] = useState(false);
+
+ // Send the API request when component mounts
+ useEffect(() => {
+ if (!filePath) {
+ setProcessingError('No file path provided');
+ setIsProcessing(false);
+ return;
+ }
+
+ // Skip if a request has already completed or is currently in flight
+ if (reportId || isFetching) {
+ return;
+ }
+
+ const processFile = async () => {
+ setIsFetching(true);
+
+ try {
+ // Send POST request to backend API
+ const response = await axios.post(
+ `${API_URL}/api/document-processor/process-file`,
+ { filePath },
+ await getAuthConfig(),
+ );
+ setReportId(response.data.reportId);
+
+ console.log('File processed successfully:', response.data);
+ } catch (error) {
+ console.error('Error processing file:', error);
+ setProcessingError('Failed to process the file. Please try again.');
+ setIsProcessing(false);
+ }
+ };
+
+ processFile();
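+ // Note: the request is not aborted if the component unmounts; threading an
+ // AbortController through this call would be a reasonable hardening step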
+ }, [filePath, axios, history]);
+
+ return (
+ <IonPage className="processing-page">
+ <IonContent fullscreen>
+ <div className="processing-page__container">
+ {/* Header with avatar */}
+ <div className="processing-page__header">
+ <div className="processing-page__avatar-wrapper">
+ <Avatar />
+ </div>
+
+ {/* Title section */}
+ <div className="processing-page__title">
+ <p className="processing-page__subtitle">
+ Just a few seconds{firstName && ', ' + firstName}!
+ </p>
+ <h1 className="processing-page__heading">
+ {processingError ? 'Processing Error' : 'Processing Data...'}
+ </h1>
+ {processingError && <p>{processingError}</p>}
+ </div>
+ </div>
+
+ {/* Animation circle */}
+ {isProcessing && (
+ <div className="processing-page__animation">
+ <div className="processing-page__animation-circle" />
+ </div>
+ )}
+
+ {/* Error state - show retry button */}
+ {processingError && (
+ <button onClick={() => history.goBack()}>Try again</button>
+ )}
+ </div>
+ </IonContent>
+ </IonPage>
+ );
+};
+
+export default Processing;
diff --git a/frontend/src/pages/Upload/__tests__/UploadPage.test.tsx b/frontend/src/pages/Upload/__tests__/UploadPage.test.tsx
index e8d5af66..06107e8a 100644
--- a/frontend/src/pages/Upload/__tests__/UploadPage.test.tsx
+++ b/frontend/src/pages/Upload/__tests__/UploadPage.test.tsx
@@ -28,9 +28,13 @@ vi.mock('react-router-dom', async () => {
// Mock the UploadModal component
vi.mock('common/components/Upload/UploadModal', () => {
- const ModalMock = ({ isOpen, onClose, onUploadComplete }: {
- isOpen: boolean;
- onClose: () => void;
+ const ModalMock = ({
+ isOpen,
+ onClose,
+ onUploadComplete,
+ }: {
+ isOpen: boolean;
+ onClose: () => void;
onUploadComplete: (report: MedicalReport) => void;
}) => {
if (!isOpen) return null;
@@ -38,27 +42,33 @@ vi.mock('common/components/Upload/UploadModal', () => {
// Mock report that will be returned on upload complete
const mockReport: MedicalReport = {
id: '123',
+ userId: 'test-user',
title: 'Test Report',
category: ReportCategory.GENERAL,
createdAt: '2023-01-01',
status: ReportStatus.UNREAD,
+ bookmarked: false,
+ isProcessed: true,
+ labValues: [],
+ summary: 'Test report summary',
+ filePath: '/reports/test-report.pdf',
+ updatedAt: '2023-01-01',
};
-
+
return (
- <div data-testid="upload-modal">
- <button data-testid="close-modal-btn" onClick={onClose}>Close</button>
- <button data-testid="complete-upload-btn" onClick={() => onUploadComplete(mockReport)}>Complete Upload</button>
- </div>
+ <div data-testid="upload-modal">
+ <button data-testid="close-modal-btn" onClick={onClose}>
+ Close
+ </button>
+ <button
+ data-testid="complete-upload-btn"
+ onClick={() => onUploadComplete(mockReport)}
+ >
+ Complete Upload
+ </button>
+ </div>
);
};
-
+
return {
- default: ModalMock
+ default: ModalMock,
};
});
@@ -69,12 +79,19 @@ vi.mock('@ionic/react', () => ({
IonPage: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
IonTitle: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
IonToolbar: ({ children }: { children: React.ReactNode }) => <div>{children}</div>,
- IonButton: ({ children, onClick }: {
- children: React.ReactNode;
+ IonButton: ({
+ children,
+ onClick,
+ }: {
+ children: React.ReactNode;
onClick?: () => void;
expand?: string;
className?: string;
-  }) => <button onClick={onClick} data-testid="select-file-btn">{children}</button>,
+  }) => (
+ <button onClick={onClick} data-testid="select-file-btn">
+ {children}
+ </button>
+ ),
}));
describe('UploadPage', () => {
@@ -82,103 +99,103 @@ describe('UploadPage', () => {
// Reset mocks before each test
vi.clearAllMocks();
});
-
+
test('renders correctly', () => {
render(
- <UploadPage />
+ <UploadPage />,
);
-
+
// Check for key elements
expect(screen.getByText('pages.upload.title')).toBeInTheDocument();
expect(screen.getByText('pages.upload.subtitle')).toBeInTheDocument();
expect(screen.getByText('pages.upload.description')).toBeInTheDocument();
expect(screen.getByText('upload.selectFile')).toBeInTheDocument();
});
-
+
test('opens modal when button is clicked', () => {
render(
- <UploadPage />
+ <UploadPage />,
);
-
+
// Modal should not be visible initially
expect(screen.queryByTestId('upload-modal')).not.toBeInTheDocument();
-
+
// Click the button to open modal
const button = screen.getByTestId('select-file-btn');
fireEvent.click(button);
-
+
// Modal should now be visible
expect(screen.getByTestId('upload-modal')).toBeInTheDocument();
});
-
+
test('closes modal when onClose is called', () => {
render(
- <UploadPage />
+ <UploadPage />,
);
-
+
// Open the modal first
const openButton = screen.getByTestId('select-file-btn');
fireEvent.click(openButton);
-
+
// Modal should be visible
expect(screen.getByTestId('upload-modal')).toBeInTheDocument();
-
+
// Click the close button
const closeButton = screen.getByTestId('close-modal-btn');
fireEvent.click(closeButton);
-
+
// Modal should now be hidden
expect(screen.queryByTestId('upload-modal')).not.toBeInTheDocument();
});
-
+
test('navigates to home page after successful upload', () => {
render(
- <UploadPage />
+ <UploadPage />,
);
-
+
// Open the modal first
const openButton = screen.getByTestId('select-file-btn');
fireEvent.click(openButton);
-
+
// Complete the upload
const completeButton = screen.getByTestId('complete-upload-btn');
fireEvent.click(completeButton);
-
+
// Should navigate to home page
expect(mockHistoryPush).toHaveBeenCalledWith('/tabs/home');
-
+
// Modal should be closed after upload completion
expect(screen.queryByTestId('upload-modal')).not.toBeInTheDocument();
});
-
+
test('handles canceling an upload', () => {
render(
- <UploadPage />
+ <UploadPage />,
);
-
+
// Open the modal
const openButton = screen.getByTestId('select-file-btn');
fireEvent.click(openButton);
-
+
// Verify modal is shown
expect(screen.getByTestId('upload-modal')).toBeInTheDocument();
-
+
// Cancel/close the upload
const closeButton = screen.getByTestId('close-modal-btn');
fireEvent.click(closeButton);
-
+
// Modal should be hidden and no navigation should happen
expect(screen.queryByTestId('upload-modal')).not.toBeInTheDocument();
expect(mockHistoryPush).not.toHaveBeenCalled();
});
-});
\ No newline at end of file
+});