Skip to content

Commit 35c5519

Browse files
feat(files): gmail upload attachment, workspace files, file storage limits (#1666)
* feat(gmail): add attachment uploads * add workspace files * update landing page * fix lint * fix test * fixed UI * added additional S3 tools to upload files * added search filters for gmail trigger * added files to every block * works * fix * register sharepoint tool --------- Co-authored-by: waleed <waleed>
1 parent d92d9a0 commit 35c5519

File tree

103 files changed

+14672
-617
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

103 files changed

+14672
-617
lines changed

apps/docs/content/docs/en/tools/gmail.mdx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -70,6 +70,7 @@ Send emails using Gmail
7070
| `body` | string | Yes | Email body content |
7171
| `cc` | string | No | CC recipients \(comma-separated\) |
7272
| `bcc` | string | No | BCC recipients \(comma-separated\) |
73+
| `attachments` | file[] | No | Files to attach to the email |
7374

7475
#### Output
7576

@@ -91,6 +92,7 @@ Draft emails using Gmail
9192
| `body` | string | Yes | Email body content |
9293
| `cc` | string | No | CC recipients \(comma-separated\) |
9394
| `bcc` | string | No | BCC recipients \(comma-separated\) |
95+
| `attachments` | file[] | No | Files to attach to the email draft |
9496

9597
#### Output
9698

apps/docs/content/docs/en/tools/s3.mdx

Lines changed: 94 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
---
22
title: S3
3-
description: View S3 files
3+
description: Upload, download, list, and manage S3 files
44
---
55

66
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -62,12 +62,37 @@ In Sim, the S3 integration enables your agents to retrieve and access files stor
6262

6363
## Usage Instructions
6464

65-
Integrate S3 into the workflow. Can get presigned URLs for S3 objects. Requires access key and secret access key.
65+
Integrate S3 into the workflow. Upload files, download objects, list bucket contents, delete objects, and copy objects between buckets. Requires AWS access key and secret access key.
6666

6767

6868

6969
## Tools
7070

71+
### `s3_put_object`
72+
73+
Upload a file to an AWS S3 bucket
74+
75+
#### Input
76+
77+
| Parameter | Type | Required | Description |
78+
| --------- | ---- | -------- | ----------- |
79+
| `accessKeyId` | string | Yes | Your AWS Access Key ID |
80+
| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
81+
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
82+
| `bucketName` | string | Yes | S3 bucket name |
83+
| `objectKey` | string | Yes | Object key/path in S3 \(e.g., folder/filename.ext\) |
84+
| `file` | file | No | File to upload |
85+
| `content` | string | No | Text content to upload \(alternative to file\) |
86+
| `contentType` | string | No | Content-Type header \(auto-detected from file if not provided\) |
87+
| `acl` | string | No | Access control list \(e.g., private, public-read\) |
88+
89+
#### Output
90+
91+
| Parameter | Type | Description |
92+
| --------- | ---- | ----------- |
93+
| `url` | string | URL of the uploaded S3 object |
94+
| `metadata` | object | Upload metadata including ETag and location |
95+
7196
### `s3_get_object`
7297

7398
Retrieve an object from an AWS S3 bucket
@@ -87,6 +112,73 @@ Retrieve an object from an AWS S3 bucket
87112
| `url` | string | Pre-signed URL for downloading the S3 object |
88113
| `metadata` | object | File metadata including type, size, name, and last modified date |
89114

115+
### `s3_list_objects`
116+
117+
List objects in an AWS S3 bucket
118+
119+
#### Input
120+
121+
| Parameter | Type | Required | Description |
122+
| --------- | ---- | -------- | ----------- |
123+
| `accessKeyId` | string | Yes | Your AWS Access Key ID |
124+
| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
125+
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
126+
| `bucketName` | string | Yes | S3 bucket name |
127+
| `prefix` | string | No | Prefix to filter objects \(e.g., folder/\) |
128+
| `maxKeys` | number | No | Maximum number of objects to return \(default: 1000\) |
129+
| `continuationToken` | string | No | Token for pagination |
130+
131+
#### Output
132+
133+
| Parameter | Type | Description |
134+
| --------- | ---- | ----------- |
135+
| `objects` | array | List of S3 objects |
136+
137+
### `s3_delete_object`
138+
139+
Delete an object from an AWS S3 bucket
140+
141+
#### Input
142+
143+
| Parameter | Type | Required | Description |
144+
| --------- | ---- | -------- | ----------- |
145+
| `accessKeyId` | string | Yes | Your AWS Access Key ID |
146+
| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
147+
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
148+
| `bucketName` | string | Yes | S3 bucket name |
149+
| `objectKey` | string | Yes | Object key/path to delete |
150+
151+
#### Output
152+
153+
| Parameter | Type | Description |
154+
| --------- | ---- | ----------- |
155+
| `deleted` | boolean | Whether the object was successfully deleted |
156+
| `metadata` | object | Deletion metadata |
157+
158+
### `s3_copy_object`
159+
160+
Copy an object within or between AWS S3 buckets
161+
162+
#### Input
163+
164+
| Parameter | Type | Required | Description |
165+
| --------- | ---- | -------- | ----------- |
166+
| `accessKeyId` | string | Yes | Your AWS Access Key ID |
167+
| `secretAccessKey` | string | Yes | Your AWS Secret Access Key |
168+
| `region` | string | Yes | AWS region \(e.g., us-east-1\) |
169+
| `sourceBucket` | string | Yes | Source bucket name |
170+
| `sourceKey` | string | Yes | Source object key/path |
171+
| `destinationBucket` | string | Yes | Destination bucket name |
172+
| `destinationKey` | string | Yes | Destination object key/path |
173+
| `acl` | string | No | Access control list for the copied object \(e.g., private, public-read\) |
174+
175+
#### Output
176+
177+
| Parameter | Type | Description |
178+
| --------- | ---- | ----------- |
179+
| `url` | string | URL of the copied S3 object |
180+
| `metadata` | object | Copy operation metadata |
181+
90182

91183

92184
## Notes

apps/sim/app/(landing)/components/landing-pricing/landing-pricing.tsx

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {
88
Code2,
99
Database,
1010
DollarSign,
11+
HardDrive,
1112
Users,
1213
Workflow,
1314
} from 'lucide-react'
@@ -42,6 +43,7 @@ interface PricingTier {
4243
*/
4344
const FREE_PLAN_FEATURES: PricingFeature[] = [
4445
{ icon: DollarSign, text: '$10 usage limit' },
46+
{ icon: HardDrive, text: '5GB file storage' },
4547
{ icon: Workflow, text: 'Public template access' },
4648
{ icon: Users, text: 'Community support' },
4749
{ icon: Database, text: 'Limited log retention' },

apps/sim/app/api/files/parse/route.ts

Lines changed: 64 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import { isSupportedFileType, parseFile } from '@/lib/file-parsers'
88
import { createLogger } from '@/lib/logs/console/logger'
99
import { validateExternalUrl } from '@/lib/security/input-validation'
1010
import { downloadFile, isUsingCloudStorage } from '@/lib/uploads'
11+
import { extractStorageKey } from '@/lib/uploads/file-utils'
1112
import { UPLOAD_DIR_SERVER } from '@/lib/uploads/setup.server'
1213
import '@/lib/uploads/setup.server'
1314

@@ -69,13 +70,13 @@ export async function POST(request: NextRequest) {
6970

7071
try {
7172
const requestData = await request.json()
72-
const { filePath, fileType } = requestData
73+
const { filePath, fileType, workspaceId } = requestData
7374

7475
if (!filePath || (typeof filePath === 'string' && filePath.trim() === '')) {
7576
return NextResponse.json({ success: false, error: 'No file path provided' }, { status: 400 })
7677
}
7778

78-
logger.info('File parse request received:', { filePath, fileType })
79+
logger.info('File parse request received:', { filePath, fileType, workspaceId })
7980

8081
if (Array.isArray(filePath)) {
8182
const results = []
@@ -89,7 +90,7 @@ export async function POST(request: NextRequest) {
8990
continue
9091
}
9192

92-
const result = await parseFileSingle(path, fileType)
93+
const result = await parseFileSingle(path, fileType, workspaceId)
9394
if (result.metadata) {
9495
result.metadata.processingTime = Date.now() - startTime
9596
}
@@ -117,7 +118,7 @@ export async function POST(request: NextRequest) {
117118
})
118119
}
119120

120-
const result = await parseFileSingle(filePath, fileType)
121+
const result = await parseFileSingle(filePath, fileType, workspaceId)
121122

122123
if (result.metadata) {
123124
result.metadata.processingTime = Date.now() - startTime
@@ -153,7 +154,11 @@ export async function POST(request: NextRequest) {
153154
/**
154155
* Parse a single file and return its content
155156
*/
156-
async function parseFileSingle(filePath: string, fileType?: string): Promise<ParseResult> {
157+
async function parseFileSingle(
158+
filePath: string,
159+
fileType?: string,
160+
workspaceId?: string
161+
): Promise<ParseResult> {
157162
logger.info('Parsing file:', filePath)
158163

159164
if (!filePath || filePath.trim() === '') {
@@ -174,7 +179,7 @@ async function parseFileSingle(filePath: string, fileType?: string): Promise<Par
174179
}
175180

176181
if (filePath.startsWith('http://') || filePath.startsWith('https://')) {
177-
return handleExternalUrl(filePath, fileType)
182+
return handleExternalUrl(filePath, fileType, workspaceId)
178183
}
179184

180185
const isS3Path = filePath.includes('/api/files/serve/s3/')
@@ -216,10 +221,16 @@ function validateFilePath(filePath: string): { isValid: boolean; error?: string
216221

217222
/**
218223
* Handle external URL
224+
* If workspaceId is provided, checks if file already exists and saves to workspace if not
219225
*/
220-
async function handleExternalUrl(url: string, fileType?: string): Promise<ParseResult> {
226+
async function handleExternalUrl(
227+
url: string,
228+
fileType?: string,
229+
workspaceId?: string
230+
): Promise<ParseResult> {
221231
try {
222232
logger.info('Fetching external URL:', url)
233+
logger.info('WorkspaceId for URL save:', workspaceId)
223234

224235
const urlValidation = validateExternalUrl(url, 'fileUrl')
225236
if (!urlValidation.isValid) {
@@ -231,6 +242,34 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
231242
}
232243
}
233244

245+
// Extract filename from URL
246+
const urlPath = new URL(url).pathname
247+
const filename = urlPath.split('/').pop() || 'download'
248+
const extension = path.extname(filename).toLowerCase().substring(1)
249+
250+
logger.info(`Extracted filename: ${filename}, workspaceId: ${workspaceId}`)
251+
252+
// If workspaceId provided, check if file already exists in workspace
253+
if (workspaceId) {
254+
const { fileExistsInWorkspace, listWorkspaceFiles } = await import(
255+
'@/lib/uploads/workspace-files'
256+
)
257+
const exists = await fileExistsInWorkspace(workspaceId, filename)
258+
259+
if (exists) {
260+
logger.info(`File ${filename} already exists in workspace, using existing file`)
261+
// Get existing file and parse from storage
262+
const workspaceFiles = await listWorkspaceFiles(workspaceId)
263+
const existingFile = workspaceFiles.find((f) => f.name === filename)
264+
265+
if (existingFile) {
266+
// Parse from workspace storage instead of re-downloading
267+
const storageFilePath = `/api/files/serve/${existingFile.key}`
268+
return handleCloudFile(storageFilePath, fileType)
269+
}
270+
}
271+
}
272+
234273
const response = await fetch(url, {
235274
signal: AbortSignal.timeout(DOWNLOAD_TIMEOUT_MS),
236275
})
@@ -251,9 +290,23 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
251290

252291
logger.info(`Downloaded file from URL: ${url}, size: ${buffer.length} bytes`)
253292

254-
const urlPath = new URL(url).pathname
255-
const filename = urlPath.split('/').pop() || 'download'
256-
const extension = path.extname(filename).toLowerCase().substring(1)
293+
// If workspaceId provided, save to workspace storage
294+
if (workspaceId) {
295+
try {
296+
const { getSession } = await import('@/lib/auth')
297+
const { uploadWorkspaceFile } = await import('@/lib/uploads/workspace-files')
298+
299+
const session = await getSession()
300+
if (session?.user?.id) {
301+
const mimeType = response.headers.get('content-type') || getMimeType(extension)
302+
await uploadWorkspaceFile(workspaceId, session.user.id, buffer, filename, mimeType)
303+
logger.info(`Saved URL file to workspace storage: ${filename}`)
304+
}
305+
} catch (saveError) {
306+
// Log but don't fail - continue with parsing even if save fails
307+
logger.warn(`Failed to save URL file to workspace:`, saveError)
308+
}
309+
}
257310

258311
if (extension === 'pdf') {
259312
return await handlePdfBuffer(buffer, filename, fileType, url)
@@ -281,16 +334,7 @@ async function handleExternalUrl(url: string, fileType?: string): Promise<ParseR
281334
*/
282335
async function handleCloudFile(filePath: string, fileType?: string): Promise<ParseResult> {
283336
try {
284-
let cloudKey: string
285-
if (filePath.includes('/api/files/serve/s3/')) {
286-
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/s3/')[1])
287-
} else if (filePath.includes('/api/files/serve/blob/')) {
288-
cloudKey = decodeURIComponent(filePath.split('/api/files/serve/blob/')[1])
289-
} else if (filePath.startsWith('/api/files/serve/')) {
290-
cloudKey = decodeURIComponent(filePath.substring('/api/files/serve/'.length))
291-
} else {
292-
cloudKey = filePath
293-
}
337+
const cloudKey = extractStorageKey(filePath)
294338

295339
logger.info('Extracted cloud key:', cloudKey)
296340

apps/sim/app/api/files/upload/route.ts

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,8 @@ export async function POST(request: NextRequest) {
6666
logger.info(
6767
`Uploading files for execution-scoped storage: workflow=${workflowId}, execution=${executionId}`
6868
)
69+
} else if (workspaceId) {
70+
logger.info(`Uploading files for workspace-scoped storage: workspace=${workspaceId}`)
6971
}
7072

7173
const uploadResults = []
@@ -83,6 +85,7 @@ export async function POST(request: NextRequest) {
8385
const bytes = await file.arrayBuffer()
8486
const buffer = Buffer.from(bytes)
8587

88+
// Priority 1: Execution-scoped storage (temporary, 5 min expiry)
8689
if (workflowId && executionId) {
8790
const { uploadExecutionFile } = await import('@/lib/workflows/execution-file-storage')
8891
const userFile = await uploadExecutionFile(
@@ -100,6 +103,47 @@ export async function POST(request: NextRequest) {
100103
continue
101104
}
102105

106+
// Priority 2: Workspace-scoped storage (persistent, no expiry)
107+
if (workspaceId) {
108+
try {
109+
const { uploadWorkspaceFile } = await import('@/lib/uploads/workspace-files')
110+
const userFile = await uploadWorkspaceFile(
111+
workspaceId,
112+
session.user.id,
113+
buffer,
114+
originalName,
115+
file.type || 'application/octet-stream'
116+
)
117+
118+
uploadResults.push(userFile)
119+
continue
120+
} catch (workspaceError) {
121+
// Check error type
122+
const errorMessage =
123+
workspaceError instanceof Error ? workspaceError.message : 'Upload failed'
124+
const isDuplicate = errorMessage.includes('already exists')
125+
const isStorageLimitError =
126+
errorMessage.includes('Storage limit exceeded') ||
127+
errorMessage.includes('storage limit')
128+
129+
logger.warn(`Workspace file upload failed: ${errorMessage}`)
130+
131+
// Determine appropriate status code
132+
let statusCode = 500
133+
if (isDuplicate) statusCode = 409
134+
else if (isStorageLimitError) statusCode = 413
135+
136+
return NextResponse.json(
137+
{
138+
success: false,
139+
error: errorMessage,
140+
isDuplicate,
141+
},
142+
{ status: statusCode }
143+
)
144+
}
145+
}
146+
103147
try {
104148
logger.info(`Uploading file: ${originalName}`)
105149
const result = await uploadFile(buffer, originalName, file.type, file.size)

0 commit comments

Comments
 (0)