Skip to content

Commit 8fa8fd9

Browse files
committed
refactor: update storage configuration for AWS S3-compatible object storage
- Migrate environment variables to AWS-style naming - Implement signed request generation for S3 storage - Update storage utility functions to use signed requests - Add new dev script to run without mocks - Update documentation and example environment file
1 parent 63de8f4 commit 8fa8fd9

File tree

9 files changed

+152
-29
lines changed

9 files changed

+152
-29
lines changed

.env.example

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,8 @@ GITHUB_TOKEN="MOCK_GITHUB_TOKEN"
2121
ALLOW_INDEXING="true"
2222

2323
# Tigris Object Storage (S3-compatible) Configuration
24-
STORAGE_ACCESS_KEY="mock-access-key"
25-
STORAGE_SECRET_KEY="mock-secret-key"
26-
STORAGE_BUCKET="my-app-bucket"
27-
STORAGE_ENDPOINT="https://my-app-storage.example.com"
24+
AWS_ACCESS_KEY_ID="mock-access-key"
25+
AWS_SECRET_ACCESS_KEY="mock-secret-key"
26+
AWS_REGION="auto"
27+
AWS_ENDPOINT_URL_S3="https://fly.storage.tigris.dev"
28+
BUCKET_NAME="mock-bucket"
Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { invariantResponse } from '@epic-web/invariant'
22
import { prisma } from '#app/utils/db.server.ts'
3-
import { getImageUrl } from '#app/utils/storage.server.ts'
3+
import { getSignedGetRequestInfo } from '#app/utils/storage.server.ts'
44
import { type Route } from './+types/note-images.$imageId.ts'
55

66
export async function loader({ params }: Route.LoaderArgs) {
@@ -10,5 +10,7 @@ export async function loader({ params }: Route.LoaderArgs) {
1010
select: { storageKey: true },
1111
})
1212
invariantResponse(noteImage, 'Note image not found', { status: 404 })
13-
return fetch(getImageUrl(noteImage.storageKey))
13+
14+
const { url, headers } = getSignedGetRequestInfo(noteImage.storageKey)
15+
return fetch(url, { headers })
1416
}
Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { invariantResponse } from '@epic-web/invariant'
22
import { prisma } from '#app/utils/db.server.ts'
3-
import { getImageUrl } from '#app/utils/storage.server.ts'
3+
import { getSignedGetRequestInfo } from '#app/utils/storage.server.ts'
44
import { type Route } from './+types/user-images.$imageId.ts'
55

66
export async function loader({ params }: Route.LoaderArgs) {
@@ -10,5 +10,6 @@ export async function loader({ params }: Route.LoaderArgs) {
1010
select: { storageKey: true },
1111
})
1212
invariantResponse(userImage, 'User image not found', { status: 404 })
13-
return fetch(getImageUrl(userImage.storageKey))
13+
const { url, headers } = getSignedGetRequestInfo(userImage.storageKey)
14+
return fetch(url, { headers })
1415
}

app/utils/env.server.ts

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,10 +19,11 @@ const schema = z.object({
1919
ALLOW_INDEXING: z.enum(['true', 'false']).optional(),
2020

2121
// Tigris Object Storage Configuration
22-
STORAGE_ACCESS_KEY: z.string(),
23-
STORAGE_SECRET_KEY: z.string(),
24-
STORAGE_BUCKET: z.string(),
25-
STORAGE_ENDPOINT: z.string().url(),
22+
AWS_ACCESS_KEY_ID: z.string(),
23+
AWS_SECRET_ACCESS_KEY: z.string(),
24+
AWS_REGION: z.string(),
25+
AWS_ENDPOINT_URL_S3: z.string().url(),
26+
BUCKET_NAME: z.string(),
2627
})
2728

2829
declare global {

app/utils/storage.server.ts

Lines changed: 127 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,19 @@
1+
import { createHash, createHmac } from 'crypto'
12
import { type FileUpload } from '@mjackson/form-data-parser'
23
import { createId } from '@paralleldrive/cuid2'
34

4-
const STORAGE_ENDPOINT = process.env.STORAGE_ENDPOINT
5-
const STORAGE_BUCKET = process.env.STORAGE_BUCKET
6-
const STORAGE_ACCESS_KEY = process.env.STORAGE_ACCESS_KEY
7-
const STORAGE_SECRET_KEY = process.env.STORAGE_SECRET_KEY
5+
const STORAGE_ENDPOINT = process.env.AWS_ENDPOINT_URL_S3
6+
const STORAGE_BUCKET = process.env.BUCKET_NAME
7+
const STORAGE_ACCESS_KEY = process.env.AWS_ACCESS_KEY_ID
8+
const STORAGE_SECRET_KEY = process.env.AWS_SECRET_ACCESS_KEY
9+
const STORAGE_REGION = process.env.AWS_REGION
810

911
async function uploadToStorage(file: File | FileUpload, key: string) {
10-
const url = getImageUrl(key)
12+
const { url, headers } = getSignedPutRequestInfo(file, key)
13+
1114
const uploadResponse = await fetch(url, {
1215
method: 'PUT',
13-
headers: {
14-
'Content-Type': file.type,
15-
Authorization: `Basic ${btoa(`${STORAGE_ACCESS_KEY}:${STORAGE_SECRET_KEY}`)}`,
16-
'x-amz-meta-upload-date': new Date().toISOString(),
17-
},
16+
headers,
1817
body: file instanceof File ? file : Buffer.from(await file.arrayBuffer()),
1918
})
2019

@@ -50,6 +49,123 @@ export async function uploadNoteImage(
5049
return uploadToStorage(file, key)
5150
}
5251

53-
export function getImageUrl(imageId: string) {
52+
/**
 * Direct (unsigned) URL for an object stored under the configured bucket on
 * the S3-compatible endpoint. Internal only — callers go through the signed
 * request helpers.
 */
function getImageUrl(imageId: string) {
	return STORAGE_ENDPOINT + '/' + STORAGE_BUCKET + '/' + imageId
}
55+
56+
function hmacSha256(key: string | Buffer, message: string) {
57+
const hmac = createHmac('sha256', key)
58+
hmac.update(message)
59+
return hmac.digest()
60+
}
61+
62+
function sha256(message: string) {
63+
const hash = createHash('sha256')
64+
hash.update(message)
65+
return hash.digest('hex')
66+
}
67+
68+
function getSignatureKey(
69+
key: string,
70+
dateStamp: string,
71+
regionName: string,
72+
serviceName: string,
73+
) {
74+
const kDate = hmacSha256(`AWS4${key}`, dateStamp)
75+
const kRegion = hmacSha256(kDate, regionName)
76+
const kService = hmacSha256(kRegion, serviceName)
77+
const kSigning = hmacSha256(kService, 'aws4_request')
78+
return kSigning
79+
}
80+
81+
function getBaseSignedRequestInfo({
82+
method,
83+
key,
84+
contentType,
85+
uploadDate,
86+
}: {
87+
method: 'GET' | 'PUT'
88+
key: string
89+
contentType?: string
90+
uploadDate?: string
91+
}) {
92+
const url = getImageUrl(key)
93+
const endpoint = new URL(url)
94+
95+
// Prepare date strings
96+
const amzDate = new Date().toISOString().replace(/[:-]|\.\d{3}/g, '')
97+
const dateStamp = amzDate.slice(0, 8)
98+
99+
// Build headers array conditionally
100+
const headers = [
101+
...(contentType ? [`content-type:${contentType}`] : []),
102+
`host:${endpoint.host}`,
103+
`x-amz-content-sha256:UNSIGNED-PAYLOAD`,
104+
`x-amz-date:${amzDate}`,
105+
...(uploadDate ? [`x-amz-meta-upload-date:${uploadDate}`] : []),
106+
]
107+
108+
const canonicalHeaders = headers.join('\n') + '\n'
109+
const signedHeaders = headers.map((h) => h.split(':')[0]).join(';')
110+
111+
const canonicalRequest = [
112+
method,
113+
`/${STORAGE_BUCKET}/${key}`,
114+
'', // canonicalQueryString
115+
canonicalHeaders,
116+
signedHeaders,
117+
'UNSIGNED-PAYLOAD',
118+
].join('\n')
119+
120+
// Prepare string to sign
121+
const algorithm = 'AWS4-HMAC-SHA256'
122+
const credentialScope = `${dateStamp}/${STORAGE_REGION}/s3/aws4_request`
123+
const stringToSign = [
124+
algorithm,
125+
amzDate,
126+
credentialScope,
127+
sha256(canonicalRequest),
128+
].join('\n')
129+
130+
// Calculate signature
131+
const signingKey = getSignatureKey(
132+
STORAGE_SECRET_KEY,
133+
dateStamp,
134+
STORAGE_REGION,
135+
's3',
136+
)
137+
const signature = createHmac('sha256', signingKey)
138+
.update(stringToSign)
139+
.digest('hex')
140+
141+
const baseHeaders = {
142+
'X-Amz-Date': amzDate,
143+
'X-Amz-Content-SHA256': 'UNSIGNED-PAYLOAD',
144+
Authorization: [
145+
`${algorithm} Credential=${STORAGE_ACCESS_KEY}/${credentialScope}`,
146+
`SignedHeaders=${signedHeaders}`,
147+
`Signature=${signature}`,
148+
].join(', '),
149+
}
150+
151+
return { url, baseHeaders }
152+
}
153+
154+
function getSignedPutRequestInfo(file: File | FileUpload, key: string) {
155+
const uploadDate = new Date().toISOString()
156+
const { url, baseHeaders } = getBaseSignedRequestInfo({
157+
method: 'PUT',
158+
key,
159+
contentType: file.type,
160+
uploadDate,
161+
})
162+
163+
return {
164+
url,
165+
headers: {
166+
...baseHeaders,
167+
'Content-Type': file.type,
168+
'X-Amz-Meta-Upload-Date': uploadDate,
169+
},
170+
}
171+
}

docs/deployment.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,9 @@ Prior to your first deployment, you'll need to do a few things:
108108

109109
This will create a Tigris object storage bucket for both your production and
110110
staging environments. The bucket will be used for storing uploaded files and
111-
other objects in your application.
111+
other objects in your application. This will also automatically create the
112+
necessary environment variables for your app. During local development, this
113+
is completely mocked out so you don't need to worry about it.
112114

113115
9. Commit!
114116

package.json

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
"build:server": "tsx ./other/build-server.ts",
1717
"predev": "npm run build:icons --silent",
1818
"dev": "cross-env NODE_ENV=development MOCKS=true node ./server/dev-server.js",
19+
"dev:no-mocks": "cross-env NODE_ENV=development node ./server/dev-server.js",
1920
"prisma:studio": "prisma studio",
2021
"format": "prettier --write .",
2122
"lint": "eslint .",

server/dev-server.js

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ if (process.env.NODE_ENV === 'production') {
1010
shell: true,
1111
env: {
1212
FORCE_COLOR: true,
13-
MOCKS: true,
1413
...process.env,
1514
},
1615
// https://github.com/sindresorhus/execa/issues/433

tests/mocks/tigris.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,10 @@ const __dirname = path.dirname(__filename)
1212
const FIXTURES_DIR = path.join(__dirname, '..', 'fixtures')
1313
const MOCK_STORAGE_DIR = path.join(FIXTURES_DIR, 'uploaded')
1414
const FIXTURES_IMAGES_DIR = path.join(FIXTURES_DIR, 'images')
15-
const STORAGE_ENDPOINT = process.env.STORAGE_ENDPOINT
16-
const STORAGE_BUCKET = process.env.STORAGE_BUCKET
17-
const STORAGE_ACCESS_KEY = process.env.STORAGE_ACCESS_KEY
18-
const STORAGE_SECRET_KEY = process.env.STORAGE_SECRET_KEY
15+
const STORAGE_ENDPOINT = process.env.AWS_ENDPOINT_URL_S3
16+
const STORAGE_BUCKET = process.env.BUCKET_NAME
17+
const STORAGE_ACCESS_KEY = process.env.AWS_ACCESS_KEY_ID
18+
const STORAGE_SECRET_KEY = process.env.AWS_SECRET_ACCESS_KEY
1919

2020
function validateAuth(headers: Headers) {
2121
const authHeader = headers.get('Authorization')

0 commit comments

Comments
 (0)