Skip to content

Commit

Permalink
fix: allow copying objects to same destination and overwrite metadata
Browse files Browse the repository at this point in the history
  • Loading branch information
fenos committed Jan 21, 2025
1 parent 13fd5fe commit 0717258
Show file tree
Hide file tree
Showing 6 changed files with 139 additions and 27 deletions.
21 changes: 20 additions & 1 deletion src/http/routes/object/copyObject.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { FromSchema } from 'json-schema-to-ts'
import { createDefaultSchema } from '../../routes-helper'
import { AuthenticatedRequest } from '../../types'
import { ROUTE_OPERATIONS } from '../operations'
import { parseUserMetadata } from '@storage/uploader'
import { objectSchema } from '@storage/schemas'

const copyRequestBodySchema = {
type: 'object',
Expand All @@ -11,14 +13,23 @@ const copyRequestBodySchema = {
sourceKey: { type: 'string', examples: ['folder/source.png'] },
destinationBucket: { type: 'string', examples: ['users'] },
destinationKey: { type: 'string', examples: ['folder/destination.png'] },
metadata: {
type: 'object',
properties: {
cacheControl: { type: 'string' },
mimetype: { type: 'string' },
},
},
copyMetadata: { type: 'boolean', examples: [true] },
},
required: ['sourceKey', 'bucketId', 'destinationKey'],
} as const
// JSON schema describing the copy-object success response body.
// The full stored object row is spread in via objectSchema.properties,
// alongside the legacy top-level fields.
const successResponseSchema = {
type: 'object',
properties: {
// NOTE(review): `Id` duplicates the spread object's `id` and appears kept only
// for backward compatibility — presumably removable in the next major; confirm.
Id: { type: 'string' },
Key: { type: 'string', examples: ['folder/destination.png'] },
// Expose every column of the destination object record in the response.
...objectSchema.properties,
},
required: ['Key'],
}
Expand Down Expand Up @@ -48,21 +59,29 @@ export default async function routes(fastify: FastifyInstance) {
},
},
async (request, response) => {
const { sourceKey, destinationKey, bucketId, destinationBucket } = request.body
const { sourceKey, destinationKey, bucketId, destinationBucket, metadata } = request.body

const destinationBucketId = destinationBucket || bucketId
const userMetadata = request.headers['x-metadata']

const result = await request.storage.from(bucketId).copyObject({
sourceKey,
destinationBucket: destinationBucketId,
destinationKey,
owner: request.owner,
userMetadata:
typeof userMetadata === 'string' ? parseUserMetadata(userMetadata) : undefined,
metadata: metadata,
copyMetadata: request.body.copyMetadata ?? true,
upsert: request.headers['x-upsert'] === 'true',
})

return response.status(result.httpStatusCode ?? 200).send({
// Deprecated, remove in next major
Id: result.destObject.id,
Key: `${destinationBucketId}/${destinationKey}`,

...result.destObject,
})
}
)
Expand Down
2 changes: 1 addition & 1 deletion src/storage/backend/s3.ts
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ export class S3Backend implements StorageBackendAdapter {
try {
const command = new CopyObjectCommand({
Bucket: bucket,
CopySource: `${bucket}/${withOptionalVersion(source, version)}`,
CopySource: encodeURIComponent(`${bucket}/${withOptionalVersion(source, version)}`),
Key: withOptionalVersion(destination, destinationVersion),
CopySourceIfMatch: conditions?.ifMatch,
CopySourceIfNoneMatch: conditions?.ifNoneMatch,
Expand Down
36 changes: 19 additions & 17 deletions src/storage/object.ts
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,9 @@ export class ObjectStorage {
* @param owner
* @param conditions
* @param copyMetadata
* @param upsert
* @param fileMetadata
* @param userMetadata
*/
async copyObject({
sourceKey,
Expand All @@ -288,16 +291,14 @@ export class ObjectStorage {
'bucket_id,metadata,user_metadata,version'
)

if (s3SourceKey === s3DestinationKey) {
return {
destObject: originObject,
httpStatusCode: 200,
eTag: originObject.metadata?.eTag,
lastModified: originObject.metadata?.lastModified
? new Date(originObject.metadata.lastModified as string)
: undefined,
}
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { cacheControl, mimetype, ...baseMetadata } = originObject.metadata || {}
const destinationMetadata = copyMetadata
? originObject.metadata || {}
: {
...baseMetadata,
...(fileMetadata || {}),
}

await this.uploader.canUpload({
bucketId: destinationBucket,
Expand All @@ -313,7 +314,7 @@ export class ObjectStorage {
originObject.version,
s3DestinationKey,
newVersion,
fileMetadata,
destinationMetadata,
conditions
)

Expand All @@ -334,14 +335,16 @@ export class ObjectStorage {
}
)

const destinationMetadata = copyMetadata ? originObject.metadata : fileMetadata || {}

const destinationObject = await db.upsertObject({
...originObject,
bucket_id: destinationBucket,
name: destinationKey,
owner,
metadata: destinationMetadata,
metadata: {
...destinationMetadata,
lastModified: copyResult.lastModified,
eTag: copyResult.eTag,
},
user_metadata: copyMetadata ? originObject.user_metadata : userMetadata,
version: newVersion,
})
Expand Down Expand Up @@ -402,9 +405,8 @@ export class ObjectStorage {
mustBeValidKey(destinationObjectName)

const newVersion = randomUUID()
const s3SourceKey = encodeURIComponent(
`${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`
)
const s3SourceKey = `${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`

const s3DestinationKey = `${this.db.tenantId}/${destinationBucket}/${destinationObjectName}`

await this.db.testPermission((db) => {
Expand Down
21 changes: 13 additions & 8 deletions src/storage/uploader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -354,8 +354,6 @@ export async function fileUploadFromRequest(
mimeType = request.headers['content-type'] || 'application/octet-stream'
cacheControl = request.headers['cache-control'] ?? 'no-cache'

const customMd = request.headers['x-metadata']

if (
options.allowedMimeTypes &&
options.allowedMimeTypes.length > 0 &&
Expand All @@ -364,13 +362,10 @@ export async function fileUploadFromRequest(
validateMimeType(mimeType, options.allowedMimeTypes)
}

const customMd = request.headers['x-metadata']

if (typeof customMd === 'string') {
try {
const json = Buffer.from(customMd, 'base64').toString('utf8')
userMetadata = JSON.parse(json)
} catch (e) {
// no-op
}
userMetadata = parseUserMetadata(customMd)
}
isTruncated = () => {
// @todo more secure to get this from the stream or from s3 in the next step
Expand All @@ -387,6 +382,16 @@ export async function fileUploadFromRequest(
}
}

/**
 * Decodes a user-metadata header value (base64-encoded JSON object),
 * as supplied via the `x-metadata` request header.
 *
 * @param metadata - base64 string whose decoded form should be a JSON
 *                   object of string key/value pairs
 * @returns the parsed metadata object, or `undefined` when the input is
 *          not valid base64/JSON or does not decode to a plain object
 */
export function parseUserMetadata(metadata: string): Record<string, string> | undefined {
try {
const json = Buffer.from(metadata, 'base64').toString('utf8')
const parsed: unknown = JSON.parse(json)
// Guard against valid JSON that is not a plain object (e.g. "123", "[1]"),
// which the previous unchecked `as` cast let through under a lying type.
if (parsed !== null && typeof parsed === 'object' && !Array.isArray(parsed)) {
return parsed as Record<string, string>
}
return undefined
} catch {
// Malformed base64 / JSON: treat as "no metadata supplied".
return undefined
}
}

export async function getStandardMaxFileSizeLimit(
tenantId: string,
bucketSizeLimit?: number | null
Expand Down
59 changes: 59 additions & 0 deletions src/test/object.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1271,6 +1271,65 @@ describe('testing copy object', () => {
})
})

test('can copy objects to itself overwriting their metadata', async () => {
// Copy an object onto its own key while replacing both the file
// metadata (cacheControl/mimetype) and the user metadata.
const objectKey = 'casestudy-2349.png'
const objectPath = `authenticated/${objectKey}`
const encodedUserMetadata = Buffer.from(JSON.stringify({ newMetadata: 'test1' })).toString(
'base64'
)

const response = await app().inject({
method: 'POST',
url: '/object/copy',
headers: {
authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
'x-upsert': 'true',
'x-metadata': encodedUserMetadata,
},
payload: {
bucketId: 'bucket2',
sourceKey: objectPath,
destinationKey: objectPath,
metadata: { cacheControl: 'max-age=999', mimetype: 'image/gif' },
copyMetadata: false,
},
})

expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()

// The response carries both the legacy Key field and the full object row.
const body = JSON.parse(response.body)
expect(body.Key).toBe(`bucket2/${objectPath}`)
expect(body.name).toBe(objectPath)
expect(body.bucket_id).toBe(`bucket2`)
expect(body.metadata).toEqual(
expect.objectContaining({ cacheControl: 'max-age=999', mimetype: 'image/gif' })
)

// Verify the persisted row was overwritten, not just the response payload.
const db = await getSuperuserPostgrestClient()
const row = await db
.table('objects')
.select('*')
.where('bucket_id', 'bucket2')
.where('name', objectPath)
.first()

expect(row).not.toBeFalsy()
expect(row.user_metadata).toEqual({ newMetadata: 'test1' })
expect(row.metadata).toEqual(
expect.objectContaining({ cacheControl: 'max-age=999', mimetype: 'image/gif' })
)
})

test('can copy objects excluding their metadata', async () => {
const copiedKey = 'casestudy-2450.png'
const response = await app().inject({
Expand Down
27 changes: 27 additions & 0 deletions src/test/s3-protocol.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -991,6 +991,33 @@ describe('S3 Protocol', () => {
expect(headObj.CacheControl).toBe('max-age=2009')
})

it('will allow copying an object in the same path, just altering its metadata', async () => {
const bucketName = await createBucket(client)
const fileName = 'test-copy-1.jpg'

await uploadFile(client, bucketName, fileName, 1)

const copyObjectCommand = new CopyObjectCommand({
Bucket: bucketName,
Key: fileName,
CopySource: `${bucketName}/${fileName}`,
ContentType: 'image/png',
CacheControl: 'max-age=2009',
})

const resp = await client.send(copyObjectCommand)
expect(resp.CopyObjectResult?.ETag).toBeTruthy()

const headObjectCommand = new HeadObjectCommand({
Bucket: bucketName,
Key: fileName,
})

const headObj = await client.send(headObjectCommand)
expect(headObj.ContentType).toBe('image/png')
expect(headObj.CacheControl).toBe('max-age=2009')
})

it('will not be able to copy an object that doesnt exists', async () => {
const bucketName1 = await createBucket(client)
await uploadFile(client, bucketName1, 'test-copy-1.jpg', 1)
Expand Down

0 comments on commit 0717258

Please sign in to comment.