Skip to content

Commit

Permalink
fix: allow copying objects to same destination and overwrite metadata (
Browse files Browse the repository at this point in the history
  • Loading branch information
fenos authored Jan 22, 2025
1 parent cf23e7d commit ddc5163
Show file tree
Hide file tree
Showing 8 changed files with 158 additions and 31 deletions.
21 changes: 20 additions & 1 deletion src/http/routes/object/copyObject.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { FromSchema } from 'json-schema-to-ts'
import { createDefaultSchema } from '../../routes-helper'
import { AuthenticatedRequest } from '../../types'
import { ROUTE_OPERATIONS } from '../operations'
import { parseUserMetadata } from '@storage/uploader'
import { objectSchema } from '@storage/schemas'

const copyRequestBodySchema = {
type: 'object',
Expand All @@ -11,14 +13,23 @@ const copyRequestBodySchema = {
sourceKey: { type: 'string', examples: ['folder/source.png'] },
destinationBucket: { type: 'string', examples: ['users'] },
destinationKey: { type: 'string', examples: ['folder/destination.png'] },
metadata: {
type: 'object',
properties: {
cacheControl: { type: 'string' },
mimetype: { type: 'string' },
},
},
copyMetadata: { type: 'boolean', examples: [true] },
},
required: ['sourceKey', 'bucketId', 'destinationKey'],
} as const
const successResponseSchema = {
type: 'object',
properties: {
Id: { type: 'string' },
Key: { type: 'string', examples: ['folder/destination.png'] },
...objectSchema.properties,
},
required: ['Key'],
}
Expand Down Expand Up @@ -48,21 +59,29 @@ export default async function routes(fastify: FastifyInstance) {
},
},
async (request, response) => {
const { sourceKey, destinationKey, bucketId, destinationBucket } = request.body
const { sourceKey, destinationKey, bucketId, destinationBucket, metadata } = request.body

const destinationBucketId = destinationBucket || bucketId
const userMetadata = request.headers['x-metadata']

const result = await request.storage.from(bucketId).copyObject({
sourceKey,
destinationBucket: destinationBucketId,
destinationKey,
owner: request.owner,
userMetadata:
typeof userMetadata === 'string' ? parseUserMetadata(userMetadata) : undefined,
metadata: metadata,
copyMetadata: request.body.copyMetadata ?? true,
upsert: request.headers['x-upsert'] === 'true',
})

return response.status(result.httpStatusCode ?? 200).send({
// Deprecated, remove in next major
Id: result.destObject.id,
Key: `${destinationBucketId}/${destinationKey}`,

...result.destObject,
})
}
)
Expand Down
3 changes: 3 additions & 0 deletions src/http/routes/s3/commands/copy-object.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { S3ProtocolHandler } from '@storage/protocols/s3/s3-handler'
import { S3Router } from '../router'
import { ROUTE_OPERATIONS } from '../../operations'
import { MetadataDirective } from '@aws-sdk/client-s3'

const CopyObjectInput = {
summary: 'Copy Object',
Expand All @@ -20,6 +21,7 @@ const CopyObjectInput = {
'x-amz-copy-source-if-modified-since': { type: 'string' },
'x-amz-copy-source-if-none-match': { type: 'string' },
'x-amz-copy-source-if-unmodified-since': { type: 'string' },
'x-amz-metadata-directive': { type: 'string' },
'content-encoding': { type: 'string' },
'content-type': { type: 'string' },
'cache-control': { type: 'string' },
Expand All @@ -42,6 +44,7 @@ export default function CopyObject(s3Router: S3Router) {
CopySource: req.Headers['x-amz-copy-source'],
ContentType: req.Headers['content-type'],
CacheControl: req.Headers['cache-control'],
MetadataDirective: req.Headers['x-amz-metadata-directive'] as MetadataDirective | undefined,
Expires: req.Headers.expires ? new Date(req.Headers.expires) : undefined,
ContentEncoding: req.Headers['content-encoding'],
CopySourceIfMatch: req.Headers['x-amz-copy-source-if-match'],
Expand Down
2 changes: 1 addition & 1 deletion src/storage/backend/s3/adapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ export class S3Backend implements StorageBackendAdapter {
try {
const command = new CopyObjectCommand({
Bucket: bucket,
CopySource: `${bucket}/${withOptionalVersion(source, version)}`,
CopySource: encodeURIComponent(`${bucket}/${withOptionalVersion(source, version)}`),
Key: withOptionalVersion(destination, destinationVersion),
CopySourceIfMatch: conditions?.ifMatch,
CopySourceIfNoneMatch: conditions?.ifNoneMatch,
Expand Down
36 changes: 19 additions & 17 deletions src/storage/object.ts
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,9 @@ export class ObjectStorage {
* @param owner
* @param conditions
* @param copyMetadata
* @param upsert
* @param fileMetadata
* @param userMetadata
*/
async copyObject({
sourceKey,
Expand All @@ -288,16 +291,14 @@ export class ObjectStorage {
'bucket_id,metadata,user_metadata,version'
)

if (s3SourceKey === s3DestinationKey) {
return {
destObject: originObject,
httpStatusCode: 200,
eTag: originObject.metadata?.eTag,
lastModified: originObject.metadata?.lastModified
? new Date(originObject.metadata.lastModified as string)
: undefined,
}
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const baseMetadata = originObject.metadata || {}
const destinationMetadata = copyMetadata
? baseMetadata
: {
...baseMetadata,
...(fileMetadata || {}),
}

await this.uploader.canUpload({
bucketId: destinationBucket,
Expand All @@ -313,7 +314,7 @@ export class ObjectStorage {
originObject.version,
s3DestinationKey,
newVersion,
fileMetadata,
destinationMetadata,
conditions
)

Expand All @@ -334,14 +335,16 @@ export class ObjectStorage {
}
)

const destinationMetadata = copyMetadata ? originObject.metadata : fileMetadata || {}

const destinationObject = await db.upsertObject({
...originObject,
bucket_id: destinationBucket,
name: destinationKey,
owner,
metadata: destinationMetadata,
metadata: {
...destinationMetadata,
lastModified: copyResult.lastModified,
eTag: copyResult.eTag,
},
user_metadata: copyMetadata ? originObject.user_metadata : userMetadata,
version: newVersion,
})
Expand Down Expand Up @@ -402,9 +405,8 @@ export class ObjectStorage {
mustBeValidKey(destinationObjectName)

const newVersion = randomUUID()
const s3SourceKey = encodeURIComponent(
`${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`
)
const s3SourceKey = `${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`

const s3DestinationKey = `${this.db.tenantId}/${destinationBucket}/${destinationObjectName}`

await this.db.testPermission((db) => {
Expand Down
5 changes: 5 additions & 0 deletions src/storage/protocols/s3/s3-handler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1012,6 +1012,11 @@ export class S3ProtocolHandler {
throw ERRORS.MissingParameter('CopySource')
}

if (!command.MetadataDirective) {
// default metadata directive is copy
command.MetadataDirective = 'COPY'
}

const copyResult = await this.storage.from(sourceBucket).copyObject({
sourceKey,
destinationBucket: Bucket,
Expand Down
21 changes: 13 additions & 8 deletions src/storage/uploader.ts
Original file line number Diff line number Diff line change
Expand Up @@ -354,8 +354,6 @@ export async function fileUploadFromRequest(
mimeType = request.headers['content-type'] || 'application/octet-stream'
cacheControl = request.headers['cache-control'] ?? 'no-cache'

const customMd = request.headers['x-metadata']

if (
options.allowedMimeTypes &&
options.allowedMimeTypes.length > 0 &&
Expand All @@ -364,13 +362,10 @@ export async function fileUploadFromRequest(
validateMimeType(mimeType, options.allowedMimeTypes)
}

const customMd = request.headers['x-metadata']

if (typeof customMd === 'string') {
try {
const json = Buffer.from(customMd, 'base64').toString('utf8')
userMetadata = JSON.parse(json)
} catch (e) {
// no-op
}
userMetadata = parseUserMetadata(customMd)
}
isTruncated = () => {
// @todo more secure to get this from the stream or from s3 in the next step
Expand All @@ -387,6 +382,16 @@ export async function fileUploadFromRequest(
}
}

/**
 * Decodes user-supplied object metadata sent via the `x-metadata` header:
 * a base64 string wrapping a JSON object of key/value pairs.
 *
 * @param metadata - base64-encoded JSON text
 * @returns the parsed metadata record, or `undefined` when the payload
 *          cannot be decoded or parsed — malformed metadata is deliberately
 *          ignored rather than failing the request
 */
export function parseUserMetadata(metadata: string) {
  try {
    const jsonText = Buffer.from(metadata, 'base64').toString('utf8')
    return JSON.parse(jsonText) as Record<string, string>
  } catch {
    // Swallow decode/parse errors; callers treat bad metadata as absent.
    return undefined
  }
}

export async function getStandardMaxFileSizeLimit(
tenantId: string,
bucketSizeLimit?: number | null
Expand Down
72 changes: 68 additions & 4 deletions src/test/object.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1216,7 +1216,8 @@ describe('testing copy object', () => {
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()
expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy11.png"}`)
const jsonResponse = await response.json()
expect(jsonResponse.Key).toBe(`bucket2/authenticated/casestudy11.png`)
})

test('can copy objects across buckets', async () => {
Expand All @@ -1235,7 +1236,9 @@ describe('testing copy object', () => {
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()
expect(response.body).toBe(`{"Key":"bucket3/authenticated/casestudy11.png"}`)
const jsonResponse = await response.json()

expect(jsonResponse.Key).toBe(`bucket3/authenticated/casestudy11.png`)
})

test('can copy objects keeping their metadata', async () => {
Expand All @@ -1255,7 +1258,8 @@ describe('testing copy object', () => {
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()
expect(response.body).toBe(`{"Key":"bucket2/authenticated/${copiedKey}"}`)
const jsonResponse = response.json()
expect(jsonResponse.Key).toBe(`bucket2/authenticated/${copiedKey}`)

const conn = await getSuperuserPostgrestClient()
const object = await conn
Expand All @@ -1271,6 +1275,65 @@ describe('testing copy object', () => {
})
})

// Regression test for copying an object onto its own key: the request must
// succeed (x-upsert required, since the destination already exists) and,
// with copyMetadata: false, must replace both the stored file metadata
// (cacheControl/mimetype) and the custom user metadata from x-metadata.
test('can copy objects to itself overwriting their metadata', async () => {
const copiedKey = 'casestudy-2349.png'
// x-metadata carries base64-encoded JSON of user-defined metadata;
// source and destination keys are identical on purpose.
const response = await app().inject({
method: 'POST',
url: '/object/copy',
headers: {
authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
'x-upsert': 'true',
'x-metadata': Buffer.from(
JSON.stringify({
newMetadata: 'test1',
})
).toString('base64'),
},
payload: {
bucketId: 'bucket2',
sourceKey: `authenticated/${copiedKey}`,
destinationKey: `authenticated/${copiedKey}`,
metadata: {
cacheControl: 'max-age=999',
mimetype: 'image/gif',
},
copyMetadata: false,
},
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()
const parsedBody = JSON.parse(response.body)

// Response echoes both the legacy `Key` field and the full object row
// (name, bucket_id, metadata) with the overwritten file metadata.
expect(parsedBody.Key).toBe(`bucket2/authenticated/${copiedKey}`)
expect(parsedBody.name).toBe(`authenticated/${copiedKey}`)
expect(parsedBody.bucket_id).toBe(`bucket2`)
expect(parsedBody.metadata).toEqual(
expect.objectContaining({
cacheControl: 'max-age=999',
mimetype: 'image/gif',
})
)

// Verify persistence: read the objects row directly with a superuser
// client, bypassing the HTTP layer.
const conn = await getSuperuserPostgrestClient()
const object = await conn
.table('objects')
.select('*')
.where('bucket_id', 'bucket2')
.where('name', `authenticated/${copiedKey}`)
.first()

expect(object).not.toBeFalsy()
// user_metadata was replaced with the x-metadata payload (copyMetadata: false).
expect(object.user_metadata).toEqual({
newMetadata: 'test1',
})
// File metadata was replaced with the request-body `metadata` values.
expect(object.metadata).toEqual(
expect.objectContaining({
cacheControl: 'max-age=999',
mimetype: 'image/gif',
})
)
})

test('can copy objects excluding their metadata', async () => {
const copiedKey = 'casestudy-2450.png'
const response = await app().inject({
Expand All @@ -1288,7 +1351,8 @@ describe('testing copy object', () => {
})
expect(response.statusCode).toBe(200)
expect(S3Backend.prototype.copyObject).toBeCalled()
expect(response.body).toBe(`{"Key":"bucket2/authenticated/${copiedKey}"}`)
const jsonResponse = response.json()
expect(jsonResponse.Key).toBe(`bucket2/authenticated/${copiedKey}`)

const conn = await getSuperuserPostgrestClient()
const object = await conn
Expand Down
29 changes: 29 additions & 0 deletions src/test/s3-protocol.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -976,6 +976,7 @@ describe('S3 Protocol', () => {
CopySource: `${bucketName}/test-copy-1.jpg`,
ContentType: 'image/png',
CacheControl: 'max-age=2009',
MetadataDirective: 'REPLACE',
})

const resp = await client.send(copyObjectCommand)
Expand All @@ -991,6 +992,34 @@ describe('S3 Protocol', () => {
expect(headObj.CacheControl).toBe('max-age=2009')
})

// S3-protocol regression test: CopyObject where source and destination are
// the same key is valid when MetadataDirective=REPLACE (S3 rejects a
// same-key copy only when the directive is COPY, i.e. a no-op).
it('will allow copying an object in the same path, just altering its metadata', async () => {
const bucketName = await createBucket(client)
const fileName = 'test-copy-1.jpg'

await uploadFile(client, bucketName, fileName, 1)

// Copy the object onto itself, replacing ContentType and CacheControl.
const copyObjectCommand = new CopyObjectCommand({
Bucket: bucketName,
Key: fileName,
CopySource: `${bucketName}/${fileName}`,
ContentType: 'image/png',
CacheControl: 'max-age=2009',
MetadataDirective: 'REPLACE',
})

const resp = await client.send(copyObjectCommand)
expect(resp.CopyObjectResult?.ETag).toBeTruthy()

// HEAD the same key to confirm the replacement metadata was stored.
const headObjectCommand = new HeadObjectCommand({
Bucket: bucketName,
Key: fileName,
})

const headObj = await client.send(headObjectCommand)
expect(headObj.ContentType).toBe('image/png')
expect(headObj.CacheControl).toBe('max-age=2009')
})

it('will not be able to copy an object that doesnt exists', async () => {
const bucketName1 = await createBucket(client)
await uploadFile(client, bucketName1, 'test-copy-1.jpg', 1)
Expand Down

0 comments on commit ddc5163

Please sign in to comment.