diff --git a/src/http/routes/object/copyObject.ts b/src/http/routes/object/copyObject.ts
index 2761bc95..19f3b056 100644
--- a/src/http/routes/object/copyObject.ts
+++ b/src/http/routes/object/copyObject.ts
@@ -3,6 +3,8 @@ import { FromSchema } from 'json-schema-to-ts'
 import { createDefaultSchema } from '../../routes-helper'
 import { AuthenticatedRequest } from '../../types'
 import { ROUTE_OPERATIONS } from '../operations'
+import { parseUserMetadata } from '@storage/uploader'
+import { objectSchema } from '@storage/schemas'
 
 const copyRequestBodySchema = {
   type: 'object',
@@ -11,6 +13,13 @@
     sourceKey: { type: 'string', examples: ['folder/source.png'] },
     destinationBucket: { type: 'string', examples: ['users'] },
     destinationKey: { type: 'string', examples: ['folder/destination.png'] },
+    metadata: {
+      type: 'object',
+      properties: {
+        cacheControl: { type: 'string' },
+        mimetype: { type: 'string' },
+      },
+    },
     copyMetadata: { type: 'boolean', examples: [true] },
   },
   required: ['sourceKey', 'bucketId', 'destinationKey'],
@@ -18,7 +27,9 @@
 const successResponseSchema = {
   type: 'object',
   properties: {
+    Id: { type: 'string' },
     Key: { type: 'string', examples: ['folder/destination.png'] },
+    ...objectSchema.properties,
   },
   required: ['Key'],
 }
@@ -48,21 +59,29 @@ export default async function routes(fastify: FastifyInstance) {
       },
     },
     async (request, response) => {
-      const { sourceKey, destinationKey, bucketId, destinationBucket } = request.body
+      const { sourceKey, destinationKey, bucketId, destinationBucket, metadata } = request.body
       const destinationBucketId = destinationBucket || bucketId
+      const userMetadata = request.headers['x-metadata']
 
       const result = await request.storage.from(bucketId).copyObject({
         sourceKey,
         destinationBucket: destinationBucketId,
         destinationKey,
         owner: request.owner,
+        userMetadata:
+          typeof userMetadata === 'string' ? parseUserMetadata(userMetadata) : undefined,
+        metadata: metadata,
         copyMetadata: request.body.copyMetadata ?? true,
+        upsert: request.headers['x-upsert'] === 'true',
       })
 
       return response.status(result.httpStatusCode ?? 200).send({
+        // Deprecated, remove in next major
+        Id: result.destObject.id,
         Key: `${destinationBucketId}/${destinationKey}`,
+
+        ...result.destObject,
       })
     }
   )
diff --git a/src/http/routes/s3/commands/copy-object.ts b/src/http/routes/s3/commands/copy-object.ts
index 1f7932cb..29d5992a 100644
--- a/src/http/routes/s3/commands/copy-object.ts
+++ b/src/http/routes/s3/commands/copy-object.ts
@@ -1,6 +1,7 @@
 import { S3ProtocolHandler } from '@storage/protocols/s3/s3-handler'
 import { S3Router } from '../router'
 import { ROUTE_OPERATIONS } from '../../operations'
+import { MetadataDirective } from '@aws-sdk/client-s3'
 
 const CopyObjectInput = {
   summary: 'Copy Object',
@@ -20,6 +21,7 @@ const CopyObjectInput = {
       'x-amz-copy-source-if-modified-since': { type: 'string' },
       'x-amz-copy-source-if-none-match': { type: 'string' },
       'x-amz-copy-source-if-unmodified-since': { type: 'string' },
+      'x-amz-metadata-directive': { type: 'string' },
       'content-encoding': { type: 'string' },
       'content-type': { type: 'string' },
       'cache-control': { type: 'string' },
@@ -42,6 +44,7 @@ export default function CopyObject(s3Router: S3Router) {
       CopySource: req.Headers['x-amz-copy-source'],
       ContentType: req.Headers['content-type'],
       CacheControl: req.Headers['cache-control'],
+      MetadataDirective: req.Headers['x-amz-metadata-directive'] as MetadataDirective | undefined,
       Expires: req.Headers.expires ? new Date(req.Headers.expires) : undefined,
       ContentEncoding: req.Headers['content-encoding'],
       CopySourceIfMatch: req.Headers['x-amz-copy-source-if-match'],
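Reviewer note: taken together, the two route changes above mean a client can now copy an object onto its own key to rewrite its metadata in place. A minimal sketch of such a call against the updated `/object/copy` route — the host, bucket, and key names are placeholders, not part of this diff:

```ts
// Same-path copy that rewrites metadata. `x-metadata` carries base64-encoded
// JSON (decoded server-side by parseUserMetadata); `x-upsert: true` is needed
// because the destination row already exists.
const res = await fetch('http://localhost:5000/object/copy', {
  method: 'POST',
  headers: {
    authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
    'content-type': 'application/json',
    'x-upsert': 'true',
    'x-metadata': Buffer.from(JSON.stringify({ label: 'v2' })).toString('base64'),
  },
  body: JSON.stringify({
    bucketId: 'avatars',
    sourceKey: 'folder/source.png',
    destinationKey: 'folder/source.png', // same key: in-place metadata rewrite
    metadata: { cacheControl: 'max-age=999', mimetype: 'image/png' },
    copyMetadata: false, // do not carry the old metadata over
  }),
})
const body = await res.json() // now { Id, Key, ...full object row }
```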
diff --git a/src/storage/backend/s3/adapter.ts b/src/storage/backend/s3/adapter.ts
index 2d6abd0e..fdde4da3 100644
--- a/src/storage/backend/s3/adapter.ts
+++ b/src/storage/backend/s3/adapter.ts
@@ -231,7 +231,7 @@
     try {
       const command = new CopyObjectCommand({
         Bucket: bucket,
-        CopySource: `${bucket}/${withOptionalVersion(source, version)}`,
+        CopySource: encodeURIComponent(`${bucket}/${withOptionalVersion(source, version)}`),
         Key: withOptionalVersion(destination, destinationVersion),
         CopySourceIfMatch: conditions?.ifMatch,
         CopySourceIfNoneMatch: conditions?.ifNoneMatch,
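For context on the adapter change: the SDK forwards `CopySource` largely as-is in the `x-amz-copy-source` header, so keys containing spaces or non-ASCII characters must be percent-encoded by the caller; `encodeURIComponent` also encodes the `/` separators, which S3 decodes back. A quick illustration with a made-up path:

```ts
const source = 'tenant-1/bucket2/folder/with space/ümlaut.png'

// Before this change the raw string went out on the wire, producing an
// invalid copy-source header for a key like the one above.
const encoded = encodeURIComponent(source)
console.log(encoded)
// tenant-1%2Fbucket2%2Ffolder%2Fwith%20space%2F%C3%BCmlaut.png
```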
diff --git a/src/storage/object.ts b/src/storage/object.ts
index ca44a744..2776ae9f 100644
--- a/src/storage/object.ts
+++ b/src/storage/object.ts
@@ -263,6 +263,9 @@
    * @param owner
    * @param conditions
    * @param copyMetadata
+   * @param upsert
+   * @param fileMetadata
+   * @param userMetadata
    */
   async copyObject({
     sourceKey,
@@ -288,16 +291,14 @@
       'bucket_id,metadata,user_metadata,version'
     )
 
-    if (s3SourceKey === s3DestinationKey) {
-      return {
-        destObject: originObject,
-        httpStatusCode: 200,
-        eTag: originObject.metadata?.eTag,
-        lastModified: originObject.metadata?.lastModified
-          ? new Date(originObject.metadata.lastModified as string)
-          : undefined,
-      }
-    }
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    const baseMetadata = originObject.metadata || {}
+    const destinationMetadata = copyMetadata
+      ? baseMetadata
+      : {
+          ...baseMetadata,
+          ...(fileMetadata || {}),
+        }
 
     await this.uploader.canUpload({
       bucketId: destinationBucket,
@@ -313,7 +314,7 @@
       originObject.version,
       s3DestinationKey,
       newVersion,
-      fileMetadata,
+      destinationMetadata,
       conditions
     )
 
@@ -334,14 +335,16 @@
       }
     )
 
-    const destinationMetadata = copyMetadata ? originObject.metadata : fileMetadata || {}
-
     const destinationObject = await db.upsertObject({
       ...originObject,
       bucket_id: destinationBucket,
       name: destinationKey,
       owner,
-      metadata: destinationMetadata,
+      metadata: {
+        ...destinationMetadata,
+        lastModified: copyResult.lastModified,
+        eTag: copyResult.eTag,
+      },
       user_metadata: copyMetadata ? originObject.user_metadata : userMetadata,
       version: newVersion,
     })
@@ -402,9 +405,8 @@
     mustBeValidKey(destinationObjectName)
     const newVersion = randomUUID()
 
-    const s3SourceKey = encodeURIComponent(
-      `${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`
-    )
+    const s3SourceKey = `${this.db.tenantId}/${this.bucketId}/${sourceObjectName}`
+
     const s3DestinationKey = `${this.db.tenantId}/${destinationBucket}/${destinationObjectName}`
 
     await this.db.testPermission((db) => {
diff --git a/src/storage/protocols/s3/s3-handler.ts b/src/storage/protocols/s3/s3-handler.ts
index 54c4b11b..baa6a3ed 100644
--- a/src/storage/protocols/s3/s3-handler.ts
+++ b/src/storage/protocols/s3/s3-handler.ts
@@ -1012,6 +1012,11 @@
       throw ERRORS.MissingParameter('CopySource')
     }
 
+    if (!command.MetadataDirective) {
+      // default metadata directive is copy
+      command.MetadataDirective = 'COPY'
+    }
+
     const copyResult = await this.storage.from(sourceBucket).copyObject({
       sourceKey,
       destinationBucket: Bucket,
diff --git a/src/storage/uploader.ts b/src/storage/uploader.ts
index d085b77d..2018e359 100644
--- a/src/storage/uploader.ts
+++ b/src/storage/uploader.ts
@@ -354,8 +354,6 @@
     mimeType = request.headers['content-type'] || 'application/octet-stream'
     cacheControl = request.headers['cache-control'] ?? 'no-cache'
 
-    const customMd = request.headers['x-metadata']
-
     if (
       options.allowedMimeTypes &&
       options.allowedMimeTypes.length > 0 &&
@@ -364,13 +362,10 @@
       validateMimeType(mimeType, options.allowedMimeTypes)
     }
 
+    const customMd = request.headers['x-metadata']
+
     if (typeof customMd === 'string') {
-      try {
-        const json = Buffer.from(customMd, 'base64').toString('utf8')
-        userMetadata = JSON.parse(json)
-      } catch (e) {
-        // no-op
-      }
+      userMetadata = parseUserMetadata(customMd)
     }
     isTruncated = () => {
       // @todo more secure to get this from the stream or from s3 in the next step
@@ -387,6 +382,16 @@
   }
 }
 
+export function parseUserMetadata(metadata: string) {
+  try {
+    const json = Buffer.from(metadata, 'base64').toString('utf8')
+    return JSON.parse(json) as Record<string, string>
+  } catch (e) {
+    // no-op
+    return undefined
+  }
+}
+
 export async function getStandardMaxFileSizeLimit(
   tenantId: string,
   bucketSizeLimit?: number | null
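Since `parseUserMetadata` is now exported and reused by the copy route, here is a quick round-trip of the encoding it expects (the metadata values are illustrative):

```ts
import { parseUserMetadata } from '@storage/uploader'

// The x-metadata header carries base64-encoded JSON...
const encoded = Buffer.from(JSON.stringify({ newMetadata: 'test1' })).toString('base64')

parseUserMetadata(encoded) // => { newMetadata: 'test1' }

// ...and malformed input is swallowed rather than thrown, yielding undefined,
// so a bad header can never fail an upload or copy.
parseUserMetadata('%%% not base64 json %%%') // => undefined
```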
diff --git a/src/test/object.test.ts b/src/test/object.test.ts
index bb941a80..844ebc4c 100644
--- a/src/test/object.test.ts
+++ b/src/test/object.test.ts
@@ -1216,7 +1216,8 @@ describe('testing copy object', () => {
     })
     expect(response.statusCode).toBe(200)
     expect(S3Backend.prototype.copyObject).toBeCalled()
-    expect(response.body).toBe(`{"Key":"bucket2/authenticated/casestudy11.png"}`)
+    const jsonResponse = await response.json()
+    expect(jsonResponse.Key).toBe(`bucket2/authenticated/casestudy11.png`)
   })
 
   test('can copy objects across buckets', async () => {
@@ -1235,7 +1236,9 @@
     })
     expect(response.statusCode).toBe(200)
     expect(S3Backend.prototype.copyObject).toBeCalled()
-    expect(response.body).toBe(`{"Key":"bucket3/authenticated/casestudy11.png"}`)
+    const jsonResponse = await response.json()
+
+    expect(jsonResponse.Key).toBe(`bucket3/authenticated/casestudy11.png`)
   })
 
   test('can copy objects keeping their metadata', async () => {
@@ -1255,7 +1258,8 @@
     })
     expect(response.statusCode).toBe(200)
     expect(S3Backend.prototype.copyObject).toBeCalled()
-    expect(response.body).toBe(`{"Key":"bucket2/authenticated/${copiedKey}"}`)
+    const jsonResponse = response.json()
+    expect(jsonResponse.Key).toBe(`bucket2/authenticated/${copiedKey}`)
 
     const conn = await getSuperuserPostgrestClient()
     const object = await conn
@@ -1271,6 +1275,65 @@
     })
   })
 
+  test('can copy objects to itself overwriting their metadata', async () => {
+    const copiedKey = 'casestudy-2349.png'
+    const response = await app().inject({
+      method: 'POST',
+      url: '/object/copy',
+      headers: {
+        authorization: `Bearer ${process.env.AUTHENTICATED_KEY}`,
+        'x-upsert': 'true',
+        'x-metadata': Buffer.from(
+          JSON.stringify({
+            newMetadata: 'test1',
+          })
+        ).toString('base64'),
+      },
+      payload: {
+        bucketId: 'bucket2',
+        sourceKey: `authenticated/${copiedKey}`,
+        destinationKey: `authenticated/${copiedKey}`,
+        metadata: {
+          cacheControl: 'max-age=999',
+          mimetype: 'image/gif',
+        },
+        copyMetadata: false,
+      },
+    })
+    expect(response.statusCode).toBe(200)
+    expect(S3Backend.prototype.copyObject).toBeCalled()
+    const parsedBody = JSON.parse(response.body)
+
+    expect(parsedBody.Key).toBe(`bucket2/authenticated/${copiedKey}`)
+    expect(parsedBody.name).toBe(`authenticated/${copiedKey}`)
+    expect(parsedBody.bucket_id).toBe(`bucket2`)
+    expect(parsedBody.metadata).toEqual(
+      expect.objectContaining({
+        cacheControl: 'max-age=999',
+        mimetype: 'image/gif',
+      })
+    )
+
+    const conn = await getSuperuserPostgrestClient()
+    const object = await conn
+      .table('objects')
+      .select('*')
+      .where('bucket_id', 'bucket2')
+      .where('name', `authenticated/${copiedKey}`)
+      .first()
+
+    expect(object).not.toBeFalsy()
+    expect(object.user_metadata).toEqual({
+      newMetadata: 'test1',
+    })
+    expect(object.metadata).toEqual(
+      expect.objectContaining({
+        cacheControl: 'max-age=999',
+        mimetype: 'image/gif',
+      })
+    )
+  })
+
   test('can copy objects excluding their metadata', async () => {
     const copiedKey = 'casestudy-2450.png'
     const response = await app().inject({
@@ -1288,7 +1351,8 @@
     })
     expect(response.statusCode).toBe(200)
     expect(S3Backend.prototype.copyObject).toBeCalled()
-    expect(response.body).toBe(`{"Key":"bucket2/authenticated/${copiedKey}"}`)
+    const jsonResponse = response.json()
+    expect(jsonResponse.Key).toBe(`bucket2/authenticated/${copiedKey}`)
 
     const conn = await getSuperuserPostgrestClient()
     const object = await conn
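The protocol-level tests below exercise `MetadataDirective`; for reviewers unfamiliar with it, this is how the two directives behave against the S3 endpoint after this change. The client configuration is illustrative only — any `S3Client` pointed at the storage S3 API will do:

```ts
import { S3Client, CopyObjectCommand } from '@aws-sdk/client-s3'

const client = new S3Client({
  endpoint: 'http://localhost:5000/s3', // placeholder endpoint
  region: 'us-east-1',
  forcePathStyle: true,
  credentials: { accessKeyId: 'key', secretAccessKey: 'secret' }, // placeholders
})

// No directive: the handler now defaults to COPY, so source metadata is kept.
await client.send(
  new CopyObjectCommand({
    Bucket: 'bucket',
    Key: 'dest.jpg',
    CopySource: 'bucket/source.jpg',
  })
)

// REPLACE is what makes same-path copies useful: the object stays in place
// while its ContentType and CacheControl are rewritten.
await client.send(
  new CopyObjectCommand({
    Bucket: 'bucket',
    Key: 'source.jpg',
    CopySource: 'bucket/source.jpg',
    ContentType: 'image/png',
    CacheControl: 'max-age=2009',
    MetadataDirective: 'REPLACE',
  })
)
```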
diff --git a/src/test/s3-protocol.test.ts b/src/test/s3-protocol.test.ts
index 221b54ad..6d505298 100644
--- a/src/test/s3-protocol.test.ts
+++ b/src/test/s3-protocol.test.ts
@@ -976,6 +976,7 @@
       CopySource: `${bucketName}/test-copy-1.jpg`,
       ContentType: 'image/png',
       CacheControl: 'max-age=2009',
+      MetadataDirective: 'REPLACE',
     })
 
     const resp = await client.send(copyObjectCommand)
@@ -991,6 +992,34 @@
     expect(headObj.CacheControl).toBe('max-age=2009')
   })
 
+  it('will allow copying an object in the same path, just altering its metadata', async () => {
+    const bucketName = await createBucket(client)
+    const fileName = 'test-copy-1.jpg'
+
+    await uploadFile(client, bucketName, fileName, 1)
+
+    const copyObjectCommand = new CopyObjectCommand({
+      Bucket: bucketName,
+      Key: fileName,
+      CopySource: `${bucketName}/${fileName}`,
+      ContentType: 'image/png',
+      CacheControl: 'max-age=2009',
+      MetadataDirective: 'REPLACE',
+    })
+
+    const resp = await client.send(copyObjectCommand)
+    expect(resp.CopyObjectResult?.ETag).toBeTruthy()
+
+    const headObjectCommand = new HeadObjectCommand({
+      Bucket: bucketName,
+      Key: fileName,
+    })
+
+    const headObj = await client.send(headObjectCommand)
+    expect(headObj.ContentType).toBe('image/png')
+    expect(headObj.CacheControl).toBe('max-age=2009')
+  })
+
   it('will not be able to copy an object that doesnt exists', async () => {
     const bucketName1 = await createBucket(client)
     await uploadFile(client, bucketName1, 'test-copy-1.jpg', 1)