diff --git a/constants.js b/constants.js
index 8e713bc40a..2fa0a49238 100644
--- a/constants.js
+++ b/constants.js
@@ -279,6 +279,14 @@ const constants = {
     rateLimitDefaultConfigCacheTTL: 30000, // 30 seconds
     rateLimitDefaultBurstCapacity: 1,
     rateLimitCleanupInterval: 10000, // 10 seconds
+    // Metadata allowed to be returned by getObjectAttributes API
+    allowedObjectAttributes: new Set([
+        'StorageClass',
+        'ObjectSize',
+        'ObjectParts',
+        'Checksum',
+        'ETag',
+    ]),
 };
 
 module.exports = constants;
diff --git a/lib/api/api.js b/lib/api/api.js
index 195441c3fe..058e63c2e6 100644
--- a/lib/api/api.js
+++ b/lib/api/api.js
@@ -56,6 +56,7 @@ const { objectDelete } = require('./objectDelete');
 const objectDeleteTagging = require('./objectDeleteTagging');
 const objectGet = require('./objectGet');
 const objectGetACL = require('./objectGetACL');
+const objectGetAttributes = require('./objectGetAttributes.js');
 const objectGetLegalHold = require('./objectGetLegalHold');
 const objectGetRetention = require('./objectGetRetention');
 const objectGetTagging = require('./objectGetTagging');
@@ -471,6 +472,7 @@ const api = {
     objectDeleteTagging,
     objectGet,
     objectGetACL,
+    objectGetAttributes,
     objectGetLegalHold,
     objectGetRetention,
     objectGetTagging,
diff --git a/lib/api/apiUtils/object/parseAttributesHeader.js b/lib/api/apiUtils/object/parseAttributesHeader.js
new file mode 100644
index 0000000000..0abe19a697
--- /dev/null
+++ b/lib/api/apiUtils/object/parseAttributesHeader.js
@@ -0,0 +1,25 @@
+const { errorInstances } = require('arsenal');
+const { allowedObjectAttributes } = require('../../../../constants');
+
+/**
+ * parseAttributesHeaders - Parse and validate the x-amz-object-attributes header
+ * @param {object} headers - request headers
+ * @returns {string[]} - array of valid attribute names
+ * @throws {Error} - InvalidRequest if header is missing/empty, InvalidArgument if attribute is invalid
+ */
+function parseAttributesHeaders(headers) {
+    const attributes = headers['x-amz-object-attributes']?.split(',').map(attr => attr.trim()) ?? [];
+    if (attributes.length === 0) {
+        throw errorInstances.InvalidRequest.customizeDescription(
+            'The x-amz-object-attributes header specifying the attributes to be retrieved is either missing or empty',
+        );
+    }
+
+    if (attributes.some(attr => !allowedObjectAttributes.has(attr))) {
+        throw errorInstances.InvalidArgument.customizeDescription('Invalid attribute name specified.');
+    }
+
+    return attributes;
+}
+
+module.exports = parseAttributesHeaders;
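// Illustrative sketch (not part of the patch): how parseAttributesHeaders is expected to
// behave for a few representative header values, based on the validation code above and
// the unit tests further down. The require path assumes the CloudServer repository root.
const parseAttributesHeaders = require('./lib/api/apiUtils/object/parseAttributesHeader');

// Valid comma-separated list; whitespace around attribute names is trimmed.
parseAttributesHeaders({ 'x-amz-object-attributes': ' ETag , ObjectSize ' });
// => ['ETag', 'ObjectSize']

// Missing header: the optional chaining falls back to [], so InvalidRequest is thrown.
try {
    parseAttributesHeaders({});
} catch (err) {
    // err.is.InvalidRequest === true
}

// Unknown attribute name (including an empty-string header, which splits to ['']):
// InvalidArgument is thrown because the name is not in allowedObjectAttributes.
try {
    parseAttributesHeaders({ 'x-amz-object-attributes': 'ETag,Bogus' });
} catch (err) {
    // err.is.InvalidArgument === true
}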
diff --git a/lib/api/objectGetAttributes.js b/lib/api/objectGetAttributes.js
new file mode 100644
index 0000000000..86f09fe31f
--- /dev/null
+++ b/lib/api/objectGetAttributes.js
@@ -0,0 +1,141 @@
+const { promisify } = require('util');
+const xml2js = require('xml2js');
+const { errors } = require('arsenal');
+const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
+const collectCorsHeaders = require('../utilities/collectCorsHeaders');
+const parseAttributesHeaders = require('./apiUtils/object/parseAttributesHeader');
+const { decodeVersionId, getVersionIdResHeader } = require('./apiUtils/object/versioning');
+const { checkExpectedBucketOwner } = require('./apiUtils/authorization/bucketOwner');
+const { pushMetric } = require('../utapi/utilities');
+const { getPartCountFromMd5 } = require('./apiUtils/object/partInfo');
+
+const OBJECT_GET_ATTRIBUTES = 'objectGetAttributes';
+
+const checkExpectedBucketOwnerPromise = promisify(checkExpectedBucketOwner);
+const validateBucketAndObj = promisify(standardMetadataValidateBucketAndObj);
+
+/**
+ * buildXmlResponse - Build XML response for GetObjectAttributes
+ * @param {object} objMD - object metadata
+ * @param {array} attributes - requested attributes
+ * @returns {string} XML response
+ */
+function buildXmlResponse(objMD, attributes) {
+    const attrResp = {};
+
+    if (attributes.includes('ETag')) {
+        attrResp.ETag = objMD['content-md5'];
+    }
+
+    // NOTE: Checksum is not implemented
+    if (attributes.includes('Checksum')) {
+        attrResp.Checksum = {};
+    }
+
+    if (attributes.includes('ObjectParts')) {
+        const partCount = getPartCountFromMd5(objMD);
+        if (partCount) {
+            attrResp.ObjectParts = { PartsCount: partCount };
+        }
+    }
+
+    if (attributes.includes('StorageClass')) {
+        attrResp.StorageClass = objMD['x-amz-storage-class'];
+    }
+
+    if (attributes.includes('ObjectSize')) {
+        attrResp.ObjectSize = objMD['content-length'];
+    }
+
+    const builder = new xml2js.Builder();
+    return builder.buildObject({ GetObjectAttributesResponse: attrResp });
+}
+
+/**
+ * objectGetAttributes - Retrieves all metadata from an object without returning the object itself
+ * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
+ * @param {object} request - http request object
+ * @param {object} log - Werelogs logger
+ * @returns {Promise} - { xml, responseHeaders }
+ * @throws {ArsenalError} NoSuchVersion - if versionId specified but not found
+ * @throws {ArsenalError} NoSuchKey - if object not found
+ * @throws {ArsenalError} MethodNotAllowed - if object is a delete marker
+ */
+async function objectGetAttributes(authInfo, request, log) {
+    log.trace('processing request', { method: OBJECT_GET_ATTRIBUTES });
+    const { bucketName, objectKey, headers, actionImplicitDenies } = request;
+
+    const versionId = decodeVersionId(request.query);
+    if (versionId instanceof Error) {
+        log.debug('invalid versionId query', { versionId: request.query.versionId, error: versionId });
+        throw versionId;
+    }
+
+    const metadataValParams = {
+        authInfo,
+        bucketName,
+        objectKey,
+        versionId,
+        getDeleteMarker: true,
+        requestType: request.apiMethods || OBJECT_GET_ATTRIBUTES,
+        request,
+    };
+
+    const { bucket, objMD } = await validateBucketAndObj(metadataValParams, actionImplicitDenies, log);
+    await checkExpectedBucketOwnerPromise(headers, bucket, log);
+
+    const responseHeaders = collectCorsHeaders(headers.origin, request.method, bucket);
+
+    if (!objMD) {
+        const err = versionId ? errors.NoSuchVersion : errors.NoSuchKey;
+        log.debug('object not found', { bucket: bucketName, key: objectKey, versionId });
+        err.responseHeaders = responseHeaders;
+        throw err;
+    }
+
+    responseHeaders['x-amz-version-id'] = getVersionIdResHeader(bucket.getVersioningConfiguration(), objMD);
+    responseHeaders['Last-Modified'] = objMD['last-modified'] && new Date(objMD['last-modified']).toUTCString();
+
+    if (objMD.isDeleteMarker) {
+        log.debug('attempt to get attributes of a delete marker', { bucket: bucketName, key: objectKey, versionId });
+        responseHeaders['x-amz-delete-marker'] = true;
+        const err = errors.MethodNotAllowed;
+        err.responseHeaders = responseHeaders;
+        throw err;
+    }
+
+    const attributes = parseAttributesHeaders(headers);
+
+    pushMetric(OBJECT_GET_ATTRIBUTES, log, {
+        authInfo,
+        bucket: bucketName,
+        keys: [objectKey],
+        versionId: objMD?.versionId,
+        location: objMD?.dataStoreName,
+    });
+
+    const xml = buildXmlResponse(objMD, attributes);
+    return { xml, responseHeaders };
+}
+
+/**
+ * objectGetAttributesCallback - Callback wrapper for objectGetAttributes
+ * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
+ * @param {object} request - http request object
+ * @param {object} log - Werelogs logger
+ * @param {function} callback - callback to server (err, xml, responseHeaders)
+ * @return {undefined}
+ */
+function objectGetAttributesCallback(authInfo, request, log, callback) {
+    objectGetAttributes(authInfo, request, log)
+        .then(result => callback(null, result.xml, result.responseHeaders))
+        .catch(err => {
+            log.debug('error processing request', {
+                error: err,
+                method: OBJECT_GET_ATTRIBUTES,
+            });
+            return callback(err, null, err.responseHeaders || {});
+        });
+}
+
+module.exports = objectGetAttributesCallback;
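// Illustrative sketch (not part of the patch): the approximate XML shape buildXmlResponse
// produces when all attributes are requested for a simple (non-MPU) object. The metadata
// values here are made up for the example; xml2js is the same builder used in the handler,
// and ObjectParts is omitted because getPartCountFromMd5 returns a falsy value for
// single-part objects.
const xml2js = require('xml2js');

const attrResp = {
    ETag: 'fc3ff98e8c6a0d3087d515c0473f8677',
    Checksum: {}, // placeholder, Checksum is not implemented
    StorageClass: 'STANDARD',
    ObjectSize: 12,
};
const xml = new xml2js.Builder().buildObject({ GetObjectAttributesResponse: attrResp });
// Roughly:
// <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
// <GetObjectAttributesResponse>
//   <ETag>fc3ff98e8c6a0d3087d515c0473f8677</ETag>
//   <Checksum/>
//   <StorageClass>STANDARD</StorageClass>
//   <ObjectSize>12</ObjectSize>
// </GetObjectAttributesResponse>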
diff --git a/lib/metadata/metadataUtils.js b/lib/metadata/metadataUtils.js
index 38b39315a3..9979c79243 100644
--- a/lib/metadata/metadataUtils.js
+++ b/lib/metadata/metadataUtils.js
@@ -1,4 +1,5 @@
 const async = require('async');
+const { promisify } = require('util');
 const { errors } = require('arsenal');
 
 const metadata = require('./wrapper');
@@ -399,6 +400,16 @@ function standardMetadataValidateBucketAndObj(params, actionImplicitDenies, log,
         return callback(null, bucket, objMD);
     });
 }
+
+standardMetadataValidateBucketAndObj[promisify.custom] = (params, action, log) => new Promise((resolve, reject) => {
+    standardMetadataValidateBucketAndObj(params, action, log, (err, bucket, objMD) => {
+        if (err) {
+            return reject(err);
+        }
+        return resolve({ bucket, objMD });
+    });
+});
+
 /** standardMetadataValidateBucket - retrieve bucket from metadata and check if user
  * is authorized to access it
  * @param {object} params - function parameters
diff --git a/package.json b/package.json
index bc42457bbe..921dd215df 100644
--- a/package.json
+++ b/package.json
@@ -21,7 +21,7 @@
   "dependencies": {
     "@azure/storage-blob": "^12.28.0",
     "@hapi/joi": "^17.1.1",
-    "arsenal": "git+https://github.com/scality/Arsenal#8.2.43",
+    "arsenal": "git+https://github.com/scality/Arsenal#feature/ARSN-549/get-object-attributes",
     "async": "2.6.4",
     "aws-sdk":
"^2.1692.0", "bucketclient": "scality/bucketclient#8.2.7", diff --git a/tests/functional/aws-node-sdk/test/object/objectGetAttributes.js b/tests/functional/aws-node-sdk/test/object/objectGetAttributes.js new file mode 100644 index 0000000000..0eb10e561e --- /dev/null +++ b/tests/functional/aws-node-sdk/test/object/objectGetAttributes.js @@ -0,0 +1,264 @@ +const assert = require('assert'); +const { S3 } = require('aws-sdk'); +const getConfig = require('../support/config'); + +const bucket = 'testbucket'; +const key = 'testobject'; +const body = 'hello world!'; +const expectedMD5 = 'fc3ff98e8c6a0d3087d515c0473f8677'; + +describe('objectGetAttributes', () => { + let s3; + + before(() => { + const config = getConfig('default', { signatureVersion: 'v4' }); + s3 = new S3(config); + }); + + beforeEach(async () => { + await s3.createBucket({ Bucket: bucket }).promise(); + await s3.putObject({ Bucket: bucket, Key: key, Body: body }).promise(); + }); + + afterEach(async () => { + await s3.deleteObject({ Bucket: bucket, Key: key }).promise(); + await s3.deleteBucket({ Bucket: bucket }).promise(); + }); + + it('should fail with a wrong bucket owner header', async () => { + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ETag'], + ExpectedBucketOwner: 'wrongAccountId', + }) + .promise(); + assert.fail('Expected AccessDenied error'); + } catch (err) { + assert.strictEqual(err.code, 'AccessDenied'); + assert.strictEqual(err.message, 'Access Denied'); + } + }); + + it('should fail because attributes header is missing', async () => { + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: [], + }) + .promise(); + assert.fail('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.code, 'InvalidArgument'); + assert.strictEqual(err.message, 'Invalid attribute name specified.'); + } + }); + + it('should fail because attribute name is invalid', async () => { + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['InvalidAttribute'], + }) + .promise(); + assert.fail('Expected InvalidArgument error'); + } catch (err) { + assert.strictEqual(err.code, 'InvalidArgument'); + assert.strictEqual(err.message, 'Invalid attribute name specified.'); + } + }); + + it('should return NoSuchKey for non-existent object', async () => { + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: 'nonexistent', + ObjectAttributes: ['ETag'], + }) + .promise(); + assert.fail('Expected NoSuchKey error'); + } catch (err) { + assert.strictEqual(err.code, 'NoSuchKey'); + assert.strictEqual(err.message, 'The specified key does not exist.'); + } + }); + + it('should return all attributes', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ETag', 'Checksum', 'ObjectParts', 'StorageClass', 'ObjectSize'], + }) + .promise(); + + assert.strictEqual(data.ETag, expectedMD5); + assert.strictEqual(data.StorageClass, 'STANDARD'); + assert.strictEqual(data.ObjectSize, body.length); + assert.deepStrictEqual(data.Checksum, {}, 'Checksum should be present'); + assert.strictEqual(data.ObjectParts, undefined, "ObjectParts shouldn't be present for non-MPU object"); + assert(data.LastModified, 'LastModified should be present'); + }); + + it('should return ETag', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ETag'], + }) + .promise(); + + assert.strictEqual(data.ETag, 
expectedMD5); + }); + + it('should return Checksum', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['Checksum'], + }) + .promise(); + + assert.deepStrictEqual(data.Checksum, {}, 'Checksum should be present'); + }); + + it("shouldn't return ObjectParts for non-MPU objects", async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ObjectParts'], + }) + .promise(); + + assert.strictEqual(data.ObjectParts, undefined, "ObjectParts shouldn't be present"); + }); + + it('should return StorageClass', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['StorageClass'], + }) + .promise(); + + assert.strictEqual(data.StorageClass, 'STANDARD'); + }); + + it('should return ObjectSize', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ObjectSize'], + }) + .promise(); + + assert.strictEqual(data.ObjectSize, body.length); + }); + + it('should return LastModified', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ETag'], + }) + .promise(); + + assert(data.LastModified, 'LastModified should be present'); + assert(data.LastModified instanceof Date, 'LastModified should be a Date'); + assert(!isNaN(data.LastModified.getTime()), 'LastModified should be a valid date'); + }); +}); + +describe('Test get object attributes with multipart upload', () => { + let s3; + const mpuKey = 'mpuObject'; + const partSize = 5 * 1024 * 1024; // Minimum part size is 5MB + const partCount = 3; + + before(async () => { + const config = getConfig('default', { signatureVersion: 'v4' }); + s3 = new S3(config); + + await s3.createBucket({ Bucket: bucket }).promise(); + + const createResult = await s3 + .createMultipartUpload({ + Bucket: bucket, + Key: mpuKey, + }) + .promise(); + const uploadId = createResult.UploadId; + + const partData = Buffer.alloc(partSize, 'a'); + const parts = []; + for (let i = 1; i <= partCount; i++) { + const uploadResult = await s3 + .uploadPart({ + Bucket: bucket, + Key: mpuKey, + PartNumber: i, + UploadId: uploadId, + Body: partData, + }) + .promise(); + parts.push({ PartNumber: i, ETag: uploadResult.ETag }); + } + + await s3 + .completeMultipartUpload({ + Bucket: bucket, + Key: mpuKey, + UploadId: uploadId, + MultipartUpload: { Parts: parts }, + }) + .promise(); + }); + + after(async () => { + await s3.deleteObject({ Bucket: bucket, Key: mpuKey }).promise(); + await s3.deleteBucket({ Bucket: bucket }).promise(); + }); + + it('should return TotalPartsCount for MPU object', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: mpuKey, + ObjectAttributes: ['ObjectParts'], + }) + .promise(); + + assert(data.ObjectParts, 'ObjectParts should be present'); + assert.strictEqual(data.ObjectParts.TotalPartsCount, partCount); + }); + + it('should return TotalPartsCount along with other attributes for MPU object', async () => { + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: mpuKey, + ObjectAttributes: ['ETag', 'ObjectParts', 'ObjectSize', 'StorageClass'], + }) + .promise(); + + assert(data.ETag, 'ETag should be present'); + assert(data.ETag.includes(`-${partCount}`), `ETag should indicate MPU with ${partCount} parts`); + assert(data.ObjectParts, 'ObjectParts should be present'); + assert.strictEqual(data.ObjectParts.TotalPartsCount, partCount); 
+ assert.strictEqual(data.ObjectSize, partSize * partCount); + assert.strictEqual(data.StorageClass, 'STANDARD'); + }); +}); diff --git a/tests/functional/aws-node-sdk/test/versioning/objectGetAttributes.js b/tests/functional/aws-node-sdk/test/versioning/objectGetAttributes.js new file mode 100644 index 0000000000..8b932cdfae --- /dev/null +++ b/tests/functional/aws-node-sdk/test/versioning/objectGetAttributes.js @@ -0,0 +1,142 @@ +const assert = require('assert'); +const { promisify } = require('util'); +const { S3 } = require('aws-sdk'); +const getConfig = require('../support/config'); +const { removeAllVersions, versioningEnabled } = require('../../lib/utility/versioning-util.js'); + +const removeAllVersionsPromise = promisify(removeAllVersions); + +const bucket = 'testbucket'; +const key = 'testobject'; +const body = 'hello world!'; +const expectedMD5 = 'fc3ff98e8c6a0d3087d515c0473f8677'; + +describe('Test get object attributes with versioning', () => { + let s3; + + before(() => { + const config = getConfig('default', { signatureVersion: 'v4' }); + s3 = new S3(config); + }); + + beforeEach(async () => { + await s3.createBucket({ Bucket: bucket }).promise(); + await s3 + .putBucketVersioning({ + Bucket: bucket, + VersioningConfiguration: versioningEnabled, + }) + .promise(); + }); + + afterEach(async () => { + await removeAllVersionsPromise({ Bucket: bucket }); + await s3.deleteBucket({ Bucket: bucket }).promise(); + }); + + it('should return NoSuchVersion for non-existent versionId', async () => { + await s3 + .putObject({ + Bucket: bucket, + Key: key, + Body: body, + }) + .promise(); + + const fakeVersionId = '111111111111111111111111111111111111111175636f7270'; + + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + VersionId: fakeVersionId, + ObjectAttributes: ['ETag'], + }) + .promise(); + assert.fail('Expected NoSuchVersion error'); + } catch (err) { + assert.strictEqual(err.code, 'NoSuchVersion'); + assert.strictEqual( + err.message, + 'Indicates that the version ID specified in the request does not match an existing version.', + ); + } + }); + + it('should return MethodNotAllowed for delete marker', async () => { + await s3 + .putObject({ + Bucket: bucket, + Key: key, + Body: body, + }) + .promise(); + + await s3 + .deleteObject({ + Bucket: bucket, + Key: key, + }) + .promise(); + + try { + await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + ObjectAttributes: ['ETag'], + }) + .promise(); + assert.fail('Expected MethodNotAllowed error'); + } catch (err) { + assert.strictEqual(err.code, 'MethodNotAllowed'); + assert.strictEqual(err.message, 'The specified method is not allowed against this resource.'); + } + }); + + it('should return attributes for specific version', async () => { + const putResult = await s3 + .putObject({ + Bucket: bucket, + Key: key, + Body: body, + }) + .promise(); + const versionId = putResult.VersionId; + + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: key, + VersionId: versionId, + ObjectAttributes: ['ETag', 'ObjectSize'], + }) + .promise(); + + assert.strictEqual(data.ETag, expectedMD5); + assert.strictEqual(data.ObjectSize, body.length); + assert(data.LastModified, 'LastModified should be present'); + }); + + it('should return VersionId for versioned object', async () => { + const putResult = await s3 + .putObject({ + Bucket: bucket, + Key: key, + Body: body, + }) + .promise(); + const versionId = putResult.VersionId; + + const data = await s3 + .getObjectAttributes({ + Bucket: bucket, + Key: 
key, + ObjectAttributes: ['ETag'], + }) + .promise(); + + assert.strictEqual(data.VersionId, versionId); + }); +}); diff --git a/tests/unit/api/apiUtils/object/parseAttributesHeader.js b/tests/unit/api/apiUtils/object/parseAttributesHeader.js new file mode 100644 index 0000000000..68acf14a57 --- /dev/null +++ b/tests/unit/api/apiUtils/object/parseAttributesHeader.js @@ -0,0 +1,211 @@ +const assert = require('assert'); + +const parseAttributesHeaders = require('../../../../../lib/api/apiUtils/object/parseAttributesHeader'); + +describe('parseAttributesHeaders', () => { + describe('missing or empty header', () => { + it('should throw InvalidRequest error when header is missing', () => { + const headers = {}; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidRequest, true); + assert.strictEqual( + err.description, + 'The x-amz-object-attributes header specifying the attributes to be retrieved is either missing or empty', + ); + return true; + }, + ); + }); + + it('should throw InvalidArgument error when header is empty string', () => { + const headers = { 'x-amz-object-attributes': '' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + + it('should throw InvalidArgument error when header contains only whitespace', () => { + const headers = { 'x-amz-object-attributes': ' ' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + + it('should throw InvalidArgument error when header contains only commas', () => { + const headers = { 'x-amz-object-attributes': ',,,' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + }); + + describe('invalid attribute names', () => { + it('should throw InvalidArgument error for single invalid attribute', () => { + const headers = { 'x-amz-object-attributes': 'InvalidAttribute' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + + it('should throw InvalidArgument error when one attribute is invalid among valid ones', () => { + const headers = { 'x-amz-object-attributes': 'ETag,InvalidAttribute,ObjectSize' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + + it('should throw InvalidArgument error for multiple invalid attributes', () => { + const headers = { 'x-amz-object-attributes': 'Invalid1,Invalid2' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + }); + + describe('valid attribute names', () => { + 
it('should return array with single valid attribute ETag', () => { + const headers = { 'x-amz-object-attributes': 'ETag' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['ETag']); + }); + + it('should return array with single valid attribute StorageClass', () => { + const headers = { 'x-amz-object-attributes': 'StorageClass' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['StorageClass']); + }); + + it('should return array with single valid attribute ObjectSize', () => { + const headers = { 'x-amz-object-attributes': 'ObjectSize' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['ObjectSize']); + }); + + it('should return array with single valid attribute ObjectParts', () => { + const headers = { 'x-amz-object-attributes': 'ObjectParts' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['ObjectParts']); + }); + + it('should return array with single valid attribute Checksum', () => { + const headers = { 'x-amz-object-attributes': 'Checksum' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['Checksum']); + }); + + it('should return array with multiple valid attributes', () => { + const headers = { 'x-amz-object-attributes': 'ETag,ObjectSize,StorageClass' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['ETag', 'ObjectSize', 'StorageClass']); + }); + + it('should return array with all valid attributes', () => { + const headers = { 'x-amz-object-attributes': 'StorageClass,ObjectSize,ObjectParts,Checksum,ETag' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.strictEqual(result.length, 5); + assert(result.includes('StorageClass')); + assert(result.includes('ObjectSize')); + assert(result.includes('ObjectParts')); + assert(result.includes('Checksum')); + assert(result.includes('ETag')); + }); + }); + + describe('whitespace handling', () => { + it('should trim whitespace around attribute names', () => { + const headers = { 'x-amz-object-attributes': ' ETag , ObjectSize ' }; + const result = parseAttributesHeaders(headers); + + assert(Array.isArray(result)); + assert.deepStrictEqual(result, ['ETag', 'ObjectSize']); + }); + + it('should throw InvalidArgument for extra commas between attributes', () => { + const headers = { 'x-amz-object-attributes': 'ETag,,ObjectSize' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + + it('should throw InvalidArgument for leading and trailing commas', () => { + const headers = { 'x-amz-object-attributes': ',ETag,ObjectSize,' }; + + assert.throws( + () => parseAttributesHeaders(headers), + err => { + assert(err.is); + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + return true; + }, + ); + }); + }); +}); diff --git a/tests/unit/api/objectGetAttributes.js b/tests/unit/api/objectGetAttributes.js new file mode 100644 index 0000000000..2e2064d15d --- /dev/null +++ b/tests/unit/api/objectGetAttributes.js @@ -0,0 +1,488 
@@ +const assert = require('assert'); +const crypto = require('crypto'); +const { parseStringPromise } = require('xml2js'); + +const { bucketPut } = require('../../../lib/api/bucketPut'); +const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning'); +const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils } = require('../helpers'); +const completeMultipartUpload = require('../../../lib/api/completeMultipartUpload'); +const DummyRequest = require('../DummyRequest'); +const initiateMultipartUpload = require('../../../lib/api/initiateMultipartUpload'); +const objectPut = require('../../../lib/api/objectPut'); +const { objectDelete } = require('../../../lib/api/objectDelete'); +const objectGetAttributes = require('../../../lib/api/objectGetAttributes'); +const objectPutPart = require('../../../lib/api/objectPutPart'); + +const log = new DummyRequestLogger(); +const authInfo = makeAuthInfo('accessKey1'); +const namespace = 'default'; +const bucketName = 'bucketname'; +const objectName = 'objectName'; +const body = 'hello world!'; +const postBody = Buffer.from(body, 'utf8'); +const expectedMD5 = 'fc3ff98e8c6a0d3087d515c0473f8677'; + +// Promisify helper for functions with non-standard callback signatures +const promisify = fn => (...args) => new Promise((resolve, reject) => { + fn(...args, (err, ...results) => { + if (err) { + reject(err); + } else { + resolve(results); + } + }); +}); + +const bucketPutAsync = promisify(bucketPut); +const bucketPutVersioningAsync = promisify(bucketPutVersioning); +const objectPutAsync = promisify(objectPut); +const objectDeleteAsync = promisify(objectDelete); +const objectGetAttributesAsync = promisify(objectGetAttributes); +const initiateMultipartUploadAsync = promisify(initiateMultipartUpload); +const objectPutPartAsync = promisify(objectPutPart); +const completeMultipartUploadAsync = promisify(completeMultipartUpload); + +const testPutBucketRequest = { + bucketName, + namespace, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: `/${bucketName}`, + actionImplicitDenies: false, +}; + +const createGetAttributesRequest = (attributes, options = {}) => { + const key = options.objectKey || objectName; + return { + bucketName, + namespace, + objectKey: key, + headers: { + 'x-amz-object-attributes': attributes.join(','), + ...options.headers, + }, + url: `/${bucketName}/${key}`, + query: options.query || {}, + actionImplicitDenies: false, + }; +}; + +describe('objectGetAttributes API', () => { + beforeEach(async () => { + cleanup(); + const testPutObjectRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'content-length': `${postBody.length}`, + }, + parsedContentLength: postBody.length, + url: `/${bucketName}/${objectName}`, + }, + postBody, + ); + await bucketPutAsync(authInfo, testPutBucketRequest, log); + await objectPutAsync(authInfo, testPutObjectRequest, undefined, log); + }); + + it('should fail because attributes header is missing', async () => { + const testGetRequest = { + bucketName, + namespace, + objectKey: objectName, + headers: {}, + url: `/${bucketName}/${objectName}`, + query: {}, + actionImplicitDenies: false, + }; + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.InvalidRequest, true); + assert.strictEqual( + err.description, + 'The x-amz-object-attributes header specifying the attributes ' + + 'to be retrieved is either missing or empty', + 
); + } + }); + + it('should fail because attributes header is empty', async () => { + const testGetRequest = { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'x-amz-object-attributes': '', + }, + url: `/${bucketName}/${objectName}`, + query: {}, + actionImplicitDenies: false, + }; + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + } + }); + + it('should fail because attribute name is invalid', async () => { + const testGetRequest = createGetAttributesRequest(['InvalidAttribute']); + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.InvalidArgument, true); + assert.strictEqual(err.description, 'Invalid attribute name specified.'); + } + }); + + it('should return NoSuchKey for non-existent object', async () => { + const testGetRequest = createGetAttributesRequest(['ETag'], { + objectKey: 'nonexistent', + }); + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.NoSuchKey, true); + assert.strictEqual(err.description, 'The specified key does not exist.'); + } + }); + + it('should fail because of bad bucket owner', async () => { + const testGetRequest = createGetAttributesRequest(['ETag'], { + headers: { + 'x-amz-expected-bucket-owner': 'wrongAccountId', + }, + }); + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.AccessDenied, true); + assert.strictEqual(err.description, 'Access Denied'); + } + }); + + it('should return all attributes', async () => { + const testGetRequest = createGetAttributesRequest([ + 'ETag', + 'Checksum', + 'ObjectParts', + 'StorageClass', + 'ObjectSize', + ]); + + const [xml, headers] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert(xml, 'Response XML should be present'); + assert(headers['Last-Modified'], 'Last-Modified header should be present'); + + const result = await parseStringPromise(xml); + const response = result.GetObjectAttributesResponse; + + assert.strictEqual(response.ETag[0], expectedMD5); + assert.strictEqual(response.StorageClass[0], 'STANDARD'); + assert.strictEqual(response.ObjectSize[0], String(body.length)); + assert.deepStrictEqual(response.Checksum[0], '', 'Checksum should be empty'); + assert.strictEqual(response.ObjectParts, undefined, "ObjectParts shouldn't be present for non-MPU object"); + assert(headers['Last-Modified'], 'LastModified should be present'); + }); + + it('should return ETag', async () => { + const testGetRequest = createGetAttributesRequest(['ETag']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + assert.strictEqual(result.GetObjectAttributesResponse.ETag[0], expectedMD5); + }); + + it('should return Checksum', async () => { + const testGetRequest = createGetAttributesRequest(['Checksum']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + assert.deepStrictEqual(result.GetObjectAttributesResponse.Checksum[0], '', 'Checksum should be empty'); + }); + + 
it("shouldn't return ObjectParts for non-MPU object", async () => { + const testGetRequest = createGetAttributesRequest(['ObjectParts']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + assert.strictEqual( + result.GetObjectAttributesResponse.ObjectParts, + undefined, + "ObjectParts shouldn't be present", + ); + }); + + it('should return StorageClass', async () => { + const testGetRequest = createGetAttributesRequest(['StorageClass']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + assert.strictEqual(result.GetObjectAttributesResponse.StorageClass[0], 'STANDARD'); + }); + + it('should return ObjectSize', async () => { + const testGetRequest = createGetAttributesRequest(['ObjectSize']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + assert.strictEqual(result.GetObjectAttributesResponse.ObjectSize[0], String(body.length)); + }); + + it('should return LastModified in response headers', async () => { + const testGetRequest = createGetAttributesRequest(['ETag']); + + const [, headers] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert(headers['Last-Modified'], 'Last-Modified should be present'); + assert(!isNaN(new Date(headers['Last-Modified']).getTime()), 'Last-Modified should be a valid date'); + }); +}); + +describe('objectGetAttributes API with multipart upload', () => { + const partCount = 2; + const partBody = Buffer.from('I am a part\n', 'utf8'); + + const createMpuObject = async () => { + const initiateRequest = { + bucketName, + namespace, + objectKey: objectName, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: `/${objectName}?uploads`, + actionImplicitDenies: false, + }; + + const [result] = await initiateMultipartUploadAsync(authInfo, initiateRequest, log); + const json = await parseStringPromise(result); + const testUploadId = json.InitiateMultipartUploadResult.UploadId[0]; + const partHash = crypto.createHash('md5').update(partBody).digest('hex'); + + const completeParts = []; + for (let i = 1; i <= partCount; i++) { + const partRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + host: `${bucketName}.s3.amazonaws.com`, + 'content-length': '5242880', + }, + parsedContentLength: 5242880, + url: `/${objectName}?partNumber=${i}&uploadId=${testUploadId}`, + query: { + partNumber: String(i), + uploadId: testUploadId, + }, + partHash, + }, + partBody, + ); + await objectPutPartAsync(authInfo, partRequest, undefined, log); + completeParts.push(`${i}"${partHash}"`); + } + + const completeBody = + `${completeParts.join('')}`; + + const completeRequest = { + bucketName, + namespace, + objectKey: objectName, + parsedHost: 's3.amazonaws.com', + url: `/${objectName}?uploadId=${testUploadId}`, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + query: { uploadId: testUploadId }, + post: completeBody, + actionImplicitDenies: false, + }; + + await completeMultipartUploadAsync(authInfo, completeRequest, log); + }; + + beforeEach(async () => { + cleanup(); + await bucketPutAsync(authInfo, testPutBucketRequest, log); + await createMpuObject(); + }); + + it('should return TotalPartsCount for MPU object', async () => { + const testGetRequest = createGetAttributesRequest(['ObjectParts']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + 
const result = await parseStringPromise(xml); + const response = result.GetObjectAttributesResponse; + + assert(response.ObjectParts, 'ObjectParts should be present'); + assert.strictEqual(response.ObjectParts[0].PartsCount[0], String(partCount)); + }); + + it('should return TotalPartsCount along with other attributes for MPU object', async () => { + const testGetRequest = createGetAttributesRequest(['ETag', 'ObjectParts', 'ObjectSize', 'StorageClass']); + + const [xml] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + const result = await parseStringPromise(xml); + const response = result.GetObjectAttributesResponse; + + assert(response.ETag, 'ETag should be present'); + assert(response.ETag[0].includes(`-${partCount}`), `ETag should indicate MPU with ${partCount} parts`); + assert(response.ObjectParts, 'ObjectParts should be present'); + assert.strictEqual(response.ObjectParts[0].PartsCount[0], String(partCount)); + assert(response.ObjectSize, 'ObjectSize should be present'); + assert.strictEqual(response.StorageClass[0], 'STANDARD'); + }); +}); + +describe('objectGetAttributes API with versioning', () => { + const enableVersioningRequest = versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled'); + + beforeEach(async () => { + cleanup(); + await bucketPutAsync(authInfo, testPutBucketRequest, log); + await bucketPutVersioningAsync(authInfo, enableVersioningRequest, log); + }); + + it('should return NoSuchVersion for non-existent versionId', async () => { + const testPutObjectRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'content-length': `${postBody.length}`, + }, + parsedContentLength: postBody.length, + url: `/${bucketName}/${objectName}`, + }, + postBody, + ); + + const fakeVersionId = '111111111111111111111111111111111111111175636f7270'; + + await objectPutAsync(authInfo, testPutObjectRequest, undefined, log); + const testGetRequest = createGetAttributesRequest(['ETag'], { + query: { versionId: fakeVersionId }, + }); + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.NoSuchVersion, true); + assert.strictEqual( + err.description, + 'Indicates that the version ID specified in the request does not match an existing version.', + ); + } + }); + + it('should return MethodNotAllowed for delete marker', async () => { + const testPutObjectRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'content-length': `${postBody.length}`, + }, + parsedContentLength: postBody.length, + url: `/${bucketName}/${objectName}`, + }, + postBody, + ); + + const testDeleteRequest = { + bucketName, + namespace, + objectKey: objectName, + headers: {}, + url: `/${bucketName}/${objectName}`, + actionImplicitDenies: false, + }; + + await objectPutAsync(authInfo, testPutObjectRequest, undefined, log); + await objectDeleteAsync(authInfo, testDeleteRequest, log); + + const testGetRequest = createGetAttributesRequest(['ETag']); + + try { + await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.fail('Expected error was not thrown'); + } catch (err) { + assert.strictEqual(err.is.MethodNotAllowed, true); + assert.strictEqual(err.description, 'The specified method is not allowed against this resource.'); + assert.strictEqual(err.responseHeaders['x-amz-delete-marker'], true); + } + }); + + it('should return attributes for specific version', async () => { + const 
testPutObjectRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'content-length': `${postBody.length}`, + }, + parsedContentLength: postBody.length, + url: `/${bucketName}/${objectName}`, + }, + postBody, + ); + + const [resHeaders] = await objectPutAsync(authInfo, testPutObjectRequest, undefined, log); + const versionId = resHeaders['x-amz-version-id']; + assert(versionId, 'Version ID should be present'); + + const testGetRequest = createGetAttributesRequest(['ETag', 'ObjectSize'], { + query: { versionId }, + }); + + const [xml, headers] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert(headers['Last-Modified'], 'Last-Modified should be present'); + + const result = await parseStringPromise(xml); + const response = result.GetObjectAttributesResponse; + + assert.strictEqual(response.ETag[0], expectedMD5); + assert.strictEqual(response.ObjectSize[0], String(body.length)); + }); + + it('should return VersionId in response headers for versioned object', async () => { + const testPutObjectRequest = new DummyRequest( + { + bucketName, + namespace, + objectKey: objectName, + headers: { + 'content-length': `${postBody.length}`, + }, + parsedContentLength: postBody.length, + url: `/${bucketName}/${objectName}`, + }, + postBody, + ); + + const [resHeaders] = await objectPutAsync(authInfo, testPutObjectRequest, undefined, log); + const versionId = resHeaders['x-amz-version-id']; + assert(versionId, 'Version ID should be present from PUT'); + + const testGetRequest = createGetAttributesRequest(['ETag']); + + const [, headers] = await objectGetAttributesAsync(authInfo, testGetRequest, log); + assert.strictEqual(headers['x-amz-version-id'], versionId); + }); +}); diff --git a/yarn.lock b/yarn.lock index 1ec0431703..3fb12ebf2e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -602,9 +602,9 @@ integrity sha512-EKQmr16tM8s16vTT3cA5L0kZZcTMU5DUOZTuvpnY738m+jyP3JIUj+Mm1xc1rsLkGBQ/gVnfKYPwOmPg1tUR4Q== "@hapi/tlds@^1.1.1": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@hapi/tlds/-/tlds-1.1.3.tgz#bf5fee927d213f140cd54d4650965e504a546789" - integrity sha512-QIvUMB5VZ8HMLZF9A2oWr3AFM430QC8oGd0L35y2jHpuW6bIIca6x/xL7zUf4J7L9WJ3qjz+iJII8ncaeMbpSg== + version "1.1.4" + resolved "https://registry.yarnpkg.com/@hapi/tlds/-/tlds-1.1.4.tgz#df4a7b59082b54ba4f3b7b38f781e2ac3cbc359a" + integrity sha512-Fq+20dxsxLaUn5jSSWrdtSRcIUba2JquuorF9UW1wIJS5cSUwxIsO2GIhaWynPRflvxSzFN+gxKte2HEW1OuoA== "@hapi/topo@^5.0.0", "@hapi/topo@^5.1.0": version "5.1.0" @@ -648,16 +648,21 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.4.2.tgz#1860473de7dfa1546767448f333db80cb0ff2161" integrity sha512-xeO57FpIu4p1Ri3Jq/EXq4ClRm86dVF2z/+kvFnyqVYRavTZmaFaUBbWCOuuTh0o/g7DSsk6kc2vrS4Vl5oPOQ== -"@ioredis/commands@1.4.0", "@ioredis/commands@^1.3.0": - version "1.4.0" - resolved "https://registry.yarnpkg.com/@ioredis/commands/-/commands-1.4.0.tgz#9f657d51cdd5d2fdb8889592aa4a355546151f25" - integrity sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ== +"@ioredis/commands@1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@ioredis/commands/-/commands-1.5.0.tgz#3dddcea446a4b1dc177d0743a1e07ff50691652a" + integrity sha512-eUgLqrMf8nJkZxT24JvVRrQya1vZkQh8BBeYNwGDqa5I0VUi8ACx7uFvAaLxintokpTenkK6DASvo/bvNbBGow== "@ioredis/commands@^1.1.1": version "1.2.0" resolved "https://registry.yarnpkg.com/@ioredis/commands/-/commands-1.2.0.tgz#6d61b3097470af1fdbbe622795b8921d42018e11" integrity 
sha512-Sx1pU8EM64o2BrqNpEO1CNLtKQwyhuXuqyfH7oGKCk+1a33d2r5saW8zNwm3j6BTExtjrv2BxTgzzkMwts6vGg== +"@ioredis/commands@^1.3.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@ioredis/commands/-/commands-1.4.0.tgz#9f657d51cdd5d2fdb8889592aa4a355546151f25" + integrity sha512-aFT2yemJJo+TZCmieA7qnYGQooOS7QfNmYrzGtsYd3g9j5iDP8AimYYAesf79ohjbLG12XxC4nG5DyEnC88AsQ== + "@isaacs/cliui@^8.0.2": version "8.0.2" resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" @@ -1082,9 +1087,9 @@ integrity sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA== "@standard-schema/spec@^1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@standard-schema/spec/-/spec-1.0.0.tgz#f193b73dc316c4170f2e82a881da0f550d551b9c" - integrity sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA== + version "1.1.0" + resolved "https://registry.yarnpkg.com/@standard-schema/spec/-/spec-1.1.0.tgz#a79b55dbaf8604812f52d140b2c9ab41bc150bb8" + integrity sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w== "@types/async@^3.2.24": version "3.2.24" @@ -1527,9 +1532,9 @@ arraybuffer.prototype.slice@^1.0.4: optionalDependencies: ioctl "^2.0.2" -"arsenal@git+https://github.com/scality/Arsenal#8.2.43": +"arsenal@git+https://github.com/scality/Arsenal#feature/ARSN-549/get-object-attributes": version "8.2.43" - resolved "git+https://github.com/scality/Arsenal#c2375224bb0f0a0391c0509b4545859b77f87c92" + resolved "git+https://github.com/scality/Arsenal#ec21fa885c611498584ef3c56bfd62047c640e9e" dependencies: "@azure/identity" "^4.13.0" "@azure/storage-blob" "^12.28.0" @@ -3792,11 +3797,11 @@ ioredis@^5.6.1: standard-as-callback "^2.1.0" ioredis@^5.8.1: - version "5.8.2" - resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-5.8.2.tgz#c7a228a26cf36f17a5a8011148836877780e2e14" - integrity sha512-C6uC+kleiIMmjViJINWk80sOQw5lEzse1ZmvD+S/s8p8CWapftSaC+kocGTx6xrbrJ4WmYQGC08ffHLr6ToR6Q== + version "5.9.2" + resolved "https://registry.yarnpkg.com/ioredis/-/ioredis-5.9.2.tgz#ffdce2a019950299716e88ee56cd5802b399b108" + integrity sha512-tAAg/72/VxOUW7RQSX1pIxJVucYKcjFjfvj60L57jrZpYCHC3XN0WCQ3sNYL4Gmvv+7GPvTAjc+KSdeNuE8oWQ== dependencies: - "@ioredis/commands" "1.4.0" + "@ioredis/commands" "1.5.0" cluster-key-slot "^1.1.0" debug "^4.3.4" denque "^2.1.0" @@ -4319,9 +4324,9 @@ joi@^17.13.3: "@sideway/pinpoint" "^2.0.0" joi@^18.0.1: - version "18.0.1" - resolved "https://registry.yarnpkg.com/joi/-/joi-18.0.1.tgz#1e1885d035cc6ca1624e81bf22112e7c1ee38e1b" - integrity sha512-IiQpRyypSnLisQf3PwuN2eIHAsAIGZIrLZkd4zdvIar2bDyhM91ubRjy8a3eYablXsh9BeI/c7dmPYHca5qtoA== + version "18.0.2" + resolved "https://registry.yarnpkg.com/joi/-/joi-18.0.2.tgz#30ced6aed00a7848cc11f92859515258301dc3a4" + integrity sha512-RuCOQMIt78LWnktPoeBL0GErkNaJPTBGcYuyaBvUOQSpcpcLfWrHPPihYdOGbV5pam9VTWbeoF7TsGiHugcjGA== dependencies: "@hapi/address" "^5.1.1" "@hapi/formula" "^3.0.2" @@ -5146,7 +5151,7 @@ mongodb@^6.11.0: bson "^6.10.3" mongodb-connection-string-url "^3.0.0" -mongodb@^6.17.0, mongodb@^6.20.0: +mongodb@^6.17.0: version "6.20.0" resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-6.20.0.tgz#5212dcf512719385287aa4574265352eefb01d8e" integrity sha512-Tl6MEIU3K4Rq3TSHd+sZQqRBoGlFsOgNrH5ltAcFBV62Re3Fd+FcaVf8uSEQFOJ51SDowDVttBTONMfoYWrWlQ== @@ -5155,6 +5160,15 @@ mongodb@^6.17.0, mongodb@^6.20.0: bson "^6.10.4" mongodb-connection-string-url "^3.0.2" +mongodb@^6.20.0: + 
version "6.21.0" + resolved "https://registry.yarnpkg.com/mongodb/-/mongodb-6.21.0.tgz#f83355905900f2e7a912593f0315d5e2e0bda576" + integrity sha512-URyb/VXMjJ4da46OeSXg+puO39XH9DeQpWCslifrRn9JWugy0D+DvvBvkm2WxmHe61O/H19JM66p1z7RHVkZ6A== + dependencies: + "@mongodb-js/saslprep" "^1.3.0" + bson "^6.10.4" + mongodb-connection-string-url "^3.0.2" + ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"