56 changes: 51 additions & 5 deletions lib/api/apiUtils/object/createAndStoreObject.js
@@ -14,11 +14,49 @@ const { config } = require('../../../Config');
const validateWebsiteHeader = require('./websiteServing')
.validateWebsiteHeader;
const applyZenkoUserMD = require('./applyZenkoUserMD');
const {
algorithms,
getChecksumDataFromHeaders,
arsenalErrorFromChecksumError,
} = require('../integrity/validateChecksums');

const { externalBackends, versioningNotImplBackends } = constants;

const externalVersioningErrorMessage = 'We do not currently support putting ' +
'a versioned object to a location-constraint of type Azure or GCP.';
'a versioned object to a location-constraint of type Azure or GCP.';

/**
* Validate and compute the checksum for a zero-size object body.
* Parses the checksum headers, validates the client-supplied digest against
* the empty-body hash, sets metadataStoreParams.checksum on success, and
* calls back with an error on mismatch or invalid headers.
*
* @param {object} headers - request headers
* @param {object} metadataStoreParams - metadata params (checksum field set in-place)
* @param {function} callback - (err) callback
* @return {undefined}
*/
function zeroSizeBodyChecksumCheck(headers, metadataStoreParams, callback) {
const checksumData = getChecksumDataFromHeaders(headers);
if (checksumData.error) {
return callback(arsenalErrorFromChecksumError(checksumData));
}
// For trailer format with zero decoded bytes, the trailer in the body is
// never read (stream bypassed), so expected is always undefined here.
// We still compute and store the empty-body hash for the announced algorithm.
const { algorithm, expected } = checksumData;
return Promise.resolve(algorithms[algorithm].digest(Buffer.alloc(0)))
.then(value => {
if (expected !== undefined && expected !== value) {
return callback(errorInstances.BadDigest.customizeDescription(
`The ${algorithm.toUpperCase()} you specified did not match the calculated checksum.`
));
}
// eslint-disable-next-line no-param-reassign
metadataStoreParams.checksum = { algorithm, value, type: 'FULL_OBJECT' };
return callback(null);
}, err => callback(err));
}

function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
metadataStoreParams, dataToDelete, log, requestMethod, callback) {
@@ -217,7 +255,13 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
if (size === 0) {
if (!dontSkipBackend[locationType]) {
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return next(null, null, null);
// Delete markers are zero-byte versioned tombstones with
// no body, ETag, or checksum — skip checksum handling.
if (isDeleteMarker) {
return next(null, null, null, null);
}
return zeroSizeBodyChecksumCheck(request.headers, metadataStoreParams,
err => next(err, null, null, null));
}

// Handle mdOnlyHeader as a metadata only operation. If
@@ -243,14 +287,14 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
dataStoreVersionId: versionId,
dataStoreMD5: _md5,
};
return next(null, dataGetInfo, _md5);
return next(null, dataGetInfo, _md5, null);
}
}

return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, backendInfo, log, next);
},
function processDataResult(dataGetInfo, calculatedHash, next) {
function processDataResult(dataGetInfo, calculatedHash, checksum, next) {
if (dataGetInfo === null || dataGetInfo === undefined) {
return next(null, null);
}
@@ -264,7 +308,8 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
: `1:${calculatedHash}`;
const dataGetInfoArr = [{ key, size, start: 0, dataStoreName,
dataStoreType, dataStoreETag: prefixedDataStoreETag,
dataStoreVersionId }];
dataStoreVersionId
}];
if (cipherBundle) {
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
dataGetInfoArr[0].cipheredDataKey =
@@ -275,6 +320,7 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
dataGetInfoArr[0].size = mdOnlySize;
}
metadataStoreParams.contentMD5 = calculatedHash;
metadataStoreParams.checksum = checksum;
return next(null, dataGetInfoArr);
},
function getVersioningInfo(infoArr, next) {
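For reference, the value that zeroSizeBodyChecksumCheck stores for a zero-byte PUT is just the digest of an empty buffer, computed above via algorithms[algorithm].digest(Buffer.alloc(0)). A minimal sketch with Node's crypto module; the base64 encoding shown follows the S3 x-amz-checksum-* convention, and the exact encoding returned by Arsenal's algorithms map is not visible in this diff:

    const crypto = require('crypto');

    // Digests of a zero-length body, base64-encoded.
    const emptyBody = Buffer.alloc(0);
    console.log(crypto.createHash('sha256').update(emptyBody).digest('base64'));
    // 47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=
    console.log(crypto.createHash('sha1').update(emptyBody).digest('base64'));
    // 2jmj7l5rSw0yVb/vlWAYkK/YBwk=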
11 changes: 8 additions & 3 deletions lib/api/apiUtils/object/storeObject.js
@@ -12,12 +12,13 @@ const { arsenalErrorFromChecksumError } = require('../../apiUtils/integrity/vali
* @param {object} dataRetrievalInfo - object containing the keys of stored data
* @param {number} dataRetrievalInfo.key - key of the stored data
* @param {string} dataRetrievalInfo.dataStoreName - the implName of the data
* @param {object} checksumStream - checksum transform stream with digest/algoName properties
* @param {object} log - request logger instance
* @param {function} cb - callback to send error or move to next task
* @return {function} - calls callback with arguments:
* error, dataRetrievalInfo, and completedHash (if any)
*/
function checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, log, cb) {
function checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, checksumStream, log, cb) {
const contentMD5 = stream.contentMD5;
const completedHash = hashedStream.completedHash;
if (contentMD5 && completedHash && contentMD5 !== completedHash) {
@@ -37,7 +38,10 @@ function checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, log, cb) {
return cb(errors.BadDigest);
});
}
return cb(null, dataRetrievalInfo, completedHash);
const checksum = checksumStream.digest
? { algorithm: checksumStream.algoName, value: checksumStream.digest, type: 'FULL_OBJECT' }
: null;
return cb(null, dataRetrievalInfo, completedHash, checksum);
}

/**
@@ -107,7 +111,8 @@ function dataStore(objectContext, cipherBundle, stream, size,
return cbOnce(arsenalErrorFromChecksumError(checksumErr));
});
}
return checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo, log, cbOnce);
return checkHashMatchMD5(stream, hashedStream, dataRetrievalInfo,
checksumedStream.stream, log, cbOnce);
};

// ChecksumTransform._flush computes the digest asynchronously for
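checkHashMatchMD5 relies on only two properties of the checksum stream it now receives: algoName and digest, the latter set once the stream has been fully consumed and flushed. A minimal illustrative pass-through transform with that shape (not Arsenal's ChecksumTransform, just a sketch of the expected interface):

    const { Transform } = require('stream');
    const crypto = require('crypto');

    class Sha256Transform extends Transform {
        constructor() {
            super();
            this.algoName = 'sha256';
            this.digest = undefined; // populated in _flush, after all data has passed through
            this._hash = crypto.createHash('sha256');
        }

        _transform(chunk, encoding, callback) {
            this._hash.update(chunk);
            callback(null, chunk); // pass data through unchanged
        }

        _flush(callback) {
            this.digest = this._hash.digest('base64');
            callback();
        }
    }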
4 changes: 4 additions & 0 deletions lib/api/objectPut.js
@@ -225,6 +225,10 @@ function objectPut(authInfo, request, streamingV4Params, log, callback) {
if (storingResult) {
// ETag's hex should always be enclosed in quotes
responseHeaders.ETag = `"${storingResult.contentMD5}"`;
if (storingResult.checksum) {
const { checksumAlgorithm, checksumValue } = storingResult.checksum;
responseHeaders[`x-amz-checksum-${checksumAlgorithm}`] = checksumValue;
}
}
const vcfg = bucket.getVersioningConfiguration();
const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
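With the header added above, a PUT that carries a full-object checksum should have it echoed back in the response. A hedged end-to-end sketch with the AWS SDK for JavaScript v3; the endpoint and credentials are assumptions for a local CloudServer instance:

    const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3');

    const s3 = new S3Client({
        endpoint: 'http://localhost:8000', // assumed local CloudServer endpoint
        region: 'us-east-1',
        forcePathStyle: true,
        credentials: { accessKeyId: 'accessKey1', secretAccessKey: 'verySecretKey1' },
    });

    async function putWithChecksum() {
        const res = await s3.send(new PutObjectCommand({
            Bucket: 'mybucket',
            Key: 'mykey',
            Body: 'hello world',
            ChecksumAlgorithm: 'CRC32', // SDK computes and sends the x-amz-checksum-crc32 value
        }));
        // With this change the response carries the checksum alongside the ETag.
        console.log(res.ETag, res.ChecksumCRC32);
    }

    putWithChecksum().catch(console.error);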
15 changes: 14 additions & 1 deletion lib/routes/routeBackbeat.js
@@ -447,7 +447,14 @@ function putData(request, response, bucketInfo, objMd, log, callback) {
}
return dataStore(
context, cipherBundle, request, payloadLen, {},
backendInfo, log, (err, retrievalInfo, md5) => {
backendInfo, log,
// The callback's 4th arg (checksum) is intentionally ignored: any
// x-amz-checksum-* header sent by Backbeat is already validated
// inside dataStore by ChecksumTransform. The computed value is not
// stored here because this is a data-only write — metadata is
// written separately by Backbeat, which should propagate the source
// object's checksum.
(err, retrievalInfo, md5) => {
if (err) {
log.error('error putting data', {
error: err,
@@ -853,6 +860,12 @@ function putObject(request, response, log, callback) {
const payloadLen = parseInt(request.headers['content-length'], 10);
const backendInfo = new BackendInfo(config, storageLocation);
return dataStore(context, CIPHER, request, payloadLen, {}, backendInfo, log,
// The callback's 4th arg (checksum) is intentionally ignored: any
// x-amz-checksum-* header sent by Backbeat is already validated inside
// dataStore by ChecksumTransform. The computed value is not stored here
// because this is a data-only write to an external backend — metadata
// is managed separately by Backbeat, which should propagate the source
// object's checksum.
(err, retrievalInfo, md5) => {
if (err) {
log.error('error putting data', {
7 changes: 6 additions & 1 deletion lib/services.js
@@ -6,6 +6,7 @@ const { errors, s3middleware } = require('arsenal');
const ObjectMD = require('arsenal').models.ObjectMD;
const BucketInfo = require('arsenal').models.BucketInfo;
const ObjectMDArchive = require('arsenal').models.ObjectMDArchive;
const ObjectMDChecksum = require('arsenal').models.ObjectMDChecksum;
const { versioning } = require('arsenal');
const acl = require('./metadata/acl');
const constants = require('../constants');
@@ -102,7 +103,7 @@ const services = {
* @return {function} executes callback with err or ETag as arguments
*/
metadataStoreObject(bucketName, dataGetInfo, cipherBundle, params, cb) {
const { objectKey, authInfo, size, contentMD5, metaHeaders,
const { objectKey, authInfo, size, contentMD5, checksum, metaHeaders,
contentType, cacheControl, contentDisposition, contentEncoding,
expires, multipart, headers, overrideMetadata, log,
lastModifiedDate, versioning, versionId, uploadId,
@@ -138,6 +139,9 @@ const services = {
// CreationTime needs to be carried over so that it remains static
.setCreationTime(creationTime)
.setOriginOp(originOp);
if (checksum) {
md.setChecksum(new ObjectMDChecksum(checksum.algorithm, checksum.value, checksum.type));
}
// Sending in last modified date in object put copy since need
// to return the exact date in the response
if (lastModifiedDate) {
@@ -329,6 +333,7 @@ const services = {
tags: md.getTags(),
contentMD5,
versionId,
checksum: md.getChecksum(),
});
});
},
2 changes: 1 addition & 1 deletion package.json
@@ -33,7 +33,7 @@
"@azure/storage-blob": "^12.28.0",
"@hapi/joi": "^17.1.1",
"@smithy/node-http-handler": "^3.0.0",
"arsenal": "git+https://github.com/scality/Arsenal#8.3.6",
"arsenal": "git+https://github.com/scality/Arsenal#8.3.7",
"async": "2.6.4",
"bucketclient": "scality/bucketclient#8.2.7",
"bufferutil": "^4.0.8",
8 changes: 8 additions & 0 deletions tests/functional/aws-node-sdk/test/object/mpuVersion.js
@@ -141,6 +141,14 @@ function checkObjMdAndUpdate(objMDBefore, objMDAfter, props) {
// eslint-disable-next-line no-param-reassign
delete objMDBefore['content-type'];
}
if (objMDBefore.checksum && !objMDAfter.checksum) {
// The initial PutObject stores a checksum, but the MPU restore path does not
// (CompleteMultipartUpload checksum storage is not yet implemented).
// Once it is, the restored object should carry a checksum and this workaround
// should be removed.
// eslint-disable-next-line no-param-reassign
delete objMDBefore.checksum;
}
}

function clearUploadIdAndRestoreStatusFromVersions(versions) {
20 changes: 10 additions & 10 deletions tests/functional/aws-node-sdk/test/object/putVersion.js
@@ -258,7 +258,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'originOp']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'originOp', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -309,7 +309,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -360,7 +360,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -408,7 +408,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -460,7 +460,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -515,7 +515,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -568,7 +568,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -620,7 +620,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -679,7 +679,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});
@@ -726,7 +726,7 @@ describe('PUT object with x-scal-s3-version-id header', () => {
assert.deepStrictEqual(versionsAfter, versionsBefore);

checkObjMdAndUpdate(objMDBefore, objMDAfter, ['location', 'content-length', 'originOp',
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName']);
'microVersionId', 'x-amz-restore', 'archive', 'dataStoreName', 'checksum']);
assert.deepStrictEqual(objMDAfter, objMDBefore);
return done();
});