Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
155 changes: 144 additions & 11 deletions lib/api/apiUtils/integrity/validateChecksums.js
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,12 @@ const ChecksumError = Object.freeze({
MultipleChecksumTypes: 'MultipleChecksumTypes',
MissingCorresponding: 'MissingCorresponding',
MalformedChecksum: 'MalformedChecksum',
TrailerAlgoMismatch: 'TrailerAlgoMismatch',
TrailerChecksumMalformed: 'TrailerChecksumMalformed',
TrailerMissing: 'TrailerMissing',
TrailerUnexpected: 'TrailerUnexpected',
TrailerAndChecksum: 'TrailerAndChecksum',
TrailerNotSupported: 'TrailerNotSupported',
});

const base64Regex = /^[A-Za-z0-9+/]*={0,2}$/;
Expand All @@ -56,35 +62,51 @@ const algorithms = Object.freeze({
const result = await crc.digest();
return Buffer.from(result).toString('base64');
},
digestFromHash: async hash => {
const result = await hash.digest();
return Buffer.from(result).toString('base64');
},
isValidDigest: expected => typeof expected === 'string' && expected.length === 12 && base64Regex.test(expected),
createHash: () => new CrtCrc64Nvme()
},
crc32: {
digest: data => {
const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
return uint32ToBase64(new Crc32().update(input).digest() >>> 0); // >>> 0 coerce number to uint32
},
digestFromHash: hash => {
const result = hash.digest();
return uint32ToBase64(result >>> 0);
},
isValidDigest: expected => typeof expected === 'string' && expected.length === 8 && base64Regex.test(expected),
createHash: () => new Crc32()
},
crc32c: {
digest: data => {
const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
return uint32ToBase64(new Crc32c().update(input).digest() >>> 0); // >>> 0 coerce number to uint32
},
digestFromHash: hash => uint32ToBase64(hash.digest() >>> 0),
isValidDigest: expected => typeof expected === 'string' && expected.length === 8 && base64Regex.test(expected),
createHash: () => new Crc32c()
},
sha1: {
digest: data => {
const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
return crypto.createHash('sha1').update(input).digest('base64');
},
digestFromHash: hash => hash.digest('base64'),
isValidDigest: expected => typeof expected === 'string' && expected.length === 28 && base64Regex.test(expected),
createHash: () => crypto.createHash('sha1')
},
sha256: {
digest: data => {
const input = Buffer.isBuffer(data) ? data : Buffer.from(data);
return crypto.createHash('sha256').update(input).digest('base64');
},
digestFromHash: hash => hash.digest('base64'),
isValidDigest: expected => typeof expected === 'string' && expected.length === 44 && base64Regex.test(expected),
createHash: () => crypto.createHash('sha256')
}
});

Expand Down Expand Up @@ -132,7 +154,7 @@ async function validateXAmzChecksums(headers, body) {
return { error: ChecksumError.AlgoNotSupportedSDK, details: { algorithm: sdkAlgo } };
}

// If AWS there is a mismatch, AWS returns the same error as if the algo was invalid.
// If there is a mismatch, AWS returns the same error as if the algo was invalid.
if (sdkLowerAlgo !== algo) {
return { error: ChecksumError.AlgoNotSupportedSDK, details: { algorithm: sdkAlgo } };
}
Expand All @@ -141,6 +163,94 @@ async function validateXAmzChecksums(headers, body) {
return null;
}

/**
 * getChecksumDataFromHeaders - Decide which checksum algorithm applies to a
 * request and where its digest will come from (header, trailer, or default).
 *
 * NOTE: the order of the checks below mirrors AWS error precedence and must
 * not be reordered.
 *
 * @param {object} headers - lowercased http request headers
 * @returns {object} `{ error, details }` on validation failure, otherwise
 *   `{ algorithm, isTrailer, expected }` where `expected` is the announced
 *   base64 digest (undefined for trailer and default cases)
 */
function getChecksumDataFromHeaders(headers) {
    const checksumPrefix = 'x-amz-checksum-';
    const sdkHeader = 'x-amz-sdk-checksum-algorithm';

    // Validate x-amz-sdk-checksum-algorithm (when present) against the
    // algorithm the request actually carries.
    const verifySdkAlgo = expectedAlgo => {
        if (!(sdkHeader in headers)) {
            return null;
        }

        const announced = headers[sdkHeader];
        if (typeof announced !== 'string') {
            return { error: ChecksumError.AlgoNotSupportedSDK, details: { algorithm: announced } };
        }

        const announcedLower = announced.toLowerCase();
        if (!(announcedLower in algorithms)) {
            return { error: ChecksumError.AlgoNotSupportedSDK, details: { algorithm: announced } };
        }

        // On a mismatch AWS returns the same error as for an invalid algo.
        if (announcedLower !== expectedAlgo) {
            return { error: ChecksumError.AlgoNotSupportedSDK, details: { algorithm: announced } };
        }

        return null;
    };

    const checksumHeaderNames = Object.keys(headers).filter(name => name.startsWith(checksumPrefix));
    if (checksumHeaderNames.length > 1) {
        return { error: ChecksumError.MultipleChecksumTypes, details: { algorithms: checksumHeaderNames } };
    }

    const hasTrailer = 'x-amz-trailer' in headers;
    if (checksumHeaderNames.length === 0 && !hasTrailer && sdkHeader in headers) {
        // An algorithm was announced but no digest header and no trailer.
        return {
            error: ChecksumError.MissingCorresponding,
            details: { expected: headers[sdkHeader] }
        };
    }

    if (hasTrailer) {
        if (checksumHeaderNames.length !== 0) {
            // Header digest and trailer digest are mutually exclusive.
            return {
                error: ChecksumError.TrailerAndChecksum,
                details: { trailer: headers['x-amz-trailer'], checksum: checksumHeaderNames },
            };
        }

        const trailerValue = headers['x-amz-trailer'];
        if (!trailerValue.startsWith(checksumPrefix)) {
            return { error: ChecksumError.TrailerNotSupported, details: { value: trailerValue } };
        }

        const trailerAlgo = trailerValue.slice(checksumPrefix.length);
        if (!(trailerAlgo in algorithms)) {
            return { error: ChecksumError.TrailerNotSupported, details: { value: trailerValue } };
        }

        const sdkError = verifySdkAlgo(trailerAlgo);
        if (sdkError) {
            return sdkError;
        }

        // The digest arrives in the body trailer, so nothing is expected yet.
        return { algorithm: trailerAlgo, isTrailer: true, expected: undefined };
    }

    if (checksumHeaderNames.length === 0) {
        // Neither x-amz-checksum-* nor x-amz-trailer: default to crc64nvme.
        // The calculated crc64nvme will be stored in the object metadata.
        return { algorithm: 'crc64nvme', isTrailer: false, expected: undefined };
    }

    // Exactly one x-amz-checksum-[crc64nvme, crc32, crc32c, sha1, sha256].
    const algo = checksumHeaderNames[0].slice(checksumPrefix.length);
    if (!(algo in algorithms)) {
        return { error: ChecksumError.AlgoNotSupported, details: { algorithm: algo } };
    }

    const expected = headers[checksumHeaderNames[0]];
    if (!algorithms[algo].isValidDigest(expected)) {
        return { error: ChecksumError.MalformedChecksum, details: { algorithm: algo, expected } };
    }

    const sdkError = verifySdkAlgo(algo);
    if (sdkError) {
        return sdkError;
    }

    return { algorithm: algo, isTrailer: false, expected };
}

/**
* validateChecksumsNoChunking - Validate the checksums of a request.
* @param {object} headers - http headers
Expand Down Expand Up @@ -183,16 +293,7 @@ async function validateChecksumsNoChunking(headers, body) {
return err;
}

async function defaultValidationFunc(request, body, log) {
const err = await validateChecksumsNoChunking(request.headers, body);
if (!err) {
return null;
}

if (err.error !== ChecksumError.MissingChecksum) {
log.debug('failed checksum validation', { method: request.apiMethod }, err);
}

function arsenalErrorFromChecksumError(err) {
switch (err.error) {
case ChecksumError.MissingChecksum:
return null;
Expand Down Expand Up @@ -225,11 +326,40 @@ async function defaultValidationFunc(request, body, log) {
);
case ChecksumError.MD5Invalid:
return ArsenalErrors.InvalidDigest;
case ChecksumError.TrailerAlgoMismatch:
return ArsenalErrors.MalformedTrailerError;
case ChecksumError.TrailerMissing:
return ArsenalErrors.MalformedTrailerError;
case ChecksumError.TrailerUnexpected:
return ArsenalErrors.MalformedTrailerError;
case ChecksumError.TrailerChecksumMalformed:
return ArsenalErrors.InvalidRequest.customizeDescription(
`Value for x-amz-checksum-${err.details.algorithm} trailing header is invalid.`
);
case ChecksumError.TrailerAndChecksum:
return ArsenalErrors.InvalidRequest.customizeDescription('Expecting a single x-amz-checksum- header');
case ChecksumError.TrailerNotSupported:
return ArsenalErrors.InvalidRequest.customizeDescription(
'The value specified in the x-amz-trailer header is not supported'
);
default:
return ArsenalErrors.BadDigest;
}
}

/**
 * defaultValidationFunc - Validate a request's full-body checksums and map
 * any failure to an arsenal error.
 * @param {object} request - http request (headers and apiMethod are read)
 * @param {Buffer|string} body - full request body
 * @param {object} log - request logger
 * @returns {Promise<object|null>} arsenal error, or null when validation
 *   passes (including the missing-checksum case)
 */
async function defaultValidationFunc(request, body, log) {
    const checksumErr = await validateChecksumsNoChunking(request.headers, body);
    if (checksumErr == null) {
        return null;
    }

    // A missing checksum is an expected case and is not worth a log line.
    if (checksumErr.error !== ChecksumError.MissingChecksum) {
        log.debug('failed checksum validation', { method: request.apiMethod }, checksumErr);
    }

    return arsenalErrorFromChecksumError(checksumErr);
}

/**
* validateMethodChecksumsNoChunking - Validate the checksums of a request.
* @param {object} request - http request
Expand All @@ -253,5 +383,8 @@ module.exports = {
ChecksumError,
validateChecksumsNoChunking,
validateMethodChecksumNoChunking,
getChecksumDataFromHeaders,
arsenalErrorFromChecksumError,
algorithms,
checksumedMethods,
};
56 changes: 51 additions & 5 deletions lib/api/apiUtils/object/createAndStoreObject.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,49 @@ const { config } = require('../../../Config');
const validateWebsiteHeader = require('./websiteServing')
.validateWebsiteHeader;
const applyZenkoUserMD = require('./applyZenkoUserMD');
const {
algorithms,
getChecksumDataFromHeaders,
arsenalErrorFromChecksumError,
} = require('../integrity/validateChecksums');

const { externalBackends, versioningNotImplBackends } = constants;

const externalVersioningErrorMessage = 'We do not currently support putting ' +
'a versioned object to a location-constraint of type Azure or GCP.';
'a versioned object to a location-constraint of type Azure or GCP.';

/**
* Validate and compute the checksum for a zero-size object body.
* Parses the checksum headers, validates the client-supplied digest against
* the empty-body hash, sets metadataStoreParams.checksum on success, and
* calls back with an error on mismatch or invalid headers.
*
* @param {object} headers - request headers
* @param {object} metadataStoreParams - metadata params (checksum field set in-place)
* @param {function} callback - (err) callback
* @return {undefined}
*/
/**
 * zeroSizeBodyChecksumCheck - Validate and compute the checksum for a
 * zero-size object body. Parses the checksum headers, compares any
 * client-supplied digest against the empty-body hash, stores the result on
 * metadataStoreParams.checksum, and calls back with an error on mismatch or
 * invalid headers.
 *
 * @param {object} headers - request headers
 * @param {object} metadataStoreParams - metadata params (checksum set in-place)
 * @param {function} callback - (err) callback
 * @return {undefined}
 */
function zeroSizeBodyChecksumCheck(headers, metadataStoreParams, callback) {
    const parsed = getChecksumDataFromHeaders(headers);
    if (parsed.error) {
        return callback(arsenalErrorFromChecksumError(parsed));
    }

    const { algorithm, expected } = parsed;
    // With the trailer format and zero decoded bytes the body stream is
    // bypassed, so the trailer digest is never read and `expected` stays
    // undefined here. We still hash the empty body for the announced
    // algorithm so it can be stored in the object metadata.
    const emptyBody = Buffer.alloc(0);
    // Two-argument .then: the rejection handler only covers digest failures,
    // never exceptions thrown by the callback itself.
    return Promise.resolve(algorithms[algorithm].digest(emptyBody))
        .then(computed => {
            if (expected !== undefined && computed !== expected) {
                return callback(errors.BadDigest.customizeDescription(
                    `The ${algorithm.toUpperCase()} you specified did not match the calculated checksum.`
                ));
            }
            // eslint-disable-next-line no-param-reassign
            metadataStoreParams.checksum = { algorithm, value: computed, type: 'FULL_OBJECT' };
            return callback(null);
        }, digestErr => callback(digestErr));
}

function _storeInMDandDeleteData(bucketName, dataGetInfo, cipherBundle,
metadataStoreParams, dataToDelete, log, requestMethod, callback) {
Expand Down Expand Up @@ -217,7 +255,13 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
if (size === 0) {
if (!dontSkipBackend[locationType]) {
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return next(null, null, null);
// Delete markers are zero-byte versioned tombstones with
// no body, ETag, or checksum — skip checksum handling.
if (isDeleteMarker) {
return next(null, null, null, null);
}
return zeroSizeBodyChecksumCheck(request.headers, metadataStoreParams,
err => next(err, null, null, null));
}

// Handle mdOnlyHeader as a metadata only operation. If
Expand All @@ -243,14 +287,14 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
dataStoreVersionId: versionId,
dataStoreMD5: _md5,
};
return next(null, dataGetInfo, _md5);
return next(null, dataGetInfo, _md5, null);
}
}

return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, backendInfo, log, next);
},
function processDataResult(dataGetInfo, calculatedHash, next) {
function processDataResult(dataGetInfo, calculatedHash, checksum, next) {
if (dataGetInfo === null || dataGetInfo === undefined) {
return next(null, null);
}
Expand All @@ -264,7 +308,8 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
: `1:${calculatedHash}`;
const dataGetInfoArr = [{ key, size, start: 0, dataStoreName,
dataStoreType, dataStoreETag: prefixedDataStoreETag,
dataStoreVersionId }];
dataStoreVersionId
}];
if (cipherBundle) {
dataGetInfoArr[0].cryptoScheme = cipherBundle.cryptoScheme;
dataGetInfoArr[0].cipheredDataKey =
Expand All @@ -275,6 +320,7 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
dataGetInfoArr[0].size = mdOnlySize;
}
metadataStoreParams.contentMD5 = calculatedHash;
metadataStoreParams.checksum = checksum;
return next(null, dataGetInfoArr);
},
function getVersioningInfo(infoArr, next) {
Expand Down
Loading
Loading