Skip to content

Commit

Permalink
SNOW-1431870 Add cloud integration for GCM encryption
Browse files Browse the repository at this point in the history
  • Loading branch information
sfc-gh-pfus committed Nov 6, 2024
1 parent d13c63c commit d0d61b5
Showing 11 changed files with 560 additions and 235 deletions.
Original file line number Diff line number Diff line change
@@ -1105,6 +1105,7 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
isClientSideEncrypted =
jsonNode.path("data").path("stageInfo").path("isClientSideEncrypted").asBoolean(true);
}
String ciphers = jsonNode.path("data").path("stageInfo").path("ciphers").asText();

// endPoint is currently known to be set for Azure stages or S3. For S3 it will be set
// specifically
@@ -1165,7 +1166,8 @@ static StageInfo getStageInfo(JsonNode jsonNode, SFSession session) throws Snowf
stageRegion,
endPoint,
stgAcct,
isClientSideEncrypted);
isClientSideEncrypted,
ciphers);

// Setup pre-signed URL into stage info if pre-signed URL is returned.
if (stageInfo.getStageType() == StageInfo.StageType.GCS) {
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
package net.snowflake.client.jdbc.cloud.storage;

import com.google.common.base.Strings;
import java.io.File;
import java.io.InputStream;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.jdbc.ErrorCode;
import net.snowflake.client.jdbc.SnowflakeSQLLoggedException;
import net.snowflake.client.log.SFLogger;
import net.snowflake.client.log.SFLoggerFactory;
import net.snowflake.common.core.RemoteStoreFileEncryptionMaterial;
import net.snowflake.common.core.SqlState;

/**
 * Selects and applies the correct client-side decryption scheme for a downloaded stage file.
 *
 * <p>Instances are created via the static factories {@link #forCbc} (legacy AES-ECB key wrap +
 * AES-CBC data encryption) or {@link #forGcm} (AES-GCM for both key wrap and data), which
 * validate that the metadata required by the chosen scheme is present. Decryption is then
 * delegated to {@code EncryptionProvider} (CBC) or {@code GcmEncryptionProvider} (GCM).
 */
class DecryptionHelper {
  // NOTE(review): original logged under SnowflakeGCSClient.class — copy-paste error; this class
  // is used by more than the GCS client, so log under its own name.
  private static final SFLogger logger = SFLoggerFactory.getLogger(DecryptionHelper.class);

  private final String queryId;
  private final SFBaseSession session;
  private final String key; // base64-encoded wrapped file key
  private final String keyIv; // base64 IV used to wrap the key (GCM only, null for CBC)
  private final String dataIv; // base64 IV used to encrypt the file content
  private final String keyAad; // base64 AAD for key wrapping (GCM only)
  private final String dataAad; // base64 AAD for data encryption (GCM only)
  private final StageInfo.Ciphers ciphers;

  private DecryptionHelper(
      String queryId,
      SFBaseSession session,
      String key,
      String keyIv,
      String dataIv,
      String keyAad,
      String dataAad,
      StageInfo.Ciphers ciphers) {
    this.queryId = queryId;
    this.session = session;
    this.key = key;
    this.keyIv = keyIv;
    this.dataIv = dataIv;
    this.keyAad = keyAad;
    this.dataAad = dataAad;
    this.ciphers = ciphers;
  }

  /**
   * Creates a helper for the legacy AES-ECB/AES-CBC scheme.
   *
   * @param queryId query id used for error reporting
   * @param session session used for error reporting
   * @param key base64-encoded wrapped file key
   * @param contentIv base64-encoded IV for the file content
   * @throws SnowflakeSQLLoggedException if either required field is missing
   */
  static DecryptionHelper forCbc(
      String queryId, SFBaseSession session, String key, String contentIv)
      throws SnowflakeSQLLoggedException {
    if (Strings.isNullOrEmpty(key) || Strings.isNullOrEmpty(contentIv)) {
      throw exception(queryId, session);
    }
    return new DecryptionHelper(
        queryId, session, key, null, contentIv, null, null, StageInfo.Ciphers.AESECB_AESCBC);
  }

  /**
   * Creates a helper for the AES-GCM/AES-GCM scheme.
   *
   * @param queryId query id used for error reporting
   * @param session session used for error reporting
   * @param key base64-encoded wrapped file key
   * @param keyIv base64-encoded IV used when wrapping the key
   * @param dataIv base64-encoded IV used when encrypting the data
   * @param keyAad base64-encoded AAD for key wrapping (may be empty, must not be null)
   * @param dataAad base64-encoded AAD for data encryption (may be empty, must not be null)
   * @throws SnowflakeSQLLoggedException if any required field is missing
   */
  static DecryptionHelper forGcm(
      String queryId,
      SFBaseSession session,
      String key,
      String keyIv,
      String dataIv,
      String keyAad,
      String dataAad)
      throws SnowflakeSQLLoggedException {
    // AADs may legitimately be empty strings, so only reject null — unlike key/IVs.
    if (Strings.isNullOrEmpty(key)
        || Strings.isNullOrEmpty(keyIv)
        || Strings.isNullOrEmpty(dataIv)
        || keyAad == null
        || dataAad == null) {
      throw exception(queryId, session);
    }
    return new DecryptionHelper(
        queryId, session, key, keyIv, dataIv, keyAad, dataAad, StageInfo.Ciphers.AESGCM_AESGCM);
  }

  /**
   * Verifies that the metadata required by the configured cipher scheme is present.
   *
   * @throws SnowflakeSQLLoggedException if the file metadata is incomplete
   */
  void validate() throws SnowflakeSQLLoggedException {
    if (key == null
        || dataIv == null
        || (ciphers == StageInfo.Ciphers.AESGCM_AESGCM && keyIv == null)) {
      throw new SnowflakeSQLLoggedException(
          queryId,
          session,
          ErrorCode.INTERNAL_ERROR.getMessageCode(),
          SqlState.INTERNAL_ERROR,
          "File metadata incomplete");
    }
  }

  /**
   * Decrypts {@code file} in place using the configured scheme.
   *
   * @param file the downloaded, still-encrypted file
   * @param encMat encryption material holding the key-wrapping key
   * @throws SnowflakeSQLLoggedException if decryption fails for any reason
   */
  void decryptFile(File file, RemoteStoreFileEncryptionMaterial encMat)
      throws SnowflakeSQLLoggedException {
    try {
      // BUGFIX: the original switch had no break statements, so the CBC case fell through
      // into the GCM decrypt and then into the default throw — every call failed (and the
      // file was decrypted twice first). Each case now terminates its branch.
      switch (ciphers) {
        case AESECB_AESCBC:
          EncryptionProvider.decrypt(file, key, dataIv, encMat);
          break;
        case AESGCM_AESGCM:
          GcmEncryptionProvider.decryptFile(file, key, dataIv, keyIv, encMat, dataAad, keyAad);
          break;
        default:
          throw new IllegalArgumentException("unsupported ciphers: " + ciphers);
      }
    } catch (Exception ex) {
      logger.error("Error decrypting file", ex);
      throw new SnowflakeSQLLoggedException(
          queryId,
          session,
          ErrorCode.INTERNAL_ERROR.getMessageCode(),
          SqlState.INTERNAL_ERROR,
          "Cannot decrypt file");
    }
  }

  /**
   * Wraps {@code inputStream} in a decrypting stream for the configured scheme.
   *
   * @param inputStream the encrypted source stream
   * @param encMat encryption material holding the key-wrapping key
   * @return a stream yielding the decrypted bytes
   */
  InputStream decryptStream(InputStream inputStream, RemoteStoreFileEncryptionMaterial encMat)
      throws NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeyException,
          BadPaddingException, IllegalBlockSizeException, InvalidAlgorithmParameterException {
    switch (ciphers) {
      case AESGCM_AESGCM:
        return GcmEncryptionProvider.decryptStream(
            inputStream, key, dataIv, keyIv, encMat, dataAad, keyAad);
      case AESECB_AESCBC:
        return EncryptionProvider.decryptStream(inputStream, key, dataIv, encMat);
    }
    throw new IllegalArgumentException("unsupported ciphers: " + ciphers);
  }

  // Shared "metadata incomplete" error used by both factory methods.
  private static SnowflakeSQLLoggedException exception(String queryId, SFBaseSession session) {
    return new SnowflakeSQLLoggedException(
        queryId,
        session,
        ErrorCode.INTERNAL_ERROR.getMessageCode(),
        SqlState.INTERNAL_ERROR,
        "File metadata incomplete");
  }
}
Original file line number Diff line number Diff line change
@@ -8,10 +8,6 @@
import static net.snowflake.client.core.HttpUtil.setSessionlessProxyForAzure;
import static net.snowflake.client.jdbc.SnowflakeUtil.systemGetProperty;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.microsoft.azure.storage.OperationContext;
import com.microsoft.azure.storage.StorageCredentials;
import com.microsoft.azure.storage.StorageCredentialsAnonymous;
@@ -35,15 +31,12 @@
import java.net.URI;
import java.net.URISyntaxException;
import java.security.InvalidKeyException;
import java.util.AbstractMap;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.Base64;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.snowflake.client.core.ObjectMapperFactory;
import net.snowflake.client.core.SFBaseSession;
import net.snowflake.client.core.SFSession;
import net.snowflake.client.core.SFSessionProperty;
@@ -82,8 +75,9 @@ public class SnowflakeAzureClient implements SnowflakeStorageClient {
private OperationContext opContext = null;
private SFBaseSession session;

private StorageClientHelper storageClientHelper;

private SnowflakeAzureClient() {}
;

/*
* Factory method for a SnowflakeAzureClient object
@@ -162,6 +156,7 @@ private void setupAzureClient(
} catch (URISyntaxException ex) {
throw new IllegalArgumentException("invalid_azure_credentials");
}
storageClientHelper = new StorageClientHelper(this, encMat, session, stageInfo.getCiphers());
}

// Returns the Max number of retry attempts
@@ -352,26 +347,17 @@ public void download(
// Get the user-defined BLOB metadata
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

String key = encryptionData.getKey();
String iv = encryptionData.getValue();
DecryptionHelper decryptionHelper =
storageClientHelper.parseEncryptionDataFromJson(
userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
stopwatch.restart();
if (key == null || iv == null) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
ErrorCode.INTERNAL_ERROR.getMessageCode(),
SqlState.INTERNAL_ERROR,
"File metadata incomplete");
}
decryptionHelper.validate();

// Decrypt file
try {
EncryptionProvider.decrypt(localFile, key, iv, this.encMat);
decryptionHelper.decryptFile(localFile, encMat);
stopwatch.stop();
long decryptMillis = stopwatch.elapsedMillis();
logger.info(
@@ -452,24 +438,15 @@ public InputStream downloadToStream(
long downloadMillis = stopwatch.elapsedMillis();
Map<String, String> userDefinedMetadata =
SnowflakeUtil.createCaseInsensitiveMap(blob.getMetadata());
AbstractMap.SimpleEntry<String, String> encryptionData =
parseEncryptionData(userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);
String key = encryptionData.getKey();
String iv = encryptionData.getValue();
DecryptionHelper decryptionHelper =
storageClientHelper.parseEncryptionDataFromJson(
userDefinedMetadata.get(AZ_ENCRYPTIONDATAPROP), queryId);

if (this.isEncrypting() && this.getEncryptionKeySize() <= 256) {
decryptionHelper.validate();
stopwatch.restart();
if (key == null || iv == null) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
ErrorCode.INTERNAL_ERROR.getMessageCode(),
SqlState.INTERNAL_ERROR,
"File metadata incomplete");
}

try {
InputStream is = EncryptionProvider.decryptStream(stream, key, iv, encMat);
InputStream is = decryptionHelper.decryptStream(stream, encMat);
stopwatch.stop();
long decryptMillis = stopwatch.elapsedMillis();
logger.info(
@@ -706,9 +683,7 @@ private SFPair<InputStream, Boolean> createUploadStream(
toClose.add(srcFileStream);

// Encrypt
stream =
EncryptionProvider.encrypt(
meta, originalContentLength, uploadStream, this.encMat, this);
stream = storageClientHelper.encrypt(meta, originalContentLength, uploadStream);
uploadFromStream = true;
} catch (Exception ex) {
logger.error("Failed to encrypt input", ex);
@@ -935,51 +910,6 @@ private static URI buildAzureStorageEndpointURI(String storageEndPoint, String s
return storageEndpoint;
}

/*
* buildEncryptionMetadataJSON
* Takes the base64-encoded iv and key and creates the JSON block to be
* used as the encryptiondata metadata field on the blob.
*/
private String buildEncryptionMetadataJSON(String iv64, String key64) {
return String.format(
"{\"EncryptionMode\":\"FullBlob\",\"WrappedContentKey\""
+ ":{\"KeyId\":\"symmKey1\",\"EncryptedKey\":\"%s\""
+ ",\"Algorithm\":\"AES_CBC_256\"},\"EncryptionAgent\":"
+ "{\"Protocol\":\"1.0\",\"EncryptionAlgorithm\":"
+ "\"AES_CBC_256\"},\"ContentEncryptionIV\":\"%s\""
+ ",\"KeyWrappingMetadata\":{\"EncryptionLibrary\":"
+ "\"Java 5.3.0\"}}",
key64, iv64);
}

/*
* parseEncryptionData
* Takes the json string in the encryptiondata metadata field of the encrypted
* blob and parses out the key and iv. Returns the pair as key = key, iv = value.
*/
private SimpleEntry<String, String> parseEncryptionData(String jsonEncryptionData, String queryId)
throws SnowflakeSQLException {
ObjectMapper mapper = ObjectMapperFactory.getObjectMapper();
JsonFactory factory = mapper.getFactory();
try {
JsonParser parser = factory.createParser(jsonEncryptionData);
JsonNode encryptionDataNode = mapper.readTree(parser);

String iv = encryptionDataNode.get("ContentEncryptionIV").asText();
String key = encryptionDataNode.get("WrappedContentKey").get("EncryptedKey").asText();

return new SimpleEntry<String, String>(key, iv);
} catch (Exception ex) {
throw new SnowflakeSQLLoggedException(
queryId,
session,
SqlState.SYSTEM_ERROR,
ErrorCode.IO_ERROR.getMessageCode(),
ex,
"Error parsing encryption data as json" + ": " + ex.getMessage());
}
}

/** Returns the material descriptor key */
@Override
public String getMatdescKey() {
@@ -997,12 +927,34 @@ public void addEncryptionMetadata(
meta.addUserMetadata(getMatdescKey(), matDesc.toString());
meta.addUserMetadata(
AZ_ENCRYPTIONDATAPROP,
buildEncryptionMetadataJSON(
storageClientHelper.buildEncryptionMetadataJSONForEcbCbc(
Base64.getEncoder().encodeToString(ivData),
Base64.getEncoder().encodeToString(encryptedKey)));
meta.setContentLength(contentLength);
}

@Override
public void addEncryptionMetadataForGcm(
StorageObjectMetadata meta,
MatDesc matDesc,
byte[] encryptedKey,
byte[] dataIvBytes,
byte[] keyIvBytes,
byte[] keyAad,
byte[] dataAad,
long contentLength) {
meta.addUserMetadata(getMatdescKey(), matDesc.toString());
meta.addUserMetadata(
AZ_ENCRYPTIONDATAPROP,
storageClientHelper.buildEncryptionMetadataJSONForGcm(
Base64.getEncoder().encodeToString(keyIvBytes),
Base64.getEncoder().encodeToString(encryptedKey),
Base64.getEncoder().encodeToString(dataIvBytes),
Base64.getEncoder().encodeToString(keyAad),
Base64.getEncoder().encodeToString(dataAad)));
meta.setContentLength(contentLength);
}

/** Adds digest metadata to the StorageObjectMetadata object */
@Override
public void addDigestMetadata(StorageObjectMetadata meta, String digest) {
Loading

0 comments on commit d0d61b5

Please sign in to comment.