From fe8e84810d15f8b9f8e0f3a5aa577906fabeb351 Mon Sep 17 00:00:00 2001 From: sfc-gh-ext-simba-lf <115584722+sfc-gh-ext-simba-lf@users.noreply.github.com> Date: Mon, 28 Oct 2024 09:58:47 -0700 Subject: [PATCH 1/5] SNOW-1657238: Fix incorrect row count for rows loaded (#1044) --- .../IntegrationTests/SFDbCommandIT.cs | 90 +++++++++++++++++++ Snowflake.Data/Core/ResultSetUtil.cs | 8 +- 2 files changed, 95 insertions(+), 3 deletions(-) diff --git a/Snowflake.Data.Tests/IntegrationTests/SFDbCommandIT.cs b/Snowflake.Data.Tests/IntegrationTests/SFDbCommandIT.cs index 5aa01ee46..5950e4f9b 100755 --- a/Snowflake.Data.Tests/IntegrationTests/SFDbCommandIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/SFDbCommandIT.cs @@ -8,6 +8,8 @@ using System.Threading; using System.Threading.Tasks; using Snowflake.Data.Core; +using System.Linq; +using System.IO; namespace Snowflake.Data.Tests.IntegrationTests { @@ -1674,5 +1676,93 @@ public async Task TestCommandWithCommentEmbeddedAsync() Assert.AreEqual("--", reader.GetString(0)); } } + + [Test] + public void TestExecuteNonQueryReturnsCorrectRowCountForUploadWithMultipleFiles() + { + const int NumberOfFiles = 5; + const int NumberOfRows = 3; + const int ExpectedRowCount = NumberOfFiles * NumberOfRows; + + using (SnowflakeDbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString + "poolingEnabled=false"; + conn.Open(); + + using (SnowflakeDbCommand cmd = (SnowflakeDbCommand)conn.CreateCommand()) + { + var tempFolder = $"{Path.GetTempPath()}Temp_{Guid.NewGuid()}"; + + try + { + // Arrange + Directory.CreateDirectory(tempFolder); + var data = string.Concat(Enumerable.Repeat(string.Join(",", "TestData") + "\n", NumberOfRows)); + for (int i = 0; i < NumberOfFiles; i++) + { + File.WriteAllText(Path.Combine(tempFolder, $"{TestContext.CurrentContext.Test.Name}_{i}.csv"), data); + } + CreateOrReplaceTable(conn, TableName, new[] { "COL1 STRING" }); + cmd.CommandText = $"PUT file://{Path.Combine(tempFolder, "*.csv")} @%{TableName} AUTO_COMPRESS=FALSE"; + var reader = cmd.ExecuteReader(); + + // Act + cmd.CommandText = $"COPY INTO {TableName} FROM @%{TableName} PATTERN='.*.csv' FILE_FORMAT=(TYPE=CSV)"; + int actualRowCount = cmd.ExecuteNonQuery(); + + // Assert + Assert.AreEqual(ExpectedRowCount, actualRowCount); + } + finally + { + Directory.Delete(tempFolder, true); + } + } + } + } + + [Test] + public async Task TestExecuteNonQueryAsyncReturnsCorrectRowCountForUploadWithMultipleFiles() + { + const int NumberOfFiles = 5; + const int NumberOfRows = 3; + const int ExpectedRowCount = NumberOfFiles * NumberOfRows; + + using (SnowflakeDbConnection conn = new SnowflakeDbConnection()) + { + conn.ConnectionString = ConnectionString + "poolingEnabled=false"; + conn.Open(); + + using (SnowflakeDbCommand cmd = (SnowflakeDbCommand)conn.CreateCommand()) + { + var tempFolder = $"{Path.GetTempPath()}Temp_{Guid.NewGuid()}"; + + try + { + // Arrange + Directory.CreateDirectory(tempFolder); + var data = string.Concat(Enumerable.Repeat(string.Join(",", "TestData") + "\n", NumberOfRows)); + for (int i = 0; i < NumberOfFiles; i++) + { + File.WriteAllText(Path.Combine(tempFolder, $"{TestContext.CurrentContext.Test.Name}_{i}.csv"), data); + } + CreateOrReplaceTable(conn, TableName, new[] { "COL1 STRING" }); + cmd.CommandText = $"PUT file://{Path.Combine(tempFolder, "*.csv")} @%{TableName} AUTO_COMPRESS=FALSE"; + var reader = cmd.ExecuteReader(); + + // Act + cmd.CommandText = $"COPY INTO {TableName} FROM @%{TableName} PATTERN='.*.csv' 
FILE_FORMAT=(TYPE=CSV)"; + int actualRowCount = await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + // Assert + Assert.AreEqual(ExpectedRowCount, actualRowCount); + } + finally + { + Directory.Delete(tempFolder, true); + } + } + } + } } } diff --git a/Snowflake.Data/Core/ResultSetUtil.cs b/Snowflake.Data/Core/ResultSetUtil.cs index 9d62a17d7..236efab9c 100755 --- a/Snowflake.Data/Core/ResultSetUtil.cs +++ b/Snowflake.Data/Core/ResultSetUtil.cs @@ -36,9 +36,11 @@ internal static int CalculateUpdateCount(this SFBaseResultSet resultSet) var index = resultSet.sfResultSetMetaData.GetColumnIndexByName("rows_loaded"); if (index >= 0) { - resultSet.Next(); - updateCount = resultSet.GetInt64(index); - resultSet.Rewind(); + while (resultSet.Next()) + { + updateCount += resultSet.GetInt64(index); + } + while (resultSet.Rewind()) {} } break; case SFStatementType.COPY_UNLOAD: From b21bfb34a716062f6a7425d74325ac00dc6b6963 Mon Sep 17 00:00:00 2001 From: Dariusz Stempniak Date: Mon, 28 Oct 2024 22:04:16 +0100 Subject: [PATCH 2/5] SNOW-1739483 improve calculation of time to wait before retry (#1046) --- Snowflake.Data/Core/HttpUtil.cs | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/Snowflake.Data/Core/HttpUtil.cs b/Snowflake.Data/Core/HttpUtil.cs index f835b7eb5..fa52348e8 100755 --- a/Snowflake.Data/Core/HttpUtil.cs +++ b/Snowflake.Data/Core/HttpUtil.cs @@ -406,7 +406,7 @@ protected override async Task SendAsync(HttpRequestMessage } else if (childCts != null && childCts.Token.IsCancellationRequested) { - logger.Warn($"Http request timeout. Retry the request after {backOffInSec} sec."); + logger.Warn($"Http request timeout. Retry the request after max {backOffInSec} sec."); } else { @@ -465,7 +465,7 @@ protected override async Task SendAsync(HttpRequestMessage logger.Info("Response returned was null."); } - if (restTimeout.TotalSeconds > 0 && totalRetryTime > restTimeout.TotalSeconds) + if (restTimeout.TotalSeconds > 0 && totalRetryTime >= restTimeout.TotalSeconds) { logger.Debug($"stop retry as connection_timeout {restTimeout.TotalSeconds} sec. reached"); if (response != null) @@ -478,6 +478,12 @@ protected override async Task SendAsync(HttpRequestMessage throw new OperationCanceledException(errorMessage); } + if (restTimeout.TotalSeconds > 0 && totalRetryTime + backOffInSec > restTimeout.TotalSeconds) + { + // No need to wait more than necessary if it can be avoided. + backOffInSec = (int)restTimeout.TotalSeconds - totalRetryTime; + } + retryCount++; if ((maxRetryCount > 0) && (retryCount > maxRetryCount)) { @@ -516,15 +522,6 @@ protected override async Task SendAsync(HttpRequestMessage // Multiply sleep by 2 for non-login requests backOffInSec *= 2; } - - totalRetryTime = (int)((DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() - startTimeInMilliseconds) / 1000); - if ((restTimeout.TotalSeconds > 0) && (totalRetryTime + backOffInSec > restTimeout.TotalSeconds)) - { - // No need to wait more than necessary if it can be avoided. - // If the rest timeout will be reached before the next back-off, - // then use the remaining connection timeout. 
- backOffInSec = Math.Min(backOffInSec, (int)restTimeout.TotalSeconds - totalRetryTime + 1); - } } } } From 5e166938f691b15f548ea7b577800e89a4a1b2a7 Mon Sep 17 00:00:00 2001 From: Dominik Przybysz <132913826+sfc-gh-dprzybysz@users.noreply.github.com> Date: Wed, 30 Oct 2024 11:12:10 +0100 Subject: [PATCH 3/5] SNOW-1756807: Add note about GCP regional endpoints (#1051) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 0378b5416..408563f25 100644 --- a/README.md +++ b/README.md @@ -162,6 +162,8 @@ Read more in [certificate validation](doc/CertficateValidation.md) docs. were not performed where the insecureMode flag was set to false, which is the default setting. From version v2.1.5 CRL is working back as intended. +5. This driver currently does not support GCP regional endpoints. Please ensure that any workloads using through this driver do not require support for regional endpoints on GCP. If you have questions about this, please contact Snowflake Support. + Note that the driver is now targeting .NET Standard 2.0. When upgrading, you might also need to run “Update-Package -reinstall” to update the dependencies. See more: From d8e4b63339d4b9f5500b8fcdcef87e4ca7a38f5a Mon Sep 17 00:00:00 2001 From: Dariusz Stempniak Date: Thu, 31 Oct 2024 14:44:28 +0100 Subject: [PATCH 4/5] SNOW-1672654 Support for empty encryptionMaterial (#1048) --- .../IntegrationTests/SFPutGetTest.cs | 110 +++++++++++------- .../UnitTests/SFAzureClientTest.cs | 1 - .../UnitTests/SFGCSClientTest.cs | 1 - .../UnitTests/SFRemoteStorageClientTest.cs | 1 - .../UnitTests/SFS3ClientTest.cs | 1 - .../Core/FileTransfer/SFFileTransferAgent.cs | 31 ++--- .../FileTransfer/StorageClient/SFGCSClient.cs | 48 ++++---- .../FileTransfer/StorageClient/SFS3Client.cs | 11 +- .../StorageClient/SFSnowflakeAzureClient.cs | 64 +++++----- 9 files changed, 155 insertions(+), 113 deletions(-) diff --git a/Snowflake.Data.Tests/IntegrationTests/SFPutGetTest.cs b/Snowflake.Data.Tests/IntegrationTests/SFPutGetTest.cs index 975d041e0..2ef0c7ef9 100644 --- a/Snowflake.Data.Tests/IntegrationTests/SFPutGetTest.cs +++ b/Snowflake.Data.Tests/IntegrationTests/SFPutGetTest.cs @@ -31,6 +31,7 @@ class SFPutGetTest : SFBaseTest [ThreadStatic] private static string t_schemaName; [ThreadStatic] private static string t_tableName; [ThreadStatic] private static string t_stageName; + [ThreadStatic] private static string t_stageNameSse; // server side encryption without client side encryption [ThreadStatic] private static string t_fileName; [ThreadStatic] private static string t_outputFileName; [ThreadStatic] private static string t_inputFilePath; @@ -41,7 +42,7 @@ class SFPutGetTest : SFBaseTest [ThreadStatic] private static string t_destCompressionType; [ThreadStatic] private static bool t_autoCompress; [ThreadStatic] private static List t_filesToDelete; - + public enum StageType { USER, @@ -63,7 +64,7 @@ public static void OneTimeTearDown() // Delete temp output directory and downloaded files Directory.Delete(s_outputDirectory, true); } - + [SetUp] public void SetUp() { @@ -73,6 +74,7 @@ public void SetUp() t_schemaName = testConfig.schema; t_tableName = $"TABLE_{threadSuffix}"; t_stageName = $"STAGE_{threadSuffix}"; + t_stageNameSse = $"STAGE_{threadSuffix}_SSE"; t_filesToDelete = new List(); using (var conn = new SnowflakeDbConnection(ConnectionString)) @@ -88,6 +90,10 @@ public void SetUp() // Create temp stage command.CommandText = $"CREATE OR REPLACE STAGE {t_schemaName}.{t_stageName}"; command.ExecuteNonQuery(); + + // 
Create temp stage without client side encryption + command.CommandText = $"CREATE OR REPLACE STAGE {t_schemaName}.{t_stageNameSse} ENCRYPTION = (TYPE = 'SNOWFLAKE_SSE')"; + command.ExecuteNonQuery(); } } } @@ -109,7 +115,7 @@ public void TearDown() command.ExecuteNonQuery(); } } - + // Delete temp files if necessary if (t_filesToDelete != null) { @@ -130,7 +136,7 @@ public void TestPutFileAsteriskWildcard() $"{absolutePathPrefix}_three.csv" }; PrepareFileData(files); - + // Set the PUT query variables t_inputFilePath = $"{absolutePathPrefix}*"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; @@ -142,7 +148,7 @@ public void TestPutFileAsteriskWildcard() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] public void TestPutFileAsteriskWildcardWithExtension() { @@ -167,7 +173,7 @@ public void TestPutFileAsteriskWildcardWithExtension() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] public void TestPutFileQuestionMarkWildcard() { @@ -180,7 +186,7 @@ public void TestPutFileQuestionMarkWildcard() PrepareFileData(files); // Create file which should be omitted during the transfer PrepareFileData($"{absolutePathPrefix}_four.csv"); - + // Set the PUT query variables t_inputFilePath = $"{absolutePathPrefix}_?.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; @@ -192,14 +198,14 @@ public void TestPutFileQuestionMarkWildcard() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] public void TestPutFileRelativePathWithoutDirectory() { // Set the PUT query variables t_inputFilePath = $"{Guid.NewGuid()}_1.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; - + PrepareFileData(t_inputFilePath); using (var conn = new SnowflakeDbConnection(ConnectionString)) @@ -226,7 +232,7 @@ public void TestPutGetOnClosedConnectionThrowsWithoutQueryId([Values("GET", "PUT SnowflakeDbExceptionAssert.HasErrorCode(snowflakeDbException, SFError.EXECUTE_COMMAND_ON_CLOSED_CONNECTION); } } - + [Test] public void TestGetNonExistentFileReturnsFalseAndDoesNotThrow() { @@ -236,7 +242,7 @@ public void TestGetNonExistentFileReturnsFalseAndDoesNotThrow() // Act using (var conn = new SnowflakeDbConnection(ConnectionString)) { - conn.Open(); + conn.Open(); var sql = $"GET {t_internalStagePath}/{t_fileName} file://{s_outputDirectory}"; using (var command = conn.CreateCommand()) { @@ -246,7 +252,7 @@ public void TestGetNonExistentFileReturnsFalseAndDoesNotThrow() } } } - + [Test] public void TestPutNonExistentFileThrowsWithQueryId() { @@ -256,14 +262,14 @@ public void TestPutNonExistentFileThrowsWithQueryId() // Act using (var conn = new SnowflakeDbConnection(ConnectionString)) { - conn.Open(); + conn.Open(); var snowflakeDbException = Assert.Throws(() => PutFile(conn)); Assert.IsNotNull(snowflakeDbException); Assert.IsNotNull(snowflakeDbException.QueryId); SnowflakeDbExceptionAssert.HasErrorCode(snowflakeDbException, SFError.IO_ERROR_ON_GETPUT_COMMAND); } } - + [Test] public void TestPutFileProvidesQueryIdOnFailure() { @@ -285,7 +291,7 @@ public void TestPutFileProvidesQueryIdOnFailure() SnowflakeDbExceptionAssert.HasErrorCode(snowflakeDbException, SFError.IO_ERROR_ON_GETPUT_COMMAND); } } - + [Test] public void TestPutFileWithSyntaxErrorProvidesQueryIdOnFailure() { @@ -308,7 +314,7 @@ public void TestPutFileWithSyntaxErrorProvidesQueryIdOnFailure() Assert.That(snowflakeDbException.InnerException, Is.Null); } } - + [Test] public void TestPutFileProvidesQueryIdOnSuccess() { @@ -323,7 +329,7 @@ public void TestPutFileProvidesQueryIdOnSuccess() { 
conn.Open(); var queryId = PutFile(conn); - + // Assert Assert.IsNotNull(queryId); Assert.DoesNotThrow(()=>Guid.Parse(queryId)); @@ -337,11 +343,11 @@ public void TestPutFileRelativePathWithDirectory() var guid = Guid.NewGuid(); var relativePath = $"{guid}"; Directory.CreateDirectory(relativePath); - + // Set the PUT query variables t_inputFilePath = $"{relativePath}{Path.DirectorySeparatorChar}{guid}_1.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; - + PrepareFileData(t_inputFilePath); using (var conn = new SnowflakeDbConnection(ConnectionString)) @@ -351,7 +357,7 @@ public void TestPutFileRelativePathWithDirectory() VerifyFilesAreUploaded(conn, new List { t_inputFilePath }, t_internalStagePath); } } - + [Test] public void TestPutFileRelativePathAsteriskWildcard() { @@ -374,7 +380,7 @@ public void TestPutFileRelativePathAsteriskWildcard() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] // presigned url is enabled on CI so we need to disable the test // it should be enabled when downscoped credential is the default option @@ -384,7 +390,7 @@ public void TestPutFileWithoutOverwriteFlagSkipsSecondUpload() // Set the PUT query variables t_inputFilePath = $"{Guid.NewGuid()}.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; - + PrepareFileData(t_inputFilePath); using (var conn = new SnowflakeDbConnection(ConnectionString)) @@ -395,18 +401,18 @@ public void TestPutFileWithoutOverwriteFlagSkipsSecondUpload() PutFile(conn, expectedStatus: ResultStatus.SKIPPED); } } - + [Test] public void TestPutFileWithOverwriteFlagRunsSecondUpload() { var overwriteAttribute = "OVERWRITE=TRUE"; - + // Set the PUT query variables t_inputFilePath = $"{Guid.NewGuid()}.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; - + PrepareFileData(t_inputFilePath); - + using (var conn = new SnowflakeDbConnection(ConnectionString)) { conn.Open(); @@ -415,7 +421,7 @@ public void TestPutFileWithOverwriteFlagRunsSecondUpload() PutFile(conn, overwriteAttribute, expectedStatus: ResultStatus.UPLOADED); } } - + [Test] public void TestPutDirectoryAsteriskWildcard() { @@ -431,7 +437,7 @@ public void TestPutDirectoryAsteriskWildcard() PrepareFileData(fullPath); files.Add(fullPath); } - + // Set the PUT query variables t_inputFilePath = $"{path}*{Path.DirectorySeparatorChar}*"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; @@ -459,7 +465,7 @@ public void TestPutDirectoryQuestionMarkWildcard() PrepareFileData(fullPath); files.Add(fullPath); } - + // Set the PUT query variables t_inputFilePath = $"{path}_?{Path.DirectorySeparatorChar}{guid}_?_file.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; @@ -471,7 +477,7 @@ public void TestPutDirectoryQuestionMarkWildcard() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] public void TestPutDirectoryMixedWildcard() { @@ -487,7 +493,7 @@ public void TestPutDirectoryMixedWildcard() PrepareFileData(fullPath); files.Add(fullPath); } - + // Set the PUT query variables t_inputFilePath = $"{path}_*{Path.DirectorySeparatorChar}{guid}_?_file.csv"; t_internalStagePath = $"@{t_schemaName}.{t_stageName}"; @@ -499,7 +505,7 @@ public void TestPutDirectoryMixedWildcard() VerifyFilesAreUploaded(conn, files, t_internalStagePath); } } - + [Test] public void TestPutGetCommand( [Values("none", "gzip", "bzip2", "brotli", "deflate", "raw_deflate", "zstd")] string sourceFileCompressionType, @@ -517,7 +523,24 @@ public void TestPutGetCommand( GetFile(conn); } } - + + [Test] + public void 
TestPutGetCommandForNamedStageWithoutClientSideEncryption( + [Values("none", "gzip")] string sourceFileCompressionType, + [Values("", "/DEEP/TEST_PATH")] string stagePath, + [Values] bool autoCompress) + { + PrepareTest(sourceFileCompressionType, StageType.NAMED, stagePath, autoCompress, false); + + using (var conn = new SnowflakeDbConnection(ConnectionString)) + { + conn.Open(); + PutFile(conn); + CopyIntoTable(conn); + GetFile(conn); + } + } + // Test small file upload/download with GCS_USE_DOWNSCOPED_CREDENTIAL set to true [Test] [IgnoreOnEnvIs("snowflake_cloud_env", new [] { "AWS", "AZURE" })] @@ -536,14 +559,15 @@ public void TestPutGetGcsDownscopedCredential( GetFile(conn); } } - - private void PrepareTest(string sourceFileCompressionType, StageType stageType, string stagePath, bool autoCompress) + + private void PrepareTest(string sourceFileCompressionType, StageType stageType, string stagePath, + bool autoCompress, bool clientEncryption = true) { t_stageType = stageType; t_sourceCompressionType = sourceFileCompressionType; t_autoCompress = autoCompress; // Prepare temp file name with specified file extension - t_fileName = Guid.NewGuid() + ".csv" + + t_fileName = Guid.NewGuid() + ".csv" + (t_autoCompress? SFFileCompressionTypes.LookUpByName(t_sourceCompressionType).FileExtension: ""); t_inputFilePath = Path.GetTempPath() + t_fileName; if (IsCompressedByTheDriver()) @@ -570,7 +594,9 @@ private void PrepareTest(string sourceFileCompressionType, StageType stageType, t_internalStagePath = $"@{t_schemaName}.%{t_tableName}{stagePath}"; break; case StageType.NAMED: - t_internalStagePath = $"@{t_schemaName}.{t_stageName}{stagePath}"; + t_internalStagePath = clientEncryption + ? $"@{t_schemaName}.{t_stageName}{stagePath}" + : $"@{t_schemaName}.{t_stageNameSse}{stagePath}"; break; } } @@ -579,11 +605,11 @@ private static bool IsCompressedByTheDriver() { return t_sourceCompressionType == "none" && t_autoCompress; } - + // PUT - upload file from local directory to the stage string PutFile( - SnowflakeDbConnection conn, - String additionalAttribute = "", + SnowflakeDbConnection conn, + String additionalAttribute = "", ResultStatus expectedStatus = ResultStatus.UPLOADED) { string queryId; @@ -704,7 +730,7 @@ private void ProcessFile(String command, SnowflakeDbConnection connection) { switch (command) { - case "GET": + case "GET": GetFile(connection); break; case "PUT": @@ -747,7 +773,7 @@ private static void PrepareFileData(string file) // Prepare csv raw data and write to temp files var rawDataRow = string.Join(",", s_colData) + "\n"; var rawData = string.Concat(Enumerable.Repeat(rawDataRow, NumberOfRows)); - + File.WriteAllText(file, rawData); t_filesToDelete.Add(file); } diff --git a/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs index a1c791071..08b85a9b5 100644 --- a/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFAzureClientTest.cs @@ -69,7 +69,6 @@ class SFAzureClientTest : SFBaseTest stageInfo = new PutGetStageInfo() { endPoint = EndPoint, - isClientSideEncrypted = true, location = Location, locationType = SFRemoteStorageUtil.AZURE_FS, path = LocationPath, diff --git a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs index 0fad57542..d47742743 100644 --- a/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFGCSClientTest.cs @@ -62,7 +62,6 @@ class SFGCSClientTest : SFBaseTest stageInfo = new 
PutGetStageInfo() { endPoint = null, - isClientSideEncrypted = true, location = Location, locationType = SFRemoteStorageUtil.GCS_FS, path = LocationPath, diff --git a/Snowflake.Data.Tests/UnitTests/SFRemoteStorageClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFRemoteStorageClientTest.cs index 0e9d53767..76ec7c557 100644 --- a/Snowflake.Data.Tests/UnitTests/SFRemoteStorageClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFRemoteStorageClientTest.cs @@ -87,7 +87,6 @@ class SFRemoteStorageClientTest : SFBaseTest stageInfo = new PutGetStageInfo() { endPoint = EndPoint, - isClientSideEncrypted = true, location = Location, locationType = SFRemoteStorageUtil.GCS_FS, path = LocationPath, diff --git a/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs b/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs index 54647db8b..5432b0121 100644 --- a/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs +++ b/Snowflake.Data.Tests/UnitTests/SFS3ClientTest.cs @@ -87,7 +87,6 @@ class SFS3ClientTest : SFBaseTest stageInfo = new PutGetStageInfo() { endPoint = Endpoint, - isClientSideEncrypted = true, location = Location, locationType = SFRemoteStorageUtil.S3_FS, path = LocationPath, diff --git a/Snowflake.Data/Core/FileTransfer/SFFileTransferAgent.cs b/Snowflake.Data/Core/FileTransfer/SFFileTransferAgent.cs index b27daa51f..1ee8557b6 100644 --- a/Snowflake.Data/Core/FileTransfer/SFFileTransferAgent.cs +++ b/Snowflake.Data/Core/FileTransfer/SFFileTransferAgent.cs @@ -468,7 +468,7 @@ private void updatePresignedUrl() fileMeta.stageInfo = response.data.stageInfo; fileMeta.presignedUrl = response.data.stageInfo.presignedUrl; - } + } } else if (CommandTypes.DOWNLOAD == CommandType) { @@ -477,7 +477,7 @@ private void updatePresignedUrl() FilesMetas[index].presignedUrl = TransferMetadata.presignedUrls[index]; } } - } + } } /// @@ -544,7 +544,10 @@ private void initEncryptionMaterial() { if (CommandTypes.UPLOAD == CommandType) { - EncryptionMaterials.Add(TransferMetadata.encryptionMaterial[0]); + if (TransferMetadata.stageInfo.isClientSideEncrypted) + { + EncryptionMaterials.Add(TransferMetadata.encryptionMaterial[0]); + } } } @@ -670,7 +673,9 @@ private void initFileMetadata( overwrite = TransferMetadata.overwrite, presignedUrl = TransferMetadata.stageInfo.presignedUrl, parallel = TransferMetadata.parallel, - encryptionMaterial = TransferMetadata.encryptionMaterial[index], + encryptionMaterial = index < TransferMetadata.encryptionMaterial.Count + ? TransferMetadata.encryptionMaterial[index] + : null, MaxBytesInMemory = GetFileTransferMaxBytesInMemory(), _operationType = CommandTypes.DOWNLOAD }; @@ -715,7 +720,7 @@ private int GetFileTransferMaxBytesInMemory() return FileTransferConfiguration.DefaultMaxBytesInMemory; } } - + /// /// Expand the wildcards if any to generate the list of paths for all files matched by the wildcards. /// Also replace the relative paths to the absolute paths for the files if needed. 
@@ -731,7 +736,7 @@ private List expandFileNames(string location) var directoryName = Path.GetDirectoryName(location); var foundDirectories = ExpandDirectories(directoryName); var filePaths = new List(); - + if (ContainsWildcard(fileName)) { foreach (var directory in foundDirectories) @@ -756,8 +761,8 @@ private List expandFileNames(string location) { filePaths.AddRange( Directory.GetFiles( - directory, - fileName, + directory, + fileName, SearchOption.TopDirectoryOnly)); } } @@ -788,7 +793,7 @@ private List expandFileNames(string location) return filePaths; } - + /// /// Expand the wildcards in the directory path to generate the list of directories to be searched for the files. /// @@ -803,7 +808,7 @@ private static IEnumerable ExpandDirectories(string directoryPath) { return new List { Path.GetFullPath(directoryPath) + Path.DirectorySeparatorChar }; } - + var pathParts = directoryPath.Split(Path.DirectorySeparatorChar); var resolvedPaths = new List(); @@ -863,7 +868,7 @@ private static IEnumerable ExpandDirectories(string directoryPath) private static string ExpandHomeDirectoryIfNeeded(string directoryPath) { if (!directoryPath.Contains('~')) return directoryPath; - + var homePath = (Environment.OSVersion.Platform == PlatformID.Unix || Environment.OSVersion.Platform == PlatformID.MacOSX) ? Environment.GetEnvironmentVariable("HOME") @@ -1036,7 +1041,7 @@ private async Task UploadFilesInSequentialAsync( { await updatePresignedUrlAsync(cancellationToken).ConfigureAwait(false); } - + // Break out of loop if file is successfully uploaded or already exists if (fileMetadata.resultStatus == ResultStatus.UPLOADED.ToString() || fileMetadata.resultStatus == ResultStatus.SKIPPED.ToString()) @@ -1429,7 +1434,7 @@ private void initFileMetadataForUpload() throw new ArgumentException("No file found for: " + TransferMetadata.src_locations[0].ToString()); } } - + private static bool IsDirectory(string path) { var attr = File.GetAttributes(path); diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs index 9e588e921..f56baf2fa 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFGCSClient.cs @@ -240,11 +240,9 @@ internal string generateFileURL(string stageLocation, string fileName) /// The encryption metadata for the header. public void UploadFile(SFFileMetadata fileMetadata, Stream fileBytesStream, SFEncryptionMetadata encryptionMetadata) { - String encryptionData = GetUploadEncryptionData(encryptionMetadata); - try { - WebRequest request = GetUploadFileRequest(fileMetadata, encryptionMetadata, encryptionData); + WebRequest request = GetUploadFileRequest(fileMetadata, encryptionMetadata); Stream dataStream = request.GetRequestStream(); fileBytesStream.Position = 0; @@ -271,11 +269,9 @@ public void UploadFile(SFFileMetadata fileMetadata, Stream fileBytesStream, SFEn /// The encryption metadata for the header. 
public async Task UploadFileAsync(SFFileMetadata fileMetadata, Stream fileByteStream, SFEncryptionMetadata encryptionMetadata, CancellationToken cancellationToken) { - String encryptionData = GetUploadEncryptionData(encryptionMetadata); - try { - WebRequest request = GetUploadFileRequest(fileMetadata, encryptionMetadata, encryptionData); + WebRequest request = GetUploadFileRequest(fileMetadata, encryptionMetadata); Stream dataStream = await request.GetRequestStreamAsync().ConfigureAwait(false); fileByteStream.Position = 0; @@ -294,14 +290,19 @@ public async Task UploadFileAsync(SFFileMetadata fileMetadata, Stream fileByteSt } } - private WebRequest GetUploadFileRequest(SFFileMetadata fileMetadata, SFEncryptionMetadata encryptionMetadata, String encryptionData) + private WebRequest GetUploadFileRequest(SFFileMetadata fileMetadata, SFEncryptionMetadata encryptionMetadata) { // Issue the POST/PUT request WebRequest request = _customWebRequest == null ? FormBaseRequest(fileMetadata, "PUT") : _customWebRequest; request.Headers.Add(GCS_METADATA_SFC_DIGEST, fileMetadata.sha256Digest); - request.Headers.Add(GCS_METADATA_MATDESC_KEY, encryptionMetadata.matDesc); - request.Headers.Add(GCS_METADATA_ENCRYPTIONDATAPROP, encryptionData); + if (fileMetadata.stageInfo.isClientSideEncrypted) + { + String encryptionData = GetUploadEncryptionData(ref fileMetadata, encryptionMetadata); + + request.Headers.Add(GCS_METADATA_MATDESC_KEY, encryptionMetadata.matDesc); + request.Headers.Add(GCS_METADATA_ENCRYPTIONDATAPROP, encryptionData); + } return request; } @@ -311,7 +312,7 @@ private WebRequest GetUploadFileRequest(SFFileMetadata fileMetadata, SFEncryptio /// /// The encryption metadata for the header. /// Stream content. - private String GetUploadEncryptionData(SFEncryptionMetadata encryptionMetadata) + private String GetUploadEncryptionData(ref SFFileMetadata fileMetadata, SFEncryptionMetadata encryptionMetadata) { // Create the encryption header value string encryptionData = JsonConvert.SerializeObject(new EncryptionData @@ -415,20 +416,23 @@ private void HandleDownloadResponse(HttpWebResponse response, SFFileMetadata fil WebHeaderCollection headers = response.Headers; // Get header values - dynamic encryptionData = JsonConvert.DeserializeObject(headers.Get(GCS_METADATA_ENCRYPTIONDATAPROP)); - string matDesc = headers.Get(GCS_METADATA_MATDESC_KEY); - - // Get encryption metadata from encryption data header value - SFEncryptionMetadata encryptionMetadata = null; - if (encryptionData != null) + var encryptionDataStr = headers.Get(GCS_METADATA_ENCRYPTIONDATAPROP); + if (encryptionDataStr != null) { - encryptionMetadata = new SFEncryptionMetadata + dynamic encryptionData = JsonConvert.DeserializeObject(encryptionDataStr); + string matDesc = headers.Get(GCS_METADATA_MATDESC_KEY); + + // Get encryption metadata from encryption data header value + if (encryptionData != null) { - iv = encryptionData["ContentEncryptionIV"], - key = encryptionData["WrappedContentKey"]["EncryptedKey"], - matDesc = matDesc - }; - fileMetadata.encryptionMetadata = encryptionMetadata; + SFEncryptionMetadata encryptionMetadata = new SFEncryptionMetadata + { + iv = encryptionData["ContentEncryptionIV"], + key = encryptionData["WrappedContentKey"]["EncryptedKey"], + matDesc = matDesc + }; + fileMetadata.encryptionMetadata = encryptionMetadata; + } } fileMetadata.sha256Digest = headers.Get(GCS_METADATA_SFC_DIGEST); diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs 
b/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs index b6896cc79..ea0eb3fd0 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFS3Client.cs @@ -422,10 +422,13 @@ private PutObjectRequest GetPutObjectRequest(ref AmazonS3Client client, SFFileMe ContentType = HTTP_HEADER_VALUE_OCTET_STREAM }; - // Populate the S3 Request Metadata - putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_IV, encryptionMetadata.iv); - putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_KEY, encryptionMetadata.key); - putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_MATDESC, encryptionMetadata.matDesc); + if (stageInfo.isClientSideEncrypted) + { + // Populate the S3 Request Metadata + putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_IV, encryptionMetadata.iv); + putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_KEY, encryptionMetadata.key); + putObjectRequest.Metadata.Add(AMZ_META_PREFIX + AMZ_MATDESC, encryptionMetadata.matDesc); + } return putObjectRequest; } diff --git a/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs b/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs index f0ad3f09e..98c2694cb 100644 --- a/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs +++ b/Snowflake.Data/Core/FileTransfer/StorageClient/SFSnowflakeAzureClient.cs @@ -158,13 +158,17 @@ private FileHeader HandleFileHeaderResponse(ref SFFileMetadata fileMetadata, Blo { fileMetadata.resultStatus = ResultStatus.UPLOADED.ToString(); - dynamic encryptionData = JsonConvert.DeserializeObject(response.Metadata["encryptiondata"]); - SFEncryptionMetadata encryptionMetadata = new SFEncryptionMetadata + SFEncryptionMetadata encryptionMetadata = null; + if (response.Metadata.TryGetValue("encryptiondata", out var encryptionDataStr)) { - iv = encryptionData["ContentEncryptionIV"], - key = encryptionData.WrappedContentKey["EncryptedKey"], - matDesc = response.Metadata["matdesc"] - }; + dynamic encryptionData = JsonConvert.DeserializeObject(encryptionDataStr); + encryptionMetadata = new SFEncryptionMetadata + { + iv = encryptionData["ContentEncryptionIV"], + key = encryptionData.WrappedContentKey["EncryptedKey"], + matDesc = response.Metadata["matdesc"] + }; + } return new FileHeader { @@ -242,31 +246,35 @@ public async Task UploadFileAsync(SFFileMetadata fileMetadata, Stream fileBytesS /// The encryption metadata for the header. 
private BlobClient GetUploadFileBlobClient(ref IDictionarymetadata, SFFileMetadata fileMetadata, SFEncryptionMetadata encryptionMetadata) { - // Create the JSON for the encryption data header - string encryptionData = JsonConvert.SerializeObject(new EncryptionData + if (fileMetadata.stageInfo.isClientSideEncrypted) { - EncryptionMode = "FullBlob", - WrappedContentKey = new WrappedContentInfo - { - KeyId = "symmKey1", - EncryptedKey = encryptionMetadata.key, - Algorithm = "AES_CBC_256" - }, - EncryptionAgent = new EncryptionAgentInfo + // Create the JSON for the encryption data header + string encryptionData = JsonConvert.SerializeObject(new EncryptionData { - Protocol = "1.0", - EncryptionAlgorithm = "AES_CBC_256" - }, - ContentEncryptionIV = encryptionMetadata.iv, - KeyWrappingMetadata = new KeyWrappingMetadataInfo - { - EncryptionLibrary = "Java 5.3.0" - } - }); + EncryptionMode = "FullBlob", + WrappedContentKey = new WrappedContentInfo + { + KeyId = "symmKey1", + EncryptedKey = encryptionMetadata.key, + Algorithm = "AES_CBC_256" + }, + EncryptionAgent = new EncryptionAgentInfo + { + Protocol = "1.0", + EncryptionAlgorithm = "AES_CBC_256" + }, + ContentEncryptionIV = encryptionMetadata.iv, + KeyWrappingMetadata = new KeyWrappingMetadataInfo + { + EncryptionLibrary = "Java 5.3.0" + } + }); + + // Create the metadata to use for the header + metadata.Add("encryptiondata", encryptionData); + metadata.Add("matdesc", encryptionMetadata.matDesc); + } - // Create the metadata to use for the header - metadata.Add("encryptiondata", encryptionData); - metadata.Add("matdesc", encryptionMetadata.matDesc); metadata.Add("sfcdigest", fileMetadata.sha256Digest); PutGetStageInfo stageInfo = fileMetadata.stageInfo; From d69e2cb42ebd0f076476b979dcd249a0470f4e71 Mon Sep 17 00:00:00 2001 From: Krzysztof Nozderko Date: Mon, 4 Nov 2024 16:09:33 +0100 Subject: [PATCH 5/5] MINOR: Bumped up DotNet connector MINOR version from 4.1.0 to 4.2.0 (#1054) Co-authored-by: Jenkins User <900904> --- Snowflake.Data/Snowflake.Data.csproj | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snowflake.Data/Snowflake.Data.csproj b/Snowflake.Data/Snowflake.Data.csproj index 35599c903..a0b09fade 100644 --- a/Snowflake.Data/Snowflake.Data.csproj +++ b/Snowflake.Data/Snowflake.Data.csproj @@ -11,7 +11,7 @@ Snowflake Computing, Inc Snowflake Connector for .NET Snowflake - 4.1.0 + 4.2.0 Full 7.3
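For illustration, a minimal C# sketch of the behavior covered by PATCH 1/5: after the rows_loaded aggregation fix in ResultSetUtil.CalculateUpdateCount, ExecuteNonQuery on a COPY INTO that loads several staged files should return the total number of rows across all files rather than only the first file's count. This sketch is not part of the patch series; the connection string, table name, and stage contents are placeholders.

using System;
using Snowflake.Data.Client;

class CopyRowCountSketch
{
    static void Main()
    {
        using (var conn = new SnowflakeDbConnection())
        {
            // Placeholder connection string; substitute real account credentials.
            conn.ConnectionString = "account=<account>;user=<user>;password=<password>";
            conn.Open();

            using (var cmd = (SnowflakeDbCommand)conn.CreateCommand())
            {
                // Assumes several CSV files were previously uploaded to the table stage with PUT.
                cmd.CommandText = "COPY INTO MY_TABLE FROM @%MY_TABLE PATTERN='.*.csv' FILE_FORMAT=(TYPE=CSV)";

                // With the fix, the driver sums rows_loaded over every row of the COPY
                // result set (one row per file), so this reflects all copied files.
                int rowsLoaded = cmd.ExecuteNonQuery();
                Console.WriteLine($"Rows loaded: {rowsLoaded}");
            }
        }
    }
}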