6 changes: 6 additions & 0 deletions .github/workflows/ci.yml
@@ -911,6 +911,7 @@ jobs:
- suite-delta-lake-databricks143
- suite-delta-lake-databricks154
- suite-delta-lake-databricks164
- suite-delta-lake-databricks173
- suite-exasol
- suite-ranger
- suite-gcs
@@ -961,6 +962,9 @@ jobs:
- suite: suite-delta-lake-databricks164
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks173
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-snowflake
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.SNOWFLAKE_PASSWORD != '' }}
@@ -1010,6 +1014,7 @@ jobs:
DATABRICKS_143_JDBC_URL: ""
DATABRICKS_154_JDBC_URL: ""
DATABRICKS_164_JDBC_URL: ""
DATABRICKS_173_JDBC_URL: ""
DATABRICKS_LOGIN: ""
DATABRICKS_TOKEN: ""
GCP_CREDENTIALS_KEY: ""
@@ -1082,6 +1087,7 @@ jobs:
DATABRICKS_143_JDBC_URL: ${{ vars.DATABRICKS_143_JDBC_URL }}
DATABRICKS_154_JDBC_URL: ${{ vars.DATABRICKS_154_JDBC_URL }}
DATABRICKS_164_JDBC_URL: ${{ vars.DATABRICKS_164_JDBC_URL }}
DATABRICKS_173_JDBC_URL: ${{ vars.DATABRICKS_173_JDBC_URL }}
DATABRICKS_LOGIN: token
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
GCP_CREDENTIALS_KEY: ${{ secrets.GCP_CREDENTIALS_KEY }}
2 changes: 1 addition & 1 deletion docs/src/main/sphinx/connector/delta-lake.md
@@ -13,7 +13,7 @@ data.
To connect to Databricks Delta Lake, you need:

- Tables written by Databricks Runtime 7.3 LTS, 9.1 LTS, 10.4 LTS, 11.3 LTS,
12.2 LTS, 13.3 LTS, 14.3 LTS, 15.4 LTS and 16.4 LTS are supported.
12.2 LTS, 13.3 LTS, 14.3 LTS, 15.4 LTS, 16.4 LTS and 17.3 LTS are supported.
- Deployments using AWS, HDFS, Azure Storage, and Google Cloud Storage (GCS) are
fully supported.
- Network access from the coordinator and workers to the Delta Lake storage.
@@ -252,7 +252,7 @@ public static Map<String, Object> jsonEncodeMax(Map<String, Optional<Statistics<
private static Map<String, Object> jsonEncode(Map<String, Optional<Statistics<?>>> stats, Map<String, Type> typeForColumn, BiFunction<Type, Statistics<?>, Optional<Object>> accessor)
{
Map<String, Optional<Object>> allStats = stats.entrySet().stream()
.filter(entry -> entry.getValue() != null && entry.getValue().isPresent() && !entry.getValue().get().isEmpty())
.filter(entry -> entry.getValue() != null && entry.getValue().isPresent() && !entry.getValue().get().isEmpty() && typeForColumn.containsKey(entry.getKey()))
Contributor: What extra stats are written in the deltas? Are they for complex types?

Member Author: No, it was an int column.

Member Author (@ebyhr, Oct 31, 2025):

CREATE TABLE default.test_173 (lower int, UPPER int, downpart int) USING DELTA PARTITIONED BY (downpart) LOCATION '...';
INSERT INTO default.test_173 VALUES (1, 1, 0), (2, 2, 0), (3, 3, 1);

parquet-tools cat downpart=0/part-00000-31146b9a-9daf-4e82-b055-4dce0911af57.c000.snappy.parquet
[{"Downpart":0,"Lower":1,"UPPER":1},{"Downpart":0,"Lower":2,"UPPER":2}]

parquet-tools cat downpart=1/part-00000-7e668519-eb3a-4322-a1e4-8f6ba7b54a7b.c000.snappy.parquet
[{"Downpart":1,"Lower":3,"UPPER":3}]

cat 00000000000000000001.json | jq 'add.stats'
null
"{\"numRecords\":2,\"minValues\":{\"lower\":1,\"UPPER\":1},\"maxValues\":{\"lower\":2,\"UPPER\":2},\"nullCount\":{\"lower\":0,\"UPPER\":0},\"tightBounds\":true}"
"{\"numRecords\":1,\"minValues\":{\"lower\":3,\"UPPER\":3},\"maxValues\":{\"lower\":3,\"UPPER\":3},\"nullCount\":{\"lower\":0,\"UPPER\":0},\"tightBounds\":true}"

The add entry doesn't contain stats for the partition column, so simply skipping such entries should be fine.

.collect(toImmutableMap(Map.Entry::getKey, entry -> accessor.apply(typeForColumn.get(entry.getKey()), entry.getValue().get())));

return allStats.entrySet().stream()
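To make the new guard's effect concrete, here is a minimal, hypothetical sketch of the filtering logic (standalone simplified Java; the class name, the String stand-in for the connector's Type, and the literal values are all invented for illustration, not the connector's actual code). A stats key that exists only in the Parquet footer, like the partition column in the repro above, is now skipped instead of reaching the collector with a null type:

import java.util.Map;
import java.util.Optional;

public class StatsFilterSketch
{
    public static void main(String[] args)
    {
        // Stats keyed by the column names as written in the Parquet footer;
        // the partition column shows up here even though the schema-derived
        // type map below does not know it.
        Map<String, Optional<Integer>> stats = Map.of(
                "lower", Optional.of(1),
                "Downpart", Optional.of(0));
        Map<String, String> typeForColumn = Map.of("lower", "integer");

        stats.entrySet().stream()
                .filter(entry -> entry.getValue().isPresent() && typeForColumn.containsKey(entry.getKey()))
                .forEach(entry -> System.out.printf("%s -> %s (%s)%n",
                        entry.getKey(), entry.getValue().get(), typeForColumn.get(entry.getKey())));
        // Prints only "lower -> 1 (integer)". Without the containsKey check,
        // "Downpart" would flow onward and the type lookup would return null.
    }
}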
@@ -98,8 +98,9 @@ public final class TestGroups
public static final String DELTA_LAKE_DATABRICKS_133 = "delta-lake-databricks-133";
public static final String DELTA_LAKE_DATABRICKS_143 = "delta-lake-databricks-143";
public static final String DELTA_LAKE_DATABRICKS_154 = "delta-lake-databricks-154";
public static final String DELTA_LAKE_DATABRICKS_164 = "delta-lake-databricks-164";
    // TODO: Remove this once we support generatedColumns, particularly for writes, in Delta Lake
public static final String DELTA_LAKE_EXCLUDE_164 = "delta-lake-exclude-164";
public static final String DELTA_LAKE_EXCLUDE_173 = "delta-lake-exclude-173";
public static final String DELTA_LAKE_ALLUXIO_CACHING = "delta-lake-alluxio-caching";
public static final String HUDI = "hudi";
public static final String PARQUET = "parquet";
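One subtlety worth spelling out before the new files: after this PR the 16.4 suite switches to an opt-in model (it runs only tests tagged DELTA_LAKE_DATABRICKS_164), while the new 17.3 suite keeps the opt-out model (it runs all DELTA_LAKE_DATABRICKS tests except those tagged DELTA_LAKE_EXCLUDE_173). A hypothetical test that should run on 16.4 but not on 17.3 would therefore be tagged like this (illustrative only, not part of the PR; assumes the usual product-test dependencies):

import org.testng.annotations.Test;

import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_173;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;

public class ExampleDeltaLakeCompatibilityTest
{
    // Picked up by SuiteDeltaLakeDatabricks164 via the opt-in
    // DELTA_LAKE_DATABRICKS_164 group, and skipped by
    // SuiteDeltaLakeDatabricks173, which excludes DELTA_LAKE_EXCLUDE_173.
    @Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_EXCLUDE_173, PROFILE_SPECIFIC_TESTS})
    public void testFeatureNotYetSupportedOn173()
    {
        // test body omitted in this sketch
    }
}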
@@ -0,0 +1,38 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.env.environment;

import com.google.inject.Inject;
import io.trino.tests.product.launcher.docker.DockerFiles;
import io.trino.tests.product.launcher.env.common.Standard;
import io.trino.tests.product.launcher.env.common.TestsEnvironment;

import static io.trino.testing.SystemEnvironmentUtils.requireEnv;

@TestsEnvironment
public class EnvSinglenodeDeltaLakeDatabricks173
extends AbstractSinglenodeDeltaLakeDatabricks
{
@Inject
public EnvSinglenodeDeltaLakeDatabricks173(Standard standard, DockerFiles dockerFiles)
{
super(standard, dockerFiles);
}

@Override
String databricksTestJdbcUrl()
{
return requireEnv("DATABRICKS_173_JDBC_URL");
}
}
@@ -22,8 +22,7 @@
import java.util.List;

import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteDeltaLakeDatabricks164
@@ -34,8 +33,7 @@ public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks164.class)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
.withExcludedGroups(DELTA_LAKE_EXCLUDE_164)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS_164)
.withExcludedTests(getExcludedTests())
.build());
}
@@ -0,0 +1,42 @@
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.tests.product.launcher.suite.suites;

import com.google.common.collect.ImmutableList;
import io.trino.tests.product.launcher.env.EnvironmentConfig;
import io.trino.tests.product.launcher.env.environment.EnvSinglenodeDeltaLakeDatabricks173;
import io.trino.tests.product.launcher.suite.SuiteDeltaLakeDatabricks;
import io.trino.tests.product.launcher.suite.SuiteTestRun;

import java.util.List;

import static io.trino.tests.product.TestGroups.CONFIGURED_FEATURES;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_173;
import static io.trino.tests.product.launcher.suite.SuiteTestRun.testOnEnvironment;

public class SuiteDeltaLakeDatabricks173
extends SuiteDeltaLakeDatabricks
{
@Override
public List<SuiteTestRun> getTestRuns(EnvironmentConfig config)
{
return ImmutableList.of(
testOnEnvironment(EnvSinglenodeDeltaLakeDatabricks173.class)
.withGroups(CONFIGURED_FEATURES, DELTA_LAKE_DATABRICKS)
.withExcludedGroups(DELTA_LAKE_EXCLUDE_173)
.withExcludedTests(getExcludedTests())
.build());
}
}
@@ -27,6 +27,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DatabricksVersion.DATABRICKS_143_RUNTIME_VERSION;
@@ -73,7 +74,7 @@ public void testNonLowercaseColumnNames()
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testNonLowercaseFieldNames()
{
@@ -35,6 +35,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.S3ClientFactory.createS3Client;
@@ -539,7 +540,7 @@ public void testMergeDeleteIntoTableWithCdfEnabled(String columnMappingMode)
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testMergeMixedDeleteAndUpdateIntoTableWithCdfEnabled()
{
@@ -38,6 +38,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.S3ClientFactory.createS3Client;
@@ -276,7 +277,7 @@ private void trinoUsesCheckpointInterval(String deltaTableProperties)
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testDatabricksUsesCheckpointInterval()
{
@@ -30,7 +30,8 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_EXCLUDE_173;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -53,7 +54,7 @@
public class TestDeltaLakeColumnMappingMode
extends BaseTestDeltaLakeS3Storage
{
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testColumnMappingModeNone()
{
@@ -240,9 +241,9 @@ public void testTrinoColumnMappingMode(String mode)
")"));
}

// When setting the table property `delta.columnMapping.mode` on Databricks 16.4, it will enable the `delta.feature.generatedColumns`
// When setting the table property `delta.columnMapping.mode` on Databricks >= 16.x, it will enable the `delta.feature.generatedColumns`
// feature, which is not supported by Trino.
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingDataProvider")
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_EXCLUDE_173, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS}, dataProvider = "columnMappingDataProvider")
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testDeltaColumnMappingMode(String mode)
{
@@ -28,6 +28,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -42,7 +43,7 @@
public class TestDeltaLakeCreateTableAsSelectCompatibility
extends BaseTestDeltaLakeS3Storage
{
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testTrinoTypesWithDatabricks()
{
@@ -31,6 +31,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -44,7 +45,7 @@
public class TestDeltaLakeInsertCompatibility
extends BaseTestDeltaLakeS3Storage
{
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testInsertCompatibility()
{
@@ -80,7 +81,7 @@ public void testInsertCompatibility()
}
}

@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testPartitionedInsertCompatibility()
{
@@ -27,6 +27,7 @@
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_133;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_143;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_154;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_DATABRICKS_164;
import static io.trino.tests.product.TestGroups.DELTA_LAKE_OSS;
import static io.trino.tests.product.TestGroups.PROFILE_SPECIFIC_TESTS;
import static io.trino.tests.product.deltalake.util.DeltaLakeTestUtils.DATABRICKS_COMMUNICATION_FAILURE_ISSUE;
@@ -40,7 +41,7 @@
public class TestDeltaLakeSelectCompatibility
extends BaseTestDeltaLakeS3Storage
{
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Test(groups = {DELTA_LAKE_DATABRICKS, DELTA_LAKE_DATABRICKS_133, DELTA_LAKE_DATABRICKS_143, DELTA_LAKE_DATABRICKS_154, DELTA_LAKE_DATABRICKS_164, DELTA_LAKE_OSS, PROFILE_SPECIFIC_TESTS})
@Flaky(issue = DATABRICKS_COMMUNICATION_FAILURE_ISSUE, match = DATABRICKS_COMMUNICATION_FAILURE_MATCH)
public void testPartitionedSelectSpecialCharacters()
{