diff --git a/itests/hive-iceberg/pom.xml b/itests/hive-iceberg/pom.xml
index 170a6c89abd8..1ad246437768 100644
--- a/itests/hive-iceberg/pom.xml
+++ b/itests/hive-iceberg/pom.xml
@@ -46,6 +46,12 @@
tests
${project.version}
+
+ org.keycloak
+ keycloak-admin-client
+ ${keycloak.version}
+ test
+
org.apache.hive
hive-standalone-metastore-common
diff --git a/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientIT.java b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITBase.java
similarity index 79%
rename from itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientIT.java
rename to itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITBase.java
index db5329c64312..788850c7b243 100644
--- a/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientIT.java
+++ b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITBase.java
@@ -23,9 +23,9 @@
import org.apache.hadoop.hive.metastore.HiveMetaHookLoader;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.ServletSecurity;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.GetTableRequest;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -46,59 +46,54 @@
import org.apache.iceberg.hive.CatalogUtils;
import org.apache.iceberg.hive.HiveSchemaUtil;
import org.apache.iceberg.rest.extension.HiveRESTCatalogServerExtension;
-import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
-import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
-import org.junit.jupiter.api.TestInstance;
-import org.junit.jupiter.api.extension.RegisterExtension;
-
-import java.util.Collections;
-import java.util.Map;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
+import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertThrows;
/*
- * This test is an integration test for the hive-iceberg REST Catalog client and HMS REST Catalog Server.
- * It uses the HiveMetaStoreClient backed by hive-iceberg REST catalog adapter to connect to the HMS RESTCatalog Server.
+ * This is an integration test for the HiveMetaStoreClient and HMS REST Catalog Server. It creates and uses the
+ * HMS IMetaStoreClient backed by HiveMetaStoreClient adapter to connect to the HMS RESTCatalog Server.
* The flow is as follows:
* Hive ql wrapper --> HiveMetaStoreClient --> HiveRESTCatalogClient --> HMS RESTCatalog Server --> HMS
*/
-@TestInstance(TestInstance.Lifecycle.PER_CLASS)
-public class TestHiveRESTCatalogClientIT {
-
- private static final String DB_NAME = "ice_db";
- private static final String TABLE_NAME = "ice_tbl";
- private static final String CATALOG_NAME = "ice01";
- private static final String HIVE_ICEBERG_STORAGE_HANDLER = "org.apache.iceberg.mr.hive.HiveIcebergStorageHandler";
+public abstract class TestHiveRESTCatalogClientITBase {
+
+ static final String DB_NAME = "ice_db";
+ static final String TABLE_NAME = "ice_tbl";
+ static final String CATALOG_NAME = "ice01";
+ static final String HIVE_ICEBERG_STORAGE_HANDLER = "org.apache.iceberg.mr.hive.HiveIcebergStorageHandler";
+ static final String REST_CATALOG_PREFIX = String.format("%s%s.", CatalogUtils.CATALOG_CONFIG_PREFIX, CATALOG_NAME);
+
+ HiveConf hiveConf;
+ Configuration conf;
+ Hive hive;
+ IMetaStoreClient msClient;
- private Configuration conf;
- private HiveConf hiveConf;
- private Hive hive;
-
- private IMetaStoreClient msClient;
-
- @RegisterExtension
- private static final HiveRESTCatalogServerExtension REST_CATALOG_EXTENSION =
- HiveRESTCatalogServerExtension.builder(ServletSecurity.AuthType.NONE)
- .addMetaStoreSchemaClassName(ITestsSchemaInfo.class)
- .build();
+ abstract HiveRESTCatalogServerExtension getHiveRESTCatalogServerExtension();
- @BeforeAll
- public void setup() throws Exception {
- // Starting msClient with Iceberg REST Catalog client underneath
- String restCatalogPrefix = String.format("%s%s.", CatalogUtils.CATALOG_CONFIG_PREFIX, CATALOG_NAME);
+ public void setupConf() {
+ HiveRESTCatalogServerExtension restCatalogExtension = getHiveRESTCatalogServerExtension();
- conf = REST_CATALOG_EXTENSION.getConf();
+ conf = restCatalogExtension.getConf();
MetastoreConf.setVar(conf, MetastoreConf.ConfVars.METASTORE_CLIENT_IMPL,
"org.apache.iceberg.hive.client.HiveRESTCatalogClient");
conf.set(MetastoreConf.ConfVars.CATALOG_DEFAULT.getVarname(), CATALOG_NAME);
- conf.set(restCatalogPrefix + "uri", REST_CATALOG_EXTENSION.getRestEndpoint());
- conf.set(restCatalogPrefix + "type", CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
+ conf.set(REST_CATALOG_PREFIX + "uri", restCatalogExtension.getRestEndpoint());
+ conf.set(REST_CATALOG_PREFIX + "type", CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
+ }
+
+ @BeforeEach
+ void setup() throws Exception {
+ setupConf();
HiveMetaHookLoader hookLoader = tbl -> {
HiveStorageHandler storageHandler;
@@ -109,18 +104,19 @@ public void setup() throws Exception {
}
return storageHandler == null ? null : storageHandler.getMetaHook();
};
-
+
msClient = new HiveMetaStoreClient(conf, hookLoader);
hiveConf = new HiveConf(conf, HiveConf.class);
hive = Hive.get(hiveConf);
}
- @AfterAll public void tearDown() {
+ @AfterEach
+ public void tearDown() {
if (msClient != null) {
msClient.close();
}
}
-
+
@Test
public void testIceberg() throws Exception {
@@ -142,7 +138,7 @@ public void testIceberg() throws Exception {
// --- Get Databases ---
List dbs = msClient.getDatabases(CATALOG_NAME, "ice_*");
Assertions.assertEquals(1, dbs.size());
- Assertions.assertEquals(DB_NAME, dbs.get(0));
+ Assertions.assertEquals(DB_NAME, dbs.getFirst());
// --- Get All Databases ---
List allDbs = msClient.getAllDatabases(CATALOG_NAME);
@@ -151,7 +147,7 @@ public void testIceberg() throws Exception {
Assertions.assertTrue(allDbs.contains(DB_NAME));
// --- Create Table ---
- org.apache.hadoop.hive.metastore.api.Table tTable = createPartitionedTable(msClient,
+ Table tTable = createPartitionedTable(msClient,
CATALOG_NAME, DB_NAME, TABLE_NAME, new java.util.HashMap<>());
Assertions.assertNotNull(tTable);
Assertions.assertEquals(HiveMetaHook.ICEBERG, tTable.getParameters().get(HiveMetaHook.TABLE_TYPE));
@@ -166,7 +162,12 @@ public void testIceberg() throws Exception {
Assertions.assertTrue(msClient.tableExists(CATALOG_NAME, DB_NAME, TABLE_NAME));
// --- Get Table ---
- org.apache.hadoop.hive.metastore.api.Table table = msClient.getTable(CATALOG_NAME, DB_NAME, TABLE_NAME);
+ GetTableRequest getTableRequest = new GetTableRequest();
+ getTableRequest.setCatName(CATALOG_NAME);
+ getTableRequest.setDbName(DB_NAME);
+ getTableRequest.setTblName(TABLE_NAME);
+
+ Table table = msClient.getTable(getTableRequest);
Assertions.assertEquals(DB_NAME, table.getDbName());
Assertions.assertEquals(TABLE_NAME, table.getTableName());
Assertions.assertEquals(HIVE_ICEBERG_STORAGE_HANDLER, table.getParameters().get("storage_handler"));
@@ -193,8 +194,8 @@ public void testIceberg() throws Exception {
Assertions.assertFalse(msClient.getAllDatabases(CATALOG_NAME).contains(DB_NAME));
}
- private static Table createPartitionedTable(IMetaStoreClient db, String catName, String dbName, String tableName,
- Map tableParameters) throws Exception {
+ private static Table createPartitionedTable(IMetaStoreClient db, String catName, String dbName, String tableName,
+ Map tableParameters) throws Exception {
db.dropTable(catName, dbName, tableName);
Table table = new Table();
table.setCatName(catName);
@@ -222,6 +223,12 @@ private static Table createPartitionedTable(IMetaStoreClient db, String catName,
table.getParameters().put(TableProperties.DEFAULT_PARTITION_SPEC, specString);
db.createTable(table);
- return db.getTable(catName, dbName, tableName);
+
+ GetTableRequest getTableRequest = new GetTableRequest();
+      getTableRequest.setCatName(catName);
+      getTableRequest.setDbName(dbName);
+      getTableRequest.setTblName(tableName);
+
+ return db.getTable(getTableRequest);
}
}
diff --git a/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITNoAuth.java b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITNoAuth.java
new file mode 100644
index 000000000000..eaec8243a82f
--- /dev/null
+++ b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITNoAuth.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive;
+
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.iceberg.rest.extension.HiveRESTCatalogServerExtension;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.extension.RegisterExtension;
+
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHiveRESTCatalogClientITNoAuth extends TestHiveRESTCatalogClientITBase {
+
+ @RegisterExtension
+ private static final HiveRESTCatalogServerExtension REST_CATALOG_EXTENSION =
+ HiveRESTCatalogServerExtension.builder(ServletSecurity.AuthType.NONE)
+ .addMetaStoreSchemaClassName(ITestsSchemaInfo.class)
+ .build();
+
+ @Override
+ HiveRESTCatalogServerExtension getHiveRESTCatalogServerExtension() {
+ return REST_CATALOG_EXTENSION;
+ }
+}
diff --git a/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITOauth2.java b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITOauth2.java
new file mode 100644
index 000000000000..d2c483ae7690
--- /dev/null
+++ b/itests/hive-iceberg/src/test/java/org/apache/hive/TestHiveRESTCatalogClientITOauth2.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hive;
+
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.iceberg.rest.extension.HiveRESTCatalogServerExtension;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.extension.RegisterExtension;
+
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class TestHiveRESTCatalogClientITOauth2 extends TestHiveRESTCatalogClientITBase {
+
+ @RegisterExtension
+ private static final HiveRESTCatalogServerExtension REST_CATALOG_EXTENSION =
+ HiveRESTCatalogServerExtension.builder(ServletSecurity.AuthType.OAUTH2)
+ .addMetaStoreSchemaClassName(ITestsSchemaInfo.class)
+ .build();
+
+ @Override
+ public void setupConf() {
+ super.setupConf();
+
+ // Oauth2 properties
+ conf.set(REST_CATALOG_PREFIX + "rest.auth.type", "oauth2");
+ conf.set(REST_CATALOG_PREFIX + "oauth2-server-uri", REST_CATALOG_EXTENSION.getOAuth2TokenEndpoint());
+ conf.set(REST_CATALOG_PREFIX + "credential", REST_CATALOG_EXTENSION.getOAuth2ClientCredential());
+ }
+
+ @Override
+ HiveRESTCatalogServerExtension getHiveRESTCatalogServerExtension() {
+ return REST_CATALOG_EXTENSION;
+ }
+}
diff --git a/itests/qtest-iceberg/pom.xml b/itests/qtest-iceberg/pom.xml
index c7cd70d74795..9099ba38a6d7 100644
--- a/itests/qtest-iceberg/pom.xml
+++ b/itests/qtest-iceberg/pom.xml
@@ -480,6 +480,37 @@
testcontainers
test
+
+ org.keycloak
+ keycloak-admin-client
+ ${keycloak.version}
+ test
+
+
+ jakarta.annotation
+ jakarta.annotation-api
+ 2.1.1
+
+
+ com.nimbusds
+ oauth2-oidc-sdk
+ ${nimbus-oauth.version}
+
+
+ jakarta.xml.bind
+ jakarta.xml.bind-api
+ 4.0.4
+
+
+ jakarta.activation
+ jakarta.activation-api
+ 2.1.2
+
+
+ org.glassfish.jaxb
+ jaxb-runtime
+ ${jaxb-runtime.version}
+
diff --git a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/HiveRESTCatalogServerExtension.java b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/HiveRESTCatalogServerExtension.java
index bc39ab612ea4..771e03f9c855 100644
--- a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/HiveRESTCatalogServerExtension.java
+++ b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/HiveRESTCatalogServerExtension.java
@@ -24,33 +24,63 @@
import org.apache.hadoop.hive.metastore.ServletSecurity.AuthType;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.iceberg.rest.extension.OAuth2AuthorizationServer;
import org.apache.iceberg.rest.extension.RESTCatalogServer;
import org.junit.rules.ExternalResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.Map;
public class HiveRESTCatalogServerExtension extends ExternalResource {
private final Configuration conf;
+ private final OAuth2AuthorizationServer authorizationServer;
private final RESTCatalogServer restCatalogServer;
- private HiveRESTCatalogServerExtension(AuthType authType, Class extends MetaStoreSchemaInfo> schemaInfoClass) {
+ static final String HMS_ID = "hive-metastore";
+ static final String HMS_SECRET = "hive-metastore-secret";
+
+ private static final Logger LOG = LoggerFactory.getLogger(HiveRESTCatalogServerExtension.class);
+
+ private HiveRESTCatalogServerExtension(AuthType authType, Class extends MetaStoreSchemaInfo> schemaInfoClass,
+ Map configurations) {
this.conf = MetastoreConf.newMetastoreConf();
MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH, authType.name());
+ if (authType == AuthType.OAUTH2) {
+ authorizationServer = new OAuth2AuthorizationServer();
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH, "oauth2");
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_CLIENT_ID, HMS_ID);
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_CLIENT_SECRET, HMS_SECRET);
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_AUDIENCE, HMS_ID);
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_PRINCIPAL_MAPPER_REGEX_FIELD, "email");
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_PRINCIPAL_MAPPER_REGEX_PATTERN,
+ "(.*)@example.com");
+ } else {
+ authorizationServer = null;
+ }
+ configurations.forEach(conf::set);
restCatalogServer = new RESTCatalogServer();
if (schemaInfoClass != null) {
restCatalogServer.setSchemaInfoClass(schemaInfoClass);
}
}
- public Configuration getConf() {
- return conf;
- }
-
@Override
protected void before() throws Throwable {
+ if (authorizationServer != null) {
+ authorizationServer.start();
+ LOG.info("An authorization server {} started", authorizationServer.getIssuer());
+ MetastoreConf.setVar(conf, ConfVars.CATALOG_SERVLET_AUTH_OAUTH2_ISSUER, authorizationServer.getIssuer());
+ }
restCatalogServer.start(conf);
}
@Override
protected void after() {
+ if (authorizationServer != null) {
+ authorizationServer.stop();
+ }
restCatalogServer.stop();
}
@@ -58,9 +88,18 @@ public String getRestEndpoint() {
return restCatalogServer.getRestEndpoint();
}
+ public String getOAuth2TokenEndpoint() {
+ return authorizationServer.getTokenEndpoint();
+ }
+
+ public String getOAuth2ClientCredential() {
+ return authorizationServer.getClientCredential();
+ }
+
public static class Builder {
private final AuthType authType;
private Class extends MetaStoreSchemaInfo> metaStoreSchemaClass;
+ private final Map configurations = new HashMap<>();
private Builder(AuthType authType) {
this.authType = authType;
@@ -72,7 +111,7 @@ public Builder addMetaStoreSchemaClassName(Class extends MetaStoreSchemaInfo>
}
public HiveRESTCatalogServerExtension build() {
- return new HiveRESTCatalogServerExtension(authType, metaStoreSchemaClass);
+ return new HiveRESTCatalogServerExtension(authType, metaStoreSchemaClass, configurations);
}
}
diff --git a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java
index 70b9985ce0f9..07a49d3a2713 100644
--- a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java
+++ b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogGravitinoLlapLocalCliDriver.java
@@ -30,6 +30,7 @@
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.hive.CatalogUtils;
import org.apache.iceberg.hive.client.HiveRESTCatalogClient;
+import org.apache.iceberg.rest.extension.OAuth2AuthorizationServer;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
@@ -41,6 +42,7 @@
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.testcontainers.containers.Network;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.containers.wait.strategy.WaitAllStrategy;
@@ -80,12 +82,16 @@ public class TestIcebergRESTCatalogGravitinoLlapLocalCliDriver {
private static final DockerImageName GRAVITINO_IMAGE =
DockerImageName.parse("apache/gravitino-iceberg-rest:1.0.0");
+ private static final String OAUTH2_SERVER_ICEBERG_CLIENT_ID = "iceberg-client";
+ private static final String OAUTH2_SERVER_ICEBERG_CLIENT_SECRET = "iceberg-client-secret";
+
private final String name;
private final File qfile;
private GenericContainer> gravitinoContainer;
private Path warehouseDir;
private final ScheduledExecutorService fileSyncExecutor = Executors.newSingleThreadScheduledExecutor();
+ private OAuth2AuthorizationServer oAuth2AuthorizationServer;
@Parameters(name = "{0}")
public static List
*/
@SuppressWarnings("resource")
- private void startGravitinoContainer() {
+ private void startGravitinoContainer(Network dockerNetwork) {
gravitinoContainer = new GenericContainer<>(GRAVITINO_IMAGE)
.withExposedPorts(GRAVITINO_HTTP_PORT)
// Update entrypoint to create the warehouse directory before starting the server
@@ -175,6 +193,8 @@ private void startGravitinoContainer() {
),
GRAVITINO_H2_LIB
)
+ // Use the same Docker network as the OAuth2 server so they can communicate
+ .withNetwork(dockerNetwork)
// Wait for the server to be fully started
.waitingFor(
new WaitAllStrategy()
@@ -254,6 +274,11 @@ private void syncWarehouseDir() {
}
}
}
+
+ private void startOAuth2AuthorizationServer(Network dockerNetwork) {
+ oAuth2AuthorizationServer = new OAuth2AuthorizationServer(dockerNetwork, false);
+ oAuth2AuthorizationServer.start();
+ }
private void createWarehouseDir() {
try {
@@ -276,12 +301,26 @@ private void prepareGravitinoConfig() throws IOException {
String updatedContent = content
.replace("/WAREHOUSE_DIR", warehouseDir.toString())
+ .replace("OAUTH2_SERVER_URI", oAuth2AuthorizationServer.getIssuer())
+ .replace("OAUTH2_JWKS_URI", getJwksUri())
+ .replace("OAUTH2_CLIENT_ID", OAUTH2_SERVER_ICEBERG_CLIENT_ID)
+ .replace("OAUTH2_CLIENT_SECRET", OAUTH2_SERVER_ICEBERG_CLIENT_SECRET)
.replace("HTTP_PORT", String.valueOf(GRAVITINO_HTTP_PORT));
Path configFile = warehouseDir.resolve(GRAVITINO_CONF_FILE_TEMPLATE);
Files.writeString(configFile, updatedContent);
}
+ private String getJwksUri() {
+ String reachableHost = oAuth2AuthorizationServer.getKeycloackContainerDockerInternalHostName();
+ int internalPort = 8080; // Keycloak container's internal port
+ return oAuth2AuthorizationServer.getIssuer()
+ .replace("localhost", reachableHost)
+ .replace("127.0.0.1", reachableHost)
        // replace issuer's mapped port with the Keycloak container's internal port
+ .replaceFirst(":[0-9]+", ":" + internalPort);
+ }
+
@Test
public void testCliDriver() throws Exception {
CLI_ADAPTER.runTest(name, qfile);
diff --git a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
index 66a2985f4f99..2f5031601de6 100644
--- a/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
+++ b/itests/qtest-iceberg/src/test/java/org/apache/hadoop/hive/cli/TestIcebergRESTCatalogHMSLlapLocalCliDriver.java
@@ -28,6 +28,7 @@
import org.apache.hive.ITestsSchemaInfo;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.hive.CatalogUtils;
+import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
@@ -36,21 +37,14 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.Path;
import java.util.List;
-import java.util.stream.Stream;
@RunWith(Parameterized.class)
public class TestIcebergRESTCatalogHMSLlapLocalCliDriver {
- private static final Logger LOG = LoggerFactory.getLogger(
- TestIcebergRESTCatalogHMSLlapLocalCliDriver.class);
private static final String CATALOG_NAME = "ice01";
private static final CliAdapter CLI_ADAPTER =
new CliConfigs.TestIcebergRESTCatalogHMSLlapLocalCliDriver().getCliAdapter();
@@ -60,7 +54,7 @@ public class TestIcebergRESTCatalogHMSLlapLocalCliDriver {
@ClassRule
public static final HiveRESTCatalogServerExtension REST_CATALOG_EXTENSION =
- HiveRESTCatalogServerExtension.builder(ServletSecurity.AuthType.NONE)
+ HiveRESTCatalogServerExtension.builder(ServletSecurity.AuthType.OAUTH2)
.addMetaStoreSchemaClassName(ITestsSchemaInfo.class)
.build();
@@ -90,26 +84,16 @@ public void setupHiveConfig() {
MetastoreConf.setVar(conf, MetastoreConf.ConfVars.CATALOG_DEFAULT, CATALOG_NAME);
conf.set(restCatalogPrefix + "uri", REST_CATALOG_EXTENSION.getRestEndpoint());
conf.set(restCatalogPrefix + "type", CatalogUtil.ICEBERG_CATALOG_TYPE_REST);
- }
- @Before
- public void cleanUpRestCatalogServerTmpDir() throws IOException {
- try (Stream children = Files.list(REST_CATALOG_EXTENSION.getRestCatalogServer().getWarehouseDir())) {
- children
- .filter(path -> !path.getFileName().toString().equals("derby.log"))
- .filter(path -> !path.getFileName().toString().equals("metastore_db"))
- .forEach(path -> {
- try {
- if (Files.isDirectory(path)) {
- FileUtils.deleteDirectory(path.toFile());
- } else {
- Files.delete(path);
- }
- } catch (IOException e) {
- LOG.error("Failed to delete path: {}", path, e);
- }
- });
- }
+ // auth configs
+ conf.set(restCatalogPrefix + "rest.auth.type", "oauth2");
+ conf.set(restCatalogPrefix + "oauth2-server-uri", REST_CATALOG_EXTENSION.getOAuth2TokenEndpoint());
+ conf.set(restCatalogPrefix + "credential", REST_CATALOG_EXTENSION.getOAuth2ClientCredential());
+ }
+
+ @After
+ public void tearDown() throws IOException {
+ FileUtils.deleteDirectory(REST_CATALOG_EXTENSION.getRestCatalogServer().getWarehouseDir().toFile());
}
@Test
diff --git a/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf b/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf
index 12009e7fa8f3..7d88014eea7c 100644
--- a/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf
+++ b/itests/qtest-iceberg/src/test/resources/gravitino-h2-test-template.conf
@@ -11,4 +11,20 @@ gravitino.iceberg-rest.jdbc-password = ""
gravitino.iceberg-rest.jdbc-initialize = true
# --- Warehouse Location (where data files are stored) ---
-gravitino.iceberg-rest.warehouse = file:///WAREHOUSE_DIR/iceberg_warehouse
\ No newline at end of file
+gravitino.iceberg-rest.warehouse = file:///WAREHOUSE_DIR/iceberg_warehouse
+
+# --- OAuth2 Authentication ---
+gravitino.authenticators = oauth
+
+gravitino.authenticator.oauth.serverUri = OAUTH2_SERVER_URI
+gravitino.authenticator.oauth.tokenPath = /protocol/openid-connect/token
+gravitino.authenticator.oauth.clientId = OAUTH2_CLIENT_ID
+gravitino.authenticator.oauth.scope = openid catalog
+gravitino.authenticator.oauth.clientSecret = OAUTH2_CLIENT_SECRET
+
+gravitino.authenticator.oauth.tokenValidatorClass = org.apache.gravitino.server.authentication.JwksTokenValidator
+gravitino.authenticator.oauth.jwksUri = OAUTH2_JWKS_URI/protocol/openid-connect/certs
+gravitino.authenticator.oauth.provider = default
+gravitino.authenticator.oauth.principalFields = sub
+gravitino.authenticator.oauth.allowSkewSecs = 60
+gravitino.authenticator.oauth.serviceAudience = hive-metastore
diff --git a/pom.xml b/pom.xml
index 5b20fc538339..f8d9eaf01f03 100644
--- a/pom.xml
+++ b/pom.xml
@@ -240,6 +240,8 @@
5.3.39
2.4.4
2025-01-01T00:00:00Z
+ 26.0.6
+ 11.28
diff --git a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/extension/OAuth2AuthorizationServer.java b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/extension/OAuth2AuthorizationServer.java
index 4f7731d2569d..4f339e035cc2 100644
--- a/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/extension/OAuth2AuthorizationServer.java
+++ b/standalone-metastore/metastore-rest-catalog/src/test/java/org/apache/iceberg/rest/extension/OAuth2AuthorizationServer.java
@@ -32,6 +32,7 @@
import org.keycloak.representations.idm.ProtocolMapperRepresentation;
import org.keycloak.representations.idm.RealmRepresentation;
import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.Network;
import org.testcontainers.utility.DockerImageName;
public class OAuth2AuthorizationServer {
@@ -46,6 +47,18 @@ public class OAuth2AuthorizationServer {
private String issuer;
private String tokenEndpoint;
private String accessToken;
+ private final boolean accessTokenHeaderTypeRfc9068;
+ private final Network dockerNetwork;
+
+ public OAuth2AuthorizationServer(Network dockerNetwork, boolean accessTokenHeaderTypeRfc9068) {
+ this.dockerNetwork = dockerNetwork;
+ this.accessTokenHeaderTypeRfc9068 = accessTokenHeaderTypeRfc9068;
+ }
+
+ public OAuth2AuthorizationServer() {
+ dockerNetwork = Network.newNetwork();
+ accessTokenHeaderTypeRfc9068 = true;
+ }
private static RealmResource createRealm(Keycloak keycloak) {
var realm = new RealmRepresentation();
@@ -100,7 +113,7 @@ private static ProtocolMapperRepresentation createEmailClaim() {
return mapper;
}
- private static void createClient(RealmResource realm, List scopes,
+ private void createClient(RealmResource realm, List scopes,
List protocolMappers) {
var client = new ClientRepresentation();
client.setClientId(ICEBERG_CLIENT_ID);
@@ -110,7 +123,8 @@ private static void createClient(RealmResource realm, List scopes,
client.setPublicClient(false);
client.setServiceAccountsEnabled(true);
client.setOptionalClientScopes(scopes);
- client.setAttributes(Collections.singletonMap("access.token.header.type.rfc9068", "true"));
+    client.setAttributes(Collections.singletonMap("access.token.header.type.rfc9068",
+        String.valueOf(accessTokenHeaderTypeRfc9068)));
client.setProtocolMappers(protocolMappers);
realm.clients().create(client).close();
}
@@ -128,12 +142,13 @@ private static String getAccessToken(String url, List scopes) {
}
}
- void start() {
+ public void start() {
container = new GenericContainer<>(DockerImageName.parse("quay.io/keycloak/keycloak:26.3.4"))
.withEnv("KEYCLOAK_ADMIN", "admin")
.withEnv("KEYCLOAK_ADMIN_PASSWORD", "admin")
.withCommand("start-dev")
.withExposedPorts(8080)
+ .withNetwork(dockerNetwork)
.withStartupTimeout(Duration.ofMinutes(5));
container.start();
@@ -152,26 +167,30 @@ void start() {
accessToken = getAccessToken(base, List.of("catalog"));
}
- void stop() {
+ public void stop() {
if (container != null) {
container.stop();
keycloak.close();
}
}
- String getIssuer() {
+ public String getIssuer() {
return issuer;
}
- String getTokenEndpoint() {
+ public String getTokenEndpoint() {
return tokenEndpoint;
}
- String getClientCredential() {
+ public String getClientCredential() {
return "%s:%s".formatted(ICEBERG_CLIENT_ID, ICEBERG_CLIENT_SECRET);
}
- String getAccessToken() {
+ public String getAccessToken() {
return accessToken;
}
+
+ public String getKeycloackContainerDockerInternalHostName() {
+ return container.getNetworkAliases().get(0);
+ }
}