diff --git a/getting-started/spark/notebooks/SparkPolaris.ipynb b/getting-started/spark/notebooks/SparkPolaris.ipynb
index adb2f1a2ce..48fff32ace 100644
--- a/getting-started/spark/notebooks/SparkPolaris.ipynb
+++ b/getting-started/spark/notebooks/SparkPolaris.ipynb
@@ -256,7 +256,7 @@
"\n",
"spark = (SparkSession.builder\n",
" .config(\"spark.sql.catalog.spark_catalog\", \"org.apache.iceberg.spark.SparkSessionCatalog\")\n",
- " .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.7.1,org.apache.hadoop:hadoop-aws:3.4.0,software.amazon.awssdk:bundle:2.23.19,software.amazon.awssdk:url-connection-client:2.23.19\")\n",
+ " .config(\"spark.jars.packages\", \"org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1,org.apache.hadoop:hadoop-aws:3.4.0,software.amazon.awssdk:bundle:2.23.19,software.amazon.awssdk:url-connection-client:2.23.19\")\n",
" .config('spark.sql.iceberg.vectorization.enabled', 'false')\n",
" \n",
" # Configure the 'polaris' catalog as an Iceberg rest catalog\n",
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 1f868f7029..ad834e45bb 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -19,7 +19,7 @@
[versions]
hadoop = "3.4.1"
-iceberg = "1.7.1"
+iceberg = "1.8.1" # Ensure to update the iceberg version in regtests to keep regtests up-to-date
quarkus = "3.19.4"
immutables = "2.10.1"
picocli = "4.7.6"
diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisApplicationIntegrationTest.java b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisApplicationIntegrationTest.java
index 2437601e06..99e736f798 100644
--- a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisApplicationIntegrationTest.java
+++ b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisApplicationIntegrationTest.java
@@ -454,8 +454,7 @@ public void testIcebergRegisterTableInExternalCatalog() throws IOException {
.assignUUID()
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
- .addSchema(
- new Schema(Types.NestedField.of(1, false, "col1", Types.StringType.get())), 1)
+ .addSchema(new Schema(Types.NestedField.of(1, false, "col1", Types.StringType.get())))
.build();
TableMetadataParser.write(tableMetadata, fileIo.newOutputFile(metadataLocation));
@@ -498,7 +497,7 @@ public void testIcebergUpdateTableInExternalCatalog() throws IOException {
.assignUUID()
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
- .addSchema(new Schema(col1), 1)
+ .addSchema(new Schema(col1))
.build();
TableMetadataParser.write(tableMetadata, fileIo.newOutputFile(metadataLocation));
@@ -546,8 +545,7 @@ public void testIcebergDropTableInExternalCatalog() throws IOException {
.assignUUID()
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
- .addSchema(
- new Schema(Types.NestedField.of(1, false, "col1", Types.StringType.get())), 1)
+ .addSchema(new Schema(Types.NestedField.of(1, false, "col1", Types.StringType.get())))
.build();
TableMetadataParser.write(tableMetadata, fileIo.newOutputFile(metadataLocation));
diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogIntegrationTest.java b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogIntegrationTest.java
index a832553cf8..83c9791b3c 100644
--- a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogIntegrationTest.java
+++ b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogIntegrationTest.java
@@ -76,7 +76,6 @@
import org.apache.polaris.core.admin.model.ViewGrant;
import org.apache.polaris.core.admin.model.ViewPrivilege;
import org.apache.polaris.core.config.FeatureConfiguration;
-import org.apache.polaris.core.config.PolarisConfiguration;
import org.apache.polaris.core.entity.CatalogEntity;
import org.apache.polaris.core.entity.PolarisEntityConstants;
import org.apache.polaris.service.it.env.CatalogApi;
@@ -105,7 +104,7 @@
* @implSpec @implSpec This test expects the server to be configured with the following features
* configured:
*
- * - {@link PolarisConfiguration#ALLOW_EXTERNAL_CATALOG_CREDENTIAL_VENDING}: {@code false}
+ * - {@link FeatureConfiguration#ALLOW_EXTERNAL_CATALOG_CREDENTIAL_VENDING}: {@code false}
*
*/
@ExtendWith(PolarisIntegrationTestExtension.class)
@@ -118,16 +117,16 @@ public class PolarisRestCatalogIntegrationTest extends CatalogTests
private static URI externalCatalogBase;
protected static final String VIEW_QUERY = "select * from ns1.layer1_table";
- private static String principalRoleName;
private static ClientCredentials adminCredentials;
- private static PrincipalWithCredentials principalCredentials;
private static PolarisApiEndpoints endpoints;
private static PolarisClient client;
private static ManagementApi managementApi;
- private static CatalogApi catalogApi;
+ private PrincipalWithCredentials principalCredentials;
+ private CatalogApi catalogApi;
private RESTCatalog restCatalog;
private String currentCatalogName;
+ private Map<String, String> restCatalogConfig;
private final String catalogBaseLocation =
s3BucketBase + "/" + System.getenv("USER") + "/path/to/data";
@@ -159,10 +158,6 @@ static void setup(
endpoints = apiEndpoints;
client = polarisClient(endpoints);
managementApi = client.managementApi(credentials);
- String principalName = client.newEntityName("snowman-rest");
- principalRoleName = client.newEntityName("rest-admin");
- principalCredentials = managementApi.createPrincipalWithRole(principalName, principalRoleName);
- catalogApi = client.catalogApi(principalCredentials);
URI testRootUri = IntegrationTestsHelper.getTemporaryDirectory(tempDir);
s3BucketBase = testRootUri.resolve("my-bucket");
externalCatalogBase = testRootUri.resolve("external-catalog");
@@ -175,10 +170,9 @@ static void close() throws Exception {
@BeforeEach
public void before(TestInfo testInfo) {
- String principalName = "snowman-rest-" + UUID.randomUUID();
- principalRoleName = "rest-admin-" + UUID.randomUUID();
- PrincipalWithCredentials principalCredentials =
- managementApi.createPrincipalWithRole(principalName, principalRoleName);
+ String principalName = client.newEntityName("snowman-rest");
+ String principalRoleName = client.newEntityName("rest-admin");
+ principalCredentials = managementApi.createPrincipalWithRole(principalName, principalRoleName);
catalogApi = client.catalogApi(principalCredentials);
@@ -219,29 +213,26 @@ public void before(TestInfo testInfo) {
managementApi.createCatalog(principalRoleName, catalog);
- Optional<RestCatalogConfig> restCatalogConfig =
+ restCatalogConfig =
testInfo
.getTestMethod()
- .flatMap(
- m ->
- Optional.ofNullable(
- m.getAnnotation(
- PolarisRestCatalogIntegrationTest.RestCatalogConfig.class)));
- ImmutableMap.Builder<String, String> extraPropertiesBuilder = ImmutableMap.builder();
- restCatalogConfig.ifPresent(
- config -> {
- for (int i = 0; i < config.value().length; i += 2) {
- extraPropertiesBuilder.put(config.value()[i], config.value()[i + 1]);
- }
- });
-
- restCatalog =
- IcebergHelper.restCatalog(
- client,
- endpoints,
- principalCredentials,
- currentCatalogName,
- extraPropertiesBuilder.build());
+ .map(m -> m.getAnnotation(RestCatalogConfig.class))
+ .map(RestCatalogConfig::value)
+ .map(
+ values -> {
+ if (values.length % 2 != 0) {
+ throw new IllegalArgumentException(
+ String.format("Missing value for config '%s'", values[values.length - 1]));
+ }
+ Map<String, String> config = new HashMap<>();
+ for (int i = 0; i < values.length; i += 2) {
+ config.put(values[i], values[i + 1]);
+ }
+ return config;
+ })
+ .orElse(ImmutableMap.of());
+
+ restCatalog = initCatalog(currentCatalogName, ImmutableMap.of());
}
@AfterEach
@@ -254,6 +245,26 @@ protected RESTCatalog catalog() {
return restCatalog;
}
+ /**
+ * Initialize a RESTCatalog for testing.
+ *
+ * @param catalogName this parameter is currently unused.
+ * @param additionalProperties additional properties to apply on top of the default test settings
+ * @return a configured instance of RESTCatalog
+ */
+ @Override
+ protected RESTCatalog initCatalog(String catalogName, Map<String, String> additionalProperties) {
+ ImmutableMap.Builder<String, String> extraPropertiesBuilder = ImmutableMap.builder();
+ extraPropertiesBuilder.putAll(restCatalogConfig);
+ extraPropertiesBuilder.putAll(additionalProperties);
+ return IcebergHelper.restCatalog(
+ client,
+ endpoints,
+ principalCredentials,
+ currentCatalogName,
+ extraPropertiesBuilder.buildKeepingLast());
+ }
+
@Override
protected boolean requiresNamespaceCreate() {
return true;
diff --git a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogViewIntegrationBase.java b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogViewIntegrationBase.java
index 28c38c99e0..8dc48837bf 100644
--- a/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogViewIntegrationBase.java
+++ b/integration-tests/src/main/java/org/apache/polaris/service/it/test/PolarisRestCatalogViewIntegrationBase.java
@@ -42,6 +42,8 @@
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -113,7 +115,16 @@ public void before(TestInfo testInfo) {
managementApi.createCatalog(principalRoleName, catalog);
restCatalog =
- IcebergHelper.restCatalog(client, endpoints, principalCredentials, catalogName, Map.of());
+ IcebergHelper.restCatalog(
+ client,
+ endpoints,
+ principalCredentials,
+ catalogName,
+ Map.of(
+ org.apache.iceberg.CatalogProperties.VIEW_DEFAULT_PREFIX + "key1",
+ "catalog-default-key1",
+ org.apache.iceberg.CatalogProperties.VIEW_DEFAULT_PREFIX + "key2",
+ "catalog-default-key2"));
}
@AfterEach
@@ -156,4 +167,16 @@ protected boolean supportsServerSideRetry() {
protected boolean overridesRequestedLocation() {
return true;
}
+
+ /** TODO: Unblock this test, see: https://github.com/apache/polaris/issues/1273 */
+ @Override
+ @Test
+ @Disabled(
+ """
+ Disabled because the behavior is not applicable to Polaris.
+ To unblock, update this to expect an exception and add a Polaris-specific test.
+ """)
+ public void createViewWithCustomMetadataLocation() {
+ super.createViewWithCustomMetadataLocation();
+ }
}
diff --git a/quarkus/service/build.gradle.kts b/quarkus/service/build.gradle.kts
index 2185c3ca11..ebc2c2917e 100644
--- a/quarkus/service/build.gradle.kts
+++ b/quarkus/service/build.gradle.kts
@@ -23,6 +23,11 @@ plugins {
id("polaris-quarkus")
}
+configurations.all {
+ // exclude junit4 dependency for this module
+ exclude(group = "junit", module = "junit")
+}
+
dependencies {
implementation(project(":polaris-core"))
implementation(project(":polaris-api-management-service"))
diff --git a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogHandlerWrapperAuthzTest.java b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogHandlerWrapperAuthzTest.java
index 527f23d0be..e028e5e3b3 100644
--- a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogHandlerWrapperAuthzTest.java
+++ b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogHandlerWrapperAuthzTest.java
@@ -1721,7 +1721,7 @@ public Catalog createCallContextCatalog(
FileIO fileIO = CatalogUtil.loadFileIO(fileIoImpl, Map.of(), new Configuration());
TableMetadata tableMetadata =
TableMetadata.buildFromEmpty()
- .addSchema(SCHEMA, SCHEMA.highestFieldId())
+ .addSchema(SCHEMA)
.setLocation(
String.format("%s/bucket/table/metadata/v1.metadata.json", storageLocation))
.addPartitionSpec(PartitionSpec.unpartitioned())
diff --git a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogTest.java b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogTest.java
index 8d520b9a9f..4a5506fa22 100644
--- a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogTest.java
+++ b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogTest.java
@@ -62,7 +62,6 @@
import org.apache.iceberg.UpdateSchema;
import org.apache.iceberg.catalog.CatalogTests;
import org.apache.iceberg.catalog.Namespace;
-import org.apache.iceberg.catalog.SupportsNamespaces;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.exceptions.AlreadyExistsException;
import org.apache.iceberg.exceptions.BadRequestException;
@@ -128,6 +127,7 @@
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.junit.jupiter.params.ParameterizedTest;
@@ -176,15 +176,12 @@ public Map getConfigOverrides() {
private IcebergCatalog catalog;
private CallContext callContext;
- private AwsStorageConfigInfo storageConfigModel;
- private StsClient stsClient;
private String realmName;
private PolarisMetaStoreManager metaStoreManager;
private PolarisCallContext polarisContext;
private PolarisAdminService adminService;
private PolarisEntityManager entityManager;
private FileIOFactory fileIOFactory;
- private AuthenticatedPolarisPrincipal authenticatedRoot;
private PolarisEntity catalogEntity;
private SecurityContext securityContext;
@@ -231,7 +228,8 @@ public void before(TestInfo testInfo) {
"root")
.getEntity()));
- authenticatedRoot = new AuthenticatedPolarisPrincipal(rootEntity, Set.of());
+ AuthenticatedPolarisPrincipal authenticatedRoot =
+ new AuthenticatedPolarisPrincipal(rootEntity, Set.of());
securityContext = Mockito.mock(SecurityContext.class);
when(securityContext.getUserPrincipal()).thenReturn(authenticatedRoot);
@@ -245,7 +243,7 @@ public void before(TestInfo testInfo) {
new PolarisAuthorizerImpl(new PolarisConfigurationStore() {}));
String storageLocation = "s3://my-bucket/path/to/data";
- storageConfigModel =
+ AwsStorageConfigInfo storageConfigModel =
AwsStorageConfigInfo.builder()
.setRoleArn("arn:aws:iam::012345678901:role/jdoe")
.setExternalId("externalId")
@@ -266,10 +264,6 @@ public void before(TestInfo testInfo) {
.setStorageConfigurationInfo(storageConfigModel, storageLocation)
.build());
- PolarisPassthroughResolutionView passthroughView =
- new PolarisPassthroughResolutionView(
- callContext, entityManager, securityContext, CATALOG_NAME);
- TaskExecutor taskExecutor = Mockito.mock();
RealmEntityManagerFactory realmEntityManagerFactory =
new RealmEntityManagerFactory(createMockMetaStoreManagerFactory());
this.fileIOFactory =
@@ -292,19 +286,7 @@ public void before(TestInfo testInfo) {
isA(AwsStorageConfigurationInfo.class)))
.thenReturn((PolarisStorageIntegration) storageIntegration);
- this.catalog =
- new IcebergCatalog(
- entityManager,
- metaStoreManager,
- callContext,
- passthroughView,
- securityContext,
- taskExecutor,
- fileIOFactory);
- this.catalog.initialize(
- CATALOG_NAME,
- ImmutableMap.of(
- CatalogProperties.FILE_IO_IMPL, "org.apache.iceberg.inmemory.InMemoryFileIO"));
+ this.catalog = initCatalog("my-catalog", ImmutableMap.of());
}
@AfterEach
@@ -318,6 +300,37 @@ protected IcebergCatalog catalog() {
return catalog;
}
+ /**
+ * Initialize a IcebergCatalog for testing.
+ *
+ * @param catalogName this parameter is currently unused.
+ * @param additionalProperties additional properties to apply on top of the default test settings
+ * @return a configured instance of IcebergCatalog
+ */
+ @Override
+ protected IcebergCatalog initCatalog(
+ String catalogName, Map<String, String> additionalProperties) {
+ PolarisPassthroughResolutionView passthroughView =
+ new PolarisPassthroughResolutionView(
+ callContext, entityManager, securityContext, CATALOG_NAME);
+ TaskExecutor taskExecutor = Mockito.mock();
+ IcebergCatalog icebergCatalog =
+ new IcebergCatalog(
+ entityManager,
+ metaStoreManager,
+ callContext,
+ passthroughView,
+ securityContext,
+ taskExecutor,
+ fileIOFactory);
+ ImmutableMap.Builder<String, String> propertiesBuilder =
+ ImmutableMap.<String, String>builder()
+ .put(CatalogProperties.FILE_IO_IMPL, "org.apache.iceberg.inmemory.InMemoryFileIO")
+ .putAll(additionalProperties);
+ icebergCatalog.initialize(CATALOG_NAME, propertiesBuilder.buildKeepingLast());
+ return icebergCatalog;
+ }
+
@Override
protected boolean requiresNamespaceCreate() {
return true;
@@ -373,6 +386,20 @@ public Map purgeRealms(Iterable realms) {
};
}
+ /** TODO: Unblock this test, see: https://github.com/apache/polaris/issues/1272 */
+ @Override
+ @Test
+ @Disabled(
+ """
+ Disabled because the behavior is not applicable to Polaris.
+ To unblock:
+ 1) Align Polaris behavior with the superclass by handling empty namespaces the same way, or
+ 2) Modify this test to expect an exception and add a Polaris-specific version.
+ """)
+ public void listNamespacesWithEmptyNamespace() {
+ super.listNamespacesWithEmptyNamespace();
+ }
+
@Test
public void testRenameTableMissingDestinationNamespace() {
Assumptions.assumeTrue(
@@ -697,7 +724,7 @@ public void testCreateNotificationCreateTableInExternalLocation() {
TableMetadata.buildFromEmpty()
.assignUUID()
.setLocation(anotherTableLocation)
- .addSchema(SCHEMA, 4)
+ .addSchema(SCHEMA)
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
.build();
@@ -754,7 +781,7 @@ public void testCreateNotificationCreateTableOutsideOfMetadataLocation() {
TableMetadata.buildFromEmpty()
.assignUUID()
.setLocation(anotherTableLocation)
- .addSchema(SCHEMA, 4)
+ .addSchema(SCHEMA)
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
.build();
@@ -831,7 +858,7 @@ public void testUpdateNotificationCreateTableInExternalLocation() {
TableMetadata.buildFromEmpty()
.assignUUID()
.setLocation(anotherTableLocation)
- .addSchema(SCHEMA, 4)
+ .addSchema(SCHEMA)
.addPartitionSpec(PartitionSpec.unpartitioned())
.addSortOrder(SortOrder.unsorted())
.build();
@@ -1401,7 +1428,7 @@ public void testDropNotificationWhenTableExists() {
@Override
public void testDropTableWithPurge() {
if (this.requiresNamespaceCreate()) {
- ((SupportsNamespaces) catalog).createNamespace(NS);
+ catalog.createNamespace(NS);
}
Assertions.assertThatPredicate(catalog::tableExists)
@@ -1497,7 +1524,7 @@ public void testDropTableWithPurgeDisabled() {
CatalogProperties.FILE_IO_IMPL, "org.apache.iceberg.inmemory.InMemoryFileIO"));
if (this.requiresNamespaceCreate()) {
- ((SupportsNamespaces) noPurgeCatalog).createNamespace(NS);
+ noPurgeCatalog.createNamespace(NS);
}
Assertions.assertThatPredicate(noPurgeCatalog::tableExists)
diff --git a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogViewTest.java b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogViewTest.java
index bea9aa3100..7d817bec4f 100644
--- a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogViewTest.java
+++ b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/catalog/IcebergCatalogViewTest.java
@@ -199,7 +199,12 @@ public void before(TestInfo testInfo) {
this.catalog.initialize(
CATALOG_NAME,
ImmutableMap.of(
- CatalogProperties.FILE_IO_IMPL, "org.apache.iceberg.inmemory.InMemoryFileIO"));
+ CatalogProperties.FILE_IO_IMPL,
+ "org.apache.iceberg.inmemory.InMemoryFileIO",
+ CatalogProperties.VIEW_DEFAULT_PREFIX + "key1",
+ "catalog-default-key1",
+ CatalogProperties.VIEW_DEFAULT_PREFIX + "key2",
+ "catalog-default-key2"));
}
@AfterEach
diff --git a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/it/QuarkusApplicationIntegrationTest.java b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/it/QuarkusApplicationIntegrationTest.java
index ba1395ac8e..9c72c9b065 100644
--- a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/it/QuarkusApplicationIntegrationTest.java
+++ b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/it/QuarkusApplicationIntegrationTest.java
@@ -36,6 +36,7 @@
import org.apache.iceberg.rest.HTTPClient;
import org.apache.iceberg.rest.RESTClient;
import org.apache.iceberg.rest.auth.AuthConfig;
+import org.apache.iceberg.rest.auth.AuthSession;
import org.apache.iceberg.rest.auth.OAuth2Util;
import org.apache.iceberg.rest.responses.OAuthTokenResponse;
import org.apache.polaris.service.auth.TokenBrokerFactory;
@@ -72,6 +73,7 @@ public void testIcebergRestApiRefreshExpiredToken(
HTTPClient.builder(Map.of())
.withHeader(endpoints.realmHeaderName(), endpoints.realmId())
.uri(path)
+ .withAuthSession(AuthSession.EMPTY)
.build()) {
String credentialString =
clientCredentials.clientId() + ":" + clientCredentials.clientSecret();
@@ -102,6 +104,7 @@ public void testIcebergRestApiRefreshValidToken(
HTTPClient.builder(Map.of())
.withHeader(endpoints.realmHeaderName(), endpoints.realmId())
.uri(path)
+ .withAuthSession(AuthSession.EMPTY)
.build()) {
var response =
client.postForm(
@@ -142,6 +145,7 @@ public void testIcebergRestApiInvalidToken(
HTTPClient.builder(Map.of())
.withHeader(endpoints.realmHeaderName(), endpoints.realmId())
.uri(path)
+ .withAuthSession(AuthSession.EMPTY)
.build()) {
var response =
client.postForm(
diff --git a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/task/TaskTestUtils.java b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/task/TaskTestUtils.java
index 9cd0ddd9e4..d7538b923f 100644
--- a/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/task/TaskTestUtils.java
+++ b/quarkus/service/src/test/java/org/apache/polaris/service/quarkus/task/TaskTestUtils.java
@@ -100,8 +100,7 @@ static TableMetadata writeTableMetadata(
tmBuilder
.setLocation("path/to/table")
.addSchema(
- new Schema(List.of(Types.NestedField.of(1, false, "field1", Types.StringType.get()))),
- 1)
+ new Schema(List.of(Types.NestedField.of(1, false, "field1", Types.StringType.get()))))
.addSortOrder(SortOrder.unsorted())
.assignUUID(UUID.randomUUID().toString())
.addPartitionSpec(PartitionSpec.unpartitioned());
@@ -110,7 +109,7 @@ static TableMetadata writeTableMetadata(
for (Snapshot snapshot : snapshots) {
tmBuilder.addSnapshot(snapshot);
if (statisticsFiles != null) {
- tmBuilder.setStatistics(snapshot.snapshotId(), statisticsFiles.get(statisticsFileIndex++));
+ tmBuilder.setStatistics(statisticsFiles.get(statisticsFileIndex++));
}
}
TableMetadata tableMetadata = tmBuilder.build();
diff --git a/regtests/setup.sh b/regtests/setup.sh
index 91a8bf5f65..e65e5a337b 100755
--- a/regtests/setup.sh
+++ b/regtests/setup.sh
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
fi
SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.7.1"
+ICEBERG_VERSION="1.8.1"
export PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
# Ensure binaries are downloaded locally
diff --git a/regtests/t_pyspark/src/iceberg_spark.py b/regtests/t_pyspark/src/iceberg_spark.py
index 9b6a393d09..cd0c4ade52 100644
--- a/regtests/t_pyspark/src/iceberg_spark.py
+++ b/regtests/t_pyspark/src/iceberg_spark.py
@@ -72,7 +72,7 @@ def __enter__(self):
"""Initial method for Iceberg Spark session. Creates a Spark session with specified configs.
"""
packages = [
- "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.7.1",
+ "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1",
"org.apache.hadoop:hadoop-aws:3.4.0",
"software.amazon.awssdk:bundle:2.23.19",
"software.amazon.awssdk:url-connection-client:2.23.19",
diff --git a/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref b/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
index a23d1d941e..4cd8ce0f81 100755
--- a/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","HEAD /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","HEAD /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","POST /v1/{prefix}/transactions/commit","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","HEAD /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_views.sh.ref b/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
index 8e4bff2702..853c736dba 100755
--- a/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","HEAD /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","HEAD /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","POST /v1/{prefix}/transactions/commit","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","HEAD /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalog.java b/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalog.java
index a6c718151f..3f9722f793 100644
--- a/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalog.java
+++ b/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalog.java
@@ -217,6 +217,10 @@ public void initialize(String name, Map properties) {
name,
this.catalogName);
+ // Ensure catalogProperties is assigned before calling metricsReporter() for proper
+ // functionality.
+ catalogProperties = properties;
+
// Base location from catalogEntity is primary source of truth, otherwise fall through
// to the same key from the properties map, and finally fall through to WAREHOUSE_LOCATION.
String baseLocation =
@@ -263,7 +267,6 @@ public void initialize(String name, Map properties) {
closeableGroup.addCloseable(metricsReporter());
closeableGroup.setSuppressCloseFailure(true);
- catalogProperties = properties;
tableDefaultProperties =
PropertyUtil.propertiesWithPrefix(properties, CatalogProperties.TABLE_DEFAULT_PREFIX);
diff --git a/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalogAdapter.java b/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalogAdapter.java
index d88ea3a7e4..7cb4f2f957 100644
--- a/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalogAdapter.java
+++ b/service/common/src/main/java/org/apache/polaris/service/catalog/iceberg/IcebergCatalogAdapter.java
@@ -91,23 +91,27 @@ public class IcebergCatalogAdapter
ImmutableSet.builder()
.add(Endpoint.V1_LIST_NAMESPACES)
.add(Endpoint.V1_LOAD_NAMESPACE)
+ .add(Endpoint.V1_NAMESPACE_EXISTS)
.add(Endpoint.V1_CREATE_NAMESPACE)
.add(Endpoint.V1_UPDATE_NAMESPACE)
.add(Endpoint.V1_DELETE_NAMESPACE)
.add(Endpoint.V1_LIST_TABLES)
.add(Endpoint.V1_LOAD_TABLE)
+ .add(Endpoint.V1_TABLE_EXISTS)
.add(Endpoint.V1_CREATE_TABLE)
.add(Endpoint.V1_UPDATE_TABLE)
.add(Endpoint.V1_DELETE_TABLE)
.add(Endpoint.V1_RENAME_TABLE)
.add(Endpoint.V1_REGISTER_TABLE)
.add(Endpoint.V1_REPORT_METRICS)
+ .add(Endpoint.V1_COMMIT_TRANSACTION)
.build();
private static final Set<Endpoint> VIEW_ENDPOINTS =
ImmutableSet.builder()
.add(Endpoint.V1_LIST_VIEWS)
.add(Endpoint.V1_LOAD_VIEW)
+ .add(Endpoint.V1_VIEW_EXISTS)
.add(Endpoint.V1_CREATE_VIEW)
.add(Endpoint.V1_UPDATE_VIEW)
.add(Endpoint.V1_DELETE_VIEW)
diff --git a/service/common/src/main/java/org/apache/polaris/service/task/ManifestFileCleanupTaskHandler.java b/service/common/src/main/java/org/apache/polaris/service/task/ManifestFileCleanupTaskHandler.java
index 84e9683a27..3173dc25e7 100644
--- a/service/common/src/main/java/org/apache/polaris/service/task/ManifestFileCleanupTaskHandler.java
+++ b/service/common/src/main/java/org/apache/polaris/service/task/ManifestFileCleanupTaskHandler.java
@@ -87,10 +87,7 @@ private boolean cleanUpManifestFile(
StreamSupport.stream(
Spliterators.spliteratorUnknownSize(dataFiles.iterator(), Spliterator.IMMUTABLE),
false)
- .map(
- file ->
- tryDelete(
- tableId, fileIO, manifestFile.path(), file.path().toString(), null, 1))
+ .map(file -> tryDelete(tableId, fileIO, manifestFile.path(), file.location(), null, 1))
.toList();
LOGGER.debug(
"Scheduled {} data files to be deleted from manifest {}",
diff --git a/service/common/src/test/java/org/apache/polaris/service/exception/ExceptionMapperTest.java b/service/common/src/test/java/org/apache/polaris/service/exception/ExceptionMapperTest.java
index 2a27f2f381..bc5f84c1d3 100644
--- a/service/common/src/test/java/org/apache/polaris/service/exception/ExceptionMapperTest.java
+++ b/service/common/src/test/java/org/apache/polaris/service/exception/ExceptionMapperTest.java
@@ -42,7 +42,8 @@ public class ExceptionMapperTest {
@ParameterizedTest
@MethodSource("testFullExceptionIsLogged")
- public void testFullExceptionIsLogged(ExceptionMapper mapper, Exception exception, Level level) {
+ public void testFullExceptionIsLogged(
+ ExceptionMapper mapper, Exception exception, Level level) {
Logger logger = (Logger) LoggerFactory.getLogger(mapper.getClass());
ListAppender listAppender = new ListAppender<>();
listAppender.start();
diff --git a/site/content/in-dev/unreleased/quickstart.md b/site/content/in-dev/unreleased/quickstart.md
index 9ab93c7fec..1b45ce91b0 100644
--- a/site/content/in-dev/unreleased/quickstart.md
+++ b/site/content/in-dev/unreleased/quickstart.md
@@ -280,7 +280,7 @@ _Note: the credentials provided here are those for our principal, not the root c
```shell
bin/spark-shell \
---packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.7.1,org.apache.hadoop:hadoop-aws:3.4.0 \
+--packages org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.8.1,org.apache.hadoop:hadoop-aws:3.4.0 \
--conf spark.sql.extensions=org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions \
--conf spark.sql.catalog.quickstart_catalog.warehouse=quickstart_catalog \
--conf spark.sql.catalog.quickstart_catalog.header.X-Iceberg-Access-Delegation=vended-credentials \