[#6536] improvement(authz): Create Ranger service if service is absent (#6575)

### What changes were proposed in this pull request?

Create the Ranger service automatically when it is absent, controlled by the new `authorization.ranger.service.create-if-absent` catalog property.

### Why are the changes needed?

Fix: #6536 

### Does this PR introduce _any_ user-facing change?

Yes. This introduces the `authorization.ranger.service.create-if-absent` catalog property; I will add the documentation for it.
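
For illustration only, here is a minimal sketch of how a user could opt into the new behavior when creating a catalog. It mirrors the integration tests in this PR; the `metalake` handle, admin URL, credentials, and catalog/service names are placeholders, not values prescribed by this change.

```java
import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;

// `metalake` is assumed to be an already-loaded metalake handle, as in the ITs below.
Map<String, String> properties =
    ImmutableMap.<String, String>builder()
        // The usual Hive catalog properties (e.g. the metastore URIs) go here as well.
        .put(Catalog.AUTHORIZATION_PROVIDER, "ranger")
        .put(RangerAuthorizationProperties.RANGER_SERVICE_TYPE, "HadoopSQL")
        .put(RangerAuthorizationProperties.RANGER_ADMIN_URL, "http://localhost:6080")
        .put(RangerAuthorizationProperties.RANGER_AUTH_TYPE, "simple")
        .put(RangerAuthorizationProperties.RANGER_USERNAME, "admin")
        .put(RangerAuthorizationProperties.RANGER_PASSWORD, "rangerR0cks!")
        .put(RangerAuthorizationProperties.RANGER_SERVICE_NAME, "my_hive_service")
        // New in this PR: create the Ranger service automatically if it does not exist yet.
        .put(RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT, "true")
        .build();

Catalog catalog =
    metalake.createCatalog("hive_catalog", Catalog.Type.RELATIONAL, "hive", "comment", properties);
```

The chained (`chain`) provider accepts the same flag per plugin (e.g. `authorization.chain.hive1.ranger.service.create-if-absent`), as exercised by the chained-plugin test in this diff.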

### How was this patch tested?

Added tests that create catalogs with `create-if-absent` enabled and verify the corresponding Ranger services are created.
jerqi authored Mar 7, 2025
1 parent 61cfb52 commit 1297713
Showing 10 changed files with 258 additions and 139 deletions.
@@ -55,6 +55,7 @@
import org.apache.kyuubi.plugin.spark.authz.AccessControlException;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.spark.sql.SparkSession;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
@@ -210,6 +211,39 @@ protected void createCatalog() {
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, "hive", "comment", catalogConf);
catalog = metalake.loadCatalog(catalogName);
LOG.info("Catalog created: {}", catalog);

// Test that the chained authorization plugin creates its Ranger services automatically
Map<String, String> autoProperties = new HashMap<>();
autoProperties.put(HiveConstants.METASTORE_URIS, HIVE_METASTORE_URIS);
autoProperties.put(IMPERSONATION_ENABLE, "true");
autoProperties.put(Catalog.AUTHORIZATION_PROVIDER, "chain");
autoProperties.put(ChainedAuthorizationProperties.CHAIN_PLUGINS_PROPERTIES_KEY, "hive1,hdfs1");
autoProperties.put("authorization.chain.hive1.provider", "ranger");
autoProperties.put("authorization.chain.hive1.ranger.auth.type", RangerContainer.authType);
autoProperties.put("authorization.chain.hive1.ranger.admin.url", RangerITEnv.RANGER_ADMIN_URL);
autoProperties.put("authorization.chain.hive1.ranger.username", RangerContainer.rangerUserName);
autoProperties.put("authorization.chain.hive1.ranger.password", RangerContainer.rangerPassword);
autoProperties.put("authorization.chain.hive1.ranger.service.type", "HadoopSQL");
autoProperties.put("authorization.chain.hive1.ranger.service.name", "test899");
autoProperties.put("authorization.chain.hive1.ranger.service.create-if-absent", "true");
autoProperties.put("authorization.chain.hdfs1.provider", "ranger");
autoProperties.put("authorization.chain.hdfs1.ranger.auth.type", RangerContainer.authType);
autoProperties.put("authorization.chain.hdfs1.ranger.admin.url", RangerITEnv.RANGER_ADMIN_URL);
autoProperties.put("authorization.chain.hdfs1.ranger.username", RangerContainer.rangerUserName);
autoProperties.put("authorization.chain.hdfs1.ranger.password", RangerContainer.rangerPassword);
autoProperties.put("authorization.chain.hdfs1.ranger.service.type", "HDFS");
autoProperties.put("authorization.chain.hdfs1.ranger.service.name", "test833");
autoProperties.put("authorization.chain.hdfs1.ranger.service.create-if-absent", "true");
metalake.createCatalog("test", Catalog.Type.RELATIONAL, "hive", "comment", autoProperties);
try {
RangerService rangerService = RangerITEnv.rangerClient.getService("test833");
Assertions.assertNotNull(rangerService);
rangerService = RangerITEnv.rangerClient.getService("test899");
Assertions.assertNotNull(rangerService);
} catch (Exception e) {
Assertions.fail();
}
metalake.dropCatalog("test", true);
}

private String storageLocation(String dirName) {
@@ -23,6 +23,8 @@

/** The properties for Ranger authorization plugin. */
public class RangerAuthorizationProperties extends AuthorizationProperties {
public static final String RANGER_PREFIX = "authorization.ranger";

/** Ranger admin web URIs */
public static final String RANGER_ADMIN_URL = "authorization.ranger.admin.url";

@@ -46,13 +48,34 @@ public class RangerAuthorizationProperties extends AuthorizationProperties {
*/
public static final String RANGER_PASSWORD = "authorization.ranger.password";

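/** Whether the Ranger service should be created automatically when it does not exist. */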
public static final String RANGER_SERVICE_CREATE_IF_ABSENT =
"authorization.ranger.service.create-if-absent";

public static final String HADOOP_SECURITY_AUTHENTICATION =
"authorization.ranger.hadoop.security.authentication";
public static final String DEFAULT_HADOOP_SECURITY_AUTHENTICATION = "simple";
public static final String HADOOP_RPC_PROTECTION = "authorization.ranger.hadoop.rpc.protection";
public static final String DEFAULT_HADOOP_RPC_PROTECTION = "authentication";
public static final String HADOOP_SECURITY_AUTHORIZATION =
"authorization.ranger.hadoop.security.authorization";
public static final String FS_DEFAULT_NAME = "authorization.ranger.fs.default.name";
public static final String FS_DEFAULT_VALUE = "hdfs://127.0.0.1:8090";

public static final String JDBC_DRIVER_CLASS_NAME = "authorization.ranger.jdbc.driverClassName";

public static final String DEFAULT_JDBC_DRIVER_CLASS_NAME = "org.apache.hive.jdbc.HiveDriver";

public static final String JDBC_URL = "authorization.ranger.jdbc.url";

public static final String DEFAULT_JDBC_URL = "jdbc:hive2://127.0.0.1:8081";

public RangerAuthorizationProperties(Map<String, String> properties) {
super(properties);
}

@Override
public String getPropertiesPrefix() {
return "authorization.ranger";
return RANGER_PREFIX;
}

@Override
@@ -63,9 +86,6 @@ public void validate() {
Preconditions.checkArgument(
properties.containsKey(RANGER_SERVICE_TYPE),
String.format("%s is required", RANGER_SERVICE_TYPE));
Preconditions.checkArgument(
properties.containsKey(RANGER_SERVICE_NAME),
String.format("%s is required", RANGER_SERVICE_NAME));
Preconditions.checkArgument(
properties.containsKey(RANGER_AUTH_TYPE),
String.format("%s is required", RANGER_AUTH_TYPE));
@@ -76,15 +96,16 @@
Preconditions.checkArgument(
properties.get(RANGER_ADMIN_URL) != null,
String.format("%s is required", RANGER_ADMIN_URL));
Preconditions.checkArgument(
properties.get(RANGER_SERVICE_NAME) != null,
String.format("%s is required", RANGER_SERVICE_NAME));
Preconditions.checkArgument(
properties.get(RANGER_AUTH_TYPE) != null,
String.format("%s is required", RANGER_AUTH_TYPE));
Preconditions.checkArgument(
properties.get(RANGER_USERNAME) != null, String.format("%s is required", RANGER_USERNAME));
Preconditions.checkArgument(
properties.get(RANGER_PASSWORD) != null, String.format("%s is required", RANGER_PASSWORD));

Preconditions.checkArgument(
properties.get(RANGER_SERVICE_NAME) != null,
String.format("%s is required", RANGER_SERVICE_NAME));
}
}
@@ -51,6 +51,7 @@
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.common.PathBasedMetadataObject;
import org.apache.gravitino.authorization.common.PathBasedSecurableObject;
import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
import org.apache.gravitino.utils.MetadataObjectUtil;
@@ -679,4 +680,45 @@ public Boolean onMetadataUpdated(MetadataObjectChange... changes) throws Runtime
}
return Boolean.TRUE;
}

@Override
protected String getServiceType() {
return HDFS_SERVICE_TYPE;
}

@Override
protected Map<String, String> getServiceConfigs(Map<String, String> config) {
return ImmutableMap.<String, String>builder()
.put(
RangerAuthorizationProperties.RANGER_USERNAME.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_USERNAME))
.put(
RangerAuthorizationProperties.RANGER_PASSWORD.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_PASSWORD))
.put(
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHENTICATION.substring(
getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHENTICATION,
RangerAuthorizationProperties.DEFAULT_HADOOP_SECURITY_AUTHENTICATION))
.put(
RangerAuthorizationProperties.HADOOP_RPC_PROTECTION.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.HADOOP_RPC_PROTECTION,
RangerAuthorizationProperties.DEFAULT_HADOOP_RPC_PROTECTION))
.put(
RangerAuthorizationProperties.HADOOP_SECURITY_AUTHORIZATION.substring(
getPrefixLength()),
getConfValue(
config, RangerAuthorizationProperties.HADOOP_SECURITY_AUTHORIZATION, "false"))
.put(
RangerAuthorizationProperties.FS_DEFAULT_NAME.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.FS_DEFAULT_NAME,
RangerAuthorizationProperties.FS_DEFAULT_VALUE))
.build();
}
}
@@ -45,6 +45,7 @@
import org.apache.gravitino.authorization.Privilege;
import org.apache.gravitino.authorization.SecurableObject;
import org.apache.gravitino.authorization.SecurableObjects;
import org.apache.gravitino.authorization.common.RangerAuthorizationProperties;
import org.apache.gravitino.authorization.ranger.RangerPrivileges.RangerHadoopSQLPrivilege;
import org.apache.gravitino.authorization.ranger.reference.RangerDefines.PolicyResource;
import org.apache.gravitino.exceptions.AuthorizationPluginException;
@@ -802,4 +803,33 @@ public Boolean onMetadataUpdated(MetadataObjectChange... changes) throws Runtime
}
return Boolean.TRUE;
}

@Override
protected String getServiceType() {
return HADOOP_SQL_SERVICE_TYPE;
}

@Override
protected Map<String, String> getServiceConfigs(Map<String, String> config) {
return ImmutableMap.<String, String>builder()
.put(
RangerAuthorizationProperties.RANGER_USERNAME.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_USERNAME))
.put(
RangerAuthorizationProperties.RANGER_PASSWORD.substring(getPrefixLength()),
config.get(RangerAuthorizationProperties.RANGER_PASSWORD))
.put(
RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.JDBC_DRIVER_CLASS_NAME,
RangerAuthorizationProperties.DEFAULT_JDBC_DRIVER_CLASS_NAME))
.put(
RangerAuthorizationProperties.JDBC_URL.substring(getPrefixLength()),
getConfValue(
config,
RangerAuthorizationProperties.JDBC_URL,
RangerAuthorizationProperties.DEFAULT_JDBC_URL))
.build();
}
}
@@ -21,6 +21,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.sun.jersey.api.client.ClientResponse;
import java.io.IOException;
import java.time.Instant;
import java.util.Arrays;
@@ -56,6 +57,7 @@
import org.apache.gravitino.utils.PrincipalUtils;
import org.apache.ranger.RangerServiceException;
import org.apache.ranger.plugin.model.RangerPolicy;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.ranger.plugin.util.GrantRevokeRoleRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -74,6 +76,8 @@
public abstract class RangerAuthorizationPlugin
implements AuthorizationPlugin, AuthorizationPrivilegesMappingProvider {
private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationPlugin.class);
protected static final String HDFS_SERVICE_TYPE = "hdfs";
protected static final String HADOOP_SQL_SERVICE_TYPE = "hive";

protected String metalake;
protected final String rangerServiceName;
@@ -87,13 +91,22 @@ protected RangerAuthorizationPlugin(String metalake, Map<String, String> config)
new RangerAuthorizationProperties(config);
rangerAuthorizationProperties.validate();
String rangerUrl = config.get(RangerAuthorizationProperties.RANGER_ADMIN_URL);

String authType = config.get(RangerAuthorizationProperties.RANGER_AUTH_TYPE);

rangerAdminName = config.get(RangerAuthorizationProperties.RANGER_USERNAME);

// The Apache Ranger password must be at least 8 characters, with at least one letter and one digit.
String password = config.get(RangerAuthorizationProperties.RANGER_PASSWORD);

rangerServiceName = config.get(RangerAuthorizationProperties.RANGER_SERVICE_NAME);
rangerClient = new RangerClientExtension(rangerUrl, authType, rangerAdminName, password);

if (Boolean.parseBoolean(
config.get(RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT))) {
createRangerServiceIfNecessary(config, rangerServiceName);
}

rangerHelper =
new RangerHelper(
rangerClient,
@@ -769,6 +782,34 @@ public Boolean onGroupAcquired(Group group) {
return Boolean.TRUE;
}

private void createRangerServiceIfNecessary(Map<String, String> config, String serviceName) {
try {
rangerClient.getService(serviceName);
} catch (RangerServiceException rse) {
if (rse.getStatus().equals(ClientResponse.Status.NOT_FOUND)) {
try {
RangerService rangerService = new RangerService();
rangerService.setType(getServiceType());
rangerService.setName(serviceName);
rangerService.setConfigs(getServiceConfigs(config));
rangerClient.createService(rangerService);
// Remove the default policies created along with the service; otherwise users would be
// granted more access than intended.
List<RangerPolicy> policies = rangerClient.getPoliciesInService(serviceName);
for (RangerPolicy policy : policies) {
rangerClient.deletePolicy(policy.getId());
}
} catch (RangerServiceException crse) {
throw new AuthorizationPluginException(
"Fail to create ranger service %s, exception: %s", serviceName, crse.getMessage());
}
} else {
throw new AuthorizationPluginException(
"Fail to get ranger service name %s, exception: %s", serviceName, rse.getMessage());
}
}
}

/**
* Add the securable object's privilege to the Ranger policy. <br>
* 1. Find the policy base the metadata object. <br>
@@ -959,6 +1000,22 @@ protected void removePolicyByMetadataObject(AuthorizationMetadataObject authzMet
}
}

protected String getConfValue(Map<String, String> conf, String key, String defaultValue) {
if (conf.containsKey(key)) {
return conf.get(key);
}
return defaultValue;
}

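/** Returns the Ranger service type (e.g. {@code hdfs} or {@code hive}) used when auto-creating the service. */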
protected abstract String getServiceType();

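/** Returns the default service configs used when auto-creating the Ranger service. */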
protected abstract Map<String, String> getServiceConfigs(Map<String, String> config);

protected int getPrefixLength() {
// Add 1 to account for the `.` separator that follows the prefix
return RangerAuthorizationProperties.RANGER_PREFIX.length() + 1;
}

@Override
public void close() throws IOException {}

@@ -23,6 +23,7 @@

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import java.util.List;
import java.util.Map;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Configs;
@@ -35,6 +36,7 @@
import org.apache.gravitino.integration.test.container.RangerContainer;
import org.apache.gravitino.integration.test.util.GravitinoITUtils;
import org.apache.kyuubi.plugin.spark.authz.AccessControlException;
import org.apache.ranger.plugin.model.RangerService;
import org.apache.spark.SparkUnsupportedOperationException;
import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.SparkSession;
@@ -196,6 +198,45 @@ public void createCatalog() {
metalake.createCatalog(catalogName, Catalog.Type.RELATIONAL, provider, "comment", properties);
catalog = metalake.loadCatalog(catalogName);
LOG.info("Catalog created: {}", catalog);

// Test that the Ranger service is created automatically when the catalog is created
Map<String, String> uuidProperties =
ImmutableMap.of(
HiveConstants.METASTORE_URIS,
HIVE_METASTORE_URIS,
IMPERSONATION_ENABLE,
"true",
AUTHORIZATION_PROVIDER,
"ranger",
RangerAuthorizationProperties.RANGER_SERVICE_TYPE,
"HadoopSQL",
RangerAuthorizationProperties.RANGER_ADMIN_URL,
RangerITEnv.RANGER_ADMIN_URL,
RangerAuthorizationProperties.RANGER_AUTH_TYPE,
RangerContainer.authType,
RangerAuthorizationProperties.RANGER_USERNAME,
RangerContainer.rangerUserName,
RangerAuthorizationProperties.RANGER_PASSWORD,
RangerContainer.rangerPassword,
RangerAuthorizationProperties.RANGER_SERVICE_NAME,
"test555",
RangerAuthorizationProperties.RANGER_SERVICE_CREATE_IF_ABSENT,
"true");

try {
List<RangerService> serviceList = RangerITEnv.rangerClient.findServices(Maps.newHashMap());
int expectServiceCount = serviceList.size() + 1;
Catalog catalogTest =
metalake.createCatalog(
"test", Catalog.Type.RELATIONAL, provider, "comment", uuidProperties);
Map<String, String> newProperties = catalogTest.properties();
Assertions.assertTrue(newProperties.containsKey("authorization.ranger.service.name"));
serviceList = RangerITEnv.rangerClient.findServices(Maps.newHashMap());
Assertions.assertEquals(expectServiceCount, serviceList.size());
metalake.dropCatalog("test", true);
} catch (Exception e) {
throw new RuntimeException(e);
}
}

protected void checkTableAllPrivilegesExceptForCreating() {