Skip to content

Commit

Permalink
address comment
Browse files Browse the repository at this point in the history
  • Loading branch information
xunliu committed Jan 6, 2025
1 parent bed3e13 commit bf659d9
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 119 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,6 @@
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
Expand All @@ -36,6 +35,7 @@
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.gravitino.Entity;
import org.apache.gravitino.GravitinoEnv;
Expand Down Expand Up @@ -64,6 +64,7 @@

public class RangerAuthorizationHDFSPlugin extends RangerAuthorizationPlugin {
private static final Logger LOG = LoggerFactory.getLogger(RangerAuthorizationHDFSPlugin.class);
private static final Pattern HDFS_PATTERN = Pattern.compile("^hdfs://[^/]*");

public RangerAuthorizationHDFSPlugin(String metalake, Map<String, String> config) {
super(metalake, config);
Expand Down Expand Up @@ -127,6 +128,10 @@ public List<String> policyResourceDefinesRule() {
return ImmutableList.of(RangerDefines.PolicyResource.PATH.getName());
}

/**
 * Normalizes the location of a path-based metadata object into the form Ranger stores in HDFS
 * path policies: any leading {@code hdfs://<authority>} scheme-and-authority prefix is stripped,
 * leaving only the absolute filesystem path.
 *
 * @param pathBasedMetadataObject the metadata object whose {@code path()} holds the raw location
 * @return the location with the leading HDFS scheme/authority prefix removed (unchanged if the
 *     location does not start with {@code hdfs://})
 */
String getAuthorizationPath(PathBasedMetadataObject pathBasedMetadataObject) {
  String rawLocation = pathBasedMetadataObject.path();
  // HDFS_PATTERN is anchored at the start, so at most one prefix is removed.
  return HDFS_PATTERN.matcher(rawLocation).replaceAll("");
}

/**
* Find the managed policy for the ranger securable object.
*
Expand All @@ -148,7 +153,8 @@ public RangerPolicy findManagedPolicy(AuthorizationMetadataObject authzMetadataO
(PathBasedMetadataObject) authzMetadataObject;
Map<String, String> preciseFilters = new HashMap<>();
for (int i = 0; i < nsMetadataObj.size() && i < policyResourceDefinesRule().size(); i++) {
preciseFilters.put(policyResourceDefinesRule().get(i), pathAuthzMetadataObject.path());
preciseFilters.put(
policyResourceDefinesRule().get(i), getAuthorizationPath(pathAuthzMetadataObject));
}
policies =
policies.stream()
Expand Down Expand Up @@ -197,7 +203,8 @@ protected List<RangerPolicy> wildcardSearchPolies(
.forEach(
resourceDefine -> {
searchFilters.put(
SearchFilter.RESOURCE_PREFIX + resourceDefine, pathBasedMetadataObject.path());
SearchFilter.RESOURCE_PREFIX + resourceDefine,
getAuthorizationPath(pathBasedMetadataObject));
});
try {
return rangerClient.findPolicies(searchFilters);
Expand All @@ -216,75 +223,25 @@ protected List<RangerPolicy> wildcardSearchPolies(
protected void doRenameMetadataObject(
AuthorizationMetadataObject authzMetadataObject,
AuthorizationMetadataObject newAuthzMetadataObject) {
List<Map<String, String>> mappingOldAndNewMetadata;
if (newAuthzMetadataObject.type().equals(SCHEMA)) {
mappingOldAndNewMetadata =
ImmutableList.of(
ImmutableMap.of(
authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
ImmutableMap.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL),
ImmutableMap.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL));
} else if (newAuthzMetadataObject.type().equals(TABLE)) {
mappingOldAndNewMetadata =
ImmutableList.of(
ImmutableMap.of(
authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
ImmutableMap.of(
authzMetadataObject.names().get(1), newAuthzMetadataObject.names().get(1)),
ImmutableMap.of(RangerHelper.RESOURCE_ALL, RangerHelper.RESOURCE_ALL));
} else if (newAuthzMetadataObject.type().equals(COLUMN)) {
mappingOldAndNewMetadata =
ImmutableList.of(
ImmutableMap.of(
authzMetadataObject.names().get(0), newAuthzMetadataObject.names().get(0)),
ImmutableMap.of(
authzMetadataObject.names().get(1), newAuthzMetadataObject.names().get(1)),
ImmutableMap.of(
authzMetadataObject.names().get(2), newAuthzMetadataObject.names().get(2)));
} else if (newAuthzMetadataObject.type().equals(PATH)) {
// do nothing when fileset is renamed
return;
} else {
throw new IllegalArgumentException(
"Unsupported metadata object type: " + authzMetadataObject.type());
}

List<String> oldMetadataNames = new ArrayList<>();
List<String> newMetadataNames = new ArrayList<>();
for (int index = 0; index < mappingOldAndNewMetadata.size(); index++) {
oldMetadataNames.add(mappingOldAndNewMetadata.get(index).keySet().stream().findFirst().get());
newMetadataNames.add(mappingOldAndNewMetadata.get(index).values().stream().findFirst().get());

AuthorizationMetadataObject.Type type;
if (index == 0) {
type = RangerHadoopSQLMetadataObject.Type.SCHEMA;
} else if (index == 1) {
type = RangerHadoopSQLMetadataObject.Type.TABLE;
} else {
type = RangerHadoopSQLMetadataObject.Type.COLUMN;
}
AuthorizationMetadataObject oldHadoopSQLMetadataObject =
new RangerHadoopSQLMetadataObject(
AuthorizationMetadataObject.getParentFullName(oldMetadataNames),
AuthorizationMetadataObject.getLastName(oldMetadataNames),
type);
AuthorizationMetadataObject newHadoopSQLMetadataObject =
new RangerHadoopSQLMetadataObject(
AuthorizationMetadataObject.getParentFullName(newMetadataNames),
AuthorizationMetadataObject.getLastName(newMetadataNames),
type);
updatePolicyByMetadataObject(
newAuthzMetadataObject.type().metadataObjectType(),
oldHadoopSQLMetadataObject,
newHadoopSQLMetadataObject);
}
Preconditions.checkArgument(
authzMetadataObject instanceof PathBasedMetadataObject,
"The metadata object must be a PathBasedMetadataObject");
Preconditions.checkArgument(
newAuthzMetadataObject instanceof PathBasedMetadataObject,
"The metadata object must be a PathBasedMetadataObject");
updatePolicyByMetadataObject(
newAuthzMetadataObject.type().metadataObjectType(),
authzMetadataObject,
newAuthzMetadataObject);
}

@Override
protected void updatePolicyByMetadataObject(
MetadataObject.Type operationType,
AuthorizationMetadataObject oldAuthzMetaobject,
AuthorizationMetadataObject newAuthzMetaobject) {
PathBasedMetadataObject newPathBasedMetadataObject =
(PathBasedMetadataObject) newAuthzMetaobject;
List<RangerPolicy> oldPolicies = wildcardSearchPolies(oldAuthzMetaobject);
List<RangerPolicy> existNewPolicies = wildcardSearchPolies(newAuthzMetaobject);
if (oldPolicies.isEmpty()) {
Expand All @@ -294,43 +251,19 @@ protected void updatePolicyByMetadataObject(
if (!existNewPolicies.isEmpty()) {
LOG.warn("The Ranger policy for the metadata object({}) already exists!", newAuthzMetaobject);
}
Map<MetadataObject.Type, Integer> operationTypeIndex =
ImmutableMap.of(
MetadataObject.Type.SCHEMA, 0,
MetadataObject.Type.TABLE, 1,
MetadataObject.Type.COLUMN, 2);
oldPolicies.stream()
.forEach(
policy -> {
try {
String policyName = policy.getName();
int index = operationTypeIndex.get(operationType);

// Update the policy name is following Gravitino's spec
if (policy
.getName()
.equals(
AuthorizationSecurableObject.DOT_JOINER.join(oldAuthzMetaobject.names()))) {
List<String> policyNames =
Lists.newArrayList(
AuthorizationSecurableObject.DOT_SPLITTER.splitToList(policyName));
Preconditions.checkArgument(
policyNames.size() >= oldAuthzMetaobject.names().size(),
String.format("The policy name(%s) is invalid!", policyName));
if (policyNames.get(index).equals(RangerHelper.RESOURCE_ALL)) {
// Doesn't need to rename the policy `*`
return;
}
policyNames.set(index, newAuthzMetaobject.names().get(index));
policy.setName(AuthorizationSecurableObject.DOT_JOINER.join(policyNames));
}
policy.setName(getAuthorizationPath(newPathBasedMetadataObject));
// Update the policy resource name to new name
policy
.getResources()
.put(
rangerHelper.policyResourceDefines.get(index),
rangerHelper.policyResourceDefines.get(0),
new RangerPolicy.RangerPolicyResource(
newAuthzMetaobject.names().get(index)));
getAuthorizationPath(newPathBasedMetadataObject)));

boolean alreadyExist =
existNewPolicies.stream()
Expand Down Expand Up @@ -367,7 +300,8 @@ protected void doRemoveMetadataObject(AuthorizationMetadataObject authzMetadataO
doRemoveSchemaMetadataObject(authzMetadataObject);
} else if (authzMetadataObject.type().equals(TABLE)) {
doRemoveTableMetadataObject(authzMetadataObject);
} else if (authzMetadataObject.type().equals(PATH)) {
} else if (authzMetadataObject.type().equals(COLUMN)
|| authzMetadataObject.type().equals(PATH)) {
removePolicyByMetadataObject(authzMetadataObject);
} else {
throw new IllegalArgumentException(
Expand Down Expand Up @@ -471,9 +405,10 @@ protected RangerPolicy createPolicyAddResources(AuthorizationMetadataObject meta
PathBasedMetadataObject pathBasedMetadataObject = (PathBasedMetadataObject) metadataObject;
RangerPolicy policy = new RangerPolicy();
policy.setService(rangerServiceName);
policy.setName(pathBasedMetadataObject.path());
policy.setName(getAuthorizationPath(pathBasedMetadataObject));
RangerPolicy.RangerPolicyResource policyResource =
new RangerPolicy.RangerPolicyResource(pathBasedMetadataObject.path(), false, true);
new RangerPolicy.RangerPolicyResource(
getAuthorizationPath(pathBasedMetadataObject), false, true);
policy.getResources().put(RangerDefines.PolicyResource.PATH.getName(), policyResource);
return policy;
}
Expand Down Expand Up @@ -569,7 +504,7 @@ public List<AuthorizationSecurableObject> translatePrivilege(SecurableObject sec
});
break;
default:
checkOmissionTranslate(
checkUnprocessedTranslate(
Privileges.UseSchema.allow(),
securableObject.type(),
gravitinoPrivilege.name());
Expand Down Expand Up @@ -600,7 +535,7 @@ public List<AuthorizationSecurableObject> translatePrivilege(SecurableObject sec
});
break;
default:
checkOmissionTranslate(
checkUnprocessedTranslate(
Privileges.CreateSchema.allow(),
securableObject.type(),
gravitinoPrivilege.name());
Expand Down Expand Up @@ -633,7 +568,7 @@ public List<AuthorizationSecurableObject> translatePrivilege(SecurableObject sec
rangerSecurableObjects.add(
generateAuthorizationSecurableObject(
pathBasedMetadataObject.names(),
pathBasedMetadataObject.path(),
getAuthorizationPath(pathBasedMetadataObject),
PathBasedMetadataObject.Type.PATH,
rangerPrivileges));
});
Expand All @@ -654,7 +589,7 @@ public List<AuthorizationSecurableObject> translatePrivilege(SecurableObject sec
return rangerSecurableObjects;
}

private void checkOmissionTranslate(
private void checkUnprocessedTranslate(
Privileges.GenericPrivilege<?> privilege,
MetadataObject.Type gravitinoType,
Privilege.Name gravitinoPrivilege) {
Expand Down Expand Up @@ -685,7 +620,7 @@ public List<AuthorizationSecurableObject> translateOwner(MetadataObject gravitin
rangerSecurableObjects.add(
generateAuthorizationSecurableObject(
pathBasedMetadataObject.names(),
pathBasedMetadataObject.path(),
getAuthorizationPath(pathBasedMetadataObject),
PathBasedMetadataObject.Type.PATH,
ownerMappingRule()));
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ protected void doRemoveMetadataObject(AuthorizationMetadataObject authzMetadataO
doRemoveSchemaMetadataObject(authzMetadataObject);
} else if (type.equals(TABLE)) {
doRemoveTableMetadataObject(authzMetadataObject);
} else if (type.equals(COLUMN) || type.equals(PATH)) {
} else if (type.equals(COLUMN)) {
removePolicyByMetadataObject(authzMetadataObject);
} else {
throw new IllegalArgumentException(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -231,11 +231,7 @@ public void stop() throws IOException {
Arrays.stream(metalake.listCatalogs())
.forEach(
catalogName -> {
try {
metalake.dropCatalog(catalogName, true);
} catch (Exception e) {
// Ignore exception
}
metalake.dropCatalog(catalogName, true);
});
client.dropMetalake(metalakeName, true);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,6 @@
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import org.apache.gravitino.Catalog;
import org.apache.gravitino.Entity;
Expand Down Expand Up @@ -74,7 +73,6 @@ public class AuthorizationUtils {
private static final Set<Privilege.Name> TOPIC_PRIVILEGES =
Sets.immutableEnumSet(
Privilege.Name.CREATE_TOPIC, Privilege.Name.PRODUCE_TOPIC, Privilege.Name.CONSUME_TOPIC);
private static final Pattern HDFS_PATTERN = Pattern.compile("^hdfs://[^/]*");

private AuthorizationUtils() {}

Expand Down Expand Up @@ -398,7 +396,6 @@ public static List<String> getMetadataObjectLocation(
{
NameIdentifier[] identifiers =
GravitinoEnv.getInstance().catalogDispatcher().listCatalogs(Namespace.of(metalake));
List<String> finalLocationPath = locations;
Arrays.stream(identifiers)
.collect(Collectors.toList())
.forEach(
Expand All @@ -420,9 +417,7 @@ public static List<String> getMetadataObjectLocation(
Preconditions.checkArgument(
defaultSchemaLocation != null,
String.format("Catalog %s location is not found", ident));
String location =
HDFS_PATTERN.matcher(defaultSchemaLocation).replaceAll("");
finalLocationPath.add(location);
locations.add(defaultSchemaLocation);
}
}
});
Expand All @@ -443,8 +438,7 @@ public static List<String> getMetadataObjectLocation(
Preconditions.checkArgument(
defaultSchemaLocation != null,
String.format("Catalog %s location is not found", ident));
String location = HDFS_PATTERN.matcher(defaultSchemaLocation).replaceAll("");
locations.add(location);
locations.add(defaultSchemaLocation);
}
}
}
Expand All @@ -456,8 +450,7 @@ public static List<String> getMetadataObjectLocation(
String schemaLocation = schema.properties().get(HiveConstants.LOCATION);
Preconditions.checkArgument(
schemaLocation != null, String.format("Schema %s location is not found", ident));
String location = HDFS_PATTERN.matcher(schemaLocation).replaceAll("");
locations.add(location);
locations.add(schemaLocation);
}
}
break;
Expand All @@ -468,8 +461,7 @@ public static List<String> getMetadataObjectLocation(
String schemaLocation = table.properties().get(HiveConstants.LOCATION);
Preconditions.checkArgument(
schemaLocation != null, String.format("Table %s location is not found", ident));
String location = HDFS_PATTERN.matcher(schemaLocation).replaceAll("");
locations.add(location);
locations.add(schemaLocation);
}
}
break;
Expand All @@ -483,7 +475,7 @@ public static List<String> getMetadataObjectLocation(
Preconditions.checkArgument(
filesetLocation != null,
String.format("Fileset %s location is not found", identifier));
locations.add(HDFS_PATTERN.matcher(filesetLocation).replaceAll(""));
locations.add(filesetLocation);
break;
case TOPIC:
break;
Expand Down

0 comments on commit bf659d9

Please sign in to comment.