Follow this shell script to generate 3000 Ranger policy files. First, update the "create_policy.json" template with the correct Hive service name for your cluster (here, "service": "c249_hive").
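If you are unsure of the exact service name, you can list the services defined in Ranger Admin first. A quick check, assuming the same admin:admin credentials and Ranger host used later in this post:

#List the Ranger services and grep out their names to find the Hive service (e.g. c249_hive)
curl -s -u admin:admin -H "Accept: application/json" http://c249-node5.example.com:6080/service/public/v2/api/service | python -m json.tool | grep '"name"'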
++++++++++++++++++++++++++++++++++
"create_policy.json"
++++++++++++++++++++++++++++++++++
{
    "allowExceptions": [],
    "denyExceptions": [],
    "denyPolicyItems": [
        {
            "accesses": [
                {
                    "isAllowed": true,
                    "type": "drop"
                }
            ],
            "conditions": [],
            "delegateAdmin": true,
            "groups": ["hadoop"],
            "users": []
        }
    ],
    "description": "Policy for Service: c249_hive",
    "isAuditEnabled": true,
    "isEnabled": true,
    "name": "c249_hive_test-1",
    "policyItems": [
        {
            "accesses": [
                {
                    "isAllowed": true,
                    "type": "select"
                },
                {
                    "isAllowed": true,
                    "type": "update"
                },
                {
                    "isAllowed": true,
                    "type": "create"
                },
                {
                    "isAllowed": true,
                    "type": "drop"
                }
            ],
            "conditions": [],
            "delegateAdmin": true,
            "groups": ["public"],
            "users": []
        }
    ],
    "resources": {
        "database": {
            "isExcludes": false,
            "isRecursive": false,
            "values": ["rajesh"]
        },
        "table": {
            "isExcludes": false,
            "isRecursive": false,
            "values": ["*"]
        },
        "column": {
            "isExcludes": false,
            "isRecursive": false,
            "values": ["*"]
        }
    },
    "service": "c249_hive",
    "version": 1
}
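Before cloning this template 3000 times, it can be worth confirming that your edited JSON is still well-formed. A quick sanity check, assuming python is available on the node:

#Validate the template; prints the parsed JSON on success, an error message on failure
python -m json.tool create_policy.json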
#++++++++++Shell Script++++++++++++++
#To prepare 3000 Hive policy JSON files
for i in {1..3000}
do
  #Clone the template policy file into a new copy for each iteration
  cp create_policy.json create_policy_$i.json
  #Update the database name to a unique, non-existent name: rajesh-1, rajesh-2, ... rajesh-3000
  sed -i -e "s/rajesh/rajesh-$i/g" create_policy_$i.json
  #Update the policy name to a unique name: c249_hive_test-1, c249_hive_test-2, ... c249_hive_test-3000
  #(match the full template name "c249_hive_test-1" so the suffix is replaced rather than appended)
  sed -i -e "s/c249_hive_test-1/c249_hive_test-$i/g" create_policy_$i.json
done
#To create the Ranger policies in Ranger Admin through curl
for i in {1..3000}
do
  curl -u admin:admin -H "Content-Type: application/json" -X POST http://c249-node5.example.com:6080/service/public/v2/api/policy -d @create_policy_$i.json
done
#++++++++++End of Shell Script++++++++++++++
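Once the loop finishes, you can confirm how many test policies landed in Ranger Admin, and clean them up when the exercise is done. A sketch, assuming the same credentials and host; the GET call lists all policies for the service and the DELETE call uses Ranger's public v2 API, which accepts servicename and policyname as query parameters:

#Rough count of the test policies created for the service, based on the quoted policy names in the response
curl -s -u admin:admin -H "Accept: application/json" "http://c249-node5.example.com:6080/service/public/v2/api/service/c249_hive/policy" | grep -o '"c249_hive_test-[0-9]*"' | wc -l
#Delete the test policies one by one after the test
for i in {1..3000}
do
  curl -u admin:admin -X DELETE "http://c249-node5.example.com:6080/service/public/v2/api/policy?servicename=c249_hive&policyname=c249_hive_test-$i"
done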