2010-09-20 14:36:35

by Paul Nuzzi

Subject: [refpolicy] [PATCH] hadoop 2/10 -- hadoop


Signed-off-by: Paul Nuzzi <[email protected]>

---
policy/modules/apps/hadoop.fc | 10 +
policy/modules/apps/hadoop.if | 236 ++++++++++++++++++++++++++++++++++++++++++
policy/modules/apps/hadoop.te | 93 ++++++++++++++++
3 files changed, 339 insertions(+)
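
For reviewers, a minimal sketch of how a userdomain module could wire up the
new interfaces. The staff_r/staff_t caller below is only an illustration of the
intended usage and is not part of this patch:

	# hypothetical hunk in a role module such as roles/staff.te
	optional_policy(`
		# run /usr/lib/hadoop*/bin/hadoop in the hadoop_t domain
		hadoop_role(staff_r, staff_t)
	')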

diff --git a/policy/modules/apps/hadoop.fc b/policy/modules/apps/hadoop.fc
new file mode 100644
index 0000000..aa2012e
--- /dev/null
+++ b/policy/modules/apps/hadoop.fc
@@ -0,0 +1,10 @@
+/usr/lib/hadoop(.*)?/bin/hadoop -- gen_context(system_u:object_r:hadoop_exec_t,s0)
+
+/etc/hadoop(/.*)? gen_context(system_u:object_r:hadoop_etc_t,s0)
+/etc/hadoop-0.20(/.*)? gen_context(system_u:object_r:hadoop_etc_t,s0)
+
+/var/lib/hadoop(.*)? gen_context(system_u:object_r:hadoop_data_t,s0)
+
+/var/log/hadoop(.*)? gen_context(system_u:object_r:hadoop_log_t,s0)
+
+/var/run/hadoop(.*)? gen_context(system_u:object_r:hadoop_run_t,s0)
diff --git a/policy/modules/apps/hadoop.if b/policy/modules/apps/hadoop.if
new file mode 100644
index 0000000..b1513f0
--- /dev/null
+++ b/policy/modules/apps/hadoop.if
@@ -0,0 +1,236 @@
+## <summary>Hadoop client</summary>
+
+########################################
+## <summary>
+## Execute hadoop_exec_t with a transition to the specified domain
+## </summary>
+## <param name="domain">
+## <summary>
+## Initial domain
+## </summary>
+## </param>
+## <param name="target_domain">
+## <summary>
+## Domain to transition to when executing hadoop_exec_t
+## </summary>
+## </param>
+#
+interface(`hadoop_runas', `
+ gen_require(`
+ type hadoop_exec_t;
+ ')
+
+ domtrans_pattern($1, hadoop_exec_t, $2)
+ domain_entry_file($2, hadoop_exec_t)
+')
+
+########################################
+## <summary>
+## Give permission to a domain to read and execute hadoop_etc_t
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing read and execute permission
+## </summary>
+## </param>
+#
+interface(`hadoop_rx_etc', `
+ gen_require(`
+ type hadoop_etc_t;
+ ')
+
+ allow $1 hadoop_etc_t:dir search_dir_perms;
+ allow $1 hadoop_etc_t:lnk_file { read getattr };
+ allow $1 hadoop_etc_t:file { read_file_perms execute execute_no_trans };
+')
+
+########################################
+## <summary>
+## Transition from hadoop_log_t to desired log file type
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain creating the new files
+## </summary>
+## </param>
+## <param name="type">
+## <summary>
+## Log file type
+## </summary>
+## </param>
+#
+interface(`hadoop_transition_log_file', `
+ gen_require(`
+ type hadoop_log_t;
+ ')
+
+ type_transition $1 hadoop_log_t:{ dir file } $2;
+')
+
+########################################
+## <summary>
+## Transition from hadoop_tmp_t to desired temporary file type
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain creating the new files
+## </summary>
+## </param>
+## <param name="type">
+## <summary>
+## Temporary file type
+## </summary>
+## </param>
+#
+interface(`hadoop_transition_tmp_file', `
+ gen_require(`
+ type hadoop_tmp_t;
+ ')
+
+ type_transition $1 hadoop_tmp_t:file $2;
+')
+
+########################################
+## <summary>
+## Transition from hadoop_run_t to desired run file type
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain creating the new files
+## </summary>
+## </param>
+## <param name="type">
+## <summary>
+## Run file type
+## </summary>
+## </param>
+#
+interface(`hadoop_transition_run_file', `
+ gen_require(`
+ type hadoop_run_t;
+ ')
+
+ type_transition $1 hadoop_run_t:file $2;
+')
+
+########################################
+## <summary>
+## Transition from hadoop_data_t to desired data file type
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain creating the new files
+## </summary>
+## </param>
+## <param name="type">
+## <summary>
+## Data file type
+## </summary>
+## </param>
+#
+interface(`hadoop_transition_data_file', `
+ gen_require(`
+ type hadoop_data_t;
+ ')
+
+ type_transition $1 hadoop_data_t:{ dir file } $2;
+')
+
+########################################
+## <summary>
+## Give permission to a domain to manage hadoop_data_t directories
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing permission
+## </summary>
+## </param>
+#
+interface(`hadoop_manage_data_dir', `
+ gen_require(`
+ type hadoop_data_t;
+ ')
+
+ manage_dirs_pattern($1, hadoop_data_t, hadoop_data_t)
+')
+
+########################################
+## <summary>
+## Give permission to a domain to manage hadoop_log_t directories
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing permission
+## </summary>
+## </param>
+#
+interface(`hadoop_manage_log_dir', `
+ gen_require(`
+ type hadoop_log_t;
+ ')
+
+ manage_dirs_pattern($1, hadoop_log_t, hadoop_log_t)
+')
+
+########################################
+## <summary>
+## Give permission to a domain to manage hadoop_run_t directories and files
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing permission
+## </summary>
+## </param>
+#
+interface(`hadoop_manage_run', `
+ gen_require(`
+ type hadoop_run_t;
+ ')
+
+ manage_dirs_pattern($1, hadoop_run_t, hadoop_run_t)
+ manage_files_pattern($1, hadoop_run_t, hadoop_run_t)
+')
+
+########################################
+## <summary>
+## Give permission to a domain to get the attributes of hadoop_run_t directories
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing permission
+## </summary>
+## </param>
+#
+interface(`hadoop_getattr_run_dir', `
+ gen_require(`
+ type hadoop_run_t;
+ ')
+
+ allow $1 hadoop_run_t:dir getattr;
+')
+
+########################################
+## <summary>
+## Role access for hadoop
+## </summary>
+## <param name="role">
+## <summary>
+## Role allowed access
+## </summary>
+## </param>
+## <param name="domain">
+## <summary>
+## User domain for the role
+## </summary>
+## </param>
+#
+interface(`hadoop_role',`
+ gen_require(`
+ type hadoop_exec_t;
+ type hadoop_t;
+ ')
+
+ role $1 types hadoop_t;
+ allow $2 hadoop_exec_t:file { execute execute_no_trans };
+ domtrans_pattern($2, hadoop_exec_t, hadoop_t)
+')
diff --git a/policy/modules/apps/hadoop.te b/policy/modules/apps/hadoop.te
new file mode 100644
index 0000000..85d1690
--- /dev/null
+++ b/policy/modules/apps/hadoop.te
@@ -0,0 +1,93 @@
+policy_module(hadoop, 1.0.0)
+
+type hadoop_t;
+domain_type(hadoop_t)
+
+type hadoop_exec_t;
+files_type(hadoop_exec_t)
+allow hadoop_t hadoop_exec_t:file { read_file_perms entrypoint execute execute_no_trans };
+allow hadoop_t hadoop_exec_t:lnk_file { read };
+optional_policy(`
+ unconfined_run_to(hadoop_t, hadoop_exec_t)
+')
+
+type hadoop_etc_t;
+files_type(hadoop_etc_t)
+
+type hadoop_data_t;
+files_type(hadoop_data_t)
+manage_files_pattern(hadoop_t, hadoop_data_t, hadoop_data_t)
+hadoop_manage_data_dir(hadoop_t)
+
+type hadoop_log_t;
+files_type(hadoop_log_t)
+
+type hadoop_run_t;
+files_type(hadoop_run_t)
+
+type hadoop_tmp_t;
+files_tmp_file(hadoop_tmp_t)
+allow hadoop_t hadoop_tmp_t:dir manage_dir_perms;
+allow hadoop_t hadoop_tmp_t:file manage_file_perms;
+
+libs_use_ld_so(hadoop_t)
+libs_use_shared_libs(hadoop_t)
+corecmd_exec_bin(hadoop_t)
+corecmd_exec_shell(hadoop_t)
+miscfiles_read_localization(hadoop_t)
+dev_read_urand(hadoop_t)
+kernel_read_network_state(hadoop_t)
+kernel_read_system_state(hadoop_t)
+files_read_etc_files(hadoop_t)
+files_manage_generic_tmp_files(hadoop_t)
+files_manage_generic_tmp_dirs(hadoop_t)
+fs_getattr_xattr_fs(hadoop_t)
+allow hadoop_t self:process { execmem getsched setsched signal setrlimit };
+allow hadoop_t self:fifo_file { read write getattr ioctl };
+allow hadoop_t self:capability sys_resource;
+allow hadoop_t self:key write;
+nis_use_ypbind(hadoop_t)
+nscd_socket_use(hadoop_t)
+corenet_tcp_connect_portmap_port(hadoop_t)
+userdom_dontaudit_search_user_home_dirs(hadoop_t)
+files_dontaudit_search_spool(hadoop_t)
+java_exec(hadoop_t)
+hadoop_rx_etc(hadoop_t)
+hadoop_manage_log_dir(hadoop_t)
+
+dev_read_rand(hadoop_t)
+dev_read_sysfs(hadoop_t)
+files_read_var_lib_files(hadoop_t)
+hadoop_manage_data_dir(hadoop_t)
+hadoop_getattr_run_dir(hadoop_t)
+dontaudit hadoop_t self:netlink_route_socket { create ioctl read getattr write setattr append bind connect getopt setopt shutdown nlmsg_read nlmsg_write };
+
+allow hadoop_t self:tcp_socket create_stream_socket_perms;
+corenet_tcp_sendrecv_generic_if(hadoop_t)
+corenet_tcp_sendrecv_all_nodes(hadoop_t)
+corenet_all_recvfrom_unlabeled(hadoop_t)
+corenet_tcp_bind_all_nodes(hadoop_t)
+sysnet_read_config(hadoop_t)
+corenet_tcp_sendrecv_all_ports(hadoop_t)
+corenet_tcp_bind_all_ports(hadoop_t)
+corenet_tcp_connect_generic_port(hadoop_t)
+
+allow hadoop_t self:udp_socket create_socket_perms;
+allow hadoop_t self:process signull;
+corenet_udp_sendrecv_generic_if(hadoop_t)
+corenet_udp_sendrecv_all_nodes(hadoop_t)
+corenet_udp_bind_all_nodes(hadoop_t)
+corenet_udp_bind_all_ports(hadoop_t)
+
+files_read_usr_files(hadoop_t)
+files_read_all_files(hadoop_t)
+
+corenet_tcp_connect_zope_port(hadoop_t)
+corenet_tcp_connect_hadoop_namenode_port(hadoop_t)
+
+hadoop_namenode_signull(hadoop_t)
+hadoop_datanode_signull(hadoop_t)
+hadoop_jobtracker_signull(hadoop_t)
+hadoop_secondarynamenode_signull(hadoop_t)
+hadoop_tasktracker_signull(hadoop_t)
+
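
As an aside for reviewers, later patches in this series are expected to consume
the helper interfaces above roughly along these lines; hadoop_namenode_t and
hadoop_namenode_log_t are assumed names from the rest of the series and are not
defined in this patch:

	# sketch of a daemon domain (from a later patch) using the helpers
	hadoop_rx_etc(hadoop_namenode_t)
	hadoop_manage_log_dir(hadoop_namenode_t)
	hadoop_transition_log_file(hadoop_namenode_t, hadoop_namenode_log_t)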