2010-09-20 14:38:24

by Paul Nuzzi

Subject: [refpolicy] [PATCH] hadoop 4/10 -- hadoop_datanode


Signed-off-by: Paul Nuzzi <[email protected]>

---
policy/modules/services/hadoop_datanode.fc | 5 +
policy/modules/services/hadoop_datanode.if | 48 +++++++++++
policy/modules/services/hadoop_datanode.te | 118 +++++++++++++++++++++++++++++
3 files changed, 171 insertions(+)

diff --git a/policy/modules/services/hadoop_datanode.fc b/policy/modules/services/hadoop_datanode.fc
new file mode 100644
index 0000000..9bb7ebe
--- /dev/null
+++ b/policy/modules/services/hadoop_datanode.fc
@@ -0,0 +1,5 @@
+/etc/rc\.d/init\.d/hadoop-(.*)?-datanode -- gen_context(system_u:object_r:hadoop_datanode_initrc_exec_t, s0)
+
+/var/log/hadoop(.*)?/hadoop-hadoop-datanode-(.*)? gen_context(system_u:object_r:hadoop_datanode_log_t, s0)
+
+/var/lib/hadoop(.*)?/cache/hadoop/dfs/data(/.*)? gen_context(system_u:object_r:hadoop_datanode_data_t, s0)
diff --git a/policy/modules/services/hadoop_datanode.if b/policy/modules/services/hadoop_datanode.if
new file mode 100644
index 0000000..ae8c0b3
--- /dev/null
+++ b/policy/modules/services/hadoop_datanode.if
@@ -0,0 +1,48 @@
+## <summary>Hadoop DataNode</summary>
+
+########################################
+## <summary>
+## Allow the specified domain to send a null signal to hadoop_datanode_t.
+## </summary>
+## <param name="domain">
+## <summary>
+## Domain needing permission
+## </summary>
+## </param>
+#
+interface(`hadoop_datanode_signull', `
+ gen_require(`
+ type hadoop_datanode_t;
+ ')
+
+ allow $1 hadoop_datanode_t:process { signull };
+')
+
+########################################
+## <summary>
+## Role access for hadoop_datanode
+## </summary>
+## <param name="role">
+## <summary>
+## Role allowed access
+## </summary>
+## </param>
+## <param name="domain">
+## <summary>
+## User domain for the role
+## </summary>
+## </param>
+#
+interface(`hadoop_datanode_role',`
+ gen_require(`
+ type hadoop_datanode_initrc_t;
+ type hadoop_datanode_initrc_exec_t;
+ type hadoop_datanode_t;
+ ')
+
+ role $1 types { hadoop_datanode_initrc_t hadoop_datanode_t };
+ allow $2 hadoop_datanode_initrc_exec_t:file { execute execute_no_trans };
+ domtrans_pattern($2, hadoop_datanode_initrc_exec_t, hadoop_datanode_initrc_t)
+ allow $2 hadoop_datanode_t:process signal;
+')
+
diff --git a/policy/modules/services/hadoop_datanode.te b/policy/modules/services/hadoop_datanode.te
new file mode 100644
index 0000000..50f9ca6
--- /dev/null
+++ b/policy/modules/services/hadoop_datanode.te
@@ -0,0 +1,118 @@
+policy_module(hadoop_datanode, 1.0.0)
+
+attribute hadoop_datanode_domain;
+
+type hadoop_datanode_initrc_t;
+domain_type(hadoop_datanode_initrc_t)
+typeattribute hadoop_datanode_initrc_t hadoop_datanode_domain;
+
+type hadoop_datanode_initrc_exec_t;
+files_type(hadoop_datanode_initrc_exec_t)
+
+init_daemon_domain(hadoop_datanode_initrc_t, hadoop_datanode_initrc_exec_t)
+allow hadoop_datanode_initrc_t self:capability { setuid setgid sys_tty_config };
+corecmd_exec_all_executables(hadoop_datanode_initrc_t)
+files_manage_generic_locks(hadoop_datanode_initrc_t)
+init_read_utmp(hadoop_datanode_initrc_t)
+init_write_utmp(hadoop_datanode_initrc_t)
+kernel_read_kernel_sysctls(hadoop_datanode_initrc_t)
+kernel_read_sysctl(hadoop_datanode_initrc_t)
+logging_send_syslog_msg(hadoop_datanode_initrc_t)
+logging_send_audit_msgs(hadoop_datanode_initrc_t)
+hadoop_manage_run(hadoop_datanode_initrc_t)
+allow hadoop_datanode_initrc_t hadoop_datanode_t:process { signull signal };
+
+type hadoop_datanode_t;
+typeattribute hadoop_datanode_t hadoop_datanode_domain;
+hadoop_runas(hadoop_datanode_initrc_t, hadoop_datanode_t)
+role system_r types hadoop_datanode_t;
+domain_type(hadoop_datanode_t)
+optional_policy(`
+ unconfined_run_to(hadoop_datanode_initrc_t, hadoop_datanode_initrc_exec_t)
+ unconfined_roletrans(hadoop_datanode_t)
+')
+
+libs_use_ld_so(hadoop_datanode_domain)
+libs_use_shared_libs(hadoop_datanode_domain)
+miscfiles_read_localization(hadoop_datanode_domain)
+dev_read_urand(hadoop_datanode_domain)
+kernel_read_network_state(hadoop_datanode_domain)
+files_read_etc_files(hadoop_datanode_domain)
+files_read_usr_files(hadoop_datanode_domain)
+kernel_read_system_state(hadoop_datanode_domain)
+nscd_socket_use(hadoop_datanode_domain)
+java_exec(hadoop_datanode_domain)
+hadoop_rx_etc(hadoop_datanode_domain)
+hadoop_manage_log_dir(hadoop_datanode_domain)
+files_manage_generic_tmp_files(hadoop_datanode_domain)
+files_manage_generic_tmp_dirs(hadoop_datanode_domain)
+fs_getattr_xattr_fs(hadoop_datanode_domain)
+allow hadoop_datanode_domain self:process { execmem getsched setsched signal setrlimit };
+allow hadoop_datanode_domain self:fifo_file { read write getattr ioctl };
+allow hadoop_datanode_domain self:capability sys_resource;
+allow hadoop_datanode_domain self:key write;
+nis_use_ypbind(hadoop_datanode_domain)
+corenet_tcp_connect_portmap_port(hadoop_datanode_domain)
+userdom_dontaudit_search_user_home_dirs(hadoop_datanode_domain)
+files_dontaudit_search_spool(hadoop_datanode_domain)
+
+
+type hadoop_datanode_pid_t;
+files_pid_file(hadoop_datanode_pid_t)
+allow hadoop_datanode_domain hadoop_datanode_pid_t:file manage_file_perms;
+allow hadoop_datanode_domain hadoop_datanode_pid_t:dir rw_dir_perms;
+files_pid_filetrans(hadoop_datanode_domain, hadoop_datanode_pid_t, file)
+hadoop_transition_run_file(hadoop_datanode_initrc_t, hadoop_datanode_pid_t)
+
+type hadoop_datanode_log_t;
+logging_log_file(hadoop_datanode_log_t)
+allow hadoop_datanode_domain hadoop_datanode_log_t:file manage_file_perms;
+allow hadoop_datanode_domain hadoop_datanode_log_t:dir { setattr rw_dir_perms };
+logging_log_filetrans(hadoop_datanode_domain, hadoop_datanode_log_t, { file dir })
+hadoop_transition_log_file(hadoop_datanode_t, hadoop_datanode_log_t)
+hadoop_transition_log_file(hadoop_datanode_initrc_t, hadoop_datanode_log_t)
+
+type hadoop_datanode_data_t;
+files_type(hadoop_datanode_data_t)
+allow hadoop_datanode_t hadoop_datanode_data_t:file manage_file_perms;
+allow hadoop_datanode_t hadoop_datanode_data_t:dir manage_dir_perms;
+hadoop_transition_data_file(hadoop_datanode_t, hadoop_datanode_data_t)
+
+type hadoop_datanode_tmp_t;
+files_tmp_file(hadoop_datanode_tmp_t)
+allow hadoop_datanode_t hadoop_datanode_tmp_t:file manage_file_perms;
+files_tmp_filetrans(hadoop_datanode_t, hadoop_datanode_tmp_t, file)
+
+corecmd_exec_bin(hadoop_datanode_t)
+corecmd_exec_shell(hadoop_datanode_t)
+dev_read_rand(hadoop_datanode_t)
+dev_read_sysfs(hadoop_datanode_t)
+files_read_var_lib_files(hadoop_datanode_t)
+hadoop_manage_data_dir(hadoop_datanode_t)
+hadoop_getattr_run_dir(hadoop_datanode_t)
+dontaudit hadoop_datanode_t self:netlink_route_socket { create ioctl read getattr write setattr append bind connect getopt setopt shutdown nlmsg_read nlmsg_write };
+
+allow hadoop_datanode_t self:tcp_socket create_stream_socket_perms;
+corenet_tcp_sendrecv_generic_if(hadoop_datanode_t)
+corenet_tcp_sendrecv_all_nodes(hadoop_datanode_t)
+corenet_all_recvfrom_unlabeled(hadoop_datanode_t)
+corenet_tcp_bind_all_nodes(hadoop_datanode_t)
+sysnet_read_config(hadoop_datanode_t)
+corenet_tcp_sendrecv_all_ports(hadoop_datanode_t)
+corenet_tcp_bind_all_ports(hadoop_datanode_t)
+corenet_tcp_connect_generic_port(hadoop_datanode_t)
+
+allow hadoop_datanode_t self:udp_socket create_socket_perms;
+corenet_udp_sendrecv_generic_if(hadoop_datanode_t)
+corenet_udp_sendrecv_all_nodes(hadoop_datanode_t)
+corenet_udp_bind_all_nodes(hadoop_datanode_t)
+corenet_udp_bind_all_ports(hadoop_datanode_t)
+
+fs_getattr_xattr_fs(hadoop_datanode_t)
+corenet_tcp_connect_hadoop_namenode_port(hadoop_datanode_t)
+
+hadoop_namenode_signull(hadoop_datanode_t)
+hadoop_jobtracker_signull(hadoop_datanode_t)
+hadoop_secondarynamenode_signull(hadoop_datanode_t)
+hadoop_tasktracker_signull(hadoop_datanode_t)
+
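
For illustration only (not part of the patch): a caller module would use the two
interfaces added in hadoop_datanode.if roughly as below. The domain myadm_t and
role myadm_r are hypothetical placeholders for whatever admin domain/role the
calling policy defines.

	optional_policy(`
		# let the admin domain send a null signal to the datanode
		hadoop_datanode_signull(myadm_t)

		# let the myadm_r role run the datanode init script and
		# transition to hadoop_datanode_initrc_t
		hadoop_datanode_role(myadm_r, myadm_t)
	')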