Reference: https://blog.csdn.net/zhanglong_4444/article/details/99471211
# Path to the JDK (point this at the local installation)
export JAVA_HOME=${JAVA_HOME}
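# Note: leaving this as ${JAVA_HOME} only works if the variable is already exported in
# the daemon's environment; on most clusters it is safer to hardcode the path. A sketch,
# assuming a hypothetical install location:
# export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64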
# Hadoop configuration directory; if $HADOOP_CONF_DIR is unset or empty, it falls back to the default /etc/hadoop
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
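# ${VAR:-default} is ordinary shell parameter expansion: it yields the value of VAR when
# set and non-empty, and the literal default otherwise. A quick sketch (the /opt path is
# hypothetical):
# unset HADOOP_CONF_DIR
# echo ${HADOOP_CONF_DIR:-"/etc/hadoop"}      # prints /etc/hadoop
# HADOOP_CONF_DIR=/opt/hadoop/etc/hadoop
# echo ${HADOOP_CONF_DIR:-"/etc/hadoop"}      # prints /opt/hadoop/etc/hadoop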
# The maximum amount of heap to use, in MB. Default is 1000.
# Shared heap setting for the namenode, secondarynamenode, jobtracker, datanode,
# and tasktracker daemons.
export HADOOP_HEAPSIZE=
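# A sketch giving every daemon a 2 GB heap; the value is in MB with no unit suffix, and
# the figure is only an example, not a recommendation:
# export HADOOP_HEAPSIZE=2048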
# Initial heap size for the NameNode; this also defaults to 1000 MB.
export HADOOP_NAMENODE_INIT_HEAPSIZE=""
# Enable extra debugging of Hadoop's JAAS binding, used to set up
# Kerberos security.
export HADOOP_JAAS_DEBUG=true
# Prefer IPv4 over IPv6 in the JVM.
export HADOOP_OPTS="$HADOOP_OPTS -Djava.net.preferIPv4Stack=true"
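# HADOOP_OPTS accumulates generic JVM flags applied to every Hadoop command, so extra
# flags are appended in the same style. A sketch (the timezone flag is only an
# illustration):
# export HADOOP_OPTS="$HADOOP_OPTS -Duser.timezone=UTC"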
# In most cases a single shared heap value is not appropriate. For the namenode,
# for example, a 1000 MB heap can only hold block references for a few million files.
# The namenode, secondarynamenode, datanode, balancer, and jobtracker heaps can be
# tuned individually through the following variables (see the sketch after the list):
HADOOP_NAMENODE_OPTS
HADOOP_SECONDARYNAMENODE_OPTS
HADOOP_DATANODE_OPTS
HADOOP_BALANCER_OPTS
HADOOP_JOBTRACKER_OPTS
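# These variables take JVM flags rather than a bare megabyte count, so heap sizes are
# expressed with -Xmx. A minimal sketch giving the namenode a 2 GB heap (the figure is
# an illustration, not a recommendation):
# export HADOOP_NAMENODE_OPTS="-Xmx2048m $HADOOP_NAMENODE_OPTS"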
# Command specific options appended to HADOOP_OPTS when specified
# JVM options for the NameNode, DataNode, SecondaryNameNode, and NFS3 daemons:
export HADOOP_NAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_NAMENODE_OPTS"
export HADOOP_DATANODE_OPTS="-Dhadoop.security.logger=ERROR,RFAS $HADOOP_DATANODE_OPTS"
export HADOOP_SECONDARYNAMENODE_OPTS="-Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,RFAS} -Dhdfs.audit.logger=${HDFS_AUDIT_LOGGER:-INFO,NullAppender} $HADOOP_SECONDARYNAMENODE_OPTS"
export HADOOP_NFS3_OPTS="$HADOOP_NFS3_OPTS"
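# The -Dhdfs.audit.logger settings above default the HDFS audit log to NullAppender,
# i.e. disabled. A sketch of enabling it, assuming the stock log4j.properties shipped
# with Hadoop, which defines an RFAAUDIT rolling-file appender:
# export HDFS_AUDIT_LOGGER=INFO,RFAAUDIT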
# JVM options for the Hadoop portmap daemon (part of the NFS gateway), with its heap capped at 512 MB:
export HADOOP_PORTMAP_OPTS="-Xmx512m $HADOOP_PORTMAP_OPTS"
# The following applies to multiple commands (fs, dfs, fsck, distcp etc)
# Client-side options, applied to commands such as fs, dfs, fsck, and distcp.
export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS"
# If HADOOP_HEAPSIZE is empty, give client JVMs a default 512 MB heap by
# prepending -Xmx512m to HADOOP_CLIENT_OPTS.
if [ "$HADOOP_HEAPSIZE" = "" ]; then
export HADOOP_CLIENT_OPTS="-Xmx512m $HADOOP_CLIENT_OPTS"
fi
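# A sketch of raising the client heap explicitly instead, e.g. for large distcp or fsck
# runs (the 1 GB figure is only an illustration):
# export HADOOP_CLIENT_OPTS="-Xmx1g $HADOOP_CLIENT_OPTS"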
# Where log files are stored. $HADOOP_HOME/logs by default.
export HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$USER
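# A sketch pointing logs at a dedicated directory (the path is hypothetical); the $USER
# suffix above then creates a per-user subdirectory beneath it:
# export HADOOP_LOG_DIR=/var/log/hadoop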
# Where log files are stored in the secure data environment.
export HADOOP_SECURE_DN_LOG_DIR=${HADOOP_LOG_DIR}/${HADOOP_HDFS_USER}
# The directory where pid files are stored. /tmp by default.
# NOTE: this should be set to a directory that can only be written to by
# the user that will run the hadoop daemons. Otherwise there is the
# potential for a symlink attack.
# Directory for the Hadoop daemons' PID files.
export HADOOP_PID_DIR=${HADOOP_PID_DIR}
export HADOOP_SECURE_DN_PID_DIR=${HADOOP_PID_DIR}
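# A sketch using a dedicated directory writable only by the daemon user, avoiding the
# symlink attack mentioned above (path and owner are hypothetical):
# mkdir -p /var/run/hadoop && chown hdfs:hadoop /var/run/hadoop
# export HADOOP_PID_DIR=/var/run/hadoop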
# A string representing this instance of hadoop. $USER by default.
# The identity string defaults to $USER, the current user.
export HADOOP_IDENT_STRING=$USER