-
下载Hadoop2.5.0
-
准备工作
mkdir -p /opt/modules //安装目录
tar -zxf hadoop-2.5.0.tar.gz -C /opt/modules //解压
rm -rf /opt/modules/hadoop-2.5.0/share/doc //删除文档,占用空间
rm /opt/modules/hadoop-2.5.0/etc/hadoop/*.cmd //删除Windows执行文件
cd /opt/modules/hadoop-2.5.0/etc/hadoop
-
配置hadoop-env.sh
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_91.jdk/Contents/Home //配置java环境
-
配置/etc/hosts
在 /etc/hosts 加上
127.0.0.1 bigdata-msenior01.kfk.com
-
配置core-site.xml
先执行 mkdir -p /Users/gaowenfeng/opt/modules/hadoop-2.5.0/data/tmp 创建存储路径（该路径必须与下面 hadoop.tmp.dir 的值一致）
然后修改core-site
<configuration>
<!--配置域名和端口-->
<property>
<name>fs.defaultFS</name>
<value>hdfs://bigdata-msenior01.kfk.com:9000</value>
</property>
<!--配置存储路径-->
<property>
<name>hadoop.tmp.dir</name>
<value>/Users/gaowenfeng/opt/modules/hadoop-2.5.0/data/tmp</value>
</property>
</configuration>
详情请参考
http://hadoop.apache.org/docs/r2.5.2/hadoop-project-dist/hadoop-common/SingleCluster.html
http://hadoop.apache.org/docs/r2.5.2/hadoop-project-dist/hadoop-common/core-default.xml
-
配置hdfs-site.xml
<configuration>
<!--配置副本数-->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!--配置默认不进行权限校验-->
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
</property>
</configuration>
详情请参考
http://hadoop.apache.org/docs/r2.5.2/hadoop-project-dist/hadoop-common/SingleCluster.html
http://hadoop.apache.org/docs/r2.5.2/hadoop-project-dist/hadoop-common/hdfs-default.xml
-
配置slaves
修改localhost为 bigdata-msenior01.kfk.com
-
系统的 format
cd /opt/modules/hadoop-2.5.0/ && bin/hdfs namenode -format
-
启动
cd /opt/modules/hadoop-2.5.0/ && sbin/hadoop-daemon.sh start namenode
cd /opt/modules/hadoop-2.5.0/ && sbin/hadoop-daemon.sh start datanode
网友评论