Linux 环境准备
- 版本 CentOS 7
- JDK 1.8
- Hadoop 2.7.6
root 账户激活(设置root密码)
sudo passwd root
JDK安装
cd /software/java
tar -zxvf jdk-8u172-linux-x64.tar.gz
JDK 环境变量配置
vim /etc/profile
# 在最下面添加下面两行
export JAVA_HOME=/software/java/jdk1.8.0_172
export PATH=$PATH:$JAVA_HOME/bin
# 保存后执行 source /etc/profile 使环境变量立即生效
Hadoop安装
cd /software/hadoop
tar -zxvf hadoop-2.7.6.tar.gz
# 编辑 /etc/profile,在最下面添加下面两行
export HADOOP_HOME=/software/hadoop/hadoop-2.7.6
export PATH=$PATH:$HADOOP_HOME/bin
# 保存后执行 source /etc/profile 使环境变量立即生效
配置Hadoop core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<!-- 格式为hdfs://主机地址:端口 -->
<value>hdfs://10.211.55.12:8020</value>
</property>
</configuration>
开启50070端口(Hadoop用户界面)
# 开放50070端口
firewall-cmd --add-port=50070/tcp --permanent
# 重载防火墙
firewall-cmd --reload
或者你也可以关闭CentOS 7 Firewall
# 停止firewall
systemctl stop firewalld.service
# 禁止firewall开机启动
systemctl disable firewalld.service
# 查看防火墙状态
firewall-cmd --state
SSH免密登录设置
(确保每台机器都可以免密包括自己连自己)
# 生成密钥 输入后一路回车即可
ssh-keygen -t rsa
# 发送密钥
ssh-copy-id -i ~/.ssh/id_rsa.pub root@10.211.55.13
# 发送密钥
ssh-copy-id -i ~/.ssh/id_rsa.pub root@10.211.55.14
core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://10.211.55.12:8020</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/opt/module/hadoop-2.7.6/data/tmp</value>
</property>
hdfs-site.xml
<property>
<name>dfs.namenode.secondary.http-address</name>
<value>10.211.55.14:50090</value>
</property>
mapred-site.xml
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
yarn-site.xml
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>10.211.55.13</value>
</property>
网友评论