切换源
# Back up the current APT source list before modifying it.
sudo cp /etc/apt/sources.list /etc/apt/sources.list_bak
# Rewrite the main archive and security mirrors to the Huawei Cloud mirror.
sudo sed -i "s@http://.*archive.ubuntu.com@http://repo.huaweicloud.com@g" /etc/apt/sources.list
sudo sed -i "s@http://.*security.ubuntu.com@http://repo.huaweicloud.com@g" /etc/apt/sources.list
安装必要软件
# Install common utilities (editor, monitoring, network tools, archivers).
sudo apt install vim tree nload wget curl ntp net-tools sysstat psmisc unzip unrar -y
# OpenJDK 8 — the JVM referenced by JAVA_HOME in hadoop-env.sh below.
sudo apt install openjdk-8-jdk -y
设置时区
# Set the system time zone to Asia/Shanghai.
sudo timedatectl set-timezone Asia/Shanghai
下载Hadoop
# Download the Hadoop 3.2.4 binary tarball from the Apache CDN.
wget https://dlcdn.apache.org/hadoop/common/hadoop-3.2.4/hadoop-3.2.4.tar.gz
添加Hosts
编辑/etc/hosts,添加
127.0.0.1 hadoop-ubuntu
配置免登录
# Generate an SSH key pair (interactive; accept the defaults).
ssh-keygen
# Copy the public key to this host so the Hadoop scripts can SSH without a password.
ssh-copy-id root@hadoop-ubuntu
部署
1.解压缩
# Unpack the tarball and move it to /opt/hadoop.
tar -xvf hadoop-3.2.4.tar.gz
mv hadoop-3.2.4 /opt/hadoop
# Create the data directory referenced by hadoop.tmp.dir in core-site.xml.
mkdir -p /opt/hadoop/data
2.编辑 /opt/hadoop/etc/hadoop/hadoop-env.sh
# JVM used by Hadoop (the OpenJDK 8 package installed above).
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
# Run the HDFS and YARN daemons as root.
# NOTE(review): running Hadoop daemons as root is fine for a local test box but
# insecure for production — consider a dedicated "hadoop" user.
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
- 编辑 /opt/hadoop/etc/hadoop/core-site.xml
<configuration>
  <!-- Default filesystem URI (NameNode RPC endpoint).
       Fixed property name: was "fs .defaultFs" (stray space, wrong case);
       the correct key is fs.defaultFS — with the typo HDFS would silently
       fall back to the local filesystem. -->
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://hadoop-ubuntu:8020</value>
  </property>
  <!-- Base directory for Hadoop data; must be created manually
       (mkdir -p /opt/hadoop/data). -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/opt/hadoop/data</value>
  </property>
  <!-- Static user for the HDFS web UI. -->
  <property>
    <name>hadoop.http.staticuser.user</name>
    <value>hadoop</value>
  </property>
</configuration>
- 编辑/opt/hadoop/etc/hadoop/hdfs-site.xml
<configuration>
  <!-- NameNode web UI endpoint (visited as http://127.0.0.1:9870 below). -->
  <property>
    <name>dfs.namenode.http-address</name>
    <value>hadoop-ubuntu:9870</value>
  </property>
  <!-- Secondary NameNode web UI endpoint. -->
  <property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>hadoop-ubuntu:9868</value>
  </property>
</configuration>
- 编辑/opt/hadoop/etc/hadoop/yarn-site.xml
<configuration>
  <!-- Site specific YARN configuration properties -->
  <!-- Auxiliary shuffle service for MapReduce.
       Fixed value: was "mapreduce shuffle" (space); the service name is
       mapreduce_shuffle — with the typo NodeManagers fail to start the
       shuffle service and MapReduce jobs hang. -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <!-- Hostname of the ResourceManager.
       Fixed property name: was "yarn.resourcemanager,hostname" (comma);
       the correct key uses a dot. -->
  <property>
    <name>yarn.resourcemanager.hostname</name>
    <value>hadoop-ubuntu</value>
  </property>
</configuration>
- 编辑/opt/hadoop/etc/hadoop/mapred-site.xml
<configuration>
  <!-- Run MapReduce jobs on YARN instead of the default local runner. -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
启动
# Start all HDFS and YARN daemons.
# Fixed path: the start/stop scripts live in /opt/hadoop/sbin,
# not /opt/hadoop/etc/hadoop/sbin (that directory does not exist).
cd /opt/hadoop/sbin
./start-all.sh
访问
- HDFS
http://127.0.0.1:9870
- YARN
http://127.0.0.1:8088/cluster
网友评论