课程目标
掌握配置Hadoop的分布式环境和高级特性
掌握利用Sqoop和Flume进行数据的交换
了解什么是Zookeeper及其作用
掌握利用HBase存储和访问数据
一、搭建Hadoop的全分布模式

(此处原有集群部署示意图,图片缺失)
#mkdir tools
#mkdir training
#cd training
#tar -zxvf jdk-8u144-linux-x64.tar.gz
#vi ~/.bash_profile
export JAVA_HOME=/root/training/jdk1.8.0_144
export PATH=$JAVA_HOME/bin:$PATH
#source ~/.bash_profile
#which java
#vi /etc/hosts
192.168.56.21 hadoop21
192.168.56.22 hadoop22
192.168.56.23 hadoop23
192.168.56.24 hadoop24
设置免密码登录(以下命令需在每台机器上分别执行)
#cd
#ssh-keygen -t rsa
#ssh-copy-id -i .ssh/id_rsa.pub root@hadoop21
#ssh-copy-id -i .ssh/id_rsa.pub root@hadoop22
#ssh-copy-id -i .ssh/id_rsa.pub root@hadoop23
#ssh-copy-id -i .ssh/id_rsa.pub root@hadoop24
#cd .ssh
#ls
#more authorized_keys
在hadoop21上安装并配置Hadoop:
#cd tools
#tar -zxvf hadoop-2.4.1.tar.gz -C /root/training/
#cd ~/training
#cd hadoop-2.4.1/
#vi ~/.bash_profile
export HADOOP_HOME=/root/training/hadoop-2.4.1
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
#source ~/.bash_profile
#cd etc/hadoop
#vi hadoop-env.sh
export JAVA_HOME=/root/training/jdk1.8.0_144
#vi hdfs-site.xml
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
</property>
#mkdir /root/training/hadoop-2.4.1/data
#vi core-site.xml
<property>
<name>fs.defaultFS</name>
<value>hdfs://192.168.56.21:9000</value>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/root/training/hadoop-2.4.1/data</value>
</property>
#vi slaves
192.168.56.22
192.168.56.23
192.168.56.24
# cp mapred-site.xml.template mapred-site.xml
#vi mapred-site.xml
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
#vi yarn-site.xml
<property>
<name>yarn.resourcemanager.hostname</name>
<value>192.168.56.21</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
#cd ..
#cd ..
#cd share
#rm -rf doc
#cd ~/training
# scp -r hadoop-2.4.1/ root@hadoop22:/root/training/
# scp -r hadoop-2.4.1/ root@hadoop23:/root/training/
# scp -r hadoop-2.4.1/ root@hadoop24:/root/training/
在hadoop22、hadoop23、hadoop24上分别配置环境变量:
#vi ~/.bash_profile
export HADOOP_HOME=/root/training/hadoop-2.4.1
export PATH=$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH
#source ~/.bash_profile
#pwd
/root/training/hadoop-2.4.1/tmp
#hdfs namenode -format
#cd ~/training/hadoop-2.4.1
#start-all.sh
#jps
网友评论