Hadoop 1.0 Installation

Author: Kcrise | Published 2019-06-11 15:39
    1. Cluster plan and environment
        VMware Workstation 15
        CentOS 7.6
        192.168.10.21   (master)
        192.168.10.22   (slave1)
        192.168.10.23   (slave2)
    
    2. Virtual machine installation and network configuration
    3. Install JDK 1.8
    4. Install Hadoop 1.2.1
    Upload the Hadoop installation package to the master host:
    
    [root@master]/root$ls -l
    total 37208
    -rw-r--r--. 1 root root 38096663 Jun  3 20:50 hadoop-1.2.1-bin.tar.gz
    -rw-------. 1 root root     1257 Jun  4 22:39 anaconda-ks.cfg
    
    Extract the archive:
    [root@master]/root$tar -zxvf hadoop-1.2.1-bin.tar.gz -C /usr/local/src
    

    5. Edit the configuration files

    (1) masters
    [root@master]/root$cd /usr/local/src/hadoop-1.2.1/conf/
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi masters
    Change localhost to:
    master
    (2) slaves
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi slaves
    Change localhost to:
    slave1
    slave2
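    
    The masters and slaves files refer to the nodes by hostname, so master, slave1, and slave2 must resolve on every machine. If step 2 did not already cover this, a minimal /etc/hosts sketch (the same mappings as the cluster plan above, appended on all three hosts):
    
    # /etc/hosts entries on master, slave1 and slave2 (skip any that already exist)
    192.168.10.21   master
    192.168.10.22   slave1
    192.168.10.23   slave2
    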
    (3) core-site.xml
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi core-site.xml
    
    <configuration>
        <property>
            <name>hadoop.tmp.dir</name>
            <value>/usr/local/src/hadoop-1.2.1/tmp</value>
        </property>
    
        <property>
            <name>fs.default.name</name>
            <value>hdfs://192.168.10.21:9000</value>
        </property>
    </configuration>
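    
    core-site.xml points hadoop.tmp.dir at /usr/local/src/hadoop-1.2.1/tmp. The NameNode format in step 6 creates this directory if it is missing, but pre-creating it is a harmless sanity check; a sketch:
    
    # Optional: pre-create the directory referenced by hadoop.tmp.dir
    mkdir -p /usr/local/src/hadoop-1.2.1/tmp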
    
    (4) mapred-site.xml
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi mapred-site.xml
    
    <configuration>
        <property>
            <name>mapred.job.tracker</name>
            <value>192.168.10.21:9001</value>
        </property>
    </configuration>
    
    
    (5) hdfs-site.xml
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi hdfs-site.xml
    
    <configuration>
        <property>
            <name>dfs.replication</name>
            <value>3</value>
        </property>
    </configuration>
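    
    Note that this cluster has only two DataNodes (slave1 and slave2), so a replication factor of 3 cannot actually be satisfied and HDFS will report under-replicated blocks. If you prefer the factor to match the cluster size, a sketch of the alternative (this deviates from the original value of 3):
    
    <configuration>
        <property>
            <name>dfs.replication</name>
            <value>2</value>
        </property>
    </configuration>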
    
    (6) hadoop-env.sh
    [root@master]/usr/local/src/hadoop-1.2.1/conf$vi hadoop-env.sh
    
    Append at the end of the file:
    export JAVA_HOME=/opt/jdk
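    
    hadoop-env.sh assumes the JDK from step 3 lives at /opt/jdk. A quick check that the path is valid before moving on (the version printed should match the java = 1.8.0_162 reported by the format log in step 6):
    
    # Verify that JAVA_HOME in hadoop-env.sh points at a real JDK
    /opt/jdk/bin/java -version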
    
    

    6. Initialize HDFS

    [root@master]/root$cd /usr/local/src/hadoop-1.2.1/bin
    [root@master]/usr/local/src/hadoop-1.2.1/bin$./hadoop namenode -format
    19/06/10 10:31:43 INFO namenode.NameNode: STARTUP_MSG: 
    /************************************************************
    STARTUP_MSG: Starting NameNode
    STARTUP_MSG:   host = master/192.168.10.21
    STARTUP_MSG:   args = [-format]
    STARTUP_MSG:   version = 1.2.1
    STARTUP_MSG:   build = https://svn.apache.org/repos/asf/hadoop/common/branches/branch-1.2 -r 1503152; compiled by 'mattf' on Mon Jul 22 15:23:09 PDT 2013
    STARTUP_MSG:   java = 1.8.0_162
    ************************************************************/
    19/06/10 10:31:43 INFO util.GSet: Computing capacity for map BlocksMap
    19/06/10 10:31:43 INFO util.GSet: VM type       = 64-bit
    19/06/10 10:31:43 INFO util.GSet: 2.0% max memory = 932184064
    19/06/10 10:31:43 INFO util.GSet: capacity      = 2^21 = 2097152 entries
    19/06/10 10:31:43 INFO util.GSet: recommended=2097152, actual=2097152
    19/06/10 10:31:43 INFO namenode.FSNamesystem: fsOwner=root
    19/06/10 10:31:43 INFO namenode.FSNamesystem: supergroup=supergroup
    19/06/10 10:31:43 INFO namenode.FSNamesystem: isPermissionEnabled=true
    19/06/10 10:31:43 INFO namenode.FSNamesystem: dfs.block.invalidate.limit=100
    19/06/10 10:31:43 INFO namenode.FSNamesystem: isAccessTokenEnabled=false accessKeyUpdateInterval=0 min(s), accessTokenLifetime=0 min(s)
    19/06/10 10:31:43 INFO namenode.FSEditLog: dfs.namenode.edits.toleration.length = 0
    19/06/10 10:31:43 INFO namenode.NameNode: Caching file names occuring more than 10 times 
    19/06/10 10:31:43 INFO common.Storage: Image file /usr/local/src/hadoop-1.2.1/tmp/dfs/name/current/fsimage of size 110 bytes saved in 0 seconds.
    19/06/10 10:31:43 INFO namenode.FSEditLog: closing edit log: position=4, editlog=/usr/local/src/hadoop-1.2.1/tmp/dfs/name/current/edits
    19/06/10 10:31:43 INFO namenode.FSEditLog: close success: truncate to 4, editlog=/usr/local/src/hadoop-1.2.1/tmp/dfs/name/current/edits
    19/06/10 10:31:43 INFO common.Storage: Storage directory /usr/local/src/hadoop-1.2.1/tmp/dfs/name has been successfully formatted.
    19/06/10 10:31:43 INFO namenode.NameNode: SHUTDOWN_MSG: 
    /************************************************************
    SHUTDOWN_MSG: Shutting down NameNode at master/192.168.10.21
    ************************************************************/
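    
    The format above only initializes the NameNode's storage on master. start-all.sh in the next step launches the DataNodes and TaskTrackers on slave1 and slave2 over SSH, so both slaves need the same JDK and the same configured Hadoop 1.2.1 directory, and passwordless SSH from master must already work. The original walkthrough does not show this step; a hedged sketch, assuming identical paths on every host:
    
    # Copy the configured Hadoop directory to both slaves
    scp -r /usr/local/src/hadoop-1.2.1 slave1:/usr/local/src/
    scp -r /usr/local/src/hadoop-1.2.1 slave2:/usr/local/src/
    
    # Passwordless SSH from master, if not already set up
    ssh-keygen -t rsa
    ssh-copy-id root@master
    ssh-copy-id root@slave1
    ssh-copy-id root@slave2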
    
    

    7. Start Hadoop

    [root@master]/root$cd /usr/local/src/hadoop-1.2.1/bin
    [root@master]/usr/local/src/hadoop-1.2.1/bin$./start-all.sh 
    starting namenode, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-namenode-master.out
    slave1: starting datanode, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-datanode-slave1.out
    slave2: starting datanode, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-datanode-slave2.out
    master: starting secondarynamenode, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-secondarynamenode-master.out
    starting jobtracker, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-jobtracker-master.out
    slave1: starting tasktracker, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-tasktracker-slave1.out
    slave2: starting tasktracker, logging to /usr/local/src/hadoop-1.2.1/libexec/../logs/hadoop-root-tasktracker-slave2.out
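    
    Once start-all.sh returns, the JDK's jps tool shows which daemons are actually running. With this layout, master should list NameNode, SecondaryNameNode and JobTracker, and each slave should list DataNode and TaskTracker. A sketch (assuming jps is reachable at /opt/jdk/bin/jps on every host):
    
    # On master: expect NameNode, SecondaryNameNode, JobTracker (plus Jps itself)
    /opt/jdk/bin/jps
    
    # On the slaves: expect DataNode and TaskTracker
    ssh slave1 /opt/jdk/bin/jps
    ssh slave2 /opt/jdk/bin/jps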
    
    

    8. Create a soft link for the Hadoop directory

    [root@master]/usr/local/src$ln -s hadoop-1.2.1 hadoop
    [root@master]/usr/local/src$ls -l
    total 4
    drwxr-xr-x. 16 root root 4096 Jun 10 10:36 hadoop-1.2.1
    lrwxrwxrwx.  1 root root   12 Jun 10 10:47 hadoop -> hadoop-1.2.1
    

    9. Configure PATH

    [root@master]/usr/local/src$vi /etc/profile
    Add:
    #######################hadoop##########################
    export HADOOP_INSTALL=/usr/local/src/hadoop
    export PATH=$PATH:$HADOOP_INSTALL/bin
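    
    The new variables only take effect in a fresh login shell unless the profile is re-read. A quick way to apply and verify them (hadoop version is a standard Hadoop 1.x subcommand):
    
    # Reload the profile in the current shell, then confirm hadoop resolves via PATH
    source /etc/profile
    hadoop version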
    

    10. Sync /etc/profile to slave1 and slave2

    [root@master]/root$scp /etc/profile slave1:/etc/
    profile                                          100% 2770   685.2KB/s   00:00    
    [root@master]/root$scp /etc/profile slave2:/etc/
    profile                                          100% 2770     1.4MB/s   00:00    
    [root@master]/root$
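    
    Copying the file does not reload it in any shells already running on the slaves; it is picked up at the next login. A quick remote check (assuming passwordless SSH, as used by start-all.sh above):
    
    # Confirm the slaves resolve hadoop through the updated PATH
    ssh slave1 'source /etc/profile && which hadoop'
    ssh slave2 'source /etc/profile && which hadoop'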
    

    11. Edit the HOSTS file on the Windows host

    C:\Windows\System32\drivers\etc\HOSTS
    
    
    192.168.10.21 master
    192.168.10.22 slave1
    192.168.10.23 slave2
    

    12. Test

    (1) Access the NameNode web UI in a browser
    http://192.168.10.21:50070    OK
    (2) Hadoop command line
    [root@master]/root$hadoop fs -ls /
    Found 1 items
    drwxr-xr-x   - root supergroup          0 2019-06-10 10:36 /usr
    Upload a file:
    [root@master]/root$hadoop fs -put data.txt /
    [root@master]/root$hadoop fs -ls /
    Found 2 items
    -rw-r--r--   3 root supergroup 1074963960 2019-06-11 15:21 /data.txt
    drwxr-xr-x   - root supergroup          0 2019-06-10 16:43 /usr
    [root@master]/root$
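    
    Beyond listing files, two standard Hadoop 1.x commands give a fuller picture: dfsadmin -report summarizes capacity and should list slave1 and slave2 as live DataNodes, and fsck checks the block and replication state of the uploaded file. A sketch:
    
    # Cluster summary: both DataNodes should appear as live nodes
    hadoop dfsadmin -report
    
    # Block and replication health of the uploaded file
    hadoop fsck /data.txt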
    

    13. Monitoring web UIs

    NameNode
    http://192.168.10.21:50070

    SecondaryNameNode
    http://master:50090/status.jsp

    DataNode
    http://slave1:50075/
    http://slave2:50075/

    JobTracker
    http://master:50030/jobtracker.jsp

    TaskTracker
    http://slave1:50060/tasktracker.jsp
    http://slave2:50060/tasktracker.jsp
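    
    From the Linux side (where the hostnames resolve via /etc/hosts), the same pages can be spot-checked without a browser; a sketch using curl:
    
    # Each command prints an HTTP status line if the daemon's embedded web server is listening
    curl -sI http://master:50070/ | head -n 1
    curl -sI http://master:50030/ | head -n 1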
