
Getting Started with Hadoop (2)

Author: Chen_xy | Published 2017-10-10 19:23

Accessing HDFS from Java

I. Start the cluster and verify that it is running

#start-dfs.sh
#hdfs dfsadmin -report
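
(Once the Java project from section III below is set up, roughly the same summary that `hdfs dfsadmin -report` prints can also be queried through the FileSystem API. A minimal sketch, assuming the NameNode address hdfs://192.168.56.10:9000 used later in this post; the class name ClusterCheck is just for illustration.)

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;

public class ClusterCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.56.10:9000");
        FileSystem fs = FileSystem.get(conf);

        // Overall capacity / used / remaining bytes, similar to the
        // summary printed by `hdfs dfsadmin -report`
        FsStatus status = fs.getStatus();
        System.out.println("Capacity : " + status.getCapacity());
        System.out.println("Used     : " + status.getUsed());
        System.out.println("Remaining: " + status.getRemaining());
    }
}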

II. Change the Hadoop data directory
1. Modify the configuration on every node

#cd /tmp
#cd /usr/local/hadoop/etc/hadoop
#ls
#vim core-site.xml
<property>
    <name>hadoop.tmp.dir</name>
    <value>/var/hadoop</value>
</property>

2. Format the NameNode and restart HDFS (required after changing hadoop.tmp.dir)

#hdfs namenode -format
#stop-dfs.sh
#start-dfs.sh

III. Set up the Java project
1. Import the required JARs and create a new class
(1) share/hadoop/common/hadoop-common-2.7.3.jar
(2) share/hadoop/common/lib/ (all JARs in this directory)
(3) share/hadoop/hdfs/hadoop-hdfs-2.7.3.jar
2. Create a test file and upload it to HDFS

#cd
#vi hello.txt
hello bj
hello sh
hello sz
hello AMD690G
#hadoop fs -put ./hello.txt /
# hadoop fs -ls /
Found 1 items
-rw-r--r--   2 root supergroup         41 2017-10-06 23:12 /hello.txt
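
The upload can also be done from Java instead of the shell. A minimal sketch, assuming a local copy of the file at c:/test/hello.txt (a made-up path, following the c:/test/ convention used in the full code below); the class name PutHello is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class PutHello {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.56.10:9000");
        FileSystem fileSystem = FileSystem.get(conf);

        // Equivalent of `hadoop fs -put ./hello.txt /`
        fileSystem.copyFromLocalFile(new Path("c:/test/hello.txt"), new Path("/hello.txt"));
        System.out.println(fileSystem.exists(new Path("/hello.txt")));
    }
}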

3. Add a configuration entry to work around the permission checks.

#vim hdfs-site.xml
<property>
    <name>dfs.permissions.enabled</name>
    <value>false</value>
</property>
#stop-dfs.sh
#start-dfs.sh
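
Disabling dfs.permissions.enabled is fine for a test cluster, but an alternative worth noting (not part of the original steps) is to identify the Java client as the user that owns the HDFS files, root in the listing above, using the FileSystem.get overload that takes a user name. A sketch; the class name ConnectAsRoot is made up:

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ConnectAsRoot {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Identify the client as "root" instead of the local OS user,
        // so writes pass the HDFS permission checks.
        FileSystem fileSystem = FileSystem.get(
                new URI("hdfs://192.168.56.10:9000"), conf, "root");
        System.out.println(fileSystem.exists(new Path("/hello.txt")));
    }
}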

Full code

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class hellohdfs2 {

    public static void main(String[] args) throws Exception{
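        // Example 1 (commented out): read an HTTP URL and copy it to stdout with IOUtils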
        /*URL url = new URL("http://www.baidu.com");
        InputStream in = url.openStream();
        IOUtils.copyBytes(in, System.out , 4096, true);*/
        
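        // Example 2 (commented out): register the HDFS URL handler and read hello.txt through java.net.URL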
        /*URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        URL url = new URL("hdfs://192.168.56.10:9000/hello.txt");
        InputStream in = url.openStream();
        IOUtils.copyBytes(in, System.out , 4096, true);*/
        
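        // Connect to the NameNode and obtain a FileSystem handle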
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.56.10:9000");
        FileSystem fileSystem = FileSystem.get(conf);
        
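        // Example 3 (commented out): create a directory, test for existence, then delete it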
        /*boolean success = fileSystem.mkdirs(new Path("/msb"));
        System.out.println(success);
        
        success = fileSystem.exists(new Path("/hello.txt"));
        System.out.println(success);  // check whether the file exists
        
        success = fileSystem.delete(new Path("/msb"), true);
        System.out.println(success);  // delete the directory
        
        success = fileSystem.exists(new Path("/msb"));
        System.out.println(success);*/   
        
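        // Example 4 (commented out): upload a local file with IOUtils.copyBytes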
        /*FSDataOutputStream out = fileSystem.create(new Path("/test.data"), true);
        FileInputStream fis = new FileInputStream("c:/test/core-site.xml");
        IOUtils.copyBytes(fis, out, 4096, true);*/
        
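        // Example 5 (commented out): upload a local file with a manual read/write loop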
        /*FSDataOutputStream out = fileSystem.create(new Path("/test.data"), true);
        FileInputStream in = new FileInputStream("c:/test/core-site.xml");
        byte[] buf = new byte[4096];
        int len = in.read(buf);
        while(len !=-1) {
            out.write(buf, 0, len);
            len = in.read(buf);
        }
        in.close();
        out.close();*/
        
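        // List the HDFS root directory and print each entry's path, permissions, and replication factor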
        FileStatus[] statuses = fileSystem.listStatus(new Path("/"));
        //System.out.println(statuses.length);
        for(FileStatus status : statuses) {
            System.out.println(status.getPath());
            System.out.println(status.getPermission());
            System.out.println(status.getReplication());
        }
    }

}
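
For completeness, reading /hello.txt back through the same API might look like the sketch below (not part of the code above; the class name ReadHello is made up):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReadHello {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.56.10:9000");
        FileSystem fileSystem = FileSystem.get(conf);

        // Open /hello.txt and copy its contents to stdout;
        // the final "true" closes the streams when done.
        FSDataInputStream in = fileSystem.open(new Path("/hello.txt"));
        IOUtils.copyBytes(in, System.out, 4096, true);
    }
}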
