
Hadoop in Practice - 2. Remote HDFS Distributed File Management from Windows

Author: 笨鸡 | Published 2019-03-14 14:59

    1. Set up a Hadoop runtime environment on Windows

    Watch out for spaces in the file path (avoid them), and configure the environment variables properly; a small self-check is sketched below.
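
    As a quick sanity check for step 1, here is a minimal sketch that prints HADOOP_HOME and looks for the winutils.exe binary that Hadoop needs on Windows. The class name CheckHadoopEnv and the %HADOOP_HOME%\bin\winutils.exe location are assumptions for illustration.

    import java.io.File;

    public class CheckHadoopEnv {

        public static void main(String[] args) {
            // HADOOP_HOME should point at the Hadoop install directory (avoid spaces in the path)
            String hadoopHome = System.getenv("HADOOP_HOME");
            System.out.println("HADOOP_HOME = " + hadoopHome);

            if (hadoopHome != null) {
                // On Windows, Hadoop also expects winutils.exe under %HADOOP_HOME%\bin
                File winutils = new File(hadoopHome, "bin\\winutils.exe");
                System.out.println("winutils.exe present: " + winutils.exists());
            }
        }
    }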

    2. Open IDEA and import the Hadoop common and hdfs jar packages

    (Figure: hadoop导包.png — importing the Hadoop jars in IDEA)

    3. Create a HelloHDFS class

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.*;
    import org.apache.hadoop.io.IOUtils;
    
    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.net.URL;
    
    public class HelloHDFS {
    
        public static void main(String[] args) throws Exception {
    
            // Read a file through a hdfs:// URL by registering Hadoop's URL stream handler
            URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
            URL url = new URL("hdfs://192.168.56.100:9000/input.txt");
            InputStream ins = url.openStream();
            IOUtils.copyBytes(ins, System.out, 4096, false);
            ins.close();

            // Let this Java client access Hadoop as the "root" HDFS user
            // (must be set before FileSystem.get() to take effect)
            // System.setProperty("HADOOP_USER_NAME", "root");
    
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://192.168.56.100:9000");
            FileSystem fileSystem = FileSystem.get(conf);
    
            // Create the directory /msb
            boolean success = fileSystem.mkdirs(new Path("/msb"));
            System.out.println(success);

            // Check whether /hello.txt exists
            success = fileSystem.exists(new Path("/hello.txt"));
            System.out.println(success);

            // Recursively delete /msb
            success = fileSystem.delete(new Path("/msb"), true);
            System.out.println(success);

            // /msb should no longer exist
            success = fileSystem.exists(new Path("/msb"));
            System.out.println(success);
    
            // Alternative upload: let IOUtils do the buffered copy and close both streams
            // FSDataOutputStream out = fileSystem.create(new Path("/aliyun.txt"), true);
            // FileInputStream fis = new FileInputStream("C:/Users/Jay/Desktop/aliyun.txt");
            // IOUtils.copyBytes(fis, out, 4096, true);

            // Upload a local file to HDFS with a manual 4 KB buffered copy
            FSDataOutputStream out = fileSystem.create(new Path("/test.doc"), true);
            FileInputStream in = new FileInputStream("C:/Users/Jay/Desktop/test.doc");
            byte[] buf = new byte[4096];
            int len = in.read(buf);
            while (len != -1) {
                out.write(buf, 0, len);
                len = in.read(buf);
            }
            in.close();
            out.close();
    
            // List the root directory and print each entry's path, permission and replication factor
            FileStatus[] statuses = fileSystem.listStatus(new Path("/"));
            for (FileStatus status : statuses) {
                System.out.println(status.getPath());
                System.out.println(status.getPermission());
                System.out.println(status.getReplication());
            }

            fileSystem.close();
        }
    }
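
    Besides the manual buffered copy above, FileSystem also provides built-in copy helpers. The sketch below uses copyFromLocalFile and copyToLocalFile against the same cluster address; the local paths and the CopyHelpers class name are placeholders for illustration.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CopyHelpers {

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://192.168.56.100:9000");
            FileSystem fileSystem = FileSystem.get(conf);

            // Upload: copy a local file into HDFS
            fileSystem.copyFromLocalFile(new Path("C:/Users/Jay/Desktop/test.doc"),
                    new Path("/test.doc"));

            // Download: copy an HDFS file back to the local disk
            fileSystem.copyToLocalFile(new Path("/test.doc"),
                    new Path("C:/Users/Jay/Desktop/test-copy.doc"));

            fileSystem.close();
        }
    }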
    

    4. Run results

    (Figure: hdfs运行结果.png — console output of the run)

    5. Open http://192.168.56.100:50070 (the NameNode web UI) in a browser to check the result

    (Figure: hadoop浏览器查看.png — browsing HDFS through the web UI)

    If you found this helpful, please give it a like to show your support.
