1.搭建windows平台的hadoop运行环境
注意安装路径中不要包含空格，并配置好相应的环境变量
2.开启IDEA,导入hadoop的common和hdfs的jar包
hadoop导包.png
3.新建 HelloHDFS 类
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;
public class HelloHDFS {

    /**
     * Demonstrates basic HDFS operations against hdfs://192.168.56.100:9000:
     * reads /input.txt via a java.net.URL, creates/deletes the /msb directory,
     * checks file existence, uploads a local file to /test.doc, and lists the
     * root directory's entries (path, permission, replication).
     *
     * @param args unused
     * @throws Exception if any HDFS or local I/O operation fails
     */
    public static void main(String[] args) throws Exception {
        // Register the hdfs:// protocol handler so java.net.URL can open HDFS
        // paths. NOTE: this factory may be set at most once per JVM.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
        URL url = new URL("hdfs://192.168.56.100:9000/input.txt");
        // try-with-resources closes the stream even if the copy throws;
        // 'false' tells copyBytes not to close the streams itself (it would
        // also close System.out if asked to).
        try (InputStream ins = url.openStream()) {
            IOUtils.copyBytes(ins, System.out, 4096, false);
        }

        // Grant Java access to Hadoop as a specific user if needed.
        // System.setProperty("HADOOP_USER_NAME", "root");
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.56.100:9000");
        // FileSystem is AutoCloseable; closing it releases the client
        // connection when main finishes or an operation fails.
        try (FileSystem fileSystem = FileSystem.get(conf)) {
            boolean success = fileSystem.mkdirs(new Path("/msb"));
            System.out.println(success);

            success = fileSystem.exists(new Path("/hello.txt"));
            System.out.println(success);

            // 'true' = recursive delete.
            success = fileSystem.delete(new Path("/msb"), true);
            System.out.println(success);

            success = fileSystem.exists(new Path("/msb"));
            System.out.println(success);

            // FSDataOutputStream out = fileSystem.create(new Path("/aliyun.txt"), true);
            // FileInputStream fis = new FileInputStream("C:/Users/Jay/Desktop/aliyun.txt");
            // IOUtils.copyBytes(fis, out, 4096, true);

            // Upload a local file to HDFS. IOUtils.copyBytes replaces the
            // hand-rolled read/write loop, and try-with-resources guarantees
            // both streams are closed even if the copy fails midway (the
            // original leaked them on exception).
            try (FSDataOutputStream out = fileSystem.create(new Path("/test.doc"), true);
                 FileInputStream in = new FileInputStream("C:/Users/Jay/Desktop/test.doc")) {
                IOUtils.copyBytes(in, out, 4096, false);
            }

            // List every entry directly under the HDFS root.
            FileStatus[] statuses = fileSystem.listStatus(new Path("/"));
            for (FileStatus status : statuses) {
                System.out.println(status.getPath());
                System.out.println(status.getPermission());
                System.out.println(status.getReplication());
            }
        }
    }
}
4.运行结果
hdfs运行结果.png
5.在浏览器输入 http://192.168.56.100:50070（NameNode 的 Web 管理界面）查看
hadoop浏览器查看.png
网友评论