package com.hdfs;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
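/**
 * HDFS Java API examples, exercised as JUnit tests: directory creation,
 * listing, deletion, existence and type checks, rename, and transfers
 * between the local filesystem and HDFS. Assumes a NameNode reachable
 * at hdfs://localhost:9000 and HDFS access as user "root".
 */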
public class HdfsTest {
    private FileSystem fs = null;
    private List<String> hdfsPathsLists;
    private FileSystem local = null;
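    /** Connect to HDFS as user "root" and obtain a handle to the local filesystem. */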
    @Before
    public void init() throws Exception {
        fs = FileSystem.get(new URI("hdfs://localhost:9000"), new Configuration(), "root");
        local = FileSystem.getLocal(new Configuration());
    }
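    /** Release both filesystem handles after each test. */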
    @After
    public void close() throws Exception {
        fs.close();
        local.close();
    }
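    /** Create nested directories in one call; mkdirs builds missing parents, like mkdir -p. */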
    @Test
    public void testMkdir() throws Exception {
        boolean flag = fs.mkdirs(new Path("/javaApi/mk/dir1/dir2"));
        System.err.println(flag ? "success" : "fail");
    }
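    /** List every path under the HDFS root by walking the tree recursively. */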
    @Test
    public void getDirList() throws Exception {
        hdfsPathsLists = new ArrayList<>();
        getHdfsPaths(new Path("/"));
        for (String p : hdfsPathsLists) {
            System.err.println(p);
        }
    }
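    /** Depth-first traversal: record each entry, then recurse into subdirectories. */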
    private void getHdfsPaths(Path path) throws Exception {
        FileStatus[] dirs = fs.listStatus(path);
        for (FileStatus s : dirs) {
            hdfsPathsLists.add(s.getPath().toString());
            if (s.isDirectory()) {
                getHdfsPaths(s.getPath());
            }
        }
    }
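    /** Delete a directory tree immediately; the second argument to delete() enables recursion. */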
    @Test
    public void testRmDir() throws Exception {
        // delete() removes the path now; deleteOnExit() would only defer
        // deletion until the FileSystem is closed.
        boolean flag = fs.delete(new Path("/java"), true);
        System.err.println(flag ? "success" : "fail");
    }
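    /** Check whether a path exists in HDFS. */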
    @Test
    public void testExistsFile() throws Exception {
        String src = "hdfs://localhost:9000/README1.txt";
        boolean b = fs.exists(new Path(src));
        System.err.println(src + (b ? " exists" : " does not exist"));
    }
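    /** Distinguish a directory from a regular file. */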
    @Test
    public void testFileType() throws Exception {
        String src = "hdfs://localhost:9000/README1.txt";
        Path path = new Path(src);
        if (fs.isDirectory(path)) {
            System.err.println("directory");
        } else if (fs.isFile(path)) {
            System.err.println("file");
        } else {
            // neither check matched, so the path does not exist
            System.err.println("path does not exist");
        }
    }
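    /** Rename (move) a file within HDFS. */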
    @Test
    public void testRenameFile() throws Exception {
        String oldPath = "hdfs://localhost:9000/README1.txt";
        String newPath = "hdfs://localhost:9000/shiyanbar.txt";
        boolean b = fs.rename(new Path(oldPath), new Path(newPath));
        System.err.println(b ? "rename success" : "rename fail");
    }
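    /** Move a local file into HDFS; the local copy is deleted after the transfer. */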
    @Test
    public void testMoveToHdfsFile() throws Exception {
        String oldPath = "file:///simple/test/word.txt";
        String newPath = "hdfs://localhost:9000/user/";
        fs.moveFromLocalFile(new Path(oldPath), new Path(newPath));
        System.err.println("move success");
    }
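    /** Copy a local file into HDFS, leaving the local copy in place. */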
    @Test
    public void testUploadFile() throws Exception {
        String src = "file:///simple/employees.txt";
        String hdfsDst = "/";
        fs.copyFromLocalFile(new Path(src), new Path(hdfsDst));
        System.err.println("upload success");
    }
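    /** Copy a file from HDFS to the local filesystem. */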
    @Test
    public void testDownloadFile() throws Exception {
        String src = "hdfs://localhost:9000/shiyanbar.txt";
        // renamed from "local" to avoid shadowing the local FileSystem field
        String localDst = "/data/dataset";
        fs.copyToLocalFile(new Path(src), new Path(localDst));
        System.err.println("download success");
    }
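    /** Concatenate all plain files in a local directory into a single HDFS file. */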
    @Test
    public void testMergeLocalFiles() {
        try {
            Path inputDir = new Path("file:///opt/hadoop-2.7.3/etc/hadoop/");
            Path hdfsFile = new Path("/output3");
            FileStatus[] inputFiles = local.listStatus(inputDir);
            FSDataOutputStream out = fs.create(hdfsFile);
            for (int i = 0; i < inputFiles.length; i++) {
                // listStatus may also return subdirectories; skip anything
                // that is not a plain file, since open() would fail on it
                if (!inputFiles[i].isFile()) {
                    continue;
                }
                System.err.println(inputFiles[i].getPath().getName());
                FSDataInputStream in = local.open(inputFiles[i].getPath());
                byte[] buffer = new byte[256];
                int bytesRead = 0;
                while ((bytesRead = in.read(buffer)) > 0) {
                    out.write(buffer, 0, bytesRead);
                }
                in.close();
            }
            out.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}