Add the dependencies to the POM. The hadoop-client version must match your CDH version.
<dependencies>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.11</version>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
</dependencies>
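The snippet references ${hadoop.version} without defining it, and CDH builds of hadoop-client are published to the Cloudera repository rather than Maven Central. A minimal sketch of the missing pieces, assuming a CDH 5.x release (the version string is an example only; substitute the one matching your cluster):

<properties>
    <!-- example version; use the one matching your CDH release -->
    <hadoop.version>2.6.0-cdh5.15.1</hadoop.version>
</properties>

<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>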
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;

public class HDFSAppTest {

    public static final String HDFS_PATH = "hdfs://localhost:8020";

    FileSystem fileSystem = null;
    Configuration configuration = null;

    @Before
    public void setUp() throws Exception {
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration);
        System.out.println("HDFSAppTest---->setUp");
    }

    @After
    public void tearDown() {
        configuration = null;
        fileSystem = null;
        System.out.println("HDFSAppTest--->tearDown");
    }
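If the OS user running the tests differs from the HDFS user that owns the target directories, the calls below may fail with an AccessControlException. FileSystem.get has a three-argument overload that takes the remote user name explicitly; a minimal sketch (the "hadoop" user is an example, and this overload also throws InterruptedException, which setUp's throws Exception already covers):

        // run all operations as a specific HDFS user; "hadoop" is an example
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "hadoop");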
    /**
     * Create an HDFS directory (parent directories are created as needed)
     */
    @Test
    public void mkdir() throws IOException {
        fileSystem.mkdirs(new Path("/hdfsjavaapi/test"));
    }
    /**
     * Create an HDFS file and write to it
     * @throws IOException
     */
    @Test
    public void createFile() throws IOException {
        FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/hdfsjavaapi/test/file.txt"));
        fsDataOutputStream.write("javaApi Operate".getBytes());
        fsDataOutputStream.flush();
        fsDataOutputStream.close();
    }
    /**
     * View the contents of an HDFS file
     */
    @Test
    public void cat() throws IOException {
        FSDataInputStream inputStream = fileSystem.open(new Path("/hdfsjavaapi/test/file.txt"));
        IOUtils.copyBytes(inputStream, System.out, 1024);
        inputStream.close();
    }
    /**
     * Rename an HDFS file
     */
    @Test
    public void renameFile() throws IOException {
        Path oldPath = new Path("/hdfsjavaapi/test/file.txt");
        Path newPath = new Path("/hdfsjavaapi/test/filenew.txt");
        fileSystem.rename(oldPath, newPath);
    }
    /**
     * Copy a local file to HDFS
     */
    @Test
    public void copyLocalFileToHDFS() throws IOException {
        Path localPath = new Path("/Users/zhangyinghao/Desktop/zyh.mov");
        Path targetPath = new Path("/hdfsjavaapi/test/test.mov");
        fileSystem.copyFromLocalFile(localPath, targetPath);
    }
    /**
     * Copy a local file to HDFS, reporting progress
     */
    @Test
    public void copyLocalFileToHDFSWithProgress() throws IOException {
        InputStream inputStream = new BufferedInputStream(
                new FileInputStream(
                        new File("/path/of/local/file/to/upload")   // placeholder
                )
        );
        FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path("/destination/path/on/hdfs"), new Progressable() {
            // invoked periodically while data is being written
            public void progress() {
                System.out.println(".");
            }
        });
        IOUtils.copyBytes(inputStream, fsDataOutputStream, 4096);
        inputStream.close();
        fsDataOutputStream.close();
    }
    /**
     * Copy an HDFS file to the local filesystem
     */
    @Test
    public void copyToLocal() throws IOException {
        Path hdfsPath = new Path("");   // source path on HDFS
        Path localPath = new Path("");  // destination path on the local filesystem
        fileSystem.copyToLocalFile(hdfsPath, localPath);
    }
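On platforms without the native Hadoop libraries (Windows in particular), copyToLocalFile can fail while writing the local .crc checksum file. The four-argument overload lets you fall back to the raw local filesystem; a sketch with placeholder paths:

        // delSrc=false keeps the HDFS copy; useRawLocalFileSystem=true skips .crc files
        fileSystem.copyToLocalFile(false, new Path("/hdfsjavaapi/test/file.txt"),
                new Path("/tmp/file.txt"), true);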
    /**
     * List all files under an HDFS directory
     */
    @Test
    public void listFiles() throws IOException {
        FileStatus[] fileStatuses = fileSystem.listStatus(new Path("/"));
        for (FileStatus fileStatus : fileStatuses) {
            String isDir = fileStatus.isDirectory() ? "directory" : "file";
            // replication factor of the file
            short replication = fileStatus.getReplication();
            // file size in bytes
            long len = fileStatus.getLen();
            // fully qualified path
            String path = fileStatus.getPath().toString();
            System.out.println(isDir + "\t" + replication + "\t" + len + "\t" + path);
        }
    }
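Note that files created through this Java API are written with the client-side default replication factor (3), not the value in the cluster's hdfs-site.xml, so on a pseudo-distributed single-DataNode setup this listing may report replication 3 for API-created files. One way to match a single-node cluster is to set the factor on the client configuration in setUp():

        // assumption: pseudo-distributed cluster with one DataNode;
        // without this, the client default of 3 is recorded for new files
        configuration = new Configuration();
        configuration.set("dfs.replication", "1");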
    /**
     * Delete the given path; the second argument enables recursive deletion
     * @throws Exception
     */
    @Test
    public void delete() throws Exception {
        fileSystem.delete(new Path(""), true);
    }
}