package com.caozg.wordcount;
import org.apache.hadoop.conf.Configuration;
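import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;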
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
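import org.apache.hadoop.io.IOUtils;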
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.text.SimpleDateFormat;
import java.util.Date;
public class HdfsAPI {

    private static final Logger logger = LoggerFactory.getLogger(HdfsAPI.class);
    private static final Configuration configuration = new Configuration();
    private FileSystem fdfs = null;
    /** NameNode URI; adjust to match fs.defaultFS in your core-site.xml */
    private static final String HDFSURI = "hdfs://localhost:9000";

    /**
     * Build a FileSystem handle for the HDFS instance at HDFSURI.
     */
    public FileSystem initdfs() throws URISyntaxException {
        URI uri = new URI(HDFSURI);
        // Merge in the cluster configuration files from the classpath
        configuration.addResource("core-site.xml");
        configuration.addResource("hdfs-site.xml");
        try {
            fdfs = FileSystem.get(uri, configuration);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return fdfs;
    }

    /**
     * Test the connection to HDFS.
     */
    @Test
    public void testHdfsConnect() {
        try {
            FileSystem hdfs = initdfs();
            logger.info("Connected to [{}]", hdfs.getUri());
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }
    }

    /**
     * Check whether a file exists at the given path.
     */
    @Test
    public void testCheckFile() {
        try {
            Path path = new Path("hdfs://localhost:9000/input/readme.txt");
            FileSystem hdfs = initdfs();
            boolean result = hdfs.exists(path);
            logger.info(result ? "File exists" : "File does not exist");
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Create a directory on HDFS.
     */
    @Test
    public void testMakeDir() {
        try {
            Path path = new Path("hdfs://localhost:9000/input/tiffData");
            FileSystem hdfs = initdfs();
            boolean result = hdfs.mkdirs(path);
            logger.info(result ? "Directory created" : "Failed to create directory");
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Delete a directory on HDFS (non-recursive).
     */
    @Test
    public void testDelDir() {
        try {
            Path path = new Path("hdfs://localhost:9000/input/tiffData");
            FileSystem hdfs = initdfs();
            // The second argument controls recursion: false fails for a non-empty directory
            boolean result = hdfs.delete(path, false);
            logger.info(result ? "Directory deleted" : "Failed to delete directory");
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively list all files under the given directory.
     */
    @Test
    public void testGetFileAll() {
        try {
            FileSystem hdfs = initdfs();
            // The second argument makes the listing recursive
            RemoteIterator<LocatedFileStatus> listFiles = hdfs.listFiles(new Path("/"), true);
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();
                FsPermission permission = fileStatus.getPermission();
                String owner = fileStatus.getOwner();
                String group = fileStatus.getGroup();
                long len = fileStatus.getLen();
                long modificationTime = fileStatus.getModificationTime();
                Path path = fileStatus.getPath();
                logger.info("===============================================");
                logger.info("Permission        [{}]", permission);
                logger.info("Owner             [{}]", owner);
                logger.info("Group             [{}]", group);
                logger.info("Size in bytes     [{}]", len);
                logger.info("Modification time [{}]", sdf.format(new Date(modificationTime)));
                logger.info("Path              [{}]", path);
            }
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }
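
    /**
     * List the direct children of a directory with listStatus.
     * Illustrative addition: unlike listFiles above, listStatus is not recursive
     * and also returns directory entries; the path used here is an assumption.
     */
    @Test
    public void testListStatus() {
        try {
            FileSystem hdfs = initdfs();
            FileStatus[] entries = hdfs.listStatus(new Path("hdfs://localhost:9000/input"));
            for (FileStatus entry : entries) {
                logger.info("{} [{}]", entry.isDirectory() ? "directory" : "file", entry.getPath());
            }
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }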

    /**
     * Copy a file from HDFS to the local filesystem.
     */
    @Test
    public void testDownLoadFile() {
        Path src = new Path("hdfs://localhost:9000/input/readme.txt");
        Path des = new Path("/home/caozg/Desktop/test3.txt");
        try {
            FileSystem hdfs = initdfs();
            // Also writes a local .crc checksum file; use the four-argument overload
            // with useRawLocalFileSystem=true to suppress it
            hdfs.copyToLocalFile(src, des);
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Copy a file from the local filesystem to HDFS.
     */
    @Test
    public void testUpLoadFile() {
        Path desc = new Path("hdfs://localhost:9000/input/test.txt");
        Path src = new Path("/home/caozg/Desktop/test.txt");
        try {
            FileSystem hdfs = initdfs();
            hdfs.copyFromLocalFile(src, desc);
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }
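
    /**
     * Read a file from HDFS and stream its contents to stdout.
     * Illustrative addition: the path is assumed to exist (see testCheckFile above).
     */
    @Test
    public void testReadFile() {
        try {
            FileSystem hdfs = initdfs();
            Path path = new Path("hdfs://localhost:9000/input/readme.txt");
            // open() returns an FSDataInputStream positioned at the start of the file
            try (FSDataInputStream in = hdfs.open(path)) {
                // Copy to stdout with a 4 KB buffer; "false" leaves both streams open
                IOUtils.copyBytes(in, System.out, 4096, false);
            }
        } catch (URISyntaxException | IOException e) {
            e.printStackTrace();
        }
    }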
}