Viewing the contents of a file in HDFS

The HCat class below opens a file in HDFS through FileSystem.open() and prints it to standard output line by line, the programmatic equivalent of hadoop fs -cat.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HCat {
    // Print the HDFS file at fpname to stdout, line by line.
    public void cat(String fpname) throws Exception {
        Path path = new Path(fpname);
        // Connect to the HDFS NameNode running on localhost.
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost"), new Configuration());
        // fs.open() returns an input stream; wrap it for line-oriented reading.
        BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
        String line = br.readLine();
        while (line != null) {
            System.out.println(line);
            line = br.readLine();
        }
        br.close();
    }

    public static void main(String[] args) {
        HCat hcat = new HCat();
        try {
            hcat.cat("/temp/usr/install.log");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
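For reference, Hadoop's own org.apache.hadoop.io.IOUtils offers a shorter way to do the same job: copy the raw bytes of the stream straight to standard output instead of decoding lines. The sketch below is a minimal, hypothetical variant (the class name HCatStream is ours), assuming the same hdfs://localhost filesystem and sample path as above.

import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Hypothetical helper class; not part of the original article.
public class HCatStream {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost"), new Configuration());
        InputStream in = null;
        try {
            // Open the HDFS file and copy its raw bytes to stdout in 4 KB chunks.
            in = fs.open(new Path("/temp/usr/install.log"));
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            // Close quietly even if the copy failed part-way.
            IOUtils.closeStream(in);
        }
    }
}

Because it copies bytes rather than lines, this variant also avoids any dependence on the platform's default character encoding.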
Uploading a file to HDFS

The HPut class reads a local file into memory and writes it to a new file in HDFS with FileSystem.create(), creating the target directory first if it does not exist.
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HPut {
    // rpath -- source (local) file name
    // dpath -- target HDFS directory
    // fname -- target file name
    public void put(String rpath, String dpath, String fname) throws Exception {
        Path path = new Path(dpath);
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost"), new Configuration());
        // Create the target directory if it does not exist yet, then
        // upload either way.
        if (!fs.exists(path)) {
            fs.mkdirs(path);
        }
        // Path(parent, child) joins the two safely, with or without a trailing slash.
        Path filePath = new Path(dpath, fname);
        // Read the local file into a buffer, preserving line breaks.
        StringBuffer sb = new StringBuffer();
        File f = new File(rpath);
        BufferedReader br = new BufferedReader(new FileReader(f));
        String line = br.readLine();
        while (line != null) {
            System.out.println(line);
            sb.append(line).append("\n");
            line = br.readLine();
        }
        br.close();
        // Write the buffered file contents to the new HDFS file.
        FSDataOutputStream out = fs.create(filePath);
        out.write(sb.toString().getBytes());
        out.close();
    }

    public static void main(String[] args) {
        HPut hput = new HPut();
        try {
            hput.put("/home/root/HCat.java", "/temp/usr/test/", "HCat.java");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
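Buffering the whole file in a StringBuffer works for small files, but the FileSystem API already provides an upload call that streams the data instead. A minimal sketch, assuming the same source and target paths as in HPut (the class name HPut2 is ours):

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical variant; not part of the original article.
public class HPut2 {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new URI("hdfs://localhost"), new Configuration());
        // Copy the local file to HDFS in one call; the data is streamed
        // rather than held in memory, so large files are handled as well.
        fs.copyFromLocalFile(new Path("/home/root/HCat.java"),
                new Path("/temp/usr/test/HCat.java"));
        fs.close();
    }
}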