1. Install the native libraries
The Cloudera tarball release does not ship the native libraries; the tar packages at the link below contain no native directory:
http://archive.cloudera.com/cdh5/cdh/5/
The RPM packages do include the native libraries; just copy them over (see the sketch after the link):
http://archive.cloudera.com/cdh5/redhat/6/x86_64/cdh/5.3.2/RPMS/x86_64/
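A minimal sketch of pulling the native libraries out of the hadoop RPM without installing it; the RPM filename below is a placeholder (pick the real one from the directory above) and the target directory /usr/local/hadoop-cdh/lib/native is an assumption about where the tarball was unpacked:

# Extract the RPM payload into the current directory (no root install needed).
rpm2cpio hadoop-<version>.el6.x86_64.rpm | cpio -idmv
# The CDH RPM payload keeps the native libs under ./usr/lib/hadoop/lib/native (assumed layout);
# copy them into the tarball installation's lib/native directory.
cp -a ./usr/lib/hadoop/lib/native/* /usr/local/hadoop-cdh/lib/native/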
2. Compile and link
#include <stdio.h>
#include <unistd.h>
#include <string.h>
#include <stdlib.h>
#include "hdfs.h"

int main(int argc, char **argv) {
    // Connect to the NameNode (host, RPC port).
    hdfsFS fs = hdfsConnect("10.35.0.36", 8020);
    if (!fs) {
        fprintf(stderr, "Failed to connect to HDFS!\n");
        exit(-1);
    }
    const char* writePath = "/test/liudong_testfile.txt";
    // O_WRONLY|O_CREAT: create the file if it does not exist yet.
    hdfsFile writeFile = hdfsOpenFile(fs, writePath, O_WRONLY|O_CREAT, 0, 0, 0);
    if (!writeFile) {
        fprintf(stderr, "Failed to open %s for writing!\n", writePath);
        exit(-1);
    }
    const char* buffer = "Hello, World!";
    tSize num_written_bytes = hdfsWrite(fs, writeFile, (void*)buffer, strlen(buffer)+1);
    if (num_written_bytes < 0) {
        fprintf(stderr, "Failed to write to %s!\n", writePath);
        exit(-1);
    }
    if (hdfsFlush(fs, writeFile)) {
        fprintf(stderr, "Failed to 'flush' %s\n", writePath);
        exit(-1);
    }
    hdfsCloseFile(fs, writeFile);
    hdfsDisconnect(fs);
    printf("success\n");
    return 0;
}
g++ -o ./test ./test.cc \
    -I../include/hdfs \
    -L../lib/hdfs \
    -L/usr/java/jdk1.7.0_21/jre/lib/amd64/server \
    -lhdfs -ljvm
- -I../include/hdfs: the directory containing hdfs.h
- -L../lib/hdfs: the directory containing libhdfs.a
- -L/usr/java/jdk1.7.0_21/jre/lib/amd64/server: the directory containing libjvm.so
- -lhdfs -ljvm: link against libhdfs and libjvm
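The same compile line can be written with the paths taken from environment variables rather than hard-coded relative paths; the default locations below (/usr/local/hadoop-cdh and the JDK path) are assumptions to adjust for your layout:

# Assumed install prefixes; override HADOOP_HOME / JAVA_HOME as needed.
HADOOP_HOME=${HADOOP_HOME:-/usr/local/hadoop-cdh}
JAVA_HOME=${JAVA_HOME:-/usr/java/jdk1.7.0_21}
g++ -o ./test ./test.cc \
    -I${HADOOP_HOME}/include \
    -L${HADOOP_HOME}/lib/native \
    -L${JAVA_HOME}/jre/lib/amd64/server \
    -lhdfs -ljvm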
3. Run
- Pay special attention to how CLASSPATH is set.
There is a trap here; the following approach is wrong:
export CLASSPATH=`hadoop classpath`
The CLASSPATH must contain the jar files directly rather than matching them with wildcards (the output of `hadoop classpath` contains wildcard entries, which the JVM created through JNI does not expand); it is also best to avoid duplicate jars:
for jar_file in `find /usr/local/hadoop-cdh -name "*.jar"`; do
    jar_name=${jar_file##*/}
    # Only append jars whose name is not already present in CLASSPATH.
    echo ${CLASSPATH} | grep ${jar_name} > /dev/null 2>&1
    if [ $? -eq 1 ]; then
        export CLASSPATH=${CLASSPATH}:${jar_file}
    fi
done
- Also take care of the LD_LIBRARY_PATH setting:
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/hadoop-cdh/lib/native:/usr/java/jdk1.7.0_21/jre/lib/amd64/server
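Putting it together, a minimal run sketch; it assumes the CLASSPATH loop above has already been executed in the current shell, that the binary is ./test as compiled above, and that the hdfs command is on PATH:

# With CLASSPATH and LD_LIBRARY_PATH set as above, run the program.
./test
# Verify the write from the command line.
hdfs dfs -cat /test/liudong_testfile.txt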