介绍
在hive中,可以对其进行添加自定义的函数来满足自己的特定场景的功能,hive中可以提供三种自定义函数:
- UDF函数(普通函数):一进一出
- UDAF函数:多进一出
- UDTF函数:一进多出
实现
下面就实现一个将json的属性转换成列的例子
- 实现UDTF的类必须继承GenericUDTF类,并覆盖以下的三个方法:
1. initialize:根据其名字就能看出,主要做一些初始化的工作,如校验参数并定义输出的字段结构
2. process:程序的主要执行方法,数据会进入这个方法进行解析,并最后使用forward进行输出
3. close:关闭资源,可以不用实现,do nothing
- initialize实现
@Override
public StructObjectInspector initialize(StructObjectInspector args) throws UDFArgumentException {
    // The UDTF must be called with exactly one argument.
    List<? extends StructField> fieldRefs = args.getAllStructFieldRefs();
    if (fieldRefs.size() != 1) {
        // Use the declared UDFArgumentException rather than IllegalArgumentException.
        throw new UDFArgumentException("JsonTransformUDTF() takes exactly one argument");
    }
    ObjectInspector objectInspector = fieldRefs.get(0).getFieldObjectInspector();
    // BUG FIX: the original used '&&' here, which (a) silently accepted a
    // primitive-but-non-string argument (e.g. int), and (b) for a non-primitive
    // argument evaluated the second operand and cast a non-primitive inspector
    // to PrimitiveObjectInspector, raising ClassCastException instead of a
    // clean UDFArgumentException. '||' short-circuits correctly.
    if (ObjectInspector.Category.PRIMITIVE != objectInspector.getCategory()
            || ((PrimitiveObjectInspector) objectInspector).getPrimitiveCategory()
            != PrimitiveObjectInspector.PrimitiveCategory.STRING) {
        throw new UDFArgumentException("JsonTransformUDTF() takes a string as a parameter");
    }
    stringOI = (PrimitiveObjectInspector) objectInspector;
    // Declare the output schema: two string columns, "name" and "age".
    List<String> fieldNames = new ArrayList<>();
    List<ObjectInspector> fieldOIs = new ArrayList<>();
    fieldNames.add("name");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    fieldNames.add("age");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
- process实现
// Convert the incoming argument to its Java String form using the inspector
// captured in initialize().
String json = stringOI.getPrimitiveJavaObject(args[0]).toString();
// Parse the JSON document and emit one output row per parsed record.
List<Object[]> results = handlerRecord(json);
for (Object[] result : results) {
forward(result);
}
完整代码
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.hadoop.hive.ql.exec.Description;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import java.util.ArrayList;
import java.util.List;
/**
 * Custom Hive UDTF (one row in, multiple rows out) that explodes a JSON
 * string column into two string output columns: "name" and "age".
 *
 * @author zhangap
 * @wechat zapjone
 * @version 1.0, 2019/7/4
 */
@Description(name = "ik_parse_json", value = "_FUNC_(json) - parses a JSON string into name/age columns",
        extended = "Example:\n"
                + " > SELECT _FUNC_(json) from src;")
public class JsonTransformUDTF extends GenericUDTF {

    /** Inspector for the single string argument; assigned in initialize(). */
    private PrimitiveObjectInspector stringOI;

    /**
     * Validates that exactly one string argument was passed and declares the
     * output schema (two string columns: name, age).
     *
     * @param args struct describing the call-site arguments
     * @return inspector for the {name, age} output rows
     * @throws UDFArgumentException if the argument count or type is wrong
     */
    @Override
    public StructObjectInspector initialize(StructObjectInspector args) throws UDFArgumentException {
        List<? extends StructField> fieldRefs = args.getAllStructFieldRefs();
        if (fieldRefs.size() != 1) {
            // Use the declared UDFArgumentException rather than IllegalArgumentException.
            throw new UDFArgumentException("JsonTransformUDTF() takes exactly one argument");
        }
        ObjectInspector objectInspector = fieldRefs.get(0).getFieldObjectInspector();
        // BUG FIX: the original used '&&' here, which (a) silently accepted a
        // primitive-but-non-string argument (e.g. int), and (b) for a
        // non-primitive argument evaluated the second operand and cast a
        // non-primitive inspector to PrimitiveObjectInspector, raising
        // ClassCastException instead of a clean UDFArgumentException.
        if (ObjectInspector.Category.PRIMITIVE != objectInspector.getCategory()
                || ((PrimitiveObjectInspector) objectInspector).getPrimitiveCategory()
                != PrimitiveObjectInspector.PrimitiveCategory.STRING) {
            throw new UDFArgumentException("JsonTransformUDTF() takes a string as a parameter");
        }
        stringOI = (PrimitiveObjectInspector) objectInspector;
        // Declare the output schema: two string columns, "name" and "age".
        List<String> fieldNames = new ArrayList<>();
        List<ObjectInspector> fieldOIs = new ArrayList<>();
        fieldNames.add("name");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldNames.add("age");
        fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    /**
     * Parses one input row's JSON string and forwards each resulting
     * {name, age} row.
     *
     * @param args the single-element argument array (the JSON string)
     * @throws HiveException if forwarding fails
     */
    @Override
    public void process(Object[] args) throws HiveException {
        // BUG FIX: the original dereferenced the inspector result
        // unconditionally, throwing NullPointerException on a NULL column
        // value. Emit nothing for NULL input instead.
        Object raw = stringOI.getPrimitiveJavaObject(args[0]);
        if (raw == null) {
            return;
        }
        for (Object[] row : handlerRecord(raw.toString())) {
            forward(row);
        }
    }

    @Override
    public void close() throws HiveException {
        // No resources to release.
    }

    /**
     * Parses a JSON document into output rows of {name, age}.
     *
     * @param json the JSON text of one input row
     * @return list of {name, age} rows; empty if the text is not a JSON object
     */
    private List<Object[]> handlerRecord(String json) {
        List<Object[]> result = new ArrayList<>();
        // JSON.parseObject returns null for null/empty input; guard against
        // the NullPointerException the original would have thrown.
        JSONObject jsonObject = JSON.parseObject(json);
        if (jsonObject != null) {
            result.add(new Object[]{jsonObject.getString("name"), jsonObject.getString("age")});
        }
        return result;
    }
}
验证效果
- 在hive中创建表
create table test_json_parse(json string)stored as textfile;
- 编写json文件并上传到hive表结构目录下
json文件内容(000000_0):{"name":"Chain","age":10000}
hdfs上传:hdfs dfs -put 000000_0 /user/hive/warehouse/test_json_parse
- 添加jar包,和注册临时函数
add jar /home/zap/jar/udtf-0.0.1-SNAPSHOT-jar-with-dependencies.jar;
create temporary function ik_parse_json as 'JsonTransformUDTF';
-
最终效果
hive实现效果
网友评论