1. Connecting to Hive via JDBC
Note
- The connection may fail with:
  Caused by: java.lang.RuntimeException: java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.authorize.AuthorizationException): User: root is not allowed to impersonate anonymous
- Fix: configure the HDFS proxy user for root in Hadoop's core-site.xml:
vi core-site.xml
<property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
</property>
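After editing core-site.xml, restart HDFS so the change takes effect (or refresh the proxy-user settings without a restart via hdfs dfsadmin -refreshSuperUserGroupsConfiguration, plus yarn rmadmin -refreshSuperUserGroupsConfiguration if jobs run through YARN).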
JDBCUtils.java
package demo.jdbc;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class JDBCUtils {
    // Hive JDBC driver class
    private static String driver = "org.apache.hive.jdbc.HiveDriver";
    // HiveServer2 URL
    private static String url = "jdbc:hive2://bigdata111:10000/default";

    // register the driver once, when the class is loaded
    static {
        try {
            Class.forName(driver);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // obtain a connection to HiveServer2
    public static Connection getConnection() {
        try {
            return DriverManager.getConnection(url);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }

    // release resources in reverse order of acquisition
    public static void release(Connection conn, Statement st, ResultSet rs) {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException e) {
                e.printStackTrace();
            } finally {
                rs = null;
            }
        }
        if (st != null) {
            try {
                st.close();
            } catch (SQLException e) {
                e.printStackTrace();
            } finally {
                st = null;
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (SQLException e) {
                e.printStackTrace();
            } finally {
                conn = null;
            }
        }
    }
}
TestJDBC.java
package demo.jdbc;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class TestJDBC {
    public static void main(String[] args) {
        String sql = "select * from emp";
        Connection conn = null;
        Statement st = null;
        ResultSet rs = null;
        try {
            conn = JDBCUtils.getConnection();
            st = conn.createStatement();
            rs = st.executeQuery(sql);
            while (rs.next()) {
                String name = rs.getString("ename");
                String sal = rs.getString("sal");
                System.out.println(name + "\t" + sal);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            JDBCUtils.release(conn, st, rs);
        }
    }
}
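As a side note, on JDK 7+ the same query can be written with try-with-resources, since Connection, Statement, and ResultSet are all AutoCloseable; a minimal sketch (the class name TestJDBC2 is just an illustration, not from the original post):

package demo.jdbc;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;

public class TestJDBC2 {
    public static void main(String[] args) {
        String sql = "select * from emp";
        // resources are closed automatically, in reverse order,
        // so the explicit release() helper is not needed here
        try (Connection conn = JDBCUtils.getConnection();
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getString("ename") + "\t" + rs.getString("sal"));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}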
2. UDF (User-Defined Functions)
- Required jars (add them to the build classpath): $HIVE_HOME/lib
package demo.udf;

import org.apache.hadoop.hive.ql.exec.UDF;

// concatenates two strings with **** in between
public class MyConcatString extends UDF {
    public String evaluate(String a, String b) {
        return a + "****" + b;
    }
}
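The evaluate logic can be sanity-checked with plain Java before packaging; a minimal sketch (the test class name is illustrative, not from the original post):

package demo.udf;

public class MyConcatStringTest {
    public static void main(String[] args) {
        // expected output: Tom****Mary
        System.out.println(new MyConcatString().evaluate("Tom", "Mary"));
    }
}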
- Build the jar: myudf.jar
- Use the add command to put the jar on Hive's classpath; run in hive:
add jar /root/temp/myudf.jar;
- Create temporary functions in hive (a sketch of CheckSalaryGrade and example queries follow below):
create temporary function myconcat as 'demo.udf.MyConcatString';
create temporary function checksal as 'demo.udf.CheckSalaryGrade';
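CheckSalaryGrade is registered above but its source is not shown; a plausible sketch, assuming it maps an integer salary to a grade label (the grade boundaries are illustrative assumptions, not from the original post):

package demo.udf;

import org.apache.hadoop.hive.ql.exec.UDF;

// hypothetical implementation of the CheckSalaryGrade UDF referenced above
public class CheckSalaryGrade extends UDF {
    public String evaluate(int salary) {
        // grade boundaries below are assumptions for illustration only
        if (salary < 1000) {
            return "Grade A";
        } else if (salary < 3000) {
            return "Grade B";
        } else {
            return "Grade C";
        }
    }
}

Once registered, the temporary functions can be used like built-in functions, for example (assuming the emp table from part 1, with sal stored as an int):
select myconcat(ename, job) from emp;
select ename, sal, checksal(sal) from emp;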