美文网首页大数据
hiveUDF-SM3加密验证

hiveUDF-SM3加密验证

作者: 堂哥000 | 来源:发表于2021-11-26 11:57 被阅读0次

    近期有业务需要用到SM3加密方法,HIVE没有自带的SM3加密算法,故此建立此UDF函数

    import org.bouncycastle.crypto.digests.SM3Digest;
    import org.bouncycastle.crypto.macs.HMac;
    import org.bouncycastle.crypto.params.KeyParameter;
    import org.bouncycastle.jce.provider.BouncyCastleProvider;
    import org.bouncycastle.pqc.math.linearalgebra.ByteUtils;
    
    import java.io.UnsupportedEncodingException;
    import java.nio.charset.StandardCharsets;
    import java.security.Security;
    
    import org.apache.hadoop.hive.ql.exec.UDF;
    /**
     * sm3加密算法工具类
     * 加密与加密结果验证(不可逆算法)
     */
    /**
     * Hive UDF exposing the SM3 cryptographic hash (a one-way digest; not reversible).
     *
     * <p>Usage in HiveQL after registration: {@code SELECT sm3(col) FROM t;}
     * Thread-safety: each call builds its own {@link SM3Digest}, so concurrent
     * evaluation across Hive tasks is safe.
     */
    public class Sm3Utils extends UDF {
        static {
            // Register the BouncyCastle JCE provider once per classloader so the
            // SM3 primitives are available.
            Security.addProvider(new BouncyCastleProvider());
        }

        /**
         * Hive entry point: computes the SM3 digest of the given string.
         *
         * @param paramStr string to hash; encoded as UTF-8 before digesting
         * @return the 32-byte digest rendered as a 64-character hex string,
         *         or {@code null} when the input is {@code null} (Hive's
         *         NULL-in/NULL-out convention)
         */
        public String evaluate(String paramStr) {
            if (paramStr == null) {
                // Avoid NPE on NULL rows; propagate NULL like built-in Hive functions.
                return null;
            }
            // StandardCharsets.UTF_8 cannot throw UnsupportedEncodingException,
            // unlike getBytes(String) — the old catch block silently hashed an
            // empty array on failure, which would have produced a wrong digest.
            byte[] srcData = paramStr.getBytes(StandardCharsets.UTF_8);
            byte[] resultHash = hash(srcData);
            // Hex-encode the 32-byte digest (64 hex characters).
            return ByteUtils.toHexString(resultHash);
        }

        /**
         * Computes the raw SM3 digest of the given bytes.
         *
         * @param srcData bytes to hash
         * @return the 32-byte SM3 digest
         */
        public static byte[] hash(byte[] srcData) {
            SM3Digest digest = new SM3Digest();
            digest.update(srcData, 0, srcData.length);
            byte[] hash = new byte[digest.getDigestSize()];
            digest.doFinal(hash, 0);
            return hash;
        }

        /**
         * Computes an HMAC over the given bytes using SM3 as the underlying digest.
         *
         * @param key     secret key bytes for the HMAC
         * @param srcData bytes to authenticate
         * @return the 32-byte HMAC-SM3 value
         */
        public static byte[] hmac(byte[] key, byte[] srcData) {
            KeyParameter keyParameter = new KeyParameter(key);
            SM3Digest digest = new SM3Digest();
            HMac mac = new HMac(digest);
            mac.init(keyParameter);
            mac.update(srcData, 0, srcData.length);
            byte[] result = new byte[mac.getMacSize()];
            mac.doFinal(result, 0);
            return result;
        }
    }
    

    使用到的pom.xml文件

    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0"
             xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
    
        <groupId>org.example</groupId>
        <artifactId>sm3_dz</artifactId>
        <version>1.0-SNAPSHOT</version>
    
        <properties>
            <maven.compiler.source>8</maven.compiler.source>
            <maven.compiler.target>8</maven.compiler.target>
                <hadoop.version>2.6.0-cdh5.7.0</hadoop.version>
                <hive.version>1.1.0-cdh5.15.2</hive.version>
            <spark.version>2.4.0</spark.version>
            <scala.version>2.13.6</scala.version>
            </properties>
            <dependencies>
                    <!--sm3,sm4加密算法-->
                    <dependency>
                        <groupId>org.bouncycastle</groupId>
                        <artifactId>bcprov-jdk15on</artifactId>
                        <version>1.64</version>
                    </dependency>
                <dependency>
                    <groupId>commons-codec</groupId>
                    <artifactId>commons-codec</artifactId>
                    <version>1.14</version>
                </dependency>
                <dependency>
                    <groupId>commons-logging</groupId>
                    <artifactId>commons-logging</artifactId>
                    <version>1.2</version>
                </dependency>
    
                <dependency>
                    <groupId>commons-collections</groupId>
                    <artifactId>commons-collections</artifactId>
                    <version>3.2.2</version>
                </dependency>
    
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-client</artifactId>
                    <version>${hadoop.version}</version>
                </dependency>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-hdfs</artifactId>
                    <version>${hadoop.version}</version>
                </dependency>
                <dependency>
                    <groupId>org.apache.hive</groupId>
                    <artifactId>hive-exec</artifactId>
                    <version>${hive.version}</version>
                </dependency>
                <dependency>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                    <version>${hadoop.version}</version>
                </dependency>
                <dependency>
                    <groupId>org.pentaho</groupId>
                    <artifactId>pentaho-aggdesigner-algorithm</artifactId>
                    <version>5.1.5-jhyde</version>
                    <scope>test</scope>
                </dependency>
    
                <dependency>
                    <groupId>org.apache.calcite.avatica</groupId>
                    <artifactId>avatica</artifactId>
                    <version>1.8.0</version>
                </dependency>
            <!-- NOTE: duplicate commons-codec declaration removed — 1.14 is already
                 declared above, and a later duplicate in the same POM would silently
                 override it with the older 1.4.
            <dependency>
                <groupId>commons-codec</groupId>
                <artifactId>commons-codec</artifactId>
                <version>1.4</version>
            </dependency>
            -->
                <dependency>
                    <groupId>xalan</groupId>
                    <artifactId>xalan</artifactId>
                    <version>2.6.0</version>
                </dependency>
            </dependencies>
            <repositories>
                <repository>
                    <id>spring-plugin</id>
                    <url>https://repo.spring.io/plugins-release/</url>
                </repository>
                <repository>
                    <id>cloudera</id>
                    <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
                </repository>
            </repositories>
        <build>
            <finalName>${project.artifactId}-${project.version}</finalName>
            <plugins>
                <plugin>
                    <artifactId>maven-assembly-plugin</artifactId>
                    <configuration>
                        <descriptorRefs>
                            <descriptorRef>jar-with-dependencies</descriptorRef>
                        </descriptorRefs>
                        <appendAssemblyId>false</appendAssemblyId>
                    </configuration>
                    <executions>
                        <execution>
                            <id>make-assembly</id>
                            <phase>package</phase>
                            <goals>
                                <goal>single</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>org.scala-tools</groupId>
                    <artifactId>maven-scala-plugin</artifactId>
                    <version>2.15.2</version>
                    <executions>
                        <execution>
                            <goals>
                                <goal>compile</goal>
                                <goal>testCompile</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
            </plugins>
        </build>
    </project>
    

    在beeline内注册该函数

    CREATE FUNCTION udf.sm3 as 'Sm3Utils' using jar 'hdfs:///user/udf/sm3_dz-1.0-SNAPSHOT.jar' ;
    -- 注意: 类名必须与上面代码中的类一致 (Sm3Utils, 无 package 声明); jar 名由 pom 的
    -- finalName (${project.artifactId}-${project.version}) 决定, 即 sm3_dz-1.0-SNAPSHOT.jar
    
    测试用例
    select udf.sm3('大爷')
    
    测试结果
    image.png

    相关文章

      网友评论

        本文标题:hiveUDF-SM3加密验证

        本文链接:https://www.haomeiwen.com/subject/fzitxrtx.html