
Integrating MyBatis

Author: yehongyu_2018 | Published 2019-01-04 12:17

Steps to integrate MyBatis into a project

  1. Initialize MyBatis's SqlSessionFactory from the MySQL parameters in the configuration file; see the MyBatisSqlSession code below.
  2. Add a mybatis-config.xml file with the MyBatis settings, including the DataSource configuration.
  3. Implement the application's own DataSourceFactory; see the HikariDataSourceFactory code below.
  4. Implement DAOFactoryImpl, which hands out the DAO object for each table; see DAOFactoryImpl below.

Internal mechanism:

MyBatis reads the MySQL settings from the service conf file and, via mybatis-config.xml, passes them on to the DataSource as properties.
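
For reference, the conf file read by ServiceConfig might contain entries like the following. This is a hypothetical fragment with illustrative values only; the key names match the DAOFactory.CONF_* constants used in MyBatisSqlSession.init below.

data_source.driverClassName=com.mysql.cj.jdbc.Driver
data_source.jdbc_url=jdbc:mysql://127.0.0.1:3306/demo?useSSL=false
data_source.username=demo_user
data_source.password=demo_password
data_source.maximumPoolSize=10
data_source.autoCommit=false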

Implementation of MyBatisSqlSession

package co.yhy.api.persistence;

import co.yhy.common.ServiceConfig;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.Reader;
import java.util.Properties;

public class MyBatisSqlSession {
    private static final Logger LOGGER = LoggerFactory.getLogger(MyBatisSqlSession.class);
    private volatile static MyBatisSqlSession instance;
    private volatile boolean myBatisInitStatus = false;
    private SqlSessionFactory factory;

    private MyBatisSqlSession() {
    }

    public static MyBatisSqlSession getInstance() {
        if (instance == null) {
            synchronized (MyBatisSqlSession.class) {
                if (instance == null) {
                    instance = new MyBatisSqlSession();
                }
            }
        }
        return instance;
    }

    public boolean getMyBatisInitStatus() {
        return this.myBatisInitStatus;
    }

    // Read the data-source settings from ServiceConfig and build the SqlSessionFactory
    // from mybatis-config.xml, passing the settings in as properties.
    public boolean init(ServiceConfig config) {
        try {
            Properties properties = new Properties();
            properties.setProperty(DAOFactory.CONF_DRIVER_CLASSNAME, config.getConfig(DAOFactory.CONF_DRIVER_CLASSNAME, "com.mysql.cj.jdbc.Driver"));
            properties.setProperty(DAOFactory.CONF_JDBC_URL, config.getConfig(DAOFactory.CONF_JDBC_URL, ""));
            properties.setProperty(DAOFactory.CONF_USERNAME, config.getConfig(DAOFactory.CONF_USERNAME, ""));
            properties.setProperty(DAOFactory.CONF_PASSWORD, config.getConfig(DAOFactory.CONF_PASSWORD, ""));
            properties.setProperty(DAOFactory.CONF_MAX_POOLSIZE, config.getConfig(DAOFactory.CONF_MAX_POOLSIZE, "10"));
            properties.setProperty(DAOFactory.CONF_AUTO_COMMIT, config.getConfig(DAOFactory.CONF_AUTO_COMMIT, "false"));
            properties.setProperty(DAOFactory.CONF_CACHE_PREPARESTATEMENT, config.getConfig(DAOFactory.CONF_CACHE_PREPARESTATEMENT, "true"));
            properties.setProperty(DAOFactory.CONF_PREPARESTATEMENT_CACHE_SIZE, config.getConfig(DAOFactory.CONF_PREPARESTATEMENT_CACHE_SIZE, "300"));
            properties.setProperty(DAOFactory.CONF_PREPARESTATEMENT_CACHE_LIMIT, config.getConfig(DAOFactory.CONF_PREPARESTATEMENT_CACHE_LIMIT, "2048"));
            properties.setProperty(DAOFactory.CONF_LEAK_DETECTION_THRESHOLD, config.getConfig(DAOFactory.CONF_LEAK_DETECTION_THRESHOLD, "0"));
            properties.setProperty(DAOFactory.CONF_MAXLIFETIME, config.getConfig(DAOFactory.CONF_MAXLIFETIME, "300000"));
            properties.setProperty(DAOFactory.CONF_CONNECTION_TIMEOUT, config.getConfig(DAOFactory.CONF_CONNECTION_TIMEOUT, "10000"));

            try (Reader reader = Resources.getResourceAsReader("mybatis-config.xml")) {
                factory = new SqlSessionFactoryBuilder().build(reader, properties);
                myBatisInitStatus = true;
                LOGGER.info("mybatis init success.");
            } catch (IOException e) {
                LOGGER.error("load mybatis-config.xml failed, {}", e.getMessage());
            }
        } catch (Exception e) {
            LOGGER.error("MyBatisSqlSession init failed ", e);
        }
        return myBatisInitStatus;
    }

    public SqlSessionFactory getSqlSessionFactory() {
        return factory;
    }

}

Configuration of mybatis-config.xml

<?xml version='1.0' encoding='UTF-8' ?>
<!DOCTYPE configuration
        PUBLIC '-//mybatis.org//DTD Config 3.0//EN'
        'http://mybatis.org/dtd/mybatis-3-config.dtd'>
<configuration>
    <settings>
        <setting name="autoMappingUnknownColumnBehavior" value="WARNING"/>
        <!-- Statement timeout: the number of seconds the driver waits for the database to respond. -->
        <setting name="defaultStatementTimeout" value="25"/>
        <setting name="mapUnderscoreToCamelCase" value="true"/>
    </settings>

    <typeAliases>
        <typeAlias type='co.yhy.api.persistence.HikariDataSourceFactory' alias='HikariDataSourceFactory'/>
        <typeAlias type='co.yhy.common.pojo.Device' alias='Device'/>
        <typeAlias type='co.yhy.common.pojo.AppInfo' alias='AppInfo'/>
        <typeAlias type='co.yhy.api.persistence.resultMap.DataResult' alias='DataResult'/>
        <typeAlias type='co.yhy.api.persistence.resultMap.HistoryResult' alias='HistoryResult'/>
    </typeAliases>

    <typeHandlers>
        <typeHandler jdbcType="BLOB" javaType="byte[]" handler="org.apache.ibatis.type.BlobTypeHandler"/>
    </typeHandlers>

    <environments default='development'>
        <environment id='development'>
            <transactionManager type='JDBC'/>
            <dataSource type='HikariDataSourceFactory'>
                <property name="data_source.driverClassName" value="${data_source.driverClassName}"/>
                <property name="data_source.jdbc_url" value="${data_source.jdbc_url}"/>
                <property name="data_source.username" value="${data_source.username}"/>
                <property name="data_source.password" value="${data_source.password}"/>
                <property name="data_source.maximumPoolSize" value="${data_source.maximumPoolSize}"/>
                <property name="data_source.autoCommit" value="${data_source.autoCommit}"/>
                <property name="data_source.cachePrepStmts" value="${data_source.cachePrepStmts}"/>
                <property name="data_source.prepStmtCacheSize" value="${data_source.prepStmtCacheSize}"/>
                <property name="data_source.prepStmtCacheSqlLimit" value="${data_source.prepStmtCacheSqlLimit}"/>
                <property name="data_source.leakDetectionThreshold" value="${data_source.leakDetectionThreshold}"/>
                <property name="data_source.connectionTimeout" value="${data_source.connectionTimeout}"/>
                <property name="data_source.maxLifetime" value="${data_source.maxLifetime}"/>
            </dataSource>
        </environment>
    </environments>

    <mappers>
        <mapper resource='co/yhy/api/mappers/HistoryMapper.xml'/>
        <mapper resource='co/yhy/api/mappers/ConfigMapper.xml'/>
    </mappers>
</configuration>

Implementation of HikariDataSourceFactory

package co.yhy.api.persistence;


import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.ibatis.datasource.DataSourceFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.sql.DataSource;
import java.util.Properties;

public class HikariDataSourceFactory implements DataSourceFactory {

    private Properties properties;
    private static final Logger LOGGER = LoggerFactory.getLogger(HikariDataSourceFactory.class);

    @Override
    public void setProperties(Properties props) {
        this.properties = props;
    }

    @Override
    public DataSource getDataSource() {
        // Translate the DAOFactory.CONF_* properties into a HikariCP connection pool.
        try {
            HikariConfig hikariConfig = new HikariConfig();
            hikariConfig.setJdbcUrl(properties.getProperty(DAOFactory.CONF_JDBC_URL, ""));
            hikariConfig.setUsername(properties.getProperty(DAOFactory.CONF_USERNAME, ""));
            hikariConfig.setPassword(properties.getProperty(DAOFactory.CONF_PASSWORD, ""));
            hikariConfig.setMaximumPoolSize(Integer.parseInt(properties.getProperty(DAOFactory.CONF_MAX_POOLSIZE, "10")));
            hikariConfig.setAutoCommit(Boolean.parseBoolean(properties.getProperty(DAOFactory.CONF_AUTO_COMMIT, "false")));

            hikariConfig.setLeakDetectionThreshold(Long.parseLong(properties.getProperty(DAOFactory.CONF_LEAK_DETECTION_THRESHOLD, "0")));
            hikariConfig.setMaxLifetime(Long.parseLong(properties.getProperty(DAOFactory.CONF_MAXLIFETIME, "300000")));
            hikariConfig.setConnectionTimeout(Long.parseLong(properties.getProperty(DAOFactory.CONF_CONNECTION_TIMEOUT, "10000")));

            hikariConfig.addDataSourceProperty("cachePrepStmts",
                    properties.getProperty(DAOFactory.CONF_CACHE_PREPARESTATEMENT, "true"));
            hikariConfig.addDataSourceProperty("prepStmtCacheSize",
                    properties.getProperty(DAOFactory.CONF_PREPARESTATEMENT_CACHE_SIZE, "300"));
            hikariConfig.addDataSourceProperty("prepStmtCacheSqlLimit",
                    properties.getProperty(DAOFactory.CONF_PREPARESTATEMENT_CACHE_LIMIT, "2048"));

            return new HikariDataSource(hikariConfig);
        } catch (Exception e) {
            LOGGER.error("create HikariConfig data source error {} ", e);
        }
        return null;
    }
}

Implementation of DAOFactoryImpl

package co.yhy.api.persistence.impl;

import co.yhy.api.persistence.*;

public class DAOFactoryImpl extends DAOFactory {

    private static volatile DAOFactoryImpl instance = null;

    private DAOFactoryImpl() {
    }

    public static DAOFactoryImpl getInstance() {
        if (instance == null) {
            synchronized (DAOFactoryImpl.class) {
                if (instance == null) {
                    instance = new DAOFactoryImpl();
                }
            }
        }
        return instance;
    }

    @Override
    public IHistoryDAO getHistoryDAO() { return new HistoryDAO(); }

    @Override
    public IConfigDAO getConfigDAO() { return new ConfigDAO(); }

}

Implementation of DAOFactory

package co.yhy.api.persistence;

import co.yhy.api.persistence.impl.DAOFactoryImpl;

public abstract class DAOFactory {
    public static final String CONF_JDBC_URL = "data_source.jdbc_url";
    public static final String CONF_USERNAME = "data_source.username";
    public static final String CONF_PASSWORD = "data_source.password";
    public static final String CONF_MAX_POOLSIZE = "data_source.maximumPoolSize";
    public static final String CONF_AUTO_COMMIT = "data_source.autoCommit";
    public static final String CONF_DATA_SOURCE_CLASS_NAME = "data_source.dataSourceClassName";
    public static final String CONF_DRIVER_CLASSNAME = "data_source.driverClassName";
    public static final String CONF_CACHE_PREPARESTATEMENT = "data_source.cachePrepStmts";
    public static final String CONF_PREPARESTATEMENT_CACHE_SIZE = "data_source.prepStmtCacheSize";
    public static final String CONF_PREPARESTATEMENT_CACHE_LIMIT = "data_source.prepStmtCacheSqlLimit";
    public static final String CONF_LEAK_DETECTION_THRESHOLD = "data_source.leakDetectionThreshold";
    public static final String CONF_MAXLIFETIME = "data_source.maxLifetime";
    public static final String CONF_CONNECTION_TIMEOUT = "data_source.connectionTimeout";

    public static final int MYSQL = 1;

    public static DAOFactory getDAOFactory(
            int whichFactory) {
        switch (whichFactory) {
            case MYSQL:
                return DAOFactoryImpl.getInstance();
            default:
                return null;
        }
    }

    public abstract IHistoryDAO getHistoryDAO();

    public abstract IConfigDAO getConfigDAO();

}

Implementation and use of ConfigDAO

  1. Implement ConfigMapper.java, the interface that declares the SQL methods;
  2. Implement ConfigMapper.xml with the SQL for each method, and register the path of ConfigMapper.xml in mybatis-config.xml;
  3. Implement the IConfigDAO interface in ConfigDAO, which covers all insert, query, update, and delete operations on the config table.

1. Implement ConfigMapper.java, the interface that declares the SQL methods

package co.yhy.api.persistence.mappers;

import org.apache.ibatis.annotations.Param;

import java.sql.Timestamp;

public interface ConfigMapper {

    int createConfig(@Param("project") String project, @Param("confName") String confName, @Param("confValue") String value,
                     @Param("updateTime") Timestamp updateTime);

    String getConfig(@Param("project") String project, @Param("confName") String confName);

    int deleteConfig(@Param("project") String project, @Param("confName") String confName);

}

2. Implement ConfigMapper.xml with the SQL for each method

<?xml version='1.0' encoding='UTF-8' ?>
<!DOCTYPE mapper PUBLIC '-//mybatis.org//DTD Mapper 3.0//EN'
        'http://mybatis.org/dtd/mybatis-3-mapper.dtd'>

<mapper namespace='co.yhy.api.persistence.mappers.ConfigMapper'>

    <insert id='createConfig'>
        INSERT INTO configs (project, conf_name, conf_value) VALUES
        ( #{project, jdbcType=VARCHAR}, #{confName, jdbcType=VARCHAR}, #{confValue, jdbcType=VARCHAR} )
        ON DUPLICATE KEY UPDATE conf_value=#{confValue}, update_time=#{updateTime}
    </insert>

    <select id="getConfig" resultType='String'>
        SELECT conf_value FROM configs
        <where>
            project=#{project} AND conf_name=#{confName}
        </where>
    </select>

    <delete id="deleteConfig">
        DELETE FROM configs
        <where>
            project=#{project} AND conf_name=#{confName}
        </where>
    </delete>

</mapper>

3. Implement the IConfigDAO interface, ConfigDAO, and BasicDAO

package co.yhy.api.persistence;


public interface IConfigDAO {

    int createConfig(String project, String confName, String value) throws PersistenceException;

    String readConfig(String project, String confName) throws PersistenceException;

    int deleteConfig(String project, String confName) throws PersistenceException;

}

Implementation of ConfigDAO

package co.yhy.api.persistence.impl;

import co.yhy.api.persistence.BasicDAO;
import co.yhy.api.persistence.IConfigDAO;
import co.yhy.api.persistence.PersistenceException;
import co.yhy.api.persistence.mappers.ConfigMapper;
import co.yhy.api.util.ApiUtility;

import java.sql.Timestamp;

public class ConfigDAO extends BasicDAO implements IConfigDAO {

    @Override
    public int createConfig(String project, String confName, String confValue) throws PersistenceException {
        return autoCommitWithReturn("create config fail", sqlSession -> {
            ConfigMapper configMapper = sqlSession.getMapper(ConfigMapper.class);
            Timestamp updateTime = ApiUtility.getCurrentTimestamp();
            return configMapper.createConfig(project, confName, confValue, updateTime);
        });
    }

    @Override
    public String readConfig(String project, String confName) throws PersistenceException {
        return query("read config fail", sqlSession -> {
            ConfigMapper configMapper = sqlSession.getMapper(ConfigMapper.class);
            return configMapper.getConfig(project, confName);
        });
    }

    @Override
    public int deleteConfig(String project, String confName) throws PersistenceException {
        return autoCommitWithReturn("delete config fail", sqlSession -> {
            ConfigMapper configMapper = sqlSession.getMapper(ConfigMapper.class);
            return configMapper.deleteConfig(project, confName);
        });
    }
}

Implementation of BasicDAO

package co.yhy.api.persistence;

import co.yhy.falcon.Falcon;
import co.yhy.api.exception.InvalidClientIdException;
import co.yhy.common.Constants;
import co.yhy.api.ApiConstants;
import org.apache.ibatis.session.SqlSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public abstract class BasicDAO {
    private static final Logger LOGGER = LoggerFactory.getLogger(BasicDAO.class);

    public interface DBAutoCommit {
        void commit(SqlSession sqlSession) throws PersistenceException, InvalidClientIdException;
    }

    public interface DBQuery<T> {
        T query(SqlSession sqlSession) throws PersistenceException, InvalidClientIdException;
    }

    public interface DBAutoCommitWithReturn<T> {
        T commitWithReturn(SqlSession sqlSession) throws PersistenceException;
    }

    protected void autoCommit(String errMsg, DBAutoCommit operation) throws PersistenceException {
        SqlSession sqlSession = null;
        try {
            sqlSession = MyBatisSqlSession.getInstance().getSqlSessionFactory().openSession();
            operation.commit(sqlSession);
            sqlSession.commit();
        } catch (Exception e) {
            rollback(sqlSession);
            LOGGER.error(errMsg, e);
            Falcon.sum(Constants.DB_OPERATION_FAIL_NUM, 1);
            throw new PersistenceException(e);
        } finally {
            close(sqlSession);
        }
    }

    @SuppressWarnings(value = "unchecked")
    protected <T> T autoCommitWithReturn(String errMsg, DBAutoCommitWithReturn operation) throws PersistenceException{
        SqlSession sqlSession = null;
        try {
            sqlSession = MyBatisSqlSession.getInstance().getSqlSessionFactory().openSession();
            T t = (T) operation.commitWithReturn(sqlSession);
            sqlSession.commit();
            return t;
        } catch (Exception e) {
            rollback(sqlSession);
            LOGGER.error(errMsg, e);
            Falcon.sum(Constants.DB_OPERATION_FAIL_NUM, 1);
            throw new PersistenceException(e);
        } finally {
            close(sqlSession);
        }
    }

    protected <T> T query(String errMsg, DBQuery<T> operation) throws PersistenceException {
        SqlSession sqlSession = null;
        try {
            sqlSession = MyBatisSqlSession.getInstance().getSqlSessionFactory().openSession();
            return operation.query(sqlSession);
        } catch (Exception e) {
            LOGGER.error(errMsg, e);
            Falcon.sum(Constants.DB_OPERATION_FAIL_NUM, 1);
            throw new PersistenceException(e);
        } finally {
            close(sqlSession);
        }
    }


    private void rollback(SqlSession sqlSession) {
        try {
            if (sqlSession != null) {
                sqlSession.rollback();
            }
        } catch (Exception e) {
            LOGGER.error(String.format("Failed to rollback: %s.", e.getMessage()));
        }
    }

    private void close(SqlSession sqlSession) {
        Falcon.sum(ApiConstants.METRIC_DB_OPERATION_NUM, 1);
        if (sqlSession != null) {
            sqlSession.close();
        }
    }
}
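
For completeness, the void autoCommit helper (which ConfigDAO above never uses) follows the same pattern as autoCommitWithReturn. A minimal sketch, assuming a hypothetical DeviceMapper with an updateStatus method; neither appears in the snippets above:

    // Hypothetical example of a write with no return value: open a session,
    // run the mapper call, commit, and let BasicDAO handle rollback and close.
    public void updateDeviceStatus(String deviceId, int status) throws PersistenceException {
        autoCommit("update device status fail", sqlSession -> {
            DeviceMapper deviceMapper = sqlSession.getMapper(DeviceMapper.class);
            deviceMapper.updateStatus(deviceId, status);
        });
    }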

Start MyBatis and run SQL operations

ServiceConfig serviceConfig = ServiceConfig.getInstance();
boolean result = serviceConfig.init(confFilePath);
MyBatisSqlSession.getInstance().init(serviceConfig);
DAOFactory factory = DAOFactory.getDAOFactory(DAOFactory.MYSQL);
configDAO = factory.getConfigDAO();
configDAO.createConfig(......);
configDAO.readConfig(......);
configDAO.deleteConfig(......);
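
With the IConfigDAO signatures above, a concrete call sequence might look like this; the project name and config key are illustrative values only, not taken from the original article:

configDAO.createConfig("demo_project", "feature.enabled", "true");
String value = configDAO.readConfig("demo_project", "feature.enabled");
configDAO.deleteConfig("demo_project", "feature.enabled");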
