
Deep Pagination in ElasticSearch

Author: 球球T爸爸 | Published 2017-02-09 16:38

    <h4>Lucene 3.5 adds deep-pagination support through the searchAfter method (http://www.cnblogs.com/yuanermen/archive/2012/02/09/2343993.html)</h4>
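    Before the full example, here is the core of the pattern on its own: a minimal sketch assuming the plain Lucene 3.5 API, where the index path, the `title` field and the page size are illustrative placeholders rather than values from the article.
    <pre>
    import java.io.File;

    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.FSDirectory;

    public class SearchAfterSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder index location; any existing Lucene 3.x index will do.
            IndexSearcher searcher = new IndexSearcher(
                    IndexReader.open(FSDirectory.open(new File("/tmp/blog-index"))));
            Query query = new MatchAllDocsQuery();
            int pageSize = 10;

            ScoreDoc lastDoc = null;   // null = start from the first page
            TopDocs page;
            do {
                // searchAfter only keeps hits that sort after the previous page's
                // last hit, so the priority queue stays at pageSize instead of
                // growing with the offset as it does with from/size-style paging.
                page = searcher.searchAfter(lastDoc, query, pageSize);
                for (ScoreDoc sd : page.scoreDocs) {
                    System.out.println(searcher.doc(sd.doc).get("title"));
                }
                if (page.scoreDocs.length > 0) {
                    lastDoc = page.scoreDocs[page.scoreDocs.length - 1];
                }
            } while (page.scoreDocs.length == pageSize);

            searcher.close();
        }
    }
    </pre>
    The full blog-search method below applies the same idea, adding query construction, IK analysis and hit highlighting on top of the searchAfter call.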
    <pre>
    @Override
    public List<BlogsDO> searchBlogsList(String content, String bTypeId, String sDate,
            String eDate, Page page) throws IOException, ParseException {
        List<BlogsDO> blogList = new ArrayList<BlogsDO>();
        TokenStream tokenStream = null;
        try {
            analyzer = new IKAnalyzer();
            // Obtain the IndexSearcher and QueryParser
            IndexSearcher indexSearch = commonIndexWriter.getIndexSearcher();
            QueryParser queryParser = commonIndexWriter.getQueryParser();
            // Assemble the search conditions
            String str = "";
            if (StringUtils.isNotEmpty(content)) {
                str = "title:" + content + " content:" + content;
            }
            if (StringUtils.isNotEmpty(bTypeId) && !bTypeId.equals("-1")) {
                if (StringUtils.isNotEmpty(str)) {
                    str = str + " AND bTypeId:" + bTypeId;
                } else {
                    str = str + " bTypeId:" + bTypeId;
                }
            }
            if (StringUtils.isNotEmpty(sDate)) {
                if (StringUtils.isNotEmpty(str)) {
                    str = str + " AND gmt_create:[" + sDate + " TO " + eDate + "]";
                } else {
                    str = str + " gmt_create:[" + sDate + " TO " + eDate + "]";
                }
            }
            // Build the query
            Query query = queryParser.parse(str);
            // Index of the last document of the previous page
            int index = (page.getCurrentPage() - 1) * page.getPerPageSize();
            // This first search only exists to recover that document's ScoreDoc,
            // so it must fetch at least `index` hits (a fixed count such as 10
            // would break for any page beyond the first).
            TopDocs result = indexSearch.search(query, Math.max(index, 1));
            // For the first page scoreDoc stays null and searchAfter starts at the top.
            ScoreDoc scoreDoc = null;
            if (index > 0 && index <= result.scoreDocs.length) {
                // Document positions start at 0, hence index - 1
                scoreDoc = result.scoreDocs[index - 1];
            }
            // Pagination: fetch one page of hits after the previous page's last document
            TopDocs hits = indexSearch.searchAfter(scoreDoc, query, page.getPerPageSize());
            // Total number of matching documents, used by the pager
            page.setCounts(hits.totalHits);
            BlogsDO blog = null;
            // Walk hits.scoreDocs, restore each Document with indexSearch.doc()
            // and copy the stored fields into a BlogsDO
            for (int i = 0; i < hits.scoreDocs.length; i++) {
                ScoreDoc sdoc = hits.scoreDocs[i];
                Document doc = indexSearch.doc(sdoc.doc);
                blog = new BlogsDO();
                String title = doc.get("title");
                String mark = doc.get("content");
                // Highlight matched terms in red
                SimpleHTMLFormatter simplehtml =
                        new SimpleHTMLFormatter("<font color='red'>", "</font>");
                Highlighter highlighter = new Highlighter(simplehtml, new QueryScorer(query));
                if (title != null) {
                    tokenStream = analyzer.tokenStream("title", new StringReader(title));
                    String highLightText = highlighter.getBestFragment(tokenStream, title);
                    blog.setTitle(highLightText == null ? title : highLightText);
                } else {
                    blog.setTitle(title);
                }
                if (mark != null) {
                    tokenStream = analyzer.tokenStream("content", new StringReader(mark));
                    String highLightText = highlighter.getBestFragment(tokenStream, mark);
                    blog.setContent(highLightText == null ? mark : highLightText);
                } else {
                    blog.setContent(mark);
                }
                blog.setBlogsId(Integer.valueOf(doc.get("blogsId")));
                blog.setNickName(doc.get("nickName"));
                blog.setbTypeId(doc.get("bTypeId"));
                blog.setbTypeName(doc.get("bTypeName"));
                blog.setRevDate(doc.get("gmt_create"));
                SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
                blog.setGmtCreate(sdf.parse(doc.get("gmt_create")));
                blogList.add(blog);
            }
            indexSearch.close();
        } catch (java.text.ParseException e) {
            e.printStackTrace();
        } catch (InvalidTokenOffsetsException e) {
            e.printStackTrace();
        }
        return blogList;
    }
    </pre>
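    One caveat worth noting about the method above: searchAfter is a cursor, so it needs the previous page's last ScoreDoc. When the caller jumps straight to page N, the method must first re-fetch the leading `index` hits just to recover that cursor, which still pays part of the deep-paging cost. The real savings come when results are consumed page by page and the cursor is carried forward between requests, which is the same model Elasticsearch exposes through its `search_after` parameter.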
