solr-geohash: searching and grouping by distance
MySQL data is loaded into Solr through Solr's data-import (DataImportHandler) query. The Java side uses SolrJ/HTTP to connect to Solr and trigger the data-import, setting paging parameters on the import request so that a single request never pulls the whole table. It then polls the import status (sleeping briefly between checks with Thread.sleep); once the current page has been imported successfully it triggers the next page, until all data is in. After the import is complete, the map client sends its current latitude/longitude and the maximum distance visible at the current zoom level to the server; the server queries Solr and returns the coordinates, aggregate counts and other information of the matching documents to the client.
Solr schema configuration
<fieldtype name="geohash" class="solr.GeoHashField"/>
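The fieldType alone is not enough: the query code below passes "geohash" as the spatial field, so the schema also needs a field of this type. A matching declaration might look like the following (the indexed/stored flags are only an example):
<field name="geohash" type="geohash" indexed="true" stored="true"/>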
Data-import (DataImportHandler) configuration
<dataConfig>
    <dataSource type="JdbcDataSource"
                driver="com.mysql.jdbc.Driver"
                url="jdbc:mysql://192.168.211.226:3306/adwl_dev_v2?useUnicode=true&amp;characterEncoding=utf8"
                user="root"
                password="111111"/>
    <document>
        <!-- MySQL LIMIT offset,count: startpage is the row offset, endpage is the page size -->
        <entity name="solr_docking_insert"
                transformer="DateFormatTransformer"
                query="SQL query *** limit ${dataimporter.request.startpage},${dataimporter.request.endpage}">
        </entity>
    </document>
</dataConfig>
// Dynamically trigger the Solr data-import (full-import), one page at a time
public static String importData(Long start, String url) {
    System.out.println("Cargo data import starting: " + start + " " + perPage);
    Map<String, String> map = new HashMap<String, String>();
    if (isclear) {
        // on the first call, wait briefly and ask the data-import to clean the existing index
        try {
            Thread.sleep(2000);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        isclear = false;
        map.put("clean", "true");
    } else {
        map.put("clean", "false");
    }
    map.put("command", "full-import");
    map.put("commit", "true");
    map.put("debug", "false");
    map.put("endpage", perPage + "");   // page size, substituted into ${dataimporter.request.endpage}
    map.put("indent", "true");
    map.put("optimize", "false");
    map.put("startpage", start + "");   // row offset, substituted into ${dataimporter.request.startpage}
    map.put("verbose", "false");
    map.put("wt", "json");
    // send the HTTP POST request to the data-import handler
    String str = HttpPostUtil.doPost(url, map, null);
    return str;
}
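The paged import described at the top can then be driven by a small loop. Below is a minimal sketch, assuming that perPage and isclear are static fields of the same class, that totalRows comes from a count query against MySQL, and that the DataImportHandler's status command is used to wait for each page to finish:
// Hedged sketch of the paged-import driver; values here are illustrative only.
static int perPage = 1000;      // rows per full-import request (assumed field used by importData)
static boolean isclear = true;  // clean the index only before the first page (assumed field)

public static void importAll(long totalRows, String dataImportUrl) throws InterruptedException {
    for (long start = 0; start < totalRows; start += perPage) {
        importData(start, dataImportUrl);
        // poll the data-import status until the handler is no longer busy, then start the next page
        Map<String, String> statusParams = new HashMap<String, String>();
        statusParams.put("command", "status");
        statusParams.put("wt", "json");
        while (true) {
            Thread.sleep(2000);
            String status = HttpPostUtil.doPost(dataImportUrl, statusParams, null);
            // the DIH status response contains "status":"busy" while an import is still running
            if (status == null || !status.contains("busy")) {
                break;
            }
        }
    }
}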
public SimpleResult groupCargo(LbsSearchRequestDto lbsSearchRequestDto) {
    try {
        if (lbsSearchRequestDto == null)
            return new SimpleResult("body must not be empty", StateCode.VALIDATOR_FAIL, null, false);
        LbsSearchBodyDto lbsBody = lbsSearchRequestDto.getBodyDto();
        if (lbsBody.getLevel() == null)
            return new SimpleResult("level must not be empty", StateCode.VALIDATOR_FAIL, null, false);
        // filter queries that pick documents at the right administrative granularity for the zoom level
        List<String> fl = new ArrayList<String>();
        if (lbsBody.getLevel() == 1) {
            // level 1 = province zoom: group by province
            fl.add("rciShipperCity:notfind");
            fl.add("rciShipperArea:notfind");
        } else if (lbsBody.getLevel() == 2) {
            // level 2 = city zoom: group by city
            fl.add("rciShipperArea:notfind");
            fl.add("-rciShipperCity:notfind");
        } else if (lbsBody.getLevel() == 3) {
            // level 3 = district/county zoom: group by district/county
            fl.add("-rciShipperArea:notfind");
        }
        if (lbsBody.getPageSize() == null) lbsBody.setPageSize(0);
        if (lbsBody.getPageNumber() == null) lbsBody.setPageNumber(50);
        // spatial query by latitude/longitude and distance
        PageData data = solrCargoLLSearch.queryLL(lbsBody.getPageSize(), lbsBody.getPageNumber(),
                lbsBody.getD(), "geohash", lbsBody.getLat(), lbsBody.getLon(), fl);
        LbsSearchGroupResponseDto lsgrb = new LbsSearchGroupResponseDto();
        LbsSearchGroupResponseBodyDto lsgrbd = new LbsSearchGroupResponseBodyDto();
        lsgrb.setBodyDto(lsgrbd);
        List<LbsGroupMsg> lbsGroupMsgs = new ArrayList<LbsGroupMsg>();
        String solrMsg = data.getData();
        JSONArray array = JSONArray.parseArray(solrMsg);
        for (int i = 0; i < array.size(); i++) {
            // TODO: merge the Solr documents into LbsGroupMsg entries
        }
        lsgrbd.setLbsGroupMsgs(lbsGroupMsgs);
        return new SimpleResult("success", 200, lsgrbd, true);
    } catch (Exception e) {
        e.printStackTrace();
        return new SimpleResult("server error", 500, null, true);
    }
}
/**
 * Query Solr with an optional {!geofilt} spatial filter.
 *
 * @param orderField sort field; must be an indexed field
 * @param start      paging start offset
 * @param rows       number of rows to return per page
 * @param latAndlon  spatial parameters (pt, d, sfield) for the {!geofilt} filter; may be null
 * @param filters    additional filter queries
 * @return the matching page of documents, or null if the query fails
 */
public PageData queryDataArrays(String orderField, Integer start, Integer rows, Map<String, String> latAndlon, List<String> filters) {
    PageData pageData = null;
    // q is the base query string defined elsewhere in this class (typically "*:*",
    // so that the filter queries below do the actual narrowing)
    SolrQuery query = new SolrQuery(q);
    query.setStart(start);
    query.setRows(rows);
    if (null != latAndlon) {
        // spatial filter; expects pt ("lat,lon"), d (distance in km) and sfield entries in latAndlon
        query.addFilterQuery("{!geofilt}");
        for (String key : latAndlon.keySet()) {
            System.out.println(key + " : " + latAndlon.get(key));
            query.set(key, latAndlon.get(key));
        }
    }
    if (filters != null)
        for (String filter : filters)
            query.addFilterQuery(filter);
    if (orderField != null)
        query.addSort(orderField, ORDER.desc);
    try {
        QueryResponse rsp = getServer().query(query);
        SolrDocumentList docs = rsp.getResults();
        pageData = new PageData();
        pageData.setSize(docs.getNumFound());
        pageData.setData(getJsons(docs));
    } catch (Exception e) {
        e.printStackTrace();
    }
    return pageData;
}
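For reference, the latAndlon map carries the standard Solr {!geofilt} parameters (pt, d, sfield). A hedged usage sketch follows; the sort field name, coordinates and radius below are illustrative only:
// Illustrative call; "rciCreateTime" and the coordinates are made-up example values.
Map<String, String> latAndlon = new HashMap<String, String>();
latAndlon.put("pt", "39.90,116.40");   // center point as "lat,lon"
latAndlon.put("d", "50");              // search radius in kilometers
latAndlon.put("sfield", "geohash");    // the geohash field declared in schema.xml
List<String> filters = new ArrayList<String>();
filters.add("-rciShipperArea:notfind"); // district/county-level documents only
PageData page = queryDataArrays("rciCreateTime", 0, 50, latAndlon, filters);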