es之java插入优化(批量插入)
如果有大批量的文档数据需要插入,这个时候单条插入操作显然是不合理的;
之前学习的命令行批量执行方式:
POST /_bulk { "delete": { "_index": "website", "_type": "blog", "_id": "123" }} { "create": { "_index": "website", "_type": "blog", "_id": "123" }} { "title": "My first blog post" } { "index": { "_index": "website", "_type": "blog" }} { "title": "My second blog post" } { "update": { "_index": "website", "_type": "blog", "_id": "123", "_retry_on_conflict" : 3} } { "doc" : {"title" : "My updated blog post"} }
@Test public void BulkInsertDocument() throws IOException { BulkRequestBuilder bulkRequest = client.prepareBulk(); // either use client#prepare, or use Requests# to directly build index/delete requests bulkRequest.add(client.prepareIndex("weibo", "article", "1") .setSource(jsonBuilder() .startObject() .field("user", "张三") .field("postDate", new Date()) .field("message", "近段时间关于马蓉的消息传得到处都是,而且我们也发现她这一段时间似乎小动作不断。对于她近期的所做所为,王宝强方面则显得冷静一些,要求法庭二审选择公开审理。") .endObject() ) ); bulkRequest.add(client.prepareIndex("weibo", "article", "2") .setSource(jsonBuilder() .startObject() .field("user", "王二") .field("postDate", new Date()) .field("message", "新浪2016里约奥运站为您全程报道第31届里约夏季奥林匹克运动会,金牌榜,赛程赛果,赛事直播,高清图片,中国队比赛信息第一时间推送,与奥运人物微博互动") .endObject() ) ); BulkResponse bulkResponse = bulkRequest.get(); if (bulkResponse.hasFailures()) { // process failures by iterating through each bulk response item } }
这种方式也会有问题,比如在批量插入的时候,多大数据量插入一次、多少条插入一次、多少秒插入一次这样的定时定量的优化都是没有的,那么接下来让我们再看一下批量插入的优化操作
2:优化后的批量执行方式
@Test public void BulkInsertDocumen2t() throws Exception { BulkProcessor bulkProcessor = BulkProcessor.builder(client, new BulkProcessor.Listener() { //todo beforeBulk会在批量提交之前执行 public void beforeBulk(long l, org.elasticsearch.action.bulk.BulkRequest bulkRequest) { System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据---"); } //TODO 第一个afterBulk会在批量成功后执行,可以跟beforeBulk配合计算批量所需时间 public void afterBulk(long l, org.elasticsearch.action.bulk.BulkRequest bulkRequest, BulkResponse bulkResponse) { System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据成功---"); } //TODO 第二个afterBulk会在批量失败后执行 public void afterBulk(long l, org.elasticsearch.action.bulk.BulkRequest bulkRequest, Throwable throwable) { System.out.println("---尝试操作" + bulkRequest.numberOfActions() + "条数据失败---"); } }) // 1w次请求执行一次bulk .setBulkActions(10000) // 1gb的数据刷新一次bulk .setBulkSize(new ByteSizeValue(1, ByteSizeUnit.GB)) // 固定5s必须刷新一次 .setFlushInterval(TimeValue.timeValueSeconds(5)) // 并发请求数量, 0不并发, 1并发允许执行 .setConcurrentRequests(1) // 设置退避, 100ms后执行, 最大请求3次 .setBackoffPolicy( BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(100), 3)) .build(); // 添加请求数据 Map<String, Object> m = new HashMap<String, Object>(); m.put("document", "这是异步批量插入测试"); bulkProcessor.add(new IndexRequest("testblog", "test", "1").source(m)); bulkProcessor.add(new IndexRequest("testblog", "test", "2").source(m)); bulkProcessor.flush(); // bulkProcessor.add(new DeleteRequest("testblog", "test", "2")); // 关闭 bulkProcessor.awaitClose(10, TimeUnit.MINUTES); }