ElasticSearch
环境准备
1.安装ElasticSearch
# 创建网络
docker network create es-net
docker network ls
# 拉取镜像
docker pull elasticsearch:7.12.1
# 创建容器
docker run -d \
--name es \
-e "ES_JAVA_OPTS=-Xms512m -Xmx512m" \
-e "discovery.type=single-node" \
-v es-data:/usr/share/elasticsearch/data \
-v es-plugins:/usr/share/elasticsearch/plugins \
--privileged \
--network es-net \
-p 9200:9200 \
-p 9300:9300 \
elasticsearch:7.12.1
2.安装kibana
# 拉取镜像
docker pull kibana:7.12.1
# 创建容器
docker run -d \
--name kibana \
-e ELASTICSEARCH_HOSTS=http://es:9200 \
--network es-net \
-p 5601:5601 \
kibana:7.12.1
3.安装IK分词器插件
# 1.从github下载IK分词插件
# https://github.com/medcl/elasticsearch-analysis-ik/releases/tag/v7.12.1
# 2.查看卷(找到es容器插件的位置)
docker volume inspect es-plugins
# /var/lib/docker/volumes/es-plugins/_data
# 3.将解压后的目录上传到插件目录
# 4.重启es
docker restart es
# 5.测试
# http://10.10.0.100:5601/app/dev_tools#/console
GET /_analyze
{
"text": "我爱中国ok",
"analyzer": "ik_smart"
}
# analyzer取ik_smart表示智能切分,粗粒度;取ik_max_word表示最细切分,细粒度。
# 6.自定义词典
# 修改插件目录下配置文件
config/IKAnalyzer.cfg.xml
<!--用户可以在这里配置自己的扩展字典 -->
<entry key="ext_dict">ext.dic</entry>
<!--用户可以在这里配置自己的扩展停止词字典-->
<entry key="ext_stopwords">stopword.dic</entry>
# .dic是词典文件,内容可自定义
# 修改后需重启es容器
索引库的操作
1.使用kibana的dev tools操作索引库
(1)创建索引库
格式:PUT /索引库名
举例:
PUT /book
{
"mappings": {
"properties": {
"name":{
"type": "text",
"analyzer": "ik_smart"
},
"price":{
"type": "float"
},
"author":{
"properties": {
"firstname":{
"type":"keyword"
},
"lastname":{
"type":"keyword"
}
}
}
}
}
}
(2)查看索引库
格式:GET /索引库名
举例:GET /book
(3)删除索引库
格式:DELETE /索引库名
举例:DELETE /book
2.使用RestClient操作索引库
(1)导入依赖
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>7.12.1</version>
</dependency>
注意:RestClient组件的版本必须和ES版本一致
(2)Java编程
示例代码如下:
public class RestClientCRUD {

    // Client pointing at the single-node ES instance started above.
    // JUnit 4 creates a fresh test-class instance per @Test, so each test
    // gets (and must close) its own client.
    RestHighLevelClient client = new RestHighLevelClient(
            RestClient.builder(new HttpHost("10.10.0.100", 9200, "http")));

    /**
     * Read: fetch the document with id "1" from the "book" index and
     * deserialize its _source JSON into a Book object.
     *
     * @throws IOException if the request to Elasticsearch fails
     */
    @Test
    public void test1() throws IOException {
        try {
            GetRequest request = new GetRequest("book", "1");
            GetResponse response = client.get(request, RequestOptions.DEFAULT);
            String jsonStr = response.getSourceAsString();
            System.out.println(jsonStr);
            // Convert the JSON _source string into a Java object.
            Book book = JSON.parseObject(jsonStr, Book.class);
            System.out.println(book.toString());
        } finally {
            // Close in finally so the connection is released even if
            // the ES call above throws.
            client.close();
        }
    }

    /**
     * Create: index a single document with explicit id "1" into "book".
     *
     * @throws IOException if the request to Elasticsearch fails
     */
    @Test
    public void test2() throws IOException {
        try {
            String jsonStr="{" +
                    " \"name\":\"mysql数据库教程\"," +
                    " \"author\":{" +
                    " \"firstname\":\"诸葛\"," +
                    " \"lastname\":\"亮\"" +
                    " }," +
                    " \"price\":213," +
                    " \"category\":\"计算机类\"" +
                    "}";
            IndexRequest request = new IndexRequest("book");
            request.id("1");
            request.source(jsonStr, XContentType.JSON);
            client.index(request, RequestOptions.DEFAULT);
        } finally {
            client.close();
        }
    }

    /**
     * Update: partially update document "1" in "book" — only the listed
     * fields (price, name) are changed; other fields are kept.
     *
     * @throws IOException if the request to Elasticsearch fails
     */
    @Test
    public void test3() throws IOException {
        try {
            UpdateRequest request = new UpdateRequest("book", "1");
            // doc(...) takes alternating field-name / value pairs.
            request.doc("price", 999, "name", "Spring Cloud微服务");
            client.update(request, RequestOptions.DEFAULT);
        } finally {
            client.close();
        }
    }

    /**
     * Delete: remove document "2" from the "book" index.
     *
     * @throws IOException if the request to Elasticsearch fails
     */
    @Test
    public void test4() throws IOException {
        try {
            DeleteRequest request = new DeleteRequest("book", "2");
            client.delete(request, RequestOptions.DEFAULT);
        } finally {
            client.close();
        }
    }

    /**
     * Bulk create: index two documents ("101", "102") in a single
     * round-trip using a BulkRequest.
     *
     * @throws IOException if the request to Elasticsearch fails
     */
    @Test
    public void test5() throws IOException {
        try {
            String jsonStr1="{" +
                    " \"name\":\"Java web教程\"," +
                    " \"author\":{" +
                    " \"firstname\":\"赵\"," +
                    " \"lastname\":\"云\"" +
                    " }," +
                    " \"price\":219," +
                    " \"category\":\"计算机类\"" +
                    "}";
            String jsonStr2="{" +
                    " \"name\":\"hadoop教程\"," +
                    " \"author\":{" +
                    " \"firstname\":\"关\"," +
                    " \"lastname\":\"羽\"" +
                    " }," +
                    " \"price\":413," +
                    " \"category\":\"计算机类\"" +
                    "}";
            IndexRequest request1 = new IndexRequest("book").id("101").source(jsonStr1, XContentType.JSON);
            IndexRequest request2 = new IndexRequest("book").id("102").source(jsonStr2, XContentType.JSON);
            BulkRequest bulkRequest = new BulkRequest();
            // add(...) returns the BulkRequest, so calls can be chained.
            bulkRequest.add(request1).add(request2);
            client.bulk(bulkRequest, RequestOptions.DEFAULT);
        } finally {
            client.close();
        }
    }
}