我是elk stack的新手
我已经尝试过,但没有弄明白完整的工作流程。
例如,我执行了下面的搜索查询:
POST <index-name>/_search?scroll=2m
{
"query": {"match_all": {}}
}
GET /_search/scroll
{
"scroll_id" : "<scroll_id>"
}
"took" : 2,
"timed_out" : false,
"terminated_early" : true,
"_shards" : {
"total" : 1,
"successful" : 1,
"skipped" : 0,
"failed" : 0
},
"hits" : {
"total" : {
"value" : 13059,
"relation" : "eq"
}
"caused_by" : {
"type" : "search_context_missing_exception",
"reason" : "No search context found for id"
Kibana 7.9.3
Elastic Search 7.9.3
scroll_id
值在每个响应中都会发生变化。因此下一次搜索调用需要使用上一次搜索响应中的新滚动 ID。
你正确地开始了
POST <index-name>/_search?scroll=2m
{
"query": {"match_all": {}}
}
在您得到的响应中,一个名为
_scroll_id
的字段包含下一个滚动 ID 以用于下一次调用(如光标),我们称它为 scroll_id_1
:
GET /_search/scroll
{
"scroll_id" : "<scroll_id_1>",
"scroll": "2m"
}
在下一个响应中,您将获得一个新的
_scroll_id
值(我们称之为scroll_id_2
),您需要将其用于下一次调用:
GET /_search/scroll
{
"scroll_id" : "<scroll_id_2>",
"scroll": "2m"
}
你一直这样做,直到得到一个空的结果集,此时你可以清除搜索上下文
DELETE /_search/scroll
{
"scroll_id" : "<scroll_id_n>"
}
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.LocalDate;
/**
 * Reads an indexing request description from a local JSON file, extracts a
 * handful of known fields, and indexes them as a new document in Elasticsearch.
 */
public class ElasticServiceCaller {
    private static final String INDEX_NAME = "my_index";
    private static final String FILE_PATH = "requestbody.txt";

    /**
     * Reads the JSON request body from {@code FILE_PATH}, extracts the date
     * range ({@code date_range.gte}/{@code date_range.lte}), {@code log_file_path},
     * {@code criteria}, {@code key} and {@code value}, then indexes them as a
     * document in {@code INDEX_NAME}.
     *
     * @param client high-level REST client used to talk to the cluster
     * @throws IOException if the file cannot be read, the JSON cannot be
     *         parsed, or the index call fails
     * @throws IllegalArgumentException if the body lacks a complete date range
     */
    public void callService(RestHighLevelClient client) throws IOException {
        // Read the raw JSON bytes directly — avoids the platform-charset
        // round-trip and the line concatenation of the Reader-based approach.
        byte[] requestBodyBytes = Files.readAllBytes(Paths.get(FILE_PATH));

        LocalDate startDate = null;
        LocalDate endDate = null;
        String logFilePath = null;
        String criteria = null;
        String key = null;
        String value = null;

        // XContentParser is Closeable; try-with-resources releases it even on
        // a parse failure. Parsers are created from the XContent instance in 7.x.
        try (XContentParser parser = XContentType.JSON.xContent().createParser(
                NamedXContentRegistry.EMPTY,
                DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
                requestBodyBytes)) {
            XContentParser.Token token;
            while ((token = parser.nextToken()) != null) {
                if (token != XContentParser.Token.FIELD_NAME) {
                    continue;
                }
                String fieldName = parser.currentName();
                if ("date_range".equals(fieldName)) {
                    parser.nextToken(); // step into the nested START_OBJECT
                    while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
                        String name = parser.currentName();
                        parser.nextToken(); // advance to the value token
                        if ("gte".equals(name)) {
                            startDate = LocalDate.parse(parser.text());
                        } else if ("lte".equals(name)) {
                            endDate = LocalDate.parse(parser.text());
                        }
                    }
                } else if ("log_file_path".equals(fieldName)) {
                    // nextText() does not exist on XContentParser; the idiom
                    // is nextToken() to reach the value, then text().
                    parser.nextToken();
                    logFilePath = parser.text();
                } else if ("criteria".equals(fieldName)) {
                    parser.nextToken();
                    criteria = parser.text();
                } else if ("key".equals(fieldName)) {
                    parser.nextToken();
                    key = parser.text();
                } else if ("value".equals(fieldName)) {
                    parser.nextToken();
                    value = parser.text();
                }
            }
        }

        // Fail fast with a clear message instead of an NPE on toString() below.
        if (startDate == null || endDate == null) {
            throw new IllegalArgumentException(
                    "request body must contain date_range.gte and date_range.lte");
        }

        // Typeless IndexRequest: mapping types are deprecated in 7.x and the
        // two-arg (index, type) constructor triggers deprecation warnings.
        IndexRequest request = new IndexRequest(INDEX_NAME);
        XContentBuilder builder = XContentFactory.jsonBuilder()
                .startObject()
                .field("start_date", startDate.toString())
                .field("end_date", endDate.toString())
                .field("log_file_path", logFilePath)
                .field("criteria", criteria)
                .field("key", key)
                .field("value", value)
                .endObject();
        request.source(builder);
        client.index(request, RequestOptions.DEFAULT);
    }
}