I can't figure out how to query Elasticsearch for data over multiple intervals that slide forward in one-day increments through the end of the month.
For instance, I want to look at 7-day intervals in the month of January: 1-7, 2-8, 3-9, 4-10, etc. But with the query below I get non-overlapping weekly buckets instead: 1-7, 8-14, 15-21, etc.
Does anyone know if this is possible in Elasticsearch, or how to write a query that returns the sliding intervals I described above?
Here is my attempt:
{
"size": 0,
"query": {
"bool": {,
"filter": [
{
"range": {
"associated_datetime": {
"gte": "14/12/2021 19:31:56",
"lte": "14/12/2022 19:31:56",
"format": "dd/MM/yyyy HH:mm:ss"
}
}
}
]
}
},
"aggs": {
"incident": {
"date_histogram": {
"field": "associated_datetime",
"calendar_interval": "week"
},
"aggs": {
"associated_to.id": {
"terms": {
"size": 10000,
"field": "associated_to.id"
}
}
}
}
}
}
The output for the above query looks like this (aggregations object only):
"aggregations": {
"incident": {
"buckets": [
{
"key_as_string": "2022-01-03T00:00:00.000Z",
"key": 1641168000000,
"doc_count": 2,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 4,
"doc_count": 2
}
]
}
},
{
"key_as_string": "2022-01-10T00:00:00.000Z",
"key": 1641772800000,
"doc_count": 1,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 2,
"doc_count": 1
}
]
}
},
{
"key_as_string": "2022-01-17T00:00:00.000Z",
"key": 1642377600000,
"doc_count": 1,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 2,
"doc_count": 1
}
]
}
},
{
"key_as_string": "2022-03-07T00:00:00.000Z",
"key": 1646611200000,
"doc_count": 1,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 4,
"doc_count": 1
}
]
}
},
{
"key_as_string": "2022-03-21T00:00:00.000Z",
"key": 1647820800000,
"doc_count": 7,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 37,
"doc_count": 2
},
{
"key": 38,
"doc_count": 2
},
{
"key": 39,
"doc_count": 2
},
{
"key": 40,
"doc_count": 1
}
]
}
},
{
"key_as_string": "2022-05-16T00:00:00.000Z",
"key": 1652659200000,
"doc_count": 1,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 4,
"doc_count": 1
}
]
}
},
{
"key_as_string": "2022-11-14T00:00:00.000Z",
"key": 1668384000000,
"doc_count": 3,
"associated_to.id": {
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0,
"buckets": [
{
"key": 2,
"doc_count": 2
},
{
"key": 37,
"doc_count": 1
},
{
"key": 38,
"doc_count": 1
},
{
"key": 39,
"doc_count": 1
},
{
"key": 40,
"doc_count": 1
},
{
"key": 41,
"doc_count": 1
},
{
"key": 42,
"doc_count": 1
}
]
}
}
]
}
}
CodePudding user response:
One way to do it is with a date_range aggregation. A date_histogram can only produce non-overlapping buckets, so the sliding windows have to be spelled out explicitly as ranges (note: the to date of each range is exclusive):
{
"size": 0,
"query": {
"bool": {
"filter": [
{
"range": {
"associated_datetime": {
"gte": "14/12/2021 19:31:56",
"lte": "14/12/2022 19:31:56",
"format": "dd/MM/yyyy HH:mm:ss"
}
}
}
]
}
},
"aggs": {
"incident": {
"date_range": {
"field": "associated_datetime",
"ranges": [
{
"from": "2022-01-01",
"to": "2022-01-08"
},
{
"from": "2022-01-02",
"to": "2022-01-09"
},
{
"from": "2022-01-03",
"to": "2022-01-10"
},
...
]
},
"aggs": {
"associated_to.id": {
"terms": {
"size": 10000,
"field": "associated_to.id"
}
}
}
}
}
}
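Typing out every sliding window by hand gets tedious, so the ranges array can be generated client-side. Below is a minimal sketch in Python (my own illustration, not part of the original answer); the window length, start/end dates, and field name are taken from the question, while everything else is an assumption to adapt to your setup.
from datetime import date, timedelta

def sliding_ranges(start, end, window_days=7):
    # Build one range per start day. "to" is exclusive in a
    # date_range aggregation, so each window covers exactly
    # window_days days.
    ranges = []
    day = start
    last_start = end - timedelta(days=window_days - 1)
    while day <= last_start:
        ranges.append({
            "from": day.isoformat(),
            "to": (day + timedelta(days=window_days)).isoformat(),
        })
        day += timedelta(days=1)
    return ranges

body = {
    "size": 0,
    "aggs": {
        "incident": {
            "date_range": {
                "field": "associated_datetime",
                "ranges": sliding_ranges(date(2022, 1, 1), date(2022, 1, 31)),
            },
            "aggs": {
                "associated_to.id": {
                    "terms": {"size": 10000, "field": "associated_to.id"}
                }
            },
        }
    },
}
The resulting body can then be sent with any client, e.g. es.search(index="your-index", body=body) using elasticsearch-py (the index name here is a placeholder).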