@@ -85,7 +85,8 @@ func (query *DateHistogram) TranslateSqlResponseToJson(rows []model.QueryResultR
 	// Implement default when query.minDocCount == DefaultMinDocCount, we need to return
 	// all buckets between the first bucket that matches documents and the last one.
 
-	if query.minDocCount == 0 {
+	fmt.Println("query.minDocCount", query.minDocCount, "query.ebmin", query.ebmin)
+	if query.minDocCount == 0 || query.ebmin != 0 {
 		rows = query.NewRowsTransformer().Transform(query.ctx, rows)
 	}
 
@@ -99,7 +100,7 @@ func (query *DateHistogram) TranslateSqlResponseToJson(rows []model.QueryResultR
 		responseKey := query.calculateResponseKey(originalKey)
 
 		response = append(response, model.JsonMap{
-			// OriginalKeyName: originalKey,
+			OriginalKeyName: originalKey,
 			"key": responseKey,
 			"doc_count": docCount,
 			"key_as_string": query.calculateKeyAsString(responseKey),
@@ -119,6 +120,7 @@ func (query *DateHistogram) String() string {
 // only intervals <= days are needed
 func (query *DateHistogram) intervalAsDuration() time.Duration {
 	var intervalInHoursOrLess string
+	//fmt.Println("query.interval", query.interval)
 	if strings.HasSuffix(query.interval, "d") {
 		// time.ParseDuration doesn't accept > hours, we need to convert days to hours
 		daysNr, err := strconv.Atoi(strings.TrimSuffix(query.interval, "d"))
@@ -130,7 +132,9 @@ func (query *DateHistogram) intervalAsDuration() time.Duration {
 	} else {
 		intervalInHoursOrLess = query.interval
 	}
+	//fmt.Println("intervalInHoursOrLess", intervalInHoursOrLess)
 	duration, _ := time.ParseDuration(intervalInHoursOrLess)
+	//fmt.Println("duration", duration)
 	return duration
 }
 
@@ -228,7 +232,7 @@ func (query *DateHistogram) calculateResponseKey(originalKey int64) int64 {
 }
 
 func (query *DateHistogram) calculateKeyAsString(key int64) string {
-	return time.UnixMilli(key).In(query.wantedTimezone).Format("2006-01-02T15:04:05.000-07:00")
+	return time.UnixMilli(key).In(query.wantedTimezone).Format("2006/01/02 15:04:05")
 }
 
 func (query *DateHistogram) OriginalKeyToKeyAsString(originalKey any) string {
@@ -241,16 +245,14 @@ func (query *DateHistogram) SetMinDocCountToZero() {
 }
 
 func (query *DateHistogram) NewRowsTransformer() model.QueryRowsTransformer {
-	differenceBetweenTwoNextKeys := int64(1)
-	if query.intervalType == DateHistogramCalendarInterval {
-		duration, err := kibana.ParseInterval(query.interval)
-		if err == nil {
-			differenceBetweenTwoNextKeys = duration.Milliseconds()
-		} else {
-			logger.ErrorWithCtx(query.ctx).Err(err)
-			differenceBetweenTwoNextKeys = 0
-		}
+	duration, err := kibana.ParseInterval(query.interval)
+	var differenceBetweenTwoNextKeys int64
+	if err == nil {
+		differenceBetweenTwoNextKeys = duration.Milliseconds()
+	} else {
+		logger.ErrorWithCtx(query.ctx).Err(err)
 	}
+	fmt.Println("differenceBetweenTwoNextKeys", differenceBetweenTwoNextKeys)
 	return &DateHistogramRowsTransformer{MinDocCount: query.minDocCount, differenceBetweenTwoNextKeys: differenceBetweenTwoNextKeys, EmptyValue: 0, ebmin: query.ebmin, ebmax: query.ebmax}
 }
 
@@ -267,7 +269,7 @@ type DateHistogramRowsTransformer struct {
 // if MinDocCount == 0, and we have buckets e.g. [key, value1], [key+10, value2], we need to insert [key+1, 0], [key+2, 0]...
 // CAUTION: a different kind of postprocessing is needed for MinDocCount > 1, but I haven't seen any query with that yet, so not implementing it now.
 func (qt *DateHistogramRowsTransformer) Transform(ctx context.Context, rowsFromDB []model.QueryResultRow) []model.QueryResultRow {
-	if qt.MinDocCount != 0 || qt.differenceBetweenTwoNextKeys == 0 || len(rowsFromDB) < 1 {
+	if qt.MinDocCount != 0 || qt.differenceBetweenTwoNextKeys == 0 {
 		// we only add empty rows, when
 		// a) MinDocCount == 0
 		// b) we have valid differenceBetweenTwoNextKeys (>0)
@@ -281,7 +283,9 @@ func (qt *DateHistogramRowsTransformer) Transform(ctx context.Context, rowsFromD
 
 	emptyRowsAdded := 0
 	postprocessedRows := make([]model.QueryResultRow, 0, len(rowsFromDB))
-	postprocessedRows = append(postprocessedRows, rowsFromDB[0])
+	if len(rowsFromDB) > 0 {
+		postprocessedRows = append(postprocessedRows, rowsFromDB[0])
+	}
 	for i := 1; i < len(rowsFromDB); i++ {
 		if len(rowsFromDB[i-1].Cols) < 2 || len(rowsFromDB[i].Cols) < 2 {
 			logger.ErrorWithCtx(ctx).Msgf(
@@ -301,17 +305,31 @@ func (qt *DateHistogramRowsTransformer) Transform(ctx context.Context, rowsFromD
 		}
 		postprocessedRows = append(postprocessedRows, rowsFromDB[i])
 	}
-	/*
-		fmt.Println("postprocessedRows 1", postprocessedRows, qt.getKey(postprocessedRows[0])-qt.differenceBetweenTwoNextKeys, qt.ebmin, (qt.getKey(postprocessedRows[0])-qt.differenceBetweenTwoNextKeys)*qt.differenceBetweenTwoNextKeys)
-
-		for maybePreKey, i := qt.getKey(postprocessedRows[0])-qt.differenceBetweenTwoNextKeys, 0; i < 96 && maybePreKey*qt.differenceBetweenTwoNextKeys >= qt.ebmin; maybePreKey, i = maybePreKey-qt.differenceBetweenTwoNextKeys, i+1 {
-			preRow := postprocessedRows[0].Copy()
-			preRow.Cols[len(preRow.Cols)-2].Value = maybePreKey
-			preRow.Cols[len(preRow.Cols)-1].Value = qt.EmptyValue
-			postprocessedRows = append([]model.QueryResultRow{preRow}, postprocessedRows...)
-			emptyRowsAdded++
-		}
-	*/
+
+	//fmt.Println("postprocessedRows 1", postprocessedRows, qt.getKey(postprocessedRows[0])*qt.differenceBetweenTwoNextKeys-qt.differenceBetweenTwoNextKeys, qt.ebmin, qt.differenceBetweenTwoNextKeys)
+	fmt.Println("pre: ", len(postprocessedRows), emptyRowsAdded)
+	if qt.ebmin == 0 {
+		return postprocessedRows
+	}
+
+	if len(postprocessedRows) == 0 {
+		postprocessedRows = append(postprocessedRows, model.QueryResultRow{
+			Cols: []model.QueryResultCol{
+				{Value: (qt.ebmin+1000*60*60*2)/qt.differenceBetweenTwoNextKeys - 1},
+				{Value: qt.EmptyValue},
+			},
+		})
+	}
+	// gk*d-d = d(gk - 1)
+	// gk*d-2d = d(gk-2) = d(gk-1) - d
+	for maybePreKey := (qt.ebmin + 1000*60*60*2) / qt.differenceBetweenTwoNextKeys; maybePreKey*qt.differenceBetweenTwoNextKeys < qt.ebmax+1000*60*60*2; maybePreKey++ {
+		preRow := postprocessedRows[0].Copy()
+		preRow.Cols[len(preRow.Cols)-2].Value = maybePreKey
+		preRow.Cols[len(preRow.Cols)-1].Value = qt.EmptyValue
+		postprocessedRows = append(postprocessedRows, preRow)
+		emptyRowsAdded++
+	}
+	fmt.Println("post:", len(postprocessedRows), emptyRowsAdded)
 	return postprocessedRows
 }
 
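
The loop bounds in the last hunk treat a bucket key as the bucket's start timestamp in epoch milliseconds divided by the interval length in milliseconds (differenceBetweenTwoNextKeys), so honoring extended_bounds with min_doc_count: 0 means emitting one row for every key from roughly ebmin/interval up to ebmax/interval. The standalone sketch below only illustrates that key arithmetic under those assumptions; bucket, fillEmptyBuckets and the field names are illustrative stand-ins rather than the repository's model.QueryResultRow types, and the diff's hardcoded two-hour (1000*60*60*2 ms) offset is left out.

package main

import (
	"fmt"
	"time"
)

// bucket is a simplified stand-in for one date_histogram row: key = startTimestampMs / intervalMs.
type bucket struct {
	key      int64
	docCount int64
}

// fillEmptyBuckets returns one bucket per key between ebminMs and ebmaxMs (extended_bounds),
// keeping existing doc counts and inserting zero-count buckets for keys with no data.
func fillEmptyBuckets(existing []bucket, ebminMs, ebmaxMs, intervalMs int64) []bucket {
	counts := make(map[int64]int64, len(existing))
	for _, b := range existing {
		counts[b.key] = b.docCount
	}
	var out []bucket
	for key := ebminMs / intervalMs; key*intervalMs <= ebmaxMs; key++ {
		out = append(out, bucket{key: key, docCount: counts[key]}) // missing keys default to 0
	}
	return out
}

func main() {
	interval := int64(time.Hour / time.Millisecond) // 1h buckets, expressed in milliseconds
	ebmin := int64(1_700_000_000_000)               // example extended_bounds.min (epoch ms)
	ebmax := ebmin + 5*interval                     // example extended_bounds.max
	data := []bucket{{key: ebmin/interval + 1, docCount: 7}} // one non-empty bucket inside the bounds

	for _, b := range fillEmptyBuckets(data, ebmin, ebmax, interval) {
		fmt.Println(b.key, b.docCount)
	}
}

A real implementation would, as the diff does, copy an existing result row to preserve its remaining columns instead of building buckets from scratch.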