diff --git a/platform/frontend_connectors/physical_functions.go b/platform/frontend_connectors/physical_functions.go new file mode 100644 index 000000000..a5f56e8de --- /dev/null +++ b/platform/frontend_connectors/physical_functions.go @@ -0,0 +1,14 @@ +// Copyright Quesma, licensed under the Elastic License 2.0. +// SPDX-License-Identifier: Elastic-2.0 +package frontend_connectors + +const ( + ClickhouseDateHourFunction = "toHour" + ClickhouseFromUnixTimeFunction64mili = "fromUnixTimestamp64Milli" + ClickhouseFromUnixTimeFunction = "fromUnixTimestamp" + ClickhouseToTimezone = "toTimezone" + ClickhouseToUnixTimestamp64Milli = "toUnixTimestamp64Milli" + DorisDateHourFunction = "HOUR" + DorisFromUnixTimeFunction = "FROM_UNIXTIME" + DorisFromUnixTimeFunction64mili = "FROM_MILLISECOND" +) diff --git a/platform/frontend_connectors/schema_transformer.go b/platform/frontend_connectors/schema_transformer.go index eb2f9e3b5..c85d2c723 100644 --- a/platform/frontend_connectors/schema_transformer.go +++ b/platform/frontend_connectors/schema_transformer.go @@ -20,6 +20,11 @@ import ( "strings" ) +type TransformationsChain struct { + TransformationName string + Transformation func(schema.Schema, *model.Query) (*model.Query, error) +} + type SchemaCheckPass struct { cfg *config.QuesmaConfiguration tableDiscovery database_common.TableDiscovery @@ -910,10 +915,15 @@ func (s *SchemaCheckPass) convertQueryDateTimeFunctionToClickhouse(indexSchema s if len(e.Args) != 1 { return e } - return model.NewFunction("toHour", e.Args[0].Accept(b).(model.Expr)) + return model.NewFunction(ClickhouseDateHourFunction, e.Args[0].Accept(b).(model.Expr)) + + case model.FromUnixTimeFunction64mili: + args := b.VisitChildren(e.Args) + return model.NewFunction(ClickhouseFromUnixTimeFunction64mili, args...) - // TODO this is a place for over date/time related functions - // add more + case model.FromUnixTimeFunction: + args := b.VisitChildren(e.Args) + return model.NewFunction(ClickhouseFromUnixTimeFunction, args...) default: return visitFunction(b, e) @@ -940,11 +950,14 @@ func (s *SchemaCheckPass) convertQueryDateTimeFunctionToDoris(indexSchema schema if len(e.Args) != 1 { return e } - return model.NewFunction("HOUR", e.Args[0].Accept(b).(model.Expr)) - - // TODO this is a place for over date/time related functions - // add more - + return model.NewFunction(DorisDateHourFunction, e.Args[0].Accept(b).(model.Expr)) + case model.FromUnixTimeFunction: + args := b.VisitChildren(e.Args) + return model.NewFunction(DorisFromUnixTimeFunction, args...) + + case model.FromUnixTimeFunction64mili: + args := b.VisitChildren(e.Args) + return model.NewFunction(DorisFromUnixTimeFunction64mili, args...) 
default: return visitFunction(b, e) } @@ -1083,7 +1096,7 @@ func (s *SchemaCheckPass) acceptIntsAsTimestamps(indexSchema schema.Schema, quer } } if ok { - if f, okF := model.ToFunction(expr); okF && f.Name == "fromUnixTimestamp64Milli" && len(f.Args) == 1 { + if f, okF := model.ToFunction(expr); okF && f.Name == model.FromUnixTimeFunction64mili && len(f.Args) == 1 { if l, okL := model.ToLiteral(f.Args[0]); okL { if _, exists := l.Format(); exists { // heuristics: it's a date <=> it has a format return model.NewInfixExpr(col, e.Op, f.Args[0]) } } } } visitor.OverrideVisitFunction = func(b *model.BaseExprVisitor, f model.FunctionExpr) interface{} { - if f.Name == "toUnixTimestamp64Milli" && len(f.Args) == 1 { + if f.Name == ClickhouseToUnixTimestamp64Milli && len(f.Args) == 1 { if col, ok := model.ExtractColRef(f.Args[0]); ok && table.IsInt(col.ColumnName) { // erases toUnixTimestamp64Milli return f.Args[0] } } - if f.Name == "toTimezone" && len(f.Args) == 2 { + if f.Name == ClickhouseToTimezone && len(f.Args) == 2 { if col, ok := model.ExtractColRef(f.Args[0]); ok && table.IsInt(col.ColumnName) { // adds fromUnixTimestamp64Milli - return model.NewFunction("toTimezone", model.NewFunction("fromUnixTimestamp64Milli", f.Args[0]), f.Args[1]) + return model.NewFunction(ClickhouseToTimezone, model.NewFunction(model.FromUnixTimeFunction64mili, f.Args[0]), f.Args[1]) } } return visitFunction(b, f) @@ -1119,12 +1132,8 @@ func (s *SchemaCheckPass) acceptIntsAsTimestamps(indexSchema schema.Schema, quer return query, nil } -func (s *SchemaCheckPass) Transform(plan *model.ExecutionPlan) (*model.ExecutionPlan, error) { - - transformationChain := []struct { - TransformationName string - Transformation func(schema.Schema, *model.Query) (*model.Query, error) - }{ +func (s *SchemaCheckPass) makeTransformations(backendConnectorType quesma_api.BackendConnectorType) []TransformationsChain { + transformationChain := []TransformationsChain{ // Section 1: from logical to physical {TransformationName: "PhysicalFromExpressionTransformation", Transformation: s.applyPhysicalFromExpression}, {TransformationName: "WildcardExpansion", Transformation: s.applyWildcardExpansion}, @@ -1149,31 +1158,23 @@ func (s *SchemaCheckPass) Transform(plan *model.ExecutionPlan) (*model.Execution // Section 3: backend specific transformations // fallback to clickhouse date functions if no backend connector is set - if plan.BackendConnector == nil { + + if backendConnectorType == quesma_api.ClickHouseSQLBackend { transformationChain = append(transformationChain, struct { TransformationName string Transformation func(schema.Schema, *model.Query) (*model.Query, error) }{TransformationName: "QuesmaDateFunctions", Transformation: s.convertQueryDateTimeFunctionToClickhouse}) - } else { - if plan.BackendConnector.GetId() == quesma_api.ClickHouseSQLBackend { - transformationChain = append(transformationChain, struct { - TransformationName string - Transformation func(schema.Schema, *model.Query) (*model.Query, error) - }{TransformationName: "QuesmaDateFunctions", Transformation: s.convertQueryDateTimeFunctionToClickhouse}) - } - - if plan.BackendConnector.GetId() == quesma_api.DorisSQLBackend { - transformationChain = append(transformationChain, struct { - TransformationName string - Transformation func(schema.Schema, *model.Query) (*model.Query, error) - }{TransformationName: "QuesmaDateFunctions", Transformation: 
s.convertQueryDateTimeFunctionToDoris}) - } } - transformationChain = append(transformationChain, - []struct { + + if backendConnectorType == quesma_api.DorisSQLBackend { + transformationChain = append(transformationChain, struct { TransformationName string Transformation func(schema.Schema, *model.Query) (*model.Query, error) - }{ + }{TransformationName: "QuesmaDateFunctions", Transformation: s.convertQueryDateTimeFunctionToDoris}) + } + + transformationChain = append(transformationChain, + []TransformationsChain{ {TransformationName: "IpTransformation", Transformation: s.applyIpTransformations}, {TransformationName: "GeoTransformation", Transformation: s.applyGeoTransformations}, {TransformationName: "ArrayTransformation", Transformation: s.applyArrayTransformations}, @@ -1184,7 +1185,20 @@ func (s *SchemaCheckPass) Transform(plan *model.ExecutionPlan) (*model.Execution {TransformationName: "BooleanLiteralTransformation", Transformation: s.applyBooleanLiteralLowering}, }..., ) + return transformationChain +} + +func (s *SchemaCheckPass) determineBackendConnectorType(plan *model.ExecutionPlan) quesma_api.BackendConnectorType { + if plan != nil && plan.BackendConnector != nil { + return plan.BackendConnector.GetId() + } + return quesma_api.ClickHouseSQLBackend +} + +func (s *SchemaCheckPass) Transform(plan *model.ExecutionPlan) (*model.ExecutionPlan, error) { + backendConnectorType := s.determineBackendConnectorType(plan) + transformationChain := s.makeTransformations(backendConnectorType) for k, query := range plan.Queries { var err error diff --git a/platform/frontend_connectors/schema_transformer_test.go b/platform/frontend_connectors/schema_transformer_test.go index fdaa2bce8..dddea08f8 100644 --- a/platform/frontend_connectors/schema_transformer_test.go +++ b/platform/frontend_connectors/schema_transformer_test.go @@ -2039,7 +2039,7 @@ func Test_acceptIntsAsTimestamps(t *testing.T) { model.NewInfixExpr( model.NewFunction("timeZoneOffset", model.NewFunction( "toTimezone", - model.NewFunction("fromUnixTimestamp64Milli", model.NewColumnRef("timestampInt")), + model.NewFunction(model.FromUnixTimeFunction64mili, model.NewColumnRef("timestampInt")), model.NewLiteral("'Europe/Warsaw'")), ), "*", diff --git a/platform/frontend_connectors/search_opensearch_test.go b/platform/frontend_connectors/search_opensearch_test.go index 04dce5b85..7433ae901 100644 --- a/platform/frontend_connectors/search_opensearch_test.go +++ b/platform/frontend_connectors/search_opensearch_test.go @@ -55,6 +55,7 @@ func TestSearchOpensearch(t *testing.T) { assert.NoError(t, err, "no ParseQuery error") assert.True(t, len(queries) > 0, "len queries > 0") whereClause := model.AsString(queries[0].SelectCommand.WhereClause) + // This checks where clause after parsing and before transformations assert.Contains(t, tt.WantedSql, whereClause, "contains wanted sql") for _, wantedQuery := range tt.WantedQueries { diff --git a/platform/frontend_connectors/search_test.go b/platform/frontend_connectors/search_test.go index b7cfad782..f1e7c2b98 100644 --- a/platform/frontend_connectors/search_test.go +++ b/platform/frontend_connectors/search_test.go @@ -83,7 +83,7 @@ func TestAsyncSearchHandler(t *testing.T) { }, } - for i, tt := range testdata.TestsAsyncSearch { + for i, tt := range testdata.TestsAsyncSearchAfterTransformations { t.Run(util.PrettyTestName(tt.Name, i), func(t *testing.T) { conn, mock := util.InitSqlMockWithPrettySqlAndPrint(t, false) db := backend_connectors.NewClickHouseBackendConnectorWithConnection("", conn) @@ 
-302,7 +302,7 @@ func TestSearchHandler(t *testing.T) { }, } - for i, tt := range testdata.TestsSearch { + for i, tt := range testdata.TestsSearchAfterTransformations { t.Run(util.PrettyTestName(tt.Name, i), func(t *testing.T) { var conn *sql.DB var mock sqlmock.Sqlmock @@ -433,7 +433,7 @@ func TestSearchHandlerNoAttrsConfig(t *testing.T) { }, } - for i, tt := range testdata.TestsSearchNoAttrs { + for i, tt := range testdata.TestsSearchNoAttrsAfterTransformations { t.Run(util.PrettyTestName(tt.Name, i), func(t *testing.T) { conn, mock := util.InitSqlMockWithPrettyPrint(t, false) defer conn.Close() @@ -482,7 +482,7 @@ func TestAsyncSearchFilter(t *testing.T) { }, }, } - for i, tt := range testdata.TestSearchFilter { + for i, tt := range testdata.TestSearchFilterAfterTransformations { t.Run(util.PrettyTestName(tt.Name, i), func(t *testing.T) { var conn *sql.DB var mock sqlmock.Sqlmock diff --git a/platform/frontend_connectors/terms_enum_test.go b/platform/frontend_connectors/terms_enum_test.go index 08ee6e267..ba5ee2e5b 100644 --- a/platform/frontend_connectors/terms_enum_test.go +++ b/platform/frontend_connectors/terms_enum_test.go @@ -128,8 +128,8 @@ func testHandleTermsEnumRequest(t *testing.T, requestBody []byte, fieldName stri ctx = context.WithValue(context.Background(), tracing.RequestIdCtxKey, "test") qt := &elastic_query_dsl.ClickhouseQueryTranslator{Table: table, Ctx: ctx, Schema: s.Tables[schema.IndexName(testTableName)]} // Here we additionally verify that terms for `_tier` are **NOT** included in the SQL query - expectedQuery1 := fmt.Sprintf(`SELECT DISTINCT %s FROM %s WHERE (("epoch_time">=fromUnixTimestamp(1709036700) AND "epoch_time"<=fromUnixTimestamp(1709037659)) AND ("epoch_time_datetime64">=fromUnixTimestamp64Milli(1709036700000) AND "epoch_time_datetime64"<=fromUnixTimestamp64Milli(1709037659999))) LIMIT 13`, fieldName, testTableName) - expectedQuery2 := fmt.Sprintf(`SELECT DISTINCT %s FROM %s WHERE (("epoch_time">=fromUnixTimestamp(1709036700) AND "epoch_time"<=fromUnixTimestamp(1709037659)) AND ("epoch_time_datetime64">=fromUnixTimestamp64Milli(1709036700000) AND "epoch_time_datetime64"<=fromUnixTimestamp64Milli(1709037659999))) LIMIT 13`, fieldName, testTableName) + expectedQuery1 := fmt.Sprintf(`SELECT DISTINCT %s FROM %s WHERE (("epoch_time">=__quesma_from_unixtime(1709036700) AND "epoch_time"<=__quesma_from_unixtime(1709037659)) AND ("epoch_time_datetime64">=__quesma_from_unixtime64mili(1709036700000) AND "epoch_time_datetime64"<=__quesma_from_unixtime64mili(1709037659999))) LIMIT 13`, fieldName, testTableName) + expectedQuery2 := fmt.Sprintf(`SELECT DISTINCT %s FROM %s WHERE (("epoch_time">=__quesma_from_unixtime(1709036700) AND "epoch_time"<=__quesma_from_unixtime(1709037659)) AND ("epoch_time_datetime64">=__quesma_from_unixtime64mili(1709036700000) AND "epoch_time_datetime64"<=__quesma_from_unixtime64mili(1709037659999))) LIMIT 13`, fieldName, testTableName) // Once in a while `AND` conditions could be swapped, so we match both cases mock.ExpectQuery(fmt.Sprintf("%s|%s", regexp.QuoteMeta(expectedQuery1), regexp.QuoteMeta(expectedQuery2))). 
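Note on the rewritten expectations in the tests above and below: the parser now emits backend-agnostic logical names (`__quesma_from_unixtime`, `__quesma_from_unixtime64mili`) instead of ClickHouse's `fromUnixTimestamp`/`fromUnixTimestamp64Milli`, and the backend-specific transformation added in schema_transformer.go later lowers them to the physical ClickHouse or Doris functions. A minimal, self-contained sketch of that logical-to-physical mapping, using only the constant values introduced in this diff (the standalone `lowerDateFunction` helper and the plain string backend keys are illustrative, not part of the codebase):

```go
package main

import "fmt"

// Logical (backend-agnostic) date function names emitted by the query parser,
// mirroring model.FromUnixTimeFunction and model.FromUnixTimeFunction64mili.
const (
	fromUnixTime       = "__quesma_from_unixtime"
	fromUnixTime64Mili = "__quesma_from_unixtime64mili"
)

// lowerDateFunction maps a logical date function name to the physical function
// for the given backend; unknown names pass through unchanged.
func lowerDateFunction(backend, logicalName string) string {
	mapping := map[string]map[string]string{
		"clickhouse": {
			fromUnixTime:       "fromUnixTimestamp",
			fromUnixTime64Mili: "fromUnixTimestamp64Milli",
		},
		"doris": {
			fromUnixTime:       "FROM_UNIXTIME",
			fromUnixTime64Mili: "FROM_MILLISECOND",
		},
	}
	if physical, ok := mapping[backend][logicalName]; ok {
		return physical
	}
	return logicalName
}

func main() {
	fmt.Println(lowerDateFunction("clickhouse", fromUnixTime64Mili)) // fromUnixTimestamp64Milli
	fmt.Println(lowerDateFunction("doris", fromUnixTime))            // FROM_UNIXTIME
}
```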
diff --git a/platform/model/constants.go b/platform/model/constants.go index a409d9c24..1ef9933bc 100644 --- a/platform/model/constants.go +++ b/platform/model/constants.go @@ -7,6 +7,8 @@ const ( FullTextFieldNamePlaceHolder = "__quesma_fulltext_field_name" TimestampFieldName = "@timestamp" - DateHourFunction = "__quesma_date_hour" - MatchOperator = "__quesma_match" + DateHourFunction = "__quesma_date_hour" + MatchOperator = "__quesma_match" + FromUnixTimeFunction = "__quesma_from_unixtime" + FromUnixTimeFunction64mili = "__quesma_from_unixtime64mili" ) diff --git a/platform/model/where_visitor.go b/platform/model/where_visitor.go index 4bb9628d1..c376496b2 100644 --- a/platform/model/where_visitor.go +++ b/platform/model/where_visitor.go @@ -19,14 +19,14 @@ func FindTimestampLowerBound(field ColumnRef, whereClause Expr) (timestampInMill visitor := NewBaseVisitor() visitor.OverrideVisitInfix = func(visitor *BaseExprVisitor, e InfixExpr) interface{} { if columnRef, ok := e.Left.(ColumnRef); ok && columnRef == field && e.Op == ">=" || e.Op == ">" { - if fun, ok := e.Right.(FunctionExpr); ok && fun.Name == "fromUnixTimestamp64Milli" && len(fun.Args) == 1 { + if fun, ok := e.Right.(FunctionExpr); ok && fun.Name == FromUnixTimeFunction64mili && len(fun.Args) == 1 { if rhs, ok := fun.Args[0].(LiteralExpr); ok { if rhsInt64, ok := util.ExtractInt64Maybe(rhs.Value); ok { timestampInMillis = min(timestampInMillis, rhsInt64) found = true } } - } else if fun, ok := e.Right.(FunctionExpr); ok && fun.Name == "fromUnixTimestamp" && len(fun.Args) == 1 { + } else if fun, ok := e.Right.(FunctionExpr); ok && fun.Name == FromUnixTimeFunction && len(fun.Args) == 1 { if rhs, ok := fun.Args[0].(LiteralExpr); ok { if rhsInt64, ok := util.ExtractInt64Maybe(rhs.Value); ok { timestampInMillis = min(timestampInMillis, rhsInt64*1000) // seconds -> milliseconds diff --git a/platform/parsers/elastic_query_dsl/dates.go b/platform/parsers/elastic_query_dsl/dates.go index 546dd0834..c2896fb43 100644 --- a/platform/parsers/elastic_query_dsl/dates.go +++ b/platform/parsers/elastic_query_dsl/dates.go @@ -85,7 +85,7 @@ func (dm DateManager) ParseDateUsualFormat(exprFromRequest any, datetimeType dat case database_common.DateTime64: threeDigitsOfPrecisionSuffice := utcTs.UnixNano()%1_000_000 == 0 if threeDigitsOfPrecisionSuffice { - return model.NewFunction("fromUnixTimestamp64Milli", addFormat(utcTs.UnixMilli())), true + return model.NewFunction(model.FromUnixTimeFunction64mili, addFormat(utcTs.UnixMilli())), true } else { return model.NewFunction( "toDateTime64", @@ -98,7 +98,7 @@ func (dm DateManager) ParseDateUsualFormat(exprFromRequest any, datetimeType dat ), true } case database_common.DateTime: - return model.NewFunction("fromUnixTimestamp", addFormat(utcTs.Unix())), true + return model.NewFunction(model.FromUnixTimeFunction, addFormat(utcTs.Unix())), true default: logger.WarnWithCtx(dm.ctx).Msgf("Unknown datetimeType: %v", datetimeType) } diff --git a/platform/parsers/elastic_query_dsl/pancake_sql_query_generation_test.go b/platform/parsers/elastic_query_dsl/pancake_sql_query_generation_test.go index c4de202d0..92e507d9e 100644 --- a/platform/parsers/elastic_query_dsl/pancake_sql_query_generation_test.go +++ b/platform/parsers/elastic_query_dsl/pancake_sql_query_generation_test.go @@ -107,8 +107,7 @@ func TestPancakeQueryGeneration(t *testing.T) { } prettyExpectedSql := util.SqlPrettyPrint([]byte(strings.TrimSpace(expectedSql))) - util.AssertSqlEqual(t, prettyExpectedSql, prettyPancakeSql) - + assert.Equal(t, 
util.RemoveAllWhitespaces(prettyPancakeSql), util.RemoveAllWhitespaces(prettyExpectedSql)) _, ok := pancakeSql.Type.(PancakeQueryType) if !ok { assert.Fail(t, "Expected pancake query type") diff --git a/platform/parsers/elastic_query_dsl/query_parser_range_test.go b/platform/parsers/elastic_query_dsl/query_parser_range_test.go index 95262f592..be46690d8 100644 --- a/platform/parsers/elastic_query_dsl/query_parser_range_test.go +++ b/platform/parsers/elastic_query_dsl/query_parser_range_test.go @@ -36,7 +36,7 @@ var parseRangeTests = []parseRangeTest{ }, Config: database_common.NewNoTimestampOnlyStringAttrCHConfig(), }, - `("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029))`, + `("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029))`, }, { "parseDateTimeBestEffort", @@ -55,7 +55,7 @@ var parseRangeTests = []parseRangeTest{ }, Config: database_common.NewNoTimestampOnlyStringAttrCHConfig(), }, - `("timestamp">=fromUnixTimestamp(1706881636) AND "timestamp"<=fromUnixTimestamp(1707486436))`, + `("timestamp">=__quesma_from_unixtime(1706881636) AND "timestamp"<=__quesma_from_unixtime(1707486436))`, }, { "numeric range", @@ -91,7 +91,7 @@ var parseRangeTests = []parseRangeTest{ }, Config: database_common.NewNoTimestampOnlyStringAttrCHConfig(), }, - `("timestamp">=fromUnixTimestamp64Milli(1706881636000) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436000))`, + `("timestamp">=__quesma_from_unixtime64mili(1706881636000) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436000))`, }, } diff --git a/platform/testdata/aggregation_requests.go b/platform/testdata/aggregation_requests.go index 572036db6..e5df3250f 100644 --- a/platform/testdata/aggregation_requests.go +++ b/platform/testdata/aggregation_requests.go @@ -121,7 +121,7 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeSQL: `SELECT maxOrNull("AvgTicketPrice") AS "metric__maxAgg_col_0", ` + `minOrNull("AvgTicketPrice") AS "metric__minAgg_col_0" ` + `FROM ` + TableName + ` ` + - `WHERE ("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029))`, + `WHERE ("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029))`, }, { // [1] TestName: "2 sibling count aggregations", @@ -312,7 +312,7 @@ var AggregationTests = []AggregationTestCase{ countIf("FlightDelay" __quesma_match true) AS "metric__0__1-bucket_col_0", countIf("Cancelled" __quesma_match true) AS "metric__0__3-bucket_col_0" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029)) GROUP BY "OriginCityName" AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC LIMIT 1001`, @@ -515,7 +515,7 @@ var AggregationTests = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029)) GROUP BY "FlightDelayType" AS "aggr__0__key_0", toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 
'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0")) @@ -615,7 +615,7 @@ var AggregationTests = []AggregationTestCase{ }, ExpectedPancakeSQL: `SELECT sumOrNull("taxful_total_price") AS "metric__0_col_0" ` + `FROM ` + TableName + ` ` + - `WHERE ("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date"<=fromUnixTimestamp64Milli(1707818397034))`, + `WHERE ("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date"<=__quesma_from_unixtime64mili(1707818397034))`, }, { // [4] TestName: "cardinality", @@ -747,7 +747,7 @@ var AggregationTests = []AggregationTestCase{ "OriginCityName" AS "aggr__suggestions__key_0", count(*) AS "aggr__suggestions__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029)) GROUP BY "OriginCityName" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -863,7 +863,7 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT countIf("FlightDelay" __quesma_match true) AS "metric__0-bucket_col_0" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029))`, }, { // [6] TestName: "filters", @@ -1017,17 +1017,17 @@ var AggregationTests = []AggregationTestCase{ }}, }, ExpectedPancakeSQL: ` - SELECT countIf(("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND - "timestamp"<=fromUnixTimestamp64Milli(1707486436029))) AS + SELECT countIf(("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND + "timestamp"<=__quesma_from_unixtime64mili(1707486436029))) AS "filter_0__aggr__time_offset_split__count", - countIf(("timestamp">=fromUnixTimestamp64Milli(1706276836029) AND "timestamp" - <=fromUnixTimestamp64Milli(1706881636029))) AS + countIf(("timestamp">=__quesma_from_unixtime64mili(1706276836029) AND + "timestamp"<=__quesma_from_unixtime64mili(1706881636029))) AS "filter_1__aggr__time_offset_split__count" FROM __quesma_table_name - WHERE ("FlightDelay" __quesma_match true AND (("timestamp">=fromUnixTimestamp64Milli( - 1706881636029) AND "timestamp"<=fromUnixTimestamp64Milli(1707486436029)) OR ( - "timestamp">=fromUnixTimestamp64Milli(1706276836029) AND "timestamp"<= - fromUnixTimestamp64Milli(1706881636029))))`, + WHERE ("FlightDelay" __quesma_match true AND (("timestamp">=__quesma_from_unixtime64mili( + 1706881636029) AND "timestamp"<=__quesma_from_unixtime64mili(1707486436029)) OR ( + "timestamp">=__quesma_from_unixtime64mili(1706276836029) AND "timestamp"<= + __quesma_from_unixtime64mili(1706881636029))))`, }, { // [7] TestName: "top hits, quite complex", @@ -1617,8 +1617,8 @@ var AggregationTests = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT "FlightDelayMin" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM ` + TableName + ` - WHERE (("timestamp">=fromUnixTimestamp64Milli(1706881636029) AND "timestamp"<= - fromUnixTimestamp64Milli(1707486436029)) AND NOT ("FlightDelayMin" __quesma_match 0)) + WHERE (("timestamp">=__quesma_from_unixtime64mili(1706881636029) AND + "timestamp"<=__quesma_from_unixtime64mili(1707486436029)) AND NOT ("FlightDelayMin" __quesma_match 0)) GROUP BY "FlightDelayMin" AS "aggr__0__key_0" ORDER 
BY "aggr__0__key_0" ASC`, }, @@ -1841,8 +1841,8 @@ var AggregationTests = []AggregationTestCase{ count(*) AS "aggr__0__1__count" FROM __quesma_table_name WHERE ("host.name" __quesma_match '%prometheus%' AND ("@timestamp">= - fromUnixTimestamp64Milli(1706891809940) AND "@timestamp"<= - fromUnixTimestamp64Milli(1707496609940))) + __quesma_from_unixtime64mili(1706891809940) AND "@timestamp"<= + __quesma_from_unixtime64mili(1707496609940))) GROUP BY "severity" AS "aggr__0__key_0", toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset(toTimezone( "@timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0")) @@ -2190,8 +2190,8 @@ var AggregationTests = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date"<= - fromUnixTimestamp64Milli(1707818397034)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date"<= + __quesma_from_unixtime64mili(1707818397034)) AND "taxful_total_price" > '250') GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" ORDER BY "aggr__1__2__key_0" ASC`, @@ -2202,8 +2202,8 @@ var AggregationTests = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date" - <=fromUnixTimestamp64Milli(1707818397034)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date" + <=__quesma_from_unixtime64mili(1707818397034)) AND "taxful_total_price" > '250') GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset( toTimezone("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" @@ -2220,8 +2220,8 @@ var AggregationTests = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone( "order_date", 'Europe/Warsaw'))*1000) / 43200000)) - WHERE (("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date" - <=fromUnixTimestamp64Milli(1707818397034)) AND "taxful_total_price" > '250')) + WHERE (("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date" + <=__quesma_from_unixtime64mili(1707818397034)) AND "taxful_total_price" > '250')) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__4_col_0", "top_metrics__1__2__4_col_1", "top_hits_rank" FROM "quesma_top_hits_join" @@ -2233,8 +2233,8 @@ var AggregationTests = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date" - <=fromUnixTimestamp64Milli(1707818397034)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date" + <=__quesma_from_unixtime64mili(1707818397034)) AND "taxful_total_price" > '250') GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset( toTimezone("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" @@ -2251,8 +2251,8 @@ var AggregationTests = []AggregationTestCase{ 
__quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone( "order_date", 'Europe/Warsaw'))*1000) / 43200000)) - WHERE (("order_date">=fromUnixTimestamp64Milli(1707213597034) AND "order_date" - <=fromUnixTimestamp64Milli(1707818397034)) AND "taxful_total_price" > '250')) + WHERE (("order_date">=__quesma_from_unixtime64mili(1707213597034) AND "order_date" + <=__quesma_from_unixtime64mili(1707818397034)) AND "taxful_total_price" > '250')) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__5_col_0", "top_metrics__1__2__5_col_1", "top_hits_rank" FROM "quesma_top_hits_join" @@ -2451,8 +2451,8 @@ var AggregationTests = []AggregationTestCase{ FROM ( SELECT "host.name" FROM __quesma_table_name - WHERE (("@timestamp">=fromUnixTimestamp64Milli(1706009236820) AND "@timestamp" - <=fromUnixTimestamp64Milli(1706010136820)) AND + WHERE (("@timestamp">=__quesma_from_unixtime64mili(1706009236820) AND "@timestamp" + <=__quesma_from_unixtime64mili(1706010136820)) AND "__quesma_fulltext_field_name" iLIKE '%user%') LIMIT 8000) GROUP BY "host.name" AS "aggr__sample__top_values__key_0" @@ -2597,7 +2597,7 @@ var AggregationTests = []AggregationTestCase{ FROM ` + TableName + ` WHERE (` + fullTextFieldName + ` iLIKE '%user%' AND - ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + ("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481))) GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -2737,7 +2737,7 @@ var AggregationTests = []AggregationTestCase{ toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0", count(*) AS "aggr__stats__series__count" FROM ` + TableName + ` - WHERE ("@timestamp">fromUnixTimestamp64Milli(1706194439033) AND "@timestamp"<=fromUnixTimestamp64Milli(1706195339033)) + WHERE ("@timestamp">__quesma_from_unixtime64mili(1706194439033) AND "@timestamp"<=__quesma_from_unixtime64mili(1706195339033)) GROUP BY COALESCE("event.dataset", 'unknown') AS "aggr__stats__key_0", toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0")) @@ -2972,7 +2972,7 @@ var AggregationTests = []AggregationTestCase{ "aggr__0__key_0", count(*) AS "aggr__0__count", sumOrNull("taxful_total_price") AS "metric__0__1_col_0" FROM ` + TableName + ` - WHERE ("order_date">=fromUnixTimestamp64Milli(1708364456351) AND "order_date"<=fromUnixTimestamp64Milli(1708969256351)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1708364456351) AND "order_date"<=__quesma_from_unixtime64mili(1708969256351)) GROUP BY toInt64(toUnixTimestamp64Milli("order_date") / 86400000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -3082,8 +3082,8 @@ var AggregationTests = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "message" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1708456413795) AND "timestamp"<= - fromUnixTimestamp64Milli(1708488074920)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1708456413795) AND "timestamp"<= + __quesma_from_unixtime64mili(1708488074920)) GROUP BY "message" AS "aggr__0__key_0" ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 4`, @@ -3275,7 +3275,7 @@ var AggregationTests = []AggregationTestCase{ 
sumOrNullIf("taxful_total_price", "products.product_name" __quesma_match '%watch%') AS "metric__0__1-bucket__1-metric_col_0" FROM ` + TableName + ` - WHERE ("order_date">=fromUnixTimestamp64Milli(1708627654149) AND "order_date"<=fromUnixTimestamp64Milli(1709232454149)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1708627654149) AND "order_date"<=__quesma_from_unixtime64mili(1709232454149)) GROUP BY toInt64(toUnixTimestamp64Milli("order_date") / 43200000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -3502,12 +3502,12 @@ var AggregationTests = []AggregationTestCase{ sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__1_col_0", sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1708639056376) AND "order_date" - <=fromUnixTimestamp64Milli(1709243856376)) OR ("order_date">= - fromUnixTimestamp64Milli(1708034256376) AND "order_date"<= - fromUnixTimestamp64Milli(1708639056376))) AND ("order_date">= - fromUnixTimestamp64Milli(1708639056376) AND "order_date"<= - fromUnixTimestamp64Milli(1709243856376))) + WHERE ((("order_date">=__quesma_from_unixtime64mili(1708639056376) AND "order_date" + <=__quesma_from_unixtime64mili(1709243856376)) OR ("order_date">= + __quesma_from_unixtime64mili(1708034256376) AND "order_date"<= + __quesma_from_unixtime64mili(1708639056376))) AND ("order_date">= + __quesma_from_unixtime64mili(1708639056376) AND "order_date"<= + __quesma_from_unixtime64mili(1709243856376))) GROUP BY toInt64(toUnixTimestamp64Milli("order_date") / 86400000) AS "aggr__time_offset_split__0__key_0" ORDER BY "aggr__time_offset_split__0__key_0" ASC`, @@ -3519,12 +3519,12 @@ var AggregationTests = []AggregationTestCase{ sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__1_col_0", sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1708639056376) AND - "order_date"<=fromUnixTimestamp64Milli(1709243856376)) OR - ("order_date">=fromUnixTimestamp64Milli(1708034256376) AND - "order_date"<=fromUnixTimestamp64Milli(1708639056376))) AND - ("order_date">=fromUnixTimestamp64Milli(1708034256376) AND - "order_date"<=fromUnixTimestamp64Milli(1708639056376))) + WHERE ((("order_date">=__quesma_from_unixtime64mili(1708639056376) AND + "order_date"<=__quesma_from_unixtime64mili(1709243856376)) OR + ("order_date">=__quesma_from_unixtime64mili(1708034256376) AND + "order_date"<=__quesma_from_unixtime64mili(1708639056376))) AND + ("order_date">=__quesma_from_unixtime64mili(1708034256376) AND + "order_date"<=__quesma_from_unixtime64mili(1708639056376))) GROUP BY toInt64(toUnixTimestamp64Milli("order_date") / 86400000) AS "aggr__time_offset_split__0__key_0" ORDER BY "aggr__time_offset_split__0__key_0" ASC`, @@ -3661,7 +3661,7 @@ var AggregationTests = []AggregationTestCase{ FROM ( SELECT "@timestamp" FROM ` + TableName + ` - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1709815794995) AND "@timestamp"<=fromUnixTimestamp64Milli(1709816694995)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1709815794995) AND "@timestamp"<=__quesma_from_unixtime64mili(1709816694995)) LIMIT 20000) GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 15000) AS "aggr__sampler__eventRate__key_0" @@ -4036,8 +4036,8 @@ var AggregationTests = []AggregationTestCase{ FROM ( SELECT "bytes_gauge" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1709932426749) AND "timestamp"<= - 
fromUnixTimestamp64Milli(1711228426749)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1709932426749) AND "timestamp"<= + __quesma_from_unixtime64mili(1711228426749)) LIMIT 20000)`, ExpectedAdditionalPancakeSQLs: []string{` SELECT sum(count(*)) OVER () AS "aggr__sample__count", @@ -4127,8 +4127,8 @@ var AggregationTests = []AggregationTestCase{ FROM ( SELECT "bytes_gauge" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1709932426749) AND "timestamp"<= - fromUnixTimestamp64Milli(1711228426749)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1709932426749) AND "timestamp"<= + __quesma_from_unixtime64mili(1711228426749)) LIMIT 20000) GROUP BY "bytes_gauge" AS "aggr__sample__bytes_gauge_top__key_0" ORDER BY "aggr__sample__bytes_gauge_top__count" DESC, @@ -4340,8 +4340,8 @@ var AggregationTests = []AggregationTestCase{ countIf("bytes_gauge"<6.555) AS "range_3__aggr__2__count", countIf("bytes_gauge" IS NOT NULL) AS "range_4__aggr__2__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713269711790) AND "timestamp"<= - fromUnixTimestamp64Milli(1713270611790))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1713269711790) AND "timestamp"<= + __quesma_from_unixtime64mili(1713270611790))`, ExpectedAdditionalPancakeSQLs: []string{` SELECT countIf(("bytes_gauge">=0 AND "bytes_gauge"<1000)) AS "range_0__aggr__3__count", @@ -4351,8 +4351,8 @@ var AggregationTests = []AggregationTestCase{ countIf("bytes_gauge"<6.555) AS "range_3__aggr__3__count", countIf("bytes_gauge" IS NOT NULL) AS "range_4__aggr__3__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713269711790) AND "timestamp"<= - fromUnixTimestamp64Milli(1713270611790))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1713269711790) AND "timestamp"<= + __quesma_from_unixtime64mili(1713270611790))`, }, }, { // [22] @@ -4492,7 +4492,7 @@ var AggregationTests = []AggregationTestCase{ countIf("timestamp">=toInt64(toUnixTimestamp('2024-04-14'))) AS "range_2__aggr__2__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1712388530059) AND "timestamp"<=fromUnixTimestamp64Milli(1713288530059))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1712388530059) AND "timestamp"<=__quesma_from_unixtime64mili(1713288530059))`, }, { // [23] TestName: "significant terms aggregation: same as terms for now", @@ -4828,7 +4828,7 @@ var AggregationTests = []AggregationTestCase{ SELECT floor("bytes"/100)*100 AS "aggr__2__key_0", count(*) AS "aggr__2__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715348876077) AND "timestamp"<=fromUnixTimestamp64Milli(1715349776077)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715348876077) AND "timestamp"<=__quesma_from_unixtime64mili(1715349776077)) GROUP BY floor("bytes"/100)*100 AS "aggr__2__key_0" ORDER BY "aggr__2__key_0" ASC`, }, @@ -4966,8 +4966,8 @@ var AggregationTests = []AggregationTestCase{ SELECT toInt64(toUnixTimestamp64Milli("timestamp") / 30000) AS "aggr__2__key_0", count(*) AS "aggr__2__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715351342900) AND "timestamp"<= - fromUnixTimestamp64Milli(1715352242900)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715351342900) AND "timestamp"<= + __quesma_from_unixtime64mili(1715352242900)) GROUP BY toInt64(toUnixTimestamp64Milli("timestamp") / 30000) AS "aggr__2__key_0" ORDER BY "aggr__2__key_0" ASC`, @@ -5690,7 +5690,7 @@ var AggregationTests = []AggregationTestCase{ "machine.os" 
AS "aggr__2__key_0", count(*) AS "aggr__2__count", uniq("clientip") AS "metric__2__1_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715322159037) AND "timestamp"<=fromUnixTimestamp64Milli(1715376159037)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715322159037) AND "timestamp"<=__quesma_from_unixtime64mili(1715376159037)) GROUP BY "machine.os" AS "aggr__2__key_0" ORDER BY "metric__2__1_col_0" DESC, "aggr__2__key_0" ASC LIMIT 6`, @@ -6202,8 +6202,8 @@ var AggregationTests = []AggregationTestCase{ stddevPop("bytes") AS "metric__0__2_col_8", stddevSamp("bytes") AS "metric__0__2_col_9" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1716327334210) AND "timestamp"<= - fromUnixTimestamp64Milli(1716381334210)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1716327334210) AND "timestamp"<= + __quesma_from_unixtime64mili(1716381334210)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 600000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, diff --git a/platform/testdata/aggregation_requests_2.go b/platform/testdata/aggregation_requests_2.go index 2dd2e106f..43eed540c 100644 --- a/platform/testdata/aggregation_requests_2.go +++ b/platform/testdata/aggregation_requests_2.go @@ -607,7 +607,7 @@ var AggregationTests2 = []AggregationTestCase{ quantiles(0.020000)("timestamp") AS "metric__2__1_col_1", sumOrNull("count") AS "metric__2__2_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713401475845) AND "timestamp"<=fromUnixTimestamp64Milli(1714697475845)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1713401475845) AND "timestamp"<=__quesma_from_unixtime64mili(1714697475845)) GROUP BY "response" AS "aggr__2__key_0" ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, @@ -1677,7 +1677,7 @@ var AggregationTests2 = []AggregationTestCase{ floor("bytes2"/5)*5 AS "aggr__2__3__key_0", count(*) AS "aggr__2__3__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715348876077) AND "timestamp"<=fromUnixTimestamp64Milli(1715349776077)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715348876077) AND "timestamp"<=__quesma_from_unixtime64mili(1715349776077)) GROUP BY floor("bytes"/100)*100 AS "aggr__2__key_0", floor("bytes2"/5)*5 AS "aggr__2__3__key_0")) ORDER BY "aggr__2__order_1_rank" ASC, "aggr__2__3__order_1_rank" ASC`, @@ -1871,7 +1871,7 @@ var AggregationTests2 = []AggregationTestCase{ floor("bytes2"/5)*5 AS "aggr__2__3__key_0", count(*) AS "aggr__2__3__count" FROM ` + TableName + ` - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715348876077) AND "timestamp"<=fromUnixTimestamp64Milli(1715349776077)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715348876077) AND "timestamp"<=__quesma_from_unixtime64mili(1715349776077)) GROUP BY floor("bytes"/100)*100 AS "aggr__2__key_0", floor("bytes2"/5)*5 AS "aggr__2__3__key_0")) ORDER BY "aggr__2__order_1_rank" ASC, "aggr__2__3__order_1_rank" ASC`, @@ -2473,8 +2473,8 @@ var AggregationTests2 = []AggregationTestCase{ quantiles(0.750000)("docker.cpu.total.pct") AS "metric__0__1__2_col_0" FROM __quesma_table_name WHERE ("data_stream.dataset"='docker.cpu' AND ("@timestamp">= - fromUnixTimestamp64Milli(1723967652291) AND "@timestamp"<= - fromUnixTimestamp64Milli(1725263652291))) + __quesma_from_unixtime64mili(1723967652291) AND "@timestamp"<= + __quesma_from_unixtime64mili(1725263652291))) GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 43200000) AS 
"aggr__0__key_0", "container.name" AS "aggr__0__1__key_0")) WHERE "aggr__0__1__order_1_rank"<=6 @@ -3759,7 +3759,7 @@ var AggregationTests2 = []AggregationTestCase{ "aggr__histo__0__order_1_rank" FROM ( SELECT toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706021760))) / 30) AS "aggr__histo__key_0", + __quesma_from_unixtime(1706021760))) / 30) AS "aggr__histo__key_0", sum(count(*)) OVER (PARTITION BY "aggr__histo__key_0") AS "aggr__histo__count", sum(count(*)) OVER (PARTITION BY "aggr__histo__key_0") AS @@ -3767,7 +3767,7 @@ var AggregationTests2 = []AggregationTestCase{ count(*) AS "aggr__histo__0__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706021760))) / 30) AS "aggr__histo__key_0", + __quesma_from_unixtime(1706021760))) / 30) AS "aggr__histo__key_0", "type" AS "aggr__histo__0__key_0")) WHERE "aggr__histo__0__order_1_rank"<=11 ORDER BY "aggr__histo__order_1_rank" ASC, "aggr__histo__0__order_1_rank" ASC`, @@ -3909,40 +3909,40 @@ var AggregationTests2 = []AggregationTestCase{ }, ExpectedPancakeSQL: ` SELECT toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706878800))) / 90) AS "aggr__histo1__key_0", + __quesma_from_unixtime(1706878800))) / 90) AS "aggr__histo1__key_0", count(*) AS "aggr__histo1__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706878800))) / 90) AS "aggr__histo1__key_0" + __quesma_from_unixtime(1706878800))) / 90) AS "aggr__histo1__key_0" ORDER BY "aggr__histo1__key_0" ASC`, ExpectedAdditionalPancakeSQLs: []string{ `SELECT toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706878800))) / 90) AS "aggr__histo2__key_0", + __quesma_from_unixtime(1706878800))) / 90) AS "aggr__histo2__key_0", count(*) AS "aggr__histo2__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706878800))) / 90) AS "aggr__histo2__key_0" + __quesma_from_unixtime(1706878800))) / 90) AS "aggr__histo2__key_0" ORDER BY "aggr__histo2__key_0" ASC`, `SELECT toInt64(toUnixTimestamp64Milli(COALESCE("customer_birth_date_datetime64", - fromUnixTimestamp64Milli(1706878800000))) / 90000) AS "aggr__histo3__key_0", + __quesma_from_unixtime64mili(1706878800000))) / 90000) AS "aggr__histo3__key_0", count(*) AS "aggr__histo3__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp64Milli(COALESCE("customer_birth_date_datetime64", - fromUnixTimestamp64Milli(1706878800000))) / 90000) AS "aggr__histo3__key_0" + __quesma_from_unixtime64mili(1706878800000))) / 90000) AS "aggr__histo3__key_0" ORDER BY "aggr__histo3__key_0" ASC`, `SELECT toInt64(toUnixTimestamp64Milli(COALESCE("customer_birth_date_datetime64", - fromUnixTimestamp64Milli(1706853600000))) / 90000) AS "aggr__histo4__key_0", + __quesma_from_unixtime64mili(1706853600000))) / 90000) AS "aggr__histo4__key_0", count(*) AS "aggr__histo4__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp64Milli(COALESCE("customer_birth_date_datetime64", - fromUnixTimestamp64Milli(1706853600000))) / 90000) AS "aggr__histo4__key_0" + __quesma_from_unixtime64mili(1706853600000))) / 90000) AS "aggr__histo4__key_0" ORDER BY "aggr__histo4__key_0" ASC`, `SELECT toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706853600))) / 90) AS "aggr__histo5__key_0", + __quesma_from_unixtime(1706853600))) / 90) AS "aggr__histo5__key_0", count(*) AS 
"aggr__histo5__count" FROM __quesma_table_name GROUP BY toInt64(toUnixTimestamp(COALESCE("customer_birth_date", - fromUnixTimestamp(1706853600))) / 90) AS "aggr__histo5__key_0" + __quesma_from_unixtime(1706853600))) / 90) AS "aggr__histo5__key_0" ORDER BY "aggr__histo5__key_0" ASC`, }, }, diff --git a/platform/testdata/clients/clover.go b/platform/testdata/clients/clover.go index 52ab9b493..96fb1a520 100644 --- a/platform/testdata/clients/clover.go +++ b/platform/testdata/clients/clover.go @@ -350,8 +350,8 @@ var CloverTests = []testdata.AggregationTestCase{ countIf(NOT ("table.flower" __quesma_match 'clover')) AS "metric__timeseries__a2-numerator_col_0" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1728640683723) AND "@timestamp"<= - fromUnixTimestamp64Milli(1728641583723))`, + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1728640683723) AND "@timestamp"<= + __quesma_from_unixtime64mili(1728641583723))`, }, { // [2] TestName: "simplest auto_date_histogram", @@ -477,8 +477,8 @@ var CloverTests = []testdata.AggregationTestCase{ ExpectedPancakeSQL: ` SELECT count(*) AS "aggr__timeseries__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1728581627125) AND "timestamp"<= - fromUnixTimestamp64Milli(1728635627125))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1728581627125) AND "timestamp"<= + __quesma_from_unixtime64mili(1728635627125))`, AdditionalAcceptableDifference: []string{"key_as_string"}, // timezone differences between local and github runs... There's always 2h difference between those, need to investigate. Maybe come back to .UTC() so there's no "+timezone" (e.g. +02:00)? }, { // [3] @@ -648,8 +648,8 @@ var CloverTests = []testdata.AggregationTestCase{ countIf(NOT ("a.b_str" IS NOT NULL)) AS "metric__timeseries__f2-numerator_col_0" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1721399904783) AND "@timestamp"<= - fromUnixTimestamp64Milli(1730475504783))`, + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1721399904783) AND "@timestamp"<= + __quesma_from_unixtime64mili(1730475504783))`, }, { // [4] TestName: "todo", @@ -949,8 +949,8 @@ var CloverTests = []testdata.AggregationTestCase{ toInt64(toUnixTimestamp64Milli("@timestamp") / 604800000) AS "aggr__q__time_buckets__key_0", count(*) AS "aggr__q__time_buckets__count" FROM __quesma_table_name - WHERE (("@timestamp">=fromUnixTimestamp64Milli(1728507729621) AND "@timestamp"<= - fromUnixTimestamp64Milli(1728507732621)) AND "__quesma_fulltext_field_name" + WHERE (("@timestamp">=__quesma_from_unixtime64mili(1728507729621) AND "@timestamp"<= + __quesma_from_unixtime64mili(1728507732621)) AND "__quesma_fulltext_field_name" __quesma_match '%') GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 604800000) AS "aggr__q__time_buckets__key_0" diff --git a/platform/testdata/dates.go b/platform/testdata/dates.go index 301c89c4a..f55307171 100644 --- a/platform/testdata/dates.go +++ b/platform/testdata/dates.go @@ -317,8 +317,8 @@ var AggregationTestsWithDates = []AggregationTestCase{ "@timestamp", 'Europe/Warsaw'))*1000) / 10000) AS "aggr__timeseries__key_0", count(*) AS "aggr__timeseries__count" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1730370296174) AND "@timestamp"<= - fromUnixTimestamp64Milli(1730370596174)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1730370296174) AND "@timestamp"<= + __quesma_from_unixtime64mili(1730370596174)) GROUP BY 
toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset(toTimezone ("@timestamp", 'Europe/Warsaw'))*1000) / 10000) AS "aggr__timeseries__key_0" ORDER BY "aggr__timeseries__key_0" ASC`, @@ -498,8 +498,8 @@ var AggregationTestsWithDates = []AggregationTestCase{ "@timestamp", 'Europe/Warsaw'))*1000) / 10000) AS "aggr__timeseries__key_0", count(*) AS "aggr__timeseries__count" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1730370296174) AND "@timestamp"<= - fromUnixTimestamp64Milli(1730370596174)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1730370296174) AND "@timestamp"<= + __quesma_from_unixtime64mili(1730370596174)) GROUP BY toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset(toTimezone ("@timestamp", 'Europe/Warsaw'))*1000) / 10000) AS "aggr__timeseries__key_0" ORDER BY "aggr__timeseries__key_0" ASC`, @@ -628,8 +628,8 @@ var AggregationTestsWithDates = []AggregationTestCase{ count(*) AS "aggr__0__count", sumOrNull("body_bytes_sent") AS "metric__0__1_col_0" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1259327903466) AND "@timestamp"<= - fromUnixTimestamp64Milli(1732713503466)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1259327903466) AND "@timestamp"<= + __quesma_from_unixtime64mili(1732713503466)) GROUP BY toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset(toTimezone ("@timestamp", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, diff --git a/platform/testdata/kibana-visualize/aggregation_requests.go b/platform/testdata/kibana-visualize/aggregation_requests.go index b58d2994c..00d826fce 100644 --- a/platform/testdata/kibana-visualize/aggregation_requests.go +++ b/platform/testdata/kibana-visualize/aggregation_requests.go @@ -251,8 +251,8 @@ var AggregationTests = []testdata.AggregationTestCase{ "aggr__0__1__parent_count", "severity" AS "aggr__0__1__key_0", "source" AS "aggr__0__1__key_1", count(*) AS "aggr__0__1__count" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1716811196627) AND - "@timestamp"<=fromUnixTimestamp64Milli(1716812096627)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1716811196627) AND + "@timestamp"<=__quesma_from_unixtime64mili(1716812096627)) GROUP BY toInt64((toUnixTimestamp64Milli("@timestamp")+timeZoneOffset( toTimezone("@timestamp", 'Europe/Warsaw'))*1000) / 30000) AS "aggr__0__key_0", "severity" AS "aggr__0__1__key_0", @@ -1337,8 +1337,8 @@ var AggregationTests = []testdata.AggregationTestCase{ avgOrNull("FlightDelayMin") AS "metric__0__1_col_3", sumOrNull("FlightDelayMin") AS "metric__0__1_col_4" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1725723024239) AND "timestamp"<= - fromUnixTimestamp64Milli(1727019024239)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1725723024239) AND "timestamp"<= + __quesma_from_unixtime64mili(1727019024239)) GROUP BY "Carrier" AS "aggr__0__key_0" ORDER BY "metric__0__1_col_1" DESC, "metric__0__1_col_0" DESC, "metric__0__1_col_3" DESC, "metric__0__1_col_2" ASC, @@ -1505,8 +1505,8 @@ var AggregationTests = []testdata.AggregationTestCase{ stddevPop("FlightDelayMin") AS "metric__0__1_col_8", stddevSamp("FlightDelayMin") AS "metric__0__1_col_9" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1725723024239) AND "timestamp"<= - fromUnixTimestamp64Milli(1727019024239)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1725723024239) AND "timestamp"<= + __quesma_from_unixtime64mili(1727019024239)) GROUP BY "Carrier" AS 
"aggr__0__key_0" ORDER BY "metric__0__1_col_1" DESC, "metric__0__1_col_0" DESC, "metric__0__1_col_3" DESC, "metric__0__1_col_2" ASC, @@ -2098,8 +2098,8 @@ var AggregationTests = []testdata.AggregationTestCase{ toStartOfWeek(subDate(now(), INTERVAL 1 week)))) AND "timestamp"=fromUnixTimestamp64Milli(1258014686584) AND "timestamp" - <=fromUnixTimestamp64Milli(1731400286584)) AND ("timestamp">=toInt64( + WHERE (("timestamp">=__quesma_from_unixtime64mili(1258014686584) AND "timestamp" + <=__quesma_from_unixtime64mili(1731400286584)) AND ("timestamp">=toInt64( toUnixTimestamp(toStartOfWeek(subDate(now(), INTERVAL 1 week)))) AND "timestamp"=toInt64(toUnixTimestamp(subDate (now(), INTERVAL 1 day)))) AS "metric__2__3__4__1_col_0" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1258014686584) AND "timestamp" - <=fromUnixTimestamp64Milli(1731400286584)) AND "timestamp">=toInt64( + WHERE (("timestamp">=__quesma_from_unixtime64mili(1258014686584) AND "timestamp" + <=__quesma_from_unixtime64mili(1731400286584)) AND "timestamp">=toInt64( toUnixTimestamp(subDate(now(), INTERVAL 1 day)))) GROUP BY "DistanceKilometers" AS "aggr__2__3__key_0", toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( @@ -3916,8 +3916,8 @@ var AggregationTests = []testdata.AggregationTestCase{ sumOrNull("DistanceKilometers") AS "metric__2__second_col_0", sumOrNull("DistanceKilometers") AS "metric__2__week_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1731584141864) AND "timestamp"<= - fromUnixTimestamp64Milli(1731585041864)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1731584141864) AND "timestamp"<= + __quesma_from_unixtime64mili(1731585041864)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 30000) AS "aggr__2__key_0" ORDER BY "aggr__2__key_0" ASC`, @@ -4079,8 +4079,8 @@ var AggregationTests = []testdata.AggregationTestCase{ sumOrNull("DistanceKilometers") AS "metric__2__week_col_0", count("DistanceKilometers") AS "metric__2__year_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1668427553316) AND "timestamp"<= - fromUnixTimestamp64Milli(1731585953316)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1668427553316) AND "timestamp"<= + __quesma_from_unixtime64mili(1731585953316)) GROUP BY toInt64(toUnixTimestamp(toStartOfMonth(toTimezone("timestamp", 'Europe/Warsaw'))))*1000 AS "aggr__2__key_0" ORDER BY "aggr__2__key_0" ASC`, diff --git a/platform/testdata/kibana-visualize/pipeline_aggregation_requests.go b/platform/testdata/kibana-visualize/pipeline_aggregation_requests.go index 00863aab6..a122a7cf5 100644 --- a/platform/testdata/kibana-visualize/pipeline_aggregation_requests.go +++ b/platform/testdata/kibana-visualize/pipeline_aggregation_requests.go @@ -198,8 +198,8 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ "aggr__2__1-bucket__key_0", count(*) AS "aggr__2__1-bucket__count", maxOrNull("timestamp") AS "metric__2__1-bucket__1-metric_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1726848963807) AND "timestamp" - <=fromUnixTimestamp64Milli(1728144963807)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1726848963807) AND "timestamp" + <=__quesma_from_unixtime64mili(1728144963807)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__2__key_0", diff --git 
a/platform/testdata/kibana_sample_data_ecommerce.go b/platform/testdata/kibana_sample_data_ecommerce.go index 30a55beef..a68a73c82 100644 --- a/platform/testdata/kibana_sample_data_ecommerce.go +++ b/platform/testdata/kibana_sample_data_ecommerce.go @@ -108,8 +108,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ SELECT maxOrNull("total_quantity") AS "metric__maxAgg_col_0", minOrNull("total_quantity") AS "metric__minAgg_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594))`, + WHERE ("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594))`, }, { // [1] TestName: "Promotions tracking (request 1/3)", @@ -382,8 +382,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594)) AND "taxful_total_price" > '250') GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" ORDER BY "aggr__1__2__key_0" ASC`, @@ -394,8 +394,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date" - <=fromUnixTimestamp64Milli(1740584933594)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date" + <=__quesma_from_unixtime64mili(1740584933594)) AND "taxful_total_price" > '250') GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset( toTimezone("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" @@ -412,8 +412,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone( "order_date", 'Europe/Warsaw'))*1000) / 43200000)) - WHERE (("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date" - <=fromUnixTimestamp64Milli(1740584933594)) AND "taxful_total_price" > '250')) + WHERE (("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date" + <=__quesma_from_unixtime64mili(1740584933594)) AND "taxful_total_price" > '250')) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__4_col_0", "top_metrics__1__2__4_col_1", "top_hits_rank" FROM "quesma_top_hits_join" @@ -426,8 +426,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date" - <=fromUnixTimestamp64Milli(1740584933594)) AND "taxful_total_price" > '250') + WHERE (("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date" + <=__quesma_from_unixtime64mili(1740584933594)) AND "taxful_total_price" > '250') GROUP BY 
toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset( toTimezone("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__1__2__key_0" @@ -444,8 +444,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone( "order_date", 'Europe/Warsaw'))*1000) / 43200000)) - WHERE (("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date" - <=fromUnixTimestamp64Milli(1740584933594)) AND "taxful_total_price" > '250')) + WHERE (("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date" + <=__quesma_from_unixtime64mili(1740584933594)) AND "taxful_total_price" > '250')) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__5_col_0", "top_metrics__1__2__5_col_1", "top_hits_rank" FROM "quesma_top_hits_join" @@ -692,8 +692,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ sumOrNullIf("taxful_total_price", "products.product_name" __quesma_match '%trouser%') AS "metric__0__1-bucket__1-metric_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739979776601) AND "order_date"<= - fromUnixTimestamp64Milli(1740584576601)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1739979776601) AND "order_date"<= + __quesma_from_unixtime64mili(1740584576601)) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -865,8 +865,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ OR "__quesma_fulltext_field_name" __quesma_match 'dress%')) AS "metric__0__1-bucket__1-metric_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date"<= - fromUnixTimestamp64Milli(1740838898238)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date"<= + __quesma_from_unixtime64mili(1740838898238)) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 43200000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -965,8 +965,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT sumOrNull("taxful_total_price") AS "metric__0_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594))`, + WHERE ("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594))`, }, { // [5] TestName: "Median spending", @@ -1067,8 +1067,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT quantiles(0.500000)("taxful_total_price") AS "metric__0_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594))`, + WHERE ("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594))`, }, { // [6] TestName: "Avg. 
items sold", @@ -1164,8 +1164,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT avgOrNull("total_quantity") AS "metric__0_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594))`, + WHERE ("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594))`, }, { // [7] TestName: "TODO Transactions per day", @@ -1395,12 +1395,12 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ sumOrNull("products.quantity") AS "metric__time_offset_split__0__1_col_0", sumOrNull("products.quantity") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date" - <=fromUnixTimestamp64Milli(1740838898238)) OR ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) AND ("order_date">= - fromUnixTimestamp64Milli(1740234098238) AND "order_date"<= - fromUnixTimestamp64Milli(1740838898238))) + WHERE ((("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date" + <=__quesma_from_unixtime64mili(1740838898238)) OR ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) AND ("order_date">= + __quesma_from_unixtime64mili(1740234098238) AND "order_date"<= + __quesma_from_unixtime64mili(1740838898238))) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__time_offset_split__0__key_0" @@ -1425,12 +1425,12 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ sumOrNull("products.quantity") AS "metric__time_offset_split__0__1_col_0", sumOrNull("products.quantity") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date" - <=fromUnixTimestamp64Milli(1740838898238)) OR ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) AND ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) + WHERE ((("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date" + <=__quesma_from_unixtime64mili(1740838898238)) OR ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) AND ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__time_offset_split__0__key_0" @@ -1657,12 +1657,12 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__1_col_0", sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date" - <=fromUnixTimestamp64Milli(1740838898238)) OR ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) AND ("order_date">= - fromUnixTimestamp64Milli(1740234098238) AND "order_date"<= - fromUnixTimestamp64Milli(1740838898238))) + WHERE 
((("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date" + <=__quesma_from_unixtime64mili(1740838898238)) OR ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) AND ("order_date">= + __quesma_from_unixtime64mili(1740234098238) AND "order_date"<= + __quesma_from_unixtime64mili(1740838898238))) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__time_offset_split__0__key_0" @@ -1694,12 +1694,12 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__1_col_0", sumOrNull("taxful_total_price") AS "metric__time_offset_split__0__2_col_0" FROM __quesma_table_name - WHERE ((("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date" - <=fromUnixTimestamp64Milli(1740838898238)) OR ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) AND ("order_date">= - fromUnixTimestamp64Milli(1739629298238) AND "order_date"<= - fromUnixTimestamp64Milli(1740234098238))) + WHERE ((("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date" + <=__quesma_from_unixtime64mili(1740838898238)) OR ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) AND ("order_date">= + __quesma_from_unixtime64mili(1739629298238) AND "order_date"<= + __quesma_from_unixtime64mili(1740234098238))) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__time_offset_split__0__key_0" @@ -2057,8 +2057,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ "order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1740234098238) AND - "order_date"<=fromUnixTimestamp64Milli(1740838898238)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1740234098238) AND + "order_date"<=__quesma_from_unixtime64mili(1740838898238)) GROUP BY "category" AS "aggr__0__key_0", toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone( "order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__0__1__key_0")) @@ -2277,8 +2277,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ count(*) AS "aggr__0__count", sumOrNull("taxful_total_price") AS "metric__0__1_col_0" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1739980133594) AND "order_date"<= - fromUnixTimestamp64Milli(1740584933594)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1739980133594) AND "order_date"<= + __quesma_from_unixtime64mili(1740584933594)) GROUP BY toInt64((toUnixTimestamp64Milli("order_date")+timeZoneOffset(toTimezone ("order_date", 'Europe/Warsaw'))*1000) / 86400000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -2474,8 +2474,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ count(*) AS "metric__gridSplit__gridCentroid_col_2", sumOrNull("taxful_total_price") AS "metric__gridSplit__sum_of_taxful_total_price_col_0" FROM __quesma_table_name - WHERE ("geoip.location" IS NOT NULL AND ("order_date">=fromUnixTimestamp64Milli( - 1740143222223) AND "order_date"<=fromUnixTimestamp64Milli(1740748022223))) + WHERE ("geoip.location" IS NOT NULL AND ("order_date">=__quesma_from_unixtime64mili( + 1740143222223) 
AND "order_date"<=__quesma_from_unixtime64mili(1740748022223))) GROUP BY FLOOR(((__quesma_geo_lon("geoip.location")+180)/360)*POWER(2, 5)) AS "aggr__gridSplit__key_0", FLOOR((1-LOG(TAN(RADIANS(__quesma_geo_lat("geoip.location")))+(1/COS(RADIANS( __quesma_geo_lat("geoip.location")))))/PI())/2*POWER(2, 5)) AS "aggr__gridSplit__key_1" @@ -2617,8 +2617,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__join__parent_count", "geoip.region_name" AS "aggr__join__key_0", count(*) AS "aggr__join__count" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date"<= - fromUnixTimestamp64Milli(1740838898238)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date"<= + __quesma_from_unixtime64mili(1740838898238)) GROUP BY "geoip.region_name" AS "aggr__join__key_0" ORDER BY "aggr__join__count" DESC, "aggr__join__key_0" ASC LIMIT 5`, @@ -2807,8 +2807,8 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ "geoip.country_iso_code" AS "aggr__join__key_0", count(*) AS "aggr__join__count" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1740234098238) AND "order_date"<= - fromUnixTimestamp64Milli(1740838898238)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1740234098238) AND "order_date"<= + __quesma_from_unixtime64mili(1740838898238)) GROUP BY "geoip.country_iso_code" AS "aggr__join__key_0" ORDER BY "aggr__join__count" DESC, "aggr__join__key_0" ASC LIMIT 65536`, @@ -2924,7 +2924,7 @@ var KibanaSampleDataEcommerce = []AggregationTestCase{ argMinOrNull(__quesma_geo_lon("originlocation"), __quesma_geo_lat( "originlocation")) AS "metric__fitToBounds_col_3" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740924992069) AND "timestamp"<= - fromUnixTimestamp64Milli(1741529792069))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740924992069) AND "timestamp"<= + __quesma_from_unixtime64mili(1741529792069))`, }, } diff --git a/platform/testdata/kibana_sample_data_flights.go b/platform/testdata/kibana_sample_data_flights.go index 845f13921..c09e64382 100644 --- a/platform/testdata/kibana_sample_data_flights.go +++ b/platform/testdata/kibana_sample_data_flights.go @@ -107,8 +107,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ SELECT maxOrNull("AvgTicketPrice") AS "metric__maxAgg_col_0", minOrNull("AvgTicketPrice") AS "metric__minAgg_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853))`, }, { // [1] TestName: "fill out when panel starts missing - don't know which panel it is", @@ -221,8 +221,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ SELECT maxOrNull("FlightDelayMin") AS "metric__maxAgg_col_0", minOrNull("FlightDelayMin") AS "metric__minAgg_col_0" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND NOT ("FlightDelayMin" __quesma_match 0))`, + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND NOT ("FlightDelayMin" __quesma_match 0))`, }, { // [2] TestName: "Delays & Cancellations (request 1/2)", @@ -799,8 +799,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 
'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0" @@ -812,8 +812,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -831,8 +831,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true'))) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__4_col_0", "top_metrics__1__2__4_col_1", "top_hits_rank" @@ -846,8 +846,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -865,8 +865,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true'))) SELECT "aggr__1__count", 
"aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__5_col_0", "top_metrics__1__2__5_col_1", "top_hits_rank" @@ -880,8 +880,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -899,8 +899,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true'))) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__6_col_0", "top_metrics__1__2__6_col_1", "top_hits_rank" @@ -914,8 +914,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -933,8 +933,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND ("FlightDelay" __quesma_match 'true' + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND ("FlightDelay" __quesma_match 'true' AND "Cancelled" __quesma_match 'true'))) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__7_col_0", "top_metrics__1__2__7_col_1", "top_hits_rank" @@ -1055,8 +1055,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT countIf("FlightDelay" __quesma_match true) AS "metric__0-bucket_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + 
__quesma_from_unixtime64mili(1740835408853))`, }, { // [4] TestName: "Cancelled", @@ -1169,8 +1169,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT countIf("Cancelled" __quesma_match true) AS "metric__0-bucket_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853))`, }, { // [5] TestName: "Delayed/Cancelled vs 1 week earlier", @@ -1324,17 +1324,17 @@ var KibanaSampleDataFlights = []AggregationTestCase{ }}, }, ExpectedPancakeSQL: ` - SELECT countIf(("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND - "timestamp"<=fromUnixTimestamp64Milli(1740835408853))) AS + SELECT countIf(("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND + "timestamp"<=__quesma_from_unixtime64mili(1740835408853))) AS "filter_0__aggr__time_offset_split__count", - countIf(("timestamp">=fromUnixTimestamp64Milli(1739625808853) AND "timestamp" - <=fromUnixTimestamp64Milli(1740230608853))) AS + countIf(("timestamp">=__quesma_from_unixtime64mili(1739625808853) AND "timestamp" + <=__quesma_from_unixtime64mili(1740230608853))) AS "filter_1__aggr__time_offset_split__count" FROM __quesma_table_name - WHERE ("Cancelled" __quesma_match true AND (("timestamp">=fromUnixTimestamp64Milli( - 1740230608853) AND "timestamp"<=fromUnixTimestamp64Milli(1740835408853)) OR ( - "timestamp">=fromUnixTimestamp64Milli(1739625808853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740230608853))))`, + WHERE ("Cancelled" __quesma_match true AND (("timestamp">=__quesma_from_unixtime64mili( + 1740230608853) AND "timestamp"<=__quesma_from_unixtime64mili(1740835408853)) OR ( + "timestamp">=__quesma_from_unixtime64mili(1739625808853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740230608853))))`, }, { // [6] TestName: "Flight count", @@ -1453,8 +1453,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -1641,8 +1641,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ countIf("FlightDelay" __quesma_match 'true') AS "metric__0__1-bucket_col_0", countIf("__quesma_fulltext_field_name" __quesma_match '%') AS "metric__0__2-bucket_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -1846,8 +1846,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ countIf("FlightDelay" __quesma_match true) AS "metric__0__1-bucket_col_0", countIf("Cancelled" __quesma_match true) AS "metric__0__3-bucket_col_0" FROM 
__quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) GROUP BY "OriginCityName" AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC LIMIT 1001`, @@ -2045,8 +2045,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0", count(*) AS "aggr__0__1__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp" - <=fromUnixTimestamp64Milli(1740835408853)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp" + <=__quesma_from_unixtime64mili(1740835408853)) GROUP BY "FlightDelayType" AS "aggr__0__key_0", toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__1__key_0")) @@ -2175,8 +2175,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "DestWeather" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) GROUP BY "DestWeather" AS "aggr__0__key_0" ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 3`, @@ -2309,8 +2309,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__0__parent_count", "FlightDelayType" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND NOT ("FlightDelayType" + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + __quesma_from_unixtime64mili(1740835408853)) AND NOT ("FlightDelayType" __quesma_match '%No Delay%')) GROUP BY "FlightDelayType" AS "aggr__0__key_0" ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC @@ -2507,8 +2507,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ count(*) AS "metric__gridSplit__gridCentroid_col_2", sumOrNull("FlightDelayMin") AS "metric__gridSplit__sum_of_FlightDelayMin_col_0" FROM __quesma_table_name - WHERE ("OriginLocation" IS NOT NULL AND ("timestamp">=fromUnixTimestamp64Milli( - 1740230608853) AND "timestamp"<=fromUnixTimestamp64Milli(1740835408853))) + WHERE ("OriginLocation" IS NOT NULL AND ("timestamp">=__quesma_from_unixtime64mili( + 1740230608853) AND "timestamp"<=__quesma_from_unixtime64mili(1740835408853))) GROUP BY FLOOR(((__quesma_geo_lon("OriginLocation")+180)/360)*POWER(2, 7)) AS "aggr__gridSplit__key_0", FLOOR((1-LOG(TAN(RADIANS(__quesma_geo_lat("OriginLocation")))+(1/COS(RADIANS( __quesma_geo_lat("OriginLocation")))))/PI())/2*POWER(2, 7)) AS "aggr__gridSplit__key_1" @@ -2633,8 +2633,8 @@ var KibanaSampleDataFlights = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT "FlightDelayMin" AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740230608853) AND "timestamp"<= - fromUnixTimestamp64Milli(1740835408853)) AND NOT ("FlightDelayMin" __quesma_match 0)) + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740230608853) AND "timestamp"<= + 
__quesma_from_unixtime64mili(1740835408853)) AND NOT ("FlightDelayMin" __quesma_match 0)) GROUP BY "FlightDelayMin" AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, }, diff --git a/platform/testdata/kibana_sample_data_logs.go b/platform/testdata/kibana_sample_data_logs.go index 925a833b8..f4ac3783b 100644 --- a/platform/testdata/kibana_sample_data_logs.go +++ b/platform/testdata/kibana_sample_data_logs.go @@ -115,8 +115,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ SELECT maxOrNull("bytes") AS "metric__maxAgg_col_0", minOrNull("bytes") AS "metric__minAgg_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103))`, }, { // [1] TestName: "Response Codes Over Time + Annotations (1/2 request, Annotations part)", @@ -347,8 +347,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) AND ("tags" __quesma_match 'error' AND + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) AND ("tags" __quesma_match 'error' AND "tags" __quesma_match 'security')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0" @@ -361,8 +361,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) AND ("tags" __quesma_match 'error' AND + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) AND ("tags" __quesma_match 'error' AND "tags" __quesma_match 'security')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -380,8 +380,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) AND ("tags" __quesma_match 'error' AND + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) AND ("tags" __quesma_match 'error' AND "tags" __quesma_match 'security'))) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__4_col_0", "top_metrics__1__2__4_col_1", "top_hits_rank" @@ -395,8 +395,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__1__2__key_0", count(*) AS "aggr__1__2__count" FROM __quesma_table_name - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) AND ("tags" __quesma_match 'error' AND + WHERE 
(("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) AND ("tags" __quesma_match 'error' AND "tags" __quesma_match 'security')) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset( toTimezone("timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS @@ -414,8 +414,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ __quesma_table_name AS "hit_table" ON ("group_table"."aggr__1__2__key_0"= toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000)) - WHERE (("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) AND ("tags" __quesma_match 'error' AND + WHERE (("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) AND ("tags" __quesma_match 'error' AND "tags" __quesma_match 'security'))) SELECT "aggr__1__count", "aggr__1__2__key_0", "aggr__1__2__count", "top_metrics__1__2__5_col_0", "top_metrics__1__2__5_col_1", "top_hits_rank" @@ -563,8 +563,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT uniq("clientip") AS "metric__0_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103))`, }, { // [3] TestName: "Response Codes Over Time + Annotations (2/2 request)", @@ -862,8 +862,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ countIf(("response">=400 AND "response"<500)) AS "filter_1__aggr__0__1__count" , countIf("response">=500) AS "filter_2__aggr__0__1__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) GROUP BY toInt64((toUnixTimestamp64Milli("timestamp")+timeZoneOffset(toTimezone( "timestamp", 'Europe/Warsaw'))*1000) / 10800000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, @@ -989,8 +989,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT countIf("response">=500) AS "metric__0-bucket_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103))`, }, { // [5] TestName: "HTTP 4xx", @@ -1133,8 +1133,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT countIf(("response">=400 AND "response"<500)) AS "metric__0-bucket_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103))`, + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103))`, }, { // [6] TestName: "Table gz, css, zip, etc.", @@ -1368,14 +1368,14 @@ var KibanaSampleDataLogs = []AggregationTestCase{ "extension" AS "aggr__0__key_0", count(*) AS "aggr__0__count", sumOrNull("bytes") AS "metric__0__1_col_0", uniq("clientip") AS "metric__0__3_col_0", - countIf(("timestamp">=fromUnixTimestamp64Milli(1740749972445) AND 
"timestamp" - <=fromUnixTimestamp64Milli(1740753572445))) AS "aggr__0__2-bucket__count", - sumOrNullIf("bytes", ("timestamp">=fromUnixTimestamp64Milli(1740749972445) AND - "timestamp"<=fromUnixTimestamp64Milli(1740753572445))) AS + countIf(("timestamp">=__quesma_from_unixtime64mili(1740749972445) AND "timestamp" + <=__quesma_from_unixtime64mili(1740753572445))) AS "aggr__0__2-bucket__count", + sumOrNullIf("bytes", ("timestamp">=__quesma_from_unixtime64mili(1740749972445) AND + "timestamp"<=__quesma_from_unixtime64mili(1740753572445))) AS "metric__0__2-bucket__2-metric_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740092400000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740753572445)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740092400000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740753572445)) GROUP BY "extension" AS "aggr__0__key_0" ORDER BY "metric__0__1_col_0" DESC, "aggr__0__key_0" ASC LIMIT 11`, @@ -1384,14 +1384,14 @@ var KibanaSampleDataLogs = []AggregationTestCase{ "extension" AS "aggr__0__key_0", count(*) AS "aggr__0__count", sumOrNull("bytes") AS "metric__0__1_col_0", uniq("clientip") AS "metric__0__3_col_0", - countIf(("timestamp">=fromUnixTimestamp64Milli(1740749972445) AND "timestamp" - <=fromUnixTimestamp64Milli(1740753572445))) AS "aggr__0__4-bucket__count", - uniqIf("clientip", ("timestamp">=fromUnixTimestamp64Milli(1740749972445) AND - "timestamp"<=fromUnixTimestamp64Milli(1740753572445))) AS + countIf(("timestamp">=__quesma_from_unixtime64mili(1740749972445) AND "timestamp" + <=__quesma_from_unixtime64mili(1740753572445))) AS "aggr__0__4-bucket__count", + uniqIf("clientip", ("timestamp">=__quesma_from_unixtime64mili(1740749972445) AND + "timestamp"<=__quesma_from_unixtime64mili(1740753572445))) AS "metric__0__4-bucket__4-metric_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740092400000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740753572445)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740092400000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740753572445)) GROUP BY "extension" AS "aggr__0__key_0" ORDER BY "metric__0__1_col_0" DESC, "aggr__0__key_0" ASC LIMIT 11`, @@ -1689,8 +1689,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ quantiles(0.950000)("bytes") AS "metric__0__7_col_0", quantiles(0.500000)("bytes") AS "metric__0__8_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) GROUP BY "url" AS "aggr__0__key_0" ORDER BY "aggr__0__count" DESC, "aggr__0__key_0" ASC LIMIT 1001`, @@ -1896,8 +1896,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ count(*) AS "metric__gridSplit__gridCentroid_col_2", sumOrNull("bytes") AS "metric__gridSplit__sum_of_bytes_col_0" FROM __quesma_table_name - WHERE ("geo.coordinates" IS NOT NULL AND ("timestamp">=fromUnixTimestamp64Milli( - 1740178800000) AND "timestamp"<=fromUnixTimestamp64Milli(1740831278103))) + WHERE ("geo.coordinates" IS NOT NULL AND ("timestamp">=__quesma_from_unixtime64mili( + 1740178800000) AND "timestamp"<=__quesma_from_unixtime64mili(1740831278103))) GROUP BY FLOOR(((__quesma_geo_lon("geo.coordinates")+180)/360)*POWER(2, 6)) AS "aggr__gridSplit__key_0", FLOOR((1-LOG(TAN(RADIANS(__quesma_geo_lat("geo.coordinates")))+(1/COS( @@ -2024,8 +2024,8 @@ var KibanaSampleDataLogs = 
[]AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__join__parent_count", "geo.dest" AS "aggr__join__key_0", count(*) AS "aggr__join__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) GROUP BY "geo.dest" AS "aggr__join__key_0" ORDER BY "aggr__join__count" DESC, "aggr__join__key_0" ASC LIMIT 65536`, @@ -2213,8 +2213,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ count(*) AS "aggr__countries__hours__count", uniq("clientip") AS "metric__countries__hours__unique_col_0" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1740178800000) AND - "@timestamp"<=fromUnixTimestamp64Milli(1740831278103)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1740178800000) AND + "@timestamp"<=__quesma_from_unixtime64mili(1740831278103)) GROUP BY "geo.dest" AS "aggr__countries__key_0", "hour_of_day" AS "aggr__countries__hours__key_0")) WHERE "aggr__countries__order_1_rank"<=26 @@ -2349,8 +2349,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ SELECT "machine.os" AS "aggr__table__key_0", "geo.dest" AS "aggr__table__key_1", count(*) AS "aggr__table__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) GROUP BY "machine.os" AS "aggr__table__key_0", "geo.dest" AS "aggr__table__key_1" ORDER BY "aggr__table__count" DESC, "aggr__table__key_0" ASC, @@ -2482,8 +2482,8 @@ var KibanaSampleDataLogs = []AggregationTestCase{ ExpectedPancakeSQL: ` SELECT floor("bytes"/50)*50 AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1740178800000) AND "timestamp"<= - fromUnixTimestamp64Milli(1740831278103)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1740178800000) AND "timestamp"<= + __quesma_from_unixtime64mili(1740831278103)) GROUP BY floor("bytes"/50)*50 AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, }, diff --git a/platform/testdata/opensearch-visualize/aggregation_requests.go b/platform/testdata/opensearch-visualize/aggregation_requests.go index 64abfe233..45e482d15 100644 --- a/platform/testdata/opensearch-visualize/aggregation_requests.go +++ b/platform/testdata/opensearch-visualize/aggregation_requests.go @@ -771,8 +771,8 @@ var AggregationTests = []testdata.AggregationTestCase{ "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", maxOrNull("timestamp") AS "metric__2__1_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713401399517) AND "timestamp"<= - fromUnixTimestamp64Milli(1714697399517)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1713401399517) AND "timestamp"<= + __quesma_from_unixtime64mili(1714697399517)) GROUP BY "response" AS "aggr__2__key_0" ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, @@ -915,8 +915,8 @@ var AggregationTests = []testdata.AggregationTestCase{ "response" AS "aggr__2__key_0", count(*) AS "aggr__2__count", minOrNull("timestamp") AS "metric__2__1_col_0" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713401460471) AND "timestamp"<= - fromUnixTimestamp64Milli(1714697460471)) + WHERE 
("timestamp">=__quesma_from_unixtime64mili(1713401460471) AND "timestamp"<= + __quesma_from_unixtime64mili(1714697460471)) GROUP BY "response" AS "aggr__2__key_0" ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, @@ -1092,8 +1092,8 @@ var AggregationTests = []testdata.AggregationTestCase{ quantiles(0.950000)("timestamp") AS "metric__2__1_col_5", quantiles(0.990000)("timestamp") AS "metric__2__1_col_6" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1713401475845) AND "timestamp"<= - fromUnixTimestamp64Milli(1714697475845)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1713401475845) AND "timestamp"<= + __quesma_from_unixtime64mili(1714697475845)) GROUP BY "response" AS "aggr__2__key_0" ORDER BY "aggr__2__count" DESC, "aggr__2__key_0" ASC LIMIT 4`, diff --git a/platform/testdata/opensearch-visualize/pipeline_aggregation_requests.go b/platform/testdata/opensearch-visualize/pipeline_aggregation_requests.go index ef22e6bb8..63e0a374b 100644 --- a/platform/testdata/opensearch-visualize/pipeline_aggregation_requests.go +++ b/platform/testdata/opensearch-visualize/pipeline_aggregation_requests.go @@ -128,8 +128,8 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ ExpectedPancakeSQL: ` SELECT "day_of_week_i" AS "aggr__2__key_0", count(*) AS "aggr__2__count" FROM __quesma_table_name - WHERE ("order_date">=fromUnixTimestamp64Milli(1706095390802) AND "order_date"<= - fromUnixTimestamp64Milli(1715163790802)) + WHERE ("order_date">=__quesma_from_unixtime64mili(1706095390802) AND "order_date"<= + __quesma_from_unixtime64mili(1715163790802)) GROUP BY "day_of_week_i" AS "aggr__2__key_0" ORDER BY "aggr__2__key_0" ASC`, }, @@ -2853,8 +2853,8 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__1-bucket__parent_count", "clientip" AS "aggr__1-bucket__key_0", count(*) AS "aggr__1-bucket__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1715413213606) AND "timestamp"<= - fromUnixTimestamp64Milli(1715467213606)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1715413213606) AND "timestamp"<= + __quesma_from_unixtime64mili(1715467213606)) GROUP BY "clientip" AS "aggr__1-bucket__key_0" ORDER BY "aggr__1-bucket__key_0" DESC LIMIT 6`, @@ -3385,8 +3385,8 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__1-bucket__parent_count", "Cancelled" AS "aggr__1-bucket__key_0", count(*) AS "aggr__1-bucket__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1714255011264) AND "timestamp"<= - fromUnixTimestamp64Milli(1715551011264)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1714255011264) AND "timestamp"<= + __quesma_from_unixtime64mili(1715551011264)) GROUP BY "Cancelled" AS "aggr__1-bucket__key_0" ORDER BY "aggr__1-bucket__key_0" DESC LIMIT 6`, @@ -4324,8 +4324,8 @@ var PipelineAggregationTests = []testdata.AggregationTestCase{ SELECT sum(count(*)) OVER () AS "aggr__1-bucket__parent_count", "extension" AS "aggr__1-bucket__key_0", count(*) AS "aggr__1-bucket__count" FROM __quesma_table_name - WHERE ("timestamp">=fromUnixTimestamp64Milli(1714256186906) AND "timestamp"<= - fromUnixTimestamp64Milli(1715552186906)) + WHERE ("timestamp">=__quesma_from_unixtime64mili(1714256186906) AND "timestamp"<= + __quesma_from_unixtime64mili(1715552186906)) GROUP BY "extension" AS "aggr__1-bucket__key_0" ORDER BY "aggr__1-bucket__key_0" DESC LIMIT 6`, diff --git a/platform/testdata/opensearch_requests.go 
b/platform/testdata/opensearch_requests.go index fa0ae2d18..d138e6521 100644 --- a/platform/testdata/opensearch_requests.go +++ b/platform/testdata/opensearch_requests.go @@ -80,7 +80,7 @@ var OpensearchSearchTests = []SearchTestCase{ "track_total_hits": true }`, WantedSql: []string{ - `("__timestamp">=fromUnixTimestamp64Milli(1712236698149) AND "__timestamp"<=fromUnixTimestamp64Milli(1712237598149))`, + `("__timestamp">=__quesma_from_unixtime64mili(1712236698149) AND "__timestamp"<=__quesma_from_unixtime64mili(1712237598149))`, }, WantedQueryType: model.ListAllFields, WantedQueries: []string{ @@ -171,7 +171,7 @@ var OpensearchSearchTests = []SearchTestCase{ "track_total_hits": true }`, WantedSql: []string{ - `("__timestamp">=fromUnixTimestamp64Milli(1712236698149) AND "__timestamp"<=fromUnixTimestamp64Milli(1712237598149))`, + `("__timestamp">=__quesma_from_unixtime64mili(1712236698149) AND "__timestamp"<=__quesma_from_unixtime64mili(1712237598149))`, }, WantedQueryType: model.Normal, WantedQueries: []string{ diff --git a/platform/testdata/request_after_transformations.go b/platform/testdata/request_after_transformations.go new file mode 100644 index 000000000..4bba3c7e9 --- /dev/null +++ b/platform/testdata/request_after_transformations.go @@ -0,0 +1,2993 @@ +// Copyright Quesma, licensed under the Elastic License 2.0. +// SPDX-License-Identifier: Elastic-2.0 +package testdata + +import ( + "github.com/QuesmaOrg/quesma/platform/model" +) + +var TestsAsyncSearchAfterTransformations = []AsyncSearchTestCase{ + { // [0] + "Facets: aggregate by field + additionally match user (filter)", + `{ + "aggs": { + "sample": { + "aggs": { + "sample_count": { + "value_count": { + "field": "host.name" + } + }, + "top_values": { + "terms": { + "field": "host.name", + "shard_size": 25, + "size": 10 + } + } + }, + "sampler": { + "shard_size": 5000 + } + } + }, + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-23T11:27:16.820Z", + "lte": "2024-01-23T11:42:16.820Z" + } + } + }, + { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + } + ] + } + }, + "runtime_mappings": {}, + "size": "0", + "track_total_hits": false +}`, + `{ + "completion_time_in_millis": 1706010201967, + "expiration_time_in_millis": 1706010261964, + "is_partial": false, + "is_running": false, + "response": { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "aggregations": { + "sample": { + "doc_count": 442, + "sample_count": { + "value": 442 + }, + "top_values": { + "buckets": [ + { + "doc_count": 30, + "key": "hephaestus" + }, + { + "doc_count": 29, + "key": "poseidon" + }, + { + "doc_count": 28, + "key": "jupiter" + }, + { + "doc_count": 26, + "key": "selen" + }, + { + "doc_count": 24, + "key": "demeter" + }, + { + "doc_count": 24, + "key": "iris" + }, + { + "doc_count": 24, + "key": "pan" + }, + { + "doc_count": 22, + "key": "hades" + }, + { + "doc_count": 22, + "key": "hermes" + }, + { + "doc_count": 21, + "key": "persephone" + } + ], + "doc_count_error_upper_bound": 0, + "sum_other_doc_count": 192 + } + } + }, + "hits": { + "hits": [], + "max_score": null, + "total": { + "relation": "eq", + "value": 442 + } + }, + "timed_out": false, + "took": 3 + }, + "start_time_in_millis": 1706010201964 +}`, + "no comment yet", + model.HitsCountInfo{Type: model.Normal}, + []string{ + `SELECT sum(count(*)) OVER 
() AS "aggr__sample__count", + sum(count("host_name")) OVER () AS "metric__sample__sample_count_col_0", + sum(count(*)) OVER () AS "aggr__sample__top_values__parent_count", + "host_name" AS "aggr__sample__top_values__key_0", + count(*) AS "aggr__sample__top_values__count" + FROM ( + SELECT "host_name" + FROM __quesma_table_name + WHERE (("@timestamp">=fromUnixTimestamp64Milli(1706009236820) AND "@timestamp" + <=fromUnixTimestamp64Milli(1706010136820)) AND "message" iLIKE '%user%') + LIMIT 20000) + GROUP BY "host_name" AS "aggr__sample__top_values__key_0" + ORDER BY "aggr__sample__top_values__count" DESC, + "aggr__sample__top_values__key_0" ASC + LIMIT 11`, + }, + true, + }, + { // [1] + "ListByField: query one field, last 'size' results, return list of just that field, no timestamp, etc.", + `{ + "_source": false, + "fields": [ + { + "field": "message" + } + ], + "sort": [ + { + "@timestamp": { + "format": "strict_date_optional_time", + "order": "desc", + "unmapped_type": "boolean" + } + } + ], + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-23T14:43:19.481Z", + "lte": "2024-01-23T14:58:19.481Z" + } + } + }, + { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + { + "exists": { + "field": "message" + } + } + ] + } + }, + "runtime_mappings": {}, + "size": 100, + "track_total_hits": true +} +`, + `{ + "completion_time_in_millis": 1706021975541, + "expiration_time_in_millis": 1706022035538, + "is_partial": false, + "is_running": false, + "response": { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "hits": { + "hits": [ + { + "_id": "DuLTNo0Bxs2mnbSyknVe", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": 0.0, + "fields": { + "message": [ + "User logged in" + ] + } + }, + { + "_id": "B-LTNo0Bxs2mnbSydXWB", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": 0.0, + "fields": { + "message": [ + "User password reset requested" + ] + } + }, + { + "_id": "CeLTNo0Bxs2mnbSyfnWi", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": 0.0, + "fields": { + "message": [ + "User password reset requested" + ] + } + }, + { + "_id": "C-LTNo0Bxs2mnbSyiXUd", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": 0.0, + "fields": { + "message": [ + "User logged out" + ] + } + }, + { + "_id": "DeLTNo0Bxs2mnbSyjXV7", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": 0.0, + "fields": { + "message": [ + "User password changed" + ] + } + } + ], + "max_score": 0.0, + "total": { + "relation": "eq", + "value": 97 + } + }, + "timed_out": false, + "took": 3 + }, + "start_time_in_millis": 1706021975538 +} +`, "there should be 97 results, I truncated most of them", + model.HitsCountInfo{Type: model.ListByField, RequestedFields: []string{"message"}, Size: 100}, + []string{ + `SELECT "message" + FROM __quesma_table_name + WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481)) + AND "message" iLIKE '%user%') AND "message" IS NOT NULL) + ORDER BY "@timestamp" DESC + LIMIT 100`, + `SELECT count(*) AS "column_0" + FROM __quesma_table_name + WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481)) + AND "message" iLIKE '%user%') AND "message" IS NOT NULL)`, + }, + 
false, + }, + { // [2] + "ListAllFields: search all fields, return JSON + count (we don't return count atm)", + `{ + "_source": false, + "fields": [ + { + "field": "*", + "include_unmapped": "true" + }, + { + "field": "@timestamp", + "format": "strict_date_optional_time" + } + ], + "highlight": { + "fields": { + "*": {} + }, + "fragment_size": 2147483647, + "post_tags": [ + "@/kibana-highlighted-field@" + ], + "pre_tags": [ + "@kibana-highlighted-field@" + ] + }, + "query": { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-23T14:43:19.481Z", + "lte": "2024-01-23T14:58:19.481Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 500, + "sort": [ + { + "@timestamp": { + "format": "strict_date_optional_time", + "order": "desc", + "unmapped_type": "boolean" + } + }, + { + "_doc": { + "order": "desc", + "unmapped_type": "boolean" + } + } + ], + "stored_fields": [ + "*" + ], + "track_total_hits": false, + "version": true +} +`, + `{ + "completion_time_in_millis": 1706021899599, + "expiration_time_in_millis": 1706021959593, + "id": "FlpxWGNLeG9rVF82aTJEYXItU1BGVncdNVFvOUloYTBUZ3U0Q25MRTJtQTA0dzoyMTEyNjk=", + "is_partial": false, + "is_running": false, + "response": { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "hits": { + "hits": [ + { + "_id": "PeLUNo0Bxs2mnbSyS3Wk", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": null, + "_version": 1, + "fields": { + "@timestamp": [ + "2024-01-23T14:58:19.172Z" + ], + "data_stream.type": [ + "logs" + ], + "host.name": [ + "athena" + ], + "host.name.text": [ + "athena" + ], + "message": [ + "User password changed" + ], + "service.name": [ + "backend" + ], + "service.name.text": [ + "backend" + ], + "severity": [ + "error" + ], + "source": [ + "alpine" + ], + "timestamp": [ + "2024-01-23T14:58:19.168Z" + ] + }, + "highlight": { + "message": [ + "@kibana-highlighted-field@User@/kibana-highlighted-field@ password changed" + ] + }, + "sort": [ + "2024-01-23T14:58:19.172Z", + 0 + ] + }, + { + "_id": "OuLUNo0Bxs2mnbSyRXX_", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": null, + "_version": 1, + "fields": { + "@timestamp": [ + "2024-01-23T14:58:17.726Z" + ], + "data_stream.type": [ + "logs" + ], + "host.name": [ + "apollo" + ], + "host.name.text": [ + "apollo" + ], + "message": [ + "User password reset failed" + ], + "service.name": [ + "worker" + ], + "service.name.text": [ + "worker" + ], + "severity": [ + "info" + ], + "source": [ + "debian" + ], + "timestamp": [ + "2024-01-23T14:58:17.714Z" + ] + }, + "highlight": { + "message": [ + "@kibana-highlighted-field@User@/kibana-highlighted-field@ password reset failed" + ] + }, + "sort": [ + "2024-01-23T14:58:17.726Z", + 1 + ] + }, + { + "_id": "OeLUNo0Bxs2mnbSyP3Xl", + "_index": ".ds-logs-generic-default-2024.01.23-000001", + "_score": null, + "_version": 1, + "fields": { + "@timestamp": [ + "2024-01-23T14:58:16.165Z" + ], + "data_stream.type": [ + "logs" + ], + "host.name": [ + "hestia" + ], + "host.name.text": [ + "hestia" + ], + "message": [ + "User logged out" + ], + "service.name": [ + "cron" + ], + "service.name.text": [ + "cron" + ], + "severity": [ + "info" + ], + "source": [ + "suse" + ], + "timestamp": [ + "2024-01-23T14:58:16.154Z" + ] + }, + "highlight": { + "message": [ + 
"@kibana-highlighted-field@User@/kibana-highlighted-field@ logged out" + ] + }, + "sort": [ + "2024-01-23T14:58:16.165Z", + 2 + ] + } + ] + } + }`, + "Truncated most results. TODO Check what's at the end of response, probably count?", + model.HitsCountInfo{Type: model.ListAllFields, RequestedFields: []string{"*"}, Size: 500}, + []string{` + SELECT "@timestamp", "event_dataset", "host_name", "message", "properties_isreg" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + ORDER BY "@timestamp" DESC + LIMIT 500`, + }, + false, + }, + { // [3] + "Histogram: possible query nr 1", + `{ + "_source": { + "excludes": [] + }, + "aggs": { + "0": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "30s", + "min_doc_count": 1 + } + } + }, + "fields": [ + { + "field": "@timestamp", + "format": "date_time" + } + ], + "query": { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-23T14:43:19.481Z", + "lte": "2024-01-23T14:58:19.481Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 100, + "stored_fields": [ + "*" + ], + "track_total_hits": 1000 +} +`, `{ + "completion_time_in_millis": 1706021899595, + "expiration_time_in_millis": 1706021959594, + "id": "FjFQMlBUNnJmUU1pWml0WkllNmJWYXcdNVFvOUloYTBUZ3U0Q25MRTJtQTA0dzoyMTEyNzI=", + "is_partial": false, + "is_running": false, + "response": { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "aggregations": { + "0": { + "buckets": [ + { + "doc_count": 2, + "key": 1706021670000, + "key_as_string": "2024-01-23T15:54:30.000+01:00" + }, + { + "doc_count": 13, + "key": 1706021700000, + "key_as_string": "2024-01-23T15:55:00.000+01:00" + }, + { + "doc_count": 14, + "key": 1706021730000, + "key_as_string": "2024-01-23T15:55:30.000+01:00" + }, + { + "doc_count": 14, + "key": 1706021760000, + "key_as_string": "2024-01-23T15:56:00.000+01:00" + }, + { + "doc_count": 15, + "key": 1706021790000, + "key_as_string": "2024-01-23T15:56:30.000+01:00" + }, + { + "doc_count": 13, + "key": 1706021820000, + "key_as_string": "2024-01-23T15:57:00.000+01:00" + }, + { + "doc_count": 15, + "key": 1706021850000, + "key_as_string": "2024-01-23T15:57:30.000+01:00" + }, + { + "doc_count": 11, + "key": 1706021880000, + "key_as_string": "2024-01-23T15:58:00.000+01:00" + } + ] + } + }, + "hits": { + "hits": [], + "max_score": null, + "total": { + "relation": "eq", + "value": 97 + } + }, + "timed_out": false, + "took": 1 + }, + "start_time_in_millis": 1706021899594 +} +`, + "no comment yet", + model.HitsCountInfo{Type: model.ListByField, RequestedFields: []string{"@timestamp"}, Size: 100}, + []string{ + `SELECT sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", + toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0", + count(*) AS "aggr__0__count" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0" + ORDER BY "aggr__0__key_0" ASC`, + `SELECT "@timestamp" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND 
("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + LIMIT 100`, + }, + true, + }, + { // [4] + "Histogram: possible query nr 2", + `{ + "size":0, + "query": { + "range": { + "@timestamp": { + "gt": "2024-01-25T14:53:59.033Z", + "lte": "2024-01-25T15:08:59.033Z", + "format": "strict_date_optional_time" + } + } + }, + "aggs": { + "stats": { + "terms": { + "field": "event.dataset", + "size": 4, + "missing": "unknown" + }, + "aggs": { + "series": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "60s" + } + } + } + } + }, + "track_total_hits":false +}`, + `{}`, + "no comment yet", + model.HitsCountInfo{Type: model.Normal}, + []string{ + `SELECT "aggr__stats__parent_count", "aggr__stats__key_0", "aggr__stats__count", + "aggr__stats__series__key_0", "aggr__stats__series__count" + FROM ( + SELECT "aggr__stats__parent_count", "aggr__stats__key_0", + "aggr__stats__count", "aggr__stats__series__key_0", + "aggr__stats__series__count", + dense_rank() OVER (ORDER BY "aggr__stats__count" DESC, "aggr__stats__key_0" + ASC) AS "aggr__stats__order_1_rank", + dense_rank() OVER (PARTITION BY "aggr__stats__key_0" ORDER BY + "aggr__stats__series__key_0" ASC) AS "aggr__stats__series__order_1_rank" + FROM ( + SELECT sum(count(*)) OVER () AS "aggr__stats__parent_count", + COALESCE("event_dataset", 'unknown') AS "aggr__stats__key_0", + sum(count(*)) OVER (PARTITION BY "aggr__stats__key_0") AS + "aggr__stats__count", + toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS + "aggr__stats__series__key_0", count(*) AS "aggr__stats__series__count" + FROM __quesma_table_name + WHERE ("@timestamp">fromUnixTimestamp64Milli(1706194439033) AND "@timestamp"<=fromUnixTimestamp64Milli(1706195339033)) + GROUP BY COALESCE("event_dataset", 'unknown') AS "aggr__stats__key_0", + toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS + "aggr__stats__series__key_0")) + WHERE "aggr__stats__order_1_rank"<=4 + ORDER BY "aggr__stats__order_1_rank" ASC, + "aggr__stats__series__order_1_rank" ASC`, + }, + true, + }, + { // [5] + "Earliest/latest timestamp", + `{ + "aggs": { + "earliest_timestamp": { + "min": { + "field": "@timestamp" + } + }, + "latest_timestamp": { + "max": { + "field": "@timestamp" + } + } + }, + "query": { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "posei", + "type": "best_fields" + } + }, + { + "match_phrase": { + "message": "User logged out" + } + }, + { + "match_phrase": { + "host.name": "poseidon" + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "size": 0, + "track_total_hits": 1 + }`, + `{ + "completion_time_in_millis": 1706551812667, + "expiration_time_in_millis": 1706551872665, + "is_partial": false, + "is_running": false, + "response": { + "_shards": { + "failed": 0, + "skipped": 0, + "successful": 1, + "total": 1 + }, + "aggregations": { + "earliest_timestamp": { + "value": null + }, + "latest_timestamp": { + "value": null + } + }, + "hits": { + "hits": [], + "max_score": null, + "total": { + "relation": "eq", + "value": 0 + } + }, + "timed_out": false, + "took": 2 + }, + "start_time_in_millis": 1706551812665 + }`, + "no comment yet", + model.HitsCountInfo{Type: model.Normal}, + []string{ + `SELECT minOrNull("@timestamp") AS "metric__earliest_timestamp_col_0", + maxOrNull("@timestamp") AS "metric__latest_timestamp_col_0", + count(*) AS "metric____quesma_total_count_col_0" + FROM __quesma_table_name + WHERE (("message" iLIKE '%posei%' AND "message" ILIKE '%User logged 
out%') AND + "host_name" ILIKE '%poseidon%')`, + }, + true, + }, + { // [6] + "VERY simple ListAllFields", + `{ + "fields": [ + "*" + ], + "size": 50, + "track_total_hits": false + }`, + ``, + "no comment yet", + model.HitsCountInfo{Type: model.ListAllFields, RequestedFields: []string{"*"}, Size: 50}, + []string{ + `SELECT "@timestamp", "event_dataset", "host_name", "message", "properties_isreg" + FROM __quesma_table_name + LIMIT 50`, + }, + false, + }, + { // [7] + "Timestamp in epoch_millis + select one field", + `{ + "_source": false, + "fields": [ + "properties::isreg" + ], + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "epoch_millis", + "gte": 1710171234276, + "lte": 1710172134276 + } + } + }, + { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "epoch_millis", + "gte": 1710171234276, + "lte": 1710172134276 + } + } + }, + { + "bool": { + "filter": [], + "must": [ + { + "match_all": {} + } + ], + "must_not": [] + } + } + ] + } + }, + { + "exists": { + "field": "properties::isreg" + } + } + ] + } + }, + "size": 100, + "track_total_hits": false + }`, + ``, + "happens e.g. in Explorer > Field Statistics view", + model.HitsCountInfo{Type: model.ListByField, RequestedFields: []string{"properties::isreg"}, Size: 100}, + []string{` + SELECT "properties_isreg" + FROM __quesma_table_name + WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1710171234276) AND "@timestamp" + <=fromUnixTimestamp64Milli(1710172134276)) AND ("@timestamp">= + fromUnixTimestamp64Milli(1710171234276) AND "@timestamp"<= + fromUnixTimestamp64Milli(1710172134276))) AND "properties_isreg" IS NOT NULL) + LIMIT 100`, + }, + false, + }, +} + +var TestsSearchAfterTransformations = []SearchTestCase{ + { // [0] + "Match all", + ` + { + "query": { + "match_all": {} + }, + "track_total_hits": false + }`, + []string{""}, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` LIMIT 10`, + }, + []string{}, + }, + { // [1] + "Term as dictionary", + ` + { + "query": { + "bool": { + "filter": + { + "term": { + "type": "task" + } + } + } + }, + "track_total_hits": true + }`, + []string{`"type"='task'`}, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` WHERE "type"='task' LIMIT 10`, + `SELECT count(*) AS "column_0" FROM ` + TableName, + }, + []string{}, + }, + { // [2] + "Term as array", + ` + { + "query": { + "bool": { + "filter": [ + { + "term": { + "type": "task" + } + }, + { + "terms": { + "task.enabled": [true, 54, "abc", "abc's"] + } + } + ] + } + }, + "track_total_hits": true + }`, + []string{`("type"='task' AND "task.enabled" IN tuple(true, 54, 'abc', 'abc\'s'))`}, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` WHERE ("type"='task' AND "task.enabled" IN tuple(true, 54, 'abc', 'abc\\'s')) LIMIT 10`, + `SELECT count(*) AS "column_0" FROM ` + TableName, + }, + []string{}, + }, + { // [3] + "Sample log query", + ` + { + "query": { + "bool": { + "must": [], + "filter": [ + { + "multi_match": { + "type": "best_fields", + "query": "user", + "lenient": true + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-17T10:28:18.815Z", + "lte": "2024-01-17T10:43:18.815Z" + } + } + }], + "should": [], + "must_not": [] + } + }, + "track_total_hits": true + }`, + []string{ + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705487298815) AND "@timestamp"<=fromUnixTimestamp64Milli(1705488198815)))`, + }, + 
model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` WHERE ("message" iLIKE '%user%' ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1705487298815) AND "@timestamp"<=fromUnixTimestamp64Milli(1705488198815))) ` + + `LIMIT 10`, + `SELECT count(*) AS "column_0" FROM ` + TableName, + }, + []string{}, + }, + { // [4] + "Multiple bool query", + ` + { + "query": { + "bool" : { + "must" : { + "term" : { "user.id" : "kimchy" } + }, + "filter": { + "term" : { "tags" : "production" } + }, + "must_not" : { + "range" : { + "age" : { "gte" : 10, "lte" : 20 } + } + }, + "should" : [ + { "term" : { "tags" : "env1" } }, + { "term" : { "tags" : "deployed" } } + ], + "minimum_should_match" : 1, + "boost" : 1.0 + } + }, + "track_total_hits": true + }`, + []string{ + `((("user.id"='kimchy' AND "tags"='production') AND ("tags"='env1' OR "tags"='deployed')) AND NOT (("age">=10 AND "age"<=20)))`, + }, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` WHERE ((("user.id"='kimchy' AND "tags"='production') ` + + `AND ("tags"='env1' OR "tags"='deployed')) AND NOT (("age".=.0 AND "age".=.0))) ` + + `LIMIT 10`, + `SELECT count(*) AS "column_0" FROM ` + TableName + ` ` + + `WHERE ((("user.id"='kimchy' AND "tags"='production') ` + + `AND ("tags"='env1' OR "tags"='deployed')) AND NOT (("age".=.0 AND "age".=.0)))`, + }, + []string{}, + }, + { // [5] + "Match phrase", + ` + { + "query": { + "bool": { + "filter": [ + { + "bool": { + "must": [], + "filter": [ + { + "match_phrase": { + "host_name.keyword": "prometheus" + } + } + ], + "should": [], + "must_not": [] + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"host_name" __quesma_match '%prometheus%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "host_name"='prometheus' LIMIT 10`}, + []string{}, + }, + { // [6] + "Match", + ` + { + "query": { + "match": { + "message": "this is a test" + } + }, + "size": 100, + "track_total_hits": false + }`, + []string{`((("message" __quesma_match '%this%' OR "message" __quesma_match '%is%') OR "message" __quesma_match '%a%') OR "message" __quesma_match '%test%')`}, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` WHERE ((("message" ILIKE '%this%' OR "message" ILIKE '%is%') ` + + `OR "message" ILIKE '%a%') OR "message" ILIKE '%test%') ` + + `LIMIT 100`, + }, + []string{}, + }, + { // [7] + "Terms", + ` + { + "query": { + "bool": { + "must": [ + { + "terms": { + "status": ["pending"] + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"status"='pending'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "status"='pending'`}, + []string{}, + }, + { // [8] + "Exists", + ` + { + "query": { + "bool": { + "filter": [ + { + "bool": { + "should": [ + { + "bool": { + "must": [ + { + "term": { + "type": "upgrade-assistant-reindex-operation" + } + } + ], + "must_not": [ + { + "exists": { + "field": "namespace" + } + }, + { + "exists": { + "field": "namespaces" + } + } + ] + } + } + ], + "minimum_should_match": 1 + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{ + `("type"='upgrade-assistant-reindex-operation' AND NOT (("namespace" IS NOT NULL OR "namespaces" IS NOT NULL)))`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE ("type"='upgrade-assistant-reindex-operation' AND NOT (("namespace" IS NOT NULL OR "namespaces" IS NOT NULL)))`, + }, + []string{}, + }, + { // [9] + "Simple query string", + ` + { + 
"query": { + "bool": { + "must": [ + { + "simple_query_string": { + "query": "endpoint_event_filters", + "fields": [ + "exception-list-agnostic.list_id" + ], + "default_operator": "OR" + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"exception-list-agnostic.list_id" __quesma_match 'endpoint\_event\_filters'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "exception-list-agnostic.list_id"='endpoint\\_event\\_filters'`}, + []string{}, + }, + { // [10] + "Simple query string wildcard", + ` + { + "query": { + "bool": { + "must": [ + { + "simple_query_string": { + "query": "ingest-agent-policies", + "lenient": true, + "fields": [ + "*" + ], + "default_operator": "OR" + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{fullTextFieldName + ` __quesma_match 'ingest-agent-policies'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE ` + fullTextFieldName + ` ILIKE 'ingest-agent-policies'`}, + []string{}, + }, + { // [11] + "Simple wildcard", + ` + { + "query": { + "bool": { + "must": [ + { + "wildcard": { + "task.taskType": { + "value": "alerting:*" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"task.taskType" iLIKE 'alerting:%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "task.taskType" iLIKE 'alerting:%'`}, + []string{}, + }, + { // [12] + "Simple prefix ver1", + ` + { + "query": { + "bool": { + "must": [ + { + "prefix": { + "alert.actions.actionRef": { + "value": "preconfigured:" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"alert.actions.actionRef" iLIKE 'preconfigured:%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "alert.actions.actionRef" iLIKE 'preconfigured:%'`}, + []string{}, + }, + { // [13] + "Simple prefix ver2", + ` + { + "query": { + "prefix" : { "user" : "ki" } + }, + "track_total_hits": false, + "size": 10 + }`, + []string{`"user" iLIKE 'ki%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "user" iLIKE 'ki%'`}, + []string{}, + }, + { // [14] + "Prefix with special characters", + ` + { + "query": { + "prefix" : { "user" : "ki%_\\ \\%" } + }, + "track_total_hits": false, + "size": 10 + }`, + []string{`"user" iLIKE 'ki\%\_\\ \\\%%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "user" iLIKE 'ki\\%\\_\\\\ \\\\\\%%'`}, + []string{}, + }, + { // [15] + "Query string, wildcards don't work properly", + ` + { + "query": { + "query_string": { + "fields": [ + "message" + ], + "query": "\"* logged\"" + } + }, + "track_total_hits": false, + "size": 1 + }`, + []string{`"message" __quesma_match '% logged'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "message" ILIKE '% logged'`}, + []string{}, + }, + { // [16] + "Empty bool", + ` + { + "query": { + "bool": { + "must": [], + "filter": [], + "should": [], + "must_not": [] + } + }, + "track_total_hits": true + }`, + []string{""}, + model.ListAllFields, + []string{ + `SELECT count(*) AS "column_0" FROM ` + TableName, + `SELECT "message" FROM ` + TableName, + }, + []string{}, + }, + { // [17] + "Simplest 'match_phrase'", + `{ + "query": { + "match_phrase": { + "message": "this is a test" + } + }, + "track_total_hits": false + }`, + []string{`"message" __quesma_match '%this is a test%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "message" ILIKE '%this is a test%'`}, + 
[]string{}, + }, + { // [18] + "More nested 'match_phrase'", + `{ + "query": { + "match_phrase": { + "message": { + "query": "this is a test", + "analyzer": "my_analyzer" + } + } + }, + "track_total_hits": false + }`, + []string{`"message" __quesma_match '%this is a test%'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "message" ILIKE '%this is a test%'`}, + []string{}, + }, + { // [19] + "Simple nested", + ` + { + "query": { + "bool": { + "must": [ + { + "nested": { + "path": "references", + "query": { + "bool": { + "must": [ + { + "term": { + "references.type": "tag" + } + } + ] + } + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"references.type"='tag'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "references.type"='tag'`}, + []string{}, + }, + { // [20] + "random simple test", + ` + { + "size": 0, + "timeout": "1000ms", + "terminate_after": 100000, + "query": { + "bool": { + "filter": [ + { + "bool": { + "must": [], + "filter": [ + { + "multi_match": { + "type": "best_fields", + "query": "user", + "lenient": true + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-22T09:26:10.299Z", + "lte": "2024-01-22T09:41:10.299Z" + } + } + } + ], + "should": [], + "must_not": [] + } + } + ] + } + }, + "aggs": { + "suggestions": { + "terms": { + "size": 10, + "field": "stream.namespace", + "shard_size": 10, + "order": { + "_count": "desc" + } + } + }, + "unique_terms": { + "cardinality": { + "field": "stream.namespace" + } + } + }, + "runtime_mappings": {}, + "track_total_hits": true + } + `, + []string{ + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) ` + + `AND "stream.namespace" IS NOT NULL)`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("stream_namespace")) OVER () AS + "metric__unique_terms_col_0", + sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", + sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "stream_namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + GROUP BY "stream_namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [21] + "termWithCompoundValue", + ` + { + "size": 0, + "track_total_hits": false, + "timeout": "1000ms", + "terminate_after": 100000, + "query": { + "bool": { + "filter": [ + { + "bool": { + "must": [], + "filter": [ + { + "bool": { + "should": [ + { + "term": { + "service.name": { + "value": "admin" + } + } + } + ], + "minimum_should_match": 1 + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-22T14:34:35.873Z", + "lte": "2024-01-22T14:49:35.873Z" + } + } + } + ], + "should": [], + "must_not": [] + } + } + ] + } + }, + "aggs": { + "suggestions": { + "terms": { + "size": 10, + "field": "namespace", + "shard_size": 10, + "order": { + "_count": "desc" + } + } + }, + "unique_terms": { + "cardinality": { + "field": "namespace" + } + } + }, + 
"runtime_mappings": {} + } + `, + []string{ + `(("service.name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873))) ` + + `AND "namespace" IS NOT NULL)`, + `("service.name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873)))`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" + , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE ("service_name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873))) + GROUP BY "namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [22] + "count(*) as /_search query. With filter", // response should be just ["hits"]["total"]["value"] == result of count(*) + `{ + "aggs": { + "suggestions": { + "terms": { + "field": "stream.namespace", + "order": { + "_count": "desc" + }, + "shard_size": 10, + "size": 10 + } + }, + "unique_terms": { + "cardinality": { + "field": "stream.namespace" + } + } + }, + "query": { + "bool": { + "filter": [ + { + "bool": { + "filter": [ + { + "match_phrase": { + "message": "User logged out" + } + }, + { + "match_phrase": { + "host.name": "poseidon" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-29T15:36:36.491Z", + "lte": "2024-01-29T18:11:36.491Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + } + ] + } + }, + "runtime_mappings": {}, + "size": 0, + "terminate_after": 100000, + "timeout": "1000ms", + "track_total_hits": true + }`, + []string{ + `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491)))`, + `((("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) ` + + `AND "stream.namespace" IS NOT NULL)`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("stream_namespace")) OVER () AS + "metric__unique_terms_col_0", + sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", + sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "stream_namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE (("message" ILIKE '%User logged out%' AND "host_name"='poseidon') + AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) + GROUP BY "stream_namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [23] + "count(*) as /_search or /logs-*-/_search query. 
Without filter", // response should be just ["hits"]["total"]["value"] == result of count(*) + `{ + "aggs": { + "suggestions": { + "terms": { + "field": "namespace", + "order": { + "_count": "desc" + }, + "shard_size": 10, + "size": 10 + } + }, + "unique_terms": { + "cardinality": { + "field": "namespace" + } + } + }, + "query": { + "bool": { + "filter": [ + { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-22T09:26:10.299Z", + "lte": "2024-01-22T09:41:10.299Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + } + ] + } + }, + "runtime_mappings": {}, + "size": 0, + "track_total_hits": false, + "terminate_after": 100000, + "timeout": "1000ms" + }`, + []string{ + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) ` + + `AND "namespace" IS NOT NULL)`, + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" + , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + GROUP BY "namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [24] + "count(*) as /_search query. 
With filter", // response should be just ["hits"]["total"]["value"] == result of count(*) + `{ + "aggs": { + "suggestions": { + "terms": { + "field": "namespace", + "order": { + "_count": "desc" + }, + "shard_size": 10, + "size": 10 + } + }, + "unique_terms": { + "cardinality": { + "field": "namespace" + } + } + }, + "query": { + "bool": { + "filter": [ + { + "bool": { + "filter": [ + { + "match_phrase": { + "message": "User logged out" + } + }, + { + "match_phrase": { + "host.name": "poseidon" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-29T15:36:36.491Z", + "lte": "2024-01-29T18:11:36.491Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + } + ] + } + }, + "runtime_mappings": {}, + "size": 0, + "track_total_hits": false, + "terminate_after": 100000, + "timeout": "1000ms" + }`, + []string{ + `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) ` + + `AND "namespace" IS NOT NULL)`, + `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491)))`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" + , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE (("message" ILIKE '%User logged out%' AND "host_name"='poseidon') + AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) + GROUP BY "namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [25] + "count(*) as /_search or /logs-*-/_search query. 
Without filter", // response should be just ["hits"]["total"]["value"] == result of count(*) + `{ + "aggs": { + "suggestions": { + "terms": { + "field": "namespace", + "order": { + "_count": "desc" + }, + "shard_size": 10, + "size": 10 + } + }, + "unique_terms": { + "cardinality": { + "field": "namespace" + } + } + }, + "query": { + "bool": { + "filter": [ + { + "bool": { + "filter": [ + { + "multi_match": { + "lenient": true, + "query": "user", + "type": "best_fields" + } + }, + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-22T09:26:10.299Z", + "lte": "2024-01-22T09:41:10.299Z" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + } + ] + } + }, + "runtime_mappings": {}, + "size": 0, + "track_total_hits": false, + "terminate_after": 100000, + "timeout": "1000ms" + }`, + []string{ + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) AND "namespace" IS NOT NULL)`, + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, + }, + model.Normal, + []string{}, + []string{ + `SELECT uniqMerge(uniqState("namespace")) OVER () AS "metric__unique_terms_col_0" + , sum(count(*)) OVER () AS "aggr__suggestions__parent_count", + "namespace" AS "aggr__suggestions__key_0", + count(*) AS "aggr__suggestions__count" + FROM __quesma_table_name + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + GROUP BY "namespace" AS "aggr__suggestions__key_0" + ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC + LIMIT 11`, + }, + }, + { // [26] + "_search, only one so far with fields, we're not sure if SELECT * is correct, or should be SELECT @timestamp", + `{ + "_source": { + "excludes": [] + }, + "fields": [ + { + "field": "message", + "format": "date_time" + } + ], + "pit": { + "id": "gcSHBAEqLmRzLWxvZ3MtZ2VuZXJpYy1kZWZhdWx0LTIwMjQuMDMuMjYtMDAwMDAxFndZdXQ5SG5wU0RTZExnV0lkXzRWT1EAFkRwRTBHbm5uVDlpLTR0MnZONXY0dFEAAAAAAAAALAUWTTBidDdzcWJTWGlZamxpTGE3WW5IUQABFndZdXQ5SG5wU0RTZExnV0lkXzRWT1EAAA==", + "keep_alive": "30s" + }, + "query": { + "bool": { + "filter": [], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 500, + "track_total_hits": false, + "stored_fields": [ + "*" + ], + "timeout": "30000ms", + "track_total_hits": true + }`, + []string{""}, + model.ListByField, + []string{ + `SELECT count(*) AS "column_0" FROM ` + TableName, + `SELECT "message" FROM ` + TableName + ` LIMIT 500`, + }, + []string{}, + }, + { // [27] + "Empty must", + ` + { + "query": { + "bool": { + "must": {} + } + }, + "track_total_hits": true + }`, + []string{``}, + model.ListAllFields, + []string{ + `SELECT count(*) AS "column_0" FROM ` + TableName, + `SELECT "message" FROM ` + TableName + ` LIMIT 10`, + }, + []string{}, + }, + { // [28] + "Empty must not", + ` + { + "query": { + "bool": { + "must_not": {} + } + }, + "track_total_hits": false + }`, + []string{``}, + model.ListAllFields, + []string{ + `SELECT "message" FROM ` + TableName + ` LIMIT 10`, + }, + []string{}, + }, + { // [29] + "Empty should", + ` + { + "query": { + "bool": { + "should": {} + } + }, + "track_total_hits": false + }`, + []string{``}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName}, + 
[]string{}, + }, + { // [30] + "Empty all bools", + ` + { + "query": { + "bool": { + "should": {}, + "must": {}, + "must_not": {}, + "filter": {} + } + }, + "track_total_hits": true + }`, + []string{``}, + model.ListAllFields, + []string{ + `SELECT count(*) AS "column_0" FROM ` + TableName, + `SELECT "message" FROM ` + TableName, + }, + []string{}, + }, + { // [31] + "Some bools empty, some not", + ` + { + "query": { + "bool": { + "should": [], + "must": { + "match_phrase": { + "message": "User logged out" + } + }, + "must_not": {}, + "filter": [ + { + "match_phrase": { + "message": "User logged out" + } + } + ] + } + }, + "track_total_hits": false, + "size": 12 + }`, + []string{`("message" __quesma_match '%User logged out%' AND "message" __quesma_match '%User logged out%')`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE ("message" ILIKE '%User logged out%' AND "message" ILIKE '%User logged out%')`, + }, + []string{}, + }, + { // [32] + "Match all (empty query)", + `{}`, + []string{""}, + model.ListAllFields, + []string{ + `SELECT count(*) AS "column_0" FROM (SELECT 1 FROM ` + TableName + ` LIMIT 10000)`, + `SELECT "message" FROM __quesma_table_name LIMIT 10`, + }, + []string{}, + }, + { // [33] + "Constant score query", + `{ + "query": { + "constant_score": { + "filter": { + "term": { "user.id": "kimchy" } + }, + "boost": 1.2 + } + }, + "track_total_hits": false + }`, + []string{`"user.id"='kimchy'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "user.id"='kimchy'`}, + []string{}, + }, + { // [34] this is a snowflake case as `_id` is a special field in ES and in clickhouse we compute + "Match phrase using _id field", + `{ + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "strict_date_optional_time", + "gte": "2024-01-22T09:26:10.299Z" + } + } + }, + { + "match_phrase": { + "_id": "323032342d30352d32342031333a33323a34372e333037202b3030303020555443qqq111111111111111111111111111111111111111111111111111111111111" + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{ + `("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3))`, + }, + model.ListAllFields, + // TestSearchHandler is pretty blunt with config loading so the test below can't be used. 
+ // We will probably refactor it as we move forwards with schema which will get even more side-effecting + []string{`SELECT "message" FROM ` + TableName + ` WHERE ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3)) LIMIT 10`}, + []string{}, + }, + { // [35] Comments in queries + "Comments in filter", + `{ + "query": { /*one comment */ + "bool": { + "must": { + "term": { "user.id": "kimchy" } // One comment + } + } + }, + "track_total_hits": false + }`, + []string{`"user.id"='kimchy'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "user.id"='kimchy'`}, + []string{}, + }, + { // [36] terms with range + "Terms with range", + `{ + "size": 1, + "query": { + "bool": { + "filter": [ + { + "terms": { + "cliIP": [ + "2601:204:c503:c240:9c41:5531:ad94:4d90", + "50.116.43.98", + "75.246.0.64" + ] + } + }, + { + "range": { + "@timestamp": { + "gte": "2024-05-16T00:00:00", + "lte": "2024-05-17T23:59:59" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`("cliIP" IN tuple('2601:204:c503:c240:9c41:5531:ad94:4d90', '50.116.43.98', '75.246.0.64') AND ("@timestamp">=fromUnixTimestamp64Milli(1715817600000) AND "@timestamp"<=fromUnixTimestamp64Milli(1715990399000)))`}, + model.ListAllFields, + //[]model.Query{withLimit(justSimplestWhere(`("cliIP" IN ('2601:204:c503:c240:9c41:5531:ad94:4d90','50.116.43.98','75.246.0.64') AND ("@timestamp">=parseDateTime64BestEffort('2024-05-16T00:00:00') AND "@timestamp"<=parseDateTime64BestEffort('2024-05-17T23:59:59')))`), 1)}, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE ("cliIP" IN tuple('2601:204:c503:c240:9c41:5531:ad94:4d90', '50.116.43.98', '75.246.0.64') ` + + `AND ("@timestamp">=fromUnixTimestamp64Milli(1715817600000) AND "@timestamp"<=fromUnixTimestamp64Milli(1715990399000))) ` + + `LIMIT 1`, + }, + []string{}, + }, + { // [37] + "Simple regexp (can be simply transformed to one LIKE)", + `{ + "query": { + "bool": { + "filter": [ + { + "regexp": { + "field": { + "value": ".*-abb-all-li.mit.*s-5" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"field" LIKE '%-abb-all-li_mit%s-5'`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "field" LIKE '%-abb-all-li_mit%s-5' ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [38] + "Simple regexp (can be simply transformed to one LIKE), with _, which needs to be escaped", + `{ + "query": { + "bool": { + "filter": [ + { + "regexp": { + "field": { + "value": ".*_.." + } + } + } + ] + } + }, + "track_total_hits": false + }`, + // Escaping _ twice ("\\_") seemed wrong, but it actually works in Clickhouse! 
+ // \\\\ means 2 escaped backslashes, actual returned string is "\\" + []string{`"field" LIKE '%\\___'`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "field" LIKE '%\\\\___' ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [39] + "Complex regexp 1 (can't be transformed to LIKE)", + `{ + "query": { + "bool": { + "filter": [ + { + "regexp": { + "field": { + "value": "a*-abb-all-li.mit.*s-5" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"field" REGEXP 'a*-abb-all-li.mit.*s-5'`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "field" REGEXP 'a*-abb-all-li.mit.*s-5' ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [40] + "Complex regexp 2 (can't be transformed to LIKE)", + `{ + "query": { + "bool": { + "filter": [ + { + "regexp": { + "field": { + "value": "a?" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"field" REGEXP 'a?'`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "field" REGEXP 'a\?' ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [41] + `Escaping of ', \, \t and \n`, + ` + { + "query": { + "bool": { + "filter": [ + { + "match_phrase": { + "message": "\nMen's Clothing \\ \t" + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`"message" __quesma_match '% +Men\'s Clothing \\ %'`}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + ` WHERE "message" ILIKE '% +Men\\'s Clothing \\\\ %' LIMIT 10`}, + []string{}, + }, + { // [42] + "ids, 0 values", + `{ + "query": { + "ids": { + "values": [] + } + }, + "track_total_hits": false + }`, + []string{`false`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE false ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [43] + "ids, 1 value", + `{ + "query": { + "ids": { + "values": ["323032342d31322d32312030373a32393a30332e333637202b3030303020555443qqq1111111111111111111111111111111111111111"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2024-12-21 07:29:03.367',3)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2024-12-21 07:29:03.367',3) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [44] + // DateTime64(3 or 9) are "normal"/standard. We check weird one like 2. 
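+ // Note on the ids tests below: the hex prefix of each "values" entry (everything before the "qqq" marker) appears to + // decode to the timestamp text itself, e.g. 323032352d30372d30362030393a33383a30332e3132... -> "2025-07-06 09:38:03.12", + // which is why the expected WHERE clause compares "@timestamp" with toDateTime64 of that same value.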
+ "ids, 1 value, different DateTime format: with timezone, precision: 2", + `{ + "query": { + "ids": { + "values": ["323032352d30372d30362030393a33383a30332e3132202b3030303020555443qqq3635363236333330333833373335333633363331333736333335333736353632363333313337333233313330363536353330333236313332363633323631333833323634333833313333333033333636333933353332333033363631333533363335333833323333363233393333333733333635333733353338333736333632"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2025-07-06 09:38:03.12',2)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2025-07-06 09:38:03.12',2) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [45] + // important test, DateTime64(3) is pretty standard + "ids, 1 value, different DateTime format: with timezone, precision: 3", + `{ + "query": { + "ids": { + "values": ["323032352d30372d30342031353a33323a34332e333737202b303230302043455354qqq3332363233363331363636353633333933323338363133353339333233323330333036313335333833343332363536333633363533343330363333373632363333393636363233303337333936313632333136323330333736313633333933303635333436313336363133383632333433313330363133353634333733353631"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2025-07-04 13:32:43.377',3)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2025-07-04 13:32:43.377',3) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [46] + // important test, DateTime64(9) is pretty standard + "ids, 1 value, different DateTime format: with timezone, precision: 9", + `{ + "query": { + "ids": { + "values": ["323032352d30372d30362031303a31313a30332e313233343536373839202b3030303020555443qqq3338363633373635363433333334333333353331333936333334363336333634333836313632363136343634333633343634363433393337333333393338333933323634333533393334333936333635363333353338333233313331363436313337333533333338333133333339333933383335333033393636363633343636"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2025-07-06 10:11:03.123456789',9)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2025-07-06 10:11:03.123456789',9) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [47] + // DateTime64(3 or 9) are "normal"/standard. We check weird one like 7. + "ids, 1 value, different DateTime format: with timezone, precision: 7", + `{ + "query": { + "ids": { + "values": ["323032352d30372d30362030393a33363a30332e32353531323336202b3030303020555443qqq3338333636363634333733363634333533363333333336363339333736343330363136323334363136343631363533333336363133313636333236323337333936313632333133323335333733363632363633313335333136323334333336333636333833373333363333343331363336313330333133363636333136353631"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2025-07-06 09:36:03.2551236',7)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2025-07-06 09:36:03.2551236',7) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [48] + // DateTime64(3 or 9) are "normal"/standard. We check weird one like 7. 
+ "ids, 1 value, different DateTime format: with timezone, precision: 7, but timestamp with only 1 (.1)", + `{ + "query": { + "ids": { + "values": ["323032352d30372d30362030393a33383a30332e31202b3030303020555443qqq3339333533343339333033303332333533363631333033323333333936363335333636333339363436363632333336323336363233383332333233353335363233343631363436363332363433383331363636333636333033353333333833363631333533333338333133303334333336313634333733393631363333333633"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2025-07-06 09:38:03.1',1)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2025-07-06 09:38:03.1',1) ` + + `LIMIT 10`, + }, + []string{}, + }, + { // [49] + "ids, 2+ values", + `{ + "query": { + "ids": { + "values": [ + "323032342d31322d32312030373a32393a30332e333637202b3030303020555443qqq111111111111111111111111111", + "323032342d31322d32312030373a32393a30322e393932202b3030303020555443qqq111111111111111111111111111" + ] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" IN tuple(toDateTime64('2024-12-21 07:29:03.367',3), toDateTime64('2024-12-21 07:29:02.992',3))`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" IN tuple(toDateTime64('2024-12-21 07:29:03.367',3), toDateTime64('2024-12-21 07:29:02.992',3)) ` + + `LIMIT 10000`, + }, + []string{}, + }, + { // [50] + "ids with DateTime64(9) (trailing zeroes)", + `{ + "query": { + "ids": { + "values": ["323032342d31322d32312030373a32393a30332e333637303030303030qqq123qqq11111111111111111111111111111111111111111111111"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2024-12-21 07:29:03.367',3)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2024-12-21 07:29:03.367',3) ` + + `LIMIT 10000`, + }, + []string{}, + }, + { // [51] + "ids with DateTime64(9) (no trailing zeroes)", + `{ + "query": { + "ids": { + "values": ["323032342d31322d32312030373a32393a30332e313233343536373839qqq123qqq11111111111111111111111111111111111111111111111"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2024-12-21 07:29:03.123456789',9)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2024-12-21 07:29:03.123456789',9) ` + + `LIMIT 10000`, + }, + []string{}, + }, + { // [52] + "ids with DateTime64(0)", + `{ + "query": { + "ids": { + "values": ["323032342d31322d32312030373a32393a3033qqq11111111111111111111111111111111111111111111111"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2024-12-21 07:29:03.',0)`}, + // dot at the end doesn't matter - CH accepts it exactly like it wasn't there + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2024-12-21 07:29:03.',0) ` + + `LIMIT 10000`, + }, + []string{}, + }, + { // [53] + "ids with DateTime64(1)", + `{ + "query": { + "ids": { + "values": ["323032342d31322d32312030373a32393a30332e33qqq11111111111111111111111111111111111111111111111"] + } + }, + "track_total_hits": false + }`, + []string{`"@timestamp" = toDateTime64('2024-12-21 07:29:03.3',1)`}, + model.ListAllFields, + []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE "@timestamp" = toDateTime64('2024-12-21 
07:29:03.3',1) ` + + `LIMIT 10000`, + }, + []string{}, + }, + { // [54] + Name: "range with int as datetime. when all query tests use transformers, expected results should be different", + QueryJson: ` + { + "query": { + "bool": { + "filter": [ + { + "range": { + "tsAsUInt64": { + "format": "strict_date_optional_time", + "gte": "2025-03-25T12:32:51.527Z", + "lte": "2025-03-25T12:47:51.527Z" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + WantedSql: []string{`("tsAsUInt64">='2025-03-25T12:32:51.527Z' AND "tsAsUInt64"<='2025-03-25T12:47:51.527Z')`}, + WantedQueryType: model.ListAllFields, + WantedRegexes: []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE ("tsAsUInt64">=1742905971527 AND "tsAsUInt64"<=1742906871527) ` + + `LIMIT 10`, + }, + }, + { // [55] + Name: "range with int not as datetime. when all query tests use transformers, expected results should be different", + QueryJson: ` + { + "query": { + "bool": { + "filter": [ + { + "range": { + "tsAsUInt64": { + "gte": 15, + "lte": "2025" + } + } + } + ] + } + }, + "track_total_hits": false + }`, + WantedSql: []string{`("tsAsUInt64">=15 AND "tsAsUInt64"<=2025)`}, + WantedQueryType: model.ListAllFields, + WantedRegexes: []string{ + `SELECT "message" ` + + `FROM ` + TableName + ` ` + + `WHERE ("tsAsUInt64">=15 AND "tsAsUInt64"<=1735689600000) ` + + `LIMIT 10`, + }, + }, + { // [56] + "_index term", + `{ + "query": { /*one comment */ + "bool": { + "must": { + "term": { "_index": "Quesma" } + } + } + }, + "track_total_hits": false + }`, + []string{"true"}, + model.ListAllFields, + []string{`SELECT "message" FROM ` + TableName + " WHERE true"}, + []string{}, + }, +} + +var TestSearchRuntimeMappingsAfterTransformations = []SearchTestCase{ + + { // [0] + "Match all - runtime mappings", + ` + { + "fields": [ + "hour_of_day" + ], + "query": { + "match_all": {} + }, + "track_total_hits": false, + "runtime_mappings": { + "hour_of_day": { + "type": "long", + "script": { + "source": "emit(doc['timestamp'].value.getHour());" + } + } + } +}`, + []string{""}, + model.ListAllFields, + ////[]model.Query{newSimplestQuery()}, + []string{ + `SELECT toHour("@timestamp") AS "hour_of_day" FROM ` + TableName + ` LIMIT 10`, + }, + []string{}, + }, +} + +var TestsSearchNoAttrsAfterTransformations = []SearchTestCase{ + { + "Test empty ANDs, ORs and NOTs... 
idk, this test is very old and weird, better write to Krzysiek if it fails for you", + ` + { + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gte": "2024-01-25T13:22:45.968Z", + "lte": "2024-01-25T13:37:45.968Z" + } + } + }, + { + "exists": { + "field": "summary" + } + }, + { + "bool": { + "must_not": { + "exists": { + "field": "run_once" + } + } + } + } + ] + } + }, + "track_total_hits": false + }`, + []string{`((("@timestamp">=fromUnixTimestamp64Milli(1706188965968) AND "@timestamp"<=fromUnixTimestamp64Milli(1706189865968)) AND "summary" IS NOT NULL) AND NOT ("run_once" IS NOT NULL))`}, + model.ListAllFields, + []string{ + `SELECT "@timestamp", "message" FROM __quesma_table_name WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706188965968) AND "@timestamp"<=fromUnixTimestamp64Milli(1706189865968)) AND NULL IS NOT NULL) AND NOT (NULL IS NOT NULL)) LIMIT 10`, + }, + []string{}, + }, +} + +var TestSearchFilterAfterTransformations = []SearchTestCase{ + { // [0] + "Empty filter clause", + `{ + "_source": { + "excludes": [] + }, + "aggs": { + "0": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "30s", + "min_doc_count": 1 + } + } + }, + "fields": [ + { + "field": "@timestamp", + "format": "date_time" + } + ], + "query": { + "bool": { + "filter": [ + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 0, + "track_total_hits": false, + "stored_fields": [ + "*" + ] + }`, + []string{}, + model.Normal, + []string{}, + []string{ + `SELECT toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0" + , count(*) AS "aggr__0__count" + FROM __quesma_table_name + GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS + "aggr__0__key_0" + ORDER BY "aggr__0__key_0" ASC`, + }, + }, + { // [1] + "Filter with now in range", + `{ + "_source": { + "excludes": [] + }, + "aggs": { + "0": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "30s", + "min_doc_count": 1 + } + } + }, + "fields": [ + { + "field": "@timestamp", + "format": "date_time" + } + ], + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "gt": "now-15m" + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 0, + "stored_fields": [ + "*" + ], + "track_total_hits": true + }`, + []string{}, + model.Normal, + []string{}, + []string{ + `SELECT sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", + toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0", + count(*) AS "aggr__0__count" + FROM __quesma_table_name + WHERE "@timestamp">subDate(now(), INTERVAL 15 minute) + GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS + "aggr__0__key_0" + ORDER BY "aggr__0__key_0" ASC`, + }, + }, + { // [2] + "Range with int timestamps", + `{ + "_source": { + "excludes": [] + }, + "aggs": { + "0": { + "date_histogram": { + "field": "@timestamp", + "fixed_interval": "30s", + "min_doc_count": 1 + } + } + }, + "fields": [ + { + "field": "@timestamp", + "format": "date_time" + } + ], + "query": { + "bool": { + "filter": [ + { + "range": { + "@timestamp": { + "format": "epoch_millis||strict_date_optional_time", + "gte": 1727858503270, + "lte": 1727859403270 + } + } + } + ], + "must": [], + "must_not": [], + "should": [] + } + }, + "runtime_mappings": {}, + "script_fields": {}, + "size": 0, + "stored_fields": [ + "*" + ], + "track_total_hits": true + }`, + []string{}, + 
model.Normal, + []string{}, + []string{ + `SELECT sum(count(*)) OVER () AS "metric____quesma_total_count_col_0", + toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0", + count(*) AS "aggr__0__count" + FROM __quesma_table_name + WHERE ("@timestamp">=fromUnixTimestamp64Milli(1727858503270) AND "@timestamp"<=fromUnixTimestamp64Milli(1727859403270)) + GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS + "aggr__0__key_0" + ORDER BY "aggr__0__key_0" ASC`, + }, + }, + { // [3] + "Empty filter", + ` + { + "query": { + "bool": { + "filter": {} + } + }, + "track_total_hits": false + }`, + []string{}, + model.Normal, + []string{}, + []string{`SELECT "@timestamp", "message" FROM __quesma_table_name LIMIT 10`}, + }, + { // [4] + "Empty filter with other clauses", + ` + { + "query": { + "bool" : { + "must" : { + "term" : { "user.id" : "kimchy" } + }, + "filter": {}, + "must_not" : { + "range" : { + "age" : { "gte" : 10, "lte" : 20 } + } + }, + "should" : [ + { "term" : { "tags" : "env1" } }, + { "term" : { "tags" : "deployed" } } + ], + "minimum_should_match" : 1, + "boost" : 1.0 + } + }, + "track_total_hits": false + }`, + []string{ + `("user.id"='kimchy' AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age"<=20 AND "age">=10)`, + `("user.id"='kimchy' AND ("tags"='env1' OR "tags"='deployed')) AND NOT ("age">=10 AND "age"<=20)`, + }, + model.Normal, + []string{ + `SELECT "@timestamp", "message" ` + + `FROM ` + TableName + ` ` + + `WHERE (("attributes_values"['user.id']='kimchy' AND ("attributes_values"['tags']='env1' OR "attributes_values"['tags']='deployed')) ` + + `AND NOT (("attributes_values"['age']>=10 AND "attributes_values"['age']<=20))) ` + + `LIMIT 10`, + }, + []string{}, + }, +} diff --git a/platform/testdata/requests.go b/platform/testdata/requests.go index 74de55573..f8a5fb775 100644 --- a/platform/testdata/requests.go +++ b/platform/testdata/requests.go @@ -156,8 +156,8 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ FROM ( SELECT "host_name" FROM __quesma_table_name - WHERE (("@timestamp">=fromUnixTimestamp64Milli(1706009236820) AND "@timestamp" - <=fromUnixTimestamp64Milli(1706010136820)) AND "message" iLIKE '%user%') + WHERE (("@timestamp">=__quesma_from_unixtime64mili(1706009236820) AND "@timestamp" + <=__quesma_from_unixtime64mili(1706010136820)) AND "message" iLIKE '%user%') LIMIT 20000) GROUP BY "host_name" AS "aggr__sample__top_values__key_0" ORDER BY "aggr__sample__top_values__count" DESC, @@ -306,13 +306,13 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ []string{ `SELECT "message" FROM __quesma_table_name - WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481)) + WHERE ((("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481)) AND "message" iLIKE '%user%') AND "message" IS NOT NULL) ORDER BY "@timestamp" DESC LIMIT 100`, `SELECT count(*) AS "column_0" FROM __quesma_table_name - WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481)) + WHERE ((("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481)) AND "message" iLIKE '%user%') AND "message" IS NOT NULL)`, }, false, @@ -557,7 +557,7 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ []string{` SELECT "@timestamp", "event_dataset", "host_name", "message", "properties_isreg" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' 
AND ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481))) ORDER BY "@timestamp" DESC LIMIT 500`, }, @@ -697,12 +697,12 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481))) GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, `SELECT "@timestamp" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1706020999481) AND "@timestamp"<=fromUnixTimestamp64Milli(1706021899481))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1706020999481) AND "@timestamp"<=__quesma_from_unixtime64mili(1706021899481))) LIMIT 100`, }, true, @@ -761,7 +761,7 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0", count(*) AS "aggr__stats__series__count" FROM __quesma_table_name - WHERE ("@timestamp">fromUnixTimestamp64Milli(1706194439033) AND "@timestamp"<=fromUnixTimestamp64Milli(1706195339033)) + WHERE ("@timestamp">__quesma_from_unixtime64mili(1706194439033) AND "@timestamp"<=__quesma_from_unixtime64mili(1706195339033)) GROUP BY COALESCE("event_dataset", 'unknown') AS "aggr__stats__key_0", toInt64(toUnixTimestamp64Milli("@timestamp") / 60000) AS "aggr__stats__series__key_0")) @@ -941,10 +941,10 @@ var TestsAsyncSearch = []AsyncSearchTestCase{ []string{` SELECT "properties_isreg" FROM __quesma_table_name - WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1710171234276) AND "@timestamp" - <=fromUnixTimestamp64Milli(1710172134276)) AND ("@timestamp">= - fromUnixTimestamp64Milli(1710171234276) AND "@timestamp"<= - fromUnixTimestamp64Milli(1710172134276))) AND "properties_isreg" IS NOT NULL) + WHERE ((("@timestamp">=__quesma_from_unixtime64mili(1710171234276) AND "@timestamp" + <=__quesma_from_unixtime64mili(1710172134276)) AND ("@timestamp">= + __quesma_from_unixtime64mili(1710171234276) AND "@timestamp"<= + __quesma_from_unixtime64mili(1710172134276))) AND "properties_isreg" IS NOT NULL) LIMIT 100`, }, false, @@ -1053,12 +1053,12 @@ var TestsSearch = []SearchTestCase{ "track_total_hits": true }`, []string{ - `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705487298815) AND "@timestamp"<=fromUnixTimestamp64Milli(1705488198815)))`, + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705487298815) AND "@timestamp"<=__quesma_from_unixtime64mili(1705488198815)))`, }, model.ListAllFields, []string{ `SELECT "message" FROM ` + TableName + ` WHERE ("message" iLIKE '%user%' ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1705487298815) AND "@timestamp"<=fromUnixTimestamp64Milli(1705488198815))) ` + + `AND ("@timestamp">=__quesma_from_unixtime64mili(1705487298815) AND "@timestamp"<=__quesma_from_unixtime64mili(1705488198815))) ` + `LIMIT 10`, `SELECT count(*) AS "column_0" 
FROM ` + TableName, }, @@ -1530,8 +1530,8 @@ var TestsSearch = []SearchTestCase{ } `, []string{ - `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, - `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) ` + + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299)))`, + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) ` + `AND "stream.namespace" IS NOT NULL)`, }, model.Normal, @@ -1544,7 +1544,7 @@ var TestsSearch = []SearchTestCase{ "stream_namespace" AS "aggr__suggestions__key_0", count(*) AS "aggr__suggestions__count" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) GROUP BY "stream_namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -1617,9 +1617,9 @@ var TestsSearch = []SearchTestCase{ } `, []string{ - `(("service.name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873))) ` + + `(("service.name"='admin' AND ("@timestamp">=__quesma_from_unixtime64mili(1705934075873) AND "@timestamp"<=__quesma_from_unixtime64mili(1705934975873))) ` + `AND "namespace" IS NOT NULL)`, - `("service.name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873)))`, + `("service.name"='admin' AND ("@timestamp">=__quesma_from_unixtime64mili(1705934075873) AND "@timestamp"<=__quesma_from_unixtime64mili(1705934975873)))`, }, model.Normal, []string{}, @@ -1629,7 +1629,7 @@ var TestsSearch = []SearchTestCase{ "namespace" AS "aggr__suggestions__key_0", count(*) AS "aggr__suggestions__count" FROM __quesma_table_name - WHERE ("service_name"='admin' AND ("@timestamp">=fromUnixTimestamp64Milli(1705934075873) AND "@timestamp"<=fromUnixTimestamp64Milli(1705934975873))) + WHERE ("service_name"='admin' AND ("@timestamp">=__quesma_from_unixtime64mili(1705934075873) AND "@timestamp"<=__quesma_from_unixtime64mili(1705934975873))) GROUP BY "namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -1697,9 +1697,9 @@ var TestsSearch = []SearchTestCase{ }`, []string{ `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491)))`, + `AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND "@timestamp"<=__quesma_from_unixtime64mili(1706551896491)))`, `((("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) ` + + `AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND 
"@timestamp"<=__quesma_from_unixtime64mili(1706551896491))) ` + `AND "stream.namespace" IS NOT NULL)`, }, model.Normal, @@ -1713,7 +1713,7 @@ var TestsSearch = []SearchTestCase{ count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE (("message" ILIKE '%User logged out%' AND "host_name"='poseidon') - AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) + AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND "@timestamp"<=__quesma_from_unixtime64mili(1706551896491))) GROUP BY "stream_namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -1777,9 +1777,9 @@ var TestsSearch = []SearchTestCase{ "timeout": "1000ms" }`, []string{ - `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) ` + + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) ` + `AND "namespace" IS NOT NULL)`, - `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299)))`, }, model.Normal, []string{}, @@ -1789,7 +1789,7 @@ var TestsSearch = []SearchTestCase{ "namespace" AS "aggr__suggestions__key_0", count(*) AS "aggr__suggestions__count" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) GROUP BY "namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -1857,10 +1857,10 @@ var TestsSearch = []SearchTestCase{ }`, []string{ `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) ` + + `AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND "@timestamp"<=__quesma_from_unixtime64mili(1706551896491))) ` + `AND "namespace" IS NOT NULL)`, `(("message" __quesma_match '%User logged out%' AND "host.name" __quesma_match '%poseidon%') ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491)))`, + `AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND "@timestamp"<=__quesma_from_unixtime64mili(1706551896491)))`, }, model.Normal, []string{}, @@ -1871,7 +1871,7 @@ var TestsSearch = []SearchTestCase{ count(*) AS "aggr__suggestions__count" FROM __quesma_table_name WHERE (("message" ILIKE '%User logged out%' AND "host_name"='poseidon') - AND ("@timestamp">=fromUnixTimestamp64Milli(1706542596491) AND "@timestamp"<=fromUnixTimestamp64Milli(1706551896491))) + AND ("@timestamp">=__quesma_from_unixtime64mili(1706542596491) AND "@timestamp"<=__quesma_from_unixtime64mili(1706551896491))) GROUP BY "namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, 
"aggr__suggestions__key_0" ASC LIMIT 11`, @@ -1935,8 +1935,8 @@ var TestsSearch = []SearchTestCase{ "timeout": "1000ms" }`, []string{ - `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) AND "namespace" IS NOT NULL)`, - `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299)))`, + `((` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) AND "namespace" IS NOT NULL)`, + `(` + fullTextFieldName + ` iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299)))`, }, model.Normal, []string{}, @@ -1946,7 +1946,7 @@ var TestsSearch = []SearchTestCase{ "namespace" AS "aggr__suggestions__key_0", count(*) AS "aggr__suggestions__count" FROM __quesma_table_name - WHERE ("message" iLIKE '%user%' AND ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp"<=fromUnixTimestamp64Milli(1705916470299))) + WHERE ("message" iLIKE '%user%' AND ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp"<=__quesma_from_unixtime64mili(1705916470299))) GROUP BY "namespace" AS "aggr__suggestions__key_0" ORDER BY "aggr__suggestions__count" DESC, "aggr__suggestions__key_0" ASC LIMIT 11`, @@ -2157,12 +2157,12 @@ var TestsSearch = []SearchTestCase{ "track_total_hits": false }`, []string{ - `("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3))`, + `("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3))`, }, model.ListAllFields, // TestSearchHandler is pretty blunt with config loading so the test below can't be used. 
// We will probably refactor it as we move forwards with schema which will get even more side-effecting - []string{`SELECT "message" FROM ` + TableName + ` WHERE ("@timestamp">=fromUnixTimestamp64Milli(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3)) LIMIT 10`}, + []string{`SELECT "message" FROM ` + TableName + ` WHERE ("@timestamp">=__quesma_from_unixtime64mili(1705915570299) AND "@timestamp" = toDateTime64('2024-05-24 13:32:47.307',3)) LIMIT 10`}, []string{}, }, { // [35] Comments in queries @@ -2211,14 +2211,14 @@ var TestsSearch = []SearchTestCase{ }, "track_total_hits": false }`, - []string{`("cliIP" IN tuple('2601:204:c503:c240:9c41:5531:ad94:4d90', '50.116.43.98', '75.246.0.64') AND ("@timestamp">=fromUnixTimestamp64Milli(1715817600000) AND "@timestamp"<=fromUnixTimestamp64Milli(1715990399000)))`}, + []string{`("cliIP" IN tuple('2601:204:c503:c240:9c41:5531:ad94:4d90', '50.116.43.98', '75.246.0.64') AND ("@timestamp">=__quesma_from_unixtime64mili(1715817600000) AND "@timestamp"<=__quesma_from_unixtime64mili(1715990399000)))`}, model.ListAllFields, //[]model.Query{withLimit(justSimplestWhere(`("cliIP" IN ('2601:204:c503:c240:9c41:5531:ad94:4d90','50.116.43.98','75.246.0.64') AND ("@timestamp">=parseDateTime64BestEffort('2024-05-16T00:00:00') AND "@timestamp"<=parseDateTime64BestEffort('2024-05-17T23:59:59')))`), 1)}, []string{ `SELECT "message" ` + `FROM ` + TableName + ` ` + `WHERE ("cliIP" IN tuple('2601:204:c503:c240:9c41:5531:ad94:4d90', '50.116.43.98', '75.246.0.64') ` + - `AND ("@timestamp">=fromUnixTimestamp64Milli(1715817600000) AND "@timestamp"<=fromUnixTimestamp64Milli(1715990399000))) ` + + `AND ("@timestamp">=__quesma_from_unixtime64mili(1715817600000) AND "@timestamp"<=__quesma_from_unixtime64mili(1715990399000))) ` + `LIMIT 1`, }, []string{}, @@ -2755,10 +2755,10 @@ var TestsSearchNoAttrs = []SearchTestCase{ }, "track_total_hits": false }`, - []string{`((("@timestamp">=fromUnixTimestamp64Milli(1706188965968) AND "@timestamp"<=fromUnixTimestamp64Milli(1706189865968)) AND "summary" IS NOT NULL) AND NOT ("run_once" IS NOT NULL))`}, + []string{`((("@timestamp">=__quesma_from_unixtime64mili(1706188965968) AND "@timestamp"<=__quesma_from_unixtime64mili(1706189865968)) AND "summary" IS NOT NULL) AND NOT ("run_once" IS NOT NULL))`}, model.ListAllFields, []string{ - `SELECT "@timestamp", "message" FROM __quesma_table_name WHERE ((("@timestamp">=fromUnixTimestamp64Milli(1706188965968) AND "@timestamp"<=fromUnixTimestamp64Milli(1706189865968)) AND NULL IS NOT NULL) AND NOT (NULL IS NOT NULL)) LIMIT 10`, + `SELECT "@timestamp", "message" FROM __quesma_table_name WHERE ((("@timestamp">=__quesma_from_unixtime64mili(1706188965968) AND "@timestamp"<=__quesma_from_unixtime64mili(1706189865968)) AND NULL IS NOT NULL) AND NOT (NULL IS NOT NULL)) LIMIT 10`, }, []string{}, }, @@ -2929,7 +2929,7 @@ var TestSearchFilter = []SearchTestCase{ toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0", count(*) AS "aggr__0__count" FROM __quesma_table_name - WHERE ("@timestamp">=fromUnixTimestamp64Milli(1727858503270) AND "@timestamp"<=fromUnixTimestamp64Milli(1727859403270)) + WHERE ("@timestamp">=__quesma_from_unixtime64mili(1727858503270) AND "@timestamp"<=__quesma_from_unixtime64mili(1727859403270)) GROUP BY toInt64(toUnixTimestamp64Milli("@timestamp") / 30000) AS "aggr__0__key_0" ORDER BY "aggr__0__key_0" ASC`, diff --git a/platform/util/utils.go b/platform/util/utils.go index 464a6feb4..0aba6ef58 100644 --- a/platform/util/utils.go +++ 
b/platform/util/utils.go @@ -22,6 +22,7 @@ import ( "strings" "sync" "testing" + "unicode" ) type JsonMap = map[string]interface{} @@ -1055,3 +1056,14 @@ func PrintfIfErr(err error, msg string, args ...any) { fmt.Println(err) } } + +// RemoveAllWhitespaces returns a copy of s with every Unicode whitespace rune (as reported by unicode.IsSpace) removed. +func RemoveAllWhitespaces(s string) string { + result := make([]rune, 0, len(s)) + for _, r := range s { + if !unicode.IsSpace(r) { + result = append(result, r) + } + } + return string(result) +} diff --git a/platform/v2/core/backend_connectors.go b/platform/v2/core/backend_connectors.go index c76d2eac6..3c2f1628f 100644 --- a/platform/v2/core/backend_connectors.go +++ b/platform/v2/core/backend_connectors.go @@ -7,7 +7,7 @@ import "context" type BackendConnectorType int const ( - NoopBackend = iota + NoopBackend BackendConnectorType = iota MySQLBackend PgSQLBackend ClickHouseSQLBackend
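Aside (not part of the patch): a minimal, self-contained sketch of how the new RemoveAllWhitespaces helper could be used, assuming its intended purpose is whitespace-insensitive comparison of expected versus generated SQL in tests like those above. The standalone removeAllWhitespaces copy, the example SQL strings, and the literal values below are illustrative only and do not appear in the diff.

package main

import (
	"fmt"
	"unicode"
)

// removeAllWhitespaces mirrors util.RemoveAllWhitespaces from the hunk above:
// it drops every rune for which unicode.IsSpace reports true.
func removeAllWhitespaces(s string) string {
	result := make([]rune, 0, len(s))
	for _, r := range s {
		if !unicode.IsSpace(r) {
			result = append(result, r)
		}
	}
	return string(result)
}

func main() {
	// Multi-line expected SQL, formatted for readability (illustrative values).
	expected := `SELECT "message"
	  FROM __quesma_table_name
	  WHERE "@timestamp">=__quesma_from_unixtime64mili(1705915570299)`

	// Single-line SQL as a query builder might emit it.
	actual := `SELECT "message" FROM __quesma_table_name WHERE "@timestamp">=__quesma_from_unixtime64mili(1705915570299)`

	// Whitespace-insensitive equality check.
	fmt.Println(removeAllWhitespaces(expected) == removeAllWhitespaces(actual)) // prints: true
}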