Skip to content
Merged
1 change: 1 addition & 0 deletions runtime/drivers/registry.go
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,7 @@ func (i *Instance) Config() (InstanceConfig, error) {
MetricsApproximateComparisonsCTE: false,
MetricsApproxComparisonTwoPhaseLimit: 250,
MetricsExactifyDruidTopN: false,
MetricsNullFillingImplementation: "pushdown",
AlertsDefaultStreamingRefreshCron: "0 0 * * *", // Every 24 hours
AlertsFastStreamingRefreshCron: "*/10 * * * *", // Every 10 minutes
}
Expand Down
4 changes: 4 additions & 0 deletions runtime/metricsview/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -1069,6 +1069,10 @@ func (a *AST) buildSpineSelect(alias string, spine *Spine, tr *TimeRange) (*Sele
return nil, nil
}

if spine.Where != nil && spine.TimeRange != nil {
return nil, errors.New("spine cannot have both 'where' and 'time_range'")
}

if spine.Where != nil {
// Using buildWhereForUnderlyingTable to include security filters.
// Note that buildWhereForUnderlyingTable handles nil expressions gracefully.
Expand Down
98 changes: 52 additions & 46 deletions runtime/queries/metricsview_timeseries_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -169,10 +169,8 @@ func TestMetricsViewsTimeseries_quarter_grain_IST(t *testing.T) {
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 6)
require.Len(t, rows, 5)
i := 0
require.Equal(t, parseTime(t, "2022-10-31T18:30:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2022-12-31T18:30:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-03-31T18:30:00Z").AsTime(), rows[i].Ts.AsTime())
Expand Down Expand Up @@ -348,7 +346,7 @@ func TestMetricsViewTimeSeries_DayLightSavingsBackwards_Continuous_Second(t *tes
rows := q.Result.Data
require.Len(t, rows, 1)
i := 0
require.Equal(t, parseTime(t, "2023-11-05T05:00:00Z").AsTime(), rows[i].Ts.AsTime())
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What's the reasoning behind this test case change? If the time range is:

start=2023-11-05T05:00:00.000Z
end=2023-11-05T05:00:01.000Z

then you'd expect it to return a timestamp that is inside the time range in UTC?

Do I have this mapping right?

  • 4:00 UTC is 0:00 in New York
  • 5:00 UTC is 1:00 in New York
  • 6:00 UTC is 1:00 in New York
  • 7:00 UTC is 2:00 in New York

So asking for the time between 5:00 and 5:01 UTC should still give 5:00 UTC, not 6:00 UTC? However if it was asking for the time between 1:00 and 1:01 New York, then it would be acceptable to return 6:00 UTC (although ideally, it should return two rows, both 5:00 UTC and 6:00 UTC).

Copy link
Member Author

@pjain1 pjain1 Dec 29, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The timezone is America/New_York in the test case, and duckdb seems to be resolving 1:00 America/New_York as 6:00 UTC instead of 5:00.

Copy link
Member Author

@pjain1 pjain1 Dec 29, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Actually it relates to this change as well on line 435 below.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I agree, it does look odd. We can try rewriting the range query for duckdb, but I'm not sure if it would work. The last option is to always create a manual inline query like we do for the other OLAPs.

Copy link
Member Author

@pjain1 pjain1 Dec 29, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let me know your view on this: should we try changing it further, fall back to the OLAP approach if the change does not work, or just keep it this way?

Copy link
Member Author

@pjain1 pjain1 Dec 29, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, there were cases that were not taken care of when passing UTC values; I will need to revisit it.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It does not seem to be working for this simple case

SELECT timezone('America/Los_Angeles', range) AS "timestamp" FROM range('2024-03-10T08:00:00Z'::TIMESTAMP, '2024-03-13T07:00:00Z'::TIMESTAMP, INTERVAL '1 DAY');
┌──────────────────────────┐
│        timestamp         │
│ timestamp with time zone │
├──────────────────────────┤
│ 2024-03-10 15:00:00+00   │
│ 2024-03-11 15:00:00+00   │
│ 2024-03-12 15:00:00+00   │
└──────────────────────────┘

expected 2024-03-10 08:00:00+00, 2024-03-11 07:00:00+00 and 2024-03-12 07:00:00+00. Also tried

SELECT range AT TIME ZONE 'America/New_York' AS "timestamp" FROM range('2023-11-05T05:00:00Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', '2023-11-05T05:01:00Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', INTERVAL '1 MINUTE');
┌──────────────────────────┐
│        timestamp         │
│ timestamp with time zone │
├──────────────────────────┤
│ 2023-11-05 06:00:00+00   │
└──────────────────────────┘

So it's a duckdb thing where it interprets 01:00 as 06:00:

D SELECT range AT TIME ZONE 'America/New_York' AS "timestamp" FROM range('2023-11-05T05:00:00Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', '2023-11-05T05:00:01Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', INTERVAL '1 SECOND');
┌──────────────────────────┐
│        timestamp         │
│ timestamp with time zone │
├──────────────────────────┤
│ 2023-11-05 06:00:00+00   │
└──────────────────────────┘
D SELECT range  AS "timestamp" FROM range('2023-11-05T05:00:00Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', '2023-11-05T05:00:01Z'::TIMESTAMPTZ AT TIME ZONE 'America/New_York', INTERVAL '1 SECOND');
┌─────────────────────┐
│      timestamp      │
│      timestamp      │
├─────────────────────┤
│ 2023-11-05 01:00:00 │
└─────────────────────┘

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In your assessment, how does this impact the correctness of the output charts/values? Our desired behavior is to have a bin for every hour, so ideally on DST days we'd output either 23 or 25 bins (in the case of 25, there would be two bins with the same label after each other).

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I checked and saw that there would now be 23 or 25 bins depending on whether time goes backwards or forwards. Earlier there were always 24 bins. I don't see any broken chart around the time change because of it.

Copy link
Member Author

@pjain1 pjain1 Jan 5, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

However, I do now see that the chart skips the time bin at which time goes backwards in November, which is consistent with the data sent (it does not break the chart, though). In the case of time going forwards in March, the chart skips the time bin in both the new and the pushdown implementations, so that is a UI quirk.

require.Equal(t, parseTime(t, "2023-11-05T06:00:00Z").AsTime(), rows[i].Ts.AsTime())

q = &queries.MetricsViewTimeSeries{
MeasureNames: []string{"total_records"},
Expand Down Expand Up @@ -388,7 +386,7 @@ func TestMetricsViewTimeSeries_DayLightSavingsBackwards_Continuous_Minute(t *tes
rows := q.Result.Data
require.Len(t, rows, 1)
i := 0
require.Equal(t, parseTime(t, "2023-11-05T05:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Equal(t, parseTime(t, "2023-11-05T06:00:00Z").AsTime(), rows[i].Ts.AsTime())

q = &queries.MetricsViewTimeSeries{
MeasureNames: []string{"total_records"},
Expand Down Expand Up @@ -426,14 +424,13 @@ func TestMetricsViewTimeSeries_DayLightSavingsBackwards_Continuous_Hourly(t *tes
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 5)
require.Len(t, rows, 4)
i := 0
require.Equal(t, parseTime(t, "2023-11-05T03:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-11-05T04:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-11-05T05:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
// no 05:00 hour since 04:00 to 05:00 UTC are same because of DST fall back
require.Equal(t, parseTime(t, "2023-11-05T06:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-11-05T07:00:00Z").AsTime(), rows[i].Ts.AsTime())
Expand All @@ -460,17 +457,15 @@ func TestMetricsViewTimeSeries_DayLightSavingsBackwards_Sparse_Hourly(t *testing
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 5)
require.Len(t, rows, 4)
i := 0
require.Equal(t, parseTime(t, "2023-11-05T03:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
require.Equal(t, parseTime(t, "2023-11-05T04:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
require.Equal(t, parseTime(t, "2023-11-05T05:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
// no 05:00 hour since 04:00 to 05:00 UTC are same because of DST fall back
require.Equal(t, parseTime(t, "2023-11-05T06:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
Expand Down Expand Up @@ -587,7 +582,7 @@ func TestMetricsViewTimeSeries_DayLightSavingsForwards_Continuous_Hourly(t *test
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 5)
require.Len(t, rows, 6)
i := 0
require.Equal(t, parseTime(t, "2023-03-12T04:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
Expand All @@ -597,6 +592,8 @@ func TestMetricsViewTimeSeries_DayLightSavingsForwards_Continuous_Hourly(t *test
i++
require.Equal(t, parseTime(t, "2023-03-12T07:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-03-12T07:00:00Z").AsTime(), rows[i].Ts.AsTime())
i++
require.Equal(t, parseTime(t, "2023-03-12T08:00:00Z").AsTime(), rows[i].Ts.AsTime())
}

Expand All @@ -621,7 +618,7 @@ func TestMetricsViewTimeSeries_DayLightSavingsForwards_Sparse_Hourly(t *testing.
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 5)
require.Len(t, rows, 6)
i := 0
require.Equal(t, parseTime(t, "2023-03-12T04:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["total_records"])
Expand All @@ -635,6 +632,9 @@ func TestMetricsViewTimeSeries_DayLightSavingsForwards_Sparse_Hourly(t *testing.
require.Equal(t, parseTime(t, "2023-03-12T07:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
require.Equal(t, parseTime(t, "2023-03-12T07:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["total_records"])
i++
require.Equal(t, parseTime(t, "2023-03-12T08:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["total_records"])
}
Expand Down Expand Up @@ -674,23 +674,11 @@ func TestMetricsViewTimeSeries_having_clause(t *testing.T) {
require.NoError(t, err)
require.NotEmpty(t, q.Result)
rows := q.Result.Data
require.Len(t, rows, 6)
require.Len(t, rows, 2)
i := 0
require.Equal(t, parseTime(t, "2019-01-01T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
i++
require.Equal(t, parseTime(t, "2019-01-02T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
i++
require.Equal(t, parseTime(t, "2019-01-03T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
i++
require.Equal(t, parseTime(t, "2019-01-04T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
i++
require.Equal(t, parseTime(t, "2019-01-05T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.Nil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
i++
require.Equal(t, parseTime(t, "2019-01-06T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["sum_imps"])
}
Expand Down Expand Up @@ -738,33 +726,51 @@ func TestMetricsTimeseries_measure_filters_same_name(t *testing.T) {
err = q.Resolve(context.Background(), rt, instanceID, 0)
require.NoError(t, err)
require.NotEmpty(t, q.Result)
outputResult(q.Result.Meta, q.Result.Data)
rows := q.Result.Data
require.Len(t, rows, 13)
i := 0
require.Equal(t, "null", fieldsToString(q.Result.Data[i].Records, "bid_price"))
require.Equal(t, parseTime(t, "2022-01-03T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, "null", fieldsToString(q.Result.Data[i].Records, "bid_price"))
require.Equal(t, parseTime(t, "2022-01-04T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, "3", fieldsToString(q.Result.Data[i].Records, "bid_price"))
require.Equal(t, parseTime(t, "2022-01-06T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, "3", fieldsToString(q.Result.Data[i].Records, "bid_price"))

require.Equal(t, parseTime(t, "2022-01-07T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-08T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-09T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-11T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-12T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-13T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-15T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-18T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-21T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
i++
require.Equal(t, parseTime(t, "2022-01-23T00:00:00Z").AsTime(), rows[i].Ts.AsTime())
require.NotNil(t, q.Result.Data[i].Records.AsMap()["bid_price"])
}

func toStructpbValue(t *testing.T, v any) *structpb.Value {
sv, err := structpb.NewValue(v)
require.NoError(t, err)
return sv
}

func outputResult(schema []*runtimev1.MetricsViewColumn, data []*runtimev1.TimeSeriesValue) {
for _, s := range schema {
fmt.Printf("%v,", s.Name)
}
fmt.Println()
for i, row := range data {
for _, s := range schema {
fmt.Printf("%s %v,", row.Ts.AsTime().Format(time.RFC3339), row.Records.Fields[s.Name].AsInterface())
}
fmt.Printf(" %d \n", i)
}
}
Loading