
Commit 2650669

Merge branch 'main' into feat/issues/query/final/testing
2 parents: acdb2da + 6a62962

File tree

7 files changed (+2167 −30 lines)


pkg/types/querybuildertypes/querybuildertypesv5/builder_elements.go

Lines changed: 173 additions & 7 deletions
@@ -2,6 +2,8 @@ package querybuildertypesv5
 
 import (
 	"encoding/json"
+	"math"
+	"slices"
 	"time"
 
 	"github.com/SigNoz/signoz/pkg/errors"
@@ -135,6 +137,168 @@ var (
 	ReduceToMedian = ReduceTo{valuer.NewString("median")}
 )
 
+// FunctionReduceTo applies the reduceTo operator to a time series and returns a new series with the reduced value
+// reduceTo can be one of: last, sum, avg, min, max, count, median
+// if reduceTo is not recognized, the function returns the original series
+func FunctionReduceTo(result *TimeSeries, reduceTo ReduceTo) *TimeSeries {
+	if len(result.Values) == 0 {
+		return result
+	}
+
+	var reducedValue float64
+	var reducedTimestamp int64
+
+	switch reduceTo {
+	case ReduceToLast:
+		// Take the last point's value and timestamp
+		lastPoint := result.Values[len(result.Values)-1]
+		reducedValue = lastPoint.Value
+		reducedTimestamp = lastPoint.Timestamp
+
+	case ReduceToSum:
+		// Sum all values, use last timestamp
+		var sum float64
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) {
+				sum += point.Value
+			}
+		}
+		reducedValue = sum
+		reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+
+	case ReduceToAvg:
+		// Calculate average of all values, use last timestamp
+		var sum float64
+		var count int
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) {
+				sum += point.Value
+				count++
+			}
+		}
+		if count > 0 {
+			reducedValue = sum / float64(count)
+		} else {
+			reducedValue = math.NaN()
+		}
+		reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+
+	case ReduceToMin:
+		// Find minimum value, use its timestamp
+		var min float64 = math.Inf(1)
+		var minTimestamp int64
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) && point.Value < min {
+				min = point.Value
+				minTimestamp = point.Timestamp
+			}
+		}
+		if math.IsInf(min, 1) {
+			reducedValue = math.NaN()
+			reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+		} else {
+			reducedValue = min
+			reducedTimestamp = minTimestamp
+		}
+
+	case ReduceToMax:
+		// Find maximum value, use its timestamp
+		var max float64 = math.Inf(-1)
+		var maxTimestamp int64
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) && point.Value > max {
+				max = point.Value
+				maxTimestamp = point.Timestamp
+			}
+		}
+		if math.IsInf(max, -1) {
+			reducedValue = math.NaN()
+			reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+		} else {
+			reducedValue = max
+			reducedTimestamp = maxTimestamp
+		}
+
+	case ReduceToCount:
+		// Count non-NaN values, use last timestamp
+		var count float64
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) {
+				count++
+			}
+		}
+		reducedValue = count
+		reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+
+	case ReduceToMedian:
+		// Calculate median of all non-NaN values
+		// maintain pair of value and timestamp and sort by value
+		var values []struct {
+			Value     float64
+			Timestamp int64
+		}
+		for _, point := range result.Values {
+			if !math.IsNaN(point.Value) {
+				values = append(values, struct {
+					Value     float64
+					Timestamp int64
+				}{
+					Value:     point.Value,
+					Timestamp: point.Timestamp,
+				})
+			}
+		}
+
+		if len(values) == 0 {
+			reducedValue = math.NaN()
+			reducedTimestamp = result.Values[len(result.Values)-1].Timestamp
+		} else {
+			slices.SortFunc(values, func(i, j struct {
+				Value     float64
+				Timestamp int64
+			}) int {
+				if i.Value < j.Value {
+					return -1
+				}
+				if i.Value > j.Value {
+					return 1
+				}
+				return 0
+			})
+
+			if len(values)%2 == 0 {
+				// Even number of values - average of middle two
+				mid := len(values) / 2
+				reducedValue = (values[mid-1].Value + values[mid].Value) / 2
+				reducedTimestamp = (values[mid-1].Timestamp + values[mid].Timestamp) / 2
+			} else {
+				// Odd number of values - middle value
+				reducedValue = values[len(values)/2].Value
+				reducedTimestamp = values[len(values)/2].Timestamp
+			}
+		}
+
+	case ReduceToUnknown:
+		fallthrough
+	default:
+		// No reduction, return original series
+		return result
+	}
+
+	// Create new TimeSeries with single reduced point
+	reducedSeries := &TimeSeries{
+		Labels: result.Labels, // Preserve original labels
+		Values: []*TimeSeriesValue{
+			{
+				Timestamp: reducedTimestamp,
+				Value:     reducedValue,
+			},
+		},
+	}
+
+	return reducedSeries
+}
+
 type TraceAggregation struct {
 	// aggregation expression - example: count(), sum(item_price), countIf(day > 10)
 	Expression string `json:"expression"`
@@ -205,17 +369,19 @@ type SecondaryAggregation struct {
 	LimitBy LimitBy `json:"limitBy,omitempty"`
 }
 
+type FunctionArg struct {
+	// name of the argument
+	Name string `json:"name,omitempty"`
+	// value of the argument
+	Value string `json:"value"`
+}
+
 type Function struct {
 	// name of the function
-	Name string `json:"name"`
+	Name FunctionName `json:"name"`
 
 	// args is the arguments to the function
-	Args []struct {
-		// name of the argument
-		Name string `json:"name,omitempty"`
-		// value of the argument
-		Value string `json:"value"`
-	} `json:"args,omitempty"`
+	Args []FunctionArg `json:"args,omitempty"`
 }
 
 type LimitBy struct {