console: use templates to build SQL query

This is needed if we want to be able to mix the use of several tables
inside a single query (for example, flows_1m0s for one part of the
query and flows_5m0s for another part to overlay historical data).

Also, the way we handle time buckets is now cleaner. The previous way
had two stages of rounding and was incorrect. We were discarding the
first and last value for this reason. The new way only has one stage
of rounding and is correct. It tries hard to align the buckets at the
specified start time. We don't need to discard these values anymore.
We still discard the last one because it could be incomplete (when end
is "now").
This commit is contained in:
Vincent Bernat
2022-08-09 11:40:51 +02:00
parent 61b3baa406
commit 67703cc61e
11 changed files with 535 additions and 344 deletions

View File

@@ -4,16 +4,15 @@
package console package console
import ( import (
"bytes"
"encoding/json"
"errors" "errors"
"fmt" "fmt"
"regexp"
"strconv"
"strings" "strings"
"text/template"
"time" "time"
) )
// resolutionRegexp matches `{resolution->N}` placeholders in a query,
// where N is a number of seconds to be rounded down to a multiple of
// the selected table's resolution.
var resolutionRegexp = regexp.MustCompile(`{resolution->(\d+)}`)
// flowsTable describe a consolidated or unconsolidated flows table. // flowsTable describe a consolidated or unconsolidated flows table.
type flowsTable struct { type flowsTable struct {
Name string Name string
@@ -21,95 +20,6 @@ type flowsTable struct {
Oldest time.Time Oldest time.Time
} }
// Build a query against the flows table or one of the consolidated
// versions depending on the information needed. The provided query
// should contain `{table}` which will be replaced by the appropriate
// flows table and {timefilter} which will be replaced by the
// appropriate time filter. `{timefilter.Start}`, `{timefilter.Stop}`,
// `{resolution}` and `{resolution->N}` are substituted as well.
func (c *Component) queryFlowsTable(query string, mainTableRequired bool, start, end time.Time, targetResolution time.Duration) string {
	c.flowsTablesLock.RLock()
	defer c.flowsTablesLock.RUnlock()
	// Select table: default to the live "flows" table at 1-second resolution.
	table := "flows"
	resolution := time.Second
	if !mainTableRequired {
		// We can use the consolidated data. The first
		// criteria is to find the tables matching the time
		// criteria.
		candidates := []int{}
		for idx, table := range c.flowsTables {
			if start.After(table.Oldest.Add(table.Resolution)) {
				candidates = append(candidates, idx)
			}
		}
		if len(candidates) == 0 {
			// No candidate, fallback to the one with oldest data
			best := 0
			for idx, table := range c.flowsTables {
				if c.flowsTables[best].Oldest.After(table.Oldest.Add(table.Resolution)) {
					best = idx
				}
			}
			candidates = []int{best}
			// Add other candidates that are not far off in terms of oldest data
			for idx, table := range c.flowsTables {
				if idx == best {
					continue
				}
				if c.flowsTables[best].Oldest.After(table.Oldest) {
					candidates = append(candidates, idx)
				}
			}
		}
		if len(candidates) > 1 {
			// Use resolution to find the best one: the highest
			// resolution not exceeding the target.
			// NOTE(review): best starts at index 0 even when 0 is not
			// in candidates; this appears to rely on the live table
			// being first in c.flowsTables — confirm.
			best := 0
			for _, idx := range candidates {
				if c.flowsTables[idx].Resolution > targetResolution {
					continue
				}
				if c.flowsTables[idx].Resolution > c.flowsTables[best].Resolution {
					best = idx
				}
			}
			candidates = []int{best}
		}
		table = c.flowsTables[candidates[0]].Name
		resolution = c.flowsTables[candidates[0]].Resolution
	}
	if resolution == 0 {
		// The live table reports a zero resolution; use 1 second.
		resolution = time.Second
	}
	// Build timefilter to match the resolution: round start/end down
	// to a multiple of the table resolution so buckets are aligned.
	start = start.Truncate(resolution)
	end = end.Truncate(resolution)
	timeFilterStart := fmt.Sprintf(`toDateTime('%s', 'UTC')`, start.UTC().Format("2006-01-02 15:04:05"))
	timeFilterStop := fmt.Sprintf(`toDateTime('%s', 'UTC')`, end.UTC().Format("2006-01-02 15:04:05"))
	timeFilter := fmt.Sprintf(`TimeReceived BETWEEN %s AND %s`, timeFilterStart, timeFilterStop)
	c.metrics.clickhouseQueries.WithLabelValues(table).Inc()
	// Substitute the placeholders understood by callers.
	query = strings.ReplaceAll(query, "{timefilter}", timeFilter)
	query = strings.ReplaceAll(query, "{timefilter.Start}", timeFilterStart)
	query = strings.ReplaceAll(query, "{timefilter.Stop}", timeFilterStop)
	query = strings.ReplaceAll(query, "{table}", table)
	query = strings.ReplaceAll(query, "{resolution}", strconv.Itoa(int(resolution.Seconds())))
	// `{resolution->N}` asks for N seconds rounded down to a multiple
	// of the selected resolution (minimum 1 second).
	query = resolutionRegexp.ReplaceAllStringFunc(query, func(in string) string {
		matches := resolutionRegexp.FindStringSubmatch(in)
		target, err := strconv.Atoi(matches[1])
		if err != nil {
			// Cannot happen: the regexp only matches digits.
			panic(err)
		}
		target = target / int(resolution.Seconds()) * int(resolution.Seconds())
		if target < 1 {
			target = 1
		}
		return strconv.Itoa(target)
	})
	return query
}
// refreshFlowsTables refreshes the information we have about flows // refreshFlowsTables refreshes the information we have about flows
// tables (live one and consolidated ones). This information includes // tables (live one and consolidated ones). This information includes
// the consolidation interval and the oldest available data. // the consolidation interval and the oldest available data.
@@ -166,3 +76,184 @@ AND engine LIKE '%MergeTree'
c.flowsTablesLock.Unlock() c.flowsTablesLock.Unlock()
return nil return nil
} }
// finalizeQuery builds the finalized query. A single "context"
// function is provided to return a `Context` struct with all the
// information needed.
func (c *Component) finalizeQuery(query string) string {
	funcs := template.FuncMap{"context": c.contextFunc}
	tpl := template.New("query").Funcs(funcs).Option("missingkey=error")
	tpl = template.Must(tpl.Parse(strings.TrimSpace(query)))
	var out bytes.Buffer
	err := tpl.Execute(&out, nil)
	if err != nil {
		c.r.Err(err).Str("query", query).Msg("invalid query")
		panic(err)
	}
	return out.String()
}
// inputContext is the JSON-serializable input of the template
// "context" function. It is produced by templateContext() and
// decoded again by contextFunc() when the template is executed.
type inputContext struct {
	Start             time.Time `json:"start"`                          // start of the requested time range
	End               time.Time `json:"end"`                            // end of the requested time range
	MainTableRequired bool      `json:"main-table-required,omitempty"`  // when true, never use consolidated tables
	Points            uint      `json:"points"`                         // requested number of points
	Units             string    `json:"units,omitempty"`                // "pps", "l3bps" or "l2bps"
}
// context is the data made available to query templates once the
// appropriate flows table has been selected by contextFunc().
type context struct {
	Table             string              // selected ClickHouse table name
	Timefilter        string              // full "TimeReceived BETWEEN ... AND ..." clause
	TimefilterStart   string              // "toDateTime(...)" expression for the start bound
	TimefilterEnd     string              // "toDateTime(...)" expression for the end bound
	Units             string              // aggregation expression matching the requested units
	Interval          uint64              // bucket width, in seconds
	ToStartOfInterval func(string) string // wraps a field into an offset-corrected toStartOfInterval()
}
// templateEscape escapes `{{` and `}}` from a string. In fact, only
// the opening tag needs to be escaped.
func templateEscape(input string) string {
	const openTag = "{{"
	return strings.Replace(input, openTag, `{{"{{"}}`, -1)
}
// templateDate turns a date into an UTC string compatible with ClickHouse.
func templateDate(input time.Time) string {
return input.UTC().Format("2006-01-02 15:04:05")
}
// templateWhere transforms a filter to a WHERE clause
func templateWhere(qf queryFilter) string {
	const timefilter = `{{ .Timefilter }}`
	if qf.Filter != "" {
		return fmt.Sprintf(`%s AND (%s)`, timefilter, templateEscape(qf.Filter))
	}
	return timefilter
}
// templateContext builds a `context` template directive from the
// given input. The input is JSON-encoded so that contextFunc() can
// decode it back when the template is executed.
func templateContext(context inputContext) string {
	encoded, err := json.Marshal(context)
	if err != nil {
		// Cannot happen: inputContext only contains marshalable fields.
		panic(err)
	}
	return fmt.Sprintf("context `%s`", string(encoded))
}
// contextFunc builds the `context` structure made available to query
// templates. It decodes the JSON-encoded inputContext produced by
// templateContext(), selects the most appropriate flows table, aligns
// start/end on the resulting interval and precomputes the strings
// needed by the template.
func (c *Component) contextFunc(inputStr string) context {
	var input inputContext
	if err := json.Unmarshal([]byte(inputStr), &input); err != nil {
		// A template with an invalid context is a programming error.
		panic(err)
	}
	// Interval we would ideally use to produce the requested number
	// of points. NOTE(review): assumes input.Points > 0 — presumably
	// guaranteed by input validation upstream; confirm.
	targetInterval := time.Duration(uint64(input.End.Sub(input.Start)) / uint64(input.Points))
	if targetInterval < time.Second {
		targetInterval = time.Second
	}
	c.flowsTablesLock.RLock()
	defer c.flowsTablesLock.RUnlock()
	// Select table: default to the live "flows" table at 1-second resolution.
	table := "flows"
	computedInterval := time.Second
	if !input.MainTableRequired && len(c.flowsTables) > 0 {
		// We can use the consolidated data. The first
		// criteria is to find the tables matching the time
		// criteria.
		candidates := []int{}
		for idx, table := range c.flowsTables {
			if input.Start.After(table.Oldest.Add(table.Resolution)) {
				candidates = append(candidates, idx)
			}
		}
		if len(candidates) == 0 {
			// No candidate, fallback to the one with oldest data
			best := 0
			for idx, table := range c.flowsTables {
				if c.flowsTables[best].Oldest.After(table.Oldest.Add(table.Resolution)) {
					best = idx
				}
			}
			candidates = []int{best}
			// Add other candidates that are not far off in terms of oldest data
			for idx, table := range c.flowsTables {
				if idx == best {
					continue
				}
				if c.flowsTables[best].Oldest.After(table.Oldest) {
					candidates = append(candidates, idx)
				}
			}
		}
		if len(candidates) > 1 {
			// Use interval to find the best one: the highest
			// resolution not exceeding the target interval.
			// NOTE(review): best starts at index 0 even when 0 is not
			// in candidates; this appears to rely on the live table
			// being first in c.flowsTables — confirm.
			best := 0
			for _, idx := range candidates {
				if c.flowsTables[idx].Resolution > targetInterval {
					continue
				}
				if c.flowsTables[idx].Resolution > c.flowsTables[best].Resolution {
					best = idx
				}
			}
			candidates = []int{best}
		}
		table = c.flowsTables[candidates[0]].Name
		computedInterval = c.flowsTables[candidates[0]].Resolution
	}
	if computedInterval < time.Second {
		computedInterval = time.Second
	}
	// Make start/end match the computed interval (currently equal to the table resolution)
	start := input.Start.Truncate(computedInterval)
	end := input.End.Truncate(computedInterval)
	// Adapt the computed interval to match the target one more closely
	if targetInterval > computedInterval {
		computedInterval = targetInterval.Truncate(computedInterval)
	}
	// Adapt end to ensure we get a full interval
	end = start.Add(end.Sub(start).Truncate(computedInterval))
	// Now, toStartOfInterval will provide an incorrect value. We
	// compute a correction offset so that buckets stay aligned on the
	// truncated start time. Go's truncate seems to be different from
	// what we expect (it rounds relative to the zero time, while we
	// want alignment relative to start).
	computedIntervalOffset := start.UTC().Sub(
		time.Unix(start.UTC().Unix()/
			int64(computedInterval.Seconds())*
			int64(computedInterval.Seconds()), 0))
	diffOffset := uint64(computedInterval.Seconds()) - uint64(computedIntervalOffset.Seconds())
	// Compute all strings exposed to the template.
	timefilterStart := fmt.Sprintf(`toDateTime('%s', 'UTC')`, start.UTC().Format("2006-01-02 15:04:05"))
	timefilterEnd := fmt.Sprintf(`toDateTime('%s', 'UTC')`, end.UTC().Format("2006-01-02 15:04:05"))
	timefilter := fmt.Sprintf(`TimeReceived BETWEEN %s AND %s`, timefilterStart, timefilterEnd)
	// Aggregation expression matching the requested unit; an unknown
	// unit leaves Units empty.
	var units string
	switch input.Units {
	case "pps":
		units = `SUM(Packets*SamplingRate)`
	case "l3bps":
		units = `SUM(Bytes*SamplingRate*8)`
	case "l2bps":
		units = `SUM((Bytes+18*Packets)*SamplingRate*8)`
	}
	c.metrics.clickhouseQueries.WithLabelValues(table).Inc()
	return context{
		Table:           table,
		Timefilter:      timefilter,
		TimefilterStart: timefilterStart,
		TimefilterEnd:   timefilterEnd,
		Units:           units,
		Interval:        uint64(computedInterval.Seconds()),
		// ToStartOfInterval shifts the field by the correction offset
		// before calling toStartOfInterval() and shifts the result
		// back, so that buckets are aligned on the start time.
		ToStartOfInterval: func(field string) string {
			return fmt.Sprintf(
				`toStartOfInterval(%s + INTERVAL %d second, INTERVAL %d second) - INTERVAL %d second`,
				field,
				diffOffset,
				uint64(computedInterval.Seconds()),
				diffOffset)
		},
	}
}

View File

@@ -4,6 +4,7 @@
package console package console
import ( import (
"fmt"
"testing" "testing"
"time" "time"
@@ -70,113 +71,143 @@ AND engine LIKE '%MergeTree'
} }
} }
func TestQueryFlowsTables(t *testing.T) { func TestFinalizeQuery(t *testing.T) {
cases := []struct { cases := []struct {
Description string Description string
Tables []flowsTable Tables []flowsTable
Query string Query string
MainTableRequired bool Context inputContext
Start time.Time Expected string
End time.Time
Resolution time.Duration
Expected string
}{ }{
{ {
Description: "query with source port", Description: "simple query without additional tables",
Query: "SELECT TimeReceived, SrcPort FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
MainTableRequired: true, Context: inputContext{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT TimeReceived, SrcPort FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')", Points: 86400,
},
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')",
}, {
Description: "query with source port",
Query: "SELECT TimeReceived, SrcPort FROM {{ .Table }} WHERE {{ .Timefilter }}",
Context: inputContext{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
MainTableRequired: true,
Points: 86400,
},
Expected: "SELECT TimeReceived, SrcPort FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')",
}, { }, {
Description: "only flows table available", Description: "only flows table available",
Tables: []flowsTable{{"flows", 0, time.Date(2022, 03, 10, 15, 45, 10, 0, time.UTC)}}, Tables: []flowsTable{{"flows", 0, time.Date(2022, 03, 10, 15, 45, 10, 0, time.UTC)}},
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')", End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 86400,
},
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')",
}, { }, {
Description: "timefilter.Start and timefilter.Stop", Description: "timefilter.Start and timefilter.Stop",
Tables: []flowsTable{{"flows", 0, time.Date(2022, 03, 10, 15, 45, 10, 0, time.UTC)}}, Tables: []flowsTable{{"flows", 0, time.Date(2022, 03, 10, 15, 45, 10, 0, time.UTC)}},
Query: "SELECT {timefilter.Start}, {timefilter.Stop}", Query: "SELECT {{ .TimefilterStart }}, {{ .TimefilterEnd }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Expected: "SELECT toDateTime('2022-04-10 15:45:10', 'UTC'), toDateTime('2022-04-11 15:45:10', 'UTC')", End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 86400,
},
Expected: "SELECT toDateTime('2022-04-10 15:45:10', 'UTC'), toDateTime('2022-04-11 15:45:10', 'UTC')",
}, { }, {
Description: "only flows table and out of range request", Description: "only flows table and out of range request",
Tables: []flowsTable{{"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)}}, Tables: []flowsTable{{"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)}},
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')", End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 86400,
},
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')",
}, { }, {
Description: "select consolidated table", Description: "select consolidated table",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 03, 10, 22, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 03, 10, 22, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }} // {{ .Interval }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC')", Points: 720, // 2-minute resolution
},
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC') // 120",
}, { }, {
Description: "select consolidated table out of range", Description: "select consolidated table out of range",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 17, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 17, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC')", Points: 720, // 2-minute resolution,
},
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC')",
}, { }, {
Description: "select flows table out of range", Description: "select flows table out of range",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 04, 10, 16, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 04, 10, 16, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 17, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 17, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')", Points: 720, // 2-minute resolution,
},
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC')",
}, { }, {
Description: "select flows table better resolution", Description: "select flows table with better resolution",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 03, 10, 16, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 03, 10, 16, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 03, 10, 17, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 03, 10, 17, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter} // {resolution} // {resolution->864}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }} // {{ .Interval }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Resolution: 30 * time.Second, End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC') // 1 // 864", Points: 2880,
},
Expected: "SELECT 1 FROM flows WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:10', 'UTC') AND toDateTime('2022-04-11 15:45:10', 'UTC') // 30",
}, { }, {
Description: "select consolidated table better resolution", Description: "select consolidated table with better resolution",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 03, 10, 22, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 03, 10, 22, 45, 10, 0, time.UTC)},
{"flows_5m0s", 5 * time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)}, {"flows_5m0s", 5 * time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter} // {resolution} // {resolution->864}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }} // {{ .Interval }}",
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC') // 60 // 840", Points: 720, // 2-minute resolution,
},
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC') // 120",
}, { }, {
Description: "select consolidated table better range", Description: "select consolidated table with better range",
Tables: []flowsTable{ Tables: []flowsTable{
{"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)}, {"flows", 0, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)},
{"flows_5m0s", 5 * time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)}, {"flows_5m0s", 5 * time.Minute, time.Date(2022, 04, 2, 22, 45, 10, 0, time.UTC)},
{"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 22, 45, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 46, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 46, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 46, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 46, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows_5m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC')", Points: 720, // 2-minute resolution,
},
Expected: "SELECT 1 FROM flows_5m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:45:00', 'UTC') AND toDateTime('2022-04-11 15:45:00', 'UTC')",
}, { }, {
Description: "select best resolution when equality for oldest data", Description: "select best resolution when equality for oldest data",
Tables: []flowsTable{ Tables: []flowsTable{
@@ -184,11 +215,31 @@ func TestQueryFlowsTables(t *testing.T) {
{"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 22, 40, 00, 0, time.UTC)}, {"flows_1m0s", time.Minute, time.Date(2022, 04, 10, 22, 40, 00, 0, time.UTC)},
{"flows_1h0m0s", time.Hour, time.Date(2022, 04, 10, 22, 00, 10, 0, time.UTC)}, {"flows_1h0m0s", time.Hour, time.Date(2022, 04, 10, 22, 00, 10, 0, time.UTC)},
}, },
Query: "SELECT 1 FROM {table} WHERE {timefilter}", Query: "SELECT 1 FROM {{ .Table }} WHERE {{ .Timefilter }}",
Start: time.Date(2022, 04, 10, 15, 46, 10, 0, time.UTC), Context: inputContext{
End: time.Date(2022, 04, 11, 15, 46, 10, 0, time.UTC), Start: time.Date(2022, 04, 10, 15, 46, 10, 0, time.UTC),
Resolution: 2 * time.Minute, End: time.Date(2022, 04, 11, 15, 46, 10, 0, time.UTC),
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:46:00', 'UTC') AND toDateTime('2022-04-11 15:46:00', 'UTC')", Points: 720, // 2-minute resolution,
},
Expected: "SELECT 1 FROM flows_1m0s WHERE TimeReceived BETWEEN toDateTime('2022-04-10 15:46:00', 'UTC') AND toDateTime('2022-04-11 15:46:00', 'UTC')",
}, {
Description: "query with escaped template",
Query: `SELECT TimeReceived, SrcPort WHERE InIfDescription = '{{"{{"}} hello }}'`,
Context: inputContext{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 86400,
},
Expected: `SELECT TimeReceived, SrcPort WHERE InIfDescription = '{{ hello }}'`,
}, {
Description: "use of ToStartOfInterval",
Query: `{{ call .ToStartOfInterval "TimeReceived" }}`,
Context: inputContext{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 720,
},
Expected: `toStartOfInterval(TimeReceived + INTERVAL 50 second, INTERVAL 120 second) - INTERVAL 50 second`,
}, },
} }
@@ -196,9 +247,10 @@ func TestQueryFlowsTables(t *testing.T) {
for _, tc := range cases { for _, tc := range cases {
t.Run(tc.Description, func(t *testing.T) { t.Run(tc.Description, func(t *testing.T) {
c.flowsTables = tc.Tables c.flowsTables = tc.Tables
got := c.queryFlowsTable(tc.Query, tc.MainTableRequired, tc.Start, tc.End, tc.Resolution) got := c.finalizeQuery(
fmt.Sprintf(`{{ with %s }}%s{{ end }}`, templateContext(tc.Context), tc.Query))
if diff := helpers.Diff(got, tc.Expected); diff != "" { if diff := helpers.Diff(got, tc.Expected); diff != "" {
t.Fatalf("queryFlowsTable(): (-got, +want):\n%s", diff) t.Fatalf("finalizeQuery(): (-got, +want):\n%s", diff)
} }
}) })
} }

View File

@@ -73,7 +73,7 @@ const options = computed(() => ({
}, },
data: (data.value?.data || []) data: (data.value?.data || [])
.map(({ t, gbps }) => [t, gbps]) .map(({ t, gbps }) => [t, gbps])
.slice(1, -1), .slice(0, -1),
}, },
], ],
})); }));

View File

@@ -126,7 +126,7 @@ const graph = computed(() => {
(row, rowIdx) => row[timeIdx] * (data.axis[rowIdx] == 1 ? 1 : -1) (row, rowIdx) => row[timeIdx] * (data.axis[rowIdx] == 1 ? 1 : -1)
), ),
]) ])
.slice(1, -1), .slice(0, -1),
], ],
}, },
xAxis = { xAxis = {

View File

@@ -19,7 +19,7 @@ import (
type graphHandlerInput struct { type graphHandlerInput struct {
Start time.Time `json:"start" binding:"required"` Start time.Time `json:"start" binding:"required"`
End time.Time `json:"end" binding:"required,gtfield=Start"` End time.Time `json:"end" binding:"required,gtfield=Start"`
Points int `json:"points" binding:"required,min=5,max=2000"` // minimum number of points Points uint `json:"points" binding:"required,min=5,max=2000"` // minimum number of points
Dimensions []queryColumn `json:"dimensions"` // group by ... Dimensions []queryColumn `json:"dimensions"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ... Filter queryFilter `json:"filter"` // where ...
@@ -54,28 +54,12 @@ func (input graphHandlerInput) reverseDirection() graphHandlerInput {
} }
func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string { func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string {
interval := int64((input.End.Sub(input.Start).Seconds())) / int64(input.Points) where := templateWhere(input.Filter)
slot := fmt.Sprintf(`{resolution->%d}`, interval)
// Filter
where := input.Filter.Filter
if where == "" {
where = "{timefilter}"
} else {
where = fmt.Sprintf("{timefilter} AND (%s)", where)
}
// Select // Select
fields := []string{ fields := []string{
fmt.Sprintf(`toStartOfInterval(TimeReceived, INTERVAL %s second) AS time`, slot), `{{ call .ToStartOfInterval "TimeReceived" }} AS time`,
} `{{ .Units }}/{{ .Interval }} AS xps`,
switch input.Units {
case "pps":
fields = append(fields, fmt.Sprintf(`SUM(Packets*SamplingRate/%s) AS xps`, slot))
case "l3bps":
fields = append(fields, fmt.Sprintf(`SUM(Bytes*SamplingRate*8/%s) AS xps`, slot))
case "l2bps":
fields = append(fields, fmt.Sprintf(`SUM((Bytes+18*Packets)*SamplingRate*8/%s) AS xps`, slot))
} }
selectFields := []string{} selectFields := []string{}
dimensions := []string{} dimensions := []string{}
@@ -99,7 +83,7 @@ func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string {
with := []string{} with := []string{}
if len(dimensions) > 0 && !skipWith { if len(dimensions) > 0 && !skipWith {
with = append(with, fmt.Sprintf( with = append(with, fmt.Sprintf(
"rows AS (SELECT %s FROM {table} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)", "rows AS (SELECT %s FROM {{ .Table }} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)",
strings.Join(dimensions, ", "), strings.Join(dimensions, ", "),
where, where,
strings.Join(dimensions, ", "), strings.Join(dimensions, ", "),
@@ -107,34 +91,40 @@ func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string {
} }
withStr := "" withStr := ""
if len(with) > 0 { if len(with) > 0 {
withStr = fmt.Sprintf("WITH\n %s", strings.Join(with, ",\n ")) withStr = fmt.Sprintf("\nWITH\n %s", strings.Join(with, ",\n "))
} }
sqlQuery := fmt.Sprintf(` sqlQuery := fmt.Sprintf(`
%s {{ with %s }}%s
SELECT %d AS axis, * FROM ( SELECT %d AS axis, * FROM (
SELECT SELECT
%s %s
FROM {table} FROM {{ .Table }}
WHERE %s WHERE %s
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL %s second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP %s)`, withStr, axis, strings.Join(fields, ",\n "), where, slot, slot) STEP {{ .Interval }})
return sqlQuery {{ end }}`,
templateContext(inputContext{
Start: input.Start,
End: input.End,
MainTableRequired: requireMainTable(input.Dimensions, input.Filter),
Points: input.Points,
Units: input.Units,
}),
withStr, axis, strings.Join(fields, ",\n "), where)
return strings.TrimSpace(sqlQuery)
} }
// graphHandlerInputToSQL converts a graph input to an SQL request // graphHandlerInputToSQL converts a graph input to an SQL request
func (input graphHandlerInput) toSQL() string { func (input graphHandlerInput) toSQL() string {
result := input.toSQL1(1, false) parts := []string{input.toSQL1(1, false)}
if input.Bidirectional { if input.Bidirectional {
part2 := input.reverseDirection().toSQL1(2, true) parts = append(parts, input.reverseDirection().toSQL1(2, true))
result = fmt.Sprintf(`%s
UNION ALL
%s`, result, strings.TrimSpace(part2))
} }
return strings.TrimSpace(result) return strings.TrimSpace(strings.Join(parts, "\nUNION ALL\n"))
} }
func (c *Component) graphHandlerFunc(gc *gin.Context) { func (c *Component) graphHandlerFunc(gc *gin.Context) {
@@ -146,12 +136,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
} }
sqlQuery := input.toSQL() sqlQuery := input.toSQL()
resolution := time.Duration(int64(input.End.Sub(input.Start).Nanoseconds()) / int64(input.Points)) sqlQuery = c.finalizeQuery(sqlQuery)
if resolution < time.Second {
resolution = time.Second
}
sqlQuery = c.queryFlowsTable(sqlQuery, requireMainTable(input.Dimensions, input.Filter),
input.Start, input.End, resolution)
gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " ")) gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " "))
results := []struct { results := []struct {

View File

@@ -72,18 +72,20 @@ func TestGraphQuerySQL(t *testing.T) {
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, { }, {
Description: "no dimensions, no filters, l2 bps", Description: "no dimensions, no filters, l2 bps",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -95,18 +97,21 @@ ORDER BY time WITH FILL
Units: "l2bps", Units: "l2bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l2bps"}@@ }}
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM((Bytes+18*Packets)*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}
`,
}, { }, {
Description: "no dimensions, no filters, pps", Description: "no dimensions, no filters, pps",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -118,18 +123,20 @@ ORDER BY time WITH FILL
Units: "pps", Units: "pps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"pps"}@@ }}
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Packets*SamplingRate/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, { }, {
Description: "no dimensions", Description: "no dimensions",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -141,18 +148,45 @@ ORDER BY time WITH FILL
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US') WHERE {{ .Timefilter }} AND (DstCountry = 'FR' AND SrcCountry = 'US')
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, {
Description: "no dimensions, escaped filter",
Input: graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{Filter: "InIfDescription = '{{ hello }}' AND SrcCountry = 'US'"},
Units: "l3bps",
},
Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 1 AS axis, * FROM (
SELECT
{{ call .ToStartOfInterval "TimeReceived" }} AS time,
{{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions
FROM {{ .Table }}
WHERE {{ .Timefilter }} AND (InIfDescription = '{{"{{"}} hello }}' AND SrcCountry = 'US')
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM {{ .TimefilterStart }}
TO {{ .TimefilterEnd }}
STEP {{ .Interval }})
{{ end }}`,
}, { }, {
Description: "no dimensions, reverse direction", Description: "no dimensions, reverse direction",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -168,31 +202,35 @@ ORDER BY time WITH FILL
Bidirectional: true, Bidirectional: true,
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US') WHERE {{ .Timefilter }} AND (DstCountry = 'FR' AND SrcCountry = 'US')
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864}) STEP {{ .Interval }})
{{ end }}
UNION ALL UNION ALL
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 2 AS axis, * FROM ( SELECT 2 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
emptyArrayString() AS dimensions emptyArrayString() AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} AND (SrcCountry = 'FR' AND DstCountry = 'US') WHERE {{ .Timefilter }} AND (SrcCountry = 'FR' AND DstCountry = 'US')
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, { }, {
Description: "no filters", Description: "no filters",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -208,20 +246,22 @@ ORDER BY time WITH FILL
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
WITH WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20) rows AS (SELECT ExporterName, InIfProvider FROM {{ .Table }} WHERE {{ .Timefilter }} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, { }, {
Description: "no filters, reverse", Description: "no filters, reverse",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -238,36 +278,41 @@ ORDER BY time WITH FILL
Bidirectional: true, Bidirectional: true,
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
WITH WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20) rows AS (SELECT ExporterName, InIfProvider FROM {{ .Table }} WHERE {{ .Timefilter }} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM ( SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864}) STEP {{ .Interval }})
{{ end }}
UNION ALL UNION ALL
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":100,"units":"l3bps"}@@ }}
SELECT 2 AS axis, * FROM ( SELECT 2 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, {{ call .ToStartOfInterval "TimeReceived" }} AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, {{ .Units }}/{{ .Interval }} AS xps,
if((ExporterName, OutIfProvider) IN rows, [ExporterName, OutIfProvider], ['Other', 'Other']) AS dimensions if((ExporterName, OutIfProvider) IN rows, [ExporterName, OutIfProvider], ['Other', 'Other']) AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY time, dimensions GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP {resolution->864})`, STEP {{ .Interval }})
{{ end }}`,
}, },
} }
for _, tc := range cases { for _, tc := range cases {
tc.Expected = strings.ReplaceAll(tc.Expected, "@@", "`")
t.Run(tc.Description, func(t *testing.T) { t.Run(tc.Description, func(t *testing.T) {
got := tc.Input.toSQL() got := tc.Input.toSQL()
if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"), if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"),

View File

@@ -5,7 +5,6 @@
package console package console
import ( import (
"html/template"
"io/fs" "io/fs"
netHTTP "net/http" netHTTP "net/http"
"os" "os"
@@ -13,6 +12,7 @@ import (
"path/filepath" "path/filepath"
"runtime" "runtime"
"sync" "sync"
"text/template"
"time" "time"
"github.com/benbjohnson/clock" "github.com/benbjohnson/clock"

View File

@@ -42,13 +42,7 @@ type sankeyLink struct {
// sankeyHandlerInputToSQL converts a sankey query to an SQL request // sankeyHandlerInputToSQL converts a sankey query to an SQL request
func (input sankeyHandlerInput) toSQL() (string, error) { func (input sankeyHandlerInput) toSQL() (string, error) {
// Filter where := templateWhere(input.Filter)
where := input.Filter.Filter
if where == "" {
where = "{timefilter}"
} else {
where = fmt.Sprintf("{timefilter} AND (%s)", where)
}
// Select // Select
arrayFields := []string{} arrayFields := []string{}
@@ -60,22 +54,16 @@ func (input sankeyHandlerInput) toSQL() (string, error) {
column.toSQLSelect())) column.toSQLSelect()))
dimensions = append(dimensions, column.String()) dimensions = append(dimensions, column.String())
} }
fields := []string{} fields := []string{
switch input.Units { `{{ .Units }}/range AS xps`,
case "pps": fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")),
fields = append(fields, `SUM(Packets*SamplingRate/range) AS xps`)
case "l3bps":
fields = append(fields, `SUM(Bytes*SamplingRate*8/range) AS xps`)
case "l2bps":
fields = append(fields, `SUM((Bytes+18*Packets)*SamplingRate*8/range) AS xps`)
} }
fields = append(fields, fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")))
// With // With
with := []string{ with := []string{
fmt.Sprintf(`(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE %s) AS range`, where), fmt.Sprintf(`(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {{ .Table }} WHERE %s) AS range`, where),
fmt.Sprintf( fmt.Sprintf(
"rows AS (SELECT %s FROM {table} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)", "rows AS (SELECT %s FROM {{ .Table }} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)",
strings.Join(dimensions, ", "), strings.Join(dimensions, ", "),
where, where,
strings.Join(dimensions, ", "), strings.Join(dimensions, ", "),
@@ -83,15 +71,25 @@ func (input sankeyHandlerInput) toSQL() (string, error) {
} }
sqlQuery := fmt.Sprintf(` sqlQuery := fmt.Sprintf(`
{{ with %s }}
WITH WITH
%s %s
SELECT SELECT
%s %s
FROM {table} FROM {{ .Table }}
WHERE %s WHERE %s
GROUP BY dimensions GROUP BY dimensions
ORDER BY xps DESC`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where) ORDER BY xps DESC
return sqlQuery, nil {{ end }}`,
templateContext(inputContext{
Start: input.Start,
End: input.End,
MainTableRequired: requireMainTable(input.Dimensions, input.Filter),
Points: 20,
Units: input.Units,
}),
strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
return strings.TrimSpace(sqlQuery), nil
} }
func (c *Component) sankeyHandlerFunc(gc *gin.Context) { func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
@@ -108,15 +106,8 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
return return
} }
// We need to select a resolution allowing us to have a somewhat accurate timespan
resolution := time.Duration(int64(input.End.Sub(input.Start).Nanoseconds()) / 20)
if resolution < time.Second {
resolution = time.Second
}
// Prepare and execute query // Prepare and execute query
sqlQuery = c.queryFlowsTable(sqlQuery, requireMainTable(input.Dimensions, input.Filter), sqlQuery = c.finalizeQuery(sqlQuery)
input.Start, input.End, resolution)
gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " ")) gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " "))
results := []struct { results := []struct {
Xps float64 `ch:"xps"` Xps float64 `ch:"xps"`

View File

@@ -31,17 +31,19 @@ func TestSankeyQuerySQL(t *testing.T) {
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":20,"units":"l3bps"}@@ }}
WITH WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range, (SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {{ .Table }} WHERE {{ .Timefilter }}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5) rows AS (SELECT SrcAS, ExporterName FROM {{ .Table }} WHERE {{ .Timefilter }} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT SELECT
SUM(Bytes*SamplingRate*8/range) AS xps, {{ .Units }}/range AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'), [if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY dimensions GROUP BY dimensions
ORDER BY xps DESC`, ORDER BY xps DESC
{{ end }}`,
}, { }, {
Description: "two dimensions, no filters, l2 bps", Description: "two dimensions, no filters, l2 bps",
Input: sankeyHandlerInput{ Input: sankeyHandlerInput{
@@ -53,17 +55,20 @@ ORDER BY xps DESC`,
Units: "l2bps", Units: "l2bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":20,"units":"l2bps"}@@ }}
WITH WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range, (SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {{ .Table }} WHERE {{ .Timefilter }}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5) rows AS (SELECT SrcAS, ExporterName FROM {{ .Table }} WHERE {{ .Timefilter }} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT SELECT
SUM((Bytes+18*Packets)*SamplingRate*8/range) AS xps, {{ .Units }}/range AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'), [if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY dimensions GROUP BY dimensions
ORDER BY xps DESC`, ORDER BY xps DESC
{{ end }}
`,
}, { }, {
Description: "two dimensions, no filters, pps", Description: "two dimensions, no filters, pps",
Input: sankeyHandlerInput{ Input: sankeyHandlerInput{
@@ -75,17 +80,19 @@ ORDER BY xps DESC`,
Units: "pps", Units: "pps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":20,"units":"pps"}@@ }}
WITH WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range, (SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {{ .Table }} WHERE {{ .Timefilter }}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5) rows AS (SELECT SrcAS, ExporterName FROM {{ .Table }} WHERE {{ .Timefilter }} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT SELECT
SUM(Packets*SamplingRate/range) AS xps, {{ .Units }}/range AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'), [if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
GROUP BY dimensions GROUP BY dimensions
ORDER BY xps DESC`, ORDER BY xps DESC
{{ end }}`,
}, { }, {
Description: "two dimensions, with filter", Description: "two dimensions, with filter",
Input: sankeyHandlerInput{ Input: sankeyHandlerInput{
@@ -97,23 +104,27 @@ ORDER BY xps DESC`,
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
{{ with context @@{"start":"2022-04-10T15:45:10Z","end":"2022-04-11T15:45:10Z","points":20,"units":"l3bps"}@@ }}
WITH WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter} AND (DstCountry = 'FR')) AS range, (SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {{ .Table }} WHERE {{ .Timefilter }} AND (DstCountry = 'FR')) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} AND (DstCountry = 'FR') GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 10) rows AS (SELECT SrcAS, ExporterName FROM {{ .Table }} WHERE {{ .Timefilter }} AND (DstCountry = 'FR') GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 10)
SELECT SELECT
SUM(Bytes*SamplingRate*8/range) AS xps, {{ .Units }}/range AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'), [if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table} FROM {{ .Table }}
WHERE {timefilter} AND (DstCountry = 'FR') WHERE {{ .Timefilter }} AND (DstCountry = 'FR')
GROUP BY dimensions GROUP BY dimensions
ORDER BY xps DESC`, ORDER BY xps DESC
{{ end }}`,
}, },
} }
for _, tc := range cases { for _, tc := range cases {
tc.Expected = strings.ReplaceAll(tc.Expected, "@@", "`")
t.Run(tc.Description, func(t *testing.T) { t.Run(tc.Description, func(t *testing.T) {
got, _ := tc.Input.toSQL() got, _ := tc.Input.toSQL()
if diff := helpers.Diff(strings.Split(got, "\n"), strings.Split(tc.Expected, "\n")); diff != "" { if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"),
strings.Split(strings.TrimSpace(tc.Expected), "\n")); diff != "" {
t.Errorf("toSQL (-got, +want):\n%s", diff) t.Errorf("toSQL (-got, +want):\n%s", diff)
} }
}) })

View File

@@ -7,6 +7,7 @@ import (
"fmt" "fmt"
"net/http" "net/http"
"reflect" "reflect"
"strings"
"time" "time"
"akvorado/common/helpers" "akvorado/common/helpers"
@@ -151,23 +152,31 @@ func (c *Component) widgetTopHandlerFunc(gc *gin.Context) {
} }
now := c.d.Clock.Now() now := c.d.Clock.Now()
query := c.queryFlowsTable(fmt.Sprintf(` query := c.finalizeQuery(fmt.Sprintf(`
{{ with %s }}
WITH WITH
(SELECT SUM(Bytes*SamplingRate) FROM {table} WHERE {timefilter} %s) AS Total (SELECT SUM(Bytes*SamplingRate) FROM {{ .Table }} WHERE {{ .Timefilter }} %s) AS Total
SELECT SELECT
if(empty(%s),'Unknown',%s) AS Name, if(empty(%s),'Unknown',%s) AS Name,
SUM(Bytes*SamplingRate) / Total * 100 AS Percent SUM(Bytes*SamplingRate) / Total * 100 AS Percent
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
%s %s
GROUP BY %s GROUP BY %s
ORDER BY Percent DESC ORDER BY Percent DESC
LIMIT 5 LIMIT 5
`, filter, selector, selector, filter, groupby), mainTableRequired, now.Add(-5*time.Minute), now, time.Minute) {{ end }}`,
templateContext(inputContext{
Start: now.Add(-5 * time.Minute),
End: now,
MainTableRequired: mainTableRequired,
Points: 5,
}),
filter, selector, selector, filter, groupby))
gc.Header("X-SQL-Query", query) gc.Header("X-SQL-Query", query)
results := []topResult{} results := []topResult{}
err := c.d.ClickHouseDB.Conn.Select(ctx, &results, query) err := c.d.ClickHouseDB.Conn.Select(ctx, &results, strings.TrimSpace(query))
if err != nil { if err != nil {
c.r.Err(err).Msg("unable to query database") c.r.Err(err).Msg("unable to query database")
gc.JSON(http.StatusInternalServerError, gin.H{"message": "Unable to query database."}) gc.JSON(http.StatusInternalServerError, gin.H{"message": "Unable to query database."})
@@ -177,7 +186,7 @@ LIMIT 5
} }
type widgetParameters struct { type widgetParameters struct {
Points uint64 `form:"points" binding:"isdefault|min=5,max=1000"` Points uint `form:"points" binding:"isdefault|min=5,max=1000"`
} }
func (c *Component) widgetGraphHandlerFunc(gc *gin.Context) { func (c *Component) widgetGraphHandlerFunc(gc *gin.Context) {
@@ -191,28 +200,34 @@ func (c *Component) widgetGraphHandlerFunc(gc *gin.Context) {
if params.Points == 0 { if params.Points == 0 {
params.Points = 200 params.Points = 200
} }
interval := int64((24 * time.Hour).Seconds()) / int64(params.Points)
slot := fmt.Sprintf(`{resolution->%d}`, interval)
now := c.d.Clock.Now() now := c.d.Clock.Now()
query := c.queryFlowsTable(fmt.Sprintf(` query := c.finalizeQuery(fmt.Sprintf(`
{{ with %s }}
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL %s second) AS Time, {{ call .ToStartOfInterval "TimeReceived" }} AS Time,
SUM(Bytes*SamplingRate*8/%s)/1000/1000/1000 AS Gbps SUM(Bytes*SamplingRate*8/{{ .Interval }})/1000/1000/1000 AS Gbps
FROM {table} FROM {{ .Table }}
WHERE {timefilter} WHERE {{ .Timefilter }}
AND InIfBoundary = 'external' AND InIfBoundary = 'external'
GROUP BY Time GROUP BY Time
ORDER BY Time WITH FILL ORDER BY Time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL %s second) FROM {{ .TimefilterStart }}
TO {timefilter.Stop} TO {{ .TimefilterEnd }}
STEP %s`, slot, slot, slot, slot), false, now.Add(-24*time.Hour), now, time.Duration(interval)*time.Second) STEP {{ .Interval }}
{{ end }}`,
templateContext(inputContext{
Start: now.Add(-24 * time.Hour),
End: now,
MainTableRequired: false,
Points: params.Points,
})))
gc.Header("X-SQL-Query", query) gc.Header("X-SQL-Query", query)
results := []struct { results := []struct {
Time time.Time `json:"t"` Time time.Time `json:"t"`
Gbps float64 `json:"gbps"` Gbps float64 `json:"gbps"`
}{} }{}
err := c.d.ClickHouseDB.Conn.Select(ctx, &results, query) err := c.d.ClickHouseDB.Conn.Select(ctx, &results, strings.TrimSpace(query))
if err != nil { if err != nil {
c.r.Err(err).Msg("unable to query database") c.r.Err(err).Msg("unable to query database")
gc.JSON(http.StatusInternalServerError, gin.H{"message": "Unable to query database."}) gc.JSON(http.StatusInternalServerError, gin.H{"message": "Unable to query database."})

View File

@@ -6,6 +6,7 @@ package console
import ( import (
"net" "net"
"reflect" "reflect"
"strings"
"testing" "testing"
"time" "time"
@@ -229,18 +230,18 @@ func TestWidgetGraph(t *testing.T) {
{base.Add(5 * time.Minute), 24.7}, {base.Add(5 * time.Minute), 24.7},
} }
mockConn.EXPECT(). mockConn.EXPECT().
Select(gomock.Any(), gomock.Any(), ` Select(gomock.Any(), gomock.Any(), strings.TrimSpace(`
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL 864 second) AS Time, toStartOfInterval(TimeReceived + INTERVAL 144 second, INTERVAL 864 second) - INTERVAL 144 second AS Time,
SUM(Bytes*SamplingRate*8/864)/1000/1000/1000 AS Gbps SUM(Bytes*SamplingRate*8/864)/1000/1000/1000 AS Gbps
FROM flows FROM flows
WHERE TimeReceived BETWEEN toDateTime('2009-11-10 23:00:00', 'UTC') AND toDateTime('2009-11-11 23:00:00', 'UTC') WHERE TimeReceived BETWEEN toDateTime('2009-11-10 23:00:00', 'UTC') AND toDateTime('2009-11-11 23:00:00', 'UTC')
AND InIfBoundary = 'external' AND InIfBoundary = 'external'
GROUP BY Time GROUP BY Time
ORDER BY Time WITH FILL ORDER BY Time WITH FILL
FROM toStartOfInterval(toDateTime('2009-11-10 23:00:00', 'UTC'), INTERVAL 864 second) FROM toDateTime('2009-11-10 23:00:00', 'UTC')
TO toDateTime('2009-11-11 23:00:00', 'UTC') TO toDateTime('2009-11-11 23:00:00', 'UTC')
STEP 864`). STEP 864`)).
SetArg(1, expected). SetArg(1, expected).
Return(nil) Return(nil)