diff --git a/console/frontend/src/components/InputComponent.vue b/console/frontend/src/components/InputComponent.vue
index 01509c85..930b3642 100644
--- a/console/frontend/src/components/InputComponent.vue
+++ b/console/frontend/src/components/InputComponent.vue
@@ -41,6 +41,5 @@ const props = defineProps({
});
import { v4 as uuidv4 } from "uuid";
-
const id = uuidv4();
diff --git a/console/frontend/src/components/InputToggle.vue b/console/frontend/src/components/InputToggle.vue
new file mode 100644
index 00000000..59afc267
--- /dev/null
+++ b/console/frontend/src/components/InputToggle.vue
@@ -0,0 +1,36 @@
+
+
+
+
+
+
+
diff --git a/console/frontend/src/utils/index.js b/console/frontend/src/utils/index.js
index 88940fa1..bb68e813 100644
--- a/console/frontend/src/utils/index.js
+++ b/console/frontend/src/utils/index.js
@@ -1,4 +1,4 @@
-export function formatBps(value) {
+export function formatXps(value) {
const suffixes = ["", "K", "M", "G", "T"];
let idx = 0;
while (value >= 1000 && idx < suffixes.length) {
diff --git a/console/frontend/src/views/HomePage/WidgetGraph.vue b/console/frontend/src/views/HomePage/WidgetGraph.vue
index 3af50af4..c1a78bd2 100644
--- a/console/frontend/src/views/HomePage/WidgetGraph.vue
+++ b/console/frontend/src/views/HomePage/WidgetGraph.vue
@@ -21,12 +21,12 @@ import { CanvasRenderer } from "echarts/renderers";
import { LineChart } from "echarts/charts";
import { TooltipComponent, GridComponent } from "echarts/components";
import VChart from "vue-echarts";
-import { dataColor, formatBps } from "../../utils";
+import { dataColor, formatXps } from "../../utils";
const { isDark } = inject("theme");
use([CanvasRenderer, LineChart, TooltipComponent, GridComponent]);
-const formatGbps = (value) => formatBps(value * 1_000_000_000);
+const formatGbps = (value) => formatXps(value * 1_000_000_000);
const url = computed(() => `/api/v0/console/widget/graph?${props.refresh}`);
const { data } = useFetch(url, { refetch: true }).get().json();
diff --git a/console/frontend/src/views/VisualizePage.vue b/console/frontend/src/views/VisualizePage.vue
index b0edfcac..8fbba4f2 100644
--- a/console/frontend/src/views/VisualizePage.vue
+++ b/console/frontend/src/views/VisualizePage.vue
@@ -76,6 +76,7 @@ const defaultState = () => ({
dimensions: ["SrcAS", "ExporterName"],
limit: 10,
filter: "InIfBoundary = external",
+ units: "bps",
});
const state = ref({});
@@ -149,6 +150,7 @@ const { data, isFetching, aborted, abort, canAbort, error } = useFetch("", {
start: payload.value.start,
end: payload.value.end,
graphType: payload.value.graphType,
+ units: payload.value.units,
};
// Also update URL.
diff --git a/console/frontend/src/views/VisualizePage/DataGraphSankey.vue b/console/frontend/src/views/VisualizePage/DataGraphSankey.vue
index 9cffcff1..a152c3b4 100644
--- a/console/frontend/src/views/VisualizePage/DataGraphSankey.vue
+++ b/console/frontend/src/views/VisualizePage/DataGraphSankey.vue
@@ -11,7 +11,7 @@ const props = defineProps({
});
import { inject, computed } from "vue";
-import { formatBps, dataColor, dataColorGrey } from "@/utils";
+import { formatXps, dataColor, dataColorGrey } from "@/utils";
const { isDark } = inject("theme");
import { use } from "echarts/core";
@@ -25,7 +25,7 @@ use([CanvasRenderer, SankeyChart, TooltipComponent]);
const graph = computed(() => {
const theme = isDark.value ? "dark" : "light";
const data = props.data || {};
- if (!data.bps) return {};
+ if (!data.xps) return {};
let greyNodes = 0;
let colorNodes = 0;
return {
@@ -34,7 +34,7 @@ const graph = computed(() => {
confine: true,
trigger: "item",
triggerOn: "mousemove",
- valueFormatter: formatBps,
+ valueFormatter: formatXps,
},
series: [
{
@@ -52,10 +52,10 @@ const graph = computed(() => {
: dataColor(colorNodes++, false, theme),
},
})),
- links: data.links.map(({ source, target, bps }) => ({
+ links: data.links.map(({ source, target, xps }) => ({
source,
target,
- value: bps,
+ value: xps,
})),
label: {
formatter: "{b}",
diff --git a/console/frontend/src/views/VisualizePage/DataGraphTimeSeries.vue b/console/frontend/src/views/VisualizePage/DataGraphTimeSeries.vue
index fbc5d223..40a1b50a 100644
--- a/console/frontend/src/views/VisualizePage/DataGraphTimeSeries.vue
+++ b/console/frontend/src/views/VisualizePage/DataGraphTimeSeries.vue
@@ -21,7 +21,7 @@ const props = defineProps({
const emit = defineEmits(["updateTimeRange"]);
import { ref, watch, inject, computed, onMounted, nextTick } from "vue";
-import { formatBps, dataColor, dataColorGrey } from "@/utils";
+import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
@@ -66,7 +66,7 @@ const commonGraph = {
type: "cross",
label: { backgroundColor: "#6a7985" },
},
- valueFormatter: formatBps,
+ valueFormatter: formatXps,
},
};
const graph = computed(() => {
@@ -93,9 +93,9 @@ const graph = computed(() => {
yAxis = {
type: "value",
min: 0,
- axisLabel: { formatter: formatBps },
+ axisLabel: { formatter: formatXps },
axisPointer: {
- label: { formatter: ({ value }) => formatBps(value) },
+ label: { formatter: ({ value }) => formatXps(value) },
},
};
diff --git a/console/frontend/src/views/VisualizePage/DataTable.vue b/console/frontend/src/views/VisualizePage/DataTable.vue
index 63bc8991..9bff1451 100644
--- a/console/frontend/src/views/VisualizePage/DataTable.vue
+++ b/console/frontend/src/views/VisualizePage/DataTable.vue
@@ -64,7 +64,7 @@ const props = defineProps({
defineEmits(["highlighted"]);
import { computed, inject } from "vue";
-import { formatBps, dataColor, dataColorGrey } from "@/utils";
+import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
const { stacked, lines, grid, sankey } = graphTypes;
@@ -104,7 +104,7 @@ const table = computed(() => {
data.average[idx],
data["95th"][idx],
].map((d) => ({
- value: formatBps(d) + "bps",
+ value: formatXps(d) + data.units,
classNames: "text-right tabular-nums",
})),
],
@@ -129,7 +129,7 @@ const table = computed(() => {
...rows.map((r) => ({ value: r })),
// Average
{
- value: formatBps(data.bps[idx]) + "bps",
+ value: formatXps(data.xps[idx]) + data.units,
classNames: "text-right tabular-nums",
},
],
diff --git a/console/frontend/src/views/VisualizePage/OptionsPanel.vue b/console/frontend/src/views/VisualizePage/OptionsPanel.vue
index a1f95b01..748ffa7c 100644
--- a/console/frontend/src/views/VisualizePage/OptionsPanel.vue
+++ b/console/frontend/src/views/VisualizePage/OptionsPanel.vue
@@ -52,7 +52,11 @@
class="mb-2 font-mono"
autosize
/>
-
+
+
({
// Common to all graph types
@@ -113,6 +119,7 @@ const options = computed(() => ({
dimensions: dimensions.value.selected,
limit: dimensions.value.limit,
filter: filter.value,
+ units: pps.value ? "pps" : "bps",
// Only for time series
...([stacked, lines].includes(graphType.value.name) && { points: 200 }),
...(graphType.value.name === grid && { points: 50 }),
@@ -135,12 +142,14 @@ watch(
limit,
points /* eslint-disable-line no-unused-vars */,
filter: _filter,
+ units,
} = modelValue;
graphType.value =
graphTypeList.find(({ name }) => name === _graphType) || graphTypeList[0];
timeRange.value = { start, end };
dimensions.value = { selected: [...(_dimensions || [])], limit };
filter.value = _filter;
+ pps.value = units === "pps";
},
{ immediate: true, deep: true }
);
diff --git a/console/frontend/src/views/VisualizePage/RequestSummary.vue b/console/frontend/src/views/VisualizePage/RequestSummary.vue
index 814d59eb..9da230f4 100644
--- a/console/frontend/src/views/VisualizePage/RequestSummary.vue
+++ b/console/frontend/src/views/VisualizePage/RequestSummary.vue
@@ -22,6 +22,12 @@
{{ request.filter }}
+
+
+ {{
+ { bps: "ᵇ⁄ₛ", pps: "ᵖ⁄ₛ" }[request.units] || request.units
+ }}
+
@@ -41,6 +47,7 @@ import {
AdjustmentsIcon,
ArrowUpIcon,
FilterIcon,
+ HashtagIcon,
} from "@heroicons/vue/solid";
import { Date as SugarDate } from "sugar-date";
diff --git a/console/graph.go b/console/graph.go
index 02529629..9e8cb4ab 100644
--- a/console/graph.go
+++ b/console/graph.go
@@ -20,6 +20,7 @@ type graphQuery struct {
Dimensions []queryColumn `json:"dimensions"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ...
+ Units string `json:"units" binding:"required,oneof=pps bps"`
}
// graphQueryToSQL converts a graph query to an SQL request
@@ -37,7 +38,11 @@ func (query graphQuery) toSQL() (string, error) {
// Select
fields := []string{
`toStartOfInterval(TimeReceived, INTERVAL slot second) AS time`,
- `SUM(Bytes*SamplingRate*8/slot) AS bps`,
+ }
+ if query.Units == "pps" {
+ fields = append(fields, `SUM(Packets*SamplingRate/slot) AS xps`)
+ } else {
+ fields = append(fields, `SUM(Bytes*SamplingRate*8/slot) AS xps`)
}
selectFields := []string{}
dimensions := []string{}
@@ -83,8 +88,8 @@ ORDER BY time`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
type graphHandlerOutput struct {
Rows [][]string `json:"rows"`
Time []time.Time `json:"t"`
- Points [][]int `json:"points"` // t → row → bps
- Average []int `json:"average"` // row → bps
+ Points [][]int `json:"points"` // t → row → xps
+ Average []int `json:"average"` // row → xps
Min []int `json:"min"`
Max []int `json:"max"`
NinetyFivePercentile []int `json:"95th"`
@@ -113,7 +118,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
results := []struct {
Time time.Time `ch:"time"`
- Bps float64 `ch:"bps"`
+ Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{}
if err := c.d.ClickHouseDB.Conn.Select(ctx, &results, sqlQuery); err != nil {
@@ -150,9 +155,9 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
row = make([]int, len(output.Time))
rowValues[rowKey] = row
}
- rowValues[rowKey][idx] = int(result.Bps)
+ rowValues[rowKey][idx] = int(result.Xps)
sum, _ := rowSums[rowKey]
- rowSums[rowKey] = sum + uint64(result.Bps)
+ rowSums[rowKey] = sum + uint64(result.Xps)
}
rows := make([]string, len(rowKeys))
i := 0
@@ -205,7 +210,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
output.NinetyFivePercentile[idx] = s[j-1]
} else if index > 1 {
// We use the average of the two values. This
- // is good enough for bps
+ // is good enough for bps/pps
output.NinetyFivePercentile[idx] = (s[j-1] + s[j]) / 2
}
}
diff --git a/console/graph_test.go b/console/graph_test.go
index 416afe46..6b90b9a9 100644
--- a/console/graph_test.go
+++ b/console/graph_test.go
@@ -26,20 +26,42 @@ func TestGraphQuerySQL(t *testing.T) {
Expected string
}{
{
- Description: "no dimensions, no filters",
+ Description: "no dimensions, no filters, bps",
Input: graphQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{},
+ Units: "bps",
},
Expected: `
WITH
intDiv(864, {resolution})*{resolution} AS slot
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
- SUM(Bytes*SamplingRate*8/slot) AS bps,
+ SUM(Bytes*SamplingRate*8/slot) AS xps,
+ emptyArrayString() AS dimensions
+FROM {table}
+WHERE {timefilter}
+GROUP BY time, dimensions
+ORDER BY time`,
+ }, {
+ Description: "no dimensions, no filters, pps",
+ Input: graphQuery{
+ Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
+ End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
+ Points: 100,
+ Dimensions: []queryColumn{},
+ Filter: queryFilter{},
+ Units: "pps",
+ },
+ Expected: `
+WITH
+ intDiv(864, {resolution})*{resolution} AS slot
+SELECT
+ toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
+ SUM(Packets*SamplingRate/slot) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter}
@@ -53,13 +75,14 @@ ORDER BY time`,
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{"DstCountry = 'FR' AND SrcCountry = 'US'"},
+ Units: "bps",
},
Expected: `
WITH
intDiv(864, {resolution})*{resolution} AS slot
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
- SUM(Bytes*SamplingRate*8/slot) AS bps,
+ SUM(Bytes*SamplingRate*8/slot) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US')
@@ -77,6 +100,7 @@ ORDER BY time`,
queryColumnInIfProvider,
},
Filter: queryFilter{},
+ Units: "bps",
},
Expected: `
WITH
@@ -84,7 +108,7 @@ WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
- SUM(Bytes*SamplingRate*8/slot) AS bps,
+ SUM(Bytes*SamplingRate*8/slot) AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
@@ -119,7 +143,7 @@ func TestGraphHandler(t *testing.T) {
base := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
expectedSQL := []struct {
Time time.Time `ch:"time"`
- Bps float64 `ch:"bps"`
+ Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
{base, 1000, []string{"router1", "provider1"}},
@@ -207,6 +231,7 @@ func TestGraphHandler(t *testing.T) {
queryColumnInIfProvider,
},
Filter: queryFilter{"DstCountry = 'FR' AND SrcCountry = 'US'"},
+ Units: "bps",
}
payload := new(bytes.Buffer)
err = json.NewEncoder(payload).Encode(input)
diff --git a/console/sankey.go b/console/sankey.go
index f0263f49..eafe566c 100644
--- a/console/sankey.go
+++ b/console/sankey.go
@@ -19,6 +19,7 @@ type sankeyQuery struct {
Dimensions []queryColumn `json:"dimensions" binding:"required,min=2"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ...
+ Units string `json:"units" binding:"required,oneof=pps bps"`
}
// sankeyQueryToSQL converts a sankey query to an SQL request
@@ -41,10 +42,13 @@ func (query sankeyQuery) toSQL() (string, error) {
column.toSQLSelect()))
dimensions = append(dimensions, column.String())
}
- fields := []string{
- `SUM(Bytes*SamplingRate*8/range) AS bps`,
- fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")),
+ fields := []string{}
+ if query.Units == "pps" {
+ fields = append(fields, `SUM(Packets*SamplingRate/range) AS xps`)
+ } else {
+ fields = append(fields, `SUM(Bytes*SamplingRate*8/range) AS xps`)
}
+ fields = append(fields, fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")))
// With
with := []string{
@@ -65,14 +69,14 @@ SELECT
FROM {table}
WHERE %s
GROUP BY dimensions
-ORDER BY bps DESC`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
+ORDER BY xps DESC`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
return sqlQuery, nil
}
type sankeyHandlerOutput struct {
// Unprocessed data for table view
Rows [][]string `json:"rows"`
- Bps []int `json:"bps"` // row → bps
+ Xps []int `json:"xps"` // row → xps
// Processed data for sankey graph
Nodes []string `json:"nodes"`
Links []sankeyLink `json:"links"`
@@ -80,7 +84,7 @@ type sankeyHandlerOutput struct {
type sankeyLink struct {
Source string `json:"source"`
Target string `json:"target"`
- Bps int `json:"bps"`
+ Xps int `json:"xps"`
}
func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
@@ -108,7 +112,7 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
query.Start, query.End, resolution)
gc.Header("X-SQL-Query", sqlQuery)
results := []struct {
- Bps float64 `ch:"bps"`
+ Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{}
if err := c.d.ClickHouseDB.Conn.Select(ctx, &results, sqlQuery); err != nil {
@@ -120,7 +124,7 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
// Prepare output
output := sankeyHandlerOutput{
Rows: make([][]string, 0, len(results)),
- Bps: make([]int, 0, len(results)),
+ Xps: make([]int, 0, len(results)),
Nodes: make([]string, 0),
Links: make([]sankeyLink, 0),
}
@@ -137,32 +141,32 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
output.Nodes = append(output.Nodes, name)
}
}
- addLink := func(source, target string, bps int) {
+ addLink := func(source, target string, xps int) {
for idx, link := range output.Links {
if link.Source == source && link.Target == target {
- output.Links[idx].Bps += bps
+ output.Links[idx].Xps += xps
return
}
}
- output.Links = append(output.Links, sankeyLink{source, target, bps})
+ output.Links = append(output.Links, sankeyLink{source, target, xps})
}
for _, result := range results {
output.Rows = append(output.Rows, result.Dimensions)
- output.Bps = append(output.Bps, int(result.Bps))
+ output.Xps = append(output.Xps, int(result.Xps))
// Consider each pair of successive dimensions
for i := 0; i < len(query.Dimensions)-1; i++ {
dimension1 := completeName(result.Dimensions[i], i)
dimension2 := completeName(result.Dimensions[i+1], i+1)
addNode(dimension1)
addNode(dimension2)
- addLink(dimension1, dimension2, int(result.Bps))
+ addLink(dimension1, dimension2, int(result.Xps))
}
}
sort.Slice(output.Links, func(i, j int) bool {
- if output.Links[i].Bps == output.Links[j].Bps {
+ if output.Links[i].Xps == output.Links[j].Xps {
return output.Links[i].Source < output.Links[j].Source
}
- return output.Links[i].Bps > output.Links[j].Bps
+ return output.Links[i].Xps > output.Links[j].Xps
})
gc.JSON(http.StatusOK, output)
diff --git a/console/sankey_test.go b/console/sankey_test.go
index 18d6b9dd..f8821446 100644
--- a/console/sankey_test.go
+++ b/console/sankey_test.go
@@ -26,26 +26,49 @@ func TestSankeyQuerySQL(t *testing.T) {
Expected string
}{
{
- Description: "two dimensions, no filters",
+ Description: "two dimensions, no filters, bps",
Input: sankeyQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
Limit: 5,
Filter: queryFilter{},
+ Units: "bps",
},
Expected: `
WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT
- SUM(Bytes*SamplingRate*8/range) AS bps,
+ SUM(Bytes*SamplingRate*8/range) AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY dimensions
-ORDER BY bps DESC`,
+ORDER BY xps DESC`,
+ }, {
+ Description: "two dimensions, no filters, pps",
+ Input: sankeyQuery{
+ Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
+ End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
+ Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
+ Limit: 5,
+ Filter: queryFilter{},
+ Units: "pps",
+ },
+ Expected: `
+WITH
+ (SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range,
+ rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
+SELECT
+ SUM(Packets*SamplingRate/range) AS xps,
+ [if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
+ if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
+FROM {table}
+WHERE {timefilter}
+GROUP BY dimensions
+ORDER BY xps DESC`,
}, {
Description: "two dimensions, with filter",
Input: sankeyQuery{
@@ -54,19 +77,20 @@ ORDER BY bps DESC`,
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
Limit: 10,
Filter: queryFilter{"DstCountry = 'FR'"},
+ Units: "bps",
},
Expected: `
WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter} AND (DstCountry = 'FR')) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} AND (DstCountry = 'FR') GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 10)
SELECT
- SUM(Bytes*SamplingRate*8/range) AS bps,
+ SUM(Bytes*SamplingRate*8/range) AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR')
GROUP BY dimensions
-ORDER BY bps DESC`,
+ORDER BY xps DESC`,
},
}
for _, tc := range cases {
@@ -94,7 +118,7 @@ func TestSankeyHandler(t *testing.T) {
helpers.StartStop(t, c)
expectedSQL := []struct {
- Bps float64 `ch:"bps"`
+ Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
// [(random.randrange(100, 10000), x)
@@ -148,7 +172,7 @@ func TestSankeyHandler(t *testing.T) {
{"Other", "Other", "Other"},
{"Other", "provider1", "router1"},
},
- "bps": []int{
+ "xps": []int{
9677,
9472,
7593,
@@ -186,30 +210,30 @@ func TestSankeyHandler(t *testing.T) {
"router2",
},
"links": []gin.H{
- {"source": "provider1", "target": "Other ExporterName", "bps": 9472 + 7234 + 6006 + 5988},
- {"source": "Other InIfProvider", "target": "router1", "bps": 9677 + 3623 + 2915 + 1360},
- {"source": "AS100", "target": "Other InIfProvider", "bps": 9677},
- {"source": "AS300", "target": "provider1", "bps": 9472},
- {"source": "provider3", "target": "Other ExporterName", "bps": 4675 + 3999},
- {"source": "AS100", "target": "provider1", "bps": 6006 + 2623},
- {"source": "AS100", "target": "provider3", "bps": 3999 + 3978},
- {"source": "provider3", "target": "router2", "bps": 3978 + 3080 + 717},
- {"source": "AS300", "target": "provider2", "bps": 7593},
- {"source": "provider2", "target": "router1", "bps": 7593},
- {"source": "AS200", "target": "provider1", "bps": 7234},
- {"source": "Other SrcAS", "target": "provider1", "bps": 5988 + 159},
- {"source": "AS200", "target": "Other InIfProvider", "bps": 4348 + 1360},
- {"source": "AS200", "target": "provider3", "bps": 4675 + 717},
- {"source": "Other InIfProvider", "target": "router2", "bps": 4348},
- {"source": "Other SrcAS", "target": "Other InIfProvider", "bps": 3623 + 621},
- {"source": "AS300", "target": "Other InIfProvider", "bps": 2915 + 975},
- {"source": "AS300", "target": "provider3", "bps": 3080},
- {"source": "provider1", "target": "router1", "bps": 2623 + 159},
- {"source": "AS200", "target": "provider2", "bps": 2482},
- {"source": "provider2", "target": "router2", "bps": 2482},
- {"source": "AS100", "target": "provider2", "bps": 2234},
- {"source": "provider2", "target": "Other ExporterName", "bps": 2234},
- {"source": "Other InIfProvider", "target": "Other ExporterName", "bps": 975 + 621},
+ {"source": "provider1", "target": "Other ExporterName", "xps": 9472 + 7234 + 6006 + 5988},
+ {"source": "Other InIfProvider", "target": "router1", "xps": 9677 + 3623 + 2915 + 1360},
+ {"source": "AS100", "target": "Other InIfProvider", "xps": 9677},
+ {"source": "AS300", "target": "provider1", "xps": 9472},
+ {"source": "provider3", "target": "Other ExporterName", "xps": 4675 + 3999},
+ {"source": "AS100", "target": "provider1", "xps": 6006 + 2623},
+ {"source": "AS100", "target": "provider3", "xps": 3999 + 3978},
+ {"source": "provider3", "target": "router2", "xps": 3978 + 3080 + 717},
+ {"source": "AS300", "target": "provider2", "xps": 7593},
+ {"source": "provider2", "target": "router1", "xps": 7593},
+ {"source": "AS200", "target": "provider1", "xps": 7234},
+ {"source": "Other SrcAS", "target": "provider1", "xps": 5988 + 159},
+ {"source": "AS200", "target": "Other InIfProvider", "xps": 4348 + 1360},
+ {"source": "AS200", "target": "provider3", "xps": 4675 + 717},
+ {"source": "Other InIfProvider", "target": "router2", "xps": 4348},
+ {"source": "Other SrcAS", "target": "Other InIfProvider", "xps": 3623 + 621},
+ {"source": "AS300", "target": "Other InIfProvider", "xps": 2915 + 975},
+ {"source": "AS300", "target": "provider3", "xps": 3080},
+ {"source": "provider1", "target": "router1", "xps": 2623 + 159},
+ {"source": "AS200", "target": "provider2", "xps": 2482},
+ {"source": "provider2", "target": "router2", "xps": 2482},
+ {"source": "AS100", "target": "provider2", "xps": 2234},
+ {"source": "provider2", "target": "Other ExporterName", "xps": 2234},
+ {"source": "Other InIfProvider", "target": "Other ExporterName", "xps": 975 + 621},
},
}
mockConn.EXPECT().
@@ -223,6 +247,7 @@ func TestSankeyHandler(t *testing.T) {
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnInIfProvider, queryColumnExporterName},
Limit: 10,
Filter: queryFilter{"DstCountry = 'FR'"},
+ Units: "bps",
}
payload := new(bytes.Buffer)
err = json.NewEncoder(payload).Encode(input)