console: ability to get values in pps instead of bps

Vincent Bernat
2022-05-24 20:34:23 +02:00
parent eb75dc40ac
commit be444652f0
14 changed files with 187 additions and 75 deletions

View File

@@ -41,6 +41,5 @@ const props = defineProps({
});
import { v4 as uuidv4 } from "uuid";
const id = uuidv4();
</script>

View File

@@ -0,0 +1,36 @@
<template>
<div>
<label :for="id" class="relative inline-flex cursor-pointer items-center">
<input
:id="id"
type="checkbox"
:checked="modelValue"
class="peer sr-only"
@change="$emit('update:modelValue', $event.target.checked)"
/>
<div
class="peer h-5 w-9 rounded-full bg-gray-200 after:absolute after:top-[2px] after:left-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:bg-blue-600 peer-checked:after:translate-x-full peer-checked:after:border-white peer-focus:outline-none peer-focus:ring-4 peer-focus:ring-blue-300 dark:border-gray-600 dark:bg-gray-700 dark:peer-focus:ring-blue-800"
></div>
<span class="ml-3 text-sm font-medium text-gray-900 dark:text-gray-300">
{{ label }}
</span>
</label>
</div>
</template>
<script setup>
defineProps({
label: {
type: String,
required: true,
},
modelValue: {
type: Boolean,
required: true,
},
});
defineEmits(["update:modelValue"]);
import { v4 as uuidv4 } from "uuid";
const id = uuidv4();
</script>

View File

@@ -1,4 +1,4 @@
export function formatBps(value) {
export function formatXps(value) {
const suffixes = ["", "K", "M", "G", "T"];
let idx = 0;
while (value >= 1000 && idx < suffixes.length) {

View File

@@ -21,12 +21,12 @@ import { CanvasRenderer } from "echarts/renderers";
import { LineChart } from "echarts/charts";
import { TooltipComponent, GridComponent } from "echarts/components";
import VChart from "vue-echarts";
import { dataColor, formatBps } from "../../utils";
import { dataColor, formatXps } from "../../utils";
const { isDark } = inject("theme");
use([CanvasRenderer, LineChart, TooltipComponent, GridComponent]);
const formatGbps = (value) => formatBps(value * 1_000_000_000);
const formatGbps = (value) => formatXps(value * 1_000_000_000);
const url = computed(() => `/api/v0/console/widget/graph?${props.refresh}`);
const { data } = useFetch(url, { refetch: true }).get().json();

View File

@@ -76,6 +76,7 @@ const defaultState = () => ({
dimensions: ["SrcAS", "ExporterName"],
limit: 10,
filter: "InIfBoundary = external",
units: "bps",
});
const state = ref({});
@@ -149,6 +150,7 @@ const { data, isFetching, aborted, abort, canAbort, error } = useFetch("", {
start: payload.value.start,
end: payload.value.end,
graphType: payload.value.graphType,
units: payload.value.units,
};
// Also update URL.

View File

@@ -11,7 +11,7 @@ const props = defineProps({
});
import { inject, computed } from "vue";
import { formatBps, dataColor, dataColorGrey } from "@/utils";
import { formatXps, dataColor, dataColorGrey } from "@/utils";
const { isDark } = inject("theme");
import { use } from "echarts/core";
@@ -25,7 +25,7 @@ use([CanvasRenderer, SankeyChart, TooltipComponent]);
const graph = computed(() => {
const theme = isDark.value ? "dark" : "light";
const data = props.data || {};
if (!data.bps) return {};
if (!data.xps) return {};
let greyNodes = 0;
let colorNodes = 0;
return {
@@ -34,7 +34,7 @@ const graph = computed(() => {
confine: true,
trigger: "item",
triggerOn: "mousemove",
valueFormatter: formatBps,
valueFormatter: formatXps,
},
series: [
{
@@ -52,10 +52,10 @@ const graph = computed(() => {
: dataColor(colorNodes++, false, theme),
},
})),
links: data.links.map(({ source, target, bps }) => ({
links: data.links.map(({ source, target, xps }) => ({
source,
target,
value: bps,
value: xps,
})),
label: {
formatter: "{b}",

View File

@@ -21,7 +21,7 @@ const props = defineProps({
const emit = defineEmits(["updateTimeRange"]);
import { ref, watch, inject, computed, onMounted, nextTick } from "vue";
import { formatBps, dataColor, dataColorGrey } from "@/utils";
import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
@@ -66,7 +66,7 @@ const commonGraph = {
type: "cross",
label: { backgroundColor: "#6a7985" },
},
valueFormatter: formatBps,
valueFormatter: formatXps,
},
};
const graph = computed(() => {
@@ -93,9 +93,9 @@ const graph = computed(() => {
yAxis = {
type: "value",
min: 0,
axisLabel: { formatter: formatBps },
axisLabel: { formatter: formatXps },
axisPointer: {
label: { formatter: ({ value }) => formatBps(value) },
label: { formatter: ({ value }) => formatXps(value) },
},
};

View File

@@ -64,7 +64,7 @@ const props = defineProps({
defineEmits(["highlighted"]);
import { computed, inject } from "vue";
import { formatBps, dataColor, dataColorGrey } from "@/utils";
import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
const { stacked, lines, grid, sankey } = graphTypes;
@@ -104,7 +104,7 @@ const table = computed(() => {
data.average[idx],
data["95th"][idx],
].map((d) => ({
value: formatBps(d) + "bps",
value: formatXps(d) + data.units,
classNames: "text-right tabular-nums",
})),
],
@@ -129,7 +129,7 @@ const table = computed(() => {
...rows.map((r) => ({ value: r })),
// Average
{
value: formatBps(data.bps[idx]) + "bps",
value: formatXps(data.xps[idx]) + data.units,
classNames: "text-right tabular-nums",
},
],

View File

@@ -52,7 +52,11 @@
class="mb-2 font-mono"
autosize
/>
<div class="flex flex-col items-end">
<div class="flex flex-row items-start justify-between">
<InputToggle
v-model="pps"
:label="'Unit: ' + (pps ? 'ᵖ⁄ₛ' : 'ᵇ⁄ₛ')"
/>
<InputButton
attr-type="submit"
:disabled="hasErrors && !loading"
@@ -88,6 +92,7 @@ import InputDimensions from "@/components/InputDimensions.vue";
import InputTextarea from "@/components/InputTextarea.vue";
import InputListBox from "@/components/InputListBox.vue";
import InputButton from "@/components/InputButton.vue";
import InputToggle from "@/components/InputToggle.vue";
import SectionLabel from "./SectionLabel.vue";
import GraphIcon from "./GraphIcon.vue";
import { graphTypes } from "./constants";
@@ -104,6 +109,7 @@ const graphType = ref(graphTypeList[0]);
const timeRange = ref({});
const dimensions = ref([]);
const filter = ref("");
const pps = ref(false);
const options = computed(() => ({
// Common to all graph types
@@ -113,6 +119,7 @@ const options = computed(() => ({
dimensions: dimensions.value.selected,
limit: dimensions.value.limit,
filter: filter.value,
units: pps.value ? "pps" : "bps",
// Only for time series
...([stacked, lines].includes(graphType.value.name) && { points: 200 }),
...(graphType.value.name === grid && { points: 50 }),
@@ -135,12 +142,14 @@ watch(
limit,
points /* eslint-disable-line no-unused-vars */,
filter: _filter,
units,
} = modelValue;
graphType.value =
graphTypeList.find(({ name }) => name === _graphType) || graphTypeList[0];
timeRange.value = { start, end };
dimensions.value = { selected: [...(_dimensions || [])], limit };
filter.value = _filter;
pps.value = units == "pps";
},
{ immediate: true, deep: true }
);

View File

@@ -22,6 +22,12 @@
<FilterIcon class="inline h-4 px-1 align-middle" />
<span class="align-middle">{{ request.filter }}</span>
</span>
<span v-if="request.units">
<HashtagIcon class="inline h-4 px-1 align-middle" />
<span class="align-middle">{{
{ bps: "ᵇ⁄ₛ", pps: "ᵖ⁄ₛ" }[request.units] || request.units
}}</span>
</span>
</div>
<div class="hidden h-8 lg:block"></div>
</template>
@@ -41,6 +47,7 @@ import {
AdjustmentsIcon,
ArrowUpIcon,
FilterIcon,
HashtagIcon,
} from "@heroicons/vue/solid";
import { Date as SugarDate } from "sugar-date";

View File

@@ -20,6 +20,7 @@ type graphQuery struct {
Dimensions []queryColumn `json:"dimensions"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ...
Units string `json:"units" binding:"required,oneof=pps bps"`
}
// graphQueryToSQL converts a graph query to an SQL request
@@ -37,7 +38,11 @@ func (query graphQuery) toSQL() (string, error) {
// Select
fields := []string{
`toStartOfInterval(TimeReceived, INTERVAL slot second) AS time`,
`SUM(Bytes*SamplingRate*8/slot) AS bps`,
}
if query.Units == "pps" {
fields = append(fields, `SUM(Packets*SamplingRate/slot) AS xps`)
} else {
fields = append(fields, `SUM(Bytes*SamplingRate*8/slot) AS xps`)
}
selectFields := []string{}
dimensions := []string{}
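
As an aside (not part of the commit), the two aggregation expressions only differ in what gets summed per slot: bytes are scaled by the sampling rate and multiplied by 8 to obtain bits, while packets are only scaled by the sampling rate; both are divided by the slot length to yield a per-second rate. A minimal standalone Go sketch with made-up numbers (1500-byte packet, 1-in-1000 sampling, 5-second slot) illustrating the arithmetic:

package main

import "fmt"

// contribution mirrors the two ClickHouse expressions above and returns
// what a single sampled flow adds to a time slot, in bits/s or packets/s.
func contribution(bytes, packets, samplingRate, slotSeconds uint64, pps bool) float64 {
	if pps {
		// SUM(Packets*SamplingRate/slot)
		return float64(packets*samplingRate) / float64(slotSeconds)
	}
	// SUM(Bytes*SamplingRate*8/slot)
	return float64(bytes*samplingRate*8) / float64(slotSeconds)
}

func main() {
	fmt.Println(contribution(1500, 1, 1000, 5, false)) // 2.4e+06 bits/s
	fmt.Println(contribution(1500, 1, 1000, 5, true))  // 200 packets/s
}
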
@@ -83,8 +88,8 @@ ORDER BY time`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
type graphHandlerOutput struct {
Rows [][]string `json:"rows"`
Time []time.Time `json:"t"`
Points [][]int `json:"points"` // t → row → bps
Average []int `json:"average"` // row → bps
Points [][]int `json:"points"` // t → row → xps
Average []int `json:"average"` // row → xps
Min []int `json:"min"`
Max []int `json:"max"`
NinetyFivePercentile []int `json:"95th"`
@@ -113,7 +118,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
results := []struct {
Time time.Time `ch:"time"`
Bps float64 `ch:"bps"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{}
if err := c.d.ClickHouseDB.Conn.Select(ctx, &results, sqlQuery); err != nil {
@@ -150,9 +155,9 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
row = make([]int, len(output.Time))
rowValues[rowKey] = row
}
rowValues[rowKey][idx] = int(result.Bps)
rowValues[rowKey][idx] = int(result.Xps)
sum, _ := rowSums[rowKey]
rowSums[rowKey] = sum + uint64(result.Bps)
rowSums[rowKey] = sum + uint64(result.Xps)
}
rows := make([]string, len(rowKeys))
i := 0
@@ -205,7 +210,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
output.NinetyFivePercentile[idx] = s[j-1]
} else if index > 1 {
// We use the average of the two values. This
// is good enough for bps
// is good enough for bps/pps
output.NinetyFivePercentile[idx] = (s[j-1] + s[j]) / 2
}
}
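
For context (not part of the commit), the snippet above takes the value at the 95% rank of the sorted per-slot values and, when the rank falls between two samples, averages the neighbours, which the comment deems good enough for bps/pps. A standalone Go sketch of that general approach; the exact rank rounding here is an illustrative assumption, not the project's implementation:

package main

import (
	"fmt"
	"sort"
)

// percentile95 returns an approximate 95th percentile: the sample at the
// 95% rank, or the average of the two surrounding samples when the rank
// does not fall exactly on one of them.
func percentile95(values []int) int {
	if len(values) == 0 {
		return 0
	}
	s := append([]int(nil), values...)
	sort.Ints(s)
	n := len(s)
	rank := 95 * n // rank scaled by 100
	j := rank / 100
	switch {
	case rank%100 == 0:
		return s[j-1] // the rank lands exactly on a sample
	case j >= 1 && j < n:
		return (s[j-1] + s[j]) / 2 // average of the two surrounding values
	default:
		return s[n-1]
	}
}

func main() {
	fmt.Println(percentile95([]int{10, 20, 30, 40, 50, 60, 70, 80, 90, 100})) // 95
}
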

View File

@@ -26,20 +26,42 @@ func TestGraphQuerySQL(t *testing.T) {
Expected string
}{
{
Description: "no dimensions, no filters",
Description: "no dimensions, no filters, bps",
Input: graphQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{},
Units: "bps",
},
Expected: `
WITH
intDiv(864, {resolution})*{resolution} AS slot
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
SUM(Bytes*SamplingRate*8/slot) AS bps,
SUM(Bytes*SamplingRate*8/slot) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY time, dimensions
ORDER BY time`,
}, {
Description: "no dimensions, no filters, pps",
Input: graphQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{},
Units: "pps",
},
Expected: `
WITH
intDiv(864, {resolution})*{resolution} AS slot
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
SUM(Packets*SamplingRate/slot) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter}
@@ -53,13 +75,14 @@ ORDER BY time`,
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{"DstCountry = 'FR' AND SrcCountry = 'US'"},
Units: "bps",
},
Expected: `
WITH
intDiv(864, {resolution})*{resolution} AS slot
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
SUM(Bytes*SamplingRate*8/slot) AS bps,
SUM(Bytes*SamplingRate*8/slot) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US')
@@ -77,6 +100,7 @@ ORDER BY time`,
queryColumnInIfProvider,
},
Filter: queryFilter{},
Units: "bps",
},
Expected: `
WITH
@@ -84,7 +108,7 @@ WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT
toStartOfInterval(TimeReceived, INTERVAL slot second) AS time,
SUM(Bytes*SamplingRate*8/slot) AS bps,
SUM(Bytes*SamplingRate*8/slot) AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
@@ -119,7 +143,7 @@ func TestGraphHandler(t *testing.T) {
base := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
expectedSQL := []struct {
Time time.Time `ch:"time"`
Bps float64 `ch:"bps"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
{base, 1000, []string{"router1", "provider1"}},
@@ -207,6 +231,7 @@ func TestGraphHandler(t *testing.T) {
queryColumnInIfProvider,
},
Filter: queryFilter{"DstCountry = 'FR' AND SrcCountry = 'US'"},
Units: "bps",
}
payload := new(bytes.Buffer)
err = json.NewEncoder(payload).Encode(input)

View File

@@ -19,6 +19,7 @@ type sankeyQuery struct {
Dimensions []queryColumn `json:"dimensions" binding:"required,min=2"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ...
Units string `json:"units" binding:"required,oneof=pps bps"`
}
// sankeyQueryToSQL converts a sankey query to an SQL request
@@ -41,10 +42,13 @@ func (query sankeyQuery) toSQL() (string, error) {
column.toSQLSelect()))
dimensions = append(dimensions, column.String())
}
fields := []string{
`SUM(Bytes*SamplingRate*8/range) AS bps`,
fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")),
fields := []string{}
if query.Units == "pps" {
fields = append(fields, `SUM(Packets*SamplingRate/range) AS xps`)
} else {
fields = append(fields, `SUM(Bytes*SamplingRate*8/range) AS xps`)
}
fields = append(fields, fmt.Sprintf("[%s] AS dimensions", strings.Join(arrayFields, ",\n ")))
// With
with := []string{
@@ -65,14 +69,14 @@ SELECT
FROM {table}
WHERE %s
GROUP BY dimensions
ORDER BY bps DESC`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
ORDER BY xps DESC`, strings.Join(with, ",\n "), strings.Join(fields, ",\n "), where)
return sqlQuery, nil
}
type sankeyHandlerOutput struct {
// Unprocessed data for table view
Rows [][]string `json:"rows"`
Bps []int `json:"bps"` // row → bps
Xps []int `json:"xps"` // row → xps
// Processed data for sankey graph
Nodes []string `json:"nodes"`
Links []sankeyLink `json:"links"`
@@ -80,7 +84,7 @@ type sankeyHandlerOutput struct {
type sankeyLink struct {
Source string `json:"source"`
Target string `json:"target"`
Bps int `json:"bps"`
Xps int `json:"xps"`
}
func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
@@ -108,7 +112,7 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
query.Start, query.End, resolution)
gc.Header("X-SQL-Query", sqlQuery)
results := []struct {
Bps float64 `ch:"bps"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{}
if err := c.d.ClickHouseDB.Conn.Select(ctx, &results, sqlQuery); err != nil {
@@ -120,7 +124,7 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
// Prepare output
output := sankeyHandlerOutput{
Rows: make([][]string, 0, len(results)),
Bps: make([]int, 0, len(results)),
Xps: make([]int, 0, len(results)),
Nodes: make([]string, 0),
Links: make([]sankeyLink, 0),
}
@@ -137,32 +141,32 @@ func (c *Component) sankeyHandlerFunc(gc *gin.Context) {
output.Nodes = append(output.Nodes, name)
}
}
addLink := func(source, target string, bps int) {
addLink := func(source, target string, xps int) {
for idx, link := range output.Links {
if link.Source == source && link.Target == target {
output.Links[idx].Bps += bps
output.Links[idx].Xps += xps
return
}
}
output.Links = append(output.Links, sankeyLink{source, target, bps})
output.Links = append(output.Links, sankeyLink{source, target, xps})
}
for _, result := range results {
output.Rows = append(output.Rows, result.Dimensions)
output.Bps = append(output.Bps, int(result.Bps))
output.Xps = append(output.Xps, int(result.Xps))
// Consider each pair of successive dimensions
for i := 0; i < len(query.Dimensions)-1; i++ {
dimension1 := completeName(result.Dimensions[i], i)
dimension2 := completeName(result.Dimensions[i+1], i+1)
addNode(dimension1)
addNode(dimension2)
addLink(dimension1, dimension2, int(result.Bps))
addLink(dimension1, dimension2, int(result.Xps))
}
}
sort.Slice(output.Links, func(i, j int) bool {
if output.Links[i].Bps == output.Links[j].Bps {
if output.Links[i].Xps == output.Links[j].Xps {
return output.Links[i].Source < output.Links[j].Source
}
return output.Links[i].Bps > output.Links[j].Bps
return output.Links[i].Xps > output.Links[j].Xps
})
gc.JSON(http.StatusOK, output)
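
To make the pairwise folding concrete (an illustration, not part of the commit), each result row contributes one link per pair of successive dimensions, and links sharing the same source and target have their rates summed, as the addLink helper above does. A standalone Go sketch with hypothetical dimension values:

package main

import "fmt"

type link struct {
	Source, Target string
	Xps            int
}

func main() {
	// Two result rows with three dimensions each, and their rates.
	rows := [][]string{
		{"AS100", "provider1", "router1"},
		{"AS100", "provider1", "router2"},
	}
	xps := []int{300, 200}

	var links []link
	addLink := func(source, target string, v int) {
		for i := range links {
			if links[i].Source == source && links[i].Target == target {
				links[i].Xps += v // merge duplicate links
				return
			}
		}
		links = append(links, link{source, target, v})
	}
	for i, row := range rows {
		// Each pair of successive dimensions becomes one link.
		for j := 0; j < len(row)-1; j++ {
			addLink(row[j], row[j+1], xps[i])
		}
	}
	fmt.Println(links)
	// [{AS100 provider1 500} {provider1 router1 300} {provider1 router2 200}]
}
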

View File

@@ -26,26 +26,49 @@ func TestSankeyQuerySQL(t *testing.T) {
Expected string
}{
{
Description: "two dimensions, no filters",
Description: "two dimensions, no filters, bps",
Input: sankeyQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
Limit: 5,
Filter: queryFilter{},
Units: "bps",
},
Expected: `
WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT
SUM(Bytes*SamplingRate*8/range) AS bps,
SUM(Bytes*SamplingRate*8/range) AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY dimensions
ORDER BY bps DESC`,
ORDER BY xps DESC`,
}, {
Description: "two dimensions, no filters, pps",
Input: sankeyQuery{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
Limit: 5,
Filter: queryFilter{},
Units: "pps",
},
Expected: `
WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter}) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 5)
SELECT
SUM(Packets*SamplingRate/range) AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY dimensions
ORDER BY xps DESC`,
}, {
Description: "two dimensions, with filter",
Input: sankeyQuery{
@@ -54,19 +77,20 @@ ORDER BY bps DESC`,
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnExporterName},
Limit: 10,
Filter: queryFilter{"DstCountry = 'FR'"},
Units: "bps",
},
Expected: `
WITH
(SELECT MAX(TimeReceived) - MIN(TimeReceived) FROM {table} WHERE {timefilter} AND (DstCountry = 'FR')) AS range,
rows AS (SELECT SrcAS, ExporterName FROM {table} WHERE {timefilter} AND (DstCountry = 'FR') GROUP BY SrcAS, ExporterName ORDER BY SUM(Bytes) DESC LIMIT 10)
SELECT
SUM(Bytes*SamplingRate*8/range) AS bps,
SUM(Bytes*SamplingRate*8/range) AS xps,
[if(SrcAS IN (SELECT SrcAS FROM rows), concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???')), 'Other'),
if(ExporterName IN (SELECT ExporterName FROM rows), ExporterName, 'Other')] AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR')
GROUP BY dimensions
ORDER BY bps DESC`,
ORDER BY xps DESC`,
},
}
for _, tc := range cases {
@@ -94,7 +118,7 @@ func TestSankeyHandler(t *testing.T) {
helpers.StartStop(t, c)
expectedSQL := []struct {
Bps float64 `ch:"bps"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
// [(random.randrange(100, 10000), x)
@@ -148,7 +172,7 @@ func TestSankeyHandler(t *testing.T) {
{"Other", "Other", "Other"},
{"Other", "provider1", "router1"},
},
"bps": []int{
"xps": []int{
9677,
9472,
7593,
@@ -186,30 +210,30 @@ func TestSankeyHandler(t *testing.T) {
"router2",
},
"links": []gin.H{
{"source": "provider1", "target": "Other ExporterName", "bps": 9472 + 7234 + 6006 + 5988},
{"source": "Other InIfProvider", "target": "router1", "bps": 9677 + 3623 + 2915 + 1360},
{"source": "AS100", "target": "Other InIfProvider", "bps": 9677},
{"source": "AS300", "target": "provider1", "bps": 9472},
{"source": "provider3", "target": "Other ExporterName", "bps": 4675 + 3999},
{"source": "AS100", "target": "provider1", "bps": 6006 + 2623},
{"source": "AS100", "target": "provider3", "bps": 3999 + 3978},
{"source": "provider3", "target": "router2", "bps": 3978 + 3080 + 717},
{"source": "AS300", "target": "provider2", "bps": 7593},
{"source": "provider2", "target": "router1", "bps": 7593},
{"source": "AS200", "target": "provider1", "bps": 7234},
{"source": "Other SrcAS", "target": "provider1", "bps": 5988 + 159},
{"source": "AS200", "target": "Other InIfProvider", "bps": 4348 + 1360},
{"source": "AS200", "target": "provider3", "bps": 4675 + 717},
{"source": "Other InIfProvider", "target": "router2", "bps": 4348},
{"source": "Other SrcAS", "target": "Other InIfProvider", "bps": 3623 + 621},
{"source": "AS300", "target": "Other InIfProvider", "bps": 2915 + 975},
{"source": "AS300", "target": "provider3", "bps": 3080},
{"source": "provider1", "target": "router1", "bps": 2623 + 159},
{"source": "AS200", "target": "provider2", "bps": 2482},
{"source": "provider2", "target": "router2", "bps": 2482},
{"source": "AS100", "target": "provider2", "bps": 2234},
{"source": "provider2", "target": "Other ExporterName", "bps": 2234},
{"source": "Other InIfProvider", "target": "Other ExporterName", "bps": 975 + 621},
{"source": "provider1", "target": "Other ExporterName", "xps": 9472 + 7234 + 6006 + 5988},
{"source": "Other InIfProvider", "target": "router1", "xps": 9677 + 3623 + 2915 + 1360},
{"source": "AS100", "target": "Other InIfProvider", "xps": 9677},
{"source": "AS300", "target": "provider1", "xps": 9472},
{"source": "provider3", "target": "Other ExporterName", "xps": 4675 + 3999},
{"source": "AS100", "target": "provider1", "xps": 6006 + 2623},
{"source": "AS100", "target": "provider3", "xps": 3999 + 3978},
{"source": "provider3", "target": "router2", "xps": 3978 + 3080 + 717},
{"source": "AS300", "target": "provider2", "xps": 7593},
{"source": "provider2", "target": "router1", "xps": 7593},
{"source": "AS200", "target": "provider1", "xps": 7234},
{"source": "Other SrcAS", "target": "provider1", "xps": 5988 + 159},
{"source": "AS200", "target": "Other InIfProvider", "xps": 4348 + 1360},
{"source": "AS200", "target": "provider3", "xps": 4675 + 717},
{"source": "Other InIfProvider", "target": "router2", "xps": 4348},
{"source": "Other SrcAS", "target": "Other InIfProvider", "xps": 3623 + 621},
{"source": "AS300", "target": "Other InIfProvider", "xps": 2915 + 975},
{"source": "AS300", "target": "provider3", "xps": 3080},
{"source": "provider1", "target": "router1", "xps": 2623 + 159},
{"source": "AS200", "target": "provider2", "xps": 2482},
{"source": "provider2", "target": "router2", "xps": 2482},
{"source": "AS100", "target": "provider2", "xps": 2234},
{"source": "provider2", "target": "Other ExporterName", "xps": 2234},
{"source": "Other InIfProvider", "target": "Other ExporterName", "xps": 975 + 621},
},
}
mockConn.EXPECT().
@@ -223,6 +247,7 @@ func TestSankeyHandler(t *testing.T) {
Dimensions: []queryColumn{queryColumnSrcAS, queryColumnInIfProvider, queryColumnExporterName},
Limit: 10,
Filter: queryFilter{"DstCountry = 'FR'"},
Units: "bps",
}
payload := new(bytes.Buffer)
err = json.NewEncoder(payload).Encode(input)