console: add a bidirectional mode for graphs

It allows one to also display flows in the opposite direction.
This commit is contained in:
Vincent Bernat
2022-08-07 23:15:18 +02:00
parent e89a188d6d
commit 50614cef5b
15 changed files with 978 additions and 425 deletions

View File

@@ -88,7 +88,7 @@ console/filter/parser.go: console/filter/parser.peg | $(PIGEON) ; $(info $(M) ge
console/frontend/node_modules: console/frontend/package.json console/frontend/package-lock.json
console/frontend/node_modules: ; $(info $(M) fetching node modules)
$Q (cd console/frontend ; npm ci --silent --no-audit --no-fund) && touch $@
console/frontend/data/fields.json: console/query.go ; $(info $(M) generate list of selectable fields)
console/frontend/data/fields.json: console/query_consts.go ; $(info $(M) generate list of selectable fields)
$Q sed -En -e 's/^\tqueryColumn([a-zA-Z]+)( .*|$$)/ "\1"/p' $< \
| sed -E -e '$$ ! s/$$/,/' -e '1s/^ */[/' -e '$$s/$$/]/' > $@
$Q test -s $@

View File

@@ -208,28 +208,28 @@ demo-exporter:
out-if-index: [20, 21]
peak-hour: 16h
multiplier: 3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.2
<<: [*from-v4-facebook, *to-v4-customers, *http-src]
- per-second: 0.2
in-if-index: [10, 11]
out-if-index: [20, 21]
peak-hour: 16h
multiplier: 3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.2
<<: [*from-v4-facebook, *to-v4-customers, *quic-src]
- per-second: 1.8
in-if-index: [10, 11]
out-if-index: [20, 21]
peak-hour: 18h
multiplier: 3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.2
<<: [*from-v6-facebook, *to-v6-customers, *http-src]
- per-second: 0.2
in-if-index: [10, 11]
out-if-index: [20, 21]
peak-hour: 20h
multiplier: 3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.2
<<: [*from-v6-facebook, *to-v6-customers, *quic-src]
# Netflix
- per-second: 0.2
@@ -252,7 +252,7 @@ demo-exporter:
out-if-index: [20, 21]
peak-hour: 21h
multiplier: 17
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.4
<<: [*from-v4-twitch, *to-v4-customers, *http-src]
# Akamai
- per-second: 0.14
@@ -290,14 +290,14 @@ demo-exporter:
out-if-index: [20, 21]
peak-hour: 18h
multiplier: 1.3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.15
<<: [*from-v4-amazon, *to-v4-customers, *http-src]
- per-second: 0.1
in-if-index: 10
out-if-index: [20, 21]
peak-hour: 18h
multiplier: 1.3
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.15
<<: [*from-v6-amazon, *to-v6-customers, *http-src]
# Random SSH
@@ -306,7 +306,7 @@ demo-exporter:
out-if-index: [20, 21]
peak-hour: 15h
multiplier: 1.2
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.5
<<: [*from-v4-renater, *to-v4-customers, *ssh-src]
# Servers
- per-second: 0.1
@@ -314,14 +314,14 @@ demo-exporter:
out-if-index: [20, 21]
peak-hour: 15h
multiplier: 1.2
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.2
<<: [*from-v4-renater, *to-v4-servers, *ssh-dst]
- per-second: 0.2
in-if-index: 10
out-if-index: [20, 21]
peak-hour: 15h
multiplier: 1.2
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.15
<<: [*from-v4-random, *to-v4-servers, *http-dst]
- per-second: 0.2
in-if-index: 10
@@ -340,7 +340,7 @@ demo-exporter:
multiplier: 1
protocol: [tcp, udp]
srcport: [80, 443, 22, 25461, 8080, 4500, 993, 8801]
reverse-direction-ratio: 0.1
reverse-direction-ratio: 0.25
<<: [*from-v4-random, *to-v4-customers]
- <<: [*from-v6-random, *to-v6-customers, *random-flow]
- snmp:

View File

@@ -13,6 +13,7 @@ identified with a specific icon:
## Unreleased
- 🌱 *console*: add a bidirectional mode for graphs to also display flows in the opposite direction
- 🌱 *demo-exporter*: add a setting to automatically generate a reverse flow
- 🌱 *docker-compose*: loosen required privileges for `conntrack-fixer`

View File

@@ -17,23 +17,28 @@ type Meta struct {
MainTableRequired bool
}
func (c *current) reverseDirection(direct string) string {
// ReverseColumnDirection reverts the direction of a provided column name.
// Src↔Dst and In↔Out prefixes are swapped; any other name is returned
// unchanged.
func ReverseColumnDirection(name string) string {
	switch {
	case strings.HasPrefix(name, "Src"):
		return "Dst" + strings.TrimPrefix(name, "Src")
	case strings.HasPrefix(name, "Dst"):
		return "Src" + strings.TrimPrefix(name, "Dst")
	case strings.HasPrefix(name, "In"):
		return "Out" + strings.TrimPrefix(name, "In")
	case strings.HasPrefix(name, "Out"):
		return "In" + strings.TrimPrefix(name, "Out")
	}
	return name
}
func (c *current) reverseColumnDirection(name string) string {
if c.globalStore["meta"].(*Meta).ReverseDirection {
if strings.HasPrefix(direct, "Src") {
return "Dst" + direct[3:]
return ReverseColumnDirection(name)
}
if strings.HasPrefix(direct, "Dst") {
return "Src" + direct[3:]
}
if strings.HasPrefix(direct, "In") {
return "Out" + direct[2:]
}
if strings.HasPrefix(direct, "Out") {
return "In" + direct[3:]
}
panic("no reverse?")
}
return direct
return name
}
func lastIP(subnet *net.IPNet) net.IP {

View File

@@ -49,9 +49,9 @@ ConditionExpr "conditional" ←
ColumnIP ←
"ExporterAddress"i { return "ExporterAddress", nil }
/ "SrcAddr"i #{ c.state["main-table-only"] = true ; return nil }
{ return c.reverseDirection("SrcAddr"), nil }
{ return c.reverseColumnDirection("SrcAddr"), nil }
/ "DstAddr"i #{ c.state["main-table-only"] = true ; return nil }
{ return c.reverseDirection("DstAddr"), nil }
{ return c.reverseColumnDirection("DstAddr"), nil }
ConditionIPExpr "condition on IP" ←
column:ColumnIP _
operator:("=" / "!=") _ ip:IP {
@@ -73,26 +73,26 @@ ConditionStringExpr "condition on string" ←
/ "ExporterSite"i { return "ExporterSite", nil }
/ "ExporterRegion"i { return "ExporterRegion", nil }
/ "ExporterTenant"i { return "ExporterTenant", nil }
/ "SrcCountry"i { return c.reverseDirection("SrcCountry"), nil }
/ "DstCountry"i { return c.reverseDirection("DstCountry"), nil }
/ "SrcNetName"i { return c.reverseDirection("SrcNetName"), nil }
/ "DstNetName"i { return c.reverseDirection("DstNetName"), nil }
/ "SrcNetRole"i { return c.reverseDirection("SrcNetRole"), nil }
/ "DstNetRole"i { return c.reverseDirection("DstNetRole"), nil }
/ "SrcNetSite"i { return c.reverseDirection("SrcNetSite"), nil }
/ "DstNetSite"i { return c.reverseDirection("DstNetSite"), nil }
/ "SrcNetRegion"i { return c.reverseDirection("SrcNetRegion"), nil }
/ "DstNetRegion"i { return c.reverseDirection("DstNetRegion"), nil }
/ "SrcNetTenant"i { return c.reverseDirection("SrcNetTenant"), nil }
/ "DstNetTenant"i { return c.reverseDirection("DstNetTenant"), nil }
/ "InIfName"i { return c.reverseDirection("InIfName"), nil }
/ "OutIfName"i { return c.reverseDirection("OutIfName"), nil }
/ "InIfDescription"i { return c.reverseDirection("InIfDescription"), nil }
/ "OutIfDescription"i { return c.reverseDirection("OutIfDescription"), nil }
/ "InIfConnectivity"i { return c.reverseDirection("InIfConnectivity"), nil }
/ "OutIfConnectivity"i { return c.reverseDirection("OutIfConnectivity"), nil }
/ "InIfProvider"i { return c.reverseDirection("InIfProvider"), nil }
/ "OutIfProvider"i { return c.reverseDirection("OutIfProvider"), nil }) _
/ "SrcCountry"i { return c.reverseColumnDirection("SrcCountry"), nil }
/ "DstCountry"i { return c.reverseColumnDirection("DstCountry"), nil }
/ "SrcNetName"i { return c.reverseColumnDirection("SrcNetName"), nil }
/ "DstNetName"i { return c.reverseColumnDirection("DstNetName"), nil }
/ "SrcNetRole"i { return c.reverseColumnDirection("SrcNetRole"), nil }
/ "DstNetRole"i { return c.reverseColumnDirection("DstNetRole"), nil }
/ "SrcNetSite"i { return c.reverseColumnDirection("SrcNetSite"), nil }
/ "DstNetSite"i { return c.reverseColumnDirection("DstNetSite"), nil }
/ "SrcNetRegion"i { return c.reverseColumnDirection("SrcNetRegion"), nil }
/ "DstNetRegion"i { return c.reverseColumnDirection("DstNetRegion"), nil }
/ "SrcNetTenant"i { return c.reverseColumnDirection("SrcNetTenant"), nil }
/ "DstNetTenant"i { return c.reverseColumnDirection("DstNetTenant"), nil }
/ "InIfName"i { return c.reverseColumnDirection("InIfName"), nil }
/ "OutIfName"i { return c.reverseColumnDirection("OutIfName"), nil }
/ "InIfDescription"i { return c.reverseColumnDirection("InIfDescription"), nil }
/ "OutIfDescription"i { return c.reverseColumnDirection("OutIfDescription"), nil }
/ "InIfConnectivity"i { return c.reverseColumnDirection("InIfConnectivity"), nil }
/ "OutIfConnectivity"i { return c.reverseColumnDirection("OutIfConnectivity"), nil }
/ "InIfProvider"i { return c.reverseColumnDirection("InIfProvider"), nil }
/ "OutIfProvider"i { return c.reverseColumnDirection("OutIfProvider"), nil }) _
rcond:RConditionStringExpr {
return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil
}
@@ -105,16 +105,16 @@ RConditionStringExpr "condition on string" ←
}
ConditionBoundaryExpr "condition on boundary" ←
column:("InIfBoundary"i { return c.reverseDirection("InIfBoundary"), nil }
/ "OutIfBoundary"i { return c.reverseDirection("OutIfBoundary"), nil }) _
column:("InIfBoundary"i { return c.reverseColumnDirection("InIfBoundary"), nil }
/ "OutIfBoundary"i { return c.reverseColumnDirection("OutIfBoundary"), nil }) _
operator:("=" / "!=") _
boundary:("external"i / "internal"i / "undefined"i) {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator),
quote(strings.ToLower(toString(boundary)))), nil
}
ConditionSpeedExpr "condition on speed" ←
column:("InIfSpeed"i { return c.reverseDirection("InIfSpeed"), nil }
/ "OutIfSpeed"i { return c.reverseDirection("OutIfSpeed"), nil }) _
column:("InIfSpeed"i { return c.reverseColumnDirection("InIfSpeed"), nil }
/ "OutIfSpeed"i { return c.reverseColumnDirection("OutIfSpeed"), nil }) _
operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _
value:Unsigned64 {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
@@ -126,15 +126,15 @@ ConditionForwardingStatusExpr "condition on forwarding status" ←
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
}
ConditionPortExpr "condition on port" ←
column:("SrcPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseDirection("SrcPort"), nil }
/ "DstPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseDirection("DstPort"), nil }) _
column:("SrcPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseColumnDirection("SrcPort"), nil }
/ "DstPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseColumnDirection("DstPort"), nil }) _
operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _ value:Unsigned16 {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
}
ConditionASExpr "condition on AS number" ←
column:("SrcAS"i { return c.reverseDirection("SrcAS"), nil }
/ "DstAS"i { return c.reverseDirection("DstAS"), nil }) _
column:("SrcAS"i { return c.reverseColumnDirection("SrcAS"), nil }
/ "DstAS"i { return c.reverseColumnDirection("DstAS"), nil }) _
rcond:RConditionASExpr {
return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil
}

View File

@@ -0,0 +1,33 @@
<template>
<div>
<label :for="id" class="flex items-center">
<input
:id="id"
type="checkbox"
:checked="modelValue"
class="h-4 w-4 rounded border-gray-300 bg-gray-100 text-blue-600 focus:ring-2 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700 dark:ring-offset-gray-800 dark:focus:ring-blue-600"
@change="$emit('update:modelValue', $event.target.checked)"
/>
<span class="ml-1 text-sm font-medium text-gray-900 dark:text-gray-300">
{{ label }}
</span>
</label>
</div>
</template>
<script setup>
// Simple labeled checkbox usable with v-model (Boolean).
import { v4 as uuidv4 } from "uuid";

// Unique id to associate the label with the input (one per instance,
// since <script setup> runs for each component instance).
const id = uuidv4();

defineProps({
// Text displayed next to the checkbox.
label: {
type: String,
required: true,
},
// Current checked state, driven through v-model.
modelValue: {
type: Boolean,
required: true,
},
});
defineEmits(["update:modelValue"]);
</script>

View File

@@ -2,6 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only
export function formatXps(value) {
value = Math.abs(value);
const suffixes = ["", "K", "M", "G", "T"];
let idx = 0;
while (value >= 1000 && idx < suffixes.length) {

View File

@@ -151,6 +151,7 @@ const { data, isFetching, aborted, abort, canAbort, error } = useFetch("", {
end: payload.value.end,
graphType: payload.value.graphType,
units: payload.value.units,
bidirectional: payload.value.bidirectional,
};
// Also update URL.

View File

@@ -28,6 +28,7 @@ import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
import { uniqWith, isEqual, findIndex } from "lodash-es";
import { use, graphic } from "echarts/core";
import { CanvasRenderer } from "echarts/renderers";
import { LineChart } from "echarts/charts";
@@ -69,22 +70,62 @@ const commonGraph = {
type: "cross",
label: { backgroundColor: "#6a7985" },
},
valueFormatter: formatXps,
formatter: (params) => {
// We will use a custom formatter, notably to handle bidirectional tooltips.
if (params.length === 0) return;
let table = [],
bidirectional = false;
params.forEach((param) => {
let idx = findIndex(table, (r) => r.seriesName === param.seriesName);
if (idx === -1) {
table.push({
marker: param.marker,
seriesName: param.seriesName,
});
idx = table.length - 1;
}
const val = param.value[param.seriesIndex + 1];
if (table[idx].col1 !== undefined || val < 0) {
table[idx].col2 = val;
bidirectional = true;
} else table[idx].col1 = val;
});
const rows = table
.map(
(row) => `<tr>
<td>${row.marker} ${row.seriesName}</td>
<td class="pl-2">${bidirectional ? "↑" : ""}<b>${formatXps(
row.col1 || 0
)}</b></td>
<td class="pl-2">${bidirectional ? "↓" : ""}<b>${
bidirectional ? formatXps(row.col2 || 0) : ""
}</b></td>
</tr>`
)
.join("");
return `${params[0].axisValueLabel}<table>${rows}</table>`;
},
},
};
const graph = computed(() => {
const theme = isDark.value ? "dark" : "light";
const data = props.data || {};
if (!data.t) return {};
const dataset = {
const rowName = (row) => row.join(" — ") || "Total",
dataset = {
sourceHeader: false,
dimensions: [
"time",
...data.rows.map((rows) => rows.join(" — ") || "Total"),
],
dimensions: ["time", ...data.rows.map(rowName)],
source: [
...data.t
.map((t, timeIdx) => [t, ...data.points.map((rows) => rows[timeIdx])])
.map((t, timeIdx) => [
t,
...data.points.map(
// Unfortunately, eCharts does not seem to make it easy
// to inverse an axis and put the result below. Therefore,
// we use negative values for the second axis.
(row, rowIdx) => row[timeIdx] * (data.axis[rowIdx] == 1 ? 1 : -1)
),
])
.slice(1, -1),
],
},
@@ -95,7 +136,7 @@ const graph = computed(() => {
},
yAxis = {
type: "value",
min: 0,
min: data.bidirectional ? undefined : 0,
axisLabel: { formatter: formatXps },
axisPointer: {
label: { formatter: ({ value }) => formatXps(value) },
@@ -104,6 +145,9 @@ const graph = computed(() => {
// Lines and stacked areas
if ([graphTypes.stacked, graphTypes.lines].includes(data.graphType)) {
const uniqRows = uniqWith(data.rows, isEqual),
uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow));
return {
grid: {
left: 60,
@@ -115,8 +159,8 @@ const graph = computed(() => {
yAxis,
dataset,
series: data.rows
.map((rows, idx) => {
const isOther = rows.some((name) => name === "Other"),
.map((row, idx) => {
const isOther = row.some((name) => name === "Other"),
color = isOther ? dataColorGrey : dataColor;
if (data.graphType === graphTypes.lines && isOther) {
return undefined;
@@ -125,10 +169,10 @@ const graph = computed(() => {
type: "line",
symbol: "none",
itemStyle: {
color: color(idx, false, theme),
color: color(uniqRowIndex(row), false, theme),
},
lineStyle: {
color: color(idx, false, theme),
color: color(uniqRowIndex(row), false, theme),
width: 2,
},
emphasis: {
@@ -144,22 +188,23 @@ const graph = computed(() => {
if (data.graphType === graphTypes.stacked) {
serie = {
...serie,
stack: "all",
stack: data.axis[idx],
lineStyle:
idx == data.rows.length - 1
idx == data.rows.length - 1 ||
data.axis[idx] != data.axis[idx + 1]
? {
color: isDark.value ? "#ddd" : "#111",
width: 2,
width: 1.5,
}
: {
color: color(idx, false, theme),
color: color(uniqRowIndex(row), false, theme),
width: 1,
},
areaStyle: {
opacity: 0.95,
color: new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: color(idx, false, theme) },
{ offset: 1, color: color(idx, true, theme) },
{ offset: 0, color: color(uniqRowIndex(row), false, theme) },
{ offset: 1, color: color(uniqRowIndex(row), true, theme) },
]),
},
};
@@ -170,34 +215,38 @@ const graph = computed(() => {
};
}
if (data.graphType === graphTypes.grid) {
const dataRows = data.rows.filter((rows) =>
rows.some((name) => name !== "Other")
const uniqRows = uniqWith(data.rows, isEqual).filter((row) =>
row.some((name) => name !== "Other")
),
otherIndex = dataset.dimensions.indexOf("Other");
const maxY = Math.max(
...dataset.source.map((rows) =>
otherIndex === -1
? Math.max(...rows.slice(1))
: Math.max(
// Skip "Other" column
...rows.slice(1, otherIndex),
...rows.slice(otherIndex + 1)
uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow)),
otherIndexes = data.rows
.map((row, idx) => (row.some((name) => name === "Other") ? idx : -1))
.filter((idx) => idx >= 0),
somethingY = (fn) =>
fn(
...dataset.source.map((row) =>
fn(
...row
.slice(1)
.filter((_, idx) => !otherIndexes.includes(idx + 1))
)
)
);
let rowNumber = Math.ceil(Math.sqrt(dataRows.length)),
),
maxY = somethingY(Math.max),
minY = somethingY(Math.min);
let rowNumber = Math.ceil(Math.sqrt(uniqRows.length)),
colNumber = rowNumber;
if ((rowNumber - 1) * colNumber >= dataRows.length) {
if ((rowNumber - 1) * colNumber >= uniqRows.length) {
rowNumber--;
}
const positions = dataRows.map((_, idx) => ({
const positions = uniqRows.map((_, idx) => ({
left: ((idx % colNumber) / colNumber) * 100,
top: (Math.floor(idx / colNumber) / rowNumber) * 100,
width: (1 / colNumber) * 100,
height: (1 / rowNumber) * 100,
}));
return {
title: dataRows.map((rows, idx) => ({
title: uniqRows.map((_, idx) => ({
textAlign: "left",
textStyle: {
fontSize: 12,
@@ -209,7 +258,7 @@ const graph = computed(() => {
bottom: 100 - positions[idx].top - positions[idx].height - 0.5 + "%",
left: positions[idx].left + 0.25 + "%",
})),
grid: dataRows.map((_, idx) => ({
grid: uniqRows.map((_, idx) => ({
show: true,
borderWidth: 0,
left: positions[idx].left + 0.25 + "%",
@@ -217,32 +266,40 @@ const graph = computed(() => {
width: positions[idx].width - 0.5 + "%",
height: positions[idx].height - 0.5 + "%",
})),
xAxis: dataRows.map((_, idx) => ({
xAxis: uniqRows.map((_, idx) => ({
...xAxis,
gridIndex: idx,
show: false,
})),
yAxis: dataRows.map((_, idx) => ({
yAxis: uniqRows.map((_, idx) => ({
...yAxis,
max: maxY,
min: data.bidirectional ? minY : 0,
gridIndex: idx,
show: false,
})),
dataset,
series: dataRows.map((rows, idx) => {
series: data.rows
.map((row, idx) => {
let serie = {
type: "line",
symbol: "none",
xAxisIndex: idx,
yAxisIndex: idx,
xAxisIndex: uniqRowIndex(row),
yAxisIndex: uniqRowIndex(row),
itemStyle: {
color: dataColor(idx, false, theme),
color: dataColor(uniqRowIndex(row), false, theme),
},
areaStyle: {
opacity: 0.95,
color: new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: dataColor(idx, false, theme) },
{ offset: 1, color: dataColor(idx, true, theme) },
{
offset: 0,
color: dataColor(uniqRowIndex(row), false, theme),
},
{
offset: 1,
color: dataColor(uniqRowIndex(row), true, theme),
},
]),
},
emphasis: {
@@ -256,7 +313,8 @@ const graph = computed(() => {
},
};
return serie;
}),
})
.filter((s) => s.xAxisIndex >= 0),
};
}
return {};

View File

@@ -2,6 +2,29 @@
<!-- SPDX-License-Identifier: AGPL-3.0-only -->
<template>
<div>
<!-- Axis selection -->
<div
v-if="axes.length > 1"
class="border-b border-gray-200 text-center text-sm font-medium text-gray-500 dark:border-gray-700 dark:text-gray-400"
>
<ul class="-mb-px flex flex-wrap">
<li v-for="{ id: axis, name } in axes" :key="axis" class="mr-2">
<button
class="pointer-cursor inline-block rounded-t-lg border-b-2 border-transparent p-4 hover:border-gray-300 hover:text-gray-600 dark:hover:text-gray-300"
:class="{
'active border-blue-600 text-blue-600 dark:border-blue-500 dark:text-blue-500':
displayedAxis === axis,
}"
:aria-current="displayedAxis === axis ? 'page' : null"
@click="selectedAxis = axis"
>
{{ name }}
</button>
</li>
</ul>
</div>
<!-- Table -->
<div
class="relative overflow-x-auto shadow-md dark:shadow-white/10 sm:rounded-lg"
>
@@ -30,8 +53,8 @@
v-for="(row, index) in table.rows"
:key="index"
class="border-b odd:bg-white even:bg-gray-50 dark:border-gray-700 dark:bg-gray-800 odd:dark:bg-gray-800 even:dark:bg-gray-700"
@pointerenter="highlightEnabled && $emit('highlighted', index)"
@pointerleave="$emit('highlighted', null)"
@pointerenter="highlight(index)"
@pointerleave="highlight(null)"
>
<th scope="row">
<div v-if="row.color" class="px-6 py-2 text-right font-medium">
@@ -58,6 +81,7 @@
</tbody>
</table>
</div>
</div>
</template>
<script setup>
@@ -67,21 +91,51 @@ const props = defineProps({
default: null,
},
});
defineEmits(["highlighted"]);
const emit = defineEmits(["highlighted"]);
import { computed, inject } from "vue";
import { computed, inject, ref } from "vue";
import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants";
const { isDark } = inject("theme");
const { stacked, lines, grid, sankey } = graphTypes;
const highlightEnabled = computed(() =>
[stacked, lines, grid].includes(props.data?.graphType)
import { uniq, uniqWith, isEqual, findIndex, takeWhile } from "lodash-es";
// Forward a row-highlight event to the parent. The table only shows rows
// for the currently displayed axis, so `index` (position in the filtered
// table) has to be translated back to the row's position in the unfiltered
// `props.data.rows` before emitting.
const highlight = (index) => {
if (index === null) {
// Pointer left the row: clear the highlight unconditionally.
emit("highlighted", null);
return;
}
// Highlighting only makes sense for graph types with one series per row.
if (![stacked, lines, grid].includes(props.data?.graphType)) return;
// The index provided is the one in the filtered data. We want the original index.
// Count rows until we have passed `index` rows belonging to the displayed
// axis; the length of that prefix is the original index.
const originalIndex = takeWhile(
props.data.rows,
(() => {
let count = 0;
return (_, idx) =>
props.data.axis[idx] != displayedAxis.value || count++ < index;
})()
).length;
emit("highlighted", originalIndex);
};
const axes = computed(() =>
uniq(props.data.axis ?? []).map((axis) => ({
id: axis,
name: { 1: "Direct", 2: "Reverse" }[axis] ?? "Unknown",
}))
);
const selectedAxis = ref(1);
const displayedAxis = computed(() =>
axes.value.some((axis) => axis.id === selectedAxis.value)
? selectedAxis.value
: 1
);
const table = computed(() => {
const theme = isDark.value ? "dark" : "light";
const data = props.data || {};
if ([stacked, lines, grid].includes(data.graphType)) {
const uniqRows = uniqWith(data.rows, isEqual),
uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow));
return {
columns: [
// Dimensions
@@ -95,14 +149,16 @@ const table = computed(() => {
{ name: "~95th", classNames: "text-right" },
],
rows:
data.rows?.map((rows, idx) => {
const color = rows.some((name) => name === "Other")
data.rows
?.filter((_, idx) => data.axis[idx] == displayedAxis.value)
.map((row, idx) => {
const color = row.some((name) => name === "Other")
? dataColorGrey
: dataColor;
return {
values: [
// Dimensions
...rows.map((r) => ({ value: r })),
...row.map((r) => ({ value: r })),
// Stats
...[
data.min[idx],
@@ -114,7 +170,7 @@ const table = computed(() => {
classNames: "text-right tabular-nums",
})),
],
color: color(idx, false, theme),
color: color(uniqRowIndex(row), false, theme),
};
}) || [],
};
@@ -129,10 +185,10 @@ const table = computed(() => {
// Average
{ name: "Average", classNames: "text-right" },
],
rows: data.rows?.map((rows, idx) => ({
rows: data.rows?.map((row, idx) => ({
values: [
// Dimensions
...rows.map((r) => ({ value: r })),
...row.map((r) => ({ value: r })),
// Average
{
value: formatXps(data.xps[idx]) + data.units.slice(-3),

View File

@@ -33,14 +33,14 @@
>
<div v-if="open" class="flex flex-col px-3 py-4 lg:max-h-screen">
<div
class="mb-2 flex flex-row flex-wrap justify-between gap-2 sm:flex-nowrap lg:flex-wrap"
class="mb-2 flex flex-row flex-wrap items-center justify-between gap-2 sm:flex-nowrap lg:flex-wrap"
>
<InputButton
attr-type="submit"
:disabled="hasErrors && !loading"
:loading="loading"
:type="loading ? 'alternative' : 'primary'"
class="order-2 w-28 justify-center sm:order-3 lg:order-2"
class="order-2 w-28 justify-center sm:order-4 lg:order-2"
>
{{ loading ? "Cancel" : applyLabel }}
</InputButton>
@@ -57,7 +57,7 @@
<InputListBox
v-model="graphType"
:items="graphTypeList"
class="order-3 grow basis-full sm:order-2 sm:basis-0 lg:order-3"
class="order-3 grow basis-full sm:order-3 sm:basis-0 lg:order-3 lg:basis-full"
label="Graph type"
>
<template #selected>{{ graphType.name }}</template>
@@ -71,6 +71,12 @@
</div>
</template>
</InputListBox>
<InputCheckbox
v-if="[stacked, lines, grid].includes(graphType.name)"
v-model="bidirectional"
class="order-4 sm:order-2 lg:order-4"
label="Bidirectional"
/>
</div>
<SectionLabel>Time range</SectionLabel>
<InputTimeRange v-model="timeRange" />
@@ -114,6 +120,7 @@ import InputTimeRange from "@/components/InputTimeRange.vue";
import InputDimensions from "@/components/InputDimensions.vue";
import InputListBox from "@/components/InputListBox.vue";
import InputButton from "@/components/InputButton.vue";
import InputCheckbox from "@/components/InputCheckbox.vue";
import InputChoice from "@/components/InputChoice.vue";
import InputFilter from "@/components/InputFilter.vue";
import SectionLabel from "./SectionLabel.vue";
@@ -133,6 +140,7 @@ const timeRange = ref({});
const dimensions = ref([]);
const filter = ref({});
const units = ref("l3bps");
const bidirectional = ref(false);
const options = computed(() => ({
// Common to all graph types
@@ -144,8 +152,14 @@ const options = computed(() => ({
filter: filter.value.expression,
units: units.value,
// Only for time series
...([stacked, lines].includes(graphType.value.name) && { points: 200 }),
...(graphType.value.name === grid && { points: 50 }),
...([stacked, lines].includes(graphType.value.name) && {
bidirectional: bidirectional.value,
points: 200,
}),
...(graphType.value.name === grid && {
bidirectional: bidirectional.value,
points: 50,
}),
}));
const applyLabel = computed(() =>
isEqual(options.value, props.modelValue) ? "Refresh" : "Apply"
@@ -169,6 +183,7 @@ watch(
points /* eslint-disable-line no-unused-vars */,
filter: _filter = defaultOptions?.filter,
units: _units = "l3bps",
bidirectional: _bidirectional = false,
} = modelValue;
// Dispatch values in refs
@@ -181,6 +196,7 @@ watch(
};
filter.value = { expression: _filter };
units.value = _units;
bidirectional.value = _bidirectional;
// A bit risky, but it seems to work.
if (!isEqual(modelValue, options.value)) {

View File

@@ -24,21 +24,36 @@ type graphHandlerInput struct {
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ...
Units string `json:"units" binding:"required,oneof=pps l2bps l3bps"`
Bidirectional bool `json:"bidirectional"`
}
// graphHandlerOutput describes the output for the /graph endpoint.
// graphHandlerOutput describes the output for the /graph endpoint. A
// row is a set of values for dimensions. Currently, axis 1 is for the
// direct direction and axis 2 is for the reverse direction. Rows are
// sorted by axis, then by the sum of traffic.
type graphHandlerOutput struct {
Rows [][]string `json:"rows"`
Time []time.Time `json:"t"`
Rows [][]string `json:"rows"` // List of rows
Points [][]int `json:"points"` // t → row → xps
Average []int `json:"average"` // row → xps
Min []int `json:"min"`
Max []int `json:"max"`
NinetyFivePercentile []int `json:"95th"`
Axis []int `json:"axis"` // row → axis
Average []int `json:"average"` // row → average xps
Min []int `json:"min"` // row → min xps
Max []int `json:"max"` // row → max xps
NinetyFivePercentile []int `json:"95th"` // row → 95th xps
}
// graphHandlerInputToSQL converts a graph input to an SQL request
func (input graphHandlerInput) toSQL() (string, error) {
// reverseDirection reverts the direction of a provided input: the filter
// is swapped for its precomputed reverse form and every dimension column
// is replaced by its opposite-direction counterpart. The receiver is a
// value, so the caller's input is left untouched.
func (input graphHandlerInput) reverseDirection() graphHandlerInput {
	input.Filter.filter = input.Filter.reverseFilter
	reversed := make([]queryColumn, len(input.Dimensions))
	for i, dimension := range input.Dimensions {
		reversed[i] = dimension.reverseDirection()
	}
	input.Dimensions = reversed
	return input
}
func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string {
interval := int64((input.End.Sub(input.Start).Seconds())) / int64(input.Points)
slot := fmt.Sprintf(`{resolution->%d}`, interval)
@@ -82,7 +97,7 @@ func (input graphHandlerInput) toSQL() (string, error) {
// With
with := []string{}
if len(dimensions) > 0 {
if len(dimensions) > 0 && !skipWith {
with = append(with, fmt.Sprintf(
"rows AS (SELECT %s FROM {table} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)",
strings.Join(dimensions, ", "),
@@ -97,6 +112,7 @@ func (input graphHandlerInput) toSQL() (string, error) {
sqlQuery := fmt.Sprintf(`
%s
SELECT %d AS axis, * FROM (
SELECT
%s
FROM {table}
@@ -105,8 +121,20 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL %s second)
TO {timefilter.Stop}
STEP %s`, withStr, strings.Join(fields, ",\n "), where, slot, slot)
return sqlQuery, nil
STEP %s)`, withStr, axis, strings.Join(fields, ",\n "), where, slot, slot)
return sqlQuery
}
// toSQL converts a graph input to an SQL request. Axis 1 always carries
// the direct direction; when bidirectional mode is enabled, a second
// query for the reverse direction (axis 2, reusing the direct query's
// WITH clause) is appended with UNION ALL.
func (input graphHandlerInput) toSQL() string {
	directPart := input.toSQL1(1, false)
	if !input.Bidirectional {
		return strings.TrimSpace(directPart)
	}
	reversePart := input.reverseDirection().toSQL1(2, true)
	combined := fmt.Sprintf(`%s
UNION ALL
%s`, directPart, strings.TrimSpace(reversePart))
	return strings.TrimSpace(combined)
}
func (c *Component) graphHandlerFunc(gc *gin.Context) {
@@ -117,11 +145,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
return
}
sqlQuery, err := input.toSQL()
if err != nil {
gc.JSON(http.StatusBadRequest, gin.H{"message": helpers.Capitalize(err.Error())})
return
}
sqlQuery := input.toSQL()
resolution := time.Duration(int64(input.End.Sub(input.Start).Nanoseconds()) / int64(input.Points))
if resolution < time.Second {
resolution = time.Second
@@ -131,6 +155,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " "))
results := []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
@@ -156,99 +181,140 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
}
}
// We want to sort rows depending on how much data they gather each
// Set time axis. We assume the first returned axis has the complete view.
output := graphHandlerOutput{
Time: []time.Time{},
}
rowValues := map[string][]int{} // values for each row (indexed by internal key)
rowKeys := map[string][]string{} // mapping from keys to dimensions
rowSums := map[string]uint64{} // sum for a given row (to sort)
lastTime := time.Time{}
for _, result := range results {
if result.Time != lastTime {
if result.Axis == 1 && result.Time != lastTime {
output.Time = append(output.Time, result.Time)
lastTime = result.Time
}
}
lastTime = time.Time{}
idx := -1
// For the remaining, we will collect information into various
// structures in one pass. Each structure will be keyed by the
// axis and the row.
axes := []int{} // list of axes
rows := map[int]map[string][]string{} // for each axis, a map from row to list of dimensions
points := map[int]map[string][]int{} // for each axis, a map from row to list of points (one point per ts)
sums := map[int]map[string]uint64{} // for each axis, a map from row to sum (for sorting purpose)
lastTimeForAxis := map[int]time.Time{}
timeIndexForAxis := map[int]int{}
for _, result := range results {
if result.Time != lastTime {
idx++
lastTime = result.Time
}
rowKey := fmt.Sprintf("%s", result.Dimensions)
row, ok := rowValues[rowKey]
var ok bool
axis := int(result.Axis)
lastTime, ok = lastTimeForAxis[axis]
if !ok {
rowKeys[rowKey] = result.Dimensions
// Unknown axis, initialize various structs
axes = append(axes, axis)
lastTimeForAxis[axis] = time.Time{}
timeIndexForAxis[axis] = -1
rows[axis] = map[string][]string{}
points[axis] = map[string][]int{}
sums[axis] = map[string]uint64{}
}
if result.Time != lastTime {
// New timestamp, increment time index
timeIndexForAxis[axis]++
lastTimeForAxis[axis] = result.Time
}
rowKey := fmt.Sprintf("%d-%s", axis, result.Dimensions)
row, ok := points[axis][rowKey]
if !ok {
// Not points for this row yet, create it
rows[axis][rowKey] = result.Dimensions
row = make([]int, len(output.Time))
rowValues[rowKey] = row
points[axis][rowKey] = row
sums[axis][rowKey] = 0
}
rowValues[rowKey][idx] = int(result.Xps)
sum, _ := rowSums[rowKey]
rowSums[rowKey] = sum + uint64(result.Xps)
points[axis][rowKey][timeIndexForAxis[axis]] = int(result.Xps)
sums[axis][rowKey] += uint64(result.Xps)
}
rows := make([]string, len(rowKeys))
i := 0
for k := range rowKeys {
rows[i] = k
i++
// Sort axes
sort.Ints(axes)
// Sort the rows using the sums
sortedRowKeys := map[int][]string{}
for _, axis := range axes {
sortedRowKeys[axis] = make([]string, 0, len(rows[axis]))
for k := range rows[axis] {
sortedRowKeys[axis] = append(sortedRowKeys[axis], k)
}
// Sort by sum, except we want "Other" to be last
sort.Slice(rows, func(i, j int) bool {
if rowKeys[rows[i]][0] == "Other" {
sort.Slice(sortedRowKeys[axis], func(i, j int) bool {
iKey := sortedRowKeys[axis][i]
jKey := sortedRowKeys[axis][j]
if rows[axis][iKey][0] == "Other" {
return false
}
if rowKeys[rows[j]][0] == "Other" {
if rows[axis][jKey][0] == "Other" {
return true
}
return rowSums[rows[i]] > rowSums[rows[j]]
return sums[axis][iKey] > sums[axis][jKey]
})
output.Rows = make([][]string, len(rows))
output.Points = make([][]int, len(rows))
output.Average = make([]int, len(rows))
output.Min = make([]int, len(rows))
output.Max = make([]int, len(rows))
output.NinetyFivePercentile = make([]int, len(rows))
}
for idx, r := range rows {
output.Rows[idx] = rowKeys[r]
output.Points[idx] = rowValues[r]
output.Average[idx] = int(rowSums[r] / uint64(len(output.Time)))
// For 95th percentile, we need to sort the values.
// Use that for min/max too.
if len(rowValues[r]) == 0 {
// Now, we can complete the `output' structure!
totalRows := 0
for _, axis := range axes {
totalRows += len(rows[axis])
}
output.Rows = make([][]string, totalRows)
output.Axis = make([]int, totalRows)
output.Points = make([][]int, totalRows)
output.Average = make([]int, totalRows)
output.Min = make([]int, totalRows)
output.Max = make([]int, totalRows)
output.NinetyFivePercentile = make([]int, totalRows)
i := -1
for _, axis := range axes {
for _, k := range sortedRowKeys[axis] {
i++
output.Rows[i] = rows[axis][k]
output.Axis[i] = axis
output.Points[i] = points[axis][k]
output.Average[i] = int(sums[axis][k] / uint64(len(output.Time)))
// For remaining, we will sort the values. It
// is needed for 95th percentile but it helps
// for min/max too. We remove special cases
// for 0 or 1 point.
nbPoints := len(output.Points[i])
if nbPoints == 0 {
continue
}
if len(rowValues[r]) == 1 {
v := rowValues[r][0]
output.Min[idx] = v
output.Max[idx] = v
output.NinetyFivePercentile[idx] = v
if nbPoints == 1 {
v := output.Points[i][0]
output.Min[i] = v
output.Max[i] = v
output.NinetyFivePercentile[i] = v
continue
}
s := make([]int, len(rowValues[r]))
copy(s, rowValues[r])
sort.Ints(s)
points := make([]int, nbPoints)
copy(points, output.Points[i])
sort.Ints(points)
// Min (but not 0)
for i := 0; i < len(s); i++ {
output.Min[idx] = s[i]
if s[i] > 0 {
for j := 0; j < nbPoints; j++ {
output.Min[i] = points[j]
if points[j] > 0 {
break
}
}
// Max
output.Max[idx] = s[len(s)-1]
output.Max[i] = points[nbPoints-1]
// 95th percentile
index := 0.95 * float64(len(s))
index := 0.95 * float64(nbPoints)
j := int(index)
if index == float64(j) {
output.NinetyFivePercentile[idx] = s[j-1]
output.NinetyFivePercentile[i] = points[j-1]
} else if index > 1 {
// We use the average of the two values. This
// is good enough for bps/pps
output.NinetyFivePercentile[idx] = (s[j-1] + s[j]) / 2
output.NinetyFivePercentile[i] = (points[j-1] + points[j]) / 2
}
}
}

View File

@@ -4,6 +4,7 @@
package console
import (
"fmt"
"strings"
"testing"
"time"
@@ -14,6 +15,46 @@ import (
"akvorado/common/helpers"
)
func TestGraphInputReverseDirection(t *testing.T) {
input := graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnInIfProvider,
},
Filter: queryFilter{
filter: "DstCountry = 'FR' AND SrcCountry = 'US'",
reverseFilter: "SrcCountry = 'FR' AND DstCountry = 'US'",
},
Units: "l3bps",
}
original1 := fmt.Sprintf("%+v", input)
expected := graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnOutIfProvider,
},
Filter: queryFilter{
reverseFilter: "DstCountry = 'FR' AND SrcCountry = 'US'",
filter: "SrcCountry = 'FR' ANd DstCountry = 'US'",
},
Units: "l3bps",
}
got := input.reverseDirection()
original2 := fmt.Sprintf("%+v", input)
if diff := helpers.Diff(got, expected); diff != "" {
t.Fatalf("reverseDirection() (-got, +want):\n%s", diff)
}
if original1 != original2 {
t.Fatalf("reverseDirection() modified original to:\n-%s\n+%s", original1, original2)
}
}
func TestGraphQuerySQL(t *testing.T) {
cases := []struct {
Description string
@@ -31,6 +72,7 @@ func TestGraphQuerySQL(t *testing.T) {
Units: "l3bps",
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -41,7 +83,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864}`,
STEP {resolution->864})`,
}, {
Description: "no dimensions, no filters, l2 bps",
Input: graphHandlerInput{
@@ -53,6 +95,7 @@ ORDER BY time WITH FILL
Units: "l2bps",
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM((Bytes+18*Packets)*SamplingRate*8/{resolution->864}) AS xps,
@@ -63,7 +106,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864}`,
STEP {resolution->864})`,
}, {
Description: "no dimensions, no filters, pps",
Input: graphHandlerInput{
@@ -75,6 +118,7 @@ ORDER BY time WITH FILL
Units: "pps",
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Packets*SamplingRate/{resolution->864}) AS xps,
@@ -85,7 +129,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864}`,
STEP {resolution->864})`,
}, {
Description: "no dimensions",
Input: graphHandlerInput{
@@ -97,6 +141,7 @@ ORDER BY time WITH FILL
Units: "l3bps",
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -107,7 +152,47 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864}`,
STEP {resolution->864})`,
}, {
Description: "no dimensions, reverse direction",
Input: graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{
filter: "DstCountry = 'FR' AND SrcCountry = 'US'",
reverseFilter: "SrcCountry = 'FR' AND DstCountry = 'US'",
},
Units: "l3bps",
Bidirectional: true,
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US')
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})
UNION ALL
SELECT 2 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (SrcCountry = 'FR' AND DstCountry = 'US')
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})`,
}, {
Description: "no filters",
Input: graphHandlerInput{
@@ -125,6 +210,7 @@ ORDER BY time WITH FILL
Expected: `
WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -135,12 +221,55 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864}`,
STEP {resolution->864})`,
}, {
Description: "no filters, reverse",
Input: graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Limit: 20,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnInIfProvider,
},
Filter: queryFilter{},
Units: "l3bps",
Bidirectional: true,
},
Expected: `
WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})
UNION ALL
SELECT 2 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
if((ExporterName, OutIfProvider) IN rows, [ExporterName, OutIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})`,
},
}
for _, tc := range cases {
t.Run(tc.Description, func(t *testing.T) {
got, _ := tc.Input.toSQL()
got := tc.Input.toSQL()
if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"),
strings.Split(strings.TrimSpace(tc.Expected), "\n")); diff != "" {
t.Errorf("toSQL (-got, +want):\n%s", diff)
@@ -151,26 +280,75 @@ ORDER BY time WITH FILL
func TestGraphHandler(t *testing.T) {
_, h, mockConn, _ := NewMock(t, DefaultConfiguration())
base := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
// Single direction
expectedSQL := []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
{base, 1000, []string{"router1", "provider1"}},
{base, 2000, []string{"router1", "provider2"}},
{base, 1200, []string{"router2", "provider2"}},
{base, 1100, []string{"router2", "provider3"}},
{base, 1900, []string{"Other", "Other"}},
{base.Add(time.Minute), 500, []string{"router1", "provider1"}},
{base.Add(time.Minute), 5000, []string{"router1", "provider2"}},
{base.Add(time.Minute), 900, []string{"router2", "provider4"}},
{base.Add(time.Minute), 100, []string{"Other", "Other"}},
{base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}},
{base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}},
{base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}},
{base.Add(2 * time.Minute), 100, []string{"Other", "Other"}},
{1, base, 1000, []string{"router1", "provider1"}},
{1, base, 2000, []string{"router1", "provider2"}},
{1, base, 1200, []string{"router2", "provider2"}},
{1, base, 1100, []string{"router2", "provider3"}},
{1, base, 1900, []string{"Other", "Other"}},
{1, base.Add(time.Minute), 500, []string{"router1", "provider1"}},
{1, base.Add(time.Minute), 5000, []string{"router1", "provider2"}},
{1, base.Add(time.Minute), 900, []string{"router2", "provider4"}},
{1, base.Add(time.Minute), 100, []string{"Other", "Other"}},
{1, base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}},
{1, base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}},
{1, base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}},
{1, base.Add(2 * time.Minute), 100, []string{"Other", "Other"}},
}
mockConn.EXPECT().
Select(gomock.Any(), gomock.Any(), gomock.Any()).
SetArg(1, expectedSQL).
Return(nil)
// Bidirectional
expectedSQL = []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
{1, base, 1000, []string{"router1", "provider1"}},
{1, base, 2000, []string{"router1", "provider2"}},
{1, base, 1200, []string{"router2", "provider2"}},
{1, base, 1100, []string{"router2", "provider3"}},
{1, base, 1900, []string{"Other", "Other"}},
{1, base.Add(time.Minute), 500, []string{"router1", "provider1"}},
{1, base.Add(time.Minute), 5000, []string{"router1", "provider2"}},
{1, base.Add(time.Minute), 900, []string{"router2", "provider4"}},
// Axes can be mixed. In reality, it seems they cannot
// be interleaved, but ClickHouse documentation does
// not say it is not possible.
{2, base, 100, []string{"router1", "provider1"}},
{2, base, 200, []string{"router1", "provider2"}},
{2, base, 120, []string{"router2", "provider2"}},
{1, base.Add(time.Minute), 100, []string{"Other", "Other"}},
{1, base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}},
{2, base, 110, []string{"router2", "provider3"}},
{2, base, 190, []string{"Other", "Other"}},
{2, base.Add(time.Minute), 50, []string{"router1", "provider1"}},
{2, base.Add(time.Minute), 500, []string{"router1", "provider2"}},
{1, base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}},
{1, base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}},
{1, base.Add(2 * time.Minute), 100, []string{"Other", "Other"}},
{2, base.Add(time.Minute), 90, []string{"router2", "provider4"}},
{2, base.Add(time.Minute), 10, []string{"Other", "Other"}},
{2, base.Add(2 * time.Minute), 10, []string{"router1", "provider1"}},
{2, base.Add(2 * time.Minute), 300, []string{"router1", "provider2"}},
{2, base.Add(2 * time.Minute), 10, []string{"router2", "provider4"}},
{2, base.Add(2 * time.Minute), 10, []string{"Other", "Other"}},
}
mockConn.EXPECT().
Select(gomock.Any(), gomock.Any(), gomock.Any()).
@@ -179,6 +357,7 @@ func TestGraphHandler(t *testing.T) {
helpers.TestHTTPEndpoints(t, h.Address, helpers.HTTPEndpointCases{
{
Description: "single direction",
URL: "/api/v0/console/graph",
JSONInput: gin.H{
"start": time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
@@ -188,6 +367,7 @@ func TestGraphHandler(t *testing.T) {
"dimensions": []string{"ExporterName", "InIfProvider"},
"filter": "DstCountry = 'FR' AND SrcCountry = 'US'",
"units": "l3bps",
"bidirectional": false,
},
JSONOutput: gin.H{
// Sorted by sum of bps
@@ -244,6 +424,124 @@ func TestGraphHandler(t *testing.T) {
500,
1000,
},
"axis": []int{
1, 1, 1, 1, 1, 1,
},
},
}, {
Description: "bidirectional",
URL: "/api/v0/console/graph",
JSONInput: gin.H{
"start": time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
"end": time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
"points": 100,
"limit": 20,
"dimensions": []string{"ExporterName", "InIfProvider"},
"filter": "DstCountry = 'FR' AND SrcCountry = 'US'",
"units": "l3bps",
"bidirectional": true,
},
JSONOutput: gin.H{
// Sorted by sum of bps
"rows": [][]string{
{"router1", "provider2"}, // 10000
{"router1", "provider1"}, // 1600
{"router2", "provider2"}, // 1200
{"router2", "provider3"}, // 1100
{"router2", "provider4"}, // 1000
{"Other", "Other"}, // 2100
{"router1", "provider2"}, // 1000
{"router1", "provider1"}, // 160
{"router2", "provider2"}, // 120
{"router2", "provider3"}, // 110
{"router2", "provider4"}, // 100
{"Other", "Other"}, // 210
},
"t": []string{
"2009-11-10T23:00:00Z",
"2009-11-10T23:01:00Z",
"2009-11-10T23:02:00Z",
},
"points": [][]int{
{2000, 5000, 3000},
{1000, 500, 100},
{1200, 0, 0},
{1100, 0, 0},
{0, 900, 100},
{1900, 100, 100},
{200, 500, 300},
{100, 50, 10},
{120, 0, 0},
{110, 0, 0},
{0, 90, 10},
{190, 10, 10},
},
"min": []int{
2000,
100,
1200,
1100,
100,
100,
200,
10,
120,
110,
10,
10,
},
"max": []int{
5000,
1000,
1200,
1100,
900,
1900,
500,
100,
120,
110,
90,
190,
},
"average": []int{
3333,
533,
400,
366,
333,
700,
333,
53,
40,
36,
33,
70,
},
"95th": []int{
4000,
750,
600,
550,
500,
1000,
400,
75,
60,
55,
50,
100,
},
"axis": []int{
1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2,
},
},
},
})

View File

@@ -14,94 +14,6 @@ import (
type queryColumn int
const (
queryColumnExporterAddress queryColumn = iota + 1
queryColumnExporterName
queryColumnExporterGroup
queryColumnExporterRole
queryColumnExporterSite
queryColumnExporterRegion
queryColumnExporterTenant
queryColumnSrcAS
queryColumnSrcNetName
queryColumnSrcNetRole
queryColumnSrcNetSite
queryColumnSrcNetRegion
queryColumnSrcNetTenant
queryColumnSrcCountry
queryColumnInIfName
queryColumnInIfDescription
queryColumnInIfSpeed
queryColumnInIfConnectivity
queryColumnInIfProvider
queryColumnInIfBoundary
queryColumnEType
queryColumnProto
queryColumnSrcPort
queryColumnSrcAddr
queryColumnDstAS
queryColumnDstNetName
queryColumnDstNetRole
queryColumnDstNetSite
queryColumnDstNetRegion
queryColumnDstNetTenant
queryColumnDstCountry
queryColumnOutIfName
queryColumnOutIfDescription
queryColumnOutIfSpeed
queryColumnOutIfConnectivity
queryColumnOutIfProvider
queryColumnOutIfBoundary
queryColumnDstAddr
queryColumnDstPort
queryColumnForwardingStatus
queryColumnPacketSizeBucket
)
var queryColumnMap = helpers.NewBimap(map[queryColumn]string{
queryColumnExporterAddress: "ExporterAddress",
queryColumnExporterName: "ExporterName",
queryColumnExporterGroup: "ExporterGroup",
queryColumnExporterRole: "ExporterRole",
queryColumnExporterSite: "ExporterSite",
queryColumnExporterRegion: "ExporterRegion",
queryColumnExporterTenant: "ExporterTenant",
queryColumnSrcAddr: "SrcAddr",
queryColumnDstAddr: "DstAddr",
queryColumnSrcAS: "SrcAS",
queryColumnDstAS: "DstAS",
queryColumnSrcNetName: "SrcNetName",
queryColumnDstNetName: "DstNetName",
queryColumnSrcNetRole: "SrcNetRole",
queryColumnDstNetRole: "DstNetRole",
queryColumnSrcNetSite: "SrcNetSite",
queryColumnDstNetSite: "DstNetSite",
queryColumnSrcNetRegion: "SrcNetRegion",
queryColumnDstNetRegion: "DstNetRegion",
queryColumnSrcNetTenant: "SrcNetTenant",
queryColumnDstNetTenant: "DstNetTenant",
queryColumnSrcCountry: "SrcCountry",
queryColumnDstCountry: "DstCountry",
queryColumnInIfName: "InIfName",
queryColumnOutIfName: "OutIfName",
queryColumnInIfDescription: "InIfDescription",
queryColumnOutIfDescription: "OutIfDescription",
queryColumnInIfSpeed: "InIfSpeed",
queryColumnOutIfSpeed: "OutIfSpeed",
queryColumnInIfConnectivity: "InIfConnectivity",
queryColumnOutIfConnectivity: "OutIfConnectivity",
queryColumnInIfProvider: "InIfProvider",
queryColumnOutIfProvider: "OutIfProvider",
queryColumnInIfBoundary: "InIfBoundary",
queryColumnOutIfBoundary: "OutIfBoundary",
queryColumnEType: "EType",
queryColumnProto: "Proto",
queryColumnSrcPort: "SrcPort",
queryColumnDstPort: "DstPort",
queryColumnForwardingStatus: "ForwardingStatus",
queryColumnPacketSizeBucket: "PacketSizeBucket",
})
func (gc queryColumn) MarshalText() ([]byte, error) {
got, ok := queryColumnMap.LoadValue(gc)
if ok {
@@ -128,6 +40,9 @@ type queryFilter struct {
mainTableRequired bool
}
func (gf queryFilter) String() string {
return gf.filter
}
func (gf queryFilter) MarshalText() ([]byte, error) {
return []byte(gf.filter), nil
}
@@ -172,3 +87,12 @@ func (gc queryColumn) toSQLSelect() string {
}
return strValue
}
// reverseDirection reverse the direction of a column (src/dst, in/out)
func (gc queryColumn) reverseDirection() queryColumn {
value, ok := queryColumnMap.LoadKey(filter.ReverseColumnDirection(gc.String()))
if !ok {
panic("unknown reverse column")
}
return value
}

94
console/query_consts.go Normal file
View File

@@ -0,0 +1,94 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package console
import "akvorado/common/helpers"
const (
queryColumnExporterAddress queryColumn = iota + 1
queryColumnExporterName
queryColumnExporterGroup
queryColumnExporterRole
queryColumnExporterSite
queryColumnExporterRegion
queryColumnExporterTenant
queryColumnSrcAS
queryColumnSrcNetName
queryColumnSrcNetRole
queryColumnSrcNetSite
queryColumnSrcNetRegion
queryColumnSrcNetTenant
queryColumnSrcCountry
queryColumnInIfName
queryColumnInIfDescription
queryColumnInIfSpeed
queryColumnInIfConnectivity
queryColumnInIfProvider
queryColumnInIfBoundary
queryColumnEType
queryColumnProto
queryColumnSrcPort
queryColumnSrcAddr
queryColumnDstAS
queryColumnDstNetName
queryColumnDstNetRole
queryColumnDstNetSite
queryColumnDstNetRegion
queryColumnDstNetTenant
queryColumnDstCountry
queryColumnOutIfName
queryColumnOutIfDescription
queryColumnOutIfSpeed
queryColumnOutIfConnectivity
queryColumnOutIfProvider
queryColumnOutIfBoundary
queryColumnDstAddr
queryColumnDstPort
queryColumnForwardingStatus
queryColumnPacketSizeBucket
)
var queryColumnMap = helpers.NewBimap(map[queryColumn]string{
queryColumnExporterAddress: "ExporterAddress",
queryColumnExporterName: "ExporterName",
queryColumnExporterGroup: "ExporterGroup",
queryColumnExporterRole: "ExporterRole",
queryColumnExporterSite: "ExporterSite",
queryColumnExporterRegion: "ExporterRegion",
queryColumnExporterTenant: "ExporterTenant",
queryColumnSrcAddr: "SrcAddr",
queryColumnDstAddr: "DstAddr",
queryColumnSrcAS: "SrcAS",
queryColumnDstAS: "DstAS",
queryColumnSrcNetName: "SrcNetName",
queryColumnDstNetName: "DstNetName",
queryColumnSrcNetRole: "SrcNetRole",
queryColumnDstNetRole: "DstNetRole",
queryColumnSrcNetSite: "SrcNetSite",
queryColumnDstNetSite: "DstNetSite",
queryColumnSrcNetRegion: "SrcNetRegion",
queryColumnDstNetRegion: "DstNetRegion",
queryColumnSrcNetTenant: "SrcNetTenant",
queryColumnDstNetTenant: "DstNetTenant",
queryColumnSrcCountry: "SrcCountry",
queryColumnDstCountry: "DstCountry",
queryColumnInIfName: "InIfName",
queryColumnOutIfName: "OutIfName",
queryColumnInIfDescription: "InIfDescription",
queryColumnOutIfDescription: "OutIfDescription",
queryColumnInIfSpeed: "InIfSpeed",
queryColumnOutIfSpeed: "OutIfSpeed",
queryColumnInIfConnectivity: "InIfConnectivity",
queryColumnOutIfConnectivity: "OutIfConnectivity",
queryColumnInIfProvider: "InIfProvider",
queryColumnOutIfProvider: "OutIfProvider",
queryColumnInIfBoundary: "InIfBoundary",
queryColumnOutIfBoundary: "OutIfBoundary",
queryColumnEType: "EType",
queryColumnProto: "Proto",
queryColumnSrcPort: "SrcPort",
queryColumnDstPort: "DstPort",
queryColumnForwardingStatus: "ForwardingStatus",
queryColumnPacketSizeBucket: "PacketSizeBucket",
})