console: add a bidirectional mode for graphs

It allows one to also display flows in the opposite direction.
This commit is contained in:
Vincent Bernat
2022-08-07 23:15:18 +02:00
parent e89a188d6d
commit 50614cef5b
15 changed files with 978 additions and 425 deletions

View File

@@ -88,7 +88,7 @@ console/filter/parser.go: console/filter/parser.peg | $(PIGEON) ; $(info $(M) ge
console/frontend/node_modules: console/frontend/package.json console/frontend/package-lock.json console/frontend/node_modules: console/frontend/package.json console/frontend/package-lock.json
console/frontend/node_modules: ; $(info $(M) fetching node modules) console/frontend/node_modules: ; $(info $(M) fetching node modules)
$Q (cd console/frontend ; npm ci --silent --no-audit --no-fund) && touch $@ $Q (cd console/frontend ; npm ci --silent --no-audit --no-fund) && touch $@
console/frontend/data/fields.json: console/query.go ; $(info $(M) generate list of selectable fields) console/frontend/data/fields.json: console/query_consts.go ; $(info $(M) generate list of selectable fields)
$Q sed -En -e 's/^\tqueryColumn([a-zA-Z]+)( .*|$$)/ "\1"/p' $< \ $Q sed -En -e 's/^\tqueryColumn([a-zA-Z]+)( .*|$$)/ "\1"/p' $< \
| sed -E -e '$$ ! s/$$/,/' -e '1s/^ */[/' -e '$$s/$$/]/' > $@ | sed -E -e '$$ ! s/$$/,/' -e '1s/^ */[/' -e '$$s/$$/]/' > $@
$Q test -s $@ $Q test -s $@

View File

@@ -208,28 +208,28 @@ demo-exporter:
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 16h peak-hour: 16h
multiplier: 3 multiplier: 3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.2
<<: [*from-v4-facebook, *to-v4-customers, *http-src] <<: [*from-v4-facebook, *to-v4-customers, *http-src]
- per-second: 0.2 - per-second: 0.2
in-if-index: [10, 11] in-if-index: [10, 11]
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 16h peak-hour: 16h
multiplier: 3 multiplier: 3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.2
<<: [*from-v4-facebook, *to-v4-customers, *quic-src] <<: [*from-v4-facebook, *to-v4-customers, *quic-src]
- per-second: 1.8 - per-second: 1.8
in-if-index: [10, 11] in-if-index: [10, 11]
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 18h peak-hour: 18h
multiplier: 3 multiplier: 3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.2
<<: [*from-v6-facebook, *to-v6-customers, *http-src] <<: [*from-v6-facebook, *to-v6-customers, *http-src]
- per-second: 0.2 - per-second: 0.2
in-if-index: [10, 11] in-if-index: [10, 11]
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 20h peak-hour: 20h
multiplier: 3 multiplier: 3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.2
<<: [*from-v6-facebook, *to-v6-customers, *quic-src] <<: [*from-v6-facebook, *to-v6-customers, *quic-src]
# Netflix # Netflix
- per-second: 0.2 - per-second: 0.2
@@ -252,7 +252,7 @@ demo-exporter:
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 21h peak-hour: 21h
multiplier: 17 multiplier: 17
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.4
<<: [*from-v4-twitch, *to-v4-customers, *http-src] <<: [*from-v4-twitch, *to-v4-customers, *http-src]
# Akamai # Akamai
- per-second: 0.14 - per-second: 0.14
@@ -290,14 +290,14 @@ demo-exporter:
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 18h peak-hour: 18h
multiplier: 1.3 multiplier: 1.3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.15
<<: [*from-v4-amazon, *to-v4-customers, *http-src] <<: [*from-v4-amazon, *to-v4-customers, *http-src]
- per-second: 0.1 - per-second: 0.1
in-if-index: 10 in-if-index: 10
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 18h peak-hour: 18h
multiplier: 1.3 multiplier: 1.3
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.15
<<: [*from-v6-amazon, *to-v6-customers, *http-src] <<: [*from-v6-amazon, *to-v6-customers, *http-src]
# Random SSH # Random SSH
@@ -306,7 +306,7 @@ demo-exporter:
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 15h peak-hour: 15h
multiplier: 1.2 multiplier: 1.2
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.5
<<: [*from-v4-renater, *to-v4-customers, *ssh-src] <<: [*from-v4-renater, *to-v4-customers, *ssh-src]
# Servers # Servers
- per-second: 0.1 - per-second: 0.1
@@ -314,14 +314,14 @@ demo-exporter:
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 15h peak-hour: 15h
multiplier: 1.2 multiplier: 1.2
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.2
<<: [*from-v4-renater, *to-v4-servers, *ssh-dst] <<: [*from-v4-renater, *to-v4-servers, *ssh-dst]
- per-second: 0.2 - per-second: 0.2
in-if-index: 10 in-if-index: 10
out-if-index: [20, 21] out-if-index: [20, 21]
peak-hour: 15h peak-hour: 15h
multiplier: 1.2 multiplier: 1.2
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.15
<<: [*from-v4-random, *to-v4-servers, *http-dst] <<: [*from-v4-random, *to-v4-servers, *http-dst]
- per-second: 0.2 - per-second: 0.2
in-if-index: 10 in-if-index: 10
@@ -340,7 +340,7 @@ demo-exporter:
multiplier: 1 multiplier: 1
protocol: [tcp, udp] protocol: [tcp, udp]
srcport: [80, 443, 22, 25461, 8080, 4500, 993, 8801] srcport: [80, 443, 22, 25461, 8080, 4500, 993, 8801]
reverse-direction-ratio: 0.1 reverse-direction-ratio: 0.25
<<: [*from-v4-random, *to-v4-customers] <<: [*from-v4-random, *to-v4-customers]
- <<: [*from-v6-random, *to-v6-customers, *random-flow] - <<: [*from-v6-random, *to-v6-customers, *random-flow]
- snmp: - snmp:

View File

@@ -13,6 +13,7 @@ identified with a specific icon:
## Unreleased ## Unreleased
-*console*: add a bidirectional mode for graphs to also display flows in the opposite direction
- 🌱 *demo-exporter*: add a setting to automatically generate a reverse flow - 🌱 *demo-exporter*: add a setting to automatically generate a reverse flow
- 🌱 *docker-compose*: loosen required privileges for `conntrack-fixer` - 🌱 *docker-compose*: loosen required privileges for `conntrack-fixer`

View File

@@ -17,23 +17,28 @@ type Meta struct {
MainTableRequired bool MainTableRequired bool
} }
func (c *current) reverseDirection(direct string) string { // ReverseColumnDirection reverts the direction of a provided column name.
if c.globalStore["meta"].(*Meta).ReverseDirection { func ReverseColumnDirection(name string) string {
if strings.HasPrefix(direct, "Src") { if strings.HasPrefix(name, "Src") {
return "Dst" + direct[3:] return "Dst" + name[3:]
}
if strings.HasPrefix(direct, "Dst") {
return "Src" + direct[3:]
}
if strings.HasPrefix(direct, "In") {
return "Out" + direct[2:]
}
if strings.HasPrefix(direct, "Out") {
return "In" + direct[3:]
}
panic("no reverse?")
} }
return direct if strings.HasPrefix(name, "Dst") {
return "Src" + name[3:]
}
if strings.HasPrefix(name, "In") {
return "Out" + name[2:]
}
if strings.HasPrefix(name, "Out") {
return "In" + name[3:]
}
return name
}
func (c *current) reverseColumnDirection(name string) string {
if c.globalStore["meta"].(*Meta).ReverseDirection {
return ReverseColumnDirection(name)
}
return name
} }
func lastIP(subnet *net.IPNet) net.IP { func lastIP(subnet *net.IPNet) net.IP {

View File

@@ -49,9 +49,9 @@ ConditionExpr "conditional" ←
ColumnIP ← ColumnIP ←
"ExporterAddress"i { return "ExporterAddress", nil } "ExporterAddress"i { return "ExporterAddress", nil }
/ "SrcAddr"i #{ c.state["main-table-only"] = true ; return nil } / "SrcAddr"i #{ c.state["main-table-only"] = true ; return nil }
{ return c.reverseDirection("SrcAddr"), nil } { return c.reverseColumnDirection("SrcAddr"), nil }
/ "DstAddr"i #{ c.state["main-table-only"] = true ; return nil } / "DstAddr"i #{ c.state["main-table-only"] = true ; return nil }
{ return c.reverseDirection("DstAddr"), nil } { return c.reverseColumnDirection("DstAddr"), nil }
ConditionIPExpr "condition on IP" ← ConditionIPExpr "condition on IP" ←
column:ColumnIP _ column:ColumnIP _
operator:("=" / "!=") _ ip:IP { operator:("=" / "!=") _ ip:IP {
@@ -73,26 +73,26 @@ ConditionStringExpr "condition on string" ←
/ "ExporterSite"i { return "ExporterSite", nil } / "ExporterSite"i { return "ExporterSite", nil }
/ "ExporterRegion"i { return "ExporterRegion", nil } / "ExporterRegion"i { return "ExporterRegion", nil }
/ "ExporterTenant"i { return "ExporterTenant", nil } / "ExporterTenant"i { return "ExporterTenant", nil }
/ "SrcCountry"i { return c.reverseDirection("SrcCountry"), nil } / "SrcCountry"i { return c.reverseColumnDirection("SrcCountry"), nil }
/ "DstCountry"i { return c.reverseDirection("DstCountry"), nil } / "DstCountry"i { return c.reverseColumnDirection("DstCountry"), nil }
/ "SrcNetName"i { return c.reverseDirection("SrcNetName"), nil } / "SrcNetName"i { return c.reverseColumnDirection("SrcNetName"), nil }
/ "DstNetName"i { return c.reverseDirection("DstNetName"), nil } / "DstNetName"i { return c.reverseColumnDirection("DstNetName"), nil }
/ "SrcNetRole"i { return c.reverseDirection("SrcNetRole"), nil } / "SrcNetRole"i { return c.reverseColumnDirection("SrcNetRole"), nil }
/ "DstNetRole"i { return c.reverseDirection("DstNetRole"), nil } / "DstNetRole"i { return c.reverseColumnDirection("DstNetRole"), nil }
/ "SrcNetSite"i { return c.reverseDirection("SrcNetSite"), nil } / "SrcNetSite"i { return c.reverseColumnDirection("SrcNetSite"), nil }
/ "DstNetSite"i { return c.reverseDirection("DstNetSite"), nil } / "DstNetSite"i { return c.reverseColumnDirection("DstNetSite"), nil }
/ "SrcNetRegion"i { return c.reverseDirection("SrcNetRegion"), nil } / "SrcNetRegion"i { return c.reverseColumnDirection("SrcNetRegion"), nil }
/ "DstNetRegion"i { return c.reverseDirection("DstNetRegion"), nil } / "DstNetRegion"i { return c.reverseColumnDirection("DstNetRegion"), nil }
/ "SrcNetTenant"i { return c.reverseDirection("SrcNetTenant"), nil } / "SrcNetTenant"i { return c.reverseColumnDirection("SrcNetTenant"), nil }
/ "DstNetTenant"i { return c.reverseDirection("DstNetTenant"), nil } / "DstNetTenant"i { return c.reverseColumnDirection("DstNetTenant"), nil }
/ "InIfName"i { return c.reverseDirection("InIfName"), nil } / "InIfName"i { return c.reverseColumnDirection("InIfName"), nil }
/ "OutIfName"i { return c.reverseDirection("OutIfName"), nil } / "OutIfName"i { return c.reverseColumnDirection("OutIfName"), nil }
/ "InIfDescription"i { return c.reverseDirection("InIfDescription"), nil } / "InIfDescription"i { return c.reverseColumnDirection("InIfDescription"), nil }
/ "OutIfDescription"i { return c.reverseDirection("OutIfDescription"), nil } / "OutIfDescription"i { return c.reverseColumnDirection("OutIfDescription"), nil }
/ "InIfConnectivity"i { return c.reverseDirection("InIfConnectivity"), nil } / "InIfConnectivity"i { return c.reverseColumnDirection("InIfConnectivity"), nil }
/ "OutIfConnectivity"i { return c.reverseDirection("OutIfConnectivity"), nil } / "OutIfConnectivity"i { return c.reverseColumnDirection("OutIfConnectivity"), nil }
/ "InIfProvider"i { return c.reverseDirection("InIfProvider"), nil } / "InIfProvider"i { return c.reverseColumnDirection("InIfProvider"), nil }
/ "OutIfProvider"i { return c.reverseDirection("OutIfProvider"), nil }) _ / "OutIfProvider"i { return c.reverseColumnDirection("OutIfProvider"), nil }) _
rcond:RConditionStringExpr { rcond:RConditionStringExpr {
return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil
} }
@@ -105,16 +105,16 @@ RConditionStringExpr "condition on string" ←
} }
ConditionBoundaryExpr "condition on boundary" ← ConditionBoundaryExpr "condition on boundary" ←
column:("InIfBoundary"i { return c.reverseDirection("InIfBoundary"), nil } column:("InIfBoundary"i { return c.reverseColumnDirection("InIfBoundary"), nil }
/ "OutIfBoundary"i { return c.reverseDirection("OutIfBoundary"), nil }) _ / "OutIfBoundary"i { return c.reverseColumnDirection("OutIfBoundary"), nil }) _
operator:("=" / "!=") _ operator:("=" / "!=") _
boundary:("external"i / "internal"i / "undefined"i) { boundary:("external"i / "internal"i / "undefined"i) {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), return fmt.Sprintf("%s %s %s", toString(column), toString(operator),
quote(strings.ToLower(toString(boundary)))), nil quote(strings.ToLower(toString(boundary)))), nil
} }
ConditionSpeedExpr "condition on speed" ← ConditionSpeedExpr "condition on speed" ←
column:("InIfSpeed"i { return c.reverseDirection("InIfSpeed"), nil } column:("InIfSpeed"i { return c.reverseColumnDirection("InIfSpeed"), nil }
/ "OutIfSpeed"i { return c.reverseDirection("OutIfSpeed"), nil }) _ / "OutIfSpeed"i { return c.reverseColumnDirection("OutIfSpeed"), nil }) _
operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _ operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _
value:Unsigned64 { value:Unsigned64 {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
@@ -126,15 +126,15 @@ ConditionForwardingStatusExpr "condition on forwarding status" ←
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
} }
ConditionPortExpr "condition on port" ← ConditionPortExpr "condition on port" ←
column:("SrcPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseDirection("SrcPort"), nil } column:("SrcPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseColumnDirection("SrcPort"), nil }
/ "DstPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseDirection("DstPort"), nil }) _ / "DstPort"i #{ c.state["main-table-only"] = true ; return nil } { return c.reverseColumnDirection("DstPort"), nil }) _
operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _ value:Unsigned16 { operator:("=" / ">=" / "<=" / "<" / ">" / "!=") _ value:Unsigned16 {
return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil return fmt.Sprintf("%s %s %s", toString(column), toString(operator), toString(value)), nil
} }
ConditionASExpr "condition on AS number" ← ConditionASExpr "condition on AS number" ←
column:("SrcAS"i { return c.reverseDirection("SrcAS"), nil } column:("SrcAS"i { return c.reverseColumnDirection("SrcAS"), nil }
/ "DstAS"i { return c.reverseDirection("DstAS"), nil }) _ / "DstAS"i { return c.reverseColumnDirection("DstAS"), nil }) _
rcond:RConditionASExpr { rcond:RConditionASExpr {
return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil return fmt.Sprintf("%s %s", toString(column), toString(rcond)), nil
} }

View File

@@ -0,0 +1,33 @@
<template>
<div>
<label :for="id" class="flex items-center">
<input
:id="id"
type="checkbox"
:checked="modelValue"
class="h-4 w-4 rounded border-gray-300 bg-gray-100 text-blue-600 focus:ring-2 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700 dark:ring-offset-gray-800 dark:focus:ring-blue-600"
@change="$emit('update:modelValue', $event.target.checked)"
/>
<span class="ml-1 text-sm font-medium text-gray-900 dark:text-gray-300">
{{ label }}
</span>
</label>
</div>
</template>
<script setup>
defineProps({
label: {
type: String,
required: true,
},
modelValue: {
type: Boolean,
required: true,
},
});
defineEmits(["update:modelValue"]);
import { v4 as uuidv4 } from "uuid";
const id = uuidv4();
</script>

View File

@@ -2,6 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only // SPDX-License-Identifier: AGPL-3.0-only
export function formatXps(value) { export function formatXps(value) {
value = Math.abs(value);
const suffixes = ["", "K", "M", "G", "T"]; const suffixes = ["", "K", "M", "G", "T"];
let idx = 0; let idx = 0;
while (value >= 1000 && idx < suffixes.length) { while (value >= 1000 && idx < suffixes.length) {

View File

@@ -151,6 +151,7 @@ const { data, isFetching, aborted, abort, canAbort, error } = useFetch("", {
end: payload.value.end, end: payload.value.end,
graphType: payload.value.graphType, graphType: payload.value.graphType,
units: payload.value.units, units: payload.value.units,
bidirectional: payload.value.bidirectional,
}; };
// Also update URL. // Also update URL.

View File

@@ -28,6 +28,7 @@ import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants"; import { graphTypes } from "./constants";
const { isDark } = inject("theme"); const { isDark } = inject("theme");
import { uniqWith, isEqual, findIndex } from "lodash-es";
import { use, graphic } from "echarts/core"; import { use, graphic } from "echarts/core";
import { CanvasRenderer } from "echarts/renderers"; import { CanvasRenderer } from "echarts/renderers";
import { LineChart } from "echarts/charts"; import { LineChart } from "echarts/charts";
@@ -69,22 +70,62 @@ const commonGraph = {
type: "cross", type: "cross",
label: { backgroundColor: "#6a7985" }, label: { backgroundColor: "#6a7985" },
}, },
valueFormatter: formatXps, formatter: (params) => {
// We will use a custom formatter, notably to handle bidirectional tooltips.
if (params.length === 0) return;
let table = [],
bidirectional = false;
params.forEach((param) => {
let idx = findIndex(table, (r) => r.seriesName === param.seriesName);
if (idx === -1) {
table.push({
marker: param.marker,
seriesName: param.seriesName,
});
idx = table.length - 1;
}
const val = param.value[param.seriesIndex + 1];
if (table[idx].col1 !== undefined || val < 0) {
table[idx].col2 = val;
bidirectional = true;
} else table[idx].col1 = val;
});
const rows = table
.map(
(row) => `<tr>
<td>${row.marker} ${row.seriesName}</td>
<td class="pl-2">${bidirectional ? "↑" : ""}<b>${formatXps(
row.col1 || 0
)}</b></td>
<td class="pl-2">${bidirectional ? "↓" : ""}<b>${
bidirectional ? formatXps(row.col2 || 0) : ""
}</b></td>
</tr>`
)
.join("");
return `${params[0].axisValueLabel}<table>${rows}</table>`;
},
}, },
}; };
const graph = computed(() => { const graph = computed(() => {
const theme = isDark.value ? "dark" : "light"; const theme = isDark.value ? "dark" : "light";
const data = props.data || {}; const data = props.data || {};
if (!data.t) return {}; if (!data.t) return {};
const dataset = { const rowName = (row) => row.join(" — ") || "Total",
dataset = {
sourceHeader: false, sourceHeader: false,
dimensions: [ dimensions: ["time", ...data.rows.map(rowName)],
"time",
...data.rows.map((rows) => rows.join(" — ") || "Total"),
],
source: [ source: [
...data.t ...data.t
.map((t, timeIdx) => [t, ...data.points.map((rows) => rows[timeIdx])]) .map((t, timeIdx) => [
t,
...data.points.map(
// Unfortunately, eCharts does not seem to make it easy
// to inverse an axis and put the result below. Therefore,
// we use negative values for the second axis.
(row, rowIdx) => row[timeIdx] * (data.axis[rowIdx] == 1 ? 1 : -1)
),
])
.slice(1, -1), .slice(1, -1),
], ],
}, },
@@ -95,7 +136,7 @@ const graph = computed(() => {
}, },
yAxis = { yAxis = {
type: "value", type: "value",
min: 0, min: data.bidirectional ? undefined : 0,
axisLabel: { formatter: formatXps }, axisLabel: { formatter: formatXps },
axisPointer: { axisPointer: {
label: { formatter: ({ value }) => formatXps(value) }, label: { formatter: ({ value }) => formatXps(value) },
@@ -104,6 +145,9 @@ const graph = computed(() => {
// Lines and stacked areas // Lines and stacked areas
if ([graphTypes.stacked, graphTypes.lines].includes(data.graphType)) { if ([graphTypes.stacked, graphTypes.lines].includes(data.graphType)) {
const uniqRows = uniqWith(data.rows, isEqual),
uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow));
return { return {
grid: { grid: {
left: 60, left: 60,
@@ -115,8 +159,8 @@ const graph = computed(() => {
yAxis, yAxis,
dataset, dataset,
series: data.rows series: data.rows
.map((rows, idx) => { .map((row, idx) => {
const isOther = rows.some((name) => name === "Other"), const isOther = row.some((name) => name === "Other"),
color = isOther ? dataColorGrey : dataColor; color = isOther ? dataColorGrey : dataColor;
if (data.graphType === graphTypes.lines && isOther) { if (data.graphType === graphTypes.lines && isOther) {
return undefined; return undefined;
@@ -125,10 +169,10 @@ const graph = computed(() => {
type: "line", type: "line",
symbol: "none", symbol: "none",
itemStyle: { itemStyle: {
color: color(idx, false, theme), color: color(uniqRowIndex(row), false, theme),
}, },
lineStyle: { lineStyle: {
color: color(idx, false, theme), color: color(uniqRowIndex(row), false, theme),
width: 2, width: 2,
}, },
emphasis: { emphasis: {
@@ -144,22 +188,23 @@ const graph = computed(() => {
if (data.graphType === graphTypes.stacked) { if (data.graphType === graphTypes.stacked) {
serie = { serie = {
...serie, ...serie,
stack: "all", stack: data.axis[idx],
lineStyle: lineStyle:
idx == data.rows.length - 1 idx == data.rows.length - 1 ||
data.axis[idx] != data.axis[idx + 1]
? { ? {
color: isDark.value ? "#ddd" : "#111", color: isDark.value ? "#ddd" : "#111",
width: 2, width: 1.5,
} }
: { : {
color: color(idx, false, theme), color: color(uniqRowIndex(row), false, theme),
width: 1, width: 1,
}, },
areaStyle: { areaStyle: {
opacity: 0.95, opacity: 0.95,
color: new graphic.LinearGradient(0, 0, 0, 1, [ color: new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 0, color: color(idx, false, theme) }, { offset: 0, color: color(uniqRowIndex(row), false, theme) },
{ offset: 1, color: color(idx, true, theme) }, { offset: 1, color: color(uniqRowIndex(row), true, theme) },
]), ]),
}, },
}; };
@@ -170,34 +215,38 @@ const graph = computed(() => {
}; };
} }
if (data.graphType === graphTypes.grid) { if (data.graphType === graphTypes.grid) {
const dataRows = data.rows.filter((rows) => const uniqRows = uniqWith(data.rows, isEqual).filter((row) =>
rows.some((name) => name !== "Other") row.some((name) => name !== "Other")
), ),
otherIndex = dataset.dimensions.indexOf("Other"); uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow)),
const maxY = Math.max( otherIndexes = data.rows
...dataset.source.map((rows) => .map((row, idx) => (row.some((name) => name === "Other") ? idx : -1))
otherIndex === -1 .filter((idx) => idx >= 0),
? Math.max(...rows.slice(1)) somethingY = (fn) =>
: Math.max( fn(
// Skip "Other" column ...dataset.source.map((row) =>
...rows.slice(1, otherIndex), fn(
...rows.slice(otherIndex + 1) ...row
.slice(1)
.filter((_, idx) => !otherIndexes.includes(idx + 1))
) )
) )
); ),
let rowNumber = Math.ceil(Math.sqrt(dataRows.length)), maxY = somethingY(Math.max),
minY = somethingY(Math.min);
let rowNumber = Math.ceil(Math.sqrt(uniqRows.length)),
colNumber = rowNumber; colNumber = rowNumber;
if ((rowNumber - 1) * colNumber >= dataRows.length) { if ((rowNumber - 1) * colNumber >= uniqRows.length) {
rowNumber--; rowNumber--;
} }
const positions = dataRows.map((_, idx) => ({ const positions = uniqRows.map((_, idx) => ({
left: ((idx % colNumber) / colNumber) * 100, left: ((idx % colNumber) / colNumber) * 100,
top: (Math.floor(idx / colNumber) / rowNumber) * 100, top: (Math.floor(idx / colNumber) / rowNumber) * 100,
width: (1 / colNumber) * 100, width: (1 / colNumber) * 100,
height: (1 / rowNumber) * 100, height: (1 / rowNumber) * 100,
})); }));
return { return {
title: dataRows.map((rows, idx) => ({ title: uniqRows.map((_, idx) => ({
textAlign: "left", textAlign: "left",
textStyle: { textStyle: {
fontSize: 12, fontSize: 12,
@@ -209,7 +258,7 @@ const graph = computed(() => {
bottom: 100 - positions[idx].top - positions[idx].height - 0.5 + "%", bottom: 100 - positions[idx].top - positions[idx].height - 0.5 + "%",
left: positions[idx].left + 0.25 + "%", left: positions[idx].left + 0.25 + "%",
})), })),
grid: dataRows.map((_, idx) => ({ grid: uniqRows.map((_, idx) => ({
show: true, show: true,
borderWidth: 0, borderWidth: 0,
left: positions[idx].left + 0.25 + "%", left: positions[idx].left + 0.25 + "%",
@@ -217,46 +266,55 @@ const graph = computed(() => {
width: positions[idx].width - 0.5 + "%", width: positions[idx].width - 0.5 + "%",
height: positions[idx].height - 0.5 + "%", height: positions[idx].height - 0.5 + "%",
})), })),
xAxis: dataRows.map((_, idx) => ({ xAxis: uniqRows.map((_, idx) => ({
...xAxis, ...xAxis,
gridIndex: idx, gridIndex: idx,
show: false, show: false,
})), })),
yAxis: dataRows.map((_, idx) => ({ yAxis: uniqRows.map((_, idx) => ({
...yAxis, ...yAxis,
max: maxY, max: maxY,
min: data.bidirectional ? minY : 0,
gridIndex: idx, gridIndex: idx,
show: false, show: false,
})), })),
dataset, dataset,
series: dataRows.map((rows, idx) => { series: data.rows
let serie = { .map((row, idx) => {
type: "line", let serie = {
symbol: "none", type: "line",
xAxisIndex: idx, symbol: "none",
yAxisIndex: idx, xAxisIndex: uniqRowIndex(row),
itemStyle: { yAxisIndex: uniqRowIndex(row),
color: dataColor(idx, false, theme), itemStyle: {
}, color: dataColor(uniqRowIndex(row), false, theme),
areaStyle: { },
opacity: 0.95, areaStyle: {
color: new graphic.LinearGradient(0, 0, 0, 1, [ opacity: 0.95,
{ offset: 0, color: dataColor(idx, false, theme) }, color: new graphic.LinearGradient(0, 0, 0, 1, [
{ offset: 1, color: dataColor(idx, true, theme) }, {
]), offset: 0,
}, color: dataColor(uniqRowIndex(row), false, theme),
emphasis: { },
focus: "series", {
}, offset: 1,
encode: { color: dataColor(uniqRowIndex(row), true, theme),
x: 0, },
y: idx + 1, ]),
seriesName: idx + 1, },
seriesId: idx + 1, emphasis: {
}, focus: "series",
}; },
return serie; encode: {
}), x: 0,
y: idx + 1,
seriesName: idx + 1,
seriesId: idx + 1,
},
};
return serie;
})
.filter((s) => s.xAxisIndex >= 0),
}; };
} }
return {}; return {};

View File

@@ -2,61 +2,85 @@
<!-- SPDX-License-Identifier: AGPL-3.0-only --> <!-- SPDX-License-Identifier: AGPL-3.0-only -->
<template> <template>
<div <div>
class="relative overflow-x-auto shadow-md dark:shadow-white/10 sm:rounded-lg" <!-- Axis selection -->
> <div
<table v-if="axes.length > 1"
class="w-full max-w-full text-left text-sm text-gray-700 dark:text-gray-200" class="border-b border-gray-200 text-center text-sm font-medium text-gray-500 dark:border-gray-700 dark:text-gray-400"
> >
<thead class="bg-gray-50 text-xs uppercase dark:bg-gray-700"> <ul class="-mb-px flex flex-wrap">
<tr> <li v-for="{ id: axis, name } in axes" :key="axis" class="mr-2">
<th <button
scope="col" class="pointer-cursor inline-block rounded-t-lg border-b-2 border-transparent p-4 hover:border-gray-300 hover:text-gray-600 dark:hover:text-gray-300"
:class="{ 'px-6 py-2': table.rows.some((r) => r.color) }" :class="{
></th> 'active border-blue-600 text-blue-600 dark:border-blue-500 dark:text-blue-500':
<th displayedAxis === axis,
v-for="column in table.columns" }"
:key="column.name" :aria-current="displayedAxis === axis ? 'page' : null"
scope="col" @click="selectedAxis = axis"
class="px-6 py-2"
:class="column.classNames"
> >
{{ column.name }} {{ name }}
</th> </button>
</tr> </li>
</thead> </ul>
<tbody> </div>
<tr <!-- Table -->
v-for="(row, index) in table.rows" <div
:key="index" class="relative overflow-x-auto shadow-md dark:shadow-white/10 sm:rounded-lg"
class="border-b odd:bg-white even:bg-gray-50 dark:border-gray-700 dark:bg-gray-800 odd:dark:bg-gray-800 even:dark:bg-gray-700" >
@pointerenter="highlightEnabled && $emit('highlighted', index)" <table
@pointerleave="$emit('highlighted', null)" class="w-full max-w-full text-left text-sm text-gray-700 dark:text-gray-200"
> >
<th scope="row"> <thead class="bg-gray-50 text-xs uppercase dark:bg-gray-700">
<div v-if="row.color" class="px-6 py-2 text-right font-medium"> <tr>
<div <th
class="w-5 cursor-pointer rounded" scope="col"
:style="{ :class="{ 'px-6 py-2': table.rows.some((r) => r.color) }"
backgroundColor: row.color, ></th>
printColorAdjust: 'exact', <th
}" v-for="column in table.columns"
> :key="column.name"
&nbsp; scope="col"
class="px-6 py-2"
:class="column.classNames"
>
{{ column.name }}
</th>
</tr>
</thead>
<tbody>
<tr
v-for="(row, index) in table.rows"
:key="index"
class="border-b odd:bg-white even:bg-gray-50 dark:border-gray-700 dark:bg-gray-800 odd:dark:bg-gray-800 even:dark:bg-gray-700"
@pointerenter="highlight(index)"
@pointerleave="highlight(null)"
>
<th scope="row">
<div v-if="row.color" class="px-6 py-2 text-right font-medium">
<div
class="w-5 cursor-pointer rounded"
:style="{
backgroundColor: row.color,
printColorAdjust: 'exact',
}"
>
&nbsp;
</div>
</div> </div>
</div> </th>
</th> <td
<td v-for="(value, idx) in row.values"
v-for="(value, idx) in row.values" :key="idx"
:key="idx" class="px-6 py-2"
class="px-6 py-2" :class="value.classNames"
:class="value.classNames" >
> {{ value.value }}
{{ value.value }} </td>
</td> </tr>
</tr> </tbody>
</tbody> </table>
</table> </div>
</div> </div>
</template> </template>
@@ -67,21 +91,51 @@ const props = defineProps({
default: null, default: null,
}, },
}); });
defineEmits(["highlighted"]); const emit = defineEmits(["highlighted"]);
import { computed, inject } from "vue"; import { computed, inject, ref } from "vue";
import { formatXps, dataColor, dataColorGrey } from "@/utils"; import { formatXps, dataColor, dataColorGrey } from "@/utils";
import { graphTypes } from "./constants"; import { graphTypes } from "./constants";
const { isDark } = inject("theme"); const { isDark } = inject("theme");
const { stacked, lines, grid, sankey } = graphTypes; const { stacked, lines, grid, sankey } = graphTypes;
const highlightEnabled = computed(() => import { uniq, uniqWith, isEqual, findIndex, takeWhile } from "lodash-es";
[stacked, lines, grid].includes(props.data?.graphType)
const highlight = (index) => {
if (index === null) {
emit("highlighted", null);
return;
}
if (![stacked, lines, grid].includes(props.data?.graphType)) return;
// The index provided is the one in the filtered data. We want the original index.
const originalIndex = takeWhile(
props.data.rows,
(() => {
let count = 0;
return (_, idx) =>
props.data.axis[idx] != displayedAxis.value || count++ < index;
})()
).length;
emit("highlighted", originalIndex);
};
const axes = computed(() =>
uniq(props.data.axis ?? []).map((axis) => ({
id: axis,
name: { 1: "Direct", 2: "Reverse" }[axis] ?? "Unknown",
}))
);
const selectedAxis = ref(1);
const displayedAxis = computed(() =>
axes.value.some((axis) => axis.id === selectedAxis.value)
? selectedAxis.value
: 1
); );
const table = computed(() => { const table = computed(() => {
const theme = isDark.value ? "dark" : "light"; const theme = isDark.value ? "dark" : "light";
const data = props.data || {}; const data = props.data || {};
if ([stacked, lines, grid].includes(data.graphType)) { if ([stacked, lines, grid].includes(data.graphType)) {
const uniqRows = uniqWith(data.rows, isEqual),
uniqRowIndex = (row) => findIndex(uniqRows, (orow) => isEqual(row, orow));
return { return {
columns: [ columns: [
// Dimensions // Dimensions
@@ -95,28 +149,30 @@ const table = computed(() => {
{ name: "~95th", classNames: "text-right" }, { name: "~95th", classNames: "text-right" },
], ],
rows: rows:
data.rows?.map((rows, idx) => { data.rows
const color = rows.some((name) => name === "Other") ?.filter((_, idx) => data.axis[idx] == displayedAxis.value)
? dataColorGrey .map((row, idx) => {
: dataColor; const color = row.some((name) => name === "Other")
return { ? dataColorGrey
values: [ : dataColor;
// Dimensions return {
...rows.map((r) => ({ value: r })), values: [
// Stats // Dimensions
...[ ...row.map((r) => ({ value: r })),
data.min[idx], // Stats
data.max[idx], ...[
data.average[idx], data.min[idx],
data["95th"][idx], data.max[idx],
].map((d) => ({ data.average[idx],
value: formatXps(d) + data.units.slice(-3), data["95th"][idx],
classNames: "text-right tabular-nums", ].map((d) => ({
})), value: formatXps(d) + data.units.slice(-3),
], classNames: "text-right tabular-nums",
color: color(idx, false, theme), })),
}; ],
}) || [], color: color(uniqRowIndex(row), false, theme),
};
}) || [],
}; };
} }
if ([sankey].includes(data.graphType)) { if ([sankey].includes(data.graphType)) {
@@ -129,10 +185,10 @@ const table = computed(() => {
// Average // Average
{ name: "Average", classNames: "text-right" }, { name: "Average", classNames: "text-right" },
], ],
rows: data.rows?.map((rows, idx) => ({ rows: data.rows?.map((row, idx) => ({
values: [ values: [
// Dimensions // Dimensions
...rows.map((r) => ({ value: r })), ...row.map((r) => ({ value: r })),
// Average // Average
{ {
value: formatXps(data.xps[idx]) + data.units.slice(-3), value: formatXps(data.xps[idx]) + data.units.slice(-3),

View File

@@ -33,14 +33,14 @@
> >
<div v-if="open" class="flex flex-col px-3 py-4 lg:max-h-screen"> <div v-if="open" class="flex flex-col px-3 py-4 lg:max-h-screen">
<div <div
class="mb-2 flex flex-row flex-wrap justify-between gap-2 sm:flex-nowrap lg:flex-wrap" class="mb-2 flex flex-row flex-wrap items-center justify-between gap-2 sm:flex-nowrap lg:flex-wrap"
> >
<InputButton <InputButton
attr-type="submit" attr-type="submit"
:disabled="hasErrors && !loading" :disabled="hasErrors && !loading"
:loading="loading" :loading="loading"
:type="loading ? 'alternative' : 'primary'" :type="loading ? 'alternative' : 'primary'"
class="order-2 w-28 justify-center sm:order-3 lg:order-2" class="order-2 w-28 justify-center sm:order-4 lg:order-2"
> >
{{ loading ? "Cancel" : applyLabel }} {{ loading ? "Cancel" : applyLabel }}
</InputButton> </InputButton>
@@ -57,7 +57,7 @@
<InputListBox <InputListBox
v-model="graphType" v-model="graphType"
:items="graphTypeList" :items="graphTypeList"
class="order-3 grow basis-full sm:order-2 sm:basis-0 lg:order-3" class="order-3 grow basis-full sm:order-3 sm:basis-0 lg:order-3 lg:basis-full"
label="Graph type" label="Graph type"
> >
<template #selected>{{ graphType.name }}</template> <template #selected>{{ graphType.name }}</template>
@@ -71,6 +71,12 @@
</div> </div>
</template> </template>
</InputListBox> </InputListBox>
<InputCheckbox
v-if="[stacked, lines, grid].includes(graphType.name)"
v-model="bidirectional"
class="order-4 sm:order-2 lg:order-4"
label="Bidirectional"
/>
</div> </div>
<SectionLabel>Time range</SectionLabel> <SectionLabel>Time range</SectionLabel>
<InputTimeRange v-model="timeRange" /> <InputTimeRange v-model="timeRange" />
@@ -114,6 +120,7 @@ import InputTimeRange from "@/components/InputTimeRange.vue";
import InputDimensions from "@/components/InputDimensions.vue"; import InputDimensions from "@/components/InputDimensions.vue";
import InputListBox from "@/components/InputListBox.vue"; import InputListBox from "@/components/InputListBox.vue";
import InputButton from "@/components/InputButton.vue"; import InputButton from "@/components/InputButton.vue";
import InputCheckbox from "@/components/InputCheckbox.vue";
import InputChoice from "@/components/InputChoice.vue"; import InputChoice from "@/components/InputChoice.vue";
import InputFilter from "@/components/InputFilter.vue"; import InputFilter from "@/components/InputFilter.vue";
import SectionLabel from "./SectionLabel.vue"; import SectionLabel from "./SectionLabel.vue";
@@ -133,6 +140,7 @@ const timeRange = ref({});
const dimensions = ref([]); const dimensions = ref([]);
const filter = ref({}); const filter = ref({});
const units = ref("l3bps"); const units = ref("l3bps");
const bidirectional = ref(false);
const options = computed(() => ({ const options = computed(() => ({
// Common to all graph types // Common to all graph types
@@ -144,8 +152,14 @@ const options = computed(() => ({
filter: filter.value.expression, filter: filter.value.expression,
units: units.value, units: units.value,
// Only for time series // Only for time series
...([stacked, lines].includes(graphType.value.name) && { points: 200 }), ...([stacked, lines].includes(graphType.value.name) && {
...(graphType.value.name === grid && { points: 50 }), bidirectional: bidirectional.value,
points: 200,
}),
...(graphType.value.name === grid && {
bidirectional: bidirectional.value,
points: 50,
}),
})); }));
const applyLabel = computed(() => const applyLabel = computed(() =>
isEqual(options.value, props.modelValue) ? "Refresh" : "Apply" isEqual(options.value, props.modelValue) ? "Refresh" : "Apply"
@@ -169,6 +183,7 @@ watch(
points /* eslint-disable-line no-unused-vars */, points /* eslint-disable-line no-unused-vars */,
filter: _filter = defaultOptions?.filter, filter: _filter = defaultOptions?.filter,
units: _units = "l3bps", units: _units = "l3bps",
bidirectional: _bidirectional = false,
} = modelValue; } = modelValue;
// Dispatch values in refs // Dispatch values in refs
@@ -181,6 +196,7 @@ watch(
}; };
filter.value = { expression: _filter }; filter.value = { expression: _filter };
units.value = _units; units.value = _units;
bidirectional.value = _bidirectional;
// A bit risky, but it seems to work. // A bit risky, but it seems to work.
if (!isEqual(modelValue, options.value)) { if (!isEqual(modelValue, options.value)) {

View File

@@ -17,28 +17,43 @@ import (
// graphHandlerInput describes the input for the /graph endpoint. // graphHandlerInput describes the input for the /graph endpoint.
type graphHandlerInput struct { type graphHandlerInput struct {
Start time.Time `json:"start" binding:"required"` Start time.Time `json:"start" binding:"required"`
End time.Time `json:"end" binding:"required,gtfield=Start"` End time.Time `json:"end" binding:"required,gtfield=Start"`
Points int `json:"points" binding:"required,min=5,max=2000"` // minimum number of points Points int `json:"points" binding:"required,min=5,max=2000"` // minimum number of points
Dimensions []queryColumn `json:"dimensions"` // group by ... Dimensions []queryColumn `json:"dimensions"` // group by ...
Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions Limit int `json:"limit" binding:"min=1,max=50"` // limit product of dimensions
Filter queryFilter `json:"filter"` // where ... Filter queryFilter `json:"filter"` // where ...
Units string `json:"units" binding:"required,oneof=pps l2bps l3bps"` Units string `json:"units" binding:"required,oneof=pps l2bps l3bps"`
Bidirectional bool `json:"bidirectional"`
} }
// graphHandlerOutput describes the output for the /graph endpoint. // graphHandlerOutput describes the output for the /graph endpoint. A
// row is a set of values for dimensions. Currently, axis 1 is for the
// direct direction and axis 2 is for the reverse direction. Rows are
// sorted by axis, then by the sum of traffic.
type graphHandlerOutput struct { type graphHandlerOutput struct {
Rows [][]string `json:"rows"`
Time []time.Time `json:"t"` Time []time.Time `json:"t"`
Rows [][]string `json:"rows"` // List of rows
Points [][]int `json:"points"` // t → row → xps Points [][]int `json:"points"` // t → row → xps
Average []int `json:"average"` // row → xps Axis []int `json:"axis"` // row → axis
Min []int `json:"min"` Average []int `json:"average"` // row → average xps
Max []int `json:"max"` Min []int `json:"min"` // row → min xps
NinetyFivePercentile []int `json:"95th"` Max []int `json:"max"` // row → max xps
NinetyFivePercentile []int `json:"95th"` // row → 95th xps
} }
// graphHandlerInputToSQL converts a graph input to an SQL request // reverseDirection reverses the direction of the provided input
func (input graphHandlerInput) toSQL() (string, error) { func (input graphHandlerInput) reverseDirection() graphHandlerInput {
input.Filter.filter = input.Filter.reverseFilter
dimensions := input.Dimensions
input.Dimensions = make([]queryColumn, len(dimensions))
for i := range dimensions {
input.Dimensions[i] = dimensions[i].reverseDirection()
}
return input
}
func (input graphHandlerInput) toSQL1(axis int, skipWith bool) string {
interval := int64((input.End.Sub(input.Start).Seconds())) / int64(input.Points) interval := int64((input.End.Sub(input.Start).Seconds())) / int64(input.Points)
slot := fmt.Sprintf(`{resolution->%d}`, interval) slot := fmt.Sprintf(`{resolution->%d}`, interval)
@@ -82,7 +97,7 @@ func (input graphHandlerInput) toSQL() (string, error) {
// With // With
with := []string{} with := []string{}
if len(dimensions) > 0 { if len(dimensions) > 0 && !skipWith {
with = append(with, fmt.Sprintf( with = append(with, fmt.Sprintf(
"rows AS (SELECT %s FROM {table} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)", "rows AS (SELECT %s FROM {table} WHERE %s GROUP BY %s ORDER BY SUM(Bytes) DESC LIMIT %d)",
strings.Join(dimensions, ", "), strings.Join(dimensions, ", "),
@@ -97,6 +112,7 @@ func (input graphHandlerInput) toSQL() (string, error) {
sqlQuery := fmt.Sprintf(` sqlQuery := fmt.Sprintf(`
%s %s
SELECT %d AS axis, * FROM (
SELECT SELECT
%s %s
FROM {table} FROM {table}
@@ -105,8 +121,20 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL %s second) FROM toStartOfInterval({timefilter.Start}, INTERVAL %s second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP %s`, withStr, strings.Join(fields, ",\n "), where, slot, slot) STEP %s)`, withStr, axis, strings.Join(fields, ",\n "), where, slot, slot)
return sqlQuery, nil return sqlQuery
}
// graphHandlerInputToSQL converts a graph input to an SQL request
func (input graphHandlerInput) toSQL() string {
result := input.toSQL1(1, false)
if input.Bidirectional {
part2 := input.reverseDirection().toSQL1(2, true)
result = fmt.Sprintf(`%s
UNION ALL
%s`, result, strings.TrimSpace(part2))
}
return strings.TrimSpace(result)
} }
func (c *Component) graphHandlerFunc(gc *gin.Context) { func (c *Component) graphHandlerFunc(gc *gin.Context) {
@@ -117,11 +145,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
return return
} }
sqlQuery, err := input.toSQL() sqlQuery := input.toSQL()
if err != nil {
gc.JSON(http.StatusBadRequest, gin.H{"message": helpers.Capitalize(err.Error())})
return
}
resolution := time.Duration(int64(input.End.Sub(input.Start).Nanoseconds()) / int64(input.Points)) resolution := time.Duration(int64(input.End.Sub(input.Start).Nanoseconds()) / int64(input.Points))
if resolution < time.Second { if resolution < time.Second {
resolution = time.Second resolution = time.Second
@@ -131,6 +155,7 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " ")) gc.Header("X-SQL-Query", strings.ReplaceAll(sqlQuery, "\n", " "))
results := []struct { results := []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"` Time time.Time `ch:"time"`
Xps float64 `ch:"xps"` Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"` Dimensions []string `ch:"dimensions"`
@@ -156,99 +181,140 @@ func (c *Component) graphHandlerFunc(gc *gin.Context) {
} }
} }
// We want to sort rows depending on how much data they gather each // Set time axis. We assume the first returned axis has the complete view.
output := graphHandlerOutput{ output := graphHandlerOutput{
Time: []time.Time{}, Time: []time.Time{},
} }
rowValues := map[string][]int{} // values for each row (indexed by internal key)
rowKeys := map[string][]string{} // mapping from keys to dimensions
rowSums := map[string]uint64{} // sum for a given row (to sort)
lastTime := time.Time{} lastTime := time.Time{}
for _, result := range results { for _, result := range results {
if result.Time != lastTime { if result.Axis == 1 && result.Time != lastTime {
output.Time = append(output.Time, result.Time) output.Time = append(output.Time, result.Time)
lastTime = result.Time lastTime = result.Time
} }
} }
lastTime = time.Time{}
idx := -1 // For the remaining, we will collect information into various
// structures in one pass. Each structure will be keyed by the
// axis and the row.
axes := []int{} // list of axes
rows := map[int]map[string][]string{} // for each axis, a map from row to list of dimensions
points := map[int]map[string][]int{} // for each axis, a map from row to list of points (one point per ts)
sums := map[int]map[string]uint64{} // for each axis, a map from row to sum (for sorting purpose)
lastTimeForAxis := map[int]time.Time{}
timeIndexForAxis := map[int]int{}
for _, result := range results { for _, result := range results {
if result.Time != lastTime { var ok bool
idx++ axis := int(result.Axis)
lastTime = result.Time lastTime, ok = lastTimeForAxis[axis]
}
rowKey := fmt.Sprintf("%s", result.Dimensions)
row, ok := rowValues[rowKey]
if !ok { if !ok {
rowKeys[rowKey] = result.Dimensions // Unknown axis, initialize various structs
axes = append(axes, axis)
lastTimeForAxis[axis] = time.Time{}
timeIndexForAxis[axis] = -1
rows[axis] = map[string][]string{}
points[axis] = map[string][]int{}
sums[axis] = map[string]uint64{}
}
if result.Time != lastTime {
// New timestamp, increment time index
timeIndexForAxis[axis]++
lastTimeForAxis[axis] = result.Time
}
rowKey := fmt.Sprintf("%d-%s", axis, result.Dimensions)
row, ok := points[axis][rowKey]
if !ok {
// Not points for this row yet, create it
rows[axis][rowKey] = result.Dimensions
row = make([]int, len(output.Time)) row = make([]int, len(output.Time))
rowValues[rowKey] = row points[axis][rowKey] = row
sums[axis][rowKey] = 0
} }
rowValues[rowKey][idx] = int(result.Xps) points[axis][rowKey][timeIndexForAxis[axis]] = int(result.Xps)
sum, _ := rowSums[rowKey] sums[axis][rowKey] += uint64(result.Xps)
rowSums[rowKey] = sum + uint64(result.Xps)
} }
rows := make([]string, len(rowKeys)) // Sort axes
i := 0 sort.Ints(axes)
for k := range rowKeys { // Sort the rows using the sums
rows[i] = k sortedRowKeys := map[int][]string{}
i++ for _, axis := range axes {
} sortedRowKeys[axis] = make([]string, 0, len(rows[axis]))
// Sort by sum, except we want "Other" to be last for k := range rows[axis] {
sort.Slice(rows, func(i, j int) bool { sortedRowKeys[axis] = append(sortedRowKeys[axis], k)
if rowKeys[rows[i]][0] == "Other" {
return false
} }
if rowKeys[rows[j]][0] == "Other" { sort.Slice(sortedRowKeys[axis], func(i, j int) bool {
return true iKey := sortedRowKeys[axis][i]
} jKey := sortedRowKeys[axis][j]
return rowSums[rows[i]] > rowSums[rows[j]] if rows[axis][iKey][0] == "Other" {
}) return false
output.Rows = make([][]string, len(rows)) }
output.Points = make([][]int, len(rows)) if rows[axis][jKey][0] == "Other" {
output.Average = make([]int, len(rows)) return true
output.Min = make([]int, len(rows)) }
output.Max = make([]int, len(rows)) return sums[axis][iKey] > sums[axis][jKey]
output.NinetyFivePercentile = make([]int, len(rows)) })
}
for idx, r := range rows {
output.Rows[idx] = rowKeys[r] // Now, we can complete the `output' structure!
output.Points[idx] = rowValues[r] totalRows := 0
output.Average[idx] = int(rowSums[r] / uint64(len(output.Time))) for _, axis := range axes {
// For 95th percentile, we need to sort the values. totalRows += len(rows[axis])
// Use that for min/max too. }
if len(rowValues[r]) == 0 { output.Rows = make([][]string, totalRows)
continue output.Axis = make([]int, totalRows)
} output.Points = make([][]int, totalRows)
if len(rowValues[r]) == 1 { output.Average = make([]int, totalRows)
v := rowValues[r][0] output.Min = make([]int, totalRows)
output.Min[idx] = v output.Max = make([]int, totalRows)
output.Max[idx] = v output.NinetyFivePercentile = make([]int, totalRows)
output.NinetyFivePercentile[idx] = v
continue i := -1
} for _, axis := range axes {
for _, k := range sortedRowKeys[axis] {
s := make([]int, len(rowValues[r])) i++
copy(s, rowValues[r]) output.Rows[i] = rows[axis][k]
sort.Ints(s) output.Axis[i] = axis
// Min (but not 0) output.Points[i] = points[axis][k]
for i := 0; i < len(s); i++ { output.Average[i] = int(sums[axis][k] / uint64(len(output.Time)))
output.Min[idx] = s[i]
if s[i] > 0 { // For remaining, we will sort the values. It
break // is needed for 95th percentile but it helps
// for min/max too. We remove special cases
// for 0 or 1 point.
nbPoints := len(output.Points[i])
if nbPoints == 0 {
continue
}
if nbPoints == 1 {
v := output.Points[i][0]
output.Min[i] = v
output.Max[i] = v
output.NinetyFivePercentile[i] = v
continue
}
points := make([]int, nbPoints)
copy(points, output.Points[i])
sort.Ints(points)
// Min (but not 0)
for j := 0; j < nbPoints; j++ {
output.Min[i] = points[j]
if points[j] > 0 {
break
}
}
// Max
output.Max[i] = points[nbPoints-1]
// 95th percentile
index := 0.95 * float64(nbPoints)
j := int(index)
if index == float64(j) {
output.NinetyFivePercentile[i] = points[j-1]
} else if index > 1 {
// We use the average of the two values. This
// is good enough for bps/pps
output.NinetyFivePercentile[i] = (points[j-1] + points[j]) / 2
} }
}
// Max
output.Max[idx] = s[len(s)-1]
// 95th percentile
index := 0.95 * float64(len(s))
j := int(index)
if index == float64(j) {
output.NinetyFivePercentile[idx] = s[j-1]
} else if index > 1 {
// We use the average of the two values. This
// is good enough for bps/pps
output.NinetyFivePercentile[idx] = (s[j-1] + s[j]) / 2
} }
} }

View File

@@ -4,6 +4,7 @@
package console package console
import ( import (
"fmt"
"strings" "strings"
"testing" "testing"
"time" "time"
@@ -14,6 +15,46 @@ import (
"akvorado/common/helpers" "akvorado/common/helpers"
) )
func TestGraphInputReverseDirection(t *testing.T) {
input := graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnInIfProvider,
},
Filter: queryFilter{
filter: "DstCountry = 'FR' AND SrcCountry = 'US'",
reverseFilter: "SrcCountry = 'FR' AND DstCountry = 'US'",
},
Units: "l3bps",
}
original1 := fmt.Sprintf("%+v", input)
expected := graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnOutIfProvider,
},
Filter: queryFilter{
reverseFilter: "DstCountry = 'FR' AND SrcCountry = 'US'",
filter: "SrcCountry = 'FR' AND DstCountry = 'US'",
},
Units: "l3bps",
}
got := input.reverseDirection()
original2 := fmt.Sprintf("%+v", input)
if diff := helpers.Diff(got, expected); diff != "" {
t.Fatalf("reverseDirection() (-got, +want):\n%s", diff)
}
if original1 != original2 {
t.Fatalf("reverseDirection() modified original to:\n-%s\n+%s", original1, original2)
}
}
func TestGraphQuerySQL(t *testing.T) { func TestGraphQuerySQL(t *testing.T) {
cases := []struct { cases := []struct {
Description string Description string
@@ -31,6 +72,7 @@ func TestGraphQuerySQL(t *testing.T) {
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -41,7 +83,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP {resolution->864}`, STEP {resolution->864})`,
}, { }, {
Description: "no dimensions, no filters, l2 bps", Description: "no dimensions, no filters, l2 bps",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -53,6 +95,7 @@ ORDER BY time WITH FILL
Units: "l2bps", Units: "l2bps",
}, },
Expected: ` Expected: `
SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM((Bytes+18*Packets)*SamplingRate*8/{resolution->864}) AS xps, SUM((Bytes+18*Packets)*SamplingRate*8/{resolution->864}) AS xps,
@@ -63,7 +106,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP {resolution->864}`, STEP {resolution->864})`,
}, { }, {
Description: "no dimensions, no filters, pps", Description: "no dimensions, no filters, pps",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -75,6 +118,7 @@ ORDER BY time WITH FILL
Units: "pps", Units: "pps",
}, },
Expected: ` Expected: `
SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Packets*SamplingRate/{resolution->864}) AS xps, SUM(Packets*SamplingRate/{resolution->864}) AS xps,
@@ -85,7 +129,7 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP {resolution->864}`, STEP {resolution->864})`,
}, { }, {
Description: "no dimensions", Description: "no dimensions",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -97,6 +141,7 @@ ORDER BY time WITH FILL
Units: "l3bps", Units: "l3bps",
}, },
Expected: ` Expected: `
SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -107,7 +152,47 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP {resolution->864}`, STEP {resolution->864})`,
}, {
Description: "no dimensions, reverse direction",
Input: graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Dimensions: []queryColumn{},
Filter: queryFilter{
filter: "DstCountry = 'FR' AND SrcCountry = 'US'",
reverseFilter: "SrcCountry = 'FR' AND DstCountry = 'US'",
},
Units: "l3bps",
Bidirectional: true,
},
Expected: `
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (DstCountry = 'FR' AND SrcCountry = 'US')
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})
UNION ALL
SELECT 2 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
emptyArrayString() AS dimensions
FROM {table}
WHERE {timefilter} AND (SrcCountry = 'FR' AND DstCountry = 'US')
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})`,
}, { }, {
Description: "no filters", Description: "no filters",
Input: graphHandlerInput{ Input: graphHandlerInput{
@@ -125,6 +210,7 @@ ORDER BY time WITH FILL
Expected: ` Expected: `
WITH WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20) rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM (
SELECT SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time, toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps, SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
@@ -135,12 +221,55 @@ GROUP BY time, dimensions
ORDER BY time WITH FILL ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second) FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop} TO {timefilter.Stop}
STEP {resolution->864}`, STEP {resolution->864})`,
}, {
Description: "no filters, reverse",
Input: graphHandlerInput{
Start: time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
End: time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
Points: 100,
Limit: 20,
Dimensions: []queryColumn{
queryColumnExporterName,
queryColumnInIfProvider,
},
Filter: queryFilter{},
Units: "l3bps",
Bidirectional: true,
},
Expected: `
WITH
rows AS (SELECT ExporterName, InIfProvider FROM {table} WHERE {timefilter} GROUP BY ExporterName, InIfProvider ORDER BY SUM(Bytes) DESC LIMIT 20)
SELECT 1 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
if((ExporterName, InIfProvider) IN rows, [ExporterName, InIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})
UNION ALL
SELECT 2 AS axis, * FROM (
SELECT
toStartOfInterval(TimeReceived, INTERVAL {resolution->864} second) AS time,
SUM(Bytes*SamplingRate*8/{resolution->864}) AS xps,
if((ExporterName, OutIfProvider) IN rows, [ExporterName, OutIfProvider], ['Other', 'Other']) AS dimensions
FROM {table}
WHERE {timefilter}
GROUP BY time, dimensions
ORDER BY time WITH FILL
FROM toStartOfInterval({timefilter.Start}, INTERVAL {resolution->864} second)
TO {timefilter.Stop}
STEP {resolution->864})`,
}, },
} }
for _, tc := range cases { for _, tc := range cases {
t.Run(tc.Description, func(t *testing.T) { t.Run(tc.Description, func(t *testing.T) {
got, _ := tc.Input.toSQL() got := tc.Input.toSQL()
if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"), if diff := helpers.Diff(strings.Split(strings.TrimSpace(got), "\n"),
strings.Split(strings.TrimSpace(tc.Expected), "\n")); diff != "" { strings.Split(strings.TrimSpace(tc.Expected), "\n")); diff != "" {
t.Errorf("toSQL (-got, +want):\n%s", diff) t.Errorf("toSQL (-got, +want):\n%s", diff)
@@ -151,26 +280,75 @@ ORDER BY time WITH FILL
func TestGraphHandler(t *testing.T) { func TestGraphHandler(t *testing.T) {
_, h, mockConn, _ := NewMock(t, DefaultConfiguration()) _, h, mockConn, _ := NewMock(t, DefaultConfiguration())
base := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC) base := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
// Single direction
expectedSQL := []struct { expectedSQL := []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"` Time time.Time `ch:"time"`
Xps float64 `ch:"xps"` Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"` Dimensions []string `ch:"dimensions"`
}{ }{
{base, 1000, []string{"router1", "provider1"}}, {1, base, 1000, []string{"router1", "provider1"}},
{base, 2000, []string{"router1", "provider2"}}, {1, base, 2000, []string{"router1", "provider2"}},
{base, 1200, []string{"router2", "provider2"}}, {1, base, 1200, []string{"router2", "provider2"}},
{base, 1100, []string{"router2", "provider3"}}, {1, base, 1100, []string{"router2", "provider3"}},
{base, 1900, []string{"Other", "Other"}}, {1, base, 1900, []string{"Other", "Other"}},
{base.Add(time.Minute), 500, []string{"router1", "provider1"}}, {1, base.Add(time.Minute), 500, []string{"router1", "provider1"}},
{base.Add(time.Minute), 5000, []string{"router1", "provider2"}}, {1, base.Add(time.Minute), 5000, []string{"router1", "provider2"}},
{base.Add(time.Minute), 900, []string{"router2", "provider4"}}, {1, base.Add(time.Minute), 900, []string{"router2", "provider4"}},
{base.Add(time.Minute), 100, []string{"Other", "Other"}}, {1, base.Add(time.Minute), 100, []string{"Other", "Other"}},
{base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}}, {1, base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}},
{base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}}, {1, base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}},
{base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}}, {1, base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}},
{base.Add(2 * time.Minute), 100, []string{"Other", "Other"}}, {1, base.Add(2 * time.Minute), 100, []string{"Other", "Other"}},
}
mockConn.EXPECT().
Select(gomock.Any(), gomock.Any(), gomock.Any()).
SetArg(1, expectedSQL).
Return(nil)
// Bidirectional
expectedSQL = []struct {
Axis uint8 `ch:"axis"`
Time time.Time `ch:"time"`
Xps float64 `ch:"xps"`
Dimensions []string `ch:"dimensions"`
}{
{1, base, 1000, []string{"router1", "provider1"}},
{1, base, 2000, []string{"router1", "provider2"}},
{1, base, 1200, []string{"router2", "provider2"}},
{1, base, 1100, []string{"router2", "provider3"}},
{1, base, 1900, []string{"Other", "Other"}},
{1, base.Add(time.Minute), 500, []string{"router1", "provider1"}},
{1, base.Add(time.Minute), 5000, []string{"router1", "provider2"}},
{1, base.Add(time.Minute), 900, []string{"router2", "provider4"}},
// Axes can be mixed. In practice, ClickHouse does not
// seem to interleave them, but its documentation does
// not guarantee any ordering, so we handle that case.
{2, base, 100, []string{"router1", "provider1"}},
{2, base, 200, []string{"router1", "provider2"}},
{2, base, 120, []string{"router2", "provider2"}},
{1, base.Add(time.Minute), 100, []string{"Other", "Other"}},
{1, base.Add(2 * time.Minute), 100, []string{"router1", "provider1"}},
{2, base, 110, []string{"router2", "provider3"}},
{2, base, 190, []string{"Other", "Other"}},
{2, base.Add(time.Minute), 50, []string{"router1", "provider1"}},
{2, base.Add(time.Minute), 500, []string{"router1", "provider2"}},
{1, base.Add(2 * time.Minute), 3000, []string{"router1", "provider2"}},
{1, base.Add(2 * time.Minute), 100, []string{"router2", "provider4"}},
{1, base.Add(2 * time.Minute), 100, []string{"Other", "Other"}},
{2, base.Add(time.Minute), 90, []string{"router2", "provider4"}},
{2, base.Add(time.Minute), 10, []string{"Other", "Other"}},
{2, base.Add(2 * time.Minute), 10, []string{"router1", "provider1"}},
{2, base.Add(2 * time.Minute), 300, []string{"router1", "provider2"}},
{2, base.Add(2 * time.Minute), 10, []string{"router2", "provider4"}},
{2, base.Add(2 * time.Minute), 10, []string{"Other", "Other"}},
} }
mockConn.EXPECT(). mockConn.EXPECT().
Select(gomock.Any(), gomock.Any(), gomock.Any()). Select(gomock.Any(), gomock.Any(), gomock.Any()).
@@ -179,15 +357,17 @@ func TestGraphHandler(t *testing.T) {
helpers.TestHTTPEndpoints(t, h.Address, helpers.HTTPEndpointCases{ helpers.TestHTTPEndpoints(t, h.Address, helpers.HTTPEndpointCases{
{ {
URL: "/api/v0/console/graph", Description: "single direction",
URL: "/api/v0/console/graph",
JSONInput: gin.H{ JSONInput: gin.H{
"start": time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC), "start": time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
"end": time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC), "end": time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
"points": 100, "points": 100,
"limit": 20, "limit": 20,
"dimensions": []string{"ExporterName", "InIfProvider"}, "dimensions": []string{"ExporterName", "InIfProvider"},
"filter": "DstCountry = 'FR' AND SrcCountry = 'US'", "filter": "DstCountry = 'FR' AND SrcCountry = 'US'",
"units": "l3bps", "units": "l3bps",
"bidirectional": false,
}, },
JSONOutput: gin.H{ JSONOutput: gin.H{
// Sorted by sum of bps // Sorted by sum of bps
@@ -244,6 +424,124 @@ func TestGraphHandler(t *testing.T) {
500, 500,
1000, 1000,
}, },
"axis": []int{
1, 1, 1, 1, 1, 1,
},
},
}, {
Description: "bidirectional",
URL: "/api/v0/console/graph",
JSONInput: gin.H{
"start": time.Date(2022, 04, 10, 15, 45, 10, 0, time.UTC),
"end": time.Date(2022, 04, 11, 15, 45, 10, 0, time.UTC),
"points": 100,
"limit": 20,
"dimensions": []string{"ExporterName", "InIfProvider"},
"filter": "DstCountry = 'FR' AND SrcCountry = 'US'",
"units": "l3bps",
"bidirectional": true,
},
JSONOutput: gin.H{
// Sorted by sum of bps
"rows": [][]string{
{"router1", "provider2"}, // 10000
{"router1", "provider1"}, // 1600
{"router2", "provider2"}, // 1200
{"router2", "provider3"}, // 1100
{"router2", "provider4"}, // 1000
{"Other", "Other"}, // 2100
{"router1", "provider2"}, // 1000
{"router1", "provider1"}, // 160
{"router2", "provider2"}, // 120
{"router2", "provider3"}, // 110
{"router2", "provider4"}, // 100
{"Other", "Other"}, // 210
},
"t": []string{
"2009-11-10T23:00:00Z",
"2009-11-10T23:01:00Z",
"2009-11-10T23:02:00Z",
},
"points": [][]int{
{2000, 5000, 3000},
{1000, 500, 100},
{1200, 0, 0},
{1100, 0, 0},
{0, 900, 100},
{1900, 100, 100},
{200, 500, 300},
{100, 50, 10},
{120, 0, 0},
{110, 0, 0},
{0, 90, 10},
{190, 10, 10},
},
"min": []int{
2000,
100,
1200,
1100,
100,
100,
200,
10,
120,
110,
10,
10,
},
"max": []int{
5000,
1000,
1200,
1100,
900,
1900,
500,
100,
120,
110,
90,
190,
},
"average": []int{
3333,
533,
400,
366,
333,
700,
333,
53,
40,
36,
33,
70,
},
"95th": []int{
4000,
750,
600,
550,
500,
1000,
400,
75,
60,
55,
50,
100,
},
"axis": []int{
1, 1, 1, 1, 1, 1,
2, 2, 2, 2, 2, 2,
},
}, },
}, },
}) })

View File

@@ -14,94 +14,6 @@ import (
type queryColumn int type queryColumn int
const (
queryColumnExporterAddress queryColumn = iota + 1
queryColumnExporterName
queryColumnExporterGroup
queryColumnExporterRole
queryColumnExporterSite
queryColumnExporterRegion
queryColumnExporterTenant
queryColumnSrcAS
queryColumnSrcNetName
queryColumnSrcNetRole
queryColumnSrcNetSite
queryColumnSrcNetRegion
queryColumnSrcNetTenant
queryColumnSrcCountry
queryColumnInIfName
queryColumnInIfDescription
queryColumnInIfSpeed
queryColumnInIfConnectivity
queryColumnInIfProvider
queryColumnInIfBoundary
queryColumnEType
queryColumnProto
queryColumnSrcPort
queryColumnSrcAddr
queryColumnDstAS
queryColumnDstNetName
queryColumnDstNetRole
queryColumnDstNetSite
queryColumnDstNetRegion
queryColumnDstNetTenant
queryColumnDstCountry
queryColumnOutIfName
queryColumnOutIfDescription
queryColumnOutIfSpeed
queryColumnOutIfConnectivity
queryColumnOutIfProvider
queryColumnOutIfBoundary
queryColumnDstAddr
queryColumnDstPort
queryColumnForwardingStatus
queryColumnPacketSizeBucket
)
var queryColumnMap = helpers.NewBimap(map[queryColumn]string{
queryColumnExporterAddress: "ExporterAddress",
queryColumnExporterName: "ExporterName",
queryColumnExporterGroup: "ExporterGroup",
queryColumnExporterRole: "ExporterRole",
queryColumnExporterSite: "ExporterSite",
queryColumnExporterRegion: "ExporterRegion",
queryColumnExporterTenant: "ExporterTenant",
queryColumnSrcAddr: "SrcAddr",
queryColumnDstAddr: "DstAddr",
queryColumnSrcAS: "SrcAS",
queryColumnDstAS: "DstAS",
queryColumnSrcNetName: "SrcNetName",
queryColumnDstNetName: "DstNetName",
queryColumnSrcNetRole: "SrcNetRole",
queryColumnDstNetRole: "DstNetRole",
queryColumnSrcNetSite: "SrcNetSite",
queryColumnDstNetSite: "DstNetSite",
queryColumnSrcNetRegion: "SrcNetRegion",
queryColumnDstNetRegion: "DstNetRegion",
queryColumnSrcNetTenant: "SrcNetTenant",
queryColumnDstNetTenant: "DstNetTenant",
queryColumnSrcCountry: "SrcCountry",
queryColumnDstCountry: "DstCountry",
queryColumnInIfName: "InIfName",
queryColumnOutIfName: "OutIfName",
queryColumnInIfDescription: "InIfDescription",
queryColumnOutIfDescription: "OutIfDescription",
queryColumnInIfSpeed: "InIfSpeed",
queryColumnOutIfSpeed: "OutIfSpeed",
queryColumnInIfConnectivity: "InIfConnectivity",
queryColumnOutIfConnectivity: "OutIfConnectivity",
queryColumnInIfProvider: "InIfProvider",
queryColumnOutIfProvider: "OutIfProvider",
queryColumnInIfBoundary: "InIfBoundary",
queryColumnOutIfBoundary: "OutIfBoundary",
queryColumnEType: "EType",
queryColumnProto: "Proto",
queryColumnSrcPort: "SrcPort",
queryColumnDstPort: "DstPort",
queryColumnForwardingStatus: "ForwardingStatus",
queryColumnPacketSizeBucket: "PacketSizeBucket",
})
func (gc queryColumn) MarshalText() ([]byte, error) { func (gc queryColumn) MarshalText() ([]byte, error) {
got, ok := queryColumnMap.LoadValue(gc) got, ok := queryColumnMap.LoadValue(gc)
if ok { if ok {
@@ -128,6 +40,9 @@ type queryFilter struct {
mainTableRequired bool mainTableRequired bool
} }
// String implements fmt.Stringer and returns the raw filter expression.
func (gf queryFilter) String() string {
	return gf.filter
}
func (gf queryFilter) MarshalText() ([]byte, error) { func (gf queryFilter) MarshalText() ([]byte, error) {
return []byte(gf.filter), nil return []byte(gf.filter), nil
} }
@@ -172,3 +87,12 @@ func (gc queryColumn) toSQLSelect() string {
} }
return strValue return strValue
} }
// reverseDirection returns the column with its direction swapped
// (src/dst, in/out), e.g. SrcAS becomes DstAS. It panics if the
// reversed name is not a known column (a programming error, since
// every directional column has a registered counterpart).
func (gc queryColumn) reverseDirection() queryColumn {
	reversedName := filter.ReverseColumnDirection(gc.String())
	if reversed, ok := queryColumnMap.LoadKey(reversedName); ok {
		return reversed
	}
	panic("unknown reverse column")
}

94
console/query_consts.go Normal file
View File

@@ -0,0 +1,94 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package console
import "akvorado/common/helpers"
// Enumeration of the columns that can be selected, grouped by or
// filtered on in console queries. The iota starts at 1 so the zero
// value of queryColumn is invalid and never matches a real column.
// NOTE(review): the Makefile extracts the user-visible field list from
// these identifiers with a sed expression on "queryColumn<Name>" — keep
// one identifier per line and do not rename the prefix.
const (
queryColumnExporterAddress queryColumn = iota + 1
queryColumnExporterName
queryColumnExporterGroup
queryColumnExporterRole
queryColumnExporterSite
queryColumnExporterRegion
queryColumnExporterTenant
queryColumnSrcAS
queryColumnSrcNetName
queryColumnSrcNetRole
queryColumnSrcNetSite
queryColumnSrcNetRegion
queryColumnSrcNetTenant
queryColumnSrcCountry
queryColumnInIfName
queryColumnInIfDescription
queryColumnInIfSpeed
queryColumnInIfConnectivity
queryColumnInIfProvider
queryColumnInIfBoundary
queryColumnEType
queryColumnProto
queryColumnSrcPort
queryColumnSrcAddr
queryColumnDstAS
queryColumnDstNetName
queryColumnDstNetRole
queryColumnDstNetSite
queryColumnDstNetRegion
queryColumnDstNetTenant
queryColumnDstCountry
queryColumnOutIfName
queryColumnOutIfDescription
queryColumnOutIfSpeed
queryColumnOutIfConnectivity
queryColumnOutIfProvider
queryColumnOutIfBoundary
queryColumnDstAddr
queryColumnDstPort
queryColumnForwardingStatus
queryColumnPacketSizeBucket
)
// queryColumnMap is a bidirectional mapping between queryColumn values
// and their canonical string names as used in the API and in filter
// expressions. It backs MarshalText/UnmarshalText and reverseDirection
// lookups; every constant declared above must have exactly one entry.
var queryColumnMap = helpers.NewBimap(map[queryColumn]string{
queryColumnExporterAddress: "ExporterAddress",
queryColumnExporterName: "ExporterName",
queryColumnExporterGroup: "ExporterGroup",
queryColumnExporterRole: "ExporterRole",
queryColumnExporterSite: "ExporterSite",
queryColumnExporterRegion: "ExporterRegion",
queryColumnExporterTenant: "ExporterTenant",
queryColumnSrcAddr: "SrcAddr",
queryColumnDstAddr: "DstAddr",
queryColumnSrcAS: "SrcAS",
queryColumnDstAS: "DstAS",
queryColumnSrcNetName: "SrcNetName",
queryColumnDstNetName: "DstNetName",
queryColumnSrcNetRole: "SrcNetRole",
queryColumnDstNetRole: "DstNetRole",
queryColumnSrcNetSite: "SrcNetSite",
queryColumnDstNetSite: "DstNetSite",
queryColumnSrcNetRegion: "SrcNetRegion",
queryColumnDstNetRegion: "DstNetRegion",
queryColumnSrcNetTenant: "SrcNetTenant",
queryColumnDstNetTenant: "DstNetTenant",
queryColumnSrcCountry: "SrcCountry",
queryColumnDstCountry: "DstCountry",
queryColumnInIfName: "InIfName",
queryColumnOutIfName: "OutIfName",
queryColumnInIfDescription: "InIfDescription",
queryColumnOutIfDescription: "OutIfDescription",
queryColumnInIfSpeed: "InIfSpeed",
queryColumnOutIfSpeed: "OutIfSpeed",
queryColumnInIfConnectivity: "InIfConnectivity",
queryColumnOutIfConnectivity: "OutIfConnectivity",
queryColumnInIfProvider: "InIfProvider",
queryColumnOutIfProvider: "OutIfProvider",
queryColumnInIfBoundary: "InIfBoundary",
queryColumnOutIfBoundary: "OutIfBoundary",
queryColumnEType: "EType",
queryColumnProto: "Proto",
queryColumnSrcPort: "SrcPort",
queryColumnDstPort: "DstPort",
queryColumnForwardingStatus: "ForwardingStatus",
queryColumnPacketSizeBucket: "PacketSizeBucket",
})