inlet: make use of schema for inlet

This is a huge change that makes the various subcomponents of the inlet use
the schema to generate the protobuf definition. For it to make sense, we also
modify the way we parse flows to serialize non-essential fields directly
to Protobuf.

The performance is mostly on par with the previous commit. We are a bit
less efficient because we don't have a fixed structure, but we avoid
losing too much performance by not relying on reflection and by keeping
the production of messages as code. We also use less of Goflow2: raw flow
parsing is still done by Goflow2, but we don't use the producer part
anymore. This helps a bit with the performance as we parse less.
Overall, we are 20% faster than the previous commit and twice as fast as
1.6.4!

```
goos: linux
goarch: amd64
pkg: akvorado/inlet/flow
cpu: AMD Ryzen 5 5600X 6-Core Processor
BenchmarkDecodeEncodeNetflow
BenchmarkDecodeEncodeNetflow/with_encoding
BenchmarkDecodeEncodeNetflow/with_encoding-12             151484              7789 ns/op            8272 B/op        143 allocs/op
BenchmarkDecodeEncodeNetflow/without_encoding
BenchmarkDecodeEncodeNetflow/without_encoding-12          162550              7133 ns/op            8272 B/op        143 allocs/op
BenchmarkDecodeEncodeSflow
BenchmarkDecodeEncodeSflow/with_encoding
BenchmarkDecodeEncodeSflow/with_encoding-12                94844             13193 ns/op            9816 B/op        295 allocs/op
BenchmarkDecodeEncodeSflow/without_encoding
BenchmarkDecodeEncodeSflow/without_encoding-12             92569             12456 ns/op            9816 B/op        295 allocs/op
```

There was an attempt to parse sFlow packets with gopacket, but the
ad hoc parser used here is more performant.
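
To illustrate what "keeping the production of messages as code" means here, below is a minimal sketch of the direct wire-format encoding pattern: fields are appended straight to a byte buffer with the protowire package instead of filling a generated struct. The helper names and field numbers are illustrative only; the real mapping comes from the schema (see common/schema/protobuf.go in the diff below).

```
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

// appendVarintField writes a varint field (tag + value) directly into the
// wire-format buffer, skipping zero values like proto3 serialization does.
func appendVarintField(buf []byte, num protowire.Number, value uint64) []byte {
	if value == 0 {
		return buf
	}
	buf = protowire.AppendTag(buf, num, protowire.VarintType)
	return protowire.AppendVarint(buf, value)
}

// appendBytesField does the same for length-delimited fields (strings, IP addresses).
func appendBytesField(buf []byte, num protowire.Number, value []byte) []byte {
	if len(value) == 0 {
		return buf
	}
	buf = protowire.AppendTag(buf, num, protowire.BytesType)
	return protowire.AppendBytes(buf, value)
}

func main() {
	// Field numbers are hypothetical; in Akvorado they are derived from the schema.
	var buf []byte
	buf = appendVarintField(buf, 1, 1000)               // TimeReceived
	buf = appendVarintField(buf, 2, 20000)              // SamplingRate
	buf = appendBytesField(buf, 4, []byte("exporter1")) // ExporterName
	fmt.Printf("encoded %d bytes: % x\n", len(buf), buf)
}
```

No reflection is involved at encoding time; the schema only decides each column's field number and wire type.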
Vincent Bernat committed on 2023-01-17 17:34:13 +01:00
parent ba7a6fca49
commit e352202631
63 changed files with 1915 additions and 1340 deletions

.gitignore

@@ -2,7 +2,6 @@
/test/
/orchestrator/clickhouse/data/asns.csv
/console/filter/parser.go
*.pb.go
mock_*.go
/console/data/frontend/


@@ -14,11 +14,9 @@ M = $(shell if [ "$$(tput colors 2> /dev/null || echo 0)" -ge 8 ]; then printf "
export CGO_ENABLED=0
FLOW_VERSION := $(shell sed -n 's/^const CurrentSchemaVersion = //p' inlet/flow/schemas.go)
GENERATED_JS = \
console/frontend/node_modules
GENERATED_GO = \
inlet/flow/decoder/flow-ANY.pb.go \
common/clickhousedb/mocks/mock_driver.go \
conntrackfixer/mocks/mock_conntrackfixer.go \
orchestrator/clickhouse/data/asns.csv \
@@ -60,12 +58,6 @@ $(BIN)/gotestsum: PACKAGE=gotest.tools/gotestsum@latest
MOCKGEN = $(BIN)/mockgen
$(BIN)/mockgen: PACKAGE=github.com/golang/mock/mockgen@v1.6.0
PROTOC = protoc
PROTOC_GEN_GO = $(BIN)/protoc-gen-go
PROTOC_GEN_GO_VTPROTO = $(BIN)/protoc-gen-go-vtproto
$(BIN)/protoc-gen-go: PACKAGE=google.golang.org/protobuf/cmd/protoc-gen-go@v1.28.1
$(BIN)/protoc-gen-go-vtproto: PACKAGE=github.com/planetscale/vtprotobuf/cmd/protoc-gen-go-vtproto@v0.3.0
PIGEON = $(BIN)/pigeon
$(BIN)/pigeon: PACKAGE=github.com/mna/pigeon@v1.1.0
@@ -76,18 +68,6 @@ $(BIN)/wwhrd: PACKAGE=github.com/frapposelli/wwhrd@latest
.DELETE_ON_ERROR:
inlet/flow/decoder/flow-ANY.pb.go: inlet/flow/decoder/flow-$(FLOW_VERSION).pb.go inlet/flow/decoder/flow-$(FLOW_VERSION)_vtproto.pb.go
$Q for f in inlet/flow/decoder/flow-*.pb.go; do \
echo $^ | grep -Fwq $$f || rm -f $$f; \
done
$Q sed -i.bkp s/v$(FLOW_VERSION)//g inlet/flow/decoder/flow-*.pb.go && rm inlet/flow/decoder/flow-*.pb.go.bkp
inlet/flow/decoder/flow-$(FLOW_VERSION).pb.go inlet/flow/decoder/flow-$(FLOW_VERSION)_vtproto.pb.go: inlet/flow/data/schemas/flow-$(FLOW_VERSION).proto | $(PROTOC_GEN_GO) $(PROTOC_GEN_GO_VTPROTO) ; $(info $(M) compiling protocol buffers definition)
$Q $(PROTOC) -I=. \
--plugin=$(PROTOC_GEN_GO) --go_out=module=$(MODULE):. \
--plugin=$(PROTOC_GEN_GO_VTPROTO) --go-vtproto_out=module=$(MODULE):. --go-vtproto_opt=features=marshal+size \
$<
common/clickhousedb/mocks/mock_driver.go: $(MOCKGEN) ; $(info $(M) generate mocks for ClickHouse driver)
$Q echo '//go:build !release' > $@
$Q $(MOCKGEN) -package mocks \
@@ -143,7 +123,8 @@ test-race: GOTEST_MORE=, with race detector
test-race: test-go ## Run Go tests with race detector
test-bench: | $(GOTESTSUM) ; $(info $(M) running benchmarks) @ ## Run Go benchmarks
$Q $(GOTESTSUM) -f standard-quiet -- \
-timeout $(TIMEOUT)s -run=__absolutelynothing__ -bench=. -benchmem \
-timeout $(TIMEOUT)s -run=__absolutelynothing__ -bench=. \
-benchmem -memprofile test/go/memprofile.out -cpuprofile test/go/cpuprofile.out \
$(PKGS)
test-coverage-go: | $(GOTESTSUM) $(GOCOV) $(GOCOVXML) ; $(info $(M) running Go coverage tests) @ ## Run Go coverage tests
$Q mkdir -p test/go


@@ -63,10 +63,8 @@ func (schema Schema) ClickHouseSelectColumns(options ...ClickHouseTableOption) [
}
func (schema Schema) clickhouseIterate(fn func(Column), options ...ClickHouseTableOption) {
for pair := schema.columns.Front(); pair != nil; pair = pair.Next() {
key := pair.Key
column := pair.Value
if slices.Contains(options, ClickHouseSkipTimeReceived) && key == ColumnTimeReceived {
for _, column := range schema.columns {
if slices.Contains(options, ClickHouseSkipTimeReceived) && column.Key == ColumnTimeReceived {
continue
}
if slices.Contains(options, ClickHouseSkipMainOnlyColumns) && column.MainOnly {
@@ -103,8 +101,7 @@ func (schema Schema) clickhouseIterate(fn func(Column), options ...ClickHouseTab
// ClickHouseSortingKeys returns the list of sorting keys, prefixed by the primary keys.
func (schema Schema) ClickHouseSortingKeys() []string {
cols := schema.ClickHousePrimaryKeys()
for pair := schema.columns.Front(); pair != nil; pair = pair.Next() {
column := pair.Value
for _, column := range schema.columns {
if column.ClickHouseNotSortingKey || column.MainOnly {
continue
}


@@ -9,13 +9,22 @@ import (
"akvorado/common/helpers/bimap"
orderedmap "github.com/elliotchance/orderedmap/v2"
"golang.org/x/exp/slices"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/reflect/protoreflect"
)
// revive:disable
const (
ColumnTimeReceived ColumnKey = iota + 1
ColumnSamplingRate
ColumnEType
ColumnProto
ColumnBytes
ColumnPackets
ColumnPacketSize
ColumnPacketSizeBucket
ColumnForwardingStatus
ColumnExporterAddress
ColumnExporterName
ColumnExporterGroup
@@ -31,6 +40,8 @@ const (
ColumnDstNetPrefix
ColumnSrcAS
ColumnDstAS
ColumnSrcPort
ColumnDstPort
ColumnSrcNetName
ColumnDstNetName
ColumnSrcNetRole
@@ -64,15 +75,8 @@ const (
ColumnOutIfConnectivity
ColumnInIfBoundary
ColumnOutIfBoundary
ColumnEType
ColumnProto
ColumnSrcPort
ColumnDstPort
ColumnBytes
ColumnPackets
ColumnPacketSize
ColumnPacketSizeBucket
ColumnForwardingStatus
ColumnLast
)
// revive:enable
@@ -113,9 +117,9 @@ var columnNameMap = bimap.New(map[ColumnKey]string{
ColumnDst3rdAS: "Dst3rdAS",
ColumnDstCommunities: "DstCommunities",
ColumnDstLargeCommunities: "DstLargeCommunities",
ColumnDstLargeCommunitiesASN: "DstLargeCommunities.ASN",
ColumnDstLargeCommunitiesLocalData1: "DstLargeCommunities.LocalData1",
ColumnDstLargeCommunitiesLocalData2: "DstLargeCommunities.LocalData2",
ColumnDstLargeCommunitiesASN: "DstLargeCommunitiesASN",
ColumnDstLargeCommunitiesLocalData1: "DstLargeCommunitiesLocalData1",
ColumnDstLargeCommunitiesLocalData2: "DstLargeCommunitiesLocalData2",
ColumnInIfName: "InIfName",
ColumnOutIfName: "OutIfName",
ColumnInIfDescription: "InIfDescription",
@@ -160,12 +164,13 @@ var Flows = Schema{
ColumnDstAS,
ColumnSamplingRate,
},
columns: buildMapFromColumns([]Column{
columns: []Column{
{
Key: ColumnTimeReceived,
ClickHouseType: "DateTime",
ClickHouseCodec: "DoubleDelta, LZ4",
ConsoleNotDimension: true,
ProtobufType: protoreflect.Uint64Kind,
},
{Key: ColumnSamplingRate, ClickHouseType: "UInt64", ConsoleNotDimension: true},
{Key: ColumnExporterAddress, ClickHouseType: "LowCardinality(IPv6)"},
@@ -242,11 +247,20 @@ END`,
MainOnly: true,
ClickHouseType: "Array(UInt128)",
ClickHouseTransformFrom: []Column{
{Key: ColumnDstLargeCommunitiesASN, ClickHouseType: "Array(UInt32)"},
{Key: ColumnDstLargeCommunitiesLocalData1, ClickHouseType: "Array(UInt32)"},
{Key: ColumnDstLargeCommunitiesLocalData2, ClickHouseType: "Array(UInt32)"},
{
Key: ColumnDstLargeCommunitiesASN,
ClickHouseType: "Array(UInt32)",
},
{
Key: ColumnDstLargeCommunitiesLocalData1,
ClickHouseType: "Array(UInt32)",
},
{
Key: ColumnDstLargeCommunitiesLocalData2,
ClickHouseType: "Array(UInt32)",
},
},
ClickHouseTransformTo: "arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), `DstLargeCommunities.ASN`, `DstLargeCommunities.LocalData1`, `DstLargeCommunities.LocalData2`)",
ClickHouseTransformTo: "arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), DstLargeCommunitiesASN, DstLargeCommunitiesLocalData1, DstLargeCommunitiesLocalData2)",
ConsoleNotDimension: true,
},
{Key: ColumnInIfName, ClickHouseType: "LowCardinality(String)"},
@@ -254,7 +268,18 @@ END`,
{Key: ColumnInIfSpeed, ClickHouseType: "UInt32", ClickHouseNotSortingKey: true},
{Key: ColumnInIfConnectivity, ClickHouseType: "LowCardinality(String)", ClickHouseNotSortingKey: true},
{Key: ColumnInIfProvider, ClickHouseType: "LowCardinality(String)", ClickHouseNotSortingKey: true},
{Key: ColumnInIfBoundary, ClickHouseType: "Enum8('undefined' = 0, 'external' = 1, 'internal' = 2)", ClickHouseNotSortingKey: true},
{
Key: ColumnInIfBoundary,
ClickHouseType: "Enum8('undefined' = 0, 'external' = 1, 'internal' = 2)",
ClickHouseNotSortingKey: true,
ProtobufType: protoreflect.EnumKind,
ProtobufEnumName: "Boundary",
ProtobufEnum: map[int]string{
0: "UNDEFINED",
1: "EXTERNAL",
2: "INTERNAL",
},
},
{Key: ColumnEType, ClickHouseType: "UInt32"},
{Key: ColumnProto, ClickHouseType: "UInt32"},
{Key: ColumnSrcPort, ClickHouseType: "UInt32", MainOnly: true},
@@ -283,64 +308,114 @@ END`,
}(),
},
{Key: ColumnForwardingStatus, ClickHouseType: "UInt32"},
}),
}
},
}.finalize()
func buildMapFromColumns(columns []Column) *orderedmap.OrderedMap[ColumnKey, Column] {
omap := orderedmap.NewOrderedMap[ColumnKey, Column]()
for _, column := range columns {
func (schema Schema) finalize() Schema {
ncolumns := []Column{}
for _, column := range schema.columns {
// Add true name
name, ok := columnNameMap.LoadValue(column.Key)
if !ok {
panic(fmt.Sprintf("missing name mapping for %d", column.Key))
}
column.Name = name
if column.Name == "" {
column.Name = name
}
// Also true name for columns in ClickHouseTransformFrom
for idx, ecolumn := range column.ClickHouseTransformFrom {
name, ok := columnNameMap.LoadValue(ecolumn.Key)
if !ok {
panic(fmt.Sprintf("missing name mapping for %d", ecolumn.Key))
if ecolumn.Name == "" {
name, ok := columnNameMap.LoadValue(ecolumn.Key)
if !ok {
panic(fmt.Sprintf("missing name mapping for %d", ecolumn.Key))
}
column.ClickHouseTransformFrom[idx].Name = name
}
column.ClickHouseTransformFrom[idx].Name = name
}
// Add non-main columns with an alias to NotSortingKey
if !column.MainOnly && column.ClickHouseAlias != "" {
column.ClickHouseNotSortingKey = true
}
omap.Set(column.Key, column)
ncolumns = append(ncolumns, column)
// Expand the schema Src → Dst and InIf → OutIf
if strings.HasPrefix(name, "Src") {
column.Name = fmt.Sprintf("Dst%s", name[3:])
if strings.HasPrefix(column.Name, "Src") {
column.Name = fmt.Sprintf("Dst%s", column.Name[3:])
column.Key, ok = columnNameMap.LoadKey(column.Name)
if !ok {
panic(fmt.Sprintf("missing name mapping for %q", column.Name))
}
column.ClickHouseAlias = strings.ReplaceAll(column.ClickHouseAlias, "Src", "Dst")
omap.Set(column.Key, column)
} else if strings.HasPrefix(name, "InIf") {
column.Name = fmt.Sprintf("OutIf%s", name[4:])
column.ClickHouseTransformFrom = slices.Clone(column.ClickHouseTransformFrom)
ncolumns = append(ncolumns, column)
} else if strings.HasPrefix(column.Name, "InIf") {
column.Name = fmt.Sprintf("OutIf%s", column.Name[4:])
column.Key, ok = columnNameMap.LoadKey(column.Name)
if !ok {
panic(fmt.Sprintf("missing name mapping for %q", column.Name))
}
column.ClickHouseAlias = strings.ReplaceAll(column.ClickHouseAlias, "InIf", "OutIf")
omap.Set(column.Key, column)
column.ClickHouseTransformFrom = slices.Clone(column.ClickHouseTransformFrom)
ncolumns = append(ncolumns, column)
}
}
return omap
}
schema.columns = ncolumns
func init() {
for _, key := range Flows.clickHousePrimaryKeys {
if column, ok := Flows.columns.Get(key); !ok {
panic(fmt.Sprintf("primary key %q not a column", key))
} else {
if column.ClickHouseNotSortingKey {
panic(fmt.Sprintf("primary key %q is marked as a non-sorting key", key))
// Set Protobuf index and type
protobufIndex := 1
ncolumns = []Column{}
for _, column := range schema.columns {
pcolumns := []*Column{&column}
for idx := range column.ClickHouseTransformFrom {
pcolumns = append(pcolumns, &column.ClickHouseTransformFrom[idx])
}
for _, column := range pcolumns {
if column.ProtobufIndex == 0 {
if column.ClickHouseTransformFrom != nil ||
column.ClickHouseGenerateFrom != "" ||
column.ClickHouseAlias != "" {
column.ProtobufIndex = -1
continue
}
column.ProtobufIndex = protowire.Number(protobufIndex)
protobufIndex++
}
if column.ProtobufType == 0 &&
column.ClickHouseTransformFrom == nil &&
column.ClickHouseGenerateFrom == "" &&
column.ClickHouseAlias == "" {
switch column.ClickHouseType {
case "String", "LowCardinality(String)", "FixedString(2)":
column.ProtobufType = protoreflect.StringKind
case "UInt64":
column.ProtobufType = protoreflect.Uint64Kind
case "UInt32", "UInt16", "UInt8":
column.ProtobufType = protoreflect.Uint32Kind
case "IPv6", "LowCardinality(IPv6)":
column.ProtobufType = protoreflect.BytesKind
case "Array(UInt32)":
column.ProtobufType = protoreflect.Uint32Kind
column.ProtobufRepeated = true
}
}
}
ncolumns = append(ncolumns, column)
}
schema.columns = ncolumns
// Build column index
schema.columnIndex = make([]*Column, ColumnLast)
for i, column := range schema.columns {
schema.columnIndex[column.Key] = &schema.columns[i]
for j, column := range column.ClickHouseTransformFrom {
schema.columnIndex[column.Key] = &schema.columns[i].ClickHouseTransformFrom[j]
}
}
return schema
}


@@ -0,0 +1,38 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package schema
import (
"testing"
)
func TestFlowsClickHouse(t *testing.T) {
for _, key := range Flows.clickHousePrimaryKeys {
if column := Flows.columnIndex[key]; column.Key == 0 {
t.Errorf("primary key %q not a column", key)
} else {
if column.ClickHouseNotSortingKey {
t.Errorf("primary key %q is marked as a non-sorting key", key)
}
}
}
}
func TestFlowsProtobuf(t *testing.T) {
for _, column := range Flows.Columns() {
if column.ProtobufIndex >= 0 {
if column.ProtobufType == 0 {
t.Errorf("column %s has not protobuf type", column.Name)
}
}
}
}
func TestColumnIndex(t *testing.T) {
for i := ColumnTimeReceived; i < ColumnLast; i++ {
if _, ok := Flows.LookupColumnByKey(i); !ok {
t.Errorf("column %s cannot be looked up by key", i)
}
}
}


@@ -6,21 +6,25 @@ package schema
import "strings"
// LookupColumnByName can lookup a column by its name.
func (schema Schema) LookupColumnByName(name string) (Column, bool) {
func (schema *Schema) LookupColumnByName(name string) (*Column, bool) {
key, ok := columnNameMap.LoadKey(name)
if !ok {
return Column{}, false
return &Column{}, false
}
return schema.columns.Get(key)
return schema.LookupColumnByKey(key)
}
// LookupColumnByKey can lookup a column by its key.
func (schema Schema) LookupColumnByKey(key ColumnKey) (Column, bool) {
return schema.columns.Get(key)
func (schema *Schema) LookupColumnByKey(key ColumnKey) (*Column, bool) {
column := schema.columnIndex[key]
if column == nil {
return &Column{}, false
}
return column, true
}
// ReverseColumnDirection reverts the direction of a provided column name.
func (schema Schema) ReverseColumnDirection(key ColumnKey) ColumnKey {
func (schema *Schema) ReverseColumnDirection(key ColumnKey) ColumnKey {
var candidateName string
name := key.String()
if strings.HasPrefix(name, "Src") {
@@ -36,7 +40,7 @@ func (schema Schema) ReverseColumnDirection(key ColumnKey) ColumnKey {
candidateName = "In" + name[3:]
}
if candidateKey, ok := columnNameMap.LoadKey(candidateName); ok {
if _, ok := schema.columns.Get(candidateKey); ok {
if _, ok := schema.LookupColumnByKey(candidateKey); ok {
return candidateKey
}
}
@@ -44,10 +48,6 @@ func (schema Schema) ReverseColumnDirection(key ColumnKey) ColumnKey {
}
// Columns returns the columns.
func (schema Schema) Columns() []Column {
result := []Column{}
for pair := schema.columns.Front(); pair != nil; pair = pair.Next() {
result = append(result, pair.Value)
}
return result
func (schema *Schema) Columns() []Column {
return schema.columns[:]
}

common/schema/protobuf.go

@@ -0,0 +1,242 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package schema
import (
"encoding/base32"
"fmt"
"hash/fnv"
"net/netip"
"strings"
"github.com/bits-and-blooms/bitset"
"golang.org/x/exp/slices"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/reflect/protoreflect"
)
// ProtobufMessageHash returns the name of the protobuf definition.
func (schema Schema) ProtobufMessageHash() string {
name, _ := schema.protobufMessageHashAndDefinition()
return name
}
// ProtobufDefinition returns the protobuf definition.
func (schema Schema) ProtobufDefinition() string {
_, definition := schema.protobufMessageHashAndDefinition()
return definition
}
// protobufMessageHashAndDefinition returns the name of the protobuf definition
// along with the protobuf definition itself (.proto file).
func (schema Schema) protobufMessageHashAndDefinition() (string, string) {
lines := []string{}
enums := map[string]string{}
hash := fnv.New128()
for _, column := range schema.Columns() {
for _, column := range append([]Column{column}, column.ClickHouseTransformFrom...) {
if column.ProtobufIndex < 0 {
continue
}
t := column.ProtobufType.String()
// Enum definition
if column.ProtobufType == protoreflect.EnumKind {
if _, ok := enums[column.ProtobufEnumName]; !ok {
definition := []string{}
keys := []int{}
for key := range column.ProtobufEnum {
keys = append(keys, key)
}
slices.Sort(keys)
for _, key := range keys {
definition = append(definition, fmt.Sprintf("%s = %d;", column.ProtobufEnum[key], key))
}
enums[column.ProtobufEnumName] = fmt.Sprintf("enum %s { %s }",
column.ProtobufEnumName,
strings.Join(definition, " "))
}
t = column.ProtobufEnumName
}
// Column definition
if column.ProtobufRepeated {
t = fmt.Sprintf("repeated %s", t)
}
line := fmt.Sprintf("%s %s = %d;",
t,
column.Name,
column.ProtobufIndex,
)
lines = append(lines, line)
hash.Write([]byte(line))
}
}
enumDefinitions := []string{}
for _, v := range enums {
enumDefinitions = append(enumDefinitions, v)
hash.Write([]byte(v))
}
hashString := base32.StdEncoding.WithPadding(base32.NoPadding).EncodeToString(hash.Sum(nil))
return hashString, fmt.Sprintf(`
syntax = "proto3";
message FlowMessagev%s {
%s
%s
}
`, hashString, strings.Join(enumDefinitions, "\n "), strings.Join(lines, "\n "))
}
// ProtobufMarshal transforms a basic flow into protobuf bytes. The provided flow should
// not be reused afterwards.
func (schema *Schema) ProtobufMarshal(bf *FlowMessage) []byte {
schema.ProtobufAppendVarint(bf, ColumnTimeReceived, bf.TimeReceived)
schema.ProtobufAppendVarint(bf, ColumnSamplingRate, uint64(bf.SamplingRate))
schema.ProtobufAppendIP(bf, ColumnExporterAddress, bf.ExporterAddress)
schema.ProtobufAppendVarint(bf, ColumnSrcAS, uint64(bf.SrcAS))
schema.ProtobufAppendVarint(bf, ColumnDstAS, uint64(bf.DstAS))
schema.ProtobufAppendIP(bf, ColumnSrcAddr, bf.SrcAddr)
schema.ProtobufAppendIP(bf, ColumnDstAddr, bf.DstAddr)
// Add length and move it as a prefix
end := len(bf.protobuf)
payloadLen := end - maxSizeVarint
bf.protobuf = protowire.AppendVarint(bf.protobuf, uint64(payloadLen))
sizeLen := len(bf.protobuf) - end
result := bf.protobuf[maxSizeVarint-sizeLen : end]
copy(result, bf.protobuf[end:end+sizeLen])
return result
}
// ProtobufAppendVarint append a varint to the protobuf representation of a flow.
func (schema *Schema) ProtobufAppendVarint(bf *FlowMessage, columnKey ColumnKey, value uint64) {
// Check if value is 0 to avoid a lookup.
if value > 0 {
schema.ProtobufAppendVarintForce(bf, columnKey, value)
}
}
// ProtobufAppendVarintForce append a varint to the protobuf representation of a flow, even if it is a 0-value.
func (schema *Schema) ProtobufAppendVarintForce(bf *FlowMessage, columnKey ColumnKey, value uint64) {
column, _ := schema.LookupColumnByKey(columnKey)
column.ProtobufAppendVarintForce(bf, value)
}
// ProtobufAppendVarint append a varint to the protobuf representation of a flow.
func (column *Column) ProtobufAppendVarint(bf *FlowMessage, value uint64) {
if value > 0 {
column.ProtobufAppendVarintForce(bf, value)
}
}
// ProtobufAppendVarintForce append a varint to the protobuf representation of a flow, even when 0.
func (column *Column) ProtobufAppendVarintForce(bf *FlowMessage, value uint64) {
bf.init()
if column.ProtobufIndex > 0 && (column.ProtobufRepeated || !bf.protobufSet.Test(uint(column.ProtobufIndex))) {
bf.protobuf = protowire.AppendTag(bf.protobuf, column.ProtobufIndex, protowire.VarintType)
bf.protobuf = protowire.AppendVarint(bf.protobuf, value)
bf.protobufSet.Set(uint(column.ProtobufIndex))
if debug {
column.appendDebug(bf, value)
}
}
}
// ProtobufAppendBytes append a slice of bytes to the protobuf representation
// of a flow.
func (schema *Schema) ProtobufAppendBytes(bf *FlowMessage, columnKey ColumnKey, value []byte) {
if len(value) > 0 {
schema.ProtobufAppendBytesForce(bf, columnKey, value)
}
}
// ProtobufAppendBytesForce append a slice of bytes to the protobuf representation
// of a flow, even when empty
func (schema *Schema) ProtobufAppendBytesForce(bf *FlowMessage, columnKey ColumnKey, value []byte) {
column, _ := schema.LookupColumnByKey(columnKey)
column.ProtobufAppendBytesForce(bf, value)
}
// ProtobufAppendBytes append a slice of bytes to the protobuf representation
// of a flow.
func (column *Column) ProtobufAppendBytes(bf *FlowMessage, value []byte) {
if len(value) > 0 {
column.ProtobufAppendBytesForce(bf, value)
}
}
// ProtobufAppendBytesForce append a slice of bytes to the protobuf representation
// of a flow, even when empty
func (column *Column) ProtobufAppendBytesForce(bf *FlowMessage, value []byte) {
bf.init()
if column.ProtobufIndex > 0 && (column.ProtobufRepeated || !bf.protobufSet.Test(uint(column.ProtobufIndex))) {
bf.protobuf = protowire.AppendTag(bf.protobuf, column.ProtobufIndex, protowire.BytesType)
bf.protobuf = protowire.AppendBytes(bf.protobuf, value)
bf.protobufSet.Set(uint(column.ProtobufIndex))
if debug {
column.appendDebug(bf, value)
}
}
}
// ProtobufAppendIP append an IP to the protobuf representation
// of a flow.
func (schema *Schema) ProtobufAppendIP(bf *FlowMessage, columnKey ColumnKey, value netip.Addr) {
if value.IsValid() {
column, _ := schema.LookupColumnByKey(columnKey)
column.ProtobufAppendIPForce(bf, value)
}
}
// ProtobufAppendIP append an IP to the protobuf representation
// of a flow.
func (column *Column) ProtobufAppendIP(bf *FlowMessage, value netip.Addr) {
if value.IsValid() {
column.ProtobufAppendIPForce(bf, value)
}
}
// ProtobufAppendIPForce append an IP to the protobuf representation
// of a flow, even when not valid
func (column *Column) ProtobufAppendIPForce(bf *FlowMessage, value netip.Addr) {
bf.init()
if column.ProtobufIndex > 0 && (column.ProtobufRepeated || !bf.protobufSet.Test(uint(column.ProtobufIndex))) {
v := value.As16()
bf.protobuf = protowire.AppendTag(bf.protobuf, column.ProtobufIndex, protowire.BytesType)
bf.protobuf = protowire.AppendBytes(bf.protobuf, v[:])
bf.protobufSet.Set(uint(column.ProtobufIndex))
if debug {
column.appendDebug(bf, value)
}
}
}
func (column *Column) appendDebug(bf *FlowMessage, value interface{}) {
if bf.ProtobufDebug == nil {
bf.ProtobufDebug = make(map[ColumnKey]interface{})
}
if column.ProtobufRepeated {
if current, ok := bf.ProtobufDebug[column.Key]; ok {
bf.ProtobufDebug[column.Key] = append(current.([]interface{}), value)
} else {
bf.ProtobufDebug[column.Key] = []interface{}{value}
}
} else {
bf.ProtobufDebug[column.Key] = value
}
}
func (bf *FlowMessage) init() {
if bf.protobuf == nil {
bf.protobuf = make([]byte, maxSizeVarint, 500)
bf.protobufSet = *bitset.New(uint(ColumnLast))
}
}


@@ -0,0 +1,191 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package schema
import (
"fmt"
"net/netip"
"reflect"
"strings"
"testing"
"akvorado/common/helpers"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/reflect/protoreflect"
)
func TestProtobufDefinition(t *testing.T) {
// Use a smaller version
flows := Schema{
columns: []Column{
{
Key: ColumnTimeReceived,
ClickHouseType: "DateTime",
ProtobufType: protoreflect.Uint64Kind,
},
{Key: ColumnSamplingRate, ClickHouseType: "UInt64"},
{Key: ColumnExporterAddress, ClickHouseType: "LowCardinality(IPv6)"},
{Key: ColumnExporterName, ClickHouseType: "LowCardinality(String)"},
{
Key: ColumnSrcAddr,
ClickHouseType: "IPv6",
}, {
Key: ColumnSrcNetMask,
ClickHouseType: "UInt8",
}, {
Key: ColumnSrcNetPrefix,
ClickHouseType: "String",
ClickHouseAlias: `something`,
},
{Key: ColumnSrcAS, ClickHouseType: "UInt32"},
{
Key: ColumnSrcNetName,
ClickHouseType: "LowCardinality(String)",
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'name', SrcAddr, '')",
},
{Key: ColumnSrcCountry, ClickHouseType: "FixedString(2)"},
{
Key: ColumnDstASPath,
ClickHouseType: "Array(UInt32)",
}, {
Key: ColumnDstLargeCommunities,
ClickHouseType: "Array(UInt128)",
ClickHouseTransformFrom: []Column{
{Key: ColumnDstLargeCommunitiesASN, ClickHouseType: "Array(UInt32)"},
{Key: ColumnDstLargeCommunitiesLocalData1, ClickHouseType: "Array(UInt32)"},
{Key: ColumnDstLargeCommunitiesLocalData2, ClickHouseType: "Array(UInt32)"},
},
ClickHouseTransformTo: "something",
},
{Key: ColumnInIfName, ClickHouseType: "LowCardinality(String)"},
{
Key: ColumnInIfBoundary,
ClickHouseType: "Enum8('undefined' = 0, 'external' = 1, 'internal' = 2)",
ClickHouseNotSortingKey: true,
ProtobufType: protoreflect.EnumKind,
ProtobufEnumName: "Boundary",
ProtobufEnum: map[int]string{
0: "UNDEFINED",
1: "EXTERNAL",
2: "INTERNAL",
},
},
{Key: ColumnBytes, ClickHouseType: "UInt64"},
},
}.finalize()
got := flows.ProtobufDefinition()
expected := `
syntax = "proto3";
message FlowMessagevLH2TTFF7P352DSYYCJYWFCXHAM {
enum Boundary { UNDEFINED = 0; EXTERNAL = 1; INTERNAL = 2; }
uint64 TimeReceived = 1;
uint64 SamplingRate = 2;
bytes ExporterAddress = 3;
string ExporterName = 4;
bytes SrcAddr = 5;
bytes DstAddr = 6;
uint32 SrcNetMask = 7;
uint32 DstNetMask = 8;
uint32 SrcAS = 9;
uint32 DstAS = 10;
string SrcCountry = 11;
string DstCountry = 12;
repeated uint32 DstASPath = 13;
repeated uint32 DstLargeCommunitiesASN = 14;
repeated uint32 DstLargeCommunitiesLocalData1 = 15;
repeated uint32 DstLargeCommunitiesLocalData2 = 16;
string InIfName = 17;
string OutIfName = 18;
Boundary InIfBoundary = 19;
Boundary OutIfBoundary = 20;
uint64 Bytes = 21;
}
`
if diff := helpers.Diff(strings.Split(got, "\n"), strings.Split(expected, "\n")); diff != "" {
t.Fatalf("ProtobufDefinition() (-got, +want): %s", diff)
}
}
func TestProtobufMarshal(t *testing.T) {
exporterAddress := netip.MustParseAddr("::ffff:203.0.113.14")
bf := &FlowMessage{}
bf.TimeReceived = 1000
bf.SamplingRate = 20000
bf.ExporterAddress = exporterAddress
Flows.ProtobufAppendVarint(bf, ColumnDstAS, 65000)
Flows.ProtobufAppendVarint(bf, ColumnBytes, 200)
Flows.ProtobufAppendVarint(bf, ColumnPackets, 300)
Flows.ProtobufAppendVarint(bf, ColumnBytes, 300) // duplicate!
Flows.ProtobufAppendBytes(bf, ColumnDstCountry, []byte("FR"))
got := Flows.ProtobufMarshal(bf)
size, n := protowire.ConsumeVarint(got)
if uint64(len(got)-n) != size {
t.Fatalf("ProtobufMarshal() produced an incorrect size: %d + %d != %d", size, n, len(got))
}
t.Run("compare as bytes", func(t *testing.T) {
expected := []byte{
// 15: 65000
0x78, 0xe8, 0xfb, 0x03,
// 39: 200
0xb8, 0x02, 0xc8, 0x01,
// 40: 300
0xc0, 0x02, 0xac, 0x02,
// 17: FR
0x8a, 0x01, 0x02, 0x46, 0x52,
// 1: 1000
0x08, 0xe8, 0x07,
// 2: 20000
0x10, 0xa0, 0x9c, 0x01,
// 3: ::ffff:203.0.113.14
0x1a, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xcb, 0x0, 0x71, 0xe,
}
if diff := helpers.Diff(got[n:], expected); diff != "" {
t.Fatalf("ProtobufMarshal() (-got, +want):\n%s", diff)
}
})
t.Run("compare as protobuf message", func(t *testing.T) {
got := Flows.ProtobufDecode(t, got)
expected := FlowMessage{
TimeReceived: 1000,
SamplingRate: 20000,
ExporterAddress: exporterAddress,
DstAS: 65000,
ProtobufDebug: map[ColumnKey]interface{}{
ColumnBytes: 200,
ColumnPackets: 300,
ColumnDstCountry: "FR",
},
}
if diff := helpers.Diff(got, expected, helpers.DiffFormatter(reflect.TypeOf(ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("ProtobufDecode() (-got, +want):\n%s", diff)
}
})
}
func BenchmarkProtobufMarshal(b *testing.B) {
exporterAddress := netip.MustParseAddr("::ffff:203.0.113.14")
DisableDebug(b)
for i := 0; i < b.N; i++ {
bf := &FlowMessage{
TimeReceived: 1000,
SamplingRate: 20000,
ExporterAddress: exporterAddress,
}
Flows.ProtobufAppendVarint(bf, ColumnDstAS, 65000)
Flows.ProtobufAppendVarint(bf, ColumnBytes, 200)
Flows.ProtobufAppendVarint(bf, ColumnPackets, 300)
Flows.ProtobufAppendVarint(bf, ColumnBytes, 300) // duplicate!
Flows.ProtobufAppendBytes(bf, ColumnDstCountry, []byte("FR"))
Flows.ProtobufMarshal(bf)
}
}

common/schema/release.go

@@ -0,0 +1,8 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
//go:build release
package schema
const debug = false

common/schema/tests.go

@@ -0,0 +1,103 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
//go:build !release
package schema
import (
"net/netip"
"reflect"
"strings"
"testing"
"github.com/jhump/protoreflect/desc"
"github.com/jhump/protoreflect/desc/protoparse"
"github.com/jhump/protoreflect/dynamic"
"google.golang.org/protobuf/encoding/protowire"
)
var debug = true
// DisableDebug disables debug during the provided test.
func DisableDebug(t testing.TB) {
debug = false
t.Cleanup(func() {
debug = true
})
}
// ProtobufDecode decodes the provided protobuf message.
func (schema *Schema) ProtobufDecode(t *testing.T, input []byte) *FlowMessage {
parser := protoparse.Parser{
Accessor: protoparse.FileContentsFromMap(map[string]string{
"flow.proto": schema.ProtobufDefinition(),
}),
}
descs, err := parser.ParseFiles("flow.proto")
if err != nil {
t.Fatalf("ParseFiles(%q) error:\n%+v", "flow.proto", err)
}
var descriptor *desc.MessageDescriptor
for _, msg := range descs[0].GetMessageTypes() {
if strings.HasPrefix(msg.GetName(), "FlowMessagev") {
descriptor = msg
break
}
}
if descriptor == nil {
t.Fatal("cannot find message descriptor")
}
message := dynamic.NewMessage(descriptor)
size, n := protowire.ConsumeVarint(input)
if len(input)-n != int(size) {
t.Fatalf("bad length for protobuf message: %d - %d != %d", len(input), n, size)
}
if err := message.Unmarshal(input[n:]); err != nil {
t.Fatalf("Unmarshal() error:\n%+v", err)
}
textVersion, _ := message.MarshalTextIndent()
t.Logf("Unmarshal():\n%s", textVersion)
flow := FlowMessage{
ProtobufDebug: map[ColumnKey]interface{}{},
}
for _, field := range message.GetKnownFields() {
k := int(field.GetNumber())
name := field.GetName()
switch name {
case "TimeReceived":
flow.TimeReceived = message.GetFieldByNumber(k).(uint64)
case "SamplingRate":
flow.SamplingRate = uint32(message.GetFieldByNumber(k).(uint64))
case "ExporterAddress":
ip, _ := netip.AddrFromSlice(message.GetFieldByNumber(k).([]byte))
flow.ExporterAddress = ip
case "SrcAddr":
ip, _ := netip.AddrFromSlice(message.GetFieldByNumber(k).([]byte))
flow.SrcAddr = ip
case "DstAddr":
ip, _ := netip.AddrFromSlice(message.GetFieldByNumber(k).([]byte))
flow.DstAddr = ip
case "SrcAS":
flow.SrcAS = uint32(message.GetFieldByNumber(k).(uint32))
case "DstAS":
flow.DstAS = uint32(message.GetFieldByNumber(k).(uint32))
default:
column, ok := schema.LookupColumnByName(name)
if !ok {
break
}
key := column.Key
value := message.GetFieldByNumber(k)
if reflect.ValueOf(value).IsZero() {
break
}
flow.ProtobufDebug[key] = value
}
}
return &flow
}


@@ -1,17 +1,23 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
// Package schema is an abstraction of the data schema used by Akvorado. It is a
// leaky abstraction as there are multiple parts dependant of the subsystem that
// will use it.
// Package schema is an abstraction of the data schema for flows used by
// Akvorado. It is a leaky abstraction as there are multiple parts dependant of
// the subsystem that will use it.
package schema
import orderedmap "github.com/elliotchance/orderedmap/v2"
import (
"net/netip"
"github.com/bits-and-blooms/bitset"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/reflect/protoreflect"
)
// Schema is the data schema.
type Schema struct {
// We use an ordered map for direct access to columns.
columns *orderedmap.OrderedMap[ColumnKey, Column]
columns []Column // Ordered list of columns
columnIndex []*Column // Columns indexed by ColumnKey
// For ClickHouse. This is the set of primary keys (order is important and
// may not follow column order).
@@ -40,7 +46,45 @@ type Column struct {
// For the console.
ConsoleNotDimension bool
// For protobuf. The index is automatically derived from the position,
// unless specified. Use -1 to not include the column into the protobuf
// schema.
ProtobufIndex protowire.Number
ProtobufType protoreflect.Kind // Uint64Kind, Uint32Kind, BytesKind, StringKind, EnumKind
ProtobufEnum map[int]string
ProtobufEnumName string
ProtobufRepeated bool
}
// ColumnKey is the name of a column
type ColumnKey int
// FlowMessage is the abstract representation of a flow through various subsystems.
type FlowMessage struct {
TimeReceived uint64
SamplingRate uint32
// For exporter classifier
ExporterAddress netip.Addr
// For interface classifier
InIf uint32
OutIf uint32
// For geolocation or BMP
SrcAddr netip.Addr
DstAddr netip.Addr
NextHop netip.Addr
// Core component may override them
SrcAS uint32
DstAS uint32
// protobuf is the protobuf representation for the information not contained above.
protobuf []byte
protobufSet bitset.BitSet
ProtobufDebug map[ColumnKey]interface{} `json:"-"` // for testing purpose
}
const maxSizeVarint = 10 // protowire.SizeVarint(^uint64(0))


@@ -0,0 +1,17 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package schema
import (
"testing"
"google.golang.org/protobuf/encoding/protowire"
)
func TestMaxSizeVarint(t *testing.T) {
got := protowire.SizeVarint(^uint64(0))
if got != maxSizeVarint {
t.Fatalf("maximum size for varint is %d, not %d", got, maxSizeVarint)
}
}


@@ -21,8 +21,9 @@ to 0.
- 🩹 *inlet*: handle correctly interfaces with high indexes for sFlow
- 🩹 *docker*: fix Kafka healthcheck
- 🌱 *inlet*: improve performance of Protobuf encoding
- 🌱 *inlet*: improve decoding/encoding performance (twice faster!)
- 🌱 *orchestrator*: set TTL for ClickHouse system log tables and `exporters` table
- 🌱 *common*: more flexible data schema (first step to make this configurable)
## 1.6.4 - 2022-12-22


@@ -74,7 +74,7 @@ const emit = defineEmits<{
}>();
const serverConfiguration = inject(ServerConfigKey)!;
const selectedDimensions = ref<Array<typeof dimensions.value[0]>>([]);
const selectedDimensions = ref<Array<(typeof dimensions.value)[0]>>([]);
const dimensionsError = computed(() => {
if (selectedDimensions.value.length < props.minDimensions) {
return "At least two dimensions are required";
@@ -110,7 +110,7 @@ const dimensions = computed(() =>
}))
);
const removeDimension = (dimension: typeof dimensions.value[0]) => {
const removeDimension = (dimension: (typeof dimensions.value)[0]) => {
selectedDimensions.value = selectedDimensions.value.filter(
(d) => d !== dimension
);
@@ -125,7 +125,7 @@ watch(
if (value)
selectedDimensions.value = value.selected
.map((name) => dimensions.find((d) => d.name === name))
.filter((d): d is typeof dimensions[0] => !!d);
.filter((d): d is (typeof dimensions)[0] => !!d);
},
{ immediate: true, deep: true }
);


@@ -171,7 +171,7 @@ const graph = computed((): ECOption => {
let table: {
key: string;
seriesName: string;
marker: typeof params[0]["marker"];
marker: (typeof params)[0]["marker"];
up: number;
down: number;
}[] = [];


@@ -6,11 +6,13 @@ package flows
import (
"context"
"net"
"net/netip"
"testing"
"time"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/decoder/netflow"
)
@@ -100,73 +102,70 @@ func TestGetNetflowData(t *testing.T) {
expected := []interface{}{
[]interface{}{}, // templates
[]interface{}{
&decoder.FlowMessage{
SequenceNum: 100,
SamplingRate: 30000,
ExporterAddress: net.ParseIP("127.0.0.1"),
TimeFlowStart: 1647361980,
TimeFlowEnd: 1647361980,
Bytes: 1500,
Packets: 1,
SrcAddr: net.ParseIP("192.0.2.206"),
DstAddr: net.ParseIP("203.0.113.165"),
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 34974,
InIf: 10,
OutIf: 20,
ForwardingStatus: 64,
SrcAS: 65201,
DstAS: 65202,
SrcNetMask: 24,
DstNetMask: 23,
&schema.FlowMessage{
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:192.0.2.206"),
DstAddr: netip.MustParseAddr("::ffff:203.0.113.165"),
InIf: 10,
OutIf: 20,
SrcAS: 65201,
DstAS: 65202,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1500,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 34974,
schema.ColumnForwardingStatus: 64,
schema.ColumnSrcNetMask: 24,
schema.ColumnDstNetMask: 23,
},
},
&decoder.FlowMessage{
SequenceNum: 100,
SamplingRate: 30000,
ExporterAddress: net.ParseIP("127.0.0.1"),
TimeFlowStart: 1647361980,
TimeFlowEnd: 1647361980,
Bytes: 1339,
Packets: 1,
SrcAddr: net.ParseIP("192.0.2.236"),
DstAddr: net.ParseIP("203.0.113.67"),
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 33199,
InIf: 10,
OutIf: 20,
ForwardingStatus: 64,
SrcAS: 65201,
DstAS: 65202,
SrcNetMask: 24,
DstNetMask: 24,
&schema.FlowMessage{
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:192.0.2.236"),
DstAddr: netip.MustParseAddr("::ffff:203.0.113.67"),
InIf: 10,
OutIf: 20,
SrcAS: 65201,
DstAS: 65202,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1339,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 33199,
schema.ColumnForwardingStatus: 64,
schema.ColumnSrcNetMask: 24,
schema.ColumnDstNetMask: 24,
},
},
},
[]interface{}{
&decoder.FlowMessage{
SequenceNum: 101,
SamplingRate: 30000,
ExporterAddress: net.ParseIP("127.0.0.1"),
TimeFlowStart: 1647361980,
TimeFlowEnd: 1647361980,
Bytes: 1300,
Packets: 1,
SrcAddr: net.ParseIP("2001:db8::1"),
DstAddr: net.ParseIP("2001:db8:2:0:cea5:d643:ec43:3772"),
Etype: 0x86dd,
Proto: 6,
SrcPort: 33179,
DstPort: 443,
InIf: 20,
OutIf: 10,
ForwardingStatus: 64,
SrcAS: 65201,
DstAS: 65202,
SrcNetMask: 48,
DstNetMask: 48,
&schema.FlowMessage{
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("2001:db8::1"),
DstAddr: netip.MustParseAddr("2001:db8:2:0:cea5:d643:ec43:3772"),
InIf: 20,
OutIf: 10,
SrcAS: 65201,
DstAS: 65202,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1300,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 33179,
schema.ColumnDstPort: 443,
schema.ColumnForwardingStatus: 64,
schema.ColumnSrcNetMask: 48,
schema.ColumnDstNetMask: 48,
},
},
},
}
@@ -175,7 +174,7 @@ func TestGetNetflowData(t *testing.T) {
continue
}
switch g := got[idx1].(type) {
case []*decoder.FlowMessage:
case []*schema.FlowMessage:
for idx2 := range g {
g[idx2].TimeReceived = 0
}

go.mod

@@ -8,27 +8,28 @@ require (
github.com/alecthomas/chroma v0.10.0
github.com/antonmedv/expr v1.9.0
github.com/benbjohnson/clock v1.3.0
github.com/bits-and-blooms/bitset v1.4.0
github.com/cenkalti/backoff/v4 v4.2.0
github.com/chenyahui/gin-cache v1.7.2-0.20221230102232-cd1fa6cf7b54
github.com/docker/docker v20.10.22+incompatible
github.com/docker/go-connections v0.4.0
github.com/eapache/go-resiliency v1.3.0
github.com/elliotchance/orderedmap/v2 v2.2.0
github.com/fsnotify/fsnotify v1.6.0
github.com/gin-gonic/gin v1.8.2
github.com/glebarez/sqlite v1.6.0
github.com/go-playground/validator/v10 v10.11.1
github.com/go-redis/redis/v8 v8.11.5
github.com/golang/mock v1.6.0
github.com/golang/protobuf v1.5.2
github.com/google/gopacket v1.1.19
github.com/gosnmp/gosnmp v1.35.0
github.com/itchyny/gojq v0.12.11
github.com/jhump/protoreflect v1.14.1
github.com/kentik/patricia v1.2.0
github.com/kylelemons/godebug v1.1.0
github.com/mattn/go-isatty v0.0.17
github.com/mitchellh/mapstructure v1.5.0
github.com/netsampler/goflow2 v1.1.1-0.20221008154147-57fad2e0c837
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799
github.com/oschwald/maxminddb-golang v1.10.0
github.com/osrg/gobgp/v3 v3.10.0
github.com/prometheus/client_golang v1.14.0
@@ -73,6 +74,7 @@ require (
github.com/go-playground/universal-translator v0.18.0 // indirect
github.com/goccy/go-json v0.9.11 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
@@ -97,7 +99,6 @@ require (
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect
github.com/paulmach/orb v0.8.0 // indirect
github.com/pelletier/go-toml/v2 v2.0.6 // indirect
github.com/pierrec/lz4/v4 v4.1.17 // indirect
@@ -125,6 +126,7 @@ require (
golang.org/x/net v0.4.0 // indirect
golang.org/x/sync v0.0.0-20220923202941-7f9b1623fab7 // indirect
golang.org/x/text v0.5.0 // indirect
google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gotest.tools/v3 v3.3.0 // indirect
modernc.org/libc v1.21.5 // indirect

go.sum

@@ -59,6 +59,8 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.4.0 h1:+YZ8ePm+He2pU3dZlIZiOeAKfrBkXi1lSrXJ/Xzgbu8=
github.com/bits-and-blooms/bitset v1.4.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/cenkalti/backoff/v4 v4.2.0 h1:HN5dHm3WBOgndBH6E8V0q2jIYIR3s9yglV8k/+MN3u4=
github.com/cenkalti/backoff/v4 v4.2.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
@@ -75,6 +77,7 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/coreos/go-systemd/v22 v22.3.3-0.20220203105225-a9a7ef127534/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
@@ -102,11 +105,10 @@ github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 h1:YEetp8
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU=
github.com/eapache/queue v1.1.0 h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/elliotchance/orderedmap/v2 v2.2.0 h1:7/2iwO98kYT4XkOjA9mBEIwvi4KpGB4cyHeOFOnj4Vk=
github.com/elliotchance/orderedmap/v2 v2.2.0/go.mod h1:85lZyVbpGaGvHvnKa7Qhx7zncAdBIBq6u56Hb1PRU5Q=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@@ -226,6 +228,7 @@ github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
@@ -267,6 +270,12 @@ github.com/jcmturner/rpc/v2 v2.0.3 h1:7FXXj8Ti1IaVFpSAziCZWNzbNuZmnvw/i6CqLNdWfZ
github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc=
github.com/jellydator/ttlcache/v2 v2.11.1 h1:AZGME43Eh2Vv3giG6GeqeLeFXxwxn1/qHItqWZl6U64=
github.com/jellydator/ttlcache/v2 v2.11.1/go.mod h1:RtE5Snf0/57e+2cLWFYWCCsLas2Hy3c5Z4n14XmSvTI=
github.com/jhump/gopoet v0.0.0-20190322174617-17282ff210b3/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI=
github.com/jhump/gopoet v0.1.0/go.mod h1:me9yfT6IJSlOL3FCfrg+L6yzUEZ+5jW6WHt4Sk+UPUI=
github.com/jhump/goprotoc v0.5.0/go.mod h1:VrbvcYrQOrTi3i0Vf+m+oqQWk9l72mjkJCYo7UvLHRQ=
github.com/jhump/protoreflect v1.11.0/go.mod h1:U7aMIjN0NWq9swDP7xDdoMfRHb35uiuTd3Z9nFXJf5E=
github.com/jhump/protoreflect v1.14.1 h1:N88q7JkxTHWFEqReuTsYH1dPIwXxA0ITNQp7avLY10s=
github.com/jhump/protoreflect v1.14.1/go.mod h1:JytZfP5d0r8pVNLZvai7U/MCuTWITgrI4tTg7puQFKI=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
@@ -816,6 +825,8 @@ google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7Fc
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e h1:S9GbmC1iCgvbLyAokVCwiO6tVIrU9Y7c5oMx1V/ki/Y=
google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e/go.mod h1:9qHF0xnpdSfF6knlcsnpzUu5y+rpwgbvsyGAZPBMg4s=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -828,6 +839,8 @@ google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKa
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.51.0 h1:E1eGv1FTqoLIdnBCZufiSHgKjlqG6fKFf6pPWtMTh8U=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=


@@ -4,7 +4,6 @@
package bmp
import (
"net"
"net/netip"
"github.com/kentik/patricia"
@@ -23,12 +22,10 @@ type LookupResult struct {
// provided next hop if provided. This is somewhat approximate because
// we use the best route we have, while the exporter may not have this
// best route available. The returned result should not be modified!
func (c *Component) Lookup(addrIP net.IP, nextHopIP net.IP) LookupResult {
func (c *Component) Lookup(ip netip.Addr, nh netip.Addr) LookupResult {
if !c.config.CollectASNs && !c.config.CollectASPaths && !c.config.CollectCommunities {
return LookupResult{}
}
ip, _ := netip.AddrFromSlice(addrIP.To16())
nh, _ := netip.AddrFromSlice(nextHopIP.To16())
v6 := patricia.NewIPv6Address(ip.AsSlice(), 128)
c.mu.RLock()


@@ -1078,7 +1078,7 @@ func TestBMP(t *testing.T) {
send(t, conn, "bmp-eor.pcap")
time.Sleep(20 * time.Millisecond)
lookup := c.Lookup(net.ParseIP("2001:db8:1::10"), net.ParseIP("2001:db8::a"))
lookup := c.Lookup(netip.MustParseAddr("2001:db8:1::10"), netip.MustParseAddr("2001:db8::a"))
if lookup.ASN != 174 {
t.Errorf("Lookup() == %d, expected 174", lookup.ASN)
}
@@ -1091,11 +1091,11 @@ func TestBMP(t *testing.T) {
attributes: c.rib.rtas.Put(routeAttributes{asn: 176}),
})
lookup = c.Lookup(net.ParseIP("2001:db8:1::10"), net.ParseIP("2001:db8::a"))
lookup = c.Lookup(netip.MustParseAddr("2001:db8:1::10"), netip.MustParseAddr("2001:db8::a"))
if lookup.ASN != 176 {
t.Errorf("Lookup() == %d, expected 176", lookup.ASN)
}
lookup = c.Lookup(net.ParseIP("2001:db8:1::10"), net.ParseIP("2001:db8::b"))
lookup = c.Lookup(netip.MustParseAddr("2001:db8:1::10"), netip.MustParseAddr("2001:db8::b"))
if lookup.ASN != 174 {
t.Errorf("Lookup() == %d, expected 174", lookup.ASN)
}
@@ -1108,11 +1108,11 @@ func TestBMP(t *testing.T) {
helpers.StartStop(t, c)
c.PopulateRIB(t)
lookup := c.Lookup(net.ParseIP("192.0.2.2").To16(), net.ParseIP("198.51.100.200").To16())
lookup := c.Lookup(netip.MustParseAddr("::ffff:192.0.2.2"), netip.MustParseAddr("::ffff:198.51.100.200"))
if lookup.ASN != 174 {
t.Errorf("Lookup() == %d, expected 174", lookup.ASN)
}
lookup = c.Lookup(net.ParseIP("192.0.2.254").To16(), net.ParseIP("198.51.100.200").To16())
lookup = c.Lookup(netip.MustParseAddr("::ffff:192.0.2.254"), netip.MustParseAddr("::ffff:198.51.100.200"))
if lookup.ASN != 0 {
t.Errorf("Lookup() == %d, expected 0", lookup.ASN)
}


@@ -4,14 +4,12 @@
package core
import (
"net"
"net/netip"
"strconv"
"time"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/inlet/flow/decoder"
"akvorado/common/schema"
"akvorado/inlet/snmp"
)
@@ -22,7 +20,11 @@ type exporterAndInterfaceInfo struct {
}
// enrichFlow adds more data to a flow.
func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *flow.Message) (skip bool) {
func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *schema.FlowMessage) (skip bool) {
var flowExporterName string
var flowInIfName, flowInIfDescription, flowOutIfName, flowOutIfDescription string
var flowInIfSpeed, flowOutIfSpeed uint32
errLogger := c.r.Sample(reporter.BurstSampler(time.Minute, 10))
if flow.InIf != 0 {
@@ -34,10 +36,10 @@ func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *
c.metrics.flowsErrors.WithLabelValues(exporterStr, err.Error()).Inc()
skip = true
} else {
flow.ExporterName = exporterName
flow.InIfName = iface.Name
flow.InIfDescription = iface.Description
flow.InIfSpeed = uint32(iface.Speed)
flowExporterName = exporterName
flowInIfName = iface.Name
flowInIfDescription = iface.Description
flowInIfSpeed = uint32(iface.Speed)
}
}
@@ -54,10 +56,10 @@ func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *
skip = true
}
} else {
flow.ExporterName = exporterName
flow.OutIfName = iface.Name
flow.OutIfDescription = iface.Description
flow.OutIfSpeed = uint32(iface.Speed)
flowExporterName = exporterName
flowOutIfName = iface.Name
flowOutIfDescription = iface.Description
flowOutIfSpeed = uint32(iface.Speed)
}
}
@@ -68,11 +70,11 @@ func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *
}
if samplingRate, ok := c.config.OverrideSamplingRate.Lookup(exporterIP); ok && samplingRate > 0 {
flow.SamplingRate = uint64(samplingRate)
flow.SamplingRate = uint32(samplingRate)
}
if flow.SamplingRate == 0 {
if samplingRate, ok := c.config.DefaultSamplingRate.Lookup(exporterIP); ok && samplingRate > 0 {
flow.SamplingRate = uint64(samplingRate)
flow.SamplingRate = uint32(samplingRate)
} else {
c.metrics.flowsErrors.WithLabelValues(exporterStr, "sampling rate missing").Inc()
skip = true
@@ -84,41 +86,48 @@ func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *
}
// Classification
c.classifyExporter(exporterStr, flow)
c.classifyInterface(exporterStr, flow,
flow.OutIfName, flow.OutIfDescription, flow.OutIfSpeed,
&flow.OutIfConnectivity, &flow.OutIfProvider, &flow.OutIfBoundary)
c.classifyInterface(exporterStr, flow,
flow.InIfName, flow.InIfDescription, flow.InIfSpeed,
&flow.InIfConnectivity, &flow.InIfProvider, &flow.InIfBoundary)
c.classifyExporter(exporterStr, flowExporterName, flow)
c.classifyInterface(exporterStr, flowExporterName, flow,
flowOutIfName, flowOutIfDescription, flowOutIfSpeed,
false)
c.classifyInterface(exporterStr, flowExporterName, flow,
flowInIfName, flowInIfDescription, flowInIfSpeed,
true)
sourceBMP := c.d.BMP.Lookup(net.IP(flow.SrcAddr), nil)
destBMP := c.d.BMP.Lookup(net.IP(flow.DstAddr), net.IP(flow.NextHop))
flow.SrcAS = c.getASNumber(net.IP(flow.SrcAddr), flow.SrcAS, sourceBMP.ASN)
flow.DstAS = c.getASNumber(net.IP(flow.DstAddr), flow.DstAS, destBMP.ASN)
flow.SrcCountry = c.d.GeoIP.LookupCountry(net.IP(flow.SrcAddr))
flow.DstCountry = c.d.GeoIP.LookupCountry(net.IP(flow.DstAddr))
flow.DstCommunities = destBMP.Communities
flow.DstASPath = destBMP.ASPath
if len(destBMP.LargeCommunities) > 0 {
flow.DstLargeCommunities = &decoder.FlowMessage_LargeCommunities{
ASN: make([]uint32, len(destBMP.LargeCommunities)),
LocalData1: make([]uint32, len(destBMP.LargeCommunities)),
LocalData2: make([]uint32, len(destBMP.LargeCommunities)),
}
for i := 0; i < len(destBMP.LargeCommunities); i++ {
flow.DstLargeCommunities.ASN[i] = destBMP.LargeCommunities[i].ASN
flow.DstLargeCommunities.LocalData1[i] = destBMP.LargeCommunities[i].LocalData1
flow.DstLargeCommunities.LocalData2[i] = destBMP.LargeCommunities[i].LocalData2
}
sourceBMP := c.d.BMP.Lookup(flow.SrcAddr, netip.Addr{})
destBMP := c.d.BMP.Lookup(flow.DstAddr, flow.NextHop)
flow.SrcAS = c.getASNumber(flow.SrcAddr, flow.SrcAS, sourceBMP.ASN)
flow.DstAS = c.getASNumber(flow.DstAddr, flow.DstAS, destBMP.ASN)
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnSrcCountry, []byte(c.d.GeoIP.LookupCountry(flow.SrcAddr)))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnDstCountry, []byte(c.d.GeoIP.LookupCountry(flow.DstAddr)))
for _, comm := range destBMP.Communities {
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnDstCommunities, uint64(comm))
}
for _, asn := range destBMP.ASPath {
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnDstASPath, uint64(asn))
}
for _, comm := range destBMP.LargeCommunities {
schema.Flows.ProtobufAppendVarintForce(flow,
schema.ColumnDstLargeCommunitiesASN, uint64(comm.ASN))
schema.Flows.ProtobufAppendVarintForce(flow,
schema.ColumnDstLargeCommunitiesLocalData1, uint64(comm.LocalData1))
schema.Flows.ProtobufAppendVarintForce(flow,
schema.ColumnDstLargeCommunitiesLocalData2, uint64(comm.LocalData2))
}
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterName, []byte(flowExporterName))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnInIfName, []byte(flowInIfName))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnInIfDescription, []byte(flowInIfDescription))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnOutIfName, []byte(flowOutIfName))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnOutIfDescription, []byte(flowOutIfDescription))
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnInIfSpeed, uint64(flowInIfSpeed))
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnOutIfSpeed, uint64(flowOutIfSpeed))
return
}
// getASNumber retrieves the AS number for a flow, depending on user preferences.
func (c *Component) getASNumber(flowAddr net.IP, flowAS, bmpAS uint32) (asn uint32) {
func (c *Component) getASNumber(flowAddr netip.Addr, flowAS, bmpAS uint32) (asn uint32) {
for _, provider := range c.config.ASNProviders {
if asn != 0 {
break
@@ -145,18 +154,21 @@ func (c *Component) getASNumber(flowAddr net.IP, flowAS, bmpAS uint32) (asn uint32) {
return asn
}
func (c *Component) classifyExporter(ip string, flow *flow.Message) {
func writeExporter(flow *schema.FlowMessage, classification exporterClassification) {
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterGroup, []byte(classification.Group))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterRole, []byte(classification.Role))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterSite, []byte(classification.Site))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterRegion, []byte(classification.Region))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnExporterTenant, []byte(classification.Tenant))
}
func (c *Component) classifyExporter(ip string, name string, flow *schema.FlowMessage) {
if len(c.config.ExporterClassifiers) == 0 {
return
}
name := flow.ExporterName
si := exporterInfo{IP: ip, Name: name}
if classification, ok := c.classifierExporterCache.Get(si); ok {
flow.ExporterGroup = classification.Group
flow.ExporterRole = classification.Role
flow.ExporterSite = classification.Site
flow.ExporterRegion = classification.Region
flow.ExporterTenant = classification.Tenant
writeExporter(flow, classification)
return
}
@@ -178,29 +190,33 @@ func (c *Component) classifyExporter(ip string, flow *flow.Message) {
break
}
c.classifierExporterCache.Set(si, classification)
flow.ExporterGroup = classification.Group
flow.ExporterRole = classification.Role
flow.ExporterSite = classification.Site
flow.ExporterRegion = classification.Region
flow.ExporterTenant = classification.Tenant
writeExporter(flow, classification)
}
func (c *Component) classifyInterface(ip string, fl *flow.Message,
ifName, ifDescription string, ifSpeed uint32,
connectivity, provider *string, boundary *decoder.FlowMessage_Boundary) {
func writeInterface(flow *schema.FlowMessage, classification interfaceClassification, directionIn bool) {
if directionIn {
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnInIfConnectivity, []byte(classification.Connectivity))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnInIfProvider, []byte(classification.Provider))
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnInIfBoundary, uint64(classification.Boundary))
} else {
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnOutIfConnectivity, []byte(classification.Connectivity))
schema.Flows.ProtobufAppendBytes(flow, schema.ColumnOutIfProvider, []byte(classification.Provider))
schema.Flows.ProtobufAppendVarint(flow, schema.ColumnOutIfBoundary, uint64(classification.Boundary))
}
}
func (c *Component) classifyInterface(ip string, exporterName string, fl *schema.FlowMessage, ifName, ifDescription string, ifSpeed uint32, directionIn bool) {
if len(c.config.InterfaceClassifiers) == 0 {
return
}
si := exporterInfo{IP: ip, Name: fl.ExporterName}
si := exporterInfo{IP: ip, Name: exporterName}
ii := interfaceInfo{Name: ifName, Description: ifDescription, Speed: ifSpeed}
key := exporterAndInterfaceInfo{
Exporter: si,
Interface: ii,
}
if classification, ok := c.classifierInterfaceCache.Get(key); ok {
*connectivity = classification.Connectivity
*provider = classification.Provider
*boundary = convertBoundaryToProto(classification.Boundary)
writeInterface(fl, classification, directionIn)
return
}
@@ -211,7 +227,7 @@ func (c *Component) classifyInterface(ip string, fl *flow.Message,
c.classifierErrLogger.Err(err).
Str("type", "interface").
Int("index", idx).
Str("exporter", fl.ExporterName).
Str("exporter", exporterName).
Str("interface", ifName).
Msg("error executing classifier")
c.metrics.classifierErrors.WithLabelValues("interface", strconv.Itoa(idx)).Inc()
@@ -227,19 +243,7 @@ func (c *Component) classifyInterface(ip string, fl *flow.Message,
break
}
c.classifierInterfaceCache.Set(key, classification)
*connectivity = classification.Connectivity
*provider = classification.Provider
*boundary = convertBoundaryToProto(classification.Boundary)
}
func convertBoundaryToProto(from interfaceBoundary) decoder.FlowMessage_Boundary {
switch from {
case externalBoundary:
return decoder.FlowMessage_EXTERNAL
case internalBoundary:
return decoder.FlowMessage_INTERNAL
}
return decoder.FlowMessage_UNDEFINED
writeInterface(fl, classification, directionIn)
}
func isPrivateAS(as uint32) bool {
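Note: the enricher above no longer fills struct fields such as ExporterGroup or SrcCountry; it appends values straight into the flow's protobuf representation through schema.Flows.ProtobufAppendBytes/Varint. Below is a minimal, self-contained sketch of that idea using protowire directly; the field numbers and the flowBuffer type are made up for illustration and do not reflect the real akvorado/common/schema implementation.

```
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

type flowBuffer struct{ buf []byte }

// appendVarint encodes field as a protobuf varint field directly into the buffer.
func (f *flowBuffer) appendVarint(field protowire.Number, v uint64) {
	f.buf = protowire.AppendTag(f.buf, field, protowire.VarintType)
	f.buf = protowire.AppendVarint(f.buf, v)
}

// appendBytes encodes field as a length-delimited protobuf field.
func (f *flowBuffer) appendBytes(field protowire.Number, v []byte) {
	f.buf = protowire.AppendTag(f.buf, field, protowire.BytesType)
	f.buf = protowire.AppendBytes(f.buf, v)
}

func main() {
	var flow flowBuffer
	flow.appendBytes(20, []byte("192_0_2_142")) // e.g. an ExporterName-like column
	flow.appendVarint(30, 1000)                 // e.g. an InIfSpeed-like column
	fmt.Printf("%% x\n", flow.buf)
}
```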

View File

@@ -5,7 +5,8 @@ package core
import (
"fmt"
"net"
"net/netip"
"reflect"
"testing"
"time"
@@ -17,9 +18,9 @@ import (
"akvorado/common/helpers"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/bmp"
"akvorado/inlet/flow"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/geoip"
"akvorado/inlet/kafka"
"akvorado/inlet/snmp"
@@ -29,32 +30,32 @@ func TestEnrich(t *testing.T) {
cases := []struct {
Name string
Configuration gin.H
InputFlow func() *flow.Message
OutputFlow *flow.Message
InputFlow func() *schema.FlowMessage
OutputFlow *schema.FlowMessage
}{
{
Name: "no rule",
Configuration: gin.H{},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
},
},
}, {
Name: "no rule, override sampling rate",
@@ -63,49 +64,49 @@ func TestEnrich(t *testing.T) {
"192.0.2.128/25": 500,
"192.0.2.141/32": 1000,
}},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 500,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
OutputFlow: &schema.FlowMessage{
SamplingRate: 500,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
},
},
}, {
Name: "no rule, no sampling rate, default is one value",
Configuration: gin.H{"defaultsamplingrate": 500},
InputFlow: func() *flow.Message {
return &flow.Message{
ExporterAddress: net.ParseIP("192.0.2.142"),
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 500,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
OutputFlow: &schema.FlowMessage{
SamplingRate: 500,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
},
},
}, {
Name: "no rule, no sampling rate, default is map",
@@ -114,25 +115,25 @@ func TestEnrich(t *testing.T) {
"192.0.2.128/25": 500,
"192.0.2.141/32": 1000,
}},
InputFlow: func() *flow.Message {
return &flow.Message{
ExporterAddress: net.ParseIP("192.0.2.142"),
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 500,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
OutputFlow: &schema.FlowMessage{
SamplingRate: 500,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
},
},
}, {
Name: "exporter rule",
@@ -143,29 +144,29 @@ func TestEnrich(t *testing.T) {
`ClassifyRegion("other") && ClassifySite("unknown") && ClassifyTenant("alfred")`,
},
},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
ExporterRegion: "asia",
ExporterTenant: "alfred",
ExporterSite: "unknown",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnExporterRegion: "asia",
schema.ColumnExporterTenant: "alfred",
schema.ColumnExporterSite: "unknown",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
},
},
}, {
Name: "interface rule",
@@ -179,28 +180,28 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
`ClassifyInternal()`,
},
},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
InIfBoundary: 2, // Internal
OutIfBoundary: 2,
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
schema.ColumnInIfBoundary: internalBoundary,
schema.ColumnOutIfBoundary: internalBoundary,
},
},
}, {
Name: "configure twice boundary",
@@ -210,28 +211,28 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
`ClassifyExternal()`,
},
},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
InIfBoundary: 2, // Internal
OutIfBoundary: 2,
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
schema.ColumnInIfBoundary: 2, // Internal
schema.ColumnOutIfBoundary: 2,
},
},
}, {
Name: "configure twice provider",
@@ -241,28 +242,28 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
`ClassifyProvider("cogent")`,
},
},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
InIfProvider: "telia",
OutIfProvider: "telia",
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
schema.ColumnInIfProvider: "telia",
schema.ColumnOutIfProvider: "telia",
},
},
}, {
Name: "classify depending on description",
@@ -273,66 +274,66 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
`ClassifyInternal() && ClassifyConnectivity("core")`,
},
},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
InIfConnectivity: "p100",
OutIfConnectivity: "core",
InIfProvider: "othello",
OutIfProvider: "othello",
InIfBoundary: 1, // external
OutIfBoundary: 2, // internal
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
schema.ColumnInIfConnectivity: "p100",
schema.ColumnOutIfConnectivity: "core",
schema.ColumnInIfProvider: "othello",
schema.ColumnOutIfProvider: "othello",
schema.ColumnInIfBoundary: 1, // external
schema.ColumnOutIfBoundary: 2, // internal
},
},
}, {
Name: "use data from BMP",
Configuration: gin.H{},
InputFlow: func() *flow.Message {
return &flow.Message{
InputFlow: func() *schema.FlowMessage {
return &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
InIf: 100,
OutIf: 200,
SrcAddr: net.ParseIP("192.0.2.142"),
DstAddr: net.ParseIP("192.0.2.10"),
SrcAddr: netip.MustParseAddr("::ffff:192.0.2.142"),
DstAddr: netip.MustParseAddr("::ffff:192.0.2.10"),
}
},
OutputFlow: &flow.Message{
SamplingRate: 1000,
ExporterAddress: net.ParseIP("192.0.2.142"),
ExporterName: "192_0_2_142",
InIf: 100,
OutIf: 200,
InIfName: "Gi0/0/100",
OutIfName: "Gi0/0/200",
InIfDescription: "Interface 100",
OutIfDescription: "Interface 200",
InIfSpeed: 1000,
OutIfSpeed: 1000,
SrcAddr: net.ParseIP("192.0.2.142").To16(),
DstAddr: net.ParseIP("192.0.2.10").To16(),
SrcAS: 1299,
DstAS: 174,
DstASPath: []uint32{64200, 1299, 174},
DstCommunities: []uint32{100, 200, 400},
DstLargeCommunities: &decoder.FlowMessage_LargeCommunities{
ASN: []uint32{64200}, LocalData1: []uint32{2}, LocalData2: []uint32{3},
OutputFlow: &schema.FlowMessage{
SamplingRate: 1000,
ExporterAddress: netip.MustParseAddr("::ffff:192.0.2.142"),
SrcAddr: netip.MustParseAddr("::ffff:192.0.2.142"),
DstAddr: netip.MustParseAddr("::ffff:192.0.2.10"),
SrcAS: 1299,
DstAS: 174,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnExporterName: "192_0_2_142",
schema.ColumnInIfName: "Gi0/0/100",
schema.ColumnOutIfName: "Gi0/0/200",
schema.ColumnInIfDescription: "Interface 100",
schema.ColumnOutIfDescription: "Interface 200",
schema.ColumnInIfSpeed: 1000,
schema.ColumnOutIfSpeed: 1000,
schema.ColumnDstASPath: []uint32{64200, 1299, 174},
schema.ColumnDstCommunities: []uint32{100, 200, 400},
schema.ColumnDstLargeCommunitiesASN: []int32{64200},
schema.ColumnDstLargeCommunitiesLocalData1: []int32{2},
schema.ColumnDstLargeCommunitiesLocalData2: []int32{3},
},
},
},
@@ -382,16 +383,13 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
kafkaProducer.ExpectInputWithMessageCheckerFunctionAndSucceed(
func(msg *sarama.ProducerMessage) error {
defer close(received)
got := flow.Message{}
b, err := msg.Value.Encode()
if err != nil {
t.Fatalf("Kafka message encoding error:\n%+v", err)
}
if err = got.DecodeMessage(b); err != nil {
t.Fatalf("DecodeMessage() error:\n%+v", err)
}
if diff := helpers.Diff(&got, tc.OutputFlow); diff != "" {
t.Logf("Raw message: %v", b)
got := schema.Flows.ProtobufDecode(t, b)
if diff := helpers.Diff(&got, tc.OutputFlow, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Errorf("Classifier (-got, +want):\n%s", diff)
}
return nil
@@ -429,6 +427,7 @@ func TestGetASNumber(t *testing.T) {
}{
// 1
{"1.0.0.1", 12322, 0, []ASNProvider{ProviderFlow}, 12322},
{"::ffff:1.0.0.1", 12322, 0, []ASNProvider{ProviderFlow}, 12322},
{"1.0.0.1", 65536, 0, []ASNProvider{ProviderFlow}, 65536},
{"1.0.0.1", 65536, 0, []ASNProvider{ProviderFlowExceptPrivate}, 0},
{"1.0.0.1", 4_200_000_121, 0, []ASNProvider{ProviderFlowExceptPrivate}, 0},
@@ -466,7 +465,7 @@ func TestGetASNumber(t *testing.T) {
if err != nil {
t.Fatalf("New() error:\n%+v", err)
}
got := c.getASNumber(net.ParseIP(tc.Addr), tc.FlowAS, tc.BMPAS)
got := c.getASNumber(netip.MustParseAddr(tc.Addr), tc.FlowAS, tc.BMPAS)
if diff := helpers.Diff(got, tc.Expected); diff != "" {
t.Fatalf("getASNumber() (-got, +want):\n%s", diff)
}

View File

@@ -53,13 +53,6 @@ func (c *Component) FlowsHTTPHandler(gc *gin.Context) {
gc.JSON(http.StatusOK, msg)
gc.Writer.Write([]byte("\n"))
}
case "application/x-protobuf":
buf, err := msg.EncodeMessage()
if err != nil {
continue
}
gc.Set("Content-Type", format)
gc.Writer.Write(buf)
}
count++

View File

@@ -6,8 +6,6 @@ package core
import (
"fmt"
"net"
"net/netip"
"sync/atomic"
"time"
@@ -17,6 +15,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/bmp"
"akvorado/inlet/flow"
"akvorado/inlet/geoip"
@@ -35,7 +34,7 @@ type Component struct {
healthy chan reporter.ChannelHealthcheckFunc
httpFlowClients uint32 // for dumping flows
httpFlowChannel chan *flow.Message
httpFlowChannel chan *schema.FlowMessage
httpFlowFlushDelay time.Duration
classifierExporterCache *zcache.Cache[exporterInfo, exporterClassification]
@@ -63,7 +62,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
healthy: make(chan reporter.ChannelHealthcheckFunc),
httpFlowClients: 0,
httpFlowChannel: make(chan *flow.Message, 10),
httpFlowChannel: make(chan *schema.FlowMessage, 10),
httpFlowFlushDelay: time.Second,
classifierExporterCache: zcache.New[exporterInfo, exporterClassification](configuration.ClassifierCacheDuration, 2*configuration.ClassifierCacheDuration),
@@ -94,7 +93,6 @@ func (c *Component) Start() error {
func (c *Component) runWorker(workerID int) error {
c.r.Debug().Int("worker", workerID).Msg("starting core worker")
errLogger := c.r.Sample(reporter.BurstSampler(time.Minute, 10))
for {
select {
case <-c.t.Dying():
@@ -111,22 +109,17 @@ func (c *Component) runWorker(workerID int) error {
}
start := time.Now()
exporter := net.IP(flow.ExporterAddress).String()
exporter := flow.ExporterAddress.Unmap().String()
c.metrics.flowsReceived.WithLabelValues(exporter).Inc()
// Enrichment
ip, _ := netip.AddrFromSlice(flow.ExporterAddress)
ip := flow.ExporterAddress
if skip := c.enrichFlow(ip, exporter, flow); skip {
continue
}
// Serialize flow to Protobuf
buf, err := flow.EncodeMessage()
if err != nil {
errLogger.Err(err).Str("exporter", exporter).Msg("unable to serialize flow")
c.metrics.flowsErrors.WithLabelValues(exporter, err.Error()).Inc()
continue
}
buf := schema.Flows.ProtobufMarshal(flow)
c.metrics.flowsProcessingTime.Observe(time.Now().Sub(start).Seconds())
// Forward to Kafka. This could block and buf is now owned by the

View File

@@ -1,6 +1,8 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
//go:build none
package core
import (
@@ -9,7 +11,6 @@ import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net"
netHTTP "net/http"
"testing"
@@ -17,15 +18,14 @@ import (
"github.com/Shopify/sarama"
"github.com/gin-gonic/gin"
"github.com/golang/protobuf/proto"
"akvorado/common/daemon"
"akvorado/common/helpers"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/bmp"
"akvorado/inlet/flow"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/geoip"
"akvorado/inlet/kafka"
"akvorado/inlet/snmp"
@@ -59,8 +59,8 @@ func TestCore(t *testing.T) {
}
helpers.StartStop(t, c)
flowMessage := func(exporter string, in, out uint32) *flow.Message {
return &flow.Message{
flowMessage := func(exporter string, in, out uint32) *schema.BasicFlow {
return &schema.BasicFlow{
TimeReceived: 200,
SequenceNum: 1000,
SamplingRate: 1000,
@@ -132,12 +132,12 @@ func TestCore(t *testing.T) {
received := make(chan bool)
kafkaProducer.ExpectInputWithMessageCheckerFunctionAndSucceed(func(msg *sarama.ProducerMessage) error {
defer close(received)
expectedTopic := fmt.Sprintf("flows-v%d", flow.CurrentSchemaVersion)
expectedTopic := fmt.Sprintf("flows-%s", schema.Flows.ProtobufMessageHash())
if msg.Topic != expectedTopic {
t.Errorf("Kafka message topic (-got, +want):\n-%s\n+%s", msg.Topic, expectedTopic)
}
got := flow.Message{}
got := &schema.BasicFlow{}
b, err := msg.Value.Encode()
if err != nil {
t.Fatalf("Kafka message encoding error:\n%+v", err)
@@ -326,40 +326,4 @@ func TestCore(t *testing.T) {
t.Fatalf("GET /api/v0/inlet/flows got less than 4 flows (%d)", count)
}
})
// Test HTTP flow clients using Protobuf
t.Run("http flows", func(t *testing.T) {
c.httpFlowFlushDelay = 20 * time.Millisecond
client := netHTTP.Client{}
req, err := netHTTP.NewRequest("GET", fmt.Sprintf("http://%s/api/v0/inlet/flows?limit=1", c.d.HTTP.LocalAddr()), nil)
if err != nil {
t.Fatalf("GET /api/v0/inlet/flows:\n%+v", err)
}
req.Header.Set("Accept", "application/x-protobuf")
resp, err := client.Do(req)
if err != nil {
t.Fatalf("GET /api/v0/inlet/flows:\n%+v", err)
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
t.Fatalf("GET /api/v0/inlet/flows status code %d", resp.StatusCode)
}
// Produce one flow
kafkaProducer.ExpectInputAndSucceed()
flowComponent.Inject(t, flowMessage("192.0.2.142", 434, 677))
// Decode it
raw, err := ioutil.ReadAll(resp.Body)
if err != nil {
t.Fatalf("ReadAll() error:\n%+v", err)
}
var flow decoder.FlowMessage
buf := proto.NewBuffer(raw)
if err := buf.DecodeMessage(&flow); err != nil {
t.Fatalf("DecodeMessage() error:\n%+v", err)
}
})
}
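Note: the test above now expects the Kafka topic suffix to come from schema.Flows.ProtobufMessageHash() instead of a fixed schema version, presumably so that any schema change lands on a distinct topic. A hypothetical illustration of that idea follows; the real hash function, its input, and the truncation length are all assumptions, as ProtobufMessageHash itself is not part of this diff.

```
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// topicFor derives a topic name from a hash of the schema definition so that
// a changed schema produces a new topic. Purely illustrative.
func topicFor(schemaDefinition string) string {
	sum := sha256.Sum256([]byte(schemaDefinition))
	return fmt.Sprintf("flows-%s", hex.EncodeToString(sum[:])[:8])
}

func main() {
	fmt.Println(topicFor("message FlowMessage { ... }"))
}
```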

View File

@@ -4,16 +4,15 @@
package flow
import (
"net/netip"
"time"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/decoder/netflow"
"akvorado/inlet/flow/decoder/sflow"
)
// Message describes a decoded flow message.
type Message = decoder.FlowMessage
type wrappedDecoder struct {
c *Component
orig decoder.Decoder
@@ -21,7 +20,13 @@ type wrappedDecoder struct {
}
// Decode decodes a flow while keeping some stats.
func (wd *wrappedDecoder) Decode(in decoder.RawFlow) []*Message {
func (wd *wrappedDecoder) Decode(in decoder.RawFlow) []*schema.FlowMessage {
defer func() {
if r := recover(); r != nil {
wd.c.metrics.decoderErrors.WithLabelValues(wd.orig.Name()).
Inc()
}
}()
timeTrackStart := time.Now()
decoded := wd.orig.Decode(in)
timeTrackStop := time.Now()
@@ -33,8 +38,9 @@ func (wd *wrappedDecoder) Decode(in decoder.RawFlow) []*Message {
}
if wd.useSrcAddrForExporterAddr {
exporterAddress, _ := netip.AddrFromSlice(in.Source.To16())
for _, f := range decoded {
f.ExporterAddress = in.Source.To16()
f.ExporterAddress = exporterAddress
}
}
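Note: the wrapped decoder above now guards the underlying Decode call with a deferred recover() that increments the decoder-errors counter, so a malformed packet that panics a decoder no longer propagates the panic. A small generic sketch of that pattern, with a plain int standing in for the real metric:

```
package main

import "fmt"

// safeDecode runs decode and converts a panic into a counted error.
func safeDecode(errors *int, decode func() []string) (out []string) {
	defer func() {
		if r := recover(); r != nil {
			*errors++ // account for the panic instead of crashing the caller
		}
	}()
	return decode()
}

func main() {
	var errs int
	out := safeDecode(&errs, func() []string { panic("truncated packet") })
	fmt.Println(out == nil, errs) // true 1
}
```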

View File

@@ -1,43 +0,0 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package decoder
import (
"bytes"
"encoding/json"
"net"
)
type rawFlowMessage FlowMessage
type prettierFlowMessage struct {
rawFlowMessage
PrettierSrcAddr string `json:"SrcAddr,omitempty"`
PrettierDstAddr string `json:"DstAddr,omitempty"`
PrettierExporterAddress string `json:"ExporterAddress,omitempty"`
PrettierInIfBoundary string `json:"InIfBoundary,omitempty"`
PrettierOutIfBoundary string `json:"OutIfBoundary,omitempty"`
}
// MarshalJSON marshals a flow message to JSON. It uses a textual
// format for IP addresses. This is expected to be used for debug
// purpose only.
func (fm FlowMessage) MarshalJSON() ([]byte, error) {
prettier := prettierFlowMessage{
rawFlowMessage: rawFlowMessage(fm),
PrettierSrcAddr: net.IP(fm.SrcAddr).String(),
PrettierDstAddr: net.IP(fm.DstAddr).String(),
PrettierExporterAddress: net.IP(fm.ExporterAddress).String(),
PrettierInIfBoundary: fm.InIfBoundary.String(),
PrettierOutIfBoundary: fm.OutIfBoundary.String(),
}
prettier.SrcAddr = nil
prettier.DstAddr = nil
prettier.ExporterAddress = nil
buf := bytes.NewBuffer([]byte{})
encoder := json.NewEncoder(buf)
if err := encoder.Encode(&prettier); err != nil {
return nil, err
}
return buf.Bytes(), nil
}

View File

@@ -1,72 +0,0 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package decoder
import (
"bytes"
"encoding/json"
"net"
"strings"
"testing"
"akvorado/common/helpers"
)
func TestJSONEncoding(t *testing.T) {
flow := &FlowMessage{
TimeReceived: 200,
SequenceNum: 1000,
SamplingRate: 1000,
FlowDirection: 1,
ExporterAddress: net.ParseIP("192.0.2.42"),
TimeFlowStart: 100,
TimeFlowEnd: 200,
Bytes: 6765,
Packets: 4,
InIf: 300,
OutIf: 200,
SrcAddr: net.ParseIP("67.43.156.77"),
DstAddr: net.ParseIP("2.125.160.216"),
Etype: 0x800,
Proto: 6,
SrcPort: 8534,
DstPort: 80,
InIfProvider: "Telia",
InIfBoundary: FlowMessage_EXTERNAL,
OutIfBoundary: FlowMessage_INTERNAL,
}
buf := bytes.NewBuffer([]byte{})
encoder := json.NewEncoder(buf)
encoder.SetIndent("", " ")
if err := encoder.Encode(flow); err != nil {
t.Fatalf("Encode() error:\n%+v", err)
}
got := strings.Split(buf.String(), "\n")
expected := strings.Split(`{
"TimeReceived": 200,
"SequenceNum": 1000,
"SamplingRate": 1000,
"FlowDirection": 1,
"TimeFlowStart": 100,
"TimeFlowEnd": 200,
"Bytes": 6765,
"Packets": 4,
"Etype": 2048,
"Proto": 6,
"SrcPort": 8534,
"DstPort": 80,
"InIf": 300,
"OutIf": 200,
"InIfProvider": "Telia",
"SrcAddr": "67.43.156.77",
"DstAddr": "2.125.160.216",
"ExporterAddress": "192.0.2.42",
"InIfBoundary": "EXTERNAL",
"OutIfBoundary": "INTERNAL"
}
`, "\n")
if diff := helpers.Diff(got, expected); diff != "" {
t.Errorf("Encode() (-got, +want):\n%s", diff)
}
}

View File

@@ -1,69 +0,0 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package decoder
import (
"net"
goflowmessage "github.com/netsampler/goflow2/pb"
)
// ConvertGoflowToFlowMessage a flow message from goflow2 to our own
// format.
func ConvertGoflowToFlowMessage(input *goflowmessage.FlowMessage) *FlowMessage {
result := FlowMessage{
TimeReceived: input.TimeReceived,
SequenceNum: input.SequenceNum,
SamplingRate: input.SamplingRate,
FlowDirection: input.FlowDirection,
ExporterAddress: ipCopy(input.SamplerAddress),
TimeFlowStart: input.TimeFlowStart,
TimeFlowEnd: input.TimeFlowEnd,
Bytes: input.Bytes,
Packets: input.Packets,
SrcAddr: ipCopy(input.SrcAddr),
DstAddr: ipCopy(input.DstAddr),
Etype: input.Etype,
Proto: input.Proto,
SrcPort: input.SrcPort,
DstPort: input.DstPort,
InIf: input.InIf,
OutIf: input.OutIf,
IPTos: input.IPTos,
ForwardingStatus: input.ForwardingStatus,
IPTTL: input.IPTTL,
TCPFlags: input.TCPFlags,
IcmpType: input.IcmpType,
IcmpCode: input.IcmpCode,
IPv6FlowLabel: input.IPv6FlowLabel,
FragmentId: input.FragmentId,
FragmentOffset: input.FragmentOffset,
BiFlowDirection: input.BiFlowDirection,
SrcAS: input.SrcAS,
DstAS: input.DstAS,
SrcNetMask: input.SrcNet,
DstNetMask: input.DstNet,
NextHopAS: input.NextHopAS,
}
if !net.IP(input.BgpNextHop).IsUnspecified() {
result.NextHop = ipCopy(input.BgpNextHop)
} else {
result.NextHop = ipCopy(input.NextHop)
}
return &result
}
// Ensure we copy the IP address. This is similar to To16(), except
// that when we get an IPv6, we return a copy.
func ipCopy(src net.IP) net.IP {
if len(src) == 4 {
return net.IPv4(src[0], src[1], src[2], src[3])
}
if len(src) == 16 {
dst := make(net.IP, len(src))
copy(dst, src)
return dst
}
return nil
}

View File

@@ -0,0 +1,160 @@
// SPDX-FileCopyrightText: 2023 Free Mobile
// SPDX-FileCopyrightText: 2021 NetSampler
// SPDX-License-Identifier: AGPL-3.0-only AND BSD-3-Clause
package netflow
import (
"encoding/binary"
"net/netip"
"akvorado/common/helpers"
"akvorado/common/schema"
"github.com/netsampler/goflow2/decoders/netflow"
"github.com/netsampler/goflow2/producer"
)
func decode(msgDec interface{}, samplingRateSys producer.SamplingRateSystem) []*schema.FlowMessage {
flowMessageSet := []*schema.FlowMessage{}
var obsDomainID uint32
var dataFlowSet []netflow.DataFlowSet
var optionsDataFlowSet []netflow.OptionsDataFlowSet
var version int
switch msgDecConv := msgDec.(type) {
case netflow.NFv9Packet:
dataFlowSet, _, _, optionsDataFlowSet = producer.SplitNetFlowSets(msgDecConv)
obsDomainID = msgDecConv.SourceId
version = 9
case netflow.IPFIXPacket:
dataFlowSet, _, _, optionsDataFlowSet = producer.SplitIPFIXSets(msgDecConv)
obsDomainID = msgDecConv.ObservationDomainId
version = 10
default:
return nil
}
// Get sampling rate
samplingRate, found := producer.SearchNetFlowOptionDataSets(optionsDataFlowSet)
if samplingRateSys != nil {
if found {
samplingRateSys.AddSamplingRate(10, obsDomainID, samplingRate)
} else {
samplingRate, _ = samplingRateSys.GetSamplingRate(10, obsDomainID)
}
}
// Parse fields
for _, dataFlowSetItem := range dataFlowSet {
for _, record := range dataFlowSetItem.Records {
flow := decodeRecord(version, record.Values)
if flow != nil {
flow.SamplingRate = samplingRate
flowMessageSet = append(flowMessageSet, flow)
}
}
}
return flowMessageSet
}
func decodeRecord(version int, fields []netflow.DataField) *schema.FlowMessage {
var etype uint16
bf := &schema.FlowMessage{}
for _, field := range fields {
v, ok := field.Value.([]byte)
if !ok {
continue
}
if field.PenProvided {
continue
}
switch field.Type {
// Statistics
case netflow.NFV9_FIELD_IN_BYTES, netflow.NFV9_FIELD_OUT_BYTES:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnBytes, decodeUNumber(v))
case netflow.NFV9_FIELD_IN_PKTS, netflow.NFV9_FIELD_OUT_PKTS:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnPackets, decodeUNumber(v))
// L3
case netflow.NFV9_FIELD_IPV4_SRC_ADDR:
etype = helpers.ETypeIPv4
bf.SrcAddr = decodeIP(v)
case netflow.NFV9_FIELD_IPV4_DST_ADDR:
etype = helpers.ETypeIPv4
bf.DstAddr = decodeIP(v)
case netflow.NFV9_FIELD_IPV6_SRC_ADDR:
etype = helpers.ETypeIPv6
bf.SrcAddr = decodeIP(v)
case netflow.NFV9_FIELD_IPV6_DST_ADDR:
etype = helpers.ETypeIPv6
bf.DstAddr = decodeIP(v)
case netflow.NFV9_FIELD_SRC_MASK, netflow.NFV9_FIELD_IPV6_SRC_MASK:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcNetMask, decodeUNumber(v))
case netflow.NFV9_FIELD_DST_MASK, netflow.NFV9_FIELD_IPV6_DST_MASK:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstNetMask, decodeUNumber(v))
case netflow.NFV9_FIELD_IPV4_NEXT_HOP, netflow.NFV9_FIELD_BGP_IPV4_NEXT_HOP, netflow.NFV9_FIELD_IPV6_NEXT_HOP, netflow.NFV9_FIELD_BGP_IPV6_NEXT_HOP:
bf.NextHop = decodeIP(v)
// L4
case netflow.NFV9_FIELD_L4_SRC_PORT:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcPort, decodeUNumber(v))
case netflow.NFV9_FIELD_L4_DST_PORT:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstPort, decodeUNumber(v))
case netflow.NFV9_FIELD_PROTOCOL:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnProto, decodeUNumber(v))
// Network
case netflow.NFV9_FIELD_SRC_AS:
bf.SrcAS = uint32(decodeUNumber(v))
case netflow.NFV9_FIELD_DST_AS:
bf.DstAS = uint32(decodeUNumber(v))
// Interfaces
case netflow.NFV9_FIELD_INPUT_SNMP:
bf.InIf = uint32(decodeUNumber(v))
case netflow.NFV9_FIELD_OUTPUT_SNMP:
bf.OutIf = uint32(decodeUNumber(v))
// Remaining
case netflow.NFV9_FIELD_FORWARDING_STATUS:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnForwardingStatus, decodeUNumber(v))
}
}
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnEType, uint64(etype))
return bf
}
func decodeUNumber(b []byte) uint64 {
var o uint64
l := len(b)
switch l {
case 1:
o = uint64(b[0])
case 2:
o = uint64(binary.BigEndian.Uint16(b))
case 4:
o = uint64(binary.BigEndian.Uint32(b))
case 8:
o = binary.BigEndian.Uint64(b)
default:
if l < 8 {
var iter uint
for i := range b {
o |= uint64(b[i]) << uint(8*(uint(l)-iter-1))
iter++
}
} else {
return 0
}
}
return o
}
func decodeIP(b []byte) netip.Addr {
if ip, ok := netip.AddrFromSlice(b); ok {
return netip.AddrFrom16(ip.As16())
}
return netip.Addr{}
}
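Note: decodeIP above widens every address to its 16-byte form, which is why IPv4 addresses appear as "::ffff:..." in the expected flows; the core worker shown earlier calls Unmap() to get the dotted form back for metric labels. A quick standalone check of that behaviour:

```
package main

import (
	"fmt"
	"net/netip"
)

func main() {
	ip, _ := netip.AddrFromSlice([]byte{127, 0, 0, 1})
	mapped := netip.AddrFrom16(ip.As16()) // same normalization as decodeIP
	fmt.Println(mapped)                   // ::ffff:127.0.0.1
	fmt.Println(mapped.Unmap())           // 127.0.0.1
}
```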

View File

@@ -6,6 +6,7 @@ package netflow
import (
"bytes"
"net/netip"
"strconv"
"sync"
@@ -13,6 +14,7 @@ import (
"github.com/netsampler/goflow2/producer"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -20,18 +22,16 @@ import (
type Decoder struct {
r *reporter.Reporter
// Templates and sampling
templatesLock sync.RWMutex
templates map[string]*templateSystem
samplingLock sync.RWMutex
sampling map[string]producer.SamplingRateSystem
// Templates and sampling systems
systemsLock sync.RWMutex
templates map[string]*templateSystem
sampling map[string]producer.SamplingRateSystem
metrics struct {
errors *reporter.CounterVec
stats *reporter.CounterVec
setRecordsStatsSum *reporter.CounterVec
setStatsSum *reporter.CounterVec
timeStatsSum *reporter.SummaryVec
templatesStats *reporter.CounterVec
}
}
@@ -72,14 +72,6 @@ func New(r *reporter.Reporter) decoder.Decoder {
},
[]string{"exporter", "version", "type"},
)
nd.metrics.timeStatsSum = nd.r.SummaryVec(
reporter.SummaryOpts{
Name: "delay_summary_seconds",
Help: "Netflows time difference between time of flow and processing.",
Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
},
[]string{"exporter", "version"},
)
nd.metrics.templatesStats = nd.r.CounterVec(
reporter.CounterOpts{
Name: "templates_count",
@@ -130,29 +122,27 @@ func (s *templateSystem) GetTemplate(version uint16, obsDomainID uint32, templat
}
// Decode decodes a Netflow payload.
func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
func (nd *Decoder) Decode(in decoder.RawFlow) []*schema.FlowMessage {
key := in.Source.String()
nd.templatesLock.RLock()
templates, ok := nd.templates[key]
nd.templatesLock.RUnlock()
if !ok {
nd.systemsLock.RLock()
templates, tok := nd.templates[key]
sampling, sok := nd.sampling[key]
nd.systemsLock.RUnlock()
if !tok {
templates = &templateSystem{
nd: nd,
templates: netflow.CreateTemplateSystem(),
key: key,
}
nd.templatesLock.Lock()
nd.systemsLock.Lock()
nd.templates[key] = templates
nd.templatesLock.Unlock()
nd.systemsLock.Unlock()
}
nd.samplingLock.RLock()
sampling, ok := nd.sampling[key]
nd.samplingLock.RUnlock()
if !ok {
if !sok {
sampling = producer.CreateSamplingSystem()
nd.samplingLock.Lock()
nd.systemsLock.Lock()
nd.sampling[key] = sampling
nd.samplingLock.Unlock()
nd.systemsLock.Unlock()
}
ts := uint64(in.TimeReceived.UTC().Unix())
@@ -218,21 +208,14 @@ func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
}
}
flowMessageSet, _ := producer.ProcessMessageNetFlow(msgDec, sampling)
flowMessageSet := decode(msgDec, sampling)
exporterAddress, _ := netip.AddrFromSlice(in.Source.To16())
for _, fmsg := range flowMessageSet {
fmsg.TimeReceived = ts
fmsg.SamplerAddress = in.Source
timeDiff := fmsg.TimeReceived - fmsg.TimeFlowEnd
nd.metrics.timeStatsSum.WithLabelValues(key, version).
Observe(float64(timeDiff))
fmsg.ExporterAddress = exporterAddress
}
results := make([]*decoder.FlowMessage, len(flowMessageSet))
for idx, fmsg := range flowMessageSet {
results[idx] = decoder.ConvertGoflowToFlowMessage(fmsg)
}
return results
return flowMessageSet
}
// Name returns the name of the decoder.
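Note: the template and sampling maps above are now guarded by a single systemsLock, read-locked on the fast path and write-locked only to insert a missing entry; as in the original code, two goroutines may occasionally both create an entry, with the later insert winning. A compact sketch of that read-then-insert pattern:

```
package main

import (
	"fmt"
	"sync"
)

type registry struct {
	mu    sync.RWMutex
	items map[string]*int
}

// get returns the entry for key, creating it under the write lock if missing.
func (r *registry) get(key string) *int {
	r.mu.RLock()
	item, ok := r.items[key]
	r.mu.RUnlock()
	if !ok {
		item = new(int)
		r.mu.Lock()
		r.items[key] = item
		r.mu.Unlock()
	}
	return item
}

func main() {
	r := &registry{items: map[string]*int{}}
	fmt.Println(r.get("127.0.0.1") != nil) // true
}
```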

View File

@@ -4,12 +4,16 @@
package netflow
import (
"fmt"
"net"
"net/netip"
"path/filepath"
"reflect"
"testing"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -96,98 +100,90 @@ func TestDecode(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 44797001,
ExporterAddress: net.ParseIP("127.0.0.1").To16(),
SamplingRate: 30000,
TimeFlowStart: 1647285926,
TimeFlowEnd: 1647285926,
Bytes: 1500,
Packets: 1,
SrcAddr: net.ParseIP("198.38.121.178").To16(),
DstAddr: net.ParseIP("91.170.143.87").To16(),
SrcNetMask: 24,
DstNetMask: 14,
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 19624,
InIf: 335,
OutIf: 450,
ForwardingStatus: 64,
TCPFlags: 16,
NextHop: net.ParseIP("194.149.174.63").To16(),
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:198.38.121.178"),
DstAddr: netip.MustParseAddr("::ffff:91.170.143.87"),
NextHop: netip.MustParseAddr("::ffff:194.149.174.63"),
InIf: 335,
OutIf: 450,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1500,
schema.ColumnPackets: 1,
schema.ColumnSrcNetMask: 24,
schema.ColumnDstNetMask: 14,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 19624,
schema.ColumnForwardingStatus: 64,
},
}, {
SequenceNum: 44797001,
ExporterAddress: net.ParseIP("127.0.0.1").To16(),
SamplingRate: 30000,
TimeFlowStart: 1647285926,
TimeFlowEnd: 1647285926,
Bytes: 1500,
Packets: 1,
SrcAddr: net.ParseIP("198.38.121.219").To16(),
DstAddr: net.ParseIP("88.122.57.97").To16(),
SrcNetMask: 24,
DstNetMask: 14,
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 2444,
InIf: 335,
OutIf: 452,
ForwardingStatus: 64,
TCPFlags: 16,
NextHop: net.ParseIP("194.149.174.71").To16(),
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:198.38.121.219"),
DstAddr: netip.MustParseAddr("::ffff:88.122.57.97"),
InIf: 335,
OutIf: 452,
NextHop: netip.MustParseAddr("::ffff:194.149.174.71"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1500,
schema.ColumnPackets: 1,
schema.ColumnSrcNetMask: 24,
schema.ColumnDstNetMask: 14,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 2444,
schema.ColumnForwardingStatus: 64,
},
}, {
SequenceNum: 44797001,
ExporterAddress: net.ParseIP("127.0.0.1").To16(),
SamplingRate: 30000,
TimeFlowStart: 1647285926,
TimeFlowEnd: 1647285926,
Bytes: 1400,
Packets: 1,
SrcAddr: net.ParseIP("173.194.190.106").To16(),
DstAddr: net.ParseIP("37.165.129.20").To16(),
SrcNetMask: 20,
DstNetMask: 18,
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 53697,
InIf: 461,
OutIf: 306,
ForwardingStatus: 64,
TCPFlags: 16,
NextHop: net.ParseIP("252.223.0.0").To16(),
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:173.194.190.106"),
DstAddr: netip.MustParseAddr("::ffff:37.165.129.20"),
InIf: 461,
OutIf: 306,
NextHop: netip.MustParseAddr("::ffff:252.223.0.0"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1400,
schema.ColumnPackets: 1,
schema.ColumnSrcNetMask: 20,
schema.ColumnDstNetMask: 18,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 53697,
schema.ColumnForwardingStatus: 64,
},
}, {
SequenceNum: 44797001,
ExporterAddress: net.ParseIP("127.0.0.1").To16(),
SamplingRate: 30000,
TimeFlowStart: 1647285926,
TimeFlowEnd: 1647285926,
Bytes: 1448,
Packets: 1,
SrcAddr: net.ParseIP("74.125.100.234").To16(),
DstAddr: net.ParseIP("88.120.219.117").To16(),
SrcNetMask: 16,
DstNetMask: 14,
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 52300,
InIf: 461,
OutIf: 451,
ForwardingStatus: 64,
TCPFlags: 16,
NextHop: net.ParseIP("194.149.174.61").To16(),
SamplingRate: 30000,
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
SrcAddr: netip.MustParseAddr("::ffff:74.125.100.234"),
DstAddr: netip.MustParseAddr("::ffff:88.120.219.117"),
NextHop: netip.MustParseAddr("::ffff:194.149.174.61"),
InIf: 461,
OutIf: 451,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1448,
schema.ColumnPackets: 1,
schema.ColumnSrcNetMask: 16,
schema.ColumnDstNetMask: 14,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 52300,
schema.ColumnForwardingStatus: 64,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}
gotMetrics = r.GetMetrics(

View File

@@ -1,26 +0,0 @@
// SPDX-FileCopyrightText: 2023 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package decoder
import (
"fmt"
"google.golang.org/protobuf/encoding/protowire"
)
// EncodeMessage will marshal a protobuf message using the length-prefixed
// representation.
func (m *FlowMessage) EncodeMessage() ([]byte, error) {
messageSize := m.SizeVT()
prefixSize := protowire.SizeVarint(uint64(messageSize))
totalSize := prefixSize + messageSize
buf := make([]byte, 0, totalSize)
buf = protowire.AppendVarint(buf, uint64(messageSize))
buf = buf[:totalSize]
n, err := m.MarshalToSizedBufferVT(buf[prefixSize:])
if n != messageSize {
return buf, fmt.Errorf("incorrect size for proto buffer (%d vs %d)", n, messageSize)
}
return buf, err
}
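Note: the removed EncodeMessage above produced a length-prefixed protobuf: a varint carrying the message size, followed by the marshaled message. A minimal sketch of that framing; whether the schema-based ProtobufMarshal that replaces it keeps the same framing is not visible in this diff.

```
package main

import (
	"fmt"

	"google.golang.org/protobuf/encoding/protowire"
)

// lengthPrefix frames an already-marshaled protobuf message with its size as a varint.
func lengthPrefix(msg []byte) []byte {
	out := protowire.AppendVarint(nil, uint64(len(msg)))
	return append(out, msg...)
}

func main() {
	framed := lengthPrefix([]byte{0x08, 0x2a}) // field 1, varint 42
	fmt.Printf("%% x\n", framed)               // 02 08 2a
}
```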

View File

@@ -1,79 +0,0 @@
// SPDX-FileCopyrightText: 2023 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package decoder_test
import (
"testing"
"github.com/golang/protobuf/proto"
"akvorado/common/helpers"
"akvorado/inlet/flow/decoder"
)
func TestProtoMarshalEmpty(t *testing.T) {
flow := decoder.FlowMessage{}
buf, err := flow.EncodeMessage()
if err != nil {
t.Fatalf("MarshalProto() error:\n%+v", err)
}
got := decoder.FlowMessage{}
if err := got.DecodeMessage(buf); err != nil {
t.Fatalf("DecodeMessage() error:\n%+v", err)
}
if diff := helpers.Diff(got, flow); diff != "" {
t.Fatalf("MarshalProto() (-got, +want):\n%s", diff)
}
}
func TestProtoMarshal(t *testing.T) {
flow := decoder.FlowMessage{
TimeReceived: 16999,
SrcCountry: "FR",
DstCountry: "US",
}
buf, err := flow.EncodeMessage()
if err != nil {
t.Fatalf("MarshalProto() error:\n%+v", err)
}
got := decoder.FlowMessage{}
if err := got.DecodeMessage(buf); err != nil {
t.Fatalf("DecodeMessage() error:\n%+v", err)
}
if diff := helpers.Diff(got, flow); diff != "" {
t.Fatalf("MarshalProto() (-got, +want):\n%s", diff)
}
}
func TestProtoMarshalBufferSizes(t *testing.T) {
for cap := 0; cap < 100; cap++ {
for len := 0; len <= cap; len++ {
buf := make([]byte, len, cap)
flow := decoder.FlowMessage{
TimeReceived: 16999,
SrcCountry: "FR",
DstCountry: "US",
}
buf, err := flow.EncodeMessage()
if err != nil {
t.Fatalf("MarshalProto() error:\n%+v", err)
}
got := decoder.FlowMessage{}
pbuf := proto.NewBuffer(buf)
err = pbuf.DecodeMessage(&got)
if err != nil {
t.Fatalf("DecodeMessage() error:\n%+v", err)
}
if diff := helpers.Diff(got, flow); diff != "" {
t.Fatalf("MarshalProto() (-got, +want):\n%s", diff)
}
}
}
}

View File

@@ -10,6 +10,7 @@ import (
"time"
"akvorado/common/reporter"
"akvorado/common/schema"
)
// Decoder is the interface each decoder should implement.
@@ -17,7 +18,7 @@ type Decoder interface {
// Decoder takes a raw flow and returns a
// slice of flow messages. Returning nil means there was an
// error during decoding.
Decode(in RawFlow) []*FlowMessage
Decode(in RawFlow) []*schema.FlowMessage
// Name returns the decoder name
Name() string

View File

@@ -0,0 +1,191 @@
// SPDX-FileCopyrightText: 2023 Free Mobile
// SPDX-FileCopyrightText: 2021 NetSampler
// SPDX-License-Identifier: AGPL-3.0-only AND BSD-3-Clause
package sflow
import (
"encoding/binary"
"net/netip"
"akvorado/common/helpers"
"akvorado/common/schema"
"github.com/netsampler/goflow2/decoders/sflow"
)
func decode(msgDec interface{}) []*schema.FlowMessage {
flowMessageSet := []*schema.FlowMessage{}
switch msgDec.(type) {
case sflow.Packet:
default:
return nil
}
packet := msgDec.(sflow.Packet)
for _, flowSample := range packet.Samples {
var records []sflow.FlowRecord
bf := &schema.FlowMessage{}
forwardingStatus := 0
switch flowSample := flowSample.(type) {
case sflow.FlowSample:
records = flowSample.Records
bf.SamplingRate = flowSample.SamplingRate
bf.InIf = flowSample.Input
bf.OutIf = flowSample.Output
if bf.OutIf&interfaceOutMask == interfaceOutDiscard {
bf.OutIf = 0
forwardingStatus = 128
} else if bf.OutIf&interfaceOutMask == interfaceOutMultiple {
bf.OutIf = 0
}
case sflow.ExpandedFlowSample:
records = flowSample.Records
bf.SamplingRate = flowSample.SamplingRate
bf.InIf = flowSample.InputIfValue
bf.OutIf = flowSample.OutputIfValue
}
if bf.InIf == interfaceLocal {
bf.InIf = 0
}
if bf.OutIf == interfaceLocal {
bf.OutIf = 0
}
bf.ExporterAddress = decodeIP(packet.AgentIP)
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnPackets, 1)
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnForwardingStatus, uint64(forwardingStatus))
for _, record := range records {
switch recordData := record.Data.(type) {
case sflow.SampledHeader:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnBytes, uint64(recordData.FrameLength))
parseSampledHeader(bf, &recordData)
case sflow.SampledIPv4:
bf.SrcAddr = decodeIP(recordData.Base.SrcIP)
bf.DstAddr = decodeIP(recordData.Base.DstIP)
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnBytes, uint64(recordData.Base.Length))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnProto, uint64(recordData.Base.Protocol))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcPort, uint64(recordData.Base.SrcPort))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstPort, uint64(recordData.Base.DstPort))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnEType, helpers.ETypeIPv4)
case sflow.SampledIPv6:
bf.SrcAddr = decodeIP(recordData.Base.SrcIP)
bf.DstAddr = decodeIP(recordData.Base.DstIP)
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnBytes, uint64(recordData.Base.Length))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnProto, uint64(recordData.Base.Protocol))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcPort, uint64(recordData.Base.SrcPort))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstPort, uint64(recordData.Base.DstPort))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnEType, helpers.ETypeIPv6)
case sflow.ExtendedRouter:
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcNetMask, uint64(recordData.SrcMaskLen))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstNetMask, uint64(recordData.DstMaskLen))
bf.NextHop = decodeIP(recordData.NextHop)
case sflow.ExtendedGateway:
bf.NextHop = decodeIP(recordData.NextHop)
bf.DstAS = recordData.AS
bf.SrcAS = recordData.AS
if len(recordData.ASPath) > 0 {
bf.DstAS = recordData.ASPath[len(recordData.ASPath)-1]
}
if recordData.SrcAS > 0 {
bf.SrcAS = recordData.SrcAS
}
}
}
flowMessageSet = append(flowMessageSet, bf)
}
return flowMessageSet
}
func parseSampledHeader(bf *schema.FlowMessage, header *sflow.SampledHeader) {
data := header.HeaderData
switch header.Protocol {
case 1: // Ethernet
parseEthernetHeader(bf, data)
}
}
func parseEthernetHeader(bf *schema.FlowMessage, data []byte) {
if len(data) < 14 {
return
}
etherType := data[12:14]
data = data[14:]
if etherType[0] == 0x81 && etherType[1] == 0x00 {
// 802.1q
if len(data) < 4 {
return
}
etherType = data[2:4]
data = data[4:]
}
if etherType[0] == 0x88 && etherType[1] == 0x47 {
// MPLS
for {
if len(data) < 5 {
return
}
label := binary.BigEndian.Uint32(append([]byte{0}, data[:3]...)) >> 4
bottom := data[2] & 1
data = data[4:]
if bottom == 1 || label <= 15 {
if data[0]&0xf0>>4 == 4 {
etherType = []byte{0x8, 0x0}
} else if data[0]&0xf0>>4 == 6 {
etherType = []byte{0x86, 0xdd}
} else {
return
}
break
}
}
}
var proto uint8
if etherType[0] == 0x8 && etherType[1] == 0x0 {
// IPv4
if len(data) < 20 {
return
}
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnEType, helpers.ETypeIPv4)
bf.SrcAddr = decodeIP(data[12:16])
bf.DstAddr = decodeIP(data[16:20])
proto = data[9]
ihl := int((data[0] & 0xf) * 4)
if len(data) >= ihl {
data = data[ihl:]
} else {
data = data[:0]
}
} else if etherType[0] == 0x86 && etherType[1] == 0xdd {
// IPv6
if len(data) < 40 {
return
}
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnEType, helpers.ETypeIPv6)
bf.SrcAddr = decodeIP(data[8:24])
bf.DstAddr = decodeIP(data[24:40])
proto = data[6]
data = data[40:]
}
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnProto, uint64(proto))
if proto == 6 || proto == 17 {
if len(data) > 4 {
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnSrcPort,
uint64(binary.BigEndian.Uint16(data[0:2])))
schema.Flows.ProtobufAppendVarint(bf, schema.ColumnDstPort,
uint64(binary.BigEndian.Uint16(data[2:4])))
}
}
}
func decodeIP(b []byte) netip.Addr {
if ip, ok := netip.AddrFromSlice(b); ok {
return netip.AddrFrom16(ip.As16())
}
return netip.Addr{}
}
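Note: parseEthernetHeader above reads the EtherType at bytes 12-13 of the sampled frame and skips a 4-byte 802.1q tag (and MPLS labels) before dissecting the IP header by hand. A self-contained sketch of just the EtherType/VLAN step, using the same offsets:

```
package main

import (
	"encoding/binary"
	"fmt"
)

// etherType returns the EtherType of an Ethernet frame, skipping a single
// 802.1q VLAN tag if present, along with the remaining payload.
func etherType(frame []byte) (uint16, []byte) {
	if len(frame) < 14 {
		return 0, nil
	}
	et := binary.BigEndian.Uint16(frame[12:14])
	payload := frame[14:]
	if et == 0x8100 && len(payload) >= 4 { // 802.1q tag
		et = binary.BigEndian.Uint16(payload[2:4])
		payload = payload[4:]
	}
	return et, payload
}

func main() {
	frame := make([]byte, 18)
	binary.BigEndian.PutUint16(frame[12:14], 0x8100) // outer tag: 802.1q
	binary.BigEndian.PutUint16(frame[16:18], 0x0800) // inner EtherType: IPv4
	et, _ := etherType(frame)
	fmt.Printf("0x%x\n", et) // 0x800
}
```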

View File

@@ -9,9 +9,9 @@ import (
"net"
"github.com/netsampler/goflow2/decoders/sflow"
"github.com/netsampler/goflow2/producer"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -78,7 +78,7 @@ func New(r *reporter.Reporter) decoder.Decoder {
}
// Decode decodes an sFlow payload.
func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
func (nd *Decoder) Decode(in decoder.RawFlow) []*schema.FlowMessage {
buf := bytes.NewBuffer(in.Payload)
key := in.Source.String()
@@ -109,8 +109,6 @@ func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
version := "5"
samples := msgDecConv.Samples
nd.metrics.stats.WithLabelValues(key, agent, version).Inc()
hasFlowSamples := false
hasExpandedFlowSamples := false
for _, s := range samples {
switch sConv := s.(type) {
case sflow.FlowSample:
@@ -118,13 +116,11 @@ func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
Inc()
nd.metrics.sampleRecordsStatsSum.WithLabelValues(key, agent, version, "FlowSample").
Add(float64(len(sConv.Records)))
hasFlowSamples = true
case sflow.ExpandedFlowSample:
nd.metrics.sampleStatsSum.WithLabelValues(key, agent, version, "ExpandedFlowSample").
Inc()
nd.metrics.sampleRecordsStatsSum.WithLabelValues(key, agent, version, "ExpandedFlowSample").
Add(float64(len(sConv.Records)))
hasExpandedFlowSamples = true
case sflow.CounterSample:
nd.metrics.sampleStatsSum.WithLabelValues(key, agent, version, "CounterSample").
Inc()
@@ -132,41 +128,13 @@ func (nd *Decoder) Decode(in decoder.RawFlow) []*decoder.FlowMessage {
Add(float64(len(sConv.Records)))
}
}
if hasFlowSamples && hasExpandedFlowSamples {
// We assume routers are either exporting one or the others. The
// alternative would be to keep count of the received flows and their
// types into a bitset. However, this would rely on the fact that
// GoFlow2 keep everything in order and therefore may not be
// future-proof. Better have people not have flows at all than having
// something wrong.
nd.metrics.errors.WithLabelValues(key, "sflow packet has both regular and expanded flows").Inc()
return nil
}
flowMessageSet, _ := producer.ProcessMessageSFlow(msgDec)
flowMessageSet := decode(msgDec)
for _, fmsg := range flowMessageSet {
fmsg.TimeReceived = ts
fmsg.TimeFlowStart = ts
fmsg.TimeFlowEnd = ts
}
results := make([]*decoder.FlowMessage, len(flowMessageSet))
for idx, fmsg := range flowMessageSet {
results[idx] = decoder.ConvertGoflowToFlowMessage(fmsg)
if fmsg.InIf == interfaceLocal {
results[idx].InIf = 0
}
if fmsg.OutIf == interfaceLocal {
results[idx].OutIf = 0
} else if hasFlowSamples && fmsg.OutIf&interfaceOutMask == interfaceOutDiscard {
results[idx].OutIf = 0
results[idx].ForwardingStatus = 128
} else if hasFlowSamples && fmsg.OutIf&interfaceOutMask == interfaceOutMultiple {
results[idx].OutIf = 0
}
}
return results
return flowMessageSet
}
// Name returns the name of the decoder.

View File

@@ -4,12 +4,16 @@
package sflow
import (
"fmt"
"net"
"net/netip"
"path/filepath"
"reflect"
"testing"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -23,124 +27,99 @@ func TestDecode(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
InIf: 27,
OutIf: 28,
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
},
}, {
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 439,
Packets: 1,
Etype: 0x800,
Proto: 6,
SrcPort: 443,
DstPort: 56876,
SrcAddr: netip.MustParseAddr("::ffff:104.26.8.24"),
DstAddr: netip.MustParseAddr("::ffff:45.90.161.46"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
NextHop: netip.MustParseAddr("::ffff:45.90.161.46"),
InIf: 49001,
OutIf: 25,
IPTTL: 59,
TCPFlags: 24,
FragmentId: 42354,
FragmentOffset: 16384,
SrcAS: 13335,
DstAS: 39421,
SrcNetMask: 20,
DstNetMask: 27,
SrcAddr: net.ParseIP("104.26.8.24").To16(),
DstAddr: net.ParseIP("45.90.161.46").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
NextHop: net.ParseIP("45.90.161.46").To16(),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 439,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 443,
schema.ColumnDstPort: 56876,
schema.ColumnSrcNetMask: 20,
schema.ColumnDstNetMask: 27,
},
}, {
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
InIf: 27,
OutIf: 28,
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
},
}, {
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 64,
Packets: 1,
Etype: 0x800,
Proto: 6,
SrcPort: 55658,
DstPort: 5555,
InIf: 28,
OutIf: 49001,
IPTTL: 255,
TCPFlags: 2,
FragmentId: 54321,
SrcAS: 39421,
DstAS: 26615,
SrcNetMask: 27,
DstNetMask: 17,
SrcAddr: net.ParseIP("45.90.161.148").To16(),
DstAddr: net.ParseIP("191.87.91.27").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
NextHop: net.ParseIP("31.14.69.110").To16(),
NextHopAS: 203698,
SrcAddr: netip.MustParseAddr("::ffff:45.90.161.148"),
DstAddr: netip.MustParseAddr("::ffff:191.87.91.27"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
NextHop: netip.MustParseAddr("::ffff:31.14.69.110"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 64,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 55658,
schema.ColumnDstPort: 5555,
schema.ColumnSrcNetMask: 27,
schema.ColumnDstNetMask: 17,
},
}, {
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
InIf: 27,
OutIf: 28,
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}
gotMetrics := r.GetMetrics(
@@ -169,34 +148,29 @@ func TestDecodeInterface(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
InIf: 27,
OutIf: 0, // local interface
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}
})
@@ -208,35 +182,30 @@ func TestDecodeInterface(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
InIf: 27,
OutIf: 0, // discard interface
ForwardingStatus: 128,
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
SamplingRate: 1024,
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
InIf: 27,
OutIf: 0, // discard interface
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
schema.ColumnForwardingStatus: 128,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}
})
@@ -248,34 +217,29 @@ func TestDecodeInterface(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 812646826,
SamplingRate: 1024,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 1518,
Packets: 1,
Etype: 0x86DD,
Proto: 6,
SrcPort: 46026,
DstPort: 22,
SrcAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:38"),
DstAddr: netip.MustParseAddr("2a0c:8880:2:0:185:21:130:39"),
ExporterAddress: netip.MustParseAddr("::ffff:172.16.0.3"),
InIf: 27,
OutIf: 0, // multiple interfaces
IPTos: 8,
IPTTL: 64,
TCPFlags: 16,
IPv6FlowLabel: 426132,
SrcAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:38").To16(),
DstAddr: net.ParseIP("2a0c:8880:2:0:185:21:130:39").To16(),
ExporterAddress: net.ParseIP("172.16.0.3").To16(),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 1518,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv6,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 46026,
schema.ColumnDstPort: 22,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}
})
@@ -287,41 +251,34 @@ func TestDecodeInterface(t *testing.T) {
if got == nil {
t.Fatalf("Decode() error on data")
}
expectedFlows := []*decoder.FlowMessage{
expectedFlows := []*schema.FlowMessage{
{
SequenceNum: 115694180,
SamplingRate: 1000,
TimeFlowStart: 18446744011573954816,
TimeFlowEnd: 18446744011573954816,
Bytes: 126,
Packets: 1,
Etype: 2048,
Proto: 6,
SrcPort: 22,
DstPort: 52237,
InIf: 29001,
OutIf: 1285816721,
IPTos: 8,
IPTTL: 61,
TCPFlags: 24,
FragmentId: 43854,
FragmentOffset: 16384,
SrcNetMask: 32,
DstNetMask: 22,
SrcAddr: net.ParseIP("52.52.52.52").To16(),
DstAddr: net.ParseIP("53.53.53.53").To16(),
ExporterAddress: net.ParseIP("49.49.49.49").To16(),
NextHop: net.ParseIP("54.54.54.54").To16(),
NextHopAS: 8218,
SrcAddr: netip.MustParseAddr("::ffff:52.52.52.52"),
DstAddr: netip.MustParseAddr("::ffff:53.53.53.53"),
ExporterAddress: netip.MustParseAddr("::ffff:49.49.49.49"),
NextHop: netip.MustParseAddr("::ffff:54.54.54.54"),
SrcAS: 203476,
DstAS: 203361,
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 126,
schema.ColumnPackets: 1,
schema.ColumnEType: helpers.ETypeIPv4,
schema.ColumnProto: 6,
schema.ColumnSrcPort: 22,
schema.ColumnDstPort: 52237,
schema.ColumnSrcNetMask: 32,
schema.ColumnDstNetMask: 22,
},
},
}
for _, f := range got {
f.TimeReceived = 0
}
if diff := helpers.Diff(got, expectedFlows); diff != "" {
if diff := helpers.Diff(got, expectedFlows, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Decode() (-got, +want):\n%s", diff)
}

View File

@@ -6,10 +6,9 @@
package decoder
import (
"fmt"
"net/netip"
"google.golang.org/protobuf/encoding/protowire"
"google.golang.org/protobuf/proto"
"akvorado/common/schema"
)
// DummyDecoder is a simple decoder producing flows from random data.
@@ -17,36 +16,19 @@ import (
type DummyDecoder struct{}
// Decode returns uninteresting flow messages.
func (dc *DummyDecoder) Decode(in RawFlow) []*FlowMessage {
return []*FlowMessage{
{
TimeReceived: uint64(in.TimeReceived.UTC().Unix()),
ExporterAddress: in.Source.To16(),
Bytes: uint64(len(in.Payload)),
Packets: 1,
InIfDescription: string(in.Payload),
},
func (dc *DummyDecoder) Decode(in RawFlow) []*schema.FlowMessage {
exporterAddress, _ := netip.AddrFromSlice(in.Source.To16())
f := &schema.FlowMessage{
TimeReceived: uint64(in.TimeReceived.UTC().Unix()),
ExporterAddress: exporterAddress,
}
schema.Flows.ProtobufAppendVarint(f, schema.ColumnBytes, uint64(len(in.Payload)))
schema.Flows.ProtobufAppendVarint(f, schema.ColumnPackets, 1)
schema.Flows.ProtobufAppendBytes(f, schema.ColumnInIfDescription, in.Payload)
return []*schema.FlowMessage{f}
}
// Name returns the original name.
func (dc *DummyDecoder) Name() string {
return "dummy"
}
// DecodeMessage decodes a length-prefixed protobuf message. It assumes the
// whole buffer is used. This does not use VT functions.
func (m *FlowMessage) DecodeMessage(buf []byte) error {
messageSize, n := protowire.ConsumeVarint(buf)
if n < 0 {
return protowire.ParseError(n)
}
buf = buf[n:]
if uint64(len(buf)) != messageSize {
return fmt.Errorf("input buffer is of incorrect size (%d vs %d)", len(buf), messageSize)
}
if err := proto.Unmarshal(buf, m); err != nil {
return err
}
return nil
}
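
The switch from `net.IP` to `netip.Addr` is also why the expected test fixtures now show exporter addresses such as `::ffff:172.16.0.3`: converting a 16-byte `net.IP` with `netip.AddrFromSlice` keeps the IPv4-mapped form. A minimal standalone sketch of the conversion the dummy decoder performs (the sample address is an assumption, not from the commit):

```go
package main

import (
	"fmt"
	"net"
	"net/netip"
)

func main() {
	// Same conversion as DummyDecoder.Decode: expand the legacy net.IP to its
	// 16-byte form and wrap it into a netip.Addr. An IPv4 source therefore
	// appears as an IPv4-mapped IPv6 address ("::ffff:...") in the fixtures.
	source := net.ParseIP("172.16.0.3") // sample address
	exporter, ok := netip.AddrFromSlice(source.To16())
	fmt.Println(exporter, ok) // ::ffff:172.16.0.3 true
}
```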

View File

@@ -10,6 +10,7 @@ import (
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/decoder/netflow"
"akvorado/inlet/flow/decoder/sflow"
@@ -18,23 +19,24 @@ import (
// The goal is to benchmark flow decoding + encoding to protobuf
func BenchmarkDecodeEncodeNetflow(b *testing.B) {
schema.DisableDebug(b)
r := reporter.NewMock(b)
nfdecoder := netflow.New(r)
template := helpers.ReadPcapPayload(b, filepath.Join("decoder", "netflow", "testdata", "options-template-257.pcap"))
got := nfdecoder.Decode(decoder.RawFlow{Payload: template, Source: net.ParseIP("127.0.0.1")})
if got == nil || len(got) != 0 {
b.Fatalf("Decode() error on options template")
b.Fatal("Decode() error on options template")
}
data := helpers.ReadPcapPayload(b, filepath.Join("decoder", "netflow", "testdata", "options-data-257.pcap"))
got = nfdecoder.Decode(decoder.RawFlow{Payload: data, Source: net.ParseIP("127.0.0.1")})
if got == nil || len(got) != 0 {
b.Fatalf("Decode() error on options data")
b.Fatal("Decode() error on options data")
}
template = helpers.ReadPcapPayload(b, filepath.Join("decoder", "netflow", "testdata", "template-260.pcap"))
got = nfdecoder.Decode(decoder.RawFlow{Payload: template, Source: net.ParseIP("127.0.0.1")})
if got == nil || len(got) != 0 {
b.Fatalf("Decode() error on template")
b.Fatal("Decode() error on template")
}
data = helpers.ReadPcapPayload(b, filepath.Join("decoder", "netflow", "testdata", "data-260.pcap"))
@@ -48,18 +50,19 @@ func BenchmarkDecodeEncodeNetflow(b *testing.B) {
got = nfdecoder.Decode(decoder.RawFlow{Payload: data, Source: net.ParseIP("127.0.0.1")})
if withEncoding {
for _, flow := range got {
_, err := flow.EncodeMessage()
if err != nil {
b.Fatalf("EncodeMessage() error:\n%+v", err)
}
schema.Flows.ProtobufMarshal(flow)
}
}
}
if got[0].ProtobufDebug != nil {
b.Fatal("debug is enabled")
}
})
}
}
func BenchmarkDecodeEncodeSflow(b *testing.B) {
schema.DisableDebug(b)
r := reporter.NewMock(b)
sdecoder := sflow.New(r)
data := helpers.ReadPcapPayload(b, filepath.Join("decoder", "sflow", "testdata", "data-1140.pcap"))
@@ -69,19 +72,19 @@ func BenchmarkDecodeEncodeSflow(b *testing.B) {
if !withEncoding {
title = "without encoding"
}
var got []*schema.FlowMessage
b.Run(title, func(b *testing.B) {
for i := 0; i < b.N; i++ {
got := sdecoder.Decode(decoder.RawFlow{Payload: data, Source: net.ParseIP("127.0.0.1")})
got = sdecoder.Decode(decoder.RawFlow{Payload: data, Source: net.ParseIP("127.0.0.1")})
if withEncoding {
for _, flow := range got {
var err error
_, err = flow.EncodeMessage()
if err != nil {
b.Fatalf("EncodeMessage() error:\n%+v", err)
}
schema.Flows.ProtobufMarshal(flow)
}
}
}
if got[0].ProtobufDebug != nil {
b.Fatal("debug is enabled")
}
})
}
}

View File

@@ -14,6 +14,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/input"
)
@@ -24,7 +25,7 @@ type Input struct {
t tomb.Tomb
config *Configuration
ch chan []*decoder.FlowMessage // channel to send flows to
ch chan []*schema.FlowMessage // channel to send flows to
decoder decoder.Decoder
}
@@ -36,7 +37,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp
input := &Input{
r: r,
config: configuration,
ch: make(chan []*decoder.FlowMessage),
ch: make(chan []*schema.FlowMessage),
decoder: dec,
}
daemon.Track(&input.t, "inlet/flow/input/file")
@@ -44,7 +45,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp
}
// Start starts reading the configured input files and producing flows.
func (in *Input) Start() (<-chan []*decoder.FlowMessage, error) {
func (in *Input) Start() (<-chan []*schema.FlowMessage, error) {
in.r.Info().Msg("file input starting")
in.t.Go(func() error {
for idx := 0; true; idx++ {

View File

@@ -11,6 +11,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -40,7 +41,7 @@ out:
select {
case got1 := <-ch:
for _, fl := range got1 {
got = append(got, string(fl.InIfDescription))
got = append(got, string(fl.ProtobufDebug[schema.ColumnInIfDescription].([]byte)))
}
case <-time.After(50 * time.Millisecond):
break out

View File

@@ -7,13 +7,14 @@ package input
import (
"akvorado/common/daemon"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
// Input is the interface any input should meet
type Input interface {
// Start instructs an input to start producing flows on the returned channel.
Start() (<-chan []*decoder.FlowMessage, error)
Start() (<-chan []*schema.FlowMessage, error)
// Stop instructs the input to stop producing flows.
Stop() error
}

View File

@@ -16,6 +16,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/input"
)
@@ -35,9 +36,9 @@ type Input struct {
inDrops *reporter.GaugeVec
}
address net.Addr // listening address, for testing purposes
ch chan []*decoder.FlowMessage // channel to send flows to
decoder decoder.Decoder // decoder to use
address net.Addr // listening address, for testing purposes
ch chan []*schema.FlowMessage // channel to send flows to
decoder decoder.Decoder // decoder to use
}
// New instantiates a new UDP listener from the provided configuration.
@@ -45,7 +46,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp
input := &Input{
r: r,
config: configuration,
ch: make(chan []*decoder.FlowMessage, configuration.QueueSize),
ch: make(chan []*schema.FlowMessage, configuration.QueueSize),
decoder: dec,
}
@@ -98,7 +99,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp
}
// Start starts listening to the provided UDP socket and producing flows.
func (in *Input) Start() (<-chan []*decoder.FlowMessage, error) {
func (in *Input) Start() (<-chan []*schema.FlowMessage, error) {
in.r.Info().Str("listen", in.config.Listen).Msg("starting UDP input")
// Listen to UDP port

View File

@@ -4,13 +4,17 @@
package udp
import (
"fmt"
"net"
"net/netip"
"reflect"
"testing"
"time"
"akvorado/common/daemon"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
)
@@ -44,7 +48,7 @@ func TestUDPInput(t *testing.T) {
}
// Get it back
var got []*decoder.FlowMessage
var got []*schema.FlowMessage
select {
case got = <-ch:
if len(got) == 0 {
@@ -58,16 +62,18 @@ func TestUDPInput(t *testing.T) {
if delta > 1 {
t.Errorf("TimeReceived out of range: %d (now: %d)", got[0].TimeReceived, time.Now().UTC().Unix())
}
expected := []*decoder.FlowMessage{
expected := []*schema.FlowMessage{
{
TimeReceived: got[0].TimeReceived,
ExporterAddress: net.ParseIP("127.0.0.1"),
Bytes: 12,
Packets: 1,
InIfDescription: "hello world!",
ExporterAddress: netip.MustParseAddr("::ffff:127.0.0.1"),
ProtobufDebug: map[schema.ColumnKey]interface{}{
schema.ColumnBytes: 12,
schema.ColumnPackets: 1,
schema.ColumnInIfDescription: []byte("hello world!"),
},
},
}
if diff := helpers.Diff(got, expected); diff != "" {
if diff := helpers.Diff(got, expected, helpers.DiffFormatter(reflect.TypeOf(schema.ColumnBytes), fmt.Sprint)); diff != "" {
t.Fatalf("Input data (-got, +want):\n%s", diff)
}

View File

@@ -4,10 +4,9 @@
package flow
import (
"net/netip"
"time"
"akvorado/inlet/flow/decoder"
"akvorado/common/schema"
"golang.org/x/time/rate"
)
@@ -23,12 +22,12 @@ type limiter struct {
// allowMessages tells if we can transmit the provided messages,
// depending on the rate limiter configuration. If yes, their sampling
// rate may be modified to match the current drop rate.
func (c *Component) allowMessages(fmsgs []*decoder.FlowMessage) bool {
func (c *Component) allowMessages(fmsgs []*schema.FlowMessage) bool {
count := len(fmsgs)
if c.config.RateLimit == 0 || count == 0 {
return true
}
exporter, _ := netip.AddrFromSlice(fmsgs[0].ExporterAddress)
exporter := fmsgs[0].ExporterAddress
exporterLimiter, ok := c.limiters[exporter]
if !ok {
exporterLimiter = &limiter{
@@ -51,7 +50,7 @@ func (c *Component) allowMessages(fmsgs []*decoder.FlowMessage) bool {
}
if exporterLimiter.dropRate > 0 {
for _, flow := range fmsgs {
flow.SamplingRate *= uint64(1 / (1 - exporterLimiter.dropRate))
flow.SamplingRate *= uint32(1 / (1 - exporterLimiter.dropRate))
}
}
return true
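
To make the sampling-rate compensation concrete, here is a standalone sketch of the arithmetic above; the values are illustrative, not taken from the commit. With a drop rate of 0.5, each kept flow stands for twice as many packets, so the 32-bit sampling rate is doubled.

```go
package main

import "fmt"

func main() {
	// Illustration of the compensation done by allowMessages. Note that the
	// multiplier is truncated to an integer, matching the uint32 cast used in
	// the code above.
	samplingRate := uint32(1024)
	dropRate := 0.5
	samplingRate *= uint32(1 / (1 - dropRate))
	fmt.Println(samplingRate) // 2048
}
```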

View File

@@ -5,9 +5,9 @@
package flow
import (
_ "embed" // for flow.proto
"errors"
"fmt"
netHTTP "net/http"
"net/netip"
"gopkg.in/tomb.v2"
@@ -15,6 +15,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/decoder"
"akvorado/inlet/flow/input"
)
@@ -33,7 +34,7 @@ type Component struct {
}
// Channel for sending flows out of the package.
outgoingFlows chan *Message
outgoingFlows chan *schema.FlowMessage
// Per-exporter rate-limiters
limiters map[netip.Addr]*limiter
@@ -58,7 +59,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
r: r,
d: &dependencies,
config: configuration,
outgoingFlows: make(chan *Message),
outgoingFlows: make(chan *schema.FlowMessage),
limiters: make(map[netip.Addr]*limiter),
inputs: make([]input.Input, len(configuration.Inputs)),
}
@@ -115,12 +116,18 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
)
c.d.Daemon.Track(&c.t, "inlet/flow")
c.initHTTP()
c.d.HTTP.AddHandler("/api/v0/inlet/flow/schema.proto",
netHTTP.HandlerFunc(func(w netHTTP.ResponseWriter, r *netHTTP.Request) {
w.Header().Set("Content-Type", "text/plain")
w.Write([]byte(schema.Flows.ProtobufDefinition()))
}))
return &c, nil
}
// Flows returns a channel to receive flows.
func (c *Component) Flows() <-chan *Message {
func (c *Component) Flows() <-chan *schema.FlowMessage {
return c.outgoingFlows
}
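
As a usage sketch (not part of the commit), the generated definition can now be fetched from this single, unversioned endpoint; the listen address below is an assumption for the example.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Hypothetical client: fetch the protobuf definition served by the inlet.
	resp, err := http.Get("http://127.0.0.1:8080/api/v0/inlet/flow/schema.proto")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	definition, _ := io.ReadAll(resp.Body)
	fmt.Print(string(definition)) // the schema-derived flow .proto definition
}
```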

View File

@@ -110,7 +110,7 @@ func TestFlow(t *testing.T) {
// the lower limit should be OK.
t.Logf("Nominal rate was %d/second", nominalRate)
expectedRate := uint64(30000 / 1000 * nominalRate)
if flow.SamplingRate > 1000*expectedRate/100 || flow.SamplingRate < 70*expectedRate/100 {
if flow.SamplingRate > uint32(1000*expectedRate/100) || flow.SamplingRate < uint32(70*expectedRate/100) {
if retry > 0 {
continue
}

View File

@@ -1,75 +0,0 @@
// SPDX-FileCopyrightText: 2022 Free Mobile
// SPDX-License-Identifier: AGPL-3.0-only
package flow
import (
"embed"
"fmt"
"io/ioutil"
"net/http"
"strconv"
"strings"
"github.com/gin-gonic/gin"
)
// CurrentSchemaVersion is the version of the protobuf definition
const CurrentSchemaVersion = 4
var (
// VersionedSchemas is a mapping from schema version to protobuf definitions
VersionedSchemas map[int]string
//go:embed data/schemas/flow*.proto
schemas embed.FS
)
func init() {
VersionedSchemas = make(map[int]string)
entries, err := schemas.ReadDir("data/schemas")
if err != nil {
panic(err)
}
for _, entry := range entries {
version, err := strconv.Atoi(
strings.TrimPrefix(
strings.TrimSuffix(entry.Name(), ".proto"),
"flow-"))
if err != nil {
panic(err)
}
f, err := schemas.Open(fmt.Sprintf("data/schemas/%s", entry.Name()))
if err != nil {
panic(err)
}
schema, err := ioutil.ReadAll(f)
if err != nil {
panic(err)
}
VersionedSchemas[version] = string(schema)
}
}
func (c *Component) initHTTP() {
for version, schema := range VersionedSchemas {
c.d.HTTP.AddHandler(fmt.Sprintf("/api/v0/inlet/flow/schema-%d.proto", version),
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/plain")
w.Write([]byte(schema))
}))
}
c.d.HTTP.GinRouter.GET("/api/v0/inlet/flow/schemas.json",
func(gc *gin.Context) {
answer := struct {
CurrentVersion int `json:"current-version"`
Versions map[int]string `json:"versions"`
}{
CurrentVersion: CurrentSchemaVersion,
Versions: map[int]string{},
}
for version := range VersionedSchemas {
answer.Versions[version] = fmt.Sprintf("/api/v0/inlet/flow/schema-%d.proto", version)
}
gc.IndentedJSON(http.StatusOK, answer)
})
}

View File

@@ -4,37 +4,23 @@
package flow
import (
"fmt"
"strconv"
"testing"
"akvorado/common/helpers"
"akvorado/common/reporter"
"github.com/gin-gonic/gin"
)
func TestHTTPEndpoints(t *testing.T) {
r := reporter.NewMock(t)
c := NewMock(t, r, DefaultConfiguration())
versions := gin.H{}
for i := 0; i < CurrentSchemaVersion+1; i++ {
versions[strconv.Itoa(i)] = fmt.Sprintf("/api/v0/inlet/flow/schema-%d.proto", i)
}
cases := helpers.HTTPEndpointCases{
{
URL: "/api/v0/inlet/flow/schema-0.proto",
URL: "/api/v0/inlet/flow/schema.proto",
ContentType: "text/plain",
FirstLines: []string{
"",
`syntax = "proto3";`,
`package decoder;`,
},
}, {
URL: "/api/v0/inlet/flow/schemas.json",
JSONOutput: gin.H{
"current-version": CurrentSchemaVersion,
"versions": versions,
},
},
}

View File

@@ -12,6 +12,7 @@ import (
"akvorado/common/helpers"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
"akvorado/inlet/flow/input/udp"
)
@@ -42,6 +43,6 @@ func NewMock(t *testing.T, r *reporter.Reporter, config Configuration) *Componen
}
// Inject injects the provided flow message, as if it had been received.
func (c *Component) Inject(t *testing.T, fmsg *Message) {
func (c *Component) Inject(t *testing.T, fmsg *schema.FlowMessage) {
c.outgoingFlows <- fmsg
}

View File

@@ -5,6 +5,7 @@ package geoip
import (
"net"
"net/netip"
)
type asn struct {
@@ -18,11 +19,12 @@ type country struct {
}
// LookupASN returns the result of a lookup for an AS number.
func (c *Component) LookupASN(ip net.IP) uint32 {
func (c *Component) LookupASN(ip netip.Addr) uint32 {
asnDB := c.db.asn.Load()
if asnDB != nil {
var asn asn
err := asnDB.Lookup(ip, &asn)
ip := ip.As16()
err := asnDB.Lookup(net.IP(ip[:]), &asn)
if err == nil && asn.AutonomousSystemNumber != 0 {
c.metrics.databaseHit.WithLabelValues("asn").Inc()
return uint32(asn.AutonomousSystemNumber)
@@ -33,11 +35,12 @@ func (c *Component) LookupASN(ip net.IP) uint32 {
}
// LookupCountry returns the result of a lookup for a country.
func (c *Component) LookupCountry(ip net.IP) string {
func (c *Component) LookupCountry(ip netip.Addr) string {
geoDB := c.db.geo.Load()
if geoDB != nil {
var country country
err := geoDB.Lookup(ip, &country)
ip := ip.As16()
err := geoDB.Lookup(net.IP(ip[:]), &country)
if err == nil && country.Country.IsoCode != "" {
c.metrics.databaseHit.WithLabelValues("geo").Inc()
return country.Country.IsoCode
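
The reverse conversion used here, from `netip.Addr` back to `net.IP` for the MaxMind reader, can be sketched on its own; the address is an assumption chosen to match the test data.

```go
package main

import (
	"fmt"
	"net"
	"net/netip"
)

func main() {
	// Same idea as LookupASN/LookupCountry: expand the netip.Addr to its
	// 16-byte representation and hand it to an API that expects a net.IP.
	addr := netip.MustParseAddr("::ffff:1.0.0.0") // sample address
	bytes16 := addr.As16()
	ip := net.IP(bytes16[:])
	fmt.Println(ip) // 1.0.0.0
}
```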

View File

@@ -4,7 +4,7 @@
package geoip
import (
"net"
"net/netip"
"testing"
"akvorado/common/helpers"
@@ -23,6 +23,9 @@ func TestLookup(t *testing.T) {
{
IP: "1.0.0.0",
ExpectedASN: 15169,
}, {
IP: "::ffff:1.0.0.0",
ExpectedASN: 15169,
}, {
IP: "2.125.160.216",
ExpectedCountry: "GB",
@@ -36,21 +39,21 @@ func TestLookup(t *testing.T) {
},
}
for _, tc := range cases {
gotCountry := c.LookupCountry(net.ParseIP(tc.IP))
gotCountry := c.LookupCountry(netip.MustParseAddr(tc.IP))
if diff := helpers.Diff(gotCountry, tc.ExpectedCountry); diff != "" {
t.Errorf("LookupCountry(%q) (-got, +want):\n%s", tc.IP, diff)
}
gotASN := c.LookupASN(net.ParseIP(tc.IP))
gotASN := c.LookupASN(netip.MustParseAddr(tc.IP))
if diff := helpers.Diff(gotASN, tc.ExpectedASN); diff != "" {
t.Errorf("LookupASN(%q) (-got, +want):\n%s", tc.IP, diff)
}
}
gotMetrics := r.GetMetrics("akvorado_inlet_geoip_")
expectedMetrics := map[string]string{
`db_hits_total{database="asn"}`: "2",
`db_hits_total{database="asn"}`: "3",
`db_hits_total{database="geo"}`: "3",
`db_misses_total{database="asn"}`: "2",
`db_misses_total{database="geo"}`: "1",
`db_misses_total{database="geo"}`: "2",
`db_refresh_total{database="asn"}`: "1",
`db_refresh_total{database="geo"}`: "1",
}

View File

@@ -16,7 +16,7 @@ import (
"akvorado/common/helpers"
"akvorado/common/kafka"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
func TestRealKafka(t *testing.T) {
@@ -29,7 +29,7 @@ func TestRealKafka(t *testing.T) {
configuration.Brokers = brokers
configuration.Version = kafka.Version(sarama.V2_8_1_0)
configuration.FlushInterval = 100 * time.Millisecond
expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion)
expectedTopicName := fmt.Sprintf("%s-%s", topicName, schema.Flows.ProtobufMessageHash())
r := reporter.NewMock(t)
c, err := New(r, configuration, Dependencies{Daemon: daemon.NewMock(t)})
if err != nil {

View File

@@ -17,7 +17,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/kafka"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
// Component represents the Kafka exporter.
@@ -65,7 +65,7 @@ func New(reporter *reporter.Reporter, configuration Configuration, dependencies
config: configuration,
kafkaConfig: kafkaConfig,
kafkaTopic: fmt.Sprintf("%s-v%d", configuration.Topic, flow.CurrentSchemaVersion),
kafkaTopic: fmt.Sprintf("%s-%s", configuration.Topic, schema.Flows.ProtobufMessageHash()),
}
c.initMetrics()
c.createKafkaProducer = func() (sarama.AsyncProducer, error) {
@@ -101,12 +101,14 @@ func (c *Component) Start() error {
c.r.Debug().Msg("stop error logger")
return nil
case msg := <-kafkaProducer.Errors():
c.metrics.errors.WithLabelValues(msg.Error()).Inc()
errLogger.Err(msg.Err).
Str("topic", msg.Msg.Topic).
Int64("offset", msg.Msg.Offset).
Int32("partition", msg.Msg.Partition).
Msg("Kafka producer error")
if msg != nil {
c.metrics.errors.WithLabelValues(msg.Error()).Inc()
errLogger.Err(msg.Err).
Str("topic", msg.Msg.Topic).
Int64("offset", msg.Msg.Offset).
Int32("partition", msg.Msg.Partition).
Msg("Kafka producer error")
}
}
}
})
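
The nil guard added above matters because receiving from a closed channel yields the element type's zero value, which for a pointer type is nil. A minimal, self-contained illustration of that behavior (not Sarama-specific):

```go
package main

import "fmt"

func main() {
	// Receiving from a closed channel does not block; it returns the zero
	// value of the element type, so a *T channel hands back nil.
	ch := make(chan *struct{ Err error })
	close(ch)
	msg := <-ch
	fmt.Println(msg == nil) // true
}
```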

View File

@@ -15,7 +15,7 @@ import (
"akvorado/common/daemon"
"akvorado/common/helpers"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
func TestKafka(t *testing.T) {
@@ -27,7 +27,7 @@ func TestKafka(t *testing.T) {
mockProducer.ExpectInputWithMessageCheckerFunctionAndSucceed(func(got *sarama.ProducerMessage) error {
defer close(received)
expected := sarama.ProducerMessage{
Topic: fmt.Sprintf("flows-v%d", flow.CurrentSchemaVersion),
Topic: fmt.Sprintf("flows-%s", schema.Flows.ProtobufMessageHash()),
Key: got.Key,
Value: sarama.ByteEncoder("hello world!"),
Partition: got.Partition,
@@ -52,7 +52,7 @@ func TestKafka(t *testing.T) {
gotMetrics := r.GetMetrics("akvorado_inlet_kafka_")
expectedMetrics := map[string]string{
`sent_bytes_total{exporter="127.0.0.1"}`: "26",
fmt.Sprintf(`errors_total{error="kafka: Failed to produce message to topic flows-v%d: noooo"}`, flow.CurrentSchemaVersion): "1",
fmt.Sprintf(`errors_total{error="kafka: Failed to produce message to topic flows-%s: noooo"}`, schema.Flows.ProtobufMessageHash()): "1",
`sent_messages_total{exporter="127.0.0.1"}`: "2",
}
if diff := helpers.Diff(gotMetrics, expectedMetrics); diff != "" {

View File

@@ -14,7 +14,7 @@ import (
"text/template"
"time"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
var (
@@ -23,12 +23,10 @@ var (
data embed.FS
initShTemplate = template.Must(template.New("initsh").Parse(`#!/bin/sh
# Install Protobuf schemas
{{- range $version, $schema := .FlowSchemaVersions }}
cat > /var/lib/clickhouse/format_schemas/flow-{{ $version }}.proto <<'EOPROTO'
{{ $schema }}
# Install Protobuf schema
cat > /var/lib/clickhouse/format_schemas/flow-{{ .FlowSchemaHash }}.proto <<'EOPROTO'
{{ .FlowSchema }}
EOPROTO
{{ end }}
# Alter ClickHouse configuration
cat > /etc/clickhouse-server/config.d/akvorado.xml <<'EOCONFIG'
@@ -46,9 +44,10 @@ EOCONFIG
)
type initShVariables struct {
FlowSchemaVersions map[int]string
SystemLogTTL int
SystemLogTables []string
FlowSchemaHash string
FlowSchema string
SystemLogTTL int
SystemLogTables []string
}
func (c *Component) addHandlerEmbedded(url string, path string) {
@@ -72,8 +71,9 @@ func (c *Component) registerHTTPHandlers() error {
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
var result bytes.Buffer
if err := initShTemplate.Execute(&result, initShVariables{
FlowSchemaVersions: flow.VersionedSchemas,
SystemLogTTL: int(c.config.SystemLogTTL.Seconds()),
FlowSchemaHash: schema.Flows.ProtobufMessageHash(),
FlowSchema: schema.Flows.ProtobufDefinition(),
SystemLogTTL: int(c.config.SystemLogTTL.Seconds()),
SystemLogTables: []string{
"asynchronous_metric_log",
"metric_log",

View File

@@ -15,6 +15,7 @@ import (
"akvorado/common/helpers"
"akvorado/common/http"
"akvorado/common/reporter"
"akvorado/common/schema"
)
func TestHTTPEndpoints(t *testing.T) {
@@ -62,10 +63,11 @@ func TestHTTPEndpoints(t *testing.T) {
FirstLines: []string{
`#!/bin/sh`,
``,
`# Install Protobuf schemas`,
`cat > /var/lib/clickhouse/format_schemas/flow-0.proto <<'EOPROTO'`,
`# Install Protobuf schema`,
fmt.Sprintf(`cat > /var/lib/clickhouse/format_schemas/flow-%s.proto <<'EOPROTO'`,
schema.Flows.ProtobufMessageHash()),
"",
`syntax = "proto3";`,
`package decoder;`,
},
},
}

View File

@@ -17,7 +17,6 @@ import (
"golang.org/x/exp/slices"
"akvorado/common/schema"
"akvorado/inlet/flow"
)
var errSkipStep = errors.New("migration: skip this step")
@@ -178,16 +177,16 @@ AS %s
// createRawFlowsTable creates the raw flow table
func (c *Component) createRawFlowsTable(ctx context.Context) error {
tableName := fmt.Sprintf("flows_%d_raw", flow.CurrentSchemaVersion)
hash := schema.Flows.ProtobufMessageHash()
tableName := fmt.Sprintf("flows_%s_raw", hash)
kafkaEngine := fmt.Sprintf("Kafka SETTINGS %s", strings.Join([]string{
fmt.Sprintf(`kafka_broker_list = '%s'`,
strings.Join(c.config.Kafka.Brokers, ",")),
fmt.Sprintf(`kafka_topic_list = '%s-v%d'`,
c.config.Kafka.Topic, flow.CurrentSchemaVersion),
fmt.Sprintf(`kafka_topic_list = '%s-%s'`,
c.config.Kafka.Topic, hash),
`kafka_group_name = 'clickhouse'`,
`kafka_format = 'Protobuf'`,
fmt.Sprintf(`kafka_schema = 'flow-%d.proto:FlowMessagev%d'`,
flow.CurrentSchemaVersion, flow.CurrentSchemaVersion),
fmt.Sprintf(`kafka_schema = 'flow-%s.proto:FlowMessagev%s'`, hash, hash),
fmt.Sprintf(`kafka_num_consumers = %d`, c.config.Kafka.Consumers),
`kafka_thread_per_consumer = 1`,
`kafka_handle_error_mode = 'stream'`,
@@ -236,7 +235,7 @@ func (c *Component) createRawFlowsTable(ctx context.Context) error {
}
func (c *Component) createRawFlowsConsumerView(ctx context.Context) error {
tableName := fmt.Sprintf("flows_%d_raw", flow.CurrentSchemaVersion)
tableName := fmt.Sprintf("flows_%s_raw", schema.Flows.ProtobufMessageHash())
viewName := fmt.Sprintf("%s_consumer", tableName)
// Build SELECT query
@@ -278,7 +277,7 @@ func (c *Component) createRawFlowsConsumerView(ctx context.Context) error {
}
func (c *Component) createRawFlowsErrorsView(ctx context.Context) error {
tableName := fmt.Sprintf("flows_%d_raw", flow.CurrentSchemaVersion)
tableName := fmt.Sprintf("flows_%s_raw", schema.Flows.ProtobufMessageHash())
viewName := fmt.Sprintf("%s_errors", tableName)
// Build SELECT query

View File

@@ -23,6 +23,7 @@ import (
"akvorado/common/http"
"akvorado/common/kafka"
"akvorado/common/reporter"
"akvorado/common/schema"
"github.com/ClickHouse/clickhouse-go/v2/lib/proto"
)
@@ -78,7 +79,9 @@ func dumpAllTables(t *testing.T, ch *clickhousedb.Component) map[string]string {
if err := rows.Scan(&table, &schema); err != nil {
t.Fatalf("Scan() error:\n%+v", err)
}
schemas[table] = schema
if !oldTable(table) {
schemas[table] = schema
}
}
return schemas
}
@@ -103,6 +106,16 @@ outer:
}
}
func oldTable(table string) bool {
if strings.Contains(table, schema.Flows.ProtobufMessageHash()) {
return false
}
if strings.HasSuffix(table, "_raw") || strings.HasSuffix(table, "_raw_consumer") || strings.HasSuffix(table, "_raw_errors") {
return true
}
return false
}
// loadAllTables loads tables from a CSV file. Use `format CSV` with
// query from dumpAllTables.
func loadAllTables(t *testing.T, ch *clickhousedb.Component, filename string) {
@@ -220,13 +233,16 @@ WHERE database=currentDatabase() AND table NOT LIKE '.%'`)
if err != nil {
t.Fatalf("Query() error:\n%+v", err)
}
hash := schema.Flows.ProtobufMessageHash()
got := []string{}
for rows.Next() {
var table string
if err := rows.Scan(&table); err != nil {
t.Fatalf("Scan() error:\n%+v", err)
}
got = append(got, table)
if !oldTable(table) {
got = append(got, table)
}
}
expected := []string{
"asns",
@@ -236,11 +252,11 @@ WHERE database=currentDatabase() AND table NOT LIKE '.%'`)
"flows_1h0m0s_consumer",
"flows_1m0s",
"flows_1m0s_consumer",
"flows_4_raw",
"flows_4_raw_consumer",
"flows_4_raw_errors",
"flows_5m0s",
"flows_5m0s_consumer",
fmt.Sprintf("flows_%s_raw", hash),
fmt.Sprintf("flows_%s_raw_consumer", hash),
fmt.Sprintf("flows_%s_raw_errors", hash),
"networks",
"protocols",
}
@@ -305,7 +321,11 @@ LIMIT 1`, proto.ClientName)
})
}
if !t.Failed() && lastSteps != 0 {
t.Fatalf("Last step was not idempotent. Record a new one with:\n%s FORMAT CSV", dumpAllTablesQuery)
if !t.Failed() {
t.Run("final state", func(t *testing.T) {
if lastSteps != 0 {
t.Fatalf("Last step was not idempotent. Record a new one with:\n%s FORMAT CSV", dumpAllTablesQuery)
}
})
}
}

View File

@@ -5,10 +5,10 @@
"protocols","CREATE DICTIONARY default.protocols (`proto` UInt8 INJECTIVE, `name` String, `description` String) PRIMARY KEY proto SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/protocols.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)"
"flows_1m0s","CREATE TABLE default.flows_1m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(12096))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(604800) SETTINGS index_granularity = 8192"
"flows_5m0s","CREATE TABLE default.flows_5m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(155520))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(7776000) SETTINGS index_granularity = 8192"
"flows_4_raw","CREATE TABLE default.flows_4_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunities.ASN` Array(UInt32), `DstLargeCommunities.LocalData1` Array(UInt32), `DstLargeCommunities.LocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-v4', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-4.proto:FlowMessagev4', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'"
"flows_1h0m0s","CREATE TABLE default.flows_1h0m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(622080))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(31104000) SETTINGS index_granularity = 8192"
"flows_4_raw_errors","CREATE MATERIALIZED VIEW default.flows_4_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_4_raw WHERE length(_error) > 0"
"flows_1m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1m0s_consumer TO default.flows_1m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(60)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_5m0s_consumer","CREATE MATERIALIZED VIEW default.flows_5m0s_consumer TO default.flows_5m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(300)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_4_raw_consumer","CREATE MATERIALIZED VIEW default.flows_4_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, SrcAS, DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS DstNetTenant, SrcCountry, DstCountry, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), `DstLargeCommunities.ASN`, `DstLargeCommunities.LocalData1`, `DstLargeCommunities.LocalData2`) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_4_raw WHERE length(_error) = 0"
"flows_1h0m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1h0m0s_consumer TO default.flows_1h0m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(3600)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw","CREATE TABLE default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunitiesASN` Array(UInt32), `DstLargeCommunitiesLocalData1` Array(UInt32), `DstLargeCommunitiesLocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-IMIHFOFXF6RYCYCTMQETKFYVCU', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-IMIHFOFXF6RYCYCTMQETKFYVCU.proto:FlowMessagevIMIHFOFXF6RYCYCTMQETKFYVCU', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'"
"flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_errors","CREATE MATERIALIZED VIEW default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw WHERE length(_error) > 0"
"flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_consumer","CREATE MATERIALIZED VIEW default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, SrcAS, DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS DstNetTenant, SrcCountry, DstCountry, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), DstLargeCommunitiesASN, DstLargeCommunitiesLocalData1, DstLargeCommunitiesLocalData2) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw WHERE length(_error) = 0"
1 asns CREATE DICTIONARY default.asns (`asn` UInt32 INJECTIVE, `name` String) PRIMARY KEY asn SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/asns.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)
5 protocols CREATE DICTIONARY default.protocols (`proto` UInt8 INJECTIVE, `name` String, `description` String) PRIMARY KEY proto SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/protocols.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)
6 flows_1m0s CREATE TABLE default.flows_1m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(12096))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(604800) SETTINGS index_granularity = 8192
7 flows_5m0s CREATE TABLE default.flows_5m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(155520))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(7776000) SETTINGS index_granularity = 8192
flows_4_raw CREATE TABLE default.flows_4_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunities.ASN` Array(UInt32), `DstLargeCommunities.LocalData1` Array(UInt32), `DstLargeCommunities.LocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-v4', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-4.proto:FlowMessagev4', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'
8 flows_1h0m0s CREATE TABLE default.flows_1h0m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(622080))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(31104000) SETTINGS index_granularity = 8192
flows_4_raw_errors CREATE MATERIALIZED VIEW default.flows_4_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_4_raw WHERE length(_error) > 0
9 flows_1m0s_consumer CREATE MATERIALIZED VIEW default.flows_1m0s_consumer TO default.flows_1m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(60)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows
10 flows_5m0s_consumer CREATE MATERIALIZED VIEW default.flows_5m0s_consumer TO default.flows_5m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(300)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows
flows_4_raw_consumer CREATE MATERIALIZED VIEW default.flows_4_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, SrcAS, DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS DstNetTenant, SrcCountry, DstCountry, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), `DstLargeCommunities.ASN`, `DstLargeCommunities.LocalData1`, `DstLargeCommunities.LocalData2`) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_4_raw WHERE length(_error) = 0
11 flows_1h0m0s_consumer CREATE MATERIALIZED VIEW default.flows_1h0m0s_consumer TO default.flows_1h0m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(3600)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows
12 flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw CREATE TABLE default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunitiesASN` Array(UInt32), `DstLargeCommunitiesLocalData1` Array(UInt32), `DstLargeCommunitiesLocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-IMIHFOFXF6RYCYCTMQETKFYVCU', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-IMIHFOFXF6RYCYCTMQETKFYVCU.proto:FlowMessagevIMIHFOFXF6RYCYCTMQETKFYVCU', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'
13 flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_errors CREATE MATERIALIZED VIEW default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw WHERE length(_error) > 0
14 flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_consumer CREATE MATERIALIZED VIEW default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` String, `OutIfDescription` String, `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt32, `DstPort` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, SrcAS, DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS DstNetTenant, SrcCountry, DstCountry, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), DstLargeCommunitiesASN, DstLargeCommunitiesLocalData1, DstLargeCommunitiesLocalData2) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_IMIHFOFXF6RYCYCTMQETKFYVCU_raw WHERE length(_error) = 0
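The `flows_*_raw_consumer` views above rebuild the 128-bit `DstLargeCommunities` values from the three parallel `UInt32` arrays carried in the protobuf message, packing each triple as `(asn << 64) + (localData1 << 32) + localData2`. As a minimal Go sketch of the same packing (the field names and the use of `math/big` are illustrative only; the actual protobuf fields are generated from the schema):

```go
// Hypothetical sketch: the inlet exports large communities as three parallel
// uint32 arrays (ASN, LocalData1, LocalData2); the consumer view packs each
// triple into a UInt128 as (asn << 64) + (localData1 << 32) + localData2.
package main

import (
	"fmt"
	"math/big"
)

// packLargeCommunity mirrors the arrayMap expression used in the
// flows_*_raw_consumer views above.
func packLargeCommunity(asn, localData1, localData2 uint32) *big.Int {
	v := new(big.Int).SetUint64(uint64(asn))
	v.Lsh(v, 64)
	v.Add(v, new(big.Int).Lsh(new(big.Int).SetUint64(uint64(localData1)), 32))
	v.Add(v, new(big.Int).SetUint64(uint64(localData2)))
	return v
}

func main() {
	// Large community 64497:100:200, as ClickHouse would store it in
	// DstLargeCommunities (Array(UInt128)).
	fmt.Println(packLargeCommunity(64497, 100, 200))
}
```

Shipping three flat arrays keeps the protobuf message to standard scalar types; ClickHouse recombines them into `Array(UInt128)` at insert time in the consumer view.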

View File

@@ -14,7 +14,7 @@ import (
"akvorado/common/helpers"
"akvorado/common/kafka"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
func TestTopicCreation(t *testing.T) {
@@ -22,7 +22,7 @@ func TestTopicCreation(t *testing.T) {
rand.Seed(time.Now().UnixMicro())
topicName := fmt.Sprintf("test-topic-%d", rand.Int())
expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion)
expectedTopicName := fmt.Sprintf("%s-%s", topicName, schema.Flows.ProtobufMessageHash())
retentionMs := "76548"
segmentBytes := "107374184"
segmentBytes2 := "10737184"
@@ -96,7 +96,7 @@ func TestTopicMorePartitions(t *testing.T) {
rand.Seed(time.Now().UnixMicro())
topicName := fmt.Sprintf("test-topic-%d", rand.Int())
expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion)
expectedTopicName := fmt.Sprintf("%s-%s", topicName, schema.Flows.ProtobufMessageHash())
configuration := DefaultConfiguration()
configuration.Topic = topicName

View File

@@ -12,7 +12,7 @@ import (
"akvorado/common/kafka"
"akvorado/common/reporter"
"akvorado/inlet/flow"
"akvorado/common/schema"
)
// Component represents the Kafka configurator.
@@ -39,7 +39,7 @@ func New(r *reporter.Reporter, config Configuration) (*Component, error) {
config: config,
kafkaConfig: kafkaConfig,
kafkaTopic: fmt.Sprintf("%s-v%d", config.Topic, flow.CurrentSchemaVersion),
kafkaTopic: fmt.Sprintf("%s-%s", config.Topic, schema.Flows.ProtobufMessageHash()),
}, nil
}
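The test and component changes above replace the fixed `-v%d` topic suffix with one derived from `schema.Flows.ProtobufMessageHash()`, so the Kafka topic and the ClickHouse `kafka_schema` name (e.g. `flow-IMIHFOFXF6RYCYCTMQETKFYVCU.proto`) change whenever the generated protobuf definition changes. A minimal sketch of such a hash-based suffix, assuming a SHA-256 digest truncated to 128 bits and encoded as unpadded base32 (the exact algorithm inside `ProtobufMessageHash` is not shown in this diff):

```go
// Hypothetical sketch of a schema-derived suffix. The real implementation is
// schema.Flows.ProtobufMessageHash() in akvorado/common/schema; the choice of
// SHA-256 truncated to 128 bits and unpadded base32 below is an assumption.
package main

import (
	"crypto/sha256"
	"encoding/base32"
	"fmt"
)

// protobufMessageHash derives a short, stable identifier from the rendered
// .proto definition: any change to the schema yields a different suffix.
func protobufMessageHash(protoDefinition string) string {
	digest := sha256.Sum256([]byte(protoDefinition))
	enc := base32.StdEncoding.WithPadding(base32.NoPadding)
	return enc.EncodeToString(digest[:16]) // 16 bytes -> 26 base32 characters
}

func main() {
	hash := protobufMessageHash("message FlowMessage { /* rendered schema */ }")
	fmt.Printf("flows-%s\n", hash)                            // Kafka topic name
	fmt.Printf("flow-%s.proto:FlowMessagev%s\n", hash, hash)  // ClickHouse kafka_schema
}
```

Deriving the suffix from the schema content presumably serves the same purpose as the old version number: a different schema lands on a new topic and raw table instead of mixing incompatible messages on an existing one.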