Do GeoIP enrichment in ClickHouse instead of the inlet

One solution to https://github.com/akvorado/akvorado/issues/62

Commit 87a57bf82e (parent 22814c4647), committed by Vincent Bernat.
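The gist of the change, for orientation: the inlet stops querying the GeoIP databases for each flow; the orchestrator now loads them and feeds ASN, country, state and city into the ClickHouse networks dictionary, and the corresponding columns are filled at insert time with dictGetOrDefault. A minimal sketch of the column-definition pattern this introduces, mirroring the schema hunks further down (field names are those of akvorado/common/schema as shown in this diff):

	// SrcAS keeps the AS reported in the flow and falls back to the networks
	// dictionary (now fed with GeoIP data) when the flow reports AS 0.
	{
		Key:                     ColumnSrcAS,
		ClickHouseType:          "UInt32",
		ClickHouseGenerateFrom:  fmt.Sprintf("if(SrcAS = 0, dictGetOrDefault('%s', 'asn', SrcAddr, 0), SrcAS)", DictionaryNetworks),
		ClickHouseSelfGenerated: true, // the expression reads the column itself
	},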
Makefile (4 changed lines)
@@ -208,3 +208,7 @@ help:
.PHONY: version
version:
	@echo $(VERSION)

.PHONY: docker
docker:
	docker build -f docker/Dockerfile -t ghcr.io/akvorado/akvorado:latest .
cmd/inlet.go (11 changed lines)

@@ -18,7 +18,6 @@ import (
	"akvorado/common/schema"
	"akvorado/inlet/core"
	"akvorado/inlet/flow"
	"akvorado/inlet/geoip"
	"akvorado/inlet/kafka"
	"akvorado/inlet/metadata"
	"akvorado/inlet/metadata/provider/snmp"

@@ -33,7 +32,6 @@ type InletConfiguration struct {
	Flow flow.Configuration
	Metadata metadata.Configuration
	Routing routing.Configuration
	GeoIP geoip.Configuration
	Kafka kafka.Configuration
	Core core.Configuration
	Schema schema.Configuration

@@ -47,7 +45,6 @@ func (c *InletConfiguration) Reset() {
	Flow: flow.DefaultConfiguration(),
	Metadata: metadata.DefaultConfiguration(),
	Routing: routing.DefaultConfiguration(),
	GeoIP: geoip.DefaultConfiguration(),
	Kafka: kafka.DefaultConfiguration(),
	Core: core.DefaultConfiguration(),
	Schema: schema.DefaultConfiguration(),

@@ -130,12 +127,6 @@ func inletStart(r *reporter.Reporter, config InletConfiguration, checkOnly bool)
	if err != nil {
		return fmt.Errorf("unable to initialize routing component: %w", err)
	}
	geoipComponent, err := geoip.New(r, config.GeoIP, geoip.Dependencies{
		Daemon: daemonComponent,
	})
	if err != nil {
		return fmt.Errorf("unable to initialize GeoIP component: %w", err)
	}
	kafkaComponent, err := kafka.New(r, config.Kafka, kafka.Dependencies{
		Daemon: daemonComponent,
		Schema: schemaComponent,

@@ -148,7 +139,6 @@ func inletStart(r *reporter.Reporter, config InletConfiguration, checkOnly bool)
	Flow: flowComponent,
	Metadata: metadataComponent,
	Routing: routingComponent,
	GeoIP: geoipComponent,
	Kafka: kafkaComponent,
	HTTP: httpComponent,
	Schema: schemaComponent,

@@ -171,7 +161,6 @@ func inletStart(r *reporter.Reporter, config InletConfiguration, checkOnly bool)
	httpComponent,
	metadataComponent,
	routingComponent,
	geoipComponent,
	kafkaComponent,
	coreComponent,
	flowComponent,
@@ -15,6 +15,7 @@ import (
	"akvorado/common/schema"
	"akvorado/orchestrator"
	"akvorado/orchestrator/clickhouse"
	"akvorado/orchestrator/clickhouse/geoip"
	"akvorado/orchestrator/kafka"
)

@@ -130,11 +131,20 @@ func orchestratorStart(r *reporter.Reporter, config OrchestratorConfiguration, c
	if err != nil {
		return fmt.Errorf("unable to initialize ClickHouse component: %w", err)
	}

	geoipComponent, err := geoip.New(r, config.ClickHouse.GeoIP, geoip.Dependencies{
		Daemon: daemonComponent,
	})
	if err != nil {
		return fmt.Errorf("unable to initialize GeoIP component: %w", err)
	}

	clickhouseComponent, err := clickhouse.New(r, config.ClickHouse, clickhouse.Dependencies{
		Daemon: daemonComponent,
		HTTP: httpComponent,
		ClickHouse: clickhouseDBComponent,
		Schema: schemaComponent,
		GeoIP: geoipComponent,
	})
	if err != nil {
		return fmt.Errorf("unable to initialize clickhouse component: %w", err)

@@ -166,6 +176,7 @@ func orchestratorStart(r *reporter.Reporter, config OrchestratorConfiguration, c

	// Start all the components.
	components := []interface{}{
		geoipComponent,
		httpComponent,
		clickhouseDBComponent,
		clickhouseComponent,
@@ -16,6 +16,5 @@ paths:
|
||||
inlet.0.core.asnproviders:
|
||||
- flow
|
||||
- routing
|
||||
- geoip
|
||||
inlet.0.core.netproviders:
|
||||
- routing
|
||||
|
||||
@@ -9,6 +9,5 @@ inlet:
|
||||
asn-providers:
|
||||
- flow
|
||||
- bmp
|
||||
- geoip
|
||||
net-providers:
|
||||
- bmp
|
||||
|
||||
@@ -2,26 +2,42 @@
|
||||
paths:
|
||||
clickhouse.networks:
|
||||
192.0.2.0/24:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: customers
|
||||
region: ""
|
||||
role: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
203.0.113.0/24:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: servers
|
||||
region: ""
|
||||
role: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
2a01:db8:cafe:1::/64:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: customers
|
||||
region: ""
|
||||
role: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
2a01:db8:cafe:2::/64:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: servers
|
||||
region: ""
|
||||
role: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
---
|
||||
paths:
|
||||
inlet.0.core.asnproviders:
|
||||
- geoip
|
||||
- routing
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
---
|
||||
paths:
|
||||
inlet.0.geoip:
|
||||
asndatabase: /usr/share/GeoIP/GeoLite2-ASN.mmdb
|
||||
geodatabase: /usr/share/GeoIP/GeoLite2-Country.mmdb
|
||||
optional: false
|
||||
@@ -1,5 +0,0 @@
|
||||
---
|
||||
inlet:
|
||||
geoip:
|
||||
asn-database: /usr/share/GeoIP/GeoLite2-ASN.mmdb
|
||||
country-database: /usr/share/GeoIP/GeoLite2-Country.mmdb
|
||||
@@ -2,28 +2,44 @@
|
||||
paths:
|
||||
clickhouse.networks:
|
||||
192.0.2.0/24:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: ipv4-customers
|
||||
role: customers
|
||||
region: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
203.0.113.0/24:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: ipv4-servers
|
||||
role: servers
|
||||
region: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
2a01:db8:cafe:1::/64:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: ipv6-customers
|
||||
role: customers
|
||||
region: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
2a01:db8:cafe:2::/64:
|
||||
asn: 0
|
||||
city: ""
|
||||
country: ""
|
||||
name: ipv6-servers
|
||||
role: servers
|
||||
region: ""
|
||||
site: ""
|
||||
state: ""
|
||||
tenant: ""
|
||||
kafka.brokers:
|
||||
- kafka:9092
|
||||
|
||||
@@ -57,25 +57,69 @@ func (sm *SubnetMap[V]) ToMap() map[string]V {
	return output
}

// Set inserts the given key k into the SubnetMap, replacing any existing value if it exists.
func (sm *SubnetMap[V]) Set(k string, v V) error {
	subnetK, err := SubnetMapParseKey(k)
	if err != nil {
		return err
	}
	_, ipNet, err := net.ParseCIDR(subnetK)
	if err != nil {
		// Should not happen
		return err
	}
	_, bits := ipNet.Mask.Size()
	if bits != 128 {
		return fmt.Errorf("%q is not an IPv6 subnet", ipNet)
	}
	plen, _ := ipNet.Mask.Size()
	sm.tree.Set(patricia.NewIPv6Address(ipNet.IP.To16(), uint(plen)), v)
	return nil
}

// Update inserts the given key k into the SubnetMap, calling updateFunc with the existing value.
func (sm *SubnetMap[V]) Update(k string, v V, updateFunc tree.UpdatesFunc[V]) error {
	subnetK, err := SubnetMapParseKey(k)
	if err != nil {
		return err
	}
	_, ipNet, err := net.ParseCIDR(subnetK)
	if err != nil {
		// Should not happen
		return err
	}
	_, bits := ipNet.Mask.Size()
	if bits != 128 {
		return fmt.Errorf("%q is not an IPv6 subnet", ipNet)
	}
	plen, _ := ipNet.Mask.Size()
	sm.tree.SetOrUpdate(patricia.NewIPv6Address(ipNet.IP.To16(), uint(plen)), v, updateFunc)
	return nil
}

// Iter enables iteration of the SubnetMap, calling f for every entry. If f returns an error, the iteration is aborted.
func (sm *SubnetMap[V]) Iter(f func(address patricia.IPv6Address, tags [][]V) error) error {
	iter := sm.tree.Iterate()
	for iter.Next() {
		if err := f(iter.Address(), iter.TagsFromRoot()); err != nil {
			return err
		}
	}
	return nil
}

// NewSubnetMap creates a subnetmap from a map. Unlike user-provided
// configuration, this function is stricter and require everything to
// be IPv6 subnets.
func NewSubnetMap[V any](from map[string]V) (*SubnetMap[V], error) {
	trie := tree.NewTreeV6[V]()
	for k, v := range from {
		_, ipNet, err := net.ParseCIDR(k)
		if err != nil {
			// Should not happen
			return nil, err
		}
		_, bits := ipNet.Mask.Size()
		if bits != 128 {
			return nil, fmt.Errorf("%q is not an IPv6 subnet", ipNet)
		}
		plen, _ := ipNet.Mask.Size()
		trie.Set(patricia.NewIPv6Address(ipNet.IP.To16(), uint(plen)), v)
	trie := &SubnetMap[V]{tree.NewTreeV6[V]()}
	if from == nil {
		return trie, nil
	}
	return &SubnetMap[V]{trie}, nil
	for k, v := range from {
		trie.Set(k, v)
	}
	return trie, nil
}

// MustNewSubnetMap creates a subnet from a map and panic in case of a
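For reference, a minimal usage sketch of the SubnetMap methods added above. This assumes the helpers live in akvorado/common/helpers, as the rest of the diff suggests; error handling is trimmed and the values are made up:

	package main

	import (
		"fmt"

		"akvorado/common/helpers"
		"github.com/kentik/patricia"
	)

	func main() {
		sm, _ := helpers.NewSubnetMap[string](nil)
		_ = sm.Set("192.0.2.0/24", "customers")       // keys go through SubnetMapParseKey
		_ = sm.Set("2a01:db8:cafe:1::/64", "servers")
		_ = sm.Iter(func(addr patricia.IPv6Address, tags [][]string) error {
			fmt.Println(addr, tags) // called once per stored prefix
			return nil
		})
	}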
@@ -22,6 +22,10 @@ var prettyC = pretty.Config{
|
||||
IncludeUnexported: false,
|
||||
}
|
||||
|
||||
func formatByte(v interface{}) string {
|
||||
return fmt.Sprintf("0x%x", v)
|
||||
}
|
||||
|
||||
func defaultPrettyFormatters() map[reflect.Type]interface{} {
|
||||
result := map[reflect.Type]interface{}{
|
||||
reflect.TypeOf(net.IP{}): fmt.Sprint,
|
||||
@@ -30,6 +34,7 @@ func defaultPrettyFormatters() map[reflect.Type]interface{} {
|
||||
reflect.TypeOf(SubnetMap[string]{}): fmt.Sprint,
|
||||
reflect.TypeOf(SubnetMap[uint]{}): fmt.Sprint,
|
||||
reflect.TypeOf(SubnetMap[uint16]{}): fmt.Sprint,
|
||||
reflect.TypeOf(byte(0)): formatByte,
|
||||
}
|
||||
for t, fn := range nonDefaultPrettyFormatters {
|
||||
result[t] = fn
|
||||
|
||||
@@ -70,7 +70,7 @@ func (schema Schema) clickhouseIterate(fn func(Column), options ...ClickHouseTab
|
||||
if slices.Contains(options, ClickHouseSkipMainOnlyColumns) && column.ClickHouseMainOnly {
|
||||
continue
|
||||
}
|
||||
if slices.Contains(options, ClickHouseSkipGeneratedColumns) && column.ClickHouseGenerateFrom != "" {
|
||||
if slices.Contains(options, ClickHouseSkipGeneratedColumns) && column.ClickHouseGenerateFrom != "" && !column.ClickHouseSelfGenerated {
|
||||
continue
|
||||
}
|
||||
if slices.Contains(options, ClickHouseSkipTransformColumns) && column.ClickHouseTransformFrom != nil {
|
||||
|
||||
@@ -66,6 +66,17 @@ func (ib *InterfaceBoundary) UnmarshalText(input []byte) error {
|
||||
return errUnknownInterfaceBoundary
|
||||
}
|
||||
|
||||
const (
|
||||
// DictionaryASNs is the name of the asns clickhouse dictionary.
|
||||
DictionaryASNs string = "asns"
|
||||
// DictionaryProtocols is the name of the protocols clickhouse dictionary.
|
||||
DictionaryProtocols string = "protocols"
|
||||
// DictionaryICMP is the name of the icmp clickhouse dictionary.
|
||||
DictionaryICMP string = "icmp"
|
||||
// DictionaryNetworks is the name of the networks clickhouse dictionary.
|
||||
DictionaryNetworks string = "networks"
|
||||
)
|
||||
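These constants replace dictionary names that were previously hard-coded as string literals in the generated SQL; the remaining hunks swap the literals for the constants. A one-line illustration of the pattern and the SQL it expands to:

	// expr == "dictGetOrDefault('networks', 'name', SrcAddr, '')"
	expr := fmt.Sprintf("dictGetOrDefault('%s', 'name', SrcAddr, '')", DictionaryNetworks)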
|
||||
// revive:disable
|
||||
const (
|
||||
ColumnTimeReceived ColumnKey = iota + 1
|
||||
@@ -108,6 +119,10 @@ const (
|
||||
ColumnDstNetTenant
|
||||
ColumnSrcCountry
|
||||
ColumnDstCountry
|
||||
ColumnSrcGeoState
|
||||
ColumnDstGeoState
|
||||
ColumnSrcGeoCity
|
||||
ColumnDstGeoCity
|
||||
ColumnDstASPath
|
||||
ColumnDst1stAS
|
||||
ColumnDst2ndAS
|
||||
@@ -228,69 +243,115 @@ func flows() Schema {
|
||||
ELSE ''
|
||||
END`,
|
||||
},
|
||||
{Key: ColumnSrcAS, ClickHouseType: "UInt32"},
|
||||
{
|
||||
Key: ColumnSrcAS,
|
||||
ClickHouseType: "UInt32",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("if(SrcAS = 0, dictGetOrDefault('%s', 'asn', SrcAddr, 0), SrcAS)", DictionaryNetworks),
|
||||
ClickHouseSelfGenerated: true,
|
||||
},
|
||||
{
|
||||
Key: ColumnDstAS,
|
||||
ClickHouseType: "UInt32",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("if(DstAS = 0, dictGetOrDefault('%s', 'asn', DstAddr, 0), DstAS)", DictionaryNetworks),
|
||||
ClickHouseSelfGenerated: true,
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcNetName,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'name', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'name', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstNetName,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'name', DstAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'name', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcNetRole,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'role', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'role', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstNetRole,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'role', DstAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'role', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcNetSite,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'site', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'site', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstNetSite,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'site', DstAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'site', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcNetRegion,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'region', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'region', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstNetRegion,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'region', DstAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'region', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcNetTenant,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'tenant', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'tenant', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstNetTenant,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'tenant', DstAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'tenant', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{Key: ColumnSrcVlan, ParserType: "uint", ClickHouseType: "UInt16", Disabled: true, Group: ColumnGroupL2},
|
||||
{Key: ColumnSrcCountry, ParserType: "string", ClickHouseType: "FixedString(2)"},
|
||||
{
|
||||
Key: ColumnSrcCountry,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "FixedString(2)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'country', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstCountry,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "FixedString(2)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'country', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcGeoCity,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'city', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstGeoCity,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'city', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnSrcGeoState,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'state', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstGeoState,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'state', DstAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{
|
||||
Key: ColumnDstASPath,
|
||||
ClickHouseMainOnly: true,
|
||||
@@ -441,7 +502,7 @@ END`,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseAlias: `if(Proto = 1, ` +
|
||||
`dictGetOrDefault('icmp', 'name', tuple(Proto, ICMPv4Type, ICMPv4Code), ` +
|
||||
fmt.Sprintf(`dictGetOrDefault('%s', 'name', tuple(Proto, ICMPv4Type, ICMPv4Code), `, DictionaryICMP) +
|
||||
`concat(toString(ICMPv4Type), '/', toString(ICMPv4Code))), '')`,
|
||||
},
|
||||
{
|
||||
@@ -452,7 +513,7 @@ END`,
|
||||
ParserType: "string",
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseAlias: `if(Proto = 58, ` +
|
||||
`dictGetOrDefault('icmp', 'name', tuple(Proto, ICMPv6Type, ICMPv6Code), ` +
|
||||
fmt.Sprintf(`dictGetOrDefault('%s', 'name', tuple(Proto, ICMPv6Type, ICMPv6Code), `, DictionaryICMP) +
|
||||
`concat(toString(ICMPv6Type), '/', toString(ICMPv6Code))), '')`,
|
||||
},
|
||||
{
|
||||
@@ -509,6 +570,12 @@ END`,
|
||||
}.finalize()
|
||||
}
|
||||
|
||||
func (column *Column) shouldBeProto() bool {
|
||||
return column.ClickHouseTransformFrom == nil &&
|
||||
(column.ClickHouseGenerateFrom == "" || column.ClickHouseSelfGenerated) &&
|
||||
column.ClickHouseAlias == ""
|
||||
}
|
||||
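The new helper and the ClickHouseSelfGenerated flag work together: a column whose generated expression only derives data from another column (such as SrcNetName) never needs to be serialized by the inlet, while a self-generated column such as SrcAS must still be shipped because its expression reads the column's own raw value. An illustrative sketch with the expressions abridged:

	srcAS := Column{ClickHouseGenerateFrom: "if(SrcAS = 0, ...)", ClickHouseSelfGenerated: true}
	srcNetName := Column{ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'name', SrcAddr, '')"}
	fmt.Println(srcAS.shouldBeProto())      // true: the inlet keeps sending SrcAS, ClickHouse only patches zeros
	fmt.Println(srcNetName.shouldBeProto()) // false: fully derived in ClickHouse, never put in the protobuf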
|
||||
func (schema Schema) finalize() Schema {
|
||||
ncolumns := []Column{}
|
||||
for _, column := range schema.columns {
|
||||
@@ -596,9 +663,7 @@ func (schema Schema) finalize() Schema {
|
||||
}
|
||||
for _, column := range pcolumns {
|
||||
if column.ProtobufIndex == 0 {
|
||||
if column.ClickHouseTransformFrom != nil ||
|
||||
column.ClickHouseGenerateFrom != "" ||
|
||||
column.ClickHouseAlias != "" {
|
||||
if !column.shouldBeProto() {
|
||||
column.ProtobufIndex = -1
|
||||
continue
|
||||
}
|
||||
@@ -608,9 +673,7 @@ func (schema Schema) finalize() Schema {
|
||||
}
|
||||
|
||||
if column.ProtobufType == 0 &&
|
||||
column.ClickHouseTransformFrom == nil &&
|
||||
column.ClickHouseGenerateFrom == "" &&
|
||||
column.ClickHouseAlias == "" {
|
||||
column.shouldBeProto() {
|
||||
switch column.ClickHouseType {
|
||||
case "String", "LowCardinality(String)", "FixedString(2)":
|
||||
column.ProtobufType = protoreflect.StringKind
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
package schema
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/netip"
|
||||
"strings"
|
||||
"testing"
|
||||
@@ -43,9 +44,8 @@ func TestProtobufDefinition(t *testing.T) {
|
||||
{
|
||||
Key: ColumnSrcNetName,
|
||||
ClickHouseType: "LowCardinality(String)",
|
||||
ClickHouseGenerateFrom: "dictGetOrDefault('networks', 'name', SrcAddr, '')",
|
||||
ClickHouseGenerateFrom: fmt.Sprintf("dictGetOrDefault('%s', 'name', SrcAddr, '')", DictionaryNetworks),
|
||||
},
|
||||
{Key: ColumnSrcCountry, ClickHouseType: "FixedString(2)"},
|
||||
{
|
||||
Key: ColumnDstASPath,
|
||||
ClickHouseType: "Array(UInt32)",
|
||||
@@ -81,7 +81,7 @@ func TestProtobufDefinition(t *testing.T) {
|
||||
expected := `
|
||||
syntax = "proto3";
|
||||
|
||||
message FlowMessagevLH2TTFF7P352DSYYCJYWFCXHAM {
|
||||
message FlowMessagev5WRSGBXQDXZSUHZQE6QEHLI5JM {
|
||||
enum Boundary { UNDEFINED = 0; EXTERNAL = 1; INTERNAL = 2; }
|
||||
|
||||
uint64 TimeReceived = 1;
|
||||
@@ -94,17 +94,15 @@ message FlowMessagevLH2TTFF7P352DSYYCJYWFCXHAM {
|
||||
uint32 DstNetMask = 8;
|
||||
uint32 SrcAS = 9;
|
||||
uint32 DstAS = 10;
|
||||
string SrcCountry = 11;
|
||||
string DstCountry = 12;
|
||||
repeated uint32 DstASPath = 13;
|
||||
repeated uint32 DstLargeCommunitiesASN = 14;
|
||||
repeated uint32 DstLargeCommunitiesLocalData1 = 15;
|
||||
repeated uint32 DstLargeCommunitiesLocalData2 = 16;
|
||||
string InIfName = 17;
|
||||
string OutIfName = 18;
|
||||
Boundary InIfBoundary = 19;
|
||||
Boundary OutIfBoundary = 20;
|
||||
uint64 Bytes = 21;
|
||||
repeated uint32 DstASPath = 11;
|
||||
repeated uint32 DstLargeCommunitiesASN = 12;
|
||||
repeated uint32 DstLargeCommunitiesLocalData1 = 13;
|
||||
repeated uint32 DstLargeCommunitiesLocalData2 = 14;
|
||||
string InIfName = 15;
|
||||
string OutIfName = 16;
|
||||
Boundary InIfBoundary = 17;
|
||||
Boundary OutIfBoundary = 18;
|
||||
uint64 Bytes = 19;
|
||||
}
|
||||
`
|
||||
if diff := helpers.Diff(strings.Split(got, "\n"), strings.Split(expected, "\n")); diff != "" {
|
||||
@@ -123,7 +121,6 @@ func TestProtobufMarshal(t *testing.T) {
|
||||
c.ProtobufAppendVarint(bf, ColumnBytes, 200)
|
||||
c.ProtobufAppendVarint(bf, ColumnPackets, 300)
|
||||
c.ProtobufAppendVarint(bf, ColumnBytes, 300) // duplicate!
|
||||
c.ProtobufAppendBytes(bf, ColumnDstCountry, []byte("FR"))
|
||||
|
||||
got := c.ProtobufMarshal(bf)
|
||||
|
||||
@@ -132,25 +129,77 @@ func TestProtobufMarshal(t *testing.T) {
|
||||
t.Fatalf("ProtobufMarshal() produced an incorrect size: %d + %d != %d", size, n, len(got))
|
||||
}
|
||||
|
||||
// text schema definition for reference
|
||||
// syntax = "proto3";
|
||||
|
||||
// message FlowMessagevLAABIGYMRYZPTGOYIIFZNYDEQM {
|
||||
// enum Boundary { UNDEFINED = 0; EXTERNAL = 1; INTERNAL = 2; }
|
||||
|
||||
// uint64 TimeReceived = 1;
|
||||
// uint64 SamplingRate = 2;
|
||||
// bytes ExporterAddress = 3;
|
||||
// string ExporterName = 4;
|
||||
// string ExporterGroup = 5;
|
||||
// string ExporterRole = 6;
|
||||
// string ExporterSite = 7;
|
||||
// string ExporterRegion = 8;
|
||||
// string ExporterTenant = 9;
|
||||
// bytes SrcAddr = 10;
|
||||
// bytes DstAddr = 11;
|
||||
// uint32 SrcNetMask = 12;
|
||||
// uint32 DstNetMask = 13;
|
||||
// uint32 SrcAS = 14;
|
||||
// uint32 DstAS = 15;
|
||||
// repeated uint32 DstASPath = 18;
|
||||
// repeated uint32 DstCommunities = 19;
|
||||
// repeated uint32 DstLargeCommunitiesASN = 20;
|
||||
// repeated uint32 DstLargeCommunitiesLocalData1 = 21;
|
||||
// repeated uint32 DstLargeCommunitiesLocalData2 = 22;
|
||||
// string InIfName = 23;
|
||||
// string OutIfName = 24;
|
||||
// string InIfDescription = 25;
|
||||
// string OutIfDescription = 26;
|
||||
// uint32 InIfSpeed = 27;
|
||||
// uint32 OutIfSpeed = 28;
|
||||
// string InIfConnectivity = 29;
|
||||
// string OutIfConnectivity = 30;
|
||||
// string InIfProvider = 31;
|
||||
// string OutIfProvider = 32;
|
||||
// Boundary InIfBoundary = 33;
|
||||
// Boundary OutIfBoundary = 34;
|
||||
// uint32 EType = 35;
|
||||
// uint32 Proto = 36;
|
||||
// uint32 SrcPort = 37;
|
||||
// uint32 DstPort = 38;
|
||||
// uint64 Bytes = 39;
|
||||
// uint64 Packets = 40;
|
||||
// uint32 ForwardingStatus = 41;
|
||||
// }
|
||||
// to check: https://protobuf-decoder.netlify.app/
|
||||
t.Run("compare as bytes", func(t *testing.T) {
|
||||
expected := []byte{
|
||||
// DstAS
|
||||
// 15: 65000
|
||||
0x78, 0xe8, 0xfb, 0x03,
|
||||
// 41: 200
|
||||
0xc8, 0x02, 0xc8, 0x01,
|
||||
// 42: 300
|
||||
0xd0, 0x02, 0xac, 0x02,
|
||||
// 19: FR
|
||||
0x9a, 0x01, 0x02, 0x46, 0x52,
|
||||
// Bytes
|
||||
// 39: 200
|
||||
0xb8, 0x02, 0xc8, 0x01,
|
||||
// Packet
|
||||
// 40: 300
|
||||
0xc0, 0x02, 0xac, 0x02,
|
||||
// TimeReceived
|
||||
// 1: 1000
|
||||
0x08, 0xe8, 0x07,
|
||||
// SamplingRate
|
||||
// 2: 20000
|
||||
0x10, 0xa0, 0x9c, 0x01,
|
||||
// ExporterAddress
|
||||
// 3: ::ffff:203.0.113.14
|
||||
0x1a, 0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xcb, 0x0, 0x71, 0xe,
|
||||
}
|
||||
if diff := helpers.Diff(got[n:], expected); diff != "" {
|
||||
t.Logf("got: %v", got)
|
||||
|
||||
t.Fatalf("ProtobufMarshal() (-got, +want):\n%s", diff)
|
||||
}
|
||||
})
|
||||
@@ -163,9 +212,8 @@ func TestProtobufMarshal(t *testing.T) {
|
||||
ExporterAddress: exporterAddress,
|
||||
DstAS: 65000,
|
||||
ProtobufDebug: map[ColumnKey]interface{}{
|
||||
ColumnBytes: 200,
|
||||
ColumnPackets: 300,
|
||||
ColumnDstCountry: "FR",
|
||||
ColumnBytes: 200,
|
||||
ColumnPackets: 300,
|
||||
},
|
||||
}
|
||||
if diff := helpers.Diff(got, expected); diff != "" {
|
||||
@@ -189,7 +237,6 @@ func BenchmarkProtobufMarshal(b *testing.B) {
|
||||
c.ProtobufAppendVarint(bf, ColumnPackets, 300)
|
||||
c.ProtobufAppendVarint(bf, ColumnBytes, 300) // duplicate!
|
||||
c.ProtobufAppendVarint(bf, ColumnSrcVlan, 1600) // disabled!
|
||||
c.ProtobufAppendBytes(bf, ColumnDstCountry, []byte("FR"))
|
||||
c.ProtobufMarshal(bf)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,6 +51,8 @@ type Column struct {
|
||||
ClickHouseTransformFrom []Column
|
||||
ClickHouseTransformTo string
|
||||
ClickHouseMainOnly bool
|
||||
// ClickHouseSelfGenerated identifies a column as being formatted using itself as source
|
||||
ClickHouseSelfGenerated bool
|
||||
|
||||
// ClickHouseMaterialized indicates that the column was materialized (and is not by default)
|
||||
ClickHouseMaterialized bool
|
||||
|
||||
@@ -24,6 +24,20 @@ clickhouse:
|
||||
orchestrator-url: http://akvorado-orchestrator:8080
|
||||
kafka:
|
||||
consumers: 4
|
||||
geoip:
|
||||
optional: true
|
||||
# When running on Docker, these paths are inside the container. By default,
|
||||
# IPinfo databases are used. (https://ipinfo.io/)
|
||||
asn-database:
|
||||
- /usr/share/GeoIP/asn.mmdb
|
||||
geo-database:
|
||||
- /usr/share/GeoIP/country.mmdb
|
||||
# If you want to use MaxmindDB, check `.env`, `docker-compose-maxmind.yml` and
|
||||
# update these paths:
|
||||
#asn-database:
|
||||
# - /usr/share/GeoIP/GeoLite2-ASN.mmdb
|
||||
#geo-database:
|
||||
# - /usr/share/GeoIP/GeoLite2-Country.mmdb
|
||||
servers:
|
||||
- clickhouse:9000
|
||||
prometheus-endpoint: /metrics
|
||||
|
||||
@@ -1,16 +1,6 @@
|
||||
---
|
||||
kafka:
|
||||
compression-codec: zstd
|
||||
geoip:
|
||||
optional: true
|
||||
# When running on Docker, these paths are inside the container. By default,
|
||||
# IPinfo databases are used. (https://ipinfo.io/)
|
||||
asn-database: /usr/share/GeoIP/asn.mmdb
|
||||
geo-database: /usr/share/GeoIP/country.mmdb
|
||||
# If you want to use MaxmindDB, check `.env`, `docker-compose-maxmind.yml` and
|
||||
# update these paths:
|
||||
#asn-database: /usr/share/GeoIP/GeoLite2-ASN.mmdb
|
||||
#geo-database: /usr/share/GeoIP/GeoLite2-Country.mmdb
|
||||
metadata:
|
||||
workers: 10
|
||||
provider:
|
||||
|
||||
@@ -56,6 +56,10 @@ func TestConfigHandler(t *testing.T) {
|
||||
"DstNetTenant",
|
||||
"SrcCountry",
|
||||
"DstCountry",
|
||||
"SrcGeoCity",
|
||||
"DstGeoCity",
|
||||
"SrcGeoState",
|
||||
"DstGeoState",
|
||||
"DstASPath",
|
||||
"Dst1stAS",
|
||||
"Dst2ndAS",
|
||||
|
||||
@@ -249,7 +249,7 @@ LIMIT 20`
|
||||
}
|
||||
sqlQuery := fmt.Sprintf(`
|
||||
SELECT label, detail FROM (
|
||||
SELECT concat('AS', toString(%s)) AS label, dictGet('asns', 'name', %s) AS detail, 1 AS rank
|
||||
SELECT concat('AS', toString(%s)) AS label, dictGet('%s', 'name', %s) AS detail, 1 AS rank
|
||||
FROM flows
|
||||
WHERE TimeReceived > date_sub(minute, 1, now())
|
||||
AND detail != ''
|
||||
@@ -264,7 +264,7 @@ UNION DISTINCT
|
||||
ORDER BY positionCaseInsensitive(name, $1) ASC, asn ASC
|
||||
LIMIT 20
|
||||
) GROUP BY label, detail ORDER BY MIN(rank) ASC, MIN(rowNumberInBlock()) ASC LIMIT 20`,
|
||||
columnName, columnName, columnName)
|
||||
columnName, schema.DictionaryASNs, columnName, columnName)
|
||||
if err := c.d.ClickHouseDB.Conn.Select(ctx, &results, sqlQuery, input.Prefix); err != nil {
|
||||
c.r.Err(err).Msg("unable to query database")
|
||||
break
|
||||
|
||||
@@ -102,15 +102,15 @@ func (qc Column) ToSQLSelect(sch *schema.Component) string {
|
||||
switch key {
|
||||
// Special cases
|
||||
case schema.ColumnSrcAS, schema.ColumnDstAS, schema.ColumnDst1stAS, schema.ColumnDst2ndAS, schema.ColumnDst3rdAS:
|
||||
strValue = fmt.Sprintf(`concat(toString(%s), ': ', dictGetOrDefault('asns', 'name', %s, '???'))`,
|
||||
qc, qc)
|
||||
strValue = fmt.Sprintf(`concat(toString(%s), ': ', dictGetOrDefault('%s', 'name', %s, '???'))`,
|
||||
qc, schema.DictionaryASNs, qc)
|
||||
case schema.ColumnInIfBoundary, schema.ColumnOutIfBoundary:
|
||||
strValue = fmt.Sprintf(`toString(%s)`, qc.String())
|
||||
case schema.ColumnEType:
|
||||
strValue = fmt.Sprintf(`if(EType = %d, 'IPv4', if(EType = %d, 'IPv6', '???'))`,
|
||||
helpers.ETypeIPv4, helpers.ETypeIPv6)
|
||||
case schema.ColumnProto:
|
||||
strValue = `dictGetOrDefault('protocols', 'name', Proto, '???')`
|
||||
strValue = fmt.Sprintf(`dictGetOrDefault('%s', 'name', Proto, '???')`, schema.DictionaryProtocols)
|
||||
case schema.ColumnMPLSLabels:
|
||||
strValue = `arrayStringConcat(MPLSLabels, ' ')`
|
||||
case schema.ColumnDstASPath:
|
||||
|
||||
@@ -144,11 +144,11 @@ func (c *Component) widgetTopHandlerFunc(gc *gin.Context) {
|
||||
gc.JSON(http.StatusNotFound, gin.H{"message": "Unknown top request."})
|
||||
return
|
||||
case "src-as":
|
||||
selector = `concat(toString(SrcAS), ': ', dictGetOrDefault('asns', 'name', SrcAS, '???'))`
|
||||
selector = fmt.Sprintf(`concat(toString(SrcAS), ': ', dictGetOrDefault('%s', 'name', SrcAS, '???'))`, schema.DictionaryASNs)
|
||||
groupby = `SrcAS`
|
||||
filter = "AND InIfBoundary = 'external'"
|
||||
case "dst-as":
|
||||
selector = `concat(toString(DstAS), ': ', dictGetOrDefault('asns', 'name', DstAS, '???'))`
|
||||
selector = fmt.Sprintf(`concat(toString(DstAS), ': ', dictGetOrDefault('%s', 'name', DstAS, '???'))`, schema.DictionaryASNs)
|
||||
groupby = `DstAS`
|
||||
filter = "AND OutIfBoundary = 'external'"
|
||||
case "src-country":
|
||||
@@ -160,17 +160,17 @@ func (c *Component) widgetTopHandlerFunc(gc *gin.Context) {
|
||||
case "exporter":
|
||||
selector = "ExporterName"
|
||||
case "protocol":
|
||||
selector = `dictGetOrDefault('protocols', 'name', Proto, '???')`
|
||||
selector = fmt.Sprintf(`dictGetOrDefault('%s', 'name', Proto, '???')`, schema.DictionaryProtocols)
|
||||
groupby = `Proto`
|
||||
case "etype":
|
||||
selector = `if(equals(EType, 34525), 'IPv6', if(equals(EType, 2048), 'IPv4', '???'))`
|
||||
groupby = `EType`
|
||||
case "src-port":
|
||||
selector = `concat(dictGetOrDefault('protocols', 'name', Proto, '???'), '/', toString(SrcPort))`
|
||||
selector = fmt.Sprintf(`concat(dictGetOrDefault('%s', 'name', Proto, '???'), '/', toString(SrcPort))`, schema.DictionaryProtocols)
|
||||
groupby = `Proto, SrcPort`
|
||||
mainTableRequired = true
|
||||
case "dst-port":
|
||||
selector = `concat(dictGetOrDefault('protocols', 'name', Proto, '???'), '/', toString(DstPort))`
|
||||
selector = fmt.Sprintf(`concat(dictGetOrDefault('%s', 'name', Proto, '???'), '/', toString(DstPort))`, schema.DictionaryProtocols)
|
||||
groupby = `Proto, DstPort`
|
||||
mainTableRequired = true
|
||||
}
|
||||
|
||||
@@ -72,6 +72,10 @@ services:
|
||||
command: orchestrator /etc/akvorado/akvorado.yaml
|
||||
volumes:
|
||||
- ../config:/etc/akvorado:ro
|
||||
- akvorado-geoip:/usr/share/GeoIP:ro
|
||||
# If you prefer to use geo IP databases from host, comment the
|
||||
# above line and uncomment this one:
|
||||
# - /usr/share/GeoIP:/usr/share/GeoIP:ro
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
- traefik.http.routers.akvorado-orchestrator.entrypoints=private # it exposes configuration files with passwords
|
||||
@@ -103,10 +107,6 @@ services:
|
||||
restart: unless-stopped
|
||||
command: inlet http://akvorado-orchestrator:8080
|
||||
volumes:
|
||||
- akvorado-geoip:/usr/share/GeoIP:ro
|
||||
# If you prefer to use geo IP databases from host, comment the
|
||||
# above line and uncomment this one:
|
||||
# - /usr/share/GeoIP:/usr/share/GeoIP:ro
|
||||
- akvorado-run:/run/akvorado
|
||||
labels:
|
||||
- traefik.enable=true
|
||||
|
||||
go.mod (2 changed lines)

@@ -176,3 +176,5 @@ require (
	modernc.org/memory v1.5.0 // indirect
	modernc.org/sqlite v1.23.1 // indirect
)

replace github.com/kentik/patricia => github.com/netixx/patricia v0.0.0-20240221115530-83194fdd3ab9
go.sum (4 changed lines)
@@ -217,8 +217,6 @@ github.com/josharian/native v1.1.0/go.mod h1:7X/raswPFr05uY3HiLlYeyQntB6OO7E/d2C
|
||||
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/kentik/patricia v1.2.1 h1:+ZyPXnEiFLbmT1yZR0JRfRUuNXmxROXdzI8YiSpTx5w=
|
||||
github.com/kentik/patricia v1.2.1/go.mod h1:6jY40ESetsbfi04/S12iJlsiS6DYL2B2W+WAcqoDHtw=
|
||||
github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
@@ -266,6 +264,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||
github.com/netixx/patricia v0.0.0-20240221115530-83194fdd3ab9 h1:oppJfQ6WAhViRJjKv/WQCKjz2o4ZeIGzaDtLxqCD/MQ=
|
||||
github.com/netixx/patricia v0.0.0-20240221115530-83194fdd3ab9/go.mod h1:6jY40ESetsbfi04/S12iJlsiS6DYL2B2W+WAcqoDHtw=
|
||||
github.com/netsampler/goflow2/v2 v2.1.2 h1:jgzUC+xZ1B0T7iv1tyz+DFQKgWvwVIPFRdzc84XTX4g=
|
||||
github.com/netsampler/goflow2/v2 v2.1.2/go.mod h1:mDkDLl+uSFLq7aRuQ113+ZAJN0HdzCx/Dgf0wCmr+Cc=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
|
||||
@@ -45,7 +45,7 @@ func DefaultConfiguration() Configuration {
|
||||
ExporterClassifiers: []ExporterClassifierRule{},
|
||||
InterfaceClassifiers: []InterfaceClassifierRule{},
|
||||
ClassifierCacheDuration: 5 * time.Minute,
|
||||
ASNProviders: []ASNProvider{ASNProviderFlow, ASNProviderRouting, ASNProviderGeoIP},
|
||||
ASNProviders: []ASNProvider{ASNProviderFlow, ASNProviderRouting},
|
||||
NetProviders: []NetProvider{NetProviderFlow, NetProviderRouting},
|
||||
}
|
||||
}
|
||||
@@ -62,8 +62,6 @@ const (
|
||||
ASNProviderFlow ASNProvider = iota
|
||||
// ASNProviderFlowExceptPrivate uses the AS number embedded in flows, except if this is a private AS.
|
||||
ASNProviderFlowExceptPrivate
|
||||
// ASNProviderGeoIP pulls the AS number from a GeoIP database.
|
||||
ASNProviderGeoIP
|
||||
// ASNProviderRouting uses the AS number from BMP
|
||||
ASNProviderRouting
|
||||
// ASNProviderRoutingExceptPrivate uses the AS number from BMP, except if this is a private AS.
|
||||
@@ -73,7 +71,6 @@ const (
|
||||
var asnProviderMap = bimap.New(map[ASNProvider]string{
|
||||
ASNProviderFlow: "flow",
|
||||
ASNProviderFlowExceptPrivate: "flow-except-private",
|
||||
ASNProviderGeoIP: "geoip",
|
||||
ASNProviderRouting: "routing",
|
||||
ASNProviderRoutingExceptPrivate: "routing-except-private",
|
||||
})
|
||||
@@ -178,7 +175,7 @@ func ConfigurationUnmarshallerHook() mapstructure.DecodeHookFunc {
|
||||
oldValue := helpers.ElemOrIdentity(from.MapIndex(*oldKey))
|
||||
if oldValue.Kind() == reflect.Bool && oldValue.Bool() == true {
|
||||
from.SetMapIndex(reflect.ValueOf("asn-providers"),
|
||||
reflect.ValueOf([]ASNProvider{ASNProviderGeoIP}))
|
||||
reflect.ValueOf([]ASNProvider{ASNProviderRouting}))
|
||||
}
|
||||
from.SetMapIndex(*oldKey, reflect.Value{})
|
||||
}
|
||||
|
||||
@@ -50,7 +50,7 @@ func TestConfigurationUnmarshallerHook(t *testing.T) {
|
||||
}
|
||||
},
|
||||
Expected: Configuration{
|
||||
ASNProviders: []ASNProvider{ASNProviderGeoIP},
|
||||
ASNProviders: []ASNProvider{ASNProviderRouting},
|
||||
},
|
||||
SkipValidation: true,
|
||||
}, {
|
||||
@@ -69,11 +69,11 @@ func TestConfigurationUnmarshallerHook(t *testing.T) {
|
||||
Initial: func() interface{} { return Configuration{} },
|
||||
Configuration: func() interface{} {
|
||||
return gin.H{
|
||||
"asn-providers": []string{"flow-except-private", "geoip", "flow"},
|
||||
"asn-providers": []string{"flow-except-private", "routing", "flow"},
|
||||
}
|
||||
},
|
||||
Expected: Configuration{
|
||||
ASNProviders: []ASNProvider{ASNProviderFlowExceptPrivate, ASNProviderGeoIP, ASNProviderFlow},
|
||||
ASNProviders: []ASNProvider{ASNProviderFlowExceptPrivate, ASNProviderRouting, ASNProviderFlow},
|
||||
},
|
||||
SkipValidation: true,
|
||||
}, {
|
||||
|
||||
@@ -126,10 +126,8 @@ func (c *Component) enrichFlow(exporterIP netip.Addr, exporterStr string, flow *
	flow.NextHop = c.getNextHop(flow.NextHop, destRouting.NextHop)

	// set asns according to user config
	flow.SrcAS = c.getASNumber(flow.SrcAddr, flow.SrcAS, sourceRouting.ASN)
	flow.DstAS = c.getASNumber(flow.DstAddr, flow.DstAS, destRouting.ASN)
	c.d.Schema.ProtobufAppendBytes(flow, schema.ColumnSrcCountry, []byte(c.d.GeoIP.LookupCountry(flow.SrcAddr)))
	c.d.Schema.ProtobufAppendBytes(flow, schema.ColumnDstCountry, []byte(c.d.GeoIP.LookupCountry(flow.DstAddr)))
	flow.SrcAS = c.getASNumber(flow.SrcAS, sourceRouting.ASN)
	flow.DstAS = c.getASNumber(flow.DstAS, destRouting.ASN)
	for _, comm := range destRouting.Communities {
		c.d.Schema.ProtobufAppendVarint(flow, schema.ColumnDstCommunities, uint64(comm))
	}

@@ -155,14 +153,12 @@
}

// getASNumber retrieves the AS number for a flow, depending on user preferences.
func (c *Component) getASNumber(flowAddr netip.Addr, flowAS, bmpAS uint32) (asn uint32) {
func (c *Component) getASNumber(flowAS, bmpAS uint32) (asn uint32) {
	for _, provider := range c.config.ASNProviders {
		if asn != 0 {
			break
		}
		switch provider {
		case ASNProviderGeoIP:
			asn = c.d.GeoIP.LookupASN(flowAddr)
		case ASNProviderFlow:
			asn = flowAS
		case ASNProviderFlowExceptPrivate:
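With the GeoIP cases gone, the provider loop only arbitrates between the flow-provided AS and the routing (BMP) AS. A condensed sketch of the resulting precedence logic; the routing cases fall outside this hunk, and isPrivateAS is a hypothetical stand-in for whatever private-range check the code actually uses:

	// First provider in c.config.ASNProviders that yields a non-zero ASN wins.
	func (c *Component) getASNumber(flowAS, bmpAS uint32) (asn uint32) {
		for _, provider := range c.config.ASNProviders {
			if asn != 0 {
				break
			}
			switch provider {
			case ASNProviderFlow:
				asn = flowAS
			case ASNProviderFlowExceptPrivate:
				asn = flowAS
				if isPrivateAS(asn) { // hypothetical helper
					asn = 0
				}
			case ASNProviderRouting:
				asn = bmpAS
			case ASNProviderRoutingExceptPrivate:
				asn = bmpAS
				if isPrivateAS(asn) {
					asn = 0
				}
			}
		}
		return asn
	}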
@@ -19,7 +19,6 @@ import (
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/inlet/flow"
|
||||
"akvorado/inlet/geoip"
|
||||
"akvorado/inlet/kafka"
|
||||
"akvorado/inlet/metadata"
|
||||
"akvorado/inlet/routing"
|
||||
@@ -543,7 +542,6 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
|
||||
metadataComponent := metadata.NewMock(t, r, metadata.DefaultConfiguration(),
|
||||
metadata.Dependencies{Daemon: daemonComponent})
|
||||
flowComponent := flow.NewMock(t, r, flow.DefaultConfiguration())
|
||||
geoipComponent := geoip.NewMock(t, r)
|
||||
kafkaComponent, kafkaProducer := kafka.NewMock(t, r, kafka.DefaultConfiguration())
|
||||
httpComponent := httpserver.NewMock(t, r)
|
||||
routingComponent := routing.NewMock(t, r)
|
||||
@@ -564,7 +562,6 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")`,
|
||||
Daemon: daemonComponent,
|
||||
Flow: flowComponent,
|
||||
Metadata: metadataComponent,
|
||||
GeoIP: geoipComponent,
|
||||
Kafka: kafkaComponent,
|
||||
HTTP: httpComponent,
|
||||
Routing: routingComponent,
|
||||
@@ -638,18 +635,11 @@ func TestGetASNumber(t *testing.T) {
|
||||
{"1.0.0.1", 4_200_000_121, 0, []ASNProvider{ASNProviderFlowExceptPrivate}, 0},
|
||||
{"1.0.0.1", 65536, 0, []ASNProvider{ASNProviderFlowExceptPrivate, ASNProviderFlow}, 65536},
|
||||
{"1.0.0.1", 12322, 0, []ASNProvider{ASNProviderFlowExceptPrivate}, 12322},
|
||||
{"1.0.0.1", 12322, 0, []ASNProvider{ASNProviderGeoIP}, 15169},
|
||||
{"2.0.0.1", 12322, 0, []ASNProvider{ASNProviderGeoIP}, 0},
|
||||
{"1.0.0.1", 12322, 0, []ASNProvider{ASNProviderGeoIP, ASNProviderFlow}, 15169},
|
||||
// 10
|
||||
{"1.0.0.1", 12322, 0, []ASNProvider{ASNProviderFlow, ASNProviderGeoIP}, 12322},
|
||||
{"2.0.0.1", 12322, 0, []ASNProvider{ASNProviderFlow, ASNProviderGeoIP}, 12322},
|
||||
{"2.0.0.1", 12322, 0, []ASNProvider{ASNProviderGeoIP, ASNProviderFlow}, 12322},
|
||||
{"192.0.2.2", 12322, 174, []ASNProvider{ASNProviderRouting}, 174},
|
||||
{"192.0.2.129", 12322, 1299, []ASNProvider{ASNProviderRouting}, 1299},
|
||||
{"192.0.2.254", 12322, 0, []ASNProvider{ASNProviderRouting}, 0},
|
||||
{"1.0.0.1", 12322, 65300, []ASNProvider{ASNProviderRouting}, 65300},
|
||||
{"1.0.0.1", 12322, 15169, []ASNProvider{ASNProviderRoutingExceptPrivate, ASNProviderGeoIP}, 15169},
|
||||
}
|
||||
for i, tc := range cases {
|
||||
i++
|
||||
@@ -664,14 +654,13 @@ func TestGetASNumber(t *testing.T) {
|
||||
|
||||
c, err := New(r, configuration, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r),
|
||||
Routing: routingComponent,
|
||||
Schema: schema.NewMock(t),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
}
|
||||
got := c.getASNumber(netip.MustParseAddr(tc.Addr), tc.FlowAS, tc.BMPAS)
|
||||
got := c.getASNumber(tc.FlowAS, tc.BMPAS)
|
||||
if diff := helpers.Diff(got, tc.Expected); diff != "" {
|
||||
t.Fatalf("getASNumber() (-got, +want):\n%s", diff)
|
||||
}
|
||||
@@ -722,7 +711,6 @@ func TestGetNetMask(t *testing.T) {
|
||||
|
||||
c, err := New(r, configuration, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r),
|
||||
Routing: routingComponent,
|
||||
Schema: schema.NewMock(t),
|
||||
})
|
||||
@@ -780,7 +768,6 @@ func TestGetNextHop(t *testing.T) {
|
||||
|
||||
c, err := New(r, configuration, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r),
|
||||
Routing: routingComponent,
|
||||
Schema: schema.NewMock(t),
|
||||
})
|
||||
|
||||
@@ -17,7 +17,6 @@ import (
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/inlet/flow"
|
||||
"akvorado/inlet/geoip"
|
||||
"akvorado/inlet/kafka"
|
||||
"akvorado/inlet/metadata"
|
||||
"akvorado/inlet/routing"
|
||||
@@ -48,7 +47,6 @@ type Dependencies struct {
|
||||
Flow *flow.Component
|
||||
Metadata *metadata.Component
|
||||
Routing *routing.Component
|
||||
GeoIP *geoip.Component
|
||||
Kafka *kafka.Component
|
||||
HTTP *httpserver.Component
|
||||
Schema *schema.Component
|
||||
|
||||
@@ -24,7 +24,6 @@ import (
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/inlet/flow"
|
||||
"akvorado/inlet/geoip"
|
||||
"akvorado/inlet/kafka"
|
||||
"akvorado/inlet/metadata"
|
||||
"akvorado/inlet/routing"
|
||||
@@ -38,7 +37,6 @@ func TestCore(t *testing.T) {
|
||||
metadataComponent := metadata.NewMock(t, r, metadata.DefaultConfiguration(),
|
||||
metadata.Dependencies{Daemon: daemonComponent})
|
||||
flowComponent := flow.NewMock(t, r, flow.DefaultConfiguration())
|
||||
geoipComponent := geoip.NewMock(t, r)
|
||||
kafkaComponent, kafkaProducer := kafka.NewMock(t, r, kafka.DefaultConfiguration())
|
||||
httpComponent := httpserver.NewMock(t, r)
|
||||
routingComponent := routing.NewMock(t, r)
|
||||
@@ -50,7 +48,6 @@ func TestCore(t *testing.T) {
|
||||
Daemon: daemonComponent,
|
||||
Flow: flowComponent,
|
||||
Metadata: metadataComponent,
|
||||
GeoIP: geoipComponent,
|
||||
Kafka: kafkaComponent,
|
||||
HTTP: httpComponent,
|
||||
Routing: routingComponent,
|
||||
@@ -88,15 +85,13 @@ func TestCore(t *testing.T) {
|
||||
|
||||
expectedFlowMessage := func(exporter string, in, out uint32) *schema.FlowMessage {
|
||||
expected := flowMessage(exporter, in, out)
|
||||
expected.SrcAS = 35908
|
||||
expected.DstAS = 0 // not in database
|
||||
expected.SrcAS = 0 // no geoip enrich anymore
|
||||
expected.DstAS = 0 // no geoip enrich anymore
|
||||
expected.InIf = 0 // not serialized
|
||||
expected.OutIf = 0 // not serialized
|
||||
expected.ExporterAddress = netip.AddrFrom16(expected.ExporterAddress.As16())
|
||||
expected.SrcAddr = netip.AddrFrom16(expected.SrcAddr.As16())
|
||||
expected.DstAddr = netip.AddrFrom16(expected.DstAddr.As16())
|
||||
expected.ProtobufDebug[schema.ColumnSrcCountry] = "BT"
|
||||
expected.ProtobufDebug[schema.ColumnDstCountry] = "GB"
|
||||
expected.ProtobufDebug[schema.ColumnInIfName] = fmt.Sprintf("Gi0/0/%d", in)
|
||||
expected.ProtobufDebug[schema.ColumnOutIfName] = fmt.Sprintf("Gi0/0/%d", out)
|
||||
expected.ProtobufDebug[schema.ColumnInIfDescription] = fmt.Sprintf("Interface %d", in)
|
||||
@@ -258,7 +253,7 @@ func TestCore(t *testing.T) {
|
||||
"ExporterAddress": "192.0.2.142",
|
||||
"SrcAddr": "67.43.156.77",
|
||||
"DstAddr": "2.125.160.216",
|
||||
"SrcAS": 35908,
|
||||
"SrcAS": 0, // no geoip enrich anymore
|
||||
"InIf": 434,
|
||||
"OutIf": 677,
|
||||
|
||||
@@ -327,7 +322,7 @@ func TestCore(t *testing.T) {
|
||||
|
||||
// Test HTTP flow clients using protobuf
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
t.Run("http flows with protovuf", func(t *testing.T) {
|
||||
t.Run("http flows with protobuf", func(t *testing.T) {
|
||||
req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("http://%s/api/v0/inlet/flows?limit=1", c.d.HTTP.LocalAddr()), nil)
|
||||
if err != nil {
|
||||
t.Fatalf("http.NewRequest() error:\n%+v", err)
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
// SPDX-FileCopyrightText: 2022 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"net"
|
||||
"net/netip"
|
||||
)
|
||||
|
||||
// LookupASN returns the result of a lookup for an AS number.
|
||||
func (c *Component) LookupASN(ip netip.Addr) uint32 {
|
||||
asnDB := c.db.asn.Load()
|
||||
if asnDB != nil {
|
||||
ip := ip.As16()
|
||||
asn, err := (*asnDB).LookupASN(net.IP(ip[:]))
|
||||
if err == nil && asn != 0 {
|
||||
c.metrics.databaseHit.WithLabelValues("asn").Inc()
|
||||
return asn
|
||||
}
|
||||
c.metrics.databaseMiss.WithLabelValues("asn").Inc()
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// LookupCountry returns the result of a lookup for country.
|
||||
func (c *Component) LookupCountry(ip netip.Addr) string {
|
||||
geoDB := c.db.geo.Load()
|
||||
if geoDB != nil {
|
||||
ip := ip.As16()
|
||||
country, err := (*geoDB).LookupCountry(net.IP(ip[:]))
|
||||
if err == nil && country != "" {
|
||||
c.metrics.databaseHit.WithLabelValues("geo").Inc()
|
||||
return country
|
||||
}
|
||||
c.metrics.databaseMiss.WithLabelValues("geo").Inc()
|
||||
}
|
||||
return ""
|
||||
}
|
||||
@@ -1,52 +0,0 @@
|
||||
// SPDX-FileCopyrightText: 2023 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"net"
|
||||
"strconv"
|
||||
|
||||
"github.com/oschwald/maxminddb-golang"
|
||||
)
|
||||
|
||||
type ipinfoDBASN struct {
|
||||
ASN string `maxminddb:"asn"`
|
||||
}
|
||||
|
||||
type ipinfoDBCountry struct {
|
||||
Country string `maxminddb:"country"`
|
||||
}
|
||||
|
||||
type ipinfoDB struct {
|
||||
db *maxminddb.Reader
|
||||
}
|
||||
|
||||
// LookupASN returns the result of a lookup for an AS number.
|
||||
func (mmdb *ipinfoDB) LookupASN(ip net.IP) (uint32, error) {
|
||||
var asn ipinfoDBASN
|
||||
if err := mmdb.db.Lookup(ip, &asn); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if asn.ASN == "" {
|
||||
return 0, nil
|
||||
}
|
||||
n, err := strconv.ParseUint(asn.ASN[2:], 10, 32)
|
||||
if err != nil {
|
||||
return 0, nil
|
||||
}
|
||||
return uint32(n), nil
|
||||
}
|
||||
|
||||
// LookupCountry returns the result of a lookup for country.
|
||||
func (mmdb *ipinfoDB) LookupCountry(ip net.IP) (string, error) {
|
||||
var country ipinfoDBCountry
|
||||
if err := mmdb.db.Lookup(ip, &country); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return country.Country, nil
|
||||
}
|
||||
|
||||
func (mmdb *ipinfoDB) Close() {
|
||||
mmdb.db.Close()
|
||||
}
|
||||
@@ -1,46 +0,0 @@
|
||||
// SPDX-FileCopyrightText: 2023 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"net"
|
||||
|
||||
"github.com/oschwald/maxminddb-golang"
|
||||
)
|
||||
|
||||
type maxmindDBASN struct {
|
||||
AutonomousSystemNumber uint `maxminddb:"autonomous_system_number"`
|
||||
}
|
||||
|
||||
type maxmindDBCountry struct {
|
||||
Country struct {
|
||||
IsoCode string `maxminddb:"iso_code"`
|
||||
} `maxminddb:"country"`
|
||||
}
|
||||
|
||||
type maxmindDB struct {
|
||||
db *maxminddb.Reader
|
||||
}
|
||||
|
||||
// LookupASN returns the result of a lookup for an AS number.
|
||||
func (mmdb *maxmindDB) LookupASN(ip net.IP) (uint32, error) {
|
||||
var asn maxmindDBASN
|
||||
if err := mmdb.db.Lookup(ip, &asn); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(asn.AutonomousSystemNumber), nil
|
||||
}
|
||||
|
||||
// LookupCountry returns the result of a lookup for country.
|
||||
func (mmdb *maxmindDB) LookupCountry(ip net.IP) (string, error) {
|
||||
var country maxmindDBCountry
|
||||
if err := mmdb.db.Lookup(ip, &country); err != nil {
|
||||
return "", err
|
||||
}
|
||||
return country.Country.IsoCode, nil
|
||||
}
|
||||
|
||||
func (mmdb *maxmindDB) Close() {
|
||||
mmdb.db.Close()
|
||||
}
|
||||
@@ -1,109 +0,0 @@
|
||||
// SPDX-FileCopyrightText: 2022 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"net/netip"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/helpers"
|
||||
"akvorado/common/reporter"
|
||||
)
|
||||
|
||||
func TestLookup(t *testing.T) {
|
||||
r := reporter.NewMock(t)
|
||||
c := NewMock(t, r)
|
||||
|
||||
cases := []struct {
|
||||
IP string
|
||||
ExpectedASN uint32
|
||||
ExpectedCountry string
|
||||
}{
|
||||
{
|
||||
IP: "1.0.0.0",
|
||||
ExpectedASN: 15169,
|
||||
}, {
|
||||
IP: "::ffff:1.0.0.0",
|
||||
ExpectedASN: 15169,
|
||||
}, {
|
||||
IP: "2.125.160.216",
|
||||
ExpectedCountry: "GB",
|
||||
}, {
|
||||
IP: "2a02:ff00::1:1",
|
||||
ExpectedCountry: "IT",
|
||||
}, {
|
||||
IP: "67.43.156.77",
|
||||
ExpectedASN: 35908,
|
||||
ExpectedCountry: "BT",
|
||||
},
|
||||
}
|
||||
for _, tc := range cases {
|
||||
gotCountry := c.LookupCountry(netip.MustParseAddr(tc.IP))
|
||||
if diff := helpers.Diff(gotCountry, tc.ExpectedCountry); diff != "" {
|
||||
t.Errorf("LookupCountry(%q) (-got, +want):\n%s", tc.IP, diff)
|
||||
}
|
||||
gotASN := c.LookupASN(netip.MustParseAddr(tc.IP))
|
||||
if diff := helpers.Diff(gotASN, tc.ExpectedASN); diff != "" {
|
||||
t.Errorf("LookupASN(%q) (-got, +want):\n%s", tc.IP, diff)
|
||||
}
|
||||
}
|
||||
gotMetrics := r.GetMetrics("akvorado_inlet_geoip_")
|
||||
expectedMetrics := map[string]string{
|
||||
`db_hits_total{database="asn"}`: "3",
|
||||
`db_hits_total{database="geo"}`: "3",
|
||||
`db_misses_total{database="asn"}`: "2",
|
||||
`db_misses_total{database="geo"}`: "2",
|
||||
`db_refresh_total{database="asn"}`: "1",
|
||||
`db_refresh_total{database="geo"}`: "1",
|
||||
}
|
||||
if diff := helpers.Diff(gotMetrics, expectedMetrics); diff != "" {
|
||||
t.Fatalf("Metrics (-got, +want):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLookupIPInfo(t *testing.T) {
|
||||
r := reporter.NewMock(t)
|
||||
config := DefaultConfiguration()
|
||||
// The JSON version of this one is here:
|
||||
// https://github.com/ipinfo/sample-database/blob/main/IP%20to%20Country%20ASN/ip_country_asn_sample.json
|
||||
config.GeoDatabase = filepath.Join("testdata", "ip_country_asn_sample.mmdb")
|
||||
config.ASNDatabase = filepath.Join("testdata", "ip_country_asn_sample.mmdb")
|
||||
c, err := New(r, config, Dependencies{Daemon: daemon.NewMock(t)})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+s", err)
|
||||
}
|
||||
helpers.StartStop(t, c)
|
||||
|
||||
cases := []struct {
|
||||
IP string
|
||||
ExpectedASN uint32
|
||||
ExpectedCountry string
|
||||
}{
|
||||
{
|
||||
IP: "2.19.4.138",
|
||||
ExpectedASN: 32787,
|
||||
ExpectedCountry: "SG",
|
||||
}, {
|
||||
IP: "2a09:bac1:14a0:fd0::a:1",
|
||||
ExpectedASN: 13335,
|
||||
ExpectedCountry: "CA",
|
||||
}, {
|
||||
IP: "213.248.218.137",
|
||||
ExpectedASN: 43519,
|
||||
ExpectedCountry: "HK",
|
||||
},
|
||||
}
|
||||
for _, tc := range cases {
|
||||
gotCountry := c.LookupCountry(netip.MustParseAddr(tc.IP))
|
||||
if diff := helpers.Diff(gotCountry, tc.ExpectedCountry); diff != "" {
|
||||
t.Errorf("LookupCountry(%q) (-got, +want):\n%s", tc.IP, diff)
|
||||
}
|
||||
gotASN := c.LookupASN(netip.MustParseAddr(tc.IP))
|
||||
if diff := helpers.Diff(gotASN, tc.ExpectedASN); diff != "" {
|
||||
t.Errorf("LookupASN(%q) (-got, +want):\n%s", tc.IP, diff)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,159 +0,0 @@
|
||||
// SPDX-FileCopyrightText: 2022 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
// Package geoip provides ASN and country for GeoIP addresses.
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/fsnotify/fsnotify"
|
||||
"gopkg.in/tomb.v2"
|
||||
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/reporter"
|
||||
)
|
||||
|
||||
// Component represents the GeoIP component.
|
||||
type Component struct {
|
||||
r *reporter.Reporter
|
||||
d *Dependencies
|
||||
t tomb.Tomb
|
||||
config Configuration
|
||||
|
||||
db struct {
|
||||
geo atomic.Pointer[geoDatabase]
|
||||
asn atomic.Pointer[geoDatabase]
|
||||
}
|
||||
metrics struct {
|
||||
databaseRefresh *reporter.CounterVec
|
||||
databaseHit *reporter.CounterVec
|
||||
databaseMiss *reporter.CounterVec
|
||||
}
|
||||
}
|
||||
|
||||
// Dependencies define the dependencies of the GeoIP component.
|
||||
type Dependencies struct {
|
||||
Daemon daemon.Component
|
||||
}
|
||||
|
||||
// New creates a new GeoIP component.
|
||||
func New(r *reporter.Reporter, configuration Configuration, dependencies Dependencies) (*Component, error) {
|
||||
c := Component{
|
||||
r: r,
|
||||
d: &dependencies,
|
||||
config: configuration,
|
||||
}
|
||||
if c.config.GeoDatabase != "" {
|
||||
c.config.GeoDatabase = filepath.Clean(c.config.GeoDatabase)
|
||||
}
|
||||
if c.config.ASNDatabase != "" {
|
||||
c.config.ASNDatabase = filepath.Clean(c.config.ASNDatabase)
|
||||
}
|
||||
c.d.Daemon.Track(&c.t, "inlet/geoip")
|
||||
c.metrics.databaseRefresh = c.r.CounterVec(
|
||||
reporter.CounterOpts{
|
||||
Name: "db_refresh_total",
|
||||
Help: "Refresh event for a GeoIP database.",
|
||||
},
|
||||
[]string{"database"},
|
||||
)
|
||||
c.metrics.databaseHit = c.r.CounterVec(
|
||||
reporter.CounterOpts{
|
||||
Name: "db_hits_total",
|
||||
Help: "Number of hits for a GeoIP database.",
|
||||
},
|
||||
[]string{"database"},
|
||||
)
|
||||
c.metrics.databaseMiss = c.r.CounterVec(
|
||||
reporter.CounterOpts{
|
||||
Name: "db_misses_total",
|
||||
Help: "Number of misses for a GeoIP database.",
|
||||
},
|
||||
[]string{"database"},
|
||||
)
|
||||
return &c, nil
|
||||
}
|
||||
|
||||
// Start starts the GeoIP component.
|
||||
func (c *Component) Start() error {
|
||||
if err := c.openDatabase("geo", c.config.GeoDatabase, &c.db.geo); err != nil && !c.config.Optional {
|
||||
return err
|
||||
}
|
||||
if err := c.openDatabase("asn", c.config.ASNDatabase, &c.db.asn); err != nil && !c.config.Optional {
|
||||
return err
|
||||
}
|
||||
if c.db.geo.Load() == nil && c.db.asn.Load() == nil {
|
||||
c.r.Warn().Msg("skipping GeoIP component: no database specified")
|
||||
}
|
||||
|
||||
c.r.Info().Msg("starting GeoIP component")
|
||||
|
||||
// Watch for modifications
|
||||
watcher, err := fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
c.r.Err(err).Msg("cannot setup watcher for GeoIP databases")
|
||||
return fmt.Errorf("cannot setup watcher: %w", err)
|
||||
}
|
||||
dirs := map[string]struct{}{}
|
||||
if c.config.GeoDatabase != "" {
|
||||
dirs[filepath.Dir(c.config.GeoDatabase)] = struct{}{}
|
||||
}
|
||||
if c.config.ASNDatabase != "" {
|
||||
dirs[filepath.Dir(c.config.ASNDatabase)] = struct{}{}
|
||||
}
|
||||
for k := range dirs {
|
||||
if err := watcher.Add(k); err != nil {
|
||||
c.r.Err(err).Msg("cannot watch database directory")
|
||||
return fmt.Errorf("cannot watch database directory: %w", err)
|
||||
}
|
||||
}
|
||||
c.t.Go(func() error {
|
||||
errLogger := c.r.Sample(reporter.BurstSampler(10*time.Second, 1))
|
||||
defer watcher.Close()
|
||||
|
||||
for {
|
||||
// Watch both for errors and events in the
|
||||
// same goroutine. fsnotify's FAQ says this is
|
||||
// not a good idea.
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return nil
|
||||
case err, ok := <-watcher.Errors:
|
||||
if !ok {
|
||||
return errors.New("file watcher died")
|
||||
}
|
||||
errLogger.Err(err).Msg("error from watcher")
|
||||
case event, ok := <-watcher.Events:
|
||||
if !ok {
|
||||
return errors.New("file watcher died")
|
||||
}
|
||||
if !event.Has(fsnotify.Write) && !event.Has(fsnotify.Create) {
|
||||
continue
|
||||
}
|
||||
if filepath.Clean(event.Name) == c.config.GeoDatabase {
|
||||
c.openDatabase("geo", c.config.GeoDatabase, &c.db.geo)
|
||||
}
|
||||
if filepath.Clean(event.Name) == c.config.ASNDatabase {
|
||||
c.openDatabase("asn", c.config.ASNDatabase, &c.db.asn)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
// Stop stops the GeoIP component.
|
||||
func (c *Component) Stop() error {
|
||||
if c.db.geo.Load() == nil && c.db.asn.Load() == nil {
|
||||
return nil
|
||||
}
|
||||
c.r.Info().Msg("stopping GeoIP component")
|
||||
defer c.r.Info().Msg("GeoIP component stopped")
|
||||
c.t.Kill(nil)
|
||||
return c.t.Wait()
|
||||
}
|
||||
@@ -26,7 +26,7 @@ type Configuration struct {
|
||||
|
||||
// ExporterConfiguration is the interface configuration for an exporter.
|
||||
type ExporterConfiguration struct {
|
||||
provider.Exporter `mapstructure:",squash" yaml:"inline"`
|
||||
provider.Exporter `mapstructure:",squash" yaml:",inline"`
|
||||
// Default is used if not empty for any unknown ifindexes
|
||||
Default provider.Interface `validate:"omitempty"`
|
||||
// IfIndexes is a map from interface indexes to interfaces
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"akvorado/common/clickhousedb"
|
||||
"akvorado/common/helpers"
|
||||
"akvorado/common/kafka"
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
|
||||
"github.com/mitchellh/mapstructure"
|
||||
)
|
||||
@@ -51,6 +52,8 @@ type Configuration struct {
|
||||
// OrchestratorURL allows one to override URL to reach
|
||||
// orchestrator from ClickHouse
|
||||
OrchestratorURL string `validate:"isdefault|url"`
|
||||
|
||||
GeoIP geoip.Configuration
|
||||
}
|
||||
|
||||
// ResolutionConfiguration describes a consolidation interval.
|
||||
@@ -97,18 +100,27 @@ func DefaultConfiguration() Configuration {
|
||||
}
|
||||
}
|
||||
|
||||
// NetworkAttributes is a set of attributes attached to a network
|
||||
// NetworkAttributes is a set of attributes attached to a network.
|
||||
// Don't forget to update orchestrator/clickhouse/migrations.go:78 when this changes.
|
||||
type NetworkAttributes struct {
|
||||
// Name is a name attached to the network. May be unique or not.
|
||||
Name string
|
||||
// Role is a role attached to the network (server, customer).
|
||||
Role string
|
||||
// Site is the site of the network (paris, berlin).
|
||||
// Site is the site of the network (ams5, pa3).
|
||||
Site string
|
||||
// Region is the region of the network (france, italy).
|
||||
// Region is the region of the network (eu-west-1, us-east-3).
|
||||
Region string
|
||||
// City is the administrative city where the prefix is located (Paris, London).
|
||||
City string
|
||||
// State is the first administrative sub-division of the country (Ile-de-france, Alabama)
|
||||
State string
|
||||
// Country is the country of the network (france, italy)
|
||||
Country string
|
||||
// Tenant is a tenant for the network.
|
||||
Tenant string
|
||||
// ASN is the AS number associated to the network.
|
||||
ASN uint32
|
||||
}
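For illustration, a NetworkAttributes value populated with the new geo-related fields might look like the sketch below; every concrete value (site, region, ASN, ...) is invented for the example and is not taken from the repository:

	// Hypothetical literal showing the enriched attributes; values are illustrative only.
	attrs := NetworkAttributes{
		Name:    "cdn-pop",
		Role:    "server",
		Site:    "ams5",
		Region:  "eu-west-1",
		Country: "NL",
		State:   "NH",
		City:    "Amsterdam",
		Tenant:  "acme",
		ASN:     64512,
	}
	_ = attrs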
|
||||
|
||||
// NetworkAttributesUnmarshallerHook decodes network attributes. It
|
||||
|
||||
@@ -15,9 +15,9 @@ import (
|
||||
// Configuration describes the configuration for the GeoIP component.
|
||||
type Configuration struct {
|
||||
// ASNDatabase defines the paths to the ASN databases.
|
||||
ASNDatabase string
|
||||
ASNDatabase []string
|
||||
// GeoDatabase defines the paths to the geo databases.
|
||||
GeoDatabase string
|
||||
GeoDatabase []string
|
||||
// Optional tells whether a missing database should be an error at start.
|
||||
Optional bool
|
||||
}
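Since both fields are now slices, more than one database of each kind can be configured. A minimal sketch of such a setup (the file paths are hypothetical; per the converged-networks refresh later in this change, databases listed first take precedence):

	// Hypothetical multi-database configuration; earlier entries win over later
	// ones when attributes are merged.
	cfg := DefaultConfiguration()
	cfg.GeoDatabase = []string{
		"/srv/geoip/ipinfo-country.mmdb",
		"/srv/geoip/GeoLite2-City.mmdb",
	}
	cfg.ASNDatabase = []string{
		"/srv/geoip/ipinfo-asn.mmdb",
		"/srv/geoip/GeoLite2-ASN.mmdb",
	}
	cfg.Optional = true // do not fail at startup if a file is not there yet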
|
||||
@@ -28,12 +28,12 @@ func TestConfigurationUnmarshallerHook(t *testing.T) {
|
||||
Initial: func() interface{} { return Configuration{} },
|
||||
Configuration: func() interface{} {
|
||||
return gin.H{
|
||||
"asn-database": "something",
|
||||
"asn-database": []string{"something"},
|
||||
"optional": true,
|
||||
}
|
||||
},
|
||||
Expected: Configuration{
|
||||
ASNDatabase: "something",
|
||||
ASNDatabase: []string{"something"},
|
||||
Optional: true,
|
||||
},
|
||||
}, {
|
||||
@@ -41,35 +41,35 @@ func TestConfigurationUnmarshallerHook(t *testing.T) {
|
||||
Initial: func() interface{} { return Configuration{} },
|
||||
Configuration: func() interface{} {
|
||||
return gin.H{
|
||||
"asn-database": "something",
|
||||
"country-database": "something else",
|
||||
"asn-database": []string{"something"},
|
||||
"country-database": []string{"something else"},
|
||||
}
|
||||
},
|
||||
Expected: Configuration{
|
||||
ASNDatabase: "something",
|
||||
GeoDatabase: "something else",
|
||||
ASNDatabase: []string{"something"},
|
||||
GeoDatabase: []string{"something else"},
|
||||
},
|
||||
}, {
|
||||
Description: "no country-database, geoip-database",
|
||||
Initial: func() interface{} { return Configuration{} },
|
||||
Configuration: func() interface{} {
|
||||
return gin.H{
|
||||
"asn-database": "something",
|
||||
"geo-database": "something else",
|
||||
"asn-database": []string{"something"},
|
||||
"geo-database": []string{"something else"},
|
||||
}
|
||||
},
|
||||
Expected: Configuration{
|
||||
ASNDatabase: "something",
|
||||
GeoDatabase: "something else",
|
||||
ASNDatabase: []string{"something"},
|
||||
GeoDatabase: []string{"something else"},
|
||||
},
|
||||
}, {
|
||||
Description: "both country-database, geoip-database",
|
||||
Initial: func() interface{} { return Configuration{} },
|
||||
Configuration: func() interface{} {
|
||||
return gin.H{
|
||||
"asn-database": "something",
|
||||
"geo-database": "something else",
|
||||
"country-database": "another value",
|
||||
"asn-database": []string{"something"},
|
||||
"geo-database": []string{"something else"},
|
||||
"country-database": []string{"another value"},
|
||||
}
|
||||
},
|
||||
Error: true,
|
||||
@@ -7,20 +7,37 @@ import (
|
||||
"fmt"
|
||||
"net"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
|
||||
"github.com/oschwald/maxminddb-golang"
|
||||
)
|
||||
|
||||
// GeoIterFunc is the required signature to iterate over a geo database.
|
||||
type GeoIterFunc func(*net.IPNet, GeoInfo) error
|
||||
|
||||
// AsnIterFunc is the required signature to iterate over an ASN database.
|
||||
type AsnIterFunc func(*net.IPNet, ASNInfo) error
|
||||
|
||||
type geoDatabase interface {
|
||||
Close()
|
||||
LookupCountry(ip net.IP) (string, error)
|
||||
LookupASN(ip net.IP) (uint32, error)
|
||||
IterASNDatabase(AsnIterFunc) error
|
||||
IterGeoDatabase(GeoIterFunc) error
|
||||
}
|
||||
|
||||
// openDatabase opens the provided database and closes the current
|
||||
// one. It does nothing if the path is empty.
|
||||
func (c *Component) openDatabase(which string, path string, container *atomic.Pointer[geoDatabase]) error {
|
||||
func (c *Component) openDatabase(which string, index int, path string) error {
|
||||
// notify open channel when a database is (re)loaded
|
||||
defer func() {
|
||||
// prevent the fanout thread from closing the channel until everything is written
|
||||
c.notifyDone.Add(1)
|
||||
c.onOpenChan <- DBNotification{
|
||||
Path: path,
|
||||
Kind: which,
|
||||
Index: index,
|
||||
}
|
||||
c.notifyDone.Done()
|
||||
}()
|
||||
|
||||
if path == "" {
|
||||
return nil
|
||||
}
|
||||
@@ -36,13 +53,23 @@ func (c *Component) openDatabase(which string, path string, container *atomic.Po
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
oldOne := container.Swap(&newOne)
|
||||
c.db.lock.Lock()
|
||||
defer c.db.lock.Unlock()
|
||||
var oldOne geoDatabase
|
||||
switch which {
|
||||
case "asn":
|
||||
oldOne = c.db.asn[path]
|
||||
c.db.asn[path] = newOne
|
||||
case "geo":
|
||||
oldOne = c.db.geo[path]
|
||||
c.db.geo[path] = newOne
|
||||
}
|
||||
c.metrics.databaseRefresh.WithLabelValues(which).Inc()
|
||||
if oldOne != nil {
|
||||
c.r.Debug().
|
||||
Str("database", path).
|
||||
Msgf("closing previous %s database", which)
|
||||
(*oldOne).Close()
|
||||
oldOne.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
46
orchestrator/clickhouse/geoip/iter.go
Normal file
46
orchestrator/clickhouse/geoip/iter.go
Normal file
@@ -0,0 +1,46 @@
|
||||
// SPDX-FileCopyrightText: 2022 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// GeoInfo describes the geographical data of a geo database.
|
||||
type GeoInfo struct {
|
||||
Country string
|
||||
Continent string
|
||||
City string
|
||||
State string
|
||||
}
|
||||
|
||||
// ASNInfo describes the ASN data of an ASN database.
|
||||
type ASNInfo struct {
|
||||
ASNumber uint32
|
||||
ASName string
|
||||
}
|
||||
|
||||
// IterGeoDatabase iterates over all entries in the given geo database path.
|
||||
func (c *Component) IterGeoDatabase(path string, f GeoIterFunc) error {
|
||||
c.db.lock.RLock()
|
||||
defer c.db.lock.RUnlock()
|
||||
geoDB := c.db.geo[path]
|
||||
if geoDB != nil {
|
||||
return geoDB.IterGeoDatabase(f)
|
||||
}
|
||||
|
||||
return fmt.Errorf("database not found %s", path)
|
||||
}
|
||||
|
||||
// IterASNDatabase iterates over all entries in the given ASN database path.
|
||||
func (c *Component) IterASNDatabase(path string, f AsnIterFunc) error {
|
||||
c.db.lock.RLock()
|
||||
defer c.db.lock.RUnlock()
|
||||
geoDB := c.db.asn[path]
|
||||
if geoDB != nil {
|
||||
return geoDB.IterASNDatabase(f)
|
||||
}
|
||||
|
||||
return fmt.Errorf("database not found %s", path)
|
||||
}
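A short sketch of how a caller outside the component might use these iterators; the helper name and output format are purely illustrative and assume the usual "fmt" and "net" imports plus "akvorado/orchestrator/clickhouse/geoip":

	// dumpASNs is a hypothetical helper that walks one configured ASN database
	// and prints every subnet it contains.
	func dumpASNs(c *geoip.Component, path string) error {
		return c.IterASNDatabase(path, func(subnet *net.IPNet, info geoip.ASNInfo) error {
			fmt.Printf("%s AS%d %s\n", subnet, info.ASNumber, info.ASName)
			return nil
		})
	}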
|
||||
71
orchestrator/clickhouse/geoip/iter_ipinfo.go
Normal file
71
orchestrator/clickhouse/geoip/iter_ipinfo.go
Normal file
@@ -0,0 +1,71 @@
|
||||
// SPDX-FileCopyrightText: 2023 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"github.com/oschwald/maxminddb-golang"
|
||||
)
|
||||
|
||||
type ipinfoDBASN struct {
|
||||
ASN string `maxminddb:"asn"`
|
||||
ASName string `maxminddb:"as_name"`
|
||||
}
|
||||
|
||||
type ipinfoDBCountry struct {
|
||||
Country string `maxminddb:"country"`
|
||||
Continent string `maxminddb:"continent"`
|
||||
}
|
||||
|
||||
type ipinfoDB struct {
|
||||
db *maxminddb.Reader
|
||||
}
|
||||
|
||||
func (mmdb *ipinfoDB) IterASNDatabase(f AsnIterFunc) error {
|
||||
it := mmdb.db.Networks()
|
||||
maxminddb.SkipAliasedNetworks(it)
|
||||
for it.Next() {
|
||||
asnInfo := &ipinfoDBASN{}
|
||||
subnet, err := it.Network(asnInfo)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// the ASN is stored as a string such as "AS15169": strip the "AS" prefix before parsing
n, err := strconv.ParseUint(asnInfo.ASN[2:], 10, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := f(subnet, ASNInfo{
|
||||
ASNumber: uint32(n),
|
||||
ASName: asnInfo.ASName,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (mmdb *ipinfoDB) IterGeoDatabase(f GeoIterFunc) error {
|
||||
it := mmdb.db.Networks()
|
||||
maxminddb.SkipAliasedNetworks(it)
|
||||
for it.Next() {
|
||||
geoInfo := &ipinfoDBCountry{}
|
||||
subnet, err := it.Network(geoInfo)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := f(subnet, GeoInfo{
|
||||
Country: geoInfo.Country,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (mmdb *ipinfoDB) Close() {
|
||||
mmdb.db.Close()
|
||||
}
|
||||
82
orchestrator/clickhouse/geoip/iter_maxminddb.go
Normal file
82
orchestrator/clickhouse/geoip/iter_maxminddb.go
Normal file
@@ -0,0 +1,82 @@
|
||||
// SPDX-FileCopyrightText: 2023 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"github.com/oschwald/maxminddb-golang"
|
||||
)
|
||||
|
||||
type maxmindDBASN struct {
|
||||
AutonomousSystemNumber uint `maxminddb:"autonomous_system_number"`
|
||||
AutonomousSystemOrganization string `maxminddb:"autonomous_system_organization"`
|
||||
}
|
||||
|
||||
// for a list of available fields, see: https://github.com/oschwald/geoip2-golang/blob/main/reader.go
|
||||
type maxmindDBCountry struct {
|
||||
Country struct {
|
||||
IsoCode string `maxminddb:"iso_code"`
|
||||
} `maxminddb:"country"`
|
||||
City struct {
|
||||
Names map[string]string `maxminddb:"names"`
|
||||
} `maxminddb:"city"`
|
||||
Subdivisions []struct {
|
||||
IsoCode string `maxminddb:"iso_code"`
|
||||
} `maxminddb:"subdivisions"`
|
||||
}
|
||||
|
||||
type maxmindDB struct {
|
||||
db *maxminddb.Reader
|
||||
}
|
||||
|
||||
func (mmdb *maxmindDB) IterASNDatabase(f AsnIterFunc) error {
|
||||
it := mmdb.db.Networks()
|
||||
maxminddb.SkipAliasedNetworks(it)
|
||||
|
||||
for it.Next() {
|
||||
asnInfo := &maxmindDBASN{}
|
||||
subnet, err := it.Network(asnInfo)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := f(subnet, ASNInfo{
|
||||
ASNumber: uint32(asnInfo.AutonomousSystemNumber),
|
||||
ASName: asnInfo.AutonomousSystemOrganization,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (mmdb *maxmindDB) IterGeoDatabase(f GeoIterFunc) error {
|
||||
it := mmdb.db.Networks()
|
||||
maxminddb.SkipAliasedNetworks(it)
|
||||
|
||||
for it.Next() {
|
||||
geoInfo := &maxmindDBCountry{}
|
||||
subnet, err := it.Network(geoInfo)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var state string
|
||||
if len(geoInfo.Subdivisions) > 0 {
|
||||
state = geoInfo.Subdivisions[0].IsoCode
|
||||
}
|
||||
|
||||
if err := f(subnet, GeoInfo{
|
||||
Country: geoInfo.Country.IsoCode,
|
||||
State: state,
|
||||
City: geoInfo.City.Names["en"],
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (mmdb *maxmindDB) Close() {
|
||||
mmdb.db.Close()
|
||||
}
|
||||
98
orchestrator/clickhouse/geoip/iter_test.go
Normal file
98
orchestrator/clickhouse/geoip/iter_test.go
Normal file
@@ -0,0 +1,98 @@
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"net"
|
||||
"testing"
|
||||
|
||||
"akvorado/common/reporter"
|
||||
)
|
||||
|
||||
func TestIterDatabase(t *testing.T) {
|
||||
r := reporter.NewMock(t)
|
||||
c := NewMock(t, r, true)
|
||||
|
||||
mustHave := []struct {
|
||||
IP string
|
||||
ExpectedASN uint32
|
||||
ExpectedCountry string
|
||||
hasCountry bool
|
||||
hasASN bool
|
||||
}{
|
||||
// ipinfo database
|
||||
{
|
||||
IP: "2.19.4.138",
|
||||
ExpectedASN: 32787,
|
||||
ExpectedCountry: "SG",
|
||||
}, {
|
||||
IP: "2a09:bac1:14a0:fd0::a:1",
|
||||
ExpectedASN: 13335,
|
||||
ExpectedCountry: "CA",
|
||||
}, {
|
||||
IP: "213.248.218.137",
|
||||
ExpectedASN: 43519,
|
||||
ExpectedCountry: "HK",
|
||||
},
|
||||
// maxmind
|
||||
{
|
||||
IP: "1.0.0.0",
|
||||
ExpectedASN: 15169,
|
||||
}, {
|
||||
IP: "2.125.160.216",
|
||||
ExpectedCountry: "GB",
|
||||
}, {
|
||||
IP: "2a02:ff00::1:1",
|
||||
ExpectedCountry: "IT",
|
||||
}, {
|
||||
IP: "67.43.156.77",
|
||||
ExpectedASN: 35908,
|
||||
ExpectedCountry: "BT",
|
||||
},
|
||||
}
|
||||
|
||||
for _, asnDb := range c.config.ASNDatabase {
|
||||
err := c.IterASNDatabase(asnDb, func(n *net.IPNet, a ASNInfo) error {
|
||||
for i, h := range mustHave {
|
||||
// found the IP
|
||||
if n.Contains(net.ParseIP(h.IP)) {
|
||||
if h.ExpectedASN != 0 && a.ASNumber != h.ExpectedASN {
|
||||
t.Errorf("expected ASN %d, got %d", h.ExpectedASN, a.ASNumber)
|
||||
}
|
||||
mustHave[i].hasASN = true
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, geoDb := range c.config.GeoDatabase {
|
||||
err := c.IterGeoDatabase(geoDb, func(n *net.IPNet, a GeoInfo) error {
|
||||
for i, h := range mustHave {
|
||||
// found the IP
|
||||
if n.Contains(net.ParseIP(h.IP).To16()) {
|
||||
if h.ExpectedCountry != "" && a.Country != h.ExpectedCountry {
|
||||
t.Errorf("expected Country %s, got %s", h.ExpectedCountry, a.Country)
|
||||
}
|
||||
mustHave[i].hasCountry = true
|
||||
break
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, h := range mustHave {
|
||||
if !h.hasASN && h.ExpectedASN != 0 {
|
||||
t.Errorf("missing subnet %s in ASN database", h.IP)
|
||||
}
|
||||
if !h.hasCountry && h.ExpectedCountry != "" {
|
||||
t.Errorf("missing subnet %s in GEO database", h.IP)
|
||||
}
|
||||
}
|
||||
}
|
||||
267
orchestrator/clickhouse/geoip/root.go
Normal file
267
orchestrator/clickhouse/geoip/root.go
Normal file
@@ -0,0 +1,267 @@
|
||||
// SPDX-FileCopyrightText: 2022 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
// Package geoip provides ASN and country information for IP addresses.
|
||||
package geoip
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/fsnotify/fsnotify"
|
||||
"gopkg.in/tomb.v2"
|
||||
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/reporter"
|
||||
)
|
||||
|
||||
// Component represents the GeoIP component.
|
||||
type Component struct {
|
||||
r *reporter.Reporter
|
||||
d *Dependencies
|
||||
t tomb.Tomb
|
||||
config Configuration
|
||||
|
||||
db struct {
|
||||
geo map[string]geoDatabase
|
||||
asn map[string]geoDatabase
|
||||
lock sync.RWMutex
|
||||
}
|
||||
|
||||
metrics struct {
|
||||
databaseRefresh *reporter.CounterVec
|
||||
}
|
||||
|
||||
onOpenChan chan DBNotification
|
||||
onOpenSubscribers []chan DBNotification
|
||||
notifyLock sync.RWMutex
|
||||
notifyDone sync.WaitGroup
|
||||
}
|
||||
|
||||
// DBNotification is sent to all listeners when a database is opened/refreshed.
|
||||
type DBNotification struct {
|
||||
Path string
|
||||
Kind string
|
||||
Index int
|
||||
}
|
||||
|
||||
// Dependencies define the dependencies of the GeoIP component.
|
||||
type Dependencies struct {
|
||||
Daemon daemon.Component
|
||||
}
|
||||
|
||||
// New creates a new GeoIP component.
|
||||
func New(r *reporter.Reporter, configuration Configuration, dependencies Dependencies) (*Component, error) {
|
||||
c := Component{
|
||||
r: r,
|
||||
d: &dependencies,
|
||||
config: configuration,
|
||||
onOpenChan: make(chan DBNotification),
|
||||
onOpenSubscribers: []chan DBNotification{},
|
||||
}
|
||||
c.db.geo = make(map[string]geoDatabase)
|
||||
c.db.asn = make(map[string]geoDatabase)
|
||||
|
||||
for i, path := range c.config.GeoDatabase {
|
||||
c.config.GeoDatabase[i] = filepath.Clean(path)
|
||||
}
|
||||
for i, path := range c.config.ASNDatabase {
|
||||
c.config.ASNDatabase[i] = filepath.Clean(path)
|
||||
}
|
||||
c.d.Daemon.Track(&c.t, "orchestrator/geoip")
|
||||
c.metrics.databaseRefresh = c.r.CounterVec(
|
||||
reporter.CounterOpts{
|
||||
Name: "db_refresh_total",
|
||||
Help: "Refresh event for a GeoIP database.",
|
||||
},
|
||||
[]string{"database"},
|
||||
)
|
||||
return &c, nil
|
||||
}
|
||||
|
||||
func (c *Component) fanout(notif DBNotification) {
|
||||
c.notifyLock.RLock()
|
||||
defer c.notifyLock.RUnlock()
|
||||
for _, subChan := range c.onOpenSubscribers {
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return
|
||||
case subChan <- notif:
|
||||
default:
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Start starts the GeoIP component.
|
||||
func (c *Component) Start() error {
|
||||
if len(c.config.GeoDatabase) == 0 && len(c.config.ASNDatabase) == 0 {
|
||||
c.r.Warn().Msg("skipping GeoIP component: no database specified")
|
||||
}
|
||||
c.r.Info().Msg("starting GeoIP component")
|
||||
|
||||
c.t.Go(func() error {
|
||||
// notifier fanout
|
||||
for notif := range c.onOpenChan {
|
||||
c.fanout(notif)
|
||||
}
|
||||
for _, sub := range c.onOpenSubscribers {
close(sub)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
for i, path := range c.config.GeoDatabase {
|
||||
if err := c.openDatabase("geo", i, path); err != nil && !c.config.Optional {
|
||||
return err
|
||||
}
|
||||
}
|
||||
for i, path := range c.config.ASNDatabase {
|
||||
if err := c.openDatabase("asn", i, path); err != nil && !c.config.Optional {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Watch for modifications
|
||||
watcher, err := fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
c.r.Err(err).Msg("cannot setup watcher for GeoIP databases")
|
||||
return fmt.Errorf("cannot setup watcher: %w", err)
|
||||
}
|
||||
dirs := map[string]struct{}{}
|
||||
for _, path := range c.config.GeoDatabase {
|
||||
dirs[filepath.Dir(path)] = struct{}{}
|
||||
}
|
||||
for _, path := range c.config.ASNDatabase {
|
||||
dirs[filepath.Dir(path)] = struct{}{}
|
||||
}
|
||||
for k := range dirs {
|
||||
if err := watcher.Add(k); err != nil {
|
||||
c.r.Err(err).Msg("cannot watch database directory")
|
||||
return fmt.Errorf("cannot watch database directory: %w", err)
|
||||
}
|
||||
}
|
||||
c.t.Go(func() error {
|
||||
errLogger := c.r.Sample(reporter.BurstSampler(10*time.Second, 1))
|
||||
defer watcher.Close()
|
||||
|
||||
for {
|
||||
// Watch both for errors and events in the
|
||||
// same goroutine. fsnotify's FAQ says this is
|
||||
// not a good idea.
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return nil
|
||||
case err, ok := <-watcher.Errors:
|
||||
if !ok {
|
||||
return errors.New("file watcher died")
|
||||
}
|
||||
errLogger.Err(err).Msg("error from watcher")
|
||||
case event, ok := <-watcher.Events:
|
||||
if !ok {
|
||||
return errors.New("file watcher died")
|
||||
}
|
||||
if !event.Has(fsnotify.Write) && !event.Has(fsnotify.Create) {
|
||||
continue
|
||||
}
|
||||
for i, path := range c.config.GeoDatabase {
|
||||
if filepath.Clean(event.Name) == path {
|
||||
c.openDatabase("geo", i, path)
|
||||
break
|
||||
}
|
||||
}
|
||||
for i, path := range c.config.ASNDatabase {
|
||||
if filepath.Clean(event.Name) == path {
|
||||
c.openDatabase("geo", i, path)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Stop stops the GeoIP component.
|
||||
func (c *Component) Stop() error {
|
||||
c.r.Info().Msg("stopping GeoIP component")
|
||||
c.db.lock.RLock()
|
||||
c.r.Debug().Msg("closing database files")
|
||||
|
||||
for _, db := range c.db.geo {
|
||||
if db != nil {
|
||||
db.Close()
|
||||
}
|
||||
}
|
||||
for _, db := range c.db.asn {
|
||||
if db != nil {
|
||||
db.Close()
|
||||
}
|
||||
}
|
||||
c.db.lock.RUnlock()
|
||||
c.r.Debug().Msg("stopping child threads")
|
||||
c.t.Kill(nil)
|
||||
c.r.Debug().Msg("waiting for notification to be sent")
|
||||
c.notifyDone.Wait()
|
||||
close(c.onOpenChan)
|
||||
defer c.r.Info().Msg("GeoIP component stopped")
|
||||
return c.t.Wait()
|
||||
}
|
||||
|
||||
// Notify is what a parent component should call to get notified when a database is updated.
|
||||
func (c *Component) Notify() (chan DBNotification, chan struct{}) {
|
||||
notifyChan := make(chan DBNotification)
|
||||
c.notifyLock.Lock()
|
||||
c.onOpenSubscribers = append(c.onOpenSubscribers, notifyChan)
|
||||
c.notifyLock.Unlock()
|
||||
initDoneChan := make(chan struct{})
|
||||
// send the existing databases when a client subscribes
|
||||
c.t.Go(func() error {
|
||||
c.db.lock.RLock()
|
||||
defer c.db.lock.RUnlock()
|
||||
for i, path := range c.config.GeoDatabase {
|
||||
// not loaded yet
|
||||
if _, has := c.db.geo[path]; !has {
|
||||
continue
|
||||
}
|
||||
// prevent the fanout thread from closing the channel until everything is written
|
||||
c.notifyDone.Add(1)
|
||||
defer c.notifyDone.Done()
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return nil
|
||||
case notifyChan <- DBNotification{
|
||||
Path: path,
|
||||
Kind: "geo",
|
||||
Index: i,
|
||||
}:
|
||||
continue
|
||||
}
|
||||
}
|
||||
for i, path := range c.config.ASNDatabase {
|
||||
// not loaded yet
|
||||
if _, has := c.db.asn[path]; !has {
|
||||
continue
|
||||
}
|
||||
// prevent the fanout thread from closing the channel until everything is written
|
||||
c.notifyDone.Add(1)
|
||||
defer c.notifyDone.Done()
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return nil
|
||||
case notifyChan <- DBNotification{
|
||||
Path: path,
|
||||
Kind: "asn",
|
||||
Index: i,
|
||||
}:
|
||||
continue
|
||||
}
|
||||
}
|
||||
close(initDoneChan)
|
||||
return nil
|
||||
})
|
||||
return notifyChan, initDoneChan
|
||||
}
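To make the contract explicit, here is a hedged sketch of a subscriber; "geoipComponent" and the logging are illustrative (the real consumer is the ClickHouse component further down in this change), and a "log" import is assumed:

	// Hypothetical subscriber: notifChan receives a DBNotification every time a
	// database is (re)opened; initDone is closed once all already-loaded
	// databases have been announced to this subscriber.
	notifChan, initDone := geoipComponent.Notify()
	go func() {
		for notif := range notifChan {
			log.Printf("geoip %s database %q (index %d) refreshed", notif.Kind, notif.Path, notif.Index)
		}
	}()
	<-initDone // safe to build the initial converged view now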
|
||||
@@ -36,13 +36,16 @@ func copyFile(src string, dst string) {
|
||||
func TestDatabaseRefresh(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
config := DefaultConfiguration()
|
||||
config.GeoDatabase = filepath.Join(dir, "country.mmdb")
|
||||
config.ASNDatabase = filepath.Join(dir, "asn.mmdb")
|
||||
|
||||
countryFile := filepath.Join(dir, "country.mmdb")
|
||||
asnFile := filepath.Join(dir, "asn.mmdb")
|
||||
config.GeoDatabase = []string{countryFile}
|
||||
config.ASNDatabase = []string{asnFile}
|
||||
|
||||
copyFile(filepath.Join("testdata", "GeoLite2-Country-Test.mmdb"),
|
||||
config.GeoDatabase)
|
||||
countryFile)
|
||||
copyFile(filepath.Join("testdata", "GeoLite2-ASN-Test.mmdb"),
|
||||
config.ASNDatabase)
|
||||
asnFile)
|
||||
|
||||
r := reporter.NewMock(t)
|
||||
c, err := New(r, config, Dependencies{Daemon: daemon.NewMock(t)})
|
||||
@@ -52,7 +55,7 @@ func TestDatabaseRefresh(t *testing.T) {
|
||||
helpers.StartStop(t, c)
|
||||
|
||||
// Check we did load both databases
|
||||
gotMetrics := r.GetMetrics("akvorado_inlet_geoip_db_")
|
||||
gotMetrics := r.GetMetrics("akvorado_orchestrator_clickhouse_geoip_db_")
|
||||
expectedMetrics := map[string]string{
|
||||
`refresh_total{database="asn"}`: "1",
|
||||
`refresh_total{database="geo"}`: "1",
|
||||
@@ -64,9 +67,9 @@ func TestDatabaseRefresh(t *testing.T) {
|
||||
// Check we can reload the database
|
||||
copyFile(filepath.Join("testdata", "GeoLite2-Country-Test.mmdb"),
|
||||
filepath.Join(dir, "tmp.mmdb"))
|
||||
os.Rename(filepath.Join(dir, "tmp.mmdb"), config.GeoDatabase)
|
||||
os.Rename(filepath.Join(dir, "tmp.mmdb"), countryFile)
|
||||
time.Sleep(20 * time.Millisecond)
|
||||
gotMetrics = r.GetMetrics("akvorado_inlet_geoip_db_")
|
||||
gotMetrics = r.GetMetrics("akvorado_orchestrator_clickhouse_geoip_db_")
|
||||
expectedMetrics = map[string]string{
|
||||
`refresh_total{database="asn"}`: "1",
|
||||
`refresh_total{database="geo"}`: "2",
|
||||
@@ -87,9 +90,9 @@ func TestStartWithoutDatabase(t *testing.T) {
|
||||
|
||||
func TestStartWithMissingDatabase(t *testing.T) {
|
||||
geoConfiguration := DefaultConfiguration()
|
||||
geoConfiguration.GeoDatabase = "/i/do/not/exist"
|
||||
geoConfiguration.GeoDatabase = []string{"/i/do/not/exist"}
|
||||
asnConfiguration := DefaultConfiguration()
|
||||
asnConfiguration.ASNDatabase = "/i/do/not/exist"
|
||||
asnConfiguration.ASNDatabase = []string{"/i/do/not/exist"}
|
||||
cases := []struct {
|
||||
Name string
|
||||
Config Configuration
|
||||
@@ -21,12 +21,20 @@ import (
|
||||
// available here:
|
||||
// - https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-ASN-Test.json
|
||||
// - https://github.com/maxmind/MaxMind-DB/blob/main/source-data/GeoLite2-Country-Test.json
|
||||
func NewMock(t *testing.T, r *reporter.Reporter) *Component {
|
||||
func NewMock(t *testing.T, r *reporter.Reporter, withData bool) *Component {
|
||||
t.Helper()
|
||||
config := DefaultConfiguration()
|
||||
_, src, _, _ := runtime.Caller(0)
|
||||
config.GeoDatabase = filepath.Join(path.Dir(src), "testdata", "GeoLite2-Country-Test.mmdb")
|
||||
config.ASNDatabase = filepath.Join(path.Dir(src), "testdata", "GeoLite2-ASN-Test.mmdb")
|
||||
if withData {
|
||||
config.GeoDatabase = []string{
|
||||
filepath.Join(path.Dir(src), "testdata", "GeoLite2-Country-Test.mmdb"),
|
||||
filepath.Join(path.Dir(src), "testdata", "ip_country_asn_sample.mmdb"),
|
||||
}
|
||||
config.ASNDatabase = []string{
|
||||
filepath.Join(path.Dir(src), "testdata", "GeoLite2-ASN-Test.mmdb"),
|
||||
filepath.Join(path.Dir(src), "testdata", "ip_country_asn_sample.mmdb"),
|
||||
}
|
||||
}
|
||||
c, err := New(r, config, Dependencies{Daemon: daemon.NewMock(t)})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+s", err)
|
||||
66
orchestrator/clickhouse/geoip_test.go
Normal file
66
orchestrator/clickhouse/geoip_test.go
Normal file
@@ -0,0 +1,66 @@
|
||||
// SPDX-FileCopyrightText: 2023 Free Mobile
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
package clickhouse
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
|
||||
"akvorado/common/clickhousedb"
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/helpers"
|
||||
"akvorado/common/httpserver"
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
)
|
||||
|
||||
func TestNetworkGeoip(t *testing.T) {
|
||||
// Set up an HTTP server to serve the generated networks.csv
|
||||
|
||||
config := DefaultConfiguration()
|
||||
config.SkipMigrations = true
|
||||
r := reporter.NewMock(t)
|
||||
|
||||
c, err := New(r, config, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
ClickHouse: clickhousedb.SetupClickHouse(t, r),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
}
|
||||
helpers.StartStop(t, c)
|
||||
|
||||
time.Sleep(1000 * time.Millisecond)
|
||||
helpers.TestHTTPEndpoints(t, c.d.HTTP.LocalAddr(), helpers.HTTPEndpointCases{
|
||||
{
|
||||
Description: "try when ready",
|
||||
URL: "/api/v0/orchestrator/clickhouse/networks.csv",
|
||||
ContentType: "text/csv; charset=utf-8",
|
||||
FirstLines: []string{
|
||||
"network,name,role,site,region,country,state,city,tenant,asn",
|
||||
"1.0.0.0/24,,,,,,,,Google Inc.,15169",
|
||||
"1.128.0.0/11,,,,,,,,Telstra Pty Ltd,1221",
|
||||
"2.19.4.136/30,,,,,,,,\"Akamai Technologies, Inc.\",32787",
|
||||
"2.19.4.140/32,,,,,,,,\"Akamai Technologies, Inc.\",32787",
|
||||
"2.125.160.216/29,,,,,GB,,,,32787",
|
||||
"12.81.92.0/22,,,,,,,,AT&T Services,7018",
|
||||
"12.81.96.0/19,,,,,,,,,7018",
|
||||
"12.81.128.0/17,,,,,,,,,7018",
|
||||
"12.82.0.0/15,,,,,,,,,7018",
|
||||
"12.84.0.0/14,,,,,,,,,7018",
|
||||
"12.88.0.0/13,,,,,,,,,7018",
|
||||
"12.96.0.0/20,,,,,,,,,7018",
|
||||
"12.96.16.0/24,,,,,,,,,7018",
|
||||
"15.0.0.0/8,,,,,,,,Hewlett-Packard Company,71",
|
||||
"16.0.0.0/8,,,,,,,,Hewlett-Packard Company,71",
|
||||
"18.0.0.0/8,,,,,,,,Massachusetts Institute of Technology,3",
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -14,6 +14,8 @@ import (
|
||||
"strconv"
|
||||
"text/template"
|
||||
"time"
|
||||
|
||||
"github.com/kentik/patricia"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -131,22 +133,59 @@ func (c *Component) registerHTTPHandlers() error {
|
||||
w.Header().Set("Content-Type", "text/csv; charset=utf-8")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
wr := csv.NewWriter(w)
|
||||
wr.Write([]string{"network", "name", "role", "site", "region", "tenant"})
|
||||
c.networkSourcesLock.RLock()
|
||||
defer c.networkSourcesLock.RUnlock()
|
||||
for _, ss := range c.networkSources {
|
||||
for _, v := range ss {
|
||||
wr.Write([]string{
|
||||
v.Prefix.String(),
|
||||
v.Name, v.Role, v.Site, v.Region, v.Tenant,
|
||||
})
|
||||
wr.Write([]string{"network", "name", "role", "site", "region", "country", "state", "city", "tenant", "asn"})
|
||||
c.convergedNetworksLock.RLock()
|
||||
defer c.convergedNetworksLock.RUnlock()
|
||||
// merge attributes from less-specific prefixes into more-specific ones when the latter are missing them
|
||||
var currentASN uint32
|
||||
c.convergedNetworks.Iter(func(address patricia.IPv6Address, tags [][]NetworkAttributes) error {
|
||||
// build the final network attributes by merging the tags down to the leaf
|
||||
var currentName, currentRegion, currentRole, currentTenant, currentSite, currentCountry, currentCity, currentState string
|
||||
for _, nodeTags := range tags {
|
||||
for _, tag := range nodeTags {
|
||||
if tag.Name != "" {
|
||||
currentName = tag.Name
|
||||
}
|
||||
if tag.Region != "" {
|
||||
currentRegion = tag.Region
|
||||
}
|
||||
if tag.Role != "" {
|
||||
currentRole = tag.Role
|
||||
}
|
||||
if tag.Tenant != "" {
|
||||
currentTenant = tag.Tenant
|
||||
}
|
||||
if tag.Site != "" {
|
||||
currentSite = tag.Site
|
||||
}
|
||||
if tag.ASN != 0 {
|
||||
currentASN = tag.ASN
|
||||
}
|
||||
if tag.Country != "" {
|
||||
currentCountry = tag.Country
|
||||
}
|
||||
if tag.State != "" {
|
||||
currentState = tag.State
|
||||
}
|
||||
if tag.City != "" {
|
||||
currentCity = tag.City
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if c.config.Networks != nil {
|
||||
for k, v := range c.config.Networks.ToMap() {
|
||||
wr.Write([]string{k, v.Name, v.Role, v.Site, v.Region, v.Tenant})
|
||||
|
||||
var asnVal string
|
||||
if currentASN != 0 {
|
||||
asnVal = strconv.Itoa(int(currentASN))
|
||||
}
|
||||
}
|
||||
wr.Write([]string{
|
||||
address.String(),
|
||||
currentName, currentRole, currentSite, currentRegion, currentCountry, currentState, currentCity, currentTenant, asnVal,
|
||||
})
|
||||
return nil
|
||||
})
|
||||
wr.Flush()
|
||||
}))
|
||||
|
||||
|
||||
@@ -7,11 +7,13 @@ import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"akvorado/common/clickhousedb"
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/helpers"
|
||||
"akvorado/common/httpserver"
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
)
|
||||
|
||||
func TestHTTPEndpoints(t *testing.T) {
|
||||
@@ -37,9 +39,11 @@ func TestHTTPEndpoints(t *testing.T) {
|
||||
}
|
||||
// create http entry
|
||||
c, err := New(r, config, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
GeoIP: geoip.NewMock(t, r, false),
|
||||
ClickHouse: clickhousedb.SetupClickHouse(t, r),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -66,8 +70,8 @@ func TestHTTPEndpoints(t *testing.T) {
|
||||
URL: "/api/v0/orchestrator/clickhouse/networks.csv",
|
||||
ContentType: "text/csv; charset=utf-8",
|
||||
FirstLines: []string{
|
||||
`network,name,role,site,region,tenant`,
|
||||
`192.0.2.0/24,infra,,,,`,
|
||||
`network,name,role,site,region,country,state,city,tenant,asn`,
|
||||
`192.0.2.0/24,infra,,,,,,,,`,
|
||||
},
|
||||
}, {
|
||||
URL: "/api/v0/orchestrator/clickhouse/init.sh",
|
||||
@@ -113,9 +117,11 @@ func TestAdditionalASNs(t *testing.T) {
|
||||
1: "New network",
|
||||
}
|
||||
c, err := New(r, config, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r, false),
|
||||
ClickHouse: clickhousedb.SetupClickHouse(t, r),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
)
|
||||
|
||||
type migrationStep struct {
|
||||
@@ -66,17 +67,17 @@ func (c *Component) migrateDatabase() error {
|
||||
// Create dictionaries
|
||||
err := c.wrapMigrations(
|
||||
func() error {
|
||||
return c.createDictionary(ctx, "asns", "hashed",
|
||||
return c.createDictionary(ctx, schema.DictionaryASNs, "hashed",
|
||||
"`asn` UInt32 INJECTIVE, `name` String", "asn")
|
||||
}, func() error {
|
||||
return c.createDictionary(ctx, "protocols", "hashed",
|
||||
return c.createDictionary(ctx, schema.DictionaryProtocols, "hashed",
|
||||
"`proto` UInt8 INJECTIVE, `name` String, `description` String", "proto")
|
||||
}, func() error {
|
||||
return c.createDictionary(ctx, "icmp", "complex_key_hashed",
|
||||
return c.createDictionary(ctx, schema.DictionaryICMP, "complex_key_hashed",
|
||||
"`proto` UInt8, `type` UInt8, `code` UInt8, `name` String", "proto, type, code")
|
||||
}, func() error {
|
||||
return c.createDictionary(ctx, "networks", "ip_trie",
|
||||
"`network` String, `name` String, `role` String, `site` String, `region` String, `tenant` String",
|
||||
return c.createDictionary(ctx, schema.DictionaryNetworks, "ip_trie",
|
||||
"`network` String, `name` String, `role` String, `site` String, `region` String, `city` String, `state` String, `country` String, `tenant` String, `asn` UInt32",
|
||||
"network")
|
||||
})
|
||||
if err != nil {
|
||||
@@ -180,3 +181,8 @@ func (c *Component) getHTTPBaseURL(address string) (string, error) {
|
||||
c.r.Debug().Msgf("detected base URL is %s", base)
|
||||
return base, nil
|
||||
}
|
||||
|
||||
// ReloadDictionary will reload the specified dictionary.
|
||||
func (c *Component) ReloadDictionary(ctx context.Context, dictName string) error {
|
||||
return c.d.ClickHouse.Exec(ctx, fmt.Sprintf("SYSTEM RELOAD DICTIONARY %s", dictName))
|
||||
}
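A sketch of a call site; the one-second timeout mirrors the call made from refreshConvergedNetworks later in this change, and a failure is only logged since the dictionary is also refreshed when its LIFETIME expires:

	// Ask ClickHouse to reload the networks dictionary after its CSV source changed.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	if err := c.ReloadDictionary(ctx, schema.DictionaryNetworks); err != nil {
		c.r.Err(err).Msg("failed to reload networks dictionary")
	}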
|
||||
|
||||
@@ -65,7 +65,7 @@ func (c *Component) tableAlreadyExists(ctx context.Context, table, column, targe
|
||||
table, c.config.Database)
|
||||
var existing string
|
||||
if err := row.Scan(&existing); err != nil && err != sql.ErrNoRows {
|
||||
return false, fmt.Errorf("cannot check if table %s already exists", table)
|
||||
return false, fmt.Errorf("cannot check if table %s already exists: %w", table, err)
|
||||
}
|
||||
existing = strings.ReplaceAll(existing,
|
||||
fmt.Sprintf(`dictGetOrDefault('%s.`, c.config.Database),
|
||||
|
||||
@@ -23,6 +23,7 @@ import (
|
||||
"akvorado/common/kafka"
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
|
||||
"github.com/ClickHouse/clickhouse-go/v2"
|
||||
)
|
||||
@@ -155,15 +156,18 @@ func waitMigrations(t *testing.T, ch *Component) {
|
||||
case <-time.After(30 * time.Second):
|
||||
t.Fatalf("Migrations not finished")
|
||||
}
|
||||
t.Log("Migrations done")
|
||||
}
|
||||
|
||||
func TestGetHTTPBaseURL(t *testing.T) {
|
||||
r := reporter.NewMock(t)
|
||||
http := httpserver.NewMock(t, r)
|
||||
c, err := New(r, DefaultConfiguration(), Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: http,
|
||||
Schema: schema.NewMock(t),
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: http,
|
||||
Schema: schema.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
ClickHouse: clickhousedb.SetupClickHouse(t, r),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -214,6 +218,7 @@ func TestMigration(t *testing.T) {
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -241,7 +246,7 @@ WHERE database=currentDatabase() AND table NOT LIKE '.%'`)
|
||||
}
|
||||
}
|
||||
expected := []string{
|
||||
"asns",
|
||||
schema.DictionaryASNs,
|
||||
"exporters",
|
||||
"flows",
|
||||
"flows_1h0m0s",
|
||||
@@ -253,9 +258,9 @@ WHERE database=currentDatabase() AND table NOT LIKE '.%'`)
|
||||
fmt.Sprintf("flows_%s_raw", hash),
|
||||
fmt.Sprintf("flows_%s_raw_consumer", hash),
|
||||
fmt.Sprintf("flows_%s_raw_errors", hash),
|
||||
"icmp",
|
||||
"networks",
|
||||
"protocols",
|
||||
schema.DictionaryICMP,
|
||||
schema.DictionaryNetworks,
|
||||
schema.DictionaryProtocols,
|
||||
}
|
||||
if diff := helpers.Diff(got, expected); diff != "" {
|
||||
t.Fatalf("SHOW TABLES (-got, +want):\n%s", diff)
|
||||
@@ -303,6 +308,7 @@ LIMIT 1`)
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -339,6 +345,7 @@ LIMIT 1`)
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t).EnableAllColumns(),
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -380,6 +387,7 @@ LIMIT 1`)
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -415,6 +423,7 @@ LIMIT 1`)
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -469,6 +478,7 @@ func TestCustomDictMigration(t *testing.T) {
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -513,6 +523,7 @@ func TestCustomDictMigration(t *testing.T) {
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -544,7 +555,7 @@ func TestCustomDictMigration(t *testing.T) {
|
||||
|
||||
// Check if the rows were created in the consumer flows table
|
||||
rowConsumer := ch.d.ClickHouse.QueryRow(context.Background(), `
|
||||
SHOW CREATE flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_consumer`)
|
||||
SHOW CREATE flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_consumer`)
|
||||
var existingConsumer string
|
||||
if err := rowConsumer.Scan(&existingConsumer); err != nil {
|
||||
t.Fatalf("Scan() error:\n%+v", err)
|
||||
@@ -592,6 +603,7 @@ func TestCustomDictMigration(t *testing.T) {
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: sch,
|
||||
ClickHouse: chComponent,
|
||||
GeoIP: geoip.NewMock(t, r, true),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -623,7 +635,7 @@ func TestCustomDictMigration(t *testing.T) {
|
||||
|
||||
// Check if the rows were removed in the consumer flows table
|
||||
rowConsumer := ch.d.ClickHouse.QueryRow(context.Background(), `
|
||||
SHOW CREATE flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_consumer`)
|
||||
SHOW CREATE flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_consumer`)
|
||||
var existingConsumer string
|
||||
if err := rowConsumer.Scan(&existingConsumer); err != nil {
|
||||
t.Fatalf("Scan() error:\n%+v", err)
|
||||
|
||||
@@ -5,7 +5,9 @@
|
||||
package clickhouse
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net"
|
||||
"sort"
|
||||
"sync"
|
||||
"time"
|
||||
@@ -13,13 +15,16 @@ import (
|
||||
"akvorado/common/remotedatasourcefetcher"
|
||||
|
||||
"github.com/cenkalti/backoff/v4"
|
||||
"github.com/kentik/patricia"
|
||||
"gopkg.in/tomb.v2"
|
||||
|
||||
"akvorado/common/clickhousedb"
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/helpers"
|
||||
"akvorado/common/httpserver"
|
||||
"akvorado/common/reporter"
|
||||
"akvorado/common/schema"
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
)
|
||||
|
||||
// Component represents the ClickHouse configurator.
|
||||
@@ -35,6 +40,11 @@ type Component struct {
|
||||
networkSourcesFetcher *remotedatasourcefetcher.Component[externalNetworkAttributes]
|
||||
networkSources map[string][]externalNetworkAttributes
|
||||
networkSourcesLock sync.RWMutex
|
||||
geoipSources map[string]*helpers.SubnetMap[NetworkAttributes]
|
||||
geoipOrder map[string]int
|
||||
geoipSourcesLock sync.RWMutex
|
||||
convergedNetworksLock sync.RWMutex
|
||||
convergedNetworks *helpers.SubnetMap[NetworkAttributes]
|
||||
}
|
||||
|
||||
// Dependencies define the dependencies of the ClickHouse configurator.
|
||||
@@ -43,18 +53,21 @@ type Dependencies struct {
|
||||
HTTP *httpserver.Component
|
||||
ClickHouse *clickhousedb.Component
|
||||
Schema *schema.Component
|
||||
GeoIP *geoip.Component
|
||||
}
|
||||
|
||||
// New creates a new ClickHouse component.
|
||||
func New(r *reporter.Reporter, configuration Configuration, dependencies Dependencies) (*Component, error) {
|
||||
|
||||
c := Component{
|
||||
r: r,
|
||||
d: &dependencies,
|
||||
config: configuration,
|
||||
migrationsDone: make(chan bool),
|
||||
migrationsOnce: make(chan bool),
|
||||
networkSources: make(map[string][]externalNetworkAttributes),
|
||||
r: r,
|
||||
d: &dependencies,
|
||||
config: configuration,
|
||||
migrationsDone: make(chan bool),
|
||||
migrationsOnce: make(chan bool),
|
||||
networkSources: make(map[string][]externalNetworkAttributes),
|
||||
geoipSources: make(map[string]*helpers.SubnetMap[NetworkAttributes]),
|
||||
geoipOrder: make(map[string]int),
|
||||
convergedNetworks: helpers.MustNewSubnetMap[NetworkAttributes](nil),
|
||||
}
|
||||
var err error
|
||||
c.networkSourcesFetcher, err = remotedatasourcefetcher.New[externalNetworkAttributes](r, c.UpdateRemoteDataSource, "network_source", configuration.NetworkSources)
|
||||
@@ -62,6 +75,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
|
||||
return nil, fmt.Errorf("unable to initialize remote data source fetcher component: %w", err)
|
||||
}
|
||||
c.initMetrics()
|
||||
|
||||
if err := c.registerHTTPHandlers(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -74,6 +88,8 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
|
||||
return nil, fmt.Errorf("resolutions need to be configured, including interval: 0")
|
||||
}
|
||||
|
||||
c.d.Daemon.Track(&c.t, "orchestrator/clickhouse")
|
||||
|
||||
return &c, nil
|
||||
}
|
||||
|
||||
@@ -81,6 +97,12 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende
|
||||
func (c *Component) Start() error {
|
||||
c.r.Info().Msg("starting ClickHouse component")
|
||||
|
||||
// stub to prevent tomb dying immediately after migrations are done
|
||||
c.t.Go(func() error {
|
||||
<-c.t.Dying()
|
||||
return nil
|
||||
})
|
||||
|
||||
// Database migration
|
||||
migrationsOnce := false
|
||||
c.metrics.migrationsRunning.Set(1)
|
||||
@@ -111,11 +133,186 @@ func (c *Component) Start() error {
|
||||
}
|
||||
})
|
||||
|
||||
// refresh converged networks after migrations
|
||||
// because it will trigger a SYSTEM RELOAD DICTIONARY
|
||||
|
||||
// not sure here if c.migrationsDone should be closed
|
||||
// regardless of whether migrations are skipped or not
|
||||
if !c.config.SkipMigrations {
|
||||
<-c.migrationsDone
|
||||
}
|
||||
c.r.Log().Msg("refreshing converved networks")
|
||||
if err := c.refreshConvergedNetworks(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Network sources update
|
||||
if err := c.networkSourcesFetcher.Start(); err != nil {
|
||||
return fmt.Errorf("unable to start network sources fetcher component: %w", err)
|
||||
}
|
||||
notifyChan, initDoneChan := c.d.GeoIP.Notify()
|
||||
|
||||
// process GeoIP database updates
|
||||
c.t.Go(func() error {
|
||||
c.r.Log().Msg("Starting geoip refresher")
|
||||
for {
|
||||
select {
|
||||
case <-c.t.Dying():
|
||||
return nil
|
||||
case notif := <-notifyChan:
|
||||
geoipData := helpers.MustNewSubnetMap[NetworkAttributes](nil)
|
||||
switch notif.Kind {
|
||||
case "asn":
|
||||
err := c.d.GeoIP.IterASNDatabase(notif.Path, func(subnet *net.IPNet, data geoip.ASNInfo) error {
|
||||
subV6Str, err := helpers.SubnetMapParseKey(subnet.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
attrs := NetworkAttributes{
|
||||
ASN: data.ASNumber,
|
||||
Tenant: data.ASName,
|
||||
}
|
||||
return geoipData.Update(subV6Str, attrs, overrideNetworkAttrs(attrs))
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case "geo":
|
||||
err := c.d.GeoIP.IterGeoDatabase(notif.Path, func(subnet *net.IPNet, data geoip.GeoInfo) error {
|
||||
subV6Str, err := helpers.SubnetMapParseKey(subnet.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
attrs := NetworkAttributes{
|
||||
State: data.State,
|
||||
Country: data.Country,
|
||||
City: data.City,
|
||||
}
|
||||
return geoipData.Update(subV6Str, attrs, overrideNetworkAttrs(attrs))
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
c.geoipSourcesLock.Lock()
|
||||
c.geoipSources[notif.Path] = geoipData
|
||||
c.geoipOrder[notif.Path] = notif.Index
|
||||
c.geoipSourcesLock.Unlock()
|
||||
}
|
||||
if err := c.refreshConvergedNetworks(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// wait for initial sync of geoip component
|
||||
select {
|
||||
case <-initDoneChan:
|
||||
case <-c.t.Dying():
|
||||
}
|
||||
c.r.Info().Msg("ClickHouse component started")
|
||||
return nil
|
||||
}
|
||||
|
||||
func overrideNetworkAttrs(newAttrs NetworkAttributes) func(existing NetworkAttributes) NetworkAttributes {
|
||||
return func(existing NetworkAttributes) NetworkAttributes {
|
||||
if newAttrs.ASN != 0 {
|
||||
existing.ASN = newAttrs.ASN
|
||||
}
|
||||
if newAttrs.Name != "" {
|
||||
existing.Name = newAttrs.Name
|
||||
}
|
||||
if newAttrs.Region != "" {
|
||||
existing.Region = newAttrs.Region
|
||||
}
|
||||
if newAttrs.Site != "" {
|
||||
existing.Site = newAttrs.Site
|
||||
}
|
||||
if newAttrs.Role != "" {
|
||||
existing.Role = newAttrs.Role
|
||||
}
|
||||
if newAttrs.Tenant != "" {
|
||||
existing.Tenant = newAttrs.Tenant
|
||||
}
|
||||
if newAttrs.Country != "" {
|
||||
existing.Country = newAttrs.Country
|
||||
}
|
||||
if newAttrs.State != "" {
|
||||
existing.State = newAttrs.State
|
||||
}
|
||||
if newAttrs.City != "" {
|
||||
existing.City = newAttrs.City
|
||||
}
|
||||
return existing
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Component) refreshConvergedNetworks() error {
|
||||
|
||||
c.geoipSourcesLock.RLock()
|
||||
// inject info from GeoIP first so that custom networks will override
|
||||
networks := helpers.MustNewSubnetMap[NetworkAttributes](nil)
|
||||
// iterate in the order the databases appear in the configuration
|
||||
geoipDbs := make([]string, 0, len(c.geoipSources))
|
||||
for k := range c.geoipOrder {
|
||||
geoipDbs = append(geoipDbs, k)
|
||||
}
|
||||
sort.Slice(geoipDbs, func(i, j int) bool {
|
||||
// sort in reverse order, so that the first item of the user list overrides the data (first=best)
|
||||
return c.geoipOrder[geoipDbs[i]] > c.geoipOrder[geoipDbs[j]]
|
||||
})
|
||||
|
||||
for _, dbName := range geoipDbs {
|
||||
err := c.geoipSources[dbName].Iter(func(address patricia.IPv6Address, tags [][]NetworkAttributes) error {
|
||||
return networks.Update(
|
||||
address.String(),
|
||||
tags[len(tags)-1][0],
|
||||
// override existing network attributes
|
||||
overrideNetworkAttrs(tags[len(tags)-1][0]),
|
||||
)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
c.geoipSourcesLock.RUnlock()
|
||||
|
||||
c.networkSourcesLock.RLock()
|
||||
for _, networkList := range c.networkSources {
|
||||
for _, val := range networkList {
|
||||
if err := networks.Update(
|
||||
val.Prefix.String(),
|
||||
val.NetworkAttributes,
|
||||
// override existing network attributes
|
||||
overrideNetworkAttrs(val.NetworkAttributes),
|
||||
); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
c.networkSourcesLock.RUnlock()
|
||||
if c.config.Networks != nil {
|
||||
err := c.config.Networks.Iter(func(address patricia.IPv6Address, tags [][]NetworkAttributes) error {
|
||||
return networks.Update(
|
||||
address.String(),
|
||||
tags[len(tags)-1][0],
|
||||
// override existing network attributes
|
||||
overrideNetworkAttrs(tags[len(tags)-1][0]),
|
||||
)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
c.convergedNetworksLock.Lock()
|
||||
c.convergedNetworks = networks
|
||||
c.convergedNetworksLock.Unlock()
|
||||
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
|
||||
defer cancel()
|
||||
if err := c.ReloadDictionary(ctx, schema.DictionaryNetworks); err != nil {
|
||||
c.r.Err(err).Msg("failed to refresh networks dict")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -25,5 +25,8 @@ func (c *Component) UpdateRemoteDataSource(ctx context.Context, name string, sou
|
||||
c.networkSourcesLock.Lock()
|
||||
c.networkSources[name] = results
|
||||
c.networkSourcesLock.Unlock()
|
||||
if err := c.refreshConvergedNetworks(); err != nil {
|
||||
return len(results), err
|
||||
}
|
||||
return len(results), nil
|
||||
}
|
||||
|
||||
@@ -11,7 +11,9 @@ import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"akvorado/common/clickhousedb"
|
||||
"akvorado/common/remotedatasourcefetcher"
|
||||
"akvorado/orchestrator/clickhouse/geoip"
|
||||
|
||||
"akvorado/common/daemon"
|
||||
"akvorado/common/helpers"
|
||||
@@ -96,9 +98,11 @@ func TestNetworkSources(t *testing.T) {
|
||||
},
|
||||
}
|
||||
c, err := New(r, config, Dependencies{
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
Daemon: daemon.NewMock(t),
|
||||
HTTP: httpserver.NewMock(t, r),
|
||||
Schema: schema.NewMock(t),
|
||||
GeoIP: geoip.NewMock(t, r, false),
|
||||
ClickHouse: clickhousedb.SetupClickHouse(t, r),
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatalf("New() error:\n%+v", err)
|
||||
@@ -121,10 +125,10 @@ func TestNetworkSources(t *testing.T) {
|
||||
URL: "/api/v0/orchestrator/clickhouse/networks.csv",
|
||||
ContentType: "text/csv; charset=utf-8",
|
||||
FirstLines: []string{
|
||||
`network,name,role,site,region,tenant`,
|
||||
`3.2.34.0/26,,amazon,,af-south-1,amazon`,
|
||||
`2600:1ff2:4000::/40,,amazon,,us-west-2,amazon`,
|
||||
`2600:1f14:fff:f800::/56,,route53_healthchecks,,us-west-2,amazon`,
|
||||
`network,name,role,site,region,country,state,city,tenant,asn`,
|
||||
`3.2.34.0/26,,amazon,,af-south-1,,,,amazon,`,
|
||||
`2600:1f14:fff:f800::/56,,route53_healthchecks,,us-west-2,,,,amazon,`,
|
||||
`2600:1ff2:4000::/40,,amazon,,us-west-2,,,,amazon,`,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
22
orchestrator/clickhouse/testdata/states/011.csv
vendored
22
orchestrator/clickhouse/testdata/states/011.csv
vendored
@@ -1,15 +1,15 @@
|
||||
"asns","CREATE DICTIONARY default.asns (`asn` UInt32 INJECTIVE, `name` String) PRIMARY KEY asn SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/asns.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)"
|
||||
"icmp","CREATE DICTIONARY default.icmp (`proto` UInt8, `type` UInt8, `code` UInt8, `name` String) PRIMARY KEY proto, type, code SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/icmp.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(COMPLEX_KEY_HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)"
|
||||
"flows","CREATE TABLE default.flows (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6 CODEC(ZSTD(1)), `DstAddr` IPv6 CODEC(ZSTD(1)), `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcNetPrefix` String ALIAS multiIf(EType = 2048, concat(replaceRegexpOne(CAST(IPv6CIDRToRange(SrcAddr, CAST(96 + SrcNetMask, 'UInt8')).1, 'String'), '^::ffff:', ''), '/', CAST(SrcNetMask, 'String')), EType = 34525, concat(CAST(IPv6CIDRToRange(SrcAddr, SrcNetMask).1, 'String'), '/', CAST(SrcNetMask, 'String')), ''), `DstNetPrefix` String ALIAS multiIf(EType = 2048, concat(replaceRegexpOne(CAST(IPv6CIDRToRange(DstAddr, CAST(96 + DstNetMask, 'UInt8')).1, 'String'), '^::ffff:', ''), '/', CAST(DstNetMask, 'String')), EType = 34525, concat(CAST(IPv6CIDRToRange(DstAddr, DstNetMask).1, 'String'), '/', CAST(DstNetMask, 'String')), ''), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(25920))) ORDER BY (TimeReceived, ExporterAddress, InIfName, OutIfName) TTL TimeReceived + toIntervalSecond(1296000) SETTINGS index_granularity = 8192"
"networks","CREATE DICTIONARY default.networks (`network` String, `name` String, `role` String, `site` String, `region` String, `tenant` String) PRIMARY KEY network SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/networks.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(IP_TRIE()) SETTINGS(format_csv_allow_single_quotes = 0)"
"flows","CREATE TABLE default.flows (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6 CODEC(ZSTD(1)), `DstAddr` IPv6 CODEC(ZSTD(1)), `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcNetPrefix` String ALIAS multiIf(EType = 2048, concat(replaceRegexpOne(CAST(IPv6CIDRToRange(SrcAddr, CAST(96 + SrcNetMask, 'UInt8')).1, 'String'), '^::ffff:', ''), '/', CAST(SrcNetMask, 'String')), EType = 34525, concat(CAST(IPv6CIDRToRange(SrcAddr, SrcNetMask).1, 'String'), '/', CAST(SrcNetMask, 'String')), ''), `DstNetPrefix` String ALIAS multiIf(EType = 2048, concat(replaceRegexpOne(CAST(IPv6CIDRToRange(DstAddr, CAST(96 + DstNetMask, 'UInt8')).1, 'String'), '^::ffff:', ''), '/', CAST(DstNetMask, 'String')), EType = 34525, concat(CAST(IPv6CIDRToRange(DstAddr, DstNetMask).1, 'String'), '/', CAST(DstNetMask, 'String')), ''), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(25920))) ORDER BY (TimeReceived, ExporterAddress, InIfName, OutIfName) TTL TimeReceived + toIntervalSecond(1296000) SETTINGS index_granularity = 8192"
"networks","CREATE DICTIONARY default.networks (`network` String, `name` String, `role` String, `site` String, `region` String, `city` String, `state` String, `country` String, `tenant` String, `asn` UInt32) PRIMARY KEY network SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/networks.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(IP_TRIE()) SETTINGS(format_csv_allow_single_quotes = 0)"
"exporters","CREATE MATERIALIZED VIEW default.exporters (`TimeReceived` DateTime, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `IfName` String, `IfDescription` String, `IfSpeed` UInt32, `IfConnectivity` String, `IfProvider` String, `IfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2)) ENGINE = ReplacingMergeTree(TimeReceived) ORDER BY (ExporterAddress, IfName) TTL TimeReceived + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT DISTINCT TimeReceived, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, [InIfName, OutIfName][num] AS IfName, [InIfDescription, OutIfDescription][num] AS IfDescription, [InIfSpeed, OutIfSpeed][num] AS IfSpeed, [InIfConnectivity, OutIfConnectivity][num] AS IfConnectivity, [InIfProvider, OutIfProvider][num] AS IfProvider, [InIfBoundary, OutIfBoundary][num] AS IfBoundary FROM default.flows ARRAY JOIN arrayEnumerate([1, 2]) AS num"
"protocols","CREATE DICTIONARY default.protocols (`proto` UInt8 INJECTIVE, `name` String, `description` String) PRIMARY KEY proto SOURCE(HTTP(URL 'http://something/api/v0/orchestrator/clickhouse/protocols.csv' FORMAT 'CSVWithNames')) LIFETIME(MIN 0 MAX 3600) LAYOUT(HASHED()) SETTINGS(format_csv_allow_single_quotes = 0)"
"flows_1m0s","CREATE TABLE default.flows_1m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(12096))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(604800) SETTINGS index_granularity = 8192"
"flows_5m0s","CREATE TABLE default.flows_5m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(155520))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(7776000) SETTINGS index_granularity = 8192"
"flows_1h0m0s","CREATE TABLE default.flows_1h0m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(622080))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(31104000) SETTINGS index_granularity = 8192"
"flows_1m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1m0s_consumer TO default.flows_1m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(60)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_5m0s_consumer","CREATE MATERIALIZED VIEW default.flows_5m0s_consumer TO default.flows_5m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(300)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_1h0m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1h0m0s_consumer TO default.flows_1h0m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(3600)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw","CREATE TABLE default.flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6 CODEC(ZSTD(1)), `DstAddr` IPv6 CODEC(ZSTD(1)), `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunitiesASN` Array(UInt32), `DstLargeCommunitiesLocalData1` Array(UInt32), `DstLargeCommunitiesLocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-ZUYGDTE3EBIXX352XPM3YEEFV4', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-ZUYGDTE3EBIXX352XPM3YEEFV4.proto:FlowMessagevZUYGDTE3EBIXX352XPM3YEEFV4', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'"
"flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_errors","CREATE MATERIALIZED VIEW default.flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw WHERE length(_error) > 0"
"flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_consumer","CREATE MATERIALIZED VIEW default.flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, SrcAS, DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', DstAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', DstAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', DstAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', DstAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', DstAddr, '') AS DstNetTenant, SrcCountry, DstCountry, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), DstLargeCommunitiesASN, DstLargeCommunitiesLocalData1, DstLargeCommunitiesLocalData2) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_ZUYGDTE3EBIXX352XPM3YEEFV4_raw WHERE length(_error) = 0"
"flows_1m0s","CREATE TABLE default.flows_1m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(12096))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(604800) SETTINGS index_granularity = 8192"
"flows_5m0s","CREATE TABLE default.flows_5m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(155520))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(7776000) SETTINGS index_granularity = 8192"
"flows_1h0m0s","CREATE TABLE default.flows_1h0m0s (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `PacketSize` UInt64 ALIAS intDiv(Bytes, Packets), `PacketSizeBucket` LowCardinality(String) ALIAS multiIf(PacketSize < 64, '0-63', PacketSize < 128, '64-127', PacketSize < 256, '128-255', PacketSize < 512, '256-511', PacketSize < 768, '512-767', PacketSize < 1024, '768-1023', PacketSize < 1280, '1024-1279', PacketSize < 1501, '1280-1500', PacketSize < 2048, '1501-2047', PacketSize < 3072, '2048-3071', PacketSize < 4096, '3072-4095', PacketSize < 8192, '4096-8191', PacketSize < 10240, '8192-10239', PacketSize < 16384, '10240-16383', PacketSize < 32768, '16384-32767', PacketSize < 65536, '32768-65535', '65536-Inf'), `ForwardingStatus` UInt32) ENGINE = SummingMergeTree((Bytes, Packets)) PARTITION BY toYYYYMMDDhhmmss(toStartOfInterval(TimeReceived, toIntervalSecond(622080))) PRIMARY KEY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate) ORDER BY (TimeReceived, ExporterAddress, EType, Proto, InIfName, SrcAS, ForwardingStatus, OutIfName, DstAS, SamplingRate, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS) TTL TimeReceived + toIntervalSecond(31104000) SETTINGS index_granularity = 8192"
"flows_1m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1m0s_consumer TO default.flows_1m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(60)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_5m0s_consumer","CREATE MATERIALIZED VIEW default.flows_5m0s_consumer TO default.flows_5m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(300)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_1h0m0s_consumer","CREATE MATERIALIZED VIEW default.flows_1h0m0s_consumer TO default.flows_1h0m0s (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` LowCardinality(String), `DstNetName` LowCardinality(String), `SrcNetRole` LowCardinality(String), `DstNetRole` LowCardinality(String), `SrcNetSite` LowCardinality(String), `DstNetSite` LowCardinality(String), `SrcNetRegion` LowCardinality(String), `DstNetRegion` LowCardinality(String), `SrcNetTenant` LowCardinality(String), `DstNetTenant` LowCardinality(String), `SrcCountry` FixedString(2), `DstCountry` FixedString(2), `SrcGeoCity` LowCardinality(String), `DstGeoCity` LowCardinality(String), `SrcGeoState` LowCardinality(String), `DstGeoState` LowCardinality(String), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS SELECT toStartOfInterval(TimeReceived, toIntervalSecond(3600)) AS TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAS, DstAS, SrcNetName, DstNetName, SrcNetRole, DstNetRole, SrcNetSite, DstNetSite, SrcNetRegion, DstNetRegion, SrcNetTenant, DstNetTenant, SrcCountry, DstCountry, SrcGeoCity, DstGeoCity, SrcGeoState, DstGeoState, Dst1stAS, Dst2ndAS, Dst3rdAS, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, Bytes, Packets, ForwardingStatus FROM default.flows"
"flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw","CREATE TABLE default.flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw (`TimeReceived` DateTime CODEC(DoubleDelta, LZ4), `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6 CODEC(ZSTD(1)), `DstAddr` IPv6 CODEC(ZSTD(1)), `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `DstASPath` Array(UInt32), `DstCommunities` Array(UInt32), `DstLargeCommunitiesASN` Array(UInt32), `DstLargeCommunitiesLocalData1` Array(UInt32), `DstLargeCommunitiesLocalData2` Array(UInt32), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64 CODEC(T64, LZ4), `Packets` UInt64 CODEC(T64, LZ4), `ForwardingStatus` UInt32) ENGINE = Kafka SETTINGS kafka_broker_list = '127.0.0.1:9092', kafka_topic_list = 'flows-LAABIGYMRYZPTGOYIIFZNYDEQM', kafka_group_name = 'clickhouse', kafka_format = 'Protobuf', kafka_schema = 'flow-LAABIGYMRYZPTGOYIIFZNYDEQM.proto:FlowMessagevLAABIGYMRYZPTGOYIIFZNYDEQM', kafka_num_consumers = 1, kafka_thread_per_consumer = 1, kafka_handle_error_mode = 'stream'"
"flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_errors","CREATE MATERIALIZED VIEW default.flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_errors (`timestamp` DateTime, `topic` LowCardinality(String), `partition` UInt64, `offset` UInt64, `raw` String, `error` String) ENGINE = MergeTree PARTITION BY toYYYYMMDDhhmmss(toStartOfHour(timestamp)) ORDER BY (timestamp, topic, partition, offset) TTL timestamp + toIntervalDay(1) SETTINGS index_granularity = 8192 AS SELECT now() AS timestamp, _topic AS topic, _partition AS partition, _offset AS offset, _raw_message AS raw, _error AS error FROM default.flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw WHERE length(_error) > 0"
"flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_consumer","CREATE MATERIALIZED VIEW default.flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw_consumer TO default.flows (`TimeReceived` DateTime, `SamplingRate` UInt64, `ExporterAddress` LowCardinality(IPv6), `ExporterName` LowCardinality(String), `ExporterGroup` LowCardinality(String), `ExporterRole` LowCardinality(String), `ExporterSite` LowCardinality(String), `ExporterRegion` LowCardinality(String), `ExporterTenant` LowCardinality(String), `SrcAddr` IPv6, `DstAddr` IPv6, `SrcNetMask` UInt8, `DstNetMask` UInt8, `SrcAS` UInt32, `DstAS` UInt32, `SrcNetName` String, `DstNetName` String, `SrcNetRole` String, `DstNetRole` String, `SrcNetSite` String, `DstNetSite` String, `SrcNetRegion` String, `DstNetRegion` String, `SrcNetTenant` String, `DstNetTenant` String, `SrcCountry` String, `DstCountry` String, `SrcGeoCity` String, `DstGeoCity` String, `SrcGeoState` String, `DstGeoState` String, `DstASPath` Array(UInt32), `Dst1stAS` UInt32, `Dst2ndAS` UInt32, `Dst3rdAS` UInt32, `DstCommunities` Array(UInt32), `DstLargeCommunities` Array(UInt128), `InIfName` LowCardinality(String), `OutIfName` LowCardinality(String), `InIfDescription` LowCardinality(String), `OutIfDescription` LowCardinality(String), `InIfSpeed` UInt32, `OutIfSpeed` UInt32, `InIfConnectivity` LowCardinality(String), `OutIfConnectivity` LowCardinality(String), `InIfProvider` LowCardinality(String), `OutIfProvider` LowCardinality(String), `InIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `OutIfBoundary` Enum8('undefined' = 0, 'external' = 1, 'internal' = 2), `EType` UInt32, `Proto` UInt32, `SrcPort` UInt16, `DstPort` UInt16, `Bytes` UInt64, `Packets` UInt64, `ForwardingStatus` UInt32) AS WITH arrayCompact(DstASPath) AS c_DstASPath SELECT TimeReceived, SamplingRate, ExporterAddress, ExporterName, ExporterGroup, ExporterRole, ExporterSite, ExporterRegion, ExporterTenant, SrcAddr, DstAddr, SrcNetMask, DstNetMask, if(SrcAS = 0, dictGetOrDefault('default.networks', 'asn', SrcAddr, 0), SrcAS) AS SrcAS, if(DstAS = 0, dictGetOrDefault('default.networks', 'asn', DstAddr, 0), DstAS) AS DstAS, dictGetOrDefault('default.networks', 'name', SrcAddr, '') AS SrcNetName, dictGetOrDefault('default.networks', 'name', DstAddr, '') AS DstNetName, dictGetOrDefault('default.networks', 'role', SrcAddr, '') AS SrcNetRole, dictGetOrDefault('default.networks', 'role', DstAddr, '') AS DstNetRole, dictGetOrDefault('default.networks', 'site', SrcAddr, '') AS SrcNetSite, dictGetOrDefault('default.networks', 'site', DstAddr, '') AS DstNetSite, dictGetOrDefault('default.networks', 'region', SrcAddr, '') AS SrcNetRegion, dictGetOrDefault('default.networks', 'region', DstAddr, '') AS DstNetRegion, dictGetOrDefault('default.networks', 'tenant', SrcAddr, '') AS SrcNetTenant, dictGetOrDefault('default.networks', 'tenant', DstAddr, '') AS DstNetTenant, dictGetOrDefault('default.networks', 'country', SrcAddr, '') AS SrcCountry, dictGetOrDefault('default.networks', 'country', DstAddr, '') AS DstCountry, dictGetOrDefault('default.networks', 'city', SrcAddr, '') AS SrcGeoCity, dictGetOrDefault('default.networks', 'city', DstAddr, '') AS DstGeoCity, dictGetOrDefault('default.networks', 'state', SrcAddr, '') AS SrcGeoState, dictGetOrDefault('default.networks', 'state', DstAddr, '') AS DstGeoState, DstASPath, c_DstASPath[1] AS Dst1stAS, c_DstASPath[2] AS Dst2ndAS, c_DstASPath[3] AS Dst3rdAS, DstCommunities, arrayMap((asn, l1, l2) -> ((bitShiftLeft(CAST(asn, 'UInt128'), 64) + bitShiftLeft(CAST(l1, 'UInt128'), 32)) + CAST(l2, 'UInt128')), 
DstLargeCommunitiesASN, DstLargeCommunitiesLocalData1, DstLargeCommunitiesLocalData2) AS DstLargeCommunities, InIfName, OutIfName, InIfDescription, OutIfDescription, InIfSpeed, OutIfSpeed, InIfConnectivity, OutIfConnectivity, InIfProvider, OutIfProvider, InIfBoundary, OutIfBoundary, EType, Proto, SrcPort, DstPort, Bytes, Packets, ForwardingStatus FROM default.flows_LAABIGYMRYZPTGOYIIFZNYDEQM_raw WHERE length(_error) = 0"