diff --git a/.gitignore b/.gitignore index 998d5c5b..72f5128b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,6 @@ /bin/ /test/ -/flow/decoder/flow*.pb.go +/inlet/flow/decoder/flow*.pb.go -/web/data/node_modules/ -/web/data/assets/generated/ +/console/data/node_modules/ +/console/data/assets/generated/ diff --git a/Makefile b/Makefile index 4ec7a7a5..fc9e691a 100644 --- a/Makefile +++ b/Makefile @@ -13,7 +13,7 @@ M = $(shell if [ "$$(tput colors 2> /dev/null || echo 0)" -ge 8 ]; then printf " export GO111MODULE=on -GENERATED = flow/decoder/flow-1.pb.go web/data/node_modules web/data/assets/generated +GENERATED = inlet/flow/decoder/flow-1.pb.go console/data/node_modules console/data/assets/generated .PHONY: all all: fmt lint $(GENERATED) | $(BIN) ; $(info $(M) building executable…) @ ## Build program binary @@ -47,17 +47,17 @@ $(BIN)/protoc-gen-go: PACKAGE=google.golang.org/protobuf/cmd/protoc-gen-go # Generated files -flow/decoder/%.pb.go: flow/data/schemas/%.proto | $(PROTOC_GEN_GO) ; $(info $(M) compiling protocol buffers definition…) +inlet/flow/decoder/%.pb.go: inlet/flow/data/schemas/%.proto | $(PROTOC_GEN_GO) ; $(info $(M) compiling protocol buffers definition…) $Q $(PROTOC) -I=. --plugin=$(PROTOC_GEN_GO) --go_out=. --go_opt=module=$(MODULE) $< -web/data/node_modules: web/data/package.json web/data/yarn.lock ; $(info $(M) fetching node modules…) - $Q yarn install --frozen-lockfile --cwd web/data && touch $@ -web/data/assets/generated: web/data/node_modules Makefile ; $(info $(M) copying static assets…) +console/data/node_modules: console/data/package.json console/data/yarn.lock ; $(info $(M) fetching node modules…) + $Q yarn install --frozen-lockfile --cwd console/data && touch $@ +console/data/assets/generated: console/data/node_modules Makefile ; $(info $(M) copying static assets…) $Q rm -rf $@ && mkdir -p $@/stylesheets $@/javascript $@/fonts - $Q cp web/data/node_modules/@mdi/font/fonts/materialdesignicons-webfont.woff* $@/fonts/. 
- $Q cp web/data/node_modules/@mdi/font/css/materialdesignicons.min.css $@/stylesheets/. - $Q cp web/data/node_modules/bootstrap/dist/css/bootstrap.min.css $@/stylesheets/. - $Q cp web/data/node_modules/bootstrap/dist/js/bootstrap.bundle.min.js $@/javascript/. + $Q cp console/data/node_modules/@mdi/font/fonts/materialdesignicons-webfont.woff* $@/fonts/. + $Q cp console/data/node_modules/@mdi/font/css/materialdesignicons.min.css $@/stylesheets/. + $Q cp console/data/node_modules/bootstrap/dist/css/bootstrap.min.css $@/stylesheets/. + $Q cp console/data/node_modules/bootstrap/dist/js/bootstrap.bundle.min.js $@/javascript/. # These files are versioned in Git, but we may want to update them. clickhouse/data/protocols.csv: diff --git a/cmd/components.go b/cmd/components.go new file mode 100644 index 00000000..f5614cae --- /dev/null +++ b/cmd/components.go @@ -0,0 +1,48 @@ +package cmd + +import ( + "fmt" + + "akvorado/common/daemon" + "akvorado/common/reporter" +) + +// StartStopComponents activate/deactivate components in order. +func StartStopComponents(r *reporter.Reporter, daemonComponent daemon.Component, otherComponents []interface{}) error { + components := append([]interface{}{r, daemonComponent}, otherComponents...) + startedComponents := []interface{}{} + defer func() { + for _, cmp := range startedComponents { + if stopperC, ok := cmp.(stopper); ok { + if err := stopperC.Stop(); err != nil { + r.Err(err).Msg("unable to stop component, ignoring") + } + } + } + }() + for _, cmp := range components { + if starterC, ok := cmp.(starter); ok { + if err := starterC.Start(); err != nil { + return fmt.Errorf("unable to start component: %w", err) + } + } + startedComponents = append([]interface{}{cmp}, startedComponents...) + } + + r.Info(). + Str("version", Version).Str("build-date", BuildDate). 
+ Msg("akvorado has started") + + select { + case <-daemonComponent.Terminated(): + r.Info().Msg("stopping all components") + } + return nil +} + +type starter interface { + Start() error +} +type stopper interface { + Stop() error +} diff --git a/cmd/components_test.go b/cmd/components_test.go new file mode 100644 index 00000000..fbfd4f6e --- /dev/null +++ b/cmd/components_test.go @@ -0,0 +1,81 @@ +package cmd_test + +import ( + "errors" + "testing" + + "akvorado/cmd" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" +) + +type Startable struct { + Started bool +} +type Stopable struct { + Stopped bool +} + +func (c *Startable) Start() error { + c.Started = true + return nil +} +func (c *Stopable) Stop() error { + c.Stopped = true + return nil +} + +type ComponentStartStop struct { + Startable + Stopable +} +type ComponentStop struct { + Stopable +} +type ComponentStart struct { + Startable +} +type ComponentNone struct{} +type ComponentStartError struct { + Stopable +} + +func (c ComponentStartError) Start() error { + return errors.New("nooo") +} + +func TestStartStop(t *testing.T) { + r := reporter.NewMock(t) + daemonComponent := daemon.NewMock(t) + otherComponents := []interface{}{ + &ComponentStartStop{}, + &ComponentStop{}, + &ComponentStart{}, + &ComponentNone{}, + &ComponentStartError{}, + &ComponentStartStop{}, + } + if err := cmd.StartStopComponents(r, daemonComponent, otherComponents); err == nil { + t.Error("StartStopComponents() did not trigger an error") + } + + expected := []interface{}{ + &ComponentStartStop{ + Startable: Startable{Started: true}, + Stopable: Stopable{Stopped: true}, + }, + &ComponentStop{ + Stopable: Stopable{Stopped: true}, + }, + &ComponentStart{ + Startable: Startable{Started: true}, + }, + &ComponentNone{}, + &ComponentStartError{}, + &ComponentStartStop{}, + } + if diff := helpers.Diff(otherComponents, expected); diff != "" { + t.Errorf("StartStopComponents() (-got, +want):\n%s", diff) + } +} 
diff --git a/cmd/config.go b/cmd/config.go new file mode 100644 index 00000000..3eb7b12b --- /dev/null +++ b/cmd/config.go @@ -0,0 +1,107 @@ +package cmd + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "strconv" + "strings" + + "github.com/mitchellh/mapstructure" + "gopkg.in/yaml.v2" + + "akvorado/inlet/flow" +) + +// ConfigRelatedOptions are command-line options related to handling a +// configuration file. +type ConfigRelatedOptions struct { + Path string + Dump bool +} + +// Parse parses the configuration file (if present) and the +// environment variables into the provided configuration. +func (c ConfigRelatedOptions) Parse(out io.Writer, component string, config interface{}) error { + var rawConfig map[string]interface{} + if cfgFile := c.Path; cfgFile != "" { + input, err := ioutil.ReadFile(cfgFile) + if err != nil { + return fmt.Errorf("unable to read configuration file: %w", err) + } + if err := yaml.Unmarshal(input, &rawConfig); err != nil { + return fmt.Errorf("unable to parse configuration file: %w", err) + } + } + + // Parse provided configuration + decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ + Result: &config, + ErrorUnused: true, + Metadata: nil, + WeaklyTypedInput: true, + MatchName: func(mapKey, fieldName string) bool { + key := strings.ToLower(strings.ReplaceAll(mapKey, "-", "")) + field := strings.ToLower(fieldName) + return key == field + }, + DecodeHook: mapstructure.ComposeDecodeHookFunc( + flow.ConfigurationUnmarshalerHook(), + mapstructure.TextUnmarshallerHookFunc(), + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToSliceHookFunc(","), + ), + }) + if err != nil { + return fmt.Errorf("unable to create configuration decoder: %w", err) + } + if err := decoder.Decode(rawConfig); err != nil { + return fmt.Errorf("unable to parse configuration: %w", err) + } + + // Override with environment variables + for _, keyval := range os.Environ() { + kv := strings.SplitN(keyval, "=", 2) + if len(kv) != 2 { + 
continue + } + kk := strings.Split(kv[0], "_") + if len(kk) < 3 || kk[0] != "AKVORADO" || kk[1] != strings.ToUpper(component) { + continue + } + // From AKVORADO_CMP_SQUID_PURPLE_QUIRK=47, we + // build a map "squid -> purple -> quirk -> + // 47". From AKVORADO_CMP_SQUID_3_PURPLE=47, we + // build "squid[3] -> purple -> 47" + var rawConfig interface{} + rawConfig = kv[1] + for i := len(kk) - 1; i > 1; i-- { + if index, err := strconv.Atoi(kk[i]); err == nil { + newRawConfig := make([]interface{}, index+1) + newRawConfig[index] = rawConfig + rawConfig = newRawConfig + } else { + rawConfig = map[string]interface{}{ + kk[i]: rawConfig, + } + } + } + if err := decoder.Decode(rawConfig); err != nil { + return fmt.Errorf("unable to parse override %q: %w", kv[0], err) + } + } + + // Dump configuration if requested + if c.Dump { + output, err := yaml.Marshal(config) + if err != nil { + return fmt.Errorf("unable to dump configuration: %w", err) + } + out.Write([]byte("---\n")) + out.Write(output) + out.Write([]byte("\n")) + } + + return nil +} diff --git a/cmd/serve_test.go b/cmd/config_test.go similarity index 61% rename from cmd/serve_test.go rename to cmd/config_test.go index 0c89ff56..73b33614 100644 --- a/cmd/serve_test.go +++ b/cmd/config_test.go @@ -10,7 +10,7 @@ import ( "gopkg.in/yaml.v2" "akvorado/cmd" - "akvorado/helpers" + "akvorado/common/helpers" ) func want(t *testing.T, got, expected interface{}) { @@ -20,7 +20,7 @@ func want(t *testing.T, got, expected interface{}) { } } -func TestServeDump(t *testing.T) { +func TestDump(t *testing.T) { // Configuration file config := `--- http: @@ -36,27 +36,25 @@ snmp: cache-duration: 20m default-community: private kafka: - topic: netflow + connect: + version: 2.8.1 + topic: netflow compression-codec: zstd - version: 2.8.1 core: workers: 3 ` configFile := filepath.Join(t.TempDir(), "akvorado.yaml") ioutil.WriteFile(configFile, []byte(config), 0644) - // Start serves with it - root := cmd.RootCmd - buf := new(bytes.Buffer) 
- root.SetOut(buf) - root.SetErr(os.Stderr) - root.SetArgs([]string{"serve", "-D", "-C", "--config", configFile}) - cmd.ServeOptionsReset() - err := root.Execute() - if err != nil { - t.Fatalf("`serve -D -C` error:\n%+v", err) + c := cmd.ConfigRelatedOptions{ + Path: configFile, + Dump: true, + } + conf := cmd.DefaultInletConfiguration + buf := bytes.NewBuffer([]byte{}) + if err := c.Parse(buf, "inlet", conf); err != nil { + t.Fatalf("Parse() error:\n%+v", err) } - var got map[string]map[string]interface{} if err := yaml.Unmarshal(buf.Bytes(), &got); err != nil { t.Fatalf("Unmarshal() error:\n%+v", err) @@ -74,12 +72,14 @@ core: want(t, got["snmp"]["workers"], 2) want(t, got["snmp"]["cacheduration"], "20m0s") want(t, got["snmp"]["defaultcommunity"], "private") - want(t, got["kafka"]["topic"], "netflow") - want(t, got["kafka"]["version"], "2.8.1") - want(t, got["kafka"]["brokers"], []string{"127.0.0.1:9092"}) + want(t, got["kafka"]["connect"], map[string]interface{}{ + "brokers": []string{"127.0.0.1:9092"}, + "version": "2.8.1", + "topic": "netflow", + }) } -func TestServeEnvOverride(t *testing.T) { +func TestEnvOverride(t *testing.T) { // Configuration file config := `--- http: @@ -94,9 +94,10 @@ snmp: workers: 2 cache-duration: 10m kafka: - topic: netflow + connect: + version: 2.8.1 + topic: netflow compression-codec: zstd - version: 2.8.1 core: workers: 3 ` @@ -104,28 +105,25 @@ core: ioutil.WriteFile(configFile, []byte(config), 0644) // Environment - os.Setenv("AKVORADO_SNMP_CACHEDURATION", "22m") - os.Setenv("AKVORADO_SNMP_DEFAULTCOMMUNITY", "privateer") - os.Setenv("AKVORADO_SNMP_WORKERS", "3") - os.Setenv("AKVORADO_KAFKA_BROKERS", "127.0.0.1:9092,127.0.0.2:9092") - os.Setenv("AKVORADO_FLOW_INPUTS_0_LISTEN", "0.0.0.0:2056") + os.Setenv("AKVORADO_INLET_SNMP_CACHEDURATION", "22m") + os.Setenv("AKVORADO_INLET_SNMP_DEFAULTCOMMUNITY", "privateer") + os.Setenv("AKVORADO_INLET_SNMP_WORKERS", "3") + os.Setenv("AKVORADO_INLET_KAFKA_CONNECT_BROKERS", 
"127.0.0.1:9092,127.0.0.2:9092") + os.Setenv("AKVORADO_INLET_FLOW_INPUTS_0_LISTEN", "0.0.0.0:2056") // We may be lucky or the environment is keeping order - os.Setenv("AKVORADO_FLOW_INPUTS_1_TYPE", "file") - os.Setenv("AKVORADO_FLOW_INPUTS_1_DECODER", "netflow") - os.Setenv("AKVORADO_FLOW_INPUTS_1_PATHS", "f1,f2") + os.Setenv("AKVORADO_INLET_FLOW_INPUTS_1_TYPE", "file") + os.Setenv("AKVORADO_INLET_FLOW_INPUTS_1_DECODER", "netflow") + os.Setenv("AKVORADO_INLET_FLOW_INPUTS_1_PATHS", "f1,f2") - // Start serves with it - root := cmd.RootCmd - buf := new(bytes.Buffer) - root.SetOut(buf) - root.SetErr(os.Stderr) - root.SetArgs([]string{"serve", "-D", "-C", "--config", configFile}) - cmd.ServeOptionsReset() - err := root.Execute() - if err != nil { - t.Fatalf("`serve -D -C` error:\n%+v", err) + c := cmd.ConfigRelatedOptions{ + Path: configFile, + Dump: true, + } + conf := cmd.DefaultInletConfiguration + buf := bytes.NewBuffer([]byte{}) + if err := c.Parse(buf, "inlet", conf); err != nil { + t.Fatalf("Parse() error:\n%+v", err) } - var got map[string]map[string]interface{} if err := yaml.Unmarshal(buf.Bytes(), &got); err != nil { t.Fatalf("Unmarshal() error:\n%+v", err) @@ -133,7 +131,11 @@ core: want(t, got["snmp"]["cacheduration"], "22m0s") want(t, got["snmp"]["defaultcommunity"], "privateer") want(t, got["snmp"]["workers"], 3) - want(t, got["kafka"]["brokers"], []string{"127.0.0.1:9092", "127.0.0.2:9092"}) + want(t, got["kafka"]["connect"], map[string]interface{}{ + "brokers": []string{"127.0.0.1:9092", "127.0.0.2:9092"}, + "version": "2.8.1", + "topic": "netflow", + }) want(t, got["flow"], map[string]interface{}{ "inputs": []map[string]interface{}{ { diff --git a/cmd/configure.go b/cmd/configure.go new file mode 100644 index 00000000..c05426fa --- /dev/null +++ b/cmd/configure.go @@ -0,0 +1,109 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + 
"akvorado/configure/clickhouse" + "akvorado/configure/kafka" +) + +// ConfigureConfiguration represents the configuration file for the configure command. +type ConfigureConfiguration struct { + Reporting reporter.Configuration + HTTP http.Configuration + Clickhouse clickhouse.Configuration + Kafka kafka.Configuration +} + +// DefaultConfigureConfiguration is the default configuration for the configure command. +var DefaultConfigureConfiguration = ConfigureConfiguration{ + HTTP: http.DefaultConfiguration, + Reporting: reporter.DefaultConfiguration, + Clickhouse: clickhouse.DefaultConfiguration, + Kafka: kafka.DefaultConfiguration, +} + +type configureOptions struct { + ConfigRelatedOptions + CheckMode bool +} + +// ConfigureOptions stores the command-line option values for the configure +// command. +var ConfigureOptions configureOptions + +var configureCmd = &cobra.Command{ + Use: "configure", + Short: "Start Akvorado's configure service", + Long: `Akvorado is a Netflow/IPFIX collector. 
The configure service configure external +components: Kafka and Clickhouse.`, + Args: cobra.ExactArgs(0), + RunE: func(cmd *cobra.Command, args []string) error { + config := DefaultConfigureConfiguration + if err := ConfigureOptions.Parse(cmd.OutOrStdout(), "configure", &config); err != nil { + return err + } + + r, err := reporter.New(config.Reporting) + if err != nil { + return fmt.Errorf("unable to initialize reporter: %w", err) + } + return configureStart(r, config, ConfigureOptions.CheckMode) + }, +} + +func init() { + RootCmd.AddCommand(configureCmd) + configureCmd.Flags().StringVarP(&ConfigureOptions.ConfigRelatedOptions.Path, "config", "c", "", + "Configuration file") + configureCmd.Flags().BoolVarP(&ConfigureOptions.ConfigRelatedOptions.Dump, "dump", "D", false, + "Dump configuration before starting") + configureCmd.Flags().BoolVarP(&ConfigureOptions.CheckMode, "check", "C", false, + "Check configuration, but does not start") +} + +func configureStart(r *reporter.Reporter, config ConfigureConfiguration, checkOnly bool) error { + daemonComponent, err := daemon.New(r) + if err != nil { + return fmt.Errorf("unable to initialize daemon component: %w", err) + } + httpComponent, err := http.New(r, config.HTTP, http.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize HTTP component: %w", err) + } + kafkaComponent, err := kafka.New(r, config.Kafka) + if err != nil { + return fmt.Errorf("unable to initialize kafka component: %w", err) + } + clickhouseComponent, err := clickhouse.New(r, config.Clickhouse, clickhouse.Dependencies{ + Daemon: daemonComponent, + HTTP: httpComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize clickhouse component: %w", err) + } + + // Expose some informations and metrics + addCommonHTTPHandlers(r, "configure", httpComponent) + versionMetrics(r) + + // If we only asked for a check, stop here. + if checkOnly { + return nil + } + + // Start all the components. 
+ components := []interface{}{ + httpComponent, + clickhouseComponent, + kafkaComponent, + } + return StartStopComponents(r, daemonComponent, components) +} diff --git a/cmd/configure_test.go b/cmd/configure_test.go new file mode 100644 index 00000000..c367bc99 --- /dev/null +++ b/cmd/configure_test.go @@ -0,0 +1,14 @@ +package cmd + +import ( + "testing" + + "akvorado/common/reporter" +) + +func TestConfigureStart(t *testing.T) { + r := reporter.NewMock(t) + if err := configureStart(r, DefaultConfigureConfiguration, true); err != nil { + t.Fatalf("configureStart() error:\n%+v", err) + } +} diff --git a/cmd/console.go b/cmd/console.go new file mode 100644 index 00000000..5eaa051c --- /dev/null +++ b/cmd/console.go @@ -0,0 +1,101 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/console" +) + +// ConsoleConfiguration represents the configuration file for the console command. +type ConsoleConfiguration struct { + Reporting reporter.Configuration + HTTP http.Configuration + Console console.Configuration +} + +// DefaultConsoleConfiguration is the default configuration for the console command. +var DefaultConsoleConfiguration = ConsoleConfiguration{ + HTTP: http.DefaultConfiguration, + Reporting: reporter.DefaultConfiguration, + Console: console.DefaultConfiguration, +} + +type consoleOptions struct { + ConfigRelatedOptions + CheckMode bool +} + +// ConsoleOptions stores the command-line option values for the console +// command. +var ConsoleOptions consoleOptions + +var consoleCmd = &cobra.Command{ + Use: "console", + Short: "Start Akvorado's console service", + Long: `Akvorado is a Netflow/IPFIX collector. 
The console service exposes a web interface to +manage collected flows.`, + Args: cobra.ExactArgs(0), + RunE: func(cmd *cobra.Command, args []string) error { + config := DefaultConsoleConfiguration + if err := ConsoleOptions.Parse(cmd.OutOrStdout(), "console", &config); err != nil { + return err + } + + r, err := reporter.New(config.Reporting) + if err != nil { + return fmt.Errorf("unable to initialize reporter: %w", err) + } + return consoleStart(r, config, ConsoleOptions.CheckMode) + }, +} + +func init() { + RootCmd.AddCommand(consoleCmd) + consoleCmd.Flags().StringVarP(&ConsoleOptions.ConfigRelatedOptions.Path, "config", "c", "", + "Configuration file") + consoleCmd.Flags().BoolVarP(&ConsoleOptions.ConfigRelatedOptions.Dump, "dump", "D", false, + "Dump configuration before starting") + consoleCmd.Flags().BoolVarP(&ConsoleOptions.CheckMode, "check", "C", false, + "Check configuration, but does not start") +} + +func consoleStart(r *reporter.Reporter, config ConsoleConfiguration, checkOnly bool) error { + daemonComponent, err := daemon.New(r) + if err != nil { + return fmt.Errorf("unable to initialize daemon component: %w", err) + } + httpComponent, err := http.New(r, config.HTTP, http.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize HTTP component: %w", err) + } + consoleComponent, err := console.New(r, config.Console, console.Dependencies{ + Daemon: daemonComponent, + HTTP: httpComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize console component: %w", err) + } + + // Expose some informations and metrics + addCommonHTTPHandlers(r, "console", httpComponent) + versionMetrics(r) + + // If we only asked for a check, stop here. + if checkOnly { + return nil + } + + // Start all the components. 
+ components := []interface{}{ + httpComponent, + consoleComponent, + } + return StartStopComponents(r, daemonComponent, components) +} diff --git a/cmd/console_test.go b/cmd/console_test.go new file mode 100644 index 00000000..11e4e59c --- /dev/null +++ b/cmd/console_test.go @@ -0,0 +1,14 @@ +package cmd + +import ( + "testing" + + "akvorado/common/reporter" +) + +func TestConsoleStart(t *testing.T) { + r := reporter.NewMock(t) + if err := consoleStart(r, DefaultConsoleConfiguration, true); err != nil { + t.Fatalf("consoleStart() error:\n%+v", err) + } +} diff --git a/cmd/http.go b/cmd/http.go new file mode 100644 index 00000000..8fdfdb6a --- /dev/null +++ b/cmd/http.go @@ -0,0 +1,20 @@ +package cmd + +import ( + "fmt" + + "akvorado/common/http" + "akvorado/common/reporter" +) + +// addCommonHTTPHandlers configures various endpoints common to all +// services. Each endpoint is registered under `/api/v0` and +// `/api/v0/SERVICE` namespaces. +func addCommonHTTPHandlers(r *reporter.Reporter, service string, httpComponent *http.Component) { + httpComponent.AddHandler(fmt.Sprintf("/api/v0/%s/metrics", service), r.MetricsHTTPHandler()) + httpComponent.AddHandler("/api/v0/metrics", r.MetricsHTTPHandler()) + httpComponent.AddHandler(fmt.Sprintf("/api/v0/%s/healthcheck", service), r.HealthcheckHTTPHandler()) + httpComponent.AddHandler("/api/v0/healthcheck", r.HealthcheckHTTPHandler()) + httpComponent.AddHandler(fmt.Sprintf("/api/v0/%s/version", service), versionHandler()) + httpComponent.AddHandler("/api/v0/version", versionHandler()) +} diff --git a/cmd/inlet.go b/cmd/inlet.go new file mode 100644 index 00000000..e7b69cd7 --- /dev/null +++ b/cmd/inlet.go @@ -0,0 +1,147 @@ +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" + + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/core" + "akvorado/inlet/flow" + "akvorado/inlet/geoip" + "akvorado/inlet/kafka" + "akvorado/inlet/snmp" +) + +// InletConfiguration 
represents the configuration file for the inlet command. +type InletConfiguration struct { + Reporting reporter.Configuration + HTTP http.Configuration + Flow flow.Configuration + SNMP snmp.Configuration + GeoIP geoip.Configuration + Kafka kafka.Configuration + Core core.Configuration +} + +// DefaultInletConfiguration is the default configuration for the inlet command. +var DefaultInletConfiguration = InletConfiguration{ + HTTP: http.DefaultConfiguration, + Reporting: reporter.DefaultConfiguration, + Flow: flow.DefaultConfiguration, + SNMP: snmp.DefaultConfiguration, + GeoIP: geoip.DefaultConfiguration, + Kafka: kafka.DefaultConfiguration, + Core: core.DefaultConfiguration, +} + +type inletOptions struct { + ConfigRelatedOptions + CheckMode bool +} + +// InletOptions stores the command-line option values for the inlet +// command. +var InletOptions inletOptions + +var inletCmd = &cobra.Command{ + Use: "inlet", + Short: "Start Akvorado's inlet service", + Long: `Akvorado is a Netflow/IPFIX collector. 
The inlet service handles flow ingestion, +hydration and export to Kafka.`, + Args: cobra.ExactArgs(0), + RunE: func(cmd *cobra.Command, args []string) error { + config := DefaultInletConfiguration + if err := InletOptions.Parse(cmd.OutOrStdout(), "inlet", &config); err != nil { + return err + } + + r, err := reporter.New(config.Reporting) + if err != nil { + return fmt.Errorf("unable to initialize reporter: %w", err) + } + return inletStart(r, config, InletOptions.CheckMode) + }, +} + +func init() { + RootCmd.AddCommand(inletCmd) + inletCmd.Flags().StringVarP(&InletOptions.ConfigRelatedOptions.Path, "config", "c", "", + "Configuration file") + inletCmd.Flags().BoolVarP(&InletOptions.ConfigRelatedOptions.Dump, "dump", "D", false, + "Dump configuration before starting") + inletCmd.Flags().BoolVarP(&InletOptions.CheckMode, "check", "C", false, + "Check configuration, but does not start") +} + +func inletStart(r *reporter.Reporter, config InletConfiguration, checkOnly bool) error { + // Initialize the various components + daemonComponent, err := daemon.New(r) + if err != nil { + return fmt.Errorf("unable to initialize daemon component: %w", err) + } + httpComponent, err := http.New(r, config.HTTP, http.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize http component: %w", err) + } + flowComponent, err := flow.New(r, config.Flow, flow.Dependencies{ + Daemon: daemonComponent, + HTTP: httpComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize flow component: %w", err) + } + snmpComponent, err := snmp.New(r, config.SNMP, snmp.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize SNMP component: %w", err) + } + geoipComponent, err := geoip.New(r, config.GeoIP, geoip.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize GeoIP component: %w", err) + } + kafkaComponent, err := kafka.New(r, 
config.Kafka, kafka.Dependencies{ + Daemon: daemonComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize Kafka component: %w", err) + } + coreComponent, err := core.New(r, config.Core, core.Dependencies{ + Daemon: daemonComponent, + Flow: flowComponent, + Snmp: snmpComponent, + GeoIP: geoipComponent, + Kafka: kafkaComponent, + HTTP: httpComponent, + }) + if err != nil { + return fmt.Errorf("unable to initialize core component: %w", err) + } + + // Expose some informations and metrics + addCommonHTTPHandlers(r, "inlet", httpComponent) + versionMetrics(r) + + // If we only asked for a check, stop here. + if checkOnly { + return nil + } + + // Start all the components. + components := []interface{}{ + httpComponent, + snmpComponent, + geoipComponent, + kafkaComponent, + coreComponent, + flowComponent, + } + return StartStopComponents(r, daemonComponent, components) +} diff --git a/cmd/inlet_test.go b/cmd/inlet_test.go new file mode 100644 index 00000000..e8e1cad9 --- /dev/null +++ b/cmd/inlet_test.go @@ -0,0 +1,14 @@ +package cmd + +import ( + "testing" + + "akvorado/common/reporter" +) + +func TestInletStart(t *testing.T) { + r := reporter.NewMock(t) + if err := inletStart(r, DefaultInletConfiguration, true); err != nil { + t.Fatalf("inletStart() error:\n%+v", err) + } +} diff --git a/cmd/serve.go b/cmd/serve.go deleted file mode 100644 index 407eb2a8..00000000 --- a/cmd/serve.go +++ /dev/null @@ -1,310 +0,0 @@ -package cmd - -import ( - "encoding/json" - "fmt" - "io/ioutil" - netHTTP "net/http" - "os" - "runtime" - "strconv" - "strings" - - "github.com/mitchellh/mapstructure" - "github.com/spf13/cobra" - "gopkg.in/yaml.v2" - - "akvorado/clickhouse" - "akvorado/core" - "akvorado/daemon" - "akvorado/flow" - "akvorado/geoip" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" - "akvorado/snmp" - "akvorado/web" -) - -// ServeConfiguration represents the configuration file for the serve command. 
-type ServeConfiguration struct { - Reporting reporter.Configuration - HTTP http.Configuration - Flow flow.Configuration - SNMP snmp.Configuration - GeoIP geoip.Configuration - Kafka kafka.Configuration - Core core.Configuration - Web web.Configuration - ClickHouse clickhouse.Configuration -} - -// DefaultServeConfiguration is the default configuration for the serve command. -var DefaultServeConfiguration = ServeConfiguration{ - Reporting: reporter.DefaultConfiguration, - HTTP: http.DefaultConfiguration, - Flow: flow.DefaultConfiguration, - SNMP: snmp.DefaultConfiguration, - GeoIP: geoip.DefaultConfiguration, - Kafka: kafka.DefaultConfiguration, - Core: core.DefaultConfiguration, - Web: web.DefaultConfiguration, - ClickHouse: clickhouse.DefaultConfiguration, -} - -type serveOptions struct { - configurationFile string - checkMode bool - dumpConfiguration bool -} - -// ServeOptions stores the command-line option values for the serve -// command. -var ServeOptions serveOptions - -var serveCmd = &cobra.Command{ - Use: "serve", - Short: "Start akvorado", - Long: `Akvorado is a Netflow/IPFIX collector. 
It hydrates flows with information from SNMP and GeoIP -and exports them to Kafka.`, - Args: cobra.ExactArgs(0), - RunE: func(cmd *cobra.Command, args []string) error { - // Parse YAML - var rawConfig map[string]interface{} - if cfgFile := ServeOptions.configurationFile; cfgFile != "" { - input, err := ioutil.ReadFile(cfgFile) - if err != nil { - return fmt.Errorf("unable to read configuration file: %w", err) - } - if err := yaml.Unmarshal(input, &rawConfig); err != nil { - return fmt.Errorf("unable to parse configuration file: %w", err) - } - } - - // Parse provided configuration - config := DefaultServeConfiguration - decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{ - Result: &config, - ErrorUnused: true, - Metadata: nil, - WeaklyTypedInput: true, - MatchName: func(mapKey, fieldName string) bool { - key := strings.ToLower(strings.ReplaceAll(mapKey, "-", "")) - field := strings.ToLower(fieldName) - return key == field - }, - DecodeHook: mapstructure.ComposeDecodeHookFunc( - flow.ConfigurationUnmarshalerHook(), - mapstructure.TextUnmarshallerHookFunc(), - mapstructure.StringToTimeDurationHookFunc(), - mapstructure.StringToSliceHookFunc(","), - ), - }) - if err != nil { - return fmt.Errorf("unable to create configuration decoder: %w", err) - } - if err := decoder.Decode(rawConfig); err != nil { - return fmt.Errorf("unable to parse configuration: %w", err) - } - - // Override with environment variables - for _, keyval := range os.Environ() { - kv := strings.SplitN(keyval, "=", 2) - if len(kv) != 2 { - continue - } - kk := strings.Split(kv[0], "_") - if kk[0] != "AKVORADO" || len(kk) < 2 { - continue - } - // From AKVORADO_SQUID_PURPLE_QUIRK=47, we - // build a map "squid -> purple -> quirk -> - // 47". 
From AKVORADO_SQUID_3_PURPLE=47, we - // build "squid[3] -> purple -> 47" - var rawConfig interface{} - rawConfig = kv[1] - for i := len(kk) - 1; i > 0; i-- { - if index, err := strconv.Atoi(kk[i]); err == nil { - newRawConfig := make([]interface{}, index+1) - newRawConfig[index] = rawConfig - rawConfig = newRawConfig - } else { - rawConfig = map[string]interface{}{ - kk[i]: rawConfig, - } - } - } - if err := decoder.Decode(rawConfig); err != nil { - return fmt.Errorf("unable to parse override %q: %w", kv[0], err) - } - } - - // Dump configuration if requested - if ServeOptions.dumpConfiguration { - output, err := yaml.Marshal(config) - if err != nil { - return fmt.Errorf("unable to dump configuration: %w", err) - } - cmd.Printf("---\n%s\n", string(output)) - } - - r, err := reporter.New(config.Reporting) - if err != nil { - return fmt.Errorf("unable to initialize reporter: %w", err) - } - return daemonStart(r, config, ServeOptions.checkMode) - }, -} - -func init() { - RootCmd.AddCommand(serveCmd) - serveCmd.Flags().StringVarP(&ServeOptions.configurationFile, "config", "c", "", - "Configuration file") - serveCmd.Flags().BoolVarP(&ServeOptions.checkMode, "check", "C", false, - "Check configuration, but does not start") - serveCmd.Flags().BoolVarP(&ServeOptions.dumpConfiguration, "dump", "D", false, - "Dump configuration before starting") -} - -// daemonStart will start all components and manage daemon lifetime. 
-func daemonStart(r *reporter.Reporter, config ServeConfiguration, checkOnly bool) error { - // Initialize the various components - daemonComponent, err := daemon.New(r) - if err != nil { - return fmt.Errorf("unable to initialize daemon component: %w", err) - } - httpComponent, err := http.New(r, config.HTTP, http.Dependencies{ - Daemon: daemonComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize http component: %w", err) - } - flowComponent, err := flow.New(r, config.Flow, flow.Dependencies{ - Daemon: daemonComponent, - HTTP: httpComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize flow component: %w", err) - } - snmpComponent, err := snmp.New(r, config.SNMP, snmp.Dependencies{ - Daemon: daemonComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize SNMP component: %w", err) - } - geoipComponent, err := geoip.New(r, config.GeoIP, geoip.Dependencies{ - Daemon: daemonComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize GeoIP component: %w", err) - } - kafkaComponent, err := kafka.New(r, config.Kafka, kafka.Dependencies{ - Daemon: daemonComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize Kafka component: %w", err) - } - clickhouseComponent, err := clickhouse.New(r, config.ClickHouse, clickhouse.Dependencies{ - Daemon: daemonComponent, - HTTP: httpComponent, - Kafka: kafkaComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize ClickHouse component: %w", err) - } - coreComponent, err := core.New(r, config.Core, core.Dependencies{ - Daemon: daemonComponent, - Flow: flowComponent, - Snmp: snmpComponent, - GeoIP: geoipComponent, - Kafka: kafkaComponent, - HTTP: httpComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize core component: %w", err) - } - webComponent, err := web.New(r, config.Web, web.Dependencies{ - HTTP: httpComponent, - }) - if err != nil { - return fmt.Errorf("unable to initialize web component: %w", err) 
- } - - // If we only asked for a check, stop here. - if checkOnly { - return nil - } - - // Expose some informations and metrics - httpComponent.AddHandler("/api/v0/metrics", r.MetricsHTTPHandler()) - httpComponent.AddHandler("/api/v0/healthcheck", r.HealthcheckHTTPHandler()) - httpComponent.AddHandler("/api/v0/version", netHTTP.HandlerFunc( - func(w netHTTP.ResponseWriter, r *netHTTP.Request) { - versionInfo := struct { - Version string `json:"version"` - BuildDate string `json:"build_date"` - Compiler string `json:"compiler"` - }{ - Version: Version, - BuildDate: BuildDate, - Compiler: runtime.Version(), - } - w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(versionInfo) - })) - r.GaugeVec(reporter.GaugeOpts{ - Name: "info", - Help: "Akvorado build information", - }, []string{"version", "build_date", "compiler"}). - WithLabelValues(Version, BuildDate, runtime.Version()).Set(1) - - // Start all the components. - components := []interface{}{ - r, - daemonComponent, - httpComponent, - snmpComponent, - geoipComponent, - kafkaComponent, - clickhouseComponent, - coreComponent, - webComponent, - flowComponent, - } - startedComponents := []interface{}{} - defer func() { - for _, cmp := range startedComponents { - if stopperC, ok := cmp.(stopper); ok { - if err := stopperC.Stop(); err != nil { - r.Err(err).Msg("unable to stop component, ignoring") - } - } - } - }() - for _, cmp := range components { - if starterC, ok := cmp.(starter); ok { - if err := starterC.Start(); err != nil { - return fmt.Errorf("unable to start component: %w", err) - } - } - startedComponents = append([]interface{}{cmp}, startedComponents...) - } - - r.Info(). - Str("version", Version).Str("build-date", BuildDate). 
- Msg("akvorado has started") - - select { - case <-daemonComponent.Terminated(): - r.Info().Msg("stopping all components") - } - - return nil -} - -type starter interface { - Start() error -} -type stopper interface { - Stop() error -} diff --git a/cmd/tests.go b/cmd/tests.go deleted file mode 100644 index 932819a3..00000000 --- a/cmd/tests.go +++ /dev/null @@ -1,7 +0,0 @@ -package cmd - -// ServeOptionsReset resets serve options provided on command line. -// This should be used between two tests. -func ServeOptionsReset() { - ServeOptions = serveOptions{} -} diff --git a/cmd/version.go b/cmd/version.go index fabdd7a7..99133b99 100644 --- a/cmd/version.go +++ b/cmd/version.go @@ -1,9 +1,13 @@ package cmd import ( + "encoding/json" + "net/http" "runtime" "github.com/spf13/cobra" + + "akvorado/common/reporter" ) var ( @@ -27,3 +31,28 @@ var versionCmd = &cobra.Command{ cmd.Printf(" Built with: %s\n", runtime.Version()) }, } + +func versionHandler() http.Handler { + return http.HandlerFunc( + func(w http.ResponseWriter, r *http.Request) { + versionInfo := struct { + Version string `json:"version"` + BuildDate string `json:"build_date"` + Compiler string `json:"compiler"` + }{ + Version: Version, + BuildDate: BuildDate, + Compiler: runtime.Version(), + } + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(versionInfo) + }) +} + +func versionMetrics(r *reporter.Reporter) { + r.GaugeVec(reporter.GaugeOpts{ + Name: "info", + Help: "Akvorado build information", + }, []string{"version", "build_date", "compiler"}). 
+ WithLabelValues(Version, BuildDate, runtime.Version()).Set(1) +} diff --git a/cmd/version_test.go b/cmd/version_test.go index 8e05ae3c..69c817bd 100644 --- a/cmd/version_test.go +++ b/cmd/version_test.go @@ -8,7 +8,7 @@ import ( "testing" "akvorado/cmd" - "akvorado/helpers" + "akvorado/common/helpers" ) func TestVersion(t *testing.T) { diff --git a/daemon/lifecycle.go b/common/daemon/lifecycle.go similarity index 100% rename from daemon/lifecycle.go rename to common/daemon/lifecycle.go diff --git a/daemon/root.go b/common/daemon/root.go similarity index 98% rename from daemon/root.go rename to common/daemon/root.go index d4adf449..3f6d35fd 100644 --- a/daemon/root.go +++ b/common/daemon/root.go @@ -10,7 +10,7 @@ import ( "gopkg.in/tomb.v2" - "akvorado/reporter" + "akvorado/common/reporter" ) // Component is the interface the daemon component provides. diff --git a/daemon/root_test.go b/common/daemon/root_test.go similarity index 98% rename from daemon/root_test.go rename to common/daemon/root_test.go index 6799f1d6..dfe98d40 100644 --- a/daemon/root_test.go +++ b/common/daemon/root_test.go @@ -7,7 +7,7 @@ import ( "gopkg.in/tomb.v2" - "akvorado/reporter" + "akvorado/common/reporter" ) func TestTerminate(t *testing.T) { diff --git a/daemon/tests.go b/common/daemon/tests.go similarity index 100% rename from daemon/tests.go rename to common/daemon/tests.go diff --git a/helpers/binary_amd64.go b/common/helpers/binary_amd64.go similarity index 100% rename from helpers/binary_amd64.go rename to common/helpers/binary_amd64.go diff --git a/helpers/tests.go b/common/helpers/tests.go similarity index 100% rename from helpers/tests.go rename to common/helpers/tests.go diff --git a/http/config.go b/common/http/config.go similarity index 79% rename from http/config.go rename to common/http/config.go index fbfd09e3..746481e2 100644 --- a/http/config.go +++ b/common/http/config.go @@ -8,7 +8,7 @@ type Configuration struct { Profiler bool } -// DefaultConfiguration represents 
the default configuration for the HTTP server. +// DefaultConfiguration is the default configuration of the HTTP server. var DefaultConfiguration = Configuration{ Listen: "localhost:8080", } diff --git a/http/root.go b/common/http/root.go similarity index 98% rename from http/root.go rename to common/http/root.go index 355e5745..efff575c 100644 --- a/http/root.go +++ b/common/http/root.go @@ -14,8 +14,8 @@ import ( "github.com/rs/zerolog/hlog" "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" ) // Component represents the HTTP compomenent. diff --git a/http/root_test.go b/common/http/root_test.go similarity index 90% rename from http/root_test.go rename to common/http/root_test.go index 549fa6bc..b055f25f 100644 --- a/http/root_test.go +++ b/common/http/root_test.go @@ -6,9 +6,9 @@ import ( "runtime" "testing" - "akvorado/helpers" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestHandler(t *testing.T) { @@ -39,7 +39,7 @@ func TestHandler(t *testing.T) { t.Fatalf("GET /test: got status code %d, not 200", resp.StatusCode) } - gotMetrics := r.GetMetrics("akvorado_http_", "inflight_", "requests_total", "response_size") + gotMetrics := r.GetMetrics("akvorado_common_http_", "inflight_", "requests_total", "response_size") expectedMetrics := map[string]string{ `inflight_requests`: "0", `requests_total{code="200",handler="/test",method="get"}`: "1", diff --git a/http/tests.go b/common/http/tests.go similarity index 79% rename from http/tests.go rename to common/http/tests.go index 27fb6ac7..d776d8d8 100644 --- a/http/tests.go +++ b/common/http/tests.go @@ -5,15 +5,16 @@ package http import ( "testing" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" ) // NewMock create a new HTTP component listening on a random free port. 
func NewMock(t *testing.T, r *reporter.Reporter) *Component { t.Helper() - config := DefaultConfiguration - config.Listen = "127.0.0.1:0" + config := Configuration{ + Listen: "127.0.0.1:0", + } c, err := New(r, config, Dependencies{Daemon: daemon.NewMock(t)}) if err != nil { t.Fatalf("New() error:\n%+v", err) diff --git a/common/kafka/config.go b/common/kafka/config.go new file mode 100644 index 00000000..17e43c44 --- /dev/null +++ b/common/kafka/config.go @@ -0,0 +1,43 @@ +package kafka + +import "github.com/Shopify/sarama" + +// Configuration defines how we connect to a Kafka cluster. +type Configuration struct { + // Topic defines the topic to write flows to. + Topic string + // Brokers is the list of brokers to connect to. + Brokers []string + // Version is the version of Kafka we assume to work + Version Version +} + +// DefaultConfiguration represents the default configuration for connecting to Kafka. +var DefaultConfiguration = Configuration{ + Topic: "flows", + Brokers: []string{"127.0.0.1:9092"}, + Version: Version(sarama.V2_8_1_0), +} + +// Version represents a supported version of Kafka +type Version sarama.KafkaVersion + +// UnmarshalText parses a version of Kafka +func (v *Version) UnmarshalText(text []byte) error { + version, err := sarama.ParseKafkaVersion(string(text)) + if err != nil { + return err + } + *v = Version(version) + return nil +} + +// String turns a Kafka version into a string +func (v Version) String() string { + return sarama.KafkaVersion(v).String() +} + +// MarshalText turns a Kafka version intro a string +func (v Version) MarshalText() ([]byte, error) { + return []byte(v.String()), nil +} diff --git a/kafka/logs.go b/common/kafka/logs.go similarity index 63% rename from kafka/logs.go rename to common/kafka/logs.go index 272c4e8b..7869511d 100644 --- a/kafka/logs.go +++ b/common/kafka/logs.go @@ -6,20 +6,32 @@ import ( "github.com/Shopify/sarama" - "akvorado/reporter" + "akvorado/common/reporter" ) func init() { // The logger in 
Sarama is global. Do the same. - sarama.Logger = &globalKafkaLogger + sarama.Logger = &GlobalKafkaLogger } -var globalKafkaLogger kafkaLogger +// GlobalKafkaLogger is the logger instance registered to sarama. +var GlobalKafkaLogger kafkaLogger type kafkaLogger struct { r atomic.Value } +// Register register the provided reporter to be used for logging with sarama. +func (l *kafkaLogger) Register(r *reporter.Reporter) { + l.r.Store(r) +} + +// Unregister removes the currently registered reporter. +func (l *kafkaLogger) Unregister() { + var noreporter *reporter.Reporter + l.r.Store(noreporter) +} + func (l *kafkaLogger) Print(v ...interface{}) { r := l.r.Load() if r != nil && r.(*reporter.Reporter) != nil { diff --git a/common/kafka/tests.go b/common/kafka/tests.go new file mode 100644 index 00000000..88004fe2 --- /dev/null +++ b/common/kafka/tests.go @@ -0,0 +1,59 @@ +//go:build !release + +package kafka + +import ( + "testing" + "time" + + "github.com/Shopify/sarama" + + "akvorado/common/helpers" +) + +// SetupKafkaBroker configures a client to use for testing. 
+func SetupKafkaBroker(t *testing.T) (sarama.Client, []string) { + broker := helpers.CheckExternalService(t, "Kafka", []string{"kafka", "localhost"}, "9092") + + // Wait for broker to be ready + saramaConfig := sarama.NewConfig() + saramaConfig.Version = sarama.V2_8_1_0 + saramaConfig.Net.DialTimeout = 1 * time.Second + saramaConfig.Net.ReadTimeout = 1 * time.Second + saramaConfig.Net.WriteTimeout = 1 * time.Second + ready := false + var ( + client sarama.Client + err error + ) + for i := 0; i < 90; i++ { + if client != nil { + client.Close() + } + client, err = sarama.NewClient([]string{broker}, saramaConfig) + if err != nil { + continue + } + if err := client.RefreshMetadata(); err != nil { + continue + } + brokers := client.Brokers() + if len(brokers) == 0 { + continue + } + if err := brokers[0].Open(client.Config()); err != nil { + continue + } + if connected, err := brokers[0].Connected(); err != nil || !connected { + brokers[0].Close() + continue + } + brokers[0].Close() + ready = true + } + if !ready { + t.Fatalf("broker is not ready") + } + + return client, []string{broker} +} diff --git a/reporter/config.go b/common/reporter/config.go similarity index 83% rename from reporter/config.go rename to common/reporter/config.go index 3ccee374..ac5c0e54 100644 --- a/reporter/config.go +++ b/common/reporter/config.go @@ -1,8 +1,8 @@ package reporter import ( - "akvorado/reporter/logger" - "akvorado/reporter/metrics" + "akvorado/common/reporter/logger" + "akvorado/common/reporter/metrics" ) // Configuration contains the reporter configuration. 
diff --git a/reporter/healthcheck.go b/common/reporter/healthcheck.go similarity index 100% rename from reporter/healthcheck.go rename to common/reporter/healthcheck.go diff --git a/reporter/healthcheck_test.go b/common/reporter/healthcheck_test.go similarity index 98% rename from reporter/healthcheck_test.go rename to common/reporter/healthcheck_test.go index a7779866..80c82338 100644 --- a/reporter/healthcheck_test.go +++ b/common/reporter/healthcheck_test.go @@ -9,8 +9,8 @@ import ( "testing" "time" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func testHealthchecks(t *testing.T, r *reporter.Reporter, ctx context.Context, expectedStatus reporter.HealthcheckStatus, expectedResults map[string]reporter.HealthcheckResult) { diff --git a/reporter/logger.go b/common/reporter/logger.go similarity index 100% rename from reporter/logger.go rename to common/reporter/logger.go diff --git a/reporter/logger/config.go b/common/reporter/logger/config.go similarity index 100% rename from reporter/logger/config.go rename to common/reporter/logger/config.go diff --git a/reporter/logger/root.go b/common/reporter/logger/root.go similarity index 97% rename from reporter/logger/root.go rename to common/reporter/logger/root.go index 5ffd64b7..bff4251c 100644 --- a/reporter/logger/root.go +++ b/common/reporter/logger/root.go @@ -15,7 +15,7 @@ import ( "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "akvorado/reporter/stack" + "akvorado/common/reporter/stack" ) // Logger is a logger instance. 
It is compatible with the interface diff --git a/reporter/logger/root_example_test.go b/common/reporter/logger/root_example_test.go similarity index 74% rename from reporter/logger/root_example_test.go rename to common/reporter/logger/root_example_test.go index 4acf5537..606d115c 100644 --- a/reporter/logger/root_example_test.go +++ b/common/reporter/logger/root_example_test.go @@ -7,7 +7,7 @@ import ( "github.com/rs/zerolog" "github.com/rs/zerolog/log" - "akvorado/reporter/logger" + "akvorado/common/reporter/logger" ) func ExampleNew() { @@ -24,5 +24,5 @@ func ExampleNew() { } logger.Info().Int("example", 15).Msg("hello world") - // Output: {"level":"info","example":15,"time":"2008-01-08T17:05:05Z","caller":"akvorado/reporter/logger/root_example_test.go:26","module":"akvorado/reporter/logger_test","message":"hello world"} + // Output: {"level":"info","example":15,"time":"2008-01-08T17:05:05Z","caller":"akvorado/common/reporter/logger/root_example_test.go:26","module":"akvorado/common/reporter/logger_test","message":"hello world"} } diff --git a/reporter/logger/root_test.go b/common/reporter/logger/root_test.go similarity index 100% rename from reporter/logger/root_test.go rename to common/reporter/logger/root_test.go diff --git a/reporter/metrics.go b/common/reporter/metrics.go similarity index 100% rename from reporter/metrics.go rename to common/reporter/metrics.go diff --git a/reporter/metrics/config.go b/common/reporter/metrics/config.go similarity index 100% rename from reporter/metrics/config.go rename to common/reporter/metrics/config.go diff --git a/reporter/metrics/factory.go b/common/reporter/metrics/factory.go similarity index 100% rename from reporter/metrics/factory.go rename to common/reporter/metrics/factory.go diff --git a/reporter/metrics/logs.go b/common/reporter/metrics/logs.go similarity index 89% rename from reporter/metrics/logs.go rename to common/reporter/metrics/logs.go index d884c8a6..ef78c5b4 100644 --- a/reporter/metrics/logs.go +++ 
b/common/reporter/metrics/logs.go @@ -3,7 +3,7 @@ package metrics import ( "fmt" - "akvorado/reporter/logger" + "akvorado/common/reporter/logger" ) // promHTTPLogger is an adapter for logger.Logger to be used as promhttp.Logger diff --git a/reporter/metrics/root.go b/common/reporter/metrics/root.go similarity index 94% rename from reporter/metrics/root.go rename to common/reporter/metrics/root.go index 48ca9348..edff2bf2 100644 --- a/reporter/metrics/root.go +++ b/common/reporter/metrics/root.go @@ -13,8 +13,8 @@ import ( "github.com/prometheus/client_golang/prometheus/collectors" "github.com/prometheus/client_golang/prometheus/promhttp" - "akvorado/reporter/logger" - "akvorado/reporter/stack" + "akvorado/common/reporter/logger" + "akvorado/common/reporter/stack" ) // Metrics represents the internal state of the metric subsystem. @@ -54,9 +54,9 @@ func getPrefix(module string) (moduleName string) { moduleName = stack.ModuleName } else { moduleName = strings.SplitN(module, ".", 2)[0] - moduleName = strings.ReplaceAll(moduleName, "/", "_") - moduleName = strings.ReplaceAll(moduleName, ".", "_") } + moduleName = strings.ReplaceAll(moduleName, "/", "_") + moduleName = strings.ReplaceAll(moduleName, ".", "_") moduleName = fmt.Sprintf("%s_", moduleName) return } diff --git a/reporter/metrics/root_test.go b/common/reporter/metrics/root_test.go similarity index 84% rename from reporter/metrics/root_test.go rename to common/reporter/metrics/root_test.go index 2d98cfdb..4ce82a9a 100644 --- a/reporter/metrics/root_test.go +++ b/common/reporter/metrics/root_test.go @@ -8,9 +8,9 @@ import ( "github.com/prometheus/client_golang/prometheus" - "akvorado/helpers" - "akvorado/reporter/logger" - "akvorado/reporter/metrics" + "akvorado/common/helpers" + "akvorado/common/reporter/logger" + "akvorado/common/reporter/metrics" ) func TestNew(t *testing.T) { @@ -64,12 +64,12 @@ func TestNew(t *testing.T) { gotFiltered = append(gotFiltered, line) } expected := []string{ - "# HELP 
akvorado_reporter_metrics_test_counter1 Some counter", - "# TYPE akvorado_reporter_metrics_test_counter1 counter", - "akvorado_reporter_metrics_test_counter1 18", - "# HELP akvorado_reporter_metrics_test_gauge1 Some gauge", - "# TYPE akvorado_reporter_metrics_test_gauge1 gauge", - "akvorado_reporter_metrics_test_gauge1 4", + "# HELP akvorado_common_reporter_metrics_test_counter1 Some counter", + "# TYPE akvorado_common_reporter_metrics_test_counter1 counter", + "akvorado_common_reporter_metrics_test_counter1 18", + "# HELP akvorado_common_reporter_metrics_test_gauge1 Some gauge", + "# TYPE akvorado_common_reporter_metrics_test_gauge1 gauge", + "akvorado_common_reporter_metrics_test_gauge1 4", "", } if diff := helpers.Diff(gotFiltered, expected); diff != "" { diff --git a/reporter/metrics_test.go b/common/reporter/metrics_test.go similarity index 96% rename from reporter/metrics_test.go rename to common/reporter/metrics_test.go index d6799ab1..765eedde 100644 --- a/reporter/metrics_test.go +++ b/common/reporter/metrics_test.go @@ -6,8 +6,8 @@ import ( "github.com/prometheus/client_golang/prometheus" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func TestMetrics(t *testing.T) { @@ -90,7 +90,7 @@ func TestMetrics(t *testing.T) { summary2.WithLabelValues("value2").Observe(15) } - got := r.GetMetrics("akvorado_reporter_test_") + got := r.GetMetrics("akvorado_common_reporter_test_") expected := map[string]string{ `counter1`: "18", `counter2`: "1.17", @@ -144,7 +144,7 @@ func TestMetrics(t *testing.T) { t.Fatalf("metrics (-got, +want):\n%s", diff) } - got = r.GetMetrics("akvorado_reporter_test_", + got = r.GetMetrics("akvorado_common_reporter_test_", "counter1", "counter2", "counter3") expected = map[string]string{ `counter1`: "18", @@ -183,7 +183,7 @@ func TestMetricCollector(t *testing.T) { m.metric2 = r.MetricDesc("metric2", "Custom metric 2", nil) r.MetricCollector(m) - got := 
r.GetMetrics("akvorado_reporter_test_") + got := r.GetMetrics("akvorado_common_reporter_test_") expected := map[string]string{ `metric1`: "18", `metric2`: "30", diff --git a/reporter/root.go b/common/reporter/root.go similarity index 91% rename from reporter/root.go rename to common/reporter/root.go index f29327d9..a56f4adf 100644 --- a/reporter/root.go +++ b/common/reporter/root.go @@ -6,8 +6,8 @@ package reporter import ( "sync" - "akvorado/reporter/logger" - "akvorado/reporter/metrics" + "akvorado/common/reporter/logger" + "akvorado/common/reporter/metrics" ) // Reporter contains the state for a reporter. It also supports the diff --git a/reporter/stack/root.go b/common/reporter/stack/root.go similarity index 92% rename from reporter/stack/root.go rename to common/reporter/stack/root.go index efb4454e..73a66ed7 100644 --- a/reporter/stack/root.go +++ b/common/reporter/stack/root.go @@ -94,9 +94,9 @@ func (pc Call) SourceFile(withLine bool) string { var ( ownPackageCall = Callers()[0] - ownPackageName = strings.SplitN(ownPackageCall.FunctionName(), ".", 2)[0] // akvorado/reporter/stack - parentPackageName = ownPackageName[0:strings.LastIndex(ownPackageName, "/")] // akvorado/reporter + ownPackageName = strings.SplitN(ownPackageCall.FunctionName(), ".", 2)[0] // akvorado/common/reporter/stack + parentPackageName = ownPackageName[0:strings.LastIndex(ownPackageName, "/")] // akvorado/common/reporter // ModuleName is the name of the current module. This can be used to prefix stuff. 
- ModuleName = parentPackageName[0:strings.LastIndex(parentPackageName, "/")] // akvorado + ModuleName = strings.TrimSuffix(parentPackageName[0:strings.LastIndex(parentPackageName, "/")], "/common") // akvorado ) diff --git a/reporter/stack/root_test.go b/common/reporter/stack/root_test.go similarity index 84% rename from reporter/stack/root_test.go rename to common/reporter/stack/root_test.go index 6631feec..1b9a2e7a 100644 --- a/reporter/stack/root_test.go +++ b/common/reporter/stack/root_test.go @@ -4,8 +4,8 @@ import ( "strings" "testing" - "akvorado/helpers" - "akvorado/reporter/stack" + "akvorado/common/helpers" + "akvorado/common/reporter/stack" ) func TestSourceFile(t *testing.T) { @@ -15,7 +15,7 @@ func TestSourceFile(t *testing.T) { got = append(got, caller.SourceFile(false)) } expected := []string{ - "akvorado/reporter/stack/root_test.go", + "akvorado/common/reporter/stack/root_test.go", "testing/testing.go", } if diff := helpers.Diff(got, expected); diff != "" { @@ -30,7 +30,7 @@ func TestFunctionName(t *testing.T) { got = append(got, caller.FunctionName()) } expected := []string{ - "akvorado/reporter/stack_test.TestFunctionName", + "akvorado/common/reporter/stack_test.TestFunctionName", "testing.tRunner", } if diff := helpers.Diff(got, expected); diff != "" { diff --git a/reporter/tests.go b/common/reporter/tests.go similarity index 100% rename from reporter/tests.go rename to common/reporter/tests.go diff --git a/clickhouse/config.go b/configure/clickhouse/config.go similarity index 60% rename from clickhouse/config.go rename to configure/clickhouse/config.go index 360af8c1..3ebe0870 100644 --- a/clickhouse/config.go +++ b/configure/clickhouse/config.go @@ -1,6 +1,8 @@ package clickhouse -// Configuration describes the configuration for the ClickHouse component. +import "akvorado/common/kafka" + +// Configuration describes the configuration for the ClickHouse configurator. 
type Configuration struct { // Servers define the list of clickhouse servers to connect to (with ports) Servers []string @@ -10,13 +12,19 @@ type Configuration struct { Username string // Password defines the password to use for authentication Password string + // Kafka describes how to connect to Kafka + Kafka kafka.Configuration `yaml:"-"` + // KafkaThreads tell how many threads to use to poll data from Kafka + KafkaThreads int // AkvoradoURL allows one to override URL to reach Akvorado from Clickhouse AkvoradoURL string } -// DefaultConfiguration represents the default configuration for the ClickHouse component. +// DefaultConfiguration represents the default configuration for the ClickHouse configurator. var DefaultConfiguration = Configuration{ - Servers: []string{}, // No clickhouse by default - Database: "default", - Username: "default", + Servers: []string{}, // No clickhouse by default + Database: "default", + Username: "default", + Kafka: kafka.DefaultConfiguration, + KafkaThreads: 1, } diff --git a/clickhouse/data/asns.csv b/configure/clickhouse/data/asns.csv similarity index 100% rename from clickhouse/data/asns.csv rename to configure/clickhouse/data/asns.csv diff --git a/clickhouse/data/migrations/000001_create_flows_table.up.sql b/configure/clickhouse/data/migrations/000001_create_flows_table.up.sql similarity index 100% rename from clickhouse/data/migrations/000001_create_flows_table.up.sql rename to configure/clickhouse/data/migrations/000001_create_flows_table.up.sql diff --git a/clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl b/configure/clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl similarity index 96% rename from clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl rename to configure/clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl index 4ef261af..e4c9f615 100644 --- a/clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl +++ 
b/configure/clickhouse/data/migrations/000002_create_kafka_flows0_table.up.sql.tmpl @@ -44,7 +44,7 @@ SETTINGS kafka_broker_list = '{{ .KafkaBrokers }}', kafka_topic_list = '{{ .KafkaTopic }}-v{{ $version }}', kafka_group_name = 'clickhouse', - kafka_num_consumers = {{ .KafkaPartitions }}, + kafka_num_consumers = {{ .KafkaThreads }}, kafka_thread_per_consumer = 1, kafka_format = 'Protobuf', kafka_schema = 'flow-{{ $version }}.proto:FlowMessage' diff --git a/clickhouse/data/migrations/000003_create_kafka_flows0_view.up.sql.tmpl b/configure/clickhouse/data/migrations/000003_create_kafka_flows0_view.up.sql.tmpl similarity index 100% rename from clickhouse/data/migrations/000003_create_kafka_flows0_view.up.sql.tmpl rename to configure/clickhouse/data/migrations/000003_create_kafka_flows0_view.up.sql.tmpl diff --git a/clickhouse/data/migrations/000004_create_samplers_table.up.sql b/configure/clickhouse/data/migrations/000004_create_samplers_table.up.sql similarity index 100% rename from clickhouse/data/migrations/000004_create_samplers_table.up.sql rename to configure/clickhouse/data/migrations/000004_create_samplers_table.up.sql diff --git a/clickhouse/data/migrations/000005_create_samplers_view_inif.up.sql b/configure/clickhouse/data/migrations/000005_create_samplers_view_inif.up.sql similarity index 100% rename from clickhouse/data/migrations/000005_create_samplers_view_inif.up.sql rename to configure/clickhouse/data/migrations/000005_create_samplers_view_inif.up.sql diff --git a/clickhouse/data/migrations/000006_create_samplers_view_outif.up.sql b/configure/clickhouse/data/migrations/000006_create_samplers_view_outif.up.sql similarity index 100% rename from clickhouse/data/migrations/000006_create_samplers_view_outif.up.sql rename to configure/clickhouse/data/migrations/000006_create_samplers_view_outif.up.sql diff --git a/clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl 
b/configure/clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl similarity index 73% rename from clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl rename to configure/clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl index f59b9550..41225373 100644 --- a/clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl +++ b/configure/clickhouse/data/migrations/000007_create_dictionary_protocols.up.sql.tmpl @@ -6,7 +6,7 @@ CREATE DICTIONARY protocols ( PRIMARY KEY proto LAYOUT(HASHED()) SOURCE (HTTP( - url '{{ .BaseURL }}/api/v0/clickhouse/protocols.csv' + url '{{ .BaseURL }}/api/v0/configure/clickhouse/protocols.csv' format 'CSVWithNames' )) LIFETIME(3600) diff --git a/clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl b/configure/clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl similarity index 71% rename from clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl rename to configure/clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl index 326bd54c..546ac312 100644 --- a/clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl +++ b/configure/clickhouse/data/migrations/000008_create_dictionary_asns.up.sql.tmpl @@ -5,7 +5,7 @@ CREATE DICTIONARY asns ( PRIMARY KEY asn LAYOUT(HASHED()) SOURCE (HTTP( - url '{{ .BaseURL }}/api/v0/clickhouse/asns.csv' + url '{{ .BaseURL }}/api/v0/configure/clickhouse/asns.csv' format 'CSVWithNames' )) LIFETIME(3600) diff --git a/clickhouse/data/migrations/000009_modify_flows_doubledelta.up.sql b/configure/clickhouse/data/migrations/000009_modify_flows_doubledelta.up.sql similarity index 100% rename from clickhouse/data/migrations/000009_modify_flows_doubledelta.up.sql rename to configure/clickhouse/data/migrations/000009_modify_flows_doubledelta.up.sql diff --git a/clickhouse/data/migrations/000010_modify_flows_sampler_exporter.up.sql 
b/configure/clickhouse/data/migrations/000010_modify_flows_sampler_exporter.up.sql similarity index 100% rename from clickhouse/data/migrations/000010_modify_flows_sampler_exporter.up.sql rename to configure/clickhouse/data/migrations/000010_modify_flows_sampler_exporter.up.sql diff --git a/clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl b/configure/clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl similarity index 96% rename from clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl rename to configure/clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl index 4f1a36d4..29d7377c 100644 --- a/clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl +++ b/configure/clickhouse/data/migrations/000011_create_kafka_flows1_table.up.sql.tmpl @@ -44,7 +44,7 @@ SETTINGS kafka_broker_list = '{{ .KafkaBrokers }}', kafka_topic_list = '{{ .KafkaTopic }}-v{{ $version }}', kafka_group_name = 'clickhouse', - kafka_num_consumers = {{ .KafkaPartitions }}, + kafka_num_consumers = {{ .KafkaThreads }}, kafka_thread_per_consumer = 1, kafka_format = 'Protobuf', kafka_schema = 'flow-{{ $version }}.proto:FlowMessage' diff --git a/clickhouse/data/migrations/000012_create_kafka_flows1_view.up.sql.tmpl b/configure/clickhouse/data/migrations/000012_create_kafka_flows1_view.up.sql.tmpl similarity index 100% rename from clickhouse/data/migrations/000012_create_kafka_flows1_view.up.sql.tmpl rename to configure/clickhouse/data/migrations/000012_create_kafka_flows1_view.up.sql.tmpl diff --git a/clickhouse/data/migrations/000013_drop_samplers_view_inif.up.sql b/configure/clickhouse/data/migrations/000013_drop_samplers_view_inif.up.sql similarity index 100% rename from clickhouse/data/migrations/000013_drop_samplers_view_inif.up.sql rename to configure/clickhouse/data/migrations/000013_drop_samplers_view_inif.up.sql diff --git a/clickhouse/data/migrations/000014_drop_samplers_view_outif.up.sql 
b/configure/clickhouse/data/migrations/000014_drop_samplers_view_outif.up.sql similarity index 100% rename from clickhouse/data/migrations/000014_drop_samplers_view_outif.up.sql rename to configure/clickhouse/data/migrations/000014_drop_samplers_view_outif.up.sql diff --git a/clickhouse/data/migrations/000015_drop_samplers_table.up.sql b/configure/clickhouse/data/migrations/000015_drop_samplers_table.up.sql similarity index 100% rename from clickhouse/data/migrations/000015_drop_samplers_table.up.sql rename to configure/clickhouse/data/migrations/000015_drop_samplers_table.up.sql diff --git a/clickhouse/data/migrations/000016_create_exporters_table.up.sql b/configure/clickhouse/data/migrations/000016_create_exporters_table.up.sql similarity index 100% rename from clickhouse/data/migrations/000016_create_exporters_table.up.sql rename to configure/clickhouse/data/migrations/000016_create_exporters_table.up.sql diff --git a/clickhouse/data/protocols.csv b/configure/clickhouse/data/protocols.csv similarity index 100% rename from clickhouse/data/protocols.csv rename to configure/clickhouse/data/protocols.csv diff --git a/clickhouse/functional_test.go b/configure/clickhouse/functional_test.go similarity index 91% rename from clickhouse/functional_test.go rename to configure/clickhouse/functional_test.go index 102619a9..1d037261 100644 --- a/clickhouse/functional_test.go +++ b/configure/clickhouse/functional_test.go @@ -7,11 +7,10 @@ import ( "github.com/ClickHouse/clickhouse-go/v2" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestRealClickHouse(t *testing.T) { @@ -20,10 +19,8 @@ func TestRealClickHouse(t *testing.T) { configuration := DefaultConfiguration configuration.Servers = []string{chServer} r := reporter.NewMock(t) - kafka, _ := kafka.NewMock(t, r, kafka.DefaultConfiguration) ch, err := New(r, 
configuration, Dependencies{ Daemon: daemon.NewMock(t), - Kafka: kafka, HTTP: http.NewMock(t, r), }) if err != nil { diff --git a/clickhouse/http.go b/configure/clickhouse/http.go similarity index 90% rename from clickhouse/http.go rename to configure/clickhouse/http.go index 5928494c..159d3aec 100644 --- a/clickhouse/http.go +++ b/configure/clickhouse/http.go @@ -7,7 +7,7 @@ import ( "text/template" "time" - "akvorado/flow" + "akvorado/inlet/flow" ) var ( @@ -39,7 +39,7 @@ func (c *Component) addHandlerEmbedded(url string, path string) { // registerHTTPHandler register some handlers that will be useful for // ClickHouse func (c *Component) registerHTTPHandlers() error { - c.d.HTTP.AddHandler("/api/v0/clickhouse/init.sh", + c.d.HTTP.AddHandler("/api/v0/configure/clickhouse/init.sh", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/x-shellscript") initShTemplate.Execute(w, flow.VersionedSchemas) @@ -53,7 +53,7 @@ func (c *Component) registerHTTPHandlers() error { if entry.IsDir() { continue } - url := fmt.Sprintf("/api/v0/clickhouse/%s", entry.Name()) + url := fmt.Sprintf("/api/v0/configure/clickhouse/%s", entry.Name()) path := fmt.Sprintf("data/%s", entry.Name()) c.addHandlerEmbedded(url, path) } diff --git a/clickhouse/http_test.go b/configure/clickhouse/http_test.go similarity index 74% rename from clickhouse/http_test.go rename to configure/clickhouse/http_test.go index 87891a4b..0346c5b3 100644 --- a/clickhouse/http_test.go +++ b/configure/clickhouse/http_test.go @@ -3,19 +3,16 @@ package clickhouse import ( "testing" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestHTTPEndpoints(t *testing.T) { r := reporter.NewMock(t) - kafka, _ := kafka.NewMock(t, r, kafka.DefaultConfiguration) c, err := New(r, DefaultConfiguration, Dependencies{ Daemon: 
daemon.NewMock(t), - Kafka: kafka, HTTP: http.NewMock(t, r), }) if err != nil { @@ -24,7 +21,7 @@ func TestHTTPEndpoints(t *testing.T) { cases := helpers.HTTPEndpointCases{ { - URL: "/api/v0/clickhouse/protocols.csv", + URL: "/api/v0/configure/clickhouse/protocols.csv", ContentType: "text/csv; charset=utf-8", FirstLines: []string{ `proto,name,description`, @@ -32,14 +29,14 @@ func TestHTTPEndpoints(t *testing.T) { `1,ICMP,Internet Control Message`, }, }, { - URL: "/api/v0/clickhouse/asns.csv", + URL: "/api/v0/configure/clickhouse/asns.csv", ContentType: "text/csv; charset=utf-8", FirstLines: []string{ "asn,name", "1,LVLT-1", }, }, { - URL: "/api/v0/clickhouse/init.sh", + URL: "/api/v0/configure/clickhouse/init.sh", ContentType: "text/x-shellscript", FirstLines: []string{ `#!/bin/sh`, diff --git a/clickhouse/migrations-driver.go b/configure/clickhouse/migrations-driver.go similarity index 100% rename from clickhouse/migrations-driver.go rename to configure/clickhouse/migrations-driver.go diff --git a/clickhouse/migrations.go b/configure/clickhouse/migrations.go similarity index 91% rename from clickhouse/migrations.go rename to configure/clickhouse/migrations.go index 760d9258..df283921 100644 --- a/clickhouse/migrations.go +++ b/configure/clickhouse/migrations.go @@ -1,7 +1,8 @@ package clickhouse import ( - "akvorado/reporter" + "akvorado/common/reporter" + "akvorado/inlet/flow" "embed" "errors" "fmt" @@ -44,16 +45,11 @@ func (c *Component) migrateDatabaseOnServer(server string) error { return err } } - kafkaConf := c.d.Kafka.GetConfiguration() - partitions := 1 - if kafkaConf.TopicConfiguration != nil && kafkaConf.TopicConfiguration.NumPartitions > 0 { - partitions = int(kafkaConf.TopicConfiguration.NumPartitions) - } data := map[string]string{ - "KafkaBrokers": strings.Join(kafkaConf.Brokers, ","), - "KafkaTopic": kafkaConf.Topic, - "KafkaPartitions": strconv.Itoa(partitions), - "BaseURL": baseURL, + "KafkaBrokers": strings.Join(c.config.Kafka.Brokers, ","), + 
"KafkaTopic": fmt.Sprintf("%s-v%d", c.config.Kafka.Topic, flow.CurrentSchemaVersion), + "KafkaThreads": strconv.Itoa(c.config.KafkaThreads), + "BaseURL": baseURL, } l := c.r.With(). diff --git a/clickhouse/migrations_test.go b/configure/clickhouse/migrations_test.go similarity index 84% rename from clickhouse/migrations_test.go rename to configure/clickhouse/migrations_test.go index 7f4e51c3..b0c7986f 100644 --- a/clickhouse/migrations_test.go +++ b/configure/clickhouse/migrations_test.go @@ -5,20 +5,17 @@ import ( "strings" "testing" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestGetHTTPBaseURL(t *testing.T) { r := reporter.NewMock(t) - kafka, _ := kafka.NewMock(t, r, kafka.DefaultConfiguration) http := http.NewMock(t, r) c, err := New(r, DefaultConfiguration, Dependencies{ Daemon: daemon.NewMock(t), - Kafka: kafka, HTTP: http, }) if err != nil { diff --git a/clickhouse/root.go b/configure/clickhouse/root.go similarity index 81% rename from clickhouse/root.go rename to configure/clickhouse/root.go index 8f781d76..eaae2283 100644 --- a/clickhouse/root.go +++ b/configure/clickhouse/root.go @@ -1,4 +1,4 @@ -// Package clickhouse handles housekeeping for the ClickHouse database. +// Package clickhouse handles configuration of the ClickHouse database. package clickhouse import ( @@ -6,10 +6,9 @@ import ( "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" ) // Component represents the Kafka exporter. @@ -25,7 +24,6 @@ type Component struct { // Dependencies define the dependencies of the Kafka exporter. 
type Dependencies struct { Daemon daemon.Component - Kafka *kafka.Component HTTP *http.Component } @@ -40,15 +38,12 @@ func New(reporter *reporter.Reporter, configuration Configuration, dependencies if err := c.registerHTTPHandlers(); err != nil { return nil, err } - c.d.Daemon.Track(&c.t, "clickhouse") + c.d.Daemon.Track(&c.t, "configure/clickhouse") return &c, nil } // Start the ClickHouse component func (c *Component) Start() error { - if len(c.config.Servers) == 0 { - c.r.Warn().Msg("no clickhouse configuration, skipping database management") - } c.r.Info().Msg("starting ClickHouse component") if err := c.migrateDatabase(); err != nil { c.r.Warn().Msg("database migration failed, continue in the background") @@ -78,9 +73,6 @@ func (c *Component) Start() error { // Stop stops the ClickHouse component func (c *Component) Stop() error { - if len(c.config.Servers) == 0 { - return nil - } c.r.Info().Msg("stopping ClickHouse component") defer c.r.Info().Msg("ClickHouse component stopped") c.t.Kill(nil) diff --git a/clickhouse/templatedfs.go b/configure/clickhouse/templatedfs.go similarity index 100% rename from clickhouse/templatedfs.go rename to configure/clickhouse/templatedfs.go diff --git a/clickhouse/templatedfs_test.go b/configure/clickhouse/templatedfs_test.go similarity index 98% rename from clickhouse/templatedfs_test.go rename to configure/clickhouse/templatedfs_test.go index 003191c6..22f58d1e 100644 --- a/clickhouse/templatedfs_test.go +++ b/configure/clickhouse/templatedfs_test.go @@ -7,7 +7,7 @@ import ( "sort" "testing" - "akvorado/helpers" + "akvorado/common/helpers" ) //go:embed testdata diff --git a/clickhouse/testdata/regular-file.txt b/configure/clickhouse/testdata/regular-file.txt similarity index 100% rename from clickhouse/testdata/regular-file.txt rename to configure/clickhouse/testdata/regular-file.txt diff --git a/clickhouse/testdata/templated-file-with-error.txt.tmpl b/configure/clickhouse/testdata/templated-file-with-error.txt.tmpl 
similarity index 100% rename from clickhouse/testdata/templated-file-with-error.txt.tmpl rename to configure/clickhouse/testdata/templated-file-with-error.txt.tmpl diff --git a/clickhouse/testdata/templated-file.txt.tmpl b/configure/clickhouse/testdata/templated-file.txt.tmpl similarity index 100% rename from clickhouse/testdata/templated-file.txt.tmpl rename to configure/clickhouse/testdata/templated-file.txt.tmpl diff --git a/configure/kafka/config.go b/configure/kafka/config.go new file mode 100644 index 00000000..cb5f8c63 --- /dev/null +++ b/configure/kafka/config.go @@ -0,0 +1,30 @@ +package kafka + +import "akvorado/common/kafka" + +// Configuration describes the configuration for the Kafka configurator. +type Configuration struct { + // Connect describes how to connect to Kafka. + Connect kafka.Configuration + // TopicConfiguration describes the topic configuration. + TopicConfiguration TopicConfiguration +} + +// TopicConfiguration describes the configuration for a topic +type TopicConfiguration struct { + // NumPartitions tells how many partitions should be used for the topic. + NumPartitions int32 + // ReplicationFactor tells the replication factor for the topic. + ReplicationFactor int16 + // ConfigEntries is a map to specify the topic overrides. Non-listed overrides will be removed + ConfigEntries map[string]*string +} + +// DefaultConfiguration represents the default configuration for the Kafka configurator. 
+var DefaultConfiguration = Configuration{ + Connect: kafka.DefaultConfiguration, + TopicConfiguration: TopicConfiguration{ + NumPartitions: 1, + ReplicationFactor: 1, + }, +} diff --git a/configure/kafka/functional_test.go b/configure/kafka/functional_test.go new file mode 100644 index 00000000..145ec0cd --- /dev/null +++ b/configure/kafka/functional_test.go @@ -0,0 +1,91 @@ +package kafka + +import ( + "fmt" + "math/rand" + "testing" + "time" + + "github.com/Shopify/sarama" + + "akvorado/common/helpers" + "akvorado/common/kafka" + "akvorado/common/reporter" + "akvorado/inlet/flow" +) + +func TestTopicCreation(t *testing.T) { + client, brokers := kafka.SetupKafkaBroker(t) + + rand.Seed(time.Now().UnixMicro()) + topicName := fmt.Sprintf("test-topic-%d", rand.Int()) + expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion) + retentionMs := "76548" + segmentBytes := "107374184" + segmentBytes2 := "10737184" + cleanupPolicy := "delete" + + cases := []struct { + Name string + ConfigEntries map[string]*string + }{ + { + Name: "Set initial config", + ConfigEntries: map[string]*string{ + "retention.ms": &retentionMs, + "segment.bytes": &segmentBytes, + }, + }, { + Name: "Alter initial config", + ConfigEntries: map[string]*string{ + "retention.ms": &retentionMs, + "segment.bytes": &segmentBytes2, + "cleanup.policy": &cleanupPolicy, + }, + }, { + Name: "Remove item", + ConfigEntries: map[string]*string{ + "retention.ms": &retentionMs, + "segment.bytes": &segmentBytes2, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.Name, func(t *testing.T) { + configuration := DefaultConfiguration + configuration.Connect.Topic = topicName + configuration.TopicConfiguration = TopicConfiguration{ + NumPartitions: 1, + ReplicationFactor: 1, + ConfigEntries: tc.ConfigEntries, + } + configuration.Connect.Brokers = brokers + configuration.Connect.Version = kafka.Version(sarama.V2_8_1_0) + c, err := New(reporter.NewMock(t), configuration) + if err != nil { + 
t.Fatalf("New() error:\n%+v", err) + } + if err := c.Start(); err != nil { + t.Fatalf("Start() error:\n%+v", err) + } + + adminClient, err := sarama.NewClusterAdminFromClient(client) + if err != nil { + t.Fatalf("NewClusterAdmin() error:\n%+v", err) + } + topics, err := adminClient.ListTopics() + if err != nil { + t.Fatalf("ListTopics() error:\n%+v", err) + } + topic, ok := topics[expectedTopicName] + if !ok { + t.Fatal("ListTopics() did not find the topic") + } + if diff := helpers.Diff(topic.ConfigEntries, tc.ConfigEntries); diff != "" { + t.Fatalf("ListTopics() (-got, +want):\n%s", diff) + } + }) + } + +} diff --git a/configure/kafka/root.go b/configure/kafka/root.go new file mode 100644 index 00000000..45d29858 --- /dev/null +++ b/configure/kafka/root.go @@ -0,0 +1,95 @@ +package kafka + +import ( + "fmt" + "strings" + + "github.com/Shopify/sarama" + + "akvorado/common/kafka" + "akvorado/common/reporter" + "akvorado/inlet/flow" +) + +// Component represents the Kafka configurator. +type Component struct { + r *reporter.Reporter + config Configuration + + kafkaConfig *sarama.Config + kafkaTopic string +} + +// New creates a new Kafka configurator. +func New(r *reporter.Reporter, config Configuration) (*Component, error) { + kafkaConfig := sarama.NewConfig() + kafkaConfig.Version = sarama.KafkaVersion(config.Connect.Version) + if err := kafkaConfig.Validate(); err != nil { + return nil, fmt.Errorf("cannot validate Kafka configuration: %w", err) + } + + return &Component{ + r: r, + config: config, + + kafkaConfig: kafkaConfig, + kafkaTopic: fmt.Sprintf("%s-v%d", config.Connect.Topic, flow.CurrentSchemaVersion), + }, nil +} + +// Start starts Kafka configuration. 
+func (c *Component) Start() error { + c.r.Info().Msg("starting Kafka component") + kafka.GlobalKafkaLogger.Register(c.r) + defer func() { + kafka.GlobalKafkaLogger.Unregister() + c.r.Info().Msg("Kafka component stopped") + }() + + // Create topic + client, err := sarama.NewClusterAdmin(c.config.Connect.Brokers, c.kafkaConfig) + if err != nil { + c.r.Err(err). + Str("brokers", strings.Join(c.config.Connect.Brokers, ",")). + Msg("unable to get admin client for topic creation") + return fmt.Errorf("unable to get admin client for topic creation: %w", err) + } + defer client.Close() + l := c.r.With(). + Str("brokers", strings.Join(c.config.Connect.Brokers, ",")). + Str("topic", c.kafkaTopic). + Logger() + topics, err := client.ListTopics() + if err != nil { + l.Err(err).Msg("unable to get metadata for topics") + return fmt.Errorf("unable to get metadata for topics: %w", err) + } + if topic, ok := topics[c.kafkaTopic]; !ok { + if err := client.CreateTopic(c.kafkaTopic, + &sarama.TopicDetail{ + NumPartitions: c.config.TopicConfiguration.NumPartitions, + ReplicationFactor: c.config.TopicConfiguration.ReplicationFactor, + ConfigEntries: c.config.TopicConfiguration.ConfigEntries, + }, false); err != nil { + l.Err(err).Msg("unable to create topic") + return fmt.Errorf("unable to create topic %q: %w", c.kafkaTopic, err) + } + l.Info().Msg("topic created") + } else { + if topic.NumPartitions != c.config.TopicConfiguration.NumPartitions { + l.Warn().Msgf("mismatch for number of partitions: got %d, want %d", + topic.NumPartitions, c.config.TopicConfiguration.NumPartitions) + } + if topic.ReplicationFactor != c.config.TopicConfiguration.ReplicationFactor { + l.Warn().Msgf("mismatch for replication factor: got %d, want %d", + topic.ReplicationFactor, c.config.TopicConfiguration.ReplicationFactor) + } + if err := client.AlterConfig(sarama.TopicResource, c.kafkaTopic, c.config.TopicConfiguration.ConfigEntries, false); err != nil { + l.Err(err).Msg("unable to set topic 
configuration") + return fmt.Errorf("unable to set topic configuration for %q: %w", + c.kafkaTopic, err) + } + l.Info().Msg("topic updated") + } + return nil +} diff --git a/web/assets.go b/console/assets.go similarity index 98% rename from web/assets.go rename to console/assets.go index e2ca75eb..304ee624 100644 --- a/web/assets.go +++ b/console/assets.go @@ -1,4 +1,4 @@ -package web +package console import ( "embed" diff --git a/web/assets_test.go b/console/assets_test.go similarity index 84% rename from web/assets_test.go rename to console/assets_test.go index a950e7bb..c4e87b79 100644 --- a/web/assets_test.go +++ b/console/assets_test.go @@ -1,12 +1,13 @@ -package web +package console import ( "fmt" netHTTP "net/http" "testing" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestServeAssets(t *testing.T) { @@ -24,7 +25,10 @@ func TestServeAssets(t *testing.T) { h := http.NewMock(t, r) _, err := New(r, Configuration{ ServeLiveFS: live, - }, Dependencies{HTTP: h}) + }, Dependencies{ + HTTP: h, + Daemon: daemon.NewMock(t), + }) if err != nil { t.Fatalf("New() error:\n%+v", err) } diff --git a/web/config.go b/console/config.go similarity index 72% rename from web/config.go rename to console/config.go index fb0ea32e..af6d4f05 100644 --- a/web/config.go +++ b/console/config.go @@ -1,6 +1,6 @@ -package web +package console -// Configuration describes the configuration for the web component. +// Configuration describes the configuration for the console component. type Configuration struct { // GrafanaURL is the URL to acess Grafana. GrafanaURL string @@ -8,5 +8,5 @@ type Configuration struct { ServeLiveFS bool } -// DefaultConfiguration represents the default configuration for the web exporter. +// DefaultConfiguration represents the default configuration for the console component. 
var DefaultConfiguration = Configuration{} diff --git a/web/data/assets/images/akvorado.svg b/console/data/assets/images/akvorado.svg similarity index 100% rename from web/data/assets/images/akvorado.svg rename to console/data/assets/images/akvorado.svg diff --git a/console/data/assets/images/design.svg b/console/data/assets/images/design.svg new file mode 100644 index 00000000..1f6aa0c0 --- /dev/null +++ b/console/data/assets/images/design.svg @@ -0,0 +1,4 @@ + + + +
Kafka
Kafka
ClickHouse
ClickHouse
Inlet service
Inlet service
Flow
collection
Flow...
SNMP
poller
SNMP...
GeoIP
databases
GeoIP...
Classifiers
Classifiers
Time 2022-03-22 11:55
Source 192.0.2.15
Destination 203.0.113.56
Protocol UDP
Port 23876 / 53
Bytes 7654
Packets 8
Exporter th2-ncs5k8-10
Interface Hu0/0/3 / Hu0/2/3
ASN
 64476 / 12322
Country US / FR
Group th2
Provider Cogent
Connectivity Transit
Boundary external / internal
Time 2022-03-22 11:55...
Console service
Console service
Configuration
service
Configuration...
Text is not SVG - cannot display
\ No newline at end of file diff --git a/console/data/assets/images/~$design.svg.bkp b/console/data/assets/images/~$design.svg.bkp new file mode 100644 index 00000000..38523dc0 --- /dev/null +++ b/console/data/assets/images/~$design.svg.bkp @@ -0,0 +1,4 @@ + + + +
Kafka
Kafka
ClickHouse
ClickHouse
Inlet service
Inlet service
Flow
collection
Flow...
SNMP
poller
SNMP...
GeoIP
databases
GeoIP...
Classifiers
Classifiers
Time 2022-03-22 11:55
Source 192.0.2.15
Destination 203.0.113.56
Protocol UDP
Port 23876 / 53
Bytes 7654
Packets 8
Exporter th2-ncs5k8-10
Interface Hu0/0/3 / Hu0/2/3
ASN
 64476 / 12322
Country US / FR
Group th2
Provider Cogent
Connectivity Transit
Boundary external / internal
Time 2022-03-22 11:55...
Console service
Console service
Configuration
service
Configuration...
Text is not SVG - cannot display
\ No newline at end of file diff --git a/web/data/assets/stylesheets/akvorado.css b/console/data/assets/stylesheets/akvorado.css similarity index 100% rename from web/data/assets/stylesheets/akvorado.css rename to console/data/assets/stylesheets/akvorado.css diff --git a/web/data/assets/stylesheets/docs.css b/console/data/assets/stylesheets/docs.css similarity index 100% rename from web/data/assets/stylesheets/docs.css rename to console/data/assets/stylesheets/docs.css diff --git a/console/data/docs/00-intro.md b/console/data/docs/00-intro.md new file mode 100644 index 00000000..bc6a11f1 --- /dev/null +++ b/console/data/docs/00-intro.md @@ -0,0 +1,48 @@ +![Akvorado logo](../assets/images/akvorado.svg) + +# Introduction + +*Akvorado*[^name] is a flow collector, hydrater and exporter. It +receives flows, adds some data like interface names and countries, and +exports them to Kafka. + +[^name]: [Akvorado][] means "water wheel" in Esperanto. + +[Akvorado]: https://eo.wikipedia.org/wiki/Akvorado + +## Big picture + +![General design](../assets/images/design.svg) + +*Akvorado* is split into three components: + +- The **inlet service** receives flows from exporters. It poll each + exporter using SNMP to get the *system name*, the *interface names*, + *descriptions* and *speeds*. It query GeoIP databases to get the + *country* and the *AS number*. It applies rules to classify + exporters into *groups*. Interface rules attach to each interface a + *boundary* (external or internal), a *network provider* and a + *connectivity type* (PNI, IX, transit). The flow is exported to + *Kafka*, serialized using *Protobuf*. + +- The **configuration service** configures the external components. It + creates the *Kafka topic* and configures *ClickHouse* to receive the + flows from Kafka. + +- The **console service** exposes a web interface to look and + manipulate the flows stored inside the ClickHouse database. 
+ +## Serialized flow schemas + +Flows sent to Kafka are encoded with a versioned schema, described in +the `flow-*.proto` files. For each version of the schema, a different +Kafka topic is used. For example, the `flows-v1` topic receive +serialized flows using the first version of the schema. The inlet +service exports the schemas as well as the current version with its +HTTP service, via the `/api/v0/inlet/schemas.json` endpoint. + +## ClickHouse database schemas + +Flows are stored in a ClickHouse database using a single table +`flows`. The configuration service keeps the table schema up-to-date. +You can check the schema using `SHOW CREATE TABLE flows`. diff --git a/web/data/docs/01-install.md b/console/data/docs/01-install.md similarity index 67% rename from web/data/docs/01-install.md rename to console/data/docs/01-install.md index a5add6cb..88913b40 100644 --- a/web/data/docs/01-install.md +++ b/console/data/docs/01-install.md @@ -1,9 +1,14 @@ # Installation +*Akvorado* is written in Go. It provides its 3 components into a +single binary or Docker image. + ## Compilation from source -*Akvorado* is written in Go. You need a proper installation of *Go*. -Then, simply type: +You need a proper installation of [Go](https://go.dev/doc/install) +(1.17+) as well as +[Yarn](https://yarnpkg.com/getting-started/install). Then, simply +type: ```console # make @@ -31,7 +36,8 @@ The following `make` targets are available: ## Docker image -It is also possible to build a Docker image with: +It is also possible to build a Docker image without installing +anything else than [Docker](https://docs.docker.com/get-docker): ```console # docker build . 
-t akvorado:main diff --git a/web/data/docs/02-configuration.md b/console/data/docs/02-configuration.md similarity index 72% rename from web/data/docs/02-configuration.md rename to console/data/docs/02-configuration.md index 70a13382..223fd043 100644 --- a/web/data/docs/02-configuration.md +++ b/console/data/docs/02-configuration.md @@ -1,33 +1,44 @@ # Configuration -*Akvorado* can be configured through a YAML file. You can get the -default configuration with `./akvorado --dump --check`. Durations can -be written in seconds or using strings like `10h20m`. +Each *Akvorado* service is configured through a YAML file. You can get +the default configuration with `./akvorado SERVICE --dump --check`. +Durations can be written in seconds or using strings like `10h20m`. It is also possible to override configuration settings using environment variables. You need to remove any `-` from key names and -use `_` to handle nesting. Then, put `AKVORADO_` as a prefix. For -example, let's consider the following configuration file: +use `_` to handle nesting. Then, put `AKVORADO_SERVICE_` as a prefix +where `SERVICE` should be replaced by the service name (`inlet`, +`configure` or `console`). For example, let's consider the following +configuration file for the *inlet* service: ```yaml +http: + listen: 127.0.0.1:8081 kafka: - topic: test-topic - topic-configuration: - num-partitions: 1 - brokers: - - 192.0.2.1:9092 - - 192.0.2.2:9092 + connect: + topic: test-topic + brokers: + - 192.0.2.1:9092 + - 192.0.2.2:9092 ``` It can be translated to: ```sh -AKVORADO_KAFKA_TOPIC=test-topic -AKVORADO_KAFKA_TOPICCONFIGURATION_NUMPARTITIONS=1 -AKVORADO_KAFKA_BROKERS=192.0.2.1:9092,192.0.2.2:9092 +AKVORADO_INLET_HTTP_LISTEN=127.0.0.1:8081 +AKVORADO_INLET_KAFKA_CONNECT_TOPIC=test-topic +AKVORADO_INLET_KAFKA_CONNECT_BROKERS=192.0.2.1:9092,192.0.2.2:9092 ``` -## Flow +Each service is split into several functional components. Each of them +gets a section of the configuration file matching its name. 
+ +## Inlet service + +The main components of the inlet services are `flow`, `kafka`, and +`core`. + +### Flow The flow component handles incoming flows. It only accepts the `inputs` key to define the list of inputs to receive incoming flows. @@ -70,7 +81,7 @@ flow: Without configuration, *Akvorado* will listen for incoming Netflow/IPFIX flows on port 2055. -## Kafka +### Kafka Received flows are exported to a Kafka topic using the [protocol buffers format][]. The definition file is `flow/flow-*.proto`. Each @@ -81,12 +92,11 @@ flow is written in the [length-delimited format][]. The following keys are accepted: -- `topic` tells which topic to use to write messages -- `topic-configuration` contains the topic configuration -- `brokers` specifies the list of brokers to use to bootstrap the - connection to the Kafka cluster -- `version` tells which minimal version of Kafka to expect -- `usetls` tells if we should use TLS to connection (authentication is not supported) +- `connect` describes how to connect to the *Kafka* topic. It contains + three keys: `topic` defines the base topic name, `brokers` specifies + the list of brokers to use to bootstrap the connection to the Kafka + cluster and `version` tells which minimal version of Kafka to + expect. - `flush-interval` defines the maximum flush interval to send received flows to Kafka - `flush-bytes` defines the maximum number of bytes to store before @@ -101,37 +111,24 @@ The following keys are accepted: The topic name is suffixed by the version of the schema. For example, if the configured topic is `flows` and the current schema version is -0, the topic used to send received flows will be `flows-v0`. - -If no topic configuration is provided, the topic should already exist -in Kafka. If a configuration is provided, the topic is created if it -does not exist or updated if it does. Currently, updating the number -of partitions or the replication factor is not possible. 
The following -keys are accepted for the topic configuration: - -- `num-partitions` for the number of partitions -- `replication-factor` for the replication factor -- `config-entries` is a mapping from configuration names to their values +1, the topic used to send received flows will be `flows-v1`. For example: ```yaml kafka: - topic: test-topic - topic-configuration: - num-partitions: 1 - replication-factor: 1 - config-entries: - segment.bytes: 1073741824 - retention.ms: 86400000 - cleanup.policy: delete + connect: + topic: test-topic + brokers: 10.167.19.3:9092,10.167.19.4:9092,10.167.19.5:9092 + compression-codec: zstd ``` -## Core +### Core -The core component adds some information using the GeoIP databases and -the SNMP poller, and push the resulting flow to Kafka. It is also able -to classify exporters and interfaces into groups. +The core component queries the `geoip` and the `snmp` component to +hydrates the flows with additional information. It also classifies +exporters and interfaces into groups with a set of classification +rules. The following configuration keys are accepted: @@ -195,7 +192,7 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1") [expr]: https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md [from Go]: https://pkg.go.dev/regexp#Regexp.Expand -## GeoIP +### GeoIP The GeoIP component adds source and destination country, as well as the AS number of the source and destination IP if they are not present @@ -211,7 +208,7 @@ is provided, the component is inactive. It accepts the following keys: If the files are updated while *Akvorado* is running, they are automatically refreshed. -## SNMP +### SNMP Flows only include interface indexes. To associate them with an interface name and description, SNMP is used to poll the exporter @@ -236,7 +233,7 @@ As flows missing interface information are discarded, persisting the cache is useful to quickly be able to handle incoming flows. 
By default, no persistent cache is configured. -## HTTP +### HTTP The builtin HTTP server serves various pages. Its configuration supports only the `listen` key to specify the address and port to @@ -247,32 +244,72 @@ http: listen: 0.0.0.0:8000 ``` -## Web +### Reporting -The web interface presents the landing page of *Akvorado*. It also -embeds the documentation. It accepts only the following key: +Reporting encompasses logging and metrics. Currently, as *Akvorado* is +expected to be run inside Docker, logging is done on the standard +output and is not configurable. As for metrics, they are reported by +the HTTP component on the `/api/v0/inlet/metrics` endpoint and there is +nothing to configure either. -- `grafanaurl` to specify the URL to Grafana and exposes it as - [`/grafana`](/grafana). +## Configuration service +The two main components of the configuration service are `clickhouse` +and `kafka`. It also uses the [HTTP](#http) and +[reporting](#reporting) component from the inlet service and accepts +the same configuration settings. -## ClickHouse +### ClickHouse The ClickHouse component exposes some useful HTTP endpoints to -configure a ClickHouse database. Optionally, it will also provision -and keep up-to-date a ClickHouse database. In this case, the following -keys should be provided: +configure a ClickHouse database. It also provisions and keeps +up-to-date a ClickHouse database. The following keys should be +provided: - `servers` defines the list of ClickHouse servers to connect to - `username` is the username to use for authentication - `password` is the password to use for authentication - `database` defines the database to use to create tables - `akvorado-url` defines the URL of Akvorado to be used by Clickhouse (autodetection when not specified) + - `kafka-threads` defines the number of threads to use to poll Kafka (it should not exceed the number of partitions) -## Reporting -Reporting encompasses logging and metrics. 
Currently, as *Akvorado* is -expected to be run inside Docker, logging is done on the standard -output and is not configurable. As for metrics, they are reported by -the HTTP component on the `/api/v0/metrics` endpoint and there is -nothing to configure either. +The Kafka component creates or updates the Kafka topic to receive +flows. It accepts the following keys: + + - `connect` describes how to connect to the topic. This is the same + configuration as for the [inlet service](#kafka): the `topic`, + `brokers`, and `version` keys are accepted. + - `topic-configuration` describes how the topic should be configured. + +The following keys are accepted for the topic configuration: + +- `num-partitions` for the number of partitions +- `replication-factor` for the replication factor +- `config-entries` is a mapping from configuration names to their values + +For example: + +```yaml +kafka: + connect: + topic: test-topic + topic-configuration: + num-partitions: 1 + replication-factor: 1 + config-entries: + segment.bytes: 1073741824 + retention.ms: 86400000 + cleanup.policy: delete +``` + +Currently, the configure service won't update the number of partitions +or the replication factor. However, the configuration entries are kept +in sync with the content of the configuration file. + +## Console service + +The main components of the console service are `http` and `console`. +`http` accepts the [same configuration](#http) as for the inlet +service. The `console` has no configuration. diff --git a/console/data/docs/03-usage.md b/console/data/docs/03-usage.md new file mode 100644 index 00000000..ef7672dc --- /dev/null +++ b/console/data/docs/03-usage.md @@ -0,0 +1,79 @@ +# Usage + +*Akvorado* uses a subcommand system. Each subcommand comes with its +own set of options. It is possible to get help using `akvorado +--help`. Each service is started using the matching subcommand. When +started from a TTY, a service displays logs in a fancy way. 
Without a +TTY, logs are output formatted as JSON. + +## Common options + +Each service accepts a set of common options as flags. + +The `--config` option allows providing a configuration file in YAML +format. See the [configuration section](02-configuration.md) for more +information on this file. + +The `--check` option will check if the provided configuration is +correct and stops here. The `--dump` option will dump the parsed +configuration, along with the default values. It should be combined +with `--check` if you don't want the service to start. + +Each service embeds an HTTP server exposing a few endpoints. All +services expose the following endpoints in addition to the +service-specific endpoints: + +- `/api/v0/metrics`: Prometheus metrics +- `/api/v0/version`: *Akvorado* version +- `/api/v0/healthcheck`: are we alive? + +Each endpoint is also exposed under the service namespace. The idea is +to be able to expose a unified API for all services under a single +endpoint using an HTTP proxy. For example, the `inlet` service also +exposes its metrics under `/api/v0/inlet/metrics`. + +## Inlet service + +`akvorado inlet` starts the inlet service, allowing it to receive and +process flows. The following endpoints are exposed by the HTTP +component embedded into the service: + +- `/api/v0/inlet/flows`: stream the received flows +- `/api/v0/inlet/schemas.json`: versioned list of protobuf schemas used to export flows +- `/api/v0/inlet/schemas-X.proto`: protobuf schema for the provided version + +## Configure service + +`akvorado configure` starts the configure service. It runs as a +service as it exposes an HTTP service for ClickHouse to configure +itself. The Kafka topic is configured at start and does not need the +service to be running. 
+ +The following endpoints are exposed for use by ClickHouse: + +- `/api/v0/clickhouse/init.sh` contains the schemas in the form of a + script to execute during initialization to get them installed at the + proper location +- `/api/v0/clickhouse/protocols.csv` contains a CSV with the mapping + between protocol numbers and names +- `/api/v0/clickhouse/asns.csv` contains a CSV with the mapping + between AS numbers and organization names + +ClickHouse clusters are currently not supported, despite being able to +configure several servers in the configuration. Several servers are in +fact managed like they are a copy of one another. + +*Akvorado* also handles database migration during upgrades. When the +protobuf schema is updated, new Kafka tables should be created, as +well as the associated materialized view. Older tables should be kept +around, notably when upgrades can be rolling (some *akvorado* +instances are still running an older version). + +## Console service + +`akvorado console` starts the console service. Currently, only this +documentation is accessible through this service. + +## Other commands + +`akvorado version` displays the version. diff --git a/web/data/docs/05-troubleshooting.md b/console/data/docs/05-troubleshooting.md similarity index 82% rename from web/data/docs/05-troubleshooting.md rename to console/data/docs/05-troubleshooting.md index 228a26e3..349c30bb 100644 --- a/web/data/docs/05-troubleshooting.md +++ b/console/data/docs/05-troubleshooting.md @@ -1,11 +1,11 @@ # Troubleshooting -*Akvorado* outputs some logs and exposes some counters to help +The inlet service outputs some logs and exposes some counters to help troubleshoot most issues. 
The first step to check if everything works as expected is to request a flow: ```console -$ curl -s http://akvorado/api/v0/flows\?limit=1 +$ curl -s http://akvorado/api/v0/inlet/flows\?limit=1 { "TimeReceived": 1648305235, "SequenceNum": 425385846, @@ -18,7 +18,7 @@ If this does not work, be sure to check the logs and the metrics. The later can be queried with `curl`: ```console -$ curl -s http://akvorado/api/v0/metrics +$ curl -s http://akvorado/api/v0/inlet/metrics ``` ## No packets received @@ -41,7 +41,7 @@ contain information such as: - `exporter:172.19.162.244 poller breaker open` - `exporter:172.19.162.244 unable to GET` -The `akvorado_snmp_poller_failure_requests` metric would also increase +The `akvorado_inlet_snmp_poller_failure_requests` metric would also increase for the affected exporter. ## Dropped packets @@ -56,7 +56,7 @@ The first problem may come from the exporter dropping some of the flows. Most of the time, there are counters to detect this situation and it can be solved by lowering the exporter rate. -#### On Cisco NCS5500 routers +#### NCS5500 routers [Netflow, Sampling-Interval and the Mythical Internet Packet Size][1] contains many information about the limit of this platform. The first @@ -98,8 +98,8 @@ default) to keep packets before handling them to the application. When this buffer is full, packets are dropped. *Akvorado* reports the number of drops for each listening socket with -the `akvorado_flow_input_udp_in_drops` counter. This should be -compared to `akvorado_flow_input_udp_packets`. Another way to get the same +the `akvorado_inlet_flow_input_udp_in_drops` counter. This should be +compared to `akvorado_inlet_flow_input_udp_packets`. 
Another way to get the same information is by using `ss -lunepm` and look at the drop counter: ```console @@ -116,10 +116,10 @@ increasing the value of `net.core.rmem_max` sysctl and increasing the ### Internal queues -Inside *Akvorado*, parsed packets are transmitted to one module to -another using channels. When there is a bottleneck at this level, the -`akvorado_flow_input_udp_out_drops` counter will increase. There are -several ways to fix that: +Inside the inlet service, parsed packets are transmitted to one module +to another using channels. When there is a bottleneck at this level, +the `akvorado_inlet_flow_input_udp_out_drops` counter will increase. +There are several ways to fix that: - increasing the channel between the input module and the flow module, with the `queue-size` setting attached to the input, @@ -130,12 +130,12 @@ several ways to fix that: ### SNMP poller -To process a flow, *Akvorado* needs the interface name and +To process a flow, the inlet service needs the interface name and description. This information is provided by the `snmp` submodule. When all workers of the SNMP pollers are busy, new requests are -dropped. In this case, the `akvorado_snmp_poller_busy_count` counter -is increased. To mitigate this issue, *Akvorado* tries to skip -exporters with too many errors to avoid blocking SNMP requests for -other exporters. However, ensuring the exporters accept to answer +dropped. In this case, the `akvorado_inlet_snmp_poller_busy_count` +counter is increased. To mitigate this issue, the inlet service tries +to skip exporters with too many errors to avoid blocking SNMP requests +for other exporters. However, ensuring the exporters accept to answer requests is the first fix. If not enough, you can increase the number of workers. Workers handle SNMP requests synchronously. 
diff --git a/web/data/docs/06-internals.md b/console/data/docs/06-internals.md similarity index 92% rename from web/data/docs/06-internals.md rename to console/data/docs/06-internals.md index 2e558ebe..a6ef1e7c 100644 --- a/web/data/docs/06-internals.md +++ b/console/data/docs/06-internals.md @@ -1,15 +1,17 @@ # Internal design -*Akvorado* is written in Go. It uses a component architecture. The -entry point is `cmd/serve.go` and each directory is a distinct -component. This is heavily inspired by the [Component framework in -Clojure][]. A component is a piece of software with its configuration, -its state and its dependencies on other components. +*Akvorado* is written in Go. Each service has its code in a distinct +directory (`inlet/`, `configure/` and `console/`). The `common/` +directory contains components common to several services. The `cmd/` +directory contains the main entry points. + +Each service is split into several components. This is heavily +inspired by the [Component framework in Clojure][]. A component is a +piece of software with its configuration, its state and its +dependencies on other components. [Component framework in Clojure]: https://github.com/stuartsierra/component -![General design](../assets/images/design.svg) - Each component features the following piece of code: - A `Component` structure containing its state. @@ -71,9 +73,9 @@ fatal, or rate-limited and accounted into a metric. ## CLI -The CLI is handled by [Cobra](https://github.com/spf13/cobra). The -configuration file is handled by -[mapstructure](https://github.com/mitchellh/mapstructure). +The CLI (not a component) is handled by +[Cobra](https://github.com/spf13/cobra). The configuration file is +handled by [mapstructure](https://github.com/mitchellh/mapstructure). 
## Flow decoding diff --git a/web/data/package.json b/console/data/package.json similarity index 100% rename from web/data/package.json rename to console/data/package.json diff --git a/web/data/templates/docs.html b/console/data/templates/docs.html similarity index 100% rename from web/data/templates/docs.html rename to console/data/templates/docs.html diff --git a/web/data/templates/dummy.html b/console/data/templates/dummy.html similarity index 100% rename from web/data/templates/dummy.html rename to console/data/templates/dummy.html diff --git a/web/data/templates/layout/base.html b/console/data/templates/layout/base.html similarity index 100% rename from web/data/templates/layout/base.html rename to console/data/templates/layout/base.html diff --git a/web/data/templates/layout/navigation.html b/console/data/templates/layout/navigation.html similarity index 100% rename from web/data/templates/layout/navigation.html rename to console/data/templates/layout/navigation.html diff --git a/web/data/yarn.lock b/console/data/yarn.lock similarity index 100% rename from web/data/yarn.lock rename to console/data/yarn.lock diff --git a/web/docs.go b/console/docs.go similarity index 99% rename from web/docs.go rename to console/docs.go index b49899cd..0fcb3e3f 100644 --- a/web/docs.go +++ b/console/docs.go @@ -1,4 +1,4 @@ -package web +package console import ( "bytes" diff --git a/web/docs_test.go b/console/docs_test.go similarity index 84% rename from web/docs_test.go rename to console/docs_test.go index 25eb8652..57013dc5 100644 --- a/web/docs_test.go +++ b/console/docs_test.go @@ -1,4 +1,4 @@ -package web +package console import ( "fmt" @@ -7,8 +7,9 @@ import ( "strings" "testing" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestServeDocs(t *testing.T) { @@ -22,7 +23,10 @@ func TestServeDocs(t *testing.T) { h := http.NewMock(t, r) _, err := New(r, Configuration{ ServeLiveFS: live, - }, 
Dependencies{HTTP: h}) + }, Dependencies{ + HTTP: h, + Daemon: daemon.NewMock(t), + }) if err != nil { t.Fatalf("New() error:\n%+v", err) } diff --git a/web/root.go b/console/root.go similarity index 80% rename from web/root.go rename to console/root.go index df4e521f..9d4a0fdb 100644 --- a/web/root.go +++ b/console/root.go @@ -1,5 +1,5 @@ -// Package web exposes a web interface. -package web +// Package console exposes a web interface. +package console import ( "fmt" @@ -18,11 +18,12 @@ import ( "github.com/rs/zerolog" "gopkg.in/tomb.v2" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" ) -// Component represents the web component. +// Component represents the console component. type Component struct { r *reporter.Reporter d *Dependencies @@ -33,18 +34,20 @@ type Component struct { templatesLock sync.RWMutex } -// Dependencies define the dependencies of the web component. +// Dependencies define the dependencies of the console component. type Dependencies struct { - HTTP *http.Component + Daemon daemon.Component + HTTP *http.Component } -// New creates a new web component. +// New creates a new console component. func New(reporter *reporter.Reporter, config Configuration, dependencies Dependencies) (*Component, error) { c := Component{ r: reporter, d: &dependencies, config: config, } + c.d.Daemon.Track(&c.t, "console") if err := c.loadTemplates(); err != nil { return nil, err } @@ -79,7 +82,7 @@ func New(reporter *reporter.Reporter, config Configuration, dependencies Depende // Start starts the web component. func (c *Component) Start() error { - c.r.Info().Msg("starting web component") + c.r.Info().Msg("starting console component") if err := c.watchTemplates(); err != nil { return err } @@ -92,10 +95,10 @@ func (c *Component) Start() error { return nil } -// Stop stops the web component. +// Stop stops the console component. 
func (c *Component) Stop() error { - c.r.Info().Msg("stopping web component") - defer c.r.Info().Msg("web component stopped") + c.r.Info().Msg("stopping console component") + defer c.r.Info().Msg("console component stopped") c.t.Kill(nil) return c.t.Wait() } diff --git a/web/root_test.go b/console/root_test.go similarity index 84% rename from web/root_test.go rename to console/root_test.go index d76e9fcf..a57bc6d4 100644 --- a/web/root_test.go +++ b/console/root_test.go @@ -1,4 +1,4 @@ -package web +package console import ( "fmt" @@ -7,9 +7,10 @@ import ( "net/http/httptest" "testing" - "akvorado/helpers" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestProxy(t *testing.T) { @@ -25,7 +26,10 @@ func TestProxy(t *testing.T) { h := http.NewMock(t, r) _, err := New(r, Configuration{ GrafanaURL: server.URL, - }, Dependencies{HTTP: h}) + }, Dependencies{ + HTTP: h, + Daemon: daemon.NewMock(t), + }) if err != nil { t.Fatalf("New() error:\n%+v", err) } diff --git a/web/templates.go b/console/templates.go similarity index 99% rename from web/templates.go rename to console/templates.go index 15891870..70838d6c 100644 --- a/web/templates.go +++ b/console/templates.go @@ -1,7 +1,7 @@ -package web +package console import ( - "akvorado/reporter" + "akvorado/common/reporter" "bytes" "embed" "fmt" diff --git a/web/templates_test.go b/console/templates_test.go similarity index 85% rename from web/templates_test.go rename to console/templates_test.go index 2be69131..dfc2296d 100644 --- a/web/templates_test.go +++ b/console/templates_test.go @@ -1,4 +1,4 @@ -package web +package console import ( "net/http/httptest" @@ -6,8 +6,9 @@ import ( "testing" "time" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" ) func TestTemplate(t *testing.T) { @@ -20,7 +21,10 @@ func TestTemplate(t *testing.T) { r := 
reporter.NewMock(t) c, err := New(r, Configuration{ ServeLiveFS: live, - }, Dependencies{HTTP: http.NewMock(t, r)}) + }, Dependencies{ + HTTP: http.NewMock(t, r), + Daemon: daemon.NewMock(t), + }) if err != nil { t.Fatalf("New() error:\n%+v", err) } diff --git a/docs b/docs index 6283d6ca..e53f71e3 120000 --- a/docs +++ b/docs @@ -1 +1 @@ -web/data/docs \ No newline at end of file +console/data/docs \ No newline at end of file diff --git a/core/classifier.go b/inlet/core/classifier.go similarity index 100% rename from core/classifier.go rename to inlet/core/classifier.go diff --git a/core/classifier_test.go b/inlet/core/classifier_test.go similarity index 99% rename from core/classifier_test.go rename to inlet/core/classifier_test.go index 8fb9c3df..f0380ee0 100644 --- a/core/classifier_test.go +++ b/inlet/core/classifier_test.go @@ -3,7 +3,7 @@ package core import ( "testing" - "akvorado/helpers" + "akvorado/common/helpers" ) func TestExporterClassifier(t *testing.T) { diff --git a/core/config.go b/inlet/core/config.go similarity index 100% rename from core/config.go rename to inlet/core/config.go diff --git a/core/http.go b/inlet/core/http.go similarity index 100% rename from core/http.go rename to inlet/core/http.go diff --git a/core/hydrate.go b/inlet/core/hydrate.go similarity index 98% rename from core/hydrate.go rename to inlet/core/hydrate.go index 22adafa3..b7f40202 100644 --- a/core/hydrate.go +++ b/inlet/core/hydrate.go @@ -6,10 +6,10 @@ import ( "strconv" "time" - "akvorado/flow" - "akvorado/flow/decoder" - "akvorado/reporter" - "akvorado/snmp" + "akvorado/common/reporter" + "akvorado/inlet/flow" + "akvorado/inlet/flow/decoder" + "akvorado/inlet/snmp" ) // hydrateFlow adds more data to a flow. 
diff --git a/core/hydrate_test.go b/inlet/core/hydrate_test.go similarity index 96% rename from core/hydrate_test.go rename to inlet/core/hydrate_test.go index da862928..4dae30c8 100644 --- a/core/hydrate_test.go +++ b/inlet/core/hydrate_test.go @@ -9,14 +9,14 @@ import ( "github.com/golang/protobuf/proto" "gopkg.in/yaml.v2" - "akvorado/daemon" - "akvorado/flow" - "akvorado/geoip" - "akvorado/helpers" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" - "akvorado/snmp" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/flow" + "akvorado/inlet/geoip" + "akvorado/inlet/kafka" + "akvorado/inlet/snmp" ) func TestClassifiers(t *testing.T) { @@ -282,7 +282,7 @@ interfaceclassifiers: case <-time.After(1 * time.Second): t.Fatal("Kafka message not received") } - gotMetrics := r.GetMetrics("akvorado_core_flows_") + gotMetrics := r.GetMetrics("akvorado_inlet_core_flows_") expectedMetrics := map[string]string{ `errors{error="SNMP cache miss",exporter="192.0.2.142"}`: "1", `http_clients`: "0", diff --git a/core/metrics.go b/inlet/core/metrics.go similarity index 98% rename from core/metrics.go rename to inlet/core/metrics.go index bea5960a..0ac63f0b 100644 --- a/core/metrics.go +++ b/inlet/core/metrics.go @@ -1,7 +1,7 @@ package core import ( - "akvorado/reporter" + "akvorado/common/reporter" "sync/atomic" ) diff --git a/core/root.go b/inlet/core/root.go similarity index 93% rename from core/root.go rename to inlet/core/root.go index 97ee6198..b94f6270 100644 --- a/core/root.go +++ b/inlet/core/root.go @@ -11,13 +11,13 @@ import ( "github.com/golang/protobuf/proto" "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/flow" - "akvorado/geoip" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" - "akvorado/snmp" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/flow" + "akvorado/inlet/geoip" + "akvorado/inlet/kafka" + "akvorado/inlet/snmp" ) // 
Component represents the HTTP compomenent. @@ -72,7 +72,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende classifierCache: cache, classifierErrLogger: r.Sample(reporter.BurstSampler(10*time.Second, 3)), } - c.d.Daemon.Track(&c.t, "core") + c.d.Daemon.Track(&c.t, "inlet/core") c.initMetrics() return &c, nil } @@ -88,7 +88,7 @@ func (c *Component) Start() error { } c.r.RegisterHealthcheck("core", c.channelHealthcheck()) - c.d.HTTP.AddHandler("/api/v0/flows", c.FlowsHTTPHandler()) + c.d.HTTP.AddHandler("/api/v0/inlet/flows", c.FlowsHTTPHandler()) return nil } diff --git a/core/root_test.go b/inlet/core/root_test.go similarity index 87% rename from core/root_test.go rename to inlet/core/root_test.go index 05675816..9c7c80c3 100644 --- a/core/root_test.go +++ b/inlet/core/root_test.go @@ -14,14 +14,14 @@ import ( "github.com/Shopify/sarama" "github.com/golang/protobuf/proto" - "akvorado/daemon" - "akvorado/flow" - "akvorado/geoip" - "akvorado/helpers" - "akvorado/http" - "akvorado/kafka" - "akvorado/reporter" - "akvorado/snmp" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/flow" + "akvorado/inlet/geoip" + "akvorado/inlet/kafka" + "akvorado/inlet/snmp" ) func TestCore(t *testing.T) { @@ -87,7 +87,7 @@ func TestCore(t *testing.T) { flowComponent.Inject(t, flowMessage("192.0.2.143", 434, 679)) time.Sleep(20 * time.Millisecond) - gotMetrics := r.GetMetrics("akvorado_core_") + gotMetrics := r.GetMetrics("akvorado_inlet_core_") expectedMetrics := map[string]string{ `classifier_cache_hits`: "0", `classifier_cache_misses`: "0", @@ -108,7 +108,7 @@ func TestCore(t *testing.T) { flowComponent.Inject(t, flowMessage("192.0.2.143", 437, 679)) time.Sleep(20 * time.Millisecond) - gotMetrics = r.GetMetrics("akvorado_core_", "classifier_", "flows_") + gotMetrics = r.GetMetrics("akvorado_inlet_core_", "classifier_", "flows_") expectedMetrics = map[string]string{ 
`classifier_cache_hits`: "0", `classifier_cache_misses`: "0", @@ -177,7 +177,7 @@ func TestCore(t *testing.T) { input.SamplingRate = 0 flowComponent.Inject(t, input) time.Sleep(20 * time.Millisecond) - gotMetrics = r.GetMetrics("akvorado_core_", "classifier_", "flows_") + gotMetrics = r.GetMetrics("akvorado_inlet_core_", "classifier_", "flows_") expectedMetrics = map[string]string{ `classifier_cache_hits`: "0", `classifier_cache_misses`: "0", @@ -210,17 +210,17 @@ func TestCore(t *testing.T) { t.Run("http flows", func(t *testing.T) { c.httpFlowFlushDelay = 20 * time.Millisecond - resp, err := netHTTP.Get(fmt.Sprintf("http://%s/api/v0/flows", c.d.HTTP.Address)) + resp, err := netHTTP.Get(fmt.Sprintf("http://%s/api/v0/inlet/flows", c.d.HTTP.Address)) if err != nil { - t.Fatalf("GET /api/v0/flows:\n%+v", err) + t.Fatalf("GET /api/v0/inlet/flows:\n%+v", err) } defer resp.Body.Close() if resp.StatusCode != 200 { - t.Fatalf("GET /api/v0/flows status code %d", resp.StatusCode) + t.Fatalf("GET /api/v0/inlet/flows status code %d", resp.StatusCode) } // Metrics should tell we have a client - gotMetrics := r.GetMetrics("akvorado_core_", "flows_http_clients") + gotMetrics := r.GetMetrics("akvorado_inlet_core_", "flows_http_clients") expectedMetrics := map[string]string{ `flows_http_clients`: "1", } @@ -240,7 +240,7 @@ func TestCore(t *testing.T) { for i := 0; i < 10; i++ { var got map[string]interface{} if err := decoder.Decode(&got); err != nil { - t.Fatalf("GET /api/v0/flows error while reading body:\n%+v", err) + t.Fatalf("GET /api/v0/inlet/flows error while reading body:\n%+v", err) } expected := map[string]interface{}{ "TimeReceived": 200, @@ -275,7 +275,7 @@ func TestCore(t *testing.T) { "ExporterName": "192_0_2_142", } if diff := helpers.Diff(got, expected); diff != "" { - t.Fatalf("GET /api/v0/flows (-got, +want):\n%s", diff) + t.Fatalf("GET /api/v0/inlet/flows (-got, +want):\n%s", diff) } } }) @@ -283,17 +283,17 @@ func TestCore(t *testing.T) { // Test HTTP flow 
clients with a limit time.Sleep(10 * time.Millisecond) t.Run("http flows with limit", func(t *testing.T) { - resp, err := netHTTP.Get(fmt.Sprintf("http://%s/api/v0/flows?limit=4", c.d.HTTP.Address)) + resp, err := netHTTP.Get(fmt.Sprintf("http://%s/api/v0/inlet/flows?limit=4", c.d.HTTP.Address)) if err != nil { - t.Fatalf("GET /api/v0/flows:\n%+v", err) + t.Fatalf("GET /api/v0/inlet/flows:\n%+v", err) } defer resp.Body.Close() if resp.StatusCode != 200 { - t.Fatalf("GET /api/v0/flows status code %d", resp.StatusCode) + t.Fatalf("GET /api/v0/inlet/flows status code %d", resp.StatusCode) } // Metrics should tell we have a client - gotMetrics := r.GetMetrics("akvorado_core_", "flows_http_clients") + gotMetrics := r.GetMetrics("akvorado_inlet_core_", "flows_http_clients") expectedMetrics := map[string]string{ `flows_http_clients`: "1", } @@ -316,7 +316,7 @@ func TestCore(t *testing.T) { break } if err != nil { - t.Fatalf("GET /api/v0/flows error while reading:\n%+v", err) + t.Fatalf("GET /api/v0/inlet/flows error while reading:\n%+v", err) } count++ if count > 4 { @@ -324,10 +324,10 @@ func TestCore(t *testing.T) { } } if count > 4 { - t.Fatal("GET /api/v0/flows got more than 4 flows") + t.Fatal("GET /api/v0/inlet/flows got more than 4 flows") } if count != 4 { - t.Fatalf("GET /api/v0/flows got less than 4 flows (%d)", count) + t.Fatalf("GET /api/v0/inlet/flows got less than 4 flows (%d)", count) } }) } diff --git a/flow/config.go b/inlet/flow/config.go similarity index 97% rename from flow/config.go rename to inlet/flow/config.go index 5c7f2597..7176462b 100644 --- a/flow/config.go +++ b/inlet/flow/config.go @@ -8,9 +8,9 @@ import ( "github.com/mitchellh/mapstructure" - "akvorado/flow/input" - "akvorado/flow/input/file" - "akvorado/flow/input/udp" + "akvorado/inlet/flow/input" + "akvorado/inlet/flow/input/file" + "akvorado/inlet/flow/input/udp" ) // Configuration describes the configuration for the flow component diff --git a/flow/config_test.go 
b/inlet/flow/config_test.go similarity index 97% rename from flow/config_test.go rename to inlet/flow/config_test.go index a841dba1..abe9884c 100644 --- a/flow/config_test.go +++ b/inlet/flow/config_test.go @@ -1,9 +1,9 @@ package flow import ( - "akvorado/flow/input/file" - "akvorado/flow/input/udp" - "akvorado/helpers" + "akvorado/common/helpers" + "akvorado/inlet/flow/input/file" + "akvorado/inlet/flow/input/udp" "strings" "testing" diff --git a/flow/data/schemas/flow-0.proto b/inlet/flow/data/schemas/flow-0.proto similarity index 97% rename from flow/data/schemas/flow-0.proto rename to inlet/flow/data/schemas/flow-0.proto index f29cad4d..4e9f47df 100644 --- a/flow/data/schemas/flow-0.proto +++ b/inlet/flow/data/schemas/flow-0.proto @@ -1,6 +1,6 @@ syntax = "proto3"; package decoder; -option go_package = "akvorado/flow/decoder"; +option go_package = "akvorado/inlet/flow/decoder"; // This is a stripped version from the one in Goflow2, but with GeoIP added. diff --git a/flow/data/schemas/flow-1.proto b/inlet/flow/data/schemas/flow-1.proto similarity index 97% rename from flow/data/schemas/flow-1.proto rename to inlet/flow/data/schemas/flow-1.proto index 767a44bc..a4c2314b 100644 --- a/flow/data/schemas/flow-1.proto +++ b/inlet/flow/data/schemas/flow-1.proto @@ -1,6 +1,6 @@ syntax = "proto3"; package decoder; -option go_package = "akvorado/flow/decoder"; +option go_package = "akvorado/inlet/flow/decoder"; // This is a stripped version from the one in Goflow2, but with GeoIP added. diff --git a/flow/decoder.go b/inlet/flow/decoder.go similarity index 94% rename from flow/decoder.go rename to inlet/flow/decoder.go index 66444ed2..f6b291d3 100644 --- a/flow/decoder.go +++ b/inlet/flow/decoder.go @@ -3,8 +3,8 @@ package flow import ( "time" - "akvorado/flow/decoder" - "akvorado/flow/decoder/netflow" + "akvorado/inlet/flow/decoder" + "akvorado/inlet/flow/decoder/netflow" ) // Message describes a decoded flow message. 
diff --git a/flow/decoder/encoder.go b/inlet/flow/decoder/encoder.go similarity index 100% rename from flow/decoder/encoder.go rename to inlet/flow/decoder/encoder.go diff --git a/flow/decoder/encoder_test.go b/inlet/flow/decoder/encoder_test.go similarity index 98% rename from flow/decoder/encoder_test.go rename to inlet/flow/decoder/encoder_test.go index 740f894d..b333bda0 100644 --- a/flow/decoder/encoder_test.go +++ b/inlet/flow/decoder/encoder_test.go @@ -1,7 +1,7 @@ package decoder import ( - "akvorado/helpers" + "akvorado/common/helpers" "bytes" "encoding/json" "net" diff --git a/flow/decoder/helper.go b/inlet/flow/decoder/helper.go similarity index 100% rename from flow/decoder/helper.go rename to inlet/flow/decoder/helper.go diff --git a/flow/decoder/netflow/root.go b/inlet/flow/decoder/netflow/root.go similarity index 99% rename from flow/decoder/netflow/root.go rename to inlet/flow/decoder/netflow/root.go index e0ee9da1..3b045c2f 100644 --- a/flow/decoder/netflow/root.go +++ b/inlet/flow/decoder/netflow/root.go @@ -9,8 +9,8 @@ import ( "github.com/netsampler/goflow2/decoders/netflow" "github.com/netsampler/goflow2/producer" - "akvorado/flow/decoder" - "akvorado/reporter" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" ) // Decoder contains the state for the Netflow v9 decoder. 
diff --git a/flow/decoder/netflow/root_test.go b/inlet/flow/decoder/netflow/root_test.go similarity index 96% rename from flow/decoder/netflow/root_test.go rename to inlet/flow/decoder/netflow/root_test.go index 03e6faee..c60f6910 100644 --- a/flow/decoder/netflow/root_test.go +++ b/inlet/flow/decoder/netflow/root_test.go @@ -6,9 +6,9 @@ import ( "path/filepath" "testing" - "akvorado/flow/decoder" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" ) func TestDecode(t *testing.T) { @@ -29,7 +29,7 @@ func TestDecode(t *testing.T) { } // Check metrics - gotMetrics := r.GetMetrics("akvorado_flow_decoder_netflow_") + gotMetrics := r.GetMetrics("akvorado_inlet_flow_decoder_netflow_") expectedMetrics := map[string]string{ `count{exporter="127.0.0.1",version="9"}`: "1", `flowset_records_sum{exporter="127.0.0.1",type="OptionsTemplateFlowSet",version="9"}`: "1", @@ -54,7 +54,7 @@ func TestDecode(t *testing.T) { } // Check metrics - gotMetrics = r.GetMetrics("akvorado_flow_decoder_netflow_") + gotMetrics = r.GetMetrics("akvorado_inlet_flow_decoder_netflow_") expectedMetrics = map[string]string{ `count{exporter="127.0.0.1",version="9"}`: "2", `flowset_records_sum{exporter="127.0.0.1",type="OptionsTemplateFlowSet",version="9"}`: "1", @@ -81,7 +81,7 @@ func TestDecode(t *testing.T) { } // Check metrics - gotMetrics = r.GetMetrics("akvorado_flow_decoder_netflow_") + gotMetrics = r.GetMetrics("akvorado_inlet_flow_decoder_netflow_") expectedMetrics = map[string]string{ `count{exporter="127.0.0.1",version="9"}`: "3", `flowset_records_sum{exporter="127.0.0.1",type="OptionsTemplateFlowSet",version="9"}`: "1", @@ -197,7 +197,7 @@ func TestDecode(t *testing.T) { t.Fatalf("Decode() (-got, +want):\n%s", diff) } gotMetrics = r.GetMetrics( - "akvorado_flow_decoder_netflow_", + "akvorado_inlet_flow_decoder_netflow_", "count", "flowset_", "templates_", diff --git a/flow/decoder/netflow/testdata/data-260.data 
b/inlet/flow/decoder/netflow/testdata/data-260.data similarity index 100% rename from flow/decoder/netflow/testdata/data-260.data rename to inlet/flow/decoder/netflow/testdata/data-260.data diff --git a/flow/decoder/netflow/testdata/options-data-257.data b/inlet/flow/decoder/netflow/testdata/options-data-257.data similarity index 100% rename from flow/decoder/netflow/testdata/options-data-257.data rename to inlet/flow/decoder/netflow/testdata/options-data-257.data diff --git a/flow/decoder/netflow/testdata/options-template-257.data b/inlet/flow/decoder/netflow/testdata/options-template-257.data similarity index 100% rename from flow/decoder/netflow/testdata/options-template-257.data rename to inlet/flow/decoder/netflow/testdata/options-template-257.data diff --git a/flow/decoder/netflow/testdata/template-260.data b/inlet/flow/decoder/netflow/testdata/template-260.data similarity index 100% rename from flow/decoder/netflow/testdata/template-260.data rename to inlet/flow/decoder/netflow/testdata/template-260.data diff --git a/flow/decoder/root.go b/inlet/flow/decoder/root.go similarity index 95% rename from flow/decoder/root.go rename to inlet/flow/decoder/root.go index 6f920e45..93611c9d 100644 --- a/flow/decoder/root.go +++ b/inlet/flow/decoder/root.go @@ -1,7 +1,7 @@ package decoder import ( - "akvorado/reporter" + "akvorado/common/reporter" "net" "time" ) diff --git a/flow/decoder/tests.go b/inlet/flow/decoder/tests.go similarity index 100% rename from flow/decoder/tests.go rename to inlet/flow/decoder/tests.go diff --git a/flow/input/file/config.go b/inlet/flow/input/file/config.go similarity index 100% rename from flow/input/file/config.go rename to inlet/flow/input/file/config.go diff --git a/flow/input/file/root.go b/inlet/flow/input/file/root.go similarity index 91% rename from flow/input/file/root.go rename to inlet/flow/input/file/root.go index d44f042c..2f83de75 100644 --- a/flow/input/file/root.go +++ b/inlet/flow/input/file/root.go @@ -9,10 +9,10 @@ 
import ( "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/flow/input" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" + "akvorado/inlet/flow/input" ) // Input represents the state of a file input. @@ -36,7 +36,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp ch: make(chan []*decoder.FlowMessage), decoder: dec, } - daemon.Track(&input.t, "flow/input/file") + daemon.Track(&input.t, "inlet/flow/input/file") return input, nil } diff --git a/flow/input/file/root_test.go b/inlet/flow/input/file/root_test.go similarity index 90% rename from flow/input/file/root_test.go rename to inlet/flow/input/file/root_test.go index 7a8b91fb..f3418643 100644 --- a/flow/input/file/root_test.go +++ b/inlet/flow/input/file/root_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" ) func TestFileInput(t *testing.T) { diff --git a/flow/input/file/testdata/file1.txt b/inlet/flow/input/file/testdata/file1.txt similarity index 100% rename from flow/input/file/testdata/file1.txt rename to inlet/flow/input/file/testdata/file1.txt diff --git a/flow/input/file/testdata/file2.txt b/inlet/flow/input/file/testdata/file2.txt similarity index 100% rename from flow/input/file/testdata/file2.txt rename to inlet/flow/input/file/testdata/file2.txt diff --git a/flow/input/root.go b/inlet/flow/input/root.go similarity index 86% rename from flow/input/root.go rename to inlet/flow/input/root.go index 7726892b..dcb217ca 100644 --- a/flow/input/root.go +++ b/inlet/flow/input/root.go @@ -1,9 +1,9 @@ package input import ( - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" ) // Input is 
the interface any input should meet diff --git a/flow/input/udp/config.go b/inlet/flow/input/udp/config.go similarity index 100% rename from flow/input/udp/config.go rename to inlet/flow/input/udp/config.go diff --git a/flow/input/udp/root.go b/inlet/flow/input/udp/root.go similarity index 97% rename from flow/input/udp/root.go rename to inlet/flow/input/udp/root.go index cd6add6f..d3d3468c 100644 --- a/flow/input/udp/root.go +++ b/inlet/flow/input/udp/root.go @@ -11,10 +11,10 @@ import ( "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/flow/input" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" + "akvorado/inlet/flow/input" ) // Input represents the state of an UDP listener. @@ -90,7 +90,7 @@ func (configuration *Configuration) New(r *reporter.Reporter, daemon daemon.Comp []string{"listener", "worker"}, ) - daemon.Track(&input.t, "flow/input/udp") + daemon.Track(&input.t, "inlet/flow/input/udp") return input, nil } diff --git a/flow/input/udp/root_test.go b/inlet/flow/input/udp/root_test.go similarity index 94% rename from flow/input/udp/root_test.go rename to inlet/flow/input/udp/root_test.go index 11fd3582..b107d9c5 100644 --- a/flow/input/udp/root_test.go +++ b/inlet/flow/input/udp/root_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" ) func TestUDPInput(t *testing.T) { @@ -69,7 +69,7 @@ func TestUDPInput(t *testing.T) { } // Check metrics - gotMetrics := r.GetMetrics("akvorado_flow_input_udp_") + gotMetrics := r.GetMetrics("akvorado_inlet_flow_input_udp_") expectedMetrics := map[string]string{ `bytes{exporter="127.0.0.1",listener="127.0.0.1:0",worker="0"}`: "12", `packets{exporter="127.0.0.1",listener="127.0.0.1:0",worker="0"}`: "1", @@ -119,7 +119,7 @@ func 
TestOverflow(t *testing.T) { time.Sleep(20 * time.Millisecond) // Check metrics (same as before because we got only one packet, others were dropped) - gotMetrics := r.GetMetrics("akvorado_flow_input_udp_") + gotMetrics := r.GetMetrics("akvorado_inlet_flow_input_udp_") expectedMetrics := map[string]string{ `bytes{exporter="127.0.0.1",listener="127.0.0.1:0",worker="0"}`: "12", `in_drops{listener="127.0.0.1:0",worker="0"}`: "0", diff --git a/flow/input/udp/socket.go b/inlet/flow/input/udp/socket.go similarity index 97% rename from flow/input/udp/socket.go rename to inlet/flow/input/udp/socket.go index a7fa5b73..6ea4430b 100644 --- a/flow/input/udp/socket.go +++ b/inlet/flow/input/udp/socket.go @@ -6,7 +6,7 @@ import ( "golang.org/x/sys/unix" - "akvorado/helpers" + "akvorado/common/helpers" ) var ( diff --git a/flow/input/udp/socket_test.go b/inlet/flow/input/udp/socket_test.go similarity index 100% rename from flow/input/udp/socket_test.go rename to inlet/flow/input/udp/socket_test.go diff --git a/flow/root.go b/inlet/flow/root.go similarity index 95% rename from flow/root.go rename to inlet/flow/root.go index 8acaace4..b5be8ffb 100644 --- a/flow/root.go +++ b/inlet/flow/root.go @@ -8,11 +8,11 @@ import ( "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/flow/decoder" - "akvorado/flow/input" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/flow/decoder" + "akvorado/inlet/flow/input" ) // Component represents the flow component. 
@@ -106,7 +106,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende []string{"name"}, ) - c.d.Daemon.Track(&c.t, "flow") + c.d.Daemon.Track(&c.t, "inlet/flow") c.initHTTP() return &c, nil } diff --git a/flow/root_test.go b/inlet/flow/root_test.go similarity index 93% rename from flow/root_test.go rename to inlet/flow/root_test.go index c73a2d76..c7113953 100644 --- a/flow/root_test.go +++ b/inlet/flow/root_test.go @@ -6,8 +6,8 @@ import ( "testing" "time" - "akvorado/flow/input/file" - "akvorado/reporter" + "akvorado/common/reporter" + "akvorado/inlet/flow/input/file" ) func TestFlow(t *testing.T) { diff --git a/flow/schemas.go b/inlet/flow/schemas.go similarity index 86% rename from flow/schemas.go rename to inlet/flow/schemas.go index 6331911a..c592f645 100644 --- a/flow/schemas.go +++ b/inlet/flow/schemas.go @@ -48,13 +48,13 @@ func init() { func (c *Component) initHTTP() { for version, schema := range VersionedSchemas { - c.d.HTTP.AddHandler(fmt.Sprintf("/api/v0/flow/schema-%d.proto", version), + c.d.HTTP.AddHandler(fmt.Sprintf("/api/v0/inlet/flow/schema-%d.proto", version), http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "text/plain") w.Write([]byte(schema)) })) } - c.d.HTTP.AddHandler("/api/v0/flow/schemas.json", http.HandlerFunc( + c.d.HTTP.AddHandler("/api/v0/inlet/flow/schemas.json", http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") answer := struct { @@ -65,7 +65,7 @@ func (c *Component) initHTTP() { Versions: map[int]string{}, } for version := range VersionedSchemas { - answer.Versions[version] = fmt.Sprintf("/api/v0/flow/schema-%d.proto", version) + answer.Versions[version] = fmt.Sprintf("/api/v0/inlet/flow/schema-%d.proto", version) } encoder := json.NewEncoder(w) encoder.SetIndent("", " ") diff --git a/flow/schemas_test.go b/inlet/flow/schemas_test.go similarity index 66% rename from flow/schemas_test.go 
rename to inlet/flow/schemas_test.go index dce4555a..931c4d86 100644 --- a/flow/schemas_test.go +++ b/inlet/flow/schemas_test.go @@ -3,8 +3,8 @@ package flow import ( "testing" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func TestHTTPEndpoints(t *testing.T) { @@ -13,21 +13,21 @@ func TestHTTPEndpoints(t *testing.T) { cases := helpers.HTTPEndpointCases{ { - URL: "/api/v0/flow/schema-0.proto", + URL: "/api/v0/inlet/flow/schema-0.proto", ContentType: "text/plain", FirstLines: []string{ `syntax = "proto3";`, `package decoder;`, }, }, { - URL: "/api/v0/flow/schemas.json", + URL: "/api/v0/inlet/flow/schemas.json", ContentType: "application/json", FirstLines: []string{ `{`, ` "current_version": 1,`, ` "versions": {`, - ` "0": "/api/v0/flow/schema-0.proto",`, - ` "1": "/api/v0/flow/schema-1.proto"`, + ` "0": "/api/v0/inlet/flow/schema-0.proto",`, + ` "1": "/api/v0/inlet/flow/schema-1.proto"`, ` }`, `}`, }, diff --git a/flow/tests.go b/inlet/flow/tests.go similarity index 88% rename from flow/tests.go rename to inlet/flow/tests.go index f5b16920..c4dafeba 100644 --- a/flow/tests.go +++ b/inlet/flow/tests.go @@ -5,10 +5,10 @@ package flow import ( "testing" - "akvorado/daemon" - "akvorado/flow/input/udp" - "akvorado/http" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/http" + "akvorado/common/reporter" + "akvorado/inlet/flow/input/udp" ) // NewMock creates a new flow importer listening on a random port. 
It diff --git a/geoip/config.go b/inlet/geoip/config.go similarity index 100% rename from geoip/config.go rename to inlet/geoip/config.go diff --git a/geoip/lookup.go b/inlet/geoip/lookup.go similarity index 100% rename from geoip/lookup.go rename to inlet/geoip/lookup.go diff --git a/geoip/lookup_test.go b/inlet/geoip/lookup_test.go similarity index 94% rename from geoip/lookup_test.go rename to inlet/geoip/lookup_test.go index 4383ee9b..d15e909d 100644 --- a/geoip/lookup_test.go +++ b/inlet/geoip/lookup_test.go @@ -4,8 +4,8 @@ import ( "net" "testing" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func TestLookup(t *testing.T) { diff --git a/geoip/root.go b/inlet/geoip/root.go similarity index 97% rename from geoip/root.go rename to inlet/geoip/root.go index e3e88d02..bde1f66a 100644 --- a/geoip/root.go +++ b/inlet/geoip/root.go @@ -11,8 +11,8 @@ import ( "github.com/oschwald/geoip2-golang" "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" ) // Component represents the GeoIP component. 
@@ -49,7 +49,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende if c.config.ASNDatabase != "" { c.config.ASNDatabase = filepath.Clean(c.config.ASNDatabase) } - c.d.Daemon.Track(&c.t, "geoip") + c.d.Daemon.Track(&c.t, "inlet/geoip") c.metrics.databaseRefresh = c.r.CounterVec( reporter.CounterOpts{ Name: "db_refresh_total", diff --git a/geoip/root_test.go b/inlet/geoip/root_test.go similarity index 93% rename from geoip/root_test.go rename to inlet/geoip/root_test.go index 59760037..e33fe13f 100644 --- a/geoip/root_test.go +++ b/inlet/geoip/root_test.go @@ -7,9 +7,9 @@ import ( "testing" "time" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func copyFile(src string, dst string) { @@ -57,7 +57,7 @@ func TestDatabaseRefresh(t *testing.T) { }() // Check we did load both databases - gotMetrics := r.GetMetrics("akvorado_geoip_db_") + gotMetrics := r.GetMetrics("akvorado_inlet_geoip_db_") expectedMetrics := map[string]string{ `refresh_total{database="asn"}`: "1", `refresh_total{database="country"}`: "1", @@ -71,7 +71,7 @@ func TestDatabaseRefresh(t *testing.T) { filepath.Join(dir, "tmp.mmdb")) os.Rename(filepath.Join(dir, "tmp.mmdb"), config.CountryDatabase) time.Sleep(10 * time.Millisecond) - gotMetrics = r.GetMetrics("akvorado_geoip_db_") + gotMetrics = r.GetMetrics("akvorado_inlet_geoip_db_") expectedMetrics = map[string]string{ `refresh_total{database="asn"}`: "1", `refresh_total{database="country"}`: "2", diff --git a/geoip/testdata/GeoLite2-ASN-Test.mmdb b/inlet/geoip/testdata/GeoLite2-ASN-Test.mmdb similarity index 100% rename from geoip/testdata/GeoLite2-ASN-Test.mmdb rename to inlet/geoip/testdata/GeoLite2-ASN-Test.mmdb diff --git a/geoip/testdata/GeoLite2-Country-Test.mmdb b/inlet/geoip/testdata/GeoLite2-Country-Test.mmdb similarity index 100% rename from geoip/testdata/GeoLite2-Country-Test.mmdb rename to 
inlet/geoip/testdata/GeoLite2-Country-Test.mmdb diff --git a/geoip/tests.go b/inlet/geoip/tests.go similarity index 94% rename from geoip/tests.go rename to inlet/geoip/tests.go index 17314e4c..b7f3e7e3 100644 --- a/geoip/tests.go +++ b/inlet/geoip/tests.go @@ -8,8 +8,8 @@ import ( "runtime" "testing" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" ) // NewMock creates a GeoIP component usable for testing. It is already diff --git a/kafka/config.go b/inlet/kafka/config.go similarity index 53% rename from kafka/config.go rename to inlet/kafka/config.go index f2d6e423..8a1027b6 100644 --- a/kafka/config.go +++ b/inlet/kafka/config.go @@ -5,20 +5,14 @@ import ( "time" "github.com/Shopify/sarama" + + "akvorado/common/kafka" ) // Configuration describes the configuration for the Kafka exporter. type Configuration struct { - // Topic defines the topic to write flows to. - Topic string - // TopicConfiguration describes the topic configuration. If none is provided, it will not be created. - TopicConfiguration *TopicConfiguration - // Brokers is the list of brokers to connect to. - Brokers []string - // Version is the version of Kafka we assume to work - Version Version - // UseTls tells if we should use TLS. - UseTLS bool + // Connect is the configuration to connect to Kafka. + Connect kafka.Configuration // FlushInterval tells how often to flush pending data to Kafka. FlushInterval time.Duration // FlushBytes tells to flush when there are many bytes to write @@ -33,22 +27,9 @@ type Configuration struct { QueueSize int } -// TopicConfiguration describes the configuration for a topic -type TopicConfiguration struct { - // NumPartitions tells how many partitions should be used for the topic. - NumPartitions int32 - // ReplicationFactor tells the replication factor for the topic. - ReplicationFactor int16 - // ConfigEntries is a map to specify the topic overrides. 
Non-listed overrides will be removed - ConfigEntries map[string]*string -} - // DefaultConfiguration represents the default configuration for the Kafka exporter. var DefaultConfiguration = Configuration{ - Topic: "flows", - Brokers: []string{"127.0.0.1:9092"}, - Version: Version(sarama.DefaultVersion), - UseTLS: false, + Connect: kafka.DefaultConfiguration, FlushInterval: 10 * time.Second, FlushBytes: int(sarama.MaxRequestSize) - 1, MaxMessageBytes: 1000000, @@ -56,29 +37,6 @@ var DefaultConfiguration = Configuration{ QueueSize: 32, } -// Version represents a supported version of Kafka -type Version sarama.KafkaVersion - -// UnmarshalText parses a version of Kafka -func (v *Version) UnmarshalText(text []byte) error { - version, err := sarama.ParseKafkaVersion(string(text)) - if err != nil { - return err - } - *v = Version(version) - return nil -} - -// String turns a Kafka version into a string -func (v Version) String() string { - return sarama.KafkaVersion(v).String() -} - -// MarshalText turns a Kafka version intro a string -func (v Version) MarshalText() ([]byte, error) { - return []byte(v.String()), nil -} - // CompressionCodec represents a compression codec. 
type CompressionCodec sarama.CompressionCodec @@ -108,8 +66,3 @@ func (cc CompressionCodec) String() string { func (cc CompressionCodec) MarshalText() ([]byte, error) { return []byte(cc.String()), nil } - -// GetConfiguration returns component configuration -func (c *Component) GetConfiguration() Configuration { - return c.config -} diff --git a/kafka/config_test.go b/inlet/kafka/config_test.go similarity index 100% rename from kafka/config_test.go rename to inlet/kafka/config_test.go diff --git a/inlet/kafka/functional_test.go b/inlet/kafka/functional_test.go new file mode 100644 index 00000000..795ffe3c --- /dev/null +++ b/inlet/kafka/functional_test.go @@ -0,0 +1,99 @@ +package kafka + +import ( + "errors" + "fmt" + "math/rand" + "testing" + "time" + + "github.com/Shopify/sarama" + + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/kafka" + "akvorado/common/reporter" + "akvorado/inlet/flow" +) + +func TestRealKafka(t *testing.T) { + client, brokers := kafka.SetupKafkaBroker(t) + + rand.Seed(time.Now().UnixMicro()) + topicName := fmt.Sprintf("test-topic-%d", rand.Int()) + configuration := DefaultConfiguration + configuration.Connect.Topic = topicName + configuration.Connect.Brokers = brokers + configuration.Connect.Version = kafka.Version(sarama.V2_8_1_0) + configuration.FlushInterval = 100 * time.Millisecond + expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion) + r := reporter.NewMock(t) + c, err := New(r, configuration, Dependencies{Daemon: daemon.NewMock(t)}) + if err != nil { + t.Fatalf("New() error:\n%+v", err) + } + if err := c.Start(); err != nil { + t.Fatalf("Start() error:\n%+v", err) + } + defer func() { + if err := c.Stop(); err != nil { + t.Fatalf("Stop() error:\n%+v", err) + } + }() + + c.Send("127.0.0.1", []byte("hello world!")) + c.Send("127.0.0.1", []byte("goodbye world!")) + + time.Sleep(10 * time.Millisecond) + gotMetrics := r.GetMetrics("akvorado_inlet_kafka_", "sent_") + expectedMetrics := 
map[string]string{ + `sent_bytes_total{exporter="127.0.0.1"}`: "26", + `sent_messages_total{exporter="127.0.0.1"}`: "2", + } + if diff := helpers.Diff(gotMetrics, expectedMetrics); diff != "" { + t.Fatalf("Metrics (-got, +want):\n%s", diff) + } + + // Try to consume the two messages + consumer, err := sarama.NewConsumerFromClient(client) + if err != nil { + t.Fatalf("NewConsumerGroup() error:\n%+v", err) + } + defer consumer.Close() + var partitions []int32 + for { + partitions, err = consumer.Partitions(expectedTopicName) + if err != nil { + if errors.Is(err, sarama.ErrUnknownTopicOrPartition) { + // Wait for topic to be available + continue + } + t.Fatalf("Partitions() error:\n%+v", err) + } + break + } + partitionConsumer, err := consumer.ConsumePartition(expectedTopicName, partitions[0], sarama.OffsetOldest) + if err != nil { + t.Fatalf("ConsumePartitions() error:\n%+v", err) + } + + got := []string{} + expected := []string{ + "127.0.0.1:hello world!", + "127.0.0.1:goodbye world!", + } + timeout := time.After(15 * time.Second) + for i := 0; i < len(expected); i++ { + select { + case msg := <-partitionConsumer.Messages(): + got = append(got, fmt.Sprintf("%s:%s", string(msg.Key), string(msg.Value))) + case err := <-partitionConsumer.Errors(): + t.Fatalf("consumer.Errors():\n%+v", err) + case <-timeout: + } + } + + if diff := helpers.Diff(got, expected); diff != "" { + t.Fatalf("Didn't received the expected messages (-got, +want):\n%s", diff) + } +} diff --git a/kafka/metrics.go b/inlet/kafka/metrics.go similarity index 99% rename from kafka/metrics.go rename to inlet/kafka/metrics.go index da3ca7ca..62ba5594 100644 --- a/kafka/metrics.go +++ b/inlet/kafka/metrics.go @@ -6,7 +6,7 @@ import ( "github.com/prometheus/client_golang/prometheus" gometrics "github.com/rcrowley/go-metrics" - "akvorado/reporter" + "akvorado/common/reporter" ) type metrics struct { diff --git a/kafka/root.go b/inlet/kafka/root.go similarity index 53% rename from kafka/root.go rename to 
inlet/kafka/root.go index 55bf3d3c..54732d59 100644 --- a/kafka/root.go +++ b/inlet/kafka/root.go @@ -2,8 +2,6 @@ package kafka import ( - "crypto/tls" - "crypto/x509" "fmt" "strings" "time" @@ -11,9 +9,10 @@ import ( "github.com/Shopify/sarama" "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/flow" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/kafka" + "akvorado/common/reporter" + "akvorado/inlet/flow" ) // Component represents the Kafka exporter. @@ -39,8 +38,8 @@ type Dependencies struct { func New(reporter *reporter.Reporter, configuration Configuration, dependencies Dependencies) (*Component, error) { // Build Kafka configuration kafkaConfig := sarama.NewConfig() - kafkaConfig.Version = sarama.KafkaVersion(configuration.Version) - kafkaConfig.Metadata.AllowAutoTopicCreation = false + kafkaConfig.Version = sarama.KafkaVersion(configuration.Connect.Version) + kafkaConfig.Metadata.AllowAutoTopicCreation = true kafkaConfig.Producer.MaxMessageBytes = configuration.MaxMessageBytes kafkaConfig.Producer.Compression = sarama.CompressionCodec(configuration.CompressionCodec) kafkaConfig.Producer.Return.Successes = false @@ -49,14 +48,6 @@ func New(reporter *reporter.Reporter, configuration Configuration, dependencies kafkaConfig.Producer.Flush.Frequency = configuration.FlushInterval kafkaConfig.Producer.Partitioner = sarama.NewHashPartitioner kafkaConfig.ChannelBufferSize = configuration.QueueSize / 2 - if configuration.UseTLS { - rootCAs, err := x509.SystemCertPool() - if err != nil { - return nil, fmt.Errorf("cannot initialize TLS: %w", err) - } - kafkaConfig.Net.TLS.Enable = true - kafkaConfig.Net.TLS.Config = &tls.Config{RootCAs: rootCAs} - } if err := kafkaConfig.Validate(); err != nil { return nil, fmt.Errorf("cannot validate Kafka configuration: %w", err) } @@ -67,80 +58,31 @@ func New(reporter *reporter.Reporter, configuration Configuration, dependencies config: configuration, kafkaConfig: kafkaConfig, - kafkaTopic: fmt.Sprintf("%s-v%d", 
configuration.Topic, flow.CurrentSchemaVersion), + kafkaTopic: fmt.Sprintf("%s-v%d", configuration.Connect.Topic, flow.CurrentSchemaVersion), } c.initMetrics() c.createKafkaProducer = func() (sarama.AsyncProducer, error) { - return sarama.NewAsyncProducer(c.config.Brokers, c.kafkaConfig) + return sarama.NewAsyncProducer(c.config.Connect.Brokers, c.kafkaConfig) } - c.d.Daemon.Track(&c.t, "kafka") + c.d.Daemon.Track(&c.t, "inlet/kafka") return &c, nil } // Start starts the Kafka component. func (c *Component) Start() error { c.r.Info().Msg("starting Kafka component") - globalKafkaLogger.r.Store(c.r) + kafka.GlobalKafkaLogger.Register(c.r) // Create producer kafkaProducer, err := c.createKafkaProducer() if err != nil { c.r.Err(err). - Str("brokers", strings.Join(c.config.Brokers, ",")). + Str("brokers", strings.Join(c.config.Connect.Brokers, ",")). Msg("unable to create async producer") return fmt.Errorf("unable to create Kafka async producer: %w", err) } c.kafkaProducer = kafkaProducer - // Create topic - if c.config.TopicConfiguration != nil { - client, err := sarama.NewClusterAdmin(c.config.Brokers, c.kafkaConfig) - if err != nil { - kafkaProducer.Close() - c.r.Err(err). - Str("brokers", strings.Join(c.config.Brokers, ",")). - Msg("unable to get admin client for topic creation") - return fmt.Errorf("unable to get admin client for topic creation: %w", err) - } - defer client.Close() - l := c.r.With(). - Str("brokers", strings.Join(c.config.Brokers, ",")). - Str("topic", c.kafkaTopic). 
- Logger() - topics, err := client.ListTopics() - if err != nil { - l.Err(err).Msg("unable to get metadata for topics") - return fmt.Errorf("unable to get metadata for topics: %w", err) - } - if topic, ok := topics[c.kafkaTopic]; !ok { - if err := client.CreateTopic(c.kafkaTopic, - &sarama.TopicDetail{ - NumPartitions: c.config.TopicConfiguration.NumPartitions, - ReplicationFactor: c.config.TopicConfiguration.ReplicationFactor, - ConfigEntries: c.config.TopicConfiguration.ConfigEntries, - }, false); err != nil { - l.Err(err).Msg("unable to create topic") - return fmt.Errorf("unable to create topic %q: %w", c.kafkaTopic, err) - } - l.Info().Msg("topic created") - } else { - if topic.NumPartitions != c.config.TopicConfiguration.NumPartitions { - l.Warn().Msgf("mismatch for number of partitions: got %d, want %d", - topic.NumPartitions, c.config.TopicConfiguration.NumPartitions) - } - if topic.ReplicationFactor != c.config.TopicConfiguration.ReplicationFactor { - l.Warn().Msgf("mismatch for replication factor: got %d, want %d", - topic.ReplicationFactor, c.config.TopicConfiguration.ReplicationFactor) - } - if err := client.AlterConfig(sarama.TopicResource, c.kafkaTopic, c.config.TopicConfiguration.ConfigEntries, false); err != nil { - l.Err(err).Msg("unable to set topic configuration") - return fmt.Errorf("unable to set topic configuration for %q: %w", - c.kafkaTopic, err) - } - l.Info().Msg("topic updated") - } - } - // Main loop c.t.Go(func() error { defer kafkaProducer.Close() @@ -166,9 +108,8 @@ func (c *Component) Start() error { // Stop stops the Kafka component func (c *Component) Stop() error { - var noreporter *reporter.Reporter defer func() { - globalKafkaLogger.r.Store(noreporter) + kafka.GlobalKafkaLogger.Unregister() c.r.Info().Msg("Kafka component stopped") }() c.r.Info().Msg("stopping Kafka component") diff --git a/kafka/root_test.go b/inlet/kafka/root_test.go similarity index 94% rename from kafka/root_test.go rename to inlet/kafka/root_test.go index 
c95fa547..d0865793 100644 --- a/kafka/root_test.go +++ b/inlet/kafka/root_test.go @@ -8,9 +8,9 @@ import ( "github.com/Shopify/sarama" gometrics "github.com/rcrowley/go-metrics" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func TestKafka(t *testing.T) { @@ -44,7 +44,7 @@ func TestKafka(t *testing.T) { c.Send("127.0.0.1", []byte("goodbye world!")) time.Sleep(10 * time.Millisecond) - gotMetrics := r.GetMetrics("akvorado_kafka_") + gotMetrics := r.GetMetrics("akvorado_inlet_kafka_") expectedMetrics := map[string]string{ `sent_bytes_total{exporter="127.0.0.1"}`: "26", `errors_total{error="kafka: Failed to produce message to topic flows-v1: noooo"}`: "1", @@ -83,7 +83,7 @@ func TestKafkaMetrics(t *testing.T) { gometrics.GetOrRegisterCounter("requests-in-flight-for-broker-1112", c.kafkaConfig.MetricRegistry). Inc(20) - gotMetrics := r.GetMetrics("akvorado_kafka_") + gotMetrics := r.GetMetrics("akvorado_inlet_kafka_") expectedMetrics := map[string]string{ `brokers_incoming_byte_rate{broker="1111"}`: "0", `brokers_incoming_byte_rate{broker="1112"}`: "0", diff --git a/kafka/tests.go b/inlet/kafka/tests.go similarity index 93% rename from kafka/tests.go rename to inlet/kafka/tests.go index 818f4558..d89cdc5b 100644 --- a/kafka/tests.go +++ b/inlet/kafka/tests.go @@ -8,8 +8,8 @@ import ( "github.com/Shopify/sarama" "github.com/Shopify/sarama/mocks" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/reporter" ) // NewMock creates a new Kafka component with a mocked Kafka. 
It will diff --git a/snmp/cache.go b/inlet/snmp/cache.go similarity index 99% rename from snmp/cache.go rename to inlet/snmp/cache.go index bf5d54fa..170204f0 100644 --- a/snmp/cache.go +++ b/inlet/snmp/cache.go @@ -14,7 +14,7 @@ import ( "github.com/benbjohnson/clock" - "akvorado/reporter" + "akvorado/common/reporter" ) var ( diff --git a/snmp/cache_test.go b/inlet/snmp/cache_test.go similarity index 97% rename from snmp/cache_test.go rename to inlet/snmp/cache_test.go index 28a7944f..9dd96a9b 100644 --- a/snmp/cache_test.go +++ b/inlet/snmp/cache_test.go @@ -14,8 +14,8 @@ import ( "github.com/benbjohnson/clock" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func setupTestCache(t *testing.T) (*reporter.Reporter, *clock.Mock, *snmpCache) { @@ -45,7 +45,7 @@ func TestGetEmpty(t *testing.T) { r, _, sc := setupTestCache(t) expectCacheLookup(t, sc, "127.0.0.1", 676, answer{Err: ErrCacheMiss}) - gotMetrics := r.GetMetrics("akvorado_snmp_cache_") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_cache_") expectedMetrics := map[string]string{ `expired`: "0", `hit`: "0", @@ -67,7 +67,7 @@ func TestSimpleLookup(t *testing.T) { expectCacheLookup(t, sc, "127.0.0.1", 787, answer{Err: ErrCacheMiss}) expectCacheLookup(t, sc, "127.0.0.2", 676, answer{Err: ErrCacheMiss}) - gotMetrics := r.GetMetrics("akvorado_snmp_cache_") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_cache_") expectedMetrics := map[string]string{ `expired`: "0", `hit`: "1", @@ -123,7 +123,7 @@ func TestExpire(t *testing.T) { ExporterName: "localhost", Interface: Interface{Name: "Gi0/0/0/1", Description: "Transit"}}) - gotMetrics := r.GetMetrics("akvorado_snmp_cache_") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_cache_") expectedMetrics := map[string]string{ `expired`: "3", `hit`: "7", @@ -376,7 +376,7 @@ func TestConcurrentOperations(t *testing.T) { close(done) wg.Wait() - gotMetrics := r.GetMetrics("akvorado_snmp_cache_") + gotMetrics := 
r.GetMetrics("akvorado_inlet_snmp_cache_") hits, _ := strconv.Atoi(gotMetrics["hit"]) misses, _ := strconv.Atoi(gotMetrics["miss"]) size, _ := strconv.Atoi(gotMetrics["size"]) diff --git a/snmp/config.go b/inlet/snmp/config.go similarity index 100% rename from snmp/config.go rename to inlet/snmp/config.go diff --git a/snmp/poller.go b/inlet/snmp/poller.go similarity index 99% rename from snmp/poller.go rename to inlet/snmp/poller.go index 00431f98..70ef13eb 100644 --- a/snmp/poller.go +++ b/inlet/snmp/poller.go @@ -10,7 +10,7 @@ import ( "github.com/benbjohnson/clock" "github.com/gosnmp/gosnmp" - "akvorado/reporter" + "akvorado/common/reporter" ) type poller interface { diff --git a/snmp/poller_test.go b/inlet/snmp/poller_test.go similarity index 96% rename from snmp/poller_test.go rename to inlet/snmp/poller_test.go index d4d1c22e..f3d9fc66 100644 --- a/snmp/poller_test.go +++ b/inlet/snmp/poller_test.go @@ -12,8 +12,8 @@ import ( "github.com/slayercat/GoSNMPServer" "github.com/slayercat/gosnmp" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func TestPoller(t *testing.T) { @@ -124,7 +124,7 @@ func TestPoller(t *testing.T) { t.Fatalf("Poll() (-got, +want):\n%s", diff) } - gotMetrics := r.GetMetrics("akvorado_snmp_poller_", "failure_", "pending_", "success_") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_poller_", "failure_", "pending_", "success_") expectedMetrics := map[string]string{ `failure_requests{error="ifalias_missing",exporter="127.0.0.1"}`: "2", // 643+644 `failure_requests{error="ifdescr_missing",exporter="127.0.0.1"}`: "1", // 644 diff --git a/snmp/root.go b/inlet/snmp/root.go similarity index 99% rename from snmp/root.go rename to inlet/snmp/root.go index 0fca8d6d..b53c7d36 100644 --- a/snmp/root.go +++ b/inlet/snmp/root.go @@ -14,8 +14,8 @@ import ( "github.com/eapache/go-resiliency/breaker" "gopkg.in/tomb.v2" - "akvorado/daemon" - "akvorado/reporter" + "akvorado/common/daemon" + 
"akvorado/common/reporter" ) // Component represents the SNMP compomenent. @@ -79,7 +79,7 @@ func New(r *reporter.Reporter, configuration Configuration, dependencies Depende Timeout: configuration.PollerTimeout, }, dependencies.Clock, sc.Put), } - c.d.Daemon.Track(&c.t, "snmp") + c.d.Daemon.Track(&c.t, "inlet/snmp") c.metrics.cacheRefreshRuns = r.Counter( reporter.CounterOpts{ diff --git a/snmp/root_test.go b/inlet/snmp/root_test.go similarity index 96% rename from snmp/root_test.go rename to inlet/snmp/root_test.go index 968ff0ac..f88e26e3 100644 --- a/snmp/root_test.go +++ b/inlet/snmp/root_test.go @@ -9,9 +9,9 @@ import ( "github.com/benbjohnson/clock" - "akvorado/daemon" - "akvorado/helpers" - "akvorado/reporter" + "akvorado/common/daemon" + "akvorado/common/helpers" + "akvorado/common/reporter" ) func expectSNMPLookup(t *testing.T, c *Component, exporter string, ifIndex uint, expected answer) { @@ -136,7 +136,7 @@ func TestAutoRefresh(t *testing.T) { t.Fatalf("Stop() error:\n%+v", err) } - gotMetrics := r.GetMetrics("akvorado_snmp_cache_") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_cache_") expectedMetrics := map[string]string{ `expired`: "0", `hit`: "4", @@ -223,7 +223,7 @@ func TestCoalescing(t *testing.T) { close(blocker) time.Sleep(20 * time.Millisecond) - gotMetrics := r.GetMetrics("akvorado_snmp_poller_", "coalesced_count") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_poller_", "coalesced_count") expectedMetrics := map[string]string{ `coalesced_count`: "4", } @@ -282,7 +282,7 @@ func TestPollerBreaker(t *testing.T) { } time.Sleep(50 * time.Millisecond) - gotMetrics := r.GetMetrics("akvorado_snmp_poller_", "breaker_open_count", "coalesced_count") + gotMetrics := r.GetMetrics("akvorado_inlet_snmp_poller_", "breaker_open_count", "coalesced_count") expectedMetrics := map[string]string{ `coalesced_count`: "0", `breaker_open_count{exporter="127.0.0.1"}`: tc.ExpectedCount, diff --git a/snmp/tests.go b/inlet/snmp/tests.go similarity index 98% rename 
from snmp/tests.go rename to inlet/snmp/tests.go index 83ed1ed1..babdfb09 100644 --- a/snmp/tests.go +++ b/inlet/snmp/tests.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - "akvorado/reporter" + "akvorado/common/reporter" ) // mockPoller will use static data. diff --git a/kafka/functional_test.go b/kafka/functional_test.go deleted file mode 100644 index c1c2110c..00000000 --- a/kafka/functional_test.go +++ /dev/null @@ -1,227 +0,0 @@ -package kafka - -import ( - "errors" - "fmt" - "math/rand" - "testing" - "time" - - "github.com/Shopify/sarama" - - "akvorado/daemon" - "akvorado/flow" - "akvorado/helpers" - "akvorado/reporter" -) - -func setupKafkaBroker(t *testing.T) (sarama.Client, []string) { - broker := helpers.CheckExternalService(t, "Kafka", []string{"kafka", "localhost"}, "9092") - - // Wait for broker to be ready - saramaConfig := sarama.NewConfig() - saramaConfig.Version = sarama.V2_8_1_0 - saramaConfig.Net.DialTimeout = 1 * time.Second - saramaConfig.Net.ReadTimeout = 1 * time.Second - saramaConfig.Net.WriteTimeout = 1 * time.Second - ready := false - var ( - client sarama.Client - err error - ) - for i := 0; i < 90; i++ { - if client != nil { - client.Close() - } - client, err = sarama.NewClient([]string{broker}, saramaConfig) - if err != nil { - continue - } - if err := client.RefreshMetadata(); err != nil { - continue - } - brokers := client.Brokers() - if len(brokers) == 0 { - continue - } - if err := brokers[0].Open(client.Config()); err != nil { - continue - } - if connected, err := brokers[0].Connected(); err != nil || !connected { - brokers[0].Close() - continue - } - brokers[0].Close() - ready = true - } - if !ready { - t.Fatalf("broker is not ready") - } - - return client, []string{broker} -} - -func TestRealKafka(t *testing.T) { - client, brokers := setupKafkaBroker(t) - - rand.Seed(time.Now().UnixMicro()) - topicName := fmt.Sprintf("test-topic-%d", rand.Int()) - configuration := DefaultConfiguration - configuration.Topic = topicName - 
configuration.TopicConfiguration = &TopicConfiguration{ - NumPartitions: 1, - ReplicationFactor: 1, - } - configuration.Brokers = brokers - configuration.Version = Version(sarama.V2_8_1_0) - configuration.FlushInterval = 100 * time.Millisecond - expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion) - r := reporter.NewMock(t) - c, err := New(r, configuration, Dependencies{Daemon: daemon.NewMock(t)}) - if err != nil { - t.Fatalf("New() error:\n%+v", err) - } - if err := c.Start(); err != nil { - t.Fatalf("Start() error:\n%+v", err) - } - defer func() { - if err := c.Stop(); err != nil { - t.Fatalf("Stop() error:\n%+v", err) - } - }() - - c.Send("127.0.0.1", []byte("hello world!")) - c.Send("127.0.0.1", []byte("goodbye world!")) - - time.Sleep(10 * time.Millisecond) - gotMetrics := r.GetMetrics("akvorado_kafka_", "sent_") - expectedMetrics := map[string]string{ - `sent_bytes_total{exporter="127.0.0.1"}`: "26", - `sent_messages_total{exporter="127.0.0.1"}`: "2", - } - if diff := helpers.Diff(gotMetrics, expectedMetrics); diff != "" { - t.Fatalf("Metrics (-got, +want):\n%s", diff) - } - - // Try to consume the two messages - consumer, err := sarama.NewConsumerFromClient(client) - if err != nil { - t.Fatalf("NewConsumerGroup() error:\n%+v", err) - } - defer consumer.Close() - var partitions []int32 - for { - partitions, err = consumer.Partitions(expectedTopicName) - if err != nil { - if errors.Is(err, sarama.ErrUnknownTopicOrPartition) { - // Wait for topic to be available - continue - } - t.Fatalf("Partitions() error:\n%+v", err) - } - break - } - partitionConsumer, err := consumer.ConsumePartition(expectedTopicName, partitions[0], sarama.OffsetOldest) - if err != nil { - t.Fatalf("ConsumePartitions() error:\n%+v", err) - } - - got := []string{} - expected := []string{ - "127.0.0.1:hello world!", - "127.0.0.1:goodbye world!", - } - timeout := time.After(15 * time.Second) - for i := 0; i < len(expected); i++ { - select { - case msg := 
<-partitionConsumer.Messages(): - got = append(got, fmt.Sprintf("%s:%s", string(msg.Key), string(msg.Value))) - case err := <-partitionConsumer.Errors(): - t.Fatalf("consumer.Errors():\n%+v", err) - case <-timeout: - } - } - - if diff := helpers.Diff(got, expected); diff != "" { - t.Fatalf("Didn't received the expected messages (-got, +want):\n%s", diff) - } -} - -func TestTopicCreation(t *testing.T) { - client, brokers := setupKafkaBroker(t) - - rand.Seed(time.Now().UnixMicro()) - topicName := fmt.Sprintf("test-topic-%d", rand.Int()) - expectedTopicName := fmt.Sprintf("%s-v%d", topicName, flow.CurrentSchemaVersion) - retentionMs := "76548" - segmentBytes := "107374184" - segmentBytes2 := "10737184" - cleanupPolicy := "delete" - - cases := []struct { - Name string - ConfigEntries map[string]*string - }{ - { - Name: "Set initial config", - ConfigEntries: map[string]*string{ - "retention.ms": &retentionMs, - "segment.bytes": &segmentBytes, - }, - }, { - Name: "Alter initial config", - ConfigEntries: map[string]*string{ - "retention.ms": &retentionMs, - "segment.bytes": &segmentBytes2, - "cleanup.policy": &cleanupPolicy, - }, - }, { - Name: "Remove item", - ConfigEntries: map[string]*string{ - "retention.ms": &retentionMs, - "segment.bytes": &segmentBytes2, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.Name, func(t *testing.T) { - configuration := DefaultConfiguration - configuration.Topic = topicName - configuration.TopicConfiguration = &TopicConfiguration{ - NumPartitions: 1, - ReplicationFactor: 1, - ConfigEntries: tc.ConfigEntries, - } - configuration.Brokers = brokers - configuration.Version = Version(sarama.V2_8_1_0) - c, err := New(reporter.NewMock(t), configuration, Dependencies{Daemon: daemon.NewMock(t)}) - if err != nil { - t.Fatalf("New() error:\n%+v", err) - } - if err := c.Start(); err != nil { - t.Fatalf("Start() error:\n%+v", err) - } - if err := c.Stop(); err != nil { - t.Fatalf("Start() error:\n%+v", err) - } - - adminClient, err := 
sarama.NewClusterAdminFromClient(client) - if err != nil { - t.Fatalf("NewClusterAdmin() error:\n%+v", err) - } - topics, err := adminClient.ListTopics() - if err != nil { - t.Fatalf("ListTopics() error:\n%+v", err) - } - topic, ok := topics[expectedTopicName] - if !ok { - t.Fatal("ListTopics() did not find the topic") - } - if diff := helpers.Diff(topic.ConfigEntries, tc.ConfigEntries); diff != "" { - t.Fatalf("ListTopics() (-got, +want):\n%s", diff) - } - }) - } - -} diff --git a/web/data/assets/images/design.svg b/web/data/assets/images/design.svg deleted file mode 100644 index 418d3671..00000000 --- a/web/data/assets/images/design.svg +++ /dev/null @@ -1,4 +0,0 @@ - - - -
exporter 1
exporter...
exporter 2
exporter...
exporter 3
exporter...
Flow ingest
Flow ingest
Core component
Core component
GeoIP
GeoIP
1: Send Netflow v9
1: Send Netflow v9
3: Send decoded flow
3: Send decoded flow
SNMP poller
SNMP poller
4: Query country and ASN
4: Query country and ASN
5: Query interface name
5: Query interface name
6: SNMP request
6: SNMP request
1
1
3
3
4
4
5
5
Kafka
Kafka
2
2
6
6
7: Push to Kafka
7: Push to Kafka
Kafka
Kafka
ClickHouse
ClickHouse
Grafana
Grafana
7
7
Text is not SVG - cannot display
\ No newline at end of file diff --git a/web/data/docs/00-intro.md b/web/data/docs/00-intro.md deleted file mode 100644 index ad8d0774..00000000 --- a/web/data/docs/00-intro.md +++ /dev/null @@ -1,46 +0,0 @@ -![](../assets/images/akvorado.svg) - -# Introduction - -*Akvorado*[^name] is a flow collector, hydrater and exporter. It -receives flows, adds some data like interface names and countries, and -exports them to Kafka. - -[^name]: [Akvorado][] means "water wheel" in Esperanto. - -[Akvorado]: https://eo.wikipedia.org/wiki/Akvorado - -## Big picture - -The general design of *Akvorado* is the following: - -- The exporters send Netflow and IPFIX flows to Akvorado. They don't - need to be declared as Akvorado accepts flows from anyone. -- The received flows are decoded and hydrated with additional - information: - - source and destination countries (GeoIP database) - - source and destination AS numbers (GeoIP database) - - source and destination interface names, descriptions and speeds (SNMP) -- The SNMP poller queries the exporters for host names, interface - names, interface descriptions and interface speeds. This information - is cached and updated from time to time. -- Once a flow is hydrated, it is transformed into a binary - representation using *protocol buffers* and sent to Kafka. - -The remaining steps are outside of *Akvorado* control: - -- ClickHouse subscribes to the Kafka topic to receive and store the - flows. -- Grafana queries ClickHouse to build various dashboards. - -## Flow schema - -Flows sent to Kafka are encoded with a versioned schema, described in -the `flow-*.proto` files. Any information that could change with time -is embedded in the flow. This includes for example interface names and -speeds, as well. This ensures that older data are not processed using -incorrect mappings. - -Each time the schema changes, we issue a new `flow-*.proto` file, -update the schema version and a new Kafka topic will be used. 
This -ensures we do not mix different schemas in a single topic. diff --git a/web/data/docs/03-usage.md b/web/data/docs/03-usage.md deleted file mode 100644 index d9d411e7..00000000 --- a/web/data/docs/03-usage.md +++ /dev/null @@ -1,45 +0,0 @@ -# Usage - -*Akvorado* uses a subcommand system. Each subcommand comes with its -own set of options. It is possible to get help using `akvorado ---help`. - -## Starting Akvorado - -`akvorado serve` starts *Akvorado* itself, allowing it to receive and -process flows. When started from a TTY, it will display logs in a -fancy way. Without a TTY, logs are output formatted as JSON. - -The `--config` options allows to provide a configuration file in YAML -format. See the [configuration section](02-configuration.md) for more -information on this file. - -The `--check` option will check if the provided configuration is -correct and stops here. The `--dump` option will dump the parsed -configuration, along with the default values. It should be combined -with `--check` if you don't want *Akvorado* to start. - -## Exposed HTTP endpoints - -The embedded HTTP server contains the following endpoints: - -- [`/api/v0/metrics`](/api/v0/metrics): Prometheus metrics -- [`/api/v0/version`](/api/v0/version): *Akvorado* version -- [`/api/v0/healthcheck`](/api/v0/healthcheck): are we alive? -- [`/api/v0/flows`](/api/v0/flows?limit=1): next available flows -- [`/api/v0/schemas.json`](/api/v0/schemas.json): versioned list of protobuf schemas used to export flows -- [`/api/v0/schema-X.proto`](/api/v0/schema-1.proto): protobuf schema used to export flows -- `/api/v0/clickhouse`: various endpoints for [ClickHouse integration](04-integration.md#clickhouse) - -The [`/api/v0/flows`](/api/v0/flows?limit=1) continously printed flows -sent to Kafka (using [ndjson]()). It also accepts a `limit` argument -to stops after emitting the specified number of flows. 
This endpoint -should not be used for anything else other than debug: it can skips -some flows and if there are several users, flows will be dispatched -between them. - -[ndjson]: http://ndjson.org/ - -## Other commands - -`akvorado version` displays the version. diff --git a/web/data/docs/04-integration.md b/web/data/docs/04-integration.md deleted file mode 100644 index bcb15f1e..00000000 --- a/web/data/docs/04-integration.md +++ /dev/null @@ -1,52 +0,0 @@ -# Integrations - -*Akvorado* needs some integration with external components to be -useful. The most important one is Kafka, but it can also integrate -with ClickHouse and Grafana. - -## Kafka - -The Kafka component sends flows to Kafka. Its -[configuration](02-configuration.md#kafka) mostly needs a topic name and a list -of brokers. It is possible to let *Akvorado* manage the topic with the -appropriate settings (number of partitions, replication factor and -additional configuration entries). If the topic exists, *Akvorado* -won't update the number of partitions and the replication factor but -other settings will be updated. - -Each time a new flow schema is needed, a different topic is used. -*Akvorado* suffixes the topic name with the version to ensure this -property. - -## ClickHouse - -ClickHouse can collect the data from Kafka. To help its configuration, -*Akvorado* exposes a few HTTP endpoint: - -- `/api/v0/clickhouse/init.sh` contains the schemas in the form of a - script to execute during initialization -- `/api/v0/clickhouse/protocols.csv` contains a CSV with the mapping - between protocol numbers and names -- `/api/v0/clickhouse/asns.csv` contains a CSV with the mapping - between AS numbers and organization names - -When [configured](02-configuration.md#clickhouse), it can also populate -the database with the appropriate tables and manages them. As a -prerequisite, the script contained in `/api/v0/clickhouse/init.sh` -should be executed. 
It is not possible for ClickHouse to fetch the -appropriate schemas in another way. - -ClickHouse clusters are currently not supported, despite being able to -configure several servers in the configuration. Several servers are in -fact managed like they are a copy of one another. - -*Akvorado* also handles database migration during upgrades. When the -protobuf schema is updated, new Kafka tables should be created, as -well as the associated materialized view. Older tables should be kept -around, notably when upgrades can be rolling (some *akvorado* -instances are still running an older version). - -## Grafana - -No integration is currently done for Grafana, except a reverse proxy -configured in the [web section](02-configuration.md#web).