diff --git a/.dockerignore b/.dockerignore
index 94143827..1b3037c1 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1 +1,2 @@
Dockerfile
+node_modules/
diff --git a/.gitignore b/.gitignore
index 1e56aca4..998d5c5b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,6 @@
/bin/
/test/
/flow/decoder/flow*.pb.go
-/web/data
+
+/web/data/node_modules/
+/web/data/assets/generated/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a72df4bc..9ce2a8a1 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -32,9 +32,8 @@ run tests:
alias: clickhouse
script:
- export GOMODCACHE=$PWD/.go-cache
- - time apk add --no-cache git make gcc musl-dev protoc shared-mime-info
+ - time apk add --no-cache git make gcc musl-dev protoc shared-mime-info yarn
- time go mod download
- - mkdir -p web/data && touch web/data/install.html # don't build web data
- time make test
- time make test-race || make test-race
- time make test-coverage || make test-coverage
diff --git a/Dockerfile b/Dockerfile
index 8a2b34a2..69ffdfce 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,19 +1,11 @@
-FROM squidfunk/mkdocs-material:8.2.5 AS documentation
-COPY mkdocs.yml /docs/
-COPY docs /docs/docs/
-RUN mkdocs build --strict --site-dir /output
-
FROM golang:1.18-alpine AS build
-RUN apk add --no-cache git make gcc musl-dev protoc shared-mime-info
+RUN apk add --no-cache git make gcc musl-dev protoc shared-mime-info yarn
WORKDIR /app
COPY go.mod ./
COPY go.sum ./
RUN go mod download
COPY . .
-RUN make clean
-COPY --from=documentation /output web/data/
-RUN find web/data
-RUN make test && make
+RUN make clean && make test && make
# Do not use scratch, we use alpine to get an healthcheck
FROM alpine
diff --git a/Makefile b/Makefile
index ede61578..4ec7a7a5 100644
--- a/Makefile
+++ b/Makefile
@@ -13,7 +13,7 @@ M = $(shell if [ "$$(tput colors 2> /dev/null || echo 0)" -ge 8 ]; then printf "
export GO111MODULE=on
-GENERATED = flow/decoder/flow-1.pb.go web/data
+GENERATED = flow/decoder/flow-1.pb.go web/data/node_modules web/data/assets/generated
.PHONY: all
all: fmt lint $(GENERATED) | $(BIN) ; $(info $(M) building executable…) @ ## Build program binary
@@ -50,13 +50,14 @@ $(BIN)/protoc-gen-go: PACKAGE=google.golang.org/protobuf/cmd/protoc-gen-go
flow/decoder/%.pb.go: flow/data/schemas/%.proto | $(PROTOC_GEN_GO) ; $(info $(M) compiling protocol buffers definition…)
$Q $(PROTOC) -I=. --plugin=$(PROTOC_GEN_GO) --go_out=. --go_opt=module=$(MODULE) $<
-web/data: mkdocs.yml $(wildcard docs/*.md docs/assets/*) ; $(info $(M) build documentation) @ ## Build documentation
- $Q rm -rf web/data
- $Q mkdir -p web/data
- $Q docker run --rm -it --user=$(shell id -u):$(shell id -g) \
- -v $(CURDIR):/docs:ro \
- -v $(CURDIR)/web/data:/output:rw \
- squidfunk/mkdocs-material:8.2.5 build --strict --site-dir /output
+web/data/node_modules: web/data/package.json web/data/yarn.lock ; $(info $(M) fetching node modules…)
+ $Q yarn install --frozen-lockfile --cwd web/data && touch $@
+web/data/assets/generated: web/data/node_modules Makefile ; $(info $(M) copying static assets…)
+ $Q rm -rf $@ && mkdir -p $@/stylesheets $@/javascript $@/fonts
+ $Q cp web/data/node_modules/@mdi/font/fonts/materialdesignicons-webfont.woff* $@/fonts/.
+ $Q cp web/data/node_modules/@mdi/font/css/materialdesignicons.min.css $@/stylesheets/.
+ $Q cp web/data/node_modules/bootstrap/dist/css/bootstrap.min.css $@/stylesheets/.
+ $Q cp web/data/node_modules/bootstrap/dist/js/bootstrap.bundle.min.js $@/javascript/.
# These files are versioned in Git, but we may want to update them.
clickhouse/data/protocols.csv:
diff --git a/docs b/docs
new file mode 120000
index 00000000..6283d6ca
--- /dev/null
+++ b/docs
@@ -0,0 +1 @@
+web/data/docs
\ No newline at end of file
diff --git a/docs/assets/images/akvorado.svg b/docs/assets/images/akvorado.svg
deleted file mode 100644
index f453ed2a..00000000
--- a/docs/assets/images/akvorado.svg
+++ /dev/null
@@ -1,91 +0,0 @@
-
-
-
-
diff --git a/docs/assets/images/design.svg b/docs/assets/images/design.svg
deleted file mode 100644
index 97510da7..00000000
--- a/docs/assets/images/design.svg
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/docs/assets/stylesheets/extra.css b/docs/assets/stylesheets/extra.css
deleted file mode 100644
index df0e1637..00000000
--- a/docs/assets/stylesheets/extra.css
+++ /dev/null
@@ -1,26 +0,0 @@
-/* Add more room for the logo and hide the title */
-.md-header__button.md-logo {
- margin: 0;
- margin-left: inherit;
- padding: 0;
- padding-left: inherit;
-}
-.md-header__button.md-logo img, .md-header__button.md-logo svg {
- height: 2.4rem;
- width: initial;
-}
-.md-header__title {
- visibility: hidden;
-}
-
-/* No footer */
-.md-footer {
- display: none;
-}
-
-/* Logo on the first page */
-.akvorado-logo {
- margin-left: auto;
- margin-right: auto;
- display: block;
-}
diff --git a/docs/design.md b/docs/design.md
deleted file mode 100644
index 57de9f4f..00000000
--- a/docs/design.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Design
-
-
- { width="500" }
- General design for Akvorado
-
-
-## Big picture
-
-The general design of *Akvorado* is the following:
-
-- The exporters send flow to Akvorado. They don't need to be declared.
-- The received flows are decoded then sent to the core component.
-- For each flow, the core component query the GeoIP component and the
- SNMP poller to get additional information, including country, AS
- number, interface name, description and speed.
-- The GeoIP component provides country and AS information for IP
- addresses using Maxmind databases.
-- The SNMP poller queries the exporters for host names, interface
- names, interface descriptions and interface speeds. This information
- is cached and updated from time to time.
-- Once the core component has a complete flow, it pushes it to the
- Kafka component.
-- The Kafka component turns the flow into a binary representation
- using *protocol buffers* and send the result into a Kafka topic.
-
-The remaining steps are outside of *Akvorado* control:
-
-- A ClickHouse database subscribe to the Kafka topic to receive and
- process the flows.
-- A graphing tool like Grafana queries this database to build various
- dashboards.
-
-## Flow representation
-
-The flow representation is encoded in a versioned `flow-*.proto` file.
-Any information that could change with time is embedded in the flow.
-This includes for example interface names and speeds, as well. This
-ensures that older data are not processed using incorrect mappings.
-
-Each time the schema changes, we issue a new `flow-*.proto` file,
-update the schema version and a new Kafka topic will be used. This
-ensures we do not mix different schemas in a single topic.
-
-## Future plans
-
-In the future, we may:
-
-- Add more information to the landing page, including some basic statistics.
-- Automatically build dashboards for Grafana.[^grafana]
-- Builds dashboards with [D3.js][].[^d3js]
-- Buffer message to disks instead of blocking (when sending to Kafka)
- or dropping (when querying the SNMP poller). We could probable just
- have a system service running tcpdump dumping packets to a directory
- and use that as input. This would be allow *Akvorado* to block from
- end-to-end instead of trying to be realtime.
-- Collect routes by integrating GoBGP. This is low priority if we
- consider information from Maxmind good enough for our use.
-
-[^grafana]: The templating system in Grafana is quite limited.
- Notably, it is difficult to build different query depending on the
- input fields. Grafana supports scripted dashboard, but it does not
- seem to be possible to have a function build the query string.
-[^d3js]: There is a [gallery][] containing many interesting examples,
- including [stacked area charts][], [small multiple charts][] and
- [Sankey diagrams][].
-[expression language]: https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md
-[D3.js]: https://d3js.org/
-[gallery]: https://www.d3-graph-gallery.com/
-[stacked area charts]: https://www.d3-graph-gallery.com/stackedarea.html
-[small multiple charts]: https://www.d3-graph-gallery.com/graph/area_smallmultiple.html
-[Sankey diagrams]: https://www.d3-graph-gallery.com/graph/sankey_basic.html
diff --git a/docs/index.md b/docs/index.md
deleted file mode 100644
index d7f26379..00000000
--- a/docs/index.md
+++ /dev/null
@@ -1,27 +0,0 @@
----
-hide:
- - navigation
- - toc
----
-
-# Akvorado { style=display:none }
-
-{ .akvorado-logo }
-
-*Akvorado* is a flow collector, hydrater and exporter. It receives
-flows, adds some data like interface names and geo information, and
-exports them to Kafka. [Akvorado][] means "water wheel" in Esperanto.
-
-[Akvorado]: https://eo.wikipedia.org/wiki/Akvorado
-
-
-
-The embedded HTTP server serves the following endpoints:
-
-- [`/api/v0/metrics`](/api/v0/metrics){ target=http }: Prometheus metrics
-- [`/api/v0/version`](/api/v0/version){ target=http }: *Akvorado* version
-- [`/api/v0/healthcheck`](/api/v0/healthcheck){ target=http }: are we alive?
-- [`/api/v0/flows`](/api/v0/flows?limit=1){ target=http }: next available flow
-- [`/api/v0/grafana`](/api/v0/grafana): Grafana web interface (if configured)
-
-
diff --git a/docs/usage.md b/docs/usage.md
deleted file mode 100644
index 6790b435..00000000
--- a/docs/usage.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# Usage
-
-*Akvorado* uses a subcommand system. Each subcommand comes with its
-own set of options. It is possible to get help using `akvorado
---help`.
-
-## version
-
-`akvorado version` displays the version.
-
-## serve
-
-`akvorado serve` starts *Akvorado* itself, allowing it to receive and
-process flows. When started from a TTY, it will display logs in a
-fancy way. Without a TTY, logs are output using JSON.
-
-The `--config` options allows to provide a configuration file in YAML
-format. See the [configuration section](configuration.md) for more
-information on this file.
-
-The `--check` option will check if the provided configuration is
-correct and stops here. The `--dump` option will dump the parsed
-configuration, along with the default values. It should be combined
-with `--check` if you don't want *Akvorado* to start.
-
-### Exposed HTTP endpoints
-
-The embedded HTTP server contains the endpoints listed on the [home
-page](index.md). The [`/api/v0/flows`](/api/v0/flows?limit=1)
-continously printed flows sent to Kafka (using [ndjson]()). It also
-accepts a `limit` argument to stops after emitting the specified
-number of flows. This endpoint should not be used for anything else
-other than debug: it can skips some flows and if there are several
-users, flows will be dispatched between them.
-
-[ndjson]: http://ndjson.org/
diff --git a/geoip/root.go b/geoip/root.go
index e10ec4b3..e3e88d02 100644
--- a/geoip/root.go
+++ b/geoip/root.go
@@ -121,9 +121,7 @@ func (c *Component) Start() error {
}
c.t.Go(func() error {
errLogger := c.r.Sample(reporter.BurstSampler(10*time.Second, 1))
- defer func() {
- watcher.Close()
- }()
+ defer watcher.Close()
for {
// Watch both for errors and events in the
diff --git a/go.mod b/go.mod
index c84fa3ce..b117f793 100644
--- a/go.mod
+++ b/go.mod
@@ -4,6 +4,7 @@ go 1.17
require (
github.com/ClickHouse/clickhouse-go/v2 v2.0.12
+ github.com/Masterminds/sprig v2.22.0+incompatible
github.com/Shopify/sarama v1.32.1-0.20220321223103-27b8f1b5973b
github.com/antonmedv/expr v1.9.0
github.com/benbjohnson/clock v1.3.0
@@ -25,6 +26,8 @@ require (
github.com/slayercat/GoSNMPServer v0.1.2
github.com/slayercat/gosnmp v1.24.0
github.com/spf13/cobra v1.3.0
+ github.com/yuin/goldmark v1.4.5
+ github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594
go.uber.org/atomic v1.7.0
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9
google.golang.org/protobuf v1.27.1
@@ -33,10 +36,14 @@ require (
)
require (
+ github.com/Masterminds/goutils v1.1.1 // indirect
+ github.com/Masterminds/semver v1.5.0 // indirect
github.com/StackExchange/wmi v1.2.1 // indirect
+ github.com/alecthomas/chroma v0.10.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
+ github.com/dlclark/regexp2 v1.4.0 // indirect
github.com/dustin/go-humanize v1.0.0 // indirect
github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21 // indirect
github.com/eapache/queue v1.1.0 // indirect
@@ -46,6 +53,8 @@ require (
github.com/google/uuid v1.3.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-uuid v1.0.2 // indirect
+ github.com/huandu/xstrings v1.3.2 // indirect
+ github.com/imdario/mergo v0.3.12 // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/jcmturner/aescts/v2 v2.0.0 // indirect
github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect
@@ -54,6 +63,8 @@ require (
github.com/jcmturner/rpc/v2 v2.0.3 // indirect
github.com/klauspost/compress v1.15.1 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect
+ github.com/mitchellh/copystructure v1.2.0 // indirect
+ github.com/mitchellh/reflectwalk v1.0.2 // indirect
github.com/oschwald/maxminddb-golang v1.8.0 // indirect
github.com/paulmach/orb v0.4.0 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
diff --git a/go.sum b/go.sum
index cb78c633..3f63adb2 100644
--- a/go.sum
+++ b/go.sum
@@ -77,6 +77,12 @@ github.com/ClickHouse/clickhouse-go/v2 v2.0.12 h1:Nbl/NZwoM6LGJm7smNBgvtdr/rxjlI
github.com/ClickHouse/clickhouse-go/v2 v2.0.12/go.mod h1:u4RoNQLLM2W6hNSPYrIESLJqaWSInZVmfM+MlaAhXcg=
github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
+github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
+github.com/Masterminds/sprig v2.22.0+incompatible h1:z4yfnGrZ7netVz+0EDJ0Wi+5VZCSYp4Z0m2dk6cEM60=
+github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o=
github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA=
github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA=
github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw=
@@ -111,6 +117,8 @@ github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d/go.mod h1:3eOhrU
github.com/StackExchange/wmi v1.2.1 h1:VIkavFPXSjcnS+O8yTq7NI32k0R5Aj+v39y29VYDOSA=
github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
+github.com/alecthomas/chroma v0.10.0 h1:7XDcGkCQopCNKjZHfYrNLraA+M7e0fMiJ/Mfikbfjek=
+github.com/alecthomas/chroma v0.10.0/go.mod h1:jtJATyUxlIORhUOFNA9NZDWGAQ8wpxQQqNSB4rjA/1s=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -337,6 +345,8 @@ github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2 h1:tdlZCpZ/P9DhczC
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dhui/dktest v0.3.7/go.mod h1:nYMOkafiA07WchSwKnKFUSbGMb2hMm5DrCGiXYG6gwM=
+github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
+github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.7.1-0.20190205005809-0d3efadf0154+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
@@ -633,6 +643,8 @@ github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw=
+github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
@@ -640,6 +652,7 @@ github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ
github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA=
github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/imdario/mergo v0.3.12 h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=
github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
@@ -816,6 +829,8 @@ github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJys
github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
+github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
+github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
@@ -824,6 +839,8 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
+github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
+github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mkevac/debugcharts v0.0.0-20191222103121-ae1c48aa8615/go.mod h1:Ad7oeElCZqA1Ufj0U9/liOF4BtVepxRcTvr2ey7zTvM=
github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A=
@@ -1104,6 +1121,10 @@ github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.0/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+github.com/yuin/goldmark v1.4.5 h1:4OEQwtW2uLXjEdgnGM3Vg652Pq37X7NOIRzFWb3BzIc=
+github.com/yuin/goldmark v1.4.5/go.mod h1:rmuwmfZ0+bvzB24eSC//bk1R1Zp3hM0OXYv/G2LIilg=
+github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594 h1:yHfZyN55+5dp1wG7wDKv8HQ044moxkyGq12KFFMFDxg=
+github.com/yuin/goldmark-highlighting v0.0.0-20220208100518-594be1970594/go.mod h1:U9ihbh+1ZN7fR5Se3daSPoz1CGF9IYtSvWwVQtnzGHU=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
diff --git a/mkdocs.yml b/mkdocs.yml
deleted file mode 100644
index c86c417c..00000000
--- a/mkdocs.yml
+++ /dev/null
@@ -1,66 +0,0 @@
----
-site_name: Akvorado
-site_dir: web/data
-strict: true
-use_directory_urls: false
-theme:
- name: material
- language: en
- features:
- - content.code.annotate
- - navigation.indexes
- - navigation.sections
- - navigation.tabs
- - navigation.top
- - navigation.tracking
- - search.highlight
- - search.share
- - search.suggest
- - toc.integrate
- palette:
- - scheme: default
- primary: white
- accent: "#008affff"
- toggle:
- icon: material/toggle-switch-off-outline
- name: Switch to dark mode
- - scheme: slate
- primary: black
- accent: "#008affff"
- toggle:
- icon: material/toggle-switch
- name: Switch to light mode
- font:
- text: Montserrat
- code: Roboto Mono
- logo: assets/images/akvorado.svg
-extra_css:
- - assets/stylesheets/extra.css
-
-plugins:
- - search
-
-markdown_extensions:
- - abbr
- - admonition
- - footnotes
- - tables
- - attr_list
- - md_in_html
- - toc:
- permalink: true
- - pymdownx.snippets
- - pymdownx.highlight
- - pymdownx.superfences
-
-nav:
- - Home: index.md
- - Documentation:
- Design: design.md
- Installation: install.md
- Configuration: configuration.md
- Usage: usage.md
- Integration: integration.md
- Troubleshooting: troubleshooting.md
- Internals: internals.md
-
diff --git a/web/assets.go b/web/assets.go
new file mode 100644
index 00000000..e2ca75eb
--- /dev/null
+++ b/web/assets.go
@@ -0,0 +1,41 @@
+package web
+
+import (
+	"embed"
+	"fmt"
+	"net/http"
+	"path"
+	"strings"
+)
+
+//go:embed data/assets/generated data/assets/images
+var embeddedAssets embed.FS
+
+// assetsHandlerFunc serves static assets under /assets/, trying the
+// exact path first, then the generated/ subdirectory.
+func (c *Component) assetsHandlerFunc(w http.ResponseWriter, req *http.Request) {
+	assets := c.embedOrLiveFS(embeddedAssets, "data/assets")
+	rpath := strings.TrimPrefix(req.URL.Path, "/assets/")
+	rpath = strings.Trim(rpath, "/")
+
+	for _, p := range []string{
+		rpath,
+		fmt.Sprintf("generated/%s", rpath),
+	} {
+		f, err := http.FS(assets).Open(p)
+		if err != nil {
+			// Not found (or unreadable): try the next candidate.
+			continue
+		}
+		st, err := f.Stat()
+		if err != nil || st.IsDir() {
+			f.Close()
+			continue
+		}
+		http.ServeContent(w, req, path.Base(rpath), st.ModTime(), f)
+		f.Close()
+		return
+	}
+
+	http.Error(w, "Asset not found.", http.StatusNotFound)
+}
diff --git a/web/assets_test.go b/web/assets_test.go
new file mode 100644
index 00000000..a950e7bb
--- /dev/null
+++ b/web/assets_test.go
@@ -0,0 +1,43 @@
+package web
+
+import (
+ "fmt"
+ netHTTP "net/http"
+ "testing"
+
+ "akvorado/http"
+ "akvorado/reporter"
+)
+
+func TestServeAssets(t *testing.T) {
+ for _, live := range []bool{false, true} {
+ for _, f := range []string{"images/akvorado.svg", "javascript/bootstrap.bundle.min.js"} {
+ var name string
+ switch live {
+ case true:
+ name = fmt.Sprintf("livefs-%s", f)
+ case false:
+ name = fmt.Sprintf("embeddedfs-%s", f)
+ }
+ t.Run(name, func(t *testing.T) {
+ r := reporter.NewMock(t)
+ h := http.NewMock(t, r)
+ _, err := New(r, Configuration{
+ ServeLiveFS: live,
+ }, Dependencies{HTTP: h})
+ if err != nil {
+ t.Fatalf("New() error:\n%+v", err)
+ }
+
+ resp, err := netHTTP.Get(fmt.Sprintf("http://%s/assets/%s", h.Address, f))
+ if err != nil {
+ t.Fatalf("GET /assets/%s:\n%+v", f, err)
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != 200 {
+ t.Errorf("GET /assets/%s: got status code %d, not 200", f, resp.StatusCode)
+ }
+ })
+ }
+ }
+}
diff --git a/web/data/assets/images/akvorado.svg b/web/data/assets/images/akvorado.svg
new file mode 100644
index 00000000..39e5eb22
--- /dev/null
+++ b/web/data/assets/images/akvorado.svg
@@ -0,0 +1,440 @@
+
+
+
+
diff --git a/web/data/assets/images/design.svg b/web/data/assets/images/design.svg
new file mode 100644
index 00000000..418d3671
--- /dev/null
+++ b/web/data/assets/images/design.svg
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/web/data/assets/stylesheets/akvorado.css b/web/data/assets/stylesheets/akvorado.css
new file mode 100644
index 00000000..560ffd33
--- /dev/null
+++ b/web/data/assets/stylesheets/akvorado.css
@@ -0,0 +1,10 @@
+:root {
+ --bs-primary: #008aff;
+ --bs-primary-rgb: 0,138,255;
+ --bs-secondary: #48092f;
+ --bs-secondary-rgb: 72,9,47;
+}
+
+.ak-navbar {
+ background-color: #e3f2fd;
+}
diff --git a/web/data/assets/stylesheets/docs.css b/web/data/assets/stylesheets/docs.css
new file mode 100644
index 00000000..6350d6c4
--- /dev/null
+++ b/web/data/assets/stylesheets/docs.css
@@ -0,0 +1,10 @@
+.ak-docs-markdown p > img {
+ margin: 0 auto;
+ display: block;
+ max-width: 100%;
+}
+
+.ak-docs-toc ul {
+ padding-left: 0;
+ list-style: none;
+}
diff --git a/web/data/docs/00-intro.md b/web/data/docs/00-intro.md
new file mode 100644
index 00000000..ad8d0774
--- /dev/null
+++ b/web/data/docs/00-intro.md
@@ -0,0 +1,46 @@
+
+
+# Introduction
+
+*Akvorado*[^name] is a flow collector, hydrater and exporter. It
+receives flows, adds some data like interface names and countries, and
+exports them to Kafka.
+
+[^name]: [Akvorado][] means "water wheel" in Esperanto.
+
+[Akvorado]: https://eo.wikipedia.org/wiki/Akvorado
+
+## Big picture
+
+The general design of *Akvorado* is the following:
+
+- The exporters send Netflow and IPFIX flows to Akvorado. They don't
+  need to be declared, as Akvorado accepts flows from anyone.
+- The received flows are decoded and hydrated with additional
+ information:
+ - source and destination countries (GeoIP database)
+ - source and destination AS numbers (GeoIP database)
+ - source and destination interface names, descriptions and speeds (SNMP)
+- The SNMP poller queries the exporters for host names, interface
+ names, interface descriptions and interface speeds. This information
+ is cached and updated from time to time.
+- Once a flow is hydrated, it is transformed into a binary
+ representation using *protocol buffers* and sent to Kafka.
+
+The remaining steps are outside of *Akvorado* control:
+
+- ClickHouse subscribes to the Kafka topic to receive and store the
+ flows.
+- Grafana queries ClickHouse to build various dashboards.
+
+## Flow schema
+
+Flows sent to Kafka are encoded with a versioned schema, described in
+the `flow-*.proto` files. Any information that could change with time
+is embedded in the flow. This includes, for example, interface names
+and speeds. This ensures that older data are not processed using
+incorrect mappings.
+
+Each time the schema changes, we issue a new `flow-*.proto` file,
+update the schema version and a new Kafka topic will be used. This
+ensures we do not mix different schemas in a single topic.
diff --git a/docs/install.md b/web/data/docs/01-install.md
similarity index 100%
rename from docs/install.md
rename to web/data/docs/01-install.md
diff --git a/docs/configuration.md b/web/data/docs/02-configuration.md
similarity index 89%
rename from docs/configuration.md
rename to web/data/docs/02-configuration.md
index ae8007b0..70a13382 100644
--- a/docs/configuration.md
+++ b/web/data/docs/02-configuration.md
@@ -1,21 +1,8 @@
# Configuration
-*Akvorado* can be configured through a YAML file. Each aspect is
-configured through a different section:
-
-- `reporting`: [Log and metric reporting](#reporting)
-- `http`: [Builtin HTTP server](#http)
-- `web`: [Web interface](#web)
-- `flow`: [Flow ingestion](#flow)
-- `snmp`: [SNMP poller](#snmp)
-- `geoip`: [GeoIP database](#geoip)
-- `kafka`: [Kafka broker](#kafka)
-- `clickhouse`: [ClickHouse helper](#clickhouse)
-- `core`: [Core](#core)
-
-You can get the default configuration with `./akvorado --dump
---check`. Durations can be written in seconds or using strings like
-`10h20m`.
+*Akvorado* can be configured through a YAML file. You can get the
+default configuration with `./akvorado --dump --check`. Durations can
+be written in seconds or using strings like `10h20m`.
It is also possible to override configuration settings using
environment variables. You need to remove any `-` from key names and
@@ -40,43 +27,14 @@ AKVORADO_KAFKA_TOPICCONFIGURATION_NUMPARTITIONS=1
AKVORADO_KAFKA_BROKERS=192.0.2.1:9092,192.0.2.2:9092
```
-## Reporting
-
-Reporting encompasses logging and metrics. Currently, as *Akvorado* is
-expected to be run inside Docker, logging is done on the standard
-output and is not configurable. As for metrics, they are reported by
-the HTTP component on the `/api/v0/metrics` endpoint and there is
-nothing to configure either.
-
-## HTTP
-
-The builtin HTTP server serves various pages. Its configuration
-supports only the `listen` key to specify the address and port to
-listen. For example:
-
-```yaml
-http:
- listen: 0.0.0.0:8000
-```
-
-## Web
-
-The web interface presents the landing page of *Akvorado*. It also
-embeds the documentation. It accepts only the following key:
-
-- `grafanaurl` to specify the URL to Grafana and exposes it as
- [`/grafana`](/grafana).
-
## Flow
-The flow component handles flow ingestion. It supports the following
-configuration keys:
+The flow component handles incoming flows. It only accepts the
+`inputs` key to define the list of inputs to receive incoming flows.
-- `inputs` to specify the list of inputs
-
-Each input should define a `type` and `decoder`. For `decoder`, only
-`netflow` is currently supported. As for the `type`, both `udp` and
-`file` are supported.
+Each input has a `type` and a `decoder`. For `decoder`, only `netflow`
+is currently supported. As for the `type`, both `udp` and `file` are
+supported.
For the UDP input, the supported keys are `listen` to set the
listening endpoint, `workers` to set the number of workers to listen
@@ -109,46 +67,8 @@ flow:
workers: 2
```
-## SNMP
-
-Flows only include interface indexes. To associate them with an
-interface name and description, SNMP is used to poll the exporter
-sending each flows. A cache is maintained to avoid polling
-continuously the exporters. The following keys are accepted:
-
-- `cache-duration` tells how much time to keep data in the cache
-- `cache-refresh` tells how much time to wait before updating an entry
- by polling it
-- `cache-check-interval` tells how often to check if cached data is
- about to expire or need an update
-- `cache-persist-file` tells where to store cached data on shutdown and
- read them back on startup
-- `default-community` tells which community to use when polling exporters
-- `communities` is a map from a exporter IP address to the community to
- use for a exporter, overriding the default value set above,
-- `poller-retries` is the number of retries on unsuccessful SNMP requests.
-- `poller-timeout` tells how much time should the poller wait for an answer.
-- `workers` tell how many workers to spawn to handle SNMP polling.
-
-As flows missing interface information are discarded, persisting the
-cache is useful to quickly be able to handle incoming flows. By
-default, no persistent cache is configured.
-
-## GeoIP
-
-The GeoIP component adds source and destination country, as well as
-the AS number of the source and destination IP if they are not present
-in the received flows. It needs two databases using the [MaxMind DB
-file format][], one for AS numbers, one for countries. If no database
-is provided, the component is inactive. It accepts the following keys:
-
-- `asn-database` tells the path to the ASN database
-- `country-database` tells the path to the country database
-
-[MaxMind DB file format]: https://maxmind.github.io/MaxMind-DB/
-
-If the files are updated while *Akvorado* is running, they are
-automatically refreshed.
+Without configuration, *Akvorado* will listen for incoming
+Netflow/IPFIX flows on port 2055.
## Kafka
@@ -207,26 +127,13 @@ kafka:
cleanup.policy: delete
```
-## ClickHouse
-
-The ClickHouse component exposes some useful HTTP endpoints to
-configure a ClickHouse database. Optionally, it will also provision
-and keep up-to-date a ClickHouse database. In this case, the following
-keys should be provided:
-
- - `servers` defines the list of ClickHouse servers to connect to
- - `username` is the username to use for authentication
- - `password` is the password to use for authentication
- - `database` defines the database to use to create tables
- - `akvorado-url` defines the URL of Akvorado to be used by Clickhouse (autodetection when not specified)
-
## Core
-The core orchestrates the remaining components. It receives the flows
-from the flow component, add some information using the GeoIP
-databases and the SNMP poller, and push the resulting flow to Kafka.
+The core component adds some information using the GeoIP databases and
+the SNMP poller, and pushes the resulting flow to Kafka. It is also able
+to classify exporters and interfaces into groups.
-The following keys are accepted:
+The following configuration keys are accepted:
- `workers` key define how many workers should be spawned to process
incoming flows
@@ -287,3 +194,85 @@ ClassifyProviderRegex(Interface.Description, "^Transit: ([^ ]+)", "$1")
[expr]: https://github.com/antonmedv/expr/blob/master/docs/Language-Definition.md
[from Go]: https://pkg.go.dev/regexp#Regexp.Expand
+
+## GeoIP
+
+The GeoIP component adds source and destination country, as well as
+the AS number of the source and destination IP if they are not present
+in the received flows. It needs two databases using the [MaxMind DB
+file format][], one for AS numbers, one for countries. If no database
+is provided, the component is inactive. It accepts the following keys:
+
+- `asn-database` tells the path to the ASN database
+- `country-database` tells the path to the country database
+
+[MaxMind DB file format]: https://maxmind.github.io/MaxMind-DB/
+
+If the files are updated while *Akvorado* is running, they are
+automatically refreshed.
+
+## SNMP
+
+Flows only include interface indexes. To associate them with an
+interface name and description, SNMP is used to poll the exporter
+sending each flow. A cache is maintained to avoid polling
+continuously the exporters. The following keys are accepted:
+
+- `cache-duration` tells how much time to keep data in the cache
+- `cache-refresh` tells how much time to wait before updating an entry
+ by polling it
+- `cache-check-interval` tells how often to check if cached data is
+ about to expire or need an update
+- `cache-persist-file` tells where to store cached data on shutdown and
+ read them back on startup
+- `default-community` tells which community to use when polling exporters
+- `communities` is a map from an exporter IP address to the community to
+ use for an exporter, overriding the default value set above,
+- `poller-retries` is the number of retries on unsuccessful SNMP requests.
+- `poller-timeout` tells how much time should the poller wait for an answer.
+- `workers` tells how many workers to spawn to handle SNMP polling.
+
+As flows missing interface information are discarded, persisting the
+cache is useful to quickly be able to handle incoming flows. By
+default, no persistent cache is configured.
+
+## HTTP
+
+The builtin HTTP server serves various pages. Its configuration
+supports only the `listen` key to specify the address and port to
+listen. For example:
+
+```yaml
+http:
+ listen: 0.0.0.0:8000
+```
+
+## Web
+
+The web interface presents the landing page of *Akvorado*. It also
+embeds the documentation. It accepts only the following key:
+
+- `grafanaurl` to specify the URL to Grafana and expose it as
+ [`/grafana`](/grafana).
+
+
+## ClickHouse
+
+The ClickHouse component exposes some useful HTTP endpoints to
+configure a ClickHouse database. Optionally, it will also provision
+and keep up-to-date a ClickHouse database. In this case, the following
+keys should be provided:
+
+ - `servers` defines the list of ClickHouse servers to connect to
+ - `username` is the username to use for authentication
+ - `password` is the password to use for authentication
+ - `database` defines the database to use to create tables
+ - `akvorado-url` defines the URL of Akvorado to be used by Clickhouse (autodetection when not specified)
+
+## Reporting
+
+Reporting encompasses logging and metrics. Currently, as *Akvorado* is
+expected to be run inside Docker, logging is done on the standard
+output and is not configurable. As for metrics, they are reported by
+the HTTP component on the `/api/v0/metrics` endpoint and there is
+nothing to configure either.
diff --git a/web/data/docs/03-usage.md b/web/data/docs/03-usage.md
new file mode 100644
index 00000000..d9d411e7
--- /dev/null
+++ b/web/data/docs/03-usage.md
@@ -0,0 +1,45 @@
+# Usage
+
+*Akvorado* uses a subcommand system. Each subcommand comes with its
+own set of options. It is possible to get help using `akvorado
+--help`.
+
+## Starting Akvorado
+
+`akvorado serve` starts *Akvorado* itself, allowing it to receive and
+process flows. When started from a TTY, it will display logs in a
+fancy way. Without a TTY, logs are output formatted as JSON.
+
+The `--config` option allows providing a configuration file in YAML
+format. See the [configuration section](02-configuration.md) for more
+information on this file.
+
+The `--check` option will check if the provided configuration is
+correct and stop there. The `--dump` option will dump the parsed
+configuration, along with the default values. It should be combined
+with `--check` if you don't want *Akvorado* to start.
+
+## Exposed HTTP endpoints
+
+The embedded HTTP server contains the following endpoints:
+
+- [`/api/v0/metrics`](/api/v0/metrics): Prometheus metrics
+- [`/api/v0/version`](/api/v0/version): *Akvorado* version
+- [`/api/v0/healthcheck`](/api/v0/healthcheck): are we alive?
+- [`/api/v0/flows`](/api/v0/flows?limit=1): next available flows
+- [`/api/v0/schemas.json`](/api/v0/schemas.json): versioned list of protobuf schemas used to export flows
+- [`/api/v0/schema-X.proto`](/api/v0/schema-1.proto): protobuf schema used to export flows
+- `/api/v0/clickhouse`: various endpoints for [ClickHouse integration](04-integration.md#clickhouse)
+
+The [`/api/v0/flows`](/api/v0/flows?limit=1) endpoint continuously
+prints flows sent to Kafka (using [ndjson][]). It also accepts a
+`limit` argument to stop after emitting the specified number of flows.
+This endpoint should not be used for anything other than debugging: it
+can skip some flows and if there are several users, flows will be
+dispatched between them.
+
+[ndjson]: http://ndjson.org/
+
+## Other commands
+
+`akvorado version` displays the version.
diff --git a/docs/integration.md b/web/data/docs/04-integration.md
similarity index 90%
rename from docs/integration.md
rename to web/data/docs/04-integration.md
index 65631528..bcb15f1e 100644
--- a/docs/integration.md
+++ b/web/data/docs/04-integration.md
@@ -7,7 +7,7 @@ with ClickHouse and Grafana.
## Kafka
The Kafka component sends flows to Kafka. Its
-[configuration](configuration.md#kafka) mostly needs a topic name and a list
+[configuration](02-configuration.md#kafka) mostly needs a topic name and a list
of brokers. It is possible to let *Akvorado* manage the topic with the
appropriate settings (number of partitions, replication factor and
additional configuration entries). If the topic exists, *Akvorado*
@@ -30,7 +30,7 @@ ClickHouse can collect the data from Kafka. To help its configuration,
- `/api/v0/clickhouse/asns.csv` contains a CSV with the mapping
between AS numbers and organization names
-When [configured](configuration.md#clickhouse), it can also populate
+When [configured](02-configuration.md#clickhouse), it can also populate
the database with the appropriate tables and manages them. As a
prerequisite, the script contained in `/api/v0/clickhouse/init.sh`
should be executed. It is not possible for ClickHouse to fetch the
@@ -49,4 +49,4 @@ instances are still running an older version).
## Grafana
No integration is currently done for Grafana, except a reverse proxy
-configured in the [web section](configuration.md#web).
+configured in the [web section](02-configuration.md#web).
diff --git a/docs/troubleshooting.md b/web/data/docs/05-troubleshooting.md
similarity index 100%
rename from docs/troubleshooting.md
rename to web/data/docs/05-troubleshooting.md
diff --git a/docs/internals.md b/web/data/docs/06-internals.md
similarity index 83%
rename from docs/internals.md
rename to web/data/docs/06-internals.md
index fd8c5f26..2e558ebe 100644
--- a/docs/internals.md
+++ b/web/data/docs/06-internals.md
@@ -8,12 +8,14 @@ its state and its dependencies on other components.
[Component framework in Clojure]: https://github.com/stuartsierra/component
+
+
Each component features the following piece of code:
- A `Component` structure containing its state.
- A `Configuration` structure containing the configuration of the
component. It maps to a section of [Akvorado configuration
- file][configuration.md].
+ file](02-configuration.md).
- A `DefaultConfiguration` variable with the default values for the
configuration.
- A `New()` function instantiating the component. This method takes
@@ -151,7 +153,7 @@ the lifecycle of the HTTP server and to provide a method to add
handlers. The web component provides the web interface of *Akvorado*.
Currently, this is only the documentation. Other components may expose
some various endpoints. They are documented in the [usage
-section](usage.md).
+section](03-usage.md).
The daemon component handles the lifecycle of the whole application.
It watches for the various goroutines (through tombs, see below)
@@ -174,3 +176,32 @@ spawned by the other components and wait for signals to terminate. If
- [github.com/eapache/go-resiliency](https://github.com/eapache/go-resiliency)
implements several resiliency pattersn, including the breaker
pattern.
+
+## Future plans
+
+In the future, we may:
+
+- Add more information to the landing page, including some basic statistics.
+- Automatically build dashboards for Grafana.[^grafana]
+- Build dashboards with [D3.js][].[^d3js]
+- Buffer messages to disk instead of blocking (when sending to Kafka)
+ or dropping (when querying the SNMP poller). We could probably just
+ have a system service running tcpdump dumping packets to a directory
+ and use that as input. This would allow *Akvorado* to block from
+ end-to-end instead of trying to be realtime.
+- Collect routes by integrating GoBGP. This is low priority if we
+ consider information from Maxmind good enough for our use.
+
+[^grafana]: The templating system in Grafana is quite limited.
+ Notably, it is difficult to build different query depending on the
+ input fields. Grafana supports scripted dashboard, but it does not
+ seem to be possible to have a function build the query string.
+[^d3js]: There is a [gallery][] containing many interesting examples,
+ including [stacked area charts][], [small multiple charts][] and
+ [Sankey diagrams][].
+
+[D3.js]: https://d3js.org/
+[gallery]: https://www.d3-graph-gallery.com/
+[stacked area charts]: https://www.d3-graph-gallery.com/stackedarea.html
+[small multiple charts]: https://www.d3-graph-gallery.com/graph/area_smallmultiple.html
+[Sankey diagrams]: https://www.d3-graph-gallery.com/graph/sankey_basic.html
diff --git a/web/data/package.json b/web/data/package.json
new file mode 100644
index 00000000..8c4fdd9a
--- /dev/null
+++ b/web/data/package.json
@@ -0,0 +1,6 @@
+{
+ "dependencies": {
+ "@mdi/font": "6.x",
+ "bootstrap": "^5.1.3"
+ }
+}
diff --git a/web/data/templates/docs.html b/web/data/templates/docs.html
new file mode 100644
index 00000000..af0eb4c4
--- /dev/null
+++ b/web/data/templates/docs.html
@@ -0,0 +1,41 @@
+{{ define "title" }}Documentation{{ end }}
+{{ define "stylesheets" }}
+
+{{ end }}
+{{ define "content" }}
+
+
+
+
+
+
+ {{ .Markdown }}
+
+
+
+
+
+
+{{ end }}
diff --git a/web/data/templates/dummy.html b/web/data/templates/dummy.html
new file mode 100644
index 00000000..2e25255c
--- /dev/null
+++ b/web/data/templates/dummy.html
@@ -0,0 +1,4 @@
+{{ define "title" }}Dummy test{{ end }}
+{{ define "content" }}
+Hello there!
+{{ end }}
diff --git a/web/data/templates/layout/base.html b/web/data/templates/layout/base.html
new file mode 100644
index 00000000..d072c849
--- /dev/null
+++ b/web/data/templates/layout/base.html
@@ -0,0 +1,24 @@
+{{ define "base" }}
+
+
+
+
+
+
+
+ {{ block "stylesheets" . }}{{ end }}
+
+
+ {{ template "title" . }}
+
+
+ {{ template "navigation.html" . }}
+
+ {{ template "content" . }}
+
+
+
+ {{ block "scripts" . }}{{ end }}
+
+
+{{ end }}
diff --git a/web/data/templates/layout/navigation.html b/web/data/templates/layout/navigation.html
new file mode 100644
index 00000000..ea794582
--- /dev/null
+++ b/web/data/templates/layout/navigation.html
@@ -0,0 +1,29 @@
+
+
+
diff --git a/web/data/yarn.lock b/web/data/yarn.lock
new file mode 100644
index 00000000..962cb07f
--- /dev/null
+++ b/web/data/yarn.lock
@@ -0,0 +1,13 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@mdi/font@6.x":
+ version "6.6.96"
+ resolved "https://registry.yarnpkg.com/@mdi/font/-/font-6.6.96.tgz#4eee6faee5f44d3ec401d354fb95775cd6699575"
+ integrity sha512-FbcvG9z17hwZ7IwX5XeOR1UYGoLq+gTKq6XNPvJFuCpn599GdiPCJbAmmDBJb+jMYXjKYr0lCxfouWGxDA82sA==
+
+bootstrap@^5.1.3:
+ version "5.1.3"
+ resolved "https://registry.yarnpkg.com/bootstrap/-/bootstrap-5.1.3.tgz#ba081b0c130f810fa70900acbc1c6d3c28fa8f34"
+ integrity sha512-fcQztozJ8jToQWXxVuEyXWW+dSo8AiXWKwiSSrKWsRB/Qt+Ewwza+JWoLKiTuQLaEPhdNAJ7+Dosc9DOIqNy7Q==
diff --git a/web/docs.go b/web/docs.go
new file mode 100644
index 00000000..b49899cd
--- /dev/null
+++ b/web/docs.go
@@ -0,0 +1,190 @@
+package web
+
+import (
+ "bytes"
+ "embed"
+ "fmt"
+ "html/template"
+ "io/fs"
+ "io/ioutil"
+ "net/http"
+ "regexp"
+ "strings"
+
+ "github.com/yuin/goldmark"
+ highlighting "github.com/yuin/goldmark-highlighting"
+ "github.com/yuin/goldmark/ast"
+ "github.com/yuin/goldmark/extension"
+ "github.com/yuin/goldmark/parser"
+ "github.com/yuin/goldmark/text"
+ "github.com/yuin/goldmark/util"
+)
+
+var (
+ //go:embed data/docs
+ embeddedDocs embed.FS
+ internalLinkRegexp = regexp.MustCompile("^(([0-9]+)-([a-z]+).md)(#.*|$)")
+)
+
+// Header describes a document header.
+type Header struct {
+ Level int
+ ID string
+ Title string
+}
+
+// DocumentTOC describes the TOC of a document
+type DocumentTOC struct {
+ Name string
+ Headers []Header
+}
+
+type templateDocData struct {
+ templateBaseData
+ Markdown template.HTML
+ TOC []DocumentTOC
+}
+
+func (c *Component) docsHandlerFunc(w http.ResponseWriter, req *http.Request) {
+ docs := c.embedOrLiveFS(embeddedDocs, "data/docs")
+ rpath := strings.TrimPrefix(req.URL.Path, "/docs/")
+ rpath = strings.Trim(rpath, "/")
+
+ var markdown []byte
+ toc := []DocumentTOC{}
+
+ // Find right file and compute ToC
+ entries, err := fs.ReadDir(docs, ".")
+ if err != nil {
+ c.r.Err(err).Msg("unable to list documentation files")
+ http.Error(w, "Unable to get documentation files.", http.StatusInternalServerError)
+ return
+ }
+ for _, entry := range entries {
+ if entry.IsDir() {
+ continue
+ }
+ matches := internalLinkRegexp.FindStringSubmatch(entry.Name())
+ if matches == nil {
+ continue
+ }
+
+ f, err := http.FS(docs).Open(entry.Name())
+ if err != nil {
+ c.r.Err(err).Str("path", entry.Name()).Msg("unable to open documentation file")
+ continue
+ }
+
+ // Markdown rendering to build ToC
+ content, _ := ioutil.ReadAll(f)
+ f.Close()
+ if matches[3] == rpath {
+ // That's the one we need to do final rendering on.
+ markdown = content
+ }
+ tocLogger := &tocLogger{}
+ md := goldmark.New(
+ goldmark.WithParserOptions(
+ parser.WithAutoHeadingID(),
+ parser.WithASTTransformers(
+ util.Prioritized(tocLogger, 500),
+ ),
+ ),
+ )
+ buf := &bytes.Buffer{}
+ if err = md.Convert(content, buf); err != nil {
+ c.r.Err(err).Str("path", rpath).Msg("unable to render markdown document")
+ continue
+ }
+ toc = append(toc, DocumentTOC{
+ Name: matches[3],
+ Headers: tocLogger.headers,
+ })
+ }
+
+ if markdown == nil {
+ http.Error(w, "Document not found.", http.StatusNotFound)
+ return
+ }
+ md := goldmark.New(
+ goldmark.WithExtensions(
+ extension.Footnote,
+ extension.Typographer,
+ highlighting.Highlighting,
+ ),
+ goldmark.WithParserOptions(
+ parser.WithAutoHeadingID(),
+ parser.WithASTTransformers(
+ util.Prioritized(&internalLinkTransformer{}, 500),
+ ),
+ ),
+ )
+ buf := &bytes.Buffer{}
+ if err = md.Convert(markdown, buf); err != nil {
+ c.r.Err(err).Str("path", rpath).Msg("unable to render markdown document")
+ http.Error(w, "Unable to render document.", http.StatusInternalServerError)
+ return
+ }
+ w.Header().Set("Cache-Control", "max-age=300")
+ c.renderTemplate(w, "docs.html", templateDocData{
+ templateBaseData: templateBaseData{
+ RootPath: "..",
+ CurrentPath: req.URL.Path,
+ },
+ Markdown: template.HTML(buf.String()),
+ TOC: toc,
+ })
+}
+
+type internalLinkTransformer struct{}
+
+func (r *internalLinkTransformer) Transform(node *ast.Document, reader text.Reader, pc parser.Context) {
+ replaceLinks := func(n ast.Node, entering bool) (ast.WalkStatus, error) {
+ if !entering {
+ return ast.WalkContinue, nil
+ }
+ switch node := n.(type) {
+ case *ast.Link:
+ matches := internalLinkRegexp.FindStringSubmatch(string(node.Destination))
+ if matches != nil {
+ node.Destination = []byte(fmt.Sprintf("%s%s", matches[3], matches[4]))
+ }
+ }
+ return ast.WalkContinue, nil
+ }
+ ast.Walk(node, replaceLinks)
+}
+
+type tocLogger struct {
+ headers []Header
+}
+
+func (r *tocLogger) Transform(node *ast.Document, reader text.Reader, pc parser.Context) {
+ r.headers = []Header{}
+ logHeaders := func(n ast.Node, entering bool) (ast.WalkStatus, error) {
+ if !entering {
+ return ast.WalkContinue, nil
+ }
+ switch node := n.(type) {
+ case *ast.Heading:
+ id, ok := n.AttributeString("id")
+ if ok {
+ var title []byte
+ lastIndex := node.Lines().Len() - 1
+ if lastIndex > -1 {
+ lastLine := node.Lines().At(lastIndex)
+ title = lastLine.Value(reader.Source())
+ }
+ if title != nil {
+ r.headers = append(r.headers, Header{
+ ID: string(id.([]uint8)),
+ Level: node.Level,
+ Title: string(title),
+ })
+ }
+ }
+ }
+ return ast.WalkContinue, nil
+ }
+ ast.Walk(node, logHeaders)
+}
diff --git a/web/docs_test.go b/web/docs_test.go
new file mode 100644
index 00000000..25eb8652
--- /dev/null
+++ b/web/docs_test.go
@@ -0,0 +1,44 @@
+package web
+
+import (
+ "fmt"
+ "io/ioutil"
+ netHTTP "net/http"
+ "strings"
+ "testing"
+
+ "akvorado/http"
+ "akvorado/reporter"
+)
+
+func TestServeDocs(t *testing.T) {
+ for _, live := range []bool{false, true} {
+ name := "livefs"
+ if !live {
+ name = "embeddedfs"
+ }
+ t.Run(name, func(t *testing.T) {
+ r := reporter.NewMock(t)
+ h := http.NewMock(t, r)
+ _, err := New(r, Configuration{
+ ServeLiveFS: live,
+ }, Dependencies{HTTP: h})
+ if err != nil {
+ t.Fatalf("New() error:\n%+v", err)
+ }
+
+ resp, err := netHTTP.Get(fmt.Sprintf("http://%s/docs/usage", h.Address))
+ if err != nil {
+ t.Fatalf("GET /docs/usage:\n%+v", err)
+ }
+ defer resp.Body.Close()
+ if resp.StatusCode != 200 {
+ t.Errorf("GET /docs/usage: got status code %d, not 200", resp.StatusCode)
+ }
+ body, _ := ioutil.ReadAll(resp.Body)
+ if strings.Contains(string(body), "configuration.md") {
+ t.Errorf("GET /docs/usage: contains %q while it should not", "configuration.md")
+ }
+ })
+ }
+}
diff --git a/web/root.go b/web/root.go
index 132ad2d5..df4e521f 100644
--- a/web/root.go
+++ b/web/root.go
@@ -2,8 +2,8 @@
package web
import (
- "embed"
"fmt"
+ "html/template"
"io/fs"
"log"
netHTTP "net/http"
@@ -13,21 +13,24 @@ import (
"path"
"path/filepath"
"runtime"
+ "sync"
+
+ "github.com/rs/zerolog"
+ "gopkg.in/tomb.v2"
"akvorado/http"
"akvorado/reporter"
-
- "github.com/rs/zerolog"
)
-//go:embed data
-var rootSite embed.FS
-
// Component represents the web component.
type Component struct {
r *reporter.Reporter
d *Dependencies
+ t tomb.Tomb
config Configuration
+
+ templates map[string]*template.Template
+ templatesLock sync.RWMutex
}
// Dependencies define the dependencies of the web component.
@@ -42,20 +45,11 @@ func New(reporter *reporter.Reporter, config Configuration, dependencies Depende
d: &dependencies,
config: config,
}
-
- var data fs.FS
- if config.ServeLiveFS {
- _, src, _, _ := runtime.Caller(0)
- data = os.DirFS(filepath.Join(path.Dir(src), "data"))
- } else {
- var err error
- data, err = fs.Sub(rootSite, "data")
- if err != nil {
- return nil, fmt.Errorf("unable to get embedded website: %w", err)
- }
+ if err := c.loadTemplates(); err != nil {
+ return nil, err
}
- c.d.HTTP.AddHandler("/", netHTTP.FileServer(netHTTP.FS(data)))
+ // Grafana proxy
if c.config.GrafanaURL != "" {
// Provide a proxy for Grafana
url, err := url.Parse(config.GrafanaURL)
@@ -77,5 +71,49 @@ func New(reporter *reporter.Reporter, config Configuration, dependencies Depende
c.d.HTTP.AddHandler("/grafana/", proxyHandler)
}
+ c.d.HTTP.AddHandler("/docs/", netHTTP.HandlerFunc(c.docsHandlerFunc))
+ c.d.HTTP.AddHandler("/assets/", netHTTP.HandlerFunc(c.assetsHandlerFunc))
+
return &c, nil
}
+
+// Start starts the web component.
+func (c *Component) Start() error {
+ c.r.Info().Msg("starting web component")
+ if err := c.watchTemplates(); err != nil {
+ return err
+ }
+ c.t.Go(func() error {
+ select {
+ case <-c.t.Dying():
+ return nil
+ }
+ })
+ return nil
+}
+
+// Stop stops the web component.
+func (c *Component) Stop() error {
+ c.r.Info().Msg("stopping web component")
+ defer c.r.Info().Msg("web component stopped")
+ c.t.Kill(nil)
+ return c.t.Wait()
+}
+
+// embedOrLiveFS returns a subset of the provided embedded filesystem,
+// except if the component is configured to use the live filesystem.
+// Then, it returns the provided tree.
+func (c *Component) embedOrLiveFS(embed fs.FS, p string) fs.FS {
+ var fileSystem fs.FS
+ if c.config.ServeLiveFS {
+ _, src, _, _ := runtime.Caller(0)
+ fileSystem = os.DirFS(filepath.Join(path.Dir(src), p))
+ } else {
+ var err error
+ fileSystem, err = fs.Sub(embed, p)
+ if err != nil {
+ panic(err)
+ }
+ }
+ return fileSystem
+}
diff --git a/web/root_test.go b/web/root_test.go
index b61dd790..d76e9fcf 100644
--- a/web/root_test.go
+++ b/web/root_test.go
@@ -47,31 +47,3 @@ func TestProxy(t *testing.T) {
t.Errorf("GET /grafana/test (-got, +want):\n%s", diff)
}
}
-
-func TestStaticFiles(t *testing.T) {
- for _, live := range []bool{false, true} {
- name := "livefs"
- if !live {
- name = "embeddedfs"
- }
- t.Run(name, func(t *testing.T) {
- r := reporter.NewMock(t)
- h := http.NewMock(t, r)
- _, err := New(r, Configuration{
- ServeLiveFS: live,
- }, Dependencies{HTTP: h})
- if err != nil {
- t.Fatalf("New() error:\n%+v", err)
- }
-
- resp, err := netHTTP.Get(fmt.Sprintf("http://%s/install.html", h.Address))
- if err != nil {
- t.Fatalf("GET /install.html:\n%+v", err)
- }
- defer resp.Body.Close()
- if resp.StatusCode != 200 {
- t.Errorf("GET /install.html: got status code %d, not 200", resp.StatusCode)
- }
- })
- }
-}
diff --git a/web/templates.go b/web/templates.go
new file mode 100644
index 00000000..15891870
--- /dev/null
+++ b/web/templates.go
@@ -0,0 +1,153 @@
+package web
+
+import (
+ "akvorado/reporter"
+ "bytes"
+ "embed"
+ "fmt"
+ "html/template"
+ "io"
+ "io/fs"
+ "net/http"
+ "path"
+ "path/filepath"
+ "runtime"
+ "time"
+
+ "github.com/Masterminds/sprig"
+ "github.com/fsnotify/fsnotify"
+)
+
+//go:embed data/templates
+var embeddedTemplates embed.FS
+
+// baseData is the data to pass for all templates.
+type templateBaseData struct {
+ RootPath string
+ CurrentPath string
+}
+
+// loadTemplates reload the templates.
+func (c *Component) loadTemplates() error {
+ mainTemplate, err := template.
+ New("main").
+ Option("missingkey=error").
+ Funcs(sprig.FuncMap()).
+ Parse(`{{define "main" }}{{ template "base" . }}{{ end }}`)
+ if err != nil {
+ c.r.Err(err).Msg("unable to create main template")
+ return fmt.Errorf("unable to create main template: %w", err)
+ }
+
+ layoutFiles := c.embedOrLiveFS(embeddedTemplates, "data/templates/layout")
+ templateFiles := c.embedOrLiveFS(embeddedTemplates, "data/templates")
+ compiled := make(map[string]*template.Template)
+ entries, err := fs.ReadDir(templateFiles, ".")
+ if err != nil {
+ c.r.Err(err).Msg("unable to list template files")
+ return fmt.Errorf("unable to list template files: %w", err)
+ }
+ for _, tpl := range entries {
+ if tpl.IsDir() {
+ continue
+ }
+ template, err := mainTemplate.Clone()
+ if err != nil {
+ c.r.Err(err).Msg("unable to clone main template")
+ return fmt.Errorf("unable to clone main template: %w", err)
+ }
+ f, err := templateFiles.Open(tpl.Name())
+ if err != nil {
+ c.r.Err(err).Str("template", tpl.Name()).Msg("unable to open template")
+ return fmt.Errorf("unable to open template %q: %w", tpl.Name(), err)
+ }
+ content, err := io.ReadAll(f)
+ if err != nil {
+ f.Close()
+ c.r.Err(err).Str("template", tpl.Name()).Msg("unable to read template")
+ return fmt.Errorf("unable to read template %q: %w", tpl.Name(), err)
+ }
+ f.Close()
+ template, err = template.Parse(string(content))
+ if err != nil {
+ c.r.Err(err).Str("template", tpl.Name()).Msg("unable to parse template")
+ return fmt.Errorf("unable to parse template %q: %w", tpl.Name(), err)
+ }
+ template, err = template.ParseFS(layoutFiles, "*.html")
+ if err != nil {
+ c.r.Err(err).Msg("unable to parse layout templates")
+ return fmt.Errorf("unable to parse layout templates: %w", err)
+ }
+ compiled[tpl.Name()] = template
+ }
+ c.templatesLock.Lock()
+ c.templates = compiled
+ c.templatesLock.Unlock()
+ return nil
+}
+
+// renderTemplate render the specified template
+func (c *Component) renderTemplate(w http.ResponseWriter, name string, data interface{}) {
+ c.templatesLock.RLock()
+ tmpl, ok := c.templates[name]
+ c.templatesLock.RUnlock()
+ if !ok {
+ c.r.Error().Str("template", name).Msg("template not found")
+ http.Error(w, fmt.Sprintf("No template %q found.", name), http.StatusNotFound)
+ return
+ }
+
+ buf := &bytes.Buffer{}
+ if err := tmpl.Execute(buf, data); err != nil {
+ c.r.Err(err).Str("template", name).Msg("error while rendering template")
+ http.Error(w, fmt.Sprintf("Error while rendering %q.", name), http.StatusInternalServerError)
+ return
+ }
+ w.Header().Set("Content-Type", "text/html; charset=utf-8")
+ buf.WriteTo(w)
+}
+
+// watchTemplates monitor changes in template directories and reload them
+func (c *Component) watchTemplates() error {
+ if !c.config.ServeLiveFS {
+ return nil
+ }
+
+ watcher, err := fsnotify.NewWatcher()
+ if err != nil {
+ c.r.Err(err).Msg("cannot setup watcher for templates")
+ return fmt.Errorf("cannot setup watcher: %w", err)
+ }
+ for _, dir := range []string{"templates", "templates/layout"} {
+ _, base, _, _ := runtime.Caller(0)
+ dir = filepath.Join(path.Dir(base), "data", dir)
+ if err := watcher.Add(dir); err != nil {
+ c.r.Err(err).Str("directory", dir).Msg("cannot watch template directory")
+ return fmt.Errorf("cannot watch template directory %q: %w", dir, err)
+ }
+ }
+ c.t.Go(func() error {
+ defer watcher.Close()
+ errLogger := c.r.Sample(reporter.BurstSampler(10*time.Second, 1))
+ timer := time.NewTimer(100 * time.Millisecond)
+
+ for {
+ select {
+ case <-c.t.Dying():
+ return nil
+ case err := <-watcher.Errors:
+ errLogger.Err(err).Msg("error from watcher")
+ case event := <-watcher.Events:
+ if event.Op&(fsnotify.Write|fsnotify.Create) == 0 {
+ continue
+ }
+ timer.Stop()
+ timer.Reset(500 * time.Millisecond)
+ case <-timer.C:
+ c.r.Info().Msg("reload templates")
+ c.loadTemplates() // errors are ignored
+ }
+ }
+ })
+ return nil
+}
diff --git a/web/templates_test.go b/web/templates_test.go
new file mode 100644
index 00000000..2be69131
--- /dev/null
+++ b/web/templates_test.go
@@ -0,0 +1,56 @@
+package web
+
+import (
+ "net/http/httptest"
+ "strings"
+ "testing"
+ "time"
+
+ "akvorado/http"
+ "akvorado/reporter"
+)
+
+func TestTemplate(t *testing.T) {
+ for _, live := range []bool{false, true} {
+ name := "livefs"
+ if !live {
+ name = "embeddedfs"
+ }
+ t.Run(name, func(t *testing.T) {
+ r := reporter.NewMock(t)
+ c, err := New(r, Configuration{
+ ServeLiveFS: live,
+ }, Dependencies{HTTP: http.NewMock(t, r)})
+ if err != nil {
+ t.Fatalf("New() error:\n%+v", err)
+ }
+ if err := c.Start(); err != nil {
+ t.Fatalf("Start() error:\n%+v", err)
+ }
+ defer func() {
+ if err := c.Stop(); err != nil {
+ t.Fatalf("Stop() error:\n%+v", err)
+ }
+ }()
+
+ w := httptest.NewRecorder()
+ c.renderTemplate(w, "dummy.html", templateBaseData{
+ RootPath: ".",
+ })
+
+ if w.Code != 200 {
+ t.Errorf("renderTemplate() code was %d, expected 200", w.Code)
+ }
+ body := strings.TrimSpace(w.Body.String())
+ if !strings.HasPrefix(body, "") {
+ t.Errorf("renderTemplate() body should contain , got:\n%s",
+ body)
+ }
+
+ if live && !testing.Short() {
+ // Wait for refresh of templates to happen.
+ time.Sleep(200 * time.Millisecond)
+ }
+ })
+ }
+}