ELK: update kibana and the elastic beats to 5.4 (#26252)

* Add kibana5 and logstash5
* Upgrade the elastic beats to 5.4
* Make sure all elastic products use the same version
  (see elk5Version)
* Add a test for the ELK stack
Bas van Dijk 2017-06-13 22:36:08 +02:00 committed by Franz Pletz
parent 424dc0138d
commit 2444eab485
13 changed files with 362 additions and 192 deletions
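
For orientation (not part of the commit), here is a minimal sketch of how a NixOS configuration could opt into the 5.x stack using the new package attributes and the new services.logstash.extraSettings option introduced below; the concrete values are only illustrative:

  { pkgs, ... }: {
    services.elasticsearch = {
      enable = true;
      package = pkgs.elasticsearch5;
    };
    services.logstash = {
      enable = true;
      package = pkgs.logstash5;
      # Written to logstash.yml via the new extraSettings option.
      extraSettings = ''
        pipeline:
          batch:
            size: 125
            delay: 5
      '';
    };
    services.kibana = {
      enable = true;
      package = pkgs.kibana5;
      elasticsearch.url = "http://localhost:9200";
    };
  }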


@@ -4,10 +4,14 @@ with lib;
let
cfg = config.services.logstash;
atLeast54 = versionAtLeast (builtins.parseDrvName cfg.package.name).version "5.4";
pluginPath = lib.concatStringsSep ":" cfg.plugins;
havePluginPath = lib.length cfg.plugins > 0;
ops = lib.optionalString;
verbosityFlag = {
verbosityFlag =
if atLeast54
then "--log.level " + cfg.logLevel
else {
debug = "--debug";
info = "--verbose";
warn = ""; # intentionally empty
@@ -15,6 +19,31 @@ let
fatal = "--silent";
}."${cfg.logLevel}";
pluginsPath =
if atLeast54
then "--path.plugins ${pluginPath}"
else "--pluginpath ${pluginPath}";
logstashConf = pkgs.writeText "logstash.conf" ''
input {
${cfg.inputConfig}
}
filter {
${cfg.filterConfig}
}
output {
${cfg.outputConfig}
}
'';
logstashSettingsYml = pkgs.writeText "logstash.yml" cfg.extraSettings;
logstashSettingsDir = pkgs.runCommand "logstash-settings" {inherit logstashSettingsYml;} ''
mkdir -p $out
ln -s $logstashSettingsYml $out/logstash.yml
'';
in
{
@@ -45,6 +74,15 @@ in
description = "The paths to find other logstash plugins in.";
};
dataDir = mkOption {
type = types.str;
default = "/var/lib/logstash";
description = ''
A path to a directory writable by logstash that it uses to store data.
Plugins will also have access to this path.
'';
};
logLevel = mkOption {
type = types.enum [ "debug" "info" "warn" "error" "fatal" ];
default = "warn";
@@ -116,6 +154,19 @@ in
'';
};
extraSettings = mkOption {
type = types.lines;
default = "";
description = "Extra Logstash settings in YAML format.";
example = ''
pipeline:
batch:
size: 125
delay: 5
'';
};
};
};
@@ -123,31 +174,34 @@ in
###### implementation
config = mkIf cfg.enable {
assertions = [
{ assertion = atLeast54 -> !cfg.enableWeb;
message = ''
The logstash web interface is only available for versions older than 5.4.
So either set services.logstash.enableWeb = false,
or set services.logstash.package to an older logstash.
'';
}
];
systemd.services.logstash = with pkgs; {
description = "Logstash Daemon";
wantedBy = [ "multi-user.target" ];
environment = { JAVA_HOME = jre; };
path = [ pkgs.bash ];
serviceConfig = {
ExecStart =
"${cfg.package}/bin/logstash agent " +
"-w ${toString cfg.filterWorkers} " +
ops havePluginPath "--pluginpath ${pluginPath} " +
"${verbosityFlag} " +
"-f ${writeText "logstash.conf" ''
input {
${cfg.inputConfig}
}
filter {
${cfg.filterConfig}
}
output {
${cfg.outputConfig}
}
''} " +
ops cfg.enableWeb "-- web -a ${cfg.listenAddress} -p ${cfg.port}";
ExecStartPre = ''${pkgs.coreutils}/bin/mkdir -p "${cfg.dataDir}" ; ${pkgs.coreutils}/bin/chmod 700 "${cfg.dataDir}"'';
ExecStart = concatStringsSep " " (filter (s: stringLength s != 0) [
"${cfg.package}/bin/logstash"
(ops (!atLeast54) "agent")
"-w ${toString cfg.filterWorkers}"
(ops havePluginPath pluginsPath)
"${verbosityFlag}"
"-f ${logstashConf}"
(ops atLeast54 "--path.settings ${logstashSettingsDir}")
(ops atLeast54 "--path.data ${cfg.dataDir}")
(ops cfg.enableWeb "-- web -a ${cfg.listenAddress} -p ${cfg.port}")
]);
};
};
};
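
For a package at or above 5.4 the ExecStart assembled above therefore expands to roughly the following single command line (a sketch: store paths are abbreviated, and the default logLevel, default dataDir and an empty plugin list are assumed):

  …/bin/logstash \
    -w <filterWorkers> \
    --log.level warn \
    -f /nix/store/…-logstash.conf \
    --path.settings /nix/store/…-logstash-settings \
    --path.data /var/lib/logstash

For older packages the unit keeps the previous behaviour: the agent subcommand, the --pluginpath flag and the --debug/--verbose style verbosity flags.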


@@ -5,7 +5,11 @@ with lib;
let
cfg = config.services.kibana;
cfgFile = pkgs.writeText "kibana.json" (builtins.toJSON (
atLeast54 = versionAtLeast (builtins.parseDrvName cfg.package.name).version "5.4";
cfgFile = if atLeast54 then cfgFile5 else cfgFile4;
cfgFile4 = pkgs.writeText "kibana.json" (builtins.toJSON (
(filterAttrsRecursive (n: v: v != null) ({
host = cfg.listenAddress;
port = cfg.port;
@@ -36,6 +40,27 @@ let
];
} // cfg.extraConf)
)));
cfgFile5 = pkgs.writeText "kibana.json" (builtins.toJSON (
(filterAttrsRecursive (n: v: v != null) ({
server.host = cfg.listenAddress;
server.port = cfg.port;
server.ssl.certificate = cfg.cert;
server.ssl.key = cfg.key;
kibana.index = cfg.index;
kibana.defaultAppId = cfg.defaultAppId;
elasticsearch.url = cfg.elasticsearch.url;
elasticsearch.username = cfg.elasticsearch.username;
elasticsearch.password = cfg.elasticsearch.password;
elasticsearch.ssl.certificate = cfg.elasticsearch.cert;
elasticsearch.ssl.key = cfg.elasticsearch.key;
elasticsearch.ssl.certificateAuthorities = cfg.elasticsearch.certificateAuthorities;
} // cfg.extraConf)
)));
in {
options.services.kibana = {
enable = mkEnableOption "enable kibana service";
@@ -96,11 +121,29 @@ in {
};
ca = mkOption {
description = "CA file to auth against elasticsearch.";
description = ''
CA file to auth against elasticsearch.
It's recommended to use the <option>certificateAuthorities</option> option
when using kibana-5.4 or newer.
'';
default = null;
type = types.nullOr types.path;
};
certificateAuthorities = mkOption {
description = ''
CA files to auth against elasticsearch.
Please use the <option>ca</option> option when using kibana &lt; 5.4
because those old versions don't support setting multiple CAs.
This defaults to the singleton list [ca] when the <option>ca</option> option is defined.
'';
default = if isNull cfg.elasticsearch.ca then [] else [ca];
type = types.listOf types.path;
};
cert = mkOption {
description = "Certificate file to auth against elasticsearch.";
default = null;
@@ -118,6 +161,7 @@ in {
description = "Kibana package to use";
default = pkgs.kibana;
defaultText = "pkgs.kibana";
example = "pkgs.kibana5";
type = types.package;
};

nixos/tests/elk.nix Normal file

@@ -0,0 +1,95 @@
# Test the ELK stack: Elasticsearch, Logstash and Kibana.
import ./make-test.nix ({ pkgs, ...} :
let
esUrl = "http://localhost:9200";
in {
name = "ELK";
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = [ eelco chaoflow offline basvandijk ];
};
nodes = {
one =
{ config, pkgs, ... }: {
# Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
#
# OpenJDK 64-Bit Server VM warning:
# INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
# failed; error='Cannot allocate memory' (errno=12)
#
# There is insufficient memory for the Java Runtime Environment to continue.
# Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
#
# When setting this to 2500 I got "Kernel panic - not syncing: Out of
# memory: compulsory panic_on_oom is enabled" so let's give it even a
# bit more room:
virtualisation.memorySize = 3000;
# For querying JSON objects returned from elasticsearch and kibana.
environment.systemPackages = [ pkgs.jq ];
services = {
logstash = {
enable = true;
package = pkgs.logstash5;
inputConfig = ''
exec { command => "echo -n flowers" interval => 1 type => "test" }
exec { command => "echo -n dragons" interval => 1 type => "test" }
'';
filterConfig = ''
if [message] =~ /dragons/ {
drop {}
}
'';
outputConfig = ''
file {
path => "/tmp/logstash.out"
codec => line { format => "%{message}" }
}
elasticsearch {
hosts => [ "${esUrl}" ]
}
'';
};
elasticsearch = {
enable = true;
package = pkgs.elasticsearch5;
};
kibana = {
enable = true;
package = pkgs.kibana5;
elasticsearch.url = esUrl;
};
};
};
};
testScript = ''
startAll;
$one->waitForUnit("elasticsearch.service");
# Continue as long as the status is not "red". The status is probably
# "yellow" instead of "green" because we are using a single elasticsearch
# node which elasticsearch considers risky.
#
# TODO: extend this test with multiple elasticsearch nodes and see if the status turns "green".
$one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red");
# Perform some simple logstash tests.
$one->waitForUnit("logstash.service");
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
$one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");
# See if kibana is healthy.
$one->waitForUnit("kibana.service");
$one->waitUntilSucceeds("curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green");
# See if logstash messages arrive in elasticsearch.
$one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"flowers\"}}}' | jq .hits.total | grep -v 0");
$one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"dragons\"}}}' | jq .hits.total | grep 0");
'';
})


@@ -1,41 +0,0 @@
# This test runs logstash and checks if messages flow and
# elasticsearch is started.
import ./make-test.nix ({ pkgs, ...} : {
name = "logstash";
meta = with pkgs.stdenv.lib.maintainers; {
maintainers = [ eelco chaoflow offline ];
};
nodes = {
one =
{ config, pkgs, ... }:
{
services = {
logstash = {
enable = true;
inputConfig = ''
exec { command => "echo flowers" interval => 1 type => "test" }
exec { command => "echo dragons" interval => 1 type => "test" }
'';
filterConfig = ''
if [message] =~ /dragons/ {
drop {}
}
'';
outputConfig = ''
stdout { codec => rubydebug }
'';
};
};
};
};
testScript = ''
startAll;
$one->waitForUnit("logstash.service");
$one->waitUntilSucceeds("journalctl -n 20 _SYSTEMD_UNIT=logstash.service | grep flowers");
$one->fail("journalctl -n 20 _SYSTEMD_UNIT=logstash.service | grep dragons");
'';
})


@@ -0,0 +1,45 @@
{ stdenv, makeWrapper, fetchurl, elk5Version, nodejs, coreutils, which }:
with stdenv.lib;
let
inherit (builtins) elemAt;
archOverrides = {
"i686" = "x86";
};
info = splitString "-" stdenv.system;
arch = (elemAt info 0);
elasticArch = archOverrides."${arch}" or arch;
plat = elemAt info 1;
shas = {
"x86_64-linux" = "1g5i81wq77fk6pyaq3rpfqs2m23xsbz2cndh3rg4b59ibg5qv0sq";
"i686-linux" = "0pxnpg3g8l6hy8qz404kbkk5rd9y65jrzd0y9j8wr5fd4pqs7vgv";
"x86_64-darwin" = "0pffl2hbck3s271jlzdibp5698djm5fdvj15w2knm815rs2kfbl5";
};
in stdenv.mkDerivation rec {
name = "kibana-${version}";
version = elk5Version;
src = fetchurl {
url = "https://artifacts.elastic.co/downloads/kibana/${name}-${plat}-${elasticArch}.tar.gz";
sha256 = shas."${stdenv.system}" or (throw "Unknown architecture");
};
buildInputs = [ makeWrapper ];
installPhase = ''
mkdir -p $out/libexec/kibana $out/bin
mv * $out/libexec/kibana/
rm -r $out/libexec/kibana/node
makeWrapper $out/libexec/kibana/bin/kibana $out/bin/kibana \
--prefix PATH : "${stdenv.lib.makeBinPath [ nodejs coreutils which ]}"
sed -i 's@NODE=.*@NODE=${nodejs}/bin/node@' $out/libexec/kibana/bin/kibana
'';
meta = {
description = "Visualize logs and time-stamped data";
homepage = http://www.elasticsearch.org/overview/kibana;
license = licenses.asl20;
maintainers = with maintainers; [ offline rickynils ];
platforms = with platforms; unix;
};
}


@@ -0,0 +1,42 @@
{ stdenv, fetchFromGitHub, elk5Version, buildGoPackage, libpcap }:
let beat = package : extraArgs : buildGoPackage (rec {
name = "${package}-${version}";
version = elk5Version;
src = fetchFromGitHub {
owner = "elastic";
repo = "beats";
rev = "v${version}";
sha256 = "1if16sqbhgxc7ahn9pak8av9rq9l8ldk44hr4w4g7lhxnqhmhsji";
};
goPackagePath = "github.com/elastic/beats";
subPackages = [ package ];
meta = with stdenv.lib; {
homepage = https://www.elastic.co/products/beats;
license = licenses.asl20;
maintainers = with maintainers; [ fadenb basvandijk ];
platforms = platforms.linux;
};
} // extraArgs);
in {
filebeat = beat "filebeat" {meta.description = "Lightweight shipper for logfiles";};
heartbeat = beat "heartbeat" {meta.description = "Lightweight shipper for uptime monitoring";};
metricbeat = beat "metricbeat" {meta.description = "Lightweight shipper for metrics";};
packetbeat = beat "packetbeat" {
buildInputs = [ libpcap ];
meta.description = "Network packet analyzer that ships data to Elasticsearch";
meta.longDescription = ''
Packetbeat is an open source network packet analyzer that ships the
data to Elasticsearch.
Think of it like a distributed real-time Wireshark with a lot more
analytics features. The Packetbeat shippers sniff the traffic between
your application processes, parse on the fly protocols like HTTP, MySQL,
PostgreSQL, Redis or Thrift and correlate the messages into transactions.
'';
};
}


@@ -1,25 +0,0 @@
{ stdenv, fetchFromGitHub, buildGoPackage }:
buildGoPackage rec {
name = "filebeat-${version}";
version = "5.2.2";
src = fetchFromGitHub {
owner = "elastic";
repo = "beats";
rev = "v${version}";
sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
};
goPackagePath = "github.com/elastic/beats";
subPackages = [ "filebeat" ];
meta = with stdenv.lib; {
description = "Lightweight shipper for logfiles";
homepage = https://www.elastic.co/products/beats;
license = licenses.asl20;
maintainers = [ maintainers.fadenb ];
platforms = platforms.linux;
};
}


@@ -1,25 +0,0 @@
{ stdenv, fetchFromGitHub, buildGoPackage }:
buildGoPackage rec {
name = "heartbeat-${version}";
version = "5.2.2";
src = fetchFromGitHub {
owner = "elastic";
repo = "beats";
rev = "v${version}";
sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
};
goPackagePath = "github.com/elastic/beats";
subPackages = [ "heartbeat" ];
meta = with stdenv.lib; {
description = "Lightweight shipper for uptime monitoring";
homepage = https://www.elastic.co/products/beats;
license = licenses.asl20;
maintainers = [ maintainers.fadenb ];
platforms = platforms.linux;
};
}


@@ -1,25 +0,0 @@
{ stdenv, fetchFromGitHub, buildGoPackage }:
buildGoPackage rec {
name = "metricbeat-${version}";
version = "5.2.2";
src = fetchFromGitHub {
owner = "elastic";
repo = "beats";
rev = "v${version}";
sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
};
goPackagePath = "github.com/elastic/beats";
subPackages = [ "metricbeat" ];
meta = with stdenv.lib; {
description = "Lightweight shipper for metrics";
homepage = https://www.elastic.co/products/beats;
license = licenses.asl20;
maintainers = [ maintainers.fadenb ];
platforms = platforms.linux;
};
}


@@ -1,36 +0,0 @@
{ stdenv, fetchFromGitHub, buildGoPackage, libpcap }:
buildGoPackage rec {
name = "packetbeat-${version}";
version = "5.2.2";
src = fetchFromGitHub {
owner = "elastic";
repo = "beats";
rev = "v${version}";
sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
};
goPackagePath = "github.com/elastic/beats";
subPackages = [ "packetbeat" ];
buildInputs = [ libpcap ];
meta = with stdenv.lib; {
description = "Network packet analyzer that ships data to Elasticsearch";
longDescription = ''
Packetbeat is an open source network packet analyzer that ships the
data to Elasticsearch.
Think of it like a distributed real-time Wireshark with a lot more
analytics features. The Packetbeat shippers sniff the traffic between
your application processes, parse on the fly protocols like HTTP, MySQL,
PostgreSQL, Redis or Thrift and correlate the messages into transactions.
'';
homepage = https://www.elastic.co/products/beats;
license = licenses.asl20;
maintainers = [ maintainers.fadenb ];
platforms = platforms.linux;
};
}


@@ -1,9 +1,9 @@
{ stdenv, fetchurl, makeWrapper, jre, utillinux, getopt }:
{ stdenv, fetchurl, elk5Version, makeWrapper, jre, utillinux, getopt }:
with stdenv.lib;
stdenv.mkDerivation rec {
version = "5.4.0";
version = elk5Version;
name = "elasticsearch-${version}";
src = fetchurl {


@@ -0,0 +1,39 @@
{ stdenv, fetchurl, elk5Version, makeWrapper, jre }:
stdenv.mkDerivation rec {
version = elk5Version;
name = "logstash-${version}";
src = fetchurl {
url = "https://artifacts.elastic.co/downloads/logstash/${name}.tar.gz";
sha256 = "019bhsnbbbg1a4g9jf02j3jb1xhhmrr3i7882s5l4pmkyn1d3gd1";
};
dontBuild = true;
dontPatchELF = true;
dontStrip = true;
dontPatchShebangs = true;
buildInputs = [
makeWrapper jre
];
installPhase = ''
mkdir -p $out
cp -r {Gemfile*,vendor,lib,bin,config,data,logstash-core,logstash-core-plugin-api} $out
wrapProgram $out/bin/logstash \
--set JAVA_HOME "${jre}"
wrapProgram $out/bin/logstash-plugin \
--set JAVA_HOME "${jre}"
'';
meta = with stdenv.lib; {
description = "Logstash is a data pipeline that helps you process logs and other event data from a variety of systems";
homepage = https://www.elastic.co/products/logstash;
license = licenses.asl20;
platforms = platforms.unix;
maintainers = [ maintainers.wjlroe maintainers.offline ];
};
}


@@ -667,6 +667,12 @@ with pkgs;
bchunk = callPackage ../tools/cd-dvd/bchunk { };
inherit (callPackages ../misc/logging/beats { })
filebeat
heartbeat
metricbeat
packetbeat;
bfr = callPackage ../tools/misc/bfr { };
bibtool = callPackage ../tools/misc/bibtool { };
@@ -968,8 +974,6 @@ with pkgs;
fastJson = callPackage ../development/libraries/fastjson { };
filebeat = callPackage ../misc/logging/filebeat { };
filebench = callPackage ../tools/misc/filebench { };
fsmon = callPackage ../tools/misc/fsmon { };
@@ -1018,8 +1022,6 @@ with pkgs;
gti = callPackage ../tools/misc/gti { };
heartbeat = callPackage ../misc/logging/heartbeat { };
heatseeker = callPackage ../tools/misc/heatseeker { };
hexio = callPackage ../development/tools/hexio { };
@@ -1050,8 +1052,6 @@ with pkgs;
meson = callPackage ../development/tools/build-managers/meson { };
metricbeat = callPackage ../misc/logging/metricbeat { };
mp3fs = callPackage ../tools/filesystems/mp3fs { };
mpdcron = callPackage ../tools/audio/mpdcron { };
@@ -1732,6 +1732,9 @@ with pkgs;
evemu = callPackage ../tools/system/evemu { };
# The latest version used by elasticsearch, logstash, kibana and the beats from elastic.
elk5Version = "5.4.0";
elasticsearch = callPackage ../servers/search/elasticsearch { };
elasticsearch2 = callPackage ../servers/search/elasticsearch/2.x.nix { };
elasticsearch5 = callPackage ../servers/search/elasticsearch/5.x.nix { };
@@ -2626,6 +2629,7 @@ with pkgs;
keyfuzz = callPackage ../tools/inputmethods/keyfuzz { };
kibana = callPackage ../development/tools/misc/kibana { };
kibana5 = callPackage ../development/tools/misc/kibana/5.x.nix { };
kismet = callPackage ../applications/networking/sniffers/kismet { };
@@ -2686,6 +2690,7 @@ with pkgs;
lockfileProgs = callPackage ../tools/misc/lockfile-progs { };
logstash = callPackage ../tools/misc/logstash { };
logstash5 = callPackage ../tools/misc/logstash/5.x.nix { };
logstash-contrib = callPackage ../tools/misc/logstash/contrib.nix { };
@@ -3500,8 +3505,6 @@ with pkgs;
nix = nixUnstable;
};
packetbeat = callPackage ../misc/logging/packetbeat { };
packetdrill = callPackage ../tools/networking/packetdrill { };
pakcs = callPackage ../development/compilers/pakcs {};