elk: add elasticsearch6, logstash6, kibana6 and the beats at v6.1.0
This change is backwards compatible since the ELK tools at version 5.x remain unchanged. The test suite now tests both ELK-5 and ELK-6.
commit 803077ef1c
parent 95913d2768
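For example, from a nixpkgs checkout the additions can be exercised roughly as follows (a sketch, not part of the commit; the test attribute path produced by callSubTests is assumed and may differ):

    # build the new 6.x packages introduced by this commit
    nix-build -A elasticsearch6 -A logstash6 -A kibana6 -A filebeat6

    # run one of the new ELK sub-tests (ELK-5 or ELK-6)
    nix-build nixos/release.nix -A tests.elk.ELK-6.x86_64-linux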
nixos/modules/services/search/elasticsearch.nix

@@ -6,6 +6,7 @@ let
   cfg = config.services.elasticsearch;

   es5 = builtins.compareVersions (builtins.parseDrvName cfg.package.name).version "5" >= 0;
+  es6 = builtins.compareVersions (builtins.parseDrvName cfg.package.name).version "6" >= 0;

   esConfig = ''
     network.host: ${cfg.listenAddress}
@@ -92,8 +93,6 @@ in {
       node.name: "elasticsearch"
       node.master: true
       node.data: false
-      index.number_of_shards: 5
-      index.number_of_replicas: 1
     '';
   };

@@ -165,7 +164,10 @@ in {
       path = [ pkgs.inetutils ];
       environment = {
         ES_HOME = cfg.dataDir;
-        ES_JAVA_OPTS = toString ([ "-Des.path.conf=${configDir}" ] ++ cfg.extraJavaOptions);
+        ES_JAVA_OPTS = toString ( optional (!es6) [ "-Des.path.conf=${configDir}" ]
+                                  ++ cfg.extraJavaOptions);
+      } // optionalAttrs es6 {
+        ES_PATH_CONF = configDir;
       };
       serviceConfig = {
         ExecStart = "${cfg.package}/bin/elasticsearch ${toString cfg.extraCmdLineOptions}";
nixos/release.nix

@@ -260,7 +260,7 @@ in rec {
   tests.etcd = hydraJob (import tests/etcd.nix { system = "x86_64-linux"; });
   tests.ec2-nixops = hydraJob (import tests/ec2.nix { system = "x86_64-linux"; }).boot-ec2-nixops;
   tests.ec2-config = hydraJob (import tests/ec2.nix { system = "x86_64-linux"; }).boot-ec2-config;
-  tests.elk = hydraJob (import tests/elk.nix { system = "x86_64-linux"; });
+  tests.elk = callSubTests tests/elk.nix { system = "x86_64-linux"; };
   tests.env = callTest tests/env.nix {};
   tests.ferm = callTest tests/ferm.nix {};
   tests.firefox = callTest tests/firefox.nix {};
nixos/tests/elk.nix

@@ -1,95 +1,107 @@
 # Test the ELK stack: Elasticsearch, Logstash and Kibana.

-import ./make-test.nix ({ pkgs, ...} :
+{ system ? builtins.currentSystem }:
+with import ../lib/testing.nix { inherit system; };
+with pkgs.lib;
 let
   esUrl = "http://localhost:9200";
-in {
-  name = "ELK";
-  meta = with pkgs.stdenv.lib.maintainers; {
-    maintainers = [ eelco chaoflow offline basvandijk ];
-  };
-
-  nodes = {
-    one =
-      { config, pkgs, ... }: {
-          # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
-          #
-          # OpenJDK 64-Bit Server VM warning:
-          #   INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
-          #   failed; error='Cannot allocate memory' (errno=12)
-          #
-          # There is insufficient memory for the Java Runtime Environment to continue.
-          # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
-          #
-          # When setting this to 2500 I got "Kernel panic - not syncing: Out of
-          # memory: compulsory panic_on_oom is enabled" so let's give it even a
-          # bit more room:
-          virtualisation.memorySize = 3000;
+  mkElkTest = name : elk : makeTest {
+    inherit name;
+    meta = with pkgs.stdenv.lib.maintainers; {
+      maintainers = [ eelco chaoflow offline basvandijk ];
+    };
+    nodes = {
+      one =
+        { config, pkgs, ... }: {
+            # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
+            #
+            # OpenJDK 64-Bit Server VM warning:
+            #   INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
+            #   failed; error='Cannot allocate memory' (errno=12)
+            #
+            # There is insufficient memory for the Java Runtime Environment to continue.
+            # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
+            #
+            # When setting this to 2500 I got "Kernel panic - not syncing: Out of
+            # memory: compulsory panic_on_oom is enabled" so let's give it even a
+            # bit more room:
+            virtualisation.memorySize = 3000;

-          # For querying JSON objects returned from elasticsearch and kibana.
-          environment.systemPackages = [ pkgs.jq ];
+            # For querying JSON objects returned from elasticsearch and kibana.
+            environment.systemPackages = [ pkgs.jq ];

-          services = {
-            logstash = {
-              enable = true;
-              package = pkgs.logstash5;
-              inputConfig = ''
-                exec { command => "echo -n flowers" interval => 1 type => "test" }
-                exec { command => "echo -n dragons" interval => 1 type => "test" }
-              '';
-              filterConfig = ''
-                if [message] =~ /dragons/ {
-                  drop {}
-                }
-              '';
-              outputConfig = ''
-                file {
-                  path => "/tmp/logstash.out"
-                  codec => line { format => "%{message}" }
-                }
-                elasticsearch {
-                  hosts => [ "${esUrl}" ]
-                }
-              '';
-            };
+            services = {
+              logstash = {
+                enable = true;
+                package = elk.logstash;
+                inputConfig = ''
+                  exec { command => "echo -n flowers" interval => 1 type => "test" }
+                  exec { command => "echo -n dragons" interval => 1 type => "test" }
+                '';
+                filterConfig = ''
+                  if [message] =~ /dragons/ {
+                    drop {}
+                  }
+                '';
+                outputConfig = ''
+                  file {
+                    path => "/tmp/logstash.out"
+                    codec => line { format => "%{message}" }
+                  }
+                  elasticsearch {
+                    hosts => [ "${esUrl}" ]
+                  }
+                '';
+              };

-            elasticsearch = {
-              enable = true;
-              package = pkgs.elasticsearch5;
-            };
+              elasticsearch = {
+                enable = true;
+                package = elk.elasticsearch;
+              };

-            kibana = {
-              enable = true;
-              package = pkgs.kibana5;
-              elasticsearch.url = esUrl;
+              kibana = {
+                enable = true;
+                package = elk.kibana;
+                elasticsearch.url = esUrl;
+              };
             };
           };
-        };
-    };
+      };

-  testScript = ''
-    startAll;
+    testScript = ''
+      startAll;

-    $one->waitForUnit("elasticsearch.service");
+      $one->waitForUnit("elasticsearch.service");

-    # Continue as long as the status is not "red". The status is probably
-    # "yellow" instead of "green" because we are using a single elasticsearch
-    # node which elasticsearch considers risky.
-    #
-    # TODO: extend this test with multiple elasticsearch nodes and see if the status turns "green".
-    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red");
+      # Continue as long as the status is not "red". The status is probably
+      # "yellow" instead of "green" because we are using a single elasticsearch
+      # node which elasticsearch considers risky.
+      #
+      # TODO: extend this test with multiple elasticsearch nodes and see if the status turns "green".
+      $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red");

-    # Perform some simple logstash tests.
-    $one->waitForUnit("logstash.service");
-    $one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
-    $one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");
+      # Perform some simple logstash tests.
+      $one->waitForUnit("logstash.service");
+      $one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
+      $one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");

-    # See if kibana is healthy.
-    $one->waitForUnit("kibana.service");
-    $one->waitUntilSucceeds("curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green");
+      # See if kibana is healthy.
+      $one->waitForUnit("kibana.service");
+      $one->waitUntilSucceeds("curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green");

-    # See if logstash messages arrive in elasticsearch.
-    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"flowers\"}}}' | jq .hits.total | grep -v 0");
-    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"dragons\"}}}' | jq .hits.total | grep 0");
-  '';
-})
+      # See if logstash messages arrive in elasticsearch.
+      $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"flowers\"}}}' | jq .hits.total | grep -v 0");
+      $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"dragons\"}}}' | jq .hits.total | grep 0");
+    '';
+  };
+in mapAttrs mkElkTest {
+  "ELK-5" = {
+    elasticsearch = pkgs.elasticsearch5;
+    logstash = pkgs.logstash5;
+    kibana = pkgs.kibana5;
+  };
+  "ELK-6" = {
+    elasticsearch = pkgs.elasticsearch6;
+    logstash = pkgs.logstash6;
+    kibana = pkgs.kibana6;
+  };
+}
pkgs/development/tools/misc/kibana/6.x.nix (new file, 40 lines)

@@ -0,0 +1,40 @@
{ stdenv, makeWrapper, fetchurl, elk6Version, nodejs, coreutils, which }:

with stdenv.lib;
let
  inherit (builtins) elemAt;
  info = splitString "-" stdenv.system;
  arch = elemAt info 0;
  plat = elemAt info 1;
  shas = {
    "x86_64-linux"  = "08lkjj9h4ij25b53bgdz825j2ccymlllijbhv9kw1q1liv2irr34";
    "x86_64-darwin" = "1iqzj01s9walj5arfdlw0dgbmrv6mjp64mch11rx5aybcafv4z9h";
  };
in stdenv.mkDerivation rec {
  name = "kibana-${version}";
  version = elk6Version;

  src = fetchurl {
    url = "https://artifacts.elastic.co/downloads/kibana/${name}-${plat}-${arch}.tar.gz";
    sha256 = shas."${stdenv.system}" or (throw "Unknown architecture");
  };

  buildInputs = [ makeWrapper ];

  installPhase = ''
    mkdir -p $out/libexec/kibana $out/bin
    mv * $out/libexec/kibana/
    rm -r $out/libexec/kibana/node
    makeWrapper $out/libexec/kibana/bin/kibana $out/bin/kibana \
      --prefix PATH : "${stdenv.lib.makeBinPath [ nodejs coreutils which ]}"
    sed -i 's@NODE=.*@NODE=${nodejs}/bin/node@' $out/libexec/kibana/bin/kibana
  '';

  meta = {
    description = "Visualize logs and time-stamped data";
    homepage = http://www.elasticsearch.org/overview/kibana;
    license = licenses.asl20;
    maintainers = with maintainers; [ offline rickynils basvandijk ];
    platforms = with platforms; unix;
  };
}
pkgs/misc/logging/beats/6.x.nix (new file, 42 lines)

@@ -0,0 +1,42 @@
{ stdenv, fetchFromGitHub, elk6Version, buildGoPackage, libpcap }:

let beat = package : extraArgs : buildGoPackage (rec {
  name = "${package}-${version}";
  version = elk6Version;

  src = fetchFromGitHub {
    owner = "elastic";
    repo = "beats";
    rev = "v${version}";
    sha256 = "0pp4in66byggcfmvf8yx0m1vra98cs77m7mbr45sdla4hinvaqar";
  };

  goPackagePath = "github.com/elastic/beats";

  subPackages = [ package ];

  meta = with stdenv.lib; {
    homepage = https://www.elastic.co/products/beats;
    license = licenses.asl20;
    maintainers = with maintainers; [ fadenb basvandijk ];
    platforms = platforms.linux;
  };
} // extraArgs);
in {
  filebeat   = beat "filebeat"   {meta.description = "Lightweight shipper for logfiles";};
  heartbeat  = beat "heartbeat"  {meta.description = "Lightweight shipper for uptime monitoring";};
  metricbeat = beat "metricbeat" {meta.description = "Lightweight shipper for metrics";};
  packetbeat = beat "packetbeat" {
    buildInputs = [ libpcap ];
    meta.description = "Network packet analyzer that ships data to Elasticsearch";
    meta.longDescription = ''
      Packetbeat is an open source network packet analyzer that ships the
      data to Elasticsearch.

      Think of it like a distributed real-time Wireshark with a lot more
      analytics features. The Packetbeat shippers sniff the traffic between
      your application processes, parse on the fly protocols like HTTP, MySQL,
      PostgreSQL, Redis or Thrift and correlate the messages into transactions.
    '';
  };
}
pkgs/servers/search/elasticsearch/6.x.nix (new file, 45 lines)

@@ -0,0 +1,45 @@
{ stdenv, fetchurl, elk6Version, makeWrapper, jre_headless, utillinux, getopt }:

with stdenv.lib;

stdenv.mkDerivation rec {
  version = elk6Version;
  name = "elasticsearch-${version}";

  src = fetchurl {
    url = "https://artifacts.elastic.co/downloads/elasticsearch/${name}.tar.gz";
    sha256 = "1mq8lnpv5y82a7d8vxn5np6hrg2pys22v85l5l9jynk3k0kgwyf8";
  };

  patches = [ ./es-home-6.x.patch ];

  postPatch = ''
    sed -i "s|ES_CLASSPATH=\"\$ES_HOME/lib/\*\"|ES_CLASSPATH=\"$out/lib/*\"|" ./bin/elasticsearch-env
  '';

  buildInputs = [ makeWrapper jre_headless ] ++
    (if (!stdenv.isDarwin) then [utillinux] else [getopt]);

  installPhase = ''
    mkdir -p $out
    cp -R bin config lib modules plugins $out

    chmod -x $out/bin/*.*

    wrapProgram $out/bin/elasticsearch \
      ${if (!stdenv.isDarwin)
        then ''--prefix PATH : "${utillinux}/bin/"''
        else ''--prefix PATH : "${getopt}/bin"''} \
      --set JAVA_HOME "${jre_headless}" \
      --set ES_JVM_OPTIONS "$out/config/jvm.options"

    wrapProgram $out/bin/elasticsearch-plugin --set JAVA_HOME "${jre_headless}"
  '';

  meta = {
    description = "Open Source, Distributed, RESTful Search Engine";
    license = licenses.asl20;
    platforms = platforms.unix;
    maintainers = with maintainers; [ apeschar basvandijk ];
  };
}
pkgs/servers/search/elasticsearch/es-home-6.x.patch (new file, 26 lines)

@@ -0,0 +1,26 @@
diff -Naur a/bin/elasticsearch-env b/bin/elasticsearch-env
--- a/bin/elasticsearch-env	2017-12-12 13:31:51.000000000 +0100
+++ b/bin/elasticsearch-env	2017-12-18 19:51:12.282809695 +0100
@@ -19,18 +19,10 @@
   fi
 done

-# determine Elasticsearch home; to do this, we strip from the path until we find
-# bin, and then strip bin (there is an assumption here that there is no nested
-# directory under bin also named bin)
-ES_HOME=`dirname "$SCRIPT"`
-
-# now make ES_HOME absolute
-ES_HOME=`cd "$ES_HOME"; pwd`
-
-while [ "`basename "$ES_HOME"`" != "bin" ]; do
-  ES_HOME=`dirname "$ES_HOME"`
-done
-ES_HOME=`dirname "$ES_HOME"`
+if [ -z "$ES_HOME" ]; then
+  echo "You must set the ES_HOME var" >&2
+  exit 1
+fi

 # now set the classpath
 ES_CLASSPATH="$ES_HOME/lib/*"
pkgs/tools/misc/logstash/6.x.nix (new file, 39 lines)

@@ -0,0 +1,39 @@
{ stdenv, fetchurl, elk6Version, makeWrapper, jre }:

stdenv.mkDerivation rec {
  version = elk6Version;
  name = "logstash-${version}";

  src = fetchurl {
    url = "https://artifacts.elastic.co/downloads/logstash/${name}.tar.gz";
    sha256 = "1s2w8d2siryg2wy8i9lwqbp4mjf1sv80lf3sllxwa2vqwsv6l64p";
  };

  dontBuild = true;
  dontPatchELF = true;
  dontStrip = true;
  dontPatchShebangs = true;

  buildInputs = [
    makeWrapper jre
  ];

  installPhase = ''
    mkdir -p $out
    cp -r {Gemfile*,modules,vendor,lib,bin,config,data,logstash-core,logstash-core-plugin-api} $out

    wrapProgram $out/bin/logstash \
      --set JAVA_HOME "${jre}"

    wrapProgram $out/bin/logstash-plugin \
      --set JAVA_HOME "${jre}"
  '';

  meta = with stdenv.lib; {
    description = "Logstash is a data pipeline that helps you process logs and other event data from a variety of systems";
    homepage = https://www.elastic.co/products/logstash;
    license = licenses.asl20;
    platforms = platforms.unix;
    maintainers = with maintainers; [ wjlroe offline basvandijk ];
  };
}
pkgs/top-level/all-packages.nix

@@ -741,12 +741,23 @@ with pkgs;

   bchunk = callPackage ../tools/cd-dvd/bchunk { };

-  inherit (callPackages ../misc/logging/beats { })
+  inherit (callPackages ../misc/logging/beats/5.x.nix { })
     filebeat
     heartbeat
     metricbeat
     packetbeat;

+  inherit (let beats6 = callPackages ../misc/logging/beats/6.x.nix { }; in {
+    filebeat6 = beats6.filebeat;
+    heartbeat6 = beats6.heartbeat;
+    metricbeat6 = beats6.metricbeat;
+    packetbeat6 = beats6.packetbeat;
+  })
+    filebeat6
+    heartbeat6
+    metricbeat6
+    packetbeat6;
+
   bfr = callPackage ../tools/misc/bfr { };

   bibtool = callPackage ../tools/misc/bibtool { };
@@ -1949,10 +1960,12 @@ with pkgs;

   # The latest version used by elasticsearch, logstash, kibana and the beats from elastic.
   elk5Version = "5.6.5";
+  elk6Version = "6.1.0";

   elasticsearch = callPackage ../servers/search/elasticsearch { };
   elasticsearch2 = callPackage ../servers/search/elasticsearch/2.x.nix { };
   elasticsearch5 = callPackage ../servers/search/elasticsearch/5.x.nix { };
+  elasticsearch6 = callPackage ../servers/search/elasticsearch/6.x.nix { };

   elasticsearchPlugins = recurseIntoAttrs (
     callPackage ../servers/search/elasticsearch/plugins.nix { }
@@ -2956,6 +2969,7 @@ with pkgs;

   kibana = callPackage ../development/tools/misc/kibana { };
   kibana5 = callPackage ../development/tools/misc/kibana/5.x.nix { };
+  kibana6 = callPackage ../development/tools/misc/kibana/6.x.nix { };

   kismet = callPackage ../applications/networking/sniffers/kismet { };

@@ -3022,6 +3036,7 @@ with pkgs;

   logstash = callPackage ../tools/misc/logstash { };
   logstash5 = callPackage ../tools/misc/logstash/5.x.nix { };
+  logstash6 = callPackage ../tools/misc/logstash/6.x.nix { };

   logstash-contrib = callPackage ../tools/misc/logstash/contrib.nix { };