mirror of https://github.com/telekom-security/tpotce.git (synced 2025-07-01 04:22:11 +00:00)

commit 6e072980a0 (parent 06c7454da6)
start on elk6.x

10 changed files with 655 additions and 12 deletions
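Since this commit splits the logstash image into 5.x and 6.x variants (Dockerfile.5x / Dockerfile.6x below), either variant can be built by pointing docker at the wanted file. A minimal sketch, assuming a standard Docker CLI; the local tags are illustrative (the compose files pin dtagdevsec/logstash:1804):

    # from docker/elk/logstash/
    docker build -f Dockerfile.6x -t logstash-6x-test .
    docker build -f Dockerfile.5x -t logstash-5x-test .
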
docker/elk/kibana/Dockerfile

@@ -14,8 +14,8 @@ RUN apk -U upgrade && \

 # Get and install packages
     cd /root/dist/ && \
     mkdir -p /usr/share/kibana/ && \
-    wget https://artifacts.elastic.co/downloads/kibana/kibana-5.6.8-linux-x86_64.tar.gz && \
-    tar xvfz kibana-5.6.8-linux-x86_64.tar.gz --strip-components=1 -C /usr/share/kibana/ && \
+    wget https://artifacts.elastic.co/downloads/kibana/kibana-6.2.4-linux-x86_64.tar.gz && \
+    tar xvfz kibana-6.2.4-linux-x86_64.tar.gz --strip-components=1 -C /usr/share/kibana/ && \

 # Kibana's bundled node does not work in alpine
     rm /usr/share/kibana/node/bin/node && \
@@ -25,14 +25,23 @@ RUN apk -U upgrade && \

 # Add and move files
     cd /root/dist/ && \
-    cp kibana.svg /usr/share/kibana/src/ui/public/images/kibana.svg && \
-    cp kibana.svg /usr/share/kibana/src/ui/public/icons/kibana.svg && \
+    cp test.svg /usr/share/kibana/src/ui/public/images/kibana.svg && \
+    cp test.svg /usr/share/kibana/src/ui/public/icons/kibana.svg && \
     cp elk.ico /usr/share/kibana/src/ui/public/assets/favicons/favicon.ico && \
     cp elk.ico /usr/share/kibana/src/ui/public/assets/favicons/favicon-16x16.png && \
     cp elk.ico /usr/share/kibana/src/ui/public/assets/favicons/favicon-32x32.png && \
     cp create_kibana_index.js /usr/share/kibana/src/core_plugins/elasticsearch/lib/ && \
     cd / && \

+# Setup plugins
+    cd /usr/share/kibana/plugins && \
+    wget https://github.com/dlumbrer/kbn_radar/releases/download/Kibana-6.X/kbn_radar.tar.gz && \
+    wget https://github.com/dlumbrer/kbn_network/releases/download/6.0.X-1/network_vis.tar.gz && \
+    tar xvfz kbn_radar.tar.gz && \
+    tar xvfz network_vis.tar.gz && \
+    rm *.tar.gz && \
+    # /usr/share/kibana/bin/kibana-plugin install https://github.com/johtani/analyze-api-ui-plugin/releases/download/6.2.4/analyze-api-ui-plugin-6.2.4.zip && \
+
 # Setup user, groups and configs
     sed -i 's/#server.basePath: ""/server.basePath: "\/kibana"/' /usr/share/kibana/config/kibana.yml && \
     sed -i 's/#kibana.defaultAppId: "discover"/kibana.defaultAppId: "dashboards"/' /usr/share/kibana/config/kibana.yml && \
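For reference, after the two sed edits above the relevant kibana.yml settings would read roughly as follows (a sketch of the expected result, not a dump of the actual file):

    server.basePath: "/kibana"
    kibana.defaultAppId: "dashboards"
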
docker/elk/kibana/dist/test.svg (vendored, new file, 12 lines, 5.8 KiB)
File diff suppressed because one or more lines are too long
docker/elk/logstash/Dockerfile

@@ -18,12 +18,12 @@ RUN apk -U upgrade && \

     git clone https://github.com/dtag-dev-sec/listbot /etc/listbot && \
     cd /root/dist/ && \
     mkdir -p /usr/share/logstash/ && \
-    wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.8.tar.gz && \
+    wget https://artifacts.elastic.co/downloads/logstash/logstash-6.2.4.tar.gz && \
     wget http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz && \
-    tar xvfz logstash-5.6.8.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
+    tar xvfz logstash-6.2.4.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
     /usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
     /usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
-    tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/ && \
+    tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/ && \

 # Add and move files
     cd /root/dist/ && \

@@ -31,7 +31,7 @@ RUN apk -U upgrade && \

     chmod u+x /usr/bin/update.sh && \
     mkdir -p /etc/logstash/conf.d && \
     cp logstash.conf /etc/logstash/conf.d/ && \
-    cp elasticsearch-template-es5x.json /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-7.4.2-java/lib/logstash/outputs/elasticsearch/ && \
+    cp elasticsearch-template-es6x.json /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-output-elasticsearch-9.1.1-java/lib/logstash/outputs/elasticsearch/ && \

 # Setup user, groups and configs
     addgroup -g 2000 logstash && \

@@ -48,5 +48,5 @@ RUN apk -U upgrade && \

 HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'

 # Start logstash
-USER logstash:logstash
-CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
+#USER logstash:logstash
+CMD update.sh && /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
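Worth noting on the last hunk: besides commenting out the USER switch, the new CMD drops exec. The two forms behave differently under docker stop; a minimal illustration, quoting the diff's own lines:

    # exec form: logstash replaces the shell, runs as PID 1, receives SIGTERM directly
    CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf

    # shell stays resident: signals hit the shell first, not logstash
    CMD update.sh && /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
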
docker/elk/logstash/Dockerfile.5x (new file, 52 lines)
@@ -0,0 +1,52 @@

FROM alpine

# Include dist
ADD dist/ /root/dist/

# Setup env and apt
RUN apk -U upgrade && \
    apk add bash \
            curl \
            git \
            libc6-compat \
            libzmq \
            openjdk8-jre \
            procps \
            wget && \

# Get and install packages
    git clone https://github.com/dtag-dev-sec/listbot /etc/listbot && \
    cd /root/dist/ && \
    mkdir -p /usr/share/logstash/ && \
    wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.8.tar.gz && \
    wget http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz && \
    tar xvfz logstash-5.6.8.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
    /usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
    /usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
    tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/ && \

# Add and move files
    cd /root/dist/ && \
    cp update.sh /usr/bin/ && \
    chmod u+x /usr/bin/update.sh && \
    mkdir -p /etc/logstash/conf.d && \
    cp logstash.conf /etc/logstash/conf.d/ && \
    cp elasticsearch-template-es5x.json /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-output-elasticsearch-7.4.2-java/lib/logstash/outputs/elasticsearch/ && \

# Setup user, groups and configs
    addgroup -g 2000 logstash && \
    adduser -S -H -s /bin/bash -u 2000 -D -g 2000 logstash && \
    chown -R logstash:logstash /usr/share/logstash && \
    chown -R logstash:logstash /etc/listbot && \
    chmod 755 /usr/bin/update.sh && \

# Clean up
    apk del --purge wget && \
    rm -rf /root/*

# Healthcheck
HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'

# Start logstash
USER logstash:logstash
CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
docker/elk/logstash/Dockerfile.6x (new file, 52 lines)
@@ -0,0 +1,52 @@

FROM alpine

# Include dist
ADD dist/ /root/dist/

# Setup env and apt
RUN apk -U upgrade && \
    apk add bash \
            curl \
            git \
            libc6-compat \
            libzmq \
            openjdk8-jre \
            procps \
            wget && \

# Get and install packages
    git clone https://github.com/dtag-dev-sec/listbot /etc/listbot && \
    cd /root/dist/ && \
    mkdir -p /usr/share/logstash/ && \
    wget https://artifacts.elastic.co/downloads/logstash/logstash-6.2.4.tar.gz && \
    wget http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz && \
    tar xvfz logstash-6.2.4.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
    /usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
    /usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
    tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/ && \

# Add and move files
    cd /root/dist/ && \
    cp update.sh /usr/bin/ && \
    chmod u+x /usr/bin/update.sh && \
    mkdir -p /etc/logstash/conf.d && \
    cp logstash.conf /etc/logstash/conf.d/ && \
    cp elasticsearch-template-es6x.json /usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-output-elasticsearch-9.1.1-java/lib/logstash/outputs/elasticsearch/ && \

# Setup user, groups and configs
    addgroup -g 2000 logstash && \
    adduser -S -H -s /bin/bash -u 2000 -D -g 2000 logstash && \
    chown -R logstash:logstash /usr/share/logstash && \
    chown -R logstash:logstash /etc/listbot && \
    chmod 755 /usr/bin/update.sh && \

# Clean up
    apk del --purge wget && \
    rm -rf /root/*

# Healthcheck
HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'

# Start logstash
#USER logstash:logstash
CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf
docker/elk/logstash/dist/elasticsearch-template-es6x.json (vendored, new file, 47 lines)
@@ -0,0 +1,47 @@

{
  "template" : "logstash-*",
  "version" : 60001,
  "settings" : {
    "index.refresh_interval" : "5s",
    "index.number_of_shards" : "1",
    "index.number_of_replicas" : "0"
  },
  "mappings" : {
    "_default_" : {
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text",
            "norms" : false
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text", "norms" : false,
            "fields" : {
              "keyword" : { "type": "keyword", "ignore_above": 256 }
            }
          }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date"},
        "@version": { "type": "keyword"},
        "geoip" : {
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip" },
            "location" : { "type" : "geo_point" },
            "latitude" : { "type" : "half_float" },
            "longitude" : { "type" : "half_float" }
          }
        }
      }
    }
  }
}
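The Dockerfile.6x above drops this file over the output plugin's bundled default template. To push it to a running ES 6.x node by hand instead, the standard legacy template API would do; a sketch, where the template name "logstash" is illustrative and the host comes from the config's elasticsearch:9200:

    curl -XPUT 'http://elasticsearch:9200/_template/logstash' \
         -H 'Content-Type: application/json' \
         -d @elasticsearch-template-es6x.json
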
docker/elk/logstash/dist/logstash.conf (vendored)

@@ -382,12 +382,12 @@ if "_grokparsefailure" in [tags] { drop {} }

   geoip {
     cache_size => 10000
     source => "src_ip"
-    database => "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/GeoLite2-City.mmdb"
+    database => "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"
   }
   geoip {
     cache_size => 10000
     source => "src_ip"
-    database => "/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/GeoLite2-ASN.mmdb"
+    database => "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-ASN.mmdb"
   }
   translate {
     refresh_interval => 86400

@@ -426,6 +426,7 @@ if "_grokparsefailure" in [tags] { drop {} }

 output {
   elasticsearch {
     hosts => ["elasticsearch:9200"]
+    document_type => "doc"
   }

 if [type] == "Suricata" {
docker/elk/logstash/dist/logstash.conf.6x (vendored, new file, 449 lines)
@@ -0,0 +1,449 @@

# Input section
input {

# Suricata
  file {
    path => ["/data/suricata/log/eve.json"]
    codec => json
    type => "Suricata"
  }

# P0f
  file {
    path => ["/data/p0f/log/p0f.json"]
    codec => json
    type => "P0f"
  }

# Conpot
  file {
    path => ["/data/conpot/log/*.json"]
    codec => json
    type => "ConPot"
  }

# Cowrie
  file {
    path => ["/data/cowrie/log/cowrie.json"]
    codec => json
    type => "Cowrie"
  }

# Dionaea
  file {
    path => ["/data/dionaea/log/dionaea.json"]
    codec => json
    type => "Dionaea"
  }

# Elasticpot
  file {
    path => ["/data/elasticpot/log/elasticpot.log"]
    codec => json
    type => "ElasticPot"
  }

# eMobility
  file {
    path => ["/data/emobility/log/centralsystemEWS.log"]
    type => "eMobility"
  }

# Glastopf
  file {
    path => ["/data/glastopf/log/glastopf.log"]
    type => "Glastopf"
  }

# Glutton
  file {
    path => ["/data/glutton/log/glutton.log"]
    codec => json
    type => "Glutton"
  }

# Heralding
  file {
    path => ["/data/heralding/log/auth.csv"]
    type => "Heralding"
  }

# Honeytrap
  file {
    path => ["/data/honeytrap/log/attackers.json"]
    codec => json
    type => "Honeytrap"
  }

# Mailoney
  file {
    path => ["/data/mailoney/log/commands.log"]
    type => "Mailoney"
  }

# Rdpy
  file {
    path => ["/data/rdpy/log/rdpy.log"]
    type => "Rdpy"
  }

# Host Syslog
  file {
    path => ["/data/host/log/auth.log"]
    codec => plain
    type => "Syslog"
  }

# Host NGINX
  file {
    path => ["/data/host/log/nginx/access.log"]
    codec => json
    type => "NGINX"
  }

# Vnclowpot
  file {
    path => ["/data/vnclowpot/log/vnclowpot.log"]
    type => "Vnclowpot"
  }
}

# Filter Section
filter {

# Suricata
  if [type] == "Suricata" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    translate {
      refresh_interval => 86400
      field => "[alert][signature_id]"
      destination => "[alert][cve_id]"
      dictionary_path => "/etc/listbot/cve.yaml"
    }
  }

# P0f
  if [type] == "P0f" {
    date {
      match => [ "timestamp", "yyyy'/'MM'/'dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    mutate {
      rename => {
        "server_port" => "dest_port"
        "server_ip" => "dest_ip"
        "client_port" => "src_port"
        "client_ip" => "src_ip"
      }
    }
  }

# Conpot
  if [type] == "ConPot" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }

# Cowrie
  if [type] == "Cowrie" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "dst_port" => "dest_port"
        "dst_ip" => "dest_ip"
      }
    }
  }

# Dionaea
  if [type] == "Dionaea" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "dst_port" => "dest_port"
        "dst_ip" => "dest_ip"
      }
      gsub => [
        "src_ip", "::ffff:", "",
        "dest_ip", "::ffff:", ""
      ]
    }
    if [credentials] {
      mutate {
        add_field => {
          "login.username" => "%{[credentials][username]}"
          "login.password" => "%{[credentials][password]}"
        }
        remove_field => "[credentials]"
      }
    }
  }

# ElasticPot
  if [type] == "ElasticPot" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }

# eMobility
  if [type] == "eMobility" {
    grok {
      match => [ "message", "\A%{IP:src_ip}\.%{POSINT:src_port:integer}\|%{IP:dest_ip}\.%{POSINT:dest_port:integer}:%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424SD}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{SYSLOG5424PRINTASCII}%{SPACE}%{URIPROTO:http_method}\|%{URIPATH:http_uri}\|%{TIMESTAMP_ISO8601:timestamp}" ]
    }
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }

# Glastopf
  if [type] == "Glastopf" {
    grok {
      match => [ "message", "\A%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{NOTSPACE}%{SPACE}%{IP:src_ip}%{SPACE}%{WORD}%{SPACE}%{URIPROTO:http_method}%{SPACE}%{NOTSPACE:http_uri}%{SPACE}%{NOTSPACE}%{SPACE}%{HOSTNAME}:%{NUMBER:dest_port:integer}" ]
    }
    date {
      match => [ "timestamp", "yyyy-MM-dd HH:mm:ss,SSS" ]
      remove_field => ["timestamp"]
    }
  }

# Glutton
  if [type] == "Glutton" {
    date {
      match => [ "ts", "UNIX" ]
      remove_field => ["ts"]
    }
  }

# Heralding
  if [type] == "Heralding" {
    csv {
      columns => ["timestamp","auth_id","session_id","src_ip","src_port","dest_ip","dest_port","proto","username","password"]
      separator => ","
    }
    date {
      match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSSSSS" ]
      remove_field => ["timestamp"]
    }
  }

# Honeytrap
  if [type] == "Honeytrap" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
    mutate {
      rename => {
        "[attack_connection][local_port]" => "dest_port"
        "[attack_connection][local_ip]" => "dest_ip"
        "[attack_connection][remote_port]" => "src_port"
        "[attack_connection][remote_ip]" => "src_ip"
      }
    }
  }

# Mailoney
  if [type] == "Mailoney" {
    grok {
      match => [ "message", "\A%{NAGIOSTIME}\[%{IPV4:src_ip}:%{INT:src_port:integer}] %{GREEDYDATA:smtp_input}" ]
    }
    mutate {
      add_field => {
        "dest_port" => "25"
      }
    }
    date {
      match => [ "nagios_epoch", "UNIX" ]
      remove_field => ["nagios_epoch"]
    }
  }

# Rdpy
  if [type] == "Rdpy" {
    grok {
      match => { "message" => [ "\A%{TIMESTAMP_ISO8601:timestamp},domain:%{CISCO_REASON:domain},username:%{CISCO_REASON:username},password:%{CISCO_REASON:password},hostname:%{GREEDYDATA:hostname}", "\A%{TIMESTAMP_ISO8601:timestamp},Connection from %{IPV4:src_ip}:%{INT:src_port:integer}" ] }
    }
    date {
      match => [ "timestamp", "ISO8601" ]
      remove_field => ["timestamp"]
    }
    mutate {
      add_field => {
        "dest_port" => "3389"
      }
    }
  }

# Syslog
  if [type] == "Syslog" {
    grok {
      match => {
        "message" => ["%{SYSLOGPAMSESSION}", "%{CRONLOG}", "%{SYSLOGLINE}"]
      }
      overwrite => "message"
    }
    date {
      match => [ "timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    date {
      match => ["timestamp8601", "ISO8601"]
      remove_field => ["timestamp8601"]
    }
    grok {
      match => { "message" => "Connection closed by %{IP:src_ip}" }
      add_tag => [ "ssh_connection_closed" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Received disconnect from %{IP:src_ip}" }
      add_tag => [ "ssh_connection_disconnect" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Failed password for invalid user %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2" }
      add_tag => [ "ssh_failed_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Did not receive identification string from %{IP:src_ip}" }
      add_tag => [ "ssh_no_id" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "User %{USERNAME:username} from %{IP:src_ip} not allowed because not listed in AllowUsers" }
      add_tag => [ "ssh_user_not_allowed" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "authentication failure; logname=%{USERNAME:logname} uid=%{BASE10NUM:uid} euid=%{BASE10NUM:euid} tty=%{TTY:tty} ruser=%{USERNAME:ruser} rhost=(?:%{HOSTNAME:remote_host}|\s*) user=%{USERNAME:user}"}
      add_tag => [ "ssh_auth_failure" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "pam_unix\(sshd:auth\): authentication failure; logname= uid=0 euid=0 tty=%{NOTSPACE:tty} ruser= rhost=(?:%{HOSTNAME:remote_host}|\s*) user=%{USERNAME:user}"}
      add_tag => [ "ssh_auth_failure" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Failed password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
      add_tag => [ "ssh_failed_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted password for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
      add_tag => [ "ssh_accepted_password" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted publickey for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
      add_tag => [ "ssh_accepted_pubkey" ]
      tag_on_failure => []
    }
    grok {
      match => { "message" => "Accepted keyboard-interactive/pam for %{USERNAME:username} from %{IP:src_ip} port %{BASE10NUM:port} ssh2"}
      add_tag => [ "ssh_accepted_interactive" ]
      tag_on_failure => []
    }
  }

# NGINX
  if [type] == "NGINX" {
    date {
      match => [ "timestamp", "ISO8601" ]
    }
  }

# Vnclowpot
  if [type] == "Vnclowpot" {
    grok {
      match => [ "message", "\A%{NOTSPACE}%{SPACE}%{TIME}%{SPACE}%{IPV4:src_ip}:%{INT:src_port}%{SPACE}%{NOTSPACE:vnc_handshake}" ]
    }
    date {
      match => [ "timestamp", "yyyy/MM/dd HH:mm:ss" ]
      remove_field => ["timestamp"]
    }
    mutate {
      add_field => {
        "dest_port" => "5900"
      }
    }
  }

# Drop if parse fails
  if "_grokparsefailure" in [tags] { drop {} }

# Add geo coordinates / ASN info / IP rep.
  if [src_ip] {
    geoip {
      cache_size => 10000
      source => "src_ip"
      database => "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-City.mmdb"
    }
    geoip {
      cache_size => 10000
      source => "src_ip"
      database => "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-filter-geoip-5.0.3-java/vendor/GeoLite2-ASN.mmdb"
    }
    translate {
      refresh_interval => 86400
      field => "src_ip"
      destination => "ip_rep"
      dictionary_path => "/etc/listbot/iprep.yaml"
    }
  }

# In some rare conditions dest_port/src_port are indexed as strings; force integer for now
  if [dest_port] {
    mutate {
      convert => { "dest_port" => "integer" }
    }
  }
  if [src_port] {
    mutate {
      convert => { "src_port" => "integer" }
    }
  }

# Add T-Pot hostname and external IP
  if [type] == "ConPot" or [type] == "Cowrie" or [type] == "Dionaea" or [type] == "ElasticPot" or [type] == "eMobility" or [type] == "Glastopf" or [type] == "Honeytrap" or [type] == "Heralding" or [type] == "Mailoney" or [type] == "Rdpy" or [type] == "Suricata" or [type] == "Vnclowpot" {
    mutate {
      add_field => {
        "t-pot_ip_ext" => "${MY_EXTIP}"
        "t-pot_ip_int" => "${MY_INTIP}"
        "t-pot_hostname" => "${MY_HOSTNAME}"
      }
    }
  }

}

# Output section
output {
  elasticsearch {
    # custom es6x index template, copied into the plugin dir by Dockerfile.6x;
    # the plugin option for this is 'template' ('path' is the ES HTTP path prefix)
    template => "/usr/share/logstash/vendor/bundle/jruby/2.3.0/gems/logstash-output-elasticsearch-9.1.1-java/lib/logstash/outputs/elasticsearch/elasticsearch-template-es6x.json"
    hosts => ["elasticsearch:9200"]
  }

  if [type] == "Suricata" {
    file {
      file_mode => 0760
      path => "/data/suricata/log/suricata_ews.log"
    }
  }

# Debug output
  #if [type] == "XYZ" {
  #  stdout {
  #    codec => rubydebug
  #  }
  #}

# Debug output
  #stdout {
  #  codec => rubydebug
  #}

}
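A quick way to sanity-check a config like this before baking it into the image is logstash's built-in config test (the -t / --config.test_and_exit flag is standard in logstash 5.x/6.x):

    /usr/share/logstash/bin/logstash -t -f /etc/logstash/conf.d/logstash.conf
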
docker/elk/logstash/docker-compose.yml

@@ -18,3 +18,4 @@ services:

     volumes:
      - /data:/data
      - /var/log:/data/host/log
+     - /root/tpotce/docker/elk/logstash/dist/logstash.conf:/etc/logstash/conf.d/logstash.conf
docker/elk/logstash/docker-compose.yml.5x (new file, 20 lines)
@@ -0,0 +1,20 @@

# T-Pot (Standard)
# For docker-compose ...
version: '2.2'

services:

## Logstash service
  logstash:
    build: .
    container_name: logstash
    restart: always
#    depends_on:
#     elasticsearch:
#       condition: service_healthy
    env_file:
     - /opt/tpot/etc/compose/elk_environment
    image: "dtagdevsec/logstash:1804"
    volumes:
     - /data:/data
     - /var/log:/data/host/log
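To bring the 5.x variant up standalone, docker-compose can be pointed at this file directly. A sketch: since build: . expects a file literally named Dockerfile, the 5.x Dockerfile would first need to be copied or symlinked into place:

    # from docker/elk/logstash/
    cp Dockerfile.5x Dockerfile
    docker-compose -f docker-compose.yml.5x up --build
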