From daf41b4b719dec0c737ed3792d6b13a65fcd906a Mon Sep 17 00:00:00 2001
From: t3chn0m4g3
Date: Tue, 21 Dec 2021 11:36:38 +0000
Subject: [PATCH] tweaking

---
 docker/elk/logstash/Dockerfile                  |  12 +-
 docker/elk/logstash/Dockerfile.new              |  68 ++
 docker/elk/logstash/deploy.sh                   | 123 +++
 .../dist/{http.conf => http_input.conf}         |   0
 docker/elk/logstash/dist/http_output.conf       | 715 ++++++++++++++++++
 docker/elk/logstash/dist/pipelines.yml          |   4 +-
 iso/installer/install.sh                        |   2 +-
 update.sh                                       |   2 +-
 8 files changed, 917 insertions(+), 9 deletions(-)
 create mode 100644 docker/elk/logstash/Dockerfile.new
 create mode 100755 docker/elk/logstash/deploy.sh
 rename docker/elk/logstash/dist/{http.conf => http_input.conf} (100%)
 create mode 100644 docker/elk/logstash/dist/http_output.conf

diff --git a/docker/elk/logstash/Dockerfile b/docker/elk/logstash/Dockerfile
index 72cf3fd2..a10ac424 100644
--- a/docker/elk/logstash/Dockerfile
+++ b/docker/elk/logstash/Dockerfile
@@ -1,7 +1,7 @@
 FROM alpine:3.14
 #
 # VARS
-ENV LS_VER=7.15.1
+ENV LS_VER=7.16.2
 # Include dist
 ADD dist/ /root/dist/
 #
@@ -9,12 +9,14 @@ ADD dist/ /root/dist/
 #RUN sed -i 's/dl-cdn/dl-2/g' /etc/apk/repositories && \
 RUN apk -U --no-cache add \
     aria2 \
+    autossh \
     bash \
     bzip2 \
     curl \
     libc6-compat \
     libzmq \
-    nss && \
+    nss \
+    openssh && \
     apk add --no-cache -X http://dl-cdn.alpinelinux.org/alpine/edge/community openjdk16-jre && \
 #
 # Get and install packages
@@ -42,7 +44,8 @@ RUN apk -U --no-cache add \
     chmod u+x /usr/bin/update.sh && \
     mkdir -p /etc/logstash/conf.d && \
     cp logstash.conf /etc/logstash/conf.d/ && \
-    cp http.conf /etc/logstash/conf.d/ && \
+    cp http_input.conf /etc/logstash/conf.d/ && \
+    cp http_output.conf /etc/logstash/conf.d/ && \
     cp pipelines.yml /usr/share/logstash/config/pipelines.yml && \
     cp tpot_es_template.json /etc/logstash/ && \
 #
@@ -64,5 +67,4 @@ HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'
 # Start logstash
 #USER logstash:logstash
 #CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf --config.reload.automatic --java-execution --log.level debug
-#CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf --config.reload.automatic --java-execution
-CMD update.sh && exec /usr/share/logstash/bin/logstash --config.reload.automatic --java-execution
+CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/http_output.conf --config.reload.automatic --java-execution
diff --git a/docker/elk/logstash/Dockerfile.new b/docker/elk/logstash/Dockerfile.new
new file mode 100644
index 00000000..72cf3fd2
--- /dev/null
+++ b/docker/elk/logstash/Dockerfile.new
@@ -0,0 +1,68 @@
+FROM alpine:3.14
+#
+# VARS
+ENV LS_VER=7.15.1
+# Include dist
+ADD dist/ /root/dist/
+#
+# Setup env and apt
+#RUN sed -i 's/dl-cdn/dl-2/g' /etc/apk/repositories && \
+RUN apk -U --no-cache add \
+    aria2 \
+    bash \
+    bzip2 \
+    curl \
+    libc6-compat \
+    libzmq \
+    nss && \
+    apk add --no-cache -X http://dl-cdn.alpinelinux.org/alpine/edge/community openjdk16-jre && \
+#
+# Get and install packages
+    mkdir -p /etc/listbot && \
+    cd /etc/listbot && \
+    aria2c -s16 -x 16 https://listbot.sicherheitstacho.eu/cve.yaml.bz2 && \
+    aria2c -s16 -x 16 https://listbot.sicherheitstacho.eu/iprep.yaml.bz2 && \
+    bunzip2 *.bz2 && \
+    cd /root/dist/ && \
+    mkdir -p /usr/share/logstash/ && \
+    aria2c -s 16 -x 16 https://artifacts.elastic.co/downloads/logstash/logstash-$LS_VER-linux-x86_64.tar.gz && \
+    tar xvfz logstash-$LS_VER-linux-x86_64.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
+    rm -rf /usr/share/logstash/jdk && \
+    # For some reason Alpine 3.14 does not report the -x flag correctly and thus elasticsearch does not find java
+    sed -i 's/! -x/! -e/g' /usr/share/logstash/bin/logstash.lib.sh && \
+    /usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
+    /usr/share/logstash/bin/logstash-plugin install logstash-input-http && \
+    /usr/share/logstash/bin/logstash-plugin install logstash-output-gelf && \
+    /usr/share/logstash/bin/logstash-plugin install logstash-output-http && \
+    /usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
+#
+# Add and move files
+    cd /root/dist/ && \
+    cp update.sh /usr/bin/ && \
+    chmod u+x /usr/bin/update.sh && \
+    mkdir -p /etc/logstash/conf.d && \
+    cp logstash.conf /etc/logstash/conf.d/ && \
+    cp http.conf /etc/logstash/conf.d/ && \
+    cp pipelines.yml /usr/share/logstash/config/pipelines.yml && \
+    cp tpot_es_template.json /etc/logstash/ && \
+#
+# Setup user, groups and configs
+    addgroup -g 2000 logstash && \
+    adduser -S -H -s /bin/bash -u 2000 -D -g 2000 logstash && \
+    chown -R logstash:logstash /usr/share/logstash && \
+    chown -R logstash:logstash /etc/listbot && \
+    chmod 755 /usr/bin/update.sh && \
+#
+# Clean up
+    rm -rf /root/* && \
+    rm -rf /tmp/* && \
+    rm -rf /var/cache/apk/*
+#
+# Healthcheck
+HEALTHCHECK --retries=10 CMD curl -s -XGET 'http://127.0.0.1:9600'
+#
+# Start logstash
+#USER logstash:logstash
+#CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf --config.reload.automatic --java-execution --log.level debug
+#CMD update.sh && exec /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash.conf --config.reload.automatic --java-execution
+CMD update.sh && exec /usr/share/logstash/bin/logstash --config.reload.automatic --java-execution
diff --git a/docker/elk/logstash/deploy.sh b/docker/elk/logstash/deploy.sh
new file mode 100755
index 00000000..e36f6e12
--- /dev/null
+++ b/docker/elk/logstash/deploy.sh
@@ -0,0 +1,123 @@
+#!/bin/bash
+
+# Do we have root?
+function fuGOT_ROOT {
+echo
+echo -n "### Checking for root: "
+if [ "$(whoami)" != "root" ];
+  then
+    echo "[ NOT OK ]"
+    echo "### Please run as root."
+    echo "### Example: sudo $0"
+    exit
+  else
+    echo "[ OK ]"
+fi
+}
+
+function fuDEPLOY_POT () {
+sshpass -e ssh -4 -t -T -l "$MY_TPOT_USERNAME" -p 64295 "$MY_HIVE_IP" << EOF
+echo "$SSHPASS" | sudo -S bash -c 'useradd -m -s /sbin/nologin -G tpotlogs "$MY_HIVE_USERNAME";
+mkdir -p /home/"$MY_HIVE_USERNAME"/.ssh;
+echo "$MY_POT_PUBLICKEY" >> /home/"$MY_HIVE_USERNAME"/.ssh/authorized_keys;
+chmod 600 /home/"$MY_HIVE_USERNAME"/.ssh/authorized_keys;
+chmod 755 /home/"$MY_HIVE_USERNAME"/.ssh;
+chown "$MY_HIVE_USERNAME":"$MY_HIVE_USERNAME" -R /home/"$MY_HIVE_USERNAME"/.ssh'
+EOF
+exit
+}
+
+# Check Hive availability
+function fuCHECK_HIVE () {
+sshpass -e ssh -4 -t -l "$MY_TPOT_USERNAME" -p 64295 -f -N -L64305:127.0.0.1:64305 "$MY_HIVE_IP"
+if [ $? -eq 0 ];
+  then
+    echo ssh success
+    myHIVE_OK=$(curl -s http://127.0.0.1:64305)
+    if [ "$myHIVE_OK" == "ok" ];
+      then
+        echo ssh tunnel success
+        kill -9 $(pidof ssh)
+      else
+        echo tunneled port 64305 on Hive unreachable
+        echo aborting
+        kill -9 $(pidof ssh)
+    fi;
+  else
+    echo ssh on Hive unreachable
+fi;
+}
+
+function fuGET_DEPLOY_DATA () {
+echo
+echo "### Please provide data from your T-Pot Hive installation."
+echo "### This usually is the one running the 'T-Pot Hive' type."
+echo "### You will need the OS user (typically 'tsec'), the user's password and the IP / FQDN."
+echo "### Do not worry, the password will not be persisted!"
+echo
+
+read -p "Username: " MY_TPOT_USERNAME
+read -s -p "Password: " SSHPASS
+echo
+export SSHPASS
+read -p "IP / FQDN: " MY_HIVE_IP
+MY_HIVE_USERNAME="$(hostname)"
+MY_TPOT_TYPE="POT"
+
+echo "$MY_TPOT_USERNAME"
+echo "$MY_HIVE_USERNAME"
+echo "$SSHPASS"
+echo "$MY_HIVE_IP"
+echo "$MY_TPOT_TYPE"
+MY_POT_PUBLICKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME.pub"
+MY_POT_PRIVATEKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME"
+if ! [ -s "$MY_POT_PRIVATEKEYFILE" ] && ! [ -s "$MY_POT_PUBLICKEYFILE" ];
+  then
+    echo "we need to gen a keyfile"
+    mkdir -p /data/elk/logstash
+    ssh-keygen -f "$MY_POT_PRIVATEKEYFILE" -N "" -C "$MY_HIVE_USERNAME"
+    MY_POT_PUBLICKEY="$(cat "$MY_POT_PUBLICKEYFILE")"
+    echo "$MY_POT_PUBLICKEY"
+  else
+    echo "there is a keyfile already, exiting"
+    exit
+fi
+}
+
+# Deploy Pot to Hive
+fuGOT_ROOT
+echo
+echo "-----------------------------"
+echo "Ship T-Pot Logs to T-Pot Hive"
+echo "-----------------------------"
+echo "Executing this script will ship all logs to a T-Pot Hive installation."
+echo
+echo
+echo "------------------------------------"
+echo "Please provide data from your T-Pot "
+echo "------------------------------------"
+echo "[c] - Continue deployment"
+#echo "[0] - Rollback"
+echo "[q] - Abort and exit"
+echo
+while [ 1 != 2 ]
+  do
+    read -s -n 1 -p "Your choice: " mySELECT
+    echo $mySELECT
+    case "$mySELECT" in
+      [c,C])
+        fuGET_DEPLOY_DATA
+        fuCHECK_HIVE
+        fuDEPLOY_POT
+        break
+        ;;
+#      [0])
+#        fuOPTOUT
+#        break
+#        ;;
+      [q,Q])
+        echo "Aborted."
+        exit
+        ;;
+    esac
+done
diff --git a/docker/elk/logstash/dist/http.conf b/docker/elk/logstash/dist/http_input.conf
similarity index 100%
rename from docker/elk/logstash/dist/http.conf
rename to docker/elk/logstash/dist/http_input.conf
diff --git a/docker/elk/logstash/dist/http_output.conf b/docker/elk/logstash/dist/http_output.conf
new file mode 100644
index 00000000..3baea771
--- /dev/null
+++ b/docker/elk/logstash/dist/http_output.conf
@@ -0,0 +1,715 @@
+# Input section
+input {
+
+# Fatt
+  file {
+    path => ["/data/fatt/log/fatt.log"]
+    codec => json
+    type => "Fatt"
+  }
+
+# Suricata
+  file {
+    path => ["/data/suricata/log/eve.json"]
+    codec => json
+    type => "Suricata"
+  }
+
+# P0f
+  file {
+    path => ["/data/p0f/log/p0f.json"]
+    codec => json
+    type => "P0f"
+  }
+
+# Adbhoney
+  file {
+    path => ["/data/adbhoney/log/adbhoney.json"]
+    codec => json
+    type => "Adbhoney"
+  }
+
+# Ciscoasa
+  file {
+    path => ["/data/ciscoasa/log/ciscoasa.log"]
+    codec => plain
+    type => "Ciscoasa"
+  }
+
+# CitrixHoneypot
+  file {
+    path => ["/data/citrixhoneypot/logs/server.log"]
+    codec => json
+    type => "CitrixHoneypot"
+  }
+
+# Conpot
+  file {
+    path => ["/data/conpot/log/*.json"]
+    codec => json
+    type => "ConPot"
+  }
+
+# Cowrie
+  file {
+    path => ["/data/cowrie/log/cowrie.json"]
+    codec => json
+    type => "Cowrie"
+  }
+
+# Dionaea
+  file {
+    path => ["/data/dionaea/log/dionaea.json"]
+    codec => json
+    type => "Dionaea"
+  }
+
+# Dicompot
+  file {
+    path => ["/data/dicompot/log/dicompot.log"]
+    codec => json
+    type => "Dicompot"
+  }
+
+# Ddospot
+  file {
+    path => ["/data/ddospot/log/*.log"]
+    codec => json
+    type => "Ddospot"
+  }
+
+# ElasticPot
+  file {
+    path => ["/data/elasticpot/log/elasticpot.json"]
+    codec => json
+    type => "ElasticPot"
+  }
+
+# Endlessh
+  file {
+    path => ["/data/endlessh/log/endlessh.log"]
+    codec => plain
+    type => "Endlessh"
+  }
+
+# Glutton
+  file {
+    path => ["/data/glutton/log/glutton.log"]
+    codec => json
+    type => "Glutton"
+  }
+
+# Hellpot
+  file {
+    path => ["/data/hellpot/log/hellpot.log"]
+    codec => json
+    type => "Hellpot"
+  }
+
+# Heralding
+  file {
+    path => ["/data/heralding/log/auth.csv"]
+    type => "Heralding"
+  }
+
+# Honeypots
+  file {
+    path => ["/data/honeypots/log/*.log"]
+    codec => json
+    type => "Honeypots"
+  }
+
+# Honeypy
+  file {
+    path => ["/data/honeypy/log/json.log"]
+    codec => json
+    type => "Honeypy"
+  }
+
+# Honeysap
+  file {
+    path => ["/data/honeysap/log/honeysap-external.log"]
+    codec => json
+    type => "Honeysap"
+  }
+
+# Honeytrap
+  file {
+    path => ["/data/honeytrap/log/attackers.json"]
+    codec => json
+    type => "Honeytrap"
+  }
+
+# Ipphoney
+  file {
+    path => ["/data/ipphoney/log/ipphoney.json"]
+    codec => json
+    type => "Ipphoney"
+  }
+
+# Mailoney
+  file {
+    path => ["/data/mailoney/log/commands.log"]
+    codec => json
+    type => "Mailoney"
+  }
+
+# Medpot
+  file {
+    path => ["/data/medpot/log/medpot.log"]
+    codec => json
+    type => "Medpot"
+  }
+
+# Rdpy
+  file {
+    path => ["/data/rdpy/log/rdpy.log"]
+    type => "Rdpy"
+  }
+
+# Redishoneypot
+  file {
+    path => ["/data/redishoneypot/log/redishoneypot.log"]
+    codec => json
+    type => "Redishoneypot"
+  }
+
+# Host NGINX
+  file {
+    path => ["/data/nginx/log/access.log"]
+    codec => json
+    type => "NGINX"
+  }
+
+# Tanner
+  file {
+    path => ["/data/tanner/log/tanner_report.json"]
+    codec => json
+    type => "Tanner"
+  }
+
+}
+
+# Filter Section
+filter {
+
+# Fatt
+  if [type] == "Fatt" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "sourceIp" => "src_ip"
+        "destinationIp" => "dest_ip"
+        "sourcePort" => "src_port"
+        "destinationPort" => "dest_port"
+        "gquic" => "fatt_gquic"
+        "http" => "fatt_http"
+        "rdp" => "fatt_rdp"
+        "ssh" => "fatt_ssh"
+        "tls" => "fatt_tls"
+      }
+    }
+  }
+
+# Suricata
+  if [type] == "Suricata" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    translate {
+      refresh_interval => 86400
+      field => "[alert][signature_id]"
+      destination => "[alert][cve_id]"
+      dictionary_path => "/etc/listbot/cve.yaml"
+#      fallback => "-"
+    }
+  }
+
+# P0f
+  if [type] == "P0f" {
+    date {
+      match => [ "timestamp", "yyyy'/'MM'/'dd HH:mm:ss" ]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      rename => {
+        "server_port" => "dest_port"
+        "server_ip" => "dest_ip"
+        "client_port" => "src_port"
+        "client_ip" => "src_ip"
+      }
+    }
+  }
+
+# Adbhoney
+  if [type] == "Adbhoney" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+      remove_field => ["unixtime"]
+    }
+  }
+
+# Ciscoasa
+  if [type] == "Ciscoasa" {
+    kv {
+      remove_char_key => " '{}"
+      remove_char_value => "'{}"
+      value_split => ":"
+      field_split => ","
+    }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      add_field => {
+        "dest_ip" => "${MY_EXTIP}"
+      }
+    }
+  }
+
+# CitrixHoneypot
+  if [type] == "CitrixHoneypot" {
+    grok {
+      match => {
+        "message" => [ "\A\(%{IPV4:src_ip:string}:%{INT:src_port:integer}\): %{JAVAMETHOD:http.http_method:string}%{SPACE}%{CISCO_REASON:fileinfo.state:string}: %{UNIXPATH:fileinfo.filename:string}",
+          "\A\(%{IPV4:src_ip:string}:%{INT:src_port:integer}\): %{JAVAMETHOD:http.http_method:string}%{SPACE}%{CISCO_REASON:fileinfo.state:string}: %{GREEDYDATA:payload:string}",
+          "\A\(%{IPV4:src_ip:string}:%{INT:src_port:integer}\): %{S3_REQUEST_LINE:msg:string} %{CISCO_REASON:fileinfo.state:string}: %{GREEDYDATA:payload:string:string}",
+          "\A\(%{IPV4:src_ip:string}:%{INT:src_port:integer}\): %{GREEDYDATA:msg:string}" ]
+      }
+    }
+    date {
+      match => [ "asctime", "ISO8601" ]
+      remove_field => ["asctime"]
+      remove_field => ["message"]
+    }
+    mutate {
+      add_field => {
+        "dest_port" => "443"
+      }
+      rename => {
+        "levelname" => "level"
+      }
+    }
+  }
+
+# Conpot
+  if [type] == "ConPot" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "dst_port" => "dest_port"
+        "dst_ip" => "dest_ip"
+      }
+    }
+  }
+
+# Cowrie
+  if [type] == "Cowrie" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "dst_port" => "dest_port"
+        "dst_ip" => "dest_ip"
+      }
+    }
+  }
+
+# Ddospot
+  if [type] == "Ddospot" {
+    date {
+      match => [ "time", "yyyy-MM-dd HH:mm:ss.SSSSSS" ]
+      remove_field => ["time"]
+    }
+    if [path] == "/data/ddospot/log/chargenpot.log" {
+      mutate {
+        add_field => {
+          "dest_port" => "19"
+          "dest_ip" => "${MY_EXTIP}"
+        }
+      }
+    }
+    if [path] == "/data/ddospot/log/dnspot.log" {
+      mutate {
+        add_field => {
+          "dest_port" => "53"
+          "dest_ip" => "${MY_EXTIP}"
+        }
+      }
+    }
+    if [path] == "/data/ddospot/log/ntpot.log" {
+      mutate {
+        add_field => {
+          "dest_port" => "123"
+          "dest_ip" => "${MY_EXTIP}"
+        }
+      }
+    }
+    if [path] == "/data/ddospot/log/ssdpot.log" {
+      mutate {
+        add_field => {
+          "dest_port" => "1900"
+          "dest_ip" => "${MY_EXTIP}"
+        }
+      }
+    }
+  }
+
+# Dionaea
+  if [type] == "Dionaea" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "dst_port" => "dest_port"
+        "dst_ip" => "dest_ip"
+      }
+      gsub => [
+        "src_ip", "::ffff:", "",
+        "dest_ip", "::ffff:", ""
+      ]
+    }
+    if [credentials] {
+      mutate {
+        add_field => {
+          "username" => "%{[credentials][username]}"
+          "password" => "%{[credentials][password]}"
+        }
+        remove_field => "[credentials]"
+      }
+    }
+  }
+
+# Dicompot
+  if [type] == "Dicompot" {
+    date {
+      match => [ "time", "yyyy-MM-dd HH:mm:ss" ]
+      remove_field => ["time"]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      rename => {
+        "ID" => "id"
+        "IP" => "src_ip"
+        "Port" => "src_port"
+        "AETitle" => "aetitle"
+        "Command" => "input"
+        "Files" => "files"
+        "Identifier" => "identifier"
+        "Matches" => "matches"
+        "Status" => "session"
+        "Version" => "version"
+      }
+    }
+  }
+
+# ElasticPot
+  if [type] == "ElasticPot" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "content_type" => "http.http_content_type"
+        "dst_port" => "dest_port"
+        "dst_ip" => "dest_ip"
+        "message" => "event_type"
+        "request" => "request_method"
+        "user_agent" => "http_user_agent"
+        "url" => "http.url"
+      }
+    }
+  }
+
+# Endlessh
+# Example: 2021-10-29T21:08:31.026Z CLOSE host=1.2.3.4 port=12345 fd=4 time=20.015 bytes=24
+# Example: 2021-10-29T21:08:11.011Z ACCEPT host=1.2.3.4 port=12346 fd=4 n=1/4096
+  if [type] == "Endlessh" {
+    grok { match => { "message" => [ "\A%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{WORD:reason}%{SPACE}host=%{IPV4:src_ip}%{SPACE}port=%{INT:src_port}%{SPACE}fd=%{INT}%{SPACE}time=%{SECOND:duration}%{SPACE}bytes=%{NUMBER:bytes}", "\A%{TIMESTAMP_ISO8601:timestamp}%{SPACE}%{WORD:reason}%{SPACE}host=%{IPV4:src_ip}%{SPACE}port=%{INT:src_port}%{SPACE}fd=%{INT}%{SPACE}n=%{INT}/%{INT}" ] } }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      add_field => {
+        "dest_port" => "22"
+        "dest_ip" => "${MY_EXTIP}"
+      }
+    }
+  }
+
+# Glutton
+  if [type] == "Glutton" {
+    date {
+      match => [ "ts", "UNIX" ]
+      remove_field => ["ts"]
+    }
+  }
+
+# Hellpot
+  if [type] == "Hellpot" {
+    date {
+      match => [ "time", "ISO8601" ]
+      remove_field => ["time"]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      add_field => {
+        "dest_port" => "80"
+        "dest_ip" => "${MY_EXTIP}"
+      }
+      rename => {
+        "BYTES" => "bytes"
+        "DURATION" => "duration"
+        "REMOTE_ADDR" => "src_ip"
+        "URL" => "url"
+        "USERAGENT" => "http_user_agent"
+        "message" => "reason"
+      }
+    }
+  }
+
+# Heralding
+  if [type] == "Heralding" {
+    csv {
+      columns => ["timestamp","auth_id","session_id","src_ip","src_port","dest_ip","dest_port","proto","username","password"] separator => ","
+    }
+    date {
+      match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSSSSS" ]
+      remove_field => ["timestamp"]
+    }
+  }
+
+# Honeypy
+  if [type] == "Honeypy" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+      remove_field => ["timestamp"]
+      remove_field => ["date"]
+      remove_field => ["time"]
+      remove_field => ["millisecond"]
+    }
+  }
+
+# Honeypots
+  if [type] == "Honeypots" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+  }
+
+# Honeysap
+  if [type] == "Honeysap" {
+    date {
+      match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSSSSS" ]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      rename => {
+        "[data][error_msg]" => "event_type"
+        "service" => "sensor"
+        "source_port" => "src_port"
+        "source_ip" => "src_ip"
+        "target_port" => "dest_port"
+        "target_ip" => "dest_ip"
+      }
+      remove_field => "event"
+      remove_field => "return_code"
+    }
+    if [data] {
+      mutate {
+        remove_field => "[data]"
+      }
+    }
+  }
+
+# Honeytrap
+  if [type] == "Honeytrap" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "[attack_connection][local_port]" => "dest_port"
+        "[attack_connection][local_ip]" => "dest_ip"
+        "[attack_connection][remote_port]" => "src_port"
+        "[attack_connection][remote_ip]" => "src_ip"
+      }
+    }
+  }
+
+# Ipphoney
+  if [type] == "Ipphoney" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "query" => "ipp_query"
+        "content_type" => "http.http_content_type"
+        "dst_port" => "dest_port"
+        "dst_ip" => "dest_ip"
+        "request" => "request_method"
+        "operation" => "data"
+        "user_agent" => "http_user_agent"
+        "url" => "http.url"
+      }
+    }
+  }
+
+# Mailoney
+  if [type] == "Mailoney" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      add_field => { "dest_port" => "25" }
+    }
+  }
+
+# Medpot
+  if [type] == "Medpot" {
+    mutate {
+      add_field => {
+        "dest_port" => "2575"
+        "dest_ip" => "${MY_EXTIP}"
+      }
+    }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+  }
+
+# Rdpy
+  if [type] == "Rdpy" {
+    grok { match => { "message" => [ "\A%{TIMESTAMP_ISO8601:timestamp},domain:%{CISCO_REASON:domain},username:%{CISCO_REASON:username},password:%{CISCO_REASON:password},hostname:%{GREEDYDATA:hostname}", "\A%{TIMESTAMP_ISO8601:timestamp},Connection from %{IPV4:src_ip}:%{INT:src_port:integer}" ] } }
+    date {
+      match => [ "timestamp", "ISO8601" ]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      add_field => { "dest_port" => "3389" }
+    }
+  }
+
+# Redishoneypot
+  if [type] == "Redishoneypot" {
+    date {
+      match => [ "time", "yyyy-MM-dd HH:mm:ss" ]
+      remove_field => ["time"]
+      remove_field => ["timestamp"]
+    }
+    mutate {
+      split => { "addr" => ":" }
+      add_field => {
+        "src_ip" => "%{[addr][0]}"
+        "src_port" => "%{[addr][1]}"
+        "dest_port" => "6379"
+        "dest_ip" => "${MY_EXTIP}"
+      }
+      remove_field => ["addr"]
+    }
+  }
+
+# NGINX
+  if [type] == "NGINX" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+  }
+
+# Tanner
+  if [type] == "Tanner" {
+    date {
+      match => [ "timestamp", "ISO8601" ]
+    }
+    mutate {
+      rename => {
+        "[peer][ip]" => "src_ip"
+        "[peer][port]" => "src_port"
+      }
+      add_field => { "dest_port" => "80" }
+    }
+  }
+
+# Drop if parse fails
+if "_grokparsefailure" in [tags] { drop {} }
+if "_jsonparsefailure" in [tags] { drop {} }
+
+# Add geo coordinates / ASN info / IP rep.
+  if [src_ip] {
+    geoip {
+      cache_size => 10000
+      source => "src_ip"
+      database => "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-filter-geoip-7.2.3-java/vendor/GeoLite2-City.mmdb"
+    }
+    geoip {
+      cache_size => 10000
+      source => "src_ip"
+      database => "/usr/share/logstash/vendor/bundle/jruby/2.5.0/gems/logstash-filter-geoip-7.2.3-java/vendor/GeoLite2-ASN.mmdb"
+    }
+    translate {
+      refresh_interval => 86400
+      field => "src_ip"
+      destination => "ip_rep"
+      dictionary_path => "/etc/listbot/iprep.yaml"
+    }
+  }
+
+# In some rare conditions dest_port, src_port, status are indexed as string, forcing integer for now
+  if [dest_port] {
+    mutate {
+      convert => { "dest_port" => "integer" }
+    }
+  }
+  if [src_port] {
+    mutate {
+      convert => { "src_port" => "integer" }
+    }
+  }
+  if [status] {
+    mutate {
+      convert => { "status" => "integer" }
+    }
+  }
+  if [id] {
+    mutate {
+      convert => { "id" => "string" }
+    }
+  }
+  if [request] {
+    mutate {
+      convert => { "request" => "string" }
+    }
+  }
+
+# Add T-Pot hostname and external IP
+  mutate {
+    add_field => {
+      "t-pot_ip_ext" => "${MY_EXTIP}"
+      "t-pot_ip_int" => "${MY_INTIP}"
+      "t-pot_hostname" => "${MY_HOSTNAME}"
+    }
+  }
+
+}
+
+# Output section
+output {
+  http {
+    http_method => "post"
+    http_compression => true
+    id => "${MY_HOSTNAME}"
+    codec => "json"
+    url => "http://127.0.0.1:64305"
+  }
+
+}
diff --git a/docker/elk/logstash/dist/pipelines.yml b/docker/elk/logstash/dist/pipelines.yml
index 8dc23e85..41883e78 100644
--- a/docker/elk/logstash/dist/pipelines.yml
+++ b/docker/elk/logstash/dist/pipelines.yml
@@ -1,4 +1,4 @@
 - pipeline.id: logstash
   path.config: "/etc/logstash/conf.d/logstash.conf"
-- pipeline.id: http
-  path.config: "/etc/logstash/conf.d/http.conf"
+- pipeline.id: http_input
+  path.config: "/etc/logstash/conf.d/http_input.conf"
diff --git a/iso/installer/install.sh b/iso/installer/install.sh
index f7ac3917..3dc9102d 100755
--- a/iso/installer/install.sh
+++ b/iso/installer/install.sh
@@ -22,7 +22,7 @@ myLSB_STABLE_SUPPORTED="stretch buster"
 myLSB_TESTING_SUPPORTED="stable"
 myREMOTESITES="https://hub.docker.com https://github.com https://pypi.python.org https://debian.org https://listbot.sicherheitstacho.eu"
 myPREINSTALLPACKAGES="aria2 apache2-utils cracklib-runtime curl dialog figlet fuse grc libcrack2 libpq-dev lsb-release net-tools software-properties-common toilet"
-myINSTALLPACKAGES="aria2 apache2-utils apparmor apt-transport-https aufs-tools bash-completion build-essential ca-certificates cgroupfs-mount cockpit cockpit-docker console-setup console-setup-linux cracklib-runtime curl debconf-utils dialog dnsutils docker.io docker-compose ethtool fail2ban figlet genisoimage git glances grc haveged html2text htop iptables iw jq kbd libcrack2 libltdl7 libpam-google-authenticator man mosh multitail net-tools npm ntp openssh-server openssl pass pigz prips software-properties-common syslinux psmisc pv python3-pip toilet unattended-upgrades unzip vim wget wireless-tools wpasupplicant"
+myINSTALLPACKAGES="aria2 apache2-utils apparmor apt-transport-https aufs-tools bash-completion build-essential ca-certificates cgroupfs-mount cockpit cockpit-docker console-setup console-setup-linux cracklib-runtime curl debconf-utils dialog dnsutils docker.io docker-compose ethtool fail2ban figlet genisoimage git glances grc haveged html2text htop iptables iw jq kbd libcrack2 libltdl7 libpam-google-authenticator man mosh multitail net-tools npm ntp openssh-server openssl pass pigz prips software-properties-common sshpass syslinux psmisc pv python3-pip toilet unattended-upgrades unzip vim wget wireless-tools wpasupplicant"
 myINFO="\
 ###########################################
 ### T-Pot Installer for Debian (Stable) ###
diff --git a/update.sh b/update.sh
index 8b1aa432..ce836979 100755
--- a/update.sh
+++ b/update.sh
@@ -183,7 +183,7 @@ function fuUPDATER () {
 export DEBIAN_FRONTEND=noninteractive
 echo "### Installing apt-fast"
 /bin/bash -c "$(curl -sL https://raw.githubusercontent.com/ilikenwf/apt-fast/master/quick-install.sh)"
-local myPACKAGES="aria2 apache2-utils apparmor apt-transport-https aufs-tools bash-completion build-essential ca-certificates cgroupfs-mount cockpit cockpit-docker console-setup console-setup-linux cracklib-runtime curl debconf-utils dialog dnsutils docker.io docker-compose ethtool fail2ban figlet genisoimage git glances grc haveged html2text htop iptables iw jq kbd libcrack2 libltdl7 libpam-google-authenticator man mosh multitail net-tools npm ntp openssh-server openssl pass pigz prips software-properties-common syslinux psmisc pv python3-elasticsearch-curator python3-pip toilet unattended-upgrades unzip vim wget wireless-tools wpasupplicant"
+local myPACKAGES="aria2 apache2-utils apparmor apt-transport-https aufs-tools bash-completion build-essential ca-certificates cgroupfs-mount cockpit cockpit-docker console-setup console-setup-linux cracklib-runtime curl debconf-utils dialog dnsutils docker.io docker-compose ethtool fail2ban figlet genisoimage git glances grc haveged html2text htop iptables iw jq kbd libcrack2 libltdl7 libpam-google-authenticator man mosh multitail net-tools npm ntp openssh-server openssl pass pigz prips software-properties-common sshpass syslinux psmisc pv python3-elasticsearch-curator python3-pip toilet unattended-upgrades unzip vim wget wireless-tools wpasupplicant"
 # Remove purge in the future
 echo "### Removing repository based install of elasticsearch-curator"
 apt-get purge elasticsearch-curator -y
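
Note: http_output.conf posts every event to http://127.0.0.1:64305, so the sensor needs a forward tunnel to the Hive's Logstash HTTP input on that port. This patch adds autossh/openssh to the image and deploy.sh provisions the key plus the restricted 'tpotlogs' account on the Hive, but the tunnel invocation itself is not part of the patch. A minimal sketch, assuming the key path and user naming from deploy.sh; the Hive address and the keep-alive options are illustrative assumptions only:

#!/bin/bash
# Hypothetical example (not taken from this patch): keep a forward tunnel alive so that
# the Logstash http output on the sensor (127.0.0.1:64305) reaches the Hive's http input.
MY_HIVE_IP="203.0.113.10"                 # assumed Hive IP / FQDN
MY_KEY="/data/elk/logstash/$(hostname)"   # private key generated by deploy.sh
autossh -M 0 -f -N \
  -o "ServerAliveInterval 30" -o "ServerAliveCountMax 3" \
  -o "ExitOnForwardFailure yes" \
  -i "$MY_KEY" \
  -L 64305:127.0.0.1:64305 \
  -p 64295 "$(hostname)"@"$MY_HIVE_IP"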