Mirror of https://github.com/telekom-security/tpotce.git (synced 2025-07-01 12:32:12 +00:00)
Cleanup, Tweaking

Remove the old Sensor Edition and replace it with the Pot Edition; rename the Pot Edition to the new Sensor Edition. POT is now called SENSOR.
Parent: 66b4ef047a
Commit: 3524bafda2
5 changed files with 35 additions and 552 deletions
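For orientation: the change is a pure rename, every POT-prefixed variable and function becomes SENSOR-prefixed. On a deployed sensor, the Logstash environment file written by the deploy script (first diff below) would then look roughly as follows; this is only a sketch, the hostname and IP are placeholder values, not output from the commit:

# /data/elk/logstash/ls_environment (sketch, placeholder values)
MY_TPOT_TYPE=SENSOR
MY_SENSOR_PRIVATEKEYFILE=/data/elk/logstash/sensor-hostname
MY_HIVE_USERNAME=sensor-hostname
MY_HIVE_IP=192.0.2.10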
@@ -15,7 +15,7 @@ if [ "$(whoami)" != "root" ];
 fi
 }
 
-function fuDEPLOY_POT () {
+function fuDEPLOY_SENSOR () {
 echo
 echo "###############################"
 echo "# Deploying to T-Pot Hive ... #"
@@ -24,7 +24,7 @@ echo
 sshpass -e ssh -4 -t -T -l "$MY_TPOT_USERNAME" -p 64295 "$MY_HIVE_IP" << EOF
 echo "$SSHPASS" | sudo -S bash -c 'useradd -m -s /sbin/nologin -G tpotlogs "$MY_HIVE_USERNAME";
 mkdir -p /home/"$MY_HIVE_USERNAME"/.ssh;
-echo "$MY_POT_PUBLICKEY" >> /home/"$MY_HIVE_USERNAME"/.ssh/authorized_keys;
+echo "$MY_SENSOR_PUBLICKEY" >> /home/"$MY_HIVE_USERNAME"/.ssh/authorized_keys;
 chmod 600 /home/"$MY_HIVE_USERNAME"/.ssh/authorized_keys;
 chmod 755 /home/"$MY_HIVE_USERNAME"/.ssh;
 chown "$MY_HIVE_USERNAME":"$MY_HIVE_USERNAME" -R /home/"$MY_HIVE_USERNAME"/.ssh'
@@ -72,8 +72,8 @@ if [ $? -eq 0 ];
 echo "######################################################"
 echo
 kill -9 $(pidof ssh)
-rm $MY_POT_PUBLICKEYFILE
-rm $MY_POT_PRIVATEKEYFILE
+rm $MY_SENSOR_PUBLICKEYFILE
+rm $MY_SENSOR_PRIVATEKEYFILE
 rm $MY_LS_ENVCONFIGFILE
 exit 1
 fi;
@@ -84,8 +84,8 @@ if [ $? -eq 0 ];
 echo "# Aborting. #"
 echo "#################################################################"
 echo
-rm $MY_POT_PUBLICKEYFILE
-rm $MY_POT_PRIVATEKEYFILE
+rm $MY_SENSOR_PUBLICKEYFILE
+rm $MY_SENSOR_PRIVATEKEYFILE
 rm $MY_LS_ENVCONFIGFILE
 exit 1
 fi;
@@ -105,12 +105,12 @@ echo
 export SSHPASS
 read -p "IP / FQDN: " MY_HIVE_IP
 MY_HIVE_USERNAME="$(hostname)"
-MY_TPOT_TYPE="POT"
+MY_TPOT_TYPE="SENSOR"
 MY_LS_ENVCONFIGFILE="/data/elk/logstash/ls_environment"
 
-MY_POT_PUBLICKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME.pub"
-MY_POT_PRIVATEKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME"
-if ! [ -s "$MY_POT_PRIVATEKEYFILE" ] && ! [ -s "$MY_POT_PUBLICKEYFILE" ];
+MY_SENSOR_PUBLICKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME.pub"
+MY_SENSOR_PRIVATEKEYFILE="/data/elk/logstash/$MY_HIVE_USERNAME"
+if ! [ -s "$MY_SENSOR_PRIVATEKEYFILE" ] && ! [ -s "$MY_SENSOR_PUBLICKEYFILE" ];
 then
 echo
 echo "##############################"
@@ -118,8 +118,8 @@ if ! [ -s "$MY_POT_PRIVATEKEYFILE" ] && ! [ -s "$MY_POT_PUBLICKEYFILE" ];
 echo "##############################"
 echo
 mkdir -p /data/elk/logstash
-ssh-keygen -f "$MY_POT_PRIVATEKEYFILE" -N "" -C "$MY_HIVE_USERNAME"
-MY_POT_PUBLICKEY="$(cat "$MY_POT_PUBLICKEYFILE")"
+ssh-keygen -f "$MY_SENSOR_PRIVATEKEYFILE" -N "" -C "$MY_HIVE_USERNAME"
+MY_SENSOR_PUBLICKEY="$(cat "$MY_SENSOR_PUBLICKEYFILE")"
 else
 echo
 echo "#############################################"
@@ -137,7 +137,7 @@ echo "###########################################################"
 echo
 tee $MY_LS_ENVCONFIGFILE << EOF
 MY_TPOT_TYPE=$MY_TPOT_TYPE
-MY_POT_PRIVATEKEYFILE=$MY_POT_PRIVATEKEYFILE
+MY_SENSOR_PRIVATEKEYFILE=$MY_SENSOR_PRIVATEKEYFILE
 MY_HIVE_USERNAME=$MY_HIVE_USERNAME
 MY_HIVE_IP=$MY_HIVE_IP
 EOF
@@ -171,7 +171,7 @@ while [ 1 != 2 ]
 [c,C])
 fuGET_DEPLOY_DATA
 fuCHECK_HIVE
-fuDEPLOY_POT
+fuDEPLOY_SENSOR
 break
 ;;
 [q,Q])
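The hunks above generate the sensor keypair with ssh-keygen and provision a matching nologin account in the tpotlogs group on the Hive over SSH (port 64295). A minimal sketch of how the result could be checked on the Hive afterwards; the hostname is a placeholder and the checks are illustrative, not part of the commit:

# on the Hive, assuming the sensor's hostname is "sensor-hostname"
id sensor-hostname                                      # should list the tpotlogs group
sudo ls -l /home/sensor-hostname/.ssh/authorized_keys   # expected mode 600, owned by sensor-hostname
sudo grep -c ssh /home/sensor-hostname/.ssh/authorized_keys   # at least one key entry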
@@ -38,7 +38,7 @@ if [ -s "/data/elk/logstash/ls_environment" ];
 source /data/elk/logstash/ls_environment
 tee -a /opt/tpot/etc/compose/elk_environment << EOF
 MY_TPOT_TYPE=$MY_TPOT_TYPE
-MY_POT_PRIVATEKEYFILE=$MY_POT_PRIVATEKEYFILE
+MY_SENSOR_PRIVATEKEYFILE=$MY_SENSOR_PRIVATEKEYFILE
 MY_HIVE_USERNAME=$MY_HIVE_USERNAME
 MY_HIVE_IP=$MY_HIVE_IP
 EOF
docker/elk/logstash/dist/update.sh (vendored): 6 lines changed
@@ -36,18 +36,18 @@ if [ "$myCHECK" == "0" ];
 fi
 
 # Distributed T-Pot installation needs a different pipeline config and autossh tunnel.
-if [ "$MY_TPOT_TYPE" == "POT" ];
+if [ "$MY_TPOT_TYPE" == "SENSOR" ];
 then
 echo
 echo "Distributed T-Pot setup, sending T-Pot logs to $MY_HIVE_IP."
 echo
 echo "T-Pot type: $MY_TPOT_TYPE"
-echo "Keyfile used: $MY_POT_PRIVATEKEYFILE"
+echo "Keyfile used: $MY_SENSOR_PRIVATEKEYFILE"
 echo "Hive username: $MY_HIVE_USERNAME"
 echo "Hive IP: $MY_HIVE_IP"
 echo
 cp /usr/share/logstash/config/pipelines_pot.yml /usr/share/logstash/config/pipelines.yml
-autossh -f -M 0 -4 -l $MY_HIVE_USERNAME -i $MY_POT_PRIVATEKEYFILE -p 64295 -N -L64305:127.0.0.1:64305 $MY_HIVE_IP -o "ServerAliveInterval 30" -o "ServerAliveCountMax 3" -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null"
+autossh -f -M 0 -4 -l $MY_HIVE_USERNAME -i $MY_SENSOR_PRIVATEKEYFILE -p 64295 -N -L64305:127.0.0.1:64305 $MY_HIVE_IP -o "ServerAliveInterval 30" -o "ServerAliveCountMax 3" -o "StrictHostKeyChecking=no" -o "UserKnownHostsFile=/dev/null"
 exit 0
 fi
 
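With MY_TPOT_TYPE now set to SENSOR, the updated update.sh switches Logstash to the pot pipeline config and keeps an autossh-maintained SSH tunnel to the Hive, forwarding local port 64305 over the Hive's SSH port 64295. A hedged sketch for checking that tunnel in the environment where autossh was started; the exact tooling available (ss vs. netstat) may differ per image:

# is the autossh process running with the sensor key?
pgrep -af autossh
# is the forwarded port listening locally?
ss -ltn | grep 64305 || netstat -ltn | grep 64305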
@@ -1,511 +0,0 @@
-# T-Pot (Pot)
-# Do not erase ports sections, these are used by /opt/tpot/bin/rules.sh to setup iptables ACCEPT rules for NFQ (honeytrap / glutton)
-version: '2.3'
-
-networks:
-  adbhoney_local:
-  citrixhoneypot_local:
-  conpot_local_IEC104:
-  conpot_local_guardian_ast:
-  conpot_local_ipmi:
-  conpot_local_kamstrup_382:
-  cowrie_local:
-  dicompot_local:
-  dionaea_local:
-  elasticpot_local:
-  heralding_local:
-  honeysap_local:
-  logstash_local:
-  mailoney_local:
-  medpot_local:
-  rdpy_local:
-  tanner_local:
-  ewsposter_local:
-
-services:
-
-##################
-#### Honeypots
-##################
-
-# Adbhoney service
-  adbhoney:
-    container_name: adbhoney
-    restart: always
-    networks:
-     - adbhoney_local
-    ports:
-     - "5555:5555"
-    image: "dtagdevsec/adbhoney:2203"
-    read_only: true
-    volumes:
-     - /data/adbhoney/log:/opt/adbhoney/log
-     - /data/adbhoney/downloads:/opt/adbhoney/dl
-
-# Ciscoasa service
-  ciscoasa:
-    container_name: ciscoasa
-    restart: always
-    tmpfs:
-     - /tmp/ciscoasa:uid=2000,gid=2000
-    network_mode: "host"
-    ports:
-     - "5000:5000/udp"
-     - "8443:8443"
-    image: "dtagdevsec/ciscoasa:2203"
-    read_only: true
-    volumes:
-     - /data/ciscoasa/log:/var/log/ciscoasa
-
-# CitrixHoneypot service
-  citrixhoneypot:
-    container_name: citrixhoneypot
-    restart: always
-    networks:
-     - citrixhoneypot_local
-    ports:
-     - "443:443"
-    image: "dtagdevsec/citrixhoneypot:2203"
-    read_only: true
-    volumes:
-     - /data/citrixhoneypot/logs:/opt/citrixhoneypot/logs
-
-# Conpot IEC104 service
-  conpot_IEC104:
-    container_name: conpot_iec104
-    restart: always
-    environment:
-     - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-     - CONPOT_JSON_LOG=/var/log/conpot/conpot_IEC104.json
-     - CONPOT_LOG=/var/log/conpot/conpot_IEC104.log
-     - CONPOT_TEMPLATE=IEC104
-     - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-     - /tmp/conpot:uid=2000,gid=2000
-    networks:
-     - conpot_local_IEC104
-    ports:
-     - "161:161/udp"
-     - "2404:2404"
-    image: "dtagdevsec/conpot:2203"
-    read_only: true
-    volumes:
-     - /data/conpot/log:/var/log/conpot
-
-# Conpot guardian_ast service
-  conpot_guardian_ast:
-    container_name: conpot_guardian_ast
-    restart: always
-    environment:
-     - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-     - CONPOT_JSON_LOG=/var/log/conpot/conpot_guardian_ast.json
-     - CONPOT_LOG=/var/log/conpot/conpot_guardian_ast.log
-     - CONPOT_TEMPLATE=guardian_ast
-     - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-     - /tmp/conpot:uid=2000,gid=2000
-    networks:
-     - conpot_local_guardian_ast
-    ports:
-     - "10001:10001"
-    image: "dtagdevsec/conpot:2203"
-    read_only: true
-    volumes:
-     - /data/conpot/log:/var/log/conpot
-
-# Conpot ipmi
-  conpot_ipmi:
-    container_name: conpot_ipmi
-    restart: always
-    environment:
-     - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-     - CONPOT_JSON_LOG=/var/log/conpot/conpot_ipmi.json
-     - CONPOT_LOG=/var/log/conpot/conpot_ipmi.log
-     - CONPOT_TEMPLATE=ipmi
-     - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-     - /tmp/conpot:uid=2000,gid=2000
-    networks:
-     - conpot_local_ipmi
-    ports:
-     - "623:623/udp"
-    image: "dtagdevsec/conpot:2203"
-    read_only: true
-    volumes:
-     - /data/conpot/log:/var/log/conpot
-
-# Conpot kamstrup_382
-  conpot_kamstrup_382:
-    container_name: conpot_kamstrup_382
-    restart: always
-    environment:
-     - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-     - CONPOT_JSON_LOG=/var/log/conpot/conpot_kamstrup_382.json
-     - CONPOT_LOG=/var/log/conpot/conpot_kamstrup_382.log
-     - CONPOT_TEMPLATE=kamstrup_382
-     - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-     - /tmp/conpot:uid=2000,gid=2000
-    networks:
-     - conpot_local_kamstrup_382
-    ports:
-     - "1025:1025"
-     - "50100:50100"
-    image: "dtagdevsec/conpot:2203"
-    read_only: true
-    volumes:
-     - /data/conpot/log:/var/log/conpot
-
-# Cowrie service
-  cowrie:
-    container_name: cowrie
-    restart: always
-    tmpfs:
-     - /tmp/cowrie:uid=2000,gid=2000
-     - /tmp/cowrie/data:uid=2000,gid=2000
-    networks:
-     - cowrie_local
-    ports:
-     - "22:22"
-     - "23:23"
-    image: "dtagdevsec/cowrie:2203"
-    read_only: true
-    volumes:
-     - /data/cowrie/downloads:/home/cowrie/cowrie/dl
-     - /data/cowrie/keys:/home/cowrie/cowrie/etc
-     - /data/cowrie/log:/home/cowrie/cowrie/log
-     - /data/cowrie/log/tty:/home/cowrie/cowrie/log/tty
-
-# Dicompot service
-# Get the Horos Client for testing: https://horosproject.org/
-# Get Dicom images (CC BY 3.0): https://www.cancerimagingarchive.net/collections/
-# Put images (which must be in Dicom DCM format or it will not work!) into /data/dicompot/images
-  dicompot:
-    container_name: dicompot
-    restart: always
-    networks:
-     - dicompot_local
-    ports:
-     - "11112:11112"
-    image: "dtagdevsec/dicompot:2203"
-    read_only: true
-    volumes:
-     - /data/dicompot/log:/var/log/dicompot
-    # - /data/dicompot/images:/opt/dicompot/images
-
-# Dionaea service
-  dionaea:
-    container_name: dionaea
-    stdin_open: true
-    tty: true
-    restart: always
-    networks:
-     - dionaea_local
-    ports:
-     - "20:20"
-     - "21:21"
-     - "42:42"
-     - "69:69/udp"
-     - "81:81"
-     - "135:135"
-    # - "443:443"
-     - "445:445"
-     - "1433:1433"
-     - "1723:1723"
-     - "1883:1883"
-     - "3306:3306"
-     - "5060:5060"
-     - "5060:5060/udp"
-     - "5061:5061"
-     - "27017:27017"
-    image: "dtagdevsec/dionaea:2203"
-    read_only: true
-    volumes:
-     - /data/dionaea/roots/ftp:/opt/dionaea/var/dionaea/roots/ftp
-     - /data/dionaea/roots/tftp:/opt/dionaea/var/dionaea/roots/tftp
-     - /data/dionaea/roots/www:/opt/dionaea/var/dionaea/roots/www
-     - /data/dionaea/roots/upnp:/opt/dionaea/var/dionaea/roots/upnp
-     - /data/dionaea:/opt/dionaea/var/dionaea
-     - /data/dionaea/binaries:/opt/dionaea/var/dionaea/binaries
-     - /data/dionaea/log:/opt/dionaea/var/log
-     - /data/dionaea/rtp:/opt/dionaea/var/dionaea/rtp
-
-# ElasticPot service
-  elasticpot:
-    container_name: elasticpot
-    restart: always
-    networks:
-     - elasticpot_local
-    ports:
-     - "9200:9200"
-    image: "dtagdevsec/elasticpot:2203"
-    read_only: true
-    volumes:
-     - /data/elasticpot/log:/opt/elasticpot/log
-
-# Heralding service
-  heralding:
-    container_name: heralding
-    restart: always
-    tmpfs:
-     - /tmp/heralding:uid=2000,gid=2000
-    networks:
-     - heralding_local
-    ports:
-    # - "21:21"
-    # - "22:22"
-    # - "23:23"
-    # - "25:25"
-    # - "80:80"
-     - "110:110"
-     - "143:143"
-    # - "443:443"
-     - "465:465"
-     - "993:993"
-     - "995:995"
-    # - "3306:3306"
-    # - "3389:3389"
-     - "1080:1080"
-     - "5432:5432"
-     - "5900:5900"
-    image: "dtagdevsec/heralding:2203"
-    read_only: true
-    volumes:
-     - /data/heralding/log:/var/log/heralding
-
-# HoneySAP service
-  honeysap:
-    container_name: honeysap
-    restart: always
-    networks:
-     - honeysap_local
-    ports:
-     - "3299:3299"
-    image: "dtagdevsec/honeysap:2203"
-    volumes:
-     - /data/honeysap/log:/opt/honeysap/log
-
-# Honeytrap service
-  honeytrap:
-    container_name: honeytrap
-    restart: always
-    tmpfs:
-     - /tmp/honeytrap:uid=2000,gid=2000
-    network_mode: "host"
-    cap_add:
-     - NET_ADMIN
-    image: "dtagdevsec/honeytrap:2203"
-    read_only: true
-    volumes:
-     - /data/honeytrap/attacks:/opt/honeytrap/var/attacks
-     - /data/honeytrap/downloads:/opt/honeytrap/var/downloads
-     - /data/honeytrap/log:/opt/honeytrap/var/log
-
-# Mailoney service
-  mailoney:
-    container_name: mailoney
-    restart: always
-    environment:
-     - HPFEEDS_SERVER=
-     - HPFEEDS_IDENT=user
-     - HPFEEDS_SECRET=pass
-     - HPFEEDS_PORT=20000
-     - HPFEEDS_CHANNELPREFIX=prefix
-    networks:
-     - mailoney_local
-    ports:
-     - "25:25"
-    image: "dtagdevsec/mailoney:2203"
-    read_only: true
-    volumes:
-     - /data/mailoney/log:/opt/mailoney/logs
-
-# Medpot service
-  medpot:
-    container_name: medpot
-    restart: always
-    networks:
-     - medpot_local
-    ports:
-     - "2575:2575"
-    image: "dtagdevsec/medpot:2203"
-    read_only: true
-    volumes:
-     - /data/medpot/log/:/var/log/medpot
-
-# Rdpy service
-  rdpy:
-    container_name: rdpy
-    extra_hosts:
-     - hpfeeds.example.com:127.0.0.1
-    restart: always
-    environment:
-     - HPFEEDS_SERVER=hpfeeds.example.com
-     - HPFEEDS_IDENT=user
-     - HPFEEDS_SECRET=pass
-     - HPFEEDS_PORT=65000
-     - SERVERID=id
-    networks:
-     - rdpy_local
-    ports:
-     - "3389:3389"
-    image: "dtagdevsec/rdpy:2203"
-    read_only: true
-    volumes:
-     - /data/rdpy/log:/var/log/rdpy
-
-#### Snare / Tanner
-## Tanner Redis Service
-  tanner_redis:
-    container_name: tanner_redis
-    restart: always
-    tty: true
-    networks:
-     - tanner_local
-    image: "dtagdevsec/redis:2203"
-    read_only: true
-
-## PHP Sandbox service
-  tanner_phpox:
-    container_name: tanner_phpox
-    restart: always
-    tty: true
-    networks:
-     - tanner_local
-    image: "dtagdevsec/phpox:2203"
-    read_only: true
-
-## Tanner API Service
-  tanner_api:
-    container_name: tanner_api
-    restart: always
-    tmpfs:
-     - /tmp/tanner:uid=2000,gid=2000
-    tty: true
-    networks:
-     - tanner_local
-    image: "dtagdevsec/tanner:2203"
-    read_only: true
-    volumes:
-     - /data/tanner/log:/var/log/tanner
-    command: tannerapi
-    depends_on:
-     - tanner_redis
-
-## Tanner Service
-  tanner:
-    container_name: tanner
-    restart: always
-    tmpfs:
-     - /tmp/tanner:uid=2000,gid=2000
-    tty: true
-    networks:
-     - tanner_local
-    image: "dtagdevsec/tanner:2203"
-    command: tanner
-    read_only: true
-    volumes:
-     - /data/tanner/log:/var/log/tanner
-     - /data/tanner/files:/opt/tanner/files
-    depends_on:
-     - tanner_api
-    # - tanner_web
-     - tanner_phpox
-
-## Snare Service
-  snare:
-    container_name: snare
-    restart: always
-    tty: true
-    networks:
-     - tanner_local
-    ports:
-     - "80:80"
-    image: "dtagdevsec/snare:2203"
-    depends_on:
-     - tanner
-
-
-##################
-#### NSM
-##################
-
-# Fatt service
-  fatt:
-    container_name: fatt
-    restart: always
-    network_mode: "host"
-    cap_add:
-     - NET_ADMIN
-     - SYS_NICE
-     - NET_RAW
-    image: "dtagdevsec/fatt:2203"
-    volumes:
-     - /data/fatt/log:/opt/fatt/log
-
-# P0f service
-  p0f:
-    container_name: p0f
-    restart: always
-    network_mode: "host"
-    image: "dtagdevsec/p0f:2203"
-    read_only: true
-    volumes:
-     - /data/p0f/log:/var/log/p0f
-
-# Suricata service
-  suricata:
-    container_name: suricata
-    restart: always
-    environment:
-    # For ET Pro ruleset replace "OPEN" with your OINKCODE
-     - OINKCODE=OPEN
-    network_mode: "host"
-    cap_add:
-     - NET_ADMIN
-     - SYS_NICE
-     - NET_RAW
-    image: "dtagdevsec/suricata:2203"
-    volumes:
-     - /data/suricata/log:/var/log/suricata
-
-##################
-#### Tools
-##################
-
-# Logstash service
-  logstash:
-    container_name: logstash
-    restart: always
-    networks:
-     - logstash_local
-    # environment:
-    # - LS_JAVA_OPTS=-Xms2048m -Xmx2048m
-    env_file:
-     - /opt/tpot/etc/compose/elk_environment
-    mem_limit: 2g
-    image: "dtagdevsec/logstash:2203"
-    volumes:
-     - /data:/data
-
-# Ewsposter service
-  ewsposter:
-    container_name: ewsposter
-    restart: always
-    networks:
-     - ewsposter_local
-    environment:
-     - EWS_HPFEEDS_ENABLE=false
-     - EWS_HPFEEDS_HOST=host
-     - EWS_HPFEEDS_PORT=port
-     - EWS_HPFEEDS_CHANNELS=channels
-     - EWS_HPFEEDS_IDENT=user
-     - EWS_HPFEEDS_SECRET=secret
-     - EWS_HPFEEDS_TLSCERT=false
-     - EWS_HPFEEDS_FORMAT=json
-    env_file:
-     - /opt/tpot/etc/compose/elk_environment
-    image: "dtagdevsec/ewsposter:2203"
-    volumes:
-     - /data:/data
-     - /data/ews/conf/ews.ip:/opt/ewsposter/ews.ip
@@ -4,13 +4,13 @@ version: '2.3'
 
 networks:
   adbhoney_local:
-  ciscoasa_local:
   citrixhoneypot_local:
   conpot_local_IEC104:
   conpot_local_guardian_ast:
   conpot_local_ipmi:
   conpot_local_kamstrup_382:
   cowrie_local:
+  cyberchef_local:
   dicompot_local:
   dionaea_local:
   elasticpot_local:
@@ -46,10 +46,9 @@ services:
   ciscoasa:
     container_name: ciscoasa
     restart: always
-    networks:
-     - ciscoasa_local
     tmpfs:
      - /tmp/ciscoasa:uid=2000,gid=2000
+    network_mode: "host"
     ports:
      - "5000:5000/udp"
      - "8443:8443"
@@ -393,23 +392,6 @@ services:
     depends_on:
      - tanner_redis
 
-## Tanner WEB Service
-# tanner_web:
-# container_name: tanner_web
-# restart: always
-# tmpfs:
-# - /tmp/tanner:uid=2000,gid=2000
-# tty: true
-# networks:
-# - tanner_local
-# image: "dtagdevsec/tanner:2203"
-# command: tannerweb
-# read_only: true
-# volumes:
-# - /data/tanner/log:/var/log/tanner
-# depends_on:
-# - tanner_redis
-
 ## Tanner Service
   tanner:
     container_name: tanner
@@ -427,7 +409,6 @@ services:
      - /data/tanner/files:/opt/tanner/files
     depends_on:
      - tanner_api
-    # - tanner_web
      - tanner_phpox
 
 ## Snare Service
@@ -478,8 +459,6 @@ services:
     environment:
     # For ET Pro ruleset replace "OPEN" with your OINKCODE
      - OINKCODE=OPEN
-    # Loading externel Rules from URL
-    # - FROMURL="https://username:password@yoururl.com|https://username:password@otherurl.com"
     network_mode: "host"
     cap_add:
      - NET_ADMIN
@@ -494,6 +473,21 @@ services:
 #### Tools
 ##################
 
+# Logstash service
+  logstash:
+    container_name: logstash
+    restart: always
+    networks:
+     - logstash_local
+    # environment:
+    # - LS_JAVA_OPTS=-Xms2048m -Xmx2048m
+    env_file:
+     - /opt/tpot/etc/compose/elk_environment
+    mem_limit: 2g
+    image: "dtagdevsec/logstash:2203"
+    volumes:
+     - /data:/data
+
 # Ewsposter service
   ewsposter:
     container_name: ewsposter
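The Logstash service added in the last hunk pulls MY_TPOT_TYPE, MY_SENSOR_PRIVATEKEYFILE, MY_HIVE_USERNAME and MY_HIVE_IP into the container via env_file /opt/tpot/etc/compose/elk_environment, which the scripts above append to. A small sketch, assuming the container is running under its compose name, to confirm the variables actually reached it:

# on the sensor host
docker exec logstash env | grep '^MY_'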