diff --git a/compose/raspberry_showcase.yml b/compose/raspberry_showcase.yml
deleted file mode 100644
index 1dabc0fe..00000000
--- a/compose/raspberry_showcase.yml
+++ /dev/null
@@ -1,628 +0,0 @@
-# T-Pot: MOBILE
-# Note: This docker compose file has been adjusted to limit the number of tools, services and honeypots to run
-# T-Pot on a Raspberry Pi 4 (8GB of RAM).
-# The standard docker compose file should work mostly fine (depending on traffic) if you do not enable a
-# desktop environment such as LXDE and meet the minimum requirements of 8GB RAM.
-version: '3.9'
-
-networks:
-  ciscoasa_local:
-  citrixhoneypot_local:
-  conpot_local_IEC104:
-  conpot_local_ipmi:
-  conpot_local_kamstrup_382:
-  cowrie_local:
-  dicompot_local:
-  dionaea_local:
-  elasticpot_local:
-  heralding_local:
-  ipphoney_local:
-  log4pot_local:
-  mailoney_local:
-  medpot_local:
-  redishoneypot_local:
-  sentrypeer_local:
-  tanner_local:
-  wordpot_local:
-  ewsposter_local:
-
-services:
-
-#########################################
-#### DEV
-#########################################
-#### T-Pot Init - Never delete this!
-#########################################
-
-# T-Pot Init Service
-  tpotinit:
-    container_name: tpotinit
-    env_file:
-      - .env
-    restart: always
-    stop_grace_period: 60s
-    tmpfs:
-      - /tmp/etc:uid=2000,gid=2000
-      - /tmp/:uid=2000,gid=2000
-    network_mode: "host"
-    cap_add:
-      - NET_ADMIN
-    image: ${TPOT_REPO}/tpotinit:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    volumes:
-      - ${TPOT_DOCKER_COMPOSE}:/tmp/tpot/docker-compose.yml:ro
-      - ${TPOT_DATA_PATH}/blackhole:/etc/blackhole
-      - ${TPOT_DATA_PATH}:/data
-      - /var/run/docker.sock:/var/run/docker.sock:ro
-
-##################
-#### Honeypots
-##################
-
-# Ciscoasa service
-  ciscoasa:
-    container_name: ciscoasa
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tmpfs:
-      - /tmp/ciscoasa:uid=2000,gid=2000
-    networks:
-      - ciscoasa_local
-    ports:
-      - "5000:5000/udp"
-      - "8443:8443"
-    image: ${TPOT_REPO}/ciscoasa:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/ciscoasa/log:/var/log/ciscoasa
-
-# CitrixHoneypot service
-  citrixhoneypot:
-    container_name: citrixhoneypot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - citrixhoneypot_local
-    ports:
-      - "443:443"
-    image: ${TPOT_REPO}/citrixhoneypot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/citrixhoneypot/log:/opt/citrixhoneypot/logs
-
-# Conpot IEC104 service
-  conpot_IEC104:
-    container_name: conpot_iec104
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    environment:
-      - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-      - CONPOT_JSON_LOG=/var/log/conpot/conpot_IEC104.json
-      - CONPOT_LOG=/var/log/conpot/conpot_IEC104.log
-      - CONPOT_TEMPLATE=IEC104
-      - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-      - /tmp/conpot:uid=2000,gid=2000
-    networks:
-      - conpot_local_IEC104
-    ports:
-      - "161:161/udp"
-      - "2404:2404"
-    image: ${TPOT_REPO}/conpot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/conpot/log:/var/log/conpot
-
-# Conpot ipmi
-  conpot_ipmi:
-    container_name: conpot_ipmi
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    environment:
-      - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-      - CONPOT_JSON_LOG=/var/log/conpot/conpot_ipmi.json
-      - CONPOT_LOG=/var/log/conpot/conpot_ipmi.log
-      - CONPOT_TEMPLATE=ipmi
-      - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-      - /tmp/conpot:uid=2000,gid=2000
-    networks:
-      - conpot_local_ipmi
-    ports:
-      - "623:623/udp"
-    image: ${TPOT_REPO}/conpot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/conpot/log:/var/log/conpot
-
-# Conpot kamstrup_382
-  conpot_kamstrup_382:
-    container_name: conpot_kamstrup_382
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    environment:
-      - CONPOT_CONFIG=/etc/conpot/conpot.cfg
-      - CONPOT_JSON_LOG=/var/log/conpot/conpot_kamstrup_382.json
-      - CONPOT_LOG=/var/log/conpot/conpot_kamstrup_382.log
-      - CONPOT_TEMPLATE=kamstrup_382
-      - CONPOT_TMP=/tmp/conpot
-    tmpfs:
-      - /tmp/conpot:uid=2000,gid=2000
-    networks:
-      - conpot_local_kamstrup_382
-    ports:
-      - "1025:1025"
-      - "50100:50100"
-    image: ${TPOT_REPO}/conpot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/conpot/log:/var/log/conpot
-
-# Cowrie service
-  cowrie:
-    container_name: cowrie
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tmpfs:
-      - /tmp/cowrie:uid=2000,gid=2000
-      - /tmp/cowrie/data:uid=2000,gid=2000
-    networks:
-      - cowrie_local
-    ports:
-      - "22:22"
-      - "23:23"
-    image: ${TPOT_REPO}/cowrie:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/cowrie/downloads:/home/cowrie/cowrie/dl
-      - ${TPOT_DATA_PATH}/cowrie/keys:/home/cowrie/cowrie/etc
-      - ${TPOT_DATA_PATH}/cowrie/log:/home/cowrie/cowrie/log
-      - ${TPOT_DATA_PATH}/cowrie/log/tty:/home/cowrie/cowrie/log/tty
-
-# Dicompot service
-# Get the Horos Client for testing: https://horosproject.org/
-# Get Dicom images (CC BY 3.0): https://www.cancerimagingarchive.net/collections/
-# Put images (which must be in Dicom DCM format or it will not work!) into /data/dicompot/images
-  dicompot:
-    container_name: dicompot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - dicompot_local
-    ports:
-      - "11112:11112"
-    image: ${TPOT_REPO}/dicompot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/dicompot/log:/var/log/dicompot
-#     - ${TPOT_DATA_PATH}/dicompot/images:/opt/dicompot/images
-
-# Dionaea service
-  dionaea:
-    container_name: dionaea
-    stdin_open: true
-    tty: true
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - dionaea_local
-    ports:
-      - "20:20"
-      - "21:21"
-      - "42:42"
-      - "69:69/udp"
-      - "81:81"
-      - "135:135"
-      # - "443:443"
-      - "445:445"
-      - "1433:1433"
-      - "1723:1723"
-      - "1883:1883"
-      - "3306:3306"
-      # - "5060:5060"
-      # - "5060:5060/udp"
-      # - "5061:5061"
-      - "27017:27017"
-    image: ${TPOT_REPO}/dionaea:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/dionaea/roots/ftp:/opt/dionaea/var/dionaea/roots/ftp
-      - ${TPOT_DATA_PATH}/dionaea/roots/tftp:/opt/dionaea/var/dionaea/roots/tftp
-      - ${TPOT_DATA_PATH}/dionaea/roots/www:/opt/dionaea/var/dionaea/roots/www
-      - ${TPOT_DATA_PATH}/dionaea/roots/upnp:/opt/dionaea/var/dionaea/roots/upnp
-      - ${TPOT_DATA_PATH}/dionaea:/opt/dionaea/var/dionaea
-      - ${TPOT_DATA_PATH}/dionaea/binaries:/opt/dionaea/var/dionaea/binaries
-      - ${TPOT_DATA_PATH}/dionaea/log:/opt/dionaea/var/log
-      - ${TPOT_DATA_PATH}/dionaea/rtp:/opt/dionaea/var/dionaea/rtp
-
-# ElasticPot service
-  elasticpot:
-    container_name: elasticpot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - elasticpot_local
-    ports:
-      - "9200:9200"
-    image: ${TPOT_REPO}/elasticpot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/elasticpot/log:/opt/elasticpot/log
-
-# Heralding service
-  heralding:
-    container_name: heralding
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tmpfs:
-      - /tmp/heralding:uid=2000,gid=2000
-    networks:
-      - heralding_local
-    ports:
-      # - "21:21"
-      # - "22:22"
-      # - "23:23"
-      # - "25:25"
-      # - "80:80"
-      - "110:110"
-      - "143:143"
-      # - "443:443"
-      - "465:465"
-      - "993:993"
-      - "995:995"
-      # - "3306:3306"
-      # - "3389:3389"
-      - "1080:1080"
-      - "5432:5432"
-      - "5900:5900"
-    image: ${TPOT_REPO}/heralding:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/heralding/log:/var/log/heralding
-
-# Honeytrap service
-  honeytrap:
-    container_name: honeytrap
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tmpfs:
-      - /tmp/honeytrap:uid=2000,gid=2000
-    network_mode: "host"
-    cap_add:
-      - NET_ADMIN
-    image: ${TPOT_REPO}/honeytrap:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/honeytrap/attacks:/opt/honeytrap/var/attacks
-      - ${TPOT_DATA_PATH}/honeytrap/downloads:/opt/honeytrap/var/downloads
-      - ${TPOT_DATA_PATH}/honeytrap/log:/opt/honeytrap/var/log
-
-# Ipphoney service
-  ipphoney:
-    container_name: ipphoney
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - ipphoney_local
-    ports:
-      - "631:631"
-    image: ${TPOT_REPO}/ipphoney:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/ipphoney/log:/opt/ipphoney/log
-
-# Mailoney service
-  mailoney:
-    container_name: mailoney
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    environment:
-      - HPFEEDS_SERVER=
-      - HPFEEDS_IDENT=user
-      - HPFEEDS_SECRET=pass
-      - HPFEEDS_PORT=20000
-      - HPFEEDS_CHANNELPREFIX=prefix
-    networks:
-      - mailoney_local
-    ports:
-      - "25:25"
-      - "587:25"
-    image: ${TPOT_REPO}/mailoney:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/mailoney/log:/opt/mailoney/logs
-
-# Log4pot service
-  log4pot:
-    container_name: log4pot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tmpfs:
-      - /tmp:uid=2000,gid=2000
-    networks:
-      - log4pot_local
-    ports:
-      # - "80:8080"
-      # - "443:8080"
-      - "8080:8080"
-      # - "9200:8080"
-      - "25565:8080"
-    image: ${TPOT_REPO}/log4pot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/log4pot/log:/var/log/log4pot/log
-      - ${TPOT_DATA_PATH}/log4pot/payloads:/var/log/log4pot/payloads
-
-# Medpot service
-  medpot:
-    container_name: medpot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - medpot_local
-    ports:
-      - "2575:2575"
-    image: ${TPOT_REPO}/medpot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/medpot/log/:/var/log/medpot
-
-# Redishoneypot service
-  redishoneypot:
-    container_name: redishoneypot
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - redishoneypot_local
-    ports:
-      - "6379:6379"
-    image: ${TPOT_REPO}/redishoneypot:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/redishoneypot/log:/var/log/redishoneypot
-
-# SentryPeer service
-  sentrypeer:
-    container_name: sentrypeer
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-#    environment:
-#      - SENTRYPEER_PEER_TO_PEER=1
-    networks:
-      - sentrypeer_local
-    ports:
-#      - "4222:4222/udp"
-      - "5060:5060/tcp"
-      - "5060:5060/udp"
-#      - "127.0.0.1:8082:8082"
-    image: ${TPOT_REPO}/sentrypeer:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/sentrypeer/log:/var/log/sentrypeer
-
-#### Snare / Tanner
-## Tanner Redis Service
-  tanner_redis:
-    container_name: tanner_redis
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tty: true
-    networks:
-      - tanner_local
-    image: ${TPOT_REPO}/redis:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-
-## PHP Sandbox service
-  tanner_phpox:
-    container_name: tanner_phpox
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    tty: true
-    networks:
-      - tanner_local
-    image: ${TPOT_REPO}/phpox:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-
-## Tanner API Service
-  tanner_api:
-    container_name: tanner_api
-    restart: always
-    depends_on:
-      - tanner_redis
-    tmpfs:
-      - /tmp/tanner:uid=2000,gid=2000
-    tty: true
-    networks:
-      - tanner_local
-    image: ${TPOT_REPO}/tanner:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/tanner/log:/var/log/tanner
-    command: tannerapi
-
-## Tanner Service
-  tanner:
-    container_name: tanner
-    restart: always
-    depends_on:
-      - tanner_api
-      - tanner_phpox
-    tmpfs:
-      - /tmp/tanner:uid=2000,gid=2000
-    tty: true
-    networks:
-      - tanner_local
-    image: ${TPOT_REPO}/tanner:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    command: tanner
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/tanner/log:/var/log/tanner
-      - ${TPOT_DATA_PATH}/tanner/files:/opt/tanner/files
-
-## Snare Service
-  snare:
-    container_name: snare
-    restart: always
-    depends_on:
-      - tanner
-    tty: true
-    networks:
-      - tanner_local
-    ports:
-      - "80:80"
-    image: ${TPOT_REPO}/snare:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-
-# Wordpot service
-  wordpot:
-    container_name: wordpot
-    restart: always
-    depends_on:
-      tpotinit:
-        condition: service_healthy
-    networks:
-      - wordpot_local
-    ports:
-      - "82:80"
-    image: ${TPOT_REPO}/wordpot:${TPOT_VERSION}
-    read_only: true
-    volumes:
-      - ${TPOT_DATA_PATH}/wordpot/log:/opt/wordpot/logs/
-
-
-##################
-#### Tools
-##################
-
-#### ELK
-## Elasticsearch service
-  elasticsearch:
-    container_name: elasticsearch
-    restart: always
-    depends_on:
-      tpotinit:
-        condition: service_healthy
-    environment:
-      - bootstrap.memory_lock=true
-      - ES_JAVA_OPTS=-Xms2048m -Xmx2048m
-      - ES_TMPDIR=/tmp
-    cap_add:
-      - IPC_LOCK
-    ulimits:
-      memlock:
-        soft: -1
-        hard: -1
-      nofile:
-        soft: 65536
-        hard: 65536
-    mem_limit: 4g
-    ports:
-      - "127.0.0.1:64298:9200"
-    image: ${TPOT_REPO}/elasticsearch:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    volumes:
-      - ${TPOT_DATA_PATH}:/data
-
-## Logstash service
-  logstash:
-    container_name: logstash
-    restart: always
-    depends_on:
-      elasticsearch:
-        condition: service_healthy
-    environment:
-      - LS_JAVA_OPTS=-Xms1024m -Xmx1024m
-      - TPOT_TYPE=${TPOT_TYPE:-HIVE}
-      - TPOT_HIVE_USER=${TPOT_HIVE_USER}
-      - TPOT_HIVE_IP=${TPOT_HIVE_IP}
-    ports:
-      - "127.0.0.1:64305:64305"
-    mem_limit: 2g
-    image: ${TPOT_REPO}/logstash:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    volumes:
-      - ${TPOT_DATA_PATH}:/data
-#### /ELK
-
-# Ewsposter service
-  ewsposter:
-    container_name: ewsposter
-    restart: always
-    depends_on:
-      logstash:
-        condition: service_healthy
-    networks:
-      - ewsposter_local
-    environment:
-      - EWS_HPFEEDS_ENABLE=false
-      - EWS_HPFEEDS_HOST=host
-      - EWS_HPFEEDS_PORT=port
-      - EWS_HPFEEDS_CHANNELS=channels
-      - EWS_HPFEEDS_IDENT=user
-      - EWS_HPFEEDS_SECRET=secret
-      - EWS_HPFEEDS_TLSCERT=false
-      - EWS_HPFEEDS_FORMAT=json
-    image: ${TPOT_REPO}/ewsposter:${TPOT_VERSION}
-    pull_policy: ${TPOT_PULL_POLICY}
-    volumes:
-      - ${TPOT_DATA_PATH}:/data
-      - ${TPOT_DATA_PATH}/ews/conf/ews.ip:/opt/ewsposter/ews.ip
diff --git a/install.sh b/install.sh
index 0c0260da..ae29bdea 100755
--- a/install.sh
+++ b/install.sh
@@ -174,7 +174,7 @@ echo "### Optimized for a distributed installation, without WebUI, El
 echo "### (M)obile - T-Pot Mobile installation."
 echo "### Includes everything to run T-Pot Mobile (available separately)."
 while true; do
-  read -p "### Install Type? (h/s) " myTPOT_TYPE
+  read -p "### Install Type? (h/s/m) " myTPOT_TYPE
   case "${myTPOT_TYPE}" in
     h|H)
       echo