prep for 18.04

Marco Ochse 2018-03-25 18:35:32 +00:00
parent 60cb42b34d
commit c9a33870ff
57 changed files with 2268 additions and 178 deletions

View file

@ -1,9 +1,14 @@
FROM alpine
MAINTAINER MO
# Setup env and apt
RUN apk -U upgrade && \
apk add bash build-base git libffi-dev openssl-dev python3-dev procps && \
apk add bash \
build-base \
git \
libffi-dev \
openssl-dev \
python3-dev \
procps && \
# Setup user
addgroup -g 2000 ciscoasa && \
@ -18,10 +23,16 @@ RUN apk -U upgrade && \
chown -R ciscoasa:ciscoasa /opt/ciscoasa_honeypot && \
# Clean up
apk del build-base git libffi-dev openssl-dev python3-dev && \
apk add libffi openssl python3
apk del build-base \
git \
libffi-dev \
openssl-dev \
python3-dev && \
apk add libffi \
openssl \
python3
# Start ciscoasa
WORKDIR /opt/ciscoasa_honeypot
USER ciscoasa
CMD python3 asa_server.py --enable_ssl --verbose
CMD python3 asa_server.py --enable_ssl --verbose > /var/log/ciscoasa/ciscoasa.log 2>&1

docker/ciscoasa/README.md (new file, 12 lines added)
View file

@ -0,0 +1,12 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/ciscoasa:1804.svg)](https://microbadger.com/images/dtagdevsec/ciscoasa:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/ciscoasa:1804.svg)](https://microbadger.com/images/dtagdevsec/ciscoasa:1804 "Get your own image badge on microbadger.com")
# ciscoasa
[Ciscoasa](https://github.com/cymmetria/ciscoasa_honeypot) is a low interaction honeypot for the Cisco ASA component, capable of detecting CVE-2018-0101, a DoS and remote code execution vulnerability.
This dockerized version is part of the **[T-Pot community honeypot](http://dtag-dev-sec.github.io/)** of Deutsche Telekom AG.
The `Dockerfile` contains the blueprint for the dockerized ciscoasa and will be used to set up the docker image.
The `docker-compose.yml` contains the necessary settings to test ciscoasa using `docker-compose`. This will ensure the docker container is started with the appropriate permissions and port mappings.
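A minimal usage sketch, assuming Docker and `docker-compose` are installed and the commands are run from this directory (service and container name `ciscoasa` as defined in the compose file):

```bash
# Build the image from the local Dockerfile and start the honeypot in the background
docker-compose build
docker-compose up -d ciscoasa

# Follow the output; logs are also persisted to /data/ciscoasa/log on the host
docker logs -f ciscoasa
```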

View file

@ -2,13 +2,21 @@
# For docker-compose ...
version: '2.2'
networks:
ciscoasa_local:
services:
# Wetty service
wetty:
container_name: wetty
# Ciscoasa service
ciscoasa:
build: .
container_name: ciscoasa
restart: always
network_mode: "host"
env_file:
- /opt/tpot/etc/compose/wetty_environment
image: "dtagdevsec/wetty:1710"
networks:
- ciscoasa_local
ports:
- "5000:5000"
- "8443:8443"
image: "dtagdevsec/ciscoasa:1804"
volumes:
- /data/ciscoasa/log:/var/log/ciscoasa

View file

@ -1,5 +1,4 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
@ -12,6 +11,7 @@ RUN apk -U --no-cache add bash \
git \
libev \
libtool \
libcap \
libxslt \
libxslt-dev \
mariadb-dev \
@ -27,16 +27,23 @@ RUN apk -U --no-cache add bash \
# Setup ConPot
git clone https://github.com/mushorg/conpot /opt/conpot/ && \
cd /opt/conpot/ && \
git checkout d157229e4587188ad3d3af5dddcd71200713852d && \
cp /root/dist/requirements.txt /opt/conpot/ && \
# Patch to accept ENV for MIB path
cp /root/dist/snmp_server.py /opt/conpot/conpot/protocols/snmp/ && \
python setup.py install && \
cd / && \
# Monkey patch, see https://github.com/mushorg/conpot/issues/361
cp /root/dist/conpot.bin /usr/bin/conpot && \
chmod u+x /usr/bin/conpot && \
rm -rf /opt/conpot /tmp/* /var/tmp/* && \
setcap cap_net_bind_service=+ep /usr/bin/conpot && \
# Get wireshark manuf db for scapy, setup configs, user, groups
mkdir -p /etc/conpot /var/log/conpot /usr/share/wireshark && \
wget https://github.com/wireshark/wireshark/raw/master/manuf -O /usr/share/wireshark/manuf && \
mv /root/dist/conpot.cfg /etc/conpot/conpot.cfg && \
mv /root/dist/kamstrup_382/template.xml /usr/lib/python2.7/site-packages/Conpot-0.5.1-py2.7.egg/conpot/templates/kamstrup_382/ && \
cp /root/dist/conpot.cfg /etc/conpot/conpot.cfg && \
cp -R /root/dist/templates /usr/lib/python2.7/site-packages/Conpot-0.5.1-py2.7.egg/conpot/ && \
addgroup -g 2000 conpot && \
adduser -S -s /bin/bash -u 2000 -D -g 2000 conpot && \
@ -56,5 +63,5 @@ RUN apk -U --no-cache add bash \
rm -rf /root/* && \
rm -rf /var/cache/apk/*
# Run supervisor upon container start
CMD ["/usr/bin/conpot", "--template", "kamstrup_382", "--logfile", "/var/log/conpot/conpot.log", "--config", "/etc/conpot/conpot.cfg"]
# Start conpot
CMD /usr/bin/conpot --template $CONPOT_TEMPLATE --logfile $CONPOT_LOG --config $CONPOT_CONFIG
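The CMD is now parameterized entirely through environment variables, so the same image can serve any template. A hedged sketch of running it without docker-compose, mirroring the variables the compose file (further below) sets for the default template; port mappings and the read-only hardening from the compose file are omitted for brevity:

```bash
# Sketch only: variable values mirror the conpot_default service in docker-compose.yml
docker run -d --name conpot_default \
  --tmpfs /tmp/conpot:exec \
  -e CONPOT_CONFIG=/etc/conpot/conpot.cfg \
  -e CONPOT_JSON_LOG=/var/log/conpot/conpot_default.json \
  -e CONPOT_LOG=/var/log/conpot/conpot_default.log \
  -e CONPOT_TEMPLATE=default \
  -e CONPOT_TMP=/tmp/conpot \
  -e PYTHON_EGG_CACHE=/tmp/conpot \
  -v /data/conpot/log:/var/log/conpot \
  dtagdevsec/conpot:1804
```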

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/conpot:1710.svg)](https://microbadger.com/images/dtagdevsec/conpot:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/conpot:1710.svg)](https://microbadger.com/images/dtagdevsec/conpot:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/conpot:1804.svg)](https://microbadger.com/images/dtagdevsec/conpot:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/conpot:1804.svg)](https://microbadger.com/images/dtagdevsec/conpot:1804 "Get your own image badge on microbadger.com")
# conpot

docker/conpot/dist/conpot.bin (vendored, new file, 5 lines added)
View file

@ -0,0 +1,5 @@
#!/usr/bin/python
# EASY-INSTALL-SCRIPT: 'Conpot==0.5.1','conpot'
__import__('gevent.monkey').monkey.patch_all()
__requires__ = 'Conpot==0.5.1'
__import__('pkg_resources').run_script('Conpot==0.5.1', 'conpot')

View file

@ -10,7 +10,7 @@ group = conpot
[json]
enabled = True
filename = /var/log/conpot/conpot.json
filename = %(CONPOT_JSON_LOG)s
[sqlite]
enabled = False

View file

@ -1,26 +1,25 @@
gevent>=1.0
pysnmp
pysmi
lxml
bottle
jinja2
beautifulsoup4
requests
sphinx==1.5.5
libtaxii>=1.1.0
MySQL-python
xlrd
crc16
natsort
scapy
enum34
hpfeeds
modbus-tk
stix-validator
stix
cybox
bacpypes==0.16.1
pyghmi
mixbox
modbus-tk
cpppo
beautifulsoup4==4.6.0
bottle==0.12.13
cpppo==3.9.7
crc16==0.1.1
cybox==2.1.0.13
enum34==1.1.6
gevent==1.3a1
hpfeeds==1.0
jinja2==2.10
libtaxii==1.1.110
lxml==4.1.1
mixbox==1.0.2
modbus-tk==0.5.8
MySQL-python==1.2.5
natsort==5.2.0
pyghmi==1.0.38
pysmi==0.2.2
pysnmp==4.4.4
requests==2.18.4
scapy==2.4.0rc4
sphinx==1.5.5
stix==1.2.0.2
stix-validator==2.5.0
xlrd==1.1.0

docker/conpot/dist/snmp_server.py (vendored, new file, 195 lines added)
View file

@ -0,0 +1,195 @@
# Copyright (C) 2013 Lukas Rist <glaslos@gmail.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import logging
import tempfile
import shutil
import os
from lxml import etree
from conpot.protocols.snmp.command_responder import CommandResponder
from conpot.protocols.snmp.build_pysnmp_mib_wrapper import find_mibs, compile_mib
import conpot.core as conpot_core
logger = logging.getLogger()
class SNMPServer(object):
def __init__(self, template, template_directory, args):
"""
:param host: hostname or ip address on which to serve the snmp service (string).
:param port: listen port (integer).
:param template: path to the protocol specific xml configuration file (string).
"""
self.dom = etree.parse(template)
self.cmd_responder = None
if args.mibpaths:
self.compiled_mibs = args.mibpaths
else:
self.compiled_mibs = [os.path.join(template_directory, 'snmp', 'mibs_compiled')]
if args.raw_mib:
self.raw_mibs = args.raw_mib
else:
self.raw_mibs = [os.path.join(template_directory, 'snmp', 'mibs_raw')]
def xml_general_config(self, dom):
snmp_config = dom.xpath('//snmp/config/*')
if snmp_config:
for entity in snmp_config:
# TARPIT: individual response delays
if entity.attrib['name'].lower() == 'tarpit':
if entity.attrib['command'].lower() == 'get':
self.cmd_responder.resp_app_get.tarpit = self.config_sanitize_tarpit(entity.text)
elif entity.attrib['command'].lower() == 'set':
self.cmd_responder.resp_app_set.tarpit = self.config_sanitize_tarpit(entity.text)
elif entity.attrib['command'].lower() == 'next':
self.cmd_responder.resp_app_next.tarpit = self.config_sanitize_tarpit(entity.text)
elif entity.attrib['command'].lower() == 'bulk':
self.cmd_responder.resp_app_bulk.tarpit = self.config_sanitize_tarpit(entity.text)
# EVASION: response thresholds
if entity.attrib['name'].lower() == 'evasion':
if entity.attrib['command'].lower() == 'get':
self.cmd_responder.resp_app_get.threshold = self.config_sanitize_threshold(entity.text)
elif entity.attrib['command'].lower() == 'set':
self.cmd_responder.resp_app_set.threshold = self.config_sanitize_threshold(entity.text)
elif entity.attrib['command'].lower() == 'next':
self.cmd_responder.resp_app_next.threshold = self.config_sanitize_threshold(entity.text)
elif entity.attrib['command'].lower() == 'bulk':
self.cmd_responder.resp_app_bulk.threshold = self.config_sanitize_threshold(entity.text)
def xml_mib_config(self, dom, mibpaths, rawmibs_dirs):
try:
mibs = dom.xpath('//snmp/mibs/*')
tmp_mib_dir = tempfile.mkdtemp(dir=os.environ['CONPOT_TMP'])
mibpaths.append(tmp_mib_dir)
available_mibs = find_mibs(rawmibs_dirs)
databus = conpot_core.get_databus()
# parse mibs and oid tables
for mib in mibs:
mib_name = mib.attrib['name']
# compile the mib file if it is found and not already loaded.
if mib_name in available_mibs and not self.cmd_responder.has_mib(mib_name):
compile_mib(mib_name, tmp_mib_dir)
for symbol in mib:
symbol_name = symbol.attrib['name']
# retrieve instance from template
if 'instance' in symbol.attrib:
# convert instance to (int-)tuple
symbol_instance = symbol.attrib['instance'].split('.')
symbol_instance = tuple(map(int, symbol_instance))
else:
# use default instance (0)
symbol_instance = (0,)
# retrieve value from databus
value = databus.get_value(symbol.xpath('./value/text()')[0])
profile_map_name = symbol.xpath('./value/text()')[0]
# register this MIB instance to the command responder
self.cmd_responder.register(mib_name,
symbol_name,
symbol_instance,
value,
profile_map_name)
finally:
# cleanup compiled mib files
shutil.rmtree(tmp_mib_dir)
def config_sanitize_tarpit(self, value):
# checks tarpit value for being either a single int or float,
# or a series of two concatenated integers and/or floats separated by semicolon and returns
# either the (sanitized) value or zero.
if value is not None:
x, _, y = value.partition(';')
try:
_ = float(x)
except ValueError:
logger.error("SNMP invalid tarpit value: '%s'. Assuming no latency.", value)
# first value is invalid, ignore the whole setting.
return '0;0'
try:
_ = float(y)
# both values are fine.
return value
except ValueError:
# second value is invalid, use the first one.
return x
else:
return '0;0'
def config_sanitize_threshold(self, value):
# checks DoS thresholds for being either a single int or a series of two concatenated integers
# separated by semicolon and returns either the (sanitized) value or zero.
if value is not None:
x, _, y = value.partition(';')
try:
_ = int(x)
except ValueError:
logger.error("SNMP invalid evasion threshold: '%s'. Assuming no DoS evasion.", value)
# first value is invalid, ignore the whole setting.
return '0;0'
try:
_ = int(y)
# both values are fine.
return value
except ValueError:
# second value is invalid, use the first and ignore the second.
return str(x) + ';0'
else:
return '0;0'
def start(self, host, port):
self.cmd_responder = CommandResponder(host, port, self.compiled_mibs)
self.xml_general_config(self.dom)
self.xml_mib_config(self.dom, self.compiled_mibs, self.raw_mibs)
logger.info('SNMP server started on: %s', (host, self.get_port()))
self.cmd_responder.serve_forever()
def stop(self):
if self.cmd_responder:
self.cmd_responder.stop()
def get_port(self):
if self.cmd_responder:
return self.cmd_responder.server_port
else:
return None

View file

@ -0,0 +1,675 @@
<!-- Copyright (C) 2017 Patrick Reichenberger (University of Passau) <patrick.reichenberger@t-online.de>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-->
<core>
<template>
<!-- General information about the template -->
<entity name="unit">S7-300</entity>
<entity name="vendor">Siemens</entity>
<entity name="description">Creates a simple device for IEC 60870-5-104</entity>
<entity name="protocols">IEC104, SNMP</entity>
<entity name="creator">Patrick Reichenberger</entity>
</template>
<databus>
<!-- Core value that can be retrieved from the databus by key -->
<key_value_mappings>
<!-- SNMPv2-MIB -->
<key name="SystemDescription">
<value type="value">"Siemens, SIMATIC, S7-300"</value>
</key>
<key name="sysObjectID">
<value type="value">"0.0"</value>
</key>
<key name="Uptime">
<value type="function">conpot.emulators.misc.uptime.Uptime</value>
</key>
<key name="sysContact">
<value type="value">"Corporate IT"</value>
</key>
<key name="sysName">
<value type="value">"DE-BER01"</value>
</key>
<key name="sysLocation">
<value type="value">"BER01, T2E"</value>
</key>
<key name="sysServices">
<value type="value">"72"</value>
</key>
<!-- IF-MIB -->
<key name="ifNumber">
<value type="value">1</value>
</key>
<key name="ifIndex">
<value type="value">1</value>
</key>
<key name="ifDescr">
<value type="value">"Siemens, SIMATIC NET, CP 343-1 PN, 6GK7 343-1EX21-0XE0, HW: Version 2, FW: Version V1.2.3, Ethernet Port 1, Rack 0, 100Mbit"</value>
</key>
<key name="ifType">
<value type="value">6</value>
</key>
<key name="ifMtu">
<value type="value">1000</value>
</key>
<key name="ifSpeed">
<value type="value">100000000</value>
</key>
<key name="ifPhysAddress">
<value type="value">"\x00\x0e\x8c\x29\xc5\x1a"</value>
</key>
<key name="ifAdminStatus">
<value type="value">1</value>
</key>
<key name="ifOperStatus">
<value type="value">1</value>
</key>
<key name="ifLastChange">
<value type="function">conpot.emulators.misc.uptime.Uptime</value>
</key>
<key name="FacilityName">
<value type="value">"Compagnie Generale des Eaux"</value>
</key>
<key name="0">
<value type="value">0</value>
</key>
<key name="1">
<value type="value">1</value>
</key>
<key name="ifInOctets">
<value type="value">1618895</value>
</key>
<key name="ifInUcastPkts">
<value type="value">7018</value>
</key>
<key name="ifInNUcastPkts">
<value type="value">291</value>
</key>
<key name="ifOutOctets">
<value type="value">455107</value>
</key>
<key name="ifOutUcastPkts">
<value type="value">872264</value>
</key>
<key name="ifOutUNcastPkts">
<value type="value">143</value>
</key>
<!-- IP-MIB -->
<key name="ipForwarding">
<value type="value">2</value>
</key>
<key name="ipDefaultTTL">
<value type="value">60</value>
</key>
<key name="ipInReceives">
<value type="value">31271</value>
</key>
<key name="ipInHdrErrors">
<value type="value">0</value>
</key>
<key name="ipInAddrErrors">
<value type="value">0</value>
</key>
<key name="ipForwDatagrams">
<value type="value">0</value>
</key>
<key name="ipInUnknownProtos">
<value type="value">0</value>
</key>
<key name="ipInDiscards">
<value type="value">0</value>
</key>
<key name="ipInDelivers">
<value type="value">31282</value>
</key>
<key name="ipOutRequests">
<value type="value">69023</value>
</key>
<key name="ipOutDiscards">
<value type="value">0</value>
</key>
<key name="ipOutNoRoutes">
<value type="value">0</value>
</key>
<key name="ipReasmTimeout">
<value type="value">60</value>
</key>
<key name="ipReasmReqds">
<value type="value">7</value>
</key>
<key name="ipReasmOKs">
<value type="value">3</value>
</key>
<key name="ipReasmFails">
<value type="value">0</value>
</key>
<key name="ipFragOKs">
<value type="value">0</value>
</key>
<key name="ipFragFails">
<value type="value">0</value>
</key>
<key name="ipFragCreates">
<value type="value">0</value>
</key>
<key name="ipAdEntAddr">
<value type="value">"217.172.190.137"</value>
</key>
<key name="ipAdEntIfIndex">
<value type="value">1</value>
</key>
<key name="ipAdEntNetMask">
<value type="value">"255.255.255.255"</value>
</key>
<key name="ipAdEntBcastAddr">
<value type="value">1</value>
</key>
<key name="ipAdEntReasmMaxSize">
<value type="value">65528</value>
</key>
<key name="ipRoutingDiscards">
<value type="value">0</value>
</key>
<key name="icmpInMsgs">
<value type="value">4</value>
</key>
<key name="icmpInErrors">
<value type="value">0</value>
</key>
<key name="icmpInDestUnreachs">
<value type="value">1</value>
</key>
<key name="icmpInTimeExcds">
<value type="value">0</value>
</key>
<key name="icmpInParmProbs">
<value type="value">0</value>
</key>
<key name="icmpInSrcQuenchs">
<value type="value">0</value>
</key>
<key name="icmpInRedirects">
<value type="value">0</value>
</key>
<key name="icmpInEchos">
<value type="value">0</value>
</key>
<key name="icmpInEchoReps">
<value type="value">0</value>
</key>
<key name="icmpInTimestamps">
<value type="value">0</value>
</key>
<key name="icmpInTimestampReps">
<value type="value">0</value>
</key>
<key name="icmpInAddrMasks">
<value type="value">0</value>
</key>
<key name="icmpInAddrMaskReps">
<value type="value">0</value>
</key>
<key name="icmpOutMsgs">
<value type="value">0</value>
</key>
<key name="icmpOutErrors">
<value type="value">0</value>
</key>
<key name="icmpOutDestUnreachs">
<value type="value">144</value>
</key>
<key name="icmpOutTimeExcds">
<value type="value">0</value>
</key>
<key name="icmpOutParmProbs">
<value type="value">0</value>
</key>
<key name="icmpOutSrcQuenchs">
<value type="value">0</value>
</key>
<key name="icmpOutRedirects">
<value type="value">0</value>
</key>
<key name="icmpOutEchos">
<value type="value">0</value>
</key>
<key name="icmpOutEchoReps">
<value type="value">0</value>
</key>
<key name="icmpOutTimestamps">
<value type="value">0</value>
</key>
<key name="icmpOutTimestampReps">
<value type="value">0</value>
</key>
<key name="icmpOutAddrMasks">
<value type="value">0</value>
</key>
<key name="icmpOutAddrMaskReps">
<value type="value">0</value>
</key>
<!-- TCP-MIB -->
<key name="tcpRtoAlgorithm">
<value type="value">2</value>
</key>
<key name="tcpRtoMin">
<value type="value">0</value>
</key>
<key name="tcpRtoMax">
<value type="value">100</value>
</key>
<key name="tcpMaxConn">
<value type="value">-1</value>
</key>
<key name="tcpActiveOpens">
<value type="value">0</value>
</key>
<key name="tcpPassiveOpens">
<value type="value">101</value>
</key>
<key name="tcpAttemptFails">
<value type="value">42</value>
</key>
<key name="tcpEstabResets">
<value type="value">45</value>
</key>
<key name="tcpCurrEstab">
<value type="value">0</value>
</key>
<key name="tcpInSegs">
<value type="value">30321</value>
</key>
<key name="tcpOutSegs">
<value type="value">67821</value>
</key>
<key name="tcpRetransSegs">
<value type="value">2511</value>
</key>
<key name="tcpConnState">
<value type="value">2</value>
</key>
<key name="tcpConnLocalAddress">
<value type="value">"217.172.190.137"</value>
</key>
<key name="tcpConnLocalPort">
<value type="value">2404</value>
</key>
<key name="tcpConnRemAddress">
<value type="value">"0.0.0.0"</value>
</key>
<key name="tcpConnRemPort">
<value type="value">0</value>
</key>
<key name="tcpInErrs">
<value type="value">1</value>
</key>
<key name="tcpOutRsts">
<value type="value">728</value>
</key>
<!-- UDP-MIB -->
<key name="udpInDatagrams">
<value type="value">1441</value>
</key>
<key name="udpNoPorts">
<value type="value">1280</value>
</key>
<key name="udpInErrors">
<value type="value">23</value>
</key>
<key name="udpOutDatagrams">
<value type="value">47</value>
</key>
<key name="udpLocalAddress">
<value type="value">"217.172.190.137"</value>
</key>
<key name="udpLocalPort">
<value type="value">161</value>
</key>
<key name="SystemName">
<value type="value">"CP 343-1 IT"</value>
</key>
<!-- IEC104 Protocol parameter -->
<!-- Timeout of connection establishment -->
<key name="T_0">
<value type="value">30</value>
</key>
<!-- Timeout of send or test APDUs (Wartezeit auf Quittung) -->
<key name="T_1">
<value type="value">15</value>
</key>
<!-- Timeout for acknowledges in case of no data messages T_2 < T_1 (Quittieren nach x sek) -->
<key name="T_2">
<value type="value">10</value>
</key>
<!-- Timeout for sending test frames in case of a long idle state -->
<key name="T_3">
<value type="value">20</value>
</key>
<!-- Maximum difference receive sequence number to send state variable (Max. Anzahl unquittierter Telegramme) -->
<!-- not implemented yet -->
<key name="k">
<value type="value">12</value>
</key>
<!-- Latest acknowledge after receiving w I-format APDUs (Quittieren nach w Telegrammen) -->
<key name="w">
<value type="value">8</value>
</key>
<!-- Maximum frame size (in bytes) -->
<key name="MaxFrameSize">
<value type="value">254</value>
</key>
<!-- Devices -->
<!-- 13- -->
<key name="13_20">
<value type="value">1</value>
</key>
<key name="13_21">
<value type="value">0</value>
</key>
<key name="13_22">
<value type="value">0</value>
</key>
<key name="13_24">
<value type="value">1</value>
</key>
<key name="13_25">
<value type="value">1</value>
</key>
<key name="13_32">
<value type="value">1</value>
</key>
<key name="13_33">
<value type="value">1</value>
</key>
<key name="13_34">
<value type="value">1</value>
</key>
<key name="13_35">
<value type="value">1</value>
</key>
<key name="13_36">
<value type="value">1</value>
</key>
<key name="13_37">
<value type="value">1</value>
</key>
<key name="13_38">
<value type="value">1</value>
</key>
<key name="13_39">
<value type="value">1</value>
</key>
<key name="13_40">
<value type="value">0</value>
</key>
<key name="13_41">
<value type="value">1</value>
</key>
<key name="13_42">
<value type="value">0</value>
</key>
<!-- 22- -->
<key name="22_19">
<value type="value">1</value>
</key>
<key name="22_20">
<value type="value">1</value>
</key>
<key name="22_21">
<value type="value">0</value>
</key>
<key name="22_22">
<value type="value">0</value>
</key>
<key name="22_24">
<value type="value">1</value>
</key>
<key name="22_25">
<value type="value">1</value>
</key>
<key name="22_42">
<value type="value">1</value>
</key>
<key name="22_43">
<value type="value">1</value>
</key>
<key name="22_54">
<value type="value">1</value>
</key>
<!-- 33- -->
<key name="33_2">
<value type="value">1</value>
</key>
<key name="33_3">
<value type="value">2</value>
</key>
<key name="33_4">
<value type="value">1</value>
</key>
<key name="33_5">
<value type="value">2</value>
</key>
<key name="33_6">
<value type="value">2</value>
</key>
<key name="33_7">
<value type="value">1</value>
</key>
<key name="33_8">
<value type="value">1</value>
</key>
<key name="33_9">
<value type="value">1</value>
</key>
<key name="33_10">
<value type="value">1</value>
</key>
<key name="33_11">
<value type="value">1</value>
</key>
<!-- 60- -->
<key name="60_6">
<value type="value">2</value>
</key>
<key name="60_7">
<value type="value">1</value>
</key>
<key name="60_8">
<value type="value">1</value>
</key>
<key name="60_9">
<value type="value">1</value>
</key>
<key name="60_20">
<value type="value">1</value>
</key>
<key name="60_21">
<value type="value">1</value>
</key>
<key name="60_32">
<value type="value">1</value>
</key>
<key name="60_34">
<value type="value">1</value>
</key>
<key name="60_35">
<value type="value">1</value>
</key>
<key name="60_36">
<value type="value">1</value>
</key>
<!-- 100- -->
<key name="100_12">
<value type="value">103</value>
</key>
<key name="100_13">
<value type="value">31</value>
</key>
<key name="100_51">
<value type="value">-49</value>
</key>
<key name="100_108">
<value type="value">28871</value>
</key>
<key name="100_109">
<value type="value">13781</value>
</key>
<key name="100_178">
<value type="value">119</value>
</key>
<key name="100_179">
<value type="value">219</value>
</key>
<key name="100_190">
<value type="value">1009</value>
</key>
<key name="100_191">
<value type="value">-2</value>
</key>
<key name="100_192">
<value type="value">701</value>
</key>
<key name="100_193">
<value type="value">441</value>
</key>
<!-- 101- -->
<key name="101_63">
<value type="value">103</value>
</key>
<key name="101_205">
<value type="value">31</value>
</key>
<key name="101_100">
<value type="value">5</value>
</key>
<key name="101_101">
<value type="value">49</value>
</key>
<key name="101_102">
<value type="value">119</value>
</key>
<key name="101_105">
<value type="value">500</value>
</key>
<key name="101_106">
<value type="value">1</value>
</key>
<!-- 107- -->
<key name="107_3">
<value type="value">16.2</value>
</key>
<key name="107_77">
<value type="value">15.9</value>
</key>
<key name="107_78">
<value type="value">512.1</value>
</key>
<key name="107_79">
<value type="value">433.4</value>
</key>
<key name="107_90">
<value type="value">344.4</value>
</key>
<key name="107_130">
<value type="value">-0.44013</value>
</key>
<key name="107_131">
<value type="value">43.0</value>
</key>
<key name="107_132">
<value type="value">41.2</value>
</key>
<key name="107_141">
<value type="value">12.1</value>
</key>
<key name="107_200">
<value type="value">91</value>
</key>
<key name="107_201">
<value type="value">98.8</value>
</key>
<key name="107_202">
<value type="value">110</value>
</key>
<key name="107_203">
<value type="value">85.1</value>
</key>
<key name="107_204">
<value type="value">85.2</value>
</key>
<key name="107_205">
<value type="value">410</value>
</key>
<key name="107_206">
<value type="value">592</value>
</key>
<key name="107_207">
<value type="value">1.5</value>
</key>
<key name="107_208">
<value type="value">44.7</value>
</key>
<key name="107_209">
<value type="value">11.9</value>
</key>
<key name="107_210">
<value type="value">221.45</value>
</key>
<key name="107_211">
<value type="value">13.4</value>
</key>
<key name="107_212">
<value type="value">0.000402</value>
</key>
<!-- 109- -->
<key name="109_3">
<value type="value">16.2</value>
</key>
<key name="109_7">
<value type="value">15.9</value>
</key>
<key name="109_8">
<value type="value">880</value>
</key>
<key name="109_10">
<value type="value">344.4</value>
</key>
<key name="109_40">
<value type="value">41.2</value>
</key>
<key name="109_41">
<value type="value">12.1</value>
</key>
<key name="empty">
<value type="value">""</value>
</key>
</key_value_mappings>
</databus>
</core>

View file

@ -0,0 +1,78 @@
<core>
<template>
<!-- General information about the template -->
<entity name="unit">S7-200</entity>
<entity name="vendor">Siemens</entity>
<entity name="description">Rough simulation of a basic Siemens S7-200 CPU with 2 slaves</entity>
<entity name="protocols">HTTP, MODBUS, s7comm, SNMP</entity>
<entity name="creator">the conpot team</entity>
</template>
<databus>
<!-- Core value that can be retrieved from the databus by key -->
<key_value_mappings>
<key name="FacilityName">
<value type="value">"DoE Water Service"</value>
</key>
<key name="SystemName">
<value type="value">"Central Pump"</value>
</key>
<key name="SystemDescription">
<value type="value">"Pump Control Unit"</value>
</key>
<key name="Uptime">
<value type="function">conpot.emulators.misc.uptime.Uptime</value>
</key>
<key name="sysObjectID">
<value type="value">"0.0"</value>
</key>
<key name="sysContact">
<value type="value">"DoE"</value>
</key>
<key name="sysName">
<value type="value">"Pump Control Unit"</value>
</key>
<key name="sysLocation">
<value type="value">"DoE"</value>
</key>
<key name="sysServices">
<value type="value">"72"</value>
</key>
<key name="memoryModbusSlave0BlockA">
<value type="value">[random.randint(0,1) for b in range(0,128)]</value>
</key>
<key name="memoryModbusSlave0BlockB">
<value type="value">[random.randint(0,1) for b in range(0,32)]</value>
</key>
<key name="memoryModbusSlave255BlockA">
<value type="value">[random.randint(0,1) for b in range(0,128)]</value>
</key>
<key name="memoryModbusSlave255BlockB">
<value type="value">[random.randint(0,1) for b in range(0,32)]</value>
</key>
<key name="memoryModbusSlave1BlockA">
<value type="value">[random.randint(0,1) for b in range(0,128)]</value>
</key>
<key name="memoryModbusSlave1BlockB">
<value type="value">[random.randint(0,1) for b in range(0,32)]</value>
</key>
<key name="memoryModbusSlave2BlockC">
<value type="value">[random.randint(0,1) for b in range(0,8)]</value>
</key>
<key name="memoryModbusSlave2BlockD">
<value type="value">[0 for b in range(0,32)]</value>
</key>
<key name="Copyright">
<value type="value">"Original Siemens Equipment"</value>
</key>
<key name="s7_id">
<value type="value">"88111222"</value>
</key>
<key name="s7_module_type">
<value type="value">"IM151-8 PN/DP CPU"</value>
</key>
<key name="empty">
<value type="value">""</value>
</key>
</key_value_mappings>
</databus>
</core>

View file

@ -0,0 +1,93 @@
<core>
<template>
<!-- General information about the template -->
<entity name="unit">Guardian AST tank-monitoring system</entity>
<entity name="vendor">Guardian</entity>
<entity name="description">Guardian AST tank-monitoring system</entity>
<entity name="protocols">guardian_ast</entity>
<entity name="creator">the conpot team</entity>
</template>
<databus>
<!-- Core value that can be retrieved from the databus by key -->
<key_value_mappings>
<key name="product1">
<value type="value">"SUPER"</value>
</key>
<key name="product2">
<value type="value">"UNLEAD"</value>
</key>
<key name="product3">
<value type="value">"DIESEL"</value>
</key>
<key name="product4">
<value type="value">"ADBLUE"</value>
</key>
<key name="station_name">
<value type="value">"AVIA"</value>
</key>
<key name="vol1">
<value type="value">random.randint(1000, 9050)</value>
</key>
<key name="vol2">
<value type="value">random.randint(1000, 9050)</value>
</key>
<key name="vol3">
<value type="value">random.randint(1000, 9050)</value>
</key>
<key name="vol4">
<value type="value">random.randint(1000, 9050)</value>
</key>
<key name="ullage1">
<value type="value">random.randint(3000, 9999)</value>
</key>
<key name="ullage2">
<value type="value">random.randint(3000, 9999)</value>
</key>
<key name="ullage3">
<value type="value">random.randint(3000, 9999)</value>
</key>
<key name="ullage4">
<value type="value">random.randint(3000, 9999)</value>
</key>
<key name="height1">
<value type="value">round(random.uniform(25.00, 75.99), 2)</value>
</key>
<key name="height2">
<value type="value">round(random.uniform(25.00, 75.99), 2)</value>
</key>
<key name="height3">
<value type="value">round(random.uniform(25.00, 75.99), 2)</value>
</key>
<key name="height4">
<value type="value">round(random.uniform(25.00, 75.99), 2)</value>
</key>
<key name="h2o1">
<value type="value">round(random.uniform(0.0, 9.99), 2)</value>
</key>
<key name="h2o2">
<value type="value">round(random.uniform(0.0, 9.99), 2)</value>
</key>
<key name="h2o3">
<value type="value">round(random.uniform(0.0, 9.99), 2)</value>
</key>
<key name="h2o4">
<value type="value">round(random.uniform(0.0, 9.99), 2)</value>
</key>
<key name="temp1">
<value type="value">round(random.uniform(50.0, 59.99), 2)</value>
</key>
<key name="temp2">
<value type="value">round(random.uniform(50.0, 59.99), 2)</value>
</key>
<key name="temp3">
<value type="value">round(random.uniform(50.0, 59.99), 2)</value>
</key>
<key name="temp4">
<value type="value">round(random.uniform(50.0, 59.99), 2)</value>
</key>
<key name="empty">
<value type="value">""</value>
</key>
</key_value_mappings>
</databus>
</core>

View file

@ -0,0 +1,18 @@
<core>
<template>
<!-- General information about the template -->
<entity name="unit">371</entity>
<entity name="vendor">IPMI</entity>
<entity name="description">Creates a simple IPMI device</entity>
<entity name="protocols">IPMI</entity>
<entity name="creator">Lukas Rist</entity>
</template>
<databus>
<!-- Core value that can be retrieved from the databus by key -->
<key_value_mappings>
<key name="SystemName">
<value type="value">"DoE"</value>
</key>
</key_value_mappings>
</databus>
</core>

View file

@ -413,43 +413,43 @@
<value type="value">''</value>
</key>
<key name="nameserver_1">
<value type="value">'192.168.254.111'</value>
<value type="value">'0.0.0.0'</value>
</key>
<key name="nameserver_2">
<value type="value">'192.168.254.112'</value>
<value type="value">'0.0.0.0'</value>
</key>
<key name="nameserver_3">
<value type="value">'0.0.0.0'</value>
</key>
<key name="mac_address">
<value type="value">'00:13:EA:00:72:FA'</value>
<value type="value">'00:13:EA:00:00:00'</value>
</key>
<key name="use_dhcp">
<value type="value">'YES'</value>
</key>
<key name="ip_addr">
<value type="value">'192.168.201.101'</value>
<value type="value">'192.168.1.210'</value>
</key>
<key name="ip_gateway">
<value type="value">'192.168.201.254'</value>
<value type="value">'192.168.1.1'</value>
</key>
<key name="ip_subnet">
<value type="value">'255.255.255.0'</value>
</key>
<key name="ip_addr_dhcp">
<value type="value">'192.168.200.1'</value>
<value type="value">'192.168.0.1'</value>
</key>
<key name="ip_gateway_dhcp">
<value type="value">'192.168.200.254'</value>
<value type="value">'192.168.0.254'</value>
</key>
<key name="ip_subnet_dhcp">
<value type="value">'255.255.255.0'</value>
</key>
<key name="kap_a_server_hostname">
<value type="value">'de_fra_lxg00.local.dom'</value>
<value type="value">'kapserver.evilpowerprovider.org'</value>
</key>
<key name="kap_a_server_ip">
<value type="value">'192.168.254.201'</value>
<value type="value">'202.202.202.1'</value>
</key>
<key name="kap_a_server_port">
<value type="value">'50'</value>
@ -513,4 +513,4 @@
</key>
</key_value_mappings>
</databus>
</core>
</core>

View file

@ -0,0 +1,15 @@
<core>
<template>
<!-- General information about the template -->
<entity name="unit">Proxy</entity>
<entity name="vendor">None</entity>
<entity name="description">Sample template that demonstrates the proxy feature.</entity>
<entity name="protocols">Proxy</entity>
<entity name="creator">the conpot team</entity>
</template>
<databus>
<!-- Core value that can be retrieved from the databus by key -->
<key_value_mappings>
</key_value_mappings>
</databus>
</core>

View file

@ -1,19 +1,139 @@
version: '2.1'
# CONPOT TEMPLATE=[default, IEC104, guardian_ast, ipmi, kamstrup_382, proxy]
version: '2.2'
networks:
conpot_local:
conpot_local_default:
conpot_local_IEC104:
conpot_local_guardian_ast:
conpot_local_ipmi:
conpot_local_kamstrup_382:
services:
# Conpot service
conpot:
container_name: conpot
# Conpot default service
conpot_default:
build: .
container_name: conpot_default
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_default.json
- CONPOT_LOG=/var/log/conpot/conpot_default.log
- CONPOT_TEMPLATE=default
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
# - /var/run/conpot/
networks:
- conpot_local
- conpot_local_default
ports:
- "80:80"
- "102:102"
- "161:161"
- "502:502"
# - "623:623"
- "44818:44818"
- "47808:47808"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot IEC104 service
conpot_IEC104:
build: .
container_name: conpot_IEC104
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_IEC104.json
- CONPOT_LOG=/var/log/conpot/conpot_IEC104.log
- CONPOT_TEMPLATE=IEC104
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_IEC104
ports:
# - "161:161"
- "2404:2404"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot guardian_ast service
conpot_guardian_ast:
build: .
container_name: conpot_guardian_ast
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_guardian_ast.json
- CONPOT_LOG=/var/log/conpot/conpot_guardian_ast.log
- CONPOT_TEMPLATE=guardian_ast
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_guardian_ast
ports:
- "10001:10001"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot ipmi
conpot_ipmi:
build: .
container_name: conpot_ipmi
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_ipmi.json
- CONPOT_LOG=/var/log/conpot/conpot_ipmi.log
- CONPOT_TEMPLATE=ipmi
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_ipmi
ports:
- "623:623"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot kamstrup_382
conpot_kamstrup_382:
build: .
container_name: conpot_kamstrup_382
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_kamstrup_382.json
- CONPOT_LOG=/var/log/conpot/conpot_kamstrup_382.log
- CONPOT_TEMPLATE=kamstrup_382
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_kamstrup_382
ports:
- "1025:1025"
- "50100:50100"
image: "dtagdevsec/conpot:1710"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
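With one service per template, profiles can be started selectively; a short sketch, assuming the compose file above is used unchanged:

```bash
# Bring up only the default and IEC104 profiles (service names as defined above)
docker-compose up -d conpot_default conpot_IEC104

# Each template writes its own JSON log into the shared volume
tail -f /data/conpot/log/conpot_default.json /data/conpot/log/conpot_IEC104.json
```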

View file

@ -1,12 +1,25 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Get and install dependencies & packages
RUN apk -U upgrade && \
apk add git procps py-pip mpfr-dev openssl-dev mpc1-dev libffi-dev build-base python python-dev py-mysqldb py-requests py-setuptools gmp-dev && \
apk add build-base \
git \
gmp-dev \
libcap \
libffi-dev \
mpc1-dev \
mpfr-dev \
openssl-dev \
procps \
python \
python-dev \
py-mysqldb \
py-pip \
py-requests \
py-setuptools && \
# Setup user
addgroup -g 2000 cowrie && \

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/cowrie:1710.svg)](https://microbadger.com/images/dtagdevsec/cowrie:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/cowrie:1710.svg)](https://microbadger.com/images/dtagdevsec/cowrie:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/cowrie:1804.svg)](https://microbadger.com/images/dtagdevsec/cowrie:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/cowrie:1804.svg)](https://microbadger.com/images/dtagdevsec/cowrie:1804 "Get your own image badge on microbadger.com")
# cowrie

View file

@ -1,6 +1,6 @@
# T-Pot (Standard)
# For docker-compose ...
version: '2.1'
version: '2.2'
networks:
cowrie_local:
@ -9,6 +9,7 @@ services:
# Cowrie service
cowrie:
build: .
container_name: cowrie
restart: always
networks:
@ -17,8 +18,8 @@ services:
- NET_BIND_SERVICE
ports:
- "22:2222"
- "23:2223"
image: "dtagdevsec/cowrie:1710"
- "23:2323"
image: "dtagdevsec/cowrie:1804"
volumes:
- /data/cowrie/downloads:/home/cowrie/cowrie/dl
- /data/cowrie/keys:/home/cowrie/cowrie/etc

View file

@ -1,5 +1,4 @@
FROM debian:stretch-slim
MAINTAINER MO
ENV DEBIAN_FRONTEND noninteractive
# Include dist
@ -36,7 +35,6 @@ RUN apt-get update -y && \
# Get and install dionaea
git clone https://github.com/dinotools/dionaea /root/dionaea/ && \
cd /root/dionaea && \
# git checkout 99e9cfc88cfa8f3715813b18ec7006bca2622d76 && \
autoreconf -vi && \
./configure \
--prefix=/opt/dionaea \

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/dionaea:1710.svg)](https://microbadger.com/images/dtagdevsec/dionaea:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/dionaea:1710.svg)](https://microbadger.com/images/dtagdevsec/dionaea:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/dionaea:1804.svg)](https://microbadger.com/images/dtagdevsec/dionaea:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/dionaea:1804.svg)](https://microbadger.com/images/dtagdevsec/dionaea:1804 "Get your own image badge on microbadger.com")
# dionaea

View file

@ -1,6 +1,6 @@
# T-Pot (Standard)
# For docker-compose ...
version: '2.1'
version: '2.2'
networks:
dionaea_local:
@ -9,6 +9,7 @@ services:
# Dionaea service
dionaea:
build: .
container_name: dionaea
stdin_open: true
restart: always
@ -33,7 +34,7 @@ services:
- "5060:5060/udp"
- "5061:5061"
- "27017:27017"
image: "dtagdevsec/dionaea:1710"
image: "dtagdevsec/dionaea:1804"
volumes:
- /data/dionaea/roots/ftp:/opt/dionaea/var/dionaea/roots/ftp
- /data/dionaea/roots/tftp:/opt/dionaea/var/dionaea/roots/tftp

View file

@ -1,14 +1,18 @@
FROM alpine
MAINTAINER MS/MO
# Include dist
ADD dist/ /root/dist/
# Install packages
RUN apk -U upgrade && \
apk add bash python3 git && \
apk add bash \
git \
python3 && \
pip3 install --upgrade pip && \
pip3 install bottle requests configparser datetime && \
pip3 install bottle \
configparser \
datetime \
requests && \
mkdir -p /opt && \
cd /opt/ && \
git clone https://github.com/schmalle/ElasticpotPY.git && \

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/elasticpot:1710.svg)](https://microbadger.com/images/dtagdevsec/elasticpot:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/elasticpot:1710.svg)](https://microbadger.com/images/dtagdevsec/elasticpot:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/elasticpot:1804.svg)](https://microbadger.com/images/dtagdevsec/elasticpot:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/elasticpot:1804.svg)](https://microbadger.com/images/dtagdevsec/elasticpot:1804 "Get your own image badge on microbadger.com")
# elasticpot

View file

@ -7,12 +7,13 @@ services:
# Elasticpot service
elasticpot:
build: .
container_name: elasticpot
restart: always
networks:
- elasticpot_local
ports:
- "9200:9200"
image: "dtagdevsec/elasticpot:1710"
image: "dtagdevsec/elasticpot:1804"
volumes:
- /data/elasticpot/log:/opt/ElasticpotPY/log

View file

@ -1,11 +1,11 @@
# Elasticsearch
[![](https://images.microbadger.com/badges/version/dtagdevsec/elasticsearch:1710.svg)](https://microbadger.com/images/dtagdevsec/elasticsearch:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/elasticsearch:1710.svg)](https://microbadger.com/images/dtagdevsec/elasticsearch:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/elasticsearch:1804.svg)](https://microbadger.com/images/dtagdevsec/elasticsearch:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/elasticsearch:1804.svg)](https://microbadger.com/images/dtagdevsec/elasticsearch:1804 "Get your own image badge on microbadger.com")
# Logstash
[![](https://images.microbadger.com/badges/version/dtagdevsec/logstash:1710.svg)](https://microbadger.com/images/dtagdevsec/logstash:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/logstash:1710.svg)](https://microbadger.com/images/dtagdevsec/logstash:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/logstash:1804.svg)](https://microbadger.com/images/dtagdevsec/logstash:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/logstash:1804.svg)](https://microbadger.com/images/dtagdevsec/logstash:1804 "Get your own image badge on microbadger.com")
# Kibana
[![](https://images.microbadger.com/badges/version/dtagdevsec/kibana:1710.svg)](https://microbadger.com/images/dtagdevsec/kibana:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/kibana:1710.svg)](https://microbadger.com/images/dtagdevsec/kibana:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/kibana:1804.svg)](https://microbadger.com/images/dtagdevsec/kibana:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/kibana:1804.svg)](https://microbadger.com/images/dtagdevsec/kibana:1804 "Get your own image badge on microbadger.com")
# elk stack

View file

@ -1,18 +1,21 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Setup env and apt
RUN apk -U upgrade && \
apk add bash curl openjdk8-jre procps wget && \
apk add bash \
curl \
openjdk8-jre \
procps \
wget && \
# Get and install packages
cd /root/dist/ && \
mkdir -p /usr/share/elasticsearch/ && \
wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.6.5.tar.gz && \
tar xvfz elasticsearch-5.6.5.tar.gz --strip-components=1 -C /usr/share/elasticsearch/ && \
wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.6.8.tar.gz && \
tar xvfz elasticsearch-5.6.8.tar.gz --strip-components=1 -C /usr/share/elasticsearch/ && \
# Add and move files
cd /root/dist/ && \

View file

@ -1,12 +1,13 @@
# Elasticsearch-head Dockerfile by MO
#
# VERSION 17.06
FROM alpine
MAINTAINER MO
# Setup env and apt
RUN apk -U upgrade && \
apk add bash curl nodejs nodejs-npm git procps && \
apk add bash \
curl \
git \
nodejs \
nodejs-npm \
procps && \
# Get and install packages
mkdir -p /usr/src/app/ && \

View file

@ -1,18 +1,21 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Setup env and apt
RUN apk -U upgrade && \
apk add bash curl nodejs procps wget && \
apk add bash \
curl \
nodejs \
procps \
wget && \
# Get and install packages
cd /root/dist/ && \
mkdir -p /usr/share/kibana/ && \
wget https://artifacts.elastic.co/downloads/kibana/kibana-5.6.5-linux-x86_64.tar.gz && \
tar xvfz kibana-5.6.5-linux-x86_64.tar.gz --strip-components=1 -C /usr/share/kibana/ && \
wget https://artifacts.elastic.co/downloads/kibana/kibana-5.6.8-linux-x86_64.tar.gz && \
tar xvfz kibana-5.6.8-linux-x86_64.tar.gz --strip-components=1 -C /usr/share/kibana/ && \
# Kibana's bundled node does not work in alpine
rm /usr/share/kibana/node/bin/node && \

View file

@ -1,20 +1,26 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Setup env and apt
RUN apk -U upgrade && \
apk add bash curl git libc6-compat libzmq openjdk8-jre procps wget && \
apk add bash \
curl \
git \
libc6-compat \
libzmq \
openjdk8-jre \
procps \
wget && \
# Get and install packages
git clone https://github.com/dtag-dev-sec/listbot /etc/listbot && \
cd /root/dist/ && \
mkdir -p /usr/share/logstash/ && \
wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.5.tar.gz && \
wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.8.tar.gz && \
wget http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz && \
tar xvfz logstash-5.6.5.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
tar xvfz logstash-5.6.8.tar.gz --strip-components=1 -C /usr/share/logstash/ && \
/usr/share/logstash/bin/logstash-plugin install logstash-filter-translate && \
/usr/share/logstash/bin/logstash-plugin install logstash-output-syslog && \
tar xvfz GeoLite2-ASN.tar.gz --strip-components=1 -C /usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-geoip-4.3.1-java/vendor/ && \

View file

@ -17,7 +17,7 @@ input {
# Conpot
file {
path => ["/data/conpot/log/conpot.json"]
path => ["/data/conpot/log/*.json"]
codec => json
type => "ConPot"
}
@ -55,6 +55,12 @@ input {
type => "Glastopf"
}
# Heralding
file {
path => ["/data/heralding/log/auth.csv"]
type => "Heralding"
}
# Honeytrap
file {
path => ["/data/honeytrap/log/attackers.json"]
@ -201,6 +207,17 @@ filter {
}
}
# Heralding
if [type] == "Heralding" {
csv {
columns => ["timestamp","auth_id","session_id","src_ip","src_port","dest_ip","dest_port","proto","username","password"]
separator => ","
}
date {
match => [ "timestamp", "yyyy-MM-dd HH:mm:ss.SSSSSS" ]
remove_field => ["timestamp"]
}
}
# Honeytrap
if [type] == "Honeytrap" {
date {

View file

@ -1,12 +1,23 @@
FROM alpine
MAINTAINER MO
# Include dist
ADD dist/ /root/dist/
# Install packages
RUN apk -U upgrade && \
apk add build-base git libssl1.0 openssl-dev python-dev py-cffi py-ipaddress py-lxml py-mysqldb py-pip py-pysqlite py-requests py-setuptools && \
apk add build-base \
git \
libssl1.0 \
openssl-dev \
python-dev \
py-cffi \
py-ipaddress \
py-lxml \
py-mysqldb \
py-pip \
py-pysqlite \
py-requests \
py-setuptools && \
pip install pyOpenSSL==16.2.0 && \
# Setup ewsposter
@ -24,7 +35,12 @@ RUN apk -U upgrade && \
mv /root/dist/ews.cfg /opt/ewsposter/ && \
# Clean up
apk del build-base git openssl-dev python-dev py-pip py-setuptools && \
apk del build-base \
git \
openssl-dev \
python-dev \
py-pip \
py-setuptools && \
rm -rf /root/* && \
rm -rf /var/cache/apk/*

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/ewsposter:1710.svg)](https://microbadger.com/images/dtagdevsec/ewsposter:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/ewsposter:1710.svg)](https://microbadger.com/images/dtagdevsec/ewsposter:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/ewsposter:1804.svg)](https://microbadger.com/images/dtagdevsec/ewsposter:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/ewsposter:1804.svg)](https://microbadger.com/images/dtagdevsec/ewsposter:1804 "Get your own image badge on microbadger.com")
# ewsposter

View file

@ -1,22 +1,52 @@
FROM alpine
MAINTAINER MO
FROM alpine
# Include dist
ADD dist/ /root/dist/
# Install packages
RUN apk -U upgrade && \
apk add autoconf bash bind-tools build-base cython git libffi libffi-dev make py-asn1 \
py-cffi py-chardet py-chardet py-cparser py-cryptography py-dateutil \
py-enum34 py-idna py-ipaddress py-jinja2 py-lxml py-mysqldb py-openssl \
py-pip py-requests py-setuptools python python-dev && \
RUN apk -U --no-cache add autoconf \
bash \
bind-tools \
build-base \
cython \
git \
libffi \
libffi-dev \
libcap \
make \
py-asn1 \
py-cffi \
py-chardet \
py-cparser \
py-cryptography \
py-dateutil \
py-enum34 \
py-idna \
py-ipaddress \
py-jinja2 \
py-lxml \
py-mysqldb \
py-openssl \
py-pip \
py-requests \
py-setuptools \
python \
python-dev && \
apk -U add --repository http://dl-3.alpinelinux.org/alpine/edge/testing/ \
py-beautifulsoup4 php7 php7-dev py-cssselect py-gevent py-greenlet py-mongo \
py-sqlalchemy py-webob && \
py-beautifulsoup4 \
php7 \
php7-dev \
py-cssselect \
py-gevent \
py-greenlet \
py-mongo \
py-sqlalchemy \
py-webob && \
# Install php sandbox from git
git clone https://github.com/glastopf/BFR.git /opt/BFR && \
git clone https://github.com/mushorg/BFR /opt/BFR && \
cd /opt/BFR && \
git checkout 508729202428a35bcc6bb27dd97b831f7e5009b5 && \
phpize7 && \
./configure \
--with-php-config=/usr/bin/php-config7 \
@ -30,9 +60,14 @@ RUN apk -U upgrade && \
# Install glastopf from git
git clone https://github.com/mushorg/glastopf.git /opt/glastopf && \
cd /opt/glastopf && \
git checkout c4932d9cb513d284142e2c0d66284221201d7477 && \
cp /root/dist/base_logger.py /opt/glastopf/glastopf/modules/reporting/auxiliary/ && \
cp /root/dist/log_s3.py /opt/glastopf/glastopf/modules/reporting/auxiliary/ && \
cp /root/dist/requirements.txt /opt/glastopf/ && \
python setup.py install && \
cd / && \
rm -rf /opt/glastopf /tmp/* /var/tmp/* && \
setcap cap_net_bind_service=+ep /usr/bin/glastopf-runner && \
# Setup user, groups and configs
addgroup -g 2000 glastopf && \
@ -41,7 +76,12 @@ RUN apk -U upgrade && \
mv /root/dist/glastopf.cfg /opt/glastopf/ && \
# Clean up
apk del autoconf build-base git libffi-dev php7-dev python-dev && \
apk del autoconf \
build-base \
git \
libffi-dev \
php7-dev \
python-dev && \
rm -rf /root/* && \
rm -rf /var/cache/apk/*

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/glastopf:1710.svg)](https://microbadger.com/images/dtagdevsec/glastopf:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/glastopf:1710.svg)](https://microbadger.com/images/dtagdevsec/glastopf:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/glastopf:1804.svg)](https://microbadger.com/images/dtagdevsec/glastopf:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/glastopf:1804.svg)](https://microbadger.com/images/dtagdevsec/glastopf:1804 "Get your own image badge on microbadger.com")
# glastopf

docker/glastopf/dist/base_logger.py (vendored, new file, 31 lines added)
View file

@ -0,0 +1,31 @@
# Copyright (C) 2015 Lukas Rist
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from ConfigParser import SafeConfigParser
import os
class BaseLogger(object):
def __init__(self, config='glastopf.cfg'):
if not isinstance(config, SafeConfigParser):
self.config = SafeConfigParser(os.environ)
self.config.read(config)
else:
self.config = config
def insert(self, event):
pass

View file

@ -104,3 +104,13 @@ sensorid = None
[profiler]
enabled = False
[s3storage]
enabled = False
endpoint = %(GLASTOPF_S3_ENDPOINT)s
aws_access_key_id = %(GLASTOPF_S3_ACCESS_KEY_ID)s
aws_secret_access_key = %(GLASTOPF_S3_SECRET_ACCESS_KEY)s
bucket = %(GLASTOPF_S3_BUCKET)s
region = %(GLASTOPF_S3_REGION)s
signature_version = %(GLASTOPF_S3_SIGNATURE_VERSION)s
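The `%(...)s` placeholders are interpolated from the container environment (the patched `base_logger.py` above initializes `SafeConfigParser` with `os.environ`). A hedged sketch of supplying them at run time once `enabled = True` is set; all values below are placeholders, not real endpoints or credentials:

```bash
# Sketch only: placeholder values for the s3storage section
docker run -d --name glastopf \
  -e GLASTOPF_S3_ENDPOINT=https://s3.example.com \
  -e GLASTOPF_S3_ACCESS_KEY_ID=AKIAEXAMPLE \
  -e GLASTOPF_S3_SECRET_ACCESS_KEY=changeme \
  -e GLASTOPF_S3_BUCKET=glastopf-uploads \
  -e GLASTOPF_S3_REGION=eu-central-1 \
  -e GLASTOPF_S3_SIGNATURE_VERSION=s3v4 \
  dtagdevsec/glastopf:1804
```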

docker/glastopf/dist/log_s3.py (vendored, new file, 72 lines added)
View file

@ -0,0 +1,72 @@
# Copyright (C) 2018 Andre Vorbach @vorband
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import gevent
import botocore.session, botocore.client, botocore.config
from botocore.exceptions import ClientError
from glastopf.modules.reporting.auxiliary.base_logger import BaseLogger
logger = logging.getLogger(__name__)
class S3Logger(BaseLogger):
    def __init__(self, data_dir, work_dir, config="glastopf.cfg", reconnect=True):
        config = os.path.join(work_dir, config)
        BaseLogger.__init__(self, config)
        self.files_dir = os.path.join(data_dir, 'files/')
        self.enabled = False
        self._initial_connection_happened = False
        self.options = {'enabled': self.enabled}
        if self.config.getboolean("s3storage", "enabled"):
            self.endpoint = self.config.get("s3storage", "endpoint")
            self.accesskey = self.config.get("s3storage", "aws_access_key_id")
            self.secretkey = self.config.get("s3storage", "aws_secret_access_key")
            self.version = self.config.get("s3storage", "signature_version")
            self.region = self.config.get("s3storage", "region")
            self.bucket = self.config.get("s3storage", "bucket")
            self.enabled = True
            self.options = {'enabled': self.enabled}
            self.s3client = None
            self.s3session = None
            gevent.spawn(self._start_connection, self.endpoint, self.accesskey, self.secretkey, self.version, self.region, self.bucket)

    def _start_connection(self, endpoint, accesskey, secretkey, version, region, bucket):
        self.s3session = botocore.session.get_session()
        self.s3session.set_credentials(accesskey, secretkey)
        self.s3client = self.s3session.create_client(
            's3',
            endpoint_url=self.endpoint,
            region_name=self.region,
            config=botocore.config.Config(signature_version=self.version)
        )
        self._initial_connection_happened = True

    def insert(self, attack_event):
        if self._initial_connection_happened:
            if attack_event.file_name is not None:
                with open(os.path.join(self.files_dir, attack_event.file_name), 'rb') as file_handler:
                    try:
                        self.s3client.put_object(Bucket=self.bucket, Body=file_handler, Key=attack_event.sensorid + "/" + attack_event.file_name)
                        logger.debug('Sending file ({0}) using s3 bucket "{1}" on {2}'.format(attack_event.file_name, self.bucket, self.endpoint))
                    except ClientError as e:
                        logger.warning("Received error: %s", e.response['Error']['Message'])
        else:
            logger.warning('Not storing attack file because initial s3 connect has not succeeded')
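For reference, a hedged usage sketch showing what `insert()` expects. It assumes glastopf, gevent and botocore are installed, that `log_s3.py` has been copied into `glastopf/modules/reporting/auxiliary/` as in this image, and that a `glastopf.cfg` with an enabled `[s3storage]` section lives in `work_dir`; the stub event and paths are illustrative, not part of glastopf:

```python
# Illustrative only: exercise S3Logger with a stub event exposing the two
# attributes insert() reads (file_name and sensorid).
import os
from collections import namedtuple

import gevent

from glastopf.modules.reporting.auxiliary.log_s3 import S3Logger

AttackEventStub = namedtuple('AttackEventStub', ['file_name', 'sensorid'])

work_dir = '/opt/glastopf'                 # assumed: contains glastopf.cfg
data_dir = os.path.join(work_dir, 'data')  # assumed: contains a files/ subdir

s3_logger = S3Logger(data_dir, work_dir)
gevent.sleep(2)  # let the spawned _start_connection greenlet run

# data/files/sample_upload.bin must exist; it is uploaded as <sensorid>/<file_name>
event = AttackEventStub(file_name='sample_upload.bin', sensorid='sensor-01')
s3_logger.insert(event)
```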

17
docker/glastopf/dist/requirements.txt vendored Normal file
View file

@ -0,0 +1,17 @@
gevent==1.2.2
webob==1.2.3
pyopenssl==17.2.0
chardet==3.0.4
lxml==4.1.1
sqlalchemy==1.1.14
jinja2==2.9.6
beautifulsoup==3.2.1
requests==2.18.4
cssselect==1.0.0
pymongo==3.2.2
MySQL-python==1.2.5
hpfeeds==1.0
pylibinjection==0.2.4
libtaxii==1.1.110
python-logstash==0.4.6
botocore==1.9.6

View file

@ -1,4 +1,4 @@
version: '2.1'
version: '2.2'
networks:
glastopf_local:
@ -7,13 +7,14 @@ services:
# Glastopf service
glastopf:
build: .
container_name: glastopf
restart: always
networks:
- glastopf_local
ports:
- "80:80"
image: "dtagdevsec/glastopf:1710"
image: "dtagdevsec/glastopf:1804"
volumes:
- /data/glastopf/db:/opt/glastopf/db
- /data/glastopf/log:/opt/glastopf/log

View file

@ -0,0 +1,41 @@
FROM alpine
# Include dist
ADD dist/ /root/dist/
# Install packages
RUN apk -U upgrade && \
apk add bash \
build-base \
git \
libcap \
libffi-dev \
libressl-dev \
postgresql-dev \
python3 \
python3-dev \
py-virtualenv && \
pip3 install --upgrade pip && \
# Setup heralding
mkdir -p /opt && \
cd /opt/ && \
git clone https://github.com/johnnykv/heralding && \
cd heralding && \
mv /root/dist/heralding.yml /opt/heralding/ && \
pip3 install -r requirements.txt && \
pip3 install heralding && \
# Setup user, groups and configs
addgroup -g 2000 heralding && \
adduser -S -H -s /bin/bash -u 2000 -D -g 2000 heralding && \
mkdir -p /var/log/heralding/ && \
# Clean up
apk del git && \
rm -rf /root/* && \
rm -rf /var/cache/apk/*
# Start heralding
WORKDIR /opt/heralding/
CMD ["heralding","-l","/var/log/heralding/heralding.log"]

View file

@ -0,0 +1,12 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/heralding:1804.svg)](https://microbadger.com/images/dtagdevsec/heralding:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/heralding:1804.svg)](https://microbadger.com/images/dtagdevsec/heralding:1804 "Get your own image badge on microbadger.com")
# heralding
[Heralding](https://github.com/johnnykv/heralding) is a simple honeypot that collects credentials, nothing more. The following protocols are currently supported: ftp, telnet, ssh, http, https, pop3, pop3s, imap, imaps, smtp and postgresql.
This dockerized version is part of the **[T-Pot community honeypot](http://dtag-dev-sec.github.io/)** of Deutsche Telekom AG.
The `Dockerfile` contains the blueprint for the dockerized heralding and will be used to set up the docker image.
The `docker-compose.yml` contains the necessary settings to test heralding using `docker-compose`. This ensures the docker container is started with the appropriate permissions and port mappings.
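Heralding writes its session and credential logs as CSV under `/var/log/heralding`, which the T-Pot compose file maps to `/data/heralding/log` on the host. A minimal sketch for summarizing captured credentials (it assumes `auth.csv` has a header row; the `protocol`, `username` and `password` column names are an assumption and may need adjusting for your heralding version):

```python
# Sketch: count the most frequently attempted credentials in heralding's auth.csv.
import csv
from collections import Counter

creds = Counter()
with open('/data/heralding/log/auth.csv') as fh:
    for row in csv.DictReader(fh):
        key = (row.get('protocol', ''), row.get('username', ''), row.get('password', ''))
        creds[key] += 1

for (proto, user, password), count in creds.most_common(10):
    print('{0:>5}x {1:<10} {2}:{3}'.format(count, proto, user, password))
```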

152
docker/heralding/dist/heralding.yml vendored Normal file
View file

@ -0,0 +1,152 @@
# will request and log the public ip every hour from ipify
public_ip_as_destination_ip: true
# ip address to listen on
bind_host: 0.0.0.0
# logging of sessions and authentication attempts
activity_logging:
file:
enabled: true
session_log_file: "/var/log/heralding/session.csv"
authentication_log_file: "/var/log/heralding/auth.csv"
syslog:
enabled: false
hpfeeds:
enabled: false
session_channel: "heralding.session"
auth_channel: "heralding.auth"
host:
port: 20000
ident:
secret:
curiosum:
enabled: false
port: 23400
# protocols to enable
capabilities:
ftp:
enabled: true
port: 21
timeout: 30
protocol_specific_data:
max_attempts: 3
banner: "Microsoft FTP Server"
syst_type: "Windows-NT"
telnet:
enabled: true
port: 23
timeout: 30
protocol_specific_data:
max_attempts: 3
pop3:
enabled: true
port: 110
timeout: 30
protocol_specific_data:
max_attempts: 3
pop3s:
enabled: true
port: 995
timeout: 30
protocol_specific_data:
max_attempts: 3
# if a .pem file is not found in work dir, a new pem file will be created
# using these values
cert:
common_name: "*"
country: "US"
state: None
locality: None
organization: None
organizational_unit: None
# how many days should the certificate be valid for
valid_days: 365
serial_number: 0
postgresql:
enabled: true
port: 5432
timeout: 30
imap:
enabled: true
port: 143
timeout: 30
protocol_specific_data:
max_attempts: 3
banner: "* OK IMAP4rev1 Server Ready"
imaps:
enabled: true
port: 993
timeout: 30
protocol_specific_data:
max_attempts: 3
banner: "* OK IMAP4rev1 Server Ready"
# if a .pem file is not found in work dir, a new pem file will be created
# using these values
cert:
common_name: "*"
country: "US"
state: None
locality: None
organization: None
organizational_unit: None
# how many days should the certificate be valid for
valid_days: 365
serial_number: 0
ssh:
enabled: true
port: 22
timeout: 30
protocol_specific_data:
banner: "SSH-2.0-OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.8"
http:
enabled: true
port: 80
timeout: 30
protocol_specific_data:
banner: ""
https:
enabled: true
port: 443
timeout: 30
protocol_specific_data:
banner: ""
# if a .pem file is not found in work dir, a new pem file will be created
# using these values
cert:
common_name: "*"
country: "US"
state: None
locality: None
organization: None
organizational_unit: None
# how many days should the certificate be valid for
valid_days: 365
serial_number: 0
smtp:
enabled: true
port: 25
timeout: 30
protocol_specific_data:
banner: "Microsoft ESMTP MAIL service ready"
# If the fqdn option is commented out or empty, then the fqdn of the host will be used
fqdn: ""
vnc:
enabled: true
port: 5900
timeout: 30
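Taken together, the `capabilities` section determines which listeners heralding starts and on which ports. To cross-check this config against the port mappings in the compose file below, the YAML can be summarized with a few lines of Python (a sketch; it assumes PyYAML is installed and the file is available as `heralding.yml` in the current directory):

```python
# Sketch: print every enabled heralding capability and the port it binds.
import yaml

with open('heralding.yml') as fh:
    cfg = yaml.safe_load(fh)

for name, settings in sorted(cfg.get('capabilities', {}).items()):
    if settings.get('enabled'):
        print('{0:<12} port {1}'.format(name, settings['port']))
```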

View file

@ -0,0 +1,30 @@
version: '2.2'
networks:
heralding_local:
services:
# Heralding service
heralding:
build: .
container_name: heralding
restart: always
networks:
- heralding_local
ports:
- "21:21"
- "22:22"
- "23:23"
- "25:25"
- "80:80"
- "110:110"
- "143:143"
- "443:443"
- "993:993"
- "995:995"
- "5432:5432"
- "5900:5900"
image: "dtagdevsec/heralding:1804"
volumes:
- /data/heralding/log:/var/log/heralding

View file

@ -1,6 +1,4 @@
FROM debian:stretch-slim
MAINTAINER MO
ENV DEBIAN_FRONTEND noninteractive
# Include dist
@ -11,8 +9,19 @@ RUN apt-get update -y && \
apt-get dist-upgrade -y && \
# Install packages
apt-get install -y autoconf build-essential git iptables libnetfilter-queue1 libnetfilter-queue-dev \
libjson-c-dev libtool libpq5 libpq-dev netbase procps wget && \
apt-get install -y autoconf \
build-essential \
git \
iptables \
libnetfilter-queue1 \
libnetfilter-queue-dev \
libjson-c-dev \
libtool \
libpq5 \
libpq-dev \
netbase \
procps \
wget && \
# Install honeytrap from source
cd /root/ && \
@ -36,7 +45,11 @@ RUN apt-get update -y && \
# Clean up
rm -rf /root/* && \
apt-get purge -y autoconf build-essential git libnetfilter-queue-dev libpq-dev && \
apt-get purge -y autoconf \
build-essential \
git \
libnetfilter-queue-dev \
libpq-dev && \
apt-get autoremove -y --purge && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/honeytrap:1710.svg)](https://microbadger.com/images/dtagdevsec/honeytrap:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/honeytrap:1710.svg)](https://microbadger.com/images/dtagdevsec/honeytrap:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/honeytrap:1804.svg)](https://microbadger.com/images/dtagdevsec/honeytrap:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/honeytrap:1804.svg)](https://microbadger.com/images/dtagdevsec/honeytrap:1804 "Get your own image badge on microbadger.com")
# honeytrap

View file

@ -1,4 +1,4 @@
version: '2.1'
version: '2.2'
networks:
honeytrap_local:
@ -7,12 +7,13 @@ services:
# Honeytrap service
honeytrap:
build: .
container_name: honeytrap
restart: always
network_mode: "host"
cap_add:
- NET_ADMIN
image: "dtagdevsec/honeytrap:1710"
image: "dtagdevsec/honeytrap:1804"
volumes:
- /data/honeytrap/attacks:/opt/honeytrap/var/attacks
- /data/honeytrap/downloads:/opt/honeytrap/var/downloads

View file

@ -1,9 +1,17 @@
FROM alpine
MAINTAINER MO
# Install packages
RUN apk -U upgrade && \
apk add autoconf automake bash build-base git libtool procps py-pip python python-dev && \
apk add autoconf \
automake \
bash \
build-base \
git \
libtool \
procps \
py-pip \
python \
python-dev && \
# Install libemu
git clone https://github.com/buffer/libemu /root/libemu/ && \
@ -25,7 +33,12 @@ RUN apk -U upgrade && \
chown -R mailoney:mailoney /opt/mailoney && \
# Clean up
apk del autoconf automake build-base git py-pip python-dev && \
apk del autoconf \
automake \
build-base \
git \
py-pip \
python-dev && \
rm -rf /root/* && \
rm -rf /var/cache/apk/*

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/mailoney:1710.svg)](https://microbadger.com/images/dtagdevsec/mailoney:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/mailoney:1710.svg)](https://microbadger.com/images/dtagdevsec/mailoney:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/mailoney:1804.svg)](https://microbadger.com/images/dtagdevsec/mailoney:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/mailoney:1804.svg)](https://microbadger.com/images/dtagdevsec/mailoney:1804 "Get your own image badge on microbadger.com")
# mailoney

View file

@ -1,4 +1,4 @@
version: '2.1'
version: '2.2'
networks:
mailoney_local:
@ -7,12 +7,13 @@ services:
# Mailoney service
mailoney:
build: .
container_name: mailoney
restart: always
networks:
- mailoney_local
ports:
- "25:2525"
image: "dtagdevsec/mailoney:1710"
volumes:
- /data/mailoney/log:/opt/mailoney/logs
image: "dtagdevsec/mailoney:1804"
volumes:
- /data/mailoney/log:/opt/mailoney/logs

View file

@ -1,9 +1,29 @@
FROM alpine
MAINTAINER MO
# Install packages
RUN apk -U upgrade && \
apk add alpine-sdk autoconf automake bash curl gawk gcc iw jq libmnl-dev libuuid linux-headers lm_sensors make musl-dev netcat-openbsd util-linux-dev pkgconf python py-requests py-yaml zlib-dev && \
apk add alpine-sdk \
autoconf \
automake \
bash \
curl \
gawk \
gcc \
iw \
jq \
libmnl-dev \
libuuid \
linux-headers \
lm_sensors \
make \
musl-dev \
netcat-openbsd \
util-linux-dev \
pkgconf \
python \
py-requests \
py-yaml \
zlib-dev && \
# Install netdata
cd /root && \
@ -20,7 +40,17 @@ RUN apk -U upgrade && \
cd / && \
# Clean up
apk del alpine-sdk autoconf automake gcc libmnl-dev linux-headers make musl-dev pkgconf util-linux-dev zlib-dev && \
apk del alpine-sdk \
autoconf \
automake \
gcc \
libmnl-dev \
linux-headers \
make \
musl-dev \
pkgconf \
util-linux-dev \
zlib-dev && \
rm -rf /root/* && \
rm -rf /var/cache/apk/*

View file

@ -1,4 +1,4 @@
[![](https://images.microbadger.com/badges/version/dtagdevsec/netdata:1710.svg)](https://microbadger.com/images/dtagdevsec/netdata:1710 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/netdata:1710.svg)](https://microbadger.com/images/dtagdevsec/netdata:1710 "Get your own image badge on microbadger.com")
[![](https://images.microbadger.com/badges/version/dtagdevsec/netdata:1804.svg)](https://microbadger.com/images/dtagdevsec/netdata:1804 "Get your own version badge on microbadger.com") [![](https://images.microbadger.com/badges/image/dtagdevsec/netdata:1804.svg)](https://microbadger.com/images/dtagdevsec/netdata:1804 "Get your own image badge on microbadger.com")
# netdata

View file

@ -1,9 +1,10 @@
version: '2.1'
version: '2.2'
services:
# Netdata service
netdata:
build: .
container_name: netdata
restart: always
network_mode: "host"
@ -11,7 +12,7 @@ services:
- SYS_PTRACE
security_opt:
- apparmor=unconfined
image: "dtagdevsec/netdata:1710"
image: "dtagdevsec/netdata:1804"
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro

211
etc/compose/collect.yml Normal file
View file

@ -0,0 +1,211 @@
# T-Pot (Collector)
# For docker-compose ...
version: '2.2'
networks:
ewsposter_local:
heralding_local:
spiderfoot_local:
ui-for-docker_local:
services:
# ELK services
## Elasticsearch service
elasticsearch:
container_name: elasticsearch
restart: always
environment:
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms1024m -Xmx1024m"
cap_add:
- IPC_LOCK
ulimits:
memlock:
soft: -1
hard: -1
nofile:
soft: 65536
hard: 65536
mem_limit: 2g
ports:
- "127.0.0.1:64298:9200"
image: "dtagdevsec/elasticsearch:1804"
volumes:
- /data:/data
## Kibana service
kibana:
container_name: kibana
restart: always
depends_on:
elasticsearch:
condition: service_healthy
ports:
- "127.0.0.1:64296:5601"
image: "dtagdevsec/kibana:1804"
## Logstash service
logstash:
container_name: logstash
restart: always
depends_on:
elasticsearch:
condition: service_healthy
env_file:
- /opt/tpot/etc/compose/elk_environment
image: "dtagdevsec/logstash:1804"
volumes:
- /data:/data
- /var/log:/data/host/log
## Elasticsearch-head service
head:
container_name: head
restart: always
depends_on:
elasticsearch:
condition: service_healthy
ports:
- "127.0.0.1:64302:9100"
image: "dtagdevsec/head:1804"
# Ewsposter service
ewsposter:
container_name: ewsposter
restart: always
networks:
- ewsposter_local
env_file:
- /opt/tpot/etc/compose/elk_environment
image: "dtagdevsec/ewsposter:1804"
volumes:
- /data:/data
- /data/ews/conf/ews.ip:/opt/ewsposter/ews.ip
# Heralding service
heralding:
container_name: heralding
restart: always
networks:
- heralding_local
ports:
- "21:21"
- "22:22"
- "23:23"
- "25:25"
- "80:80"
- "110:110"
- "143:143"
- "443:443"
- "993:993"
- "995:995"
- "5432:5432"
- "5900:5900"
image: "dtagdevsec/heralding:1804"
volumes:
- /data/heralding/log:/var/log/heralding
# Honeytrap service
honeytrap:
container_name: honeytrap
restart: always
network_mode: "host"
cap_add:
- NET_ADMIN
image: "dtagdevsec/honeytrap:1804"
volumes:
- /data/honeytrap/attacks:/opt/honeytrap/var/attacks
- /data/honeytrap/downloads:/opt/honeytrap/var/downloads
- /data/honeytrap/log:/opt/honeytrap/var/log
# Netdata service
netdata:
container_name: netdata
restart: always
network_mode: "host"
depends_on:
elasticsearch:
condition: service_healthy
cap_add:
- SYS_PTRACE
security_opt:
- apparmor=unconfined
ports:
- "64301:64301"
image: "dtagdevsec/netdata:1804"
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro
- /var/run/docker.sock:/var/run/docker.sock
# Nginx service
nginx:
container_name: nginx
restart: always
network_mode: "host"
ports:
- "64297:64297"
image: "dtagdevsec/nginx:1710"
volumes:
- /data/nginx/cert/:/etc/nginx/cert/
- /data/nginx/conf/nginxpasswd:/etc/nginx/nginxpasswd
- /data/nginx/log/:/var/log/nginx/
# Spiderfoot service
spiderfoot:
container_name: spiderfoot
restart: always
networks:
- spiderfoot_local
ports:
- "127.0.0.1:64303:8080"
image: "dtagdevsec/spiderfoot:1710"
volumes:
- /data/spiderfoot/spiderfoot.db:/home/spiderfoot/spiderfoot.db
# Ui-for-docker service
ui-for-docker:
container_name: ui-for-docker
command: -H unix:///var/run/docker.sock --no-auth
restart: always
networks:
- ui-for-docker_local
ports:
- "127.0.0.1:64299:9000"
image: "dtagdevsec/ui-for-docker:1710"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
# Suricata service
suricata:
container_name: suricata
restart: always
network_mode: "host"
cap_add:
- NET_ADMIN
- SYS_NICE
- NET_RAW
image: "dtagdevsec/suricata:1710"
volumes:
- /data/suricata/log:/var/log/suricata
# P0f service
p0f:
container_name: p0f
restart: always
network_mode: "host"
image: "dtagdevsec/p0f:1710"
volumes:
- /data/p0f/log:/var/log/p0f
# Wetty service
wetty:
container_name: wetty
restart: always
network_mode: "host"
env_file:
- /opt/tpot/etc/compose/wetty_environment
ports:
- "64300:64300"
image: "dtagdevsec/wetty:1710"

View file

@ -1,26 +1,139 @@
# T-Pot (Industrial)
# T-Pot (Industrial, based on Conpot=[default, IEC104, guardian_ast, ipmi, kamstrup_382])
# For docker-compose ...
version: '2.2'
networks:
conpot_local:
emobility_local:
conpot_local_default:
conpot_local_IEC104:
conpot_local_guardian_ast:
conpot_local_ipmi:
conpot_local_kamstrup_382:
ewsposter_local:
spiderfoot_local:
ui-for-docker_local:
services:
# Conpot service
conpot:
container_name: conpot
# Conpot default service
conpot_default:
container_name: conpot_default
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_default.json
- CONPOT_LOG=/var/log/conpot/conpot_default.log
- CONPOT_TEMPLATE=default
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
# - /var/run/conpot/
networks:
- conpot_local
- conpot_local_default
ports:
- "80:80"
- "102:102"
- "161:161"
- "502:502"
# - "623:623"
- "44818:44818"
- "47808:47808"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot IEC104 service
conpot_IEC104:
container_name: conpot_IEC104
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_IEC104.json
- CONPOT_LOG=/var/log/conpot/conpot_IEC104.log
- CONPOT_TEMPLATE=IEC104
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_IEC104
ports:
# - "161:161"
- "2404:2404"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot guardian_ast service
conpot_guardian_ast:
container_name: conpot_guardian_ast
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_guardian_ast.json
- CONPOT_LOG=/var/log/conpot/conpot_guardian_ast.log
- CONPOT_TEMPLATE=guardian_ast
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_guardian_ast
ports:
- "10001:10001"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot ipmi
conpot_ipmi:
container_name: conpot_ipmi
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_ipmi.json
- CONPOT_LOG=/var/log/conpot/conpot_ipmi.log
- CONPOT_TEMPLATE=ipmi
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_ipmi
ports:
- "623:623"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
# Conpot kamstrup_382
conpot_kamstrup_382:
container_name: conpot_kamstrup_382
restart: always
environment:
- CONPOT_CONFIG=/etc/conpot/conpot.cfg
- CONPOT_JSON_LOG=/var/log/conpot/conpot_kamstrup_382.json
- CONPOT_LOG=/var/log/conpot/conpot_kamstrup_382.log
- CONPOT_TEMPLATE=kamstrup_382
- CONPOT_TMP=/tmp/conpot
- PYTHON_EGG_CACHE=/tmp/conpot
tmpfs:
- /tmp/conpot:exec
- /var/run/conpot/
networks:
- conpot_local_kamstrup_382
ports:
- "1025:1025"
- "50100:50100"
image: "dtagdevsec/conpot:1710"
image: "dtagdevsec/conpot:1804"
read_only: true
volumes:
- /data/conpot/log:/var/log/conpot
@ -84,21 +197,6 @@ services:
- "127.0.0.1:64302:9100"
image: "dtagdevsec/head:1710"
# Emobility service
emobility:
container_name: emobility
restart: always
networks:
- emobility_local
cap_add:
- NET_ADMIN
ports:
- "8080:8080"
image: "dtagdevsec/emobility:1710"
volumes:
- /data/emobility:/data/eMobility
- /data/ews:/data/ews
# Ewsposter service
ewsposter:
container_name: ewsposter

View file

@ -3,6 +3,7 @@
version: '2.2'
networks:
ciscoasa_local:
cowrie_local:
dionaea_local:
elasticpot_local:
@ -16,6 +17,19 @@ networks:
services:
# Ciscoasa service
ciscoasa:
container_name: ciscoasa
restart: always
networks:
- ciscoasa_local
ports:
- "5000:5000/udp"
- "8443:8443"
image: "dtagdevsec/ciscoasa:1804"
volumes:
- /data/ciscoasa/log:/var/log/ciscoasa
# Cowrie service
cowrie:
container_name: cowrie
@ -26,8 +40,8 @@ services:
- NET_BIND_SERVICE
ports:
- "22:2222"
- "23:2223"
image: "dtagdevsec/cowrie:1710"
- "23:2323"
image: "dtagdevsec/cowrie:1804"
volumes:
- /data/cowrie/downloads:/home/cowrie/cowrie/dl
- /data/cowrie/keys:/home/cowrie/cowrie/etc
@ -60,7 +74,7 @@ services:
- "5060:5060/udp"
- "5061:5061"
- "27017:27017"
image: "dtagdevsec/dionaea:1710"
image: "dtagdevsec/dionaea:1804"
volumes:
- /data/dionaea/roots/ftp:/opt/dionaea/var/dionaea/roots/ftp
- /data/dionaea/roots/tftp:/opt/dionaea/var/dionaea/roots/tftp
@ -79,7 +93,7 @@ services:
- elasticpot_local
ports:
- "9200:9200"
image: "dtagdevsec/elasticpot:1710"
image: "dtagdevsec/elasticpot:1804"
volumes:
- /data/elasticpot/log:/opt/ElasticpotPY/log
@ -90,7 +104,7 @@ services:
restart: always
environment:
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
- "ES_JAVA_OPTS=-Xms1024m -Xmx1024m"
cap_add:
- IPC_LOCK
ulimits:
@ -100,10 +114,10 @@ services:
nofile:
soft: 65536
hard: 65536
# mem_limit: 2g
mem_limit: 2g
ports:
- "127.0.0.1:64298:9200"
image: "dtagdevsec/elasticsearch:1710"
image: "dtagdevsec/elasticsearch:1804"
volumes:
- /data:/data
@ -116,7 +130,7 @@ services:
condition: service_healthy
ports:
- "127.0.0.1:64296:5601"
image: "dtagdevsec/kibana:1710"
image: "dtagdevsec/kibana:1804"
## Logstash service
logstash:
@ -127,7 +141,7 @@ services:
condition: service_healthy
env_file:
- /opt/tpot/etc/compose/elk_environment
image: "dtagdevsec/logstash:1710"
image: "dtagdevsec/logstash:1804"
volumes:
- /data:/data
- /var/log:/data/host/log
@ -141,7 +155,7 @@ services:
condition: service_healthy
ports:
- "127.0.0.1:64302:9100"
image: "dtagdevsec/head:1710"
image: "dtagdevsec/head:1804"
# Ewsposter service
ewsposter:
@ -151,7 +165,7 @@ services:
- ewsposter_local
env_file:
- /opt/tpot/etc/compose/elk_environment
image: "dtagdevsec/ewsposter:1710"
image: "dtagdevsec/ewsposter:1804"
volumes:
- /data:/data
- /data/ews/conf/ews.ip:/opt/ewsposter/ews.ip
@ -164,7 +178,7 @@ services:
- glastopf_local
ports:
- "80:80"
image: "dtagdevsec/glastopf:1710"
image: "dtagdevsec/glastopf:1804"
volumes:
- /data/glastopf/db:/opt/glastopf/db
- /data/glastopf/log:/opt/glastopf/log
@ -176,7 +190,7 @@ services:
network_mode: "host"
cap_add:
- NET_ADMIN
image: "dtagdevsec/honeytrap:1710"
image: "dtagdevsec/honeytrap:1804"
volumes:
- /data/honeytrap/attacks:/opt/honeytrap/var/attacks
- /data/honeytrap/downloads:/opt/honeytrap/var/downloads
@ -190,7 +204,7 @@ services:
- mailoney_local
ports:
- "25:2525"
image: "dtagdevsec/mailoney:1710"
image: "dtagdevsec/mailoney:1804"
volumes:
- /data/mailoney/log:/opt/mailoney/logs
@ -208,7 +222,7 @@ services:
- apparmor=unconfined
ports:
- "64301:64301"
image: "dtagdevsec/netdata:1710"
image: "dtagdevsec/netdata:1804"
volumes:
- /proc:/host/proc:ro
- /sys:/host/sys:ro

View file

@ -434,12 +434,14 @@ tee -a /etc/crontab 2>&1>/dev/null <<EOF
EOF
# Let's create some files and folders
mkdir -p /data/conpot/log \
mkdir -p /data/ciscoasa/log \
/data/conpot/log \
/data/cowrie/log/tty/ /data/cowrie/downloads/ /data/cowrie/keys/ /data/cowrie/misc/ \
/data/dionaea/log /data/dionaea/bistreams /data/dionaea/binaries /data/dionaea/rtp /data/dionaea/roots/ftp /data/dionaea/roots/tftp /data/dionaea/roots/www /data/dionaea/roots/upnp \
/data/elasticpot/log \
/data/elk/data /data/elk/log \
/data/glastopf /data/honeytrap/log/ /data/honeytrap/attacks/ /data/honeytrap/downloads/ \
/data/heralding/log \
/data/mailoney/log \
/data/nginx/log \
/data/emobility/log \