# ============================================================================
# General Honeypot Options
# ============================================================================
[honeypot]

# Sensor name is used to identify this honeypot instance. Used by the database
# logging modules such as JSON.
#
# If not specified, the logging modules will instead use the IP address of the
# server as the sensor name.
#
# (default: the name of the local machine)
sensor_name = t-pot

# The version of Elasticsearch reported by the honeypot.
#
# (default: 1.4.1)
#spoofed_version = 1.4.1

# The Elasticsearch instance name reported by the honeypot.
#
# (default: Green Goblin)
instance_name = USNYES01

# The name of the simulated Elasticsearch cluster.
#
# (default: elasticsearch)
#cluster_name = elasticsearch

# The name of the simulated host running Elasticsearch.
#
# (default: elk)
host_name = usnyes01

# The build number of the simulated Elasticsearch instance.
# Use something realistic or simply don't touch this value.
#
# (default: 89d3241)
#build = 89d3241

# The number of processors on the simulated host.
#
# (default: 12)
#total_processors = 12

# The total number of CPU cores on the simulated host.
# Use a multiple of total_processors.
#
# (default: 24)
#total_cores = 24

# The total number of sockets on the simulated host.
# Use a multiple of total_cores.
#
# (default: 48)
#total_sockets = 48

# The MAC address of the networking card of the simulated host.
#
# (default: 08:01:c7:3F:15:DD)
#mac_address = 08:01:c7:3F:15:DD

# Directory in which to save log files.
# Log files are .YYYY-MM-DD in that directory
#
# (default: log)
log_path = log

# Log file name
#
# (default: stdout)
#log_filename =

# Directory containing the response files
#
# (default: responses)
#responses_dir = responses

# ============================================================================
# Network Specific Options
# ============================================================================

# Port to listen for incoming connections.
#
# (default: 9200)
#listen_port = 9200

# Site to query for one's public IP address
#
# (default: https://ident.me)
#public_ip_url = https://ident.me

# Enable to log the public IP of the honeypot (useful if listening on 127.0.0.1).
# The IP address is obtained by querying public_ip_url.
#
# (default: false)
#report_public_ip = false
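
# Once the honeypot is running, the spoofed identity configured above can be
# verified by probing it like a real Elasticsearch node (a sketch, assuming
# the default listen_port; the exact JSON returned is built from the files in
# responses_dir):
#
#     curl http://127.0.0.1:9200/
#
# The reply should reflect the values from the [honeypot] section
# (instance_name, cluster_name, spoofed_version, build, and so on).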

# ============================================================================
# Output Plugins
# These provide an extensible mechanism to send audit log entries to third
# parties. The audit entries contain information on clients connecting to
# the honeypot.
#
# Output entries need to start with 'output_' and have the 'enabled' entry.
# ============================================================================

# JSON based logging module
#
[output_jsonlog]
enabled = true
logfile = log/elasticpot.json
epoch_timestamp = false

# MySQL logging module
# Database structure for this module is supplied in docs/sql/mysql.sql
#
# MySQL logging requires extra software: sudo apt-get install libmysqlclient-dev
# MySQL logging requires an extra Python module: pip install mysql-python
#
#[output_mysql]
#enabled = false
#host = localhost
#database = elasticpot
#username = elasticpot
#password = secret
#port = 3306
#debug = false
# Whether to store geolocation data in the database
#geoip = true
# Location of the databases used for geolocation
#geoip_citydb = data/GeoLite2-City.mmdb
#geoip_asndb = data/GeoLite2-ASN.mmdb

# Text output
# This writes audit log entries to a text file
#
#[output_textlog]
#enabled = false
#logfile = log/elasticpot.txt

# HPFeeds
#
# Note the lack of "s" at the end:
[output_hpfeed]
enabled = false
#server = hpfeeds.mysite.org
#tlscert = /path/to/tls/cert/file
#port = 10000
#identifier = abc123
#secret = secret
#channel = elasticpot

# MongoDB logging module
#
#[output_mongodb]
#enabled = false
#host = 127.0.0.1
#port = 27017
#username = elasticpot
#password = secret
#database = elasticpot
# Note: .format(username, password, host, port, database) is applied to the
# following string; make sure that there are 5 placeholders ({}) in it
#connection_string = mongodb://{}:{}@{}:{}/{}
# Whether to store geolocation data in the database
#geoip = true
# Location of the databases used for geolocation
#geoip_citydb = data/GeoLite2-City.mmdb
#geoip_asndb = data/GeoLite2-ASN.mmdb

# RedisDB logging module
#
#[output_redisdb]
#enabled = false
#host = 127.0.0.1
#port = 6379
# DB of the redis server. Defaults to 0
#db = 0
# Password of the redis server. Defaults to None
#password = secret
# Name of the list to push to or the channel to publish to. Required
#keyname = elasticpot
# Method to use when sending data to redis.
# Can be one of [lpush, rpush, publish]. Defaults to lpush
#send_method = lpush

# RethinkDB output module
#
#[output_rethinkdblog]
#enabled = false
#host = 127.0.0.1
#port = 28015
#table = events
#db = elasticpot
#password =

# InfluxDB logging module
#
#[output_influx]
#enabled = false
#host = 127.0.0.1
#port = 8086
#database_name = elasticpot
#retention_policy_duration = 12w

# InfluxDB 2.0 logging module
#
#[output_influx2]
#enabled = false
#host = hostname
#token = token
#org = organization
#bucket = elasticpot

# CouchDB logging module
#
#[output_couch]
#enabled = false
#host = localhost
#port = 5984
#username = elasticpot
#password = secret
#database = elasticpot
#geoip = true
# Location of the databases used for geolocation
#geoip_citydb = data/GeoLite2-City.mmdb
#geoip_asndb = data/GeoLite2-ASN.mmdb

# SQLite3 logging module
#
# Logging to SQLite3 database. To init the database, use the script
# docs/sql/sqlite3.sql:
#     sqlite3 < docs/sql/sqlite3.sql
#
#[output_sqlite]
#enabled = false
#debug = false
#db_file = data/elasticpot.db
# Whether to store geolocation data in the database
#geoip = true
# Location of the databases used for geolocation
#geoip_citydb = data/GeoLite2-City.mmdb
#geoip_asndb = data/GeoLite2-ASN.mmdb
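
# A sketch of initializing the SQLite3 database used by output_sqlite above,
# assuming the default db_file (the database file is passed explicitly so that
# the schema is written to it rather than to a transient in-memory database):
#
#     sqlite3 data/elasticpot.db < docs/sql/sqlite3.sql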

# Elasticsearch logging module
#
#[output_elastic]
#enabled = false
#host = localhost
#port = 9200
#index = elasticpot
#
# type has been deprecated since ES 6.0.0; use _doc, which is the default type.
# See https://stackoverflow.com/a/53688626 for more information.
#
#type = _doc
#
# Set pipeline = geoip to map src_ip to geolocation data. You can use a custom
# pipeline, but you must ensure it exists in Elasticsearch (a sketch of
# creating the default geoip pipeline is given at the end of this file).
#
#pipeline = geoip
#
# Authentication. When x-pack.security is enabled in ES, default users have
# been created and requests must be authenticated.
#
# Credentials
#
#username = elasticpot
#password = secret
#
# TLS encryption. Communication between the client (elasticpot) and the ES
# server should naturally be protected by encryption if requests are
# authenticated (to prevent man-in-the-middle attacks). The following options
# therefore matter whenever a username and password are provided.
#
# use ssl/tls
#ssl = true
# verify SSL certificates
#verify_certs = true
# Path to trusted CA certs on disk
#ca_certs = /path/to/cert/file/elastic_ca.crt

# TODO:
# Kafka logging module
#
#[output_kafka]
#enabled = false
#host = 127.0.0.1
#port = 9092
#topic = elasticpot
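
# For the output_elastic module above, the ingest pipeline referenced by
# "pipeline = geoip" has to exist in Elasticsearch before events are indexed.
# A minimal sketch of creating it with the standard geoip processor (pipeline
# name and src_ip field taken from the settings above; adjust host, port and
# credentials to match your ES instance):
#
#     curl -X PUT "http://localhost:9200/_ingest/pipeline/geoip" \
#          -H "Content-Type: application/json" \
#          -d '{"description": "Geolocate src_ip",
#               "processors": [{"geoip": {"field": "src_ip"}}]}'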