diff --git a/bin/export_kibana-objects.sh b/bin/export_kibana-objects.sh index 6cea65f5..a1b308fe 100755 --- a/bin/export_kibana-objects.sh +++ b/bin/export_kibana-objects.sh @@ -17,15 +17,16 @@ fi myDATE=$(date +%Y%m%d%H%M) myINDEXCOUNT=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=index-pattern' | jq '.saved_objects[].attributes' | tr '\\' '\n' | grep "scripted" | wc -w) myINDEXID=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=index-pattern' | jq '.saved_objects[].id' | tr -d '"') -myDASHBOARDS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=dashboard&per_page=300' | jq '.saved_objects[].id' | tr -d '"') -myVISUALIZATIONS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=visualization&per_page=300' | jq '.saved_objects[].id' | tr -d '"') -mySEARCHES=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=search&per_page=300' | jq '.saved_objects[].id' | tr -d '"') +myDASHBOARDS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=dashboard&per_page=500' | jq '.saved_objects[].id' | tr -d '"') +myVISUALIZATIONS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=visualization&per_page=500' | jq '.saved_objects[].id' | tr -d '"') +mySEARCHES=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=search&per_page=500' | jq '.saved_objects[].id' | tr -d '"') +myCONFIGS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=config&per_page=500' | jq '.saved_objects[].id' | tr -d '"') myCOL1="" myCOL0="" # Let's ensure normal operation on exit or if interrupted ... function fuCLEANUP { - rm -rf patterns/ dashboards/ visualizations/ searches/ + rm -rf patterns/ dashboards/ visualizations/ searches/ configs/ } trap fuCLEANUP EXIT @@ -65,12 +66,22 @@ for i in $mySEARCHES; done; echo +# Export configs +mkdir -p configs +echo $myCOL1"### Now exporting"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." 
$myCOL0 +for i in $myCONFIGS; + do + echo $myCOL1"###### "$i $myCOL0 + curl -s -XGET ''$myKIBANA'api/saved_objects/config/'$i'' | jq '. | {attributes, references}' > configs/$i.json & + done; +echo + # Wait for background exports to finish wait # Building tar archive echo $myCOL1"### Now building archive"$myCOL0 "kibana-objects_"$myDATE".tgz" -tar cvfz kibana-objects_$myDATE.tgz patterns dashboards visualizations searches > /dev/null +tar cvfz kibana-objects_$myDATE.tgz patterns dashboards visualizations searches configs > /dev/null # Stats echo @@ -79,4 +90,5 @@ echo $myCOL1"###### Exported"$myCOL0 $myINDEXCOUNT $myCOL1"index patterns." $myC echo $myCOL1"###### Exported"$myCOL0 $(echo $myDASHBOARDS | wc -w) $myCOL1"dashboards." $myCOL0 echo $myCOL1"###### Exported"$myCOL0 $(echo $myVISUALIZATIONS | wc -w) $myCOL1"visualizations." $myCOL0 echo $myCOL1"###### Exported"$myCOL0 $(echo $mySEARCHES | wc -w) $myCOL1"searches." $myCOL0 +echo $myCOL1"###### Exported"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." $myCOL0 echo diff --git a/bin/import_kibana-objects.sh b/bin/import_kibana-objects.sh index 5524ce55..43d34206 100755 --- a/bin/import_kibana-objects.sh +++ b/bin/import_kibana-objects.sh @@ -20,7 +20,7 @@ myCOL0="" # Let's ensure normal operation on exit or if interrupted ... function fuCLEANUP { - rm -rf patterns/ dashboards/ visualizations/ searches/ + rm -rf patterns/ dashboards/ visualizations/ searches/ configs/ } trap fuCLEANUP EXIT @@ -98,6 +98,22 @@ for i in $mySEARCHES; echo wait +# Restore configs +myCONFIGS=$(ls configs/*.json | cut -c 9- | rev | cut -c 6- | rev) +echo $myCOL1"### Now importing "$myCOL0$(echo $myCONFIGS | wc -w)$myCOL1 "configs." 
$myCOL0 +for i in $myCONFIGS; + do + curl -s -XDELETE ''$myKIBANA'api/saved_objects/config/'$i'' -H "Content-Type: application/json" -H "kbn-xsrf: true" > /dev/null & + done; +wait +for i in $myCONFIGS; + do + echo $myCOL1"###### "$i $myCOL0 + curl -s -XPOST ''$myKIBANA'api/saved_objects/config/'$i'' -H "Content-Type: application/json" -H "kbn-xsrf: true" -d @configs/$i.json > /dev/null & + done; +echo +wait + # Stats echo echo $myCOL1"### Statistics" @@ -105,5 +121,6 @@ echo $myCOL1"###### Imported"$myCOL0 $myINDEXCOUNT $myCOL1"index patterns." $myC echo $myCOL1"###### Imported"$myCOL0 $(echo $myDASHBOARDS | wc -w) $myCOL1"dashboards." $myCOL0 echo $myCOL1"###### Imported"$myCOL0 $(echo $myVISUALIZATIONS | wc -w) $myCOL1"visualizations." $myCOL0 echo $myCOL1"###### Imported"$myCOL0 $(echo $mySEARCHES | wc -w) $myCOL1"searches." $myCOL0 +echo $myCOL1"###### Imported"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." $myCOL0 echo diff --git a/docker/dicompot/docker-compose.yml b/docker/dicompot/docker-compose.yml index e9a90845..e06a4fad 100644 --- a/docker/dicompot/docker-compose.yml +++ b/docker/dicompot/docker-compose.yml @@ -5,7 +5,10 @@ networks: services: -# dicompot service +# Dicompot service +# Get the Horos Client for testing: https://horosproject.org/ +# Get Dicom images (CC BY 3.0): https://www.cancerimagingarchive.net/collections/ +# Put images (which must be in Dicom DCM format or it will not work!) into /data/dicompot/images dicompot: build: . 
container_name: dicompot @@ -18,5 +21,4 @@ services: read_only: true volumes: - /data/dicompot/log:/var/log/dicompot -# - /path/to/dicom/images:/opt/dicompot/images - +# - /data/dicompot/images:/opt/dicompot/images diff --git a/docker/ews/dist/ews.cfg b/docker/ews/dist/ews.cfg index 3bdf7bc0..44fc9e7d 100644 --- a/docker/ews/dist/ews.cfg +++ b/docker/ews/dist/ews.cfg @@ -92,9 +92,9 @@ nodeid = conpot-community-01 logfile = /data/conpot/log/conpot*.json [ELASTICPOT] -elasticpot = true +elasticpot = false nodeid = elasticpot-community-01 -logfile = /data/elasticpot/log/elasticpot.log +logfile = /data/elasticpot/log/elasticpot.json [SURICATA] suricata = true diff --git a/etc/objects/elkbase.tgz b/etc/objects/elkbase.tgz index 24aa95c2..75add335 100644 Binary files a/etc/objects/elkbase.tgz and b/etc/objects/elkbase.tgz differ diff --git a/etc/objects/kibana-objects.tgz b/etc/objects/kibana-objects.tgz index b3809a73..18497d19 100644 Binary files a/etc/objects/kibana-objects.tgz and b/etc/objects/kibana-objects.tgz differ diff --git a/etc/objects/kibana_export.ndjson.zip b/etc/objects/kibana_export.ndjson.zip index 7c003a7e..07be08c4 100644 Binary files a/etc/objects/kibana_export.ndjson.zip and b/etc/objects/kibana_export.ndjson.zip differ diff --git a/update.sh b/update.sh index d4285014..db7850e9 100755 --- a/update.sh +++ b/update.sh @@ -260,7 +260,7 @@ echo "### Now pulling latest docker images" echo "######$myBLUE This might take a while, please be patient!$myWHITE" fuPULLIMAGES 2>&1>/dev/null -#fuREMOVEOLDIMAGES "1804" +#fuREMOVEOLDIMAGES "1903" echo "### If you made changes to tpot.yml please ensure to add them again." echo "### We stored the previous version as backup in /root/." echo "### Some updates may need an import of the latest Kibana objects as well." 
@@ -268,7 +268,9 @@ echo "### Download the latest objects here if they recently changed:" echo "### https://raw.githubusercontent.com/dtag-dev-sec/tpotce/master/etc/objects/kibana_export.json.zip" echo "### Export and import the objects easily through the Kibana WebUI:" echo "### Go to Kibana > Management > Saved Objects > Export / Import" -echo "### All objects will be overwritten upon import, make sure to run an export first." +echo "### Or use the command:" +echo "### import_kibana-objects.sh /opt/tpot/etc/objects/kibana-objects.tgz" +echo "### All objects will be overwritten upon import, make sure to run an export first if you made changes." } function fuRESTORE_EWSCFG () { @@ -321,5 +323,5 @@ fuRESTORE_EWSCFG fuRESTORE_HPFEEDS echo -echo "### Please reboot." +echo "### Done." echo