Mirror of https://github.com/telekom-security/tpotce.git, synced 2025-04-20 06:02:24 +00:00
Branch: tweaking

Commit 618ee3c6e9 (parent 3a418534d8)
Add Kibana export/import function for config objects; ewsposter: stop transmitting old elasticpot data (needs update); final export of all objects.

8 changed files with 47 additions and 14 deletions
@@ -17,15 +17,16 @@ fi
 myDATE=$(date +%Y%m%d%H%M)
 myINDEXCOUNT=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=index-pattern' | jq '.saved_objects[].attributes' | tr '\\' '\n' | grep "scripted" | wc -w)
 myINDEXID=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=index-pattern' | jq '.saved_objects[].id' | tr -d '"')
-myDASHBOARDS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=dashboard&per_page=300' | jq '.saved_objects[].id' | tr -d '"')
-myVISUALIZATIONS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=visualization&per_page=300' | jq '.saved_objects[].id' | tr -d '"')
-mySEARCHES=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=search&per_page=300' | jq '.saved_objects[].id' | tr -d '"')
+myDASHBOARDS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=dashboard&per_page=500' | jq '.saved_objects[].id' | tr -d '"')
+myVISUALIZATIONS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=visualization&per_page=500' | jq '.saved_objects[].id' | tr -d '"')
+mySEARCHES=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=search&per_page=500' | jq '.saved_objects[].id' | tr -d '"')
+myCONFIGS=$(curl -s -XGET ''$myKIBANA'api/saved_objects/_find?type=config&per_page=500' | jq '.saved_objects[].id' | tr -d '"')
 myCOL1="[0;34m"
 myCOL0="[0;0m"

 # Let's ensure normal operation on exit or if interrupted ...
 function fuCLEANUP {
-  rm -rf patterns/ dashboards/ visualizations/ searches/
+  rm -rf patterns/ dashboards/ visualizations/ searches/ configs/
 }
 trap fuCLEANUP EXIT
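Why the per_page bump matters: Kibana's saved-objects _find API pages its results, so per_page has to be at least the total number of objects of that type or ids silently fall off the first page; 500 simply buys more headroom than 300. A minimal sketch of the same fetch pattern, assuming $myKIBANA points at a reachable Kibana 6/7 base URL (endpoint and type below are assumptions, not values from the commit):

#!/bin/bash
# Sketch: list saved-object ids of one type via the _find API.
myKIBANA="http://127.0.0.1:5601/"   # assumption: local Kibana, no auth
myTYPE="dashboard"
# per_page must cover the total object count, or the listing is silently truncated.
myIDS=$(curl -s -XGET "${myKIBANA}api/saved_objects/_find?type=${myTYPE}&per_page=500" | jq -r '.saved_objects[].id')
echo "$(echo $myIDS | wc -w) ${myTYPE} objects found"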
@@ -65,12 +66,22 @@ for i in $mySEARCHES;
 done;
 echo

+# Export configs
+mkdir -p configs
+echo $myCOL1"### Now exporting"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." $myCOL0
+for i in $myCONFIGS;
+  do
+    echo $myCOL1"###### "$i $myCOL0
+    curl -s -XGET ''$myKIBANA'api/saved_objects/config/'$i'' | jq '. | {attributes, references}' > configs/$i.json &
+  done;
+echo
+
 # Wait for background exports to finish
 wait

 # Building tar archive
 echo $myCOL1"### Now building archive"$myCOL0 "kibana-objects_"$myDATE".tgz"
-tar cvfz kibana-objects_$myDATE.tgz patterns dashboards visualizations searches > /dev/null
+tar cvfz kibana-objects_$myDATE.tgz patterns dashboards visualizations searches configs > /dev/null

 # Stats
 echo
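The jq filter '. | {attributes, references}' trims each exported object down to the two keys the saved-objects create API accepts on re-import. A sketch of that round trip for a single object, with a purely hypothetical id:

# Sketch: export one saved object; the id is a placeholder, not from the commit.
myKIBANA="http://127.0.0.1:5601/"
myID="example-dashboard-id"   # hypothetical
curl -s -XGET "${myKIBANA}api/saved_objects/dashboard/${myID}" | jq '{attributes, references}' > "dashboards/${myID}.json"
# The stripped JSON can later be POSTed back verbatim via -d @dashboards/${myID}.json,
# which is exactly what the import script below does.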
@@ -79,4 +90,5 @@ echo $myCOL1"###### Exported"$myCOL0 $myINDEXCOUNT $myCOL1"index patterns." $myCOL0
 echo $myCOL1"###### Exported"$myCOL0 $(echo $myDASHBOARDS | wc -w) $myCOL1"dashboards." $myCOL0
 echo $myCOL1"###### Exported"$myCOL0 $(echo $myVISUALIZATIONS | wc -w) $myCOL1"visualizations." $myCOL0
 echo $myCOL1"###### Exported"$myCOL0 $(echo $mySEARCHES | wc -w) $myCOL1"searches." $myCOL0
+echo $myCOL1"###### Exported"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." $myCOL0
 echo
@@ -20,7 +20,7 @@ myCOL0="[0;0m"

 # Let's ensure normal operation on exit or if interrupted ...
 function fuCLEANUP {
-  rm -rf patterns/ dashboards/ visualizations/ searches/
+  rm -rf patterns/ dashboards/ visualizations/ searches/ configs/
 }
 trap fuCLEANUP EXIT
@@ -98,6 +98,22 @@ for i in $mySEARCHES;
 echo
 wait

+# Restore configs
+myCONFIGS=$(ls configs/*.json | cut -c 9- | rev | cut -c 6- | rev)
+echo $myCOL1"### Now importing "$myCOL0$(echo $myCONFIGS | wc -w)$myCOL1 "configs." $myCOL0
+for i in $myCONFIGS;
+  do
+    curl -s -XDELETE ''$myKIBANA'api/saved_objects/configs/'$i'' -H "Content-Type: application/json" -H "kbn-xsrf: true" > /dev/null &
+  done;
+wait
+for i in $myCONFIGS;
+  do
+    echo $myCOL1"###### "$i $myCOL0
+    curl -s -XPOST ''$myKIBANA'api/saved_objects/configs/'$i'' -H "Content-Type: application/json" -H "kbn-xsrf: true" -d @configs/$i.json > /dev/null &
+  done;
+echo
+wait
+
 # Stats
 echo
 echo $myCOL1"### Statistics"
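One detail worth flagging in the restore block: the export hunk above addresses these objects at api/saved_objects/config/, while the DELETE and POST here use configs. Kibana's saved-objects API takes the singular type name in the path, so a working call would presumably look like this sketch (the id is a placeholder; for config objects it is typically Kibana's version string):

# Sketch: delete-then-recreate one config object, singular type name in the path.
myKIBANA="http://127.0.0.1:5601/"
i="7.6.2"   # hypothetical config object id
curl -s -XDELETE "${myKIBANA}api/saved_objects/config/${i}" -H "Content-Type: application/json" -H "kbn-xsrf: true" > /dev/null
curl -s -XPOST "${myKIBANA}api/saved_objects/config/${i}" -H "Content-Type: application/json" -H "kbn-xsrf: true" -d @configs/${i}.json > /dev/null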
@@ -105,5 +121,6 @@ echo $myCOL1"###### Imported"$myCOL0 $myINDEXCOUNT $myCOL1"index patterns." $myCOL0
 echo $myCOL1"###### Imported"$myCOL0 $(echo $myDASHBOARDS | wc -w) $myCOL1"dashboards." $myCOL0
 echo $myCOL1"###### Imported"$myCOL0 $(echo $myVISUALIZATIONS | wc -w) $myCOL1"visualizations." $myCOL0
 echo $myCOL1"###### Imported"$myCOL0 $(echo $mySEARCHES | wc -w) $myCOL1"searches." $myCOL0
+echo $myCOL1"###### Imported"$myCOL0 $(echo $myCONFIGS | wc -w) $myCOL1"configs." $myCOL0
 echo
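The ls configs/*.json | cut -c 9- | rev | cut -c 6- | rev pipeline in the import hunk recovers bare object ids by slicing off the 8-character configs/ prefix and the 5-character .json suffix. It works, but depends on the directory name's exact length; a length-independent equivalent using bash parameter expansion:

# Sketch: collect ids from exported files without fixed-width cuts.
myCONFIGS=""
for f in configs/*.json; do
  i="${f##*/}"     # strip the directory part
  i="${i%.json}"   # strip the extension
  myCONFIGS="$myCONFIGS $i"
done
echo "$(echo $myCONFIGS | wc -w) configs found"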
@@ -5,7 +5,10 @@ networks:

 services:

-# dicompot service
+# Dicompot service
+# Get the Horos Client for testing: https://horosproject.org/
+# Get Dicom images (CC BY 3.0): https://www.cancerimagingarchive.net/collections/
+# Put images (which must be in Dicom DCM format or it will not work!) into /data/dicompot/images
   dicompot:
     build: .
     container_name: dicompot
@@ -18,5 +21,4 @@ services:
     read_only: true
     volumes:
      - /data/dicompot/log:/var/log/dicompot
-#    - /path/to/dicom/images:/opt/dicompot/images
-
+#    - /data/dicompot/images:/opt/dicompot/images
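The new compose comments spell out the intended workflow: stage DICOM (.dcm) files on the host, then enable the commented-out images volume. A hedged usage sketch; the source path is purely illustrative:

# Sketch: stage test images for dicompot (source path is an example only).
sudo mkdir -p /data/dicompot/images
sudo cp ~/dicom-samples/*.dcm /data/dicompot/images/
# Afterwards, uncomment the images volume above and restart the service.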
docker/ews/dist/ews.cfg (vendored, 4 lines changed)
@@ -92,9 +92,9 @@ nodeid = conpot-community-01
 logfile = /data/conpot/log/conpot*.json

 [ELASTICPOT]
-elasticpot = true
+elasticpot = false
 nodeid = elasticpot-community-01
-logfile = /data/elasticpot/log/elasticpot.log
+logfile = /data/elasticpot/log/elasticpot.json

 [SURICATA]
 suricata = true
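With elasticpot transmission switched off and the log path moved to the JSON file, the effective section can be spot-checked after an update; a small sketch (the path assumes the vendored file shown here):

# Sketch: print the [ELASTICPOT] section to confirm the new settings.
awk '/^\[ELASTICPOT\]/{p=1} p&&/^$/{exit} p' docker/ews/dist/ews.cfg
# Expected output includes: elasticpot = false and the .json logfile path.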
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -260,7 +260,7 @@ echo "### Now pulling latest docker images"
 echo "######$myBLUE This might take a while, please be patient!$myWHITE"
 fuPULLIMAGES 2>&1>/dev/null

-#fuREMOVEOLDIMAGES "1804"
+#fuREMOVEOLDIMAGES "1903"
 echo "### If you made changes to tpot.yml please ensure to add them again."
 echo "### We stored the previous version as backup in /root/."
 echo "### Some updates may need an import of the latest Kibana objects as well."
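A side note on the fuPULLIMAGES 2>&1>/dev/null context line above: shell redirections apply left to right, so this first duplicates stderr onto the current stdout (the terminal) and only then silences stdout, meaning error output still reaches the screen. Silencing both streams requires the reverse order; a two-line demonstration:

ls /nonexistent 2>&1 >/dev/null   # error message still prints to the terminal
ls /nonexistent >/dev/null 2>&1   # both streams discarded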
@@ -268,7 +268,9 @@ echo "### Download the latest objects here if they recently changed:"
 echo "### https://raw.githubusercontent.com/dtag-dev-sec/tpotce/master/etc/objects/kibana_export.json.zip"
 echo "### Export and import the objects easily through the Kibana WebUI:"
 echo "### Go to Kibana > Management > Saved Objects > Export / Import"
-echo "### All objects will be overwritten upon import, make sure to run an export first."
+echo "### Or use the command:"
+echo "### import_kibana-objects.sh /opt/tpot/etc/objects/kibana-objects.tgz"
+echo "### All objects will be overwritten upon import, make sure to run an export first if you made changes."
 }

 function fuRESTORE_EWSCFG () {
@@ -321,5 +323,5 @@ fuRESTORE_EWSCFG
 fuRESTORE_HPFEEDS

 echo
-echo "### Please reboot."
+echo "### Done."
 echo