From 65af48d7772a2bd8bacf6f71a457697807833ef5 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Thu, 2 Dec 2021 10:34:46 +0100 Subject: [PATCH 01/14] Splitting CI tests for better result handling in README. Added another status badge for develop branch - it is the core branch for development --- .github/workflows/develop_by_makefile.yml | 28 +++++++++++++++++++ .../{makefile.yml => main_by_makefile.yml} | 6 ++-- README.md | 13 +++++++-- 3 files changed, 41 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/develop_by_makefile.yml rename .github/workflows/{makefile.yml => main_by_makefile.yml} (83%) diff --git a/.github/workflows/develop_by_makefile.yml b/.github/workflows/develop_by_makefile.yml new file mode 100644 index 0000000..3aedc74 --- /dev/null +++ b/.github/workflows/develop_by_makefile.yml @@ -0,0 +1,28 @@ +name: Develop + +on: + push: + branches: [ develop ] + pull_request: + branches: [ develop ] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python: [ 3.9 ] + + steps: + - uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install Tox and any other packages + run: pip install tox + - name: Run check + run: make check + + diff --git a/.github/workflows/makefile.yml b/.github/workflows/main_by_makefile.yml similarity index 83% rename from .github/workflows/makefile.yml rename to .github/workflows/main_by_makefile.yml index 6855c3f..681f72f 100644 --- a/.github/workflows/makefile.yml +++ b/.github/workflows/main_by_makefile.yml @@ -1,10 +1,10 @@ -name: Makefile CI +name: Main on: push: - branches: [ main, develop ] + branches: [ main ] pull_request: - branches: [ main, develop ] + branches: [ main ] jobs: build: diff --git a/README.md b/README.md index e72f067..f9dd8f0 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,5 @@ -![main branch test](https://github.com/avast/PurpleDome/actions/workflows/makefile.yml/badge.svg?branch=main) +![main branch test](https://github.com/avast/PurpleDome/actions/workflows/main_by_makefile.yml/badge.svg?branch=main) +![develop branch test](https://github.com/avast/PurpleDome/actions/workflows/develop_by_makefile.yml/badge.svg?branch=develop) # PurpleDome creates simulated systems which hack each other @@ -120,20 +121,26 @@ Short: Branching your own feature branch +``` $ git checkout development $ git pull --rebase=preserve $ git checkout -b my_feature +``` Do some coding, commit. Rebase before pushing +``` $ git checkout development $ git pull --rebase=preserve $ git checkout my_feature $ git rebase development +``` Code review will be happening on github. If everything is nice, you should squash the several commits you made into one (so one commit = one feature). This will make code management and debugging a lot simpler when you commit is added to develop and main branches -.. 
TODO: git rebase --interactive -git push --force \ No newline at end of file +``` +git rebase --interactive +git push --force +``` \ No newline at end of file From a2bb40a7c4d3e2a09605af3bc2135d384b15de08 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Thu, 2 Dec 2021 10:49:59 +0100 Subject: [PATCH 02/14] Workflows can be manually triggered --- .github/workflows/develop_by_makefile.yml | 2 ++ .github/workflows/main_by_makefile.yml | 2 ++ 2 files changed, 4 insertions(+) diff --git a/.github/workflows/develop_by_makefile.yml b/.github/workflows/develop_by_makefile.yml index 3aedc74..2cc2703 100644 --- a/.github/workflows/develop_by_makefile.yml +++ b/.github/workflows/develop_by_makefile.yml @@ -5,6 +5,8 @@ on: branches: [ develop ] pull_request: branches: [ develop ] + workflow_dispatch: + branches: [ develop ] jobs: build: diff --git a/.github/workflows/main_by_makefile.yml b/.github/workflows/main_by_makefile.yml index 681f72f..3896374 100644 --- a/.github/workflows/main_by_makefile.yml +++ b/.github/workflows/main_by_makefile.yml @@ -5,6 +5,8 @@ on: branches: [ main ] pull_request: branches: [ main ] + workflow_dispatch: + branches: [ main ] jobs: build: From 1cc48395a254c7ca0d258b5a16ef77f8878372b6 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 3 Dec 2021 15:32:43 +0100 Subject: [PATCH 03/14] Using new ubuntu version "impish64" as target --- systems/Vagrantfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/systems/Vagrantfile b/systems/Vagrantfile index 5301ee2..0c40774 100644 --- a/systems/Vagrantfile +++ b/systems/Vagrantfile @@ -136,7 +136,7 @@ Vagrant.configure("2") do |config| # Every Vagrant development environment requires a box. You can search for # boxes at https://vagrantcloud.com/search. - target3.vm.box = "ubuntu/groovy64" + target3.vm.box = "ubuntu/impish64" # target.vm.base_mac = "080027BB1475" target3.vm.hostname = "target3" target3.vm.define "target3" From 669688b33a0920056c3b27f3968600fcef84ced7 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 3 Dec 2021 15:33:02 +0100 Subject: [PATCH 04/14] Updating README with feedback --- README.md | 44 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f9dd8f0..8e1d8d0 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,12 @@ The system is at the same time reproducible and quite flexible (target system wi ## Installation -On a current Ubuntu system, just execute the *init.sh* to install the required packages and set up the virtual env. +On a current Ubuntu 21.10 system, just execute the *init.sh* to install the required packages and set up the virtual env. + +You need python 3.9 (which is part of this Ubuntu) + +And it will not run properly in a VM as it spawns its own VMs ... unless VT-x is available. +We confirmed it is working in VirtualBox. Please reserve enough disk space. The simple hello_world will already download a kali and an ubuntu image. They must be stored on your VM. ``` ./init.sh @@ -27,7 +32,7 @@ source venv/bin/activate ## My first experiment -Run +Run and be very patient. The first time it runs it will build target and attacker VMs which is time consuming and will need some bandwidth. 
``` python3 ./experiment_control.py -vvv run --configfile hello_world.yaml @@ -56,6 +61,41 @@ evince tools/human_readable_documentation/build/latex/purpledomesimulation.pdf (which is included in the zip as well) +## Fixing issues + +### Machine creation + +One of the big steps is creation of attacker and target machines. If this fails, you can do the step manually and check why it fails. + +``` +cd systems +vagrant up attacker +vagrant up target3 +vagrant ssh attacker +# do someting +exit +vagrant ssh target +# do something +exit +vagrant destroy target3 +vagrant destroy attacker +``` + +### Caldera issues + +The caldera server is running on the attacker. It will be contacted by the implants installed on the client and remote controlled by PurpleDome using a REST Api. This can be tested using curl: + +``` +curl -H 'KEY: ADMIN123' http://attacker:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"adversaries"}' +``` + +If there are errors, connect to the attacker using ssh and monitor the server while contacting it. Maybe kill it first. + +``` +cd caldera +python3 server.py --insecure +``` + ## Running the basic commands All command line tools have a help included. You can access it by the "--help" parameter From ea6d6e42511bbd4e35893bd941c226e70dfa7e74 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 3 Dec 2021 15:33:30 +0100 Subject: [PATCH 05/14] deactivating logstash config --- systems/target3/bootstrap.sh | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/systems/target3/bootstrap.sh b/systems/target3/bootstrap.sh index 7d57892..acbed01 100755 --- a/systems/target3/bootstrap.sh +++ b/systems/target3/bootstrap.sh @@ -33,28 +33,30 @@ wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add - echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-7.x.list apt update + +################# This must move into a plugin !!! 
############### # Install Logstash -apt -y install default-jre -apt -y install logstash +# apt -y install default-jre +# apt -y install logstash # Install filebeat -apt -y install filebeat +# apt -y install filebeat # Configure logstash as output -cp /vagrant/target1/config/filebeat.yml /etc/filebeat/filebeat.yml -cp /vagrant/target1/config/caldera_agent.service /etc/systemd/system/ +# cp /vagrant/target1/config/filebeat.yml /etc/filebeat/filebeat.yml +# cp /vagrant/target1/config/caldera_agent.service /etc/systemd/system/ # Config logstash -cp /vagrant/target1/logstash_conf/*.conf /etc/logstash/conf.d -rm /vagrant/target1/logstash/filebeat.json -touch /vagrant/target1/logstash/filebeat.json -chmod o+w /vagrant/target1/logstash/filebeat.json +# cp /vagrant/target1/logstash_conf/*.conf /etc/logstash/conf.d +# rm /vagrant/target1/logstash/filebeat.json +# touch /vagrant/target1/logstash/filebeat.json +# chmod o+w /vagrant/target1/logstash/filebeat.json # Start Logstash and filebeat -filebeat modules enable system,iptables -filebeat setup --pipelines --modules iptables,system, -systemctl start logstash.service -systemctl enable filebeat -systemctl enable logstash.service +# filebeat modules enable system,iptables +# filebeat setup --pipelines --modules iptables,system, +# systemctl start logstash.service +# systemctl enable filebeat +# systemctl enable logstash.service # Run logstash manually for debugging: # https://www.elastic.co/guide/en/logstash/current/running-logstash-command-line.html From 6e05d489f3dda6986a19057d04b0f660e193085b Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 3 Dec 2021 15:33:47 +0100 Subject: [PATCH 06/14] Verified: Caldera works on a hello_world with fresh machines --- app/machinecontrol.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/machinecontrol.py b/app/machinecontrol.py index e679847..2ba938b 100644 --- a/app/machinecontrol.py +++ b/app/machinecontrol.py @@ -404,7 +404,7 @@ class Machine(): else: cleanupcmd = "" - cmd = f"cd {self.caldera_basedir}; {cleanupcmd} git clone https://github.com/mitre/caldera.git --recursive --branch {version}; cd caldera; pip3 install -r requirements.txt" + cmd = f"cd {self.caldera_basedir}; {cleanupcmd} git clone https://github.com/mitre/caldera.git --recursive --branch {version}; cd caldera; git checkout {version}; pip3 install -r requirements.txt" self.attack_logger.vprint(f"{CommandlineColors.OKGREEN}Caldera server installed {CommandlineColors.ENDC}", 1) res = self.vm_manager.__call_remote_run__(cmd) return "Result installing caldera server " + str(res) From bb552bd82ac12a5e5fd79edf1610dc0a1e828f71 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Wed, 8 Dec 2021 10:02:09 +0100 Subject: [PATCH 07/14] Fixing Hydra vs. filebeat. Issues fixed: Kali linux updated. Hydra dependency fixed. Filebeat fully moved to plugin (no Vagrant setup left). 
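
If the filebeat sensor misbehaves after this change, the pieces the plugin now sets up can be checked by hand on the target. A rough sketch, assuming the linux_filebeat sensor plugin has already run and installed filebeat and logstash as systemd services (machine name as defined in systems/Vagrantfile):

```
# on the target VM
vagrant ssh target3
systemctl status filebeat             # the sensor ships logs via filebeat
systemctl status logstash             # logstash writes the collected json output
sudo journalctl -u filebeat.service   # install/startup errors show up here
sudo journalctl -u logstash.service
```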
--- app/experimentcontrol.py | 3 +- .../linux_filebeat/linux_filebeat_plugin.py | 29 +++++++----- systems/Vagrantfile | 2 +- systems/attacker1/bootstrap.sh | 10 ++--- systems/target3/bootstrap.sh | 44 +------------------ 5 files changed, 27 insertions(+), 61 deletions(-) diff --git a/app/experimentcontrol.py b/app/experimentcontrol.py index 92206f4..b2fd8b3 100644 --- a/app/experimentcontrol.py +++ b/app/experimentcontrol.py @@ -73,6 +73,7 @@ class Experiment(): if self.machine_needs_caldera(target_1, caldera_attacks): target_1.install_caldera_service() target_1.up() + target_1.reboot() # Kernel changes on system creation require a reboot needs_reboot = target_1.prime_vulnerabilities() needs_reboot |= target_1.prime_sensors() if needs_reboot: @@ -331,7 +332,7 @@ class Experiment(): except subprocess.CalledProcessError: # Machine does not exist pass - self.attacker_1.create(reboot=False) + self.attacker_1.create(reboot=True) self.attacker_1.up() self.attacker_1.install_caldera_server(cleanup=False) else: diff --git a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py index 83f3cb6..fa04c3e 100644 --- a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py +++ b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py @@ -1,10 +1,12 @@ #!/usr/bin/env python3 # A plugin to experiment with Linux logstash filebeat sensors +# https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-installation-configuration.html from plugins.base.sensor import SensorPlugin import os from jinja2 import Environment, FileSystemLoader, select_autoescape +import time class LinuxFilebeatPlugin(SensorPlugin): @@ -42,12 +44,17 @@ class LinuxFilebeatPlugin(SensorPlugin): self.vprint("Installing Linux filebeat sensor", 3) - self.run_cmd("sudo wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -") - self.run_cmd('sudo echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-7.x.list') - self.run_cmd("sudo apt update") - self.run_cmd("sudo apt -y install default-jre") - self.run_cmd("sudo apt -y install logstash") - self.run_cmd("sudo apt -y install filebeat") + # Filebeat + fb_file = "filebeat-7.15.2-amd64.deb" + self.run_cmd(f"curl -L -O https://artifacts.elastic.co/downloads/beats/filebeat/{fb_file}") + self.run_cmd(f"sudo dpkg -i {fb_file}") + + # Logstash + + self.run_cmd("wget -qO- https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo apt-key add -") + self.run_cmd("sudo apt-get install apt-transport-https") + self.run_cmd("echo 'deb https://artifacts.elastic.co/packages/7.x/apt stable main' | sudo tee -a /etc/apt/sources.list.d/elastic-7.x.list") + self.run_cmd("sudo apt update && sudo apt install logstash") # Copy config self.run_cmd(f"sudo cp {pg}/filebeat.yml /etc/filebeat/filebeat.yml") @@ -67,12 +74,12 @@ class LinuxFilebeatPlugin(SensorPlugin): def start(self): - self.run_cmd("sudo filebeat modules enable system,iptables") + self.run_cmd("sudo filebeat modules enable system iptables") self.run_cmd("sudo filebeat setup --pipelines --modules iptables,system,") - self.run_cmd("sudo systemctl enable filebeat") - self.run_cmd("sudo systemctl start filebeat") - self.run_cmd("sudo systemctl enable logstash.service") - self.run_cmd("sudo systemctl start logstash.service") + # self.run_cmd("sudo systemctl start logstash.service") + self.run_cmd("sudo nohup /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/filebeat.conf &", 
disown=True) + time.sleep(20) + self.run_cmd("sudo service filebeat start") return None diff --git a/systems/Vagrantfile b/systems/Vagrantfile index 0c40774..e857921 100644 --- a/systems/Vagrantfile +++ b/systems/Vagrantfile @@ -224,7 +224,7 @@ Vagrant.configure("2") do |config| # https://app.vagrantup.com/kalilinux/boxes/rolling attacker.vm.box = "kalilinux/rolling" - # config.vm.box_version = "2020.3.0" + attacker.vm.box_version = "2021.3.0" #config.vm.base_mac = "080027BB1476" attacker.vm.hostname = "attacker" diff --git a/systems/attacker1/bootstrap.sh b/systems/attacker1/bootstrap.sh index 105d665..3173dbc 100755 --- a/systems/attacker1/bootstrap.sh +++ b/systems/attacker1/bootstrap.sh @@ -11,16 +11,14 @@ echo "Bootstrapping attacker1" echo '* libraries/restart-without-asking boolean true' | sudo debconf-set-selections # Update system -apt update +apt -y update +export DEBIAN_FRONTEND=noninteractive +yes '' | apt -y -o DPkg::options::="--force-confdef" -o DPkg::options::="--force-confold" dist-upgrade + cd ~ wget https://bootstrap.pypa.io/get-pip.py python3 get-pip.py apt -y install golang sphinx-common -#apt -y upgrade - -#apt -y install apt-transport-https -#apt -y install openssh-server -#apt -y install whois # for mkpasswd ip addr show eth1 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1 > /vagrant/attacker1/ip4.txt diff --git a/systems/target3/bootstrap.sh b/systems/target3/bootstrap.sh index acbed01..ceba0aa 100755 --- a/systems/target3/bootstrap.sh +++ b/systems/target3/bootstrap.sh @@ -27,49 +27,9 @@ apt -y install gdb # user with password "passw0rd" # useradd -m -p '$6$q5PAnDI5K0uv$hMGMJQleeS9F2yLOiHXs2PxZHEmV.ook8jyWILzDGDxSTJmTTZSe.QgLVrnuwiyAl5PFJVARkMsSnPICSndJR1' -s /bin/bash password -# Install Elastic search debian repo -wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add - -echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-7.x.list -apt update - - -################# This must move into a plugin !!! 
############### -# Install Logstash -# apt -y install default-jre -# apt -y install logstash - -# Install filebeat -# apt -y install filebeat -# Configure logstash as output -# cp /vagrant/target1/config/filebeat.yml /etc/filebeat/filebeat.yml -# cp /vagrant/target1/config/caldera_agent.service /etc/systemd/system/ - -# Config logstash -# cp /vagrant/target1/logstash_conf/*.conf /etc/logstash/conf.d -# rm /vagrant/target1/logstash/filebeat.json -# touch /vagrant/target1/logstash/filebeat.json -# chmod o+w /vagrant/target1/logstash/filebeat.json - -# Start Logstash and filebeat -# filebeat modules enable system,iptables -# filebeat setup --pipelines --modules iptables,system, -# systemctl start logstash.service -# systemctl enable filebeat -# systemctl enable logstash.service - -# Run logstash manually for debugging: -# https://www.elastic.co/guide/en/logstash/current/running-logstash-command-line.html -# /usr/share/logstash/bin/logstash --node-name debug -f /etc/logstash/conf.d/ --log.level debug --config.debug - -# To test conf files: -# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/ -t - -# Start Caldera agent service -# ln -s /vagrant/target1/config/caldera_agent.service /etc/systemd/system -# chmod 666 /etc/systemd/system -# systemctl enable caldera_agent.service -# systemctl start caldera_agent.service +apt -y update +apt -y upgrade ip addr show enp0s8 | grep "inet\b" | awk '{print $2}' | cut -d/ -f1 > /vagrant/target3/ip4.txt From 50e0c8d9cdb1d4c0c41cc622d95a1372c04143e2 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 14 Jan 2022 11:37:37 +0100 Subject: [PATCH 08/14] Fixing filebeat --- experiment_control.py | 0 .../linux_filebeat/filebeat_template.conf | 2 +- .../linux_filebeat/linux_filebeat_plugin.py | 23 ++++++++++++++----- 3 files changed, 18 insertions(+), 7 deletions(-) mode change 100644 => 100755 experiment_control.py diff --git a/experiment_control.py b/experiment_control.py old mode 100644 new mode 100755 diff --git a/plugins/default/sensors/linux_filebeat/filebeat_template.conf b/plugins/default/sensors/linux_filebeat/filebeat_template.conf index 249d6be..125babd 100644 --- a/plugins/default/sensors/linux_filebeat/filebeat_template.conf +++ b/plugins/default/sensors/linux_filebeat/filebeat_template.conf @@ -8,7 +8,7 @@ filter {} output { file { - path => "{{playground}}/filebeat.json" + path => "/tmp/filebeat_collection.json" codec => json id => "id_filebeat" create_if_deleted => true diff --git a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py index fa04c3e..b1128d5 100644 --- a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py +++ b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py @@ -62,8 +62,10 @@ class LinuxFilebeatPlugin(SensorPlugin): # Cleanup self.run_cmd(f"rm {pg}/filebeat.json") - self.run_cmd(f"touch {pg}/filebeat.json") - self.run_cmd(f"chmod o+w {pg}/filebeat.json") + # self.run_cmd(f"touch {pg}/filebeat.json") + # self.run_cmd(f"chmod o+w {pg}/filebeat.json") + self.run_cmd("touch /tmp/filebeat_collection.json") + self.run_cmd("sudo chown logstash:logstash /tmp/filebeat_collection.json") return False @@ -75,11 +77,18 @@ class LinuxFilebeatPlugin(SensorPlugin): def start(self): self.run_cmd("sudo filebeat modules enable system iptables") - self.run_cmd("sudo filebeat setup --pipelines --modules iptables,system,") + self.run_cmd("sudo filebeat setup --pipelines --modules iptables,system,") # check with sudo filebeat modules list # 
self.run_cmd("sudo systemctl start logstash.service") - self.run_cmd("sudo nohup /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/filebeat.conf &", disown=True) + # self.run_cmd("sudo nohup /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/filebeat.conf &", disown=True) + self.run_cmd("sudo chown logstash:logstash filebeat.json") # check with: systemctl status logstash.service + self.run_cmd("sudo systemctl start logstash") # check with: systemctl status logstash.service + self.run_cmd("sudo systemctl enable logstash") # check with: systemctl status logstash.service time.sleep(20) - self.run_cmd("sudo service filebeat start") + self.run_cmd("sudo systemctl start filebeat") # check with: systemctl status filebeat.service + self.run_cmd("sudo systemctl enable filebeat") # check with: systemctl status filebeat.service + + # Check the logs: sudo journalctl -u filebeat.service + # Check the logs: sudo journalctl -u logstash.service return None @@ -91,6 +100,8 @@ class LinuxFilebeatPlugin(SensorPlugin): """ Collect sensor data """ pg = self.get_playground() + breakpoint() dst = os.path.join(path, "filebeat.json") - self.get_from_machine(f"{pg}/filebeat.json", dst) # nosec + # self.get_from_machine(f"{pg}/filebeat.json", dst) # nosec + self.get_from_machine("/tmp/filebeat_collection.json", dst) # nosec return [dst] From fbcfc0c452722dae2298bf956dfeff9e77875b41 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 14 Jan 2022 11:37:58 +0100 Subject: [PATCH 09/14] Fixing and enhancing hydra --- plugins/default/kali/hydra/hydra_plugin.py | 1 + plugins/default/kali/hydra/passwords.txt | 128 +++++++++++++++++++++ plugins/default/kali/hydra/users.txt | 3 +- 3 files changed, 130 insertions(+), 2 deletions(-) diff --git a/plugins/default/kali/hydra/hydra_plugin.py b/plugins/default/kali/hydra/hydra_plugin.py index f34442a..97ba762 100644 --- a/plugins/default/kali/hydra/hydra_plugin.py +++ b/plugins/default/kali/hydra/hydra_plugin.py @@ -56,5 +56,6 @@ class HydraPlugin(AttackPlugin): attack_name=self.name, ttp=self.ttp, logid=logid) + cmd = f"cd {playground};" return total_res diff --git a/plugins/default/kali/hydra/passwords.txt b/plugins/default/kali/hydra/passwords.txt index 2af5eee..82f8cdf 100644 --- a/plugins/default/kali/hydra/passwords.txt +++ b/plugins/default/kali/hydra/passwords.txt @@ -1,3 +1,131 @@ +xvefdxjuqk +kvwflcmiqa +yhxnhlqwtx +ouemazgzmb +twwhgsiofq +jbofrkhrnn +dwpzoogxsw +lozrjyqnls +hvgrgknatd +xrpjllxtyo +nanqmrnkks +csnpxuuxfb +yiljmjpocp +zyfczzfvwh +dlqcdyahnz +ehxfcbeijg +jkpmbpcxnj +ltjtbxpamg +eyrajqnuut +dapjegddax +eouifybuhl +ibwvvdflzd +fkwnnmqfyo +vcjxqrxtva +eyifkppnky +ysdcwndhea +xvdkwbqoji +vnpoudewpo +nsbaoqycbi +vuwkudxvfb +pvohjnufds +vtxhlvxznf +cmqnywqpjh +gsquplthrj +tfnrakwtiy +gitllmexga +dvbxkbehml +pnorflbeck +asacllxlwk +rqvostegmt +lsvqkpgyzg +gvwbvpilqw +eedoziepoe +kycwkkirxl +qrivxifdij +tavotpkspl +kjirpjnked +helaayxgwt +jtuooqdfsc +sghskzybja +tziamzckhq +defaqhtbey +jxbrysrozn +wygslrurew +rjgagkgzmn +wsezgkypcr +atigwhtfvl +kifozivamt +hzllpwvksz +afolruiwbp +zpptgbtjqw +ohcnltaufb +sxlxvgowsb +vwjjodasay +qaebjwrccw +ldsqykstmi +gtdnytalcz +cqoslsepnt +lvgdetcmno +hdhlymtrne +rtzgfhpjsu +xcfoisbbny +wuxaqvfpvi +yugwtitfxb +yjuihpgqpk +behpgnnjiq +gvsanlarla +ntggcwhzez +dzbyasmoni +xqbstvhrbv +snupvltvlo +enwiqyjnau +kibypdumkk +prrddhqlfs +xhsbshhmba +nfbrqxmdfg +frgqhcdkqg +kvqrfdhyvv +pwootxogtr +vsnyxeoxuu +amtvaepfav +fflegmisbl +ulucqdvido +abwftzqlud +sbmezxoidz +zigaehmkof +fixdalasyk +ewrvfuiukl +vpgnwjhutk +ncrpjcodvg 
+ntpsqfborm +abcphtoyge +oxpgalzoec +fdtevjulsm +dspmmlfbtf +hucvyrlzwz +ychsetjmxr +qiaqhmhksj +bpjnvhklgr +cqoxdifygi +gqvqgfggfj +ylhvukeqhg +lpxutcvcmn +lwbmfwteod +fnqlcbmriv +lqaqwnkjmp +flkrzmiujr +hlmqbkpjog +burqlqzqsm +upbekwehbt +lktdviabid +yehszqkaut +klhnmbamqt +kavsmnqlld +kivpjuysxr +kqsynlnokn +ajsiaqtyxw +crjoljcshx 12345 qwert qwertz diff --git a/plugins/default/kali/hydra/users.txt b/plugins/default/kali/hydra/users.txt index 986ff7b..7995330 100644 --- a/plugins/default/kali/hydra/users.txt +++ b/plugins/default/kali/hydra/users.txt @@ -1,5 +1,4 @@ test root password -nonexistend_user_1 -nonexistend_user_2 \ No newline at end of file +nonexistend_user_1 \ No newline at end of file From ac1825fdd7b528223923381e9fefe872f709c999 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Fri, 14 Jan 2022 12:35:15 +0100 Subject: [PATCH 10/14] Added prototyp for detection logic and attack diagrams --- detect.py | 101 ++++++++++++++++++ .../linux_filebeat/linux_filebeat_plugin.py | 2 - 2 files changed, 101 insertions(+), 2 deletions(-) create mode 100755 detect.py diff --git a/detect.py b/detect.py new file mode 100755 index 0000000..17c0584 --- /dev/null +++ b/detect.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 + +""" Use logic to detect an attack in sensor logs. This is a PROTOTYPE !!!! """ + +# TODO: Move essential parts to app folder as soon as it is obvious what is required. Maybe even add the code to existing plugins (sensor plugins ?) or create a new plugin type. Mybe ship that with the sensor in the same folder. + +import argparse +import json +import re +from pprint import pprint +from datetime import datetime +from collections import defaultdict + +DEFAULT_SENSOR_LOG = "loot/2022_01_07___18_36_21/target3/sensors/linux_filebeat/filebeat.json" + + +class Detector(): + """ + An experimental prototype to play with detection and display of events. This code should later be part of plugins. 
+ But until I know where this is going we have this prototype + """ + + def __init__(self, args): + + self.processed_data = [] + + as_text = "[" + + # Filebeat jsons are not valid jsons and have to be fixed + with open(args.sensor_log, "rt") as fh: + new = fh.read() + new = new.replace("}{", "},{") + as_text += new + as_text += "]" + self.data = json.loads(as_text) + + def detect(self, bucket_size=10, limit=20): + """ detect + + """ + + regex = r"^(?P\w*\W*\d{1,2} \d{1,2}:\d{1,2}:\d{1,2}) (?P\w*) (?P\w*)\[(?P\d*)\]: Failed password for invalid user (?P\w*) from (?P\S*) port (?P\d*)" + + detected = set() + + self.processed_data = [] + histogram = defaultdict(lambda: 0) + for entry in self.data: + if "Failed password" in entry["message"]: + res = re.match(regex, entry["message"]) + if res: + data = res.groupdict() + + year = entry['@timestamp'].split("-")[0] + pdate = datetime.strptime(f"{year} {data['date']}", "%Y %b %d %H:%M:%S") + data["timestamp_short"] = int(pdate.timestamp()) + data["timestamp"] = pdate.timestamp() + data["detections"] = [] + self.processed_data.append(data) + histogram[data["timestamp_short"] // bucket_size] += 1 + + # detect password brute forcing + for akey, value in histogram.items(): + if value > limit: + print(akey) + for processed in self.processed_data: + if processed["timestamp_short"] // bucket_size == akey: + processed["detections"].append("pwd_bruteforce") + detected.add("pwd_bruteforce") + + pprint(self.processed_data) + pprint(histogram) + return detected + + def sequence_diagram(self): + """ Creates a sequence diagram based on processed data (call detect first). Use plantuml to process it: https://plantuml.com/de/sequence-diagram""" + # For pdw_bruteforce + res = "@startuml\n" + for entry in self.processed_data: + if "pwd_bruteforce" in entry["detections"]: + res += f"{entry['attacker']} -> {entry['target']}: to {entry['process']} as {entry['user']}\n" + res += "@enduml\n" + + print(res) + + +def create_parser(): + """ Creates the parser for the command line arguments""" + parser = argparse.ArgumentParser("Detects attacks in logs. 
Can also create diagrams for the part of the logs indicating the attack") + + parser.add_argument("--sensor_log", default=DEFAULT_SENSOR_LOG, help="The sensor log to detect in") + # parser.add_argument("--outfile", default="tools/human_readable_documentation/source/contents.rst", help="The default output file") + + return parser + + +if __name__ == "__main__": + arguments = create_parser().parse_args() + detector = Detector(arguments) + if len(detector.detect()) > 0: + detector.sequence_diagram() diff --git a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py index b1128d5..ef2c245 100644 --- a/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py +++ b/plugins/default/sensors/linux_filebeat/linux_filebeat_plugin.py @@ -99,8 +99,6 @@ class LinuxFilebeatPlugin(SensorPlugin): def collect(self, path): """ Collect sensor data """ - pg = self.get_playground() - breakpoint() dst = os.path.join(path, "filebeat.json") # self.get_from_machine(f"{pg}/filebeat.json", dst) # nosec self.get_from_machine("/tmp/filebeat_collection.json", dst) # nosec From 9984530492df2f951d13046658c0bdf4fe539309 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Tue, 18 Jan 2022 16:35:32 +0100 Subject: [PATCH 11/14] Moving to Caldera 4 Alpha API --- app/calderaapi_2.py | 272 ++++++++++++++ app/calderaapi_4.py | 710 +++++++++++++++++++++++++++++++++++ app/calderacontrol.py | 412 +++----------------- app/experimentcontrol.py | 4 +- app/machinecontrol.py | 3 +- caldera_control.py | 236 ++++++++++-- tests/test_calderacontrol.py | 250 ++++-------- 7 files changed, 1318 insertions(+), 569 deletions(-) create mode 100644 app/calderaapi_2.py create mode 100644 app/calderaapi_4.py mode change 100644 => 100755 caldera_control.py diff --git a/app/calderaapi_2.py b/app/calderaapi_2.py new file mode 100644 index 0000000..9d84701 --- /dev/null +++ b/app/calderaapi_2.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python3 +""" Direct API to the caldera server. Not abstract simplification methods. 
Compatible with Caldera 2.8.1 """ + +import json +import requests +import simplejson + + +class CalderaAPI: + """ API to Caldera 2.8.1 """ + + def __init__(self, server: str, attack_logger, config=None, apikey=None): + """ + + @param server: Caldera server url/ip + @param attack_logger: The attack logger to use + @param config: The configuration + """ + # print(server) + self.url = server if server.endswith("/") else server + "/" + self.attack_logger = attack_logger + + self.config = config + + if self.config: + self.apikey = self.config.caldera_apikey() + else: + self.apikey = apikey + + def __contact_server__(self, payload, rest_path: str = "api/rest", method: str = "post"): + """ + + @param payload: payload as dict to send to the server + @param rest_path: specific path for this rest api + @param method: http method to use + """ + url = self.url + rest_path + header = {"KEY": self.apikey, + "Content-Type": "application/json"} + if method.lower() == "post": + request = requests.post(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "put": + request = requests.put(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "get": + request = requests.get(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "delete": + request = requests.delete(url, headers=header, data=json.dumps(payload)) + else: + raise ValueError + try: + res = request.json() + except simplejson.errors.JSONDecodeError as exception: # type: ignore + print("!!! Error !!!!") + print(payload) + print(request.text) + print("!!! Error !!!!") + raise exception + + return res + + def list_operations(self): + """ Return operations """ + + payload = {"index": "operations"} + return self.__contact_server__(payload) + + def list_abilities(self): + """ Return all ablilities """ + # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"abilities"}' + + payload = {"index": "abilities"} + return self.__contact_server__(payload) + + def list_agents(self): + """ List running agents + + """ + # TODO: Add filters for specific platforms/executors : , platform_filter=None, executor_filter=None as parameters + # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"agents"}' + payload = {"index": "agents"} + + agents = self.__contact_server__(payload) + return agents + + def list_sources(self): + """ List stored facts + + """ + # TODO: Add filters for specific platforms/executors : , platform_filter=None, executor_filter=None as parameters + # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"agents"}' + payload = {"index": "sources"} + + facts = self.__contact_server__(payload) + return facts + + def list_adversaries(self): + """ List registered adversaries """ + # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"adversaries"}' + payload = {"index": "adversaries"} + return self.__contact_server__(payload) + + def list_objectives(self): + """ List registered objectives """ + # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"objectives"}' + payload = {"index": "objectives"} + return self.__contact_server__(payload) + + def add_sources(self, name: str, parameters): + """ Adds a data source and seeds it with facts """ + + payload = {"index": "sources", + "name": name, + # "id": 
"123456-1234-1234-1234-12345678", + "rules": [], + "relationships": [] + } + + facts = [] + if parameters is not None: + for key, value in parameters.items(): + facts.append({"trait": key, "value": value}) + + # TODO: We need something better than a dict here as payload to have strong typing + payload["facts"] = facts # type: ignore + + print(payload) + return self.__contact_server__(payload, method="put") + + def add_operation(self, **kwargs): + """ Adds a new operation + + @param name: Name of the operation + @param advid: Adversary id + @param group: agent group to attack + @param state: state to initially set + @param obfuscator: obfuscator to use for the attack + @param jitter: jitter to use for the attack + @param parameters: parameters to pass to the ability + """ + + # name: str, advid: str, group: str = "red", state: str = "running", obfuscator: str = "plain-text", jitter: str = '4/8', parameters=None + name: str = kwargs.get("name") + advid: str = kwargs.get("adversary_id") + group: str = kwargs.get("group", "red") + state: str = kwargs.get("state", "running") + obfuscator: str = kwargs.get("obfuscator", "plain-text") + jitter: str = kwargs.get("jitter", "4/8") + parameters = kwargs.get("parameters", None) + + # Add operation: curl -X PUT -H "KEY:$KEY" http://127.0.0.1:8888/api/rest -d '{"index":"operations","name":"testoperation1"}' + # observed from GUI sniffing: PUT {'name': 'schnuffel2', 'group': 'red', 'adversary_id': '0f4c3c67-845e-49a0-927e-90ed33c044e0', 'state': 'running', 'planner': 'atomic', 'autonomous': '1', 'obfuscator': 'plain-text', 'auto_close': '1', 'jitter': '4/8', 'source': 'Alice Filters', 'visibility': '50'} + + sources_name = "source_" + name + self.add_sources(sources_name, parameters) + + # To verify: + # print(self.get_source(sources_name)) + + payload = {"index": "operations", + "name": name, + "state": state, + "autonomous": 1, + 'obfuscator': obfuscator, + 'auto_close': '1', + 'jitter': jitter, + 'source': sources_name, + 'visibility': '50', + "group": group, + # + "planner": "atomic", + "adversary_id": advid, + } + + return self.__contact_server__(payload, method="put") + + def view_operation_report(self, opid: str): + """ views the operation report + + @param opid: Operation id to look for + """ + + # let postData = selectedOperationId ? 
{'index':'operation_report', 'op_id': selectedOperationId, 'agent_output': Number(agentOutput)} : null; + # checking it (from snifffing protocol at the server): POST {'id': 539687} + payload = {"index": "operation_report", + "op_id": opid, + 'agent_output': 1 + } + return self.__contact_server__(payload) + + def set_operation_state(self, operation_id: str, state: str = "running"): + """ Executes an operation on a server + + @param operation_id: The operation to modify + @param state: The state to set this operation into + """ + + # TODO: Change state of an operation: curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest -d '{"index":"operation", "op_id":123, "state":"finished"}' + # curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest -d '{"index":"operation", "op_id":123, "state":"finished"}' + + if state not in ["running", "finished", "paused", "run_one_link", "cleanup"]: + raise ValueError + + payload = {"index": "operation", + "op_id": operation_id, + "state": state} + return self.__contact_server__(payload) + + def add_adversary(self, name: str, ability: str, description: str = "created automatically"): + """ Adds a new adversary + + @param name: Name of the adversary + @param ability: One ability for this adversary + @param description: Description of this adversary + """ + + # Add operation: curl -X PUT -H "KEY:$KEY" http://127.0.0.1:8888/api/rest -d '{"index":"operations","name":"testoperation1"}' + + # Sniffed from gui: + # Rest core: PUT adversaries {'name': 'removeme', 'description': 'description', 'atomic_ordering': [{'id': 'bd527b63-9f9e-46e0-9816-b8434d2b8989'}], 'id': '558932cb-3ac6-43d2-b821-2db0fa8ad469', 'objective': ''} + # Returns: [{'name': 'removeme', 'adversary_id': '558932cb-3ac6-43d2-b821-2db0fa8ad469', 'description': 'description', 'tags': [], 'atomic_ordering': ['bd527b63-9f9e-46e0-9816-b8434d2b8989'], 'objective': '495a9828-cab1-44dd-a0ca-66e58177d8cc'}] + + payload = {"index": "adversaries", + "name": name, + "description": description, + "atomic_ordering": [{"id": ability}], + # + "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective + # "objective": '' + } + return self.__contact_server__(payload, method="put") + + # curl -X DELETE http://localhost:8888/api/rest -d '{"index":"operations","id":"$operation_id"}' + def delete_operation(self, opid: str): + """ Delete operation by id + + @param opid: Operation id + """ + payload = {"index": "operations", + "id": opid} + return self.__contact_server__(payload, method="delete") + + def delete_adversary(self, adid: str): + """ Delete adversary by id + + @param adid: Adversary id + """ + payload = {"index": "adversaries", + "adversary_id": [{"adversary_id": adid}]} + return self.__contact_server__(payload, method="delete") + + def delete_agent(self, paw: str): + """ Delete a specific agent from the kali db. 
implant may still be running and reconnect + + @param paw: The Id of the agent to delete + """ + payload = {"index": "adversaries", + "paw": paw} + return self.__contact_server__(payload, method="delete") + + def kill_agent(self, paw: str): + """ Send a message to an agent to kill itself + + @param paw: The Id of the agent to delete + """ + + payload = {"index": "agents", + "paw": paw, + "watchdog": 1, + "sleep_min": 3, + "sleep_max": 3} + + return self.__contact_server__(payload, method="put") diff --git a/app/calderaapi_4.py b/app/calderaapi_4.py new file mode 100644 index 0000000..712d9d2 --- /dev/null +++ b/app/calderaapi_4.py @@ -0,0 +1,710 @@ +#!/usr/bin/env python3 + +""" Remote control a caldera 4 server. Starting compatible to the old control 2.8 calderacontrol. Maybe it will stop being compatible if refactoring is an option """ + +import json + +from pprint import pformat +from typing import Optional, Union +import requests +import simplejson +from pydantic.dataclasses import dataclass +from pydantic import conlist # pylint: disable=no-name-in-module + +# from app.exceptions import CalderaError +# from app.interface_sfx import CommandlineColors + + +# TODO: Ability deserves an own class. +# TODO: Support all Caldera agents: "Sandcat (GoLang)","Elasticat (Blue Python/ Elasticsearch)","Manx (Reverse Shell TCP)","Ragdoll (Python/HTML)" + +@dataclass +class Variation: + description: str + command: str + + +@dataclass +class ParserConfig: + source: str + edge: str + target: str + custom_parser_vals: dict # undocumented ! Needs improvement ! TODO + + +@dataclass +class Parser: + module: str + relationships: list[ParserConfig] # undocumented ! Needs improvement ! TODO + parserconfigs: Optional[list[ParserConfig]] = None + + +@dataclass +class Requirement: + module: str + relationship_match: list[dict] + + +@dataclass +class AdditionalInfo: + additionalProp1: Optional[str] = None # pylint: disable=invalid-name + additionalProp2: Optional[str] = None # pylint: disable=invalid-name + additionalProp3: Optional[str] = None # pylint: disable=invalid-name + + +@dataclass +class Executor: + build_target: Optional[str] # Why can this be None ? + language: Optional[str] # Why can this be None ? + payloads: list[str] + variations: list[Variation] + additional_info: Optional[AdditionalInfo] + parsers: list[Parser] + cleanup: list[str] + name: str + timeout: int + code: Optional[str] # Why can this be None ? + uploads: list[str] + platform: str + command: Optional[str] + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class Ability: + """ An ability is an exploit, a TTP, an attack step ...more or less... 
""" + description: str + plugin: str + technique_name: str + requirements: list[Requirement] + additional_info: AdditionalInfo + singleton: bool + buckets: list[str] + access: dict + executors: list[Executor] + name: str + technique_id: str + tactic: str + repeatable: str + ability_id: str + privilege: Optional[str] = None + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class AbilityList: + """ A list of exploits """ + abilities: conlist(Ability, min_items=1) + + def get_data(self): + return self.abilities + + +@dataclass +class Obfuscator: + """ An obfuscator hides the attack by encryption/encoding """ + description: str + name: str + module: Optional[str] = None # Documentation error !!! + + +@dataclass +class ObfuscatorList: + """ A list of obfuscators """ + obfuscators: conlist(Obfuscator, min_items=1) + + def get_data(self): + return self.obfuscators + + +@dataclass +class Adversary: + """ An adversary is a defined attacker """ + has_repeatable_abilities: bool + adversary_id: str + description: str + name: str + atomic_ordering: list[str] + objective: str + tags: list[str] + plugin: Optional[str] = None + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class AdversaryList: + """ A list of adversary """ + adversaries: conlist(Adversary, min_items=1) + + def get_data(self): + return self.adversaries + + +@dataclass +class Fact: + unique: str + name: str + score: int + limit_count: int + relationships: list[str] + source: str + trait: str + links: list[str] + created: str + origin_type: Optional[str] = None + value: Optional[str] = None + technique_id: Optional[str] = None + collected_by: Optional[str] = None + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class Relationship: + target: Fact + unique: str + score: int + edge: str + origin: str + source: Fact + + +@dataclass +class Visibility: + score: int + adjustments: list[int] + + +@dataclass +class Link: + pin: int + ability: Ability + paw: str + status: int + finish: str + decide: str + output: str + visibility: Visibility + pid: str + host: str + executor: Executor + unique: str + score: int + used: list[Fact] + facts: list[Fact] + agent_reported_time: str + id: str # pylint: disable=invalid-name + collect: str + command: str + cleanup: int + relationships: list[Relationship] + jitter: int + deadman: bool + + +@dataclass +class Agent: + """ A representation of an agent on the target (agent = implant) """ + paw: str + location: str + platform: str + last_seen: str # Error in document + host_ip_addrs: list[str] + group: str + architecture: str + pid: int + server: str + trusted: bool + username: str + host: str + ppid: int + created: str + links: list[Link] + sleep_max: int + exe_name: str + display_name: str + sleep_min: int + contact: str + deadman_enabled: bool + proxy_receivers: AdditionalInfo + origin_link_id: str + executors: list[str] + watchdog: int + proxy_chain: list[list[str]] + available_contacts: list[str] + upstream_dest: str + pending_contact: str + privilege: 
Optional[str] = None # Error, not documented + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class AgentList: + """ A list of agents """ + agents: list[Agent] + + def get_data(self): + return self.agents + + +@dataclass +class Rule: + match: str + trait: str + action: Optional[str] = None + + +@dataclass +class Adjustment: + offset: int + trait: str + value: str + ability_id: str + + +@dataclass +class Source: + name: str + plugin: str + facts: list[Fact] + rules: list[Rule] + relationships: list[Relationship] + id: str # pylint: disable=invalid-name + adjustments: Optional[list[Adjustment]] = None + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class SourceList: + sources: list[Source] + + def get_data(self): + return self.sources + + +@dataclass +class Planner: + """ A logic defining the order in which attack steps are executed """ + name: str + plugin: str + id: str # pylint: disable=invalid-name + stopping_conditions: list[Fact] + params: dict + description: str + allow_repeatable_abilities: bool + module: Optional[str] = None + ignore_enforcement_module: Optional[list[str]] = None + ignore_enforcement_modules: Optional[list[str]] = None # Maybe error in Caldera 4 ? + + +@dataclass +class PlannerList: + planners: list[Planner] + + def get_data(self): + return self.planners + + +@dataclass +class Goal: + target: str + count: int + achieved: bool + operator: str + value: str + + +@dataclass +class Objective: + percentage: int + name: str + goals: list[Goal] + description: str + id: str # pylint: disable=invalid-name + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class Operation: + """ An attack operation collecting all the relevant items (obfuscator, adversary, planner) """ + obfuscator: str + state: str + jitter: str + autonomous: int + name: str + source: Source + adversary: Adversary + objective: Union[Objective, str] # Maybe Error in caldera 4: Creating a Operation returns a objective ID, not an objective object + host_group: list[Agent] + start: str + group: str + use_learning_parsers: bool + planner: Planner + visibility: int + id: str # pylint: disable=invalid-name + auto_close: bool + chain: Optional[list] = None + + def get(self, akey, default=None): + """ Get a specific element out of the internal data representation, behaves like the well know 'get' """ + if akey in self.__dict__: + return self.__dict__[akey] + + return default + + +@dataclass +class OperationList: + operations: conlist(Operation) + + def get_data(self): + return self.operations + + +@dataclass +class ObjectiveList: + objectives: conlist(Objective) + + def get_data(self): + return self.objectives + + +class CalderaAPI(): + """ Remote control Caldera through REST api """ + + def __init__(self, server: str, attack_logger, config=None, apikey=None): + """ + + @param server: Caldera server url/ip + @param attack_logger: The attack logger to use + @param config: The configuration + """ + self.url = server if server.endswith("/") else server + "/" + 
self.attack_logger = attack_logger + + self.config = config + + if self.config: + self.apikey = self.config.caldera_apikey() + else: + self.apikey = apikey + + def __contact_server__(self, payload, rest_path: str = "api/v2/abilities", method: str = "get"): + """ + + @param payload: payload as dict to send to the server + @param rest_path: specific path for this rest api + @param method: http method to use + """ + url = self.url + rest_path + header = {"KEY": "ADMIN123", + "accept": "application/json", + "Content-Type": "application/json"} + if method.lower() == "post": + j = json.dumps(payload) + request = requests.post(url, headers=header, data=j) + elif method.lower() == "put": + request = requests.put(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "get": + request = requests.get(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "head": + request = requests.head(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "delete": + request = requests.delete(url, headers=header, data=json.dumps(payload)) + elif method.lower() == "patch": + request = requests.patch(url, headers=header, data=json.dumps(payload)) + else: + raise ValueError + try: + if request.status_code == 200: + res = request.json() + # Comment: Sometimes we get a 204: succcess, but not content in response + elif request.status_code == 204: + res = {"result": "ok", + "http_status_code": 204} + else: + print(f"Status code: {request.status_code} {request.json()}") + res = request.json() + + except simplejson.errors.JSONDecodeError as exception: # type: ignore + print("!!! Error !!!!") + print(payload) + print(request.text) + print("!!! Error !!!!") + raise exception + + return res + + def list_abilities(self): + """ Return all ablilities """ + + payload = None + data = {"abilities": self.__contact_server__(payload, method="get", rest_path="api/v2/abilities")} + abilities = AbilityList(**data) + return abilities.get_data() + + def list_obfuscators(self): + """ Return all obfuscators """ + + payload = None + data = {"obfuscators": self.__contact_server__(payload, method="get", rest_path="api/v2/obfuscators")} + obfuscators = ObfuscatorList(**data) + return obfuscators.get_data() + + def list_adversaries(self): + """ Return all adversaries """ + + payload = None + data = {"adversaries": self.__contact_server__(payload, method="get", rest_path="api/v2/adversaries")} + adversaries = AdversaryList(**data) + return adversaries.get_data() + + def list_sources(self): + """ Return all sources """ + + payload = None + data = {"sources": self.__contact_server__(payload, method="get", rest_path="api/v2/sources")} + sources = SourceList(**data) + return sources.get_data() + + def list_planners(self): + """ Return all planners """ + + payload = None + data = {"planners": self.__contact_server__(payload, method="get", rest_path="api/v2/planners")} + planners = PlannerList(**data) + return planners.get_data() + + def list_operations(self): + """ Return all operations """ + + payload = None + data = {"operations": self.__contact_server__(payload, method="get", rest_path="api/v2/operations")} + operations = OperationList(**data) + return operations.get_data() + + def set_operation_state(self, operation_id: str, state: str = "running"): + """ Executes an operation on a server + + @param operation_id: The operation to modify + @param state: The state to set this operation into + """ + + # TODO: Change state of an operation: curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest 
-d '{"index":"operation", "op_id":123, "state":"finished"}' + # curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest -d '{"index":"operation", "op_id":123, "state":"finished"}' + + if state not in ["running", "finished", "paused", "run_one_link", "cleanup"]: + raise ValueError + + payload = {"state": state} + return self.__contact_server__(payload, method="patch", rest_path=f"api/v2/operations/{operation_id}") + + def list_agents(self): + """ Return all agents """ + + payload = None + data = {"agents": self.__contact_server__(payload, method="get", rest_path="api/v2/agents")} + agents = AgentList(**data) + return agents.get_data() + + def list_objectives(self): + """ Return all objectivs """ + + payload = None + data = {"objectives": self.__contact_server__(payload, method="get", rest_path="api/v2/objectives")} + objectives = ObjectiveList(**data) + return objectives.get_data() + + def add_adversary(self, name: str, ability: str, description: str = "created automatically"): + """ Adds a new adversary + + :param name: Name of the adversary + :param ability: Ability ID to add + :param description: Human readable description + :return: + """ + payload = { + # "adversary_id": "string", + "atomic_ordering": [ + ability + ], + "name": name, + # "plugin": "string", + "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc', # default objective + # "tags": [ + # "string" + # ], + "description": description + } + data = {"agents": self.__contact_server__(payload, method="post", rest_path="api/v2/adversaries")} + # agents = AgentList(**data) + return data + + def delete_adversary(self, adversary_id: str): + """ Deletes an adversary + + :param adversary_id: The id of this adversary + :return: + """ + payload = None + data = {"agents": self.__contact_server__(payload, method="delete", rest_path=f"api/v2/adversaries/{adversary_id}")} + return data + + def delete_agent(self, agent_paw: str): + """ Deletes an agent + + :param agent_paw: the paw to delete + :return: + """ + payload = None + data = {"agents": self.__contact_server__(payload, method="delete", rest_path=f"api/v2/agents/{agent_paw}")} + return data + + def kill_agent(self, agent_paw: str): + """ Kills an agent on the target + + :param agent_paw: The paw identifying this agent + :return: + """ + payload = {"watchdog": 1, + "sleep_min": 3, + "sleep_max": 3} + data = self.__contact_server__(payload, method="patch", rest_path=f"api/v2/agents/{agent_paw}") + return data + + def add_operation(self, **kwargs): + """ Adds a new operation + + :param kwargs: + :return: + """ + + # name, adversary_id, source_id = "basic", planner_id = "atomic", group = "", state: str = "running", obfuscator: str = "plain-text", jitter: str = '4/8' + + name: str = kwargs.get("name") + adversary_id: str = kwargs.get("adversary_id") + source_id: str = kwargs.get("source_id", "basic") + planner_id: str = kwargs.get("planner_id", "atomic") + group: str = kwargs.get("group", "") + state: str = kwargs.get("state", "running") + obfuscator: str = kwargs.get("obfuscator", "plain-text") + jitter: str = kwargs.get("jitter", "4/8") + + payload = {"name": name, + "group": group, + "adversary": {"adversary_id": adversary_id}, + "auto_close": False, + "state": state, + "autonomous": 1, + "planner": {"id": planner_id}, + "source": {"id": source_id}, + "use_learning_parsers": True, + "obfuscator": obfuscator, + "jitter": jitter, + "visibility": "51"} + data = {"operations": [self.__contact_server__(payload, method="post", rest_path="api/v2/operations")]} + operations = 
OperationList(**data) + return operations + + def delete_operation(self, operation_id): + """ Deletes an operation + + :param operation_id: The Id of the operation to delete + :return: + """ + + payload = {} + + data = self.__contact_server__(payload, method="delete", rest_path=f"api/v2/operations/{operation_id}") + + return data + + def view_operation_report(self, operation_id): + """ Views the report of a finished operation + + :param operation_id: The id of this operation + :return: + """ + + payload = { + "enable_agent_output": True + } + + data = self.__contact_server__(payload, method="post", rest_path=f"api/v2/operations/{operation_id}/report") + + return data + + def get_ability(self, abid: str): + """" Return an ability by id + + @param abid: Ability id + """ + + res = [] + + print(f"Number of abilities: {len(self.list_abilities())}") + + with open("debug_removeme.txt", "wt") as fh: + fh.write(pformat(self.list_abilities())) + + for ability in self.list_abilities()["abilities"]: + if ability.get("ability_id", None) == abid or ability.get("auto_generated_guid", None) == abid: + res.append(ability) + return res + + def pretty_print_ability(self, abi): + """ Pretty pritns an ability + + @param abi: A ability dict + """ + + print(""" + TTP: {technique_id} + Technique name: {technique_name} + Tactic: {tactic} + Name: {name} + ID: {ability_id} + Description: {description} + + """.format(**abi)) diff --git a/app/calderacontrol.py b/app/calderacontrol.py index 4052d81..dddc880 100644 --- a/app/calderacontrol.py +++ b/app/calderacontrol.py @@ -2,43 +2,26 @@ """ Remote control a caldera server """ -import json import os import time from pprint import pprint, pformat from typing import Optional import requests -import simplejson from app.exceptions import CalderaError from app.interface_sfx import CommandlineColors +# from app.calderaapi_2 import CalderaAPI +from app.calderaapi_4 import CalderaAPI + # TODO: Ability deserves an own class. 
# TODO: Support all Caldera agents: "Sandcat (GoLang)","Elasticat (Blue Python/ Elasticsearch)","Manx (Reverse Shell TCP)","Ragdoll (Python/HTML)" -class CalderaControl(): +class CalderaControl(CalderaAPI): """ Remote control Caldera through REST api """ - def __init__(self, server: str, attack_logger, config=None, apikey=None): - """ - - @param server: Caldera server url/ip - @param attack_logger: The attack logger to use - @param config: The configuration - """ - # print(server) - self.url = server if server.endswith("/") else server + "/" - self.attack_logger = attack_logger - - self.config = config - - if self.config: - self.apikey = self.config.caldera_apikey() - else: - self.apikey = apikey - def fetch_client(self, platform: str = "windows", file: str = "sandcat.go", target_dir: str = ".", extension: str = ""): """ Downloads the appropriate Caldera client @@ -57,98 +40,11 @@ class CalderaControl(): # print(r.headers) return filename - def __contact_server__(self, payload, rest_path: str = "api/rest", method: str = "post"): - """ - - @param payload: payload as dict to send to the server - @param rest_path: specific path for this rest api - @param method: http method to use - """ - url = self.url + rest_path - header = {"KEY": self.apikey, - "Content-Type": "application/json"} - if method.lower() == "post": - request = requests.post(url, headers=header, data=json.dumps(payload)) - elif method.lower() == "put": - request = requests.put(url, headers=header, data=json.dumps(payload)) - elif method.lower() == "get": - request = requests.get(url, headers=header, data=json.dumps(payload)) - elif method.lower() == "delete": - request = requests.delete(url, headers=header, data=json.dumps(payload)) - else: - raise ValueError - try: - res = request.json() - except simplejson.errors.JSONDecodeError as exception: # type: ignore - print("!!! Error !!!!") - print(payload) - print(request.text) - print("!!! 
Error !!!!") - raise exception - - return res - - # ############## List - def list_links(self, opid: str): - """ List links associated with an operation - - @param opid: operation id to list links for - """ - - payload = {"index": "link", - "op_id": opid} - return self.__contact_server__(payload) - - def list_results(self, linkid: str): - """ List results for a link - - @param linkid: ID of the link - """ - - payload = {"index": "result", - "link_id": linkid} - return self.__contact_server__(payload) - - def list_operations(self): - """ Return operations """ - - payload = {"index": "operations"} - return self.__contact_server__(payload) - - def list_abilities(self): - """ Return all ablilities """ - # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"abilities"}' - - payload = {"index": "abilities"} - return self.__contact_server__(payload) - - def list_agents(self): - """ List running agents - - """ - # TODO: Add filters for specific platforms/executors : , platform_filter=None, executor_filter=None as parameters - # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"agents"}' - payload = {"index": "agents"} - - agents = self.__contact_server__(payload) - return agents - - def list_sources(self): - """ List stored facts - - """ - # TODO: Add filters for specific platforms/executors : , platform_filter=None, executor_filter=None as parameters - # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"agents"}' - payload = {"index": "sources"} - - facts = self.__contact_server__(payload) - return facts - def list_sources_for_name(self, name: str): """ List facts in a source pool with a specific name """ for i in self.list_sources(): - if i["name"] == name: + if i.get("name") == name: return i return None @@ -164,31 +60,19 @@ class CalderaControl(): return {} res = {} - for i in source["facts"]: - res[i["trait"]] = {"value": i["value"], - "technique_id": i["technique_id"], - "collected_by": i["collected_by"] - } + for i in source.get("facts"): + res[i.get("trait")] = {"value": i.get("value"), + "technique_id": i.get("technique_id"), + "collected_by": i.get("collected_by") + } return res def list_paws_of_running_agents(self): """ Returns a list of all paws of running agents """ - return [i["paw"] for i in self.list_agents()] - - def list_adversaries(self): - """ List registered adversaries """ - # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"adversaries"}' - payload = {"index": "adversaries"} - return self.__contact_server__(payload) - - def list_objectives(self): - """ List registered objectives """ - # curl -H 'KEY: ADMIN123' http://192.168.178.102:8888/api/rest -H 'Content-Type: application/json' -d '{"index":"objectives"}' - payload = {"index": "objectives"} - return self.__contact_server__(payload) + return [i.get("paw") for i in self.list_agents()] # 2.8.1 version + # return [i.paw for i in self.list_agents()] # 4* version # ######### Get one specific item - def get_operation(self, name: str): """ Gets an operation by name @@ -196,7 +80,7 @@ class CalderaControl(): """ for operation in self.list_operations(): - if operation["name"] == name: + if operation.get("name") == name: return operation return None @@ -206,7 +90,7 @@ class CalderaControl(): @param name: Name to look for """ for adversary in self.list_adversaries(): - if adversary["name"] == 
name: + if adversary.get("name") == name: return adversary return None @@ -216,22 +100,10 @@ class CalderaControl(): @param name: Name to filter for """ for objective in self.list_objectives(): - if objective["name"] == name: + if objective.get("name") == name: return objective return None - # ######### Get by id - - def get_source(self, source_name: str): - """ Retrieves data source and detailed facts - - @param: The name of the source - """ - - payload = {"index": "sources", - "name": source_name} - return self.__contact_server__(payload) - def get_ability(self, abid: str): """" Return an ability by id @@ -262,12 +134,17 @@ class CalderaControl(): abilities = self.get_ability(abid) for ability in abilities: - if ability["platform"] == platform: + if ability.get("platform") == platform: return True if platform in ability.get("supported_platforms", []): return True if platform in ability.get("platforms", []): return True + executors = ability.get("executors") # For Caldera 4.* + if executors is not None: + for executor in executors: + if executor.get("platform") == platform: + return True print(self.get_ability(abid)) return False @@ -276,18 +153,13 @@ class CalderaControl(): @param op_id: Operation id """ - payload = {"index": "operations", - "id": op_id} - return self.__contact_server__(payload) - - def get_result_by_id(self, linkid: str): - """ Get the result from a link id + operations = self.list_operations() - @param linkid: link id - """ - payload = {"index": "result", - "link_id": linkid} - return self.__contact_server__(payload) + if operations is not None: + for an_operation in operations: + if an_operation.get("id") == op_id: + return [an_operation] + return [] def get_linkid(self, op_id: str, paw: str, ability_id: str): """ Get the id of a link identified by paw and ability_id @@ -309,20 +181,6 @@ class CalderaControl(): # ######### View - def view_operation_report(self, opid: str): - """ views the operation report - - @param opid: Operation id to look for - """ - - # let postData = selectedOperationId ? 
{'index':'operation_report', 'op_id': selectedOperationId, 'agent_output': Number(agentOutput)} : null; - # checking it (from snifffing protocol at the server): POST {'id': 539687} - payload = {"index": "operation_report", - "op_id": opid, - 'agent_output': 1 - } - return self.__contact_server__(payload) - def view_operation_output(self, opid: str, paw: str, ability_id: str): """ Gets the output of an executed ability @@ -332,200 +190,24 @@ class CalderaControl(): """ orep = self.view_operation_report(opid) + # print(orep) if paw not in orep["steps"]: print("Broken operation report:") pprint(orep) print(f"Could not find {paw} in {orep['steps']}") raise CalderaError # print("oprep: " + str(orep)) - for a_step in orep["steps"][paw]["steps"]: - if a_step["ability_id"] == ability_id: + for a_step in orep.get("steps").get(paw).get("steps"): + if a_step.get("ability_id") == ability_id: try: - return a_step["output"] + return a_step.get("output") except KeyError as exception: raise CalderaError from exception # print(f"Did not find ability {ability_id} in caldera operation output") return None - # ######### Add - - def add_sources(self, name: str, parameters): - """ Adds a data source and seeds it with facts """ - - payload = {"index": "sources", - "name": name, - # "id": "123456-1234-1234-1234-12345678", - "rules": [], - "relationships": [] - } - - facts = [] - if parameters is not None: - for key, value in parameters.items(): - facts.append({"trait": key, "value": value}) - - # TODO: We need something better than a dict here as payload to have strong typing - payload["facts"] = facts # type: ignore - - print(payload) - return self.__contact_server__(payload, method="put") - - def add_operation(self, name: str, advid: str, group: str = "red", state: str = "running", obfuscator: str = "plain-text", jitter: str = '4/8', parameters=None): - """ Adds a new operation - - @param name: Name of the operation - @param advid: Adversary id - @param group: agent group to attack - @param state: state to initially set - @param obfuscator: obfuscator to use for the attack - @param jitter: jitter to use for the attack - @param parameters: parameters to pass to the ability - """ - - # Add operation: curl -X PUT -H "KEY:$KEY" http://127.0.0.1:8888/api/rest -d '{"index":"operations","name":"testoperation1"}' - # observed from GUI sniffing: PUT {'name': 'schnuffel2', 'group': 'red', 'adversary_id': '0f4c3c67-845e-49a0-927e-90ed33c044e0', 'state': 'running', 'planner': 'atomic', 'autonomous': '1', 'obfuscator': 'plain-text', 'auto_close': '1', 'jitter': '4/8', 'source': 'Alice Filters', 'visibility': '50'} - - sources_name = "source_" + name - self.add_sources(sources_name, parameters) - - # To verify: - # print(self.get_source(sources_name)) - - payload = {"index": "operations", - "name": name, - "state": state, - "autonomous": 1, - 'obfuscator': obfuscator, - 'auto_close': '1', - 'jitter': jitter, - 'source': sources_name, - 'visibility': '50', - "group": group, - # - "planner": "atomic", - "adversary_id": advid, - } - - return self.__contact_server__(payload, method="put") - - def add_adversary(self, name: str, ability: str, description: str = "created automatically"): - """ Adds a new adversary - - @param name: Name of the adversary - @param ability: One ability for this adversary - @param description: Description of this adversary - """ - - # Add operation: curl -X PUT -H "KEY:$KEY" http://127.0.0.1:8888/api/rest -d '{"index":"operations","name":"testoperation1"}' - - # Sniffed from gui: - # Rest core: PUT 
adversaries {'name': 'removeme', 'description': 'description', 'atomic_ordering': [{'id': 'bd527b63-9f9e-46e0-9816-b8434d2b8989'}], 'id': '558932cb-3ac6-43d2-b821-2db0fa8ad469', 'objective': ''} - # Returns: [{'name': 'removeme', 'adversary_id': '558932cb-3ac6-43d2-b821-2db0fa8ad469', 'description': 'description', 'tags': [], 'atomic_ordering': ['bd527b63-9f9e-46e0-9816-b8434d2b8989'], 'objective': '495a9828-cab1-44dd-a0ca-66e58177d8cc'}] - - payload = {"index": "adversaries", - "name": name, - "description": description, - "atomic_ordering": [{"id": ability}], - # - "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective - # "objective": '' - } - return self.__contact_server__(payload, method="put") - - # ######### Execute - - # TODO View the abilities a given agent could execute. curl -H "key:$API_KEY" -X POST localhost:8888/plugin/access/abilities -d '{"paw":"$PAW"}' - - def execute_ability(self, paw: str, ability_id: str, obfuscator: str = "plain-text", parameters=None): - """ Executes an ability on a target. This happens outside of the scop of an operation. You will get no result of the ability back - - @param paw: Paw of the target - @param ability_id: ability to execute - @param obfuscator: Obfuscator to use - @param parameters: parameters to pass to the ability - """ - - # curl -H "key:ADMIN123" -X POST localhost:8888/plugin/access/exploit -d '{"paw":"$PAW","ability_id":"$ABILITY_ID"}'``` - # You can optionally POST an obfuscator and/or a facts dictionary with key/value pairs to fill in any variables the chosen ability requires. - # {"paw":"$PAW","ability_id":"$ABILITY_ID","obfuscator":"base64","facts":[{"trait":"username","value":"admin"},{"trait":"password", "value":"123"}]} - payload = {"paw": paw, - "ability_id": ability_id, - "obfuscator": obfuscator} - - facts = [] - if parameters is not None: - for key, value in parameters.items(): - facts.append({"trait": key, "value": value}) - - # TODO. 
We need something better than a dict here for strong typing - payload["facts"] = facts # type: ignore - - # print(payload) - - return self.__contact_server__(payload, rest_path="plugin/access/exploit_ex") - - def execute_operation(self, operation_id: str, state: str = "running"): - """ Executes an operation on a server - - @param operation_id: The operation to modify - @param state: The state to set this operation into - """ - - # TODO: Change state of an operation: curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest -d '{"index":"operation", "op_id":123, "state":"finished"}' - # curl -X POST -H "KEY:ADMIN123" http://localhost:8888/api/rest -d '{"index":"operation", "op_id":123, "state":"finished"}' - - if state not in ["running", "finished", "paused", "run_one_link", "cleanup"]: - raise ValueError - - payload = {"index": "operation", - "op_id": operation_id, - "state": state} - return self.__contact_server__(payload) - # ######### Delete - # curl -X DELETE http://localhost:8888/api/rest -d '{"index":"operations","id":"$operation_id"}' - def delete_operation(self, opid: str): - """ Delete operation by id - - @param opid: Operation id - """ - payload = {"index": "operations", - "id": opid} - return self.__contact_server__(payload, method="delete") - - def delete_adversary(self, adid: str): - """ Delete adversary by id - - @param adid: Adversary id - """ - payload = {"index": "adversaries", - "adversary_id": [{"adversary_id": adid}]} - return self.__contact_server__(payload, method="delete") - - def delete_agent(self, paw: str): - """ Delete a specific agent from the kali db. implant may still be running and reconnect - - @param paw: The Id of the agent to delete - """ - payload = {"index": "adversaries", - "paw": paw} - return self.__contact_server__(payload, method="delete") - - def kill_agent(self, paw: str): - """ Send a message to an agent to kill itself - - @param paw: The Id of the agent to delete - """ - - payload = {"index": "agents", - "paw": paw, - "watchdog": 1, - "sleep_min": 3, - "sleep_max": 3} - - return self.__contact_server__(payload, method="put") - def delete_all_agents(self): """ Delete all agents from kali db """ @@ -562,12 +244,14 @@ class CalderaControl(): # Plus: 0 as "finished" # + # TODO: Maybe try to get the report and continue until we have it. Could be done in addition. + operation = self.get_operation_by_id(opid) if debug: print(f"Operation data {operation}") try: # print(operation[0]["state"]) - if operation[0]["state"] == "finished": + if operation[0].get("state") == "finished": return True except KeyError as exception: raise CalderaError from exception @@ -641,15 +325,15 @@ class CalderaControl(): return False self.add_adversary(adversary_name, ability_id) - adid = self.get_adversary(adversary_name)["adversary_id"] + adid = self.get_adversary(adversary_name).get("adversary_id") logid = self.attack_logger.start_caldera_attack(source=self.url, paw=paw, group=group, ability_id=ability_id, - ttp=self.get_ability(ability_id)[0]["technique_id"], - name=self.get_ability(ability_id)[0]["name"], - description=self.get_ability(ability_id)[0]["description"], + ttp=self.get_ability(ability_id)[0].get("technique_id"), + name=self.get_ability(ability_id)[0].get("name"), + description=self.get_ability(ability_id)[0].get("description"), obfuscator=obfuscator, jitter=jitter, **kwargs @@ -658,8 +342,8 @@ class CalderaControl(): # ##### Create / Run Operation self.attack_logger.vprint(f"New adversary generated. 
ID: {adid}, ability: {ability_id} group: {group}", 2) - res = self.add_operation(operation_name, - advid=adid, + res = self.add_operation(name=operation_name, + adversary_id=adid, group=group, obfuscator=obfuscator, jitter=jitter, @@ -667,14 +351,14 @@ class CalderaControl(): ) self.attack_logger.vprint(pformat(res), 3) - opid = self.get_operation(operation_name)["id"] + opid = self.get_operation(operation_name).get("id") self.attack_logger.vprint("New operation created. OpID: " + str(opid), 3) - self.execute_operation(opid) + self.set_operation_state(opid) self.attack_logger.vprint("Execute operation", 3) retries = 30 - ability_name = self.get_ability(ability_id)[0]["name"] - ability_description = self.get_ability(ability_id)[0]["description"] + ability_name = self.get_ability(ability_id)[0].get("name") + ability_description = self.get_ability(ability_id)[0].get("description") self.attack_logger.vprint(f"{CommandlineColors.OKBLUE}Executed attack operation{CommandlineColors.ENDC}", 1) self.attack_logger.vprint(f"{CommandlineColors.BACKGROUND_BLUE} PAW: {paw} Group: {group} Ability: {ability_id} {CommandlineColors.ENDC}", 1) self.attack_logger.vprint(f"{CommandlineColors.BACKGROUND_BLUE} {ability_name}: {ability_description} {CommandlineColors.ENDC}", 1) @@ -713,16 +397,16 @@ class CalderaControl(): self.attack_logger.vprint(self.list_facts_for_name("source_" + operation_name), 2) # ######## Cleanup - self.execute_operation(opid, "cleanup") + self.set_operation_state(opid, "cleanup") self.delete_adversary(adid) self.delete_operation(opid) self.attack_logger.stop_caldera_attack(source=self.url, paw=paw, group=group, ability_id=ability_id, - ttp=self.get_ability(ability_id)[0]["technique_id"], - name=self.get_ability(ability_id)[0]["name"], - description=self.get_ability(ability_id)[0]["description"], + ttp=self.get_ability(ability_id)[0].get("technique_id"), + name=self.get_ability(ability_id)[0].get("name"), + description=self.get_ability(ability_id)[0].get("description"), obfuscator=obfuscator, jitter=jitter, logid=logid, diff --git a/app/experimentcontrol.py b/app/experimentcontrol.py index b2fd8b3..704e4f3 100644 --- a/app/experimentcontrol.py +++ b/app/experimentcontrol.py @@ -16,8 +16,8 @@ from app.interface_sfx import CommandlineColors from app.exceptions import ServerError from app.pluginmanager import PluginManager from app.doc_generator import DocGenerator -from caldera_control import CalderaControl -from machine_control import Machine +from app.calderacontrol import CalderaControl +from app.machinecontrol import Machine from plugins.base.attack import AttackPlugin diff --git a/app/machinecontrol.py b/app/machinecontrol.py index 2ba938b..b6a865e 100644 --- a/app/machinecontrol.py +++ b/app/machinecontrol.py @@ -390,7 +390,8 @@ class Machine(): # TODO: Caldera implant # TODO: Metasploit implant - def install_caldera_server(self, cleanup=False, version="2.8.1"): + # options for version: 4.0.0-alpha.2 2.8.1 + def install_caldera_server(self, cleanup=False, version="4.0.0-alpha.2"): """ Installs the caldera server on the VM @param cleanup: Remove the old caldera version. 
Slow but reduces side effects diff --git a/caldera_control.py b/caldera_control.py old mode 100644 new mode 100755 index fd9474a..ba70683 --- a/caldera_control.py +++ b/caldera_control.py @@ -3,25 +3,40 @@ """ A command line tool to control a caldera server """ import argparse +from pprint import pprint + +# from app.calderacontrol import CalderaControl +from app.calderaapi_4 import CalderaAPI + -from app.calderacontrol import CalderaControl from app.attack_log import AttackLog +class CmdlineArgumentException(Exception): + """ An error in the user supplied command line """ + # https://caldera.readthedocs.io/en/latest/The-REST-API.html # TODO: Check if attack is finished # TODO: Get results of a specific attack + # Arpgparse handling -def list_agents(calcontrol, arguments): # pylint: disable=unused-argument - """ Call list agents in caldera control +def agents(calcontrol, arguments): # pylint: disable=unused-argument + """ Agents in caldera control @param calcontrol: Connection to the caldera server @param arguments: Parser command line arguments """ - print(f"Running agents: {calcontrol.list_agents()}") + + if arguments.list: + print(calcontrol.list_agents()) + print([i["paw"] for i in calcontrol.list_agents()]) + if arguments.delete: + print(calcontrol.delete_agent(arguments.paw)) + if arguments.kill: + print(calcontrol.kill_agent(arguments.paw)) def list_facts(calcontrol, arguments): # pylint: disable=unused-argument @@ -53,38 +68,136 @@ def add_facts(calcontrol, arguments): # pylint: disable=unused-argument print(f'Created fact: {calcontrol.add_sources(name, data)}') -def delete_agents(calcontrol, arguments): # pylint: disable=unused-argument - """ Call list agents in caldera control +def list_abilities(calcontrol, arguments): + """ Call list abilities in caldera control @param calcontrol: Connection to the caldera server @param arguments: Parser command line arguments """ - print(calcontrol.list_paws_of_running_agents()) - if arguments.paw: - print(calcontrol.kill_agent(paw=arguments.paw)) - print(calcontrol.delete_agent(paw=arguments.paw)) + if arguments.list: + abilities = calcontrol.list_abilities() + abi_ids = [aid.ability_id for aid in abilities] + print(abi_ids) - else: - print(calcontrol.kill_all_agents()) - print(calcontrol.delete_all_agents()) + for abi in abilities: + for executor in abi.executors: + for a_parser in executor.parsers: + pprint(a_parser.relationships) -def list_abilities(calcontrol, arguments): - """ Call list abilities in caldera control +def obfuscators(calcontrol, arguments): + """ Manage obfuscators caldera control + + @param calcontrol: Connection to the caldera server + @param arguments: Parser command line arguments + """ + + if arguments.list: + obfs = calcontrol.list_obfuscators() + # ob_ids = [aid.ability_id for aid in obfuscators] + # print(ob_ids) + + for obfuscator in obfs: + print(obfuscator) + + +def objectives(calcontrol, arguments): + """ Manage objectives caldera control @param calcontrol: Connection to the caldera server @param arguments: Parser command line arguments """ - abilities = arguments.ability_ids + if arguments.list: + for objective in calcontrol.list_objectives(): + print(objective) + + +def adversaries(calcontrol, arguments): + """ Manage adversaries caldera control + + @param calcontrol: Connection to the caldera server + @param arguments: Parser command line arguments + """ - if arguments.all: - abilities = [aid["ability_id"] for aid in calcontrol.list_abilities()] + if arguments.list: + for adversary in 
calcontrol.list_adversaries(): + print(adversary) + if arguments.add: + if arguments.ability_id is None: + raise CmdlineArgumentException("Creating an adversary requires an ability id") + if arguments.name is None: + raise CmdlineArgumentException("Creating an adversary requires an adversary name") + calcontrol.add_adversary(arguments.name, arguments.ability_id) + if arguments.delete: + if arguments.adversary_id is None: + raise CmdlineArgumentException("Deleting an adversary requires an adversary id") + calcontrol.delete_adversary(arguments.adversary_id) - for aid in abilities: - for ability in calcontrol.get_ability(aid): - calcontrol.pretty_print_ability(ability) + +def sources(calcontrol, arguments): + """ Manage sources caldera control + + @param calcontrol: Connection to the caldera server + @param arguments: Parser command line arguments + """ + + if arguments.list: + for a_source in calcontrol.list_sources(): + print(a_source) + + +def planners(calcontrol, arguments): + """ Manage planners caldera control + + @param calcontrol: Connection to the caldera server + @param arguments: Parser command line arguments + """ + + if arguments.list: + for a_planner in calcontrol.list_planners(): + print(a_planner) + + +def operations(calcontrol, arguments): + """ Manage operations caldera control + + @param calcontrol: Connection to the caldera server + @param arguments: Parser command line arguments + """ + + if arguments.list: + for an_operation in calcontrol.list_operations(): + print(an_operation) + + if arguments.add: + if arguments.adversary_id is None: + raise CmdlineArgumentException("Adding an operation requires an adversary id") + if arguments.name is None: + raise CmdlineArgumentException("Adding an operation requires a name for it") + + ops = calcontrol.add_operation(name=arguments.name, + adversary_id=arguments.adversary_id, + source_id=arguments.source_id, + planner_id=arguments.planner_id, + group=arguments.group, + state=arguments.state, + obfuscator=arguments.obfuscator, + jitter=arguments.jitter) + print(ops) + + if arguments.delete: + if arguments.id is None: + raise CmdlineArgumentException("Deleting an operation requires its id") + ops = calcontrol.delete_operation(arguments.id) + print(ops) + + if arguments.view_report: + if arguments.id is None: + raise CmdlineArgumentException("Viewing an operation report requires an operation id") + report = calcontrol.view_operation_report(arguments.id) + print(report) def attack(calcontrol, arguments): @@ -122,15 +235,15 @@ def create_parser(): parser_abilities.set_defaults(func=list_abilities) parser_abilities.add_argument("--ability_ids", default=[], nargs="+", help="The abilities to look up. One or more ids") - parser_abilities.add_argument("--all", default=False, action="store_true", + parser_abilities.add_argument("--list", default=False, action="store_true", help="List all abilities") parser_agents = subparsers.add_parser("agents", help="agents") - parser_agents.set_defaults(func=list_agents) - - parser_delete_agents = subparsers.add_parser("delete_agents", help="agents") - parser_delete_agents.add_argument("--paw", default=None, help="PAW to delete. 
if not set it will delete all agents") - parser_delete_agents.set_defaults(func=delete_agents) + parser_agents.set_defaults(func=agents) + parser_agents.add_argument("--list", default=False, action="store_true", help="List all agents") + parser_agents.add_argument("--delete", default=False, action="store_true", help="Delete agent") + parser_agents.add_argument("--kill", default=False, action="store_true", help="Delete agent") + parser_agents.add_argument("--paw", default=None, help="PAW to delete. if not set it will delete all agents") parser_facts = subparsers.add_parser("facts", help="facts") parser_facts.set_defaults(func=list_facts) @@ -139,8 +252,66 @@ def create_parser(): parser_facts = subparsers.add_parser("add_facts", help="facts") parser_facts.set_defaults(func=add_facts) + # Sub parser for obfuscators + parser_obfuscators = subparsers.add_parser("obfuscators", help="obfuscators") + parser_obfuscators.set_defaults(func=obfuscators) + parser_obfuscators.add_argument("--list", default=False, action="store_true", + help="List all obfuscators") + + # Sub parser for objectives + parser_objectives = subparsers.add_parser("objectives", help="objectives") + parser_objectives.set_defaults(func=objectives) + parser_objectives.add_argument("--list", default=False, action="store_true", + help="List all objectives") + + # Sub parser for adversaries + parser_adversaries = subparsers.add_parser("adversaries", help="adversaries") + parser_adversaries.set_defaults(func=adversaries) + parser_adversaries.add_argument("--list", default=False, action="store_true", + help="List all adversaries") + parser_adversaries.add_argument("--add", default=False, action="store_true", + help="Add a new adversary") + parser_adversaries.add_argument("--ability_id", "--abid", default=None, help="Ability ID") + parser_adversaries.add_argument("--ability_name", default=None, help="Adversary name") + parser_adversaries.add_argument("--delete", default=False, action="store_true", + help="Delete adversary") + parser_adversaries.add_argument("--adversary_id", "--advid", default=None, help="Adversary ID") + + # Sub parser for operations + parser_operations = subparsers.add_parser("operations", help="operations") + parser_operations.set_defaults(func=operations) + parser_operations.add_argument("--list", default=False, action="store_true", + help="List all operations") + parser_operations.add_argument("--add", default=False, action="store_true", + help="Add a new operations") + parser_operations.add_argument("--delete", default=False, action="store_true", + help="Delete an operation") + parser_operations.add_argument("--view_report", default=False, action="store_true", + help="View the report of a finished operation") + parser_operations.add_argument("--name", default=None, help="Name of the operation") + parser_operations.add_argument("--adversary_id", "--advid", default=None, help="Adversary ID") + parser_operations.add_argument("--source_id", "--sourceid", default="basic", help="'Source' ID") + parser_operations.add_argument("--planner_id", "--planid", default="atomic", help="Planner ID") + parser_operations.add_argument("--group", default="", help="Caldera group to run the operation on (we are targeting groups, not PAWs)") + parser_operations.add_argument("--state", default="running", help="State to start the operation in") + parser_operations.add_argument("--obfuscator", default="plain-text", help="Obfuscator to use for this attack") + parser_operations.add_argument("--jitter", default="4/8", help="Jitter to use") + 
parser_operations.add_argument("--id", default=None, help="ID of operation to delete") + + # Sub parser for sources + parser_sources = subparsers.add_parser("sources", help="sources") + parser_sources.set_defaults(func=sources) + parser_sources.add_argument("--list", default=False, action="store_true", + help="List all sources") + + # Sub parser for planners + parser_sources = subparsers.add_parser("planners", help="planners") + parser_sources.set_defaults(func=planners) + parser_sources.add_argument("--list", default=False, action="store_true", + help="List all planners") + # For all parsers - main_parser.add_argument("--caldera_url", help="caldera url, including port", default="http://192.168.178.125:8888/") + main_parser.add_argument("--caldera_url", help="caldera url, including port", default="http://localhost:8888/") main_parser.add_argument("--apikey", help="caldera api key", default="ADMIN123") return main_parser @@ -153,7 +324,10 @@ if __name__ == "__main__": print(args.caldera_url) attack_logger = AttackLog(args.verbose) - caldera_control = CalderaControl(args.caldera_url, attack_logger, config=None, apikey=args.apikey) + caldera_control = CalderaAPI(args.caldera_url, attack_logger, config=None, apikey=args.apikey) print("Caldera Control ready") - - str(args.func(caldera_control, args)) + try: + str(args.func(caldera_control, args)) + except CmdlineArgumentException as ex: + parser.print_help() + print(f"\nCommandline error: {ex}") diff --git a/tests/test_calderacontrol.py b/tests/test_calderacontrol.py index 7c515ae..91dd59d 100644 --- a/tests/test_calderacontrol.py +++ b/tests/test_calderacontrol.py @@ -4,6 +4,7 @@ from app.calderacontrol import CalderaControl from simplejson.errors import JSONDecodeError from app.exceptions import CalderaError from app.attack_log import AttackLog +import pydantic # https://docs.python.org/3/library/unittest.html @@ -17,35 +18,14 @@ class TestExample(unittest.TestCase): def tearDown(self) -> None: pass - # List links sends the right commands and post - def test_list_links(self): - with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_links("asd") - mock_method.assert_called_once_with({"index": "link", "op_id": "asd"}) - - # List links gets an Exception and does not handle it (as expected) - def test_list_links_with_exception(self): - with self.assertRaises(JSONDecodeError): - with patch.object(self.cc, "__contact_server__", side_effect=JSONDecodeError("foo", "bar", 2)): - self.cc.list_links("asd") - - # list results sends the right commands and post - def test_list_results(self): - with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_results("asd") - mock_method.assert_called_once_with({"index": "result", "link_id": "asd"}) - - # List results gets an Exception and does not handle it (as expected) - def test_list_results_with_exception(self): - with self.assertRaises(JSONDecodeError): - with patch.object(self.cc, "__contact_server__", side_effect=JSONDecodeError("foo", "bar", 2)): - self.cc.list_results("asd") - # list_operations def test_list_operations(self): with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_operations() - mock_method.assert_called_once_with({"index": "operations"}) + try: + self.cc.list_operations() + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', rest_path='api/v2/operations') # list operations gets the expected exception 
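For api/v2 wrappers that are not covered in the tests visible here, the same mock pattern can be reused: patch out `__contact_server__`, tolerate the pydantic validation error a mocked None response triggers (where the wrapper parses its result), and assert only the payload, method and REST path. As an illustration, a test for kill_agent added to the TestExample class could look roughly like this, with the expected payload and path taken from app/calderaapi_4.py:

```
    # Rough sketch for the existing TestExample class; payload and path mirror calderaapi_4.kill_agent
    def test_kill_agent(self):
        paw = "test_paw"
        with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method:
            self.cc.kill_agent(paw)
            mock_method.assert_called_once_with({"watchdog": 1, "sleep_min": 3, "sleep_max": 3},
                                                method="patch",
                                                rest_path="api/v2/agents/test_paw")
```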
def test_list_operations_with_exception(self): @@ -56,8 +36,11 @@ class TestExample(unittest.TestCase): # list_abilities def test_list_abilities(self): with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_abilities() - mock_method.assert_called_once_with({"index": "abilities"}) + try: + self.cc.list_abilities() + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', rest_path='api/v2/abilities') # list abilities gets the expected exception def test_list_abilities_with_exception(self): @@ -68,8 +51,11 @@ class TestExample(unittest.TestCase): # list_agents def test_list_agents(self): with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_agents() - mock_method.assert_called_once_with({"index": "agents"}) + try: + self.cc.list_agents() + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', rest_path='api/v2/agents') # list agents gets the expected exception def test_list_agents_with_exception(self): @@ -80,8 +66,11 @@ class TestExample(unittest.TestCase): # list_adversaries def test_list_adversaries(self): with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_adversaries() - mock_method.assert_called_once_with({"index": "adversaries"}) + try: + self.cc.list_adversaries() + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', rest_path='api/v2/adversaries') # list adversaries gets the expected exception def test_list_adversaries_with_exception(self): @@ -92,8 +81,11 @@ class TestExample(unittest.TestCase): # list_objectives def test_list_objectives(self): with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.list_objectives() - mock_method.assert_called_once_with({"index": "objectives"}) + try: + self.cc.list_objectives() + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', rest_path='api/v2/objectives') # list objectives gets the expected exception def test_list_objectives_with_exception(self): @@ -161,27 +153,11 @@ class TestExample(unittest.TestCase): def test_get_operation_by_id(self): opid = "FooBar" with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.get_operation_by_id(opid) - mock_method.assert_called_once_with({"index": "operations", "id": opid}) - - # get_operation_by_id gets the expected exception - def test_get_operation_by_id_with_exception(self): - with self.assertRaises(JSONDecodeError): - with patch.object(self.cc, "__contact_server__", side_effect=JSONDecodeError("foo", "bar", 2)): - self.cc.get_result_by_id("FooBar") - - # get_result_by_id - def test_get_result_by_id(self): - opid = "FooBar" - with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.get_result_by_id(opid) - mock_method.assert_called_once_with({"index": "result", "link_id": opid}) - - # get_result_by_id gets the expected exception - def test_get_result_by_id_with_exception(self): - with self.assertRaises(JSONDecodeError): - with patch.object(self.cc, "__contact_server__", side_effect=JSONDecodeError("foo", "bar", 2)): - self.cc.get_result_by_id("FooBar") + try: + self.cc.get_operation_by_id(opid) + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_called_once_with(None, method='get', 
rest_path='api/v2/operations') # get_linkid def test_get_linkid(self): @@ -218,7 +194,7 @@ class TestExample(unittest.TestCase): opid = "FooBar" with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: self.cc.view_operation_report(opid) - mock_method.assert_called_once_with({"index": "operation_report", "op_id": opid, "agent_output": 1}) + mock_method.assert_called_once_with({"enable_agent_output": True}, method="post", rest_path="api/v2/operations/FooBar/report") # get_result_by_id gets the expected exception def test_view_operation_report_with_exception(self): @@ -263,57 +239,32 @@ class TestExample(unittest.TestCase): group = "test_group" advid = "test_id" - exp1 = {"index": "sources", - "name": "source_test_name", - "rules": [], - "relationships": [], - "facts": [] - } - exp3 = {"index": "operations", - "name": name, - "state": state, - "autonomous": 1, - 'obfuscator': 'plain-text', - 'auto_close': '1', - 'jitter': '4/8', - 'source': 'source_test_name', - 'visibility': '50', - "group": group, - "planner": "atomic", - "adversary_id": advid, - } + exp1 = {'name': 'test_name', 'group': 'test_group', 'adversary': {'adversary_id': None}, 'auto_close': False, 'state': 'test_state', 'autonomous': 1, 'planner': {'id': 'atomic'}, 'source': {'id': 'basic'}, 'use_learning_parsers': True, 'obfuscator': 'plain-text', 'jitter': '4/8', 'visibility': '51'} + with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.add_operation(name, advid, group, state) - # mock_method.assert_called_once_with(exp, method="put") - mock_method.assert_has_calls([call(exp1, method="put"), call(exp3, method="put")]) + try: + self.cc.add_operation(name=name, + advid=advid, + group=group, + state=state) + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_has_calls([call(exp1, method='post', rest_path='api/v2/operations')]) # add_operation defaults def test_add_operation_defaults(self): name = "test_name" advid = "test_id" - exp1 = {"index": "sources", - "name": "source_test_name", - "rules": [], - "relationships": [], - "facts": [] - } - exp3 = {"index": "operations", - "name": name, - "state": "running", # default - "autonomous": 1, - 'obfuscator': 'plain-text', - 'auto_close': '1', - 'jitter': '4/8', - 'source': 'source_test_name', - 'visibility': '50', - "group": "red", # default - "planner": "atomic", - "adversary_id": advid, - } + exp1 = {'name': 'test_name', 'group': '', 'adversary': {'adversary_id': None}, 'auto_close': False, 'state': 'running', 'autonomous': 1, 'planner': {'id': 'atomic'}, 'source': {'id': 'basic'}, 'use_learning_parsers': True, 'obfuscator': 'plain-text', 'jitter': '4/8', 'visibility': '51'} + with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.add_operation(name, advid) - mock_method.assert_has_calls([call(exp1, method="put"), call(exp3, method="put")]) + try: + self.cc.add_operation(name=name, + advid=advid) + except pydantic.error_wrappers.ValidationError: + pass + mock_method.assert_has_calls([call(exp1, method='post', rest_path='api/v2/operations')]) # add_adversary def test_add_adversary(self): @@ -321,109 +272,74 @@ class TestExample(unittest.TestCase): ability = "test_ability" description = "test_descritption" - exp = {"index": "adversaries", - "name": name, - "description": description, - "atomic_ordering": [{"id": ability}], - # - "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective - } + # Caldera 4 + exp_4 = { + "name": name, + 
"description": description, + "atomic_ordering": ["test_ability"], + # + "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective + } with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: self.cc.add_adversary(name, ability, description) - mock_method.assert_called_once_with(exp, method="put") + mock_method.assert_called_once_with(exp_4, method="post", rest_path="api/v2/adversaries") def test_add_adversary_default(self): name = "test_name" ability = "test_ability" - exp = {"index": "adversaries", - "name": name, - "description": "created automatically", - "atomic_ordering": [{"id": ability}], - # - "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective - } + # Caldera 4 + exp_4 = { + "name": name, + "description": "created automatically", + "atomic_ordering": ["test_ability"], + # + "objective": '495a9828-cab1-44dd-a0ca-66e58177d8cc' # default objective + } with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: self.cc.add_adversary(name, ability) - mock_method.assert_called_once_with(exp, method="put") - - # execute_ability - def test_execute_ability(self): - paw = "test_paw" - ability_id = "test_ability" - obfuscator = "plain-text" - - exp = {"paw": paw, - "ability_id": ability_id, - "obfuscator": obfuscator, - "facts": []} - with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.execute_ability(paw, ability_id, obfuscator) - mock_method.assert_called_once_with(exp, rest_path="plugin/access/exploit_ex") + mock_method.assert_called_once_with(exp_4, method="post", rest_path="api/v2/adversaries") - def test_execute_ability_default(self): - paw = "test_paw" - ability_id = "test_ability" - - exp = {"paw": paw, - "ability_id": ability_id, - "obfuscator": "plain-text", - "facts": []} - with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.execute_ability(paw, ability_id) - mock_method.assert_called_once_with(exp, rest_path="plugin/access/exploit_ex") - - # execute_operation - def test_execute_operation(self): + # set_operation_state + def test_set_operation_state(self): operation_id = "test_opid" state = "paused" - exp = {"index": "operation", - "op_id": operation_id, - "state": state} with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.execute_operation(operation_id, state) - mock_method.assert_called_once_with(exp) + self.cc.set_operation_state(operation_id, state) + mock_method.assert_called_once_with({'state': 'paused'}, method='patch', rest_path='api/v2/operations/test_opid') # not supported state - def test_execute_operation_not_supported(self): + def test_set_operation_state_not_supported(self): operation_id = "test_opid" state = "not supported" with self.assertRaises(ValueError): with patch.object(self.cc, "__contact_server__", return_value=None): - self.cc.execute_operation(operation_id, state) + self.cc.set_operation_state(operation_id, state) - def test_execute_operation_default(self): + def test_set_operation_state_default(self): operation_id = "test_opid" - exp = {"index": "operation", - "op_id": operation_id, - "state": "running" # default - } with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: - self.cc.execute_operation(operation_id) - mock_method.assert_called_once_with(exp) + self.cc.set_operation_state(operation_id) + mock_method.assert_called_once_with({'state': 'running'}, method='patch', rest_path='api/v2/operations/test_opid') # 
delete_operation def test_delete_operation(self): opid = "test_opid" - exp = {"index": "operations", - "id": opid} with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: self.cc.delete_operation(opid) - mock_method.assert_called_once_with(exp, method="delete") + mock_method.assert_called_once_with({}, method="delete", rest_path="api/v2/operations/test_opid") # delete_adversary def test_delete_adversary(self): adid = "test_adid" - exp = {"index": "adversaries", - "adversary_id": [{"adversary_id": adid}]} with patch.object(self.cc, "__contact_server__", return_value=None) as mock_method: self.cc.delete_adversary(adid) - mock_method.assert_called_once_with(exp, method="delete") + mock_method.assert_called_once_with(None, method="delete", rest_path="api/v2/adversaries/test_adid") # is_operation_finished def test_is_operation_finished_true(self): @@ -442,14 +358,6 @@ class TestExample(unittest.TestCase): res = self.cc.is_operation_finished(opid) self.assertEqual(res, False) - def test_is_operation_finished_exception(self): - opdata = [{"chain": [{"statusa": 1}]}] - opid = "does not matter" - - with self.assertRaises(CalderaError): - with patch.object(self.cc, "get_operation_by_id", return_value=opdata): - self.cc.is_operation_finished(opid) - def test_is_operation_finished_exception2(self): opdata = [] opid = "does not matter" From 928934ae8db985b479a844146245ce02001fbec4 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Mon, 31 Jan 2022 16:05:21 +0100 Subject: [PATCH 12/14] some git cleanup --- .gitignore | 7 + .../sensors/linux_filebeat/filebeat.conf | 19 ++ .../sensors/linux_filebeat/filebeat.yml | 250 ++++++++++++++++++ 3 files changed, 276 insertions(+) create mode 100644 plugins/default/sensors/linux_filebeat/filebeat.conf create mode 100644 plugins/default/sensors/linux_filebeat/filebeat.yml diff --git a/.gitignore b/.gitignore index cee3c1b..29838c2 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,13 @@ share/python-wheels/ *.egg MANIFEST +# My own specific things +*.zip +out/* +loot/* +harvest/* +doc/documentation.zip + # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. diff --git a/plugins/default/sensors/linux_filebeat/filebeat.conf b/plugins/default/sensors/linux_filebeat/filebeat.conf new file mode 100644 index 0000000..125babd --- /dev/null +++ b/plugins/default/sensors/linux_filebeat/filebeat.conf @@ -0,0 +1,19 @@ +input { + beats { + port => 5044 + } +} + +filter {} + +output { + file { + path => "/tmp/filebeat_collection.json" + codec => json + id => "id_filebeat" + create_if_deleted => true + write_behavior => "append" + } + + stdout{} +} \ No newline at end of file diff --git a/plugins/default/sensors/linux_filebeat/filebeat.yml b/plugins/default/sensors/linux_filebeat/filebeat.yml new file mode 100644 index 0000000..e0498ed --- /dev/null +++ b/plugins/default/sensors/linux_filebeat/filebeat.yml @@ -0,0 +1,250 @@ +###################### Filebeat Configuration Example ######################### + +# This file is an example configuration file highlighting only the most common +# options. The filebeat.reference.yml file from the same directory contains all the +# supported options with more comments. You can use it as a reference. 
+# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/filebeat/index.html + +# For more available modules and options, please see the filebeat.reference.yml sample +# configuration file. + +# ============================== Filebeat inputs =============================== + +filebeat.inputs: + +# Each - is an input. Most options can be set at the input level, so +# you can use different inputs for various configurations. +# Below are the input specific configurations. + +- type: log + + # Change to true to enable this input configuration. + enabled: true + + # Paths that should be crawled and fetched. Glob based paths. + paths: + - /var/log/*.log + #- c:\programdata\elasticsearch\logs\* + + # Exclude lines. A list of regular expressions to match. It drops the lines that are + # matching any regular expression from the list. + #exclude_lines: ['^DBG'] + + # Include lines. A list of regular expressions to match. It exports the lines that are + # matching any regular expression from the list. + include_lines: ['^ERR', '^WARN', "Failed password","user unknown", "invalid user"] + + # Exclude files. A list of regular expressions to match. Filebeat drops the files that + # are matching any regular expression from the list. By default, no files are dropped. + #exclude_files: ['.gz$'] + + # Optional additional fields. These fields can be freely picked + # to add additional information to the crawled log files for filtering + #fields: + # level: debug + # review: 1 + + ### Multiline options + + # Multiline can be used for log messages spanning multiple lines. This is common + # for Java Stack Traces or C-Line Continuation + + # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [ + #multiline.pattern: ^\[ + + # Defines if the pattern set under pattern should be negated or not. Default is false. + #multiline.negate: false + + # Match can be set to "after" or "before". It is used to define if lines should be append to a pattern + # that was (not) matched before or after or as long as a pattern is not matched based on negate. + # Note: After is the equivalent to previous and before is the equivalent to to next in Logstash + #multiline.match: after + +# ============================== Filebeat modules ============================== + +#filebeat.modules: +# - module: system +# syslog: +# enabled: true +# var.paths: ["/var/log/syslog*"] +# auth: +# enabled: true +# var.paths: ["/var/log/auth.log"] + +filebeat.config.modules: + + # Glob pattern for configuration loading + path: ${path.config}/modules.d/*.yml + + # Set to true to enable config reloading +# reload.enabled: false + + # Period on which files under path should be checked for changes + #reload.period: 10s + +# ======================= Elasticsearch template setting ======================= + +setup.template.settings: + index.number_of_shards: 1 + #index.codec: best_compression + #_source.enabled: false + + +# ================================== General =================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. 
+#fields: +# env: staging + +# ================================= Dashboards ================================= +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here or by using the `setup` command. +#setup.dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#setup.dashboards.url: + +# =================================== Kibana =================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Kibana Space ID + # ID of the Kibana Space into which the dashboards should be loaded. By default, + # the Default Space will be used. + #space.id: + +# =============================== Elastic Cloud ================================ + +# These settings simplify using Filebeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +# ================================== Outputs =================================== + +# Configure what output to use when sending the data collected by the beat. + +# ---------------------------- Elasticsearch Output ---------------------------- +#output.elasticsearch: + # Array of hosts to connect to. + #hosts: ["localhost:9200"] + + # Protocol - either `http` (default) or `https`. + #protocol: "https" + + # Authentication credentials - either API key or username/password. + #api_key: "id:api_key" + #username: "elastic" + #password: "changeme" + +# ------------------------------ Logstash Output ------------------------------- +output.logstash: + # The Logstash hosts + hosts: ["localhost:5044"] + + # Optional SSL. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + +# ================================= Processors ================================= +processors: + - add_host_metadata: + when.not.contains.tags: forwarded + - add_cloud_metadata: ~ + - add_docker_metadata: ~ + - add_kubernetes_metadata: ~ + +# ================================== Logging =================================== + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: debug + +# At debug level, you can selectively enable logging only for some components. +# To enable all selectors use ["*"]. Examples of other selectors are "beat", +# "publish", "service". 
+#logging.selectors: ["*"] + +# ============================= X-Pack Monitoring ============================== +# Filebeat can export internal metrics to a central Elasticsearch monitoring +# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The +# reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#monitoring.enabled: false + +# Sets the UUID of the Elasticsearch cluster under which monitoring data for this +# Filebeat instance will appear in the Stack Monitoring UI. If output.elasticsearch +# is enabled, the UUID is derived from the Elasticsearch cluster referenced by output.elasticsearch. +#monitoring.cluster_uuid: + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. +# Note that the settings should point to your Elasticsearch *monitoring* cluster. +# Any setting that is not set is automatically inherited from the Elasticsearch +# output configuration, so if you have the Elasticsearch output configured such +# that it is pointing to your Elasticsearch monitoring cluster, you can simply +# uncomment the following line. +#monitoring.elasticsearch: + +# ============================== Instrumentation =============================== + +# Instrumentation support for the filebeat. +#instrumentation: + # Set to true to enable instrumentation of filebeat. + #enabled: false + + # Environment in which filebeat is running on (eg: staging, production, etc.) + #environment: "" + + # APM Server hosts to report instrumentation results to. + #hosts: + # - http://localhost:8200 + + # API Key for the APM Server(s). + # If api_key is set then secret_token will be ignored. + #api_key: + + # Secret token for the APM Server(s). + #secret_token: + + +# ================================= Migration ================================== + +# This allows to enable 6.7 migration aliases +#migration.6_to_7.enabled: true From 0b9c2b7a8d2437b302e5a44d6a61d6174b5ee995 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Tue, 1 Feb 2022 09:59:29 +0100 Subject: [PATCH 13/14] Added argcomplete to command line tools --- README.md | 10 +++++++++- caldera_control.py | 4 +++- doc_generator.py | 15 +++++++++------ experiment_control.py | 16 ++++++++++------ init.sh | 5 ++++- machine_control.py | 4 +++- plugin_manager.py | 4 +++- pydantic_test.py | 14 +++++++++----- requirements.txt | 3 +++ tox.ini | 1 + 10 files changed, 54 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 8e1d8d0..37a8d41 100644 --- a/README.md +++ b/README.md @@ -183,4 +183,12 @@ Code review will be happening on github. If everything is nice, you should squas ``` git rebase --interactive git push --force -``` \ No newline at end of file +``` + +### Argcomplete + +https://kislyuk.github.io/argcomplete/ + +Is a argparse extension that registers the command line arguments for bash. It requires a command line command to register it globally. This is added to init.sh + +The configuration will be set in /etc/bash_completion.d/ . 
Keep in mind: It will require a shell restart to be activated \ No newline at end of file diff --git a/caldera_control.py b/caldera_control.py index ba70683..0880861 100755 --- a/caldera_control.py +++ b/caldera_control.py @@ -1,9 +1,10 @@ #!/usr/bin/env python3 - +# PYTHON_ARGCOMPLETE_OK """ A command line tool to control a caldera server """ import argparse from pprint import pprint +import argcomplete # from app.calderacontrol import CalderaControl from app.calderaapi_4 import CalderaAPI @@ -319,6 +320,7 @@ def create_parser(): if __name__ == "__main__": parser = create_parser() + argcomplete.autocomplete(parser) args = parser.parse_args() print(args.caldera_url) diff --git a/doc_generator.py b/doc_generator.py index 6415a34..80feb7e 100755 --- a/doc_generator.py +++ b/doc_generator.py @@ -1,8 +1,9 @@ #!/usr/bin/env python3 - +# PYTHON_ARGCOMPLETE_OK """ Generate human readable document describing the attack based on an attack log """ import argparse +import argcomplete from app.doc_generator import DocGenerator DEFAULT_ATTACK_LOG = "removeme/loot/2021_09_08___07_41_35/attack.json" # FIN 7 first run on environment @@ -10,16 +11,18 @@ DEFAULT_ATTACK_LOG = "removeme/loot/2021_09_08___07_41_35/attack.json" # FIN 7 def create_parser(): """ Creates the parser for the command line arguments""" - parser = argparse.ArgumentParser("Controls an experiment on the configured systems") + lparser = argparse.ArgumentParser("Controls an experiment on the configured systems") - parser.add_argument("--attack_log", default=DEFAULT_ATTACK_LOG, help="The attack log the document is based on") - parser.add_argument("--outfile", default="tools/human_readable_documentation/source/contents.rst", help="The default output file") + lparser.add_argument("--attack_log", default=DEFAULT_ATTACK_LOG, help="The attack log the document is based on") + lparser.add_argument("--outfile", default="tools/human_readable_documentation/source/contents.rst", help="The default output file") - return parser + return lparser if __name__ == "__main__": - arguments = create_parser().parse_args() + parser = create_parser() + argcomplete.autocomplete(parser) + arguments = parser.parse_args() dg = DocGenerator() dg.generate(arguments.attack_log, arguments.outfile) diff --git a/experiment_control.py b/experiment_control.py index 504015b..1113cc0 100755 --- a/experiment_control.py +++ b/experiment_control.py @@ -1,7 +1,9 @@ #!/usr/bin/env python3 +# PYTHON_ARGCOMPLETE_OK """ The main tool to run experiments """ import argparse +import argcomplete from app.experimentcontrol import Experiment @@ -36,11 +38,11 @@ def run(args): def create_parser(): """ Creates the parser for the command line arguments""" - parser = argparse.ArgumentParser("Controls an experiment on the configured systems") - subparsers = parser.add_subparsers(help="sub-commands") + lparser = argparse.ArgumentParser("Controls an experiment on the configured systems") + subparsers = lparser.add_subparsers(help="sub-commands") - parser.set_defaults(func=explain) - parser.add_argument('--verbose', '-v', action='count', default=0) + lparser.set_defaults(func=explain) + lparser.add_argument('--verbose', '-v', action='count', default=0) # Sub parser for machine creation parser_run = subparsers.add_parser("run", help="run experiments") @@ -49,9 +51,11 @@ def create_parser(): parser_run.add_argument("--caldera_attack", default=None, help="The id of a specific caldera attack to run, will override experiment configuration for attacks") parser_run.add_argument("--caldera_attack_file", 
default=None, help="The file name containing a list of caldera attacks to run, will override experiment configuration for attacks") - return parser + return lparser if __name__ == "__main__": - arguments = create_parser().parse_args() + parser = create_parser() + argcomplete.autocomplete(parser) + arguments = parser.parse_args() arguments.func(arguments) diff --git a/init.sh b/init.sh index 4572b40..62acebe 100755 --- a/init.sh +++ b/init.sh @@ -13,4 +13,7 @@ sudo apt-get -y install latexmk texlive-fonts-recommended texlive-latex-recommen python3 -m venv venv source venv/bin/activate -pip3 install -r requirements.txt \ No newline at end of file +pip3 install -r requirements.txt + +# Registering argcomplete globally. See README.md +sudo ./venv/bin/activate-global-python-argcomplete \ No newline at end of file diff --git a/machine_control.py b/machine_control.py index b1780eb..836707e 100644 --- a/machine_control.py +++ b/machine_control.py @@ -1,7 +1,9 @@ #!/usr/bin/env python3 +# PYTHON_ARGCOMPLETE_OK """ Demo program to set up and control the machines """ import argparse +import argcomplete import yaml @@ -89,7 +91,7 @@ def create_parser(): if __name__ == "__main__": parser = create_parser() - + argcomplete.autocomplete(parser) args = parser.parse_args() args.func(args) diff --git a/plugin_manager.py b/plugin_manager.py index 42f4ae3..57aea51 100755 --- a/plugin_manager.py +++ b/plugin_manager.py @@ -1,8 +1,10 @@ #!/usr/bin/env python3 +# PYTHON_ARGCOMPLETE_OK """ Managing plugins """ import argparse import sys +import argcomplete from app.pluginmanager import PluginManager from app.attack_log import AttackLog @@ -66,7 +68,7 @@ def create_parser(): if __name__ == "__main__": parser = create_parser() - + argcomplete.autocomplete(parser) args = parser.parse_args() exval = args.func(args) diff --git a/pydantic_test.py b/pydantic_test.py index 83729ad..79e2b24 100755 --- a/pydantic_test.py +++ b/pydantic_test.py @@ -1,10 +1,12 @@ #!/usr/bin/env python3 - +# PYTHON_ARGCOMPLETE_OK """ A command line tool to verify PurpleDome configuration files """ import argparse from pprint import pprint import sys +import argcomplete + import yaml from app.config_verifier import MainConfig @@ -18,15 +20,17 @@ def load(filename): def create_parser(): """ Creates the parser for the command line arguments""" - parser = argparse.ArgumentParser("Parse a config file and verifies it") + lparser = argparse.ArgumentParser("Parse a config file and verifies it") - parser.add_argument('--filename', default="experiment_ng.yaml") + lparser.add_argument('--filename', default="experiment_ng.yaml") - return parser + return lparser if __name__ == "__main__": - arguments = create_parser().parse_args() + parser = create_parser() + argcomplete.autocomplete(parser) + arguments = parser.parse_args() try: r = load(arguments.filename) except TypeError as e: diff --git a/requirements.txt b/requirements.txt index ddf586e..30906be 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,3 +26,6 @@ types-PyYAML==5.4.6 types-requests==2.25.6 types-simplejson==3.17.0 types-paramiko==2.7.0 + +# Argcomplete. 
See README.md +argcomplete==2.0.0 diff --git a/tox.ini b/tox.ini index 24cbb81..fba54b8 100644 --- a/tox.ini +++ b/tox.ini @@ -35,6 +35,7 @@ deps = -r requirements.txt safety bandit pylint + argcomplete commands = From e385c6ed698df0820f7da1541d4ef4dc9912eb73 Mon Sep 17 00:00:00 2001 From: Thorsten Sick Date: Thu, 3 Feb 2022 13:25:59 +0100 Subject: [PATCH 14/14] Basic doc upgrade --- caldera_control.py | 88 +++++++++++++---------------- doc/doc_todo.txt | 12 ++++ doc/source/basics/background.rst | 85 ++++++---------------------- doc/source/basics/configuration.rst | 6 +- doc/source/index.rst | 4 +- doc/source/usage/cli.rst | 24 ++++++++ doc_generator.py | 33 ++++++++--- experiment_control.py | 2 +- machine_control.py | 15 +++-- plugin_manager.py | 26 ++++++--- pydantic_test.py | 2 +- 11 files changed, 151 insertions(+), 146 deletions(-) create mode 100644 doc/doc_todo.txt diff --git a/caldera_control.py b/caldera_control.py index 0880861..d3c7f30 100755 --- a/caldera_control.py +++ b/caldera_control.py @@ -6,6 +6,7 @@ import argparse from pprint import pprint import argcomplete +# from app.calderacontrol import CalderaControl # from app.calderacontrol import CalderaControl from app.calderaapi_4 import CalderaAPI @@ -40,36 +41,22 @@ def agents(calcontrol, arguments): # pylint: disable=unused-argument print(calcontrol.kill_agent(arguments.paw)) -def list_facts(calcontrol, arguments): # pylint: disable=unused-argument - """ Call list fact stores ("sources") in caldera control +def facts(calcontrol, arguments): + """ Deal with fact stores ("sources") in caldera control @param calcontrol: Connection to the caldera server @param arguments: Parser command line arguments """ - printme = "No found" - - if arguments.name: - printme = calcontrol.list_facts_for_name(arguments.name) - else: - printme = calcontrol.list_sources() - - print(f"Stored facts: {printme}") - - -def add_facts(calcontrol, arguments): # pylint: disable=unused-argument - """ Generate new facts in caldera - - @param calcontrol: Connection to the caldera server - @param arguments: Parser command line arguments - """ - name = "Test" - data = {"foo": "bar"} + if arguments.list: + if arguments.name is None: + raise CmdlineArgumentException("Listing facts by name requires a name") - print(f'Created fact: {calcontrol.add_sources(name, data)}') + print_me = calcontrol.list_facts_for_name(arguments.name) + print(f"Stored facts: {print_me}") -def list_abilities(calcontrol, arguments): +def abilities(calcontrol, arguments): """ Call list abilities in caldera control @param calcontrol: Connection to the caldera server @@ -77,11 +64,11 @@ def list_abilities(calcontrol, arguments): """ if arguments.list: - abilities = calcontrol.list_abilities() - abi_ids = [aid.ability_id for aid in abilities] + ability_list = calcontrol.list_abilities() + abi_ids = [aid.ability_id for aid in ability_list] print(abi_ids) - for abi in abilities: + for abi in ability_list: for executor in abi.executors: for a_parser in executor.parsers: pprint(a_parser.relationships) @@ -202,7 +189,7 @@ def operations(calcontrol, arguments): def attack(calcontrol, arguments): - """ Calling attack + """ Starting an attack @param calcontrol: Connection to the caldera server @param arguments: Parser command line arguments @@ -217,56 +204,57 @@ def attack(calcontrol, arguments): def create_parser(): """ Creates the parser for the command line arguments""" - main_parser = argparse.ArgumentParser("Controls a Caldera server to attack other systems") + main_parser = 
argparse.ArgumentParser("Controls a Caldera server. Use this to test your Caldera setup or the Caldera API.") main_parser.add_argument('--verbose', '-v', action='count', default=0) subparsers = main_parser.add_subparsers(help="sub-commands") # Sub parser for attacks - parser_attack = subparsers.add_parser("attack", help="attack system") + parser_attack = subparsers.add_parser("attack", help="Attack system") parser_attack.set_defaults(func=attack) - parser_attack.add_argument("--paw", default="kickme", help="paw to attack and get specific results for") - parser_attack.add_argument("--group", default="red", help="target group to attack") + parser_attack.add_argument("--paw", default="kickme", help="Paw to attack and get specific results for") + parser_attack.add_argument("--group", default="red", help="Target group to attack") parser_attack.add_argument("--ability_id", default="bd527b63-9f9e-46e0-9816-b8434d2b8989", help="The ability to use for the attack") # Sub parser to list abilities - parser_abilities = subparsers.add_parser("abilities", help="abilities") + parser_abilities = subparsers.add_parser("abilities", help="Control Caldera abilities ( aka exploits)") # parser_abilities.add_argument("--abilityid", default=None, help="Id of the ability to list") - parser_abilities.set_defaults(func=list_abilities) - parser_abilities.add_argument("--ability_ids", default=[], nargs="+", - help="The abilities to look up. One or more ids") + parser_abilities.set_defaults(func=abilities) + # parser_abilities.add_argument("--ability_ids", default=[], nargs="+", + # help="The abilities to look up. One or more ids") parser_abilities.add_argument("--list", default=False, action="store_true", help="List all abilities") - parser_agents = subparsers.add_parser("agents", help="agents") + parser_agents = subparsers.add_parser("agents", help="Control Caldera agents ( aka implants)") parser_agents.set_defaults(func=agents) parser_agents.add_argument("--list", default=False, action="store_true", help="List all agents") - parser_agents.add_argument("--delete", default=False, action="store_true", help="Delete agent") - parser_agents.add_argument("--kill", default=False, action="store_true", help="Delete agent") - parser_agents.add_argument("--paw", default=None, help="PAW to delete. if not set it will delete all agents") + parser_agents.add_argument("--delete", default=False, action="store_true", help="Delete agent from database") + parser_agents.add_argument("--kill", default=False, action="store_true", help="Kill agent on target system") + parser_agents.add_argument("--paw", default=None, help="PAW to delete or kill. If this is not set it will delete all agents") parser_facts = subparsers.add_parser("facts", help="facts") - parser_facts.set_defaults(func=list_facts) + parser_facts.set_defaults(func=facts) + parser_facts.add_argument("--list", default=False, action="store_true", help="List facts") parser_facts.add_argument("--name", default=None, help="Name of a fact source to focus on") - parser_facts = subparsers.add_parser("add_facts", help="facts") - parser_facts.set_defaults(func=add_facts) + # parser_facts = subparsers.add_parser("add_facts", help="facts") + # parser_facts.set_defaults(func=add_facts) # Sub parser for obfuscators - parser_obfuscators = subparsers.add_parser("obfuscators", help="obfuscators") + parser_obfuscators = subparsers.add_parser("obfuscators", help="Obfuscator interface. 
Hide the attack") parser_obfuscators.set_defaults(func=obfuscators) parser_obfuscators.add_argument("--list", default=False, action="store_true", help="List all obfuscators") # Sub parser for objectives - parser_objectives = subparsers.add_parser("objectives", help="objectives") + parser_objectives = subparsers.add_parser("objectives", help="Objectives interface") parser_objectives.set_defaults(func=objectives) parser_objectives.add_argument("--list", default=False, action="store_true", help="List all objectives") # Sub parser for adversaries - parser_adversaries = subparsers.add_parser("adversaries", help="adversaries") + parser_adversaries = subparsers.add_parser("adversaries", help="Adversary interface. Adversaries are attacker archetypes") parser_adversaries.set_defaults(func=adversaries) parser_adversaries.add_argument("--list", default=False, action="store_true", help="List all adversaries") @@ -279,7 +267,7 @@ def create_parser(): parser_adversaries.add_argument("--adversary_id", "--advid", default=None, help="Adversary ID") # Sub parser for operations - parser_operations = subparsers.add_parser("operations", help="operations") + parser_operations = subparsers.add_parser("operations", help="Attack operation interface") parser_operations.set_defaults(func=operations) parser_operations.add_argument("--list", default=False, action="store_true", help="List all operations") @@ -291,7 +279,7 @@ def create_parser(): help="View the report of a finished operation") parser_operations.add_argument("--name", default=None, help="Name of the operation") parser_operations.add_argument("--adversary_id", "--advid", default=None, help="Adversary ID") - parser_operations.add_argument("--source_id", "--sourceid", default="basic", help="'Source' ID") + parser_operations.add_argument("--source_id", "--sourceid", default="basic", help="Source ID") parser_operations.add_argument("--planner_id", "--planid", default="atomic", help="Planner ID") parser_operations.add_argument("--group", default="", help="Caldera group to run the operation on (we are targeting groups, not PAWs)") parser_operations.add_argument("--state", default="running", help="State to start the operation in") @@ -300,20 +288,20 @@ def create_parser(): parser_operations.add_argument("--id", default=None, help="ID of operation to delete") # Sub parser for sources - parser_sources = subparsers.add_parser("sources", help="sources") + parser_sources = subparsers.add_parser("sources", help="Data source management") parser_sources.set_defaults(func=sources) parser_sources.add_argument("--list", default=False, action="store_true", help="List all sources") # Sub parser for planners - parser_sources = subparsers.add_parser("planners", help="planners") + parser_sources = subparsers.add_parser("planners", help="Planner management. 
They define the pattern of attack steps") parser_sources.set_defaults(func=planners) parser_sources.add_argument("--list", default=False, action="store_true", help="List all planners") # For all parsers - main_parser.add_argument("--caldera_url", help="caldera url, including port", default="http://localhost:8888/") - main_parser.add_argument("--apikey", help="caldera api key", default="ADMIN123") + main_parser.add_argument("--caldera_url", help="The Caldera url, including port and protocol (http://)", default="http://localhost:8888/") + main_parser.add_argument("--apikey", help="Caldera api key", default="ADMIN123") return main_parser diff --git a/doc/doc_todo.txt b/doc/doc_todo.txt new file mode 100644 index 0000000..ba3c743 --- /dev/null +++ b/doc/doc_todo.txt @@ -0,0 +1,12 @@ + + + + +TODO: What sensors are pre-installed ? +TODO: How to attack it ? +TODO: How to contact the servers (ssh/...) ? Scriptable +TODO: How to run it without sudo ? +TODO: Which data is collected ? How to access it ? How to get data dumps out ? +TODO: Add Linux Server +TODO: Add Mac Server + diff --git a/doc/source/basics/background.rst b/doc/source/basics/background.rst index e28d0d3..42bd525 100644 --- a/doc/source/basics/background.rst +++ b/doc/source/basics/background.rst @@ -6,15 +6,15 @@ Purple Dome is a simulated and automated environment to experiment with hacking PurpleDome is relevant for you: -* If you develop sensors for bolt on security -* If you want to test detection logic for your bolt on security -* If you want to stress test mitigation around your vulnerable apps -* Experiment with hardening your OS or software -* Want to forensically analyse a system after an attack -* Do some blue team exercises -* Want to train ML on data from real attacks +* If you develop **sensors** for bolt on security +* If you want to test **detection logic** for your bolt on security +* If you want to stress test **mitigation** around your vulnerable apps +* Experiment with **hardening** your OS or software +* Want to **forensically** analyse a system after an attack +* Do some **blue team exercises** +* Want to **train ML** on data from real attacks -PurpleDome simulates a small busniess network. It generates an attacker VM and target VMs. Automated attacks are then run against the targets. +PurpleDome simulates a small business network. It generates an attacker VM and target VMs. Automated attacks are then run against the targets. Depending on which sensors you picked you will get their logs. And the logs from the attacks. Perfect to compare them side-by-side. @@ -52,54 +52,18 @@ The experiments are configured in YAML files, the format is described in the *co If you want to modify Purple Dome and contribute to it I can point you to the *Extending* chapter. Thanks to a plugin interface this is quite simple. - - - - -TODO: What sensors are pre-installed ? -TODO: How to attack it ? -TODO: How to contact the servers (ssh/...) ? Scriptable -TODO: How to run it without sudo ? -TODO: Which data is collected ? How to access it ? How to get data dumps out ? 
-TODO: Add Linux Server -TODO: Add Mac Server - - - Data aggregator --------------- We currently can use logstash -There are several options for data aggregators: - -* Fleet OSQuery aggregator: https://github.com/kolide/fleet -* The Hive - - -Sensors on Targets (most are Windows) -------------------------------------- - -Those sensors are not integrated but could be nice to play with: - -Palantir Windows Event forwarding: https://github.com/palantir/windows-event-forwarding - -Autorun monitoring: https://github.com/palantir/windows-event-forwarding/tree/master/AutorunsToWinEventLog - -Palantir OSquery: https://github.com/palantir/osquery-configuration - -SwiftOnSecurity Sysmon config: https://github.com/SwiftOnSecurity/sysmon-config - - -Palantir OSQuery is mixed OS: Windows/Mac Endpoints, Linux Servers - Caldera ------- -Attack framework. +Caldera is an attack framework. Especially useful for pen testing and blue team training. -Starting: *python3 server.py --insecure* +Starting it: *python3 server.py --insecure* Web UI on *http://localhost:8888/* @@ -114,40 +78,25 @@ server="http://192.168.178.45:8888";curl -s -X POST -H "file:sandcat.go" -H "pla Filebeat -------- -Filebeat has a set of modules: +Filebeat collects logs on the target system. + +It has a set of modules: https://www.elastic.co/guide/en/beats/filebeat/6.8/filebeat-modules-overview.html -List modules: *filebeat modules list* +You can view a list of modules using: *filebeat modules list* -%% TODO: Add OSQueryD https://osquery.readthedocs.io/en/latest/introduction/using-osqueryd/ Logstash -------- +Logstash is used to aggregate the data from filebeat into a json file. + Logstash uses all .conf files in /etc/logstash/conf.d https://www.elastic.co/guide/en/logstash/current/config-setting-files.html -Alternative: The Hive ---------------------- - -Sander Spierenburg (SOC Teamlead) seems to be interested in The Hive. So it is back in the game - - - Repos ----- -* The main part: https://git.int.avast.com/ai-research/purpledome -* Caldera fork to fix bugs: TBD -* Caldera Plugin for statistics: - - -Links ------ - -* Others detecting this kind of things - - - https://redcanary.com/blog/how-one-hospital-thwarted-a-ryuk-ransomware-outbreak/ - +PurpleDome can be found on github: https://git.int.avast.com/ai-research/purpledome diff --git a/doc/source/basics/configuration.rst b/doc/source/basics/configuration.rst index 0768397..56665cd 100644 --- a/doc/source/basics/configuration.rst +++ b/doc/source/basics/configuration.rst @@ -4,6 +4,8 @@ Configuration Configuration is contained in yaml files. The example shipped with the code is *template.yaml*. +For your first experiments use *hello_world.yaml* which will run a simple attack on a simulated system. + To define the VMs there are also *Vagrantfiles* and associated scripts. The example shipped with the code is in the *systems* folder. Using Vagrant is optional. Machines @@ -25,6 +27,8 @@ You can install vulnerabilities and weaknesses in the targets to allow your atta Sensors ======= +Sensors are all kinds of technology monitoring system events and collecting data required to detect an attack. Either while it happens or as a forensic experiment. + Each machine can have a list of sensors to run on it. In addition there is the global *sensor_conf* setting to configure the sensors. Sensors are implemented as plugins. @@ -37,7 +41,7 @@ caldera_attacks Caldera attacks (called abilities) are identified by a unique ID. Some abilities are built to target several OS-es. 
-All Caldera abilities are available. As some will need parameters and Caldera does not offer the option to configure those in the YAML, some caldera attacks might not work without implementing a plugin. +All Caldera abilities are available. As some will need parameters and PurpleDome does not offer the option to configure those in the YAML, some caldera attacks might not work without implementing a plugin. In the YAML file you will find two sub-categories under caldera_attacks: linux and windows. There you just list the ids of the caldera attacks to run on those systems. diff --git a/doc/source/index.rst b/doc/source/index.rst index 0a56f89..3e50f6a 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -8,8 +8,8 @@ .. Autodoc part .. https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#module-sphinx.ext.autodoc -Welcome to the Purple Dome documentation! -========================================= +Welcome to the Purple Dome documentation +======================================== .. toctree:: :maxdepth: 3 diff --git a/doc/source/usage/cli.rst b/doc/source/usage/cli.rst index 2db54d7..03114ce 100644 --- a/doc/source/usage/cli.rst +++ b/doc/source/usage/cli.rst @@ -9,6 +9,10 @@ The central one is Experiment control where you start your experiments: .. asciinema:: ./../asciinema/experiment_control.cast :speed: 2 + + + + Experiment control ================== @@ -19,6 +23,10 @@ Experiment control is the core tool to run an experiment. It accepts a yaml conf :func: create_parser :prog: ./experiment_control.py + + + + Testing YAML files ================== @@ -29,6 +37,10 @@ Configuration can be a bit complex and mistakes can happen. To find them before :func: create_parser :prog: ./pydantic_test.py + + + + Plugin manager ============== @@ -39,6 +51,10 @@ List available plugins or a specific plugin config. Most importantly: You can ve :func: create_parser :prog: ./plugin_manager.py + + + + Caldera control =============== @@ -49,6 +65,10 @@ Directly control a caldera server. 
You will need a running caldera server to con :func: create_parser :prog: ./caldera_control.py + + + + Machine control =============== @@ -59,6 +79,10 @@ Directly control the machines :func: create_parser :prog: ./machine_control.py + + + + Doc generator ============= diff --git a/doc_generator.py b/doc_generator.py index 80feb7e..42d2529 100755 --- a/doc_generator.py +++ b/doc_generator.py @@ -6,15 +6,29 @@ import argparse import argcomplete from app.doc_generator import DocGenerator -DEFAULT_ATTACK_LOG = "removeme/loot/2021_09_08___07_41_35/attack.json" # FIN 7 first run on environment + +class CmdlineArgumentException(Exception): + """ An error in the user supplied command line """ + + +def create(arguments): + """ Create a document """ + + if arguments.attack_log is None: + raise CmdlineArgumentException("Creating a new document requires an attack_log") + + doc_get = DocGenerator() + doc_get.generate(arguments.attack_log, arguments.outfile) def create_parser(): """ Creates the parser for the command line arguments""" - lparser = argparse.ArgumentParser("Controls an experiment on the configured systems") - - lparser.add_argument("--attack_log", default=DEFAULT_ATTACK_LOG, help="The attack log the document is based on") - lparser.add_argument("--outfile", default="tools/human_readable_documentation/source/contents.rst", help="The default output file") + lparser = argparse.ArgumentParser("Manage attack documentation") + subparsers = lparser.add_subparsers(help="sub-commands") + parser_create = subparsers.add_parser("create", help="Create a new human readable document") + parser_create.set_defaults(func=create) + parser_create.add_argument("--attack_log", default=None, help="The attack log the document is based on") + parser_create.add_argument("--outfile", default="tools/human_readable_documentation/source/contents.rst", help="The default output file") return lparser @@ -22,7 +36,10 @@ def create_parser(): if __name__ == "__main__": parser = create_parser() argcomplete.autocomplete(parser) - arguments = parser.parse_args() + args = parser.parse_args() - dg = DocGenerator() - dg.generate(arguments.attack_log, arguments.outfile) + try: + str(args.func(args)) + except CmdlineArgumentException as ex: + parser.print_help() + print(f"\nCommandline error: {ex}") diff --git a/experiment_control.py b/experiment_control.py index 1113cc0..2003655 100755 --- a/experiment_control.py +++ b/experiment_control.py @@ -42,7 +42,7 @@ def create_parser(): subparsers = lparser.add_subparsers(help="sub-commands") lparser.set_defaults(func=explain) - lparser.add_argument('--verbose', '-v', action='count', default=0) + lparser.add_argument('--verbose', '-v', action='count', default=0, help="Verbosity level") # Sub parser for machine creation parser_run = subparsers.add_parser("run", help="run experiments") diff --git a/machine_control.py b/machine_control.py index 836707e..c0a967f 100644 --- a/machine_control.py +++ b/machine_control.py @@ -13,11 +13,10 @@ from app.attack_log import AttackLog def create_machines(arguments): - """ + """ Create machines based on config @param arguments: The arguments from argparse """ - # TODO: Add argparse and make it flexible with open(arguments.configfile) as fh: config = yaml.safe_load(fh) @@ -69,18 +68,18 @@ def download_caldera_client(arguments): def create_parser(): """ Creates the parser for the command line arguments""" - main_parser = argparse.ArgumentParser("Controls a Caldera server to attack other systems") - main_parser.add_argument('--verbose', '-v', action='count', 
default=0) + main_parser = argparse.ArgumentParser("Controls machinery to test VM interaction") + main_parser.add_argument('--verbose', '-v', action='count', default=0, help="Verbosity level") subparsers = main_parser.add_subparsers(help="sub-commands") # Sub parser for machine creation - parser_create = subparsers.add_parser("create", help="create systems") + parser_create = subparsers.add_parser("create", help="Create VM machines") parser_create.set_defaults(func=create_machines) - parser_create.add_argument("--configfile", default="experiment.yaml", help="Config file to create from") + parser_create.add_argument("--configfile", default="experiment.yaml", help="Config file to create VMs from") - parser_download_caldera_client = subparsers.add_parser("fetch_client", help="download the caldera client") + parser_download_caldera_client = subparsers.add_parser("fetch_client", help="Download the caldera client") parser_download_caldera_client.set_defaults(func=download_caldera_client) - parser_download_caldera_client.add_argument("--ip", default="192.168.178.189", help="Ip of Caldera to connect to") + parser_download_caldera_client.add_argument("--ip", default="192.168.178.189", help="IP of Caldera to connect to") parser_download_caldera_client.add_argument("--platform", default="windows", help="platform to download the client for") parser_download_caldera_client.add_argument("--file", default="sandcat.go", help="The agent to download") parser_download_caldera_client.add_argument("--target_dir", default=".", help="The target dir to download the file to") diff --git a/plugin_manager.py b/plugin_manager.py index 57aea51..4363788 100755 --- a/plugin_manager.py +++ b/plugin_manager.py @@ -10,6 +10,10 @@ from app.pluginmanager import PluginManager from app.attack_log import AttackLog +class CmdlineArgumentException(Exception): + """ An error in the user supplied command line """ + + def list_plugins(arguments): """ List plugins """ @@ -37,6 +41,10 @@ def get_default_config(arguments): attack_logger = AttackLog(arguments.verbose) plugin_manager = PluginManager(attack_logger) + if arguments.subclass_name is None: + raise CmdlineArgumentException("Getting configuration requires a subclass_name") + if arguments.plugin_name is None: + raise CmdlineArgumentException("Getting configuration requires a plugin_name") plugin_manager.print_default_config(arguments.subclass_name, arguments.plugin_name) @@ -44,7 +52,7 @@ def create_parser(): """ Creates the parser for the command line arguments""" main_parser = argparse.ArgumentParser("Manage plugins") - main_parser.add_argument('--verbose', '-v', action='count', default=0) + main_parser.add_argument('--verbose', '-v', action='count', default=0, help="Verbosity level") subparsers = main_parser.add_subparsers(help="sub-commands") # Sub parser for plugin list @@ -53,13 +61,13 @@ def create_parser(): # parser_list.add_argument("--configfile", default="experiment.yaml", help="Config file to create from") # Sub parser for plugin check - parser_list = subparsers.add_parser("check", help="check plugin implementation") + parser_list = subparsers.add_parser("check", help="Check plugin implementation") parser_list.set_defaults(func=check_plugins) - parser_default_config = subparsers.add_parser("raw_config", help="print raw default config of the given plugin") + parser_default_config = subparsers.add_parser("raw_config", help="Print raw default config of the given plugin") parser_default_config.set_defaults(func=get_default_config) - 
parser_default_config.add_argument("subclass_name", help="name of the subclass") - parser_default_config.add_argument("plugin_name", help="name of the plugin") + parser_default_config.add_argument("subclass_name", help="Name of the subclass") + parser_default_config.add_argument("plugin_name", help="Name of the plugin") # TODO: Get default config return main_parser @@ -71,5 +79,9 @@ if __name__ == "__main__": argcomplete.autocomplete(parser) args = parser.parse_args() - exval = args.func(args) - sys.exit(exval) + try: + exit_val = args.func(args) + sys.exit(exit_val) + except CmdlineArgumentException as ex: + parser.print_help() + print(f"\nCommandline error: {ex}") diff --git a/pydantic_test.py b/pydantic_test.py index 79e2b24..c0b3018 100755 --- a/pydantic_test.py +++ b/pydantic_test.py @@ -22,7 +22,7 @@ def create_parser(): """ Creates the parser for the command line arguments""" lparser = argparse.ArgumentParser("Parse a config file and verifies it") - lparser.add_argument('--filename', default="experiment_ng.yaml") + lparser.add_argument('--filename', default="experiment_ng.yaml", help="Config file to verify") return lparser