diff --git a/conf/default/auxiliary.conf.default b/conf/default/auxiliary.conf.default
index 4f6029ba9b6..ec8aeb84c73 100644
--- a/conf/default/auxiliary.conf.default
+++ b/conf/default/auxiliary.conf.default
@@ -77,3 +77,9 @@ bpf = not arp
 # Enable or disable the use of QEMU as screenshot capture [yes/no].
 # screenshots_linux and screenshots_windows must be disabled
 enabled = no
+
+[Mitmdump]
+# Enable or disable the use of mitmdump (mitmproxy) to capture a dump.har file [yes/no].
+# This module requires mitmproxy to be installed; see install_mitmproxy
+# (https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh#L1320)
+enabled = no
diff --git a/conf/default/mitmdump.conf.default b/conf/default/mitmdump.conf.default
new file mode 100644
index 00000000000..703da46560a
--- /dev/null
+++ b/conf/default/mitmdump.conf.default
@@ -0,0 +1,11 @@
+[cfg]
+# Path to the mitmdump binary
+bin = /opt/mitmproxy/mitmdump
+
+# Host IP address on which mitmdump listens
+host = 127.0.0.1
+
+# Interface on which mitmdump listens
+interface = virbr0
+
+# Future options: custom ports, certificate paths, etc.
diff --git a/conf/default/processing.conf.default b/conf/default/processing.conf.default
index 0a83dda5407..dbd297d6286 100644
--- a/conf/default/processing.conf.default
+++ b/conf/default/processing.conf.default
@@ -321,11 +321,6 @@ key =
 [script_log_processing]
 enabled = yes
 
-# Community
-# Dump PE's overlay info
-[overlay]
-enabled = no
-
 # Community
 [floss]
 enabled = no
diff --git a/conf/default/selfextract.conf.default b/conf/default/selfextract.conf.default
index 638bc1b4f49..b39903710fe 100644
--- a/conf/default/selfextract.conf.default
+++ b/conf/default/selfextract.conf.default
@@ -89,3 +89,8 @@ timeout = 60
 [msix_extract]
 enabled = no
 timeout = 60
+
+# PE file overlay
+[overlay]
+enabled = yes
+timeout = 60
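For context, a minimal sketch of how these new settings are consumed: the auxiliary module further down reads conf/mitmdump.conf through CAPE's Config wrapper, and the capture is armed per task via the mitmdump=yes option added by the submission form change. Only the names shown in this diff come from the PR; the rest is illustrative.

```python
# Illustrative only: mirrors how modules/auxiliary/Mitmdump.py (below) reads mitmdump.conf.
from lib.cuckoo.common.config import Config

mitmdump = Config("mitmdump")            # [cfg] section of conf/mitmdump.conf
bin_path = mitmdump.cfg.get("bin")       # /opt/mitmproxy/mitmdump
listen_host = mitmdump.cfg.get("host")   # 127.0.0.1
iface = mitmdump.cfg.get("interface")    # virbr0

# The capture only runs for tasks submitted with the option, e.g. options = "mitmdump=yes"
```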
diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar
index 4be2af810bc..1422e550b62 100644
--- a/data/yara/CAPE/Lumma.yar
+++ b/data/yara/CAPE/Lumma.yar
@@ -5,10 +5,11 @@ rule Lumma
         description = "Lumma Payload"
         cape_type = "Lumma Payload"
         packed = "0ee580f0127b821f4f1e7c032cf76475df9724a9fade2e153a69849f652045f8"
+        packed = "23ff1c20b16d9afaf1ce443784fc9a025434a010e2194de9dec041788c369887"
     strings:
-        $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF}
-        $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15}
-        $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11}
+        $decode1 = {C1 (E9|EA) 02 [0-3] 0F B6 (44|4C) ?? FF 83 (F8|F9) 3D 74 05 83 (F8|F9) 2E 75 01 (49|4A) [0-30] 2E 75}
+        $decode2 = {B0 40 C3 B0 3F C3 89 C8 04 D0 3C 09 77 06 80 C1 04 89 C8 C3 89 C8 04 BF 3C}
+        $decode3 = {B0 40 C3 B0 3F C3 80 F9 30 72 ?? 80 F9 39 77 06 80 C1 04 89 C8 C3}
     condition:
         uint16(0) == 0x5a4d and any of them
 }
diff --git a/installer/cape2.sh b/installer/cape2.sh
index 055dfcfa5ef..e9cb4cd5b98 100644
--- a/installer/cape2.sh
+++ b/installer/cape2.sh
@@ -1317,6 +1317,17 @@ function install_volatility3() {
     chown "${USER}:${USER}" $vol_path -R
 }
 
+function install_mitmproxy() {
+    sudo mkdir /opt/mitmproxy
+    sudo chown ${USER}:${USER} /opt/mitmproxy
+    cd /opt/mitmproxy
+    mitmproxy_version=$(curl -s https://api.github.com/repos/mitmproxy/mitmproxy/releases/latest | grep '"tag_name":' | cut -d '"' -f 4 | sed 's/^v//')
+    wget https://downloads.mitmproxy.org/"$mitmproxy_version"/mitmproxy-"$mitmproxy_version"-linux-x86_64.tar.gz -O mitmproxy.tar.gz
+    tar xvzf mitmproxy.tar.gz
+    rm mitmproxy.tar.gz
+    chown "${USER}:${USER}" /opt/mitmproxy -R
+}
+
 function install_guacamole() {
     # Kudos to @Enzok https://github.com/kevoreilly/CAPEv2/pull/1065
     # https://guacamole.apache.org/doc/gug/installing-guacamole.html
@@ -1451,6 +1462,7 @@ case "$COMMAND" in
     install_systemd
     install_jemalloc
     install_logrotate
+    install_mitmproxy
     #socksproxies is to start redsocks stuff
     if [ -f /opt/CAPEv2/socksproxies.sh ]; then
         crontab -l | { cat; echo "@reboot /opt/CAPEv2/socksproxies.sh"; } | crontab -
@@ -1501,6 +1513,8 @@ case "$COMMAND" in
    librenms_snmpd_config;;
'librenms_sneck_config')
    librenms_sneck_config;;
+'mitmproxy')
+    install_mitmproxy;;
'issues')
    issues;;
'nginx')
diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py
index 4e65c4bd18c..b49d5d31415 100644
--- a/lib/cuckoo/common/integrations/file_extra_info.py
+++ b/lib/cuckoo/common/integrations/file_extra_info.py
@@ -112,9 +112,6 @@
     HAVE_BAT_DECODER = False
     print("OPTIONAL! Missed dependency: poetry run pip install -U git+https://github.com/DissectMalware/batch_deobfuscator")
 
-processing_conf = Config("processing")
-selfextract_conf = Config("selfextract")
-
 unautoit_binary = os.path.join(CUCKOO_ROOT, selfextract_conf.UnAutoIt_extract.binary)
 
 if processing_conf.trid.enabled:
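After running the installer step (the new 'mitmproxy' case entry calls install_mitmproxy, and the full install path now runs it as well), the dropped binary can be sanity-checked from Python. An illustrative snippet, not part of the PR:

```python
# Illustrative check that the installer's drop-in binary is usable; the path comes
# from mitmdump.conf above, everything else is an assumption for demonstration.
import subprocess

out = subprocess.run(["/opt/mitmproxy/mitmdump", "--version"], capture_output=True, text=True, check=True)
print(out.stdout.splitlines()[0])  # e.g. "Mitmproxy: 10.x.x"
```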
diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py
new file mode 100644
index 00000000000..540a97d2e22
--- /dev/null
+++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py
@@ -0,0 +1,41 @@
+import logging
+import os
+
+from lib.cuckoo.common.integrations.file_extra_info_modules import (
+    ExtractorReturnType,
+    collect_extracted_filenames,
+    extractor_ctx,
+    time_tracker,
+)
+from lib.cuckoo.common.path_utils import path_write_file
+
+# from base64 import b64encode
+
+
+log = logging.getLogger(__name__)
+
+
+@time_tracker
+def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType:
+
+    if not data_dictionary.get("pe", {}).get("overlay"):
+        return {}
+
+    data = ""
+    overlay_size = int(data_dictionary["pe"]["overlay"]["size"], 16)
+    # Extract the overlay data
+    try:
+        with open(file, "rb") as f:
+            f.seek(-overlay_size, os.SEEK_END)
+            data = f.read()
+        # data_dictionary["pe"]["overlay"]["data"] = b64encode(data[: min(overlay_size, 4096)])
+    except Exception as e:
+        log.error(e)
+
+    with extractor_ctx(file, "overlay", prefix="overlay") as ctx:
+        if data:
+            tempdir = ctx["tempdir"]
+            # You may need to adjust these two lines; see other examples in `file_extra_info.py`
+            _ = path_write_file(os.path.join(tempdir, "overlay"), data)
+            ctx["extracted_files"] = collect_extracted_filenames(tempdir)
+    return ctx
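The `size` value read above is the hex string produced by the static PE analysis; a rough standalone equivalent using pefile's overlay helper (illustrative, not part of the PR) shows where that number comes from:

```python
# Illustrative sketch: derive a PE's overlay offset and size the way the static
# results are expected to report them (hex strings). Not part of the PR.
import os
import pefile

def overlay_info(path):
    pe = pefile.PE(path)
    start = pe.get_overlay_data_start_offset()  # None when there is no overlay
    if start is None:
        return None
    return {"offset": hex(start), "size": hex(os.path.getsize(path) - start)}
```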
diff --git a/modules/auxiliary/Mitmdump.py b/modules/auxiliary/Mitmdump.py
new file mode 100644
index 00000000000..68cf1ecc12c
--- /dev/null
+++ b/modules/auxiliary/Mitmdump.py
@@ -0,0 +1,130 @@
+# Copyright (C) 2024 davidsb@virustotal.com
+# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
+# See the file 'docs/LICENSE' for copying permission.
+# This module runs mitmdump to produce a HAR file.
+# mitmdump is part of the mitmproxy project - https://mitmproxy.org/
+
+import logging
+import os
+import socket
+import subprocess
+from threading import Thread
+
+from lib.cuckoo.common.abstracts import Auxiliary
+from lib.cuckoo.common.config import Config
+from lib.cuckoo.common.constants import CUCKOO_ROOT
+from lib.cuckoo.core.rooter import rooter
+
+mitmdump = Config("mitmdump")
+
+log = logging.getLogger(__name__)
+
+
+class Mitmdump(Auxiliary):
+    """Module for generating a HAR file with mitmdump."""
+
+    def __init__(self):
+        Auxiliary.__init__(self)
+        Thread.__init__(self)
+        log.info("Mitmdump module loaded")
+        self.mitmdump_thread = None
+
+    def start(self):
+        """Start mitmdump in a separate thread."""
+
+        self.mitmdump_thread = MitmdumpThread(self.task, self.machine)
+        self.mitmdump_thread.start()
+        return True
+
+    def stop(self):
+        """Stop the mitmdump capture thread."""
+        if self.mitmdump_thread:
+            self.mitmdump_thread.stop()
+
+
+class MitmdumpThread(Thread):
+    """Thread responsible for controlling the mitmdump service for each analysis."""
+
+    def __init__(self, task, machine):
+        Thread.__init__(self)
+        self.task = task
+        self.machine = machine
+        self.do_run = True
+        self.host_ip = mitmdump.cfg.get("host")
+        self.host_iface = mitmdump.cfg.get("interface")
+        self.mitmdump_bin = mitmdump.cfg.get("bin")
+        self.proc = None
+        self.host_port = self._get_unused_port()
+        self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump")
+
+    def stop(self):
+        """Stop the mitmdump capture."""
+        self.do_run = False
+
+        if self.proc and self.proc.poll() is None:
+            self.proc.terminate()
+            self.proc.wait()
+            log.info("Stopping mitmdump")
+
+        try:
+            rooter("disable_mitmdump", self.host_iface, self.machine.ip, self.host_port)
+        except subprocess.CalledProcessError as e:
+            log.error("Failed to execute firewall rules: %s", e)
+
+    def run(self):
+        """Core function that manages the module."""
+        if "mitmdump" not in self.task.options:
+            log.info("Exiting mitmdump. No parameter received.")
+            return
+
+        if self.do_run:
+            if not self.host_port:
+                log.exception("All ports in range are in use")
+                return
+
+            try:
+                rooter("enable_mitmdump", self.host_iface, self.machine.ip, self.host_port)
+            except subprocess.CalledProcessError as e:
+                log.error("Failed to execute firewall rules: %s", e)
+
+            try:
+                mitmdump_args = []
+                os.makedirs(self.mitmdump_path, exist_ok=True)
+                file_path = os.path.join(self.mitmdump_path, "dump.har")
+                mitmdump_args.extend(
+                    [
+                        self.mitmdump_bin,
+                        "-q",
+                        "--listen-host",
+                        self.host_ip,
+                        "-p",
+                        str(self.host_port),
+                        "--set",
+                        "hardump=",
+                        file_path,
+                    ]
+                )
+                mitmdump_args[-2:] = [
+                    "".join(mitmdump_args[-2:])
+                ]  # concatenate the last two arguments, otherwise the HAR file will not be created
+                self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False)
+            except (OSError, ValueError):
+                log.exception("Failed to start mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path)
+                return
+
+            log.info(
+                "Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)",
+                self.proc.pid,
+                self.host_ip,
+                self.host_port,
+                file_path,
+            )
+
+    def _get_unused_port(self) -> str | None:
+        """Return the first unused TCP port in the 8001-8080 range."""
+        ports = set(range(8001, 8081))
+        for port in ports:
+            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+                if s.connect_ex((self.host_ip, port)) != 0:
+                    return str(port)
+        return None
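The argument juggling at the end of run() exists because mitmdump expects `hardump=<path>` as a single `--set` token; building that token directly is equivalent. A sketch under assumed values (path and port are examples):

```python
# Illustrative: the same command MitmdumpThread.run() launches, with "hardump=<path>"
# passed as one --set token. All concrete values below are examples.
bin_path = "/opt/mitmproxy/mitmdump"
listen_host = "127.0.0.1"
port = 8001
file_path = "/opt/CAPEv2/storage/analyses/1/mitmdump/dump.har"

args = [bin_path, "-q", "--listen-host", listen_host, "-p", str(port), "--set", f"hardump={file_path}"]
# subprocess.Popen(args) would start the same capture.
```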
diff --git a/modules/processing/overlay.py b/modules/processing/overlay.py
deleted file mode 100644
index 7de6ade5372..00000000000
--- a/modules/processing/overlay.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import base64
-import logging
-import os
-
-from lib.cuckoo.common.abstracts import Processing
-from lib.cuckoo.common.objects import File
-from lib.cuckoo.common.path_utils import path_exists, path_mkdir, path_write_file
-
-log = logging.getLogger(__name__)
-
-
-class process_overlay_file(object):
-    """Returns the file information of the containing overlay data"""
-
-    def __init__(self, overlay_fullpath):
-        self.overlay_fullpath = overlay_fullpath
-
-    def run(self):
-        if not self.options.enabled:
-            return {}
-
-        if not path_exists(self.overlay_fullpath):
-            return {}
-
-        file_info, _ = File(file_path=self.overlay_fullpath).get_all()
-        return file_info
-
-
-class extract_overlay_data(Processing):
-    """Makes use of static.py's result to determine if there is overlay data. Only works for PE for now.
-    If overlay has been detected by static.py, we extract the whole data and save them in a file
-    @returns: Up to first 4096 bytes of overlay data added as part of the json, full data will need to be downloaded
-    """
-
-    # To tell CAPE to run this after first round of processing is done
-    order = 2
-
-    def run(self):
-        if "static" not in self.results:
-            return None
-
-        self.key = "static"  # uses the existing "static" sub container to add in the overlay data
-        output = self.results["static"]
-
-        if not output.get("pe", {}).get("overlay"):
-            return output
-
-        overlay_size = int(output["pe"]["overlay"]["size"], 16)
-
-        # Extract out the overlay data
-        try:
-            with open(self.file_path, "rb") as f:
-                f.seek(-overlay_size, os.SEEK_END)
-                data = f.read()
-            output["pe"]["overlay"]["data"] = base64.b64encode(data[: min(overlay_size, 4096)])
-
-            fld = os.path.join(self.analysis_path, "files")
-            if not path_exists(fld):
-                log.warning("Folder not present, creating it. Might affect the displaying of (overlay) results on the web")
-                path_mkdir(fld)
-
-            fld = os.path.join(fld, "extracted_overlay")
-            _ = path_write_file(fld, data)
-
-            output["pe"]["overlay"]["fileinfo"] = process_overlay_file(fld).run()
-
-        except Exception as e:
-            log.error(e)
-
-        return output
diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py
index 8c5352b09a5..b1437d8e435 100644
--- a/modules/processing/parsers/CAPE/Lumma.py
+++ b/modules/processing/parsers/CAPE/Lumma.py
@@ -1,12 +1,96 @@
+import base64
+import re
+
+import pefile
+
+
+def is_base64(s):
+    pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$")
+    if not s or len(s) < 1:
+        return False
+    else:
+        return pattern.match(s)
+
+
+def extract_strings(data, minchars):
+    endlimit = b"8192"
+    apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + endlimit + b"})\x00"
+    strings = [string.decode() for string in re.findall(apat, data)]
+    return strings
+
+
+def get_base64_strings(str_list):
+    base64_strings = []
+    for s in str_list:
+        if is_base64(s):
+            base64_strings.append(s)
+    return base64_strings
+
+
+def get_rdata(data):
+    rdata = None
+    pe = pefile.PE(data=data)
+    section_idx = 0
+    for section in pe.sections:
+        if section.Name == b".rdata\x00\x00":
+            rdata = pe.sections[section_idx].get_data()
+            break
+        section_idx += 1
+    return rdata
+
+
+def xor_data(data, key):
+    decoded = bytearray()
+    key_len = len(key)
+    for i in range(len(data)):
+        if i >= key_len:
+            break
+        decoded.append(data[i] ^ key[i])
+    return decoded
+
+
+def contains_non_printable(byte_array):
+    for byte in byte_array:
+        if not chr(byte).isprintable():
+            return True
+    return False
+
+
 def extract_config(data):
-    config_dict = {}
-    C2s = []
+    config_dict = {"C2": []}
+
     try:
         lines = data.decode().split("\n")
         for line in lines:
-            if "." in line and len(line) > 2:
-                C2s.append(line)
+            try:
+                if "." in line and len(line) > 2:
+                    if not contains_non_printable(line):
+                        config_dict["C2"].append(line)
+            except Exception:
+                continue
     except Exception:
-        return
-    config_dict["C2s"] = C2s
+        pass
+
+    # If no C2s were found with the old method,
+    # try the newer version's XOR decoding
+    if not config_dict["C2"]:
+        try:
+            rdata = get_rdata(data)
+            strings = extract_strings(rdata, 44)
+            base64_strings = get_base64_strings(strings)
+
+            for base64_str in base64_strings:
+                try:
+                    decoded_bytes = base64.b64decode(base64_str, validate=True)
+                    encoded_c2 = decoded_bytes[:32]
+                    xor_key = decoded_bytes[32:]
+                    decoded_c2 = xor_data(encoded_c2, xor_key)
+
+                    if not contains_non_printable(decoded_c2):
+                        config_dict["C2"].append(decoded_c2.decode())
+                except Exception:
+                    continue
+        except Exception:
+            return
+
     return config_dict
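To make the new decoding path concrete: each candidate is a base64 blob whose first 32 bytes are the XOR-encoded C2 and whose remainder is the key. A synthetic round-trip (the C2 and key below are invented; only the layout comes from the parser):

```python
# Synthetic example of the layout extract_config() expects: base64(xor(C2, key) + key),
# with the encoded C2 occupying the first 32 bytes. The C2 and key are made up.
import base64

c2 = b"hypothetical-c2.example.com/api\x00"  # padded to 32 bytes
key = bytes(range(32))
blob = base64.b64encode(bytes(a ^ b for a, b in zip(c2, key)) + key)

decoded = base64.b64decode(blob)
recovered = bytes(a ^ b for a, b in zip(decoded[:32], decoded[32:]))
print(recovered.rstrip(b"\x00").decode())  # hypothetical-c2.example.com/api
```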
diff --git a/utils/rooter.py b/utils/rooter.py
index 4c8175dfcec..717bdbdc762 100644
--- a/utils/rooter.py
+++ b/utils/rooter.py
@@ -188,6 +188,86 @@ def disable_nat(interface):
     run_iptables("-t", "nat", "-D", "POSTROUTING", "-o", interface, "-j", "MASQUERADE")
 
 
+def enable_mitmdump(interface, client, port):
+    """Enable mitmdump on this interface."""
+    run_iptables(
+        "-t",
+        "nat",
+        "-I",
+        "PREROUTING",
+        "-i",
+        interface,
+        "-s",
+        client,
+        "-p",
+        "tcp",
+        "--dport",
+        "443",
+        "-j",
+        "REDIRECT",
+        "--to-port",
+        port,
+    )
+    run_iptables(
+        "-t",
+        "nat",
+        "-I",
+        "PREROUTING",
+        "-i",
+        interface,
+        "-s",
+        client,
+        "-p",
+        "tcp",
+        "--dport",
+        "80",
+        "-j",
+        "REDIRECT",
+        "--to-port",
+        port,
+    )
+
+
+def disable_mitmdump(interface, client, port):
+    """Disable mitmdump on this interface."""
+    run_iptables(
+        "-t",
+        "nat",
+        "-D",
+        "PREROUTING",
+        "-i",
+        interface,
+        "-s",
+        client,
+        "-p",
+        "tcp",
+        "--dport",
+        "443",
+        "-j",
+        "REDIRECT",
+        "--to-port",
+        port,
+    )
+    run_iptables(
+        "-t",
+        "nat",
+        "-D",
+        "PREROUTING",
+        "-i",
+        interface,
+        "-s",
+        client,
+        "-p",
+        "tcp",
+        "--dport",
+        "80",
+        "-j",
+        "REDIRECT",
+        "--to-port",
+        port,
+    )
+
+
 def init_rttable(rt_table, interface):
     """Initialise routing table for this interface using routes from main
     table."""
@@ -674,6 +754,8 @@ def drop_disable(ipaddr, resultserver_port):
     "cleanup_vrf": cleanup_vrf,
     "add_dev_to_vrf": add_dev_to_vrf,
     "delete_dev_from_vrf": delete_dev_from_vrf,
+    "enable_mitmdump": enable_mitmdump,
+    "disable_mitmdump": disable_mitmdump,
 }
 
 if __name__ == "__main__":
diff --git a/web/apiv2/urls.py b/web/apiv2/urls.py
index 23571f3281c..aa512323add 100644
--- a/web/apiv2/urls.py
+++ b/web/apiv2/urls.py
@@ -51,6 +51,7 @@
     re_path(r"^tasks/get/evtx/(?P<task_id>\d+)/$", views.tasks_evtx),
     re_path(r"^tasks/get/dropped/(?P<task_id>\d+)/$", views.tasks_dropped),
     re_path(r"^tasks/get/surifile/(?P<task_id>\d+)/$", views.tasks_surifile),
+    re_path(r"^tasks/get/mitmdump/(?P<task_id>\d+)/$", views.tasks_mitmdump),
    re_path(r"^tasks/get/payloadfiles/(?P<task_id>\d+)/$", views.tasks_payloadfiles),
     re_path(r"^tasks/get/procdumpfiles/(?P<task_id>\d+)/$", views.tasks_procdumpfiles),
     re_path(r"^files/view/md5/(?P<md5>([a-fA-F\d]{32}))/$", views.files_view),
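The two rooter helpers boil down to REDIRECT rules in the nat table. For a guest at 192.168.122.100 on virbr0 with mitmdump listening on port 8001 (all values are examples), they correspond to the iptables commands shown in the comment below, and from CAPE code they are reached through the rooter client:

```python
# Illustrative: what enable_mitmdump amounts to for an example guest and port.
#   iptables -t nat -I PREROUTING -i virbr0 -s 192.168.122.100 -p tcp --dport 443 -j REDIRECT --to-port 8001
#   iptables -t nat -I PREROUTING -i virbr0 -s 192.168.122.100 -p tcp --dport 80 -j REDIRECT --to-port 8001
from lib.cuckoo.core.rooter import rooter

rooter("enable_mitmdump", "virbr0", "192.168.122.100", "8001")
# ... analysis runs ...
rooter("disable_mitmdump", "virbr0", "192.168.122.100", "8001")
```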
diff --git a/web/apiv2/views.py b/web/apiv2/views.py
index b744aafdb0a..882f4005cd0 100644
--- a/web/apiv2/views.py
+++ b/web/apiv2/views.py
@@ -1625,6 +1625,36 @@ def tasks_evtx(request, task_id):
     return Response(resp)
 
 
+@csrf_exempt
+@api_view(["GET"])
+def tasks_mitmdump(request, task_id):
+    if not apiconf.taskmitmdump.get("enabled"):
+        resp = {"error": True, "error_value": "Mitmdump HAR download API is disabled"}
+        return Response(resp)
+
+    check = validate_task(task_id)
+    if check["error"]:
+        return Response(check)
+
+    rtid = check.get("rtid", 0)
+    if rtid:
+        task_id = rtid
+
+    harfile = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id, "mitmdump", "dump.har")
+    if not os.path.normpath(harfile).startswith(ANALYSIS_BASE_PATH):
+        return render(request, "error.html", {"error": f"File not found: {os.path.basename(harfile)}"})
+    if path_exists(harfile):
+        fname = "%s_dump.har" % task_id
+        resp = StreamingHttpResponse(FileWrapper(open(harfile, "rb")), content_type="text/plain")
+        resp["Content-Length"] = os.path.getsize(harfile)
+        resp["Content-Disposition"] = "attachment; filename=" + fname
+        return resp
+
+    else:
+        resp = {"error": True, "error_value": "HAR file does not exist"}
+        return Response(resp)
+
+
 @csrf_exempt
 @api_view(["GET"])
 def tasks_dropped(request, task_id):
diff --git a/web/submission/views.py b/web/submission/views.py
index 39c25136c3a..a8c9e7efccd 100644
--- a/web/submission/views.py
+++ b/web/submission/views.py
@@ -312,6 +312,9 @@ def index(request, task_id=None, resubmit_hash=None):
         if request.POST.get("nohuman"):
             options += "nohuman=yes,"
 
+        if request.POST.get("mitmdump"):
+            options += "mitmdump=yes,"
+
         if web_conf.guacamole.enabled and request.POST.get("interactive"):
             remote_console = True
             options += "interactive=1,"
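Once a task submitted with mitmdump=yes finishes, the HAR can be pulled through the new endpoint. A sketch using requests (base URL and authentication depend on the local apiv2 setup and are assumptions here):

```python
# Illustrative client for the new endpoint; the base URL and auth handling are assumptions.
import requests

base = "http://127.0.0.1:8000/apiv2"
task_id = 1234
r = requests.get(f"{base}/tasks/get/mitmdump/{task_id}/", timeout=30)
r.raise_for_status()
with open(f"{task_id}_dump.har", "wb") as f:
    f.write(r.content)
```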
diff --git a/web/templates/analysis/overlay/index.html b/web/templates/analysis/overlay/index.html
deleted file mode 100644
index 361facc9e84..00000000000
--- a/web/templates/analysis/overlay/index.html
+++ /dev/null
@@ -1,74 +0,0 @@
[74 deleted lines: the overlay template rendered analysis.static.pe.overlay as a file-info table (File name, File Size, File Type, MD5, SHA1, SHA256, optional SHA3-384, CRC32, optional TLSH, Ssdeep), a Download link, an optional "Display Overlay Data (Up to 4KB)" block showing overlay.data, and a "Sorry! No overlay information." fallback; markup omitted.]
diff --git a/web/templates/analysis/report.html b/web/templates/analysis/report.html
index 7988fa1f1ad..ed08b01984f 100644
--- a/web/templates/analysis/report.html
+++ b/web/templates/analysis/report.html
@@ -85,9 +85,6 @@
         {% if analysis.dropped %}
         {% endif %}
-        {% if analysis.static.pe.overlay.fileinfo %}
-
-        {% endif %}
         {% if analysis.procmemory %}
         {% endif %}
@@ -193,10 +190,5 @@
         {% include "analysis/admin/index.html" %}
         {% endif %}
-        {% if analysis.static.pe.overlay.fileinfo %}
-
-            {% include "analysis/overlay/index.html" %}
-
-        {% endif %}
 {% endblock %}
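The downloaded dump.har follows the standard HAR log/entries layout, so the captured traffic can be inspected without mitmproxy installed. An illustrative reader:

```python
# Illustrative: walk the requests captured in the HAR produced by mitmdump's hardump option.
import json

with open("dump.har") as f:
    har = json.load(f)

for entry in har["log"]["entries"]:
    req, resp = entry["request"], entry["response"]
    print(req["method"], req["url"], resp["status"])
```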