From 6cd365a16f016015da94713e31f57f9c95cfb5b9 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 00:39:32 +0000 Subject: [PATCH 01/18] Add Lumma C2 extractor --- data/yara/CAPE/Lumma.yar | 38 ++++++++---- modules/processing/parsers/CAPE/Lumma.py | 73 +++++++++++++++++++++--- 2 files changed, 93 insertions(+), 18 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 4be2af810bc..8da6997b6f2 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -1,14 +1,30 @@ rule Lumma { - meta: - author = "kevoreilly" - description = "Lumma Payload" - cape_type = "Lumma Payload" - packed = "0ee580f0127b821f4f1e7c032cf76475df9724a9fade2e153a69849f652045f8" - strings: - $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} - $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} - $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} - condition: - uint16(0) == 0x5a4d and any of them + meta: + author = "YungBinary" + description = "Lumma stealer" + cape_type = "Lumma Payload" + packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" + strings: + $chunk_1 = { + 0F B6 14 0E + 89 CF + 83 E7 1F + 0F B6 7C 3C ?? + 89 D3 + 31 FB + 83 F3 FF + 89 FD + 21 DD + D1 E5 + 29 FD + 29 EA + 8B 5C 24 ?? + 88 14 0B + EB ?? + } + + condition: + uint16(0) == 0x5a4d and $chunk_1 + } diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index 8c5352b09a5..6b0c4d9db67 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -1,12 +1,71 @@ +import base64 +import pefile +import re + +def is_base64(s): + pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$") + if not s or len(s) < 1: + return False + else: + return pattern.match(s) + +def extract_strings(data, minchars): + endlimit = b"8192" + apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + endlimit + b"})\x00" + upat = b"((?:[\x20-\x7e][\x00]){" + str(minchars).encode() + b"," + endlimit + b"})\x00\x00" + strings = [string.decode() for string in re.findall(apat, data)] + return strings + +def get_base64_strings(str_list): + base64_strings = [] + for s in str_list: + if is_base64(s): + base64_strings.append(s) + return base64_strings + +def get_rdata(data): + rdata = None + pe = pefile.PE(data=data) + section_idx = 0 + for section in pe.sections: + if section.Name == b'.rdata\x00\x00': + rdata = pe.sections[section_idx].get_data() + break + section_idx += 1 + return rdata + +def xor_data(data, key): + decoded = bytearray() + key_len = len(key) + for i in range(len(data)): + if i >= key_len: + break + decoded.append(data[i] ^ key[i]) + return decoded + +def contains_non_printable(byte_array): + for byte in byte_array: + if not chr(byte).isprintable(): + return True + return False + def extract_config(data): - config_dict = {} - C2s = [] + config_dict = {"C2": []} try: - lines = data.decode().split("\n") - for line in lines: - if "." 
in line and len(line) > 2: - C2s.append(line) + rdata = get_rdata(data) + strings = extract_strings(rdata, 44) + base64_strings = get_base64_strings(strings) + for base64_str in base64_strings: + decoded_bytes = base64.b64decode(base64_str, validate=True) + encoded_c2 = decoded_bytes[:32] + xor_key = decoded_bytes[32:] + + decoded_c2 = xor_data(encoded_c2, xor_key) + + if not contains_non_printable(decoded_c2): + config_dict["C2"].append(decoded_c2.decode()) + except Exception: return - config_dict["C2s"] = C2s + return config_dict From fb3d6e95afdd29a36a491ea43b42e8a033af4f8a Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 00:42:42 +0000 Subject: [PATCH 02/18] Fix unused var --- modules/processing/parsers/CAPE/Lumma.py | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index 6b0c4d9db67..1aa00c5d862 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -12,7 +12,6 @@ def is_base64(s): def extract_strings(data, minchars): endlimit = b"8192" apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + endlimit + b"})\x00" - upat = b"((?:[\x20-\x7e][\x00]){" + str(minchars).encode() + b"," + endlimit + b"})\x00\x00" strings = [string.decode() for string in re.findall(apat, data)] return strings From b75b80af21e17cc73da4794e0c9481dbcc349556 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:40:04 +0000 Subject: [PATCH 03/18] Backwards compatible yara rule add kev author --- data/yara/CAPE/Lumma.yar | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 8da6997b6f2..8b27eb42a9c 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -1,12 +1,15 @@ rule Lumma { meta: - author = "YungBinary" + author = "kevoreilly,YungBinary" description = "Lumma stealer" cape_type = "Lumma Payload" packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" strings: - $chunk_1 = { + $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} + $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} + $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} + $xor_decode = { 0F B6 14 0E 89 CF 83 E7 1F @@ -25,6 +28,6 @@ rule Lumma } condition: - uint16(0) == 0x5a4d and $chunk_1 + uint16(0) == 0x5a4d and any of them } From 7ef999bd644cbef7cdaa3e8c5b7e85549dbb1cc7 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:41:49 +0000 Subject: [PATCH 04/18] Tabs -> Spaces --- data/yara/CAPE/Lumma.yar | 50 ++++++++++++++++++++-------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 8b27eb42a9c..a0b9ebcb410 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -1,33 +1,33 @@ rule Lumma { - meta: - author = "kevoreilly,YungBinary" - description = "Lumma stealer" - cape_type = "Lumma Payload" - packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" - strings: - $c2 = {8D 44 24 ?? 50 89 4C 24 ?? 
FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} + meta: + author = "kevoreilly,YungBinary" + description = "Lumma stealer" + cape_type = "Lumma Payload" + packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" + strings: + $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} - $xor_decode = { - 0F B6 14 0E - 89 CF - 83 E7 1F - 0F B6 7C 3C ?? - 89 D3 - 31 FB - 83 F3 FF - 89 FD - 21 DD - D1 E5 - 29 FD - 29 EA - 8B 5C 24 ?? - 88 14 0B - EB ?? - } + $xor_decode = { + 0F B6 14 0E + 89 CF + 83 E7 1F + 0F B6 7C 3C ?? + 89 D3 + 31 FB + 83 F3 FF + 89 FD + 21 DD + D1 E5 + 29 FD + 29 EA + 8B 5C 24 ?? + 88 14 0B + EB ?? + } condition: - uint16(0) == 0x5a4d and any of them + uint16(0) == 0x5a4d and any of them } From bdde20686a652e9809a664c95148ce3b088f90ab Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:43:43 +0000 Subject: [PATCH 05/18] Tabs -> Spaces --- data/yara/CAPE/Lumma.yar | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index a0b9ebcb410..3a74f1d0023 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -2,7 +2,7 @@ rule Lumma { meta: author = "kevoreilly,YungBinary" - description = "Lumma stealer" + description = "Lumma Payload" cape_type = "Lumma Payload" packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" strings: @@ -10,24 +10,24 @@ rule Lumma $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} $xor_decode = { - 0F B6 14 0E - 89 CF - 83 E7 1F - 0F B6 7C 3C ?? - 89 D3 - 31 FB - 83 F3 FF - 89 FD - 21 DD - D1 E5 - 29 FD - 29 EA - 8B 5C 24 ?? - 88 14 0B - EB ?? + 0F B6 14 0E + 89 CF + 83 E7 1F + 0F B6 7C 3C ?? + 89 D3 + 31 FB + 83 F3 FF + 89 FD + 21 DD + D1 E5 + 29 FD + 29 EA + 8B 5C 24 ?? + 88 14 0B + EB ?? } - condition: + condition: uint16(0) == 0x5a4d and any of them } From d9e5b424d3096c6cbcda3a584080393023b346e1 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:44:09 +0000 Subject: [PATCH 06/18] Fix new line --- data/yara/CAPE/Lumma.yar | 1 - 1 file changed, 1 deletion(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 3a74f1d0023..dad8967c68b 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -29,5 +29,4 @@ rule Lumma condition: uint16(0) == 0x5a4d and any of them - } From b9dd1e773d4198b795bab2929574a684ce638a76 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:45:49 +0000 Subject: [PATCH 07/18] Fix order --- data/yara/CAPE/Lumma.yar | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index dad8967c68b..e4cb735628a 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -6,10 +6,7 @@ rule Lumma cape_type = "Lumma Payload" packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" strings: - $c2 = {8D 44 24 ?? 50 89 4C 24 ?? 
FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} - $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} - $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} - $xor_decode = { + $xor_decode = { 0F B6 14 0E 89 CF 83 E7 1F @@ -26,6 +23,9 @@ rule Lumma 88 14 0B EB ?? } + $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} + $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} + $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} condition: uint16(0) == 0x5a4d and any of them From 3a4157176b3e2e79d141d9a5310ca9885165e5dd Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 08:54:54 +0000 Subject: [PATCH 08/18] Maintain backwards compatibility --- modules/processing/parsers/CAPE/Lumma.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index 1aa00c5d862..e1ab1e0715a 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -51,15 +51,23 @@ def contains_non_printable(byte_array): def extract_config(data): config_dict = {"C2": []} try: - rdata = get_rdata(data) - strings = extract_strings(rdata, 44) - base64_strings = get_base64_strings(strings) - for base64_str in base64_strings: - decoded_bytes = base64.b64decode(base64_str, validate=True) - encoded_c2 = decoded_bytes[:32] - xor_key = decoded_bytes[32:] + lines = data.decode().split("\n") + for line in lines: + if "." in line and len(line) > 2: + if not contains_non_printable(line): + config_dict["C2"].append(line) - decoded_c2 = xor_data(encoded_c2, xor_key) + # If no C2s with the old method, + # try with newer version xor decoding + if not config_dict["C2"]: + rdata = get_rdata(data) + strings = extract_strings(rdata, 44) + base64_strings = get_base64_strings(strings) + for base64_str in base64_strings: + decoded_bytes = base64.b64decode(base64_str, validate=True) + encoded_c2 = decoded_bytes[:32] + xor_key = decoded_bytes[32:] + decoded_c2 = xor_data(encoded_c2, xor_key) if not contains_non_printable(decoded_c2): config_dict["C2"].append(decoded_c2.decode()) From c5e2e03ee777aa3ca7c497ea9406cf003e686dc1 Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 09:05:54 +0000 Subject: [PATCH 09/18] Fix --- data/yara/CAPE/Lumma.yar | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index e4cb735628a..83e318d8219 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -6,7 +6,7 @@ rule Lumma cape_type = "Lumma Payload" packed = "5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" strings: - $xor_decode = { + $xor_decode = { 0F B6 14 0E 89 CF 83 E7 1F From cd03becb2f4841588394bbad420af862f570c41b Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 09:10:16 +0000 Subject: [PATCH 10/18] Fix backwards compat --- modules/processing/parsers/CAPE/Lumma.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index e1ab1e0715a..a747c7f43d0 100644 --- 
a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -56,10 +56,13 @@ def extract_config(data): if "." in line and len(line) > 2: if not contains_non_printable(line): config_dict["C2"].append(line) + except Exception: + pass - # If no C2s with the old method, - # try with newer version xor decoding - if not config_dict["C2"]: + # If no C2s with the old method, + # try with newer version xor decoding + if not config_dict["C2"]: + try: rdata = get_rdata(data) strings = extract_strings(rdata, 44) base64_strings = get_base64_strings(strings) @@ -72,7 +75,7 @@ def extract_config(data): if not contains_non_printable(decoded_c2): config_dict["C2"].append(decoded_c2.decode()) - except Exception: - return + except Exception: + pass return config_dict From d031f4917ff895a834327dd5e5ef90b85801163e Mon Sep 17 00:00:00 2001 From: Yung Binary <93540406+YungBinary@users.noreply.github.com> Date: Sun, 20 Oct 2024 09:43:00 +0000 Subject: [PATCH 11/18] Fix --- modules/processing/parsers/CAPE/Lumma.py | 29 +++++++++++++++--------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index a747c7f43d0..c72db668a87 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -50,12 +50,16 @@ def contains_non_printable(byte_array): def extract_config(data): config_dict = {"C2": []} + try: lines = data.decode().split("\n") for line in lines: - if "." in line and len(line) > 2: - if not contains_non_printable(line): - config_dict["C2"].append(line) + try: + if "." in line and len(line) > 2: + if not contains_non_printable(line): + config_dict["C2"].append(line) + except Exception: + continue except Exception: pass @@ -66,16 +70,19 @@ def extract_config(data): rdata = get_rdata(data) strings = extract_strings(rdata, 44) base64_strings = get_base64_strings(strings) - for base64_str in base64_strings: - decoded_bytes = base64.b64decode(base64_str, validate=True) - encoded_c2 = decoded_bytes[:32] - xor_key = decoded_bytes[32:] - decoded_c2 = xor_data(encoded_c2, xor_key) - if not contains_non_printable(decoded_c2): - config_dict["C2"].append(decoded_c2.decode()) + for base64_str in base64_strings: + try: + decoded_bytes = base64.b64decode(base64_str, validate=True) + encoded_c2 = decoded_bytes[:32] + xor_key = decoded_bytes[32:] + decoded_c2 = xor_data(encoded_c2, xor_key) + if not contains_non_printable(decoded_c2): + config_dict["C2"].append(decoded_c2.decode()) + except Exception: + continue except Exception: - pass + return return config_dict From f289fcfa5bd7f121d6c52b6efba5a4630ba87c43 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Tue, 22 Oct 2024 13:58:37 +0200 Subject: [PATCH 12/18] overlay (#2364) --- conf/default/processing.conf.default | 5 -- conf/default/selfextract.conf.default | 5 ++ .../common/integrations/file_extra_info.py | 3 - .../file_extra_info_modules/overlay.py | 35 +++++++++ modules/processing/overlay.py | 70 ------------------ web/templates/analysis/overlay/index.html | 74 ------------------- 6 files changed, 40 insertions(+), 152 deletions(-) create mode 100644 lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py delete mode 100644 modules/processing/overlay.py delete mode 100644 web/templates/analysis/overlay/index.html diff --git a/conf/default/processing.conf.default b/conf/default/processing.conf.default index 0a83dda5407..dbd297d6286 100644 --- a/conf/default/processing.conf.default +++ 
b/conf/default/processing.conf.default @@ -321,11 +321,6 @@ key = [script_log_processing] enabled = yes -# Community -# Dump PE's overlay info -[overlay] -enabled = no - # Community [floss] enabled = no diff --git a/conf/default/selfextract.conf.default b/conf/default/selfextract.conf.default index 638bc1b4f49..b39903710fe 100644 --- a/conf/default/selfextract.conf.default +++ b/conf/default/selfextract.conf.default @@ -89,3 +89,8 @@ timeout = 60 [msix_extract] enabled = no timeout = 60 + +# PE file overlay +[overlay] +enabled = yes +timeout = 60 diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py index 4e65c4bd18c..b49d5d31415 100644 --- a/lib/cuckoo/common/integrations/file_extra_info.py +++ b/lib/cuckoo/common/integrations/file_extra_info.py @@ -112,9 +112,6 @@ HAVE_BAT_DECODER = False print("OPTIONAL! Missed dependency: poetry run pip install -U git+https://github.com/DissectMalware/batch_deobfuscator") -processing_conf = Config("processing") -selfextract_conf = Config("selfextract") - unautoit_binary = os.path.join(CUCKOO_ROOT, selfextract_conf.UnAutoIt_extract.binary) if processing_conf.trid.enabled: diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py new file mode 100644 index 00000000000..36518ede500 --- /dev/null +++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py @@ -0,0 +1,35 @@ +import os +import logging +# from base64 import b64encode + +from lib.cuckoo.common.path_utils import path_write_file +from lib.cuckoo.common.integrations.file_extra_info_modules import time_tracker, ExtractorReturnType, extractor_ctx, collect_extracted_filenames + +log = logging.getLogger(__name__) + + +@time_tracker +def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType: + + if not data_dictionary.get("pe", {}).get("overlay"): + return {} + + data = "" + overlay_size = int(data_dictionary["pe"]["overlay"]["size"], 16) + # Extract out the overlay data + try: + with open(file, "rb") as f: + f.seek(-overlay_size, os.SEEK_END) + data = f.read() + # data_dictionary["pe"]["overlay"]["data"] = b64encode(data[: min(overlay_size, 4096)]) + except Exception as e: + log.error(e) + + + with extractor_ctx(file, "overlay", prefix="overlay") as ctx: + if data: + tempdir = ctx["tempdir"] + # You might need to change this 2 lines. 
See other examples in `file_extra_info.py` + _ = path_write_file(os.path.join(tempdir, "overlay"), data) + ctx["extracted_files"] = collect_extracted_filenames(tempdir) + return ctx diff --git a/modules/processing/overlay.py b/modules/processing/overlay.py deleted file mode 100644 index 7de6ade5372..00000000000 --- a/modules/processing/overlay.py +++ /dev/null @@ -1,70 +0,0 @@ -import base64 -import logging -import os - -from lib.cuckoo.common.abstracts import Processing -from lib.cuckoo.common.objects import File -from lib.cuckoo.common.path_utils import path_exists, path_mkdir, path_write_file - -log = logging.getLogger(__name__) - - -class process_overlay_file(object): - """Returns the file information of the containing overlay data""" - - def __init__(self, overlay_fullpath): - self.overlay_fullpath = overlay_fullpath - - def run(self): - if not self.options.enabled: - return {} - - if not path_exists(self.overlay_fullpath): - return {} - - file_info, _ = File(file_path=self.overlay_fullpath).get_all() - return file_info - - -class extract_overlay_data(Processing): - """Makes use of static.py's result to determine if there is overlay data. Only works for PE for now. - If overlay has been detected by static.py, we extract the whole data and save them in a file - @returns: Up to first 4096 bytes of overlay data added as part of the json, full data will need to be downloaded - """ - - # To tell CAPE to run this after first round of processing is done - order = 2 - - def run(self): - if "static" not in self.results: - return None - - self.key = "static" # uses the existing "static" sub container to add in the overlay data - output = self.results["static"] - - if not output.get("pe", {}).get("overlay"): - return output - - overlay_size = int(output["pe"]["overlay"]["size"], 16) - - # Extract out the overlay data - try: - with open(self.file_path, "rb") as f: - f.seek(-overlay_size, os.SEEK_END) - data = f.read() - output["pe"]["overlay"]["data"] = base64.b64encode(data[: min(overlay_size, 4096)]) - - fld = os.path.join(self.analysis_path, "files") - if not path_exists(fld): - log.warning("Folder not present, creating it. Might affect the displaying of (overlay) results on the web") - path_mkdir(fld) - - fld = os.path.join(fld, "extracted_overlay") - _ = path_write_file(fld, data) - - output["pe"]["overlay"]["fileinfo"] = process_overlay_file(fld).run() - - except Exception as e: - log.error(e) - - return output diff --git a/web/templates/analysis/overlay/index.html b/web/templates/analysis/overlay/index.html deleted file mode 100644 index 361facc9e84..00000000000 --- a/web/templates/analysis/overlay/index.html +++ /dev/null @@ -1,74 +0,0 @@ -{% with overlay=analysis.static.pe.overlay %} -{% with fileinfo=overlay.fileinfo %} - -{%if fileinfo %} -
-<table>
-    <tr><th>File name</th><td>{{fileinfo.name|safe}}</td></tr>
-    <tr><th>File Size</th><td>{{fileinfo.size}} bytes</td></tr>
-    <tr><th>File Type</th><td>{{fileinfo.type}}</td></tr>
-    <tr><th>MD5</th><td>{{fileinfo.md5}}</td></tr>
-    <tr><th>SHA1</th><td>{{fileinfo.sha1}}</td></tr>
-    <tr><th>SHA256</th><td>{{fileinfo.sha256}}</td></tr>
-    {% if fileinfo.sha3_384 %}
-    <tr><th>SHA3-384</th><td>{{fileinfo.sha3_384}}</td></tr>
-    {% endif %}
-    <tr><th>CRC32</th><td>{{fileinfo.crc32}}</td></tr>
-    {% if fileinfo.tlsh %}
-    <tr><th>TLSH</th><td>{{fileinfo.tlsh}}</td></tr>
-    {% endif %}
-    <tr><th>Ssdeep</th><td>{{fileinfo.ssdeep}}</td></tr>
-    <tr>
-        <td>
-            Download
-            {% if overlay.data %}
-            Display Overlay Data (Up to 4KB)
-            {% endif %}
-        </td>
-    </tr>
-    {% if overlay.data %}
-    <tr><td>{{overlay.data}}</td></tr>
-    {% endif %}
-</table>
-{%else%}
-    Sorry! No overlay information.
-{% endif %} - -{% endwith %} -{% endwith %} From d0c133f9c3f0d7d05516aebf7327c1e9dd567605 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 22 Oct 2024 11:59:29 +0000 Subject: [PATCH 13/18] style: Automatic code formatting --- .../file_extra_info_modules/overlay.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py index 36518ede500..540a97d2e22 100644 --- a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py +++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py @@ -1,9 +1,16 @@ -import os import logging -# from base64 import b64encode +import os +from lib.cuckoo.common.integrations.file_extra_info_modules import ( + ExtractorReturnType, + collect_extracted_filenames, + extractor_ctx, + time_tracker, +) from lib.cuckoo.common.path_utils import path_write_file -from lib.cuckoo.common.integrations.file_extra_info_modules import time_tracker, ExtractorReturnType, extractor_ctx, collect_extracted_filenames + +# from base64 import b64encode + log = logging.getLogger(__name__) @@ -25,7 +32,6 @@ def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType: except Exception as e: log.error(e) - with extractor_ctx(file, "overlay", prefix="overlay") as ctx: if data: tempdir = ctx["tempdir"] From a0a3364e94d450d763ed23670ed7e16cfcde8c31 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Tue, 22 Oct 2024 14:00:02 +0200 Subject: [PATCH 14/18] overlay --- web/templates/analysis/report.html | 8 -------- 1 file changed, 8 deletions(-) diff --git a/web/templates/analysis/report.html b/web/templates/analysis/report.html index 7988fa1f1ad..ed08b01984f 100644 --- a/web/templates/analysis/report.html +++ b/web/templates/analysis/report.html @@ -85,9 +85,6 @@ {% if analysis.dropped %} {% endif %} - {% if analysis.static.pe.overlay.fileinfo %} - - {% endif %} {% if analysis.procmemory %} {% endif %} @@ -193,10 +190,5 @@ {% include "analysis/admin/index.html" %} {% endif %} - {% if analysis.static.pe.overlay.fileinfo %} -
- {% include "analysis/overlay/index.html" %} -
- {% endif %} {% endblock %} From ec5bcbe6ed57d422e2de21f1a9315ef65e06ca63 Mon Sep 17 00:00:00 2001 From: David Santos <44490090+dsecuma@users.noreply.github.com> Date: Tue, 22 Oct 2024 14:54:32 +0200 Subject: [PATCH 15/18] Mitmdump support (#2365) --- conf/default/auxiliary.conf.default | 6 ++ conf/default/mitmdump.conf.default | 11 +++ installer/cape2.sh | 14 ++++ modules/auxiliary/Mitmdump.py | 111 ++++++++++++++++++++++++++++ utils/rooter.py | 25 +++++++ web/apiv2/urls.py | 1 + web/apiv2/views.py | 30 ++++++++ web/submission/views.py | 3 + 8 files changed, 201 insertions(+) create mode 100644 conf/default/mitmdump.conf.default create mode 100644 modules/auxiliary/Mitmdump.py diff --git a/conf/default/auxiliary.conf.default b/conf/default/auxiliary.conf.default index 4f6029ba9b6..ec8aeb84c73 100644 --- a/conf/default/auxiliary.conf.default +++ b/conf/default/auxiliary.conf.default @@ -77,3 +77,9 @@ bpf = not arp # Enable or disable the use of QEMU as screenshot capture [yes/no]. # screenshots_linux and screenshots_windows must be disabled enabled = no + +[Mitmdump] +# Enable or disable the use of mitmdump (mitmproxy) to get dump.har [yes/no]. +# This module requires installed mitmproxy see install_mitmproxy +# (https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh#L1320) +enabled = no diff --git a/conf/default/mitmdump.conf.default b/conf/default/mitmdump.conf.default new file mode 100644 index 00000000000..703da46560a --- /dev/null +++ b/conf/default/mitmdump.conf.default @@ -0,0 +1,11 @@ +[cfg] +# bin path to mitmdump +bin = /opt/mitmproxy/mitmdump + +# Host ip where mitmdump is listening +host = 127.0.0.1 + +# Interface where mitmdump is listening +interface = virbr0 + +# Future options like custom ports, cert paths, etc diff --git a/installer/cape2.sh b/installer/cape2.sh index 055dfcfa5ef..e9cb4cd5b98 100644 --- a/installer/cape2.sh +++ b/installer/cape2.sh @@ -1317,6 +1317,17 @@ function install_volatility3() { chown "${USER}:${USER}" $vol_path -R } +function install_mitmproxy() { + sudo mkdir /opt/mitmproxy + sudo chown ${USER}:${USER} /opt/mitmproxy + cd /opt/mitmproxy + mitmproxy_version=$(curl -s https://api.github.com/repos/mitmproxy/mitmproxy/releases/latest | grep '"tag_name":' | cut -d '"' -f 4 | sed 's/^v//') + wget https://downloads.mitmproxy.org/"$mitmproxy_version"/mitmproxy-"$mitmproxy_version"-linux-x86_64.tar.gz -O mitmproxy.tar.gz + tar xvzf mitmproxy.tar.gz + rm mitmproxy.tar.gz + chown "${USER}:${USER}" /opt/mitmproxy -R +} + function install_guacamole() { # Kudos to @Enzok https://github.com/kevoreilly/CAPEv2/pull/1065 # https://guacamole.apache.org/doc/gug/installing-guacamole.html @@ -1451,6 +1462,7 @@ case "$COMMAND" in install_systemd install_jemalloc install_logrotate + install_mitmproxy #socksproxies is to start redsocks stuff if [ -f /opt/CAPEv2/socksproxies.sh ]; then crontab -l | { cat; echo "@reboot /opt/CAPEv2/socksproxies.sh"; } | crontab - @@ -1501,6 +1513,8 @@ case "$COMMAND" in librenms_snmpd_config;; 'librenms_sneck_config') librenms_sneck_config;; +'mitmproxy') + install_mitmproxy;; 'issues') issues;; 'nginx') diff --git a/modules/auxiliary/Mitmdump.py b/modules/auxiliary/Mitmdump.py new file mode 100644 index 00000000000..1e632978288 --- /dev/null +++ b/modules/auxiliary/Mitmdump.py @@ -0,0 +1,111 @@ +# Copyright (C) 2024 davidsb@virustotal.com +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. 
+# This module runs mitmdump to get a HAR file +# mitmdump is behind mitmproxy project https://mitmproxy.org/ + +import logging +import os +import socket +import subprocess +from threading import Thread + +from lib.cuckoo.common.abstracts import Auxiliary +from lib.cuckoo.common.config import Config +from lib.cuckoo.common.constants import CUCKOO_ROOT +from lib.cuckoo.core.rooter import rooter + +mitmdump = Config("mitmdump") + +log = logging.getLogger(__name__) + +class Mitmdump(Auxiliary): + """Module for generating HAR with Mitmdump.""" + + def __init__(self): + Auxiliary.__init__(self) + Thread.__init__(self) + log.info("Mitmdump module loaded") + self.mitmdump_thread = None + + + def start(self): + """Start mitmdump in a separate thread.""" + + self.mitmdump_thread = MitmdumpThread(self.task, self.machine) + self.mitmdump_thread.start() + return True + + def stop(self): + """Stop mitmdump capture thread.""" + if self.mitmdump_thread: + self.mitmdump_thread.stop() + + +class MitmdumpThread(Thread): + """Thread responsible for control mitmdump service for each analysis.""" + + def __init__(self, task, machine): + Thread.__init__(self) + self.task = task + self.machine = machine + self.do_run = True + self.host_ip = mitmdump.cfg.get("host") + self.host_iface = mitmdump.cfg.get("interface") + self.mitmdump_bin = mitmdump.cfg.get("bin") + self.proc = None + self.host_port = self._get_unused_port() + self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump") + + def stop(self): + """Set stop mitmdump capture.""" + self.do_run = False + + if self.proc and self.proc.poll() is None: + self.proc.terminate() + self.proc.wait() + log.info("Stopping mitmdump") + + try: + rooter("disable_mitmdump",self.host_iface, self.machine.ip, self.host_port) + except subprocess.CalledProcessError as e: + log.error("Failed to execute firewall rules: %s", e) + + def run(self): + """Core function to the manage the module""" + if "mitmdump" not in self.task.options: + log.info("Exiting mitmdump. No parameter received.") + return + + if self.do_run: + if not self.host_port: + log.exception("All ports in range are in use") + return + + try: + rooter("enable_mitmdump",self.host_iface, self.machine.ip, self.host_port) + except subprocess.CalledProcessError as e: + log.error("Failed to execute firewall rules: %s", e) + + try: + mitmdump_args = [] + os.makedirs(self.mitmdump_path, exist_ok=True) + file_path = os.path.join(self.mitmdump_path, "dump.har") + mitmdump_args.extend([self.mitmdump_bin, "-q", "--listen-host", self.host_ip, "-p", str(self.host_port), "--set", "hardump=", file_path]) + mitmdump_args[-2:] = ["".join(mitmdump_args[-2:])] # concatenate the last two arguments, otherwise the HAR file will not be created. 
+ self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False) + except (OSError, ValueError): + log.exception("Failed to mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path) + return + + log.info("Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)", self.proc.pid, self.host_ip, self.host_port, file_path) + + + def _get_unused_port(self) -> str | None: + """Return the first unused TCP port from the set.""" + ports = set(range(8001, 8081)) + for port in ports: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + if s.connect_ex((self.host_ip, port)) != 0: + return str(port) + return None diff --git a/utils/rooter.py b/utils/rooter.py index 4c8175dfcec..d07fcec45c1 100644 --- a/utils/rooter.py +++ b/utils/rooter.py @@ -188,6 +188,29 @@ def disable_nat(interface): run_iptables("-t", "nat", "-D", "POSTROUTING", "-o", interface, "-j", "MASQUERADE") +def enable_mitmdump(interface, client, port): + """Enable mitmdump on this interface.""" + run_iptables( + "-t", "nat", "-I", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "443", + "-j", "REDIRECT", "--to-port", port + ) + run_iptables( + "-t", "nat", "-I", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "80", + "-j", "REDIRECT", "--to-port", port + ) + +def disable_mitmdump(interface, client, port): + """Disable mitmdump on this interface.""" + run_iptables( + "-t", "nat", "-D", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "443", + "-j", "REDIRECT", "--to-port", port + ) + run_iptables( + "-t", "nat", "-D", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "80", + "-j", "REDIRECT", "--to-port", port + ) + + def init_rttable(rt_table, interface): """Initialise routing table for this interface using routes from main table.""" @@ -674,6 +697,8 @@ def drop_disable(ipaddr, resultserver_port): "cleanup_vrf": cleanup_vrf, "add_dev_to_vrf": add_dev_to_vrf, "delete_dev_from_vrf": delete_dev_from_vrf, + "enable_mitmdump": enable_mitmdump, + "disable_mitmdump": disable_mitmdump, } if __name__ == "__main__": diff --git a/web/apiv2/urls.py b/web/apiv2/urls.py index 23571f3281c..aa512323add 100644 --- a/web/apiv2/urls.py +++ b/web/apiv2/urls.py @@ -51,6 +51,7 @@ re_path(r"^tasks/get/evtx/(?P\d+)/$", views.tasks_evtx), re_path(r"^tasks/get/dropped/(?P\d+)/$", views.tasks_dropped), re_path(r"^tasks/get/surifile/(?P\d+)/$", views.tasks_surifile), + re_path(r"^tasks/get/mitmdump/(?P\d+)/$", views.tasks_mitmdump), re_path(r"^tasks/get/payloadfiles/(?P\d+)/$", views.tasks_payloadfiles), re_path(r"^tasks/get/procdumpfiles/(?P\d+)/$", views.tasks_procdumpfiles), re_path(r"^files/view/md5/(?P([a-fA-F\d]{32}))/$", views.files_view), diff --git a/web/apiv2/views.py b/web/apiv2/views.py index b744aafdb0a..882f4005cd0 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -1625,6 +1625,36 @@ def tasks_evtx(request, task_id): return Response(resp) +@csrf_exempt +@api_view(["GET"]) +def tasks_mitmdump(request, task_id): + if not apiconf.taskmitmdump.get("enabled"): + resp = {"error": True, "error_value": "Mitmdump HAR download API is disabled"} + return Response(resp) + + check = validate_task(task_id) + if check["error"]: + return Response(check) + + rtid = check.get("rtid", 0) + if rtid: + task_id = rtid + + harfile = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id, "mitmdump", "dump.har") + if not os.path.normpath(harfile).startswith(ANALYSIS_BASE_PATH): + return render(request, "error.html", 
{"error": f"File not found: {os.path.basename(harfile)}"}) + if path_exists(harfile): + fname = "%s_dump.har" % task_id + resp = StreamingHttpResponse(FileWrapper(open(harfile, "rb")), content_type="text/plain") + resp["Content-Length"] = os.path.getsize(harfile) + resp["Content-Disposition"] = "attachment; filename=" + fname + return resp + + else: + resp = {"error": True, "error_value": "HAR file does not exist"} + return Response(resp) + + @csrf_exempt @api_view(["GET"]) def tasks_dropped(request, task_id): diff --git a/web/submission/views.py b/web/submission/views.py index 39c25136c3a..a8c9e7efccd 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -312,6 +312,9 @@ def index(request, task_id=None, resubmit_hash=None): if request.POST.get("nohuman"): options += "nohuman=yes," + if request.POST.get("mitmdump"): + options += "mitmdump=yes," + if web_conf.guacamole.enabled and request.POST.get("interactive"): remote_console = True options += "interactive=1," From 57f653e1edba8c4eec2ffa6f6950463cc808d3c3 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 22 Oct 2024 12:55:47 +0000 Subject: [PATCH 16/18] style: Automatic code formatting --- modules/auxiliary/Mitmdump.py | 41 ++++++++++++++----- utils/rooter.py | 75 ++++++++++++++++++++++++++++++----- 2 files changed, 96 insertions(+), 20 deletions(-) diff --git a/modules/auxiliary/Mitmdump.py b/modules/auxiliary/Mitmdump.py index 1e632978288..68cf1ecc12c 100644 --- a/modules/auxiliary/Mitmdump.py +++ b/modules/auxiliary/Mitmdump.py @@ -19,6 +19,7 @@ log = logging.getLogger(__name__) + class Mitmdump(Auxiliary): """Module for generating HAR with Mitmdump.""" @@ -26,8 +27,7 @@ def __init__(self): Auxiliary.__init__(self) Thread.__init__(self) log.info("Mitmdump module loaded") - self.mitmdump_thread = None - + self.mitmdump_thread = None def start(self): """Start mitmdump in a separate thread.""" @@ -50,12 +50,12 @@ def __init__(self, task, machine): self.task = task self.machine = machine self.do_run = True - self.host_ip = mitmdump.cfg.get("host") - self.host_iface = mitmdump.cfg.get("interface") + self.host_ip = mitmdump.cfg.get("host") + self.host_iface = mitmdump.cfg.get("interface") self.mitmdump_bin = mitmdump.cfg.get("bin") self.proc = None self.host_port = self._get_unused_port() - self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump") + self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump") def stop(self): """Set stop mitmdump capture.""" @@ -67,7 +67,7 @@ def stop(self): log.info("Stopping mitmdump") try: - rooter("disable_mitmdump",self.host_iface, self.machine.ip, self.host_port) + rooter("disable_mitmdump", self.host_iface, self.machine.ip, self.host_port) except subprocess.CalledProcessError as e: log.error("Failed to execute firewall rules: %s", e) @@ -83,7 +83,7 @@ def run(self): return try: - rooter("enable_mitmdump",self.host_iface, self.machine.ip, self.host_port) + rooter("enable_mitmdump", self.host_iface, self.machine.ip, self.host_port) except subprocess.CalledProcessError as e: log.error("Failed to execute firewall rules: %s", e) @@ -91,15 +91,34 @@ def run(self): mitmdump_args = [] os.makedirs(self.mitmdump_path, exist_ok=True) file_path = os.path.join(self.mitmdump_path, "dump.har") - mitmdump_args.extend([self.mitmdump_bin, "-q", "--listen-host", self.host_ip, "-p", str(self.host_port), "--set", "hardump=", file_path]) - mitmdump_args[-2:] = ["".join(mitmdump_args[-2:])] # concatenate the last two 
arguments, otherwise the HAR file will not be created. + mitmdump_args.extend( + [ + self.mitmdump_bin, + "-q", + "--listen-host", + self.host_ip, + "-p", + str(self.host_port), + "--set", + "hardump=", + file_path, + ] + ) + mitmdump_args[-2:] = [ + "".join(mitmdump_args[-2:]) + ] # concatenate the last two arguments, otherwise the HAR file will not be created. self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False) except (OSError, ValueError): log.exception("Failed to mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path) return - log.info("Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)", self.proc.pid, self.host_ip, self.host_port, file_path) - + log.info( + "Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)", + self.proc.pid, + self.host_ip, + self.host_port, + file_path, + ) def _get_unused_port(self) -> str | None: """Return the first unused TCP port from the set.""" diff --git a/utils/rooter.py b/utils/rooter.py index d07fcec45c1..717bdbdc762 100644 --- a/utils/rooter.py +++ b/utils/rooter.py @@ -191,23 +191,80 @@ def disable_nat(interface): def enable_mitmdump(interface, client, port): """Enable mitmdump on this interface.""" run_iptables( - "-t", "nat", "-I", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "443", - "-j", "REDIRECT", "--to-port", port + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, ) run_iptables( - "-t", "nat", "-I", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "80", - "-j", "REDIRECT", "--to-port", port + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, ) + def disable_mitmdump(interface, client, port): """Disable mitmdump on this interface.""" run_iptables( - "-t", "nat", "-D", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "443", - "-j", "REDIRECT", "--to-port", port + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, ) run_iptables( - "-t", "nat", "-D", "PREROUTING", "-i", interface, "-s", client, "-p", "tcp", "--dport", "80", - "-j", "REDIRECT", "--to-port", port + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, ) @@ -698,7 +755,7 @@ def drop_disable(ipaddr, resultserver_port): "add_dev_to_vrf": add_dev_to_vrf, "delete_dev_from_vrf": delete_dev_from_vrf, "enable_mitmdump": enable_mitmdump, - "disable_mitmdump": disable_mitmdump, + "disable_mitmdump": disable_mitmdump, } if __name__ == "__main__": From b21dd40898e6381375f74c22dc7a592dfa35dcc1 Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Tue, 22 Oct 2024 14:46:35 +0100 Subject: [PATCH 17/18] Rewrite Lumma detection --- data/yara/CAPE/Lumma.yar | 29 ++++++----------------------- 1 file changed, 6 insertions(+), 23 deletions(-) diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 83e318d8219..1422e550b62 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -1,32 +1,15 @@ rule Lumma { meta: - author = "kevoreilly,YungBinary" + author = "kevoreilly" description = "Lumma Payload" cape_type = "Lumma Payload" - packed = 
"5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f" + packed = "0ee580f0127b821f4f1e7c032cf76475df9724a9fade2e153a69849f652045f8" + packed = "23ff1c20b16d9afaf1ce443784fc9a025434a010e2194de9dec041788c369887" strings: - $xor_decode = { - 0F B6 14 0E - 89 CF - 83 E7 1F - 0F B6 7C 3C ?? - 89 D3 - 31 FB - 83 F3 FF - 89 FD - 21 DD - D1 E5 - 29 FD - 29 EA - 8B 5C 24 ?? - 88 14 0B - EB ?? - } - $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} - $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} - $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} - + $decode1 = {C1 (E9|EA) 02 [0-3] 0F B6 (44|4C) ?? FF 83 (F8|F9) 3D 74 05 83 (F8|F9) 2E 75 01 (49|4A) [0-30] 2E 75} + $decode2 = {B0 40 C3 B0 3F C3 89 C8 04 D0 3C 09 77 06 80 C1 04 89 C8 C3 89 C8 04 BF 3C} + $decode3 = {B0 40 C3 B0 3F C3 80 F9 30 72 ?? 80 F9 39 77 06 80 C1 04 89 C8 C3} condition: uint16(0) == 0x5a4d and any of them } From 00ba68ff56fba412ecf5f686ddce3400beda7e00 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Tue, 22 Oct 2024 13:51:45 +0000 Subject: [PATCH 18/18] style: Automatic code formatting --- modules/processing/parsers/CAPE/Lumma.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/modules/processing/parsers/CAPE/Lumma.py b/modules/processing/parsers/CAPE/Lumma.py index c72db668a87..b1437d8e435 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -1,7 +1,9 @@ import base64 -import pefile import re +import pefile + + def is_base64(s): pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$") if not s or len(s) < 1: @@ -9,12 +11,14 @@ def is_base64(s): else: return pattern.match(s) + def extract_strings(data, minchars): endlimit = b"8192" apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + endlimit + b"})\x00" strings = [string.decode() for string in re.findall(apat, data)] return strings + def get_base64_strings(str_list): base64_strings = [] for s in str_list: @@ -22,17 +26,19 @@ def get_base64_strings(str_list): base64_strings.append(s) return base64_strings + def get_rdata(data): rdata = None pe = pefile.PE(data=data) section_idx = 0 for section in pe.sections: - if section.Name == b'.rdata\x00\x00': + if section.Name == b".rdata\x00\x00": rdata = pe.sections[section_idx].get_data() break section_idx += 1 return rdata + def xor_data(data, key): decoded = bytearray() key_len = len(key) @@ -42,11 +48,13 @@ def xor_data(data, key): decoded.append(data[i] ^ key[i]) return decoded + def contains_non_printable(byte_array): for byte in byte_array: if not chr(byte).isprintable(): return True - return False + return False + def extract_config(data): config_dict = {"C2": []}