diff --git a/.github/actions/python-setup/action.yml b/.github/actions/python-setup/action.yml index 83d8f6c35f7..d547756a877 100644 --- a/.github/actions/python-setup/action.yml +++ b/.github/actions/python-setup/action.yml @@ -16,7 +16,7 @@ runs: - name: Install poetry shell: bash - run: pip install poetry + run: PIP_BREAK_SYSTEM_PACKAGES=1 pip install poetry poetry-plugin-export - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v5 @@ -27,4 +27,4 @@ runs: - name: Install requirements shell: bash run: | - poetry install --no-interaction --no-root + PIP_BREAK_SYSTEM_PACKAGES=1 poetry install --no-interaction --no-root diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 9c06706713d..d271e493800 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -34,7 +34,7 @@ jobs: - name: Install pyattck run: | - poetry run pip install pyattck==7.1.2 + poetry run pip install pyattck==7.1.2 maco - name: Run Ruff run: poetry run ruff . 
--line-length 132 --ignore E501,E402 diff --git a/README.md b/README.md index f0c302fd6ef..62909436d14 100644 --- a/README.md +++ b/README.md @@ -92,7 +92,7 @@ Malware can be classified in CAPE via three mechanisms: ![image](https://github.com/kevoreilly/CAPEv2/assets/22219888/a44f2f8a-10df-47cc-9690-5ef08f04ea6b) -Parsing can be done using CAPE's own framework, alternatively the following frameworks are supported: [RATDecoders](https://github.com/kevthehermit/RATDecoders), [DC3-MWCP](https://github.com/Defense-Cyber-Crime-Center/DC3-MWCP) or [MalDuck](https://github.com/CERT-Polska/malduck/tree/master/malduck/) +Parsing can be done using CAPE's own framework, alternatively the following frameworks are supported: [RATDecoders](https://github.com/kevthehermit/RATDecoders), [DC3-MWCP](https://github.com/Defense-Cyber-Crime-Center/DC3-MWCP), [MalDuck](https://github.com/CERT-Polska/malduck/tree/master/malduck/), or [MaCo](https://github.com/CybercentreCanada/maco) #### Special note about config parsing frameworks: * Due to the nature of malware, since it changes constantly when any new version is released, something might become broken! @@ -162,10 +162,10 @@ A huge thank you to @D00m3dR4v3n for single-handedly porting CAPE to Python 3. * Replace `` with a real pattern. * You need to replace all `` inside! * Read it! You must understand what it does! It has configuration in header of the script. - * `sudo ./kvm-qemu.sh all | tee kvm-qemu.log` + * `sudo ./kvm-qemu.sh all 2>&1 | tee kvm-qemu.log` 4. To install CAPE itself, [cape2.sh](https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh) with all optimizations * Read and understand what it does! This is not a silver bullet for all your problems! It has configuration in header of the script. - * `sudo ./cape2.sh base | tee cape.log` + * `sudo ./cape2.sh base 2>&1 | tee cape.log` 5. After installing everything save both installation logs as gold! 6. 
Configure CAPE by doing mods to config files inside `conf` folder. 7. Restart all CAPE services to pick config changes and run CAPE properly! @@ -228,5 +228,3 @@ If you use CAPEv2 in your work, please cite it as specified in the "Cite this re ### Docs * [ReadTheDocs](https://capev2.readthedocs.io/en/latest/#) - - diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py index 8444dd9aaeb..deb7a583b87 100644 --- a/analyzer/windows/analyzer.py +++ b/analyzer/windows/analyzer.py @@ -503,11 +503,27 @@ def run(self): mod_name = name.split(".")[-1] if mod_name in windows_modules: mod_name += "_windows" - # if hasattr(self.config, mod_name) and getattr(self.config, mod_name, False): - # log.debug('Imported auxiliary module "%s"', name) + if hasattr(self.config, mod_name) and getattr(self.config, mod_name, False): + __import__(name, globals(), locals(), ["dummy"]) + log.debug('Imported auxiliary module "%s"', name) except ImportError as e: log.warning('Unable to import the auxiliary module "%s": %s', name, e) + def configure_aux_from_data(instance): + # Do auxiliary module configuration stored in 'data/auxiliary/' + _class = type(instance) + try: + log.debug("attempting to configure '%s' from data", _class.__name__) + instance.configure_from_data() + except ModuleNotFoundError: + # let it go, not every module is configurable from data + log.debug("module %s does not support data configuration, ignoring", _class.__name__) + except ImportError as iexc: + # let it go but emit a warning; assume a dependency is missing + log.warning("configuration error for module %s: %s", _class.__name__, iexc) + except Exception as exc: + log.error("error configuring module %s: %s", _class.__name__, exc) + # Walk through the available auxiliary modules. 
aux_modules = [] @@ -516,6 +532,7 @@ def run(self): aux = module(self.options, self.config) log.debug('Initialized auxiliary module "%s"', module.__name__) aux_modules.append(aux) + configure_aux_from_data(aux) log.debug('Trying to start auxiliary module "%s"...', module.__module__) aux.start() except (NotImplementedError, AttributeError) as e: diff --git a/analyzer/windows/data/yara/Formbook.yar b/analyzer/windows/data/yara/Formbook.yar index 732310fc320..a1d3d50adf6 100644 --- a/analyzer/windows/data/yara/Formbook.yar +++ b/analyzer/windows/data/yara/Formbook.yar @@ -18,13 +18,14 @@ rule FormhookB meta: author = "kevoreilly" description = "Formbook Anti-hook Bypass" - cape_options = "clear,bp0=$decode,action0=scan,hc0=1,bp1=$remap_ntdll+6,action1=setdst:ntdll,count=0,force-sleepskip=1" + cape_options = "clear,bp0=$entry,action0=scan,hc0=1,bp1=$new_remap+6,action1=setdst:ntdll,count=0,force-sleepskip=1" packed = "08c5f44d57f5ccc285596b3d9921bf7fbbbf7f9a827bb3285a800e4c9faf6731" strings: - $decode = {55 8B EC 83 EC 24 53 56 57 [480-520] 8B E5 5D C3} - $remap_ntdll = {90 90 90 90 90 90 8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} + $remap_ntdll = {33 96 [2] 00 00 8D 86 [2] 00 00 68 F0 00 00 00 50 89 [2-5] E8 [4-10] 6A 00 6A 0? 8D 4D ?? 51 6A} + $entry = {55 8B EC 83 EC ?4 53 56 57 [480-520] 8B E5 5D C3} + $new_remap = {90 90 90 90 90 90 8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} condition: - any of them + 2 of them } rule FormconfA diff --git a/analyzer/windows/data/yara/NitrogenLoader.yar b/analyzer/windows/data/yara/NitrogenLoader.yar new file mode 100644 index 00000000000..0c37500494a --- /dev/null +++ b/analyzer/windows/data/yara/NitrogenLoader.yar @@ -0,0 +1,41 @@ +rule LoaderSyscall +{ + meta: + author = "enzok" + description = "Loader Syscall" + cape_options = "sysbp=$syscall*-2,count=0" + strings: + $makehashes = {48 89 4C 24 ?? 48 89 54 24 ?? 4? 89 44 24 ?? 4? 89 4C 24 ?? 4? 83 EC ?? B? [4] E8 [3] 00} + $number = {49 89 C3 B? 
[4] E8 [3] 00} + $syscall = {48 83 C4 ?? 4? 8B 4C 24 ?? 4? 8B 54 24 ?? 4? 8B 44 24 ?? 4? 8B 4C 24 ?? 4? 89 CA 4? FF E3} + condition: + all of them +} + +rule NitrogenLoaderAES +{ + meta: + author = "enzok" + description = "NitrogenLoader AES and IV" + cape_options = "bp0=$keyiv0+8,action0=dump:ecx::64,hc0=1,bp1=$keyiv0*-4,action1=dump:ecx::32,hc1=1,count=0" + strings: + $keyiv0 = {48 8B 8C 24 [4] E8 [3] 00 4? 89 84 24 [4] 4? 8B 84 24 [4] 4? 89 84 24 [4] 4? 8B 8C 24 [4] E8 [3] 00} + $keyiv1 = {48 89 84 24 [4] 4? 8B 84 24 [4] 4? 8B 94 24 [4] 4? 8D 8C 24 [4] E8 [3] FF} + $keyiv2 = {48 63 84 24 [4] 4? 8B C0 4? 8B 94 24 [4] 4? 8D 8C 24 [4] E8 [3] FF 4? 8B 84 24} + condition: + all of them +} + +rule NitrogenLoaderBypass +{ + meta: + author = "enzok" + description = "Nitrogen Loader Exit Bypass" + cape_options = "bp2=$exit-2,action2=jmp,count=0" + strings: + $string1 = "LoadResource" + $syscall = {48 83 C4 ?? 4? 8B 4C 24 ?? 4? 8B 54 24 ?? 4? 8B 44 24 ?? 4? 8B 4C 24 ?? 4? 89 CA 4? FF E3} + $exit = {33 C9 E8 [4] E8 [4] 48 8D 84 24 [4] 48 89 44 24 ?? 4? B? E4 00 00 00 4? 8B 05 [4] B? 
03 00 00 00 48 8D} + condition: + all of them +} \ No newline at end of file diff --git a/analyzer/windows/data/yara/PrivateLoader.yar b/analyzer/windows/data/yara/PrivateLoader.yar new file mode 100644 index 00000000000..18ad22fc210 --- /dev/null +++ b/analyzer/windows/data/yara/PrivateLoader.yar @@ -0,0 +1,12 @@ +rule PrivateLoader +{ + meta: + author = "kevoreilly" + description = "PrivateLoader indirect syscall capture" + cape_options = "clear,sysbp=$syscall*-2" + packed = "075d0dafd7b794fbabaf53d38895cfd7cffed4a3fe093b0fc7853f3b3ce642a4" + strings: + $syscall = {48 31 C0 4C 8B 19 8B 41 10 48 8B 49 08 49 89 CA 41 FF E3} + condition: + any of them +} diff --git a/analyzer/windows/dll/capemon.dll b/analyzer/windows/dll/capemon.dll index 2f6ee42ce17..5a47c15e36d 100755 Binary files a/analyzer/windows/dll/capemon.dll and b/analyzer/windows/dll/capemon.dll differ diff --git a/analyzer/windows/dll/capemon_x64.dll b/analyzer/windows/dll/capemon_x64.dll index e7914e89331..4f96b7bf52f 100755 Binary files a/analyzer/windows/dll/capemon_x64.dll and b/analyzer/windows/dll/capemon_x64.dll differ diff --git a/analyzer/windows/lib/common/abstracts.py b/analyzer/windows/lib/common/abstracts.py index 6cd05214b10..595ec6d2860 100644 --- a/analyzer/windows/lib/common/abstracts.py +++ b/analyzer/windows/lib/common/abstracts.py @@ -322,3 +322,29 @@ def add_pid(self, pid): def del_pid(self, pid): pass + + def configure_from_data(self): + """Do private auxiliary module-specific configuration. + + Auxiliary modules can implement this method to perform pre-analysis + configuration based on runtime data contained in "data/auxiliary/". 
+ + This method raises: + - ImportError when any exception occurs during import + - AttributeError if the module configure function is invalid + - ModuleNotFoundError if the module does not support configuration from data + """ + package_module_name = self.__class__.__module__.split(".")[-1] + module_name = f"data.auxiliary.{package_module_name}" + try: + mod = importlib.import_module(module_name) + except ModuleNotFoundError as exc: + raise exc + except Exception as exc: + raise ImportError(f"error importing {module_name}: {exc}") from exc + + spec = inspect.getfullargspec(mod.configure) + if len(spec.args) != 1: + err_msg = f"{module_name}.configure: expected 1 arguments, got {len(spec.args)}" + raise AttributeError(err_msg) + mod.configure(self) diff --git a/analyzer/windows/modules/auxiliary/browsermonitor.py b/analyzer/windows/modules/auxiliary/browsermonitor.py index e994505f274..6989f190b20 100644 --- a/analyzer/windows/modules/auxiliary/browsermonitor.py +++ b/analyzer/windows/modules/auxiliary/browsermonitor.py @@ -27,14 +27,17 @@ def __init__(self, options=None, config=None): self.startupinfo = subprocess.STARTUPINFO() self.startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW self.browser_logfile = "" - self.last_modification = 0.0 - self._is_first_save = True def _find_browser_extension(self): temp_dir = tempfile.gettempdir() while not self.browser_logfile and self.do_run: temp_dir_list = os.listdir(temp_dir) for directory in temp_dir_list: + # TOR Browser saves directly to %temp% + if directory.startswith("bext_") and directory.endswith(".json"): + log.debug(f"Found extension logs: {self.browser_logfile}") + self.browser_logfile = os.path.join(temp_dir, directory) + break tmp_directory_path = os.path.join(temp_dir, directory) if not os.path.isdir(tmp_directory_path): continue @@ -49,22 +52,12 @@ def _find_browser_extension(self): time.sleep(1) def _collect_browser_logs(self): - if not self._is_first_save and self.last_modification != 
os.path.getmtime(self.browser_logfile): - return - self.last_modification = os.path.getmtime(self.browser_logfile) upload_to_host(self.browser_logfile, "browser/requests.log") - self._is_first_save = False def run(self): self.do_run = True if self.enabled: self._find_browser_extension() - self.last_modification = os.path.getmtime(self.browser_logfile) - while self.do_run: - self._collect_browser_logs() - time.sleep(1) - return True - return False def stop(self): if self.enabled: diff --git a/analyzer/windows/modules/auxiliary/disguise.py b/analyzer/windows/modules/auxiliary/disguise.py index 062dd22bd5c..9f8745dbe4c 100644 --- a/analyzer/windows/modules/auxiliary/disguise.py +++ b/analyzer/windows/modules/auxiliary/disguise.py @@ -244,9 +244,11 @@ def randomizeUUID(self): SetValueEx(key, "MachineGuid", 0, REG_SZ, createdUUID) def add_persistent_route(self): - self.run_as_system(["C:\\Windows\\System32\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY]) self.run_as_system( - ["C:\\Windows\\System32\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] + ["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] + ) + self.run_as_system( + ["C:\\Windows\\System32\\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] ) def start(self): diff --git a/analyzer/windows/modules/auxiliary/human.py b/analyzer/windows/modules/auxiliary/human.py index 095226251b2..c7fb4c8c519 100644 --- a/analyzer/windows/modules/auxiliary/human.py +++ b/analyzer/windows/modules/auxiliary/human.py @@ -71,6 +71,7 @@ "don't send", "don't save", "continue", + "connect", "unzip", "open", "close the program", @@ -115,6 +116,7 @@ DONT_CLICK_BUTTONS = ( # english "check online for a solution", + "don't ask me again for remote connections from this publisher", "don't run", "do not ask again until the next update is available", "cancel", diff --git 
a/analyzer/windows/modules/packages/Shellcode-Unpacker.py b/analyzer/windows/modules/packages/Shellcode-Unpacker.py deleted file mode 100644 index 28e4cc377ad..00000000000 --- a/analyzer/windows/modules/packages/Shellcode-Unpacker.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import logging -import os -import shutil - -from lib.common.abstracts import Package -from lib.common.constants import OPT_PROCDUMP, OPT_UNPACKER - -log = logging.getLogger(__name__) - -_OPT_DUMP_CALLER_REGIONS = "dump-caller-regions" - - -class Shellcode_Unpacker(Package): - """32-bit Shellcode Unpacker package.""" - - summary = "Executes 32-bit Shellcode using loader.exe with the unpacker option." - description = f"""Uses 'bin\\loader.exe shellcode ' to execute 32-bit Shellcode. - Turns off '{OPT_PROCDUMP}' and '{_OPT_DUMP_CALLER_REGIONS}'. - Turns on '{OPT_UNPACKER}'.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[_OPT_DUMP_CALLER_REGIONS] = "0" - - def start(self, path): - loaderpath = "bin\\loader.exe" - arguments = f"shellcode {path}" - - # we need to move out of the analyzer directory - # due to a check in monitor dll - basepath = os.path.dirname(path) - newpath = os.path.join(basepath, os.path.basename(loaderpath)) - shutil.copy(loaderpath, newpath) - - log.info("[-] newpath : %s", newpath) - log.info("[-] arguments : %s", arguments) - - return self.execute(newpath, arguments, newpath) diff --git a/analyzer/windows/modules/packages/Unpacker.py b/analyzer/windows/modules/packages/Unpacker.py deleted file mode 100644 index 35d6f47264f..00000000000 --- a/analyzer/windows/modules/packages/Unpacker.py +++ /dev/null @@ 
-1,42 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_ARGUMENTS, OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker(Package): - """CAPE Unpacker analysis package.""" - - # PATHS = [ - # ("SystemRoot", "system32"), - # ] - summary = "Executes a .exe file with the unpacker option." - description = f"""Executes the sample passing arguments if any. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The .exe filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS,) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.pids = [] - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - arguments = self.options.get(OPT_ARGUMENTS) - - # If the file doesn't have an extension, add .exe - # See CWinApp::SetCurrentHandles(), it will throw - # an exception that will crash the app if it does - # not find an extension on the main exe's filename - path = check_file_extension(path, ".exe") - return self.execute(path, arguments, path) diff --git a/analyzer/windows/modules/packages/Unpacker_dll.py b/analyzer/windows/modules/packages/Unpacker_dll.py deleted file mode 100644 index 09e5a5ed16d..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_dll.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -import os -import shutil - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import ( - DLL_OPTION_TEXT, - DLL_OPTIONS, - OPT_ARGUMENTS, - OPT_DLLLOADER, - OPT_FUNCTION, - OPT_INJECTION, - OPT_UNPACKER, -) - - -class Unpacker_dll(Package): - """CAPE Unpacker DLL analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "rundll32.exe"), - ] - summary = "Unpacks a .dll file using rundll32.exe as the loader." - description = f"""Uses rundll32.exe with the '/wait' option to run a .lnk file. - {DLL_OPTION_TEXT} - Turns off '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The .dll filename extension will be added automatically.""" - option_names = DLL_OPTIONS - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - rundll32 = self.get_path("rundll32.exe") - function = self.options.get(OPT_FUNCTION, "#1") - arguments = self.options.get(OPT_ARGUMENTS) - dllloader = self.options.get(OPT_DLLLOADER) - - # If the file doesn't have the proper .dll extension force it - # and rename it. This is needed for rundll32 to execute correctly. - # See ticket #354 for details. - path = check_file_extension(path, ".dll") - - args = f"{path},{function}" - if arguments: - args += f" {arguments}" - - if dllloader: - newname = os.path.join(os.path.dirname(rundll32), dllloader) - shutil.copy(rundll32, newname) - rundll32 = newname - - return self.execute(rundll32, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_js.py b/analyzer/windows/modules/packages/Unpacker_js.py deleted file mode 100644 index 18875faa347..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_js.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) 2015 Optiv, Inc. 
(brad.spengler@optiv.com) -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import os - -from lib.common.abstracts import Package -from lib.common.constants import OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker_JS(Package): - """JavaScript analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "wscript.exe"), - ] - summary = "Executes a .JS file using wscript.exe." - description = f"""Uses wscript.exe to run a .js/.jse file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The appropriate filename extension will be added automatically.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - wscript = self.get_path("wscript.exe") - args = f'"{path}"' - ext = os.path.splitext(path)[-1].lower() - if ext not in (".js", ".jse"): - with open(path, "r") as tmpfile: - magic_bytes = tmpfile.read(4) - if magic_bytes == "#@~^": - os.rename(path, f"{path}.jse") - path = f"{path}.jse" - else: - os.rename(path, f"{path}.js") - path = f"{path}.js" - args = f'"{path}"' - return self.execute(wscript, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_ps1.py b/analyzer/windows/modules/packages/Unpacker_ps1.py deleted file mode 100644 index 1b1243f850d..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_ps1.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class PS1(Package): - """PowerShell Unpacker analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "WindowsPowerShell", "v*.0", "powershell.exe"), - ] - summary = "Executes a sample file with powershell." - description = f"""Uses 'powershell -NoProfile -ExecutionPolicy bypass -File ' - to run a .ps1 file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The .ps1 filename extension will be added automatically.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - powershell = self.get_path_glob("PowerShell") - path = check_file_extension(path, ".ps1") - args = f'-NoProfile -ExecutionPolicy bypass -File "{path}"' - return self.execute(powershell, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_regsvr.py b/analyzer/windows/modules/packages/Unpacker_regsvr.py deleted file mode 100644 index dd357ca7169..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_regsvr.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_ARGUMENTS, OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker_Regsvr(Package): - """CAPE Unpacker DLL analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "regsvr32.exe"), - ] - summary = "Executes function(s) in a DLL file using regsvr32.exe." 
- description = f"""Uses regsvr32.exe to run one or more functions in a .dll file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The .dll filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS,) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - regsvr32 = self.get_path("regsvr32.exe") - arguments = self.options.get(OPT_ARGUMENTS) - - # If the file doesn't have the proper .dll extension force it - # and rename it. This is needed for rundll32 to execute correctly. - # See ticket #354 for details. - path = check_file_extension(path, ".dll") - - args = path - if arguments: - args += f" {arguments}" - - return self.execute(regsvr32, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_zip.py b/analyzer/windows/modules/packages/Unpacker_zip.py deleted file mode 100644 index d1bd8f5b85f..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_zip.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -import logging -import os -import shutil -from zipfile import BadZipfile, ZipFile - -try: - import re2 as re -except ImportError: - import re - -from lib.common.abstracts import Package -from lib.common.constants import ( - ARCHIVE_OPTIONS, - DLL_OPTIONS, - OPT_ARGUMENTS, - OPT_DLLLOADER, - OPT_FILE, - OPT_FUNCTION, - OPT_INJECTION, - OPT_PASSWORD, - OPT_PROCDUMP, - OPT_UNPACKER, -) -from lib.common.exceptions import CuckooPackageError - -log = logging.getLogger(__name__) - - -class Unpacker_zip(Package): - """CAPE Unpacker zip analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "cmd.exe"), - ] - summary = "Unzips a file with the supplied password, execute its contents." - description = f"""Extracts the sample from a zip file. If the file name is not - supplied in the '{OPT_FILE}" option, the first file in the zip is taken. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The execution method is chosen based on the filename extension.""" - option_names = sorted(set(ARCHIVE_OPTIONS + DLL_OPTIONS)) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.pids = [] - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def extract_zip(self, zip_path, extract_path, password, recursion_depth): - """Extracts a nested ZIP file. - @param zip_path: ZIP path - @param extract_path: where to extract - @param password: ZIP password - @param recursion_depth: how deep we are in a nested archive - """ - # Test if zip file contains a file named as itself. - if self.is_overwritten(zip_path): - log.debug("ZIP file contains a file with the same name, original is going to be overwritten") - # TODO: add random string. - new_zip_path = f"{zip_path}.old" - shutil.move(zip_path, new_zip_path) - zip_path = new_zip_path - - # Unpacker. 
- with ZipFile(zip_path, "r") as archive: - try: - archive.extractall(path=extract_path, pwd=password) - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - except RuntimeError: - try: - archive.extractall(path=extract_path, pwd="infected") - except RuntimeError as e: - raise CuckooPackageError(f"Unable to extract Zip file: {e}") from e - finally: - if recursion_depth < 4: - # Extract nested archives. - for name in archive.namelist(): - if name.endswith(".zip"): - # Recurse. - try: - self.extract_zip(os.path.join(extract_path, name), extract_path, password, recursion_depth + 1) - except BadZipfile: - log.warning( - "Nested zip file '%s' name end with 'zip' extension is not a valid zip, skipping extraction", - name, - ) - except RuntimeError as run_err: - log.error("Error to extract nested zip file %s with details: %s", name, run_err) - - def is_overwritten(self, zip_path): - """Checks if the ZIP file contains another file with the same name, so it is going to be overwritten. - @param zip_path: zip file path - @return: comparison boolean - """ - with ZipFile(zip_path, "r") as archive: - # Test if zip file contains a file named as itself. - try: - return any(name == os.path.basename(zip_path) for name in archive.namelist()) - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - - def get_infos(self, zip_path): - """Get information from ZIP file. 
- @param zip_path: zip file path - @return: ZipInfo class - """ - try: - with ZipFile(zip_path, "r") as archive: - return archive.infolist() - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - - def start(self, path): - root = os.environ["TEMP"] - password = self.options.get(OPT_PASSWORD) - exe_regex = re.compile(r"(\.exe|\.scr|\.msi|\.bat|\.lnk|\.js|\.jse|\.vbs|\.vbe|\.wsf\.ps1)$", flags=re.IGNORECASE) - dll_regex = re.compile(r"(\.dll|\.ocx)$", flags=re.IGNORECASE) - zipinfos = self.get_infos(path) - self.extract_zip(path, root, password, 0) - - file_name = self.options.get(OPT_FILE) - # If no file name is provided via option, take the first file. - if file_name is None: - # No name provided try to find a better name. - if not len(zipinfos): - raise CuckooPackageError("Empty ZIP archive") - - # Attempt to find a valid exe extension in the archive - for f in zipinfos: - if exe_regex.search(f.filename): - file_name = f.filename - break - if file_name is None: - for f in zipinfos: - if dll_regex.search(f.filename): - file_name = f.filename - break - # Default to the first one if none found - file_name = file_name or zipinfos[0].filename - log.debug("Missing file option, auto executing: %s", file_name) - file_path = os.path.join(root, file_name) - log.debug('file_name: "%s"', file_name) - if file_name.lower().endswith(".lnk"): - cmd_path = self.get_path("cmd.exe") - cmd_args = f'/c start /wait "" "{file_path}"' - return self.execute(cmd_path, cmd_args, file_path) - elif file_name.lower().endswith(".msi"): - msi_path = self.get_path("msiexec.exe") - msi_args = f'/I "{file_path}"' - return self.execute(msi_path, msi_args, file_path) - elif file_name.lower().endswith((".js", ".jse", ".vbs", ".vbe", ".wsf")): - wscript = self.get_path_app_in_path("wscript.exe") - wscript_args = f'"{file_path}"' - return self.execute(wscript, wscript_args, file_path) - elif file_name.lower().endswith((".dll", ".ocx")): - rundll32 = 
self.get_path_app_in_path("rundll32.exe") - function = self.options.get(OPT_FUNCTION, "#1") - arguments = self.options.get(OPT_ARGUMENTS) - dllloader = self.options.get(OPT_DLLLOADER) - dll_args = f'"{file_path}",{function}' - if arguments: - dll_args += f" {arguments}" - if dllloader: - newname = os.path.join(os.path.dirname(rundll32), dllloader) - shutil.copy(rundll32, newname) - rundll32 = newname - return self.execute(rundll32, dll_args, file_path) - elif file_name.lower().endswith(".ps1"): - powershell = self.get_path_app_in_path("powershell.exe") - args = f'-NoProfile -ExecutionPolicy bypass -File "{path}"' - return self.execute(powershell, args, file_path) - return self.execute(file_path, self.options.get(OPT_ARGUMENTS), file_path) diff --git a/analyzer/windows/modules/packages/rdp.py b/analyzer/windows/modules/packages/rdp.py new file mode 100644 index 00000000000..016df77bbf4 --- /dev/null +++ b/analyzer/windows/modules/packages/rdp.py @@ -0,0 +1,17 @@ +from lib.common.abstracts import Package +from lib.common.common import check_file_extension + + +class Exe(Package): + """RDP analysis package.""" + + PATHS = [ + ("SystemRoot", "system32", "mstsc.exe"), + ] + + def start(self, path): + args = self.options.get("arguments") + + path = check_file_extension(path, ".rdp") + mstsc = self.get_path_glob("mstsc.exe") + return self.execute(mstsc, f'"{path}" {args}', path) diff --git a/analyzer/windows/modules/packages/Shellcode.py b/analyzer/windows/modules/packages/shellcode.py similarity index 100% rename from analyzer/windows/modules/packages/Shellcode.py rename to analyzer/windows/modules/packages/shellcode.py diff --git a/analyzer/windows/modules/packages/Shellcode_x64.py b/analyzer/windows/modules/packages/shellcode_x64.py similarity index 100% rename from analyzer/windows/modules/packages/Shellcode_x64.py rename to analyzer/windows/modules/packages/shellcode_x64.py diff --git a/analyzer/windows/modules/packages/tor_browser.py 
b/analyzer/windows/modules/packages/tor_browser.py new file mode 100644 index 00000000000..064611db5ff --- /dev/null +++ b/analyzer/windows/modules/packages/tor_browser.py @@ -0,0 +1,25 @@ +# Copyright (C) 2024 fdiaz@virustotal.com +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. +import time +import webbrowser + +from lib.common.abstracts import Package + + +class TorBrowserExt(Package): + """TOR analysis package (with extension).""" + + PATHS = [ + ("LOCALAPPDATA", "Tor Browser", "Browser", "firefox.exe"), + ] + summary = "Opens the URL in firefox." + description = """Spawns TOR's firefox.exe and opens the supplied URL.""" + + def start(self, url): + webbrowser.register("firefox", None, webbrowser.BackgroundBrowser(self.get_path("firefox.exe"))) + firefox = webbrowser.get("firefox") + time.sleep(15) # Rough estimate, change based on your setup times. + firefox.open(url) + time.sleep(15) # Prevent analysis from finishing too early. + return diff --git a/analyzer/windows/modules/packages/vawtrak.py b/analyzer/windows/modules/packages/vawtrak.py deleted file mode 100644 index 709ff56736d..00000000000 --- a/analyzer/windows/modules/packages/vawtrak.py +++ /dev/null @@ -1,51 +0,0 @@ -# Andriy :P - -import os -import shutil -from subprocess import call - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_APPDATA, OPT_ARGUMENTS, OPT_RUNASX86 - - -class IE(Package): - """Internet Explorer analysis package.""" - - PATHS = [ - ("ProgramFiles", "Internet Explorer", "iexplore.exe"), - ] - summary = "Runs the supplied executable." - description = f"""First runs 'iexplore.exe about:blank' to open Internet Explorer. - Next executes the given sample, passing '{OPT_ARGUMENTS}' if specified. - Use the '{OPT_APPDATA}' option to run the executable from the APPDATA directory. 
- Use the '{OPT_RUNASX86}' option to set the 32BITREQUIRED flag in the PE header, - using 'CorFlags.exe /32bit+'. - The .exe filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS, OPT_APPDATA, OPT_RUNASX86) - - def start(self, path): - iexplore = self.get_path("iexplore.exe") - # pass the URL instead of a filename in this case - self.execute(iexplore, '"about:blank"', "about:blank") - - args = self.options.get(OPT_ARGUMENTS) - appdata = self.options.get(OPT_APPDATA) - runasx86 = self.options.get(OPT_RUNASX86) - - # If the file doesn't have an extension, add .exe - # See CWinApp::SetCurrentHandles(), it will throw - # an exception that will crash the app if it does - # not find an extension on the main exe's filename - path = check_file_extension(path, ".exe") - - if appdata: - # run the executable from the APPDATA directory, required for some malware - basepath = os.getenv("APPDATA") - newpath = os.path.join(basepath, os.path.basename(path)) - shutil.copy(path, newpath) - path = newpath - if runasx86: - # ignore the return value, user must have CorFlags.exe installed in the guest VM - call(["CorFlags.exe", path, "/32bit+"]) - return self.execute(path, args, path) diff --git a/analyzer/windows/tests/test_analysis_packages.py b/analyzer/windows/tests/test_analysis_packages.py index a86bc0b33ce..d1bd1202198 100644 --- a/analyzer/windows/tests/test_analysis_packages.py +++ b/analyzer/windows/tests/test_analysis_packages.py @@ -36,56 +36,17 @@ def test_has_summary_description(self): self.assertGreater(len(subclass.summary), 0) self.assertGreater(len(subclass.description), 0) - def test_choose_package_Shellcode_Unpacker(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode-Unpacker") - pkg_class() - - def test_Shellcode(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode") + def test_shellcode(self): + pkg_class = self.class_from_analysis_package("modules.packages.shellcode") obj = 
pkg_class() self.assertEqual("offset", obj.option_names[0]) expected_summary = "Executes 32-bit Shellcode using loader.exe." self.assertEqual(expected_summary, obj.summary) - def test_Shellcode_x64(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode_x64") + def test_shellcode_x64(self): + pkg_class = self.class_from_analysis_package("modules.packages.shellcode_x64") pkg_class() - def test_Unpacker(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker") - obj = pkg_class() - expected_summary = "Executes a .exe file with the unpacker option." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_dll(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_dll") - obj = pkg_class() - self.assertEqual("arguments", obj.option_names[0]) - self.assertEqual("dllloader", obj.option_names[1]) - self.assertEqual("function", obj.option_names[2]) - - def test_Unpacker_js(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_js") - obj = pkg_class() - expected_summary = "Executes a .JS file using wscript.exe." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_ps1(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_ps1") - obj = pkg_class() - expected_summary = "Executes a sample file with powershell." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_regsvr(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_regsvr") - pkg_class() - - def test_Unpacker_zip(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_zip") - obj = pkg_class() - expected_summary = "Unzips a file with the supplied password, execute its contents." 
- self.assertEqual(expected_summary, obj.summary) - def test_access(self): pkg_class = self.class_from_analysis_package("modules.packages.access") pkg_class() @@ -315,10 +276,6 @@ def test_swf(self): pkg_class = self.class_from_analysis_package("modules.packages.swf") pkg_class() - def test_vawtrak(self): - pkg_class = self.class_from_analysis_package("modules.packages.vawtrak") - pkg_class() - def test_vbejse(self): pkg_class = self.class_from_analysis_package("modules.packages.vbejse") pkg_class() diff --git a/analyzer/windows/tests/test_analyzer.py b/analyzer/windows/tests/test_analyzer.py index c633a4a7d73..a8db4ec4241 100644 --- a/analyzer/windows/tests/test_analyzer.py +++ b/analyzer/windows/tests/test_analyzer.py @@ -81,87 +81,25 @@ def test_prepare(self, set_lock, init_logging, config, pipeserver): class TestAnalyzerChoosePackage(unittest.TestCase): - def test_choose_package_Shellcode_Unpacker(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Shellcode-Unpacker" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode-Unpacker", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Shellcode_Unpacker") - def test_choose_package_Shellcode(self): + def test_choose_package_shellcode(self): test = analyzer.Analyzer() test.config = MagicMock() test.options = MagicMock() - test.config.package = "Shellcode" + test.config.package = "shellcode" pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode", pkg_name) + self.assertEqual("modules.packages.shellcode", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "Shellcode") def test_choose_package_Shellcode_x64(self): test = analyzer.Analyzer() test.config = MagicMock() test.options = MagicMock() - test.config.package = "Shellcode_x64" + test.config.package = "shellcode_x64" pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode_x64", 
pkg_name) + self.assertEqual("modules.packages.shellcode_x64", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "Shellcode_x64") - def test_choose_package_Unpacker(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker") - - def test_choose_package_Unpacker_dll(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_dll" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_dll", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_dll") - - def test_choose_package_Unpacker_js(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_js" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_js", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_JS") - - def test_choose_package_Unpacker_ps1(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_ps1" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_ps1", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "PS1") - - def test_choose_package_Unpacker_regsvr(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_regsvr" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_regsvr", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_Regsvr") - - def test_choose_package_Unpacker_zip(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - 
test.config.package = "Unpacker_zip" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_zip", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_zip") - def test_choose_package_access(self): test = analyzer.Analyzer() test.config = MagicMock() @@ -603,15 +541,6 @@ def test_choose_package_swf(self): self.assertEqual("modules.packages.swf", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "SWF") - def test_choose_package_vawtrak(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "vawtrak" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.vawtrak", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "IE") - def test_choose_package_vbejse(self): test = analyzer.Analyzer() test.config = MagicMock() diff --git a/changelog.md b/changelog.md index 5ff3aaafb04..eadbd064d4e 100644 --- a/changelog.md +++ b/changelog.md @@ -1,7 +1,13 @@ +### [04.10.2024] +* Monitor update: Add GetClassObject hook to handle UAC bypass technique using CMSTPLUA COM object +* PrivateLoader direct syscall capture + +### [01.10.2024] +* Monitor update: Improve fix for size bug with unpacking embedded PEs + ### [26.09.2024] Browser monitoring * [Browser extension details](https://github.com/kevoreilly/CAPEv2/tree/master/extra/browser_extension/README.md). 
For code details see [PR](https://github.com/kevoreilly/CAPEv2/pull/2330) - ### [23.09.2024] * Monitor update: Fix size bug with unpacking embedded PEs * .NET loader 'SlowLoader' detonation shim for slower cpus (race condition) diff --git a/conf/default/api.conf.default b/conf/default/api.conf.default index f1a6828d768..96e249df910 100644 --- a/conf/default/api.conf.default +++ b/conf/default/api.conf.default @@ -252,6 +252,10 @@ compress = no rps = 1/s rpm = 2/m +# Pull a HAR file from a specific task with mitmdump enabled +[taskmitmdump] +enabled = no + # Download a sample from a specific Task ID. [sampledl] enabled = no diff --git a/conf/default/auxiliary.conf.default b/conf/default/auxiliary.conf.default index 4f6029ba9b6..ec8aeb84c73 100644 --- a/conf/default/auxiliary.conf.default +++ b/conf/default/auxiliary.conf.default @@ -77,3 +77,9 @@ bpf = not arp # Enable or disable the use of QEMU as screenshot capture [yes/no]. # screenshots_linux and screenshots_windows must be disabled enabled = no + +[Mitmdump] +# Enable or disable the use of mitmdump (mitmproxy) to get dump.har [yes/no]. 
+# This module requires installed mitmproxy see install_mitmproxy +# (https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh#L1320) +enabled = no diff --git a/conf/default/mitmdump.conf.default b/conf/default/mitmdump.conf.default new file mode 100644 index 00000000000..703da46560a --- /dev/null +++ b/conf/default/mitmdump.conf.default @@ -0,0 +1,11 @@ +[cfg] +# bin path to mitmdump +bin = /opt/mitmproxy/mitmdump + +# Host ip where mitmdump is listening +host = 127.0.0.1 + +# Interface where mitmdump is listening +interface = virbr0 + +# Future options like custom ports, cert paths, etc diff --git a/conf/default/processing.conf.default b/conf/default/processing.conf.default index 0a83dda5407..dbd297d6286 100644 --- a/conf/default/processing.conf.default +++ b/conf/default/processing.conf.default @@ -321,11 +321,6 @@ key = [script_log_processing] enabled = yes -# Community -# Dump PE's overlay info -[overlay] -enabled = no - # Community [floss] enabled = no diff --git a/conf/default/selfextract.conf.default b/conf/default/selfextract.conf.default index 6175734093e..b39903710fe 100644 --- a/conf/default/selfextract.conf.default +++ b/conf/default/selfextract.conf.default @@ -75,7 +75,7 @@ binary = /usr/bin/de4dot extra_args = timeout = 60 -# https://github.com/SychicBoy/NETReactorSlayer/releases +# https://github.com/otavepto/NETReactorSlayer/releases [eziriz_deobfuscate] enabled = yes binary = data/NETReactorSlayer.CLI @@ -89,3 +89,8 @@ timeout = 60 [msix_extract] enabled = no timeout = 60 + +# PE file overlay +[overlay] +enabled = yes +timeout = 60 diff --git a/conf/default/web.conf.default b/conf/default/web.conf.default index 476e9164ee1..5d9538a18e4 100644 --- a/conf/default/web.conf.default +++ b/conf/default/web.conf.default @@ -78,8 +78,10 @@ enabled = no #enable linux fields on webgui [linux] -#For advanced users only, can be buggy, linux analysis is work in progress for fun +# For advanced users only, can be buggy, linux analysis is work in 
progress for fun enabled = no +# independent of enabled or not. To not show linux options, but process statically those files +static_only = no [malscore] enabled = no diff --git a/data/html/base-report.html b/data/html/base-report.html index b7dddd52bff..f7d6a475ce5 100644 --- a/data/html/base-report.html +++ b/data/html/base-report.html @@ -22,7 +22,11 @@ +{% if title %} +{{ title }} • CAPE Sandbox +{% else %} CAPE Sandbox +{% endif %} diff --git a/data/yara/CAPE/AsyncRat.yar b/data/yara/CAPE/AsyncRAT.yar similarity index 90% rename from data/yara/CAPE/AsyncRat.yar rename to data/yara/CAPE/AsyncRAT.yar index 936299acdfb..84a02f65c2e 100644 --- a/data/yara/CAPE/AsyncRat.yar +++ b/data/yara/CAPE/AsyncRAT.yar @@ -1,9 +1,9 @@ -rule AsyncRat +rule AsyncRAT { meta: author = "kevoreilly, JPCERT/CC Incident Response Group" - description = "AsyncRat Payload" - cape_type = "AsyncRat Payload" + description = "AsyncRAT Payload" + cape_type = "AsyncRAT Payload" strings: $salt = {BF EB 1E 56 FB CD 97 3B B2 19 02 24 30 A5 78 43 00 3D 56 44 D2 1E 62 B9 D4 F1 80 E7 E6 C3 39 41} $b1 = {00 00 00 0D 53 00 48 00 41 00 32 00 35 00 36 00 00} @@ -16,10 +16,10 @@ rule AsyncRat uint16(0) == 0x5A4D and not $kitty and ($salt and (2 of ($str*) or 1 of ($b*))) or (all of ($b*) and 2 of ($str*)) } -rule asyncrat_kingrat { +rule AsyncRAT_kingrat { meta: author = "jeFF0Falltrades" - cape_type = "AsyncRat Payload" + cape_type = "AsyncRAT Payload" strings: $str_async = "AsyncClient" wide ascii nocase diff --git a/data/yara/CAPE/BlackDropper.yar b/data/yara/CAPE/BlackDropper.yar new file mode 100644 index 00000000000..fa8f218a74e --- /dev/null +++ b/data/yara/CAPE/BlackDropper.yar @@ -0,0 +1,17 @@ +rule BlackDropper +{ + meta: + author = "enzok" + description = "BlackDropper" + cape_type = "BlackDropper Payload" + hash = "f8026ae3237bdd885e5fcaceb86bcab4087d8857e50ba472ca79ce44c12bc257" + strings: + $string1 = "BlackDropperCPP" + $string2 = "Builder.dll" + $string3 = "\\Builder.exe" + $crypt1 = {33 
D2 48 8B 44 24 ?? 48 8B 4C 24 ?? 48 F7 F1 48 8B C2 48 8B D0 48 8D 4C 24 ?? E8} + $crypt2 = {0F BE 00 8B 4C 24 ?? 33 C8 8B C1 88 44 24 ?? 48 8B 54 24 ?? 48 8D 4C 24} + $crypt3 = {E8 [4] 0F B6 4C 24 ?? 88 08 E9} + condition: + 2 of ($string*) or 2 of ($crypt*) +} \ No newline at end of file diff --git a/data/yara/CAPE/BumbleBee.yar b/data/yara/CAPE/BumbleBee.yar index 43513157063..e65b80d92e6 100644 --- a/data/yara/CAPE/BumbleBee.yar +++ b/data/yara/CAPE/BumbleBee.yar @@ -48,3 +48,21 @@ rule BumbleBee condition: uint16(0) == 0x5A4D and (any of ($antivm*) or all of ($str_*)) } + +rule BumbleBee2024 +{ + meta: + author = "enzok" + description = "BumbleBee 2024" + cape_type = "BumbleBee Payload" + packed = "a20d56ab2e53b3a599af9904f163bb2e1b2bb7f2c98432519e1fbe87c3867e66" + strings: + $rc4key = {48 [6] 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} + $botidlgt = {4C 8B C1 B? 4F 00 00 00 48 8D 0D [4] E8 [4] 4C 8B C3 48 8D 0D [4] B? 4F 00 00 00 E8 [4] 4C 8B C3 48 8D 0D [4] B? FF 0F 00 00 E8} + $botid = {90 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} + $port = {4C 89 6D ?? 4C 89 6D ?? 4c 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8D 05 [4] 44 38 2D [4] 75} + $dga1 = {4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8B 1D [4] 48 8D 0D [4] E8 [4] 8B F8} + $dga2 = {48 8D 0D [4] E8 [4] 8B F0 4C 89 6D ?? 4C 89 6D ?? 4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8D 15 [4] 44 38 2D [4] 75} + condition: + $rc4key and all of ($botid*) and 2 of ($port, $port, $dga1, $dga2) +} \ No newline at end of file diff --git a/data/yara/CAPE/DCRat.yar b/data/yara/CAPE/DCRat.yar new file mode 100644 index 00000000000..4ca7696dfd0 --- /dev/null +++ b/data/yara/CAPE/DCRat.yar @@ -0,0 +1,87 @@ +rule DCRat { + meta: + author = "ditekSHen" + description = "DCRat payload" + cape_type = "DCRat Payload" + strings: + // DCRat + $dc1 = "DCRatBuild" ascii + $dc2 = "DCStlr" ascii + $x1 = "px\">
DCRat Keylogger" wide + $x2 = "DCRat-Log#" wide + $x3 = "DCRat.Code" wide + $string1 = "CaptureBrowsers" fullword ascii + $string2 = "DecryptBrowsers" fullword ascii + $string3 = "Browsers.IE10" ascii + $string4 = "Browsers.Chromium" ascii + $string5 = "WshShell" ascii + $string6 = "SysMngmts" fullword ascii + $string7 = "LoggerData" fullword ascii + // DCRat Plugins/Libraries + $plugin = "DCRatPlugin" fullword ascii + // AntiVM + $av1 = "AntiVM" ascii wide + $av2 = "vmware" fullword wide + $av3 = "VirtualBox" fullword wide + $av4 = "microsoft corporation" fullword wide + $av5 = "VIRTUAL" fullword wide + $av6 = "DetectVirtualMachine" fullword ascii + $av7 = "Select * from Win32_ComputerSystem" fullword wide + // Plugin_AutoStealer, Plugin_AutoKeylogger + $pl1 = "dcratAPI" fullword ascii + $pl2 = "dsockapi" fullword ascii + $pl3 = "file_get_contents" fullword ascii + $pl4 = "classthis" fullword ascii + $pl5 = "typemdt" fullword ascii + $pl6 = "Plugin_AutoStealer" ascii wide + $pl7 = "Plugin_AutoKeylogger" ascii wide + // variant + $v1 = "Plugin couldn't process this action!" wide + $v2 = "Unknown command!" wide + $v3 = "PLUGINCONFIGS" wide + $v4 = "Saving log..." 
wide + $v5 = "~Work.log" wide + $v6 = "MicrophoneNum" fullword wide + $v7 = "WebcamNum" fullword wide + $v8 = "%SystemDrive% - Slow" wide + $v9 = "%UsersFolder% - Fast" wide + $v10 = "%AppData% - Very Fast" wide + $v11 = /\[(Up|Down|Enter|ESC|CTRL|Shift|Win|Tab|CAPSLOCK: (ON|OFF))\]<\/span>/ wide + $px1 = "[Browsers] Scanned elements: " wide + $px2 = "[Browsers] Grabbing cookies" wide + $px3 = "[Browsers] Grabbing passwords" wide + $px4 = "[Browsers] Grabbing forms" wide + $px5 = "[Browsers] Grabbing CC" wide + $px6 = "[Browsers] Grabbing history" wide + $px7 = "[StealerPlugin] Invoke: " wide + $px8 = "[Other] Grabbing steam" wide + $px9 = "[Other] Grabbing telegram" wide + $px10 = "[Other] Grabbing discord tokens" wide + $px11 = "[Other] Grabbing filezilla" wide + $px12 = "[Other] Screenshots:" wide + $px13 = "[Other] Clipboard" wide + $px14 = "[Other] Saving system information" wide + condition: + uint16(0) == 0x5a4d and (all of ($dc*) or all of ($string*) or 2 of ($x*) or 6 of ($v*) or 5 of ($px*)) or ($plugin and (4 of ($av*) or 5 of ($pl*))) +} + +rule dcrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "DCRat Payload" + strings: + $venom_1 = "VenomRAT" wide ascii nocase + $venom_2 = "HVNC_REPLY_MESSAGE" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $str_b64_amsi = "YW1zaS5kbGw=" wide ascii + $str_b64_virtual_protect = "VmlydHVhbFByb3RlY3Q=" wide ascii + $str_dcrat = "dcrat" wide ascii nocase + $str_plugin = "save_Plugin" wide ascii + $str_qwqdan = "qwqdan" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 80 } + $patt_config = { 72 [3] 70 80 [3] 04 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + + condition: + (not any of ($venom*)) and 5 of them and #patt_config >= 10 +} diff --git a/data/yara/CAPE/KoiLoader.yar b/data/yara/CAPE/KoiLoader.yar new file mode 100644 index 00000000000..3f71f780ef1 --- /dev/null +++ b/data/yara/CAPE/KoiLoader.yar @@ -0,0 +1,35 @@ +rule KoiLoader +{ + meta: 
+ author = "YungBinary" + description = "KoiLoader" + cape_type = "KoiLoader Payload" + hash = "b462e3235c7578450b2b56a8aff875a3d99d22f6970a01db3ba98f7ecb6b01a0" + strings: + $chunk_1 = { + 68 27 11 68 05 + 8B 45 ?? + 50 + E8 ?? ?? ?? ?? + 83 C4 08 + 89 45 ?? + 68 15 B1 B3 09 + 8B 4D ?? + 51 + E8 ?? ?? ?? ?? + 83 C4 08 + 89 45 ?? + 68 B5 96 AA 0D + 8B 55 ?? + 52 + E8 ?? ?? ?? ?? + 83 C4 08 + 89 45 ?? + 6A 00 + FF 15 ?? ?? ?? ?? + } + + condition: + $chunk_1 + +} diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 4be2af810bc..1422e550b62 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -5,10 +5,11 @@ rule Lumma description = "Lumma Payload" cape_type = "Lumma Payload" packed = "0ee580f0127b821f4f1e7c032cf76475df9724a9fade2e153a69849f652045f8" + packed = "23ff1c20b16d9afaf1ce443784fc9a025434a010e2194de9dec041788c369887" strings: - $c2 = {8D 44 24 ?? 50 89 4C 24 ?? FF 31 E8 [4] 83 C4 08 B8 FF FF FF FF} - $peb = {8B 44 24 04 85 C0 74 13 64 8B 0D 30 00 00 00 50 6A 00 FF 71 18 FF 15} - $remap = {C6 44 24 20 00 C7 44 24 1C C2 00 00 90 C7 44 24 18 00 00 FF D2 C7 44 24 14 00 BA 00 00 C7 44 24 10 B8 00 00 00 8B ?? 89 44 24 11} + $decode1 = {C1 (E9|EA) 02 [0-3] 0F B6 (44|4C) ?? FF 83 (F8|F9) 3D 74 05 83 (F8|F9) 2E 75 01 (49|4A) [0-30] 2E 75} + $decode2 = {B0 40 C3 B0 3F C3 89 C8 04 D0 3C 09 77 06 80 C1 04 89 C8 C3 89 C8 04 BF 3C} + $decode3 = {B0 40 C3 B0 3F C3 80 F9 30 72 ?? 
80 F9 39 77 06 80 C1 04 89 C8 C3} condition: uint16(0) == 0x5a4d and any of them } diff --git a/data/yara/CAPE/NitrogenLoader.yar b/data/yara/CAPE/NitrogenLoader.yar new file mode 100644 index 00000000000..1939fc68e00 --- /dev/null +++ b/data/yara/CAPE/NitrogenLoader.yar @@ -0,0 +1,18 @@ +rule NitrogenLoader +{ + meta: + author = "enzok" + description = "Nitrogen Loader" + cape_type = "NitrogenLoader Loader" + hash = "7b603d63a23201ff0b6ffa9acdd650df9caa1731837d559d93b3d8ce1d82a962" + strings: + $aes1 = {63 7c 77 7b f2 6b 6f c5 30 01 67 2b fe d7 ab 76 ca 82 c9 7d fa} + $aes2 = {52 09 6a d5 30 36 a5 38 bf 40 a3 9e 81 f3 d7 fb 7c e3 39 82 9b} + $string1 = "BASS_GetEAXParameters" + $string2 = "LoadResource" + $syscallmakehashes = {48 89 4C 24 ?? 48 89 54 24 ?? 4? 89 44 24 ?? 4? 89 4C 24 ?? 4? 83 EC ?? B? [4] E8 [3] 00} + $syscallnumber = {49 89 C3 B? [4] E8 [3] 00} + $syscall = {48 83 C4 ?? 4? 8B 4C 24 ?? 4? 8B 54 24 ?? 4? 8B 44 24 ?? 4? 8B 4C 24 ?? 4? 89 CA 4? FF E3} + condition: + all of ($aes*) and all of ($string*) and any of ($syscall*) +} diff --git a/data/yara/CAPE/QuasarRAT.yar b/data/yara/CAPE/QuasarRAT.yar new file mode 100644 index 00000000000..8877430d23c --- /dev/null +++ b/data/yara/CAPE/QuasarRAT.yar @@ -0,0 +1,43 @@ +rule QuasarRAT { + meta: + author = "ditekshen" + description = "QuasarRAT payload" + cape_type = "QuasarRAT Payload" + strings: + $s1 = "GetKeyloggerLogsResponse" fullword ascii + $s2 = "GetKeyloggerLogs" fullword ascii + $s3 = "/>Log created on" wide + $s4 = "User: {0}{3}Pass: {1}{3}Host: {2}" wide + $s5 = "Domain: {1}{0}Cookie Name: {2}{0}Value: {3}{0}Path: {4}{0}Expired: {5}{0}HttpOnly: {6}{0}Secure: {7}" wide + $s6 = "grabber_" wide + $s7 = "" ascii + $s8 = "k__BackingField" fullword ascii + $s9 = "" ascii + $s10 = "add_OnHotKeysDown" ascii + $mutex = "QSR_MUTEX_" ascii wide + $ua1 = "Mozilla/5.0 (Windows NT 6.3; rv:48.0) Gecko/20100101 Firefox/48.0" fullword wide + $us2 = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) 
AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A" fullword wide + condition: + uint16(0) == 0x5a4d and ($mutex or (all of ($ua*) and 2 of them) or 6 of ($s*)) +} + +rule quasarrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "QuasarRAT Payload" + strings: + $str_quasar = "Quasar." wide ascii + $str_hidden = "set_Hidden" wide ascii + $str_shell = "DoShellExecuteResponse" wide ascii + $str_close = "echo DONT CLOSE THIS WINDOW!" wide ascii + $str_pause = "ping -n 10 localhost > nul" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 25 } + $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } + $byte_special_folder = { 7e 73 [4] 28 [4] 80 } + $patt_config = { 72 [3] 70 80 [3] 04 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + + condition: + 6 of them and #patt_config >= 10 +} diff --git a/data/yara/CAPE/XWorm.yar b/data/yara/CAPE/XWorm.yar new file mode 100644 index 00000000000..76e401a3e47 --- /dev/null +++ b/data/yara/CAPE/XWorm.yar @@ -0,0 +1,46 @@ +rule XWorm { + meta: + author = "ditekSHen" + description = "Detects XWorm" + cape_type = "XWorm Payload" + strings: + $x1 = "XWorm " wide nocase + $x2 = /XWorm\s(V|v)\d+\.\d+/ fullword wide + $s1 = "RunBotKiller" fullword wide + $s2 = "XKlog.txt" fullword wide + $s3 = /(shell|reg)fuc/ fullword wide + $s4 = "closeshell" fullword ascii + $s5 = { 62 00 79 00 70 00 73 00 73 00 00 ?? 63 00 61 00 6c 00 6c 00 75 00 61 00 63 00 00 ?? 73 00 63 00 } + $s6 = { 44 00 44 00 6f 00 73 00 54 00 00 ?? 43 00 69 00 6c 00 70 00 70 00 65 00 72 00 00 ?? 50 00 45 00 } + $s7 = { 69 00 6e 00 6a 00 52 00 75 00 6e 00 00 ?? 73 00 74 00 61 00 72 00 74 00 75 00 73 00 62 } + $s8 = { 48 6f 73 74 00 50 6f 72 74 00 75 70 6c 6f 61 64 65 72 00 6e 61 6d 65 65 65 00 4b 45 59 00 53 50 4c 00 4d 75 74 65 78 78 00 } + $v2_1 = "PING!" 
fullword wide + $v2_2 = "Urlhide" fullword wide + $v2_3 = /PC(Restart|Shutdown)/ fullword wide + $v2_4 = /(Start|Stop)(DDos|Report)/ fullword wide + $v2_5 = /Offline(Get|Keylogger)/ wide + $v2_6 = "injRun" fullword wide + $v2_7 = "Xchat" fullword wide + $v2_8 = "UACFunc" fullword ascii wide + condition: + uint16(0) == 0x5a4d and ((1 of ($x*) and (3 of ($s*) or 3 of ($v2*))) or 6 of them) +} + +rule xworm_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "XWorm payload" + strings: + $str_xworm = "xworm" wide ascii nocase + $str_xwormmm = "Xwormmm" wide ascii + $str_xclient = "XClient" wide ascii + $str_default_log = "\\Log.tmp" wide ascii + $str_create_proc = "/create /f /RL HIGHEST /sc minute /mo 1 /t" wide ascii + $str_ddos_start = "StartDDos" wide ascii + $str_ddos_stop = "StopDDos" wide ascii + $str_timeout = "timeout 3 > NUL" wide ascii + $byte_md5_hash = { 7e [3] 04 28 [3] 06 6f } + $patt_config = { 72 [3] 70 80 [3] 04 } + condition: + 5 of them and #patt_config >= 7 + } diff --git a/data/yara/CAPE/XenoRAT.yar b/data/yara/CAPE/XenoRAT.yar new file mode 100644 index 00000000000..4208eb54c91 --- /dev/null +++ b/data/yara/CAPE/XenoRAT.yar @@ -0,0 +1,14 @@ +rule XenoRAT { + meta: + author = "jeFF0Falltrades" + cape_type = "XenoRAT payload" + strings: + $str_xeno_rat_1 = "xeno rat" wide ascii nocase + $str_xeno_rat_2 = "xeno_rat" wide ascii nocase + $str_xeno_update_mgr = "XenoUpdateManager" wide ascii + $str_nothingset = "nothingset" wide ascii + $byte_enc_dec_pre = { 1f 10 8d [4] (0a | 0b) } + $patt_config = { 72 [3] 70 80 [3] 04 } + condition: + 4 of them and #patt_config >= 5 + } diff --git a/docs/book/src/customization/auxiliary.rst b/docs/book/src/customization/auxiliary.rst index e858f4c8ba8..48b98801f7a 100644 --- a/docs/book/src/customization/auxiliary.rst +++ b/docs/book/src/customization/auxiliary.rst @@ -27,3 +27,52 @@ very end of the analysis process, before launching the processing and reporting For example, an auxiliary module provided by 
default in CAPE is called *sniffer.py* and takes care of executing **tcpdump** in order to dump the generated network traffic. + +Auxiliary Module Configuration +============================== + +Auxiliary modules can be "configured" before being started. This allows data to be added +at runtime, whilst also allowing for the configuration to be stored separately from the +CAPE python code. + +Private Auxiliary Module Configuration +-------------------------------------- + +Private auxiliary module configuration is stored outside the auxiliary class, in a module +under the same name as the auxiliary module. This is useful when managing configuration +of auxiliary modules separately if desired, for privacy reasons or otherwise. + +Here is a configuration module example that installs some software prior to the auxiliary +module starting: + + .. code-block:: python + :linenos: + + # data/auxiliary/example.py + import subprocess + import logging + from pathlib import Path + + log = logging.getLogger(__name__) + BIN_PATH = Path.cwd() / "bin" + + + def configure(aux_instance): + # here "example" refers to modules.auxiliary.example.Example + if not aux_instance.enabled: + return + msi = aux_instance.options.get("example_msi") + if not msi: + return + msi_path = BIN_PATH / msi + if not msi_path.exists(): + log.warning("missing MSI %s", msi_path) + return + cmd = ["msiexec", "/i", msi_path, "/quiet"] + try: + log.info("Executing msi package...") + subprocess.check_output(cmd) + log.info("Installation successful") + except subprocess.CalledProcessError as exc: + log.error("Installation failed: %s", exc) + return diff --git a/docs/book/src/usage/packages.rst b/docs/book/src/usage/packages.rst index 83eb2efbaa3..bf622f1d7d7 100644 --- a/docs/book/src/usage/packages.rst +++ b/docs/book/src/usage/packages.rst @@ -30,7 +30,7 @@ The following is a list of the existing packages in alphabetical order: prior to executing files of interest within in the extracted folder.
**Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. + * ``arguments``: specify arguments to pass to the DLL through commandline. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. @@ -163,7 +163,6 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``offset``: specify the offset to run with the 64-bit CAPE loader. - * ``shellcode-unpacker``: used to run and analyze **Shellcode** via the 32-bit CAPE loader, with unpacking! * ``shellcode``: used to run and analyze **Shellcode** via the 32-bit CAPE loader, with unpacking! **Options**: @@ -173,60 +172,6 @@ The following is a list of the existing packages in alphabetical order: *NB*: You need to have ``flashplayer.exe`` in the analyzer's ``bin`` folder. - * ``unpacker_dll``: used to run and analyze **Dynamically Linked Libraries** via ``flashplayer.exe``, with unpacking! - - *NB*: You need to have ``flashplayer.exe`` in the analyzer's ``bin`` folder. - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. - * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``function``: specify the function to be executed. If none is specified, CAPE will try to run all available functions, - up to the limit found in the `max_dll_exports` task option. - - * ``unpacker_js``: used to run and analyze **JavaScript and JScript Encoded files** via ``wscript.exe``, with unpacking! 
- * ``unpacker_ps1``: used to run and analyze **PowerShell scripts** via ``powershell.exe``, with unpacking! - * ``unpacker_regsvr``: used to run and analyze **Dynamically Linked Libraries** via ``regsvr.exe``, with unpacking! - - **Options**: - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - - * ``unpacker_zip``: used to run and analyze **Zip archives** via the zipfile Python package, and runs an executable file (if it exists), with ``cmd.exe``. Also unpacking! - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. - * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. - * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. - * ``password``: specify the password of the archive. If none is specified, CAPE will try to extract the archive without password or use the password "*infected*". - - * ``unpacker``: used to run and analyze generic **Windows executables**, with unpacking! - - **Options**: - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - - * ``upx_dll``: used to run and analyze **Dynamically Linked Libraries** packed with **Ultimate Packer for eXecutables**. - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. - * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``function``: specify the function to be executed. 
If none is specified, CAPE will try to run all available functions, - up to the limit found in the `max_dll_exports` task option. - - * ``upx``: used to run and analyze generic **Windows executables** packed with **Ultimate Packer for eXecutables**. - - **Options**: - * ``appdata``: *[yes/no]* if enabled, run the executable from the APPDATA directory. - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - - * ``vawtrak``: used to run and analyze **Vawtrak malware** with ``iexplore.exe``. - - *NB*: https://www.microsoft.com/en-us/wdsi/threats/malware-encyclopedia-description?Name=Backdoor:Win32/Vawtrak.A - - **Options**: - * ``appdata``: *[yes/no]* if enabled, run the executable from the APPDATA directory. - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - * ``runasx86``: *[yes/no]* if enabled, run ``CorFlags.exe`` with ``/32bit+`` prior to execution. - * ``vbejse``: used to run and analyze **VBScript Encoded and JScript Encoded files** via ``wscript.exe``. * ``vbs``: used to run and analyze **VBScript and VBScript Encoded files** via ``wscript.exe``. * ``wsf``: used to run and analyze **Windows Script Files** via ``wscript.exe``. @@ -251,7 +196,7 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``appdata``: *[yes/no]* if enabled, create custom folders in the APPDATA directory. - * ``arguments``: specify arguments to pass to the DLL through commandline. + * ``arguments``: specify arguments to pass to the DLL through commandline. * ``curdir``: specify the directory to create custom folders. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. 
If none is specified, a ``__configuration.json`` file must be present in the zip file. @@ -262,7 +207,7 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``appdata``: *[yes/no]* if enabled, create custom folders in the APPDATA directory. - * ``arguments``: specify arguments to pass to the DLL through commandline. + * ``arguments``: specify arguments to pass to the DLL through commandline. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. diff --git a/extra/browser_extension/README.md b/extra/browser_extension/README.md index 697e43b9bb2..869b5013c25 100644 --- a/extra/browser_extension/README.md +++ b/extra/browser_extension/README.md @@ -26,3 +26,14 @@ permissions back. Then, the extension is permantently loaded. Tested on version The default path for the `chromium_ext` package is %LOCALAPPDATA%/Chromium/chrome.exe, change the path in .py if needed. + +==== TOR Browser ==== +Follow the same steps as FIREFOX. By default TOR browser always starts in a +Private Tab, allow the extension to run in Private Tabs by default. Because TOR +joins the TOR network, it won't see localhost and instead calls the browser +download API to save requests. + +Set the default downloads directory to %temp% for the auxiliary module to find +the .JSON file. After setting the saving path to %temp%, below untick "Always +ask you where to save files" so that the extension is able to call the +`browser.download` API. 
diff --git a/extra/browser_extension/background.js b/extra/browser_extension/background.js index e814b9692c3..64ff7f7ea92 100644 --- a/extra/browser_extension/background.js +++ b/extra/browser_extension/background.js @@ -1,6 +1,29 @@ +let isTORBrowser = false; let networkData = []; +let downloadTORPath = "bext_default.json"; + +function generateRandomFilename() { + const asciiLetters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; + let filename = 'bext_'; + for (let i = 0; i < 10; i++) { + filename += asciiLetters.charAt(Math.floor(Math.random() * asciiLetters.length)); + } + filename += '.json'; + return filename; +} +function storeNetworkData() { + const blob = new Blob([JSON.stringify(networkData, null, 2)], {type: "application/json"}); + const url = URL.createObjectURL(blob); + + browser.downloads.download({ + url: url, + filename: downloadTORPath, + conflictAction: 'overwrite' + }); +} + function onRequestEvent(details) { if (details.url.includes("/browser_extension")) { return; @@ -28,7 +51,11 @@ function onResponseEvent(details) { requestEvent.type = details.type; requestEvent.ip = details.ip; requestEvent.originUrl = details.originUrl; - sendEvents(); + if (isTORBrowser) { + storeNetworkData(); + } else { + sendEvents() + } } } @@ -73,4 +100,11 @@ browser.downloads.onCreated.addListener(function(downloadItem) { browser.runtime.onStartup.addListener(function () { networkData = []; +}); + +browser.runtime.getBrowserInfo().then((bInfo) => { + if (bInfo.vendor === "Tor Project") { + isTORBrowser = true; + downloadTORPath = generateRandomFilename(); + } }); \ No newline at end of file diff --git a/extra/yara_installer.sh b/extra/yara_installer.sh index 9b0d29d794c..ca5909bcfed 100755 --- a/extra/yara_installer.sh +++ b/extra/yara_installer.sh @@ -9,10 +9,12 @@ if [ ! 
-d /tmp/yara-python ]; then fi cd /tmp/yara-python -python setup.py build --enable-cuckoo --enable-magic --enable-profiling + +poetry --directory /opt/CAPEv2 run python setup.py build --enable-cuckoo --enable-magic --enable-profiling +poetry --directory /opt/CAPEv2 run pip install . + cd .. -# for root -pip install ./yara-python + if [ -d yara-python ]; then - rm -r yara-python -fi + rm -rf yara-python +fi \ No newline at end of file diff --git a/installer/cape2.sh b/installer/cape2.sh index d89faff9ab8..f514aa899a4 100644 --- a/installer/cape2.sh +++ b/installer/cape2.sh @@ -93,7 +93,7 @@ cat << EndOfHelp * This ISN'T a silver bullet, we can't control all changes in all third part software, you are welcome to report updates - Usage: $0 | tee $0.log + Usage: $0 [options] | tee $0.log Example: $0 all 192.168.1.1 | tee $0.log Commands - are case insensitive: Base - Installs dependencies, CAPE, systemd, see code for full list @@ -105,7 +105,9 @@ cat << EndOfHelp LetsEncrypt - Install LetsEncrypt for your site, pass your domain as argument Suricata - Install latest suricata with performance boost PostgreSQL - Install latest PostgresSQL + PostgreSQL_Utility - Install pg_activity Yara - Install latest yara + Yara-x - Install latest yara-x Volatility3 - Install Volatility3 and windows symbols Mongo - Install latest mongodb LetsEncrypt - Install dependencies and retrieves certificate @@ -127,7 +129,9 @@ cat << EndOfHelp osslsigncode - Linux alternative to Windows signtool.exe modsecurity - install Nginx ModSecurity plugin Issues - show some known possible bugs/solutions - + Options: + --disable-mongodb-avx-check - Disable check of AVX CPU feature for MongoDB + --disable-libvirt - Disable libvirt related packages installation Useful links - THEY CAN BE OUTDATED; RTFM!!! 
* https://cuckoo.sh/docs/introduction/index.html * https://medium.com/@seifreed/how-to-deploy-cuckoo-sandbox-431a6e65b848 @@ -138,6 +142,7 @@ EndOfHelp } function install_crowdsecurity() { + echo "[+] Install crowdsecurity" sudo apt-get install bash gettext whiptail curl wget cd /tmp || return if [ ! -d crowdsec-release.tgz ]; then @@ -160,6 +165,7 @@ function install_crowdsecurity() { } function install_docker() { + echo "[+] Install docker" # https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04 sudo apt-get install apt-transport-https ca-certificates curl software-properties-common @@ -246,6 +252,7 @@ function librenms_snmpd_config() { } function install_librenms() { + echo "[+] Install librenms" if [ "$librenms_enable" -ge 1 ]; then echo "Enabling stuff for LibreNMS" apt-get install -y zlib1g-dev cpanminus libjson-perl libfile-readbackwards-perl \ @@ -285,6 +292,7 @@ function install_librenms() { } function install_modsecurity() { + echo "[+] Install modsecurity" # Tested on nginx 1.(16|18).X Based on https://www.nginx.com/blog/compiling-and-installing-modsecurity-for-open-source-nginx/ with fixes apt-get install -y apt-utils autoconf automake build-essential git libcurl4-openssl-dev libgeoip-dev liblmdb-dev libpcre++-dev libtool libxml2-dev libyajl-dev pkgconf wget zlib1g-dev git clone --depth 1 -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity @@ -332,7 +340,7 @@ function install_modsecurity() { } function install_nginx() { - + echo "[+] Install nginx" if [ ! 
-d nginx-$nginx_version ]; then wget http://nginx.org/download/nginx-$nginx_version.tar.gz wget http://nginx.org/download/nginx-$nginx_version.tar.gz.asc @@ -560,6 +568,7 @@ fi } function install_letsencrypt(){ + echo "[+] Install and configure letsencrypt" sudo add-apt-repository ppa:certbot/certbot -y sudo apt-get update sudo apt-get install python3-certbot-nginx -y @@ -568,6 +577,7 @@ function install_letsencrypt(){ } function install_fail2ban() { + echo "[+] Installing fail2ban" sudo apt-get install fail2ban -y sudo cp /etc/fail2ban/jail.conf /etc/fail2ban/jail.local sudo sed -i /etc/fail2ban/jail.local @@ -578,6 +588,7 @@ function install_fail2ban() { } function install_logrotate() { + echo "[+] Installing logrotate" # du -sh /var/log/* | sort -hr | head -n10 # thanks digitalocean.com for the manual # https://www.digitalocean.com/community/tutorials/how-to-manage-logfiles-with-logrotate-on-ubuntu-16-04 @@ -601,6 +612,7 @@ EOF } function redsocks2() { + echo "[+] Installing redsocks2" cd /tmp || return sudo apt-get install -y git libevent-dev libreadline-dev zlib1g-dev libncurses5-dev libssl1.0-dev libssl-dev git clone https://github.com/semigodking/redsocks redsocks2 && cd redsocks2 || return @@ -609,6 +621,7 @@ function redsocks2() { } function distributed() { + echo "[+] Configure distributed configuration" sudo apt-get install uwsgi uwsgi-plugin-python3 nginx -y 2>/dev/null sudo -u ${USER} bash -c 'poetry run pip install flask flask-restful flask-sqlalchemy requests' @@ -655,20 +668,25 @@ EOL function install_suricata() { echo '[+] Installing Suricata' add-apt-repository ppa:oisf/suricata-stable -y - apt-get install suricata -y + apt-get install suricata suricata-update -y touch /etc/suricata/threshold.config # Download etupdate to update Emerging Threats Open IDS rules: - pip3 install suricata-update mkdir -p "/etc/suricata/rules" if ! 
crontab -l | grep -q -F '15 * * * * /usr/bin/suricata-update'; then crontab -l | { cat; echo "15 * * * * /usr/bin/suricata-update --suricata /usr/bin/suricata --suricata-conf /etc/suricata/suricata.yaml -o /etc/suricata/rules/ && /usr/bin/suricatasc -c reload-rules /tmp/suricata-command.socket &>/dev/null"; } | crontab - fi if [ -d /usr/share/suricata/rules/ ]; then - cp "/usr/share/suricata/rules/"* "/etc/suricata/rules/" + #ย copy files if rules folder contains files + if [ "$(ls -A /var/lib/suricata/rules/)" ]; then + cp "/usr/share/suricata/rules/"* "/etc/suricata/rules/" + fi fi if [ -d /var/lib/suricata/rules/ ]; then - cp "/var/lib/suricata/rules/"* "/etc/suricata/rules/" + #ย copy files if rules folder contains files + if [ "$(ls -A /var/lib/suricata/rules/)" ]; then + cp "/var/lib/suricata/rules/"* "/etc/suricata/rules/" + fi fi # ToDo this is not the best solution but i don't have time now to investigate proper one @@ -710,14 +728,18 @@ function install_suricata() { systemctl restart suricata } -function insall_yara_x() { - curl https://sh.rustup.rs -sSf | sh +function install_yara_x() { + echo '[+] Installing Yara-X' + sudo -u ${USER} bash -c 'curl https://sh.rustup.rs -sSf | sh' cd /tmp || return - git clone https://github.com/VirusTotal/yara-x + # if yara-x exists from previous install remove it + if [ -d yara-x ]; then + sudo rm -rf yara-x + fi + sudo -u ${USER} git clone https://github.com/VirusTotal/yara-x cd yara-x || return - source "$HOME/.cargo/env" - cargo install --path cli - pip3 install yara-x + sudo -u ${USER} bash -c 'source "$HOME/.cargo/env" ; cargo install --path cli' + poetry --directory /opt/CAPEv2/ run pip install yara-x } function install_yara() { @@ -734,7 +756,7 @@ function install_yara() { yara_repo_url=$(echo "$yara_info" | jq ".zipball_url" | sed "s/\"//g") if [ ! 
-f "$yara_version" ]; then wget -q "$yara_repo_url" - unzip -q "$yara_version" + unzip -o -q "$yara_version" #wget "https://github.com/VirusTotal/yara/archive/v$yara_version.zip" && unzip "v$yara_version.zip" fi directory=$(ls | grep "VirusTotal-yara-*") @@ -751,29 +773,9 @@ function install_yara() { #checkinstall -D --pkgname="yara-$yara_version" --pkgversion="$yara_version_only" --default ldconfig - cd /tmp || return - git clone --recursive https://github.com/VirusTotal/yara-python - cd yara-python - # checkout tag v4.2.3 to work around broken master branch - # git checkout tags/v4.2.3 - # sometimes it requires to have a copy of YARA inside of yara-python for proper compilation - # git clone --recursive https://github.com/VirusTotal/yara - # Temp workarond to fix issues compiling yara-python https://github.com/VirusTotal/yara-python/issues/212 - # partially applying PR https://github.com/VirusTotal/yara-python/pull/210/files - # sed -i "191 i \ \ \ \ # Needed to build tlsh'\n module.define_macros.extend([('BUCKETS_128', 1), ('CHECKSUM_1B', 1)])\n # Needed to build authenticode parser\n module.libraries.append('ssl')" setup.py - python3 setup.py build --enable-cuckoo --enable-magic --enable-profiling - cd .. 
- # for root - pip3 install ./yara-python - if [ -d yara-python ]; then - sudo rm -rf yara-python - fi + # Run yara installer script + sudo -u ${USER} poetry --directory /opt/CAPEv2 run /opt/CAPEv2/extra/yara_installer.sh - if id "cape" >/dev/null 2>&1; then - cd /opt/CAPEv2/ - sudo -u cape poetry run extra/yara_installer.sh - cd - - fi if [ -d yara-python ]; then sudo rm -rf yara-python fi @@ -784,12 +786,14 @@ function install_mongo(){ if [ "$MONGO_ENABLE" -ge 1 ]; then echo "[+] Installing MongoDB" # Mongo >=5 requires CPU AVX instruction support https://www.mongodb.com/docs/manual/administration/production-notes/#x86_64 - if grep -q ' avx ' /proc/cpuinfo; then - MONGO_VERSION="7.0" - else - echo "[-] Mongo >= 5 is not supported" - MONGO_VERSION="4.4" - fi + + MONGO_VERSION="8.0" + if ! grep -q ' avx ' /proc/cpuinfo; then + if [[ "$DISABLE_MONGO_AVX_CHECK" -eq 0 ]]; then + echo "[-] Mongo >= 5 is not supported" + MONGO_VERSION="4.4" + fi + fi sudo curl -fsSL "https://pgp.mongodb.com/server-${MONGO_VERSION}.asc" | sudo gpg --dearmor -o /etc/apt/keyrings/mongo.gpg --yes echo "deb [signed-by=/etc/apt/keyrings/mongo.gpg arch=amd64] https://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/${MONGO_VERSION} multiverse" > /etc/apt/sources.list.d/mongodb.list @@ -797,7 +801,7 @@ function install_mongo(){ apt-get update 2>/dev/null apt-get install libpcre3-dev numactl cron -y apt-get install -y mongodb-org - pip3 install pymongo -U + pip3 install pymongo -U --break-system-packages apt-get install -y ntp systemctl start ntp.service && sudo systemctl enable ntp.service @@ -850,10 +854,10 @@ EOF systemctl restart mongodb.service if ! 
crontab -l | grep -q -F 'delete-unused-file-data-in-mongo'; then - crontab -l | { cat; echo "30 1 * * 0 cd /opt/CAPEv2 && sudo -u cape poetry run python ./utils/cleaners.py --delete-unused-file-data-in-mongo"; } | crontab - + crontab -l | { cat; echo "30 1 * * 0 cd /opt/CAPEv2 && sudo -u ${USER} poetry run python ./utils/cleaners.py --delete-unused-file-data-in-mongo"; } | crontab - fi - echo -n "https://www.percona.com/blog/2016/08/12/tuning-linux-for-mongodb/" + echo "https://www.percona.com/blog/2016/08/12/tuning-linux-for-mongodb/" else echo "[+] Skipping MongoDB" fi @@ -861,7 +865,7 @@ EOF } function install_elastic() { - + echo "[+] Installing elastic" sudo curl -fsSL "https://artifacts.elastic.co/GPG-KEY-elasticsearch" | sudo gpg --dearmor -o /etc/apt/keyrings/elasticsearch-keyring.gpg --yes # Elasticsearch 7.x @@ -871,7 +875,7 @@ function install_elastic() { # echo "deb [signed-by=/etc/apt/keyrings/elasticsearch-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" > /etc/apt/sources.list.d/elastic-8.x.list apt-get update && apt-get install elasticsearch - pip3 install elasticsearch + pip3 install elasticsearch --break-system-packages systemctl enable elasticsearch } @@ -884,9 +888,6 @@ function install_postgresql() { sudo apt-get update -y sudo apt -y install libpq-dev postgresql postgresql-client - # amazing tool for monitoring https://github.com/dalibo/pg_activity - # sudo -u postgres pg_activity -U postgres - python3 -m pip install pg_activity psycopg2-binary sudo systemctl enable postgresql.service sudo systemctl start postgresql.service @@ -894,6 +895,24 @@ function install_postgresql() { sudo -u postgres -H sh -c "psql -d \"${USER}\" -c \"ALTER DATABASE postgres REFRESH COLLATION VERSION;\"" } +function install_capa() { + echo "[+] Installing capa" + # pip3 install flare-capa fails for me + cd /tmp || return + if [ ! 
-d /tmp/capa ]; then + # problem with test files of dotnet as it goes over ssh insted of https --recurse-submodules + git clone https://github.com/mandiant/capa.git + fi + cd capa || return + git pull + git submodule update --init rules + poetry --directory /opt/CAPEv2/ run pip install . + cd /opt/CAPEv2 + if [ -d /tmp/capa ]; then + sudo rm -rf /tmp/capa + fi +} + function dependencies() { echo "[+] Installing dependencies" @@ -912,10 +931,25 @@ function dependencies() { apt-get install uthash-dev libconfig-dev libarchive-dev libtool autoconf automake privoxy software-properties-common wkhtmltopdf xvfb xfonts-100dpi tcpdump libcap2-bin wireshark-common -y apt-get install python3-pil subversion uwsgi uwsgi-plugin-python3 python3-pyelftools git curl -y apt-get install openvpn wireguard -y + apt-get install crudini -y + # APT poetry is ultra outdated + curl -sSL https://install.python-poetry.org | python3 - + apt-get install locate # used by extra/libvirt_installer.sh # de4dot selfextraction apt-get install -y libgdiplus libdnlib2.1-cil libgif7 libmono-accessibility4.0-cil libmono-ldap4.0-cil libmono-posix4.0-cil libmono-sqlite4.0-cil libmono-system-componentmodel-dataannotations4.0-cil libmono-system-data4.0-cil libmono-system-design4.0-cil libmono-system-drawing4.0-cil libmono-system-enterpriseservices4.0-cil libmono-system-ldap4.0-cil libmono-system-runtime-serialization-formatters-soap4.0-cil libmono-system-runtime4.0-cil libmono-system-transactions4.0-cil libmono-system-web-applicationservices4.0-cil libmono-system-web-services4.0-cil libmono-system-web4.0-cil libmono-system-windows-forms4.0-cil libmono-webbrowser4.0-cil - wget http://archive.ubuntu.com/ubuntu/pool/universe/d/de4dot/de4dot_3.1.41592.3405-2_all.deb && sudo dpkg -i de4dot_3.1.41592.3405-2_all.deb + de4dot_package_name="de4dot_3.1.41592.3405-2_all.deb" + # if not exist download package + if [ ! 
-f $de4dot_package_name ]; then + wget http://archive.ubuntu.com/ubuntu/pool/universe/d/de4dot/$de4dot_package_name + fi + if [ -f $de4dot_package_name ]; then + sudo dpkg -i $de4dot_package_name + sudo rm $de4dot_package_name + else + echo "[-] de4dot package not found" + return + fi # if broken sudo python -m pip uninstall pip && sudo apt-get install python-pip --reinstall #pip3 install --upgrade pip @@ -924,17 +958,6 @@ function dependencies() { # if __name__ == '__main__': # sys.exit(__main__._main()) - # pip3 install flare-capa fails for me - cd /tmp || return - if [ ! -d /tmp/capa ]; then - # problem with test files of dotnet as it goes over ssh insted of https --recurse-submodules - git clone https://github.com/mandiant/capa.git - fi - cd capa || return - git pull - git submodule update --init rules - pip3 install . - # re2 - dead on py3.11 # apt-get install libre2-dev -y #re2 for py3 @@ -969,9 +992,16 @@ function dependencies() { sudo apt-get install gnupg2 -y wget -qO- https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor | sudo tee /usr/share/keyrings/deb.torproject.org-keyring.gpg >/dev/null - echo "deb [arch=amd64 signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg] https://deb.torproject.org/torproject.org $(lsb_release -cs) main" > /etc/apt/sources.list.d/tor.list - echo "deb-src [arch=amd64 signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg] https://deb.torproject.org/torproject.org $(lsb_release -cs) main" >> /etc/apt/sources.list.d/tor.list + # Tor project has no release for Ubuntu noble (24-10-18) + # TODO: Check if it is still the case + if [ "$(lsb_release -cs)" = "noble" ]; then + echo "deb [signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg arch=amd64] https://deb.torproject.org/torproject.org jammy main" > /etc/apt/sources.list.d/tor.list + echo "deb-src [signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg arch=amd64] 
https://deb.torproject.org/torproject.org jammy main" >> /etc/apt/sources.list.d/tor.list + else + echo "deb [signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg arch=amd64] https://deb.torproject.org/torproject.org $(lsb_release -cs) main" > /etc/apt/sources.list.d/tor.list + echo "deb-src [signed-by=/usr/share/keyrings/deb.torproject.org-keyring.gpg arch=amd64] https://deb.torproject.org/torproject.org $(lsb_release -cs) main" >> /etc/apt/sources.list.d/tor.list + fi sudo apt-get update 2>/dev/null sudo systemctl stop tor@default.service && sudo systemctl disable tor@default.service @@ -1043,22 +1073,31 @@ EOF ### PDNS sudo apt-get install git binutils-dev libldns-dev libpcap-dev libdate-simple-perl libdatetime-perl libdbd-mysql-perl -y cd /tmp || return + + # From pevious install + if [ -d /tmp/passivedns ]; then + sudo rm -rf /tmp/passivedns + fi git clone https://github.com/gamelinux/passivedns.git cd passivedns/ || return autoreconf --install ./configure make -j"$(getconf _NPROCESSORS_ONLN)" sudo checkinstall -D --pkgname=passivedns --default - - pip3 install unicorn capstone - + chown ${USER}:${USER} -R /tmp/passivedns/ + sudo -u ${USER} bash -c 'poetry --directory /opt/CAPEv2/ run pip install unicorn capstone' + sudo -u ${USER} bash -c 'cd /tmp/passivedns/ ; poetry --directory /opt/CAPEv2/ run pip install unicorn capstone' sed -i 's/APT::Periodic::Unattended-Upgrade "1";/APT::Periodic::Unattended-Upgrade "0";/g' /etc/apt/apt.conf.d/20auto-upgrades + if [ -d /tmp/passivedns ]; then + sudo rm -rf /tmp/passivedns + fi + } function install_clamav() { - apt-get install clamav clamav-daemon clamav-freshclam clamav-unofficial-sigs -y - pip3 install -U pyclamd + echo "[+] Installing clamav" + apt-get install clamav clamav-daemon clamav-freshclam clamav-unofficial-sigs python3-pyclamd -y cat >> /usr/share/clamav-unofficial-sigs/conf.d/00-clamav-unofficial-sigs.conf << EOF # This file contains user configuration settings for the clamav-unofficial-sigs.sh @@ 
-1190,21 +1229,30 @@ function install_CAPE() { echo "[+] Installing CAPEv2" cd /opt || return - git clone https://github.com/kevoreilly/CAPEv2/ + # if folder CAPEv2 dosn't exist, clone it + if [ ! -d CAPEv2 ]; then + git clone https://github.com/kevoreilly/CAPEv2/ + fi + chown ${USER}:${USER} -R /opt/CAPEv2/ #chown -R root:${USER} /usr/var/malheur/ #chmod -R =rwX,g=rwX,o=X /usr/var/malheur/ # Adapting owner permissions to the ${USER} path folder cd "/opt/CAPEv2/" || return - pip3 install poetry crudini - CRYPTOGRAPHY_DONT_BUILD_RUST=1 sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; poetry install' - sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; poetry run extra/libvirt_installer.sh' + sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; CRYPTOGRAPHY_DONT_BUILD_RUST=1 poetry install' + + if [ "$DISABLE_LIBVIRT" -eq 0 ]; then + sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; poetry run extra/libvirt_installer.sh' + sudo usermod -aG kvm ${USER} + sudo usermod -aG libvirt ${USER} + fi + #packages are needed for build options in extra/yara_installer.sh apt-get install libjansson-dev libmagic1 libmagic-dev -y - sudo -u ${USER} bash -c 'poetry run extra/yara_installer.sh' - sudo rm -rf yara-python + sudo -u ${USER} bash -c 'poetry run /opt/CAPEv2/extra/yara_installer.sh' - sudo usermod -aG kvm ${USER} - sudo usermod -aG libvirt ${USER} + if [ -d /tmp/yara-python ]; then + sudo rm -rf /tmp/yara-python + fi # copy *.conf.default to *.conf so we have all properly updated fields, as we can't ignore old configs in repository for filename in conf/default/*.conf.default; do cp -vf "./$filename" "./$(echo "$filename" | sed -e 's/.default//g' | sed -e 's/default//g')"; done @@ -1240,7 +1288,7 @@ fi } function install_systemd() { - + echo "[+] Installing systemd configuration" cp /opt/CAPEv2/systemd/cape.service 
/lib/systemd/system/cape.service cp /opt/CAPEv2/systemd/cape-processor.service /lib/systemd/system/cape-processor.service cp /opt/CAPEv2/systemd/cape-web.service /lib/systemd/system/cape-web.service @@ -1281,7 +1329,7 @@ EOF function install_prometheus_grafana() { - + echo "[+] Installing prometheus grafana" # install only on master only master wget https://github.com/prometheus/prometheus/releases/download/v"$prometheus_version"/prometheus-"$prometheus_version".linux-amd64.tar.gz && tar xf prometheus-"$prometheus_version".linux-amd64.tar.gz cd prometheus-$prometheus_version.linux-amd6 && ./prometheus --config.file=prometheus.yml & @@ -1301,23 +1349,38 @@ EOL } function install_node_exporter() { + echo "[+] Installing prometheus's node exported" # deploy on all all monitoring servers wget https://github.com/prometheus/node_exporter/releases/download/v"$node_exporter_version"/node_exporter-"$node_exporter_version".linux-amd64.tar.gz && tar xf node_exporter-"$node_exporter_version".linux-amd64.tar.gz cd node_exporter-"$node_exporter_version".linux-amd6 && ./node_exporter & } function install_volatility3() { + echo "[+] Installing volatility3" sudo apt-get install unzip sudo -u ${USER} poetry run pip3 install git+https://github.com/volatilityfoundation/volatility3 vol_path=$(sudo -u ${USER} poetry run python3 -c "import volatility3.plugins;print(volatility3.__file__.replace('__init__.py', 'symbols/'))") cd $vol_path || return wget https://downloads.volatilityfoundation.org/volatility3/symbols/windows.zip -O windows.zip - unzip windows.zip + unzip -o windows.zip rm windows.zip chown "${USER}:${USER}" $vol_path -R } +function install_mitmproxy() { + echo "[+] Installing mitmproxy" + sudo mkdir /opt/mitmproxy + sudo chown ${USER}:${USER} /opt/mitmproxy + cd /opt/mitmproxy + mitmproxy_version=$(curl -s https://api.github.com/repos/mitmproxy/mitmproxy/releases/latest | grep '"tag_name":' | cut -d '"' -f 4 | sed 's/^v//') + wget 
https://downloads.mitmproxy.org/"$mitmproxy_version"/mitmproxy-"$mitmproxy_version"-linux-x86_64.tar.gz -O mitmproxy.tar.gz + tar xvzf mitmproxy.tar.gz + rm mitmproxy.tar.gz + chown "${USER}:${USER}" /opt/mitmproxy -R +} + function install_guacamole() { + echo "[+] Installing guacamole" # Kudos to @Enzok https://github.com/kevoreilly/CAPEv2/pull/1065 # https://guacamole.apache.org/doc/gug/installing-guacamole.html sudo add-apt-repository ppa:remmina-ppa-team/remmina-next-daily @@ -1349,7 +1412,8 @@ function install_guacamole() { sudo dpkg -i --force-overwrite /tmp/guacamole-"${guacamole_version}"_builded.deb sudo ldconfig - pip3 install -U 'Twisted[tls,http2]' + #pip3 install -U 'Twisted[tls,http2]' + sudo apt install python3-twisted -y if [ -f "/etc/systemd/system/guacd.service" ] ; then sudo rm /etc/systemd/system/guacd.service @@ -1382,17 +1446,26 @@ function install_guacamole() { } function install_DIE() { + echo "[+] Installing Detect It Easy" apt-get install libqt5opengl5 libqt5script5 libqt5scripttools5 libqt5sql5 -y wget "https://github.com/horsicq/DIE-engine/releases/download/${DIE_VERSION}/die_${DIE_VERSION}_Ubuntu_${UBUNTU_VERSION}_amd64.deb" -O DIE.deb && dpkg -i DIE.deb } function install_fluentd() { + echo "[+] Installing fluentd" curl -sSO https://dl.google.com/cloudagents/add-logging-agent-repo.sh && sudo bash add-logging-agent-repo.sh sudo apt-get update && sudo apt-get install google-fluentd sudo apt-get install -y google-fluentd-catch-all-config-structured sudo service google-fluentd start && sudo service google-fluentd status } +function install_postgres_pg_activity() { + echo "[+] Installing pg-activity" + # amazing tool for monitoring https://github.com/dalibo/pg_activity + # sudo -u postgres pg_activity -U postgres + apt install pg-activity -y +} + # Doesn't work ${$1,,} COMMAND=$(echo "$1"|tr "{A-Z}" "{a-z}") @@ -1410,6 +1483,20 @@ elif [ $# -eq 0 ]; then exit 1 fi +DISABLE_MONGO_AVX_CHECK=0 +DISABLE_LIBVIRT=0 + +for i in "$@"; do + if [ 
"$i" == "--disable-mongodb-avx-check" ]; then + # Usage: disable AVX check for MongoDB + # Example usecase: Run script in docker container where AVX is not available + DISABLE_MONGO_AVX_CHECK=1 + elif [ "$i" == "--disable-libvirt" ]; then + # Disable libvirt installation + DISABLE_LIBVIRT=1 + fi +done + sandbox_version=$(echo "$sandbox_version"|tr "{A-Z}" "{a-z}") #check if start with root @@ -1422,10 +1509,10 @@ case "$COMMAND" in 'base') dependencies install_mongo - install_suricata install_CAPE install_yara install_systemd + install_suricata install_jemalloc if ! crontab -l | grep -q './smtp_sinkhole.sh'; then crontab -l | { cat; echo "@reboot cd /opt/CAPEv2/utils/ && ./smtp_sinkhole.sh 2>/dev/null"; } | crontab - @@ -1446,11 +1533,12 @@ case "$COMMAND" in install_CAPE install_volatility3 install_mongo - install_suricata install_yara install_systemd + install_suricata install_jemalloc install_logrotate + install_mitmproxy #socksproxies is to start redsocks stuff if [ -f /opt/CAPEv2/socksproxies.sh ]; then crontab -l | { cat; echo "@reboot /opt/CAPEv2/socksproxies.sh"; } | crontab - @@ -1460,7 +1548,7 @@ case "$COMMAND" in fi # Update FLARE CAPA rules once per day if ! 
crontab -l | grep -q 'community.py -waf -cr'; then - crontab -l | { cat; echo "5 0 */1 * * cd /opt/CAPEv2/utils/ && python3 community.py -waf -cr && pip3 install -U flare-capa && systemctl restart cape-processor 2>/dev/null"; } | crontab - + crontab -l | { cat; echo "5 0 */1 * * cd /opt/CAPEv2/utils/ && sudo -u ${USER} poetry --directory /opt/CAPEv2/ run python3 community.py -waf -cr && poetry --directory /opt/CAPEv2/ run pip install -U flare-capa && systemctl restart cape-processor 2>/dev/null"; } | crontab - fi install_librenms if [ "$clamav_enable" -ge 1 ]; then @@ -1473,10 +1561,14 @@ case "$COMMAND" in install_suricata;; 'yara') install_yara;; +'yara-x') + install_yara_x;; 'volatility3') install_volatility3;; 'postgresql') install_postgresql;; +'postgresql_utility') + install_postgres_pg_activity;; 'elastic') install_elastic;; 'sandbox') @@ -1501,6 +1593,8 @@ case "$COMMAND" in librenms_snmpd_config;; 'librenms_sneck_config') librenms_sneck_config;; +'mitmproxy') + install_mitmproxy;; 'issues') issues;; 'nginx') @@ -1530,3 +1624,5 @@ case "$COMMAND" in *) usage;; esac + +echo "[+] cape2.sh - Done" diff --git a/installer/kvm-qemu.sh b/installer/kvm-qemu.sh index 8323e26dab8..d25ac23d046 100644 --- a/installer/kvm-qemu.sh +++ b/installer/kvm-qemu.sh @@ -799,6 +799,7 @@ function replace_qemu_clues_public() { _sed_aux 's/"bochs"/"'"$BOCHS_BLOCK_REPLACER"'"/g' qemu*/block/bochs.c 'BOCHS was not replaced in block/bochs.c' _sed_aux 's/"BOCHS "/"ALASKA"/g' qemu*/include/hw/acpi/aml-build.h 'BOCHS was not replaced in block/bochs.c' _sed_aux 's/Bochs Pseudo/Intel RealTime/g' qemu*/roms/ipxe/src/drivers/net/pnic.c 'Bochs Pseudo was not replaced in roms/ipxe/src/drivers/net/pnic.c' + _sed_aux 's/BXPC/'"$BXPC_REPLACER"'/g' qemu*/include/hw/acpi/aml-build.h 'BXPC was not replaced in include/hw/acpi/aml-build.h' } function replace_seabios_clues_public() { diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py index d1287553b8e..fa42507f698 100644 --- 
a/lib/cuckoo/common/demux.py +++ b/lib/cuckoo/common/demux.py @@ -32,7 +32,7 @@ cuckoo_conf = Config() web_cfg = Config("web") tmp_path = cuckoo_conf.cuckoo.get("tmppath", "/tmp") -linux_enabled = web_cfg.linux.get("enabled", False) +linux_enabled = web_cfg.linux.get("enabled", False) or web_cfg.linux.get("static_only", False) demux_extensions_list = { b".accdr", @@ -162,7 +162,8 @@ def is_valid_package(package: str) -> bool: return any(ptype in package for ptype in VALID_PACKAGES) -def _sf_children(child: sfFile) -> bytes: +# ToDo fix return type +def _sf_children(child: sfFile): # -> bytes: path_to_extract = "" _, ext = os.path.splitext(child.filename) ext = ext.lower() @@ -184,15 +185,17 @@ def _sf_children(child: sfFile) -> bytes: _ = path_write_file(path_to_extract, child.contents) except Exception as e: log.error(e, exc_info=True) - return path_to_extract.encode() + return (path_to_extract.encode(), child.platform, child.get_type(), child.get_size()) -def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> List[bytes]: +# ToDo fix typing need to add str as error msg +def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True): # -> List[bytes]: retlist = [] # do not extract from .bin (downloaded from us) if os.path.splitext(filename)[1] == b".bin": - return retlist + return retlist, "" + # ToDo need to introduce error msgs here try: password = options2passwd(options) or "infected" try: @@ -201,9 +204,13 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> unpacked = unpack(filename, check_shellcode=check_shellcode) if unpacked.package in whitelist_extensions: - return [filename] + file = File(filename) + magic_type = file.get_type() + platform = file.get_platform() + file_size = file.get_size() + return [[filename, platform, magic_type, file_size]], "" if unpacked.package in blacklist_extensions: - return [filename] + return [], "blacklisted package" for sf_child in unpacked.children: 
if sf_child.to_dict().get("children"): retlist.extend(_sf_children(ch) for ch in sf_child.children) @@ -214,7 +221,7 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> retlist.append(_sf_children(sf_child)) except Exception as e: log.error(e, exc_info=True) - return list(filter(None, retlist)) + return list(filter(None, retlist)), "" def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = True, platform: str = ""): # -> tuple[bytes, str]: @@ -227,10 +234,10 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = if isinstance(filename, str) and use_sflock: filename = filename.encode() + error_list = [] retlist = [] # if a package was specified, trim if allowed and required if package: - if package in ("msix",): retlist.append((filename, "windows")) else: @@ -241,7 +248,15 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = else: if web_cfg.general.enable_trim and trim_file(filename): retlist.append((trimmed_path(filename), platform)) - return retlist + else: + error_list.append( + { + os.path.basename( + filename + ): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" + } + ) + return retlist, error_list # handle quarantine files tmp_path = unquarantine(filename) @@ -259,9 +274,16 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = if use_sflock: if HAS_SFLOCK: retlist = demux_office(filename, password, platform) - return retlist + return retlist, error_list else: log.error("Detected password protected office file, but no sflock is installed: poetry install") + error_list.append( + { + os.path.basename( + filename + ): "Detected password protected office file, but no sflock is installed or correct password provided" + } + ) # don't try to extract from Java archives or executables if ( @@ -279,7 +301,14 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: 
bool = else: if web_cfg.general.enable_trim and trim_file(filename): retlist.append((trimmed_path(filename), platform)) - return retlist + else: + error_list.append( + { + os.path.basename(filename), + "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + } + ) + return retlist, error_list new_retlist = [] @@ -288,26 +317,33 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = check_shellcode = False # all in one unarchiver - retlist = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else [] + retlist, error_msg = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else ([], "") # if it isn't a ZIP or an email, or we aren't able to obtain anything interesting from either, then just submit the # original file if not retlist: + if error_msg: + error_list.append({os.path.basename(filename), error_msg}) new_retlist.append((filename, platform)) else: - for filename in retlist: + for filename, platform, magic_type, file_size in retlist: # verify not Windows binaries here: - file = File(filename) - magic_type = file.get_type() - platform = file.get_platform() if platform == "linux" and not linux_enabled and "Python" not in magic_type: + error_list.append({os.path.basename(filename): "Linux processing is disabled"}) continue - if file.get_size() > web_cfg.general.max_sample_size and not ( - web_cfg.general.allow_ignore_size and "ignore_size_check" in options - ): - if web_cfg.general.enable_trim: - # maybe identify here - if trim_file(filename): - filename = trimmed_path(filename) + if file_size > web_cfg.general.max_sample_size: + if web_cfg.general.allow_ignore_size and "ignore_size_check" in options: + if web_cfg.general.enable_trim: + # maybe identify here + if trim_file(filename): + filename = trimmed_path(filename) + else: + error_list.append( + { + os.path.basename(filename), + "File too big, enable 'allow_ignore_size' in web.conf or use 
'ignore_size_check' option", + } + ) new_retlist.append((filename, platform)) - return new_retlist[:10] + + return new_retlist[:10], error_list diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py index 25555f87359..b49d5d31415 100644 --- a/lib/cuckoo/common/integrations/file_extra_info.py +++ b/lib/cuckoo/common/integrations/file_extra_info.py @@ -112,9 +112,6 @@ HAVE_BAT_DECODER = False print("OPTIONAL! Missed dependency: poetry run pip install -U git+https://github.com/DissectMalware/batch_deobfuscator") -processing_conf = Config("processing") -selfextract_conf = Config("selfextract") - unautoit_binary = os.path.join(CUCKOO_ROOT, selfextract_conf.UnAutoIt_extract.binary) if processing_conf.trid.enabled: @@ -576,7 +573,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet if file.endswith("_Slayed"): return - if all("Eziriz .NET Reactor" not in string for string in data_dictionary.get("die", [])): + if all(".NET Reactor" not in string for string in data_dictionary.get("die", [])): return binary = shlex.split(selfextract_conf.eziriz_deobfuscate.binary.strip())[0] @@ -587,7 +584,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet if not path_exists(binary): log.error( - "Missing dependency: Download from https://github.com/SychicBoy/NETReactorSlayer/releases and place under %s.", + "Missing dependency: Download from https://github.com/otavepto/NETReactorSlayer/releases and place under %s.", binary, ) return @@ -820,6 +817,9 @@ def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: ): return + if all([pattern in file_data for pattern in (b"AndroidManifest.xml", b"classes.dex")]): + return + password = "" # Only for real 7zip, breaks others password = options.get("password", "infected") diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py 
b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py new file mode 100644 index 00000000000..540a97d2e22 --- /dev/null +++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py @@ -0,0 +1,41 @@ +import logging +import os + +from lib.cuckoo.common.integrations.file_extra_info_modules import ( + ExtractorReturnType, + collect_extracted_filenames, + extractor_ctx, + time_tracker, +) +from lib.cuckoo.common.path_utils import path_write_file + +# from base64 import b64encode + + +log = logging.getLogger(__name__) + + +@time_tracker +def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType: + + if not data_dictionary.get("pe", {}).get("overlay"): + return {} + + data = "" + overlay_size = int(data_dictionary["pe"]["overlay"]["size"], 16) + # Extract out the overlay data + try: + with open(file, "rb") as f: + f.seek(-overlay_size, os.SEEK_END) + data = f.read() + # data_dictionary["pe"]["overlay"]["data"] = b64encode(data[: min(overlay_size, 4096)]) + except Exception as e: + log.error(e) + + with extractor_ctx(file, "overlay", prefix="overlay") as ctx: + if data: + tempdir = ctx["tempdir"] + # You might need to change this 2 lines. 
See other examples in `file_extra_info.py` + _ = path_write_file(os.path.join(tempdir, "overlay"), data) + ctx["extracted_files"] = collect_extracted_filenames(tempdir) + return ctx diff --git a/lib/cuckoo/common/integrations/floss.py b/lib/cuckoo/common/integrations/floss.py index f146ff7e9c9..5bc55331822 100644 --- a/lib/cuckoo/common/integrations/floss.py +++ b/lib/cuckoo/common/integrations/floss.py @@ -48,9 +48,9 @@ def run(self): try: if not fm.is_supported_file_type(Path(self.file_path)): - if self.package == "Shellcode": + if self.package == "shellcode": fileformat = "sc32" - elif self.package == "Shellcode_x64": + elif self.package == "shellcode_x64": fileformat = "sc64" else: return results diff --git a/lib/cuckoo/common/utils.py b/lib/cuckoo/common/utils.py index ab5be1d231a..829aa5006a4 100644 --- a/lib/cuckoo/common/utils.py +++ b/lib/cuckoo/common/utils.py @@ -342,6 +342,18 @@ def convert_to_printable_and_truncate(s: str, buf: int, cache=None): return convert_to_printable(f"{s[:buf]} " if len(s) > buf else s, cache=cache) +def truncate_str(s: str, max_length: int, marker=" "): + """Truncate a string if its length exceeds the configured `max_length`. + + If `max_length` is less than or equal to 0, the string is not modified. + If the string is truncated, `marker` is added to the end.""" + truncate_size = min(max_length, len(s)) + if truncate_size > 0 and truncate_size < len(s): + return f"{s[:truncate_size]}{marker}" + else: + return s + + def convert_filename_char(c): """Escapes filename characters. @param c: dirty char. 
diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index 4b0de852f48..eb10064548c 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -766,7 +766,7 @@ def download_file(**kwargs): if not onesuccess: return "error", {"error": f"Provided hash not found on {kwargs['service']}"} - return "ok", kwargs["task_ids"] + return "ok", {"task_ids": kwargs["task_ids"], "errors": extra_details.get("errors", [])} def save_script_to_storage(task_ids, kwargs): @@ -1324,15 +1324,19 @@ def thirdpart_aux(samples, prefix, opt_filename, details, settings): if content: details["content"] = content + errors = {} if not details.get("content", False): - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) else: details["service"] = "Local" - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({h: task_ids_tmp}) + details["errors"].append({h: tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids", []) + errors = tasks_details.get("errors") + if errors: + details["errors"].extend(errors) return details diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py index 8a8c5993b68..7f88487641f 100644 --- a/lib/cuckoo/core/database.py +++ b/lib/cuckoo/core/database.py @@ -103,8 +103,8 @@ "msbuild", "sct", "xslt", - "Shellcode", - "Shellcode_x64", + "shellcode", + "shellcode_x64", "generic", "iso", "vhd", @@ -119,6 +119,7 @@ distconf = Config("distributed") web_conf = Config("web") LINUX_ENABLED = web_conf.linux.enabled +LINUX_STATIC = web_conf.linux.static_only DYNAMIC_ARCH_DETERMINATION = web_conf.general.dynamic_arch_determination if repconf.mongodb.enabled: @@ -975,8 +976,7 @@ def unlock_machine(self, machine: Machine) -> Machine: """ machine.locked = False machine.locked_changed_on = datetime.now() - self.session.add(machine) - + 
self.session.merge(machine) return machine def count_machines_available(self, label=None, platform=None, tags=None, arch=None, include_reserved=False, os_version=None): @@ -1538,7 +1538,7 @@ def demux_sample_and_add_to_db( package, _ = self._identify_aux_func(file_path, package, check_shellcode=check_shellcode) # extract files from the (potential) archive - extracted_files = demux_sample(file_path, package, options, platform=platform) + extracted_files, demux_error_msgs = demux_sample(file_path, package, options, platform=platform) # check if len is 1 and the same file, if diff register file, and set parent if extracted_files and (file_path, platform) not in extracted_files: sample_parent_id = self.register_sample(File(file_path), source_url=source_url) @@ -1547,6 +1547,18 @@ def demux_sample_and_add_to_db( # create tasks for each file in the archive for file, platform in extracted_files: + # ToDo we lose package here and send APKs to windows + if platform in ("linux", "darwin") and LINUX_STATIC: + task_ids += self.add_static( + file_path=file_path, + priority=priority, + tlp=tlp, + user_id=user_id, + username=username, + options=options, + package=package, + ) + continue if static: # On huge loads this just become a bottleneck config = False @@ -1621,6 +1633,8 @@ def demux_sample_and_add_to_db( if config and isinstance(config, dict): details = {"config": config.get("cape_config", {})} + if demux_error_msgs: + details["errors"] = demux_error_msgs # this is aim to return custom data, think of this as kwargs return task_ids, details @@ -1694,7 +1708,7 @@ def add_static( user_id=0, username=False, ): - extracted_files = demux_sample(file_path, package, options) + extracted_files, demux_error_msgs = demux_sample(file_path, package, options) sample_parent_id = None # check if len is 1 and the same file, if diff register file, and set parent if not isinstance(file_path, bytes): diff --git a/lib/parsers_aux/__init__.py b/lib/parsers_aux/__init__.py deleted file mode 
100644 index e69de29bb2d..00000000000 diff --git a/lib/parsers_aux/ratking/__init__.py b/lib/parsers_aux/ratking/__init__.py deleted file mode 100644 index 821b0b402d6..00000000000 --- a/lib/parsers_aux/ratking/__init__.py +++ /dev/null @@ -1,173 +0,0 @@ -#!/usr/bin/env python3 -# -# rat_config_parser.py -# -# Author: jeFF0Falltrades -# -# Provides the primary functionality for parsing configurations from the -# AsyncRAT, DcRAT, QuasarRAT, VenomRAT, etc. RAT families -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from logging import getLogger -from re import DOTALL, search - -from .utils import config_item -from .utils.config_parser_exception import ConfigParserException -from .utils.decryptors import SUPPORTED_DECRYPTORS -from .utils.dotnet_constants import OPCODE_RET -from .utils.dotnetpe_payload import DotNetPEPayload - -logger = getLogger(__name__) - - -class RATConfigParser: - CONFIG_ITEM_TYPES = [ - config_item.BoolConfigItem(), - config_item.ByteArrayConfigItem(), - config_item.IntConfigItem(), - config_item.NullConfigItem(), - config_item.SpecialFolderConfigItem(), - config_item.EncryptedStringConfigItem(), - ] - MIN_CONFIG_LEN = 7 - PATTERN_VERIFY_HASH = rb"(?:\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01.+?\x2a.+?\x00{6,})" - - def __init__(self, file_data=False): - self.report = {"config": {}} - try: - - self.dnpp = DotNetPEPayload(file_data) - # self.report["sha256"] = self.dnpp.sha256 - # self.report["possible_yara_family"] = self.dnpp.yara_match - if self.dnpp.dotnetpe is None: - raise ConfigParserException("Failed to load file as .NET executable") - self.decryptor = None # Created in decrypt_and_decode_config() - self.report["config"] = self.get_config() - self.report["config"]["aes_key"] = ( - self.decryptor.key.hex() if self.decryptor is not None and self.decryptor.key is not None else "None" - ) - self.report["config"]["aes_salt"] = ( - self.decryptor.salt.hex() if self.decryptor is not None and self.decryptor.salt is not None else "None" - ) - except Exception as e: - self.report["config"] = f"Exception encountered: {e}" - - # Decrypts/decodes values from an encrypted config - def decrypt_and_decode_config(self, encrypted_config): - decoded_config = {} - selected_decryptor = 0 - for item in self.CONFIG_ITEM_TYPES: - item_data = item.parse_from(encrypted_config) - if len(item_data) > 0: - if type(item) is config_item.EncryptedStringConfigItem: - # Translate encrypted string RVAs to encrypted values - for k in item_data: - item_data[k] = 
self.dnpp.user_string_from_rva(item_data[k]) - # Decrypt the values - while selected_decryptor < len(SUPPORTED_DECRYPTORS): - try: - if self.decryptor is None: - self.decryptor = SUPPORTED_DECRYPTORS[selected_decryptor](self.dnpp, item_data) - item_data = self.decryptor.decrypt_encrypted_strings() - break - except Exception as e: - logger.debug( - f"Decryption failed with decryptor {SUPPORTED_DECRYPTORS[selected_decryptor]} : {e}, trying next decryptor..." - ) - self.decryptor = None - selected_decryptor += 1 - elif type(item) is config_item.ByteArrayConfigItem: - for k in item_data: - arr_size, arr_rva = item_data[k] - item_data[k] = self.dnpp.byte_array_from_size_and_rva(arr_size, arr_rva).hex() - decoded_config.update(item_data) - if len(decoded_config) < self.MIN_CONFIG_LEN: - raise ConfigParserException("Minimum threshold of config items not met") - return decoded_config - - # Searches for the RAT configuration in the Settings module - def get_config(self): - logger.debug("Extracting config...") - try: - config_start, decrypted_config = self.get_config_verify_hash_method() - except Exception: - logger.debug("VerifyHash() method failed; Attempting .cctor brute force...") - # If the typical patterns are not found, start brute-forcing - try: - config_start, decrypted_config = self.get_config_cctor_brute_force() - except Exception as e: - raise ConfigParserException("Could not identify config") from e - logger.debug(f"Config found at offset {hex(config_start)}...") - return self.translate_config_field_names(decrypted_config) - - # Attempts to retrieve the config via brute-force, looking through every - # static constructor (.cctor) and attempting to decode/decrypt a valid - # config from that constructor - def get_config_cctor_brute_force(self): - candidates = self.dnpp.method_rvas_from_name(".cctor") - if len(candidates) == 0: - raise ConfigParserException("No .cctor method could be found") - # Get each .cctor method RVA and bytes content up to a RET op - 
candidate_data = {rva: self.dnpp.string_from_offset(self.dnpp.offset_from_rva(rva), OPCODE_RET) for rva in candidates} - config_start, decrypted_config = None, None - for method_rva, method_ins in candidate_data.items(): - logger.debug(f"Attempting brute force at .cctor method at {hex(method_rva)}") - try: - config_start, decrypted_config = ( - method_rva, - self.decrypt_and_decode_config(method_ins), - ) - break - except Exception as e: - logger.debug(e) - continue - if decrypted_config is None: - raise ConfigParserException("No valid configuration could be parsed from any .cctor methods") - return config_start, decrypted_config - - # Attempts to retrieve the config via looking for a config section preceded - # by the "VerifyHash()" function that is typically found in the Settings - # module - def get_config_verify_hash_method(self): - # Identify the VerifyHash() Method code - hit = search(self.PATTERN_VERIFY_HASH, self.dnpp.data, DOTALL) - if hit is None: - raise ConfigParserException("Could not identify VerifyHash() marker method") - # Reverse the VerifyHash() instruction offset, look up VerifyHash() in - # the MethodDef metadata table, and then get the offset to the - # subsequent function, which should be our config constructor - config_start = self.dnpp.next_method_from_instruction_offset(hit.start()) - # Configuration ends with ret operation, so use that as our terminator - encrypted_config = self.dnpp.string_from_offset(config_start, OPCODE_RET) - decrypted_config = self.decrypt_and_decode_config(encrypted_config) - return config_start, decrypted_config - - # Sorts the config by field name RVA prior to replacing RVAs with field - # name strings (this is done last to preserve config ordering) - def translate_config_field_names(self, decrypted_config): - translated_config = {} - for field_rva, field_value in sorted(decrypted_config.items()): - key = self.dnpp.field_name_from_rva(field_rva) - translated_config[key] = field_value - logger.debug(f"Config item 
parsed {key}: {field_value}") - return translated_config diff --git a/lib/parsers_aux/ratking/readme.md b/lib/parsers_aux/ratking/readme.md deleted file mode 100644 index 28fc18ea444..00000000000 --- a/lib/parsers_aux/ratking/readme.md +++ /dev/null @@ -1,35 +0,0 @@ -All works here is done by https://github.com/jeFF0Falltrades/rat_king_parser - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/parsers_aux/ratking/utils/__init__.py b/lib/parsers_aux/ratking/utils/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/lib/parsers_aux/ratking/utils/config_item.py b/lib/parsers_aux/ratking/utils/config_item.py deleted file mode 100644 index 2192f30917e..00000000000 --- a/lib/parsers_aux/ratking/utils/config_item.py +++ /dev/null @@ -1,133 +0,0 @@ -#!/usr/bin/env python3 -# -# config_item.py -# -# Author: jeFF0Falltrades -# -# Provides a utility class for parsing field names and values of various types -# from raw RAT config data -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -from logging import getLogger -from re import DOTALL, findall - -from .data_utils import bytes_to_int -from .dotnet_constants import OPCODE_LDC_I4_0, SpecialFolder - -logger = getLogger(__name__) - - -# Provides an abstract class for config items -class ConfigItem: - def __init__(self, label, pattern): - self.label = label - self.pattern = pattern - - # Should be overridden by children to provide a meaningful value - def derive_item_value(self): - return None - - # Derives config field RVAs and values from data using the specified - # ConfigItem's pattern - def parse_from(self, data): - logger.debug(f"Parsing {self.label} values from data...") - fields = {} - raw_data = findall(self.pattern, data, DOTALL) - found_items = 0 - for obj, string_rva in raw_data: - try: - field_value = self.derive_item_value(obj) - field_rva = bytes_to_int(string_rva) - except Exception: - logger.debug(f"Could not parse value from {obj} at {string_rva}") - continue - fields[field_rva] = field_value - found_items += 1 - logger.debug(f"Parsed {found_items} {self.label} values") - return fields - - -class BoolConfigItem(ConfigItem): - def __init__(self): - super().__init__("boolean", b"(\x16|\x17)\x80(.{3}\x04)") - - # Boolean values are derived by examing if the opcode is "ldc.i4.0" (False) - # or "ldc.i4.1" (True) - def derive_item_value(self, opcode): - return bool(bytes_to_int(opcode) - bytes_to_int(OPCODE_LDC_I4_0)) - - -class ByteArrayConfigItem(ConfigItem): - def __init__(self): - super().__init__( - "byte array", - rb"\x1f(.\x8d.{3}\x01\x25\xd0.{3}\x04)\x28.{3}\x0a\x80(.{3}\x04)", - ) - - # Byte array size and RVA is returned, as these are needed to - # extract the value of the bytes from the payload - def 
derive_item_value(self, byte_data): - arr_size = byte_data[0] - arr_rva = bytes_to_int(byte_data[-4:]) - return (arr_size, arr_rva) - - -class IntConfigItem(ConfigItem): - def __init__(self): - super().__init__("int", b"(\x20.{4}|[\x18-\x1e])\x80(.{3}\x04)") - - def derive_item_value(self, int_bytes): - # If single byte, must be value 2-8, represented by opcodes 0x18-0x1e - # Subtract 0x16 to get the int value, e.g.: - # ldc.i4.8 == 0x1e - 0x16 == 8 - if len(int_bytes) == 1: - return bytes_to_int(int_bytes) - 0x16 - # Else, look for which int was loaded by "ldc.i4" - return bytes_to_int(int_bytes[1:]) - - -class NullConfigItem(ConfigItem): - def __init__(self): - super().__init__("null", b"(\x14\x80)(.{3}\x04)") - - # If "ldnull" is being used, simply return "null" - def derive_item_value(self, _): - return "null" - - -class SpecialFolderConfigItem(ConfigItem): - def __init__(self): - super().__init__("special folder", b"\x1f(.)\x80(.{3}\x04)") - - # Translates SpecialFolder ID to name - def derive_item_value(self, folder_id): - return SpecialFolder(bytes_to_int(folder_id)).name - - -class EncryptedStringConfigItem(ConfigItem): - def __init__(self): - super().__init__("encrypted string", b"\x72(.{3}\x70)\x80(.{3}\x04)") - - # Returns the encrypted string's RVA - def derive_item_value(self, enc_str_rva): - return bytes_to_int(enc_str_rva) diff --git a/lib/parsers_aux/ratking/utils/config_parser_exception.py b/lib/parsers_aux/ratking/utils/config_parser_exception.py deleted file mode 100644 index c1d84e341b1..00000000000 --- a/lib/parsers_aux/ratking/utils/config_parser_exception.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python3 -# -# config_parser_exception.py -# -# Author: jeFF0Falltrades -# -# Provides a simple custom Exception class for use with configuration parsing -# actions -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated 
documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -class ConfigParserException(Exception): - pass diff --git a/lib/parsers_aux/ratking/utils/data_utils.py b/lib/parsers_aux/ratking/utils/data_utils.py deleted file mode 100644 index 6e0ea6c8723..00000000000 --- a/lib/parsers_aux/ratking/utils/data_utils.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python3 -# -# data_utils.py -# -# Author: jeFF0Falltrades -# -# Provides various utility functions for working with binary data -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or 
substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -from .config_parser_exception import ConfigParserException - - -# Converts a bytes object to an int object using the specified byte order -def bytes_to_int(bytes, order="little"): - try: - return int.from_bytes(bytes, byteorder=order) - except Exception as e: - raise ConfigParserException(f"Error parsing int from value: {bytes}") from e - - -# Decodes a bytes object to a Unicode string, using UTF-16LE for byte values -# with null bytes still embedded in them, and UTF-8 for all other values -def decode_bytes(byte_str): - if isinstance(byte_str, str): - return byte_str.strip() - result = None - try: - if b"\x00" in byte_str: - result = byte_str.decode("utf-16le") - else: - result = byte_str.decode("utf-8") - except Exception as e: - raise ConfigParserException(f"Error decoding bytes object to Unicode: {byte_str}") from e - return result - - -def int_to_bytes(int, length=4, order="little"): - try: - return int.to_bytes(length, order) - except Exception as e: - raise ConfigParserException(f"Error parsing bytes from value: {int}") from e diff --git a/lib/parsers_aux/ratking/utils/decryptors/__init__.py b/lib/parsers_aux/ratking/utils/decryptors/__init__.py deleted file mode 100644 index 9a9176ae343..00000000000 --- a/lib/parsers_aux/ratking/utils/decryptors/__init__.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3 -# -# __init__.py -# -# Author: jeFF0Falltrades -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby 
granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from .config_decryptor_aes_cbc import ConfigDecryptorAESCBC -from .config_decryptor_aes_ecb import ConfigDecryptorAESECB -from .config_decryptor_plaintext import ConfigDecryptorPlaintext - -SUPPORTED_DECRYPTORS = [ - ConfigDecryptorAESCBC, - ConfigDecryptorAESECB, - ConfigDecryptorPlaintext, -] diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py deleted file mode 100644 index 9df3620f373..00000000000 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python3 -# -# config_decryptor.py -# -# Author: jeFF0Falltrades -# -# Provides a simple abstract base class for different types of config decryptors -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from abc import ABC, abstractmethod - - -class ConfigDecryptor(ABC): - def __init__(self, payload, config_strings): - self.payload = payload - self.config_strings = config_strings - self.key = None - self.salt = None - - @abstractmethod - def decrypt(self, ciphertext): - pass - - @abstractmethod - def decrypt_encrypted_strings(self): - pass diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py deleted file mode 100644 index fdc2a1bf5a7..00000000000 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py +++ /dev/null @@ -1,259 +0,0 @@ -#!/usr/bin/env python3 -# -# config_aes_decryptor.py -# -# Author: jeFF0Falltrades -# -# Provides a custom AES decryptor for RAT payloads utilizing the known -# encryption patterns of AsyncRAT, DcRAT, QuasarRAT, VenomRAT, etc. -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -from base64 import b64decode -from logging import getLogger -from re import DOTALL, search - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import CBC -from cryptography.hazmat.primitives.hashes import SHA1 -from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC -from cryptography.hazmat.primitives.padding import PKCS7 - -from ..config_parser_exception import ConfigParserException -from ..data_utils import bytes_to_int, decode_bytes, int_to_bytes -from ..dotnet_constants import OPCODE_LDSTR, OPCODE_LDTOKEN -from .config_decryptor import ConfigDecryptor - -logger = getLogger(__name__) - -MIN_CIPHERTEXT_LEN = 48 - - -class ConfigDecryptorAESCBC(ConfigDecryptor): - PATTERN_AES_KEY_AND_BLOCK_SIZE = b"[\x06-\x09]\x20(.{4})\x6f.{4}[\x06-\x09]\x20(.{4})" - PATTERN_AES_KEY_BASE = b"(.{3}\x04).%b" - PATTERN_AES_SALT_ITER = b"[\x02-\x05]\x7e(.{4})\x20(.{4})\x73" - PATTERN_AES_SALT_INIT = b"\x80%b\x2a" - - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) - self.key_size = self.block_size = self.iterations = self.key_candidates = None - self.aes_metadata = self.get_aes_metadata() - - # Given an initialization vector and ciphertext, creates a Cipher - # object with the AES key and specified IV and decrypts the ciphertext - def decrypt(self, iv, ciphertext): - logger.debug(f"Decrypting {ciphertext} with key {self.key.hex()} and IV {iv.hex()}...") - aes_cipher = Cipher(AES(self.key), CBC(iv), backend=default_backend()) - decryptor = aes_cipher.decryptor() - # Use a PKCS7 unpadder 
to remove padding from decrypted value - # https://cryptography.io/en/latest/hazmat/primitives/padding/ - unpadder = PKCS7(self.block_size).unpadder() - try: - padded_text = decryptor.update(ciphertext) + decryptor.finalize() - unpadded_text = unpadder.update(padded_text) + unpadder.finalize() - except Exception as e: - raise ConfigParserException( - f"Error decrypting ciphertext {ciphertext} with IV {iv.hex()} and key {self.key.hex()}" - ) from e - logger.debug(f"Decryption result: {unpadded_text}") - return unpadded_text - - # Derives AES passphrase candidates from a config - # If a passphrase is base64-encoded, both its raw value and decoded value - # will be added as candidates - def derive_aes_passphrase_candidates(self, aes_key_rva): - key_val = self.config_strings[aes_key_rva] - passphrase_candidates = [key_val.encode()] - try: - passphrase_candidates.append(b64decode(key_val)) - except Exception: - pass - logger.debug(f"AES passphrase candidates found: {passphrase_candidates}") - return passphrase_candidates - - # Decrypts encrypted config values with the provided cipher data - def decrypt_encrypted_strings(self): - logger.debug("Decrypting encrypted strings...") - decrypted_config_strings = {} - for k, v in self.config_strings.items(): - # Leave empty strings as they are - if len(v) == 0: - logger.debug(f"Key: {k}, Value: {v}") - decrypted_config_strings[k] = v - continue - # Check if base64-encoded string - b64_exception = False - try: - decoded_val = b64decode(v) - except Exception: - b64_exception = True - # If it was not base64-encoded, or if it is less than our min length - # for ciphertext, leave the value as it is - if b64_exception or len(decoded_val) < MIN_CIPHERTEXT_LEN: - logger.debug(f"Key: {k}, Value: {v}") - decrypted_config_strings[k] = v - continue - # Otherwise, extract the IV from the 16 bytes after the HMAC - # (first 32 bytes) and the ciphertext from the rest of the data - # after the IV, and run the decryption - iv, ciphertext = 
decoded_val[32:48], decoded_val[48:] - result, last_exc = None, None - key_idx = 0 - # Run through key candidates until suitable one found or failure - while result is None and key_idx < len(self.key_candidates): - try: - self.key = self.key_candidates[key_idx] - key_idx += 1 - result = decode_bytes(self.decrypt(iv, ciphertext)) - except ConfigParserException as e: - last_exc = e - if result is None: - logger.debug(f"Decryption failed for item {v}: {last_exc}; Leaving as original value...") - result = v - logger.debug(f"Key: {k}, Value: {result}") - decrypted_config_strings[k] = result - logger.debug("Successfully decrypted strings") - return decrypted_config_strings - - # Extracts AES key candidates from the payload - def get_aes_key_candidates(self, metadata_ins_offset): - logger.debug("Extracting possible AES key values...") - keys = [] - - # Get the RVA of the method that sets up AES256 metadata - metadata_method_rva = self.payload.next_method_from_instruction_offset(metadata_ins_offset, step_back=1, by_token=True) - - # Insert this RVA into the KEY_BASE pattern to find where the AES key - # is initialized - key_hit = search( - self.PATTERN_AES_KEY_BASE % int_to_bytes(metadata_method_rva), - self.payload.data, - DOTALL, - ) - if key_hit is None: - raise ConfigParserException("Could not find AES key pattern") - key_rva = bytes_to_int(key_hit.groups()[0]) - logger.debug(f"AES key RVA: {hex(key_rva)}") - - # Since we already have a map of all field names, use the key field - # name to index into our existing config dict - passphrase_candidates = self.derive_aes_passphrase_candidates(key_rva) - - for candidate in passphrase_candidates: - try: - # The backend parameter is optional in newer versions of the - # cryptography library, but we keep it here for compatibility - kdf = PBKDF2HMAC( - SHA1(), - length=self.key_size, - salt=self.salt, - iterations=self.iterations, - backend=default_backend(), - ) - keys.append(kdf.derive(candidate)) - logger.debug(f"AES key 
derived: {keys[-1]}") - except Exception: - continue - if len(keys) == 0: - raise ConfigParserException(f"Could not derive key from passphrase candidates: {passphrase_candidates}") - return keys - - # Extracts the AES key and block size from the payload - def get_aes_key_and_block_size(self): - logger.debug("Extracting AES key and block size...") - hit = search(self.PATTERN_AES_KEY_AND_BLOCK_SIZE, self.payload.data, DOTALL) - if hit is None: - raise ConfigParserException("Could not extract AES key or block size") - # Convert key size from bits to bytes by dividing by 8 - # Note use of // instead of / to ensure integer output, not float - key_size = bytes_to_int(hit.groups()[0]) // 8 - block_size = bytes_to_int(hit.groups()[1]) - logger.debug(f"Found key size {key_size} and block size {block_size}") - return key_size, block_size - - # Identifies the initialization of the AES256 object in the payload - def get_aes_metadata(self): - logger.debug("Extracting AES metadata...") - # Important to use DOTALL here (and with all regex ops to be safe) - # as we are working with bytes, and if we do not set this, and the - # byte sequence contains a byte that equates to a newline (\n or 0x0A), - # the search will fail - metadata = search(self.PATTERN_AES_SALT_ITER, self.payload.data, DOTALL) - if metadata is None: - raise ConfigParserException("Could not identify AES metadata") - logger.debug(f"AES metadata found at offset {hex(metadata.start())}") - - self.key_size, self.block_size = self.get_aes_key_and_block_size() - - logger.debug("Extracting AES iterations...") - self.iterations = bytes_to_int(metadata.groups()[1]) - logger.debug(f"Found AES iteration number of {self.iterations}") - - self.salt = self.get_aes_salt(metadata.groups()[0]) - self.key_candidates = self.get_aes_key_candidates(metadata.start()) - return metadata - - # Extracts the AES salt from the payload, accounting for both hardcoded - # salt byte arrays, and salts derived from hardcoded strings - def 
get_aes_salt(self, salt_rva): - logger.debug("Extracting AES salt value...") - # Use % to insert our salt RVA into our match pattern - # This pattern will then find the salt initialization ops, - # specifically: - # - # stsfld uint8[] Client.Algorithm.Aes256::Salt - # ret - aes_salt_initialization = self.payload.data.find(self.PATTERN_AES_SALT_INIT % salt_rva) - if aes_salt_initialization == -1: - raise ConfigParserException("Could not identify AES salt initialization") - - # Look at opcode used to initialize the salt to decide how to - # proceed on extracting the salt value (start of pattern - 10 bytes) - salt_op_offset = aes_salt_initialization - 10 - # Need to use bytes([int]) here to properly convert from int to byte - # string for our comparison below - salt_op = bytes([self.payload.data[salt_op_offset]]) - - # Get the salt RVA from the 4 bytes following the initialization op - salt_strings_rva_packed = self.payload.data[salt_op_offset + 1 : salt_op_offset + 5] - salt_strings_rva = bytes_to_int(salt_strings_rva_packed) - - # If the op is a ldstr op (0x72), just get the bytes value of the - # string being used to initialize the salt - if salt_op == OPCODE_LDSTR: - salt_encoded = self.payload.user_string_from_rva(salt_strings_rva) - # We use decode_bytes() here to get the salt string without any - # null bytes (because it's stored as UTF-16LE), then convert it - # back to bytes - salt = decode_bytes(salt_encoded).encode() - # If the op is a ldtoken (0xd0) operation, we need to get the salt - # byte array value from the FieldRVA table - elif salt_op == OPCODE_LDTOKEN: - salt_size = self.payload.data[salt_op_offset - 7] - salt = self.payload.byte_array_from_size_and_rva(salt_size, salt_strings_rva) - else: - raise ConfigParserException(f"Unknown salt opcode found: {salt_op.hex()}") - logger.debug(f"Found salt value: {salt.hex()}") - return salt diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py 
b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py deleted file mode 100644 index cb0578fce86..00000000000 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py +++ /dev/null @@ -1,128 +0,0 @@ -#!/usr/bin/env python3 -# -# config_decryptor_aes_ecb.py -# -# Author: jeFF0Falltrades -# -# Provides a custom AES decryptor for RAT payloads utilizing ECB mode -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from base64 import b64decode -from hashlib import md5 -from logging import getLogger -from re import DOTALL, search - -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.ciphers import Cipher -from cryptography.hazmat.primitives.ciphers.algorithms import AES -from cryptography.hazmat.primitives.ciphers.modes import ECB -from cryptography.hazmat.primitives.padding import PKCS7 - -from ..config_parser_exception import ConfigParserException -from ..data_utils import bytes_to_int, decode_bytes -from .config_decryptor import ConfigDecryptor - -logger = getLogger(__name__) - - -class ConfigDecryptorAESECB(ConfigDecryptor): - PATTERN_MD5_HASH = rb"\x7e(.{3}\x04)\x28.{3}\x06\x6f" - - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) - - # Given ciphertext, creates a Cipher object with the AES key and decrypts - # the ciphertext - def decrypt(self, ciphertext): - if self.key is None: - self.get_aes_key() - logger.debug(f"Decrypting {ciphertext} with key {self.key.hex()}...") - aes_cipher = Cipher(AES(self.key), ECB(), backend=default_backend()) - decryptor = aes_cipher.decryptor() - unpadder = PKCS7(AES.block_size).unpadder() - # Use a PKCS7 unpadder to remove padding from decrypted value - # https://cryptography.io/en/latest/hazmat/primitives/padding/ - unpadder = PKCS7(AES.block_size).unpadder() - try: - padded_text = decryptor.update(ciphertext) + decryptor.finalize() - unpadded_text = unpadder.update(padded_text) + unpadder.finalize() - except Exception as e: - raise ConfigParserException(f"Error decrypting ciphertext {ciphertext} with key {self.key.hex()}") from e - logger.debug(f"Decryption result: {unpadded_text}") - return unpadded_text - - # Decrypts encrypted config values with the provided cipher data - def decrypt_encrypted_strings(self): - logger.debug("Decrypting encrypted strings...") - decrypted_config_strings = {} - for k, v in self.config_strings.items(): - # Leave empty 
strings as they are - if len(v) == 0: - logger.debug(f"Key: {k}, Value: {v}") - decrypted_config_strings[k] = v - continue - # Check if base64-encoded string - b64_exception = False - try: - decoded_val = b64decode(v) - except Exception: - b64_exception = True - # If it was not base64-encoded, leave the value as it is - if b64_exception: - logger.debug(f"Key: {k}, Value: {v}") - decrypted_config_strings[k] = v - continue - ciphertext = decoded_val - result, last_exc = None, None - try: - result = decode_bytes(self.decrypt(ciphertext)) - except ConfigParserException as e: - last_exc = e - if result is None: - logger.debug(f"Decryption failed for item {v}: {last_exc}") - logger.debug(f"Key: {k}, Value: {result}") - decrypted_config_strings[k] = result - logger.debug("Successfully decrypted strings") - return decrypted_config_strings - - # Extracts AES key candidates from the payload - def get_aes_key(self): - logger.debug("Extracting possible AES key value...") - key_hit = search( - self.PATTERN_MD5_HASH, - self.payload.data, - DOTALL, - ) - if key_hit is None: - raise ConfigParserException("Could not find AES key pattern") - key_rva = bytes_to_int(key_hit.groups()[0]) - logger.debug(f"AES key RVA: {hex(key_rva)}") - key_unhashed = self.config_strings[key_rva] - # Generate the MD5 hash - md5_hash = md5() - md5_hash.update(key_unhashed.encode("utf-8")) - md5_digest = md5_hash.digest() - # Key is a 32-byte value made up of the MD5 hash overlaying itself, - # tailed with one null byte - self.key = md5_digest[:15] + md5_digest[:16] + b"\x00" - logger.debug(f"AES key derived: {self.key}") diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py deleted file mode 100644 index c6e71f8a350..00000000000 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env python3 -# -# config_decryptor_plaintext.py -# -# 
Author: jeFF0Falltrades -# -# Provides a fall-through decryptor that will attempt to return the plaintext -# values of a found config when all other decryptors fail -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from logging import getLogger - -from .config_decryptor import ConfigDecryptor - -logger = getLogger(__name__) - - -class ConfigDecryptorPlaintext(ConfigDecryptor): - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) - - def decrypt(self, ciphertext): - return ciphertext - - def decrypt_encrypted_strings(self): - logger.debug("Could not find applicable decryptor, returning found config as plaintext...") - return self.config_strings diff --git a/lib/parsers_aux/ratking/utils/dotnet_constants.py b/lib/parsers_aux/ratking/utils/dotnet_constants.py deleted file mode 100644 index 2de1a34027a..00000000000 --- a/lib/parsers_aux/ratking/utils/dotnet_constants.py +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env python3 -# -# dotnet_constants.py -# -# Author: jeFF0Falltrades -# -# Useful .NET constants and enums -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. 
-from enum import IntEnum - -# Notable CIL Opcodes and Base RVAs -OPCODE_LDC_I4_0 = b"\x16" -OPCODE_LDC_I4_1 = b"\x17" -OPCODE_LDSTR = b"\x72" -OPCODE_LDTOKEN = b"\xd0" -OPCODE_RET = b"\x2a" -MDT_FIELD_DEF = 0x04000000 -MDT_METHOD_DEF = 0x06000000 -MDT_STRING = 0x70000000 - - -# IntEnum derivative used for translating a SpecialFolder ID to its name -class SpecialFolder(IntEnum): - ADMINTOOLS = 48 - APPLICATIONDATA = 26 - CDBURNING = 59 - COMMONADMINTOOLS = 47 - COMMONAPPLICATIONDATA = 35 - COMMONDESKTOPDIRECTORY = 25 - COMMONDOCUMENTS = 46 - COMMONMUSIC = 53 - COMMONOEMLINKS = 58 - COMMONPICTURES = 54 - COMMONPROGRAMFILES = 43 - COMMONPROGRAMFILESX86 = 44 - COMMONPROGRAMS = 23 - COMMONSTARTMENU = 22 - COMMONSTARTUP = 24 - COMMONTEMPLATES = 45 - COMMONVIDEOS = 55 - COOKIES = 33 - DESKTOPDIRECTORY = 16 - FONTS = 20 - HISTORY = 34 - INTERNETCACHE = 32 - LOCALAPPLICATIONDATA = 28 - LOCALIZEDRESOURCES = 57 - MYCOMPUTER = 17 - MYMUSIC = 13 - MYPICTURES = 39 - MYVIDEOS = 14 - NETWORKSHORTCUTS = 19 - PRINTERSHORTCUTS = 27 - PROGRAMFILES = 38 - PROGRAMFILESX86 = 42 - RESOURCES = 56 - STARTMENU = 11 - SYSTEM = 37 - SYSTEMX86 = 41 - TEMPLATES = 21 - USERPROFILE = 40 - WINDOWS = 36 diff --git a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py b/lib/parsers_aux/ratking/utils/dotnetpe_payload.py deleted file mode 100644 index be66c0433f6..00000000000 --- a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python3 -# -# dotnetpe_payload.py -# -# Author: jeFF0Falltrades -# -# Provides a wrapper class for accessing metadata from a DotNetPE object and -# performing RVA to data offset conversions -# -# MIT License -# -# Copyright (c) 2024 Jeff Archer -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. -from hashlib import sha256 -from logging import getLogger - -from dnfile import dnPE - -from .config_parser_exception import ConfigParserException -from .dotnet_constants import MDT_FIELD_DEF, MDT_METHOD_DEF, MDT_STRING - -logger = getLogger(__name__) - - -class DotNetPEPayload: - def __init__(self, file_data, yara_rule=None): - # self.file_path = file_path - self.data = file_data # self.get_file_data() - self.sha256 = self.calculate_sha256() - self.dotnetpe = None - try: - self.dotnetpe = dnPE(data=file_data, clr_lazy_load=True) - except Exception as e: - logger.exception(e) - self.yara_match = "" - if yara_rule is not None: - self.yara_match = self.match_yara(yara_rule) - - # Given a byte array's size and RVA, translates the RVA to the offset of - # the byte array and returns the bytes of the array as a byte string - def byte_array_from_size_and_rva(self, arr_size, arr_rva): - arr_field_rva = self.fieldrva_from_rva(arr_rva) - arr_offset = self.offset_from_rva(arr_field_rva) - arr_value = self.data[arr_offset : arr_offset + arr_size] - return arr_value - - # Calculates the SHA256 hash of file data - def calculate_sha256(self): - sha256_hash = sha256() - sha256_hash.update(self.data) - 
return sha256_hash.hexdigest() - - # Given an RVA, derives the corresponding Field name from the RVA - def field_name_from_rva(self, rva): - return self.dotnetpe.net.mdtables.Field.rows[(rva ^ MDT_FIELD_DEF) - 1].Name.value - - # Given an RVA, derives the corresponding FieldRVA value from the RVA - def fieldrva_from_rva(self, rva): - field_id = rva ^ MDT_FIELD_DEF - for row in self.dotnetpe.net.mdtables.FieldRva: - if row.struct.Field_Index == field_id: - return row.struct.Rva - raise ConfigParserException(f"Could not find FieldRVA for address {rva}") - - # Reads in payload binary content - def get_file_data(self): - logger.debug(f"Reading contents from: {self.file_path}") - try: - with open(self.file_path, "rb") as fp: - data = fp.read() - except Exception as e: - raise ConfigParserException(f"Error reading from path: {self.file_path}") from e - logger.debug("Successfully read data") - return data - - # Tests a given YARA rule object against the file at file_path - def match_yara(self, rule): - try: - match = rule.match(data=self.file_data) - return str(match[0]) if len(match) > 0 else "No match" - except Exception as e: - logger.exception(e) - return f"Exception encountered: {e}" - - # Given a method name, returns RVAs of methods matching that name - def method_rvas_from_name(self, name): - return [row.Rva for row in self.dotnetpe.net.mdtables.MethodDef if row.Name.value == name] - - # Given the offset to an instruction, reverses the instruction to its - # parent Method, and then finds the subsequent Method in the MethodDef - # table and returns its offset or index - def next_method_from_instruction_offset(self, ins_offset, step_back=0, by_token=False): - # Translate the instruction offset to RVA - ins_rva = self.dotnetpe.get_rva_from_offset(ins_offset) - # Get both the regular MethodDef table and a sorted (by RVA) copy - # This is because the table is not guaranteed to be ordered by RVA - methods = self.dotnetpe.net.mdtables.MethodDef.rows - sorted_methods = 
sorted(methods, key=lambda m: m.Rva) - # Go through the sorted table and find the Method RVA that is greater - # than the instruction RVA (the subsequent function), and use step_back - # to get the function containing the instruction if necessary - for idx, method in enumerate(sorted_methods): - if method.Rva > ins_rva: - return ( - # Add 1 to token ID as table starts at index 1, not 0 - methods.index(sorted_methods[idx - step_back]) + 1 + MDT_METHOD_DEF - if by_token - else self.offset_from_rva(methods[methods.index(sorted_methods[idx - step_back])].Rva) - ) - raise ConfigParserException(f"Could not find method from instruction offset {ins_offset}") - - # Given an RVA, returns a data/file offset - def offset_from_rva(self, rva): - return self.dotnetpe.get_offset_from_rva(rva) - - # Given a string offset, and, optionally, a delimiter, extracts the string - def string_from_offset(self, str_offset, delimiter=b"\0"): - try: - result = self.data[str_offset:].partition(delimiter)[0] - except Exception as e: - raise ConfigParserException( - f"Could not extract string value from offset {hex(str_offset)} with delimiter {delimiter}" - ) from e - return result - - def string_from_range(self, start_offset, end_offset): - try: - return self.data[start_offset, end_offset] - except Exception as e: - raise ConfigParserException(f"Could not extract string value from range {hex(start_offset)}:{hex(end_offset)}") from e - - # Given an RVA, derives the corresponding User String - def user_string_from_rva(self, rva): - return self.dotnetpe.net.user_strings.get(rva ^ MDT_STRING).value diff --git a/modules/auxiliary/Mitmdump.py b/modules/auxiliary/Mitmdump.py new file mode 100644 index 00000000000..68cf1ecc12c --- /dev/null +++ b/modules/auxiliary/Mitmdump.py @@ -0,0 +1,130 @@ +# Copyright (C) 2024 davidsb@virustotal.com +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. 
# This module runs mitmdump to get a HAR file
# mitmdump is behind mitmproxy project https://mitmproxy.org/

import logging
import os
import socket
import subprocess
from threading import Thread

from lib.cuckoo.common.abstracts import Auxiliary
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.constants import CUCKOO_ROOT
from lib.cuckoo.core.rooter import rooter

mitmdump = Config("mitmdump")

log = logging.getLogger(__name__)


class Mitmdump(Auxiliary):
    """Auxiliary module that records guest traffic to a HAR file with mitmdump."""

    def __init__(self):
        Auxiliary.__init__(self)
        # NOTE: a stray Thread.__init__(self) call was removed here — Mitmdump
        # subclasses Auxiliary, not Thread; the worker thread is MitmdumpThread.
        log.info("Mitmdump module loaded")
        # Worker thread handle; created in start(), used by stop().
        self.mitmdump_thread = None

    def start(self):
        """Start mitmdump in a separate thread.

        Returns True so the auxiliary manager keeps the module registered.
        """
        self.mitmdump_thread = MitmdumpThread(self.task, self.machine)
        self.mitmdump_thread.start()
        return True

    def stop(self):
        """Stop the mitmdump capture thread, if one was started."""
        if self.mitmdump_thread:
            self.mitmdump_thread.stop()


class MitmdumpThread(Thread):
    """Thread that controls the mitmdump service for one analysis."""

    def __init__(self, task, machine):
        """
        @param task: analysis task object (provides id and options).
        @param machine: guest machine object (provides ip).
        """
        Thread.__init__(self)
        self.task = task
        self.machine = machine
        self.do_run = True
        self.host_ip = mitmdump.cfg.get("host")
        self.host_iface = mitmdump.cfg.get("interface")
        self.mitmdump_bin = mitmdump.cfg.get("bin")
        self.proc = None
        # First unused TCP port in [8001, 8080] on the host as a string,
        # or None when every port in the range is taken.
        self.host_port = self._get_unused_port()
        self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump")

    def stop(self):
        """Terminate the mitmdump process and tear down the firewall redirection."""
        self.do_run = False

        if self.proc and self.proc.poll() is None:
            self.proc.terminate()
            self.proc.wait()
        log.info("Stopping mitmdump")

        try:
            rooter("disable_mitmdump", self.host_iface, self.machine.ip, self.host_port)
        except subprocess.CalledProcessError as e:
            log.error("Failed to execute firewall rules: %s", e)

    def run(self):
        """Set up the firewall redirection and launch mitmdump writing a HAR dump."""
        if "mitmdump" not in self.task.options:
            log.info("Exiting mitmdump. No parameter received.")
            return

        if self.do_run:
            if not self.host_port:
                # Not inside an except block, so log.error (the original used
                # log.exception, which is only meaningful in a handler).
                log.error("All ports in range are in use")
                return

            try:
                rooter("enable_mitmdump", self.host_iface, self.machine.ip, self.host_port)
            except subprocess.CalledProcessError as e:
                log.error("Failed to execute firewall rules: %s", e)

            # Compute the dump path before the try block so it is always bound
            # when the except handler below logs it (os.makedirs may raise
            # OSError before the original assignment was reached -> NameError).
            file_path = os.path.join(self.mitmdump_path, "dump.har")
            try:
                os.makedirs(self.mitmdump_path, exist_ok=True)
                mitmdump_args = [
                    self.mitmdump_bin,
                    "-q",
                    "--listen-host",
                    self.host_ip,
                    "-p",
                    str(self.host_port),
                    "--set",
                    # Must be a single "hardump=<path>" token, otherwise
                    # mitmdump does not create the HAR file (the original
                    # achieved this with a list-splice/join hack).
                    f"hardump={file_path}",
                ]
                self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False)
            except (OSError, ValueError):
                log.exception("Failed to mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path)
                return

            log.info(
                "Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)",
                self.proc.pid,
                self.host_ip,
                self.host_port,
                file_path,
            )

    def _get_unused_port(self) -> str | None:
        """Return the first TCP port in [8001, 8080] with no listener on host_ip, or None."""
        for port in range(8001, 8081):
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                # connect_ex returning non-zero means nothing accepted the
                # connection, i.e. the port is free for mitmdump to bind.
                if s.connect_ex((self.host_ip, port)) != 0:
                    return str(port)
        return None
append_file def _metadata_processing(self, metadata, file_info, append_file): diff --git a/modules/processing/debug.py b/modules/processing/debug.py index 61ce5c3a1cf..6d861dbf6fa 100644 --- a/modules/processing/debug.py +++ b/modules/processing/debug.py @@ -7,6 +7,7 @@ from lib.cuckoo.common.abstracts import Processing from lib.cuckoo.common.exceptions import CuckooProcessingError from lib.cuckoo.common.path_utils import path_exists +from lib.cuckoo.common.utils import truncate_str from lib.cuckoo.core.database import Database @@ -24,7 +25,7 @@ def run(self): try: buf_size = self.options.get("buffer", 8192) content = codecs.open(self.log_path, "rb", "utf-8").read() - debug["log"] = content[:buf_size] + " " if len(content) > buf_size else content + debug["log"] = truncate_str(content, buf_size) except ValueError as e: raise CuckooProcessingError(f"Error decoding {self.log_path}: {e}") from e except (IOError, OSError) as e: diff --git a/modules/processing/overlay.py b/modules/processing/overlay.py deleted file mode 100644 index 7de6ade5372..00000000000 --- a/modules/processing/overlay.py +++ /dev/null @@ -1,70 +0,0 @@ -import base64 -import logging -import os - -from lib.cuckoo.common.abstracts import Processing -from lib.cuckoo.common.objects import File -from lib.cuckoo.common.path_utils import path_exists, path_mkdir, path_write_file - -log = logging.getLogger(__name__) - - -class process_overlay_file(object): - """Returns the file information of the containing overlay data""" - - def __init__(self, overlay_fullpath): - self.overlay_fullpath = overlay_fullpath - - def run(self): - if not self.options.enabled: - return {} - - if not path_exists(self.overlay_fullpath): - return {} - - file_info, _ = File(file_path=self.overlay_fullpath).get_all() - return file_info - - -class extract_overlay_data(Processing): - """Makes use of static.py's result to determine if there is overlay data. Only works for PE for now. 
- If overlay has been detected by static.py, we extract the whole data and save them in a file - @returns: Up to first 4096 bytes of overlay data added as part of the json, full data will need to be downloaded - """ - - # To tell CAPE to run this after first round of processing is done - order = 2 - - def run(self): - if "static" not in self.results: - return None - - self.key = "static" # uses the existing "static" sub container to add in the overlay data - output = self.results["static"] - - if not output.get("pe", {}).get("overlay"): - return output - - overlay_size = int(output["pe"]["overlay"]["size"], 16) - - # Extract out the overlay data - try: - with open(self.file_path, "rb") as f: - f.seek(-overlay_size, os.SEEK_END) - data = f.read() - output["pe"]["overlay"]["data"] = base64.b64encode(data[: min(overlay_size, 4096)]) - - fld = os.path.join(self.analysis_path, "files") - if not path_exists(fld): - log.warning("Folder not present, creating it. Might affect the displaying of (overlay) results on the web") - path_mkdir(fld) - - fld = os.path.join(fld, "extracted_overlay") - _ = path_write_file(fld, data) - - output["pe"]["overlay"]["fileinfo"] = process_overlay_file(fld).run() - - except Exception as e: - log.error(e) - - return output diff --git a/modules/processing/parsers/CAPE/Quasarrat.py b/modules/processing/parsers/CAPE/AsyncRAT.py similarity index 64% rename from modules/processing/parsers/CAPE/Quasarrat.py rename to modules/processing/parsers/CAPE/AsyncRAT.py index 29c59a04fa2..1220071ea7d 100644 --- a/modules/processing/parsers/CAPE/Quasarrat.py +++ b/modules/processing/parsers/CAPE/AsyncRAT.py @@ -1,4 +1,4 @@ -from lib.parsers_aux.ratking import RATConfigParser +from rat_king_parser.rkp import RATConfigParser def extract_config(data: bytes): diff --git a/modules/processing/parsers/CAPE/AsyncRat.py b/modules/processing/parsers/CAPE/AsyncRat.py deleted file mode 100644 index 040c41c084f..00000000000 --- a/modules/processing/parsers/CAPE/AsyncRat.py 
+++ /dev/null @@ -1,87 +0,0 @@ -# based on https://github.com/c3rb3ru5d3d53c/mwcfg-modules/blob/master/asyncrat/asyncrat.py - -import base64 -import binascii -import re -import string -import struct -from contextlib import suppress - -from Cryptodome.Cipher import AES -from Cryptodome.Protocol.KDF import PBKDF2 - - -def get_string(data, index, offset): - return data[index][offset:].decode("utf-8", "ignore") - - -def get_wide_string(data, index, offset): - return (data[index][offset:] + b"\x00").decode("utf-16") - - -def get_salt(): - return bytes.fromhex("BFEB1E56FBCD973BB219022430A57843003D5644D21E62B9D4F180E7E6C33941") - - -def decrypt(key, ciphertext): - aes_key = PBKDF2(key, get_salt(), 32, 50000) - cipher = AES.new(aes_key, AES.MODE_CBC, ciphertext[32 : 32 + 16]) - plaintext = cipher.decrypt(ciphertext[48:]).decode("ascii", "ignore").strip() - return plaintext - - -def decrypt_config_string(key, data, index): - return "".join(filter(lambda x: x in string.printable, decrypt(key, base64.b64decode(data[index][2:])))) - - -def decrypt_config_list(key, data, index): - result = decrypt_config_string(key, data, index) - if result == "null": - return [] - return result.split(",") - - -def extract_config(filebuf): - config = {} - addr = re.search(b"BSJB", filebuf).start() - if not addr: - return - - strings_offset = struct.unpack(". 
+ + +import datetime +import re +from contextlib import suppress + +import pefile + + +def get_current_year() -> str: + current_date = datetime.datetime.now() + return str(current_date.year) + + +def decrypt_string(encoded_string: str, key: str) -> str: + encoded_bytes = bytearray.fromhex(encoded_string) + key_bytes = bytearray(ord(char) for char in key) + encoded_length = len(encoded_bytes) + key_length = len(key_bytes) + decoded_bytes = bytearray(encoded_length) + + for i in range(encoded_length): + new_byte = (encoded_bytes[i] ^ key_bytes[i % key_length]) & 0xFF + decoded_bytes[i] = new_byte + + decoded_string = decoded_bytes.decode("ascii", errors="ignore") + + return decoded_string + + +def extract_config(data: bytes) -> dict: + pe = pefile.PE(data=data) + rdata_section = None + for section in pe.sections: + if b".rdata" in section.Name: + rdata_section = section + break + + if not rdata_section: + return {} + + rdata_data = rdata_section.get_data() + patterns = [b"Builder\.dll\x00", b"Builder\.exe\x00"] + matches = [] + for pattern in patterns: + matches.extend(re.finditer(pattern, rdata_data)) + + found_strings = set() + for match in matches: + start = max(0, match.start() - 1024) + end = min(len(rdata_data), match.end() + 1024) + found_strings.update(re.findall(b"[\x20-\x7E]{4,}?\x00", rdata_data[start:end])) + + result = {} + urls = [] + directories = [] + campaign = "" + + if found_strings: + for string in found_strings: + with suppress(UnicodeDecodeError): + decoded_string = string.decode("utf-8").rstrip("\x00") + + if re.match(r"^[0-9A-Fa-f]+$", decoded_string): + key = get_current_year() + url = decrypt_string(decoded_string, key) + if url: + urls.append(url) + elif decoded_string.count("\\") > 1: + directories.append(decoded_string) + elif re.match(r"^(?![A-Z]{6,}$)[a-zA-Z0-9\-=]{6,}$", decoded_string): + campaign = decoded_string + + result = {"urls": sorted(urls), "directories": directories, "campaign": campaign} + + return result + + +if __name__ 
== "__main__": + import sys + + with open(sys.argv[1], "rb") as f: + print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/BumbleBee.py b/modules/processing/parsers/CAPE/BumbleBee.py index 14425744a0a..b53fdfc722f 100644 --- a/modules/processing/parsers/CAPE/BumbleBee.py +++ b/modules/processing/parsers/CAPE/BumbleBee.py @@ -1,4 +1,5 @@ # Thanks to @MuziSec - https://github.com/MuziSec/malware_scripts/blob/main/bumblebee/extract_config.py +# 2024 updates by @enzok # import logging import traceback @@ -6,10 +7,31 @@ import pefile import regex as re +import yara from Cryptodome.Cipher import ARC4 log = logging.getLogger(__name__) -# log.setLevel(logging.DEBUG) +log.setLevel(logging.INFO) + +rule_source = """ +rule BumbleBee +{ + meta: + author = "enzok" + description = "BumbleBee 2024" + strings: + $rc4key = {48 [6] 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} + $botidlgt = {4C 8B C1 B? 4F 00 00 00 48 8D 0D [4] E8 [4] 4C 8B C3 48 8D 0D [4] B? 4F 00 00 00 E8 [4] 4C 8B C3 48 8D 0D [4] B? FF 0F 00 00 E8} + $botid = {90 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} + $port = {4C 89 6D ?? 4C 89 6D ?? 4c 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8D 05 [4] 44 38 2D [4] 75} + $dga1 = {4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8B 1D [4] 48 8D 0D [4] E8 [4] 8B F8} + $dga2 = {48 8D 0D [4] E8 [4] 8B F0 4C 89 6D ?? 4C 89 6D ?? 4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 
48 8D 15 [4] 44 38 2D [4] 75} + condition: + $rc4key and all of ($botid*) and 2 of ($port, $port, $dga1, $dga2) +} +""" + +yara_rules = yara.compile(source=rule_source) def extract_key_data(data, pe, key_match): @@ -80,6 +102,84 @@ def extract_config_data(data, pe, config_match): return campaign_id_ct, botnet_id_ct, c2s_ct +def extract_2024(pe, filebuf): + cfg = {} + rc4key_init_offset = 0 + botid_init_offset = 0 + port_init_offset = 0 + dga1_init_offset = 0 + dga2_init_offset = 0 + botidlgt_init_offset = 0 + + matches = yara_rules.match(data=filebuf) + if not matches: + return + + for match in matches: + if match.rule != "BumbleBee": + continue + for item in match.strings: + for instance in item.instances: + if "$rc4key" in item.identifier: + rc4key_init_offset = int(instance.offset) + elif "$botidlgt" in item.identifier: + botidlgt_init_offset = int(instance.offset) + elif "$botid" in item.identifier: + botid_init_offset = int(instance.offset) + elif "$port" in item.identifier: + port_init_offset = int(instance.offset) + elif "$dga1" in item.identifier: + dga1_init_offset = int(instance.offset) + elif "$dga2" in item.identifier: + dga2_init_offset = int(instance.offset) + + if not rc4key_init_offset: + return + + key_offset = pe.get_dword_from_offset(rc4key_init_offset + 57) + key_rva = pe.get_rva_from_offset(rc4key_init_offset + 61) + key_offset + key = pe.get_string_at_rva(key_rva) + cfg["RC4 key"] = key.decode() + + botid_offset = pe.get_dword_from_offset(botid_init_offset + 51) + botid_rva = pe.get_rva_from_offset(botid_init_offset + 55) + botid_offset + botid_len_offset = pe.get_dword_from_offset(botidlgt_init_offset + 31) + botid_data = pe.get_data(botid_rva)[:botid_len_offset] + with suppress(Exception): + botid = ARC4.new(key).decrypt(botid_data).split(b"\x00")[0].decode() + cfg["Botid"] = botid + + port_offset = pe.get_dword_from_offset(port_init_offset + 23) + port_rva = pe.get_rva_from_offset(port_init_offset + 27) + port_offset + port_len_offset = 
pe.get_dword_from_offset(botidlgt_init_offset + 4) + port_data = pe.get_data(port_rva)[:port_len_offset] + with suppress(Exception): + port = ARC4.new(key).decrypt(port_data).split(b"\x00")[0].decode() + cfg["Port"] = port + + dgaseed_offset = pe.get_dword_from_offset(dga1_init_offset + 15) + dgaseed_rva = pe.get_rva_from_offset(dga1_init_offset + 19) + dgaseed_offset + dgaseed_data = pe.get_qword_at_rva(dgaseed_rva) + cfg["DGA seed"] = int(dgaseed_data) + + numdga_offset = pe.get_dword_from_offset(dga1_init_offset + 22) + numdga_rva = pe.get_rva_from_offset(dga1_init_offset + 26) + numdga_offset + numdga_data = pe.get_string_at_rva(numdga_rva) + cfg["Number DGA domains"] = numdga_data.decode() + + domainlen_offset = pe.get_dword_from_offset(dga2_init_offset + 3) + domainlen_rva = pe.get_rva_from_offset(dga2_init_offset + 7) + domainlen_offset + domainlen_data = pe.get_string_at_rva(domainlen_rva) + cfg["Domain length"] = domainlen_data.decode() + + tld_offset = pe.get_dword_from_offset(dga2_init_offset + 37) + tld_rva = pe.get_rva_from_offset(dga2_init_offset + 41) + tld_offset + tld_data = pe.get_string_at_rva(tld_rva).decode() + cfg["TLD"] = tld_data + + return cfg + + def extract_config(data): """ Extract key and config and decrypt @@ -92,6 +192,7 @@ def extract_config(data): if not pe: return cfg + key_regex = re.compile(rb"(\x48\x8D.(?P....)\x80\x3D....\x00)", re.DOTALL) regex = re.compile( rb"(?\x48\x8D.(?P....))(?P\x48\x8D.(?P....))(?P\x48\x8D.(?P....))", @@ -128,6 +229,10 @@ def extract_config(data): cfg["C2s"] = list(ARC4.new(key).decrypt(c2s).split(b"\x00")[0].decode().split(",")) except Exception as e: log.error("This is broken: %s", str(e), exc_info=True) + + if not cfg: + cfg = extract_2024(pe, data) + return cfg diff --git a/modules/processing/parsers/CAPE/DCRat.py b/modules/processing/parsers/CAPE/DCRat.py index 29c59a04fa2..1220071ea7d 100644 --- a/modules/processing/parsers/CAPE/DCRat.py +++ b/modules/processing/parsers/CAPE/DCRat.py @@ -1,4 +1,4 
@@ -from lib.parsers_aux.ratking import RATConfigParser +from rat_king_parser.rkp import RATConfigParser def extract_config(data: bytes): diff --git a/modules/processing/parsers/CAPE/KoiLoader.py b/modules/processing/parsers/CAPE/KoiLoader.py new file mode 100644 index 00000000000..75563816613 --- /dev/null +++ b/modules/processing/parsers/CAPE/KoiLoader.py @@ -0,0 +1,130 @@ +import re +import struct +from contextlib import suppress +from itertools import cycle + +import pefile +import yara + +# Hash = b462e3235c7578450b2b56a8aff875a3d99d22f6970a01db3ba98f7ecb6b01a0 + +RULE_SOURCE = """ +rule KoiLoaderResources +{ + meta: + author = "YungBinary" + description = "Find KoiLoader XOR key and payload resource ids" + strings: + $payload_resource = {8D [2] 50 68 [4] E8} + $xor_key_resource = {8D [2] 51 68 [4] E8} + condition: + uint16(0) == 0x5A4D and $payload_resource and $xor_key_resource +} +""" + + +def yara_scan(raw_data): + yara_rules = yara.compile(source=RULE_SOURCE) + matches = yara_rules.match(data=raw_data) + payload_resource_id = None + xor_key_resource_id = None + + for match in matches: + if match.rule != "KoiLoaderResources": + continue + for item in match.strings: + if "$payload_resource" in item.identifier: + payload_offset = item.instances[0].offset + payload_resource_id = struct.unpack("i", raw_data[payload_offset + 5 : payload_offset + 9])[0] + + elif "$xor_key_resource" in item.identifier: + xor_key_offset = item.instances[0].offset + xor_key_resource_id = struct.unpack("i", raw_data[xor_key_offset + 5 : xor_key_offset + 9])[0] + + return (payload_resource_id, xor_key_resource_id) + + +def remove_nulls(buffer, buffer_size): + """ + Modify a buffer removing null bytes + """ + num_nulls = count_nulls(buffer) + result = skip_nth(buffer, num_nulls + 1) + return bytearray(result) + + +def count_nulls(buffer): + """ + Count null separation in a buffer + """ + num_nulls = 0 + idx = 1 + while True: + cur_byte = buffer[idx] + if cur_byte == 0: + num_nulls += 
1 + idx += 1 + continue + else: + break + + return num_nulls + + +def skip_nth(buffer, n): + iterable = list(buffer) + yield from (value for index, value in enumerate(iterable) if (index + 1) % n and (index - 1) % n) + + +def find_c2(decoded_buffer): + decoded_buffer = bytearray(skip_nth(decoded_buffer, 2)) + url_regex = re.compile(rb"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") + urls = [url.lower().decode() for url in url_regex.findall(decoded_buffer)] + return urls + + +def xor_data(data, key): + return bytes(c ^ k for c, k in zip(data, cycle(key))) + + +def extract_config(data): + config_dict = {"C2": []} + + xor_key = b"" + encoded_payload = b"" + + payload_resource_id, xor_key_resource_id = yara_scan(data) + + if payload_resource_id is None or xor_key_resource_id is None: + return + + with suppress(Exception): + pe = pefile.PE(data=data) + for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries: + resource_type = pefile.RESOURCE_TYPE.get(entry.struct.Id) + for directory in entry.directory.entries: + for resource in directory.directory.entries: + if resource_type != "RT_RCDATA": + continue + if directory.struct.Id == xor_key_resource_id: + offset = resource.data.struct.OffsetToData + xor_phrase_size = resource.data.struct.Size + xor_key = pe.get_memory_mapped_image()[offset : offset + xor_phrase_size] + elif directory.struct.Id == payload_resource_id: + offset = resource.data.struct.OffsetToData + encoded_payload_size = resource.data.struct.Size + encoded_payload = pe.get_memory_mapped_image()[offset : offset + encoded_payload_size] + + encoded_payload = remove_nulls(encoded_payload, encoded_payload_size) + decoded_payload = xor_data(encoded_payload, xor_key) + + config_dict["C2"] = find_c2(decoded_payload) + + return config_dict + + +if __name__ == "__main__": + import sys + + with open(sys.argv[1], "rb") as f: + print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Lumma.py 
b/modules/processing/parsers/CAPE/Lumma.py index 8c5352b09a5..85a84c6839a 100644 --- a/modules/processing/parsers/CAPE/Lumma.py +++ b/modules/processing/parsers/CAPE/Lumma.py @@ -1,12 +1,100 @@ +import base64 +import re + +import pefile + + +def is_base64(s): + pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$") + if not s or len(s) < 1: + return False + else: + return pattern.match(s) + + +def extract_strings(data, minchars): + endlimit = b"8192" + apat = b"([\x20-\x7e]{" + str(minchars).encode() + b"," + endlimit + b"})\x00" + strings = [string.decode() for string in re.findall(apat, data)] + return strings + + +def get_base64_strings(str_list): + base64_strings = [] + for s in str_list: + if is_base64(s): + base64_strings.append(s) + return base64_strings + + +def get_rdata(data): + rdata = None + pe = pefile.PE(data=data) + section_idx = 0 + for section in pe.sections: + if section.Name == b".rdata\x00\x00": + rdata = pe.sections[section_idx].get_data() + break + section_idx += 1 + return rdata + + +def xor_data(data, key): + decoded = bytearray() + for i in range(len(data)): + decoded.append(data[i] ^ key[i % len(data)]) + return decoded + + +def contains_non_printable(byte_array): + for byte in byte_array: + if not chr(byte).isprintable(): + return True + return False + + def extract_config(data): - config_dict = {} - C2s = [] + config_dict = {"C2": []} + try: lines = data.decode().split("\n") for line in lines: - if "." in line and len(line) > 2: - C2s.append(line) + try: + if "." 
in line and len(line) > 2: + if not contains_non_printable(line): + config_dict["C2"].append(line) + except Exception: + continue except Exception: - return - config_dict["C2s"] = C2s + pass + + # If no C2s with the old method, + # try with newer version xor decoding + if not config_dict["C2"]: + try: + rdata = get_rdata(data) + strings = extract_strings(rdata, 44) + base64_strings = get_base64_strings(strings) + + for base64_str in base64_strings: + try: + decoded_bytes = base64.b64decode(base64_str, validate=True) + encoded_c2 = decoded_bytes[32:] + xor_key = decoded_bytes[:32] + decoded_c2 = xor_data(encoded_c2, xor_key) + + if not contains_non_printable(decoded_c2): + config_dict["C2"].append(decoded_c2.decode()) + except Exception: + continue + except Exception: + return + return config_dict + + +if __name__ == "__main__": + import sys + + with open(sys.argv[1], "rb") as f: + print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/VenomRat.py b/modules/processing/parsers/CAPE/QuasarRAT.py similarity index 64% rename from modules/processing/parsers/CAPE/VenomRat.py rename to modules/processing/parsers/CAPE/QuasarRAT.py index 29c59a04fa2..1220071ea7d 100644 --- a/modules/processing/parsers/CAPE/VenomRat.py +++ b/modules/processing/parsers/CAPE/QuasarRAT.py @@ -1,4 +1,4 @@ -from lib.parsers_aux.ratking import RATConfigParser +from rat_king_parser.rkp import RATConfigParser def extract_config(data: bytes): diff --git a/modules/processing/parsers/CAPE/Snake.py b/modules/processing/parsers/CAPE/Snake.py new file mode 100644 index 00000000000..50356034693 --- /dev/null +++ b/modules/processing/parsers/CAPE/Snake.py @@ -0,0 +1,174 @@ +import base64 +import hashlib +import logging +import re +import traceback + +import dnfile + +try: + from Cryptodome.Cipher import DES + from Cryptodome.Util.Padding import unpad +except ModuleNotFoundError: + raise ModuleNotFoundError("Please run: pip3 install pycryptodomex") + +log = logging.getLogger(__name__) 
+log.setLevel(logging.INFO) + + +def is_base64(s): + pattern = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{4}|[A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)$") + if not s or len(s) < 1: + return False + else: + return pattern.match(s) + + +def pad(text): + n = len(text) % 8 + return text + (b" " * n) + + +def md5(string: bytes) -> bytes: + return bytes.fromhex(hashlib.md5(string).hexdigest()) + + +def handle_plain(dotnet_file, c2_type, user_strings): + user_strings_list = list(user_strings.values()) + if c2_type == "Telegram": + token = dotnet_file.net.user_strings.get(user_strings_list[15]).value.__str__() + chat_id = dotnet_file.net.user_strings.get(user_strings_list[16]).value.__str__() + return {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"} + elif c2_type == "SMTP": + smtp_from = dotnet_file.net.user_strings.get(user_strings_list[7]).value.__str__() + smtp_password = dotnet_file.net.user_strings.get(user_strings_list[8]).value.__str__() + smtp_host = dotnet_file.net.user_strings.get(user_strings_list[9]).value.__str__() + smtp_to = dotnet_file.net.user_strings.get(user_strings_list[10]).value.__str__() + smtp_port = dotnet_file.net.user_strings.get(user_strings_list[11]).value.__str__() + return { + "Type": "SMTP", + "Host": smtp_host, + "Port": smtp_port, + "From Address": smtp_from, + "To Address": smtp_to, + "Password": smtp_password, + } + elif c2_type == "FTP": + ftp_username = dotnet_file.net.user_strings.get(user_strings_list[12]).value.__str__() + ftp_password = dotnet_file.net.user_strings.get(user_strings_list[13]).value.__str__() + ftp_host = dotnet_file.net.user_strings.get(user_strings_list[14]).value.__str__() + return {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password} + + +def handle_encrypted(dotnet_file, data, c2_type, user_strings): + # Match decrypt string pattern + decrypt_string_pattern = re.compile( + Rb"""(?x) + \x72(...)\x70 + \x7E(...)\x04 + \x28...\x06 + 
\x80...\x04 + """ + ) + + config_dict = None + decrypted_strings = [] + + matches2 = decrypt_string_pattern.findall(data) + for match in matches2: + string_index = int.from_bytes(match[0], "little") + user_string = dotnet_file.net.user_strings.get(string_index).value + # Skip user strings that are empty/not base64 + if user_string == "Yx74dJ0TP3M=" or not is_base64(user_string): + continue + field_row_index = int.from_bytes(match[1], "little") + field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_row_index).Name.__str__() + key_index = user_strings[field_name] + key_str = dotnet_file.net.user_strings.get(key_index).value.__str__() + key = md5(key_str.encode())[:8] + des = DES.new(key, DES.MODE_ECB) + + decoded_str = base64.b64decode(user_string) + padded_str = pad(decoded_str) + decrypted_text = des.decrypt(padded_str) + plaintext_bytes = unpad(decrypted_text, DES.block_size) + plaintext = plaintext_bytes.decode() + decrypted_strings.append(plaintext) + + if decrypted_strings: + if c2_type == "Telegram": + token, chat_id = decrypted_strings + config_dict = {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"} + elif c2_type == "SMTP": + smtp_from, smtp_password, smtp_host, smtp_to, smtp_port = decrypted_strings + config_dict = { + "Type": "SMTP", + "Host": smtp_host, + "Port": smtp_port, + "From Address": smtp_from, + "To Address": smtp_to, + "Password": smtp_password, + } + elif c2_type == "FTP": + ftp_username, ftp_password, ftp_host = decrypted_strings + config_dict = {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password} + return config_dict + + +def extract_config(data): + + try: + dotnet_file = dnfile.dnPE(data=data) + except Exception as e: + log.debug(f"Exception when attempting to parse .NET file: {e}") + log.debug(traceback.format_exc()) + + # ldstr, stsfld + static_strings = re.compile( + Rb"""(?x) + \x72(...)\x70 + \x80(...)\x04 + """ + ) + + # Get user strings 
and C2 type + user_strings = {} + c2_type = None + matches = static_strings.findall(data) + for match in matches: + try: + string_index = int.from_bytes(match[0], "little") + string_value = dotnet_file.net.user_strings.get(string_index).value.__str__() + field_index = int.from_bytes(match[1], "little") + field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_index).Name.__str__() + if string_value == "$%TelegramDv$": + c2_type = "Telegram" + + elif string_value == "$%SMTPDV$": + c2_type = "SMTP" + + elif string_value == "%FTPDV$": + c2_type = "FTP" + else: + user_strings[field_name] = string_index + except Exception as e: + log.debug(f"There was an exception parsing user strings: {e}") + log.debug(traceback.format_exc()) + + if c2_type is None: + raise ValueError("Could not identify C2 type.") + + # Handle encrypted strings + config_dict = handle_encrypted(dotnet_file, data, c2_type, user_strings) + if config_dict is None: + # Handle plain strings + config_dict = handle_plain(dotnet_file, c2_type, user_strings) + + return config_dict + + +if __name__ == "__main__": + import sys + + with open(sys.argv[1], "rb") as f: + print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Stealc.py b/modules/processing/parsers/CAPE/Stealc.py index a6a7d4e0c3d..b9259f3d9ac 100644 --- a/modules/processing/parsers/CAPE/Stealc.py +++ b/modules/processing/parsers/CAPE/Stealc.py @@ -1,6 +1,52 @@ +import struct +from contextlib import suppress + +import pefile +import yara + +# Hash = 619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470 + +RULE_SOURCE = """rule StealC +{ + meta: + author = "Yung Binary" + hash = "619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470" + strings: + $decode_1 = { + 6A ?? + 68 ?? ?? ?? ?? + 68 ?? ?? ?? ?? + E8 ?? ?? ?? ?? + 83 C4 0C + A3 ?? ?? ?? ?? 
+ } + + condition: + $decode_1 +}""" + + +def yara_scan(raw_data): + yara_rules = yara.compile(source=RULE_SOURCE) + matches = yara_rules.match(data=raw_data) + + for match in matches: + for block in match.strings: + for instance in block.instances: + yield instance.offset + + +def xor_data(data, key): + decoded = bytearray() + for i in range(len(data)): + decoded.append(data[i] ^ key[i]) + return decoded + + def extract_config(data): config_dict = {} - C2s = [] + + # Attempt to extract via old method try: domain = "" uri = "" @@ -11,8 +57,50 @@ def extract_config(data): if line.startswith("/") and line[-4] == ".": uri = line if domain and uri: - C2s = [f"{domain}{uri}"] + config_dict.setdefault("C2", []).append(f"{domain}{uri}") except Exception: - return - config_dict["C2s"] = C2s + pass + + # Try with new method + if not config_dict.get("C2"): + with suppress(Exception): + # config_dict["Strings"] = [] + pe = pefile.PE(data=data, fast_load=False) + image_base = pe.OPTIONAL_HEADER.ImageBase + domain = "" + uri = "" + for str_decode_offset in yara_scan(data): + str_size = int(data[str_decode_offset + 1]) + # Ignore size 0 strings + if not str_size: + continue + + key_rva = data[str_decode_offset + 3 : str_decode_offset + 7] + encoded_str_rva = data[str_decode_offset + 8 : str_decode_offset + 12] + # dword_rva = data[str_decode_offset + 21 : str_decode_offset + 25] + + key_offset = pe.get_offset_from_rva(struct.unpack("i", key_rva)[0] - image_base) + encoded_str_offset = pe.get_offset_from_rva(struct.unpack("i", encoded_str_rva)[0] - image_base) + # dword_offset = hex(struct.unpack("i", dword_rva)[0])[2:] + + key = data[key_offset : key_offset + str_size] + encoded_str = data[encoded_str_offset : encoded_str_offset + str_size] + decoded_str = xor_data(encoded_str, key).decode() + if decoded_str.startswith("http") and "://" in decoded_str: + domain = decoded_str + elif decoded_str.startswith("/") and decoded_str[-4] == ".": + uri = decoded_str + # else: + # 
config_dict["Strings"].append({f"dword_{dword_offset}" : decoded_str}) + + if domain and uri: + config_dict.setdefault("C2", []).append(f"{domain}{uri}") + return config_dict + + +if __name__ == "__main__": + import sys + + with open(sys.argv[1], "rb") as f: + print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Xenorat.py b/modules/processing/parsers/CAPE/VenomRAT.py similarity index 64% rename from modules/processing/parsers/CAPE/Xenorat.py rename to modules/processing/parsers/CAPE/VenomRAT.py index 29c59a04fa2..1220071ea7d 100644 --- a/modules/processing/parsers/CAPE/Xenorat.py +++ b/modules/processing/parsers/CAPE/VenomRAT.py @@ -1,4 +1,4 @@ -from lib.parsers_aux.ratking import RATConfigParser +from rat_king_parser.rkp import RATConfigParser def extract_config(data: bytes): diff --git a/modules/processing/parsers/CAPE/XWorm.py b/modules/processing/parsers/CAPE/XWorm.py index dd774ab6f05..1220071ea7d 100644 --- a/modules/processing/parsers/CAPE/XWorm.py +++ b/modules/processing/parsers/CAPE/XWorm.py @@ -1,136 +1,5 @@ -import base64 -import hashlib -import re -from contextlib import suppress +from rat_king_parser.rkp import RATConfigParser -import dnfile -from Cryptodome.Cipher import AES -confPattern = re.compile( - rb"""(?x) - \x72(...)\x70\x80...\x04 - """, - re.DOTALL, -) - -mutexPattern1 = re.compile( - rb"""(?x) - \x72(...)\x70\x80...\x04 - \x72...\x70\x28...\x0A - """, - re.DOTALL, -) - -mutexPattern2 = re.compile( - rb"""(?x) - \x72(...)\x70\x80...\x04\x2A - """, - re.DOTALL, -) - -installBinNamePattern = re.compile( - rb"""(?x) - \x72(...)\x70\x80...\x04 - \x72...\x70\x80...\x04 - \x72...\x70\x28...\x0A - """, - re.DOTALL, -) - -installDirPattern = re.compile( - rb"""(?x) - \x72(...)\x70\x80...\x04 - \x72...\x70\x80...\x04 - \x72...\x70\x80...\x04 - \x72...\x70\x28...\x0A - """, - re.DOTALL, -) - -mutexPatterns = [mutexPattern1, mutexPattern2] - - -def deriveAESKey(encryptedMutex: str): - md5Hash = 
def decryptAES(key: str, ciphertext: str, mode):
    """Base64-decode *ciphertext*, decrypt it with AES (hex-encoded *key*),
    and return only the printable config characters.

    Returns a list when the plaintext holds several comma-delimited values
    (e.g. multiple C2 hosts), otherwise a single string.
    """
    aes = AES.new(bytes.fromhex(key), mode)
    plaintext = aes.decrypt(base64.b64decode(ciphertext))

    # Strip trailing garbage/padding bytes (i.e. 'http:\\example.com\\\x03\x03\x03')
    allowed = set(b"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789.-/,")
    cleaned = bytes(ch for ch in plaintext if ch in allowed).decode("utf-8")

    # C2 could be one or more values delimited by ','
    parts = cleaned.split(",")
    if len(parts) > 1:
        return parts
    return "".join(parts)
def extract_config(data: bytes):
    """Delegate XenoRAT config extraction to the shared RAT king parser.

    Returns the parser's "config" section, or an empty dict when nothing
    was recovered.
    """
    report = RATConfigParser(data).report
    return report.get("config", {})
def convert_to_MACO(raw_config: dict) -> MACOModel:
    """Map a CAPE AgentTesla config dict onto the MACO model.

    Returns None when the config is empty or carries no "Protocol" entry.
    """
    if not raw_config:
        return
    protocol = raw_config.get("Protocol")
    if not protocol:
        return

    result = MACOModel(family="AgentTesla", other=raw_config)

    if protocol == "Telegram":
        result.http.append(MACOModel.Http(uri=raw_config["C2"], password=raw_config["Password"], usage="c2"))
    elif protocol in ("HTTP(S)", "Discord"):
        result.http.append(MACOModel.Http(uri=raw_config["C2"], usage="c2"))
    elif protocol == "FTP":
        result.ftp.append(
            MACOModel.FTP(
                username=raw_config["Username"],
                password=raw_config["Password"],
                hostname=raw_config["C2"].replace("ftp://", ""),
                usage="c2",
            )
        )
    elif protocol == "SMTP":
        smtp_kwargs = {
            "username": raw_config["Username"],
            "password": raw_config["Password"],
            "hostname": raw_config["C2"],
            "mail_to": [raw_config["EmailTo"]],
            "usage": "c2",
        }
        # Port is not always present in the extracted config.
        if "Port" in raw_config:
            smtp_kwargs["port"] = raw_config["Port"]
        result.smtp.append(MACOModel.SMTP(**smtp_kwargs))

    if "Persistence_Filename" in raw_config:
        result.paths.append(MACOModel.Path(path=raw_config["Persistence_Filename"], usage="storage"))

    if "ExternalIPCheckServices" in raw_config:
        for service in raw_config["ExternalIPCheckServices"]:
            result.http.append(MACOModel.Http(uri=service, usage="other"))

    return result


class AgentTesla(Extractor):
    author = "kevoreilly"
    family = "AgentTesla"
    last_modified = "2024-10-20"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict) -> MACOModel:
    """Map a CAPE AsyncRAT config dict onto the MACO model."""
    if not raw_config:
        return

    result = MACOModel(family="AsyncRAT", other=raw_config)

    result.mutex.append(raw_config["Mutex"])
    result.version = raw_config["Version"]

    # "Install" is a stringified boolean flagging persistence.
    if raw_config["Install"] == "true":
        result.capability_enabled.append("persistence")
    else:
        result.capability_disabled.append("persistence")

    # Installation path, when the dropper installs itself.
    if raw_config.get("Folder"):
        install_path = os.path.join(raw_config["Folder"], raw_config["Filename"])
        result.paths.append(MACOModel.Path(path=install_path, usage="install"))

    # C2 hosts pair index-wise with the "Ports" list.
    for idx, host in enumerate(raw_config.get("C2s", [])):
        result.http.append(MACOModel.Http(hostname=host, port=int(raw_config["Ports"][idx]), usage="c2"))

    # Pastebin may point at externally-hosted C2 details.
    # Ref: https://www.netskope.com/blog/asyncrat-using-fully-undetected-downloader
    if raw_config.get("Pastebin") not in ["null", None]:
        result.http.append(MACOModel.Http(uri=raw_config["Pastebin"], usage="download"))

    return result


class AsyncRAT(Extractor):
    author = "kevoreilly"
    family = "AsyncRAT"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE Azorult config dict onto the MACO model."""
    if not raw_config:
        return None

    c2_entry = MACOModel.Http(hostname=raw_config["address"])
    return MACOModel(family="Azorult", http=[c2_entry], other=raw_config)


class Azorult(Extractor):
    author = "kevoreilly"
    family = "Azorult"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = rule_source

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE BackOffPOS config dict onto the MACO model."""
    if not raw_config:
        return None

    parsed_result = MACOModel(family="BackOffPOS", other=raw_config)

    # Version
    parsed_result.version = raw_config["Version"]

    # Encryption details (RC4 keystream seeded from the embedded seed)
    parsed_result.encryption.append(
        MACOModel.Encryption(algorithm="rc4", key=raw_config["EncryptionKey"], seed=raw_config["RC4Seed"])
    )
    for url in raw_config["URLs"]:
        # FIX: the MACO Http model names this field `uri` (as every other
        # extractor in this package uses); `url=` is not a valid field name.
        parsed_result.http.append(MACOModel.Http(uri=url))

    return parsed_result


class BackOffPOS(Extractor):
    author = "kevoreilly"
    family = "BackOffPOS"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE BlackDropper config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="BlackDropper", campaign_id=[raw_config["campaign"]], other=raw_config)

    # Filesystem locations used by the dropper.
    for directory in raw_config.get("directories", []):
        result.paths.append(MACOModel.Path(path=directory))

    for address in raw_config.get("urls", []):
        result.http.append(MACOModel.Http(uri=address))

    return result


class BlackDropper(Extractor):
    author = "kevoreilly"
    family = "BlackDropper"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE BlackNix config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="BlackNix", other=raw_config)

    result.mutex.append(raw_config["Mutex"])

    # Feature toggles stored as "True"/"False" strings.
    # TODO: Review if these are all capabilities set by a boolean flag
    toggles = (
        "Anti Sandboxie",
        "Kernel Mode Unhooking",
        "User Mode Unhooking",
        "Melt Server",
        "Offline Screen Capture",
        "Offline Keylogger",
        "Copy to ADS",
        "Safe Mode Startup",
        "Inject winlogon.exe",
        "Active X Run",
        "Registry Run",
    )
    for name in toggles:
        bucket = result.capability_enabled if raw_config[name].lower() == "true" else result.capability_disabled
        bucket.append(name)

    result.sleep_delay = raw_config["Delay Time"]
    result.password.append(raw_config["Password"])
    result.http.append(MACOModel.Http(hostname=raw_config["Domain"], usage="c2"))
    result.registry.append(MACOModel.Registry(key=raw_config["Registry Key"]))

    # Install location = directory + filename.
    result.paths.append(
        MACOModel.Path(path=os.path.join(raw_config["Install Path"], raw_config["Install Name"]), usage="install")
    )

    # Campaign Name/Group
    result.campaign_id = [raw_config["Campaign Name"], raw_config["Campaign Group"]]
    return result


class BlackNix(Extractor):
    author = "kevoreilly"
    family = "BlackNix"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE Blister config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="Blister", other=raw_config)

    for feature in ("Persistence", "Sleep after injection"):
        if raw_config[feature]:
            result.capability_enabled.append(feature)
        else:
            result.capability_disabled.append(feature)

    # Payload is protected with the Rabbit stream cipher.
    result.encryption.append(
        MACOModel.Encryption(algorithm="rabbit", key=raw_config["Rabbit key"], iv=raw_config["Rabbit IV"])
    )
    return result


class Blister(Extractor):
    author = "kevoreilly"
    family = "Blister"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE BuerLoader config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="BuerLoader", other=raw_config)

    for address in raw_config["address"]:
        result.http.append(MACOModel.Http(hostname=address, usage="c2"))
    return result


class BuerLoader(Extractor):
    author = "kevoreilly"
    family = "BuerLoader"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE BumbleBee config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="BumbleBee", other=raw_config)

    campaign = raw_config.get("Campaign ID")
    if campaign:
        result.campaign_id.append(campaign)

    botnet = raw_config.get("Botnet ID")
    if botnet:
        result.identifier.append(botnet)

    for c2 in raw_config.get("C2s", []):
        result.http.append(MACOModel.Http(hostname=c2, usage="c2"))

    # Embedded data blob, if recovered.
    blob = raw_config.get("Data")
    if blob:
        result.binaries.append(MACOModel.Binary(data=blob))

    rc4_key = raw_config.get("RC4 Key")
    if rc4_key:
        result.encryption.append(MACOModel.Encryption(algorithm="rc4", key=rc4_key))

    return result


class BumbleBee(Extractor):
    author = "kevoreilly"
    family = "BumbleBee"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE ChChes config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="ChChes", other=raw_config)

    for address in raw_config.get("c2_url", []):
        result.http.append(MACOModel.Http(uri=address, usage="c2"))

    return result


class ChChes(Extractor):
    author = "kevoreilly"
    family = "ChChes"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = rule_source

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE CobaltStrikeBeacon config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="CobaltStrikeBeacon", other=raw_config)

    # Discard placeholder entries before interpreting the config.
    cfg = {key: val for key, val in raw_config.items() if val != "Not Found"}

    # Boolean settings are stored as "True"/"False" strings; the first
    # character of each such key is stripped, mirroring the CAPE parser's
    # key naming -- presumably a prefix marker (TODO confirm).
    flags = {key[1:]: cfg.pop(key) for key in list(cfg.keys()) if cfg[key] in ("True", "False")}
    for name, value in flags.items():
        bucket = result.capability_enabled if value.lower() == "true" else result.capability_disabled
        bucket.append(name)

    if "C2Server" in cfg:
        # "C2Server" packs "host,get_path"; POST path comes from HttpPostUri.
        host, get_path = cfg.pop("C2Server").split(",")
        port = cfg.pop("Port")
        result.http.append(MACOModel.Http(hostname=host, port=port, method="GET", path=get_path, usage="c2"))
        result.http.append(
            MACOModel.Http(hostname=host, port=port, method="POST", path=cfg.pop("HttpPostUri"), usage="c2")
        )

    result.sleep_delay = cfg.pop("SleepTime")
    result.sleep_delay_jitter = cfg.pop("Jitter")

    for path_key in ("Spawnto_x86", "Spawnto_x64"):
        if path_key in cfg:
            result.paths.append(MACOModel.Path(path=cfg.pop(path_key)))

    return result


class CobaltStrikeBeacon(Extractor):
    author = "kevoreilly"
    family = "CobaltStrikeBeacon"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Wrap the raw CAPE DCRat config in a MACO model (no field mapping yet)."""
    if not raw_config:
        return None

    # TODO: Assign fields to MACO model
    return MACOModel(family="DCRat", other=raw_config)


class DCRat(Extractor):
    author = "kevoreilly"
    family = "DCRat"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE DarkGate config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="DarkGate", other=raw_config)

    # Work on a copy so the full original config is preserved in `other`.
    remaining = deepcopy(raw_config)

    # "Yes"/"No" entries are feature toggles; consume them as capabilities.
    for key, value in list(remaining.items()):
        if value not in ("Yes", "No"):
            continue
        if value == "Yes":
            result.capability_enabled.append(key)
        else:
            result.capability_disabled.append(key)
        remaining.pop(key)

    # All C2 endpoints share a single port value.
    port = remaining.pop("c2_port", None)
    for address in remaining.pop("C2", []):
        result.http.append(MACOModel.Http(uri=address, port=port, usage="c2"))

    if remaining.get("internal_mutex"):
        result.mutex.append(remaining.pop("internal_mutex"))

    return result


class DarkGate(Extractor):
    author = "kevoreilly"
    family = "DarkGate"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE DridexLoader config dict onto the MACO model."""
    if not raw_config:
        return None

    result = MACOModel(family="DridexLoader", other=raw_config)

    for address in raw_config.get("address", []):
        result.http.append(MACOModel.Http(uri=address, usage="c2"))

    if "RC4 key" in raw_config:
        result.encryption.append(MACOModel.Encryption(algorithm="RC4", key=raw_config["RC4 key"]))

    if "Botnet ID" in raw_config:
        result.identifier.append(raw_config["Botnet ID"])

    return result


class DridexLoader(Extractor):
    author = "kevoreilly"
    family = "DridexLoader"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = rule_source

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
def convert_to_MACO(raw_config: dict):
    """Map a CAPE Emotet config dict onto the MACO model.

    Returns None when the config is empty.
    """
    if not raw_config:
        return None

    parsed_result = MACOModel(family="Emotet", other=raw_config)

    for c2_address in raw_config.get("address", []):
        parsed_result.http.append(MACOModel.Http(uri=c2_address, usage="c2"))

    # FIX: the guard previously tested "RC4 public key" while the lookup and
    # the `other` exclusion below both use "RSA public key", so the encryption
    # entry was unreachable (or would KeyError). Use the RSA key name -- and
    # algorithm -- consistently.
    if "RSA public key" in raw_config:
        parsed_result.encryption.append(MACOModel.Encryption(algorithm="RSA", public_key=raw_config["RSA public key"]))

    # Keep everything not already mapped above.
    parsed_result.other = {k: raw_config[k] for k in raw_config.keys() if k not in ["address", "RSA public key"]}

    return parsed_result


class Emotet(Extractor):
    author = "kevoreilly"
    family = "Emotet"
    last_modified = "2024-10-26"
    sharing = "TLP:CLEAR"
    yara_rule = rule_source

    def run(self, stream, matches):
        return convert_to_MACO(extract_config(stream.read()))
MACOModel(family="EvilGrab", other=raw_config) + + if "mutex" in raw_config: + parsed_result.mutex.append(raw_config["mutex"]) + + if "missionid" in raw_config: + parsed_result.campaign_id.append(raw_config["missionid"]) + + if "version" in raw_config: + parsed_result.version = raw_config["version"] + + if "c2_address" in raw_config: + parsed_result.http.append( + MACOModel.Http(uri=raw_config["c2_address"], port=raw_config["port"][0] if "port" in raw_config else None) + ) + + return parsed_result + + +class EvilGrab(Extractor): + author = "kevoreilly" + family = "EvilGrab" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Fareit.py b/modules/processing/parsers/MACO/Fareit.py new file mode 100644 index 00000000000..d09c1492600 --- /dev/null +++ b/modules/processing/parsers/MACO/Fareit.py @@ -0,0 +1,27 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Fareit import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + # TODO: Assign fields to MACO model + parsed_result = MACOModel(family="Fareit", other=raw_config) + + return parsed_result + + +class Fareit(Extractor): + author = "kevoreilly" + family = "Fareit" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Formbook.py b/modules/processing/parsers/MACO/Formbook.py new file mode 100644 index 00000000000..73a2d4dae8c --- /dev/null +++ b/modules/processing/parsers/MACO/Formbook.py @@ -0,0 +1,32 @@ +import os + +from maco.extractor import Extractor
+from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Formbook import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Formbook", other=raw_config) + + if "C2" in raw_config: + parsed_result.http.append(MACOModel.Http(uri=raw_config["C2"], usage="c2")) + + for decoy in raw_config.get("Decoys", []): + parsed_result.http.append(MACOModel.Http(uri=decoy, usage="decoy")) + + return parsed_result + + +class Formbook(Extractor): + author = "kevoreilly" + family = "Formbook" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Greame.py b/modules/processing/parsers/MACO/Greame.py new file mode 100644 index 00000000000..bb06c40646c --- /dev/null +++ b/modules/processing/parsers/MACO/Greame.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Greame import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Greame", other=raw_config) + + return parsed_result + + +class Greame(Extractor): + author = "kevoreilly" + family = "Greame" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/GuLoader.py b/modules/processing/parsers/MACO/GuLoader.py new file mode 100644 index 00000000000..e0a0ceae0e2 --- /dev/null +++ b/modules/processing/parsers/MACO/GuLoader.py @@ -0,0 +1,29 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from 
modules.processing.parsers.CAPE.GuLoader import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="GuLoader", other=raw_config) + + for url in raw_config.get("URLs", []): + parsed_result.http.append(MACOModel.Http(uri=url, usage="download")) + + return parsed_result + + +class GuLoader(Extractor): + author = "kevoreilly" + family = "GuLoader" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], "data/yara/CAPE/Guloader.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Hancitor.py b/modules/processing/parsers/MACO/Hancitor.py new file mode 100644 index 00000000000..1a9add97f8b --- /dev/null +++ b/modules/processing/parsers/MACO/Hancitor.py @@ -0,0 +1,32 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Hancitor import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Hancitor", other=raw_config) + + for url in raw_config.get("address", []): + parsed_result.http.append(MACOModel.Http(uri=url, usage="c2")) + + if "Build ID" in raw_config: + parsed_result.identifier.append(raw_config["Build ID"]) + + return parsed_result + + +class Hancitor(Extractor): + author = "kevoreilly" + family = "Hancitor" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/HttpBrowser.py b/modules/processing/parsers/MACO/HttpBrowser.py new file mode 100644 index 00000000000..6b851fd0178 --- 
/dev/null +++ b/modules/processing/parsers/MACO/HttpBrowser.py @@ -0,0 +1,35 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.HttpBrowser import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="HttpBrowser", other=raw_config) + + port = raw_config["port"][0] if "port" in raw_config else None + + if "c2_address" in raw_config: + parsed_result.http.append(MACOModel.Http(uri=raw_config["c2_address"], port=port, usage="c2")) + + if "filepath" in raw_config: + parsed_result.paths.append(MACOModel.Path(path=raw_config["filepath"])) + + if "injectionprocess" in raw_config: + parsed_result.other["injectionprocess"] = raw_config["injectionprocess"] + + return parsed_result + + +class HttpBrowser(Extractor): + author = "kevoreilly" + family = "HttpBrowser" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/IcedID.py b/modules/processing/parsers/MACO/IcedID.py new file mode 100644 index 00000000000..5ef0778118a --- /dev/null +++ b/modules/processing/parsers/MACO/IcedID.py @@ -0,0 +1,24 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.IcedID import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + return MACOModel(**raw_config) + + +class IcedID(Extractor): + author = "kevoreilly" + family = "IcedID" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git
a/modules/processing/parsers/MACO/IcedIDLoader.py b/modules/processing/parsers/MACO/IcedIDLoader.py new file mode 100644 index 00000000000..46c6ea4cad4 --- /dev/null +++ b/modules/processing/parsers/MACO/IcedIDLoader.py @@ -0,0 +1,32 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.IcedIDLoader import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="IcedIDLoader", other=raw_config) + + if "C2" in raw_config: + parsed_result.http.append(MACOModel.Http(hostname=raw_config["C2"], usage="c2")) + + if "Campaign" in raw_config: + parsed_result.campaign_id.append(str(raw_config["Campaign"])) + + return parsed_result + + +class IcedIDLoader(Extractor): + author = "kevoreilly" + family = "IcedIDLoader" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/KoiLoader.py b/modules/processing/parsers/MACO/KoiLoader.py new file mode 100644 index 00000000000..63c0c75134d --- /dev/null +++ b/modules/processing/parsers/MACO/KoiLoader.py @@ -0,0 +1,27 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.KoiLoader import RULE_SOURCE, extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="KoiLoader", other=raw_config) + + for c2_url in raw_config.get("C2", []): + parsed_result.http.append(MACOModel.Http(uri=c2_url, usage="c2")) + + return parsed_result + + +class KoiLoader(Extractor): + author = "kevoreilly" + family = "KoiLoader" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = 
RULE_SOURCE + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Latrodectus.py b/modules/processing/parsers/MACO/Latrodectus.py new file mode 100644 index 00000000000..4ad7cbd1515 --- /dev/null +++ b/modules/processing/parsers/MACO/Latrodectus.py @@ -0,0 +1,44 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Latrodectus import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Latrodectus", other=raw_config) + + for c2_url in raw_config.get("C2", []): + parsed_result.http.append(MACOModel.Http(uri=c2_url, usage="c2")) + + if "Group name" in raw_config: + parsed_result.identifier.append(raw_config["Group name"]) + + if "Campaign ID" in raw_config: + parsed_result.campaign_id.append(str(raw_config["Campaign ID"])) + + if "Version" in raw_config: + parsed_result.version = raw_config["Version"] + + if "RC4 key" in raw_config: + parsed_result.encryption.append(MACOModel.Encryption(algorithm="RC4", key=raw_config["RC4 key"])) + + if "Strings" in raw_config: + parsed_result.decoded_strings = raw_config["Strings"] + + return parsed_result + + +class Latrodectus(Extractor): + author = "kevoreilly" + family = "Latrodectus" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/LokiBot.py b/modules/processing/parsers/MACO/LokiBot.py new file mode 100644 index 00000000000..01d36594953 --- /dev/null +++ b/modules/processing/parsers/MACO/LokiBot.py @@ -0,0 +1,29 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel 
+ +from modules.processing.parsers.CAPE.LokiBot import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="LokiBot", other=raw_config) + + for address in raw_config.get("address", []): + parsed_result.http.append(MACOModel.Http(uri=address)) + + return parsed_result + + +class LokiBot(Extractor): + author = "kevoreilly" + family = "LokiBot" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Lumma.py b/modules/processing/parsers/MACO/Lumma.py new file mode 100644 index 00000000000..5f5153c0b5c --- /dev/null +++ b/modules/processing/parsers/MACO/Lumma.py @@ -0,0 +1,29 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Lumma import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Lumma", other=raw_config) + + for address in raw_config.get("C2", []): + parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) + + return parsed_result + + +class Lumma(Extractor): + author = "kevoreilly" + family = "Lumma" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/NanoCore.py b/modules/processing/parsers/MACO/NanoCore.py new file mode 100644 index 00000000000..309f798de01 --- /dev/null +++ b/modules/processing/parsers/MACO/NanoCore.py @@ -0,0 +1,44 @@ +from copy import deepcopy + +from 
maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.NanoCore import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="NanoCore", other=raw_config) + + config_copy = deepcopy(raw_config) + capabilities = {k: config_copy.pop(k) for k in list(config_copy.keys()) if config_copy[k] in ["True", "False"]} + + if "Version" in config_copy: + parsed_result.version = config_copy.pop("Version") + + if "Mutex" in config_copy: + parsed_result.mutex.append(config_copy.pop("Mutex")) + + for capability, enabled in capabilities.items(): + if enabled.lower() == "true": + parsed_result.capability_enabled.append(capability) + else: + parsed_result.capability_disabled.append(capability) + + for address in config_copy.pop("cncs", []): + host, port = address.split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) + + return parsed_result + + +class NanoCore(Extractor): + author = "kevoreilly" + family = "NanoCore" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Nighthawk.py b/modules/processing/parsers/MACO/Nighthawk.py new file mode 100644 index 00000000000..b1872886bed --- /dev/null +++ b/modules/processing/parsers/MACO/Nighthawk.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Nighthawk import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Nighthawk", other=raw_config) + + return parsed_result + + +class Nighthawk(Extractor): + author = "kevoreilly" + family = "Nighthawk" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = 
open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Njrat.py b/modules/processing/parsers/MACO/Njrat.py new file mode 100644 index 00000000000..f3f9b27de27 --- /dev/null +++ b/modules/processing/parsers/MACO/Njrat.py @@ -0,0 +1,33 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Njrat import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Njrat", other=raw_config) + + if "version" in raw_config: + parsed_result.version = raw_config["version"] + + if "campaign_id" in raw_config: + parsed_result.campaign_id.append(raw_config["campaign_id"]) + + for c2 in raw_config.get("cncs", []): + host, port = c2.split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) + + return parsed_result + + +class Njrat(Extractor): + author = "kevoreilly" + family = "Njrat" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Oyster.py b/modules/processing/parsers/MACO/Oyster.py new file mode 100644 index 00000000000..4a80f038cbf --- /dev/null +++ b/modules/processing/parsers/MACO/Oyster.py @@ -0,0 +1,35 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Oyster import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Oyster", other=raw_config) + + for address in raw_config.get("C2", []): + parsed_result.http.append(MACOModel.Http(uri=address, usage="c2")) + + if "Dll Version" in raw_config: 
+ parsed_result.version = raw_config["Dll Version"] + + if "Strings" in raw_config: + parsed_result.decoded_strings = raw_config["Strings"] + + return parsed_result + + +class Oyster(Extractor): + author = "kevoreilly" + family = "Oyster" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Pandora.py b/modules/processing/parsers/MACO/Pandora.py new file mode 100644 index 00000000000..b82bad0c02c --- /dev/null +++ b/modules/processing/parsers/MACO/Pandora.py @@ -0,0 +1,50 @@ +import os +from copy import deepcopy + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Pandora import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + config_copy = deepcopy(raw_config) + parsed_result = MACOModel( + family="Pandora", + mutex=[config_copy.pop("Mutex")], + campaign_id=[config_copy.pop("Campaign ID")], + version=config_copy.pop("Version"), + http=[dict(hostname=config_copy.pop("Domain"), port=config_copy.pop("Port"), password=config_copy.pop("Password"))], + other=raw_config, + ) + + parsed_result.paths.append( + MACOModel.Path(path=os.path.join(config_copy.pop("Install Path"), config_copy.pop("Install Name")), usage="install") + ) + + parsed_result.registry.append(MACOModel.Registry(key=config_copy.pop("HKCU Key"))) + parsed_result.registry.append(MACOModel.Registry(key=config_copy.pop("ActiveX Key"))) + + for field in list(config_copy.keys()): + # TODO: Unsure what's the value of the remaining fields + if config_copy[field].lower() in ["true", "false"]: + enabled = config_copy.pop(field).lower() == "true" + if enabled: + parsed_result.capability_enabled.append(field) + else: + 
parsed_result.capability_disabled.append(field) + + return parsed_result + + +class Pandora(Extractor): + author = "kevoreilly" + family = "Pandora" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PhemedroneStealer.py b/modules/processing/parsers/MACO/PhemedroneStealer.py new file mode 100644 index 00000000000..ef30b9032bf --- /dev/null +++ b/modules/processing/parsers/MACO/PhemedroneStealer.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.PhemedroneStealer import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="PhemedroneStealer", other=raw_config) + + return parsed_result + + +class PhemedroneStealer(Extractor): + author = "kevoreilly" + family = "PhemedroneStealer" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PikaBot.py b/modules/processing/parsers/MACO/PikaBot.py new file mode 100644 index 00000000000..4409b7f6cab --- /dev/null +++ b/modules/processing/parsers/MACO/PikaBot.py @@ -0,0 +1,35 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.PikaBot import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="PikaBot", other=raw_config) + + if "C2" in raw_config: + [parsed_result.http.append(MACOModel.Http(uri=c2, usage="c2")) for c2 in raw_config["C2"]] + parsed_result.binaries.append(MACOModel.Binary(datatype="payload", data=raw_config["Powershell"])) + elif "C2s" in raw_config: + parsed_result.version = raw_config["Version"] 
+ parsed_result.campaign_id.append(raw_config["Campaign Name"]) + parsed_result.registry.append(MACOModel.Registry(key=raw_config["Registry Key"])) + for c2 in raw_config["C2s"]: + host, port = c2.split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, user_agent=raw_config["User Agent"])) + + return parsed_result + + +class PikaBot(Extractor): + author = "kevoreilly" + family = "PikaBot" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PlugX.py b/modules/processing/parsers/MACO/PlugX.py new file mode 100644 index 00000000000..c2ae83ea952 --- /dev/null +++ b/modules/processing/parsers/MACO/PlugX.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.PlugX import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="PlugX", other=raw_config) + + return parsed_result + + +class PlugX(Extractor): + author = "kevoreilly" + family = "PlugX" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PoisonIvy.py b/modules/processing/parsers/MACO/PoisonIvy.py new file mode 100644 index 00000000000..e18175fa42d --- /dev/null +++ b/modules/processing/parsers/MACO/PoisonIvy.py @@ -0,0 +1,45 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.PoisonIvy import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="PoisonIvy", other=raw_config) + + if "Campaign ID" in raw_config: + 
parsed_result.campaign_id.append(raw_config["Campaign ID"]) + if "Group ID" in raw_config: + parsed_result.identifier.append(raw_config["Group ID"]) + if "Domains" in raw_config: + for domain_port in raw_config["Domains"].split("|"): + host, port = domain_port.split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port)) + if "Password" in raw_config: + parsed_result.password.append(raw_config["Password"]) + if "Mutex" in raw_config: + parsed_result.mutex.append(raw_config["Mutex"]) + + for field in list(raw_config.keys()): + value = raw_config[field] + if value.lower() == "true": + parsed_result.capability_enabled.append(field) + elif value.lower() == "false": + parsed_result.capability_disabled.append(field) + + return parsed_result + + +class PoisonIvy(Extractor): + author = "kevoreilly" + family = "PoisonIvy" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + output = extract_config(stream.read()) + if output: + return convert_to_MACO(output[0]) diff --git a/modules/processing/parsers/MACO/Punisher.py b/modules/processing/parsers/MACO/Punisher.py new file mode 100644 index 00000000000..6bdfbb3c1be --- /dev/null +++ b/modules/processing/parsers/MACO/Punisher.py @@ -0,0 +1,46 @@ +import os +from copy import deepcopy + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Punisher import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + config_copy = deepcopy(raw_config) + parsed_result = MACOModel( + family="Punisher", + campaign_id=[config_copy["Campaign Name"]], + password=[config_copy["Password"]], + registry=[MACOModel.Registry(key=config_copy["Registry Key"])], + paths=[MACOModel.Path(path=os.path.join(config_copy["Install Path"], config_copy["Install Name"]))], + http=[MACOModel.Http(hostname=config_copy["Domain"], port=config_copy["Port"])], + other=raw_config, + ) + + for
field in raw_config.keys(): + value = raw_config[field] + if value.lower() == "true": + parsed_result.capability_enabled.append(field) + elif value.lower() == "false": + parsed_result.capability_disabled.append(field) + else: + parsed_result.other[field] = value + + return parsed_result + + +class Punisher(Extractor): + author = "kevoreilly" + family = "Punisher" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + output = extract_config(stream.read()) + if output: + return convert_to_MACO(output[0]) diff --git a/modules/processing/parsers/MACO/QakBot.py b/modules/processing/parsers/MACO/QakBot.py new file mode 100644 index 00000000000..d8ee5c8c023 --- /dev/null +++ b/modules/processing/parsers/MACO/QakBot.py @@ -0,0 +1,28 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.QakBot import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="QakBot", other=raw_config) + + for address in raw_config.get("address", []) + raw_config.get("C2s", []): + host, port = address.split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) + + return parsed_result + + +class QakBot(Extractor): + author = "kevoreilly" + family = "QakBot" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/QuasarRAT.py b/modules/processing/parsers/MACO/QuasarRAT.py new file mode 100644 index 00000000000..e7a0aadf5e9 --- /dev/null +++ b/modules/processing/parsers/MACO/QuasarRAT.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.QuasarRAT import extract_config + + +def 
convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="QuasarRAT", other=raw_config) + + return parsed_result + + +class QuasarRAT(Extractor): + author = "kevoreilly" + family = "QuasarRAT" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Quickbind.py b/modules/processing/parsers/MACO/Quickbind.py new file mode 100644 index 00000000000..2a0b9101766 --- /dev/null +++ b/modules/processing/parsers/MACO/Quickbind.py @@ -0,0 +1,35 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Quickbind import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Quickbind", other=raw_config) + + if "Mutex" in raw_config: + parsed_result.mutex = raw_config["Mutex"] + + for c2 in raw_config.get("C2", []): + parsed_result.http.append(MACOModel.Http(hostname=c2, usage="c2")) + + if "Encryption Key" in raw_config: + parsed_result.encryption.append(MACOModel.Encryption(key=raw_config["Encryption Key"])) + + return parsed_result + + +class Quickbind(Extractor): + author = "kevoreilly" + family = "Quickbind" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RCSession.py b/modules/processing/parsers/MACO/RCSession.py new file mode 100644 index 00000000000..3c79bc89e32 --- /dev/null +++ b/modules/processing/parsers/MACO/RCSession.py @@ -0,0 +1,44 @@ 
+from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.RCSession import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="RCSession", other=raw_config) + + for address in raw_config.get("c2_address", []): + parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) + + if "directory" in raw_config: + parsed_result.paths.append(MACOModel.Path(path=raw_config["directory"], usage="install")) + + service = {} + + if "servicename" in raw_config: + service["name"] = raw_config["servicename"] + if "servicedisplayname" in raw_config: + service["display_name"] = raw_config["servicedisplayname"] + if "servicedescription" in raw_config: + service["description"] = raw_config["servicedescription"] + if "filename" in raw_config: + service["dll"] = raw_config["filename"] + + if service: + parsed_result.service.append(MACOModel.Service(**service)) + + return parsed_result + + +class RCSession(Extractor): + author = "kevoreilly" + family = "RCSession" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/REvil.py b/modules/processing/parsers/MACO/REvil.py new file mode 100644 index 00000000000..f05f9196733 --- /dev/null +++ b/modules/processing/parsers/MACO/REvil.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.REvil import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="REvil", other=raw_config) + + return parsed_result + + +class REvil(Extractor): + author = "kevoreilly" + family = "REvil" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def 
run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RedLeaf.py b/modules/processing/parsers/MACO/RedLeaf.py new file mode 100644 index 00000000000..22038c489ab --- /dev/null +++ b/modules/processing/parsers/MACO/RedLeaf.py @@ -0,0 +1,36 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.RedLeaf import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="RedLeaf", other=raw_config) + + for address in raw_config.get("c2_address", []): + parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) + + if "missionid" in raw_config: + parsed_result.campaign_id.append(raw_config["missionid"]) + + if "mutex" in raw_config: + parsed_result.mutex.append(raw_config["mutex"]) + + if "key" in raw_config: + parsed_result.other["key"] = raw_config["key"] + + return parsed_result + + +class RedLeaf(Extractor): + author = "kevoreilly" + family = "RedLeaf" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RedLine.py b/modules/processing/parsers/MACO/RedLine.py new file mode 100644 index 00000000000..3db57707287 --- /dev/null +++ b/modules/processing/parsers/MACO/RedLine.py @@ -0,0 +1,27 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.RedLine import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="RedLine", other=raw_config) + + if "C2" in raw_config: + host, port = raw_config["C2"].split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) + + return 
parsed_result + + +class RedLine(Extractor): + author = "kevoreilly" + family = "RedLine" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Remcos.py b/modules/processing/parsers/MACO/Remcos.py new file mode 100644 index 00000000000..739dd52b54f --- /dev/null +++ b/modules/processing/parsers/MACO/Remcos.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Remcos import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Remcos", other=raw_config) + + return parsed_result + + +class Remcos(Extractor): + author = "kevoreilly" + family = "Remcos" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Retefe.py b/modules/processing/parsers/MACO/Retefe.py new file mode 100644 index 00000000000..119d0af7c4c --- /dev/null +++ b/modules/processing/parsers/MACO/Retefe.py @@ -0,0 +1,24 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Retefe import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Retefe", other=raw_config) + + return parsed_result + + +class Retefe(Extractor): + author = "kevoreilly" + family = "Retefe" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git 
a/modules/processing/parsers/MACO/Rhadamanthys.py b/modules/processing/parsers/MACO/Rhadamanthys.py new file mode 100644 index 00000000000..d98b140a08e --- /dev/null +++ b/modules/processing/parsers/MACO/Rhadamanthys.py @@ -0,0 +1,27 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Rhadamanthys import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Rhadamanthys", other=raw_config) + parsed_result.http = [MACOModel.Http(hostname=raw_config["C2"], usage="c2")] + + return parsed_result + + +class Rhadamanthys(Extractor): + author = "kevoreilly" + family = "Rhadamanthys" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Rozena.py b/modules/processing/parsers/MACO/Rozena.py new file mode 100644 index 00000000000..ba019d79cd9 --- /dev/null +++ b/modules/processing/parsers/MACO/Rozena.py @@ -0,0 +1,27 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Rozena import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Rozena", other=raw_config) + parsed_result.http = [MACOModel.Http(hostname=raw_config["C2"], port=raw_config["Port"], usage="c2")] + + return parsed_result + + +class Rozena(Extractor): + author = "kevoreilly" + family = "Rozena" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return 
convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SmallNet.py b/modules/processing/parsers/MACO/SmallNet.py new file mode 100644 index 00000000000..5b81de3f3af --- /dev/null +++ b/modules/processing/parsers/MACO/SmallNet.py @@ -0,0 +1,26 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.SmallNet import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="SmallNet", other=raw_config) + + return parsed_result + + +class SmallNet(Extractor): + author = "kevoreilly" + family = "SmallNet" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + output = extract_config(stream.read()) + if output: + config = output if isinstance(output, dict) else output[0] + return convert_to_MACO(config) diff --git a/modules/processing/parsers/MACO/SmokeLoader.py b/modules/processing/parsers/MACO/SmokeLoader.py new file mode 100644 index 00000000000..ba61c9c08de --- /dev/null +++ b/modules/processing/parsers/MACO/SmokeLoader.py @@ -0,0 +1,26 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.SmokeLoader import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel( + family="SmokeLoader", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["C2s"]] + ) + + return parsed_result + + +class SmokeLoader(Extractor): + author = "kevoreilly" + family = "SmokeLoader" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Socks5Systemz.py b/modules/processing/parsers/MACO/Socks5Systemz.py new file mode 100644 
index 00000000000..9e6e2ab93a9 --- /dev/null +++ b/modules/processing/parsers/MACO/Socks5Systemz.py @@ -0,0 +1,31 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Socks5Systemz import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel( + family="Socks5Systemz", + other=raw_config, + http=[MACOModel.Http(hostname=c2, usage="c2") for c2 in raw_config.get("C2s", [])] + + [MACOModel.Http(hostname=decoy, usage="decoy") for decoy in raw_config.get("Dummy domain", [])], + ) + + return parsed_result + + +class Socks5Systemz(Extractor): + author = "kevoreilly" + family = "Socks5Systemz" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SparkRAT.py b/modules/processing/parsers/MACO/SparkRAT.py new file mode 100644 index 00000000000..deae637bd99 --- /dev/null +++ b/modules/processing/parsers/MACO/SparkRAT.py @@ -0,0 +1,34 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.SparkRAT import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="SparkRAT", other=raw_config) + + url = f"http{'s' if raw_config['secure'] else ''}://{raw_config['host']}:{raw_config['port']}{raw_config['path']}" + + parsed_result.http.append( + MACOModel.Http(uri=url, hostname=raw_config["host"], port=raw_config["port"], path=raw_config["path"]) + ) + + parsed_result.identifier.append(raw_config["uuid"]) + + return parsed_result + + +class SparkRAT(Extractor): + author = "kevoreilly" + family = "SparkRAT" 
+ last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SquirrelWaffle.py b/modules/processing/parsers/MACO/SquirrelWaffle.py new file mode 100644 index 00000000000..0790a7b6653 --- /dev/null +++ b/modules/processing/parsers/MACO/SquirrelWaffle.py @@ -0,0 +1,26 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.SquirrelWaffle import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel( + family="SquirrelWaffle", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["URLs"]] + ) + + return parsed_result + + +class SquirrelWaffle(Extractor): + author = "kevoreilly" + family = "SquirrelWaffle" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Stealc.py b/modules/processing/parsers/MACO/Stealc.py new file mode 100644 index 00000000000..9cd38a935b7 --- /dev/null +++ b/modules/processing/parsers/MACO/Stealc.py @@ -0,0 +1,26 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Stealc import RULE_SOURCE, extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel( + family="Stealc", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["C2"]] + ) + + return parsed_result + + +class Stealc(Extractor): + author = "kevoreilly" + family = "Stealc" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + 
yara_rule = RULE_SOURCE + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Strrat.py b/modules/processing/parsers/MACO/Strrat.py new file mode 100644 index 00000000000..58a5d5f93d9 --- /dev/null +++ b/modules/processing/parsers/MACO/Strrat.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Strrat import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Strrat", other=raw_config) + + return parsed_result + + +class Strrat(Extractor): + author = "kevoreilly" + family = "Strrat" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/TSCookie.py b/modules/processing/parsers/MACO/TSCookie.py new file mode 100644 index 00000000000..7344c47e381 --- /dev/null +++ b/modules/processing/parsers/MACO/TSCookie.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.TSCookie import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="TSCookie", other=raw_config) + + return parsed_result + + +class TSCookie(Extractor): + author = "kevoreilly" + family = "TSCookie" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/TrickBot.py b/modules/processing/parsers/MACO/TrickBot.py new file mode 100644 index 00000000000..5962d7b46d7 --- /dev/null +++ 
b/modules/processing/parsers/MACO/TrickBot.py @@ -0,0 +1,24 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.TrickBot import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="TrickBot", other=raw_config) + + return parsed_result + + +class TrickBot(Extractor): + author = "kevoreilly" + family = "TrickBot" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/UrsnifV3.py b/modules/processing/parsers/MACO/UrsnifV3.py new file mode 100644 index 00000000000..2e8caefbdb0 --- /dev/null +++ b/modules/processing/parsers/MACO/UrsnifV3.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.UrsnifV3 import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="UrsnifV3", other=raw_config) + + return parsed_result + + +class UrsnifV3(Extractor): + author = "kevoreilly" + family = "UrsnifV3" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/VenomRat.py b/modules/processing/parsers/MACO/VenomRat.py new file mode 100644 index 00000000000..de2f70ddd85 --- /dev/null +++ b/modules/processing/parsers/MACO/VenomRat.py @@ -0,0 +1,23 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.VenomRAT import extract_config + + +def 
convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="VenomRAT", other=raw_config) + + return parsed_result + + +class VenomRAT(Extractor): + author = "kevoreilly" + family = "VenomRAT" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/WarzoneRAT.py b/modules/processing/parsers/MACO/WarzoneRAT.py new file mode 100644 index 00000000000..186ed365448 --- /dev/null +++ b/modules/processing/parsers/MACO/WarzoneRAT.py @@ -0,0 +1,27 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.WarzoneRAT import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="WarzoneRAT", other=raw_config) + + if "C2" in raw_config: + host, port = raw_config["C2"].split(":") + parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) + + return parsed_result + + +class WarzoneRAT(Extractor): + author = "kevoreilly" + family = "WarzoneRAT" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/XWorm.py b/modules/processing/parsers/MACO/XWorm.py new file mode 100644 index 00000000000..8d81f728c21 --- /dev/null +++ b/modules/processing/parsers/MACO/XWorm.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.XWorm import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="XWorm", other=raw_config) + + return parsed_result + + +class XWorm(Extractor): + author = "kevoreilly" + family = "XWorm" + last_modified 
= "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/XenoRAT.py b/modules/processing/parsers/MACO/XenoRAT.py new file mode 100644 index 00000000000..31fc541f702 --- /dev/null +++ b/modules/processing/parsers/MACO/XenoRAT.py @@ -0,0 +1,26 @@ +import os + +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.XenoRAT import extract_config + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="XenoRAT", other=raw_config) + + return parsed_result + + +class XenoRAT(Extractor): + author = "kevoreilly" + family = "XenoRAT" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Zloader.py b/modules/processing/parsers/MACO/Zloader.py new file mode 100644 index 00000000000..a6e085f4ce0 --- /dev/null +++ b/modules/processing/parsers/MACO/Zloader.py @@ -0,0 +1,33 @@ +from maco.extractor import Extractor +from maco.model import ExtractorModel as MACOModel + +from modules.processing.parsers.CAPE.Zloader import extract_config, rule_source + + +def convert_to_MACO(raw_config: dict): + if not raw_config: + return None + + parsed_result = MACOModel(family="Zloader", other=raw_config) + + if "Campaign ID" in raw_config: + parsed_result.campaign_id = [raw_config["Campaign ID"]] + + if "RC4 key" in raw_config: + parsed_result.encryption = [MACOModel.Encryption(algorithm="RC4", key=raw_config[:"RC4 key"])] + + for address in raw_config.get("address", []): 
+ parsed_result.http.append(MACOModel.Http(uri=address)) + + return parsed_result + + +class Zloader(Extractor): + author = "kevoreilly" + family = "Zloader" + last_modified = "2024-10-26" + sharing = "TLP:CLEAR" + yara_rule = rule_source + + def run(self, stream, matches): + return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/__init__.py b/modules/processing/parsers/MACO/__init__.py new file mode 100644 index 00000000000..f39e5e8d683 --- /dev/null +++ b/modules/processing/parsers/MACO/__init__.py @@ -0,0 +1 @@ +# Init diff --git a/modules/processing/parsers/MACO/test_maco.py b/modules/processing/parsers/MACO/test_maco.py new file mode 100644 index 00000000000..d502c95b81c --- /dev/null +++ b/modules/processing/parsers/MACO/test_maco.py @@ -0,0 +1,10 @@ +from maco.extractor import Extractor + + +class Test(Extractor): + author = "test" + family = "test" + last_modified = "2024-10-20" + + def run(self, stream, matches): + pass diff --git a/poetry.lock b/poetry.lock index d817b4c22af..26063d23dd1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "alembic" @@ -275,6 +275,19 @@ files = [ {file = "capstone-4.0.2.tar.gz", hash = "sha256:2842913092c9b69fd903744bc1b87488e1451625460baac173056e1808ec1c66"}, ] +[[package]] +name = "cart" +version = "1.2.2" +description = "CaRT Neutering format" +optional = false +python-versions = ">=3.6" +files = [ + {file = "cart-1.2.2-py2.py3-none-any.whl", hash = "sha256:c111398038683c85d3edcadaa3b16183461907bdb613e05cbb60d381f2886309"}, +] + +[package.dependencies] +pycryptodome = "*" + [[package]] name = "certifi" version = "2024.7.4" @@ -839,13 +852,13 @@ files = [ [[package]] name = "django" -version = "4.2.15" +version = "4.2.16" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"}, + {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"}, ] [package.dependencies] @@ -1579,6 +1592,23 @@ files = [ {file = "LnkParse3-1.2.0.tar.gz", hash = "sha256:102b2aba6c2896127cb719f814a8579210368f9277fd5ec0d0151fe070166e1d"}, ] +[[package]] +name = "maco" +version = "1.1.8" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "maco-1.1.8-py3-none-any.whl", hash = "sha256:ab2d1d8e846c0abc455d16f718ba71dda5492ddc22533484156090aa4439fb06"}, + {file = "maco-1.1.8.tar.gz", hash = "sha256:e0985efdf645d3c55e3d4d4f2bf40b8d2260fa4add608bb8e8fdefba0500cb4a"}, +] + +[package.dependencies] +cart = "*" +pydantic = ">=2.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +yara-python = "*" + 
[[package]] name = "mako" version = "1.3.5" @@ -2452,6 +2482,47 @@ files = [ {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] +[[package]] +name = "pycryptodome" +version = "3.21.0" +description = "Cryptographic library for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3"}, + {file = "pycryptodome-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568"}, + {file = "pycryptodome-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819"}, + {file = 
"pycryptodome-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4"}, + {file = "pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8"}, + {file = "pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2"}, + {file = "pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764"}, + {file = "pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca"}, + {file = "pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = 
"sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0"}, + {file = "pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58"}, + {file = "pycryptodome-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c"}, + {file = "pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297"}, +] + [[package]] name = "pycryptodomex" version = "3.20.0" @@ -2628,6 +2699,8 @@ files = [ {file = "pydeep2-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2283893e25826b547dd1e5c71a010e86ddfd7270e2f2b8c90973c1d7984c7eb7"}, {file = "pydeep2-0.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f248e3161deb53d46a9368a7c164e36d83004faf2f11625d47a5cf23a6bdd2cb"}, {file = "pydeep2-0.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a13fca9be89a9fa8d92a4f49d7b9191eef94555f8ddf030fb2be4c8c15ad618c"}, + {file = "pydeep2-0.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cb4757db97ac15ddf034c21cd6bab984f841586b6d53984e63c9a7803b2cd4"}, + {file = "pydeep2-0.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7809a1d6640bdbee68f075d53229d05229e11b4711f232728dd540f68e6483a4"}, {file = "pydeep2-0.5.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fedc1c9660cb5d0b73ad0b5f1dbffe16990e6721cbfc6454571a4b9882d0ea4"}, {file = "pydeep2-0.5.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca68f7d63e2ef510d410d20b223e8e97df41707fb50c4c526b6dd1d8698d9e6"}, {file = "pydeep2-0.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199d05d8b4b7544509a2ba4802ead4b41dfe7859e0ecea9d9be9e41939f11660"}, @@ -3230,6 +3303,29 @@ files = [ [package.dependencies] pycryptodomex = "*" +[[package]] +name = "rat-king-parser" +version = "3.0.0" +description = "A robust, multiprocessing-capable, multi-family RAT config parser/config extractor for AsyncRAT, DcRAT, VenomRAT, QuasarRAT, XWorm, Xeno RAT, and cloned/derivative RAT families." 
+optional = false +python-versions = ">=3.10" +files = [] +develop = false + +[package.dependencies] +cryptography = "*" +dnfile = "*" +yara-python = "*" + +[package.extras] +maco = ["maco", "validators"] + +[package.source] +type = "git" +url = "https://github.com/jeFF0Falltrades/rat_king_parser" +reference = "ab849ec8face38c8dac3f803ae5fe7cf8be26583" +resolved_reference = "ab849ec8face38c8dac3f803ae5fe7cf8be26583" + [[package]] name = "regex" version = "2021.7.6" @@ -3672,30 +3768,51 @@ description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ + {file = "SQLAlchemy-1.4.50-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-win32.whl", hash = "sha256:6c78e3fb4a58e900ec433b6b5f4efe1a0bf81bbb366ae7761c6e0051dd310ee3"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-win_amd64.whl", hash = "sha256:d55f7a33e8631e15af1b9e67c9387c894fedf6deb1a19f94be8731263c51d515"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:324b1fdd50e960a93a231abb11d7e0f227989a371e3b9bd4f1259920f15d0304"}, {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-win32.whl", hash = "sha256:8bdab03ff34fc91bfab005e96f672ae207d87e0ac7ee716d74e87e7046079d8b"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-win_amd64.whl", hash = "sha256:52e01d60b06f03b0a5fc303c8aada405729cbc91a56a64cead8cb7c0b9b13c1a"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:77fde9bf74f4659864c8e26ac08add8b084e479b9a18388e7db377afc391f926"}, {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-win32.whl", hash = "sha256:e86c920b7d362cfa078c8b40e7765cbc34efb44c1007d7557920be9ddf138ec7"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-win_amd64.whl", hash = "sha256:6b3df20fbbcbcd1c1d43f49ccf3eefb370499088ca251ded632b8cbaee1d497d"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, {file = 
"SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-win32.whl", hash = "sha256:1b9c4359d3198f341480e57494471201e736de459452caaacf6faa1aca852bd8"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-win_amd64.whl", hash = "sha256:35e4520f7c33c77f2636a1e860e4f8cafaac84b0b44abe5de4c6c8890b6aaa6d"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-win32.whl", hash = 
"sha256:7b4396452273aedda447e5aebe68077aa7516abf3b3f48408793e771d696f397"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-win_amd64.whl", hash = "sha256:752f9df3dddbacb5f42d8405b2d5885675a93501eb5f86b88f2e47a839cf6337"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:35c7ed095a4b17dbc8813a2bfb38b5998318439da8e6db10a804df855e3a9e3a"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-win32.whl", hash = "sha256:8a219688297ee5e887a93ce4679c87a60da4a5ce62b7cb4ee03d47e9e767f558"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-win_amd64.whl", hash = "sha256:a648770db002452703b729bdcf7d194e904aa4092b9a4d6ab185b48d13252f63"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4be4da121d297ce81e1ba745a0a0521c6cf8704634d7b520e350dce5964c71ac"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-win32.whl", hash = "sha256:a7c9b9dca64036008962dd6b0d9fdab2dfdbf96c82f74dbd5d86006d8d24a30f"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-win_amd64.whl", hash = "sha256:df200762efbd672f7621b253721644642ff04a6ff957236e0e2fe56d9ca34d2c"}, {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, ] @@ -4307,13 +4424,13 @@ files = [ [[package]] name = "werkzeug" -version = "3.0.3" +version = "3.0.6" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"}, - {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"}, + {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, + {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, ] [package.dependencies] @@ -4515,7 +4632,10 @@ docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +[extras] +maco = ["maco"] + [metadata] lock-version = "2.0" python-versions = ">=3.10, <4.0" -content-hash = "74bde9cd19ea301395e0dbfdcc24884dc7e34f5735beaa685fb0be0c6d9f0860" +content-hash = "ab65373ef8c8244e2d8237cb6208783a0276fa62f52545098cb12170c1cd7d76" diff --git a/pyproject.toml b/pyproject.toml index 9f43c919121..2d50b19820a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ pyzipper = "0.3.5" flare-capa = "7.3.0" Cython = "0.29.24" # pyre2 = "0.3.6" # Dead for python3.11 -Django = "4.2.15" +Django = "4.2.16" SQLAlchemy = "1.4.50" SQLAlchemy-Utils = "0.41.1" Jinja2 = "^3.1.4" @@ -73,8 +73,9 @@ ruff = "0.0.290" paramiko = "3.4.0" psutil = "5.9.8" # peepdf-3 = "4.0.0" +maco = "1.1.8" -Werkzeug = "3.0.3" +Werkzeug = "3.0.6" packaging = "23.1" setuptools = "70.0.0" # command line config manipulation @@ -90,8 +91,12 @@ setproctitle = "1.3.2" # tmp dependency to fix vuln certifi = "2024.7.4" +rat_king_parser = {git = "https://github.com/jeFF0Falltrades/rat_king_parser", rev = "ab849ec8face38c8dac3f803ae5fe7cf8be26583"} +[tool.poetry.extras] +maco = ["maco"] + [tool.poetry.dev-dependencies] black = "^24.3.0" isort = "^5.10.1" diff --git a/requirements.txt b/requirements.txt 
index fc3b1acafa9..4b920a5528a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -61,6 +61,8 @@ capstone==4.0.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:9d1a9096c5f875b11290317722ed44bb6e7c52e50cc79d791f142bce968c49aa \ --hash=sha256:c3d9b443d1adb40ee2d9a4e7341169b76476ddcf3a54c03793b16cdc7cd35c5a \ --hash=sha256:da442f979414cf27e4621e70e835880878c858ea438c4f0e957e132593579e37 +cart==1.2.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:c111398038683c85d3edcadaa3b16183461907bdb613e05cbb60d381f2886309 certifi==2024.7.4 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 @@ -356,9 +358,9 @@ django-recaptcha==3.0.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:253197051288923cae675d7eff91b619e3775311292a5dbaf27a8a55ffebc670 django-settings-export==1.2.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:fceeae49fc597f654c1217415d8e049fc81c930b7154f5d8f28c432db738ff79 -django==4.2.15 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30 \ - --hash=sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a +django==4.2.16 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898 \ + --hash=sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad djangorestframework==3.15.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20 \ --hash=sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad @@ -566,6 +568,9 @@ jinja2==3.1.4 ; python_version >= "3.10" and python_version < "4.0" \ lnkparse3==1.2.0 ; python_version >= "3.10" and 
python_version < "4.0" \ --hash=sha256:102b2aba6c2896127cb719f814a8579210368f9277fd5ec0d0151fe070166e1d \ --hash=sha256:b97f9a3dfffa62ecbd5f1f6561d8b5b75b0045241482b4a980657d5aac696ee3 +maco==1.1.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:ab2d1d8e846c0abc455d16f718ba71dda5492ddc22533484156090aa4439fb06 \ + --hash=sha256:e0985efdf645d3c55e3d4d4f2bf40b8d2260fa4add608bb8e8fdefba0500cb4a mako==1.3.5 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a \ --hash=sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc @@ -1055,6 +1060,39 @@ pyasn1==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc +pycryptodome==3.21.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8 \ + --hash=sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d \ + --hash=sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0 \ + --hash=sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93 \ + --hash=sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4 \ + --hash=sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a \ + --hash=sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764 \ + --hash=sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca \ + --hash=sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e \ + --hash=sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e \ + --hash=sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd \ + 
--hash=sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f \ + --hash=sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6 \ + --hash=sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb \ + --hash=sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e \ + --hash=sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1 \ + --hash=sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6 \ + --hash=sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a \ + --hash=sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c \ + --hash=sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2 \ + --hash=sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4 \ + --hash=sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3 \ + --hash=sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819 \ + --hash=sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568 \ + --hash=sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53 \ + --hash=sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3 \ + --hash=sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8 \ + --hash=sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd \ + --hash=sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b \ + --hash=sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b \ + --hash=sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297 \ + --hash=sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58 pycryptodomex==3.20.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1 \ --hash=sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305 \ @@ -1187,11 +1225,13 
@@ pydeep2==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2283893e25826b547dd1e5c71a010e86ddfd7270e2f2b8c90973c1d7984c7eb7 \ --hash=sha256:44ce447e3253a69d3393f3cc53e3a87a48fe3ff9861793736a7bc218a1b95d77 \ --hash=sha256:4bf00de2fe1918e4d698fe8195a5c0a3a0c3050a2e3e15583748cfd20b427153 \ + --hash=sha256:7809a1d6640bdbee68f075d53229d05229e11b4711f232728dd540f68e6483a4 \ --hash=sha256:7ca68f7d63e2ef510d410d20b223e8e97df41707fb50c4c526b6dd1d8698d9e6 \ --hash=sha256:a13fca9be89a9fa8d92a4f49d7b9191eef94555f8ddf030fb2be4c8c15ad618c \ --hash=sha256:add24d7aa0386b285fd3e99632719714efabeb13d7b03a015b7c64d1f588f815 \ --hash=sha256:c2063cbb053e5ce684cc45fff3e72c063b26aa85e41e6435cab0c658ad9e3e1e \ --hash=sha256:c65dc910d782fa2bc97e1b28a78d77c4bada037d14b63e3e75a1fa5918d642c5 \ + --hash=sha256:d1cb4757db97ac15ddf034c21cd6bab984f841586b6d53984e63c9a7803b2cd4 \ --hash=sha256:e14b310b820d895a7354be7fd025de874892df249cbfb3ad8a524459e1511fd8 \ --hash=sha256:ef00ca5681a2c4ad5dc744db5f8ae5406d3f13121b38d84cc58dfb8fce4c3dc2 \ --hash=sha256:f248e3161deb53d46a9368a7c164e36d83004faf2f11625d47a5cf23a6bdd2cb @@ -1408,6 +1448,7 @@ pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" \ pyzipper==0.3.5 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:6040069654dad040cf8708d4db78ce5829238e2091ad8006a47d97d6ffe275d6 \ --hash=sha256:e696e9d306427400e23e13a766c7614b64d9fc3316bdc71bbcc8f0070a14f150 +rat-king-parser @ git+https://github.com/jeFF0Falltrades/rat_king_parser@ab849ec8face38c8dac3f803ae5fe7cf8be26583 ; python_version >= "3.10" and python_version < "4.0" regex==2021.7.6 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f \ --hash=sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad \ @@ -1654,27 +1695,48 @@ sqlalchemy==1.4.50 ; python_version >= "3.10" and python_version < "4.0" \ 
--hash=sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131 \ --hash=sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3 \ --hash=sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7 \ + --hash=sha256:1b9c4359d3198f341480e57494471201e736de459452caaacf6faa1aca852bd8 \ --hash=sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace \ --hash=sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a \ --hash=sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab \ + --hash=sha256:324b1fdd50e960a93a231abb11d7e0f227989a371e3b9bd4f1259920f15d0304 \ --hash=sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814 \ + --hash=sha256:35c7ed095a4b17dbc8813a2bfb38b5998318439da8e6db10a804df855e3a9e3a \ + --hash=sha256:35e4520f7c33c77f2636a1e860e4f8cafaac84b0b44abe5de4c6c8890b6aaa6d \ --hash=sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf \ --hash=sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49 \ + --hash=sha256:4be4da121d297ce81e1ba745a0a0521c6cf8704634d7b520e350dce5964c71ac \ + --hash=sha256:52e01d60b06f03b0a5fc303c8aada405729cbc91a56a64cead8cb7c0b9b13c1a \ + --hash=sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8 \ + --hash=sha256:6b3df20fbbcbcd1c1d43f49ccf3eefb370499088ca251ded632b8cbaee1d497d \ + --hash=sha256:6c78e3fb4a58e900ec433b6b5f4efe1a0bf81bbb366ae7761c6e0051dd310ee3 \ + --hash=sha256:752f9df3dddbacb5f42d8405b2d5885675a93501eb5f86b88f2e47a839cf6337 \ + --hash=sha256:77fde9bf74f4659864c8e26ac08add8b084e479b9a18388e7db377afc391f926 \ + --hash=sha256:7b4396452273aedda447e5aebe68077aa7516abf3b3f48408793e771d696f397 \ --hash=sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c \ --hash=sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796 \ + --hash=sha256:8a219688297ee5e887a93ce4679c87a60da4a5ce62b7cb4ee03d47e9e767f558 \ 
--hash=sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75 \ --hash=sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18 \ + --hash=sha256:8bdab03ff34fc91bfab005e96f672ae207d87e0ac7ee716d74e87e7046079d8b \ --hash=sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c \ + --hash=sha256:a648770db002452703b729bdcf7d194e904aa4092b9a4d6ab185b48d13252f63 \ + --hash=sha256:a7c9b9dca64036008962dd6b0d9fdab2dfdbf96c82f74dbd5d86006d8d24a30f \ --hash=sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516 \ --hash=sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9 \ --hash=sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293 \ --hash=sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc \ --hash=sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5 \ --hash=sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb \ + --hash=sha256:d55f7a33e8631e15af1b9e67c9387c894fedf6deb1a19f94be8731263c51d515 \ --hash=sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5 \ --hash=sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4 \ + --hash=sha256:df200762efbd672f7621b253721644642ff04a6ff957236e0e2fe56d9ca34d2c \ + --hash=sha256:e86c920b7d362cfa078c8b40e7765cbc34efb44c1007d7557920be9ddf138ec7 \ --hash=sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436 \ --hash=sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be \ + --hash=sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7 \ + --hash=sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030 \ --hash=sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4 \ @@ -1935,9 +1997,9 @@ websockets==13.1 ; 
python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9 \ --hash=sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee \ --hash=sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6 -werkzeug==3.0.3 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18 \ - --hash=sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8 +werkzeug==3.0.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17 \ + --hash=sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d wheel==0.44.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 diff --git a/tests/test_demux.py b/tests/test_demux.py index 605974ee71e..e24a9664c41 100644 --- a/tests/test_demux.py +++ b/tests/test_demux.py @@ -85,9 +85,8 @@ def test_demux_sample_pe32(self, grab_sample): def test_demux_package(self): empty_file = tempfile.NamedTemporaryFile() - assert demux.demux_sample(filename=empty_file.name, package="Emotet", options="foo", use_sflock=False) == [ - (empty_file.name, "") - ] + demuxed, _ = demux.demux_sample(filename=empty_file.name, package="Emotet", options="foo", use_sflock=False) + demuxed == [(empty_file.name, "", "")] empty_file.close() def test_options2passwd(self): diff --git a/tests/test_objects.py b/tests/test_objects.py index ed0966aab7e..d6859890375 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -223,7 +223,14 @@ def test_get_type(self, test_files): [ ("temp_pe32", "PE32 executable (GUI) Intel 80386, for MS Windows", True), # emulated magic type ("temp_pe64", "PE32+ executable (GUI) x86-64, for MS Windows", 
True), # emulated magic type - ("temp_pe_aarch64", "MS-DOS executable PE32 executable Aarch64, for MS Windows", True), + ( + "temp_pe_aarch64", + ( + "PE32 executable Aarch64, for MS Windows", + "MS-DOS executable PE32 executable Aarch64, for MS Windows", + ), + True, + ), ("temp_elf32", "ELF 32-bit LSB", False), ("temp_elf64", "ELF 64-bit LSB", False), ("temp_macho_arm64", "Mach-O 64-bit arm64 executable", False), @@ -232,7 +239,9 @@ def test_get_type(self, test_files): def test_get_type_pe(self, file_fixture, expected, is_pe, request): path = request.getfixturevalue(file_fixture) file = File(path) - assert file.get_type() == expected + if isinstance(expected, str): + expected = (expected,) + assert file.get_type() in expected assert bool(file.pe) == is_pe def test_get_yara(self, hello_file, yara_compiled): diff --git a/tests/web/test_submission_views.py b/tests/web/test_submission_views.py index 007fe0dca85..5896cf79428 100644 --- a/tests/web/test_submission_views.py +++ b/tests/web/test_submission_views.py @@ -71,7 +71,6 @@ def test_submission_page(self): self.assertEqual('value="" title="">Detect Automatically', options[0]) self.one_should_match('value="exe" title=".*">exe - .*', options) - self.one_should_match('value="Unpacker" title="[^"]*">Unpacker', options) self.one_should_match(".*ichitaro.*", options) self.one_should_match(".*chromium.*", options) self.assertGreater(len(options), 10) @@ -80,7 +79,7 @@ def test_submission_page(self): def test_package_exclusion(self): """Pick a couple of packages to exclude, to test exclusion""" - web_conf.package_exclusion.packages = "chromium,chromium_ext,ichitaro,Shellcode" + web_conf.package_exclusion.packages = "chromium,chromium_ext,ichitaro,shellcode" submission_page = self.client.get("/submit/#file") self.assertIsNotNone(submission_page.content) self.assertIn("Analysis Package", submission_page.content.decode()) @@ -90,9 +89,8 @@ def test_package_exclusion(self): # excluded packages should not be listed 
self.none_should_match(".*ichitaro.*", options) self.none_should_match(".*chromium.*", options) - # Package 'Shellcode' was excluded, but not 'Shellcode-Unpacker'. - self.none_should_match('.*"Shellcode".*', options) - self.one_should_match('.*"Shellcode-Unpacker".*', options) + # Package 'shellcode' was excluded. + self.none_should_match('.*"shellcode".*', options) def test_get_package_exe_info(self): """Get the package info from exe.py.""" diff --git a/tests_parsers/test_agenttesla.py b/tests_parsers/test_agenttesla.py new file mode 100644 index 00000000000..b52cd064ce6 --- /dev/null +++ b/tests_parsers/test_agenttesla.py @@ -0,0 +1,49 @@ +from contextlib import suppress + +from modules.processing.parsers.CAPE.AgentTesla import extract_config + +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.AgentTesla import convert_to_MACO + + HAVE_MACO = True + + +def test_agenttesla(): + # AgentTeslaV5 + with open("tests/data/malware/893f4dc8f8a1dcee05a0840988cf90bc93c1cda5b414f35a6adb5e9f40678ce9", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "Protocol": "SMTP", + "C2": "mail.guestequipment.com.au", + "Username": "sendlog@guestequipment.com.au", + "Password": "Clone89!", + "EmailTo": "info@marethon.com", + "Persistence_Filename": "newfile.exe", + "ExternalIPCheckServices": ["http://ip-api.com/line/?fields=hosting"], + } + + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "AgentTesla", + "other": { + "Protocol": "SMTP", + "C2": "mail.guestequipment.com.au", + "Username": "sendlog@guestequipment.com.au", + "Password": "Clone89!", + "EmailTo": "info@marethon.com", + "Persistence_Filename": "newfile.exe", + "ExternalIPCheckServices": ["http://ip-api.com/line/?fields=hosting"], + }, + "smtp": [ + { + "username": "sendlog@guestequipment.com.au", + "password": "Clone89!", + "hostname": "mail.guestequipment.com.au", + "mail_to": ["info@marethon.com"], 
+ "usage": "c2", + } + ], + "http": [{"uri": "http://ip-api.com/line/?fields=hosting", "usage": "other"}], + "paths": [{"path": "newfile.exe", "usage": "storage"}], + } diff --git a/tests_parsers/test_asyncrat.py b/tests_parsers/test_asyncrat.py new file mode 100644 index 00000000000..ee49867d95d --- /dev/null +++ b/tests_parsers/test_asyncrat.py @@ -0,0 +1,44 @@ +from contextlib import suppress + +from modules.processing.parsers.CAPE.AsyncRAT import extract_config + +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.AsyncRAT import convert_to_MACO + + HAVE_MACO = True + + +def test_asyncrat(): + with open("tests/data/malware/f08b325f5322a698e14f97db29d322e9ee91ad636ac688af352d51057fc56526", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "C2s": ["todfg.duckdns.org"], + "Ports": "6745", + "Version": "0.5.7B", + "Folder": "%AppData%", + "Filename": "updateee.exe", + "Install": "false", + "Mutex": "AsyncMutex_6SI8OkPnk", + "Pastebin": "null", + } + + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "AsyncRAT", + "version": "0.5.7B", + "capability_disabled": ["persistence"], + "mutex": ["AsyncMutex_6SI8OkPnk"], + "other": { + "C2s": ["todfg.duckdns.org"], + "Ports": "6745", + "Version": "0.5.7B", + "Folder": "%AppData%", + "Filename": "updateee.exe", + "Install": "false", + "Mutex": "AsyncMutex_6SI8OkPnk", + "Pastebin": "null", + }, + "http": [{"hostname": "todfg.duckdns.org", "port": 6, "usage": "c2"}], + "paths": [{"path": "%AppData%/updateee.exe", "usage": "install"}], + } diff --git a/tests_parsers/test_aurorastealer.py b/tests_parsers/test_aurorastealer.py new file mode 100644 index 00000000000..a09cc324614 --- /dev/null +++ b/tests_parsers/test_aurorastealer.py @@ -0,0 +1,38 @@ +from contextlib import suppress + +from modules.processing.parsers.CAPE.AuroraStealer import extract_config + +HAVE_MACO = False +with suppress(ImportError): + 
from modules.processing.parsers.MACO.AuroraStealer import convert_to_MACO + + HAVE_MACO = True + + +def test_aurorastealer(): + with open("tests/data/malware/8da8821d410b94a2811ce7ae80e901d7e150ad3420d677b158e45324a6606ac4", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "BuildID": "x64pump", + "MD5Hash": "f29f33b296b35ec5e7fc3ee784ef68ee", + "C2": "77.91.85.73", + "Architecture": "X64", + "BuildGroup": "x64pump", + "BuildAccept": "0", + "Date": "2023-04-06 19", + } + + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "AuroraStealer", + "other": { + "BuildID": "x64pump", + "MD5Hash": "f29f33b296b35ec5e7fc3ee784ef68ee", + "C2": "77.91.85.73", + "Architecture": "X64", + "BuildGroup": "x64pump", + "BuildAccept": "0", + "Date": "2023-04-06 19", + }, + "http": [{"hostname": "77.91.85.73", "usage": "c2"}], + } diff --git a/tests_parsers/test_blackdropper.py b/tests_parsers/test_blackdropper.py new file mode 100644 index 00000000000..cf8326f56cd --- /dev/null +++ b/tests_parsers/test_blackdropper.py @@ -0,0 +1,36 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. 
+ +from contextlib import suppress + +from modules.processing.parsers.CAPE.BlackDropper import extract_config + +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.BlackDropper import convert_to_MACO + + HAVE_MACO = True + + +def test_blackdropper(): + with open("tests/data/malware/f8026ae3237bdd885e5fcaceb86bcab4087d8857e50ba472ca79ce44c12bc257", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "urls": ["http://72.5.42.222:8568/api/dll/", "http://72.5.42.222:8568/api/fileZip"], + "directories": ["\\Music\\dkcydqtwjv"], + "campaign": "oFwQ0aQ3v", + } + + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "BlackDropper", + "campaign_id": ["oFwQ0aQ3v"], + "other": { + "urls": ["http://72.5.42.222:8568/api/dll/", "http://72.5.42.222:8568/api/fileZip"], + "directories": ["\\Music\\dkcydqtwjv"], + "campaign": "oFwQ0aQ3v", + }, + "http": [{"uri": "http://72.5.42.222:8568/api/dll/"}, {"uri": "http://72.5.42.222:8568/api/fileZip"}], + "paths": [{"path": "\\Music\\dkcydqtwjv"}], + } diff --git a/tests_parsers/test_bumblebee.py b/tests_parsers/test_bumblebee.py index 88aba1604a5..c26509687a4 100644 --- a/tests_parsers/test_bumblebee.py +++ b/tests_parsers/test_bumblebee.py @@ -2,10 +2,27 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.BumbleBee import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.BumbleBee import convert_to_MACO + + HAVE_MACO = True + def test_bumblebee(): with open("tests/data/malware/f8a6eddcec59934c42ea254cdd942fb62917b5898f71f0feeae6826ba4f3470d", "rb") as data: conf = extract_config(data.read()) assert conf == {"Botnet ID": "YTBSBbNTWU", "Campaign ID": "1904r", "Data": "XNgHUGLrCD", "C2s": ["444"]} + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "BumbleBee", + "campaign_id": ["1904r"], + "identifier": ["YTBSBbNTWU"], + "other": {"Botnet ID": "YTBSBbNTWU", "Campaign ID": "1904r", "Data": "XNgHUGLrCD", "C2s": ["444"]}, + "binaries": [{"data": "XNgHUGLrCD"}], + "http": [{"hostname": "444", "usage": "c2"}], + } diff --git a/tests_parsers/test_carbanak.py b/tests_parsers/test_carbanak.py index 8460665d911..bb0d512bccf 100644 --- a/tests_parsers/test_carbanak.py +++ b/tests_parsers/test_carbanak.py @@ -1,7 +1,21 @@ +from contextlib import suppress + from modules.processing.parsers.CAPE.Carbanak import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Carbanak import convert_to_MACO + + HAVE_MACO = True + def test_carbanak(): with open("tests/data/malware/c9c1b06cb9c9bd6fc4451f5e2847a1f9524bb2870d7bb6f0ee09b9dd4e3e4c84", "rb") as data: conf = extract_config(data.read()) assert conf["C2"] == ["5.161.223.210:443", "207.174.30.226:443"] + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Carbanak", + "other": {"C2": ["5.161.223.210:443", "207.174.30.226:443"]}, + "http": [{"hostname": "5.161.223.210:443", "usage": "c2"}, {"hostname": "207.174.30.226:443", "usage": "c2"}], + } diff --git a/tests_parsers/test_cobaltstrikebeacon.py b/tests_parsers/test_cobaltstrikebeacon.py 
index 8505e402035..12afcdd3677 100644 --- a/tests_parsers/test_cobaltstrikebeacon.py +++ b/tests_parsers/test_cobaltstrikebeacon.py @@ -2,13 +2,21 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. +from contextlib import suppress + from modules.processing.parsers.CAPE.CobaltStrikeBeacon import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.CobaltStrikeBeacon import convert_to_MACO + + HAVE_MACO = True + def test_csb(): with open("tests/data/malware/2588fd3232138f587e294aea5cc9a0611d1e165b199743552c84bfddc1e4c063", "rb") as data: conf = extract_config(data.read()) - assert conf == { + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { "BeaconType": ["HTTP"], "Port": 4848, "SleepTime": 60000, @@ -55,3 +63,63 @@ def test_csb(): "bUsesCookies": "True", "HostHeader": "", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "CobaltStrikeBeacon", + "capability_enabled": ["ProcInject_StartRWX", "ProcInject_UseRWX", "UsesCookies"], + "capability_disabled": ["StageCleanup", "CFGCaution"], + "sleep_delay": 60000, + "sleep_delay_jitter": 0, + "other": { + "BeaconType": ["HTTP"], + "Port": 4848, + "SleepTime": 60000, + "MaxGetSize": 1048576, + "Jitter": 0, + "MaxDNS": "Not Found", + "PublicKey": "30819f300d06092a864886f70d010101050003818d0030818902818100bebe41805d3c15a738caf3e308a992d4d507ce827996a8c9d783c766963e7e73083111729ae0abc1b49af0bcf803efdcaf83ac694fb53d043a88e9333f169e026a3c4e63cc6d4cd1aa5e199cb95eec500f948ac472c0ab2eda385d35fb8592d74b1154a1c671afb310eccb0b139ee1100907bfcdd8dfbf3385803a11bc252995020301000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "C2Server": "192.144.206.100,/load", + 
"UserAgent": "Not Found", + "HttpPostUri": "/submit.php", + "Malleable_C2_Instructions": [], + "HttpGet_Metadata": "Not Found", + "HttpPost_Metadata": "Not Found", + "SpawnTo": "d7a9ca15a07f82bfd3b63020da38aa16", + "PipeName": "Not Found", + "DNS_Idle": "Not Found", + "DNS_Sleep": "Not Found", + "SSH_Host": "Not Found", + "SSH_Port": "Not Found", + "SSH_Username": "Not Found", + "SSH_Password_Plaintext": "Not Found", + "SSH_Password_Pubkey": "Not Found", + "HttpGet_Verb": "GET", + "HttpPost_Verb": "POST", + "HttpPostChunk": 0, + "Spawnto_x86": "%windir%\\syswow64\\rundll32.exe", + "Spawnto_x64": "%windir%\\sysnative\\rundll32.exe", + "CryptoScheme": 0, + "Proxy_Config": "Not Found", + "Proxy_User": "Not Found", + "Proxy_Password": "Not Found", + "Proxy_Behavior": "Use IE settings", + "Watermark": 391144938, + "bStageCleanup": "False", + "bCFGCaution": "False", + "KillDate": 0, + "bProcInject_StartRWX": "True", + "bProcInject_UseRWX": "True", + "bProcInject_MinAllocSize": 0, + "ProcInject_PrependAppend_x86": "Empty", + "ProcInject_PrependAppend_x64": "Empty", + "ProcInject_Execute": ["CreateThread", "SetThreadContext", "CreateRemoteThread", "RtlCreateUserThread"], + "ProcInject_AllocationMethod": "VirtualAllocEx", + "bUsesCookies": "True", + "HostHeader": "", + }, + "http": [ + {"hostname": "192.144.206.100", "port": 4848, "path": "/load", "method": "GET", "usage": "c2"}, + {"hostname": "192.144.206.100", "port": 4848, "path": "/submit.php", "method": "POST", "usage": "c2"}, + ], + "paths": [{"path": "%windir%\\syswow64\\rundll32.exe"}, {"path": "%windir%\\sysnative\\rundll32.exe"}], + } diff --git a/tests_parsers/test_darkgate.py b/tests_parsers/test_darkgate.py index 3052934e0b1..7040df6f51f 100644 --- a/tests_parsers/test_darkgate.py +++ b/tests_parsers/test_darkgate.py @@ -1,7 +1,21 @@ +from contextlib import suppress + from modules.processing.parsers.CAPE.DarkGate import extract_config +HAVE_MACO = False +with suppress(ImportError): + from 
modules.processing.parsers.MACO.DarkGate import convert_to_MACO + + HAVE_MACO = True + def test_darkgate(): with open("tests/data/malware/1c3ae64795b61034080be00601b947819fe071efd69d7fc791a99ec666c2043d", "rb") as data: conf = extract_config(data.read()) assert conf["C2"] == ["http://80.66.88.145"] + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "DarkGate", + "other": {"C2": ["http://80.66.88.145"]}, + "http": [{"uri": "http://80.66.88.145", "usage": "c2"}], + } diff --git a/tests_parsers/test_icedid.py b/tests_parsers/test_icedid.py index c4b6b93f4d3..8b8b389a15d 100644 --- a/tests_parsers/test_icedid.py +++ b/tests_parsers/test_icedid.py @@ -1,7 +1,22 @@ +from contextlib import suppress + from modules.processing.parsers.CAPE.IcedIDLoader import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.IcedIDLoader import convert_to_MACO + + HAVE_MACO = True + def test_icedid(): with open("tests/data/malware/7aaf80eb1436b946b2bd710ab57d2dcbaad2b1553d45602f2f3af6f2cfca5212", "rb") as data: conf = extract_config(data.read()) assert conf == {"C2": "anscowerbrut.com", "Campaign": 2738000827} + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "IcedIDLoader", + "campaign_id": ["2738000827"], + "other": {"C2": "anscowerbrut.com", "Campaign": 2738000827}, + "http": [{"hostname": "anscowerbrut.com", "usage": "c2"}], + } diff --git a/tests_parsers/test_koiloader.py b/tests_parsers/test_koiloader.py new file mode 100644 index 00000000000..38a74bf700a --- /dev/null +++ b/tests_parsers/test_koiloader.py @@ -0,0 +1,28 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. 
+ +from contextlib import suppress + +from modules.processing.parsers.CAPE.KoiLoader import extract_config + +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.KoiLoader import convert_to_MACO + + HAVE_MACO = True + + +def test_koiloader(): + with open("tests/data/malware/b462e3235c7578450b2b56a8aff875a3d99d22f6970a01db3ba98f7ecb6b01a0", "rb") as data: + conf = extract_config(data.read()) + assert conf == {"C2": ["http://91.202.233.209/hypermetropia.php", "https://admiralpub.ca/wp-content/uploads/2017"]} + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "KoiLoader", + "other": {"C2": ["http://91.202.233.209/hypermetropia.php", "https://admiralpub.ca/wp-content/uploads/2017"]}, + "http": [ + {"uri": "http://91.202.233.209/hypermetropia.php", "usage": "c2"}, + {"uri": "https://admiralpub.ca/wp-content/uploads/2017", "usage": "c2"}, + ], + } diff --git a/tests_parsers/test_latrodectus.py b/tests_parsers/test_latrodectus.py index 0348b115470..410bac0a9e2 100644 --- a/tests_parsers/test_latrodectus.py +++ b/tests_parsers/test_latrodectus.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.Latrodectus import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Latrodectus import convert_to_MACO + + HAVE_MACO = True + def test_latrodectus(): with open("tests/data/malware/a547cff9991a713535e5c128a0711ca68acf9298cc2220c4ea0685d580f36811", "rb") as data: @@ -98,3 +106,188 @@ def test_latrodectus(): "URLS|%d|%s\r\n", ], } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Latrodectus", + "version": "1.1", + "campaign_id": ["1053565364"], + "identifier": ["Novik"], + "decoded_strings": [ + "/c ipconfig /all", + "C:\\Windows\\System32\\cmd.exe", + "/c systeminfo", + "C:\\Windows\\System32\\cmd.exe", + "/c nltest /domain_trusts", + "C:\\Windows\\System32\\cmd.exe", + "/c nltest /domain_trusts /all_trusts", + "C:\\Windows\\System32\\cmd.exe", + "/c net view /all /domain", + "C:\\Windows\\System32\\cmd.exe", + "/c net view /all", + "C:\\Windows\\System32\\cmd.exe", + '/c net group "Domain Admins" /domain', + "C:\\Windows\\System32\\cmd.exe", + "/Node:localhost /Namespace:\\\\root\\SecurityCenter2 Path AntiVirusProduct Get * /Format:List", + "C:\\Windows\\System32\\wbem\\wmic.exe", + "/c net config workstation", + "C:\\Windows\\System32\\cmd.exe", + "/c wmic.exe /node:localhost /namespace:\\\\root\\SecurityCenter2 path AntiVirusProduct Get DisplayName | findstr /V /B /C:displayName || echo No Antivirus installed", + "C:\\Windows\\System32\\cmd.exe", + "/c whoami /groups", + "C:\\Windows\\System32\\cmd.exe", + ".dll", + ".exe", + '"%s"', + "rundll32.exe", + '"%s", %s %s', + "runnung", + ":wtfbbq", + "%s%s", + "%s\\%d.dll", + "%d.dat", + "%s\\%s", + 'init -zzzz="%s\\%s"', + "front", + "/files/", + "Novik", + ".exe", + "Content-Type: application/x-www-form-urlencoded", + "POST", + "GET", + "curl/7.88.1", + "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + 
"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + "CLEARURL", + "URLS", + "COMMAND", + "ERROR", + "12345", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + "%s%d.dll", + "%s%d.exe", + "LogonTrigger", + "%x%x", + "TimeTrigger", + "PT1H%02dM", + "&mac=", + "%04d-%02d-%02dT%02d:%02d:%02d", + "%02x", + ":%02x", + "PT0S", + "&computername=%s", + "&domain=%s", + "\\*.dll", + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", + "%04X%04X%04X%04X%08X%04X", + "%04X%04X%04X%04X%08X%04X", + "\\Registry\\Machine\\", + "AppData", + "Desktop", + "Startup", + "Personal", + "Local AppData", + "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", + "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s,%s", + "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s", + "URLS", + "URLS|%d|%s\r\n", + ], + "other": { + "C2": ["https://arsimonopa.com/live/", "https://lemonimonakio.com/live/"], + "Group name": "Novik", + "Campaign ID": 1053565364, + "Version": "1.1", + "RC4 key": "12345", + "Strings": [ + "/c ipconfig /all", + "C:\\Windows\\System32\\cmd.exe", + "/c systeminfo", + "C:\\Windows\\System32\\cmd.exe", + "/c nltest /domain_trusts", + "C:\\Windows\\System32\\cmd.exe", + "/c nltest /domain_trusts /all_trusts", + "C:\\Windows\\System32\\cmd.exe", + "/c net view /all /domain", + "C:\\Windows\\System32\\cmd.exe", + "/c net view /all", + "C:\\Windows\\System32\\cmd.exe", + '/c net group "Domain Admins" /domain', + "C:\\Windows\\System32\\cmd.exe", + "/Node:localhost /Namespace:\\\\root\\SecurityCenter2 Path AntiVirusProduct Get * /Format:List", + "C:\\Windows\\System32\\wbem\\wmic.exe", + "/c net config workstation", + "C:\\Windows\\System32\\cmd.exe", + "/c 
wmic.exe /node:localhost /namespace:\\\\root\\SecurityCenter2 path AntiVirusProduct Get DisplayName | findstr /V /B /C:displayName || echo No Antivirus installed", + "C:\\Windows\\System32\\cmd.exe", + "/c whoami /groups", + "C:\\Windows\\System32\\cmd.exe", + ".dll", + ".exe", + '"%s"', + "rundll32.exe", + '"%s", %s %s', + "runnung", + ":wtfbbq", + "%s%s", + "%s\\%d.dll", + "%d.dat", + "%s\\%s", + 'init -zzzz="%s\\%s"', + "front", + "/files/", + "Novik", + ".exe", + "Content-Type: application/x-www-form-urlencoded", + "POST", + "GET", + "curl/7.88.1", + "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + "CLEARURL", + "URLS", + "COMMAND", + "ERROR", + "12345", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", + "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", + "%s%d.dll", + "%s%d.exe", + "LogonTrigger", + "%x%x", + "TimeTrigger", + "PT1H%02dM", + "&mac=", + "%04d-%02d-%02dT%02d:%02d:%02d", + "%02x", + ":%02x", + "PT0S", + "&computername=%s", + "&domain=%s", + "\\*.dll", + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", + "%04X%04X%04X%04X%08X%04X", + "%04X%04X%04X%04X%08X%04X", + "\\Registry\\Machine\\", + "AppData", + "Desktop", + "Startup", + "Personal", + "Local AppData", + "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", + "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s,%s", + "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s", + "URLS", + "URLS|%d|%s\r\n", + ], + }, + "http": [ + {"uri": "https://arsimonopa.com/live/", "usage": "c2"}, + {"uri": "https://lemonimonakio.com/live/", "usage": "c2"}, + ], + "encryption": [{"algorithm": "RC4", "key": "12345"}], + } diff --git a/tests_parsers/test_lumma.py 
b/tests_parsers/test_lumma.py new file mode 100644 index 00000000000..e341169bcdd --- /dev/null +++ b/tests_parsers/test_lumma.py @@ -0,0 +1,59 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +from contextlib import suppress + +from modules.processing.parsers.CAPE.Lumma import extract_config + +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Lumma import convert_to_MACO + + HAVE_MACO = True + + +def test_lumma(): + with open("tests/data/malware/5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "C2": [ + "delaylacedmn.site", + "writekdmsnu.site", + "agentyanlark.site", + "bellykmrebk.site", + "underlinemdsj.site", + "commandejorsk.site", + "possiwreeste.site", + "famikyjdiag.site", + "agentyanlark.site", + ] + } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Lumma", + "other": { + "C2": [ + "delaylacedmn.site", + "writekdmsnu.site", + "agentyanlark.site", + "bellykmrebk.site", + "underlinemdsj.site", + "commandejorsk.site", + "possiwreeste.site", + "famikyjdiag.site", + "agentyanlark.site", + ] + }, + "http": [ + {"hostname": "delaylacedmn.site", "usage": "c2"}, + {"hostname": "writekdmsnu.site", "usage": "c2"}, + {"hostname": "agentyanlark.site", "usage": "c2"}, + {"hostname": "bellykmrebk.site", "usage": "c2"}, + {"hostname": "underlinemdsj.site", "usage": "c2"}, + {"hostname": "commandejorsk.site", "usage": "c2"}, + {"hostname": "possiwreeste.site", "usage": "c2"}, + {"hostname": "famikyjdiag.site", "usage": "c2"}, + {"hostname": "agentyanlark.site", "usage": "c2"}, + ], + } diff --git a/tests_parsers/test_nanocore.py b/tests_parsers/test_nanocore.py index d27a0b1cb6a..af28b87d8ae 100644 --- a/tests_parsers/test_nanocore.py +++ 
b/tests_parsers/test_nanocore.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. +from contextlib import suppress + from modules.processing.parsers.CAPE.NanoCore import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.NanoCore import convert_to_MACO + + HAVE_MACO = True + def test_nanocore(): with open("tests/data/malware/f1bd511b69f95c26f489157272884a12225c1cf7a453207bfc46ce48a91eae96", "rb") as data: @@ -41,3 +49,60 @@ def test_nanocore(): "BackupDnsServer": "8.8.4.4", "cncs": ["6coinc.zapto.org:6696", "127.0.0.1:6696"], } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "NanoCore", + "version": "1.2.2.0", + "capability_enabled": [ + "RunOnStartup", + "BypassUserAccountControl", + "ClearZoneIdentifier", + "PreventSystemSleep", + "UseCustomDnsServer", + ], + "capability_disabled": [ + "RequestElevation", + "ClearAccessControl", + "SetCriticalProcess", + "ActivateAwayMode", + "EnableDebugMode", + ], + "mutex": ["dc5ce709-95b6-4a26-9175-16a1a8446828"], + "other": { + "BuildTime": "2023-11-22 00:25:26.569697", + "Version": "1.2.2.0", + "Mutex": "dc5ce709-95b6-4a26-9175-16a1a8446828", + "DefaultGroup": "6coinc", + "PrimaryConnectionHost": "6coinc.zapto.org", + "BackupConnectionHost": "127.0.0.1", + "ConnectionPort": "6696", + "RunOnStartup": "True", + "RequestElevation": "False", + "BypassUserAccountControl": "True", + "ClearZoneIdentifier": "True", + "ClearAccessControl": "False", + "SetCriticalProcess": "False", + "PreventSystemSleep": "True", + "ActivateAwayMode": "False", + "EnableDebugMode": "False", + "RunDelay": "0", + "ConnectDelay": "4000", + "RestartDelay": "5000", + "TimeoutInterval": "5000", + "KeepAliveTimeout": "30000", + "MutexTimeout": "5000", + "LanTimeout": "2500", + "WanTimeout": "8000", + "BufferSize": "65535", + "MaxPacketSize": 
"10485760", + "GCThreshold": "10485760", + "UseCustomDnsServer": "True", + "PrimaryDnsServer": "8.8.8.8", + "BackupDnsServer": "8.8.4.4", + "cncs": ["6coinc.zapto.org:6696", "127.0.0.1:6696"], + }, + "http": [ + {"hostname": "6coinc.zapto.org", "port": 6696, "usage": "c2"}, + {"hostname": "127.0.0.1", "port": 6696, "usage": "c2"}, + ], + } diff --git a/tests_parsers/test_njrat.py b/tests_parsers/test_njrat.py index 8ca43e83a9c..106d9dd1662 100644 --- a/tests_parsers/test_njrat.py +++ b/tests_parsers/test_njrat.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. +from contextlib import suppress + from modules.processing.parsers.CAPE.Njrat import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Njrat import convert_to_MACO + + HAVE_MACO = True + def test_njrat(): with open("tests/data/malware/09bf19c00f3d8c63b8896edadd4622724a01f7d74de583733ee57a7d11eacd86", "rb") as data: @@ -13,6 +21,17 @@ def test_njrat(): "campaign id": "HacKed", "version": "Njrat 0.7 Golden By Hassan Amiri", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Njrat", + "version": "Njrat 0.7 Golden By Hassan Amiri", + "other": { + "cncs": ["peter-bikini.gl.at.ply.gg:64215"], + "campaign id": "HacKed", + "version": "Njrat 0.7 Golden By Hassan Amiri", + }, + "http": [{"hostname": "peter-bikini.gl.at.ply.gg", "port": 64215, "usage": "c2"}], + } """ diff --git a/tests_parsers/test_oyster.py b/tests_parsers/test_oyster.py index bf77aac1bc7..85811e2f80e 100644 --- a/tests_parsers/test_oyster.py +++ b/tests_parsers/test_oyster.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.Oyster import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Oyster import convert_to_MACO + + HAVE_MACO = True + def test_oyster(): with open("tests/data/malware/8bae0fa9f589cd434a689eebd7a1fde949cc09e6a65e1b56bb620998246a1650", "rb") as data: @@ -13,3 +21,15 @@ def test_oyster(): "Dll Version": "v1.0 #ads 2", "Strings": ["api/connect", "Content-Type: application/json", "api/session"], } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Oyster", + "version": "v1.0 #ads 2", + "decoded_strings": ["api/connect", "Content-Type: application/json", "api/session"], + "other": { + "C2": ["https://connectivity-check.linkpc.net/"], + "Dll Version": "v1.0 #ads 2", + "Strings": ["api/connect", "Content-Type: application/json", "api/session"], + }, + "http": [{"uri": "https://connectivity-check.linkpc.net/", "usage": "c2"}], + } diff --git a/tests_parsers/test_pikabot.py b/tests_parsers/test_pikabot.py index 2562dc84441..52d38194e55 100644 --- a/tests_parsers/test_pikabot.py +++ b/tests_parsers/test_pikabot.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.PikaBot import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.PikaBot import convert_to_MACO + + HAVE_MACO = True + def test_pikabot(): with open("tests/data/malware/7600d0efc92ecef06320a1a6ffd85cd90d3d98470a381b03202e81d93bcdd03c", "rb") as data: @@ -27,3 +35,86 @@ def test_pikabot(): "Campaign Name": "GG24_T@T@f0adda360d2b4ccda11468e026526576", "Registry Key": "MWnkl", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "PikaBot", + "version": "1.8.32-beta", + "campaign_id": ["GG24_T@T@f0adda360d2b4ccda11468e026526576"], + "other": { + "C2s": [ + "154.53.55.165:13783", + "158.247.240.58:5632", + "70.34.223.164:5000", + "70.34.199.64:9785", + "45.77.63.237:5632", + "198.38.94.213:2224", + "94.72.104.80:5000", + "84.46.240.42:2083", + "154.12.236.248:13786", + "94.72.104.77:13724", + "209.126.86.48:1194", + ], + "Version": "1.8.32-beta", + "User Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + "Campaign Name": "GG24_T@T@f0adda360d2b4ccda11468e026526576", + "Registry Key": "MWnkl", + }, + "http": [ + { + "hostname": "154.53.55.165", + "port": 13783, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "158.247.240.58", + "port": 5632, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; 
Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "70.34.223.164", + "port": 5000, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "70.34.199.64", + "port": 9785, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "45.77.63.237", + "port": 5632, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "198.38.94.213", + "port": 2224, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "94.72.104.80", + "port": 5000, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "84.46.240.42", + "port": 2083, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 
14.0.7233; ms-office;", + }, + { + "hostname": "154.12.236.248", + "port": 13786, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "94.72.104.77", + "port": 13724, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + { + "hostname": "209.126.86.48", + "port": 1194, + "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", + }, + ], + "registry": [{"key": "MWnkl"}], + } diff --git a/tests_parsers/test_quickbind.py b/tests_parsers/test_quickbind.py index 089371fc437..094790ff831 100644 --- a/tests_parsers/test_quickbind.py +++ b/tests_parsers/test_quickbind.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.Quickbind import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Quickbind import convert_to_MACO + + HAVE_MACO = True + def test_quickbind(): with open("tests/data/malware/bfcb215f86fc4f8b4829f6ddd5acb118e80fb5bd977453fc7e8ef10a52fc83b7", "rb") as data: @@ -13,3 +21,15 @@ def test_quickbind(): "Mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], "C2": ["185.49.69.41"], } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Quickbind", + "mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], + "other": { + "Encryption Key": "24de21a8dc08434c", + "Mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], + "C2": ["185.49.69.41"], + }, + "http": [{"hostname": "185.49.69.41", "usage": "c2"}], + "encryption": [{"key": "24de21a8dc08434c"}], + } diff --git a/tests_parsers/test_redline.py b/tests_parsers/test_redline.py index 96d133b72eb..8c455d06bac 100644 --- a/tests_parsers/test_redline.py +++ b/tests_parsers/test_redline.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file "docs/LICENSE" for copying permission. 
+from contextlib import suppress + from modules.processing.parsers.CAPE.RedLine import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.RedLine import convert_to_MACO + + HAVE_MACO = True + def test_redline(): with open("tests/data/malware/000608d875638ba7d6c467ece976c1496e6a6ec8ce3e7f79e0fd195ae3045078", "rb") as data: @@ -14,3 +22,14 @@ def test_redline(): "Botnet": "krast", "Key": "Formative", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "RedLine", + "other": { + "Authorization": "9059ea331e4599de3746df73ccb24514", + "C2": "77.91.68.68:19071", + "Botnet": "krast", + "Key": "Formative", + }, + "http": [{"hostname": "77.91.68.68", "port": 19071, "usage": "c2"}], + } diff --git a/tests_parsers/test_smokeloader.py b/tests_parsers/test_smokeloader.py index b77499c256d..216829dcd94 100644 --- a/tests_parsers/test_smokeloader.py +++ b/tests_parsers/test_smokeloader.py @@ -1,7 +1,24 @@ +from contextlib import suppress + from modules.processing.parsers.CAPE.SmokeLoader import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.SmokeLoader import convert_to_MACO + + HAVE_MACO = True + def test_smokeloader(): with open("tests/data/malware/6929fff132c05ae7d348867f4ea77ba18f84fb8fae17d45dde3571c9e33f01f8", "rb") as data: conf = extract_config(data.read()) assert conf == {"C2s": ["http://host-file-host6.com/", "http://host-host-file8.com/"]} + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "SmokeLoader", + "other": {"C2s": ["http://host-file-host6.com/", "http://host-host-file8.com/"]}, + "http": [ + {"uri": "http://host-file-host6.com/", "usage": "c2"}, + {"uri": "http://host-host-file8.com/", "usage": "c2"}, + ], + } diff --git a/tests_parsers/test_snake.py b/tests_parsers/test_snake.py new file mode 100644 index 
00000000000..489418eca86 --- /dev/null +++ b/tests_parsers/test_snake.py @@ -0,0 +1,14 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +from modules.processing.parsers.CAPE.Snake import extract_config + + +def test_snake(): + with open("tests/data/malware/7b81c12fb7db9f0c317f36022ecac9faa45f5efefe24085c339c43db8b963ae2", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "Type": "Telegram", + "C2": "https://api.telegram.org/bot7952998151:AAFh98iY7kaOlHAR0qftD3ZcqGbQm0TXbBY/sendMessage?chat_id=5692813672", + } diff --git a/tests_parsers/test_sparkrat.py b/tests_parsers/test_sparkrat.py index 412c9165ae5..9e681b8efab 100644 --- a/tests_parsers/test_sparkrat.py +++ b/tests_parsers/test_sparkrat.py @@ -1,5 +1,13 @@ +from contextlib import suppress + from modules.processing.parsers.CAPE.SparkRAT import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.SparkRAT import convert_to_MACO + + HAVE_MACO = True + def test_sparkrat(): with open("tests/data/malware/ec349cfacc7658eed3640f1c475eb958c5f05bae7c2ed74d4cdb7493176daeba", "rb") as data: @@ -12,3 +20,17 @@ def test_sparkrat(): "uuid": "8dc7e7d8f8576f3e55a00850b72887db", "key": "a1348fb8969ad7a9f85ac173c2027622135e52e0e6d94d10e6a81916a29648ac", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "SparkRAT", + "identifier": ["8dc7e7d8f8576f3e55a00850b72887db"], + "other": { + "secure": False, + "host": "67.217.62.106", + "port": 4443, + "path": "/", + "uuid": "8dc7e7d8f8576f3e55a00850b72887db", + "key": "a1348fb8969ad7a9f85ac173c2027622135e52e0e6d94d10e6a81916a29648ac", + }, + "http": [{"uri": "http://67.217.62.106:4443/", "hostname": "67.217.62.106", "port": 4443, "path": "/"}], + } diff --git a/tests_parsers/test_stealc.py b/tests_parsers/test_stealc.py new 
file mode 100644 index 00000000000..12a98def5df --- /dev/null +++ b/tests_parsers/test_stealc.py @@ -0,0 +1,13 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +from modules.processing.parsers.CAPE.Stealc import extract_config + + +def test_stealc(): + with open("tests/data/malware/619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470", "rb") as data: + conf = extract_config(data.read()) + assert conf == { + "C2": ["http://95.217.125.57/2f571d994666c8cb.php"], + } diff --git a/tests_parsers/test_zloader.py b/tests_parsers/test_zloader.py index 2168f2e9c7a..dda237b9ef0 100644 --- a/tests_parsers/test_zloader.py +++ b/tests_parsers/test_zloader.py @@ -2,8 +2,16 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. +from contextlib import suppress + from modules.processing.parsers.CAPE.Zloader import extract_config +HAVE_MACO = False +with suppress(ImportError): + from modules.processing.parsers.MACO.Zloader import convert_to_MACO + + HAVE_MACO = True + def test_zloader(): with open("tests/data/malware/adbd0c7096a7373be82dd03df1aae61cb39e0a155c00bbb9c67abc01d48718aa", "rb") as data: @@ -14,3 +22,15 @@ def test_zloader(): "address": ["https://dem.businessdeep.com"], "Public key": "-----BEGIN PUBLIC KEY-----MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDKGAOWVkikqE7TyKIMtWI8dFsaleTaJNXMJNIPnRE/fGCzqrV+rtY3+ex4MCHEtq2Vwppthf0Rglv8OiWgKlerIN5P6NEyCfIsFYUMDfldQTF03VES8GBIvHq5SjlIz7lawuwfdjdEkaHfOmmu9srraftkI9gZO8WRQgY1uNdsXwIDAQAB-----END PUBLIC KEY-----", } + if HAVE_MACO: + assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { + "family": "Zloader", + "campaign_id": ["M1"], + "other": { + "Botnet name": "Bing_Mod5", + "Campaign ID": "M1", + "address": ["https://dem.businessdeep.com"], + "Public key": "-----BEGIN PUBLIC
KEY-----MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDKGAOWVkikqE7TyKIMtWI8dFsaleTaJNXMJNIPnRE/fGCzqrV+rtY3+ex4MCHEtq2Vwppthf0Rglv8OiWgKlerIN5P6NEyCfIsFYUMDfldQTF03VES8GBIvHq5SjlIz7lawuwfdjdEkaHfOmmu9srraftkI9gZO8WRQgY1uNdsXwIDAQAB-----END PUBLIC KEY-----", + }, + "http": [{"uri": "https://dem.businessdeep.com"}], + } diff --git a/utils/rooter.py b/utils/rooter.py index 4c8175dfcec..717bdbdc762 100644 --- a/utils/rooter.py +++ b/utils/rooter.py @@ -188,6 +188,86 @@ def disable_nat(interface): run_iptables("-t", "nat", "-D", "POSTROUTING", "-o", interface, "-j", "MASQUERADE") +def enable_mitmdump(interface, client, port): + """Enable mitmdump on this interface.""" + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, + ) + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, + ) + + +def disable_mitmdump(interface, client, port): + """Disable mitmdump on this interface.""" + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, + ) + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, + ) + + def init_rttable(rt_table, interface): """Initialise routing table for this interface using routes from main table.""" @@ -674,6 +754,8 @@ def drop_disable(ipaddr, resultserver_port): "cleanup_vrf": cleanup_vrf, "add_dev_to_vrf": add_dev_to_vrf, "delete_dev_from_vrf": delete_dev_from_vrf, + "enable_mitmdump": enable_mitmdump, + "disable_mitmdump": disable_mitmdump, } if __name__ == "__main__": diff --git a/utils/submit.py b/utils/submit.py index c66378f8ce3..8785b58f5c8 100644 --- 
a/utils/submit.py +++ b/utils/submit.py @@ -345,6 +345,7 @@ def main(): try: tmp_path = store_temp_file(open(file_path, "rb").read(), sanitize_filename(os.path.basename(file_path))) with db.session.begin(): + # ToDo expose extra_details["errors"] task_ids, extra_details = db.demux_sample_and_add_to_db( file_path=tmp_path, package=args.package, diff --git a/web/analysis/views.py b/web/analysis/views.py index 1e7b699dd41..fd3c7158516 100644 --- a/web/analysis/views.py +++ b/web/analysis/views.py @@ -502,6 +502,7 @@ def index(request, page=1): request, "analysis/index.html", { + "title": "Recent Analysis", "files": analyses_files, "static": analyses_static, "urls": analyses_urls, @@ -534,7 +535,8 @@ def pending(request): } ) - return render(request, "analysis/pending.html", {"tasks": pending, "count": len(pending)}) + data = {"tasks": pending, "count": len(pending), "title": "Pending Tasks"} + return render(request, "analysis/pending.html", data) # @require_safe @@ -1701,6 +1703,7 @@ def report(request, task_id): request, "analysis/report.html", { + "title": "Analysis Report", "analysis": report, # ToDo test "file": report.get("target", {}).get("file", {}), @@ -2183,7 +2186,12 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": searched, "error": "Search term too short, minimum 3 characters required"}, + { + "title": "Search", + "analyses": None, + "term": searched, + "error": "Search term too short, minimum 3 characters required", + }, ) # name:foo or name: foo @@ -2210,7 +2218,7 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": searched, "error": "Not all values are integers"}, + {"title": "Search", "analyses": None, "term": searched, "error": "Not all values are integers"}, ) # Escape forward slash characters @@ -2226,13 +2234,13 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": 
searched, "error": "Invalid search term: %s" % term}, + {"title": "Search", "analyses": None, "term": searched, "error": "Invalid search term: %s" % term}, ) else: return render( request, "analysis/search.html", - {"analyses": None, "term": None, "error": "Unable to recognize the search syntax"}, + {"title": "Search", "analyses": None, "term": None, "error": "Unable to recognize the search syntax"}, ) analyses = [] @@ -2252,6 +2260,7 @@ def search(request, searched=""): request, "analysis/search.html", { + "title": "Search Results", "analyses": analyses, "config": enabledconf, "term": searched, @@ -2260,7 +2269,7 @@ def search(request, searched=""): "value_only": value_only, }, ) - return render(request, "analysis/search.html", {"analyses": None, "term": None, "error": None}) + return render(request, "analysis/search.html", {"title": "Search", "analyses": None, "term": None, "error": None}) @require_safe @@ -2461,10 +2470,12 @@ def statistics_data(request, days=7): # psycopg2.OperationalError print(e) return render( - request, "error.html", {"error": "Please restart your database. Probably it had an update or it just down"} + request, + "error.html", + {"title": "Statistics", "error": "Please restart your database. 
Probably it had an update or it just down"}, ) - return render(request, "statistics.html", {"statistics": details, "days": days}) - return render(request, "error.html", {"error": "Provide days as number"}) + return render(request, "statistics.html", {"title": "Statistics", "statistics": details, "days": days}) + return render(request, "error.html", {"title": "Statistics", "error": "Provide days as number"}) on_demand_config_mapper = { diff --git a/web/apiv2/urls.py b/web/apiv2/urls.py index 23571f3281c..aa512323add 100644 --- a/web/apiv2/urls.py +++ b/web/apiv2/urls.py @@ -51,6 +51,7 @@ re_path(r"^tasks/get/evtx/(?P\d+)/$", views.tasks_evtx), re_path(r"^tasks/get/dropped/(?P\d+)/$", views.tasks_dropped), re_path(r"^tasks/get/surifile/(?P\d+)/$", views.tasks_surifile), + re_path(r"^tasks/get/mitmdump/(?P\d+)/$", views.tasks_mitmdump), re_path(r"^tasks/get/payloadfiles/(?P\d+)/$", views.tasks_payloadfiles), re_path(r"^tasks/get/procdumpfiles/(?P\d+)/$", views.tasks_procdumpfiles), re_path(r"^files/view/md5/(?P([a-fA-F\d]{32}))/$", views.files_view), diff --git a/web/apiv2/views.py b/web/apiv2/views.py index 087c9a7794f..882f4005cd0 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -170,7 +170,7 @@ def index(request): parsed[key]["rps"] = "None" parsed[key]["rpm"] = "None" - return render(request, "apiv2/index.html", {"config": parsed}) + return render(request, "apiv2/index.html", {"title": "API", "config": parsed}) @csrf_exempt @@ -187,7 +187,7 @@ def tasks_create_static(request): options = request.data.get("options", "") priority = force_int(request.data.get("priority")) - resp["error"] = False + resp["errors"] = [] files = request.FILES.getlist("file") extra_details = {} task_ids = [] @@ -203,6 +203,8 @@ def tasks_create_static(request): user_id=request.user.id or 0, ) task_ids.extend(task_id) + if extra_details.get("errors"): + resp["errors"].extend(extra_details["errors"]) except CuckooDemuxError as e: resp = {"error": True, "error_value": e} return
Response(resp) @@ -226,7 +228,6 @@ def tasks_create_static(request): resp["url"].append("{0}/submit/status/{1}".format(apiconf.api.get("url"), tid)) else: resp = {"error": True, "error_value": "Error adding task to database"} - return Response(resp) @@ -286,7 +287,6 @@ def tasks_create_file(request): "user_id": request.user.id or 0, } - task_ids_tmp = [] task_machines = [] vm_list = [vm.label for vm in db.list_machines()] @@ -341,11 +341,13 @@ def tasks_create_file(request): if tmp_path: details["path"] = tmp_path details["content"] = content - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(tmp_path).decode(): task_ids_tmp}) + details["errors"].append({os.path.basename(tmp_path).decode(): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if details["task_ids"]: tasks_count = len(details["task_ids"]) @@ -565,11 +567,13 @@ def tasks_create_dlnexec(request): "user_id": request.user.id or 0, } - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(path).decode(): task_ids_tmp}) + details["errors"].append({os.path.basename(path).decode(): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if details["task_ids"]: tasks_count = len(details["task_ids"]) @@ -1621,6 +1625,36 @@ def tasks_evtx(request, task_id): return Response(resp) +@csrf_exempt +@api_view(["GET"]) +def tasks_mitmdump(request, task_id): + if not apiconf.taskmitmdump.get("enabled"): + resp = {"error": True, "error_value": "Mitmdump HAR download API is disabled"} + return Response(resp) + + 
check = validate_task(task_id) + if check["error"]: + return Response(check) + + rtid = check.get("rtid", 0) + if rtid: + task_id = rtid + + harfile = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id, "mitmdump", "dump.har") + if not os.path.normpath(harfile).startswith(ANALYSIS_BASE_PATH): + return render(request, "error.html", {"error": f"File not found: {os.path.basename(harfile)}"}) + if path_exists(harfile): + fname = "%s_dump.har" % task_id + resp = StreamingHttpResponse(FileWrapper(open(harfile, "rb")), content_type="text/plain") + resp["Content-Length"] = os.path.getsize(harfile) + resp["Content-Disposition"] = "attachment; filename=" + fname + return resp + + else: + resp = {"error": True, "error_value": "HAR file does not exist"} + return Response(resp) + + @csrf_exempt @api_view(["GET"]) def tasks_dropped(request, task_id): diff --git a/web/compare/views.py b/web/compare/views.py index 94ee2626946..0ab3b65f048 100644 --- a/web/compare/views.py +++ b/web/compare/views.py @@ -84,7 +84,8 @@ def left(request, left_id): for item in results: records.append(item["_source"]) - return render(request, "compare/left.html", {"left": left, "records": records}) + data = {"title": "Compare", "left": left, "records": records} + return render(request, "compare/left.html", data) @require_safe diff --git a/web/dashboard/views.py b/web/dashboard/views.py index 76191a8fd7b..5a7a09787e1 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -76,6 +76,8 @@ def index(request): tasks = db.count_tasks(status=TASK_COMPLETED) tasks += db.count_tasks(status=TASK_REPORTED) + data = {"title": "Dashboard", "report": {}} + if tasks: # Get the time when the first task started and last one ended. 
started, completed = db.minmax_tasks() @@ -91,4 +93,5 @@ def index(request): report["estimate_day"] = format_number_with_space(int(24 * hourly)) report["top_detections"] = top_detections() - return render(request, "dashboard/index.html", {"report": report}) + data["report"] = report + return render(request, "dashboard/index.html", data) diff --git a/web/submission/views.py b/web/submission/views.py index 44827af8a98..a8c9e7efccd 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -312,6 +312,9 @@ def index(request, task_id=None, resubmit_hash=None): if request.POST.get("nohuman"): options += "nohuman=yes," + if request.POST.get("mitmdump"): + options += "mitmdump=yes," + if web_conf.guacamole.enabled and request.POST.get("interactive"): remote_console = True options += "interactive=1," @@ -366,7 +369,6 @@ def index(request, task_id=None, resubmit_hash=None): opt_apikey = opts.get("apikey", False) status = "ok" - task_ids_tmp = [] existent_tasks = {} details = { "errors": [], @@ -508,11 +510,13 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(filename): task_ids_tmp}) + details["errors"].append({os.path.basename(filename): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if web_conf.web_reporting.get("enabled", False) and web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", hash, search_limit=5) if records: @@ -537,17 +541,19 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status 
== "error": - details["errors"].append({os.path.basename(path): task_ids_tmp}) + details["errors"].append({os.path.basename(path): tasks_details}) else: + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", sha256, search_limit=5) if records: for record in records: if record.get("target").get("file", {}).get("sha256"): existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record) - details["task_ids"] = task_ids_tmp elif task_category == "static": for content, path, sha256 in list_of_tasks: @@ -619,11 +625,13 @@ def index(request, task_id=None, resubmit_hash=None): details["content"] = content details["service"] = "DLnExec" details["source_url"] = samples - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(path): task_ids_tmp}) + details["errors"].append({os.path.basename(path): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) elif task_category == "vtdl": if not settings.VTDL_KEY: @@ -646,6 +654,7 @@ def index(request, task_id=None, resubmit_hash=None): tasks_count = 0 if tasks_count > 0: data = { + "title": "Submission", "tasks": details["task_ids"], "tasks_count": tasks_count, "errors": details["errors"], @@ -654,7 +663,12 @@ def index(request, task_id=None, resubmit_hash=None): } return render(request, "submission/complete.html", data) else: - return render(request, "error.html", {"error": "Error adding task(s) to CAPE's database.", "errors": details["errors"]}) + err_data = { + "error": "Error adding task(s) to CAPE's database.", + "errors": details["errors"], + "title": "Submission Failure", + } + return 
render(request, "error.html", err_data) else: enabledconf = {} enabledconf["vt"] = settings.VTDL_ENABLED @@ -753,6 +767,7 @@ def index(request, task_id=None, resubmit_hash=None): request, "submission/index.html", { + "title": "Submit", "packages": sorted(packages, key=lambda i: i["name"].lower()), "machines": machines, "vpns": vpns_data, @@ -785,7 +800,14 @@ def status(request, task_id): if status == "completed": status = "processing" - response = {"completed": completed, "status": status, "task_id": task_id, "session_data": ""} + response = { + "title": "Task Status", + "completed": completed, + "status": status, + "task_id": task_id, + "session_data": "", + "target": task.sample.sha256 if task.sample.sha256 else task.target, + } if settings.REMOTE_SESSION: machine = db.view_machine_by_label(task.machine) if machine: diff --git a/web/templates/analysis/overlay/index.html b/web/templates/analysis/overlay/index.html deleted file mode 100644 index 361facc9e84..00000000000 --- a/web/templates/analysis/overlay/index.html +++ /dev/null @@ -1,74 +0,0 @@ -{% with overlay=analysis.static.pe.overlay %} -{% with fileinfo=overlay.fileinfo %} - -{%if fileinfo %} -
- - - - - - - - - - - - - - - - - - - - - - - - - - {% if fileinfo.sha3_384 %} - - - - - {% endif %} - - - - - {% if fileinfo.tlsh %} - - - - - {% endif %} - - - - - - - - - {% if overlay.data %} - - - - {% endif %} -
File name -
{{fileinfo.name|safe}}
-
File Size{{fileinfo.size}} bytes
File Type{{fileinfo.type}}
MD5{{fileinfo.md5}}
SHA1{{fileinfo.sha1}}
SHA256{{fileinfo.sha256}}
SHA3-384{{fileinfo.sha3_384}}
CRC32{{fileinfo.crc32}}
TLSH{{fileinfo.tlsh}}
Ssdeep{{fileinfo.ssdeep}}
- Download - {% if overlay.data %} -  Display Overlay Data (Up to 4KB) - {% endif %} -
{{overlay.data}}
-
-{%else%} -
Sorry! No overlay information.
-{% endif %} - -{% endwith %} -{% endwith %} diff --git a/web/templates/analysis/report.html b/web/templates/analysis/report.html index 7988fa1f1ad..ed08b01984f 100644 --- a/web/templates/analysis/report.html +++ b/web/templates/analysis/report.html @@ -85,9 +85,6 @@ {% if analysis.dropped %} {% endif %} - {% if analysis.static.pe.overlay.fileinfo %} - - {% endif %} {% if analysis.procmemory %} {% endif %} @@ -193,10 +190,5 @@ {% include "analysis/admin/index.html" %} {% endif %} - {% if analysis.static.pe.overlay.fileinfo %} -
- {% include "analysis/overlay/index.html" %} -
- {% endif %} {% endblock %} diff --git a/web/templates/header.html b/web/templates/header.html index 52f4ce26f13..033d8508aec 100644 --- a/web/templates/header.html +++ b/web/templates/header.html @@ -2,7 +2,11 @@ +{% if title %} +{{ title|add:" ยท CAPE Sandbox" }} +{% else %} CAPE Sandbox +{% endif %} diff --git a/web/templates/submission/index.html b/web/templates/submission/index.html index 0efd0579f21..91947c0aee2 100644 --- a/web/templates/submission/index.html +++ b/web/templates/submission/index.html @@ -60,6 +60,8 @@ let title = $('option:selected', this).attr('title'); $("#package_description").text(title); }); + // tooltips + $('[data-toggle="tooltip"]').tooltip(); });
@@ -606,93 +608,100 @@
-
- -
- {% if config.procmemory %} -
- -
- {% endif %} - {% if config.amsidump %} -
- -
- {% endif %} -
- -
- {% if config.memory %} -
- -
- {% endif %} -
- -
-
- -
-
- -
-
- -
-
- -
-
- -
-
- -
-
- -
- {% if config.kernel %} -
- -
- {% endif %} -
- -
-
- + + + +
+
+ +
+ {% if config.procmemory %} +
+ +
+ {% endif %} + {% if config.amsidump %} +
+ +
+ {% endif %} +
+ +
+ {% if config.memory %} +
+ +
+ {% endif %} +
+ +
+
+ +
+
+ +
+
+ +
+
+ +
+
+ +
+
+ +
+
+ +
+ {% if config.kernel %} +
+ +
+ {% endif %} +
+ +
+
+ +
diff --git a/web/templates/submission/status.html b/web/templates/submission/status.html index fbc2933a96e..cc5d42217d0 100644 --- a/web/templates/submission/status.html +++ b/web/templates/submission/status.html @@ -3,10 +3,15 @@ {% if completed %}

Good news! :-)

The analysis is completed, you can view it here.
+{% elif status == "failed_analysis" %} +
+

Status for task {{task_id}} - {{ target }}

+

The analysis failed with status '{{status}}'. Click here to resubmit.

+
{% else %}
-

Hang on...

+

Status for task {{task_id}} - {{ target }}

The analysis is not finished yet, it's still {{status}}. This page will refresh every 30 seconds.

{% if session_data %}

To view the Remote Session - click here.