From 208f5e890b1e86fe8de56a04c60d7c40eddecb0d Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Tue, 24 May 2022 16:42:39 +0200 Subject: [PATCH 01/77] Bug fix: invalid name of devices --- htpclient/initialize.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index c792680..3388149 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -98,8 +98,8 @@ def __update_information(self): for line in output: if not line: continue - line = line.split(":") - devices.append(line[2].strip()) + line = ' '.join(line.split(' ')[1:]).split(':') + devices.append(line[1].strip()) elif Initialize.get_os() == 1: # windows output = subprocess.check_output("wmic cpu get name", shell=True) From 02fa25d5ed5ba22a1399dd8631e7e2a868787027 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 11 Apr 2022 11:32:10 +0200 Subject: [PATCH 02/77] Initial commit of devcontainer --- .devcontainer/Dockerfile | 42 ++++++++++++++++++++++++++++++++ .devcontainer/devcontainer.json | 12 +++++++++ .devcontainer/docker-compose.yml | 21 ++++++++++++++++ .vscode/launch.json | 13 ++++++++++ 4 files changed, 88 insertions(+) create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/docker-compose.yml create mode 100644 .vscode/launch.json diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..8bba9e2 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,42 @@ +FROM nvidia/cuda:11.4.3-devel-ubuntu20.04 + +# Avoid warnings by switching to noninteractive +ENV DEBIAN_FRONTEND=noninteractive + +RUN groupadd vscode && useradd -rm -d /app -s /bin/bash -g vscode -u 1001 vscode + +RUN apt-get update \ + && apt-get install python3 python3-pip -y \ + && apt-get install git -y + +# Install Intel OpenCL Runtime +RUN cd /tmp \ + && apt install wget lsb-core libnuma-dev pciutils -y \ + && wget http://registrationcenter-download.intel.com/akdlm/irc_nas/vcp/15532/l_opencl_p_18.1.0.015.tgz \ + && tar xzvf l_opencl_p_18.1.0.015.tgz \ + && cd l_opencl_p_18.1.0.015 \ + && echo "ACCEPT_EULA=accept" > silent.cfg \ + && echo "PSET_INSTALL_DIR=/opt/intel" >> silent.cfg \ + && echo "CONTINUE_WITH_OPTIONAL_ERROR=yes" >> silent.cfg \ + && echo "CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes" >> silent.cfg \ + && echo "COMPONENTS=DEFAULTS" >> silent.cfg \ + && echo "PSET_MODE=install" >> silent.cfg \ + && ./install.sh -s silent.cfg + +# Clean +RUN apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -rf /tmp/l_opencl_p_18.1.0.015* + +# Switch back to dialog for any ad-hoc use of apt-get +ENV DEBIAN_FRONTEND=dialog + +# Configuring app / python requirements +WORKDIR /app +USER vscode +COPY requirements.txt /app/src/ +RUN /usr/bin/pip3 install -r src/requirements.txt + +# Preventing container from exiting +CMD tail -f /dev/null diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..8a1f3de --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,12 @@ +{ + "name": "Hashtopolis-agent Devcontainer", + "dockerComposeFile": "docker-compose.yml", + "service": "hashtopolis-agent", + + "workspaceFolder": "/app/src", + + "extensions": [ + "ms-python.python" + ], + "remoteUser": "vscode" +} diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml new file mode 100644 index 0000000..3cc32e8 --- /dev/null +++ b/.devcontainer/docker-compose.yml @@ -0,0 
+1,21 @@ +version: "3" +services: + hashtopolis-agent: + container_name: hashtopolis_agent + build: + context: .. + dockerfile: .devcontainer/Dockerfile + volumes: + # This is where VS Code should expect to find your project's source code + # and the value of "workspaceFolder" in .devcontainer/devcontainer.json + - ..:/app/src + deploy: + resources: + reservations: + devices: + - capabilities: [gpu] + +networks: + default: + external: + name: hashtopolis_dev diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..9a1446b --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,13 @@ +{ + "configurations": [ + { + "name": "Python: Current File", + "type": "python", + "request": "launch", + "program": ".", + "args": ["--url", "http://hashtopolis/api/server.php", "--debug", "--voucher", "devvoucher"], + "console": "integratedTerminal", + "justMyCode": true + } + ] +} From 1811c253319edda86bc7047fae76eb2a5b5a252f Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 1 Jun 2022 14:41:14 +0200 Subject: [PATCH 03/77] Adding DEV_CONTAINER_USER_CMD built argument This can be used to customize the container file, for example for specifying proxys. --- .devcontainer/Dockerfile | 6 ++++++ .devcontainer/docker-compose.yml | 2 ++ .gitignore | 1 + 3 files changed, 9 insertions(+) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 8bba9e2..bcc0954 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,8 +1,14 @@ FROM nvidia/cuda:11.4.3-devel-ubuntu20.04 +ARG DEV_CONTAINER_USER_CMD + # Avoid warnings by switching to noninteractive ENV DEBIAN_FRONTEND=noninteractive +# Check for and run optional user-supplied command to enable (advanced) customizations of the dev container +RUN [ -n "${DEV_CONTAINER_USER_CMD}" ] \ + && echo "${DEV_CONTAINER_USER_CMD}" | sh + RUN groupadd vscode && useradd -rm -d /app -s /bin/bash -g vscode -u 1001 vscode RUN apt-get update \ diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 3cc32e8..667fd4d 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -5,6 +5,8 @@ services: build: context: .. 
dockerfile: .devcontainer/Dockerfile + args: + - DEV_CONTAINER_USER_CMD volumes: # This is where VS Code should expect to find your project's source code # and the value of "workspaceFolder" in .devcontainer/devcontainer.json diff --git a/.gitignore b/.gitignore index f2fcbcb..9e16903 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,4 @@ __pycache__ .idea venv lock.pid +.env From b420618b2345163867de5c3e4eed3d6421ade919 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 29 Jun 2022 10:46:39 +0200 Subject: [PATCH 04/77] Minor devcontainer improvements --- .devcontainer/Dockerfile | 5 ++--- .devcontainer/docker-compose.yml | 5 ----- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index bcc0954..cc2ab75 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,4 +1,4 @@ -FROM nvidia/cuda:11.4.3-devel-ubuntu20.04 +FROM ubuntu:20.04 ARG DEV_CONTAINER_USER_CMD @@ -6,8 +6,7 @@ ARG DEV_CONTAINER_USER_CMD ENV DEBIAN_FRONTEND=noninteractive # Check for and run optional user-supplied command to enable (advanced) customizations of the dev container -RUN [ -n "${DEV_CONTAINER_USER_CMD}" ] \ - && echo "${DEV_CONTAINER_USER_CMD}" | sh +RUN if [ -n "${DEV_CONTAINER_USER_CMD}" ]; then echo "${DEV_CONTAINER_USER_CMD}" | sh ; fi RUN groupadd vscode && useradd -rm -d /app -s /bin/bash -g vscode -u 1001 vscode diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 667fd4d..53be70c 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -11,11 +11,6 @@ services: # This is where VS Code should expect to find your project's source code # and the value of "workspaceFolder" in .devcontainer/devcontainer.json - ..:/app/src - deploy: - resources: - reservations: - devices: - - capabilities: [gpu] networks: default: From 26e0b2530c7eb303a6d8184124ae1f24c9697bd2 Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Tue, 2 Aug 2022 20:32:15 +0200 Subject: [PATCH 05/77] Catching version parsion exception if custom tags are used (#32) * catching version parsion exception if custom tags are used --- changelog.md | 4 ++++ htpclient/hashcat_cracker.py | 7 +++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/changelog.md b/changelog.md index 1d941a2..3379985 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ ## v0.6.1 -> v0.x.x +### Bugfixes + +* Fixed crash occurring when a custom hashcat version tag is used which contains non-numerical parts. 
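> Note on the fix in the `hashcat_cracker.py` hunk below: it wraps the major-version check in a try/except so that custom version tags (for example a Git-describe style string) no longer crash the client. The following is a standalone sketch of that tolerant parsing under the same assumptions; `choose_outfile_format` is an illustrative helper name, not the client's actual code, and the handling of dashed version strings here is simplified.

```python
# Sketch only: tolerant selection of the hashcat --outfile-format value,
# mirroring the idea of the patch (fall back safely on non-numeric tags).

def choose_outfile_format(version_string):
    """Return the --outfile-format value to use for a given hashcat version string."""
    if '-' not in version_string:
        release = version_string.split('.')
        try:
            if int(release[0]) >= 6:
                return "1,2,3,4"  # hashcat >= 6.0.0 uses comma-separated format codes
        except ValueError:
            # custom tag without a numeric major version: assume a recent build
            return "1,2,3,4"
        return "15"  # releases older than 6.0.0 keep the old single format code
    # strings like "v6.2.6-851-g1a2b3c" carry the release part before the first dash
    release = version_string.split('-')[0].lstrip('v').split('.')
    try:
        if int(release[0]) >= 6:
            return "1,2,3,4"
    except ValueError:
        return "1,2,3,4"
    return "15"


if __name__ == '__main__':
    for v in ("5.1.0", "6.2.6", "v6.2.6-851-g1a2b3c", "my-custom-build"):
        print(v, "->", choose_outfile_format(v))
```

> Falling back to the new outfile format when parsing fails matches the patch's stated assumption that unknown or custom builds are recent hashcat versions.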
+ ## v0.6.0 -> v0.6.1 ### Features diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index e1201ac..bfdd1ae 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -63,8 +63,11 @@ def __init__(self, cracker_id, binary_download): def get_outfile_format(self): if self.version_string.find('-') == -1: release = self.version_string.split('.') - if int(str(release[0])) >= 6: - return "1,2,3,4" + try: + if int(str(release[0])) >= 6: + return "1,2,3,4" + except ValueError: + return "1,2,3,4" # if there is a custom version, we assume it's using the new format return "15" # if we cannot determine the version or if the release is older than 6.0.0, we will use the old format split = self.version_string.split('-') if len(split) < 2: From 78eb6a890d902cb7705a1643e7009c44f337e66f Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Tue, 2 Aug 2022 20:37:26 +0200 Subject: [PATCH 06/77] Introduced waiting time after failed to send a solve progress (#34) * introduced waiting time after failed to send a solve progress --- changelog.md | 1 + htpclient/hashcat_cracker.py | 1 + 2 files changed, 2 insertions(+) diff --git a/changelog.md b/changelog.md index 3379985..5604375 100644 --- a/changelog.md +++ b/changelog.md @@ -2,6 +2,7 @@ ### Bugfixes +* Introduced time delay to prevent spamming server with requests when connection is refused (issue #799). * Fixed crash occurring when a custom hashcat version tag is used which contains non-numerical parts. ## v0.6.0 -> v0.6.1 diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index bfdd1ae..4b53509 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -327,6 +327,7 @@ def run_loop(self, proc, chunk, task): ans = req.execute() if ans is None: logging.error("Failed to send solve!") + sleep(1) elif ans['response'] != 'SUCCESS': self.wasStopped = True logging.error("Error from server on solve: " + str(ans)) From 363ab842fa9b913eba862d02c8afd778b9a3226f Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Tue, 15 Nov 2022 20:53:18 +0100 Subject: [PATCH 07/77] Allowing an agent to register as cpu agent directly (#35) * added --cpu-only flag which is used on agent registration to only register as cpu agent --- README.md | 12 ++++++------ __main__.py | 1 + changelog.md | 4 ++++ htpclient/initialize.py | 31 ++++++++++++++++++++----------- 4 files changed, 31 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index a004143..9561a8f 100644 --- a/README.md +++ b/README.md @@ -31,22 +31,21 @@ Please note: ### Command Line Arguments ``` -usage: python3 hashtopolis.zip [-h] [--de-register] [--version] - [--number-only] [--disable-update] [--debug] - [--voucher VOUCHER] [--url URL] +usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--disable-update] [--debug] [--voucher VOUCHER] [--url URL] [--cert CERT] [--cpu-only] -Hashtopolis Client v0.6.0 +Hashtopolis Client v0.6.1 optional arguments: -h, --help show this help message and exit --de-register client should automatically de-register from server now --version show version information --number-only when using --version show only the number - --disable-update disable retrieving auto-updates of the client from the - server + --disable-update disable retrieving auto-updates of the client from the server --debug, -d enforce debugging output --voucher VOUCHER voucher to use to automatically register --url URL URL to Hashtopolis client API + --cert CERT Client TLS cert bundle for Hashtopolis client API + 
--cpu-only Force client to register as CPU only and also only reading out CPU information ``` ### Config @@ -81,6 +80,7 @@ When you run the client for the first time it will ask automatically for all the | auth-user | string | | HTTP Basic Auth user | | auth-password | string | | HTTP Basic Auth password | | outfile-history | boolean | false | Keep old hashcat outfiles with founds and not getting them overwritten | +| cpu-only | boolean | false | Only send CPU information about agent (for CPU only agents) | ### Debug example diff --git a/__main__.py b/__main__.py index 1cd3212..8b17f40 100644 --- a/__main__.py +++ b/__main__.py @@ -305,6 +305,7 @@ def de_register(): parser.add_argument('--voucher', type=str, required=False, help='voucher to use to automatically register') parser.add_argument('--url', type=str, required=False, help='URL to Hashtopolis client API') parser.add_argument('--cert', type=str, required=False, help='Client TLS cert bundle for Hashtopolis client API') + parser.add_argument('--cpu-only', action='store_true', help='Force client to register as CPU only and also only reading out CPU information') args = parser.parse_args() if args.version: diff --git a/changelog.md b/changelog.md index 5604375..6f1e562 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ ## v0.6.1 -> v0.x.x +### Features + +* Allow the agent to register as CPU only on the server and sending only CPU information. + ### Bugfixes * Introduced time delay to prevent spamming server with requests when connection is refused (issue #799). diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 3388149..22472d2 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -89,17 +89,19 @@ def __update_information(self): for line in sorted(set(pairs)): devices.append(line.split(':', 1)[1].replace('\t', ' ')) - try: - output = subprocess.check_output("lspci | grep -E 'VGA compatible controller|3D controller'", shell=True) - except subprocess.CalledProcessError: - # we silently ignore this case on machines where lspci is not present or architecture has no pci bus - output = b"" - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") - for line in output: - if not line: - continue - line = ' '.join(line.split(' ')[1:]).split(':') - devices.append(line[1].strip()) + + if not self.config.get_value('cpu-only'): + try: + output = subprocess.check_output("lspci | grep -E 'VGA compatible controller|3D controller'", shell=True) + except subprocess.CalledProcessError: + # we silently ignore this case on machines where lspci is not present or architecture has no pci bus + output = b"" + output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + for line in output: + if not line: + continue + line = ' '.join(line.split(' ')[1:]).split(':') + devices.append(line[1].strip()) elif Initialize.get_os() == 1: # windows output = subprocess.check_output("wmic cpu get name", shell=True) @@ -155,6 +157,9 @@ def __check_token(self, args): query = dict_register.copy() query['voucher'] = voucher query['name'] = name + if self.config.get_value('cpu-only'): + query['cpu-only'] = True + req = JsonRequest(query) ans = req.execute() if ans is None: @@ -206,6 +211,10 @@ def __check_url(self, args): else: logging.debug("Connection test successful!") + if args.cpu_only is not None and args.cpu_only: + logging.debug("Setting agent to be CPU only..") + self.config.set_value('cpu-only', True) + @staticmethod def __build_directories(): if not os.path.isdir("crackers"): From 
57d61dc8a313d6f65b124dc96d3f65910eea3d93 Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Tue, 15 Nov 2022 21:21:07 +0100 Subject: [PATCH 08/77] Made save paths for data configurable (#31) * added agent flags for paths of files, crackers, preprocessors, zaps and hashlists --- README.md | 40 +++++++++++++++------ __main__.py | 52 ++++++++++++++++++++------- changelog.md | 4 +++ htpclient/binarydownload.py | 20 +++++------ htpclient/files.py | 20 +++++------ htpclient/generic_cracker.py | 18 +++++----- htpclient/hashcat_cracker.py | 70 ++++++++++++++++++------------------ htpclient/hashlist.py | 4 +-- htpclient/helpers.py | 12 +++---- htpclient/initialize.py | 21 ++++++----- 10 files changed, 156 insertions(+), 105 deletions(-) diff --git a/README.md b/README.md index 9561a8f..c2db07e 100644 --- a/README.md +++ b/README.md @@ -31,21 +31,33 @@ Please note: ### Command Line Arguments ``` -usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--disable-update] [--debug] [--voucher VOUCHER] [--url URL] [--cert CERT] [--cpu-only] +usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--disable-update] [--debug] [--voucher VOUCHER] [--url URL] [--cert CERT] [--files-path FILES_PATH] + [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] + Hashtopolis Client v0.6.1 optional arguments: - -h, --help show this help message and exit - --de-register client should automatically de-register from server now - --version show version information - --number-only when using --version show only the number - --disable-update disable retrieving auto-updates of the client from the server - --debug, -d enforce debugging output - --voucher VOUCHER voucher to use to automatically register - --url URL URL to Hashtopolis client API - --cert CERT Client TLS cert bundle for Hashtopolis client API - --cpu-only Force client to register as CPU only and also only reading out CPU information + -h, --help show this help message and exit + --de-register client should automatically de-register from server now + --version show version information + --number-only when using --version show only the number + --disable-update disable retrieving auto-updates of the client from the server + --debug, -d enforce debugging output + --voucher VOUCHER voucher to use to automatically register + --url URL URL to Hashtopolis client API + --cert CERT Client TLS cert bundle for Hashtopolis client API + --files-path FILES_PATH + Use given folder path as files location + --crackers-path CRACKERS_PATH + Use given folder path as crackers location + --hashlists-path HASHLISTS_PATH + Use given folder path as hashlists location + --preprocessors-path PREPROCESSORS_PATH + Use given folder path as preprocessors location + --zaps-path ZAPS_PATH + Use given folder path as zaps location + --cpu-only Force client to register as CPU only and also only reading out CPU information ``` ### Config @@ -80,6 +92,11 @@ When you run the client for the first time it will ask automatically for all the | auth-user | string | | HTTP Basic Auth user | | auth-password | string | | HTTP Basic Auth password | | outfile-history | boolean | false | Keep old hashcat outfiles with founds and not getting them overwritten | +| files-path | string | | Use given folder path as files location | +| crackers-path | string | | Use given folder path as crackers location | +| hashlists-path | string | | Use given folder path as hashlists location 
| +| preprocessors-path | string | | Use given folder path as preprocessors location | +| zaps-path | string | | Use given folder path as zaps location | | cpu-only | boolean | false | Only send CPU information about agent (for CPU only agents) | ### Debug example @@ -110,6 +127,7 @@ In order to use the multicast distribution for files, please make sure that the The list contains all Hashcat versions with which the client was tested and is able to work with (other versions might work): +* 6.2.5 * 6.2.4 * 6.2.3 * 6.2.2 diff --git a/__main__.py b/__main__.py index 8b17f40..83bfe3c 100644 --- a/__main__.py +++ b/__main__.py @@ -2,6 +2,7 @@ import shutil import signal import sys +import os import time from time import sleep @@ -47,13 +48,13 @@ def run_health_check(): logging.info("Starting check ID " + str(check_id)) # write hashes to file - hash_file = open("hashlists/health_check.txt", "w") + hash_file = open(CONFIG.get_value('hashlists-path') + "/health_check.txt", "w") hash_file.write("\n".join(ans['hashes'])) hash_file.close() # delete old file if necessary - if os.path.exists("hashlists/health_check.out"): - os.unlink("hashlists/health_check.out") + if os.path.exists(CONFIG.get_value('hashlists-path') + "/health_check.out"): + os.unlink(CONFIG.get_value('hashlists-path') + "/health_check.out") # run task cracker = HashcatCracker(ans['crackerBinaryId'], binaryDownload) @@ -62,8 +63,8 @@ def run_health_check(): end = int(time.time()) # read results - if os.path.exists("hashlists/health_check.out"): - founds = file_get_contents("hashlists/health_check.out").replace("\r\n", "\n").split("\n") + if os.path.exists(CONFIG.get_value('hashlists-path') + "/health_check.out"): + founds = file_get_contents(CONFIG.get_value('hashlists-path') + "/health_check.out").replace("\r\n", "\n").split("\n") else: founds = [] if len(states) > 0: @@ -118,15 +119,37 @@ def init_logging(args): def init(args): global CONFIG, binaryDownload + if len(CONFIG.get_value('files-path')) == 0: + CONFIG.set_value('files-path', os.path.abspath('files')) + if len(CONFIG.get_value('crackers-path')) == 0: + CONFIG.set_value('crackers-path', os.path.abspath('crackers')) + if len(CONFIG.get_value('hashlists-path')) == 0: + CONFIG.set_value('hashlists-path', os.path.abspath('hashlists')) + if len(CONFIG.get_value('zaps-path')) == 0: + CONFIG.set_value('zaps-path', os.path.abspath('.')) + if len(CONFIG.get_value('preprocessors-path')) == 0: + CONFIG.set_value('preprocessors-path', os.path.abspath('preprocessors')) + + if args.files_path and len(args.files_path): + CONFIG.set_value('files-path', os.path.abspath(args.files_path)) + if args.crackers_path and len(args.crackers_path): + CONFIG.set_value('crackers-path', os.path.abspath(args.crackers_path)) + if args.hashlists_path and len(args.hashlists_path): + CONFIG.set_value('hashlists-path', os.path.abspath(args.hashlists_path)) + if args.zaps_path and len(args.zaps_path): + CONFIG.set_value('zaps-path', os.path.abspath(args.zaps_path)) + if args.preprocessors_path and len(args.preprocessors_path): + CONFIG.set_value('preprocessors-path', os.path.abspath(args.preprocessors_path)) + logging.info("Starting client '" + Initialize.get_version() + "'...") # check if there are running hashcat.pid files around (as we assume that nothing is running anymore if the client gets newly started) - if os.path.exists("crackers"): - for root, dirs, files in os.walk("crackers"): + if os.path.exists(CONFIG.get_value('crackers-path')): + for root, dirs, files in 
os.walk(CONFIG.get_value('crackers-path')): for folder in dirs: - if folder.isdigit() and os.path.exists("crackers/" + folder + "/hashtopolis.pid"): - logging.info("Cleaning hashcat PID file from crackers/" + folder) - os.unlink("crackers/" + folder + "/hashtopolis.pid") + if folder.isdigit() and os.path.exists(CONFIG.get_value('crackers-path') + "/" + folder + "/hashtopolis.pid"): + logging.info("Cleaning hashcat PID file from " + CONFIG.get_value('crackers-path') + "/" + folder) + os.unlink(CONFIG.get_value('crackers-path') + "/" + folder + "/hashtopolis.pid") session = Session(requests.Session()).s session.headers.update({'User-Agent': Initialize.get_version()}) @@ -281,7 +304,7 @@ def de_register(): else: logging.info("Successfully de-registered!") # cleanup - dirs = ['crackers', 'prince', 'hashlists', 'files'] + dirs = [CONFIG.get_value('crackers-path'), CONFIG.get_value('preprocessors-path'), CONFIG.get_value('hashlists-path'), CONFIG.get_value('files-path')] files = ['config.json', '7zr.exe', '7zr'] for file in files: if os.path.exists(file): @@ -289,7 +312,7 @@ def de_register(): for directory in dirs: if os.path.exists(directory): shutil.rmtree(directory) - r = glob.glob('hashlist_*') + r = glob.glob(CONFIG.get_value('zaps-path') + '/hashlist_*') for i in r: shutil.rmtree(i) logging.info("Cleanup finished!") @@ -305,6 +328,11 @@ def de_register(): parser.add_argument('--voucher', type=str, required=False, help='voucher to use to automatically register') parser.add_argument('--url', type=str, required=False, help='URL to Hashtopolis client API') parser.add_argument('--cert', type=str, required=False, help='Client TLS cert bundle for Hashtopolis client API') + parser.add_argument('--files-path', type=str, required=False, help='Use given folder path as files location') + parser.add_argument('--crackers-path', type=str, required=False, help='Use given folder path as crackers location') + parser.add_argument('--hashlists-path', type=str, required=False, help='Use given folder path as hashlists location') + parser.add_argument('--preprocessors-path', type=str, required=False, help='Use given folder path as preprocessors location') + parser.add_argument('--zaps-path', type=str, required=False, help='Use given folder path as zaps location') parser.add_argument('--cpu-only', action='store_true', help='Force client to register as CPU only and also only reading out CPU information') args = parser.parse_args() diff --git a/changelog.md b/changelog.md index 6f1e562..7972c44 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,9 @@ ## v0.6.1 -> v0.x.x +### Enhancements + +* Paths for files, crackers, hashlists, zaps and preprocessors can be set via command line args or config. + ### Features * Allow the agent to register as CPU only on the server and sending only CPU information. 
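> Note on the `init()` changes above: they establish a simple precedence for each storage location — a built-in default is used unless the config already carries a value, and a non-empty command line flag overrides both, with everything normalized to absolute paths. The sketch below condenses that order; `DEFAULT_PATHS` and `resolve_paths` are assumed names for illustration only, and the config object is stubbed with a plain dict rather than the client's `Config` class.

```python
import argparse
import os
import types

# Illustrative precedence: built-in default < existing config value < CLI flag.
DEFAULT_PATHS = {
    'files-path': 'files',
    'crackers-path': 'crackers',
    'hashlists-path': 'hashlists',
    'preprocessors-path': 'preprocessors',
    'zaps-path': '.',
}


def resolve_paths(config, args):
    """config offers get_value/set_value; args is the argparse.Namespace from __main__."""
    for key, default in DEFAULT_PATHS.items():
        # seed an empty or missing config entry with the built-in default (absolute path)
        if not config.get_value(key):
            config.set_value(key, os.path.abspath(default))
        # a non-empty CLI flag such as --files-path wins over config and default
        cli_value = getattr(args, key.replace('-', '_'), None)
        if cli_value:
            config.set_value(key, os.path.abspath(cli_value))


if __name__ == '__main__':
    cfg = {}
    dummy_config = types.SimpleNamespace(get_value=cfg.get, set_value=cfg.__setitem__)
    demo_args = argparse.Namespace(files_path='/data/wordlists', crackers_path=None,
                                   hashlists_path=None, preprocessors_path=None,
                                   zaps_path=None)
    resolve_paths(dummy_config, demo_args)
    print(cfg)
```

> Because the locations are resolved to absolute paths once at startup, the download, cracker and zap-handling code changed in this patch can build paths from the config values without depending on the current working directory.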
diff --git a/htpclient/binarydownload.py b/htpclient/binarydownload.py index 9477432..c336392 100644 --- a/htpclient/binarydownload.py +++ b/htpclient/binarydownload.py @@ -135,7 +135,7 @@ def check_prince(self): def check_preprocessor(self, task): logging.debug("Checking if requested preprocessor is present...") - path = "preprocessor/" + str(task.get_task()['preprocessor']) + "/" + path = self.config.get_value('preprocessors-path') + "/" + str(task.get_task()['preprocessor']) + "/" query = copy_and_set_token(dict_downloadBinary, self.config.get_value('token')) query['type'] = 'preprocessor' query['preprocessorId'] = task.get_task()['preprocessor'] @@ -173,7 +173,7 @@ def check_preprocessor(self, task): return True def check_version(self, cracker_id): - path = "crackers/" + str(cracker_id) + "/" + path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" query = copy_and_set_token(dict_downloadBinary, self.config.get_value('token')) query['type'] = 'cracker' query['binaryVersionId'] = cracker_id @@ -191,19 +191,19 @@ def check_version(self, cracker_id): self.last_version = ans if not os.path.isdir(path): # we need to download the 7zip - if not Download.download(ans['url'], "crackers/" + str(cracker_id) + ".7z"): + if not Download.download(ans['url'], self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z"): logging.error("Download of cracker binary failed!") sleep(5) return False if Initialize.get_os() == 1: - os.system("7zr" + Initialize.get_os_extension() + " x -ocrackers/temp crackers/" + str(cracker_id) + ".7z") + os.system("7zr" + Initialize.get_os_extension() + " x -o'" + self.config.get_value('crackers-path') + "/temp' '" + self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z'") else: - os.system("./7zr" + Initialize.get_os_extension() + " x -ocrackers/temp crackers/" + str(cracker_id) + ".7z") - os.unlink("crackers/" + str(cracker_id) + ".7z") - for name in os.listdir("crackers/temp"): - if os.path.isdir("crackers/temp/" + name): - os.rename("crackers/temp/" + name, "crackers/" + str(cracker_id)) + os.system("./7zr" + Initialize.get_os_extension() + " x -o'" + self.config.get_value('crackers-path') + "/temp' '" + self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z'") + os.unlink(self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z") + for name in os.listdir(self.config.get_value('crackers-path') + "/temp"): + if os.path.isdir(self.config.get_value('crackers-path') + "/temp/" + name): + os.rename(self.config.get_value('crackers-path') + "/temp/" + name, self.config.get_value('crackers-path') + "/" + str(cracker_id)) else: - os.rename("crackers/temp", "crackers/" + str(cracker_id)) + os.rename(self.config.get_value('crackers-path') + "/temp", self.config.get_value('crackers-path') + "/" + str(cracker_id)) break return True diff --git a/htpclient/files.py b/htpclient/files.py index 43a5135..28916be 100644 --- a/htpclient/files.py +++ b/htpclient/files.py @@ -38,19 +38,19 @@ def deletion_check(self): for filename in files: if filename.find("/") != -1 or filename.find("\\") != -1: continue # ignore invalid file names - elif os.path.dirname("files/" + filename) != "files": + elif os.path.dirname(self.config.get_value('files-path') + "/" + filename) != "files": continue # ignore any case in which we would leave the files folder - elif os.path.exists("files/" + filename): + elif os.path.exists(self.config.get_value('files-path') + "/" + filename): logging.info("Delete file '" + filename + "' as requested 
by server...") - if os.path.splitext("files/" + filename)[1] == '.7z': - if os.path.exists("files/" + filename.replace(".7z", ".txt")): + if os.path.splitext(self.config.get_value('files-path') + "/" + filename)[1] == '.7z': + if os.path.exists(self.config.get_value('files-path') + "/" + filename.replace(".7z", ".txt")): logging.info("Also delete assumed wordlist from archive of same file...") - os.unlink("files/" + filename.replace(".7z", ".txt")) - os.unlink("files/" + filename) + os.unlink(self.config.get_value('files-path') + "/" + filename.replace(".7z", ".txt")) + os.unlink(self.config.get_value('files-path') + "/" + filename) def check_files(self, files, task_id): for file in files: - file_localpath = "files/" + file + file_localpath = self.config.get_value('files-path') + "/" + file query = copy_and_set_token(dict_getFile, self.config.get_value('token')) query['taskId'] = task_id query['file'] = file @@ -85,10 +85,10 @@ def check_files(self, files, task_id): logging.error("file size mismatch on file: %s" % file) sleep(5) return False - if os.path.splitext("files/" + file)[1] == '.7z' and not os.path.isfile("files/" + file.replace(".7z", ".txt")): + if os.path.splitext(self.config.get_value('files-path') + "/" + file)[1] == '.7z' and not os.path.isfile(self.config.get_value('files-path') + "/" + file.replace(".7z", ".txt")): # extract if needed if Initialize.get_os() != 1: - os.system("./7zr" + Initialize.get_os_extension() + " x -aoa -ofiles/ -y files/" + file) + os.system("./7zr" + Initialize.get_os_extension() + " x -aoa -o'" + self.config.get_value('files-path') + "/' -y '" + self.config.get_value('files-path') + "/" + file + "'") else: - os.system("7zr" + Initialize.get_os_extension() + " x -aoa -ofiles/ -y files/" + file) + os.system("7zr" + Initialize.get_os_extension() + " x -aoa -o'" + self.config.get_value('files-path') + "/' -y '" + self.config.get_value('files-path') + "/" + file + "'") return True diff --git a/htpclient/generic_cracker.py b/htpclient/generic_cracker.py index 81ea9d8..d6acc8e 100644 --- a/htpclient/generic_cracker.py +++ b/htpclient/generic_cracker.py @@ -16,19 +16,20 @@ class GenericCracker: def __init__(self, cracker_id, binary_download): self.config = Config() self.io_q = Queue() - self.callPath = "../crackers/" + str(cracker_id) + "/" + binary_download.get_version()['executable'] + self.callPath = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" + binary_download.get_version()['executable'] self.executable_name = binary_download.get_version()['executable'] self.keyspace = 0 def run_chunk(self, task, chunk, preprocessor): args = " crack -s " + str(chunk['skip']) args += " -l " + str(chunk['length']) - args += " " + task['attackcmd'].replace(task['hashlistAlias'], "../hashlists/" + str(task['hashlistId'])) - full_cmd = self.callPath + args + hl_path = self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + args += " " + task['attackcmd'].replace(task['hashlistAlias'], f"'{hl_path}'") + full_cmd = f"'{self.callPath}'" + args if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug("CALL: " + full_cmd) - process = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd='files') + process = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.config.get_value('files-path')) logging.debug("started cracking") out_thread = Thread(target=self.stream_watcher, name='stdout-watcher', args=('OUT', process.stdout)) @@ -97,7 
+98,7 @@ def run_loop(self, process, chunk, task): logging.error("Error from server on solve: " + str(ans)) else: if ans['zaps']: - with open("files/zap", "wb") as zapfile: # need to check if we are in the main dir here + with open(self.config.get_value('files-path') + "/zap", "wb") as zapfile: # need to check if we are in the main dir here zapfile.write('\n'.join(ans['zaps']).encode()) zapfile.close() cracks = cracks_backup @@ -121,7 +122,7 @@ def measure_keyspace(self, task, chunk): full_cmd = full_cmd.replace("/", '\\') try: logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd='files') + output = subprocess.check_output(full_cmd, shell=True, cwd=self.config.get_value('files-path')) except subprocess.CalledProcessError as e: logging.error("Error during keyspace measurement: " + str(e)) send_error("Keyspace measure failed!", self.config.get_value('token'), task['taskId'], None) @@ -140,12 +141,13 @@ def run_benchmark(self, task): ksp = self.keyspace if ksp == 0: ksp = task['keyspace'] - args = task['attackcmd'].replace(task['hashlistAlias'], "../hashlists/" + str(task['hashlistId'])) + hl_path = self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + args = task['attackcmd'].replace(task['hashlistAlias'], f"'{hl_path}'") full_cmd = self.callPath + " crack " + args + " -s 0 -l " + str(ksp) + " --timeout=" + str(task['bench']) if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd='files') + output = subprocess.check_output(full_cmd, shell=True, cwd=self.config.get_value('files-path')) if output: output = output.replace(b"\r\n", b"\n").decode('utf-8') output = output.split('\n') diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 4b53509..37a1f2e 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -26,7 +26,7 @@ def __init__(self, cracker_id, binary_download): self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + "." + self.executable_name[k + 1:] - self.cracker_path = "crackers/" + str(cracker_id) + "/" + self.cracker_path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" self.callPath = self.executable_name if Initialize.get_os() != 1: self.callPath = "./" + self.callPath @@ -35,12 +35,12 @@ def __init__(self, cracker_id, binary_download): self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + get_bit() + "." 
+ self.executable_name[k + 1:] - self.cracker_path = "crackers/" + str(cracker_id) + "/" + self.cracker_path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" self.callPath = self.executable_name if Initialize.get_os() != 1: self.callPath = "./" + self.callPath - cmd = self.callPath + " --version" + cmd = "'" + self.callPath + "' --version" output = '' try: logging.debug("CALL: " + cmd) @@ -86,8 +86,8 @@ def build_command(self, task, chunk): args = " --machine-readable --quiet --status --restore-disable --session=hashtopolis" args += " --status-timer " + str(task['statustimer']) args += " --outfile-check-timer=" + str(task['statustimer']) - args += " --outfile-check-dir=../../hashlist_" + str(task['hashlistId']) - args += " -o ../../hashlists/" + str(task['hashlistId']) + ".out --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" + args += " --outfile-check-dir='" + self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId']) + "'" + args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out' --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" args += " -s " + str(chunk['skip']) args += " -l " + str(chunk['length']) if 'useBrain' in task and task['useBrain']: # when using brain we set the according parameters @@ -98,7 +98,7 @@ def build_command(self, task, chunk): args += " --brain-client-features " + str(task['brainFeatures']) else: # remove should only be used if we run without brain args += " --potfile-disable --remove --remove-timer=" + str(task['statustimer']) - args += " " + update_files(task['attackcmd']).replace(task['hashlistAlias'], "../../hashlists/" + str(task['hashlistId'])) + " " + task['cmdpars'] + args += " " + update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] if args.find(" -S") != -1: self.uses_slow_hash_flag = True return self.callPath + args @@ -110,11 +110,11 @@ def build_pipe_command(self, task, chunk): post_args = " --machine-readable --quiet --status --remove --restore-disable --potfile-disable --session=hashtopolis" post_args += " --status-timer " + str(task['statustimer']) post_args += " --outfile-check-timer=" + str(task['statustimer']) - post_args += " --outfile-check-dir=../../hashlist_" + str(task['hashlistId']) - post_args += " -o ../../hashlists/" + str(task['hashlistId']) + ".out --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" + post_args += " --outfile-check-dir='" + self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId']) + "'" + post_args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out' --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" post_args += " --remove-timer=" + str(task['statustimer']) - post_args += " ../../hashlists/" + str(task['hashlistId']) - return self.callPath + pre_args + " | " + self.callPath + post_args + task['cmdpars'] + post_args += " '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "'" + return f"'{self.callPath}'" + pre_args + " | " + f"'{self.callPath}'" + post_args + task['cmdpars'] # DEPRECATED def build_prince_command(self, task, chunk): @@ -136,7 +136,7 @@ def build_prince_command(self, task, chunk): return binary + pre_args + " | " + self.callPath + post_args + task['cmdpars'] def build_preprocessor_command(self, task, chunk, 
preprocessor): - binary = "../../preprocessor/" + str(task['preprocessor']) + "/" + preprocessor['executable'] + binary = self.config.get_value('preprocessors-path') + "/" + str(task['preprocessor']) + "/" + preprocessor['executable'] if Initialize.get_os() != 1: binary = "./" + binary if not os.path.isfile(binary): @@ -158,12 +158,12 @@ def build_preprocessor_command(self, task, chunk, preprocessor): post_args = " --machine-readable --quiet --status --remove --restore-disable --potfile-disable --session=hashtopolis" post_args += " --status-timer " + str(task['statustimer']) post_args += " --outfile-check-timer=" + str(task['statustimer']) - post_args += " --outfile-check-dir=../../hashlist_" + str(task['hashlistId']) - post_args += " -o ../../hashlists/" + str(task['hashlistId']) + ".out --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" + post_args += " --outfile-check-dir='" + self.config.get_value('zaps-path') + "hashlist_" + str(task['hashlistId']) + "'" + post_args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out' --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" post_args += " --remove-timer=" + str(task['statustimer']) - post_args += " ../../hashlists/" + str(task['hashlistId']) + post_args += " '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "'" post_args += update_files(task['attackcmd']).replace(task['hashlistAlias'], '') - return binary + pre_args + " | " + self.callPath + post_args + task['cmdpars'] + return f"'{binary}'" + pre_args + " | " + f"'{self.callPath}'" + post_args + task['cmdpars'] def run_chunk(self, task, chunk, preprocessor): if 'enforcePipe' in task and task['enforcePipe']: @@ -182,14 +182,14 @@ def run_chunk(self, task, chunk, preprocessor): if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') # clear old found file - earlier we deleted them, but just in case, we just move it to a unique filename if configured so - if os.path.exists("hashlists/" + str(task['hashlistId']) + ".out"): + if os.path.exists(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out"): if self.config.get_value('outfile-history'): - os.rename("hashlists/" + str(task['hashlistId']) + ".out", "hashlists/" + str(task['hashlistId']) + "_" + str(time.time()) + ".out") + os.rename(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out", self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "_" + str(time.time()) + ".out") else: - os.unlink("hashlists/" + str(task['hashlistId']) + ".out") + os.unlink(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out") # create zap folder - if not os.path.exists("hashlist_" + str(task['hashlistId'])): - os.mkdir("hashlist_" + str(task['hashlistId'])) + if not os.path.exists(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId'])): + os.mkdir(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId'])) logging.debug("CALL: " + full_cmd) if Initialize.get_os() != 1: process = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path, preexec_fn=os.setsid) @@ -199,7 +199,7 @@ def run_chunk(self, task, chunk, preprocessor): logging.debug("started cracking") out_thread = Thread(target=self.stream_watcher, name='stdout-watcher', args=('OUT', process.stdout)) err_thread = Thread(target=self.stream_watcher, name='stderr-watcher', args=('ERR', 
process.stderr)) - crk_thread = Thread(target=self.output_watcher, name='crack-watcher', args=("hashlists/" + str(task['hashlistId']) + ".out", process)) + crk_thread = Thread(target=self.output_watcher, name='crack-watcher', args=(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out", process)) out_thread.start() err_thread.start() crk_thread.start() @@ -354,7 +354,7 @@ def run_loop(self, proc, chunk, task): if zaps: logging.debug("Writing zaps") zap_output = "\tFF\n".join(zaps) + '\tFF\n' - f = open("hashlist_" + str(task['hashlistId']) + "/" + str(time.time()), 'a') + f = open(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId']) + "/" + str(time.time()), 'a') f.write(zap_output) f.close() logging.info("Progress:" + str("{:6.2f}".format(relative_progress / 100)) + "% Speed: " + print_speed(speed) + " Cracks: " + str(cracks_count) + " Accepted: " + str(ans['cracked']) + " Skips: " + str(ans['skipped']) + " Zaps: " + str(len(zaps))) @@ -378,7 +378,7 @@ def measure_keyspace(self, task, chunk): elif 'usePreprocessor' in task.get_task() and task.get_task()['usePreprocessor']: return self.preprocessor_keyspace(task, chunk) task = task.get_task() # TODO: refactor this to be better code - full_cmd = self.callPath + " --keyspace --quiet " + update_files(task['attackcmd']).replace(task['hashlistAlias'] + " ", "") + ' ' + task['cmdpars'] + full_cmd = f"'{self.callPath}'" + " --keyspace --quiet " + update_files(task['attackcmd']).replace(task['hashlistAlias'] + " ", "") + ' ' + task['cmdpars'] if 'useBrain' in task and task['useBrain']: full_cmd += " -S" if Initialize.get_os() == 1: @@ -446,12 +446,12 @@ def preprocessor_keyspace(self, task, chunk): split = binary.split(".") binary = '.'.join(split[:-1]) + get_bit() + "." 
+ split[-1] - full_cmd = binary + " " + preprocessor['keyspaceCommand'] + " " + update_files(task.get_task()['preprocessorCommand']) + full_cmd = f"'{binary}'" + " " + preprocessor['keyspaceCommand'] + " " + update_files(task.get_task()['preprocessorCommand']) if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') try: logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd="preprocessor/" + str(task.get_task()['preprocessor'])) + output = subprocess.check_output(full_cmd, shell=True, cwd=self.config.get_value('preprocessors-path') + "/" + str(task.get_task()['preprocessor'])) except subprocess.CalledProcessError: logging.error("Error during preprocessor keyspace measure") send_error("Preprocessor keyspace measure failed!", self.config.get_value('token'), task.get_task()['taskId'], None) @@ -476,9 +476,9 @@ def run_benchmark(self, task): args = " --machine-readable --quiet --runtime=" + str(task['bench']) args += " --restore-disable --potfile-disable --session=hashtopolis -p \"" + str(chr(9)) + "\" " - args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "../../hashlists/" + str(task['hashlistId'])) + ' ' + task['cmdpars'] - args += " -o ../../hashlists/" + str(task['hashlistId']) + ".out" - full_cmd = self.callPath + args + args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] + args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out'" + full_cmd = f"'{self.callPath}'" + args if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug("CALL: " + full_cmd) @@ -526,16 +526,16 @@ def run_speed_benchmark(self, task): args = " --machine-readable --quiet --progress-only" args += " --restore-disable --potfile-disable --session=hashtopolis -p \"" + str(chr(9)) + "\" " if 'usePrince' in task and task['usePrince']: - args += get_rules_and_hl(update_files(task['attackcmd']), task['hashlistAlias']).replace(task['hashlistAlias'], "../../hashlists/" + str(task['hashlistId'])) + ' ' + args += get_rules_and_hl(update_files(task['attackcmd']), task['hashlistAlias']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") args += " example.dict" + ' ' + task['cmdpars'] else: - args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "../../hashlists/" + str(task['hashlistId'])) + ' ' + task['cmdpars'] + args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] if 'usePreprocessor' in task and task['usePreprocessor']: args += " example.dict" if 'useBrain' in task and task['useBrain']: args += " -S" - args += " -o ../../hashlists/" + str(task['hashlistId']) + ".out" - full_cmd = self.callPath + args + args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out'" + full_cmd = f"'{self.callPath}'" + args output = b'' if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') @@ -595,9 +595,9 @@ def agent_stopped(self): def run_health_check(self, attack, hashlist_alias): args = " --machine-readable --quiet" args += " --restore-disable --potfile-disable --session=health " - args += update_files(attack).replace(hashlist_alias, "../../hashlists/health_check.txt") - args += " -o ../../hashlists/health_check.out" - full_cmd = 
self.callPath + args + args += update_files(attack).replace(hashlist_alias, "'" + self.config.get_value('hashlists-path') + "/health_check.txt'") + args += " -o '" + self.config.get_value('hashlists-path') + "/health_check.out'" + full_cmd = f"'{self.callPath}'" + args if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug("CALL: " + full_cmd) diff --git a/htpclient/hashlist.py b/htpclient/hashlist.py index 9401b89..4e97864 100644 --- a/htpclient/hashlist.py +++ b/htpclient/hashlist.py @@ -26,7 +26,7 @@ def load_hashlist(self, hashlist_id): sleep(5) return False else: - Download.download(self.config.get_value('url').replace("api/server.php", "") + ans['url'], "hashlists/" + str(hashlist_id), True) + Download.download(self.config.get_value('url').replace("api/server.php", "") + ans['url'], self.config.get_value('hashlists-path') + "/" + str(hashlist_id), True) return True def load_found(self, hashlist_id, cracker_id): @@ -44,5 +44,5 @@ def load_found(self, hashlist_id, cracker_id): return False else: logging.info("Saving found hashes to hashcat potfile...") - Download.download(self.config.get_value('url').replace("api/server.php", "") + ans['url'], "crackers/" + str(cracker_id) + "/hashcat.potfile", True) + Download.download(self.config.get_value('url').replace("api/server.php", "") + ans['url'], self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/hashcat.potfile", True) return True diff --git a/htpclient/helpers.py b/htpclient/helpers.py index e7eb8bd..cd5efe9 100644 --- a/htpclient/helpers.py +++ b/htpclient/helpers.py @@ -11,6 +11,7 @@ from htpclient.dicts import copy_and_set_token, dict_clientError from htpclient.jsonRequest import JsonRequest +from htpclient.config import Config def log_error_and_exit(message): @@ -70,7 +71,7 @@ def start_uftpd(os_extension, config): cmd += "-I " + config.get_value('multicast-device') + ' ' else: cmd += "-I eth0 " # wild guess as default - cmd += "-D " + os.path.abspath("files/") + ' ' + cmd += "-D " + os.path.abspath(config.get_value('files-path') + "/") + ' ' cmd += "-L " + os.path.abspath("multicast/" + str(time.time()) + ".log") logging.debug("CALL: " + cmd) subprocess.check_output(cmd, shell=True) @@ -111,18 +112,17 @@ def clean_list(element_list): # the prince flag is deprecated def update_files(command, prince=False): + config = Config() + split = command.split(" ") ret = [] for part in split: # test if file exists if not part: continue - path = "files/" + part + path = config.get_value('files-path') + "/" + part if os.path.exists(path): - if prince: - ret.append("../" + path) - else: - ret.append("../../" + path) + ret.append(f"'{path}'") else: ret.append(part) return " %s " % " ".join(ret) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 22472d2..c8d29a1 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -210,18 +210,17 @@ def __check_url(self, args): self.__check_url(args) else: logging.debug("Connection test successful!") - + if args.cpu_only is not None and args.cpu_only: logging.debug("Setting agent to be CPU only..") self.config.set_value('cpu-only', True) - @staticmethod - def __build_directories(): - if not os.path.isdir("crackers"): - os.mkdir("crackers") - if not os.path.isdir("files"): - os.mkdir("files") - if not os.path.isdir("hashlists"): - os.mkdir("hashlists") - if not os.path.isdir("preprocessor"): - os.mkdir("preprocessor") + def __build_directories(self): + if not os.path.isdir(self.config.get_value('crackers-path')): + 
os.makedirs(self.config.get_value('crackers-path')) + if not os.path.isdir(self.config.get_value('files-path')): + os.makedirs(self.config.get_value('files-path')) + if not os.path.isdir(self.config.get_value('hashlists-path')): + os.makedirs(self.config.get_value('hashlists-path')) + if not os.path.isdir(self.config.get_value('preprocessors-path')): + os.makedirs(self.config.get_value('preprocessors-path')) From eab7f40c916314dc5f305147a7146e02f8bbc5da Mon Sep 17 00:00:00 2001 From: sein Date: Mon, 28 Nov 2022 20:18:25 +0100 Subject: [PATCH 09/77] include newest hashcat version in compatibility list --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index c2db07e..1df7387 100644 --- a/README.md +++ b/README.md @@ -127,6 +127,7 @@ In order to use the multicast distribution for files, please make sure that the The list contains all Hashcat versions with which the client was tested and is able to work with (other versions might work): +* 6.2.6 * 6.2.5 * 6.2.4 * 6.2.3 From 7056ce4ccd8bd3d3ef6c2381431dec6b446a409b Mon Sep 17 00:00:00 2001 From: sein Date: Mon, 28 Nov 2022 21:47:54 +0100 Subject: [PATCH 10/77] preparations for release 0.7.0 --- README.md | 2 +- changelog.md | 2 +- htpclient/initialize.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 1df7387..b83d745 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] -Hashtopolis Client v0.6.1 +Hashtopolis Client v0.7.0 optional arguments: -h, --help show this help message and exit diff --git a/changelog.md b/changelog.md index 7972c44..687dc54 100644 --- a/changelog.md +++ b/changelog.md @@ -1,4 +1,4 @@ -## v0.6.1 -> v0.x.x +## v0.6.1 -> v0.7.0 ### Enhancements diff --git a/htpclient/initialize.py b/htpclient/initialize.py index c8d29a1..fca75a6 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -16,7 +16,7 @@ def get_version(): @staticmethod def get_version_number(): - return "0.6.1" + return "0.7.0" def run(self, args): self.__check_cert(args) From 8c9213475948a67aa46384877b72a155c9fcd6f0 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Thu, 26 Jan 2023 12:22:52 +0100 Subject: [PATCH 11/77] Adding user cmd and post command, changing certificate ca for nodejs (#38) * Adding user cmd and post command * Fix nodejs of vscode does not use the default certificate store, this is a workaround for adding custom ca certs --- .devcontainer/Dockerfile | 9 +++++++-- .devcontainer/devcontainer.json | 4 +++- .devcontainer/docker-compose.yml | 3 ++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index cc2ab75..cdceaeb 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,12 +1,14 @@ FROM ubuntu:20.04 -ARG DEV_CONTAINER_USER_CMD +ARG DEV_CONTAINER_USER_CMD_PRE +ARG DEV_CONTAINER_USER_CMD_POST # Avoid warnings by switching to noninteractive ENV DEBIAN_FRONTEND=noninteractive +ENV NODE_OPTIONS='--use-openssl-ca' # Check for and run optional user-supplied command to enable (advanced) customizations of the dev container -RUN if [ -n "${DEV_CONTAINER_USER_CMD}" ]; then echo "${DEV_CONTAINER_USER_CMD}" | sh ; fi +RUN if [ -n "${DEV_CONTAINER_USER_CMD_PRE}" ]; then echo "${DEV_CONTAINER_USER_CMD_PRE}" | sh ; fi RUN groupadd vscode && useradd -rm 
-d /app -s /bin/bash -g vscode -u 1001 vscode @@ -43,5 +45,8 @@ USER vscode COPY requirements.txt /app/src/ RUN /usr/bin/pip3 install -r src/requirements.txt +# Check for and run optional user-supplied command to enable (advanced) customizations of the dev container +RUN if [ -n "${DEV_CONTAINER_USER_CMD_POST}" ]; then echo "${DEV_CONTAINER_USER_CMD_POST}" | sh ; fi + # Preventing container from exiting CMD tail -f /dev/null diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 8a1f3de..ae72a42 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -4,7 +4,9 @@ "service": "hashtopolis-agent", "workspaceFolder": "/app/src", - + "remoteEnv": { + "NODE_OPTIONS": "--use-openssl-ca", + }, "extensions": [ "ms-python.python" ], diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 53be70c..2e4d941 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -6,7 +6,8 @@ services: context: .. dockerfile: .devcontainer/Dockerfile args: - - DEV_CONTAINER_USER_CMD + - DEV_CONTAINER_USER_CMD_PRE + - DEV_CONTAINER_USER_CMD_POST volumes: # This is where VS Code should expect to find your project's source code # and the value of "workspaceFolder" in .devcontainer/devcontainer.json From 953a77ce01d514d7c2ec094a033bba00817a2bb4 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 27 Jan 2023 16:21:48 +0100 Subject: [PATCH 12/77] Adding missing license (#39) --- LICENSE.txt | 674 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 674 insertions(+) create mode 100644 LICENSE.txt diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..30ace6a --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. 
You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + {project} Copyright (C) {year} {fullname} + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. \ No newline at end of file From 97a593df4837e08087359a02319d9c08752420e2 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 3 Feb 2023 17:05:28 +0100 Subject: [PATCH 13/77] Adding test for a test --- tests/test_hashcat_cracker.py | 44 +++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 tests/test_hashcat_cracker.py diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py new file mode 100644 index 0000000..a7c8286 --- /dev/null +++ b/tests/test_hashcat_cracker.py @@ -0,0 +1,44 @@ +import pytest +from unittest import mock + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload + +from argparse import Namespace + +# The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. 
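# One way to realize the comment above (a sketch only, assuming pytest's conftest.py
# fixture mechanism; the file name tests/conftest.py and the fixture name
# `default_agent_args` are illustrative assumptions, not code from this patch series):
#
# tests/conftest.py
import pytest
from argparse import Namespace

@pytest.fixture
def default_agent_args():
    """Default agent command-line arguments, shared across tests instead of being redefined per module."""
    return Namespace(cert=None, cpu_only=False, crackers_path=None, de_register=False,
                     debug=True, disable_update=False, files_path=None,
                     hashlists_path=None, number_only=False, preprocessors_path=None,
                     url='http://example.com/api/server.php', version=False,
                     voucher='devvoucher', zaps_path=None)

# A test would then request the fixture by parameter name, e.g.:
#
# def test_binary_download(default_agent_args):
#     binaryDownload = BinaryDownload(default_agent_args)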
+test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + +@mock.patch('htpclient.initialize.Initialize.get_os') +@mock.patch('subprocess.check_output') +@mock.patch('htpclient.jsonRequest.JsonRequest.execute') +@mock.patch('htpclient.download.Download.download') +@mock.patch('os.system') +@mock.patch('os.unlink') +def test_hashcat_cracker_linux(mock_unlink, mock_system, mock_download, mock_get, mock_check_output, mock_get_os): + #TODO: Make paths based on environment + #TODO: Clean all cracker folders etc + + # Force Linux OS + mock_get_os.return_value = 0 + + binaryDownload = BinaryDownload(test_args) + + # When calling binaryDownload.check_version(1), this will make a request for executable name + # Download the 7z if the cracker is not there + # Extract the 7z + # And cleanup the temp file + + mock_get.return_value = {'response': 'SUCCESS', 'url': 'leeg', 'executable': 'hashcat.bin'} + mock_download.return_value = True + mock_check_output.return_value = 'v6.2.6\n'.encode() + binaryDownload.check_version(1) + + # Checking if system and unlink were called correctly. + mock_system.assert_called_with("./7zr x -o'/app/src/crackers/temp' '/app/src/crackers/1.7z'") + mock_unlink.assert_called_with("/app/src/crackers/1.7z") + + # This will call 'hashcat --version' + hashcat = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with("'./hashcat64.bin' --version", shell=True, cwd='/app/src/crackers/1/') From b87f4a0a710b2dd1432ef444f62ce43d2852dcb4 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 12:02:50 +0100 Subject: [PATCH 14/77] Experimental testing on agent code --- tests/__init__.py | 0 tests/hashtopolis-test.yaml | 3 + tests/test_hashcat_cracker.py | 354 +++++++++++++++++++++++++++++++--- 3 files changed, 329 insertions(+), 28 deletions(-) create mode 100644 tests/__init__.py create mode 100644 tests/hashtopolis-test.yaml diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/hashtopolis-test.yaml b/tests/hashtopolis-test.yaml new file mode 100644 index 0000000..f763abb --- /dev/null +++ b/tests/hashtopolis-test.yaml @@ -0,0 +1,3 @@ +hashtopolis_uri: 'http://hashtopolis:80' +username: 'root' +password: 'hashtopolis' diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index a7c8286..540c609 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -7,38 +7,336 @@ from argparse import Namespace # The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. 
-test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) +# test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# PoC testing/development framework for APIv2 +# Written in python to work on creation of hashtopolis APIv2 python binding. +# +import json +import requests +import unittest +import datetime +from pathlib import Path -@mock.patch('htpclient.initialize.Initialize.get_os') -@mock.patch('subprocess.check_output') -@mock.patch('htpclient.jsonRequest.JsonRequest.execute') -@mock.patch('htpclient.download.Download.download') -@mock.patch('os.system') -@mock.patch('os.unlink') -def test_hashcat_cracker_linux(mock_unlink, mock_system, mock_download, mock_get, mock_check_output, mock_get_os): - #TODO: Make paths based on environment - #TODO: Clean all cracker folders etc +import requests +import unittest +import logging +from pathlib import Path +import abc - # Force Linux OS - mock_get_os.return_value = 0 +import http +import confidence - binaryDownload = BinaryDownload(test_args) +#logging.basicConfig(level=logging.DEBUG) + +logger = logging.getLogger(__name__) + +HTTP_DEBUG = False + +# Monkey patching to allow http debugging +if HTTP_DEBUG: + http_logger = logging.getLogger('http.client') + http.client.HTTPConnection.debuglevel = 0 + def print_to_log(*args): + http_logger.debug(" ".join(args)) + http.client.print = print_to_log + +cls_registry = {} + + +class Config(object): + def __init__(self): + # Request access TOKEN, used throughout the test + load_order = confidence.DEFAULT_LOAD_ORDER + (str(Path(__file__).parent.joinpath('{name}.{extension}')),) + self._cfg = confidence.load_name('hashtopolis-test', load_order=load_order) + self._hashtopolis_uri = self._cfg['hashtopolis_uri'] + self._api_endpoint = self._hashtopolis_uri + '/api/v2' + self.username = self._cfg['username'] + self.password = self._cfg['password'] - # When calling binaryDownload.check_version(1), this will make a request for executable name - # Download the 7z if the cracker is not there - # Extract the 7z - # And cleanup the temp file - mock_get.return_value = {'response': 'SUCCESS', 'url': 'leeg', 'executable': 'hashcat.bin'} - mock_download.return_value = True - mock_check_output.return_value = 'v6.2.6\n'.encode() - binaryDownload.check_version(1) - - # Checking if system and unlink were called correctly. 
- mock_system.assert_called_with("./7zr x -o'/app/src/crackers/temp' '/app/src/crackers/1.7z'") - mock_unlink.assert_called_with("/app/src/crackers/1.7z") + + +class HashtopolisConnector(object): + # Cache authorisation token per endpoint + token = {} + token_expires = {} + + def __init__(self, model_uri, config): + self._model_uri = model_uri + self._api_endpoint = config._api_endpoint + self._hashtopolis_uri = config._hashtopolis_uri + self.config = config + + def authenticate(self): + if not self._api_endpoint in HashtopolisConnector.token: + # Request access TOKEN, used throughout the test + + logger.info("Start authentication") + auth_uri = self._api_endpoint + '/auth/token' + auth = (self.config.username, self.config.password) + r = requests.post(auth_uri, auth=auth) + + HashtopolisConnector.token[self._api_endpoint] = r.json()['token'] + HashtopolisConnector.token_expires[self._api_endpoint] = r.json()['token'] + + self._token = HashtopolisConnector.token[self._api_endpoint] + self._token_expires = HashtopolisConnector.token_expires[self._api_endpoint] + + self._headers = { + 'Authorization': 'Bearer ' + self._token, + 'Content-Type': 'application/json' + } + + + def get_all(self): + self.authenticate() + + uri = self._api_endpoint + self._model_uri + headers = self._headers + payload = {} + + r = requests.get(uri, headers=headers, data=json.dumps(payload)) + return r.json()['values'] + + def patch_one(self, obj): + if not obj.has_changed(): + logger.debug("Object '%s' has not changed, no PATCH required", obj) + return + + self.authenticate() + uri = self._hashtopolis_uri + obj._self + headers = self._headers + payload = {} + + for k,v in obj.diff().items(): + logger.debug("Going to patch object '%s' property '%s' from '%s' to '%s'", obj, k, v[0], v[1]) + payload[k] = v[1] + + r = requests.patch(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 201: + logger.exception("Patching failed: %s", r.text) + + # TODO: Validate if return objects matches digital twin + obj.set_initial(r.json().copy()) + + def create(self, obj): + # Check if object to be created is new + assert(not hasattr(obj, '_self')) + + self.authenticate() + uri = self._api_endpoint + self._model_uri + headers = self._headers + payload = dict([(k,v[1]) for (k,v) in obj.diff().items()]) + + r = requests.post(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 201: + logger.exception("Creation of object failed: %s", r.text) + + # TODO: Validate if return objects matches digital twin + obj.set_initial(r.json().copy()) + + + def delete(self, obj): + """ Delete object from database """ + # TODO: Check if object to be deleted actually exists + assert(hasattr(obj, '_self')) + + self.authenticate() + uri = self._hashtopolis_uri + obj._self + headers = self._headers + payload = {} + + + r = requests.delete(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 204: + logger.exception("Deletion of object failed: %s", r.text) + + # TODO: Cleanup object to allow re-creation + + +class ManagerBase(type): + conn = {} + # Cache configuration values + config = None + @classmethod + def get_conn(cls): + if cls.config is None: + cls.config = Config() + + if cls._model_uri not in cls.conn: + cls.conn[cls._model_uri] = HashtopolisConnector(cls._model_uri, cls.config) + return cls.conn[cls._model_uri] + + @classmethod + def all(cls): + """ + Retrieve all backend objects + TODO: Make iterator supporting loading of objects via pages + """ + # Get all objects + api_objs = 
cls.get_conn().get_all() + + + # Convert into class + objs = [] + for api_obj in api_objs: + new_obj = cls._model(**api_obj) + objs.append(new_obj) + return objs + + @classmethod + def patch(cls, obj): + cls.get_conn().patch_one(obj) + + @classmethod + def create(cls, obj): + cls.get_conn().create(obj) + + @classmethod + def delete(cls, obj): + cls.get_conn().delete(obj) + + @classmethod + def get_first(cls): + """ + Retrieve first object + TODO: Error handling if first object does not exists + TODO: Request object with limit parameter instead + """ + return cls.all()[0] + +# Build Django ORM style 'ModelName.objects' interface +class ModelBase(type): + def __new__(cls, clsname, bases, attrs, uri=None, **kwargs): + parents = [b for b in bases if isinstance(b, ModelBase)] + if not parents: + return super().__new__(cls, clsname, bases, attrs) + + new_class = super().__new__(cls, clsname, bases, attrs) + + setattr(new_class, 'objects', type('Manager', (ManagerBase,), {'_model_uri': uri})) + setattr(new_class.objects, '_model', new_class) + cls_registry[clsname] = new_class + + return new_class + + +class Model(metaclass=ModelBase): + def __init__(self, *args, **kwargs): + self.set_initial(kwargs) + super().__init__() + + def set_initial(self, kv): + self.__fields = [] + # Store fields allowing us to detect changed values + if '_self' in kv: + self.__initial = kv.copy() + else: + # New model + self.__initial = {} + + # Create attribute values + for k,v in kv.items(): + setattr(self, k, v) + if not k.startswith('_'): + self.__fields.append(k) + + + def diff(self): + d1 = self.__initial + d2 = dict([(k, getattr(self, k)) for k in self.__fields]) + diffs = [(k, (v, d2[k])) for k, v in d2.items() if v != d1.get(k, None)] + return dict(diffs) + + def has_changed(self): + return bool(self.diff()) + + def save(self): + if hasattr(self, '_self'): + self.objects.patch(self) + else: + self.objects.create(self) + + def delete(self): + if hasattr(self, '_self'): + self.objects.delete(self) + + def serialize(self): + return [x for x in vars(self) if not x.startswith('_')] + + +class Task(Model, uri="/ui/tasks"): + def __repr__(self): + return self._self + + +class Hashlist(Model, uri="/ui/hashlists"): + def __repr__(self): + return self._self + + +class HashcatCrackerTestLinux(unittest.TestCase): + def test_correct_flow(self): + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist = Hashlist(**payload) + hashlist.save() + + # Create Task + for p in sorted(Path(__file__).parent.glob('create_task_001.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist._id) + obj = Task(**payload) + obj.save() + + # + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(1) + hashcat = HashcatCracker(1, binaryDownload) + + # Cleanup + obj.delete() + hashlist.delete() + +if __name__ == '__main__': + unittest.main() + + +# @mock.patch('htpclient.initialize.Initialize.get_os') +# @mock.patch('subprocess.check_output') +# @mock.patch('htpclient.jsonRequest.JsonRequest.execute') +# @mock.patch('htpclient.download.Download.download') +# @mock.patch('os.system') +# @mock.patch('os.unlink') +# 
def test_hashcat_cracker_linux(mock_unlink, mock_system, mock_download, mock_get, mock_check_output, mock_get_os): +# #TODO: Make paths based on environment +# #TODO: Clean all cracker folders etc + +# # Force Linux OS +# mock_get_os.return_value = 0 + +# binaryDownload = BinaryDownload(test_args) + +# # When calling binaryDownload.check_version(1), this will make a request for executable name +# # Download the 7z if the cracker is not there +# # Extract the 7z +# # And cleanup the temp file + +# mock_get.return_value = {'response': 'SUCCESS', 'url': 'leeg', 'executable': 'hashcat.bin'} +# mock_download.return_value = True +# mock_check_output.return_value = 'v6.2.6\n'.encode() +# binaryDownload.check_version(1) + +# # Checking if system and unlink were called correctly. +# mock_system.assert_called_with("./7zr x -o'/app/src/crackers/temp' '/app/src/crackers/1.7z'") +# mock_unlink.assert_called_with("/app/src/crackers/1.7z") - # This will call 'hashcat --version' - hashcat = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with("'./hashcat64.bin' --version", shell=True, cwd='/app/src/crackers/1/') +# # This will call 'hashcat --version' +# hashcat = HashcatCracker(1, binaryDownload) +# mock_check_output.assert_called_with("'./hashcat64.bin' --version", shell=True, cwd='/app/src/crackers/1/') From e691fe588b434b69e5b4a9fbae326b4b8728fc23 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 12:12:02 +0100 Subject: [PATCH 15/77] Adding mission json files --- .gitignore | 1 + tests/create_hashlist_001.json | 19 +++++++++++++++++++ tests/create_task_001.json | 23 +++++++++++++++++++++++ 3 files changed, 43 insertions(+) create mode 100644 tests/create_hashlist_001.json create mode 100644 tests/create_task_001.json diff --git a/.gitignore b/.gitignore index 9e16903..50b7510 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.exe *.log *.json +!/tests/*.json crackers prince files diff --git a/tests/create_hashlist_001.json b/tests/create_hashlist_001.json new file mode 100644 index 0000000..da2cb28 --- /dev/null +++ b/tests/create_hashlist_001.json @@ -0,0 +1,19 @@ +{ + "name": "Hashlist-md5sum-test123", + "hashTypeId": 1, + "format": 0, + "separator": ";", + "isSalted": false, + "isHexSalt": false, + "accessGroupId": 1, + "useBrain": false, + "brainFeatures": 3, + "notes": "gj", + "sourceType": "paste", + "sourceData": "Y2MwM2U3NDdhNmFmYmJjYmY4YmU3NjY4YWNmZWJlZTUK", + "hashCount": 0, + "cracked": 0, + "isArchived": false, + "isSecret": false + } + \ No newline at end of file diff --git a/tests/create_task_001.json b/tests/create_task_001.json new file mode 100644 index 0000000..147ebeb --- /dev/null +++ b/tests/create_task_001.json @@ -0,0 +1,23 @@ +{ + "attackCmd": "#HL# -a 0 ad a1", + "chunkSize": 1000, + "chunkTime": 600, + "color": "7C6EFF", + "crackerBinaryId": 1, + "crackerBinaryTypeId": 1, + "forcePipe": true, + "files": [], + "isArchived": false, + "isCpuTask": false, + "isSmall": false, + "maxAgents": 112, + "notes": "example-note", + "preprocessorCommand": "this-is-prepressor", + "priority": 10, + "skipKeyspace": 500, + "staticChunks": 2, + "statusTimer": 5, + "taskName": "Example - Rijmen and Daemen", + "useNewBench": true, + "usePreprocessor": true +} From 09cfa13108573956d0d7ed1a5afd00ea2d3321ea Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 13:01:42 +0100 Subject: [PATCH 16/77] Adding extra requirements files for running tests --- .devcontainer/Dockerfile | 2 ++ requirements-tests.yaml | 2 ++ 2 files changed, 4 
insertions(+) create mode 100644 requirements-tests.yaml diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index cdceaeb..4b8495e 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -43,7 +43,9 @@ ENV DEBIAN_FRONTEND=dialog WORKDIR /app USER vscode COPY requirements.txt /app/src/ +COPY requirements-tests.txt /app/src/ RUN /usr/bin/pip3 install -r src/requirements.txt +RUN /usr/bin/pip3 install -r src/requirements-tests.txt # Check for and run optional user-supplied command to enable (advanced) customizations of the dev container RUN if [ -n "${DEV_CONTAINER_USER_CMD_POST}" ]; then echo "${DEV_CONTAINER_USER_CMD_POST}" | sh ; fi diff --git a/requirements-tests.yaml b/requirements-tests.yaml new file mode 100644 index 0000000..d9cf678 --- /dev/null +++ b/requirements-tests.yaml @@ -0,0 +1,2 @@ +pytest +confidence \ No newline at end of file From 616ae742534dc8bdf746003e3ccc9089950957d0 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 13:02:04 +0100 Subject: [PATCH 17/77] Adding lines for gitignore to skip preprocessors and 7zr --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 50b7510..f8b1bf3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,11 @@ *.iml *.exe +7zr *.log *.json !/tests/*.json crackers +preprocessors prince files hashlists From 80c3ba0489638861b7df1215591d1938d07accf2 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 13:02:37 +0100 Subject: [PATCH 18/77] Fix bug that prevents from debugging tests through vscode --- .vscode/launch.json | 1 + 1 file changed, 1 insertion(+) diff --git a/.vscode/launch.json b/.vscode/launch.json index 9a1446b..8185486 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,4 +1,5 @@ { + "version": "0.2.0", "configurations": [ { "name": "Python: Current File", From a37cc036cbab037ae9b46bac8bd82b95c6dce046 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 13:03:11 +0100 Subject: [PATCH 19/77] Adding hashtopolis test module, moved it out of an individual test --- tests/hashtopolis.py | 268 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 268 insertions(+) create mode 100644 tests/hashtopolis.py diff --git a/tests/hashtopolis.py b/tests/hashtopolis.py new file mode 100644 index 0000000..99649b4 --- /dev/null +++ b/tests/hashtopolis.py @@ -0,0 +1,268 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# PoC testing/development framework for APIv2 +# Written in python to work on creation of hashtopolis APIv2 python binding. 
+# +import json +import requests +import unittest +import datetime +from pathlib import Path + +import requests +import unittest +import logging +from pathlib import Path +import abc + +import http +import confidence + +#logging.basicConfig(level=logging.DEBUG) + +logger = logging.getLogger(__name__) + +HTTP_DEBUG = False + +# Monkey patching to allow http debugging +if HTTP_DEBUG: + http_logger = logging.getLogger('http.client') + http.client.HTTPConnection.debuglevel = 0 + def print_to_log(*args): + http_logger.debug(" ".join(args)) + http.client.print = print_to_log + +cls_registry = {} + + +class Config(object): + def __init__(self): + # Request access TOKEN, used throughout the test + load_order = confidence.DEFAULT_LOAD_ORDER + (str(Path(__file__).parent.joinpath('{name}.{extension}')),) + self._cfg = confidence.load_name('hashtopolis-test', load_order=load_order) + self._hashtopolis_uri = self._cfg['hashtopolis_uri'] + self._api_endpoint = self._hashtopolis_uri + '/api/v2' + self.username = self._cfg['username'] + self.password = self._cfg['password'] + + + + +class HashtopolisConnector(object): + # Cache authorisation token per endpoint + token = {} + token_expires = {} + + def __init__(self, model_uri, config): + self._model_uri = model_uri + self._api_endpoint = config._api_endpoint + self._hashtopolis_uri = config._hashtopolis_uri + self.config = config + + def authenticate(self): + if not self._api_endpoint in HashtopolisConnector.token: + # Request access TOKEN, used throughout the test + + logger.info("Start authentication") + auth_uri = self._api_endpoint + '/auth/token' + auth = (self.config.username, self.config.password) + r = requests.post(auth_uri, auth=auth) + + HashtopolisConnector.token[self._api_endpoint] = r.json()['token'] + HashtopolisConnector.token_expires[self._api_endpoint] = r.json()['token'] + + self._token = HashtopolisConnector.token[self._api_endpoint] + self._token_expires = HashtopolisConnector.token_expires[self._api_endpoint] + + self._headers = { + 'Authorization': 'Bearer ' + self._token, + 'Content-Type': 'application/json' + } + + + def get_all(self): + self.authenticate() + + uri = self._api_endpoint + self._model_uri + headers = self._headers + payload = {} + + r = requests.get(uri, headers=headers, data=json.dumps(payload)) + return r.json()['values'] + + def patch_one(self, obj): + if not obj.has_changed(): + logger.debug("Object '%s' has not changed, no PATCH required", obj) + return + + self.authenticate() + uri = self._hashtopolis_uri + obj._self + headers = self._headers + payload = {} + + for k,v in obj.diff().items(): + logger.debug("Going to patch object '%s' property '%s' from '%s' to '%s'", obj, k, v[0], v[1]) + payload[k] = v[1] + + r = requests.patch(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 201: + logger.exception("Patching failed: %s", r.text) + + # TODO: Validate if return objects matches digital twin + obj.set_initial(r.json().copy()) + + def create(self, obj): + # Check if object to be created is new + assert(not hasattr(obj, '_self')) + + self.authenticate() + uri = self._api_endpoint + self._model_uri + headers = self._headers + payload = dict([(k,v[1]) for (k,v) in obj.diff().items()]) + + r = requests.post(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 201: + logger.exception("Creation of object failed: %s", r.text) + + # TODO: Validate if return objects matches digital twin + obj.set_initial(r.json().copy()) + + + def delete(self, obj): + """ Delete object from database 
""" + # TODO: Check if object to be deleted actually exists + assert(hasattr(obj, '_self')) + + self.authenticate() + uri = self._hashtopolis_uri + obj._self + headers = self._headers + payload = {} + + + r = requests.delete(uri, headers=headers, data=json.dumps(payload)) + if r.status_code != 204: + logger.exception("Deletion of object failed: %s", r.text) + + # TODO: Cleanup object to allow re-creation + + +class ManagerBase(type): + conn = {} + # Cache configuration values + config = None + @classmethod + def get_conn(cls): + if cls.config is None: + cls.config = Config() + + if cls._model_uri not in cls.conn: + cls.conn[cls._model_uri] = HashtopolisConnector(cls._model_uri, cls.config) + return cls.conn[cls._model_uri] + + @classmethod + def all(cls): + """ + Retrieve all backend objects + TODO: Make iterator supporting loading of objects via pages + """ + # Get all objects + api_objs = cls.get_conn().get_all() + + + # Convert into class + objs = [] + for api_obj in api_objs: + new_obj = cls._model(**api_obj) + objs.append(new_obj) + return objs + + @classmethod + def patch(cls, obj): + cls.get_conn().patch_one(obj) + + @classmethod + def create(cls, obj): + cls.get_conn().create(obj) + + @classmethod + def delete(cls, obj): + cls.get_conn().delete(obj) + + @classmethod + def get_first(cls): + """ + Retrieve first object + TODO: Error handling if first object does not exists + TODO: Request object with limit parameter instead + """ + return cls.all()[0] + +# Build Django ORM style 'ModelName.objects' interface +class ModelBase(type): + def __new__(cls, clsname, bases, attrs, uri=None, **kwargs): + parents = [b for b in bases if isinstance(b, ModelBase)] + if not parents: + return super().__new__(cls, clsname, bases, attrs) + + new_class = super().__new__(cls, clsname, bases, attrs) + + setattr(new_class, 'objects', type('Manager', (ManagerBase,), {'_model_uri': uri})) + setattr(new_class.objects, '_model', new_class) + cls_registry[clsname] = new_class + + return new_class + + +class Model(metaclass=ModelBase): + def __init__(self, *args, **kwargs): + self.set_initial(kwargs) + super().__init__() + + def set_initial(self, kv): + self.__fields = [] + # Store fields allowing us to detect changed values + if '_self' in kv: + self.__initial = kv.copy() + else: + # New model + self.__initial = {} + + # Create attribute values + for k,v in kv.items(): + setattr(self, k, v) + if not k.startswith('_'): + self.__fields.append(k) + + + def diff(self): + d1 = self.__initial + d2 = dict([(k, getattr(self, k)) for k in self.__fields]) + diffs = [(k, (v, d2[k])) for k, v in d2.items() if v != d1.get(k, None)] + return dict(diffs) + + def has_changed(self): + return bool(self.diff()) + + def save(self): + if hasattr(self, '_self'): + self.objects.patch(self) + else: + self.objects.create(self) + + def delete(self): + if hasattr(self, '_self'): + self.objects.delete(self) + + def serialize(self): + return [x for x in vars(self) if not x.startswith('_')] + + +class Task(Model, uri="/ui/tasks"): + def __repr__(self): + return self._self + + +class Hashlist(Model, uri="/ui/hashlists"): + def __repr__(self): + return self._self \ No newline at end of file From 62cc25dbe9af2d319de60ef4284416c4129854d2 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 13:03:42 +0100 Subject: [PATCH 20/77] Added initial happy flow test for linux (up to ./hashcat --version) --- tests/test_hashcat_cracker.py | 351 +++++----------------------------- 1 file changed, 43 insertions(+), 308 deletions(-) diff --git 
a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 540c609..986d16e 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -1,286 +1,39 @@ import pytest from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace from htpclient.hashcat_cracker import HashcatCracker from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize -from argparse import Namespace +from tests.hashtopolis import Hashlist, Task # The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. # test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# PoC testing/development framework for APIv2 -# Written in python to work on creation of hashtopolis APIv2 python binding. -# -import json -import requests -import unittest -import datetime -from pathlib import Path - -import requests -import unittest -import logging -from pathlib import Path -import abc - -import http -import confidence - -#logging.basicConfig(level=logging.DEBUG) - -logger = logging.getLogger(__name__) - -HTTP_DEBUG = False - -# Monkey patching to allow http debugging -if HTTP_DEBUG: - http_logger = logging.getLogger('http.client') - http.client.HTTPConnection.debuglevel = 0 - def print_to_log(*args): - http_logger.debug(" ".join(args)) - http.client.print = print_to_log - -cls_registry = {} - - -class Config(object): - def __init__(self): - # Request access TOKEN, used throughout the test - load_order = confidence.DEFAULT_LOAD_ORDER + (str(Path(__file__).parent.joinpath('{name}.{extension}')),) - self._cfg = confidence.load_name('hashtopolis-test', load_order=load_order) - self._hashtopolis_uri = self._cfg['hashtopolis_uri'] - self._api_endpoint = self._hashtopolis_uri + '/api/v2' - self.username = self._cfg['username'] - self.password = self._cfg['password'] - - - - -class HashtopolisConnector(object): - # Cache authorisation token per endpoint - token = {} - token_expires = {} - - def __init__(self, model_uri, config): - self._model_uri = model_uri - self._api_endpoint = config._api_endpoint - self._hashtopolis_uri = config._hashtopolis_uri - self.config = config - - def authenticate(self): - if not self._api_endpoint in HashtopolisConnector.token: - # Request access TOKEN, used throughout the test - - logger.info("Start authentication") - auth_uri = self._api_endpoint + '/auth/token' - auth = (self.config.username, self.config.password) - r = requests.post(auth_uri, auth=auth) - - HashtopolisConnector.token[self._api_endpoint] = r.json()['token'] - HashtopolisConnector.token_expires[self._api_endpoint] = r.json()['token'] - - self._token = HashtopolisConnector.token[self._api_endpoint] - self._token_expires = HashtopolisConnector.token_expires[self._api_endpoint] - - self._headers = { - 'Authorization': 'Bearer ' + self._token, - 'Content-Type': 'application/json' - } - - - def get_all(self): - self.authenticate() - - uri = self._api_endpoint + self._model_uri - headers = self._headers - payload 
= {} - - r = requests.get(uri, headers=headers, data=json.dumps(payload)) - return r.json()['values'] - - def patch_one(self, obj): - if not obj.has_changed(): - logger.debug("Object '%s' has not changed, no PATCH required", obj) - return - - self.authenticate() - uri = self._hashtopolis_uri + obj._self - headers = self._headers - payload = {} - - for k,v in obj.diff().items(): - logger.debug("Going to patch object '%s' property '%s' from '%s' to '%s'", obj, k, v[0], v[1]) - payload[k] = v[1] - - r = requests.patch(uri, headers=headers, data=json.dumps(payload)) - if r.status_code != 201: - logger.exception("Patching failed: %s", r.text) - - # TODO: Validate if return objects matches digital twin - obj.set_initial(r.json().copy()) - - def create(self, obj): - # Check if object to be created is new - assert(not hasattr(obj, '_self')) - - self.authenticate() - uri = self._api_endpoint + self._model_uri - headers = self._headers - payload = dict([(k,v[1]) for (k,v) in obj.diff().items()]) - - r = requests.post(uri, headers=headers, data=json.dumps(payload)) - if r.status_code != 201: - logger.exception("Creation of object failed: %s", r.text) - - # TODO: Validate if return objects matches digital twin - obj.set_initial(r.json().copy()) - - - def delete(self, obj): - """ Delete object from database """ - # TODO: Check if object to be deleted actually exists - assert(hasattr(obj, '_self')) - - self.authenticate() - uri = self._hashtopolis_uri + obj._self - headers = self._headers - payload = {} - - - r = requests.delete(uri, headers=headers, data=json.dumps(payload)) - if r.status_code != 204: - logger.exception("Deletion of object failed: %s", r.text) - - # TODO: Cleanup object to allow re-creation - - -class ManagerBase(type): - conn = {} - # Cache configuration values - config = None - @classmethod - def get_conn(cls): - if cls.config is None: - cls.config = Config() - - if cls._model_uri not in cls.conn: - cls.conn[cls._model_uri] = HashtopolisConnector(cls._model_uri, cls.config) - return cls.conn[cls._model_uri] - - @classmethod - def all(cls): - """ - Retrieve all backend objects - TODO: Make iterator supporting loading of objects via pages - """ - # Get all objects - api_objs = cls.get_conn().get_all() - - - # Convert into class - objs = [] - for api_obj in api_objs: - new_obj = cls._model(**api_obj) - objs.append(new_obj) - return objs - - @classmethod - def patch(cls, obj): - cls.get_conn().patch_one(obj) - - @classmethod - def create(cls, obj): - cls.get_conn().create(obj) - - @classmethod - def delete(cls, obj): - cls.get_conn().delete(obj) - - @classmethod - def get_first(cls): - """ - Retrieve first object - TODO: Error handling if first object does not exists - TODO: Request object with limit parameter instead - """ - return cls.all()[0] - -# Build Django ORM style 'ModelName.objects' interface -class ModelBase(type): - def __new__(cls, clsname, bases, attrs, uri=None, **kwargs): - parents = [b for b in bases if isinstance(b, ModelBase)] - if not parents: - return super().__new__(cls, clsname, bases, attrs) - - new_class = super().__new__(cls, clsname, bases, attrs) - - setattr(new_class, 'objects', type('Manager', (ManagerBase,), {'_model_uri': uri})) - setattr(new_class.objects, '_model', new_class) - cls_registry[clsname] = new_class - - return new_class - - -class Model(metaclass=ModelBase): - def __init__(self, *args, **kwargs): - self.set_initial(kwargs) - super().__init__() - - def set_initial(self, kv): - self.__fields = [] - # Store fields allowing us to detect changed 
values - if '_self' in kv: - self.__initial = kv.copy() - else: - # New model - self.__initial = {} - - # Create attribute values - for k,v in kv.items(): - setattr(self, k, v) - if not k.startswith('_'): - self.__fields.append(k) - - - def diff(self): - d1 = self.__initial - d2 = dict([(k, getattr(self, k)) for k in self.__fields]) - diffs = [(k, (v, d2[k])) for k, v in d2.items() if v != d1.get(k, None)] - return dict(diffs) - - def has_changed(self): - return bool(self.diff()) - - def save(self): - if hasattr(self, '_self'): - self.objects.patch(self) - else: - self.objects.create(self) - - def delete(self): - if hasattr(self, '_self'): - self.objects.delete(self) - - def serialize(self): - return [x for x in vars(self) if not x.startswith('_')] - - -class Task(Model, uri="/ui/tasks"): - def __repr__(self): - return self._self - - -class Hashlist(Model, uri="/ui/hashlists"): - def __repr__(self): - return self._self - - class HashcatCrackerTestLinux(unittest.TestCase): - def test_correct_flow(self): + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): + # Clean up cracker folder + if os.path.exists('crackers/1'): + shutil.rmtree('crackers/1') + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + # Create hashlist p = Path(__file__).parent.joinpath('create_hashlist_001.json') payload = json.loads(p.read_text('UTF-8')) @@ -294,11 +47,27 @@ def test_correct_flow(self): obj = Task(**payload) obj.save() - # + # Cmd parameters setup test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + binaryDownload = BinaryDownload(test_args) - binaryDownload.check_version(1) + binaryDownload.check_version(cracker_id) + + cracker_zip = Path(crackers_path, f'{cracker_id}.7z') + crackers_temp = Path(crackers_path, 'temp') + zip_binary = './7zr' + mock_unlink.assert_called_with(str(cracker_zip)) + + mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") + + # hashcat = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd='/app/src/crackers/1/') # Cleanup obj.delete() @@ -306,37 +75,3 @@ def test_correct_flow(self): if __name__ == '__main__': unittest.main() - - -# @mock.patch('htpclient.initialize.Initialize.get_os') -# @mock.patch('subprocess.check_output') -# @mock.patch('htpclient.jsonRequest.JsonRequest.execute') -# @mock.patch('htpclient.download.Download.download') -# @mock.patch('os.system') -# @mock.patch('os.unlink') -# def test_hashcat_cracker_linux(mock_unlink, mock_system, mock_download, mock_get, mock_check_output, mock_get_os): -# #TODO: Make paths based on environment -# #TODO: Clean all cracker folders etc - -# # Force Linux OS -# mock_get_os.return_value = 0 - -# binaryDownload = BinaryDownload(test_args) - -# # When calling binaryDownload.check_version(1), this will make a request for executable name -# # Download the 7z if the cracker is not there -# # 
Extract the 7z -# # And cleanup the temp file - -# mock_get.return_value = {'response': 'SUCCESS', 'url': 'leeg', 'executable': 'hashcat.bin'} -# mock_download.return_value = True -# mock_check_output.return_value = 'v6.2.6\n'.encode() -# binaryDownload.check_version(1) - -# # Checking if system and unlink were called correctly. -# mock_system.assert_called_with("./7zr x -o'/app/src/crackers/temp' '/app/src/crackers/1.7z'") -# mock_unlink.assert_called_with("/app/src/crackers/1.7z") - -# # This will call 'hashcat --version' -# hashcat = HashcatCracker(1, binaryDownload) -# mock_check_output.assert_called_with("'./hashcat64.bin' --version", shell=True, cwd='/app/src/crackers/1/') From 3eb7c3197c683ce390c03d1e8dc2bcfb611a5ab7 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 15:52:09 +0100 Subject: [PATCH 21/77] Adding test for --keyspace --- tests/test_hashcat_cracker.py | 44 ++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 986d16e..5e32130 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -15,8 +15,15 @@ from htpclient.session import Session from htpclient.config import Config from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest -from tests.hashtopolis import Hashlist, Task +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 # The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. 
# test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) @@ -30,6 +37,8 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): if os.path.exists('crackers/1'): shutil.rmtree('crackers/1') + #TODO: Delete tasks / hashlist to ensure clean + # Setup session object session = Session(requests.Session()).s session.headers.update({'User-Agent': Initialize.get_version()}) @@ -37,14 +46,14 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): # Create hashlist p = Path(__file__).parent.joinpath('create_hashlist_001.json') payload = json.loads(p.read_text('UTF-8')) - hashlist = Hashlist(**payload) - hashlist.save() + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() # Create Task for p in sorted(Path(__file__).parent.glob('create_task_001.json')): payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist._id) - obj = Task(**payload) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) obj.save() # Cmd parameters setup @@ -65,13 +74,30 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") - # - hashcat = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd='/app/src/crackers/1/') + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd=str(Path(crackers_path, str(cracker_id)))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + chunk_resp = chunk.get_chunk(task.get_task()['taskId']) + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l?l?l --hash-type=0 ", + shell=True, + cwd=f"{Path(crackers_path, str(cracker_id))}/", + stderr=-2 + ) # Cleanup obj.delete() - hashlist.delete() + hashlist_v2.delete() if __name__ == '__main__': unittest.main() From 0864b6fa20bada24423bcb4659a4f5c3bfea4533 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 16:02:51 +0100 Subject: [PATCH 22/77] Adding benchmark test Fixing keyspace test --- tests/test_hashcat_cracker.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 5e32130..1964cd6 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -76,7 +76,7 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): # --version cracker = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd=str(Path(crackers_path, str(cracker_id)))) + mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd=f"{Path(crackers_path, str(cracker_id))}/") # --keyspace chunk = Chunk() @@ -95,6 +95,15 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): stderr=-2 ) + # benchmark + result = cracker.run_benchmark(task.get_task()) + mock_check_output.assert_called_with( + "'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable 
--session=hashtopolis -p \"\t\" '/app/src/hashlists/1' -a3 ?l?l?l?l?l?l --hash-type=0 -o '/app/src/hashlists/1.out'", + shell=True, + cwd=f"{Path(crackers_path, str(cracker_id))}/", + stderr=-2 + ) + # Cleanup obj.delete() hashlist_v2.delete() From 80b4845a008861b869b9a86d9dc884f37a4cc1c6 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 17:11:48 +0100 Subject: [PATCH 23/77] Adding test for real cracking Fixing other tests to match empty environment --- tests/create_task_001.json | 8 ++++---- tests/test_hashcat_cracker.py | 36 +++++++++++++++++++++++++++++++---- 2 files changed, 36 insertions(+), 8 deletions(-) diff --git a/tests/create_task_001.json b/tests/create_task_001.json index 147ebeb..7158cc4 100644 --- a/tests/create_task_001.json +++ b/tests/create_task_001.json @@ -1,5 +1,5 @@ { - "attackCmd": "#HL# -a 0 ad a1", + "attackCmd": "#HL# -a3 ?l?l?l?l", "chunkSize": 1000, "chunkTime": 600, "color": "7C6EFF", @@ -8,16 +8,16 @@ "forcePipe": true, "files": [], "isArchived": false, - "isCpuTask": false, + "isCpuTask": true, "isSmall": false, "maxAgents": 112, "notes": "example-note", - "preprocessorCommand": "this-is-prepressor", + "preprocessorCommand": "", "priority": 10, "skipKeyspace": 500, "staticChunks": 2, "statusTimer": 5, "taskName": "Example - Rijmen and Daemen", "useNewBench": true, - "usePreprocessor": true + "usePreprocessor": false } diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 1964cd6..93543ac 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -29,15 +29,17 @@ # test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) class HashcatCrackerTestLinux(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) @mock.patch('os.unlink', side_effect=os.unlink) @mock.patch('os.system', side_effect=os.system) - def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): + def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Popen): # Clean up cracker folder if os.path.exists('crackers/1'): shutil.rmtree('crackers/1') #TODO: Delete tasks / hashlist to ensure clean + #TODO: Verify setup agent # Setup session object session = Session(requests.Session()).s @@ -85,11 +87,14 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): hashlist = Hashlist() hashlist.load_hashlist(task.get_task()['hashlistId']) - chunk_resp = chunk.get_chunk(task.get_task()['taskId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + chunk.get_chunk(task.get_task()['taskId']) cracker.measure_keyspace(task, chunk) mock_check_output.assert_called_with( - "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l?l?l --hash-type=0 ", + "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", shell=True, cwd=f"{Path(crackers_path, str(cracker_id))}/", stderr=-2 @@ -98,12 +103,35 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output): # benchmark result = cracker.run_benchmark(task.get_task()) mock_check_output.assert_called_with( - "'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" 
'/app/src/hashlists/1' -a3 ?l?l?l?l?l?l --hash-type=0 -o '/app/src/hashlists/1.out'", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" '{Path(hashlists_path, str(hashlist_id))}' -a3 ?l?l?l?l --hash-type=0 -o '{Path(hashlists_path, str(hashlist_id))}.out'", shell=True, cwd=f"{Path(crackers_path, str(cracker_id))}/", stderr=-2 ) + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + mock_Popen.assert_called_with( + f"./hashcat.bin --machine-readable --quiet --status --restore-disable --session=hashtopolis --status-timer 5 --outfile-check-timer=5 --outfile-check-dir='{Path(zaps_path, zaps_dir)}' -o '{Path(hashlists_path, str(hashlist_id))}.out' --outfile-format=1,2,3,4 -p \"\t\" -s {skip} -l {limit} --potfile-disable --remove --remove-timer=5 '{Path(hashlists_path, str(hashlist_id))}' -a3 ?l?l?l?l --hash-type=0 ", + shell=True, + stdout=-1, + stderr=-1, + cwd=f"{Path(crackers_path, str(cracker_id))}/", + preexec_fn=mock.ANY + ) + # Cleanup obj.delete() hashlist_v2.delete() From 810f096b21c2ea8e3783cf8bd7c0c2e449173ff9 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 17:20:31 +0100 Subject: [PATCH 24/77] Adding README.md for testing --- tests/README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 tests/README.md diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..f77727d --- /dev/null +++ b/tests/README.md @@ -0,0 +1,14 @@ +# Testing +## Setup +Currently the testing of the agent is limited and a bit complicated. Once APIv2 is release the testing framework +can be extended. + +1. Start the development container for the server, make sure you use the branch: feature/apiv2 +2. Start the development container for the agent +3. Start the agent once to setup the config.json file +4. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode + +## Limitations +1. Only one environment can be tested at a time +2. Only works with APIv2 +3. No support yet for Github actions, waiting for release of APIv2 to prevent having to fix it again. \ No newline at end of file From f4c80a956bc4340ec62207f51b73a555f977bd08 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 17:26:51 +0100 Subject: [PATCH 25/77] Fixed incorrect file name for test requirements --- requirements-tests.yaml => requirements-tests.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename requirements-tests.yaml => requirements-tests.txt (100%) diff --git a/requirements-tests.yaml b/requirements-tests.txt similarity index 100% rename from requirements-tests.yaml rename to requirements-tests.txt From 418451f33edc12ff40b73f618a39ebb2f300828b Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 17:31:51 +0100 Subject: [PATCH 26/77] Fixed test document to make starting the test more clearer. 
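A note on the mocking style used in the agent tests above: each `mock.patch(..., side_effect=<the real function>)` decorator wraps the genuine call, so the cracker binary is really downloaded, extracted and executed while the test can still assert the exact command line that was issued. A minimal, self-contained sketch of that spy pattern (the `run_job` helper and its `echo` command are illustrative, not part of the agent):

import subprocess
import unittest
from unittest import mock


def run_job():
    # Stand-in for agent code that shells out to an external binary.
    return subprocess.check_output("echo hello", shell=True)


class SpyMockExample(unittest.TestCase):
    # side_effect points at the original function (captured before patching),
    # so the call still happens for real ...
    @mock.patch('subprocess.check_output', side_effect=subprocess.check_output)
    def test_spy(self, mock_check_output):
        output = run_job()
        self.assertEqual(output.strip(), b'hello')
        # ... and the wrapper records exactly how it was called.
        mock_check_output.assert_called_with("echo hello", shell=True)


if __name__ == '__main__':
    unittest.main()
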
--- tests/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/README.md b/tests/README.md index f77727d..1253156 100644 --- a/tests/README.md +++ b/tests/README.md @@ -5,7 +5,7 @@ can be extended. 1. Start the development container for the server, make sure you use the branch: feature/apiv2 2. Start the development container for the agent -3. Start the agent once to setup the config.json file +3. Start the agent once to setup the config.json file (Run -> Start Debugging) 4. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode ## Limitations From fbb2c9da68e37269cd2069a1bace3f561bac250c Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 8 Feb 2023 17:45:34 +0100 Subject: [PATCH 27/77] Fixed readme.md to inform about forcing cpu_only Fixed test to run on clean environment --- tests/README.md | 13 +++++++------ tests/test_hashcat_cracker.py | 3 +-- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/README.md b/tests/README.md index 1253156..be46f7c 100644 --- a/tests/README.md +++ b/tests/README.md @@ -3,12 +3,13 @@ Currently the testing of the agent is limited and a bit complicated. Once APIv2 is release the testing framework can be extended. -1. Start the development container for the server, make sure you use the branch: feature/apiv2 -2. Start the development container for the agent -3. Start the agent once to setup the config.json file (Run -> Start Debugging) -4. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode +1. Start the development container for the server, make sure you use the branch: feature/apiv2. +2. Start the development container for the agent. +3. Start the agent once to setup the config.json file (Run -> Start Debugging). +4. Set the agent to CPU only. +5. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode. ## Limitations -1. Only one environment can be tested at a time -2. Only works with APIv2 +1. Only one environment can be tested at a time. +2. Only works with APIv2. 3. No support yet for Github actions, waiting for release of APIv2 to prevent having to fix it again. 
\ No newline at end of file diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 93543ac..9a1c5b9 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -89,8 +89,6 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po hashlist.load_hashlist(task.get_task()['hashlistId']) hashlist_id = task.get_task()['hashlistId'] hashlists_path = config.get_value('hashlists-path') - - chunk.get_chunk(task.get_task()['taskId']) cracker.measure_keyspace(task, chunk) mock_check_output.assert_called_with( @@ -118,6 +116,7 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po req.execute() # cracking + chunk.get_chunk(task.get_task()['taskId']) cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) zaps_path = config.get_value('zaps-path') zaps_dir = f"hashlist_{hashlist_id}" From 71ce17ff1c10716411a8effb89735a3792be957c Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Thu, 9 Feb 2023 14:23:24 +0100 Subject: [PATCH 28/77] Adding small test for asserting benchmark result --- tests/test_hashcat_cracker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 9a1c5b9..ef9cf80 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -100,6 +100,7 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po # benchmark result = cracker.run_benchmark(task.get_task()) + assert result != 0 mock_check_output.assert_called_with( f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" '{Path(hashlists_path, str(hashlist_id))}' -a3 ?l?l?l?l --hash-type=0 -o '{Path(hashlists_path, str(hashlist_id))}.out'", shell=True, From e10bb17752eb3dbddb2439bb97d8af2b7b9bbe34 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Thu, 9 Feb 2023 14:25:21 +0100 Subject: [PATCH 29/77] Fixed network not existing when starting devcontainer agent --- .devcontainer/docker-compose.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 2e4d941..d89ea5c 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,4 +1,4 @@ -version: "3" +version: "3.5" services: hashtopolis-agent: container_name: hashtopolis_agent @@ -12,8 +12,10 @@ services: # This is where VS Code should expect to find your project's source code # and the value of "workspaceFolder" in .devcontainer/devcontainer.json - ..:/app/src + networks: + - hashtopolis_dev networks: - default: - external: - name: hashtopolis_dev + hashtopolis_dev: + # This network will also be used by the hashtopolis server and db + name: hashtopolis_dev \ No newline at end of file From 175f97ed9a2d766ec8cff01fe98d6194ed534297 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 10 Feb 2023 16:31:23 +0000 Subject: [PATCH 30/77] Adding clinfo in devcontainer. 
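For readers following along: the `Hashlist_v2` / `Task_v2` objects that the tests keep creating and tearing down come from the small Django-ORM-style APIv2 binding in `tests/hashtopolis.py` shown earlier in this series. A rough usage sketch of that fixture pattern, assuming a reachable APIv2 development server and the `hashtopolis-test` configuration the test suite already relies on:

import json
from pathlib import Path

from tests.hashtopolis import Hashlist, Task

# Load the hashlist payload shipped with the tests and push it to the server.
payload = json.loads(Path('tests/create_hashlist_001.json').read_text('UTF-8'))
hashlist = Hashlist(**payload)
hashlist.save()            # no _self yet, so this POSTs to /ui/hashlists

# Create a task that points at the freshly created hashlist.
task_payload = json.loads(Path('tests/create_task_001.json').read_text('UTF-8'))
task_payload['hashlistId'] = int(hashlist._id)
task = Task(**task_payload)
task.save()                # POSTs to /ui/tasks

# Changing an attribute and saving again sends a PATCH containing only the diff.
task.priority = 99
task.save()

# Tear down so the next test run starts from a clean server.
task.delete()
hashlist.delete()
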
--- .devcontainer/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index cdceaeb..c44810c 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -18,7 +18,7 @@ RUN apt-get update \ # Install Intel OpenCL Runtime RUN cd /tmp \ - && apt install wget lsb-core libnuma-dev pciutils -y \ + && apt install wget lsb-core libnuma-dev pciutils clinfo -y \ && wget http://registrationcenter-download.intel.com/akdlm/irc_nas/vcp/15532/l_opencl_p_18.1.0.015.tgz \ && tar xzvf l_opencl_p_18.1.0.015.tgz \ && cd l_opencl_p_18.1.0.015 \ From e82480a31ab819a6d1d795900ce46ad22c8379da Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 10 Feb 2023 16:31:58 +0000 Subject: [PATCH 31/77] Fixing devcontainer json to match the specification --- .devcontainer/devcontainer.json | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index ae72a42..144bb83 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -5,10 +5,14 @@ "workspaceFolder": "/app/src", "remoteEnv": { - "NODE_OPTIONS": "--use-openssl-ca", + "NODE_OPTIONS": "--use-openssl-ca" + }, + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python" + ] + } }, - "extensions": [ - "ms-python.python" - ], "remoteUser": "vscode" } From b97e3247591a04581737ad13510ea04eee20477d Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 17 Feb 2023 16:00:51 +0100 Subject: [PATCH 32/77] Add docker image for running tests on Windows --- .devcontainer/windows/Dockerfile | 20 +++++++++++ .devcontainer/windows/docker-compose.yml | 9 +++++ .devcontainer/windows/entrypoint.ps1 | 2 ++ .devcontainer/windows/fix-hosts.ps1 | 43 ++++++++++++++++++++++++ tests/README.md | 17 +++++++++- 5 files changed, 90 insertions(+), 1 deletion(-) create mode 100644 .devcontainer/windows/Dockerfile create mode 100644 .devcontainer/windows/docker-compose.yml create mode 100644 .devcontainer/windows/entrypoint.ps1 create mode 100644 .devcontainer/windows/fix-hosts.ps1 diff --git a/.devcontainer/windows/Dockerfile b/.devcontainer/windows/Dockerfile new file mode 100644 index 0000000..5a087b3 --- /dev/null +++ b/.devcontainer/windows/Dockerfile @@ -0,0 +1,20 @@ +FROM mcr.microsoft.com/windows-cssc/python3.7.2server:ltsc2022 +# Nano image doesn't work because some API are not available + +# TODO: Support for USER_CMD_PRE and POST? +# TODO: Create a vscode user? +# TODO: OpenCL/Nvidia? + +WORKDIR C:/App/ + +# Installing python requirements +COPY requirements.txt C:/App/ +COPY requirements-tests.txt C:/App/ +RUN pip3 install -r requirements.txt -r requirements-tests.txt + +# Fix for host.docker.internal not working +COPY .devcontainer/windows/entrypoint.ps1 C:/ +COPY .devcontainer/windows/fix-hosts.ps1 C:/ + +# Setting entrypoint +ENTRYPOINT "C:\entrypoint.ps1" diff --git a/.devcontainer/windows/docker-compose.yml b/.devcontainer/windows/docker-compose.yml new file mode 100644 index 0000000..e4d5feb --- /dev/null +++ b/.devcontainer/windows/docker-compose.yml @@ -0,0 +1,9 @@ +version: "3" +services: + hashtopolis-agent-windows: + container_name: hashtopolis_agent_windows + build: + context: ../.. 
+ dockerfile: .devcontainer/windows/Dockerfile + volumes: + - ../..:C:\App\ diff --git a/.devcontainer/windows/entrypoint.ps1 b/.devcontainer/windows/entrypoint.ps1 new file mode 100644 index 0000000..bca1149 --- /dev/null +++ b/.devcontainer/windows/entrypoint.ps1 @@ -0,0 +1,2 @@ +powershell C:\fix-hosts.ps1 +cmd /c ping -t localhost > $null \ No newline at end of file diff --git a/.devcontainer/windows/fix-hosts.ps1 b/.devcontainer/windows/fix-hosts.ps1 new file mode 100644 index 0000000..39439e3 --- /dev/null +++ b/.devcontainer/windows/fix-hosts.ps1 @@ -0,0 +1,43 @@ +# Source https://github.com/docker/for-win/issues/1976 +# Credits https://github.com/brunnotelma +$hostsFile = "C:\Windows\System32\drivers\etc\hosts" + +try { + $DnsEntries = @("host.docker.internal", "gateway.docker.internal") + # Tries resolving names for Docker + foreach ($Entry in $DnsEntries) { + # If any of the names are not resolved, throws an exception + Resolve-DnsName -Name $Entry -ErrorAction Stop + } + + # If it passes, means that DNS is already configured + Write-Host("DNS settings are already configured.") +} catch { + # Gets the gateway IP address, that is the Host's IP address in the Docker network + $ip = (ipconfig | where-object {$_ -match "Default Gateway"} | foreach-object{$_.Split(":")[1]}).Trim() + # Read the current content from Hosts file + $src = [System.IO.File]::ReadAllLines($hostsFile) + # Add the a new line after the content + $lines = $src += "" + + # Check the hosts file and write it in if its not there... + if((cat $hostsFile | Select-String -Pattern "host.docker.internal") -And (cat $hostsFile | Select-String -Pattern "gateway.docker.internal")) { + For ($i=0; $i -le $lines.length; $i++) { + if ($lines[$i].Contains("host.docker.internal")) + { + $lines[$i] = ("{0} host.docker.internal" -f $ip) + $lines[$i+1] = ("{0} gateway.docker.internal" -f $ip) + break + } + } + } else { + $lines = $lines += "# Added by Docker for Windows" + $lines = $lines += ("{0} host.docker.internal" -f $ip) + $lines = $lines += ("{0} gateway.docker.internal" -f $ip) + $lines = $lines += "# End of section" + } + # Writes the new content to the Hosts file + [System.IO.File]::WriteAllLines($hostsFile, [string[]]$lines) + + Write-Host("New DNS settings written successfully.") +} \ No newline at end of file diff --git a/tests/README.md b/tests/README.md index be46f7c..9c9b2ac 100644 --- a/tests/README.md +++ b/tests/README.md @@ -1,5 +1,5 @@ # Testing -## Setup +## Setup - Linux Currently the testing of the agent is limited and a bit complicated. Once APIv2 is release the testing framework can be extended. @@ -9,6 +9,21 @@ can be extended. 4. Set the agent to CPU only. 5. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode. +## Setup - Windows +Currently you cannot run the tests from a devcontainer on Windows, because vscode does not support devcontainers running on Windows containers. + +Only possible to run tests on a Windows platform + +1. Start the development container for the server, make sure you use the branch: feature/apiv2. +2. Git clone the repo into the Windows file system +3. Switch Docker Desktop to Windows Containers. Right click the Docker Desktop tray icon. Select 'Switch to Windows Containers' +4. Open powershell, cd to the agent folder `.devcontainer\windows` +5. `docker compose build` +6. `docker compose up` +7. Everything should install, now attach to the container `docker exec -it hashtopolis_agent_windows cmd.exe` +8. 
Start the agent once `python -d . --url http://host.docker.internal:8080/api/server.php --debug --voucher devvoucher` +9. Run the tests `python -m pytest` + ## Limitations 1. Only one environment can be tested at a time. 2. Only works with APIv2. From ce79b90f6d9e011eb5490411bfb19c6bf9a3cf8f Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 17 Feb 2023 16:11:28 +0100 Subject: [PATCH 33/77] Fix sync hashtopolis.py and hashlist.json with servery --- tests/create_hashlist_001.json | 34 ++++++++--------- tests/hashtopolis.py | 69 +++++++++++++++++++++++++++------- 2 files changed, 72 insertions(+), 31 deletions(-) diff --git a/tests/create_hashlist_001.json b/tests/create_hashlist_001.json index da2cb28..eba8d4f 100644 --- a/tests/create_hashlist_001.json +++ b/tests/create_hashlist_001.json @@ -1,19 +1,17 @@ { - "name": "Hashlist-md5sum-test123", - "hashTypeId": 1, - "format": 0, - "separator": ";", - "isSalted": false, - "isHexSalt": false, - "accessGroupId": 1, - "useBrain": false, - "brainFeatures": 3, - "notes": "gj", - "sourceType": "paste", - "sourceData": "Y2MwM2U3NDdhNmFmYmJjYmY4YmU3NjY4YWNmZWJlZTUK", - "hashCount": 0, - "cracked": 0, - "isArchived": false, - "isSecret": false - } - \ No newline at end of file + "name": "Hashlist-md5sum-test123", + "hashTypeId": 1, + "format": 0, + "separator": ";", + "isSalted": false, + "isHexSalt": false, + "accessGroupId": 1, + "useBrain": false, + "brainFeatures": 3, + "notes": "gj", + "sourceType": "paste", + "sourceData": "Y2MwM2U3NDdhNmFmYmJjYmY4YmU3NjY4YWNmZWJlZTUK", + "hashCount": 0, + "isArchived": false, + "isSecret": false +} diff --git a/tests/hashtopolis.py b/tests/hashtopolis.py index 99649b4..76f1a9c 100644 --- a/tests/hashtopolis.py +++ b/tests/hashtopolis.py @@ -81,15 +81,35 @@ def authenticate(self): } - def get_all(self): + def filter(self, filter): self.authenticate() - uri = self._api_endpoint + self._model_uri headers = self._headers - payload = {} + + filter_list = [] + cast = { + '__gt': '>', + '__gte': '>=', + '__lt': '<', + '__lte': '<=', + } + for k,v in filter.items(): + l = None + for k2,v2 in cast.items(): + if k.endswith(k2): + l = f'{k[:-len(k2)]}{v2}{v}' + break + # Default to equal assignment + if l == None: + l = f'{k}={v}' + filter_list.append(l) + + payload = {'filter': filter_list} r = requests.get(uri, headers=headers, data=json.dumps(payload)) - return r.json()['values'] + if r.status_code != 201: + logger.exception("Filter failed: %s", r.text) + return r.json().get('values') def patch_one(self, obj): if not obj.has_changed(): @@ -166,17 +186,9 @@ def all(cls): Retrieve all backend objects TODO: Make iterator supporting loading of objects via pages """ - # Get all objects - api_objs = cls.get_conn().get_all() + return cls.filter() - # Convert into class - objs = [] - for api_obj in api_objs: - new_obj = cls._model(**api_obj) - objs.append(new_obj) - return objs - @classmethod def patch(cls, obj): cls.get_conn().patch_one(obj) @@ -198,6 +210,25 @@ def get_first(cls): """ return cls.all()[0] + @classmethod + def get(cls, **kwargs): + objs = cls.filter(**kwargs) + assert(len(objs) == 1) + return objs[0] + + @classmethod + def filter(cls, **kwargs): + # Get all objects + api_objs = cls.get_conn().filter(kwargs) + + # Convert into class + objs = [] + if api_objs: + for api_obj in api_objs: + new_obj = cls._model(**api_obj) + objs.append(new_obj) + return objs + # Build Django ORM style 'ModelName.objects' interface class ModelBase(type): def __new__(cls, clsname, bases, attrs, uri=None, **kwargs): @@ -257,6 
+288,10 @@ def delete(self): def serialize(self): return [x for x in vars(self) if not x.startswith('_')] + @property + def id(self): + return self._id + class Task(Model, uri="/ui/tasks"): def __repr__(self): @@ -264,5 +299,13 @@ def __repr__(self): class Hashlist(Model, uri="/ui/hashlists"): + def __repr__(self): + return self._self + +class Cracker(Model, uri="/ui/crackers"): + def __repr__(self): + return self._self + +class CrackerType(Model, uri="/ui/crackertypes"): def __repr__(self): return self._self \ No newline at end of file From e5f4141af96161cbe003c644e93a6979def5b88f Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 17 Feb 2023 16:12:18 +0100 Subject: [PATCH 34/77] Add first binary test for windows --- tests/test_hashcat_cracker.py | 43 +++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index ef9cf80..2f8ed65 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -9,6 +9,7 @@ import json from pathlib import Path from argparse import Namespace +import sys from htpclient.hashcat_cracker import HashcatCracker from htpclient.binarydownload import BinaryDownload @@ -34,6 +35,8 @@ class HashcatCrackerTestLinux(unittest.TestCase): @mock.patch('os.unlink', side_effect=os.unlink) @mock.patch('os.system', side_effect=os.system) def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return # Clean up cracker folder if os.path.exists('crackers/1'): shutil.rmtree('crackers/1') @@ -136,5 +139,45 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po obj.delete() hashlist_v2.delete() +class HashcatCrackerTestWindows(unittest.TestCase): + def test_correct_flow(self): + if sys.platform != 'win32': + return + + # Clean up cracker folder + if os.path.exists('crackers/1'): + shutil.rmtree('crackers/1') + + #TODO: Delete tasks / hashlist to ensure clean + #TODO: Verify setup agent + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + for p in sorted(Path(__file__).parent.glob('create_task_001.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + if __name__ == '__main__': unittest.main() From dcaf03a11a807ddbce031cab1a3f09bba295a0bc Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Sun, 19 Feb 2023 20:32:48 +0100 Subject: [PATCH 35/77] Add tests for windows up to the benchmarking --- tests/README.md | 24 +++++++++++- tests/test_hashcat_cracker.py | 73 ++++++++++++++++++++++++++++++++++- 2 files changed, 94 insertions(+), 3 deletions(-) diff --git a/tests/README.md 
b/tests/README.md index 9c9b2ac..b4bb2b4 100644 --- a/tests/README.md +++ b/tests/README.md @@ -9,10 +9,11 @@ can be extended. 4. Set the agent to CPU only. 5. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode. -## Setup - Windows +## Setup - Windows - Docker Currently you cannot run the tests from a devcontainer on Windows, because vscode does not support devcontainers running on Windows containers. -Only possible to run tests on a Windows platform +Only possible to run tests on a Windows platform. +Does not really allow to run tests windows hashcat, no GPU support 1. Start the development container for the server, make sure you use the branch: feature/apiv2. 2. Git clone the repo into the Windows file system @@ -24,6 +25,25 @@ Only possible to run tests on a Windows platform 8. Start the agent once `python -d . --url http://host.docker.internal:8080/api/server.php --debug --voucher devvoucher` 9. Run the tests `python -m pytest` +## Setup - Windows - locally + +Requires some OpenCL device for example a GPU to run tests. + +1. Start the development container for the server, make sure you use the branch: feature/apiv2. +2. Git clone the repo into the Windows file system +3. Install Python3.10 through https://www.python.org/downloads/ (as Admin/systemwide) + add python to path +4. Install requirements-test.txt and requirements.txt `pip3 install -r .\requirements-tests.txt -r .\requirements.txt` +5. Run VSCode install Python extension +6. Run agent once `python -d . --url http://127.0.0.1:8080/api/server.php --debug --voucher devvoucher` +7. You should be able to run the tests with `python3 -m pytest` or run them directly from 'Testing' in VSCode. + +## Debugging + +1. Clear the who database through Config -> Server -> Delete all +2. Check if the agent is still active +3. Clear the agent folder +4. Check if the agent is marked CPU only + ## Limitations 1. Only one environment can be tested at a time. 2. Only works with APIv2. 
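The Windows flow below asserts list-form commands (no `shell=True`) built from `pathlib.Path` objects, where the earlier Linux assertions used hand-built shell strings; patch 36 then moves the agent itself in that direction. A minimal sketch of the two styles, with an illustrative crackers directory and binary name (the subprocess calls are left commented out because they need the real binary):

import subprocess
from pathlib import Path

crackers_path = Path('crackers')          # illustrative base directory
cracker_id = 1
executable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin')

# Shell-string style: the shell parses the quoting, and path separators have to
# be fixed up per OS ('/' vs '\\').
cmd_str = f"'./{executable_path.name}' --version"
print(cmd_str)
# subprocess.check_output(cmd_str, shell=True, cwd=str(executable_path.parent))

# List style: no shell involved, Path renders the right separators on both
# platforms, and a test can assert the exact argument vector.
cmd_list = [str(executable_path), '--version']
print(cmd_list)
# subprocess.check_output(cmd_list, cwd=executable_path.parent)
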
diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 2f8ed65..d4c50e6 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -140,7 +140,11 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po hashlist_v2.delete() class HashcatCrackerTestWindows(unittest.TestCase): - def test_correct_flow(self): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Popen): if sys.platform != 'win32': return @@ -179,5 +183,72 @@ def test_correct_flow(self): binaryDownload = BinaryDownload(test_args) binaryDownload.check_version(cracker_id) + cracker_zip = Path(crackers_path, f'{cracker_id}.7z') + crackers_temp = Path(crackers_path, 'temp') + zip_binary = '7zr.exe' + mock_unlink.assert_called_with(cracker_zip) + + mock_system.assert_called_with(f'{zip_binary} x -o"{crackers_temp}" "{cracker_zip}"') + + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.exe') + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + + full_cmd = [str(executeable_path), '--keyspace', '--quiet', '-a3', '?l?l?l?l', '--hash-type=0'] + mock_check_output.assert_called_with( + full_cmd, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + hashlist_path = Path(hashlists_path, str(hashlist_id)) + hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + [ + str(executeable_path), + '--machine-readable', + '--quiet', + '--progress-only', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + str(hashlist_path), + '-a3', + '?l?l?l?l', + '--hash-type=0', + '-o', + str(hashlist_out_path) + ], + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + if __name__ == '__main__': unittest.main() From b1481836015401ac61f8996e5b9e57aedea1d4f9 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Sun, 19 Feb 2023 20:34:39 +0100 Subject: [PATCH 36/77] Fix windows path fix up till benchmarking --- htpclient/binarydownload.py | 30 ++++++-- htpclient/hashcat_cracker.py | 145 ++++++++++++++++++++++++----------- htpclient/helpers.py | 20 ++--- 3 files changed, 135 insertions(+), 60 deletions(-) diff --git a/htpclient/binarydownload.py b/htpclient/binarydownload.py index c336392..fcb7d04 100644 --- a/htpclient/binarydownload.py +++ b/htpclient/binarydownload.py @@ -1,5 +1,6 @@ import logging import os.path +from pathlib import Path import stat import sys from time import sleep @@ -173,7 +174,7 @@ def check_preprocessor(self, task): 
return True def check_version(self, cracker_id): - path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" + path = Path(self.config.get_value('crackers-path'), str(cracker_id)) query = copy_and_set_token(dict_downloadBinary, self.config.get_value('token')) query['type'] = 'cracker' query['binaryVersionId'] = cracker_id @@ -195,15 +196,28 @@ def check_version(self, cracker_id): logging.error("Download of cracker binary failed!") sleep(5) return False + + # we need to extract the 7zip + temp_folder = Path(self.config.get_value('crackers-path'), 'temp') + zip_file = Path(self.config.get_value('crackers-path'), f'{cracker_id}.7z') + if Initialize.get_os() == 1: - os.system("7zr" + Initialize.get_os_extension() + " x -o'" + self.config.get_value('crackers-path') + "/temp' '" + self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z'") + # Windows + cmd = f'7zr{Initialize.get_os_extension()} x -o"{temp_folder}" "{zip_file}"' else: - os.system("./7zr" + Initialize.get_os_extension() + " x -o'" + self.config.get_value('crackers-path') + "/temp' '" + self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z'") - os.unlink(self.config.get_value('crackers-path') + "/" + str(cracker_id) + ".7z") - for name in os.listdir(self.config.get_value('crackers-path') + "/temp"): - if os.path.isdir(self.config.get_value('crackers-path') + "/temp/" + name): - os.rename(self.config.get_value('crackers-path') + "/temp/" + name, self.config.get_value('crackers-path') + "/" + str(cracker_id)) + # Linux + cmd = f"./7zr{Initialize.get_os_extension()} x -o'{temp_folder}' '{zip_file}'" + os.system(cmd) + + # Clean up 7zip + os.unlink(zip_file) + + # Workaround for a 7zip containing a folder name or already the contents of a cracker + for name in os.listdir(temp_folder): + to_check_path = Path(temp_folder, name) + if os.path.isdir(to_check_path): + os.rename(to_check_path, path) else: - os.rename(self.config.get_value('crackers-path') + "/temp", self.config.get_value('crackers-path') + "/" + str(cracker_id)) + os.rename(temp_folder, path) break return True diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 37a1f2e..cdadeac 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -2,6 +2,7 @@ import logging import subprocess import psutil +from pathlib import Path from time import sleep from queue import Queue, Empty from threading import Thread, Lock @@ -26,25 +27,25 @@ def __init__(self, cracker_id, binary_download): self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + "." + self.executable_name[k + 1:] - self.cracker_path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" - self.callPath = self.executable_name - if Initialize.get_os() != 1: - self.callPath = "./" + self.callPath + self.cracker_path = Path(self.config.get_value('crackers-path'), str(cracker_id)) - if not os.path.isfile(self.cracker_path + self.callPath): # in case it's not the new hashcat filename, try the old one (hashcat.) + self.executable_path = Path(self.cracker_path, self.executable_name) + if not os.path.isfile(self.executable_path): # in case it's not the new hashcat filename, try the old one (hashcat.) self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + get_bit() + "." 
+ self.executable_name[k + 1:] - self.cracker_path = self.config.get_value('crackers-path') + "/" + str(cracker_id) + "/" - self.callPath = self.executable_name - if Initialize.get_os() != 1: - self.callPath = "./" + self.callPath + + self.callPath = self.executable_name + # Not windows + # TODO: Maybe remove? + if Initialize.get_os() != 1: + self.callPath = f"./{self.callPath}" - cmd = "'" + self.callPath + "' --version" - output = '' + cmd = [str(self.executable_path), "--version"] + try: - logging.debug("CALL: " + cmd) - output = subprocess.check_output(cmd, shell=True, cwd=self.cracker_path) + logging.debug(f"CALL: {''.join(cmd)}") + output = subprocess.check_output(cmd, cwd=self.cracker_path) except subprocess.CalledProcessError as e: logging.error("Error during version detection: " + str(e)) sleep(5) @@ -378,15 +379,22 @@ def measure_keyspace(self, task, chunk): elif 'usePreprocessor' in task.get_task() and task.get_task()['usePreprocessor']: return self.preprocessor_keyspace(task, chunk) task = task.get_task() # TODO: refactor this to be better code - full_cmd = f"'{self.callPath}'" + " --keyspace --quiet " + update_files(task['attackcmd']).replace(task['hashlistAlias'] + " ", "") + ' ' + task['cmdpars'] + files = update_files(task['attackcmd']) + + if task['hashlistAlias'] in files: + files.remove(task['hashlistAlias']) + # full_cmd = f"'{self.callPath}'" + " --keyspace --quiet " + update_files(task['attackcmd']).replace(task['hashlistAlias'] + " ", "") + ' ' + task['cmdpars'] + full_cmd = [str(self.executable_path), '--keyspace', '--quiet'] + full_cmd.extend(files) + full_cmd.append(task['cmdpars'].strip()) + if 'useBrain' in task and task['useBrain']: - full_cmd += " -S" - if Initialize.get_os() == 1: - full_cmd = full_cmd.replace("/", '\\') + full_cmd.append + output = b'' try: - logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd=self.cracker_path, stderr=subprocess.STDOUT) + logging.debug(f"CALL: {''.join(full_cmd)}") + output = subprocess.check_output(full_cmd, cwd=self.cracker_path, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logging.error("Error during keyspace measure: " + str(e) + " Output: " + output.decode(encoding='utf-8')) send_error("Keyspace measure failed!", self.config.get_value('token'), task['taskId'], None) @@ -473,16 +481,34 @@ def run_benchmark(self, task): if task['benchType'] == 'speed': # do a speed benchmark return self.run_speed_benchmark(task) - - args = " --machine-readable --quiet --runtime=" + str(task['bench']) - args += " --restore-disable --potfile-disable --session=hashtopolis -p \"" + str(chr(9)) + "\" " - args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] - args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out'" - full_cmd = f"'{self.callPath}'" + args - if Initialize.get_os() == 1: - full_cmd = full_cmd.replace("/", '\\') - logging.debug("CALL: " + full_cmd) - proc = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) + args = [] + args.append('--machine-readable') + args.append('--quiet') + args.append(f"--runtime={task['bench']}") + + args.append('--restore-disable') + args.append('--potfile-disable') + args.append('--session=hashtopolis') + args.append('-p') + args.append(f'"{chr(9)}"') + + files = update_files(task['attackcmd']) + + if 
task['hashlistAlias'] in files: + files.remove(task['hashlistAlias']) + + hashlist_path = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) + hashlist_out_path = Path(self.config.get_value('hashlists-path'), f"{str(task['hashlistId'])}.out") + args.append(str(hashlist_path)) + args.append(task['cmdpars'].strip()) + args.append('-o') + args.append(str(hashlist_out_path)) + + full_cmd = [str(self.executable_path)] + full_cmd.extend(args) + + logging.debug(f"CALL: {''.join(full_cmd)}") + proc = subprocess.Popen(full_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) output, error = proc.communicate() logging.debug("started benchmark") proc.wait() # wait until done @@ -523,25 +549,58 @@ def stream_watcher(self, identifier, stream): stream.close() def run_speed_benchmark(self, task): - args = " --machine-readable --quiet --progress-only" - args += " --restore-disable --potfile-disable --session=hashtopolis -p \"" + str(chr(9)) + "\" " + args = [] + args.append('--machine-readable') + args.append('--quiet') + args.append('--progress-only') + + args.append('--restore-disable') + args.append('--potfile-disable') + args.append('--session=hashtopolis') + args.append('-p') + args.append('0x09') + + files = update_files(task['attackcmd']) + + if task['hashlistAlias'] in files: + files.remove(task['hashlistAlias']) + + hashlist_path = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) + hashlist_out_path = Path(self.config.get_value('hashlists-path'), f"{str(task['hashlistId'])}.out") + if 'usePrince' in task and task['usePrince']: - args += get_rules_and_hl(update_files(task['attackcmd']), task['hashlistAlias']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") - args += " example.dict" + ' ' + task['cmdpars'] + attackcmd = get_rules_and_hl(update_files(task['attackcmd'])) + # Replace #HL# with the real hashlist + attackcmd[:] = [str(hashlist_path) if x== task['hashlistAlias'] else x for x in attackcmd] + + args.extend(attackcmd) + + # This dict is purely used for benchmarking with prince + args.append('example.dict') + args.append(task['cmdpars'].strip()) else: - args += update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] + attackcmd = update_files(task['attackcmd']) + + # Replace #HL# with the real hashlist + attackcmd[:] = [str(hashlist_path) if x== task['hashlistAlias'] else x for x in attackcmd] + + args.extend(attackcmd) + args.append(task['cmdpars'].strip()) if 'usePreprocessor' in task and task['usePreprocessor']: - args += " example.dict" + args.append('example.dict') if 'useBrain' in task and task['useBrain']: - args += " -S" - args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out'" - full_cmd = f"'{self.callPath}'" + args + args.append('-S') + + args.append('-o') + args.append(str(hashlist_out_path)) + + full_cmd = [str(self.executable_path)] + full_cmd.extend(args) + output = b'' - if Initialize.get_os() == 1: - full_cmd = full_cmd.replace("/", '\\') try: - logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd=self.cracker_path, stderr=subprocess.STDOUT) + logging.debug(f"CALL: {''.join(full_cmd)}") + output = subprocess.check_output(full_cmd, cwd=self.cracker_path, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logging.error("Error during 
speed benchmark, return code: " + str(e.returncode) + " Output: " + output.decode(encoding='utf-8')) send_error("Speed benchmark failed!", self.config.get_value('token'), task['taskId'], None) @@ -600,7 +659,7 @@ def run_health_check(self, attack, hashlist_alias): full_cmd = f"'{self.callPath}'" + args if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') - logging.debug("CALL: " + full_cmd) + logging.debug(f"CALL: {''.join(full_cmd)}") proc = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) output, error = proc.communicate() logging.debug("Started health check attack") diff --git a/htpclient/helpers.py b/htpclient/helpers.py index cd5efe9..a3737fd 100644 --- a/htpclient/helpers.py +++ b/htpclient/helpers.py @@ -5,6 +5,7 @@ import logging import time from types import MappingProxyType +from pathlib import Path import os import subprocess @@ -88,13 +89,13 @@ def get_wordlist(command): return '' -def get_rules_and_hl(command, alias): - split = clean_list(command.split(" ")) +def get_rules_and_hl(command_list, alias): + # split = clean_list(command.split(" ")) rules = [] - for index, part in enumerate(split): - if index > 0 and (split[index - 1] == '-r' or split[index - 1] == '--rules-file'): - rules.append(split[index - 1]) - rules.append(split[index - 0]) + for index, part in enumerate(command_list): + if index > 0 and (command_list[index - 1] == '-r' or command_list[index - 1] == '--rules-file'): + rules.append(command_list[index - 1]) + rules.append(command_list[index - 0]) if part == alias: rules.append(part) return " ".join(rules) @@ -120,12 +121,13 @@ def update_files(command, prince=False): # test if file exists if not part: continue - path = config.get_value('files-path') + "/" + part + path = Path(config.get_value('files-path'), part) + if os.path.exists(path): - ret.append(f"'{path}'") + ret.append(path) else: ret.append(part) - return " %s " % " ".join(ret) + return ret def escape_ansi(line): From 6c7e164c7635625c1f2d41c10a52e2f7840cf7d7 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 20 Feb 2023 12:24:53 +0100 Subject: [PATCH 37/77] Fix rewrote with shell=true but using lists Merge windows agents tests --- htpclient/hashcat_cracker.py | 86 +++++++++++++++++++---------------- htpclient/helpers.py | 19 ++++---- tests/hashtopolis-test.yaml | 2 +- tests/test_hashcat_cracker.py | 43 ++++++++++-------- 4 files changed, 83 insertions(+), 67 deletions(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index cdadeac..8bffb31 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -380,21 +380,24 @@ def measure_keyspace(self, task, chunk): return self.preprocessor_keyspace(task, chunk) task = task.get_task() # TODO: refactor this to be better code files = update_files(task['attackcmd']) + files = files.replace(task['hashlistAlias'] + " ", "") + + if Initialize.get_os() == 1: + # Windows + full_cmd = f'"{self.callPath}"' + else: + # Linux / Mac + full_cmd = f"'{self.callPath}'" + + full_cmd = f"{full_cmd} --keyspace --quiet {files} {task['cmdpars']}" - if task['hashlistAlias'] in files: - files.remove(task['hashlistAlias']) - # full_cmd = f"'{self.callPath}'" + " --keyspace --quiet " + update_files(task['attackcmd']).replace(task['hashlistAlias'] + " ", "") + ' ' + task['cmdpars'] - full_cmd = [str(self.executable_path), '--keyspace', '--quiet'] - full_cmd.extend(files) - full_cmd.append(task['cmdpars'].strip()) - if 'useBrain' in task and task['useBrain']: - 
full_cmd.append + full_cmd = f"{full_cmd} -S" output = b'' try: - logging.debug(f"CALL: {''.join(full_cmd)}") - output = subprocess.check_output(full_cmd, cwd=self.cracker_path, stderr=subprocess.STDOUT) + logging.debug(f"CALL: {full_cmd}") + output = subprocess.check_output(full_cmd, shell=True, cwd=self.cracker_path, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logging.error("Error during keyspace measure: " + str(e) + " Output: " + output.decode(encoding='utf-8')) send_error("Keyspace measure failed!", self.config.get_value('token'), task['taskId'], None) @@ -490,25 +493,31 @@ def run_benchmark(self, task): args.append('--potfile-disable') args.append('--session=hashtopolis') args.append('-p') - args.append(f'"{chr(9)}"') - - files = update_files(task['attackcmd']) + args.append('0x09') - if task['hashlistAlias'] in files: - files.remove(task['hashlistAlias']) + hashlist_path = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) hashlist_out_path = Path(self.config.get_value('hashlists-path'), f"{str(task['hashlistId'])}.out") - args.append(str(hashlist_path)) - args.append(task['cmdpars'].strip()) + + files = update_files(task['attackcmd']) + files = files.replace(task['hashlistAlias'], f'"{hashlist_path}"') + + args.append(task['cmdpars']) args.append('-o') - args.append(str(hashlist_out_path)) + args.append(f'"{hashlist_out_path}"') - full_cmd = [str(self.executable_path)] - full_cmd.extend(args) - - logging.debug(f"CALL: {''.join(full_cmd)}") - proc = subprocess.Popen(full_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) + full_cmd = ' '.join(args) + + if Initialize.get_os() == 1: + # Windows + full_cmd = f'"{self.callPath}" {full_cmd}' + else: + # Linux / Mac + full_cmd = f"'{self.callPath}' {full_cmd}" + + logging.debug(f"CALL: {full_cmd}") + proc = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) output, error = proc.communicate() logging.debug("started benchmark") proc.wait() # wait until done @@ -559,11 +568,6 @@ def run_speed_benchmark(self, task): args.append('--session=hashtopolis') args.append('-p') args.append('0x09') - - files = update_files(task['attackcmd']) - - if task['hashlistAlias'] in files: - files.remove(task['hashlistAlias']) hashlist_path = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) hashlist_out_path = Path(self.config.get_value('hashlists-path'), f"{str(task['hashlistId'])}.out") @@ -571,36 +575,42 @@ def run_speed_benchmark(self, task): if 'usePrince' in task and task['usePrince']: attackcmd = get_rules_and_hl(update_files(task['attackcmd'])) # Replace #HL# with the real hashlist - attackcmd[:] = [str(hashlist_path) if x== task['hashlistAlias'] else x for x in attackcmd] + attackcmd = attackcmd.replace(task['hashlistAlias'], f'"{hashlist_path}"') - args.extend(attackcmd) + args.append(attackcmd) # This dict is purely used for benchmarking with prince args.append('example.dict') - args.append(task['cmdpars'].strip()) + args.append(task['cmdpars']) else: attackcmd = update_files(task['attackcmd']) # Replace #HL# with the real hashlist - attackcmd[:] = [str(hashlist_path) if x== task['hashlistAlias'] else x for x in attackcmd] + attackcmd = attackcmd.replace(task['hashlistAlias'], f'"{hashlist_path}"') - args.extend(attackcmd) - args.append(task['cmdpars'].strip()) + args.append(attackcmd) + args.append(task['cmdpars']) if 'usePreprocessor' in task and task['usePreprocessor']: args.append('example.dict') 
if 'useBrain' in task and task['useBrain']: args.append('-S') args.append('-o') - args.append(str(hashlist_out_path)) + args.append(f'"{hashlist_out_path}"') - full_cmd = [str(self.executable_path)] - full_cmd.extend(args) + full_cmd = ' '.join(args) + + if Initialize.get_os() == 1: + # Windows + full_cmd = f'"{self.callPath}" {full_cmd}' + else: + # Linux / Mac + full_cmd = f"'{self.callPath}' {full_cmd}" output = b'' try: logging.debug(f"CALL: {''.join(full_cmd)}") - output = subprocess.check_output(full_cmd, cwd=self.cracker_path, stderr=subprocess.STDOUT) + output = subprocess.check_output(full_cmd, shell=True, cwd=self.cracker_path, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: logging.error("Error during speed benchmark, return code: " + str(e.returncode) + " Output: " + output.decode(encoding='utf-8')) send_error("Speed benchmark failed!", self.config.get_value('token'), task['taskId'], None) diff --git a/htpclient/helpers.py b/htpclient/helpers.py index a3737fd..a622d3f 100644 --- a/htpclient/helpers.py +++ b/htpclient/helpers.py @@ -89,13 +89,13 @@ def get_wordlist(command): return '' -def get_rules_and_hl(command_list, alias): - # split = clean_list(command.split(" ")) +def get_rules_and_hl(command, alias): + split = clean_list(command.split(" ")) rules = [] - for index, part in enumerate(command_list): - if index > 0 and (command_list[index - 1] == '-r' or command_list[index - 1] == '--rules-file'): - rules.append(command_list[index - 1]) - rules.append(command_list[index - 0]) + for index, part in enumerate(split): + if index > 0 and (split[index - 1] == '-r' or split[index - 1] == '--rules-file'): + rules.append(split[index - 1]) + rules.append(split[index - 0]) if part == alias: rules.append(part) return " ".join(rules) @@ -122,12 +122,11 @@ def update_files(command, prince=False): if not part: continue path = Path(config.get_value('files-path'), part) - if os.path.exists(path): - ret.append(path) + ret.append(f"'{path}'") else: - ret.append(part) - return ret + ret.append(str(part)) + return " %s " % " ".join(ret) def escape_ansi(line): diff --git a/tests/hashtopolis-test.yaml b/tests/hashtopolis-test.yaml index f763abb..633af15 100644 --- a/tests/hashtopolis-test.yaml +++ b/tests/hashtopolis-test.yaml @@ -1,3 +1,3 @@ -hashtopolis_uri: 'http://hashtopolis:80' +hashtopolis_uri: 'http://127.0.0.1:8080' username: 'root' password: 'hashtopolis' diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index d4c50e6..8b5d436 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -208,9 +208,10 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po cracker.measure_keyspace(task, chunk) - full_cmd = [str(executeable_path), '--keyspace', '--quiet', '-a3', '?l?l?l?l', '--hash-type=0'] + full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' mock_check_output.assert_called_with( full_cmd, + shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 ) @@ -220,24 +221,30 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') result = cracker.run_benchmark(task.get_task()) assert result != 0 + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--progress-only', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + f' "{hashlist_path}"', + '-a3', + '?l?l?l?l', + ' --hash-type=0 ', + '-o', + f'"{hashlist_out_path}"' + ] 
+ + full_cmd = ' '.join(full_cmd) + mock_check_output.assert_called_with( - [ - str(executeable_path), - '--machine-readable', - '--quiet', - '--progress-only', - '--restore-disable', - '--potfile-disable', - '--session=hashtopolis', - '-p', - '0x09', - str(hashlist_path), - '-a3', - '?l?l?l?l', - '--hash-type=0', - '-o', - str(hashlist_out_path) - ], + full_cmd, + shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 ) From 63f6f3fef47a8a7bb3311228fd34ba6d738ce634 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 20 Feb 2023 14:07:18 +0100 Subject: [PATCH 38/77] Fix tests for Linux again --- tests/test_hashcat_cracker.py | 43 ++++++++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 8 deletions(-) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 8b5d436..67e3415 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -68,20 +68,22 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po cracker_id = 1 config = Config() crackers_path = config.get_value('crackers-path') - + + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + binaryDownload = BinaryDownload(test_args) binaryDownload.check_version(cracker_id) cracker_zip = Path(crackers_path, f'{cracker_id}.7z') crackers_temp = Path(crackers_path, 'temp') zip_binary = './7zr' - mock_unlink.assert_called_with(str(cracker_zip)) + mock_unlink.assert_called_with(cracker_zip) mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") # --version cracker = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with("'./hashcat.bin' --version", shell=True, cwd=f"{Path(crackers_path, str(cracker_id))}/") + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) # --keyspace chunk = Chunk() @@ -97,7 +99,7 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po mock_check_output.assert_called_with( "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", shell=True, - cwd=f"{Path(crackers_path, str(cracker_id))}/", + cwd=Path(crackers_path, str(cracker_id)), stderr=-2 ) @@ -105,9 +107,9 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" '{Path(hashlists_path, str(hashlist_id))}' -a3 ?l?l?l?l --hash-type=0 -o '{Path(hashlists_path, str(hashlist_id))}.out'", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, - cwd=f"{Path(crackers_path, str(cracker_id))}/", + cwd=Path(crackers_path, str(cracker_id)), stderr=-2 ) @@ -126,12 +128,37 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po zaps_dir = f"hashlist_{hashlist_id}" skip = str(chunk.chunk_data()['skip']) limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + './hashcat.bin', + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, 
str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + ' -a3 ?l?l?l?l ', + '--hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + mock_Popen.assert_called_with( - f"./hashcat.bin --machine-readable --quiet --status --restore-disable --session=hashtopolis --status-timer 5 --outfile-check-timer=5 --outfile-check-dir='{Path(zaps_path, zaps_dir)}' -o '{Path(hashlists_path, str(hashlist_id))}.out' --outfile-format=1,2,3,4 -p \"\t\" -s {skip} -l {limit} --potfile-disable --remove --remove-timer=5 '{Path(hashlists_path, str(hashlist_id))}' -a3 ?l?l?l?l --hash-type=0 ", + full_cmd, shell=True, stdout=-1, stderr=-1, - cwd=f"{Path(crackers_path, str(cracker_id))}/", + cwd=Path(crackers_path, str(cracker_id)), preexec_fn=mock.ANY ) From 9d0d777c0765c22dd9f89cf9b5b76202397e3834 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 20 Feb 2023 14:48:26 +0100 Subject: [PATCH 39/77] Fix cracking on windows --- htpclient/hashcat_cracker.py | 123 ++++++++++++++++++++-------------- tests/test_hashcat_cracker.py | 44 ++++++++++++ 2 files changed, 115 insertions(+), 52 deletions(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 8bffb31..b31f2de 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -34,12 +34,13 @@ def __init__(self, cracker_id, binary_download): self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + get_bit() + "." + self.executable_name[k + 1:] - - self.callPath = self.executable_name - # Not windows - # TODO: Maybe remove? - if Initialize.get_os() != 1: - self.callPath = f"./{self.callPath}" + + if Initialize.get_os() == 1: + # Windows + self.callPath = f'"{self.executable_name}"' + else: + # Linux / Mac + self.callPath = f"./{self.executable_name}" cmd = [str(self.executable_path), "--version"] @@ -84,25 +85,52 @@ def get_outfile_format(self): return "1,2,3,4" # new outfile format def build_command(self, task, chunk): - args = " --machine-readable --quiet --status --restore-disable --session=hashtopolis" - args += " --status-timer " + str(task['statustimer']) - args += " --outfile-check-timer=" + str(task['statustimer']) - args += " --outfile-check-dir='" + self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId']) + "'" - args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out' --outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" - args += " -s " + str(chunk['skip']) - args += " -l " + str(chunk['length']) + args = [] + + zaps_file = Path(self.config.get_value('zaps-path'), f"hashlist_{task['hashlistId']}") + output_file = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}.out") + hashlist_file = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) + args.append('--machine-readable') + args.append('--quiet') + args.append('--status') + args.append('--restore-disable') + args.append('--session=hashtopolis') + args.append(f"--status-timer {task['statustimer']}") + args.append(f"--outfile-check-timer={task['statustimer']}") + args.append(f'--outfile-check-dir="{zaps_file}"') + args.append(f'-o "{output_file}"') + args.append(f'--outfile-format={self.get_outfile_format()}') + args.append('-p 0x09') + args.append(f"-s {chunk['skip']}") + args.append(f"-l 
{chunk['length']}") + if 'useBrain' in task and task['useBrain']: # when using brain we set the according parameters - args += " --brain-client --brain-host " + task['brainHost'] - args += " --brain-port " + str(task['brainPort']) - args += " --brain-password " + task['brainPass'] + args.append('--brain-client') + args.append(f"--brain-host {task['brainHost']}") + args.append(f"--brain-port {task['brainPort']}") + args.append(f"--brain-password {task['brainPass']}") + if 'brainFeatures' in task: - args += " --brain-client-features " + str(task['brainFeatures']) + args.append(f"--brain-client-features {task['brainFeatures']}") else: # remove should only be used if we run without brain - args += " --potfile-disable --remove --remove-timer=" + str(task['statustimer']) - args += " " + update_files(task['attackcmd']).replace(task['hashlistAlias'], "'" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "' ") + task['cmdpars'] - if args.find(" -S") != -1: + args.append('--potfile-disable') + args.append('--remove') + args.append(f"--remove-timer={task['statustimer']}") + + files = update_files(task['attackcmd']) + files = files.replace(task['hashlistAlias'], f'"{hashlist_file}"') + args.append(files) + args.append(task['cmdpars']) + + + + full_cmd = ' '.join(args) + full_cmd = f'{self.callPath} {full_cmd}' + + if ' -S ' in full_cmd: self.uses_slow_hash_flag = True - return self.callPath + args + + return full_cmd def build_pipe_command(self, task, chunk): # call the command with piping @@ -180,17 +208,24 @@ def run_chunk(self, task, chunk, preprocessor): full_cmd = self.build_command(task, chunk) self.statusCount = 0 self.wasStopped = False - if Initialize.get_os() == 1: - full_cmd = full_cmd.replace("/", '\\') + + # Set paths + outfile_path = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}.out") + outfile_backup_path = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}_{time.time()}.out") + zapfile_path = Path(self.config.get_value('zaps-path'), f"/hashlist_{task['hashlistId']}") + # clear old found file - earlier we deleted them, but just in case, we just move it to a unique filename if configured so - if os.path.exists(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out"): + if os.path.exists(outfile_path): if self.config.get_value('outfile-history'): - os.rename(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out", self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "_" + str(time.time()) + ".out") + os.rename(outfile_path, outfile_backup_path) else: - os.unlink(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out") + os.unlink(outfile_path) + # create zap folder - if not os.path.exists(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId'])): - os.mkdir(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId'])) + if not os.path.exists(zapfile_path): + os.mkdir(zapfile_path) + + # Call command logging.debug("CALL: " + full_cmd) if Initialize.get_os() != 1: process = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path, preexec_fn=os.setsid) @@ -200,7 +235,7 @@ def run_chunk(self, task, chunk, preprocessor): logging.debug("started cracking") out_thread = Thread(target=self.stream_watcher, name='stdout-watcher', args=('OUT', process.stdout)) err_thread = Thread(target=self.stream_watcher, name='stderr-watcher', args=('ERR', 
process.stderr)) - crk_thread = Thread(target=self.output_watcher, name='crack-watcher', args=(self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out", process)) + crk_thread = Thread(target=self.output_watcher, name='crack-watcher', args=(outfile_path, process)) out_thread.start() err_thread.start() crk_thread.start() @@ -219,6 +254,8 @@ def run_chunk(self, task, chunk, preprocessor): logging.info("finished chunk") def run_loop(self, proc, chunk, task): + zap_path = Path(self.config.get_value('zaps-path'), f"hashlist_{task['hashlistId']}") + self.cracks = [] piping_threshold = 95 enable_piping = False @@ -355,7 +392,7 @@ def run_loop(self, proc, chunk, task): if zaps: logging.debug("Writing zaps") zap_output = "\tFF\n".join(zaps) + '\tFF\n' - f = open(self.config.get_value('zaps-path') + "/hashlist_" + str(task['hashlistId']) + "/" + str(time.time()), 'a') + f = open(Path(zap_path) / str(time.time()), 'a') f.write(zap_output) f.close() logging.info("Progress:" + str("{:6.2f}".format(relative_progress / 100)) + "% Speed: " + print_speed(speed) + " Cracks: " + str(cracks_count) + " Accepted: " + str(ans['cracked']) + " Skips: " + str(ans['skipped']) + " Zaps: " + str(len(zaps))) @@ -381,15 +418,8 @@ def measure_keyspace(self, task, chunk): task = task.get_task() # TODO: refactor this to be better code files = update_files(task['attackcmd']) files = files.replace(task['hashlistAlias'] + " ", "") - - if Initialize.get_os() == 1: - # Windows - full_cmd = f'"{self.callPath}"' - else: - # Linux / Mac - full_cmd = f"'{self.callPath}'" - - full_cmd = f"{full_cmd} --keyspace --quiet {files} {task['cmdpars']}" + + full_cmd = f"{self.callPath} --keyspace --quiet {files} {task['cmdpars']}" if 'useBrain' in task and task['useBrain']: full_cmd = f"{full_cmd} -S" @@ -509,12 +539,7 @@ def run_benchmark(self, task): full_cmd = ' '.join(args) - if Initialize.get_os() == 1: - # Windows - full_cmd = f'"{self.callPath}" {full_cmd}' - else: - # Linux / Mac - full_cmd = f"'{self.callPath}' {full_cmd}" + full_cmd = f"{self.callPath} {full_cmd}" logging.debug(f"CALL: {full_cmd}") proc = subprocess.Popen(full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.cracker_path) @@ -599,13 +624,7 @@ def run_speed_benchmark(self, task): args.append(f'"{hashlist_out_path}"') full_cmd = ' '.join(args) - - if Initialize.get_os() == 1: - # Windows - full_cmd = f'"{self.callPath}" {full_cmd}' - else: - # Linux / Mac - full_cmd = f"'{self.callPath}' {full_cmd}" + full_cmd = f"{self.callPath} {full_cmd}" output = b'' try: diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 67e3415..4717d3a 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -284,5 +284,49 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po req = JsonRequest(query) req.execute() + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s 
{skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + '-a3 ?l?l?l?l ', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() + if __name__ == '__main__': unittest.main() From a69650acead4fdd0c1b82f4045f5e8e9491708f5 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 20 Feb 2023 14:57:34 +0100 Subject: [PATCH 40/77] Fix test on linux --- htpclient/hashcat_cracker.py | 4 ++-- tests/test_hashcat_cracker.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index b31f2de..0f13f51 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -40,7 +40,7 @@ def __init__(self, cracker_id, binary_download): self.callPath = f'"{self.executable_name}"' else: # Linux / Mac - self.callPath = f"./{self.executable_name}" + self.callPath = f"'./{self.executable_name}'" cmd = [str(self.executable_path), "--version"] @@ -212,7 +212,7 @@ def run_chunk(self, task, chunk, preprocessor): # Set paths outfile_path = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}.out") outfile_backup_path = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}_{time.time()}.out") - zapfile_path = Path(self.config.get_value('zaps-path'), f"/hashlist_{task['hashlistId']}") + zapfile_path = Path(self.config.get_value('zaps-path'), f"hashlist_{task['hashlistId']}") # clear old found file - earlier we deleted them, but just in case, we just move it to a unique filename if configured so if os.path.exists(outfile_path): diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 4717d3a..48f7e04 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -130,7 +130,7 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po limit = str(chunk.chunk_data()['length']) full_cmd = [ - './hashcat.bin', + "'./hashcat.bin'", '--machine-readable', '--quiet', '--status', @@ -147,8 +147,8 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po '--remove', '--remove-timer=5 ', f'"{Path(hashlists_path, str(hashlist_id))}"', - ' -a3 ?l?l?l?l ', - '--hash-type=0 ', + '-a3 ?l?l?l?l ', + ' --hash-type=0 ', ] full_cmd = ' '.join(full_cmd) From 85ceda8233c2efa9d9ec46b0879e3759ffc98ba5 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 20 Feb 2023 16:23:17 +0100 Subject: [PATCH 41/77] Add test for runtime benchmark --- htpclient/hashcat_cracker.py | 1 + tests/create_task_002.json | 23 ++++++++ tests/test_hashcat_cracker.py | 106 ++++++++++++++++++++++++++++++++++ 3 files changed, 130 insertions(+) create mode 100644 tests/create_task_002.json diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 0f13f51..c9fd9be 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -533,6 +533,7 @@ def run_benchmark(self, task): files = update_files(task['attackcmd']) files = files.replace(task['hashlistAlias'], f'"{hashlist_path}"') + args.append(files) args.append(task['cmdpars']) args.append('-o') args.append(f'"{hashlist_out_path}"') diff --git a/tests/create_task_002.json b/tests/create_task_002.json new file mode 100644 index 0000000..3945e53 --- /dev/null +++ b/tests/create_task_002.json @@ -0,0 +1,23 @@ +{ + 
"attackCmd": "#HL# -a3 ?l?l?l?l", + "chunkSize": 1000, + "chunkTime": 600, + "color": "7C6EFF", + "crackerBinaryId": 1, + "crackerBinaryTypeId": 1, + "forcePipe": true, + "files": [], + "isArchived": false, + "isCpuTask": true, + "isSmall": false, + "maxAgents": 112, + "notes": "example-note", + "preprocessorCommand": "", + "priority": 10, + "skipKeyspace": 500, + "staticChunks": 2, + "statusTimer": 5, + "taskName": "Example - runtime", + "useNewBench": false, + "usePreprocessor": false +} diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 48f7e04..4a9926e 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -328,5 +328,111 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po obj.delete() hashlist_v2.delete() + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + def test_runtime_benchmark(self, mock_check_output, moch_popen): + if sys.platform != 'win32': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + for p in sorted(Path(__file__).parent.glob('create_task_002.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + + full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' + mock_check_output.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + hashlist_path = Path(hashlists_path, str(hashlist_id)) + hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--runtime=30', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + f' "{hashlist_path}"', + '-a3 ?l?l?l?l', + ' --hash-type=0 ', + '-o', + f'"{hashlist_out_path}"' + ] + + full_cmd = ' '.join(full_cmd) + + moch_popen.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stdout=-1, + stderr=-1 + ) + + task_id = task.get_task()['taskId'] + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task_id + 
query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + assert chunk.get_chunk(task_id) == 1 + + # Cleanup + obj.delete() + hashlist_v2.delete() + if __name__ == '__main__': unittest.main() From d7ecdff1ea42a611f8fb5fc1f1ec804046b06454 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Wed, 22 Feb 2023 18:20:59 +0100 Subject: [PATCH 42/77] Fix preprocessor running --- htpclient/binarydownload.py | 10 +- htpclient/download.py | 2 - htpclient/files.py | 48 ++++++-- htpclient/hashcat_cracker.py | 86 +++++++++---- tests/create_task_001.json | 2 +- tests/create_task_002.json | 2 +- tests/create_task_003.json | 23 ++++ tests/hashtopolis-test.yaml | 2 +- tests/test_hashcat_cracker.py | 225 ++++++++++++++++++++++++++++++++++ 9 files changed, 353 insertions(+), 47 deletions(-) create mode 100644 tests/create_task_003.json diff --git a/htpclient/binarydownload.py b/htpclient/binarydownload.py index fcb7d04..d6a487e 100644 --- a/htpclient/binarydownload.py +++ b/htpclient/binarydownload.py @@ -136,7 +136,7 @@ def check_prince(self): def check_preprocessor(self, task): logging.debug("Checking if requested preprocessor is present...") - path = self.config.get_value('preprocessors-path') + "/" + str(task.get_task()['preprocessor']) + "/" + path = Path(self.config.get_value('preprocessors-path'), str(task.get_task()['preprocessor'])) query = copy_and_set_token(dict_downloadBinary, self.config.get_value('token')) query['type'] = 'preprocessor' query['preprocessorId'] = task.get_task()['preprocessor'] @@ -161,12 +161,12 @@ def check_preprocessor(self, task): sleep(5) return False if Initialize.get_os() == 1: - os.system("7zr" + Initialize.get_os_extension() + " x -otemp temp.7z") + os.system(f"7zr{Initialize.get_os_extension()} x -otemp temp.7z") else: - os.system("./7zr" + Initialize.get_os_extension() + " x -otemp temp.7z") + os.system(f"./7zr{Initialize.get_os_extension()} x -otemp temp.7z") for name in os.listdir("temp"): # this part needs to be done because it is compressed with the main subfolder of prince - if os.path.isdir("temp/" + name): - os.rename("temp/" + name, path) + if os.path.isdir(Path('temp', name)): + os.rename(Path('temp', name), path) break os.unlink("temp.7z") os.rmdir("temp") diff --git a/htpclient/download.py b/htpclient/download.py index 02fc38a..36bbfd3 100644 --- a/htpclient/download.py +++ b/htpclient/download.py @@ -14,8 +14,6 @@ class Download: def download(url, output, no_header=False): try: session = Session().s - if Initialize.get_os() == 1: - output = output.replace("/", '\\') # Check header if not no_header: diff --git a/htpclient/files.py b/htpclient/files.py index 28916be..1570a2f 100644 --- a/htpclient/files.py +++ b/htpclient/files.py @@ -1,6 +1,7 @@ import logging import time from time import sleep +from pathlib import Path import os @@ -36,26 +37,33 @@ def deletion_check(self): else: files = ans['filenames'] for filename in files: + file_path = Path(self.config.get_value('files-path'), filename) if filename.find("/") != -1 or filename.find("\\") != -1: continue # ignore invalid file names - elif os.path.dirname(self.config.get_value('files-path') + "/" + filename) != "files": + elif os.path.dirname(file_path) != "files": continue # ignore any case in which we would leave the files folder - elif os.path.exists(self.config.get_value('files-path') + "/" + filename): + elif os.path.exists(file_path): logging.info("Delete file '" + filename + "' as requested by server...") - if 
os.path.splitext(self.config.get_value('files-path') + "/" + filename)[1] == '.7z': - if os.path.exists(self.config.get_value('files-path') + "/" + filename.replace(".7z", ".txt")): + # When we get the delete requests, this function will check if the .7z maybe as + # an extracted text file. That file will also be deleted. + if os.path.splitext(file_path)[1] == '.7z': + txt_file = Path(f"{os.path.splitext(file_path)[0]}.txt") + if os.path.exists(txt_file): logging.info("Also delete assumed wordlist from archive of same file...") - os.unlink(self.config.get_value('files-path') + "/" + filename.replace(".7z", ".txt")) - os.unlink(self.config.get_value('files-path') + "/" + filename) + os.unlink(txt_file) + os.unlink(file_path) def check_files(self, files, task_id): for file in files: - file_localpath = self.config.get_value('files-path') + "/" + file + file_localpath = Path(self.config.get_value('files-path'), file) + txt_file = Path(f"{os.path.splitext(file_localpath)[0]}.txt") query = copy_and_set_token(dict_getFile, self.config.get_value('token')) query['taskId'] = task_id query['file'] = file req = JsonRequest(query) ans = req.execute() + + # Process request if ans is None: logging.error("Failed to get file!") sleep(5) @@ -65,30 +73,44 @@ def check_files(self, files, task_id): sleep(5) return False else: + # Filesize is OK file_size = int(ans['filesize']) if os.path.isfile(file_localpath) and os.stat(file_localpath).st_size == file_size: logging.debug("File is present on agent and has matching file size.") continue + + # Multicasting configured elif self.config.get_value('multicast'): logging.debug("Multicast is enabled, need to wait until it was delivered!") sleep(5) # in case the file is not there yet (or not completely), we just wait some time and then try again return False + # TODO: we might need a better check for this - if os.path.isfile(file_localpath.replace(".7z", ".txt")): + if os.path.isfile(txt_file): continue + + # Rsync if self.config.get_value('rsync') and Initialize.get_os() != 1: - Download.rsync(self.config.get_value('rsync-path') + '/' + file, file_localpath) + Download.rsync(Path(self.config.get_value('rsync-path'), file), file_localpath) else: logging.debug("Starting download of file from server...") Download.download(self.config.get_value('url').replace("api/server.php", "") + ans['url'], file_localpath) + + # Mismatch filesize if os.path.isfile(file_localpath) and os.stat(file_localpath).st_size != file_size: logging.error("file size mismatch on file: %s" % file) sleep(5) return False - if os.path.splitext(self.config.get_value('files-path') + "/" + file)[1] == '.7z' and not os.path.isfile(self.config.get_value('files-path') + "/" + file.replace(".7z", ".txt")): + + # 7z extraction, check if the .txt does exist. 
+ if os.path.splitext(file_localpath)[1] == '.7z' and not os.path.isfile(txt_file): # extract if needed - if Initialize.get_os() != 1: - os.system("./7zr" + Initialize.get_os_extension() + " x -aoa -o'" + self.config.get_value('files-path') + "/' -y '" + self.config.get_value('files-path') + "/" + file + "'") + files_path = Path(self.config.get_value('files-path')) + if Initialize.get_os() == 1: + # Windows + cmd = f'7zr{Initialize.get_os_extension()} x -aoa -o"{files_path}" -y "{file_localpath}"' else: - os.system("7zr" + Initialize.get_os_extension() + " x -aoa -o'" + self.config.get_value('files-path') + "/' -y '" + self.config.get_value('files-path') + "/" + file + "'") + # Linux + cmd = f'7zr{Initialize.get_os_extension()} x -aoa -o"{files_path}" -y "{file_localpath}"' + os.system(cmd) return True diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index c9fd9be..c2dd00e 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -165,34 +165,62 @@ def build_prince_command(self, task, chunk): return binary + pre_args + " | " + self.callPath + post_args + task['cmdpars'] def build_preprocessor_command(self, task, chunk, preprocessor): - binary = self.config.get_value('preprocessors-path') + "/" + str(task['preprocessor']) + "/" + preprocessor['executable'] - if Initialize.get_os() != 1: - binary = "./" + binary - if not os.path.isfile(binary): + binary_path = Path(self.config.get_value('preprocessors-path'), str(task['preprocessor'])) + binary = preprocessor['executable'] + + if not os.path.isfile(binary_path / binary): split = binary.split(".") binary = '.'.join(split[:-1]) + get_bit() + "." + split[-1] + binary = binary_path / binary + + pre_args = [] # in case the skip or limit command are not available, we try to achieve the same with head/tail (the more chunks are run, the more inefficient it might be) if preprocessor['skipCommand'] is not None and preprocessor['limitCommand'] is not None: - pre_args = " " + preprocessor['skipCommand'] + " " + str(chunk['skip']) + " " + preprocessor['limitCommand'] + " " + str(chunk['length']) + ' ' - else: - pre_args = "" + pre_args.extend([preprocessor['skipCommand'], str(chunk['skip'])]) + pre_args.extend([preprocessor['limitCommand'], str(chunk['length'])]) - pre_args += ' ' + update_files(task['preprocessorCommand']) + pre_args.append(update_files(task['preprocessorCommand'])) # TODO: add support for windows as well (pre-built tools) if preprocessor['skipCommand'] is None or preprocessor['limitCommand'] is None: - pre_args += " | head -n " + str(chunk['skip'] + chunk['length']) + " | tail -n " + str(chunk['length']) + skip_length = chunk['skip'] + chunk['length'] + pre_args.append(f"| head -n {skip_length}") + pre_args.append(f"| tail -n {chunk['length']}") + + zaps_file = Path(self.config.get_value('zaps-path'), f"hashlist_{task['hashlistId']}") + output_file = Path(self.config.get_value('hashlists-path'), f"{task['hashlistId']}.out") + hashlist_file = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) - post_args = " --machine-readable --quiet --status --remove --restore-disable --potfile-disable --session=hashtopolis" - post_args += " --status-timer " + str(task['statustimer']) - post_args += " --outfile-check-timer=" + str(task['statustimer']) - post_args += " --outfile-check-dir='" + self.config.get_value('zaps-path') + "hashlist_" + str(task['hashlistId']) + "'" - post_args += " -o '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + ".out' 
--outfile-format=" + self.get_outfile_format() + " -p \"" + str(chr(9)) + "\"" - post_args += " --remove-timer=" + str(task['statustimer']) - post_args += " '" + self.config.get_value('hashlists-path') + "/" + str(task['hashlistId']) + "'" - post_args += update_files(task['attackcmd']).replace(task['hashlistAlias'], '') - return f"'{binary}'" + pre_args + " | " + f"'{self.callPath}'" + post_args + task['cmdpars'] + post_args = [] + post_args.append('--machine-readable') + post_args.append('--quiet') + post_args.append('--status') + post_args.append('--remove') + post_args.append('--restore-disable') + post_args.append('--potfile-disable') + post_args.append('--session=hashtopolis') + post_args.append(f"--status-timer {task['statustimer']}") + + post_args.append(f"--outfile-check-timer={task['statustimer']}") + post_args.append(f'--outfile-check-dir="{zaps_file}"') + post_args.append(f'-o "{output_file}"') + post_args.append(f'--outfile-format={self.get_outfile_format()}') + post_args.append('-p 0x09') + post_args.append(f"--remove-timer={task['statustimer']}") + post_args.append(f'"{hashlist_file}"') + + files = update_files(task['attackcmd']) + files = files.replace(task['hashlistAlias'] + " ", "") + post_args.append(files) + post_args.append(task['cmdpars']) + + pre_args = ' '.join(pre_args) + post_args = ' '.join(post_args) + + full_cmd = f'"{binary}" {pre_args} | {self.callPath} {post_args}' + + return full_cmd def run_chunk(self, task, chunk, preprocessor): if 'enforcePipe' in task and task['enforcePipe']: @@ -477,22 +505,32 @@ def prince_keyspace(self, task, chunk): def preprocessor_keyspace(self, task, chunk): preprocessor = task.get_preprocessor() + preprocessors_path = self.config.get_value('preprocessors-path') if preprocessor['keyspaceCommand'] is None: # in case there is no keyspace flag, we just assume the task will be that large to run forever return chunk.send_keyspace(-1, task.get_task()['taskId']) binary = preprocessor['executable'] - if Initialize.get_os() != 1: - binary = "./" + binary if not os.path.isfile(binary): split = binary.split(".") binary = '.'.join(split[:-1]) + get_bit() + "." 
+ split[-1] - - full_cmd = f"'{binary}'" + " " + preprocessor['keyspaceCommand'] + " " + update_files(task.get_task()['preprocessorCommand']) + if Initialize.get_os() == 1: - full_cmd = full_cmd.replace("/", '\\') + # Windows + binary = f'"{binary}"' + else: + # Mac / Linux + binary = f'"./{binary}"' + + args = [] + args.append(preprocessor['keyspaceCommand']) + args.append(update_files(task.get_task()['preprocessorCommand'])) + + full_cmd = ' '.join(args) + full_cmd = f"{binary} {full_cmd}" + try: logging.debug("CALL: " + full_cmd) - output = subprocess.check_output(full_cmd, shell=True, cwd=self.config.get_value('preprocessors-path') + "/" + str(task.get_task()['preprocessor'])) + output = subprocess.check_output(full_cmd, shell=True, cwd=Path(preprocessors_path, str(task.get_task()['preprocessor']))) except subprocess.CalledProcessError: logging.error("Error during preprocessor keyspace measure") send_error("Preprocessor keyspace measure failed!", self.config.get_value('token'), task.get_task()['taskId'], None) diff --git a/tests/create_task_001.json b/tests/create_task_001.json index 7158cc4..c515157 100644 --- a/tests/create_task_001.json +++ b/tests/create_task_001.json @@ -19,5 +19,5 @@ "statusTimer": 5, "taskName": "Example - Rijmen and Daemen", "useNewBench": true, - "usePreprocessor": false + "preprocessorId": 0 } diff --git a/tests/create_task_002.json b/tests/create_task_002.json index 3945e53..e7accf6 100644 --- a/tests/create_task_002.json +++ b/tests/create_task_002.json @@ -19,5 +19,5 @@ "statusTimer": 5, "taskName": "Example - runtime", "useNewBench": false, - "usePreprocessor": false + "preprocessorId": 0 } diff --git a/tests/create_task_003.json b/tests/create_task_003.json new file mode 100644 index 0000000..48f08f2 --- /dev/null +++ b/tests/create_task_003.json @@ -0,0 +1,23 @@ +{ + "attackCmd": "#HL#", + "chunkSize": 1000, + "chunkTime": 600, + "color": "7C6EFF", + "crackerBinaryId": 1, + "crackerBinaryTypeId": 1, + "forcePipe": true, + "files": [], + "isArchived": false, + "isCpuTask": true, + "isSmall": false, + "maxAgents": 112, + "notes": "example-note", + "preprocessorCommand": "--pw-min=1 --pw-max=2 ../../crackers/1/example.dict", + "priority": 10, + "skipKeyspace": 500, + "staticChunks": 2, + "statusTimer": 5, + "taskName": "Example - preprocessor", + "useNewBench": true, + "preprocessorId": 1 +} diff --git a/tests/hashtopolis-test.yaml b/tests/hashtopolis-test.yaml index 633af15..50e6807 100644 --- a/tests/hashtopolis-test.yaml +++ b/tests/hashtopolis-test.yaml @@ -1,3 +1,3 @@ -hashtopolis_uri: 'http://127.0.0.1:8080' +hashtopolis_uri: 'http://hashtopolis' username: 'root' password: 'hashtopolis' diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 4a9926e..dc392fa 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -166,6 +166,231 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po obj.delete() hashlist_v2.delete() + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_preprocessor(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = 
Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + p = Path(__file__).parent.joinpath('create_task_003.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + preprocessor_id = payload.get('preprocessorId') + preprocessor_path = Path('preprocessors', str(preprocessor_id)) + if os.path.exists(preprocessor_path): + shutil.rmtree(preprocessor_path) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + # executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + + task = Task() + task.load_task() + + binaryDownload.check_preprocessor(task) + assert os.path.exists(preprocessor_path) + + binaryDownload.check_version(cracker_id) + cracker = HashcatCracker(1, binaryDownload) + + # --keyspace + chunk = Chunk() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + preprocessors_path = config.get_value('preprocessors-path') + assert cracker.measure_keyspace(task, chunk) == True + mock_check_output.assert_called_with( + '"./pp64.bin" --keyspace --pw-min=1 --pw-max=2 ../../crackers/1/example.dict ', + shell=True, + cwd=Path(preprocessors_path, str(preprocessor_id)), + ) + + # --benchmark + result = cracker.run_benchmark(task.get_task()) + assert int(result.split(':')[0]) > 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" --hash-type=0 example.dict -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + '"/app/src/preprocessors/1/pp64.bin"', + f'--skip {skip}', + f'--limit {limit}', + ' --pw-min=1 --pw-max=2', + '../../crackers/1/example.dict' + ' |', + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--remove', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + '--remove-timer=5', + f'"{Path(hashlists_path, str(hashlist_id))}"', + ' 
--hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() + + # cracker_zip = Path(crackers_path, f'{cracker_id}.7z') + # crackers_temp = Path(crackers_path, 'temp') + # zip_binary = './7zr' + # mock_unlink.assert_called_with(cracker_zip) + + # mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") + + # # --version + # cracker = HashcatCracker(1, binaryDownload) + # mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # # --keyspace + # chunk = Chunk() + # task = Task() + # task.load_task() + # hashlist = Hashlist() + + # hashlist.load_hashlist(task.get_task()['hashlistId']) + # hashlist_id = task.get_task()['hashlistId'] + # hashlists_path = config.get_value('hashlists-path') + + # cracker.measure_keyspace(task, chunk) + # mock_check_output.assert_called_with( + # "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", + # shell=True, + # cwd=Path(crackers_path, str(cracker_id)), + # stderr=-2 + # ) + + # # benchmark + # result = cracker.run_benchmark(task.get_task()) + # assert result != 0 + # mock_check_output.assert_called_with( + # f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + # shell=True, + # cwd=Path(crackers_path, str(cracker_id)), + # stderr=-2 + # ) + + # # Sending benchmark to server + # query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + # query['taskId'] = task.get_task()['taskId'] + # query['result'] = result + # query['type'] = task.get_task()['benchType'] + # req = JsonRequest(query) + # req.execute() + + # # cracking + # chunk.get_chunk(task.get_task()['taskId']) + # cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + # zaps_path = config.get_value('zaps-path') + # zaps_dir = f"hashlist_{hashlist_id}" + # skip = str(chunk.chunk_data()['skip']) + # limit = str(chunk.chunk_data()['length']) + + # full_cmd = [ + # "'./hashcat.bin'", + # '--machine-readable', + # '--quiet', + # '--status', + # '--restore-disable', + # '--session=hashtopolis', + # '--status-timer 5', + # '--outfile-check-timer=5', + # f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + # f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + # '--outfile-format=1,2,3,4', + # f'-p 0x09', + # f'-s {skip} -l {limit}', + # '--potfile-disable', + # '--remove', + # '--remove-timer=5 ', + # f'"{Path(hashlists_path, str(hashlist_id))}"', + # '-a3 ?l?l?l?l ', + # ' --hash-type=0 ', + # ] + + # full_cmd = ' '.join(full_cmd) + + # mock_Popen.assert_called_with( + # full_cmd, + # shell=True, + # stdout=-1, + # stderr=-1, + # cwd=Path(crackers_path, str(cracker_id)), + # preexec_fn=mock.ANY + # ) + + # # Cleanup + # obj.delete() + # hashlist_v2.delete() + class HashcatCrackerTestWindows(unittest.TestCase): @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) From e0596f505729289cb4d161719de6004e43d22ca0 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 08:14:43 +0100 Subject: [PATCH 43/77] Add newer version of hashtopolis api module --- 
requirements-tests.txt | 3 +- tests/hashtopolis.py | 112 +++++++++++++++++++++++++++++++++++++---- 2 files changed, 104 insertions(+), 11 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index d9cf678..9047fe6 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,2 +1,3 @@ pytest -confidence \ No newline at end of file +confidence +tuspy \ No newline at end of file diff --git a/tests/hashtopolis.py b/tests/hashtopolis.py index 76f1a9c..1050167 100644 --- a/tests/hashtopolis.py +++ b/tests/hashtopolis.py @@ -9,6 +9,7 @@ import unittest import datetime from pathlib import Path +from io import BytesIO import requests import unittest @@ -18,6 +19,7 @@ import http import confidence +import tusclient.client #logging.basicConfig(level=logging.DEBUG) @@ -81,7 +83,7 @@ def authenticate(self): } - def filter(self, filter): + def filter(self, expand, filter): self.authenticate() uri = self._api_endpoint + self._model_uri headers = self._headers @@ -104,7 +106,10 @@ def filter(self, filter): l = f'{k}={v}' filter_list.append(l) - payload = {'filter': filter_list} + payload = { + 'filter': filter_list, + 'expand': expand + } r = requests.get(uri, headers=headers, data=json.dumps(payload)) if r.status_code != 201: @@ -181,12 +186,12 @@ def get_conn(cls): return cls.conn[cls._model_uri] @classmethod - def all(cls): + def all(cls, expand=None): """ Retrieve all backend objects TODO: Make iterator supporting loading of objects via pages """ - return cls.filter() + return cls.filter(expand) @classmethod @@ -211,15 +216,15 @@ def get_first(cls): return cls.all()[0] @classmethod - def get(cls, **kwargs): - objs = cls.filter(**kwargs) + def get(cls, expand=None, **kwargs): + objs = cls.filter(expand, **kwargs) assert(len(objs) == 1) return objs[0] @classmethod - def filter(cls, **kwargs): + def filter(cls, expand=None, **kwargs): # Get all objects - api_objs = cls.get_conn().filter(kwargs) + api_objs = cls.get_conn().filter(expand, kwargs) # Convert into class objs = [] @@ -249,6 +254,27 @@ class Model(metaclass=ModelBase): def __init__(self, *args, **kwargs): self.set_initial(kwargs) super().__init__() + + def _dict2obj(self, dict): + # Function to convert a dict to an object. + uri = dict.get('_self') + + # Loop through all the registers classes + for modelname, model in cls_registry.items(): + model_uri = model.objects._model_uri + # Check if part of the uri is in the model uri + if model_uri in uri: + + obj = model() + + # Set all the attributes of the object. + for k2,v2 in dict.items(): + setattr(obj, k2, v2) + if not k2.startswith('_'): + obj.__fields.append(k2) + return obj + # If we are here, it means that no uri matched, thus we don't know the object. + raise(TypeError('Object not valid model')) def set_initial(self, kv): self.__fields = [] @@ -261,11 +287,33 @@ def set_initial(self, kv): # Create attribute values for k,v in kv.items(): + + # In case expand is true, there can be a attribute which also is an object. + # Example: Users in AccessGroups. This part will convert the returned data. + # Into proper objects. + if type(v) is list and len(v) > 0: + + # Many-to-Many relation + obj_list = [] + # Loop through all the values in the list and convert them to objects. 
+ for dict_v in v: + if type(dict_v) is dict and dict_v.get('_self'): + # Double check that it really is an object + obj = self._dict2obj(dict_v) + obj_list.append(obj) + # Set the attribute of the current object to a set object (like Django) + # Also check if it really were objects + if len(obj_list) > 0: + setattr(self, f"{k}_set", obj_list) + continue + # This does the same as above, only one-to-one relations + if type(v) is dict: + setattr(self, f"{k}_set", self._dict2obj(v)) + continue setattr(self, k, v) if not k.startswith('_'): self.__fields.append(k) - def diff(self): d1 = self.__initial d2 = dict([(k, getattr(self, k)) for k in self.__fields]) @@ -297,15 +345,59 @@ class Task(Model, uri="/ui/tasks"): def __repr__(self): return self._self +class Pretask(Model, uri="/ui/pretasks"): + def __repr__(self): + return self._self + +class User(Model, uri="/ui/users"): + def __repr__(self): + return self._self + class Hashlist(Model, uri="/ui/hashlists"): def __repr__(self): return self._self +class AccessGroup(Model, uri="/ui/accessgroups"): + def __repr__(self): + return self._self + class Cracker(Model, uri="/ui/crackers"): def __repr__(self): return self._self class CrackerType(Model, uri="/ui/crackertypes"): def __repr__(self): - return self._self \ No newline at end of file + return self._self + +class File(Model, uri="/ui/files"): + def __repr__(self): + return self._self + + +class FileImport(HashtopolisConnector): + def __init__(self): + super().__init__("/ui/files/import", Config()) + + def __repr__(self): + return self._self + + def do_upload(self, filename, file_stream): + self.authenticate() + + uri = self._api_endpoint + self._model_uri + + my_client = tusclient.client.TusClient(uri) + del self._headers['Content-Type'] + my_client.set_headers(self._headers) + + metadata = {"filename": filename, + "filetype": "application/text"} + uploader = my_client.uploader( + file_stream=file_stream, + chunk_size=1000000000, + upload_checksum=True, + metadata=metadata + ) + uploader.upload() + From e47cf16ebc349aa6b2d70304fa25259c7cc2ed7c Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 09:14:37 +0100 Subject: [PATCH 44/77] Add tests for wordlist / rule attack --- tests/create_file_001.json | 6 ++ tests/create_task_004.json | 23 ++++ tests/test_hashcat_cracker.py | 197 ++++++++++++++++++++++++++++++++++ 3 files changed, 226 insertions(+) create mode 100644 tests/create_file_001.json create mode 100644 tests/create_task_004.json diff --git a/tests/create_file_001.json b/tests/create_file_001.json new file mode 100644 index 0000000..3d1df02 --- /dev/null +++ b/tests/create_file_001.json @@ -0,0 +1,6 @@ +{ + "sourceType": "import", + "isSecret": false, + "accessGroupId": 1 + } + \ No newline at end of file diff --git a/tests/create_task_004.json b/tests/create_task_004.json new file mode 100644 index 0000000..571a21d --- /dev/null +++ b/tests/create_task_004.json @@ -0,0 +1,23 @@ +{ + "attackCmd": "#HL# -a0 create-task-004.txt", + "chunkSize": 1000, + "chunkTime": 600, + "color": "7C6EFF", + "crackerBinaryId": 1, + "crackerBinaryTypeId": 1, + "forcePipe": true, + "files": [], + "isArchived": false, + "isCpuTask": true, + "isSmall": false, + "maxAgents": 112, + "notes": "example-note", + "preprocessorCommand": "", + "priority": 10, + "skipKeyspace": 0, + "staticChunks": 2, + "statusTimer": 5, + "taskName": "Example - files and rules", + "useNewBench": true, + "preprocessorId": 0 +} diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 
dc392fa..28ea62b 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -10,6 +10,8 @@ from pathlib import Path from argparse import Namespace import sys +import datetime +from io import BytesIO from htpclient.hashcat_cracker import HashcatCracker from htpclient.binarydownload import BinaryDownload @@ -22,9 +24,12 @@ from htpclient.dicts import copy_and_set_token from htpclient.dicts import dict_sendBenchmark from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files from tests.hashtopolis import Hashlist as Hashlist_v2 from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 # The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. # test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) @@ -166,6 +171,198 @@ def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Po obj.delete() hashlist_v2.delete() + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_files(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Set config and variables + cracker_id = 1 + config = Config() + + crackers_path = config.get_value('crackers-path') + files_path = config.get_value('files-path') + + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Upload wordlist + stamp = datetime.datetime.now().isoformat() + filename = f'wordlist-{stamp}.txt' + file_import = FileImport_v2() + text = '12345678\n123456\nprincess\n'.encode('utf-8') + fs = BytesIO(text) + file_import.do_upload(filename, fs) + + # Create wordlist + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = filename + payload['filename'] = filename + payload['fileType'] = 0 + file_obj = File_v2(**payload) + file_obj.save() + + wordlist_id = file_obj.id + wordlist_name = file_obj.filename + + # Upload Rule file + stamp = datetime.datetime.now().isoformat() + filename = f'rule-{stamp}.txt' + file_import = FileImport_v2() + text = ':\n$1\n$2\n'.encode('utf-8') + fs = BytesIO(text) + file_import.do_upload(filename, fs) + + # Create rule file + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = filename + payload['filename'] = filename + payload['fileType'] = 1 + file_obj2 = File_v2(**payload) 
+ file_obj2.save() + + rule_id = file_obj2.id + rule_name = file_obj2.filename + + # Create task + p = Path(__file__).parent.joinpath('create_task_004.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + payload['attackCmd'] = f'#HL# -a0 {wordlist_name} -r {rule_name}' + payload['files'] = [wordlist_id, rule_id] + task_obj = Task_v2(**payload) + task_obj.save() + + # Delete files locally if they are already downloaded in a prev run + wordlist_path = Path(files_path, wordlist_name) + rule_path = Path(files_path, rule_name) + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + if os.path.isfile(rule_path): + os.remove(rule_path) + + # Try to download cracker 1 + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + files = Files() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + # Download required files + assert files.check_files(task.get_task()['files'], task.get_task()['taskId']) + + # Test if the files are really downloaded + assert os.path.isfile(wordlist_path) == True + assert os.path.isfile(rule_path) == True + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 ", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + f"-a0 '{wordlist_path}' -r '{rule_path}' ", + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + 
stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + task_obj.delete() + hashlist_v2.delete() + file_obj.delete() + file_obj2.delete() + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + if os.path.isfile(rule_path): + os.remove(rule_path) + + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) @mock.patch('os.unlink', side_effect=os.unlink) From 7db100fc52d637e984ff09176f0049d7c4808ce9 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 09:15:12 +0100 Subject: [PATCH 45/77] Remove comment block in test file --- tests/test_hashcat_cracker.py | 91 ----------------------------------- 1 file changed, 91 deletions(-) diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py index 28ea62b..7dd029f 100644 --- a/tests/test_hashcat_cracker.py +++ b/tests/test_hashcat_cracker.py @@ -496,97 +496,6 @@ def test_preprocessor(self, mock_system, mock_unlink, mock_check_output, mock_Po obj.delete() hashlist_v2.delete() - # cracker_zip = Path(crackers_path, f'{cracker_id}.7z') - # crackers_temp = Path(crackers_path, 'temp') - # zip_binary = './7zr' - # mock_unlink.assert_called_with(cracker_zip) - - # mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") - - # # --version - # cracker = HashcatCracker(1, binaryDownload) - # mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) - - # # --keyspace - # chunk = Chunk() - # task = Task() - # task.load_task() - # hashlist = Hashlist() - - # hashlist.load_hashlist(task.get_task()['hashlistId']) - # hashlist_id = task.get_task()['hashlistId'] - # hashlists_path = config.get_value('hashlists-path') - - # cracker.measure_keyspace(task, chunk) - # mock_check_output.assert_called_with( - # "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", - # shell=True, - # cwd=Path(crackers_path, str(cracker_id)), - # stderr=-2 - # ) - - # # benchmark - # result = cracker.run_benchmark(task.get_task()) - # assert result != 0 - # mock_check_output.assert_called_with( - # f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", - # shell=True, - # cwd=Path(crackers_path, str(cracker_id)), - # stderr=-2 - # ) - - # # Sending benchmark to server - # query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - # query['taskId'] = task.get_task()['taskId'] - # query['result'] = result - # query['type'] = task.get_task()['benchType'] - # req = JsonRequest(query) - # req.execute() - - # # cracking - # chunk.get_chunk(task.get_task()['taskId']) - # cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) - # zaps_path = config.get_value('zaps-path') - # zaps_dir = f"hashlist_{hashlist_id}" - # skip = str(chunk.chunk_data()['skip']) - # limit = str(chunk.chunk_data()['length']) - - # full_cmd = [ - # "'./hashcat.bin'", - # '--machine-readable', - # '--quiet', - # '--status', - # '--restore-disable', - # '--session=hashtopolis', - # '--status-timer 5', - # '--outfile-check-timer=5', - # f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', - # f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', - # '--outfile-format=1,2,3,4', - # f'-p 0x09', - # f'-s {skip} -l 
{limit}', - # '--potfile-disable', - # '--remove', - # '--remove-timer=5 ', - # f'"{Path(hashlists_path, str(hashlist_id))}"', - # '-a3 ?l?l?l?l ', - # ' --hash-type=0 ', - # ] - - # full_cmd = ' '.join(full_cmd) - - # mock_Popen.assert_called_with( - # full_cmd, - # shell=True, - # stdout=-1, - # stderr=-1, - # cwd=Path(crackers_path, str(cracker_id)), - # preexec_fn=mock.ANY - # ) - - # # Cleanup - # obj.delete() - # hashlist_v2.delete() class HashcatCrackerTestWindows(unittest.TestCase): @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) From c348868291b11f02847edb9a07d4f0ceaa4747d9 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 10:06:11 +0100 Subject: [PATCH 46/77] Add splitted test files into seperate files --- tests/test_hashcat_cracker.py | 769 ----------------------------- tests/test_hashcat_files.py | 229 +++++++++ tests/test_hashcat_preprocessor.py | 171 +++++++ tests/test_hashcat_runtime.py | 144 ++++++ tests/test_hashcat_simple.py | 333 +++++++++++++ 5 files changed, 877 insertions(+), 769 deletions(-) delete mode 100644 tests/test_hashcat_cracker.py create mode 100644 tests/test_hashcat_files.py create mode 100644 tests/test_hashcat_preprocessor.py create mode 100644 tests/test_hashcat_runtime.py create mode 100644 tests/test_hashcat_simple.py diff --git a/tests/test_hashcat_cracker.py b/tests/test_hashcat_cracker.py deleted file mode 100644 index 7dd029f..0000000 --- a/tests/test_hashcat_cracker.py +++ /dev/null @@ -1,769 +0,0 @@ -import pytest -from unittest import mock -import unittest -from unittest.mock import MagicMock -import os -import subprocess -import shutil -import requests -import json -from pathlib import Path -from argparse import Namespace -import sys -import datetime -from io import BytesIO - -from htpclient.hashcat_cracker import HashcatCracker -from htpclient.binarydownload import BinaryDownload -from htpclient.session import Session -from htpclient.config import Config -from htpclient.initialize import Initialize -from htpclient.chunk import Chunk -from htpclient.hashlist import Hashlist -from htpclient.task import Task -from htpclient.dicts import copy_and_set_token -from htpclient.dicts import dict_sendBenchmark -from htpclient.jsonRequest import JsonRequest -from htpclient.files import Files - -from tests.hashtopolis import Hashlist as Hashlist_v2 -from tests.hashtopolis import Task as Task_v2 -from tests.hashtopolis import FileImport as FileImport_v2 -from tests.hashtopolis import File as File_v2 - -# The default cmdparameters, some objects need those. Maybe move to a common helper so other tests can include this aswell. 
-# test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://example.com/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - -class HashcatCrackerTestLinux(unittest.TestCase): - @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) - @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) - @mock.patch('os.unlink', side_effect=os.unlink) - @mock.patch('os.system', side_effect=os.system) - def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Popen): - if sys.platform != 'linux': - return - # Clean up cracker folder - if os.path.exists('crackers/1'): - shutil.rmtree('crackers/1') - - #TODO: Delete tasks / hashlist to ensure clean - #TODO: Verify setup agent - - # Setup session object - session = Session(requests.Session()).s - session.headers.update({'User-Agent': Initialize.get_version()}) - - # Create hashlist - p = Path(__file__).parent.joinpath('create_hashlist_001.json') - payload = json.loads(p.read_text('UTF-8')) - hashlist_v2 = Hashlist_v2(**payload) - hashlist_v2.save() - - # Create Task - for p in sorted(Path(__file__).parent.glob('create_task_001.json')): - payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist_v2._id) - obj = Task_v2(**payload) - obj.save() - - # Cmd parameters setup - test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - - # Try to download cracker 1 - cracker_id = 1 - config = Config() - crackers_path = config.get_value('crackers-path') - - executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') - - binaryDownload = BinaryDownload(test_args) - binaryDownload.check_version(cracker_id) - - cracker_zip = Path(crackers_path, f'{cracker_id}.7z') - crackers_temp = Path(crackers_path, 'temp') - zip_binary = './7zr' - mock_unlink.assert_called_with(cracker_zip) - - mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") - - # --version - cracker = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) - - # --keyspace - chunk = Chunk() - task = Task() - task.load_task() - hashlist = Hashlist() - - hashlist.load_hashlist(task.get_task()['hashlistId']) - hashlist_id = task.get_task()['hashlistId'] - hashlists_path = config.get_value('hashlists-path') - - cracker.measure_keyspace(task, chunk) - mock_check_output.assert_called_with( - "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # benchmark - result = cracker.run_benchmark(task.get_task()) - assert result != 0 - mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # Sending benchmark to server - query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - 
query['taskId'] = task.get_task()['taskId'] - query['result'] = result - query['type'] = task.get_task()['benchType'] - req = JsonRequest(query) - req.execute() - - # cracking - chunk.get_chunk(task.get_task()['taskId']) - cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) - zaps_path = config.get_value('zaps-path') - zaps_dir = f"hashlist_{hashlist_id}" - skip = str(chunk.chunk_data()['skip']) - limit = str(chunk.chunk_data()['length']) - - full_cmd = [ - "'./hashcat.bin'", - '--machine-readable', - '--quiet', - '--status', - '--restore-disable', - '--session=hashtopolis', - '--status-timer 5', - '--outfile-check-timer=5', - f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', - f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', - '--outfile-format=1,2,3,4', - f'-p 0x09', - f'-s {skip} -l {limit}', - '--potfile-disable', - '--remove', - '--remove-timer=5 ', - f'"{Path(hashlists_path, str(hashlist_id))}"', - '-a3 ?l?l?l?l ', - ' --hash-type=0 ', - ] - - full_cmd = ' '.join(full_cmd) - - mock_Popen.assert_called_with( - full_cmd, - shell=True, - stdout=-1, - stderr=-1, - cwd=Path(crackers_path, str(cracker_id)), - preexec_fn=mock.ANY - ) - - # Cleanup - obj.delete() - hashlist_v2.delete() - - @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) - @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) - @mock.patch('os.unlink', side_effect=os.unlink) - @mock.patch('os.system', side_effect=os.system) - def test_files(self, mock_system, mock_unlink, mock_check_output, mock_Popen): - if sys.platform != 'linux': - return - - # Setup session object - session = Session(requests.Session()).s - session.headers.update({'User-Agent': Initialize.get_version()}) - - # Cmd parameters setup - test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - - # Set config and variables - cracker_id = 1 - config = Config() - - crackers_path = config.get_value('crackers-path') - files_path = config.get_value('files-path') - - - # Create hashlist - p = Path(__file__).parent.joinpath('create_hashlist_001.json') - payload = json.loads(p.read_text('UTF-8')) - hashlist_v2 = Hashlist_v2(**payload) - hashlist_v2.save() - - # Upload wordlist - stamp = datetime.datetime.now().isoformat() - filename = f'wordlist-{stamp}.txt' - file_import = FileImport_v2() - text = '12345678\n123456\nprincess\n'.encode('utf-8') - fs = BytesIO(text) - file_import.do_upload(filename, fs) - - # Create wordlist - p = Path(__file__).parent.joinpath('create_file_001.json') - payload = json.loads(p.read_text('UTF-8')) - payload['sourceData'] = filename - payload['filename'] = filename - payload['fileType'] = 0 - file_obj = File_v2(**payload) - file_obj.save() - - wordlist_id = file_obj.id - wordlist_name = file_obj.filename - - # Upload Rule file - stamp = datetime.datetime.now().isoformat() - filename = f'rule-{stamp}.txt' - file_import = FileImport_v2() - text = ':\n$1\n$2\n'.encode('utf-8') - fs = BytesIO(text) - file_import.do_upload(filename, fs) - - # Create rule file - p = Path(__file__).parent.joinpath('create_file_001.json') - payload = json.loads(p.read_text('UTF-8')) - payload['sourceData'] = filename - payload['filename'] = filename - payload['fileType'] = 1 - file_obj2 = File_v2(**payload) - file_obj2.save() - - rule_id = file_obj2.id 
- rule_name = file_obj2.filename - - # Create task - p = Path(__file__).parent.joinpath('create_task_004.json') - payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist_v2._id) - payload['attackCmd'] = f'#HL# -a0 {wordlist_name} -r {rule_name}' - payload['files'] = [wordlist_id, rule_id] - task_obj = Task_v2(**payload) - task_obj.save() - - # Delete files locally if they are already downloaded in a prev run - wordlist_path = Path(files_path, wordlist_name) - rule_path = Path(files_path, rule_name) - if os.path.isfile(wordlist_path): - os.remove(wordlist_path) - if os.path.isfile(rule_path): - os.remove(rule_path) - - # Try to download cracker 1 - executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') - - binaryDownload = BinaryDownload(test_args) - binaryDownload.check_version(cracker_id) - - # --version - cracker = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) - - # --keyspace - chunk = Chunk() - task = Task() - task.load_task() - hashlist = Hashlist() - files = Files() - - hashlist.load_hashlist(task.get_task()['hashlistId']) - hashlist_id = task.get_task()['hashlistId'] - hashlists_path = config.get_value('hashlists-path') - - # Download required files - assert files.check_files(task.get_task()['files'], task.get_task()['taskId']) - - # Test if the files are really downloaded - assert os.path.isfile(wordlist_path) == True - assert os.path.isfile(rule_path) == True - - cracker.measure_keyspace(task, chunk) - mock_check_output.assert_called_with( - f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 ", - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # benchmark - result = cracker.run_benchmark(task.get_task()) - assert result != 0 - mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # Sending benchmark to server - query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - query['taskId'] = task.get_task()['taskId'] - query['result'] = result - query['type'] = task.get_task()['benchType'] - req = JsonRequest(query) - req.execute() - - # cracking - chunk.get_chunk(task.get_task()['taskId']) - cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) - zaps_path = config.get_value('zaps-path') - zaps_dir = f"hashlist_{hashlist_id}" - skip = str(chunk.chunk_data()['skip']) - limit = str(chunk.chunk_data()['length']) - - full_cmd = [ - "'./hashcat.bin'", - '--machine-readable', - '--quiet', - '--status', - '--restore-disable', - '--session=hashtopolis', - '--status-timer 5', - '--outfile-check-timer=5', - f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', - f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', - '--outfile-format=1,2,3,4', - f'-p 0x09', - f'-s {skip} -l {limit}', - '--potfile-disable', - '--remove', - '--remove-timer=5 ', - f'"{Path(hashlists_path, str(hashlist_id))}"', - f"-a0 '{wordlist_path}' -r '{rule_path}' ", - ' --hash-type=0 ', - ] - - full_cmd = ' '.join(full_cmd) - - mock_Popen.assert_called_with( - full_cmd, - shell=True, - stdout=-1, - stderr=-1, - 
cwd=Path(crackers_path, str(cracker_id)), - preexec_fn=mock.ANY - ) - - # Cleanup - task_obj.delete() - hashlist_v2.delete() - file_obj.delete() - file_obj2.delete() - if os.path.isfile(wordlist_path): - os.remove(wordlist_path) - if os.path.isfile(rule_path): - os.remove(rule_path) - - - @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) - @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) - @mock.patch('os.unlink', side_effect=os.unlink) - @mock.patch('os.system', side_effect=os.system) - def test_preprocessor(self, mock_system, mock_unlink, mock_check_output, mock_Popen): - if sys.platform != 'linux': - return - - # Setup session object - session = Session(requests.Session()).s - session.headers.update({'User-Agent': Initialize.get_version()}) - - # Create hashlist - p = Path(__file__).parent.joinpath('create_hashlist_001.json') - payload = json.loads(p.read_text('UTF-8')) - hashlist_v2 = Hashlist_v2(**payload) - hashlist_v2.save() - - # Create Task - p = Path(__file__).parent.joinpath('create_task_003.json') - payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist_v2._id) - obj = Task_v2(**payload) - obj.save() - preprocessor_id = payload.get('preprocessorId') - preprocessor_path = Path('preprocessors', str(preprocessor_id)) - if os.path.exists(preprocessor_path): - shutil.rmtree(preprocessor_path) - - # Cmd parameters setup - test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - - # Try to download cracker 1 - cracker_id = 1 - config = Config() - crackers_path = config.get_value('crackers-path') - - # executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') - - binaryDownload = BinaryDownload(test_args) - - task = Task() - task.load_task() - - binaryDownload.check_preprocessor(task) - assert os.path.exists(preprocessor_path) - - binaryDownload.check_version(cracker_id) - cracker = HashcatCracker(1, binaryDownload) - - # --keyspace - chunk = Chunk() - hashlist = Hashlist() - - hashlist.load_hashlist(task.get_task()['hashlistId']) - hashlist_id = task.get_task()['hashlistId'] - hashlists_path = config.get_value('hashlists-path') - - preprocessors_path = config.get_value('preprocessors-path') - assert cracker.measure_keyspace(task, chunk) == True - mock_check_output.assert_called_with( - '"./pp64.bin" --keyspace --pw-min=1 --pw-max=2 ../../crackers/1/example.dict ', - shell=True, - cwd=Path(preprocessors_path, str(preprocessor_id)), - ) - - # --benchmark - result = cracker.run_benchmark(task.get_task()) - assert int(result.split(':')[0]) > 0 - mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" --hash-type=0 example.dict -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # Sending benchmark to server - query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - query['taskId'] = task.get_task()['taskId'] - query['result'] = result - query['type'] = task.get_task()['benchType'] - req = JsonRequest(query) - req.execute() - - # cracking - chunk.get_chunk(task.get_task()['taskId']) - cracker.run_chunk(task.get_task(), 
chunk.chunk_data(), task.get_preprocessor()) - zaps_path = config.get_value('zaps-path') - zaps_dir = f"hashlist_{hashlist_id}" - skip = str(chunk.chunk_data()['skip']) - limit = str(chunk.chunk_data()['length']) - - full_cmd = [ - '"/app/src/preprocessors/1/pp64.bin"', - f'--skip {skip}', - f'--limit {limit}', - ' --pw-min=1 --pw-max=2', - '../../crackers/1/example.dict' - ' |', - "'./hashcat.bin'", - '--machine-readable', - '--quiet', - '--status', - '--remove', - '--restore-disable', - '--potfile-disable', - '--session=hashtopolis', - '--status-timer 5', - '--outfile-check-timer=5', - f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', - f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', - '--outfile-format=1,2,3,4', - f'-p 0x09', - '--remove-timer=5', - f'"{Path(hashlists_path, str(hashlist_id))}"', - ' --hash-type=0 ', - ] - - full_cmd = ' '.join(full_cmd) - - mock_Popen.assert_called_with( - full_cmd, - shell=True, - stdout=-1, - stderr=-1, - cwd=Path(crackers_path, str(cracker_id)), - preexec_fn=mock.ANY - ) - - # Cleanup - obj.delete() - hashlist_v2.delete() - - -class HashcatCrackerTestWindows(unittest.TestCase): - @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) - @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) - @mock.patch('os.unlink', side_effect=os.unlink) - @mock.patch('os.system', side_effect=os.system) - def test_correct_flow(self, mock_system, mock_unlink, mock_check_output, mock_Popen): - if sys.platform != 'win32': - return - - # Clean up cracker folder - if os.path.exists('crackers/1'): - shutil.rmtree('crackers/1') - - #TODO: Delete tasks / hashlist to ensure clean - #TODO: Verify setup agent - - # Setup session object - session = Session(requests.Session()).s - session.headers.update({'User-Agent': Initialize.get_version()}) - - # Create hashlist - p = Path(__file__).parent.joinpath('create_hashlist_001.json') - payload = json.loads(p.read_text('UTF-8')) - hashlist_v2 = Hashlist_v2(**payload) - hashlist_v2.save() - - # Create Task - for p in sorted(Path(__file__).parent.glob('create_task_001.json')): - payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist_v2._id) - obj = Task_v2(**payload) - obj.save() - - # Cmd parameters setup - test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - - # Try to download cracker 1 - cracker_id = 1 - config = Config() - crackers_path = config.get_value('crackers-path') - - binaryDownload = BinaryDownload(test_args) - binaryDownload.check_version(cracker_id) - - cracker_zip = Path(crackers_path, f'{cracker_id}.7z') - crackers_temp = Path(crackers_path, 'temp') - zip_binary = '7zr.exe' - mock_unlink.assert_called_with(cracker_zip) - - mock_system.assert_called_with(f'{zip_binary} x -o"{crackers_temp}" "{cracker_zip}"') - - executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.exe') - - # --version - cracker = HashcatCracker(1, binaryDownload) - mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) - - # --keyspace - chunk = Chunk() - task = Task() - task.load_task() - hashlist = Hashlist() - - hashlist.load_hashlist(task.get_task()['hashlistId']) - hashlist_id = task.get_task()['hashlistId'] - hashlists_path = config.get_value('hashlists-path') - - 
cracker.measure_keyspace(task, chunk) - - full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' - mock_check_output.assert_called_with( - full_cmd, - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # benchmark - hashlist_path = Path(hashlists_path, str(hashlist_id)) - hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') - result = cracker.run_benchmark(task.get_task()) - assert result != 0 - - full_cmd = [ - '"hashcat.exe"', - '--machine-readable', - '--quiet', - '--progress-only', - '--restore-disable', - '--potfile-disable', - '--session=hashtopolis', - '-p', - '0x09', - f' "{hashlist_path}"', - '-a3', - '?l?l?l?l', - ' --hash-type=0 ', - '-o', - f'"{hashlist_out_path}"' - ] - - full_cmd = ' '.join(full_cmd) - - mock_check_output.assert_called_with( - full_cmd, - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # Sending benchmark to server - query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - query['taskId'] = task.get_task()['taskId'] - query['result'] = result - query['type'] = task.get_task()['benchType'] - req = JsonRequest(query) - req.execute() - - # cracking - chunk.get_chunk(task.get_task()['taskId']) - cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) - zaps_path = config.get_value('zaps-path') - zaps_dir = f"hashlist_{hashlist_id}" - skip = str(chunk.chunk_data()['skip']) - limit = str(chunk.chunk_data()['length']) - - full_cmd = [ - '"hashcat.exe"', - '--machine-readable', - '--quiet', - '--status', - '--restore-disable', - '--session=hashtopolis', - '--status-timer 5', - '--outfile-check-timer=5', - f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', - f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', - '--outfile-format=1,2,3,4', - f'-p 0x09', - f'-s {skip} -l {limit}', - '--potfile-disable', - '--remove', - '--remove-timer=5 ', - f'"{Path(hashlists_path, str(hashlist_id))}"', - '-a3 ?l?l?l?l ', - ' --hash-type=0 ', - ] - - full_cmd = ' '.join(full_cmd) - - mock_Popen.assert_called_with( - full_cmd, - shell=True, - stdout=-1, - stderr=-1, - cwd=Path(crackers_path, str(cracker_id)), - ) - - # Cleanup - obj.delete() - hashlist_v2.delete() - - @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) - @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) - def test_runtime_benchmark(self, mock_check_output, moch_popen): - if sys.platform != 'win32': - return - - # Setup session object - session = Session(requests.Session()).s - session.headers.update({'User-Agent': Initialize.get_version()}) - - # Create hashlist - p = Path(__file__).parent.joinpath('create_hashlist_001.json') - payload = json.loads(p.read_text('UTF-8')) - hashlist_v2 = Hashlist_v2(**payload) - hashlist_v2.save() - - # Create Task - for p in sorted(Path(__file__).parent.glob('create_task_002.json')): - payload = json.loads(p.read_text('UTF-8')) - payload['hashlistId'] = int(hashlist_v2._id) - obj = Task_v2(**payload) - obj.save() - - # Cmd parameters setup - test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) - - # Try to download cracker 1 - cracker_id = 1 - config = Config() - crackers_path = config.get_value('crackers-path') - - binaryDownload = BinaryDownload(test_args) - 
binaryDownload.check_version(cracker_id) - - # --version - cracker = HashcatCracker(1, binaryDownload) - - # --keyspace - chunk = Chunk() - task = Task() - task.load_task() - hashlist = Hashlist() - - hashlist.load_hashlist(task.get_task()['hashlistId']) - hashlist_id = task.get_task()['hashlistId'] - hashlists_path = config.get_value('hashlists-path') - - cracker.measure_keyspace(task, chunk) - - full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' - mock_check_output.assert_called_with( - full_cmd, - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stderr=-2 - ) - - # benchmark - hashlist_path = Path(hashlists_path, str(hashlist_id)) - hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') - result = cracker.run_benchmark(task.get_task()) - assert result != 0 - - full_cmd = [ - '"hashcat.exe"', - '--machine-readable', - '--quiet', - '--runtime=30', - '--restore-disable', - '--potfile-disable', - '--session=hashtopolis', - '-p', - '0x09', - f' "{hashlist_path}"', - '-a3 ?l?l?l?l', - ' --hash-type=0 ', - '-o', - f'"{hashlist_out_path}"' - ] - - full_cmd = ' '.join(full_cmd) - - moch_popen.assert_called_with( - full_cmd, - shell=True, - cwd=Path(crackers_path, str(cracker_id)), - stdout=-1, - stderr=-1 - ) - - task_id = task.get_task()['taskId'] - - # Sending benchmark to server - query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) - query['taskId'] = task_id - query['result'] = result - query['type'] = task.get_task()['benchType'] - req = JsonRequest(query) - req.execute() - - assert chunk.get_chunk(task_id) == 1 - - # Cleanup - obj.delete() - hashlist_v2.delete() - -if __name__ == '__main__': - unittest.main() diff --git a/tests/test_hashcat_files.py b/tests/test_hashcat_files.py new file mode 100644 index 0000000..d40b3dd --- /dev/null +++ b/tests/test_hashcat_files.py @@ -0,0 +1,229 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 + + +class HashcatFiles(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, 
de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Set config and variables + cracker_id = 1 + config = Config() + + crackers_path = config.get_value('crackers-path') + files_path = config.get_value('files-path') + + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Upload wordlist + stamp = datetime.datetime.now().isoformat() + filename = f'wordlist-{stamp}.txt' + file_import = FileImport_v2() + text = '12345678\n123456\nprincess\n'.encode('utf-8') + fs = BytesIO(text) + file_import.do_upload(filename, fs) + + # Create wordlist + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = filename + payload['filename'] = filename + payload['fileType'] = 0 + file_obj = File_v2(**payload) + file_obj.save() + + wordlist_id = file_obj.id + wordlist_name = file_obj.filename + + # Upload Rule file + stamp = datetime.datetime.now().isoformat() + filename = f'rule-{stamp}.txt' + file_import = FileImport_v2() + text = ':\n$1\n$2\n'.encode('utf-8') + fs = BytesIO(text) + file_import.do_upload(filename, fs) + + # Create rule file + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = filename + payload['filename'] = filename + payload['fileType'] = 1 + file_obj2 = File_v2(**payload) + file_obj2.save() + + rule_id = file_obj2.id + rule_name = file_obj2.filename + + # Create task + p = Path(__file__).parent.joinpath('create_task_004.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + payload['attackCmd'] = f'#HL# -a0 {wordlist_name} -r {rule_name}' + payload['files'] = [wordlist_id, rule_id] + task_obj = Task_v2(**payload) + task_obj.save() + + # Delete files locally if they are already downloaded in a prev run + wordlist_path = Path(files_path, wordlist_name) + rule_path = Path(files_path, rule_name) + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + if os.path.isfile(rule_path): + os.remove(rule_path) + + # Try to download cracker 1 + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + files = Files() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + # Download required files + assert files.check_files(task.get_task()['files'], task.get_task()['taskId']) + + # Test if the files are really downloaded + assert os.path.isfile(wordlist_path) == True + assert os.path.isfile(rule_path) == True + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 ", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = 
cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + f"-a0 '{wordlist_path}' -r '{rule_path}' ", + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + task_obj.delete() + hashlist_v2.delete() + file_obj.delete() + file_obj2.delete() + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + if os.path.isfile(rule_path): + os.remove(rule_path) + + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/test_hashcat_preprocessor.py b/tests/test_hashcat_preprocessor.py new file mode 100644 index 0000000..776ae8d --- /dev/null +++ b/tests/test_hashcat_preprocessor.py @@ -0,0 +1,171 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 + + +class HashcatPreprocessor(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + 
@mock.patch('os.system', side_effect=os.system) + def test_preprocessor_linux(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + p = Path(__file__).parent.joinpath('create_task_003.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + preprocessor_id = payload.get('preprocessorId') + preprocessor_path = Path('preprocessors', str(preprocessor_id)) + if os.path.exists(preprocessor_path): + shutil.rmtree(preprocessor_path) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + # executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + + task = Task() + task.load_task() + + binaryDownload.check_preprocessor(task) + assert os.path.exists(preprocessor_path) + + binaryDownload.check_version(cracker_id) + cracker = HashcatCracker(1, binaryDownload) + + # --keyspace + chunk = Chunk() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + preprocessors_path = config.get_value('preprocessors-path') + assert cracker.measure_keyspace(task, chunk) == True + mock_check_output.assert_called_with( + '"./pp64.bin" --keyspace --pw-min=1 --pw-max=2 ../../crackers/1/example.dict ', + shell=True, + cwd=Path(preprocessors_path, str(preprocessor_id)), + ) + + # --benchmark + result = cracker.run_benchmark(task.get_task()) + assert int(result.split(':')[0]) > 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" --hash-type=0 example.dict -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + '"/app/src/preprocessors/1/pp64.bin"', + f'--skip {skip}', + f'--limit {limit}', + ' --pw-min=1 --pw-max=2', + '../../crackers/1/example.dict' + ' |', + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--remove', + 
'--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + '--remove-timer=5', + f'"{Path(hashlists_path, str(hashlist_id))}"', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_hashcat_runtime.py b/tests/test_hashcat_runtime.py new file mode 100644 index 0000000..3a1a164 --- /dev/null +++ b/tests/test_hashcat_runtime.py @@ -0,0 +1,144 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 + + + +class HashcatRuntime(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + def test_runtime_windows(self, mock_check_output, moch_popen): + if sys.platform != 'win32': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + for p in sorted(Path(__file__).parent.glob('create_task_002.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = 
config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + + full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' + mock_check_output.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + hashlist_path = Path(hashlists_path, str(hashlist_id)) + hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--runtime=30', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + f' "{hashlist_path}"', + '-a3 ?l?l?l?l', + ' --hash-type=0 ', + '-o', + f'"{hashlist_out_path}"' + ] + + full_cmd = ' '.join(full_cmd) + + moch_popen.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stdout=-1, + stderr=-1 + ) + + task_id = task.get_task()['taskId'] + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task_id + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + assert chunk.get_chunk(task_id) == 1 + + # Cleanup + obj.delete() + hashlist_v2.delete() + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_hashcat_simple.py b/tests/test_hashcat_simple.py new file mode 100644 index 0000000..b2e3ba3 --- /dev/null +++ b/tests/test_hashcat_simple.py @@ -0,0 +1,333 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 + +class HashcatSimple(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_simple_linux(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + # Clean up cracker folder + if os.path.exists('crackers/1'): + shutil.rmtree('crackers/1') + + #TODO: Delete tasks / hashlist to ensure clean + #TODO: Verify setup agent + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + for p in 
sorted(Path(__file__).parent.glob('create_task_001.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + cracker_zip = Path(crackers_path, f'{cracker_id}.7z') + crackers_temp = Path(crackers_path, 'temp') + zip_binary = './7zr' + mock_unlink.assert_called_with(cracker_zip) + + mock_system.assert_called_with(f"{zip_binary} x -o'{crackers_temp}' '{cracker_zip}'") + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + '-a3 ?l?l?l?l ', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() + + 
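# Note: the Windows variant below mirrors test_simple_linux, but it asserts the
# "hashcat.exe" binary, the plain "7zr.exe" extractor, double-quoted Windows paths,
# and a subprocess.Popen call without preexec_fn (process groups are POSIX-only).
# A possible alternative to the early-return platform gate used throughout these
# tests, assuming pytest drives this unittest.TestCase (requirements-tests.txt
# already lists pytest), is the skipif marker; a minimal sketch:
#
#     @pytest.mark.skipif(sys.platform != 'win32', reason='Windows-only integration test')
#     def test_simple_windows(self, mock_system, mock_unlink, mock_check_output, mock_Popen):
#         ...
#
# The early return silently passes on other platforms; the marker reports a skip instead.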
@mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_simple_windows(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'win32': + return + + # Clean up cracker folder + if os.path.exists('crackers/1'): + shutil.rmtree('crackers/1') + + #TODO: Delete tasks / hashlist to ensure clean + #TODO: Verify setup agent + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + for p in sorted(Path(__file__).parent.glob('create_task_001.json')): + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + config = Config() + crackers_path = config.get_value('crackers-path') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + cracker_zip = Path(crackers_path, f'{cracker_id}.7z') + crackers_temp = Path(crackers_path, 'temp') + zip_binary = '7zr.exe' + mock_unlink.assert_called_with(cracker_zip) + + mock_system.assert_called_with(f'{zip_binary} x -o"{crackers_temp}" "{cracker_zip}"') + + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.exe') + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + + full_cmd = f'"hashcat.exe" --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 ' + mock_check_output.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + hashlist_path = Path(hashlists_path, str(hashlist_id)) + hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--progress-only', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + f' "{hashlist_path}"', + '-a3', + '?l?l?l?l', + ' --hash-type=0 ', + '-o', + f'"{hashlist_out_path}"' + ] + + full_cmd = ' '.join(full_cmd) + + mock_check_output.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = 
JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + '-a3 ?l?l?l?l ', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() + +if __name__ == '__main__': + unittest.main() \ No newline at end of file From c8b191e5d9adc5f7c3bd0fdfea99b6a6d035e681 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 11:01:01 +0100 Subject: [PATCH 47/77] Add updated version of hashtopolis apiv2 module --- tests/hashtopolis.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/hashtopolis.py b/tests/hashtopolis.py index 1050167..7805c85 100644 --- a/tests/hashtopolis.py +++ b/tests/hashtopolis.py @@ -38,7 +38,7 @@ def print_to_log(*args): cls_registry = {} -class Config(object): +class HashtopolisConfig(object): def __init__(self): # Request access TOKEN, used throughout the test load_order = confidence.DEFAULT_LOAD_ORDER + (str(Path(__file__).parent.joinpath('{name}.{extension}')),) @@ -179,7 +179,7 @@ class ManagerBase(type): @classmethod def get_conn(cls): if cls.config is None: - cls.config = Config() + cls.config = HashtopolisConfig() if cls._model_uri not in cls.conn: cls.conn[cls._model_uri] = HashtopolisConnector(cls._model_uri, cls.config) @@ -369,6 +369,10 @@ def __repr__(self): class CrackerType(Model, uri="/ui/crackertypes"): def __repr__(self): return self._self + +class Config(Model, uri="/ui/configs"): + def __repr__(self): + return self._self class File(Model, uri="/ui/files"): def __repr__(self): @@ -377,7 +381,7 @@ def __repr__(self): class FileImport(HashtopolisConnector): def __init__(self): - super().__init__("/ui/files/import", Config()) + super().__init__("/ui/files/import", HashtopolisConfig()) def __repr__(self): return self._self From f1dcff39ee40731c282b1e79426dc4ecff2ffa89 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 11:31:53 +0100 Subject: [PATCH 48/77] Add new version o hashtopolis apiv2 module --- tests/hashtopolis.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/hashtopolis.py b/tests/hashtopolis.py index 7805c85..ef2a061 100644 --- a/tests/hashtopolis.py +++ b/tests/hashtopolis.py @@ -341,6 +341,10 @@ def id(self): return self._id +class Agent(Model, uri="/ui/agents"): + def __repr__(self): + return self._self + class Task(Model, uri="/ui/tasks"): def __repr__(self): return self._self From fc5fa57b2ce8398a490717c01d44cfc032151e5e Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 11:42:19 +0100 Subject: [PATCH 49/77] Add test for brain attacks --- htpclient/hashcat_cracker.py | 2 +- tests/create_hashlist_002.json | 17 +++ 
tests/create_task_005.json | 23 ++++ tests/test_hashcat_brain.py | 194 +++++++++++++++++++++++++++++++++ 4 files changed, 235 insertions(+), 1 deletion(-) create mode 100644 tests/create_hashlist_002.json create mode 100644 tests/create_task_005.json create mode 100644 tests/test_hashcat_brain.py diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index c2dd00e..03f378e 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -45,7 +45,7 @@ def __init__(self, cracker_id, binary_download): cmd = [str(self.executable_path), "--version"] try: - logging.debug(f"CALL: {''.join(cmd)}") + logging.debug(f"CALL: {' '.join(cmd)}") output = subprocess.check_output(cmd, cwd=self.cracker_path) except subprocess.CalledProcessError as e: logging.error("Error during version detection: " + str(e)) diff --git a/tests/create_hashlist_002.json b/tests/create_hashlist_002.json new file mode 100644 index 0000000..245eff1 --- /dev/null +++ b/tests/create_hashlist_002.json @@ -0,0 +1,17 @@ +{ + "name": "Hashlist-md5sum-brain", + "hashTypeId": 1, + "format": 0, + "separator": ";", + "isSalted": false, + "isHexSalt": false, + "accessGroupId": 1, + "useBrain": true, + "brainFeatures": 3, + "notes": "gj", + "sourceType": "paste", + "sourceData": "Y2MwM2U3NDdhNmFmYmJjYmY4YmU3NjY4YWNmZWJlZTUK", + "hashCount": 0, + "isArchived": false, + "isSecret": false +} diff --git a/tests/create_task_005.json b/tests/create_task_005.json new file mode 100644 index 0000000..de4a925 --- /dev/null +++ b/tests/create_task_005.json @@ -0,0 +1,23 @@ +{ + "attackCmd": "#HL# -a3 ?l?l?l?l", + "chunkSize": 1000, + "chunkTime": 600, + "color": "7C6EFF", + "crackerBinaryId": 1, + "crackerBinaryTypeId": 1, + "forcePipe": true, + "files": [], + "isArchived": false, + "isCpuTask": true, + "isSmall": false, + "maxAgents": 112, + "notes": "example-note", + "preprocessorCommand": "", + "priority": 10, + "skipKeyspace": 5000, + "staticChunks": 2, + "statusTimer": 5, + "taskName": "Example - Rijmen and Daemen", + "useNewBench": true, + "preprocessorId": 0 +} diff --git a/tests/test_hashcat_brain.py b/tests/test_hashcat_brain.py new file mode 100644 index 0000000..69af4ee --- /dev/null +++ b/tests/test_hashcat_brain.py @@ -0,0 +1,194 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis import File as File_v2 +from tests.hashtopolis import Config as Config_v2 +from tests.hashtopolis import Agent as Agent_v2 + + + +class HashcatBrain(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) 
+ @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_brain_linux(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Set config and variables + cracker_id = 1 + config = Config() + + crackers_path = config.get_value('crackers-path') + + # Setting Brain configuration + set_config = { + 'hashcatBrainEnable': '1', + 'hashcatBrainHost': '127.0.0.1', + 'hashcatBrainPort': '8080', + 'hashcatBrainPass': 'password', + } + + for k,v in set_config.items(): + config_item = Config_v2.objects.get(item=k) + config_item.value = v + config_item.save() + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_002.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create task + p = Path(__file__).parent.joinpath('create_task_005.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + task_obj = Task_v2(**payload) + task_obj.save() + + # Try to download cracker 1 + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + "'./hashcat.bin' --keyspace --quiet -a3 ?l?l?l?l --hash-type=0 -S", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -S -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', 
+ '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--brain-client', + '--brain-host', '127.0.0.1', + '--brain-port', '8080', + '--brain-password', 'password', + '--brain-client-features', '3 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + '-a3 ?l?l?l?l ', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + task_obj.delete() + hashlist_v2.delete() + + # Revert config + set_config = { + 'hashcatBrainEnable': '0', + 'hashcatBrainHost': '', + 'hashcatBrainPort': '', + 'hashcatBrainPass': '', + } + + for k,v in set_config.items(): + config_item = Config_v2.objects.get(item=k) + config_item.value = v + config_item.save() + + # Re-enable agents, because the the hashcat command will fail + agent = Agent_v2.objects.get(token=config.get_value('token')) + agent.isActive = True + agent.save() From 08f9401458f7e10d530b98500c0d74414fd6ccaf Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 13:43:42 +0100 Subject: [PATCH 50/77] Fix linux 7z wordlist not being extracted --- htpclient/files.py | 2 +- requirements-tests.txt | 3 +- tests/test_hashcat_files_7z.py | 211 +++++++++++++++++++++++++++++++++ 3 files changed, 214 insertions(+), 2 deletions(-) create mode 100644 tests/test_hashcat_files_7z.py diff --git a/htpclient/files.py b/htpclient/files.py index 1570a2f..4ac12f9 100644 --- a/htpclient/files.py +++ b/htpclient/files.py @@ -111,6 +111,6 @@ def check_files(self, files, task_id): cmd = f'7zr{Initialize.get_os_extension()} x -aoa -o"{files_path}" -y "{file_localpath}"' else: # Linux - cmd = f'7zr{Initialize.get_os_extension()} x -aoa -o"{files_path}" -y "{file_localpath}"' + cmd = f"./7zr{Initialize.get_os_extension()} x -aoa -o'{files_path}' -y '{file_localpath}'" os.system(cmd) return True diff --git a/requirements-tests.txt b/requirements-tests.txt index 9047fe6..6a99ace 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -1,3 +1,4 @@ pytest confidence -tuspy \ No newline at end of file +tuspy +py7zr \ No newline at end of file diff --git a/tests/test_hashcat_files_7z.py b/tests/test_hashcat_files_7z.py new file mode 100644 index 0000000..3a5f504 --- /dev/null +++ b/tests/test_hashcat_files_7z.py @@ -0,0 +1,211 @@ +import pytest +from unittest import mock +import unittest +from unittest.mock import MagicMock +import os +import subprocess +import shutil +import requests +import json +from pathlib import Path +from argparse import Namespace +import sys +import datetime +from io import BytesIO + +from htpclient.hashcat_cracker import HashcatCracker +from htpclient.binarydownload import BinaryDownload +from htpclient.session import Session +from htpclient.config import Config +from htpclient.initialize import Initialize +from htpclient.chunk import Chunk +from htpclient.hashlist import Hashlist +from htpclient.task import Task +from htpclient.dicts import copy_and_set_token +from htpclient.dicts import dict_sendBenchmark +from htpclient.jsonRequest import JsonRequest +from htpclient.files import Files + +import py7zr + +from tests.hashtopolis import Hashlist as Hashlist_v2 +from tests.hashtopolis import Task as Task_v2 +from tests.hashtopolis import FileImport as FileImport_v2 +from tests.hashtopolis 
import File as File_v2 + +class HashcatFiles7z(unittest.TestCase): + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'linux': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Set config and variables + cracker_id = 1 + config = Config() + + crackers_path = config.get_value('crackers-path') + files_path = config.get_value('files-path') + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create 7z file + stamp = datetime.datetime.now().isoformat() + wordlist = f'wordlist-{stamp}.txt' + sevenzip = f'wordlist-{stamp}.7z' + + with open(wordlist, 'w') as file_obj: + file_obj.write('12345678\n123456\nprincess\n') + + with py7zr.SevenZipFile(sevenzip, 'w') as z: + z.writeall(f'./{wordlist}') + + # Upload wordlist + file_import = FileImport_v2() + with open(sevenzip, 'rb') as fs: + file_import.do_upload(sevenzip, fs) + + # Create wordlist + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = sevenzip + payload['filename'] = sevenzip + payload['fileType'] = 0 + file_obj = File_v2(**payload) + file_obj.save() + + wordlist_id = file_obj.id + + # Create task + p = Path(__file__).parent.joinpath('create_task_004.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + payload['attackCmd'] = f'#HL# -a0 {wordlist}' + payload['files'] = [wordlist_id] + task_obj = Task_v2(**payload) + task_obj.save() + + # Cleanup files + os.remove(wordlist) + os.remove(sevenzip) + wordlist_path = Path(files_path, wordlist) + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + + # Try to download cracker 1 + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + files = Files() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + # Download required files + assert files.check_files(task.get_task()['files'], task.get_task()['taskId']) + + file_path = Path(files_path, sevenzip) + + mock_system.assert_called_with(f"./7zr{Initialize.get_os_extension()} x -aoa -o'{files_path}' -y '{file_path}'") + + # Test if the files are really downloaded + assert os.path.isfile(wordlist_path) == True + + cracker.measure_keyspace(task, chunk) + 
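# The assertion below pins down the exact shell string measure_keyspace() is expected
# to hand to subprocess.check_output from the cracker directory, roughly:
#
#     './hashcat.bin' --keyspace --quiet -a0 '<files-path>/wordlist-<stamp>.txt' --hash-type=0
#
# i.e. the wordlist named in attackCmd has been resolved to the file extracted from the
# 7z archive under files-path; the quoting of that path is exactly what this test guards.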
mock_check_output.assert_called_with( + f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' --hash-type=0 ", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + "'./hashcat.bin'", + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + f"-a0 '{wordlist_path}' ", + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + preexec_fn=mock.ANY + ) + + # Cleanup + task_obj.delete() + hashlist_v2.delete() + file_obj.delete() + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) From 984e322e7d98adcd9ac4f21efe83627e0afb7ba2 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 14:52:05 +0100 Subject: [PATCH 51/77] Fix wordlist and 7z on windows --- htpclient/helpers.py | 2 +- tests/test_hashcat_files_7z.py | 177 +++++++++++++++++++++++++++++ tests/test_hashcat_preprocessor.py | 155 +++++++++++++++++++++++++ 3 files changed, 333 insertions(+), 1 deletion(-) diff --git a/htpclient/helpers.py b/htpclient/helpers.py index a622d3f..eb4d1df 100644 --- a/htpclient/helpers.py +++ b/htpclient/helpers.py @@ -123,7 +123,7 @@ def update_files(command, prince=False): continue path = Path(config.get_value('files-path'), part) if os.path.exists(path): - ret.append(f"'{path}'") + ret.append(f'"{path}"') else: ret.append(str(part)) return " %s " % " ".join(ret) diff --git a/tests/test_hashcat_files_7z.py b/tests/test_hashcat_files_7z.py index 3a5f504..6c72451 100644 --- a/tests/test_hashcat_files_7z.py +++ b/tests/test_hashcat_files_7z.py @@ -11,6 +11,7 @@ from argparse import Namespace import sys import datetime +import time from io import BytesIO from htpclient.hashcat_cracker import HashcatCracker @@ -209,3 +210,179 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ file_obj.delete() if os.path.isfile(wordlist_path): os.remove(wordlist_path) + + + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', 
side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_files_7z_windows(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'win32': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Set config and variables + cracker_id = 1 + config = Config() + + crackers_path = config.get_value('crackers-path') + files_path = config.get_value('files-path') + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create 7z file + stamp = int(time.time()) + wordlist = f'wordlist-{stamp}.txt' + sevenzip = f'wordlist-{stamp}.7z' + + with open(wordlist, 'w') as file_obj: + file_obj.write('12345678\n123456\nprincess\n') + + with py7zr.SevenZipFile(sevenzip, 'w') as z: + z.writeall(f'./{wordlist}') + + # Upload wordlist + file_import = FileImport_v2() + with open(sevenzip, 'rb') as fs: + file_import.do_upload(sevenzip, fs) + + # Create wordlist + p = Path(__file__).parent.joinpath('create_file_001.json') + payload = json.loads(p.read_text('UTF-8')) + payload['sourceData'] = sevenzip + payload['filename'] = sevenzip + payload['fileType'] = 0 + file_obj = File_v2(**payload) + file_obj.save() + + wordlist_id = file_obj.id + + # Create task + p = Path(__file__).parent.joinpath('create_task_004.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + payload['attackCmd'] = f'#HL# -a0 {wordlist}' + payload['files'] = [wordlist_id] + task_obj = Task_v2(**payload) + task_obj.save() + + # Cleanup files + os.remove(wordlist) + os.remove(sevenzip) + wordlist_path = Path(files_path, wordlist) + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) + + # Try to download cracker 1 + executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.exe') + + binaryDownload = BinaryDownload(test_args) + binaryDownload.check_version(cracker_id) + + # --version + cracker = HashcatCracker(1, binaryDownload) + mock_check_output.assert_called_with([str(executeable_path), '--version'], cwd=Path(crackers_path, str(cracker_id))) + + # --keyspace + chunk = Chunk() + task = Task() + task.load_task() + hashlist = Hashlist() + files = Files() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + # Download required files + assert files.check_files(task.get_task()['files'], task.get_task()['taskId']) + + file_path = Path(files_path, sevenzip) + + mock_system.assert_called_with(f'7zr{Initialize.get_os_extension()} x -aoa -o"{files_path}" -y "{file_path}"') + + # Test if the files are really downloaded + assert os.path.isfile(wordlist_path) == True + + cracker.measure_keyspace(task, chunk) + mock_check_output.assert_called_with( + f'"hashcat.exe" --keyspace --quiet -a0 "{wordlist_path}" --hash-type=0 ', + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # benchmark + result = 
cracker.run_benchmark(task.get_task()) + assert result != 0 + mock_check_output.assert_called_with( + f'"hashcat.exe" --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 "{wordlist_path}" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"', + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--status', + '--restore-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + f'-s {skip} -l {limit}', + '--potfile-disable', + '--remove', + '--remove-timer=5 ', + f'"{Path(hashlists_path, str(hashlist_id))}"', + f'-a0 "{wordlist_path}" ', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + ) + + # Cleanup + task_obj.delete() + hashlist_v2.delete() + file_obj.delete() + if os.path.isfile(wordlist_path): + os.remove(wordlist_path) \ No newline at end of file diff --git a/tests/test_hashcat_preprocessor.py b/tests/test_hashcat_preprocessor.py index 776ae8d..e398391 100644 --- a/tests/test_hashcat_preprocessor.py +++ b/tests/test_hashcat_preprocessor.py @@ -166,6 +166,161 @@ def test_preprocessor_linux(self, mock_system, mock_unlink, mock_check_output, m obj.delete() hashlist_v2.delete() + @mock.patch('subprocess.Popen', side_effect=subprocess.Popen) + @mock.patch('subprocess.check_output', side_effect=subprocess.check_output) + @mock.patch('os.unlink', side_effect=os.unlink) + @mock.patch('os.system', side_effect=os.system) + def test_preprocessor_windows(self, mock_system, mock_unlink, mock_check_output, mock_Popen): + if sys.platform != 'win32': + return + + # Setup session object + session = Session(requests.Session()).s + session.headers.update({'User-Agent': Initialize.get_version()}) + + # Create hashlist + p = Path(__file__).parent.joinpath('create_hashlist_001.json') + payload = json.loads(p.read_text('UTF-8')) + hashlist_v2 = Hashlist_v2(**payload) + hashlist_v2.save() + + # Create Task + p = Path(__file__).parent.joinpath('create_task_003.json') + payload = json.loads(p.read_text('UTF-8')) + payload['hashlistId'] = int(hashlist_v2._id) + obj = Task_v2(**payload) + obj.save() + + config = Config() + preprocessor_id = payload.get('preprocessorId') + preprocessor_path = Path(config.get_value('preprocessors-path'), str(preprocessor_id)) + preprocessor_path_binary = preprocessor_path / 'pp64.exe' + if os.path.exists(preprocessor_path): + shutil.rmtree(preprocessor_path) + + # Cmd parameters setup + test_args = Namespace( cert=None, cpu_only=False, crackers_path=None, de_register=False, 
debug=True, disable_update=False, files_path=None, hashlists_path=None, number_only=False, preprocessors_path=None, url='http://hashtopolis/api/server.php', version=False, voucher='devvoucher', zaps_path=None) + + # Try to download cracker 1 + cracker_id = 1 + crackers_path = config.get_value('crackers-path') + + # executeable_path = Path(crackers_path, str(cracker_id), 'hashcat.bin') + + binaryDownload = BinaryDownload(test_args) + + task = Task() + task.load_task() + + binaryDownload.check_preprocessor(task) + assert os.path.exists(preprocessor_path) + + binaryDownload.check_version(cracker_id) + cracker = HashcatCracker(1, binaryDownload) + + # --keyspace + chunk = Chunk() + hashlist = Hashlist() + + hashlist.load_hashlist(task.get_task()['hashlistId']) + hashlist_id = task.get_task()['hashlistId'] + hashlists_path = config.get_value('hashlists-path') + + preprocessors_path = config.get_value('preprocessors-path') + assert cracker.measure_keyspace(task, chunk) == True + mock_check_output.assert_called_with( + '"pp64.exe" --keyspace --pw-min=1 --pw-max=2 ../../crackers/1/example.dict ', + shell=True, + cwd=Path(preprocessors_path, str(preprocessor_id)), + ) + + # benchmark + hashlist_path = Path(hashlists_path, str(hashlist_id)) + hashlist_out_path = Path(hashlists_path, f'{hashlist_id}.out') + result = cracker.run_benchmark(task.get_task()) + assert result != 0 + + full_cmd = [ + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--progress-only', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '-p', + '0x09', + f' "{hashlist_path}"', + ' --hash-type=0 ', + 'example.dict', + '-o', + f'"{hashlist_out_path}"' + ] + + full_cmd = ' '.join(full_cmd) + mock_Popen.assert_called_with( + full_cmd, + shell=True, + cwd=Path(crackers_path, str(cracker_id)), + stdout=-1, + stderr=-2 + ) + + # Sending benchmark to server + query = copy_and_set_token(dict_sendBenchmark, config.get_value('token')) + query['taskId'] = task.get_task()['taskId'] + query['result'] = result + query['type'] = task.get_task()['benchType'] + req = JsonRequest(query) + req.execute() + + # cracking + chunk.get_chunk(task.get_task()['taskId']) + cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor()) + zaps_path = config.get_value('zaps-path') + zaps_dir = f"hashlist_{hashlist_id}" + skip = str(chunk.chunk_data()['skip']) + limit = str(chunk.chunk_data()['length']) + + full_cmd = [ + f'"{preprocessor_path_binary}"', + f'--skip {skip}', + f'--limit {limit}', + ' --pw-min=1 --pw-max=2', + '../../crackers/1/example.dict' + ' |', + '"hashcat.exe"', + '--machine-readable', + '--quiet', + '--status', + '--remove', + '--restore-disable', + '--potfile-disable', + '--session=hashtopolis', + '--status-timer 5', + '--outfile-check-timer=5', + f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', + f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', + '--outfile-format=1,2,3,4', + f'-p 0x09', + '--remove-timer=5', + f'"{Path(hashlists_path, str(hashlist_id))}"', + ' --hash-type=0 ', + ] + + full_cmd = ' '.join(full_cmd) + + mock_Popen.assert_called_with( + full_cmd, + shell=True, + stdout=-1, + stderr=-1, + cwd=Path(crackers_path, str(cracker_id)), + ) + + # Cleanup + obj.delete() + hashlist_v2.delete() if __name__ == '__main__': unittest.main() From 7eb51655e8ea765d236cddc24b8acbc02722ec8f Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 27 Feb 2023 15:05:48 +0100 Subject: [PATCH 52/77] Fix tests for linux --- tests/test_hashcat_files.py | 6 +++--- 
tests/test_hashcat_files_7z.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_hashcat_files.py b/tests/test_hashcat_files.py index d40b3dd..d17b096 100644 --- a/tests/test_hashcat_files.py +++ b/tests/test_hashcat_files.py @@ -149,7 +149,7 @@ def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop cracker.measure_keyspace(task, chunk) mock_check_output.assert_called_with( - f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 ", + f"'./hashcat.bin' --keyspace --quiet -a0 \"{wordlist_path}\" -r \"{rule_path}\" --hash-type=0 ", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -159,7 +159,7 @@ def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' -r '{rule_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" -r \"{rule_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -199,7 +199,7 @@ def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop '--remove', '--remove-timer=5 ', f'"{Path(hashlists_path, str(hashlist_id))}"', - f"-a0 '{wordlist_path}' -r '{rule_path}' ", + f'-a0 "{wordlist_path}" -r "{rule_path}" ', ' --hash-type=0 ', ] diff --git a/tests/test_hashcat_files_7z.py b/tests/test_hashcat_files_7z.py index 6c72451..100cf21 100644 --- a/tests/test_hashcat_files_7z.py +++ b/tests/test_hashcat_files_7z.py @@ -139,7 +139,7 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ cracker.measure_keyspace(task, chunk) mock_check_output.assert_called_with( - f"'./hashcat.bin' --keyspace --quiet -a0 '{wordlist_path}' --hash-type=0 ", + f"'./hashcat.bin' --keyspace --quiet -a0 \"{wordlist_path}\" --hash-type=0 ", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -149,7 +149,7 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 '{wordlist_path}' --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -189,7 +189,7 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ '--remove', '--remove-timer=5 ', f'"{Path(hashlists_path, str(hashlist_id))}"', - f"-a0 '{wordlist_path}' ", + f'-a0 "{wordlist_path}" ', ' --hash-type=0 ', ] From 91f95f6a6fa3dbe599918fe9b14df4df5950cff6 Mon Sep 17 00:00:00 2001 From: Romke van 
Dijk Date: Mon, 27 Feb 2023 15:09:40 +0100 Subject: [PATCH 53/77] Update changelog --- changelog.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/changelog.md b/changelog.md index 687dc54..bda3501 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,11 @@ +## v0.7.0 -> v0.7.1 + +### Bugfixes + +* Agent working again on Windows, all paths have been converted to Pathlib.Path and using the correct quoting of arguments. +* 7z wordlist not extracting correctly on Windows. +* preprocessor not working correctly on both Windows and Linux. + ## v0.6.1 -> v0.7.0 ### Enhancements From bd4e4dcaf93353170497ac477bb8c75b0875bcad Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Thu, 2 Mar 2023 09:12:21 +0100 Subject: [PATCH 54/77] Fix changing 0x09 to real tab to support older <6.2.5 hashcat --- htpclient/hashcat_cracker.py | 8 ++++---- tests/test_hashcat_brain.py | 4 ++-- tests/test_hashcat_files.py | 4 ++-- tests/test_hashcat_files_7z.py | 8 ++++---- tests/test_hashcat_preprocessor.py | 8 ++++---- tests/test_hashcat_runtime.py | 2 +- tests/test_hashcat_simple.py | 8 ++++---- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 03f378e..95cd863 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -100,7 +100,7 @@ def build_command(self, task, chunk): args.append(f'--outfile-check-dir="{zaps_file}"') args.append(f'-o "{output_file}"') args.append(f'--outfile-format={self.get_outfile_format()}') - args.append('-p 0x09') + args.append('-p "\t"') args.append(f"-s {chunk['skip']}") args.append(f"-l {chunk['length']}") @@ -206,7 +206,7 @@ def build_preprocessor_command(self, task, chunk, preprocessor): post_args.append(f'--outfile-check-dir="{zaps_file}"') post_args.append(f'-o "{output_file}"') post_args.append(f'--outfile-format={self.get_outfile_format()}') - post_args.append('-p 0x09') + post_args.append('-p "\t"') post_args.append(f"--remove-timer={task['statustimer']}") post_args.append(f'"{hashlist_file}"') @@ -561,7 +561,7 @@ def run_benchmark(self, task): args.append('--potfile-disable') args.append('--session=hashtopolis') args.append('-p') - args.append('0x09') + args.append('"\t"') @@ -631,7 +631,7 @@ def run_speed_benchmark(self, task): args.append('--potfile-disable') args.append('--session=hashtopolis') args.append('-p') - args.append('0x09') + args.append('"\t"') hashlist_path = Path(self.config.get_value('hashlists-path'), str(task['hashlistId'])) hashlist_out_path = Path(self.config.get_value('hashlists-path'), f"{str(task['hashlistId'])}.out") diff --git a/tests/test_hashcat_brain.py b/tests/test_hashcat_brain.py index 69af4ee..ee2a741 100644 --- a/tests/test_hashcat_brain.py +++ b/tests/test_hashcat_brain.py @@ -114,7 +114,7 @@ def test_brain_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -S -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -S -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), 
stderr=-2 @@ -148,7 +148,7 @@ def test_brain_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--brain-client', '--brain-host', '127.0.0.1', diff --git a/tests/test_hashcat_files.py b/tests/test_hashcat_files.py index d17b096..31bb5f8 100644 --- a/tests/test_hashcat_files.py +++ b/tests/test_hashcat_files.py @@ -159,7 +159,7 @@ def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" -r \"{rule_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" -r \"{rule_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -193,7 +193,7 @@ def test_files_linux(self, mock_system, mock_unlink, mock_check_output, mock_Pop f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--potfile-disable', '--remove', diff --git a/tests/test_hashcat_files_7z.py b/tests/test_hashcat_files_7z.py index 100cf21..0e69a52 100644 --- a/tests/test_hashcat_files_7z.py +++ b/tests/test_hashcat_files_7z.py @@ -149,7 +149,7 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" -a0 \"{wordlist_path}\" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -183,7 +183,7 @@ def test_files_7z_linux(self, mock_system, mock_unlink, mock_check_output, mock_ f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--potfile-disable', '--remove', @@ -326,7 +326,7 @@ def test_files_7z_windows(self, mock_system, mock_unlink, mock_check_output, moc result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f'"hashcat.exe" --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a0 "{wordlist_path}" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"', + f'"hashcat.exe" --machine-readable --quiet --progress-only --restore-disable --potfile-disable 
--session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" -a0 "{wordlist_path}" --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"', shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -360,7 +360,7 @@ def test_files_7z_windows(self, mock_system, mock_unlink, mock_check_output, moc f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--potfile-disable', '--remove', diff --git a/tests/test_hashcat_preprocessor.py b/tests/test_hashcat_preprocessor.py index e398391..e09e06b 100644 --- a/tests/test_hashcat_preprocessor.py +++ b/tests/test_hashcat_preprocessor.py @@ -103,7 +103,7 @@ def test_preprocessor_linux(self, mock_system, mock_unlink, mock_check_output, m result = cracker.run_benchmark(task.get_task()) assert int(result.split(':')[0]) > 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" --hash-type=0 example.dict -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" --hash-type=0 example.dict -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -145,7 +145,7 @@ def test_preprocessor_linux(self, mock_system, mock_unlink, mock_check_output, m f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', '--remove-timer=5', f'"{Path(hashlists_path, str(hashlist_id))}"', ' --hash-type=0 ', @@ -249,7 +249,7 @@ def test_preprocessor_windows(self, mock_system, mock_unlink, mock_check_output, '--potfile-disable', '--session=hashtopolis', '-p', - '0x09', + '"\t"', f' "{hashlist_path}"', ' --hash-type=0 ', 'example.dict', @@ -302,7 +302,7 @@ def test_preprocessor_windows(self, mock_system, mock_unlink, mock_check_output, f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', '--remove-timer=5', f'"{Path(hashlists_path, str(hashlist_id))}"', ' --hash-type=0 ', diff --git a/tests/test_hashcat_runtime.py b/tests/test_hashcat_runtime.py index 3a1a164..52a2c6e 100644 --- a/tests/test_hashcat_runtime.py +++ b/tests/test_hashcat_runtime.py @@ -106,7 +106,7 @@ def test_runtime_windows(self, mock_check_output, moch_popen): '--potfile-disable', '--session=hashtopolis', '-p', - '0x09', + '"\t"', f' "{hashlist_path}"', '-a3 ?l?l?l?l', ' --hash-type=0 ', diff --git a/tests/test_hashcat_simple.py b/tests/test_hashcat_simple.py index b2e3ba3..a1a40f4 100644 --- a/tests/test_hashcat_simple.py +++ b/tests/test_hashcat_simple.py @@ -109,7 +109,7 @@ def test_simple_linux(self, mock_system, mock_unlink, mock_check_output, mock_Po result = cracker.run_benchmark(task.get_task()) assert result != 0 mock_check_output.assert_called_with( - f"'./hashcat.bin' --machine-readable --quiet --progress-only --restore-disable --potfile-disable --session=hashtopolis -p 0x09 \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", + f"'./hashcat.bin' --machine-readable --quiet 
--progress-only --restore-disable --potfile-disable --session=hashtopolis -p \"\t\" \"{Path(hashlists_path, str(hashlist_id))}\" -a3 ?l?l?l?l --hash-type=0 -o \"{Path(hashlists_path, str(hashlist_id))}.out\"", shell=True, cwd=Path(crackers_path, str(cracker_id)), stderr=-2 @@ -143,7 +143,7 @@ def test_simple_linux(self, mock_system, mock_unlink, mock_check_output, mock_Po f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--potfile-disable', '--remove', @@ -259,7 +259,7 @@ def test_simple_windows(self, mock_system, mock_unlink, mock_check_output, mock_ '--potfile-disable', '--session=hashtopolis', '-p', - '0x09', + '"\t"', f' "{hashlist_path}"', '-a3', '?l?l?l?l', @@ -305,7 +305,7 @@ def test_simple_windows(self, mock_system, mock_unlink, mock_check_output, mock_ f'--outfile-check-dir="{Path(zaps_path, zaps_dir)}"', f'-o "{Path(hashlists_path, str(hashlist_id))}.out"', '--outfile-format=1,2,3,4', - f'-p 0x09', + f'-p "\t"', f'-s {skip} -l {limit}', '--potfile-disable', '--remove', From a2373d96f03b64559cc8f1c2064636c85f57f545 Mon Sep 17 00:00:00 2001 From: sein Date: Tue, 28 Mar 2023 20:31:51 +0200 Subject: [PATCH 55/77] prepare for release 0.7.1 --- README.md | 2 +- htpclient/initialize.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index b83d745..1d53fa5 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] -Hashtopolis Client v0.7.0 +Hashtopolis Client v0.7.1 optional arguments: -h, --help show this help message and exit diff --git a/htpclient/initialize.py b/htpclient/initialize.py index fca75a6..214e7d3 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -16,7 +16,7 @@ def get_version(): @staticmethod def get_version_number(): - return "0.7.0" + return "0.7.1" def run(self, args): self.__check_cert(args) From ced5593649bffc3000f3b8bec8f813f3752e9b07 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Mon, 16 Oct 2023 11:47:09 +0200 Subject: [PATCH 56/77] Fixes #hashtopolis/server/1012 Detecting hashcat64.bin correctly --- htpclient/hashcat_cracker.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index 95cd863..c656496 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -29,11 +29,11 @@ def __init__(self, cracker_id, binary_download): self.executable_name = self.executable_name[:k] + "." + self.executable_name[k + 1:] self.cracker_path = Path(self.config.get_value('crackers-path'), str(cracker_id)) - self.executable_path = Path(self.cracker_path, self.executable_name) - if not os.path.isfile(self.executable_path): # in case it's not the new hashcat filename, try the old one (hashcat.) + if not os.path.isfile(Path(self.cracker_path, self.executable_name)): # in case it's not the new hashcat filename, try the old one (hashcat.) self.executable_name = binary_download.get_version()['executable'] k = self.executable_name.rfind(".") self.executable_name = self.executable_name[:k] + get_bit() + "." 
+ self.executable_name[k + 1:] + self.executable_path = Path(self.cracker_path, self.executable_name) if Initialize.get_os() == 1: # Windows From a6467e985bd6977d5be158e4b852e95bd0bb1bf0 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Tue, 19 Mar 2024 20:04:54 +0100 Subject: [PATCH 57/77] Update changelog.md --- changelog.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/changelog.md b/changelog.md index bda3501..9f0db7f 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,7 @@ +## v0.7.1 -> v0.7.2 +### Bugfixes +* When using older hashcat version Hashtopolis doesn't detect the binary correctly (hashcat64.bin) #hashtopolis/server/1012 + ## v0.7.0 -> v0.7.1 ### Bugfixes From 46f902c96bce7c8c1944a64caaece4fb1aba5b46 Mon Sep 17 00:00:00 2001 From: sein Date: Tue, 19 Mar 2024 20:05:30 +0100 Subject: [PATCH 58/77] prepare for 0.7.2 --- README.md | 2 +- htpclient/initialize.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 1d53fa5..759c60d 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] -Hashtopolis Client v0.7.1 +Hashtopolis Client v0.7.2 optional arguments: -h, --help show this help message and exit diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 214e7d3..8e1431c 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -16,7 +16,7 @@ def get_version(): @staticmethod def get_version_number(): - return "0.7.1" + return "0.7.2" def run(self, args): self.__check_cert(args) From 63ebb7b47e090aadd527a5b4123874ca8453fcc5 Mon Sep 17 00:00:00 2001 From: jessevz Date: Mon, 10 Mar 2025 15:59:24 +0100 Subject: [PATCH 59/77] Added extra check for pid file --- __main__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/__main__.py b/__main__.py index 83bfe3c..b8dce22 100644 --- a/__main__.py +++ b/__main__.py @@ -357,7 +357,7 @@ def de_register(): if os.path.exists("lock.pid") and os.path.isfile("lock.pid"): pid = file_get_contents("lock.pid") logging.info("Found existing lock.pid, checking if python process is running...") - if psutil.pid_exists(int(pid)): + if pid.isdigit() and psutil.pid_exists(int(pid)): try: command = psutil.Process(int(pid)).cmdline()[0].replace('\\', '/').split('/') print(command) From bae093a549a64d6638e8a250dff582b60c6bb5e2 Mon Sep 17 00:00:00 2001 From: Sammy Date: Wed, 12 Mar 2025 13:14:36 +0100 Subject: [PATCH 60/77] added suggested solution for the bug taking into account the version of windows --- htpclient/initialize.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 8e1431c..8025618 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -104,13 +104,27 @@ def __update_information(self): devices.append(line[1].strip()) elif Initialize.get_os() == 1: # windows - output = subprocess.check_output("wmic cpu get name", shell=True) - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") - for line in output: - line = line.rstrip("\r\n ") - if line == "Name" or not line: - continue - devices.append(line) + platform_release = platform.uname().release + if int(platform_release) >= 10: + processor_information = subprocess.check_output( + 'powershell -Command "Get-CimInstance Win32_Processor | 
Select-Object -ExpandProperty Name"', + shell=True) + processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + for line in processor_information: + line = line.rstrip("\r\n ") + if line == "Name" or not line: + continue + devices.append(line) + else: + processor_information = subprocess.check_output( + 'powershell -Command "Get-WmiObject Win32_Processor | Select-Object -ExpandProperty Name"', + shell=True) + processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + for line in processor_information: + line = line.rstrip("\r\n ") + if line == "Name" or not line: + continue + devices.append(line) output = subprocess.check_output("wmic path win32_VideoController get name", shell=True) output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") for line in output: From a54adb611277b2b8c8af43701f3291a47189b4a0 Mon Sep 17 00:00:00 2001 From: Sammy Date: Thu, 13 Mar 2025 13:56:16 +0100 Subject: [PATCH 61/77] Added more fallbacks, deduplicated some code --- htpclient/initialize.py | 31 +++++++++++++------------------ 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 8025618..3566592 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -105,33 +105,28 @@ def __update_information(self): elif Initialize.get_os() == 1: # windows platform_release = platform.uname().release - if int(platform_release) >= 10: + if platform_release == "" or int(platform_release) >= 10: processor_information = subprocess.check_output( 'powershell -Command "Get-CimInstance Win32_Processor | Select-Object -ExpandProperty Name"', shell=True) processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") - for line in processor_information: - line = line.rstrip("\r\n ") - if line == "Name" or not line: - continue - devices.append(line) + video_controller = subprocess.check_output( + 'powershell -Command "Get-CimInstance Win32_VideoController | Select-Object -ExpandProperty Name"', + shell=True) + video_controller = video_controller.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") else: processor_information = subprocess.check_output( - 'powershell -Command "Get-WmiObject Win32_Processor | Select-Object -ExpandProperty Name"', + 'wmic cpu get name', shell=True) processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") - for line in processor_information: + video_controller = subprocess.check_output('wmic path win32_VideoController get name', shell=True) + video_controller = video_controller.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + + for source in (processor_information, video_controller): + for line in source: line = line.rstrip("\r\n ") - if line == "Name" or not line: - continue - devices.append(line) - output = subprocess.check_output("wmic path win32_VideoController get name", shell=True) - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") - for line in output: - line = line.rstrip("\r\n ") - if line == "Name" or not line: - continue - devices.append(line) + if line and line != "Name": + devices.append(line) else: # OS X output = subprocess.check_output("system_profiler SPDisplaysDataType -detaillevel mini", shell=True) From 34d64761ee2227841f72712a668c2a8974f75424 Mon Sep 17 00:00:00 2001 From: Sammy Date: Mon, 17 Mar 2025 08:45:32 +0100 Subject: [PATCH 62/77] moved the decoding out to 
a function to then be called --- htpclient/initialize.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 3566592..7fdcd8f 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -63,6 +63,9 @@ def __login(self): if not os.path.isdir("multicast"): os.mkdir("multicast") + def decode_output(self, output): + return output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + def __update_information(self): if not self.config.get_value('uuid'): self.config.set_value('uuid', str(uuid.uuid4())) @@ -72,7 +75,7 @@ def __update_information(self): devices = [] if Initialize.get_os() == 0: # linux output = subprocess.check_output("cat /proc/cpuinfo", shell=True) - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + output = self.decode_output(output) tmp = [] for line in output: line = line.strip() @@ -96,7 +99,7 @@ def __update_information(self): except subprocess.CalledProcessError: # we silently ignore this case on machines where lspci is not present or architecture has no pci bus output = b"" - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + output = self.decode_output(output) for line in output: if not line: continue @@ -109,18 +112,18 @@ def __update_information(self): processor_information = subprocess.check_output( 'powershell -Command "Get-CimInstance Win32_Processor | Select-Object -ExpandProperty Name"', shell=True) - processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + processor_information = self.decode_output(processor_information) video_controller = subprocess.check_output( 'powershell -Command "Get-CimInstance Win32_VideoController | Select-Object -ExpandProperty Name"', shell=True) - video_controller = video_controller.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + video_controller = self.decode_output(video_controller) else: processor_information = subprocess.check_output( 'wmic cpu get name', shell=True) - processor_information = processor_information.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + processor_information = self.decode_output(processor_information) video_controller = subprocess.check_output('wmic path win32_VideoController get name', shell=True) - video_controller = video_controller.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + video_controller = self.decode_output(video_controller) for source in (processor_information, video_controller): for line in source: @@ -130,7 +133,7 @@ def __update_information(self): else: # OS X output = subprocess.check_output("system_profiler SPDisplaysDataType -detaillevel mini", shell=True) - output = output.decode(encoding='utf-8').replace("\r\n", "\n").split("\n") + output = self.decode_output(output) for line in output: line = line.rstrip("\r\n ") if "Chipset Model" not in line: From 0bfbcc710728d9d92c19dbc815e7cc436257334f Mon Sep 17 00:00:00 2001 From: jessevz Date: Mon, 17 Mar 2025 16:05:43 +0100 Subject: [PATCH 63/77] Update changelog.md --- changelog.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/changelog.md b/changelog.md index 9f0db7f..7db4fc9 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,10 @@ +## v0.7.2 -> vx.x.x +### Bugfixes +* When there is an empty pid file, the agent crashes #hashtopolis/server/1028 + +### Enhancements +* Added Windows 11 support. 
#hashtopolis/server/1159 + ## v0.7.1 -> v0.7.2 ### Bugfixes * When using older hashcat version Hashtopolis doesn't detect the binary correctly (hashcat64.bin) #hashtopolis/server/1012 From 79dcb2e95e0bf2e5d47df782ef2ad93a0137d1fb Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Fri, 28 Mar 2025 11:46:55 +0100 Subject: [PATCH 64/77] prepare for release --- README.md | 2 +- changelog.md | 9 +++++++-- htpclient/initialize.py | 2 +- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 759c60d..2d81f4b 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] -Hashtopolis Client v0.7.2 +Hashtopolis Client v0.7.3 optional arguments: -h, --help show this help message and exit diff --git a/changelog.md b/changelog.md index 7db4fc9..ebc9b46 100644 --- a/changelog.md +++ b/changelog.md @@ -1,12 +1,17 @@ -## v0.7.2 -> vx.x.x +## v0.7.2 -> v0.7.3 + ### Bugfixes + * When there is an empty pid file, the agent crashes #hashtopolis/server/1028 - + ### Enhancements + * Added Windows 11 support. #hashtopolis/server/1159 ## v0.7.1 -> v0.7.2 + ### Bugfixes + * When using older hashcat version Hashtopolis doesn't detect the binary correctly (hashcat64.bin) #hashtopolis/server/1012 ## v0.7.0 -> v0.7.1 diff --git a/htpclient/initialize.py b/htpclient/initialize.py index 7fdcd8f..cd9e81c 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -16,7 +16,7 @@ def get_version(): @staticmethod def get_version_number(): - return "0.7.2" + return "0.7.3" def run(self, args): self.__check_cert(args) From df013af31ecb5801e88263ba353a88d807a0620e Mon Sep 17 00:00:00 2001 From: jessevz Date: Thu, 15 May 2025 12:11:49 +0200 Subject: [PATCH 65/77] Fixed bug in healthcheck --- htpclient/hashcat_cracker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index c656496..a5eca75 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -724,7 +724,7 @@ def run_health_check(self, attack, hashlist_alias): args += " --restore-disable --potfile-disable --session=health " args += update_files(attack).replace(hashlist_alias, "'" + self.config.get_value('hashlists-path') + "/health_check.txt'") args += " -o '" + self.config.get_value('hashlists-path') + "/health_check.out'" - full_cmd = f"'{self.callPath}'" + args + full_cmd = f"{self.callPath}" + args if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug(f"CALL: {''.join(full_cmd)}") From 8e9d48a7bfe1ecae07d504723c792f667dcfdfd4 Mon Sep 17 00:00:00 2001 From: jessevz Date: Thu, 15 May 2025 12:13:26 +0200 Subject: [PATCH 66/77] Added to changelog --- changelog.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/changelog.md b/changelog.md index ebc9b46..b0b7024 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,9 @@ +## v0.7.3-> v0.7.x + +### bugfixes + +* Fixed bug in healthcheck on windows + ## v0.7.2 -> v0.7.3 ### Bugfixes From 66c361637809b31cfde1b79569a35ab248c8faf5 Mon Sep 17 00:00:00 2001 From: jessevz Date: Thu, 15 May 2025 12:27:05 +0200 Subject: [PATCH 67/77] Added issue number to changelog --- changelog.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index b0b7024..4f98940 100644 --- a/changelog.md +++ b/changelog.md @@ 
-2,7 +2,7 @@ ### bugfixes -* Fixed bug in healthcheck on windows +* Fixed bug in healthcheck on windows #hashtopolis/server/1019 ## v0.7.2 -> v0.7.3 From 9adfb5e02b5247845673c10b73f87ab3f59c8664 Mon Sep 17 00:00:00 2001 From: jessevz Date: Thu, 15 May 2025 17:11:01 +0200 Subject: [PATCH 68/77] Refactored healthcheck --- __main__.py | 2 +- htpclient/hashcat_cracker.py | 25 ++++++++++++++++++++----- 2 files changed, 21 insertions(+), 6 deletions(-) diff --git a/__main__.py b/__main__.py index b8dce22..b1be0fd 100644 --- a/__main__.py +++ b/__main__.py @@ -70,7 +70,7 @@ def run_health_check(): if len(states) > 0: num_gpus = len(states[0].get_temps()) else: - errors.append("Faild to retrieve one successful cracker state, most likely due to failing.") + errors.append("Failed to retrieve one successful cracker state, most likely due to failing.") num_gpus = 0 query = copy_and_set_token(dict_sendHealthCheck, CONFIG.get_value('token')) query['checkId'] = check_id diff --git a/htpclient/hashcat_cracker.py b/htpclient/hashcat_cracker.py index a5eca75..06f900b 100644 --- a/htpclient/hashcat_cracker.py +++ b/htpclient/hashcat_cracker.py @@ -720,11 +720,26 @@ def agent_stopped(self): return self.wasStopped def run_health_check(self, attack, hashlist_alias): - args = " --machine-readable --quiet" - args += " --restore-disable --potfile-disable --session=health " - args += update_files(attack).replace(hashlist_alias, "'" + self.config.get_value('hashlists-path') + "/health_check.txt'") - args += " -o '" + self.config.get_value('hashlists-path') + "/health_check.out'" - full_cmd = f"{self.callPath}" + args + args = [] + args.append('--machine-readable') + args.append('--quiet') + args.append('--restore-disable') + args.append('--potfile-disable') + args.append('--session=health') + attackcmd = update_files(attack) + hashlist_path = Path(self.config.get_value('hashlists-path'), "health_check.txt") + hashlist_out_path = Path(self.config.get_value('hashlists-path'), "health_check.out") + + # Replace #HL# with the real hashlist + attackcmd = attackcmd.replace(hashlist_alias, f'"{hashlist_path}"') + + args.append(attackcmd) + args.append('-o') + args.append(f'"{hashlist_out_path}"') + + full_cmd = ' '.join(args) + full_cmd = f"{self.callPath} {full_cmd}" + if Initialize.get_os() == 1: full_cmd = full_cmd.replace("/", '\\') logging.debug(f"CALL: {''.join(full_cmd)}") From e5187ee9555fe5e45bb8f7723223a0958963e005 Mon Sep 17 00:00:00 2001 From: Brandon Chalk Date: Fri, 27 Jun 2025 19:23:53 +0000 Subject: [PATCH 69/77] Fix cert code to read from args correctly --- htpclient/initialize.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/htpclient/initialize.py b/htpclient/initialize.py index cd9e81c..ce9f394 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -188,13 +188,13 @@ def __check_token(self, args): def __check_cert(self, args): cert = self.config.get_value('cert') - if cert is None: + if not cert: if args.cert is not None: cert = os.path.abspath(args.cert) logging.debug("Setting cert to: " + cert) self.config.set_value('cert', cert) - if cert is not None: + if cert: Session().s.cert = cert logging.debug("Configuration session cert to: " + cert) From 08b2caade6e4410e5d625c9588ed46d345cfb378 Mon Sep 17 00:00:00 2001 From: Kenny Root Date: Wed, 6 Aug 2025 20:53:45 -0700 Subject: [PATCH 70/77] Handle hashcat status line less rigidly In Hashcat 7.0.0, there is an extra field POWER which tells you how much power each device is drawing. 
This currently breaks the Hashtopia agent-python when using 7.0.0 because it expects the end of the line to be integer values for UTIL. In order to avoid this problem in the future, be less rigid in parsing how the status line is returned. Print out messages for any unexpected fields. --- htpclient/hashcat_status.py | 122 ++++++++++++++++++++++++----------- tests/test_hashcat_status.py | 84 ++++++++++++++++++++++++ 2 files changed, 168 insertions(+), 38 deletions(-) create mode 100644 tests/test_hashcat_status.py diff --git a/htpclient/hashcat_status.py b/htpclient/hashcat_status.py index 98eeffa..a4369fe 100644 --- a/htpclient/hashcat_status.py +++ b/htpclient/hashcat_status.py @@ -1,6 +1,13 @@ class HashcatStatus: def __init__(self, line): - # parse + """ + Initializes the HashcatStatus object by parsing a machine-readable + status line from Hashcat. + + Args: + line (str): A single line of Hashcat machine-readable status + output. + """ self.status = -1 self.speed = [] self.exec_runtime = [] @@ -11,44 +18,72 @@ def __init__(self, line): self.rejected = 0 self.util = [] self.temp = [] + self.power = [] + self.unknown_fields = False + + try: + fields = line.strip().split('\t') + if not fields or fields[0] != 'STATUS': + # Not a valid status line + return + + self.status = int(fields[1]) + + i = 2 + while i < len(fields): + key = fields[i] + i += 1 - line = line.split("\t") - if line[0] != "STATUS": - # invalid line - return - elif len(line) < 19: - # invalid line - return - self.status = int(line[1]) - index = 3 - while line[index] != "EXEC_RUNTIME": - self.speed.append([int(line[index]), int(line[index + 1])]) - index += 2 - while line[index] != "CURKU": - index += 1 - self.curku = int(line[index + 1]) - self.progress[0] = int(line[index + 3]) - self.progress[1] = int(line[index + 4]) - self.rec_hash[0] = int(line[index + 6]) - self.rec_hash[1] = int(line[index + 7]) - self.rec_salt[0] = int(line[index + 9]) - self.rec_salt[1] = int(line[index + 10]) - if line[index + 11] == "TEMP": - # we have temp values - index += 12 - while line[index] != "REJECTED": - self.temp.append(int(line[index])) - index += 1 - else: - index += 11 - self.rejected = int(line[index + 1]) - if len(line) > index + 2: - index += 2 - if line[index] == "UTIL": - index += 1 - while len(line) - 1 > index: # -1 because the \r\n is also included in the split - self.util.append(int(line[index])) - index += 1 + if key == 'SPEED': + # Speed has two values per device: hashes over period and period in ms + while i + 1 < len(fields) and fields[i].isdigit() and fields[i+1].isdigit(): + self.speed.append([int(fields[i]), int(fields[i+1])]) + i += 2 + elif key == 'EXEC_RUNTIME': + # Execution runtime per device + while i < len(fields) and fields[i].replace('.', '', 1).isdigit(): + self.exec_runtime.append(float(fields[i])) + i += 1 + elif key == 'CURKU': + self.curku = int(fields[i]) + i += 1 + elif key == 'PROGRESS': + # Progress has two values: current and total + self.progress = [int(fields[i]), int(fields[i+1])] + i += 2 + elif key == 'RECHASH': + # Recovered hashes has two values: done and total + self.rec_hash = [int(fields[i]), int(fields[i+1])] + i += 2 + elif key == 'RECSALT': + # Recovered salts has two values: done and total + self.rec_salt = [int(fields[i]), int(fields[i+1])] + i += 2 + elif key == 'TEMP': + # Temperature per device + while i < len(fields) and fields[i].lstrip('-').isdigit(): + self.temp.append(int(fields[i])) + i += 1 + elif key == 'REJECTED': + self.rejected = int(fields[i]) + i += 1 + elif key == 
'UTIL': + # Utilization per device + while i < len(fields) and fields[i].lstrip('-').isdigit(): + self.util.append(int(fields[i])) + i += 1 + elif key == 'POWER': + # Power usage per device (newer versions) + while i < len(fields) and fields[i].lstrip('-').isdigit(): + self.power.append(int(fields[i])) + i += 1 + else: + print(f"Unknown field in Hashcat status line: {key}") + self.unknown_fields = True + pass + except (ValueError, IndexError) as e: + print(f"Error parsing Hashcat status line: {e}") + self.__init__("") # Fallback to default initialization def is_valid(self): return self.status >= 0 @@ -87,3 +122,14 @@ def get_speed(self): def get_rejected(self): return self.rejected + + def get_all_power(self): + return self.power + + def get_power(self): + if not self.power: + return -1 + power_sum = 0 + for p in self.power: + power_sum += p + return int(power_sum / len(self.power)) diff --git a/tests/test_hashcat_status.py b/tests/test_hashcat_status.py new file mode 100644 index 0000000..7b8317e --- /dev/null +++ b/tests/test_hashcat_status.py @@ -0,0 +1,84 @@ +import unittest +from htpclient.hashcat_status import HashcatStatus + +class TestHashcatStatus(unittest.TestCase): + def test_hashcat_6_single_device(self): + line = "STATUS\t3\tSPEED\t11887844\t1000\tEXEC_RUNTIME\t15.870873\tCURKU\t170970511093\tPROGRESS\t2735618289488\t2736891330000\tRECHASH\t0\t1\tRECSALT\t0\t1\tTEMP\t-1\tREJECTED\t0\tUTIL\t100\t" + status = HashcatStatus(line) + self.assertTrue(status.is_valid()) + self.assertEqual(status.status, 3) + self.assertEqual(status.speed, [[11887844, 1000]]) + self.assertEqual(status.exec_runtime, [15.870873]) + self.assertEqual(status.curku, 170970511093) + self.assertEqual(status.progress, [2735618289488, 2736891330000]) + self.assertEqual(status.rec_hash, [0, 1]) + self.assertEqual(status.rec_salt, [0, 1]) + self.assertEqual(status.temp, [-1]) + self.assertEqual(status.rejected, 0) + self.assertEqual(status.util, [100]) + self.assertEqual(status.power, []) + self.assertEqual(status.unknown_fields, False) + + def test_hashcat_7_single_device(self): + line = "STATUS\t3\tSPEED\t11887844\t1000\tEXEC_RUNTIME\t15.870873\tCURKU\t170970511093\tPROGRESS\t2735618289488\t2736891330000\tRECHASH\t0\t1\tRECSALT\t0\t1\tTEMP\t-1\tREJECTED\t0\tUTIL\t100\tPOWER\t56\t" + status = HashcatStatus(line) + self.assertTrue(status.is_valid()) + self.assertEqual(status.status, 3) + self.assertEqual(status.speed, [[11887844, 1000]]) + self.assertEqual(status.exec_runtime, [15.870873]) + self.assertEqual(status.curku, 170970511093) + self.assertEqual(status.progress, [2735618289488, 2736891330000]) + self.assertEqual(status.rec_hash, [0, 1]) + self.assertEqual(status.rec_salt, [0, 1]) + self.assertEqual(status.temp, [-1]) + self.assertEqual(status.rejected, 0) + self.assertEqual(status.util, [100]) + self.assertEqual(status.power, [56]) + self.assertEqual(status.unknown_fields, False) + + def test_valid_status_line(self): + line = "STATUS\t1\tSPEED\t2534\t1000\tEXEC_RUNTIME\t123\tCURKU\t45\tPROGRESS\t67\t100\tRECHASH\t89\t120\tRECSALT\t56\t110\tTEMP\t25\tREJECTED\t7\tUTIL\t85\t90\tPOWER\t100\t150" + status = HashcatStatus(line) + self.assertEqual(status.status, 1) + self.assertEqual(status.speed, [[2534, 1000]]) + self.assertEqual(status.exec_runtime, [123]) + self.assertEqual(status.curku, 45) + self.assertEqual(status.progress, [67, 100]) + self.assertEqual(status.rec_hash, [89, 120]) + self.assertEqual(status.rec_salt, [56, 110]) + self.assertEqual(status.temp, [25]) + self.assertEqual(status.rejected, 7) 
+ self.assertEqual(status.util, [85, 90]) + self.assertEqual(status.power, [100, 150]) + + def test_invalid_status_line(self): + line = "NOT_STATUS_LINE" + status = HashcatStatus(line) + self.assertEqual(status.status, -1) + + def test_missing_fields(self): + line = "STATUS\t1\tSPEED\t200\t1000" + status = HashcatStatus(line) + self.assertEqual(status.status, 1) + self.assertEqual(status.speed, [[200, 1000]]) + self.assertEqual(status.exec_runtime, []) + self.assertEqual(status.curku, 0) + self.assertEqual(status.progress, [0, 0]) + + def test_get_progress(self): + line = "STATUS\t1\tPROGRESS\t42\t100" + status = HashcatStatus(line) + self.assertEqual(status.get_progress(), 42) + + def test_get_speed(self): + line = "STATUS\t1\tSPEED\t12400\t1000\t2000\t1000" + status = HashcatStatus(line) + self.assertEqual(status.get_speed(), 12400 + 2000) + + def test_get_util(self): + line = "STATUS\t1\tUTIL\t85\t90" + status = HashcatStatus(line) + self.assertEqual(status.get_util(), (85 + 90) // 2) + +if __name__ == '__main__': + unittest.main() From 1ea546fbf6dc06077d73728371028fd76bfb00d6 Mon Sep 17 00:00:00 2001 From: Romke van Dijk Date: Fri, 8 Aug 2025 21:52:37 +0200 Subject: [PATCH 71/77] Fixing devcontainer for hashcat 7 --- .devcontainer/Dockerfile | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 0556951..519f309 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -19,16 +19,10 @@ RUN apt-get update \ # Install Intel OpenCL Runtime RUN cd /tmp \ && apt install wget lsb-core libnuma-dev pciutils clinfo -y \ - && wget http://registrationcenter-download.intel.com/akdlm/irc_nas/vcp/15532/l_opencl_p_18.1.0.015.tgz \ - && tar xzvf l_opencl_p_18.1.0.015.tgz \ - && cd l_opencl_p_18.1.0.015 \ - && echo "ACCEPT_EULA=accept" > silent.cfg \ - && echo "PSET_INSTALL_DIR=/opt/intel" >> silent.cfg \ - && echo "CONTINUE_WITH_OPTIONAL_ERROR=yes" >> silent.cfg \ - && echo "CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes" >> silent.cfg \ - && echo "COMPONENTS=DEFAULTS" >> silent.cfg \ - && echo "PSET_MODE=install" >> silent.cfg \ - && ./install.sh -s silent.cfg + && wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | tee /usr/share/keyrings/oneapi-archive-keyring.gpg >/dev/null \ + && echo "deb [signed-by=/usr/share/keyrings/oneapi-archive-keyring.gpg] https://apt.repos.intel.com/oneapi all main" | tee /etc/apt/sources.list.d/oneAPI.list \ + && apt update \ + && apt install intel-oneapi-runtime-libs opencl-headers -y # Clean RUN apt-get autoremove -y \ From d961a470c2e4ff92d2c15d68b533926e315460ec Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Wed, 20 Aug 2025 15:19:19 +0200 Subject: [PATCH 72/77] prepare for release --- README.md | 12 ++++++++---- changelog.md | 5 +++-- htpclient/initialize.py | 2 +- 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 2d81f4b..e336699 100644 --- a/README.md +++ b/README.md @@ -31,11 +31,11 @@ Please note: ### Command Line Arguments ``` -usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--disable-update] [--debug] [--voucher VOUCHER] [--url URL] [--cert CERT] [--files-path FILES_PATH] - [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] +usage: python3 hashtopolis.zip [-h] [--de-register] [--version] [--number-only] [--disable-update] [--debug] [--voucher VOUCHER] 
[--url URL] + [--cert CERT] [--files-path FILES_PATH] [--crackers-path CRACKERS_PATH] [--hashlists-path HASHLISTS_PATH] + [--preprocessors-path PREPROCESSORS_PATH] [--zaps-path ZAPS_PATH] [--cpu-only] - -Hashtopolis Client v0.7.3 +Hashtopolis Client v0.7.4 optional arguments: -h, --help show this help message and exit @@ -58,6 +58,7 @@ optional arguments: --zaps-path ZAPS_PATH Use given folder path as zaps location --cpu-only Force client to register as CPU only and also only reading out CPU information + ``` ### Config @@ -127,6 +128,9 @@ In order to use the multicast distribution for files, please make sure that the The list contains all Hashcat versions with which the client was tested and is able to work with (other versions might work): +* 7.1.1 +* 7.1.0 +* 7.0.0 * 6.2.6 * 6.2.5 * 6.2.4 diff --git a/changelog.md b/changelog.md index 4f98940..750d4d5 100644 --- a/changelog.md +++ b/changelog.md @@ -1,8 +1,9 @@ -## v0.7.3-> v0.7.x +## v0.7.3-> v0.7.4 -### bugfixes +### Bugfixes * Fixed bug in healthcheck on windows #hashtopolis/server/1019 +* Compatibility for new machine-readable output format of hashcat 7 added ## v0.7.2 -> v0.7.3 diff --git a/htpclient/initialize.py b/htpclient/initialize.py index ce9f394..92853ba 100644 --- a/htpclient/initialize.py +++ b/htpclient/initialize.py @@ -16,7 +16,7 @@ def get_version(): @staticmethod def get_version_number(): - return "0.7.3" + return "0.7.4" def run(self, args): self.__check_cert(args) From f2cf05dc5974503e190083dee736bfbc7f59e063 Mon Sep 17 00:00:00 2001 From: Sein Coray Date: Wed, 20 Aug 2025 16:28:29 +0200 Subject: [PATCH 73/77] first dockerfile draft --- Dockerfile | 46 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 Dockerfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..353391a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,46 @@ +FROM alpine/git AS preprocess + +RUN apk add --no-cache bash zip + +COPY . 
/htp + +RUN cd /htp && bash build.sh && ls /htp + +FROM nvidia/cuda:12.3.2-devel-ubuntu22.04 AS agent-cuda + +ENV APPDIR=/htp-agent + +WORKDIR ${APPDIR} + +RUN \ + --mount=type=cache,target=/var/cache/apt \ + apt-get update && apt-get install -y --no-install-recommends \ + zip \ + p7zip-full \ + git \ + python3 \ + python3-pip \ + python3-psutil \ + python3-requests \ + pciutils \ + ca-certificates \ + rsync \ + ocl-icd-libopencl1 \ + clinfo \ + curl && \ + rm -rf /var/lib/apt/lists/* + +RUN mkdir -p /etc/OpenCL/vendors && \ + echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd && \ + echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \ + echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf + +ENV PATH=/usr/local/nvidia/bin:${PATH} +ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:${LD_LIBRARY_PATH} + +COPY --from=preprocess /htp/hashtopolis.zip ${APPDIR}/ + + +RUN pip3 install -r requirements.txt + +ENTRYPOINT ["python3", "hashtopolis.zip"] From e2e7acb39755d12c5349a7c86cfa5c63af867ae6 Mon Sep 17 00:00:00 2001 From: sein Date: Thu, 21 Aug 2025 21:45:15 +0200 Subject: [PATCH 74/77] added hashcat status test for multiple devices --- tests/test_hashcat_status.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_hashcat_status.py b/tests/test_hashcat_status.py index 7b8317e..d080ad7 100644 --- a/tests/test_hashcat_status.py +++ b/tests/test_hashcat_status.py @@ -36,6 +36,23 @@ def test_hashcat_7_single_device(self): self.assertEqual(status.power, [56]) self.assertEqual(status.unknown_fields, False) + def test_hashcat_7_multiple_devices(self): + line = "STATUS\t5\tSPEED\t8947055\t1000\t0\t1000\tEXEC_RUNTIME\t0.062464\t0.000000\tCURKU\t0\tPROGRESS\t9025\t9025\tRECHASH\t0\t1\tRECSALT\t0\t1\tTEMP\t44\t-1\tREJECTED\t0\tUTIL\t46\t-1\tPOWER\t-1\t-1\t" + status = HashcatStatus(line) + self.assertTrue(status.is_valid()) + self.assertEqual(status.status, 5) + self.assertEqual(status.speed, [[8947055, 1000], [0, 1000]]) + self.assertEqual(status.exec_runtime, [0.062464, 0.000000]) + self.assertEqual(status.curku, 0) + self.assertEqual(status.progress, [9025, 9025]) + self.assertEqual(status.rec_hash, [0, 1]) + self.assertEqual(status.rec_salt, [0, 1]) + self.assertEqual(status.temp, [44, -1]) + self.assertEqual(status.rejected, 0) + self.assertEqual(status.util, [46, -1]) + self.assertEqual(status.power, [-1, -1]) + self.assertEqual(status.unknown_fields, False) + def test_valid_status_line(self): line = "STATUS\t1\tSPEED\t2534\t1000\tEXEC_RUNTIME\t123\tCURKU\t45\tPROGRESS\t67\t100\tRECHASH\t89\t120\tRECSALT\t56\t110\tTEMP\t25\tREJECTED\t7\tUTIL\t85\t90\tPOWER\t100\t150" status = HashcatStatus(line) From 8546fb2045d7db05b01f53010b82dd84d523886e Mon Sep 17 00:00:00 2001 From: sein Date: Wed, 27 Aug 2025 08:19:34 +0200 Subject: [PATCH 75/77] updated docker file --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 353391a..f8b2645 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ RUN cd /htp && bash build.sh && ls /htp FROM nvidia/cuda:12.3.2-devel-ubuntu22.04 AS agent-cuda -ENV APPDIR=/htp-agent +ENV APPDIR=/htp WORKDIR ${APPDIR} @@ -40,6 +40,7 @@ ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:${LD_LIBRARY_P COPY --from=preprocess /htp/hashtopolis.zip ${APPDIR}/ +COPY requirements.txt ${APPDIR} RUN pip3 install -r requirements.txt From e0b464c1c3bd1dc5b3bda96c2f2f0fd0445642d0 Mon Sep 17 00:00:00 2001 From: sein Date: Fri, 5 Sep 2025 
08:56:02 +0200 Subject: [PATCH 76/77] added docker-compose and example config --- config.json.example | 5 +++++ docker-compose.yml | 13 +++++++++++++ 2 files changed, 18 insertions(+) create mode 100644 config.json.example create mode 100644 docker-compose.yml diff --git a/config.json.example b/config.json.example new file mode 100644 index 0000000..0b7f68b --- /dev/null +++ b/config.json.example @@ -0,0 +1,5 @@ +{ + "debug": true, + "voucher": "", + "url": "http://:/api/server.php" +} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..b815460 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,13 @@ +services: + hashtopolis-agent: + container_name: hashtopolis-agent + image: htp-agent #hashtopolis/agent:latest + volumes: + - ./config.json:/htp/config.json + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: all + capabilities: [gpu] From 0aebcf64ffca229d59d590257df95a14f9164716 Mon Sep 17 00:00:00 2001 From: s3inlc Date: Tue, 7 Oct 2025 13:29:08 +0200 Subject: [PATCH 77/77] fixed default image name --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index b815460..e22042f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ services: hashtopolis-agent: container_name: hashtopolis-agent - image: htp-agent #hashtopolis/agent:latest + image: hashtopolis/agent:latest volumes: - ./config.json:/htp/config.json deploy:
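
For reference, a minimal usage sketch — assuming the `htpclient` package from this series is importable — of how the tolerant parser introduced in PATCH 70 handles a hashcat 7 status line with the new POWER field. The sample line and the expected values are taken from `tests/test_hashcat_status.py`; nothing else below is part of the patches themselves.

```python
from htpclient.hashcat_status import HashcatStatus

# Machine-readable status line as emitted by hashcat 7.x for a single device,
# copied from test_hashcat_7_single_device in tests/test_hashcat_status.py.
line = (
    "STATUS\t3\tSPEED\t11887844\t1000\tEXEC_RUNTIME\t15.870873\t"
    "CURKU\t170970511093\tPROGRESS\t2735618289488\t2736891330000\t"
    "RECHASH\t0\t1\tRECSALT\t0\t1\tTEMP\t-1\tREJECTED\t0\tUTIL\t100\tPOWER\t56\t"
)

status = HashcatStatus(line)
if status.is_valid():
    print(status.get_speed())      # hash count summed over devices -> 11887844
    print(status.get_util())       # average utilisation -> 100
    print(status.get_power())      # average power draw, -1 when not reported -> 56
    print(status.get_all_power())  # per-device power values -> [56]
```

Unknown keys from future hashcat versions only set `unknown_fields` and print a message rather than aborting the parse, which is the point of the rewrite.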
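
In the same spirit, a condensed sketch of the Windows device-detection flow that PATCH 60 through PATCH 62 converge on: Get-CimInstance via PowerShell on Windows 10/11 (and when the release string is empty), WMIC as the fallback for older releases, and one shared decode/filter pass. The command strings and the release check are copied from the diffs; the standalone helper and the wrapper function are illustrative only.

```python
import platform
import subprocess


def decode_output(output):
    # Same normalisation as the decode_output helper added in PATCH 62.
    return output.decode(encoding="utf-8").replace("\r\n", "\n").split("\n")


def windows_devices():
    release = platform.uname().release
    if release == "" or int(release) >= 10:
        # Windows 10/11: WMIC may be absent, so query CIM through PowerShell.
        commands = [
            'powershell -Command "Get-CimInstance Win32_Processor | Select-Object -ExpandProperty Name"',
            'powershell -Command "Get-CimInstance Win32_VideoController | Select-Object -ExpandProperty Name"',
        ]
    else:
        # Older Windows releases keep the original WMIC queries.
        commands = ["wmic cpu get name", "wmic path win32_VideoController get name"]

    devices = []
    for command in commands:
        for line in decode_output(subprocess.check_output(command, shell=True)):
            line = line.rstrip("\r\n ")
            if line and line != "Name":  # drop blank lines and the WMIC header row
                devices.append(line)
    return devices
```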