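# Hashtopolis Python agent entry point.
# Registers this agent with a Hashtopolis server, keeps configuration and cracker
# binaries up to date, and runs the main work loop that fetches tasks and chunks.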
import argparse
import glob
import logging
import os
import shutil
import signal
import sys
import time
from time import sleep

import psutil

from htpclient.binarydownload import BinaryDownload
from htpclient.chunk import Chunk
from htpclient.files import Files
from htpclient.generic_cracker import GenericCracker
from htpclient.hashcat_cracker import HashcatCracker
from htpclient.hashlist import Hashlist
from htpclient.helpers import start_uftpd, file_get_contents
from htpclient.initialize import Initialize
from htpclient.task import Task
from htpclient.jsonRequest import *
from htpclient.dicts import *

CONFIG = None
binaryDownload = None
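
# Run a server-requested health check: fetch the check settings, write the supplied
# hashes to <hashlists-path>/health_check.txt, run them through hashcat and report
# the measured timings, GPU count, cracked count and any errors back to the server.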
def run_health_check():
    global CONFIG, binaryDownload

    logging.info("Health check requested by server!")
    logging.info("Retrieving health check settings...")
    query = copy_and_set_token(dict_getHealthCheck, CONFIG.get_value('token'))
    req = JsonRequest(query)
    ans = req.execute()
    if ans is None:
        logging.error("Failed to get health check!")
        sleep(5)
        return
    elif ans['response'] != 'SUCCESS':
        logging.error("Error on getting health check: " + str(ans))
        sleep(5)
        return
    binaryDownload.check_version(ans['crackerBinaryId'])
    check_id = ans['checkId']
    logging.info("Starting check ID " + str(check_id))

    # write hashes to file
    hash_file = open(CONFIG.get_value('hashlists-path') + "/health_check.txt", "w")
    hash_file.write("\n".join(ans['hashes']))
    hash_file.close()

    # delete old output file if necessary
    if os.path.exists(CONFIG.get_value('hashlists-path') + "/health_check.out"):
        os.unlink(CONFIG.get_value('hashlists-path') + "/health_check.out")

    # run task
    cracker = HashcatCracker(ans['crackerBinaryId'], binaryDownload)
    start = int(time.time())
    [states, errors] = cracker.run_health_check(ans['attack'], ans['hashlistAlias'])
    end = int(time.time())

    # read results
    if os.path.exists(CONFIG.get_value('hashlists-path') + "/health_check.out"):
        founds = file_get_contents(CONFIG.get_value('hashlists-path') + "/health_check.out").replace("\r\n", "\n").split("\n")
    else:
        founds = []
    if len(states) > 0:
        num_gpus = len(states[0].get_temps())
    else:
        errors.append("Failed to retrieve a single successful cracker state, most likely because the cracker failed.")
        num_gpus = 0

    query = copy_and_set_token(dict_sendHealthCheck, CONFIG.get_value('token'))
    query['checkId'] = check_id
    query['start'] = start
    query['end'] = end
    query['numGpus'] = num_gpus
    query['numCracked'] = len(founds) - 1
    query['errors'] = errors
    req = JsonRequest(query)
    ans = req.execute()
    if ans is None:
        logging.error("Failed to send health check results!")
        sleep(5)
        return
    elif ans['response'] != 'OK':
        logging.error("Error on sending health check results: " + str(ans))
        sleep(5)
        return
    logging.info("Health check completed successfully!")

# Set up logging to stdout and to a log file with different formats, using the debug level from the config if set
def init_logging(args):
    global CONFIG

    log_format = '[%(asctime)s] [%(levelname)-5s] %(message)s'
    print_format = '%(message)s'
    date_format = '%Y-%m-%d %H:%M:%S'
    log_level = logging.INFO
    logfile = open('client.log', "a", encoding="utf-8")
    logging.getLogger("requests").setLevel(logging.WARNING)
    CONFIG = Config()
    if args.debug:
        CONFIG.set_value('debug', True)
    if CONFIG.get_value('debug'):
        log_level = logging.DEBUG
        logging.getLogger("requests").setLevel(logging.DEBUG)
    logging.basicConfig(level=log_level, format=print_format, datefmt=date_format)
    file_handler = logging.StreamHandler(logfile)
    file_handler.setFormatter(logging.Formatter(log_format))
    logging.getLogger().addHandler(file_handler)

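
# Resolve all working directories (from config or CLI arguments), clean up stale
# hashtopolis.pid files, prepare the HTTP session (proxy/auth), register with the
# server, fetch binaries and updates and, if configured, start the multicast daemon.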
def init(args):
    global CONFIG, binaryDownload

    if len(CONFIG.get_value('files-path')) == 0:
        CONFIG.set_value('files-path', os.path.abspath('files'))
    if len(CONFIG.get_value('crackers-path')) == 0:
        CONFIG.set_value('crackers-path', os.path.abspath('crackers'))
    if len(CONFIG.get_value('hashlists-path')) == 0:
        CONFIG.set_value('hashlists-path', os.path.abspath('hashlists'))
    if len(CONFIG.get_value('zaps-path')) == 0:
        CONFIG.set_value('zaps-path', os.path.abspath('.'))
    if len(CONFIG.get_value('preprocessors-path')) == 0:
        CONFIG.set_value('preprocessors-path', os.path.abspath('preprocessors'))

    if args.files_path and len(args.files_path):
        CONFIG.set_value('files-path', os.path.abspath(args.files_path))
    if args.crackers_path and len(args.crackers_path):
        CONFIG.set_value('crackers-path', os.path.abspath(args.crackers_path))
    if args.hashlists_path and len(args.hashlists_path):
        CONFIG.set_value('hashlists-path', os.path.abspath(args.hashlists_path))
    if args.zaps_path and len(args.zaps_path):
        CONFIG.set_value('zaps-path', os.path.abspath(args.zaps_path))
    if args.preprocessors_path and len(args.preprocessors_path):
        CONFIG.set_value('preprocessors-path', os.path.abspath(args.preprocessors_path))

    logging.info("Starting client '" + Initialize.get_version() + "'...")

    # clean up leftover hashtopolis.pid files (we assume nothing is running anymore when the client is freshly started)
    if os.path.exists(CONFIG.get_value('crackers-path')):
        for root, dirs, files in os.walk(CONFIG.get_value('crackers-path')):
            for folder in dirs:
                if folder.isdigit() and os.path.exists(CONFIG.get_value('crackers-path') + "/" + folder + "/hashtopolis.pid"):
                    logging.info("Cleaning hashcat PID file from " + CONFIG.get_value('crackers-path') + "/" + folder)
                    os.unlink(CONFIG.get_value('crackers-path') + "/" + folder + "/hashtopolis.pid")

    session = Session(requests.Session()).s
    session.headers.update({'User-Agent': Initialize.get_version()})
    if CONFIG.get_value('proxies'):
        session.proxies = CONFIG.get_value('proxies')
    if CONFIG.get_value('auth-user') and CONFIG.get_value('auth-password'):
        session.auth = (CONFIG.get_value('auth-user'), CONFIG.get_value('auth-password'))

    # connection initialization
    Initialize().run(args)

    # downloads and updates
    binaryDownload = BinaryDownload(args)
    binaryDownload.run()

    # if multicast is set to run, we need to start the daemon
    if CONFIG.get_value('multicast') and Initialize().get_os() == 0:
        start_uftpd(Initialize().get_os_extension(), CONFIG)

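
# Main work loop. Each iteration reloads the config, asks the server for a task and
# then for a chunk. Based on the value returned by Chunk.get_chunk():
#   0  -> no chunk to work on was returned; reset the task and ask again
#   -1 -> the keyspace of the task still has to be measured
#   -2 -> the server wants a benchmark for this task
#   -3 -> the server requested a health check
# Any other result is treated as a regular chunk and handed to the cracker.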
def loop():
    global binaryDownload, CONFIG

    logging.debug("Entering loop...")
    task = Task()
    chunk = Chunk()
    files = Files()
    hashlist = Hashlist()
    task_change = True
    last_task_id = 0
    cracker = None
    while True:
        CONFIG.update()
        files.deletion_check()  # check if there are deletion orders from the server
        if task.get_task() is not None:
            last_task_id = task.get_task()['taskId']
        task.load_task()
        if task.get_task_id() == -1:  # get task returned to run a health check
            run_health_check()
            task.reset_task()
            continue
        elif task.get_task() is None:
            task_change = True
            continue
        else:
            if task.get_task()['taskId'] != last_task_id:
                task_change = True

        # try to download the needed cracker (if not already present)
        if not binaryDownload.check_version(task.get_task()['crackerId']):
            task_change = True
            task.reset_task()
            continue

        # if prince is used, make sure it's downloaded (deprecated, as preprocessors are now integrated generally)
        if 'usePrince' in task.get_task() and task.get_task()['usePrince']:
            if not binaryDownload.check_prince():
                continue

        # if a preprocessor is used, make sure it's downloaded
        if 'usePreprocessor' in task.get_task() and task.get_task()['usePreprocessor']:
            if not binaryDownload.check_preprocessor(task):
                continue

        # check if all required files are present
        if not files.check_files(task.get_task()['files'], task.get_task()['taskId']):
            task.reset_task()
            continue

        # download the hashlist for the task
        if task_change and not hashlist.load_hashlist(task.get_task()['hashlistId']):
            task.reset_task()
            continue

        if task_change:  # check if the client version is up-to-date and load the appropriate cracker
            binaryDownload.check_client_version()
            logging.info("Got cracker binary type " + binaryDownload.get_version()['name'])
            if binaryDownload.get_version()['name'].lower() == 'hashcat':
                cracker = HashcatCracker(task.get_task()['crackerId'], binaryDownload)
            else:
                cracker = GenericCracker(task.get_task()['crackerId'], binaryDownload)

        # if it's a task using hashcat brain, we need to load the found hashes
        if task_change and 'useBrain' in task.get_task() and task.get_task()['useBrain'] and not hashlist.load_found(task.get_task()['hashlistId'], task.get_task()['crackerId']):
            task.reset_task()
            continue

        task_change = False
        chunk_resp = chunk.get_chunk(task.get_task()['taskId'])
        if chunk_resp == 0:
            task.reset_task()
            continue
        elif chunk_resp == -1:
            # measure keyspace
            if not cracker.measure_keyspace(task, chunk):  # failure case
                task.reset_task()
            continue
        elif chunk_resp == -3:
            run_health_check()
            task.reset_task()
            continue
        elif chunk_resp == -2:
            # run benchmark
            logging.info("Benchmark task...")
            result = cracker.run_benchmark(task.get_task())
            if result == 0:
                sleep(10)
                task.reset_task()
                # some error must have occurred during benchmarking
                continue
            # send result of benchmark
            query = copy_and_set_token(dict_sendBenchmark, CONFIG.get_value('token'))
            query['taskId'] = task.get_task()['taskId']
            query['result'] = result
            query['type'] = task.get_task()['benchType']
            req = JsonRequest(query)
            ans = req.execute()
            if ans is None:
                logging.error("Failed to send benchmark!")
                sleep(5)
                task.reset_task()
                continue
            elif ans['response'] != 'SUCCESS':
                logging.error("Error on sending benchmark: " + str(ans))
                sleep(5)
                task.reset_task()
                continue
            else:
                logging.info("Server accepted benchmark!")
                continue

        # check if we have an invalid chunk
        if chunk.chunk_data() is not None and chunk.chunk_data()['length'] == 0:
            logging.error("Invalid chunk size (0) retrieved! Retrying...")
            task.reset_task()
            continue

        # run chunk
        logging.info("Start chunk...")
        cracker.run_chunk(task.get_task(), chunk.chunk_data(), task.get_preprocessor())
        if cracker.agent_stopped():
            # if the chunk was aborted by a stop command from the server, we need to ask for a task again first
            task.reset_task()
            task_change = True
        binaryDownload.check_client_version()

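
# De-register this agent from the server and, on success, remove local data:
# the cracker/preprocessor/hashlist/file directories, config.json, the bundled
# 7zr binaries and any hashlist_* zap directories.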
def de_register():
    global CONFIG

    logging.info("De-registering client..")
    query = copy_and_set_token(dict_deregister, CONFIG.get_value('token'))
    req = JsonRequest(query)
    ans = req.execute()
    if ans is None:
        logging.error("De-registration failed!")
    elif ans['response'] != 'SUCCESS':
        logging.error("Error on de-registration: " + str(ans))
    else:
        logging.info("Successfully de-registered!")
        # cleanup
        dirs = [CONFIG.get_value('crackers-path'), CONFIG.get_value('preprocessors-path'), CONFIG.get_value('hashlists-path'), CONFIG.get_value('files-path')]
        files = ['config.json', '7zr.exe', '7zr']
        for file in files:
            if os.path.exists(file):
                os.unlink(file)
        for directory in dirs:
            if os.path.exists(directory):
                shutil.rmtree(directory)
        r = glob.glob(CONFIG.get_value('zaps-path') + '/hashlist_*')
        for i in r:
            shutil.rmtree(i)
        logging.info("Cleanup finished!")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Hashtopolis Client v' + Initialize.get_version_number(), prog='python3 hashtopolis.zip')
    parser.add_argument('--de-register', action='store_true', help='client should automatically de-register from server now')
    parser.add_argument('--version', action='store_true', help='show version information')
    parser.add_argument('--number-only', action='store_true', help='when using --version show only the number')
    parser.add_argument('--disable-update', action='store_true', help='disable retrieving auto-updates of the client from the server')
    parser.add_argument('--debug', '-d', action='store_true', help='enforce debugging output')
    parser.add_argument('--voucher', type=str, required=False, help='voucher to use to automatically register')
    parser.add_argument('--url', type=str, required=False, help='URL to Hashtopolis client API')
    parser.add_argument('--cert', type=str, required=False, help='Client TLS cert bundle for Hashtopolis client API')
    parser.add_argument('--files-path', type=str, required=False, help='Use given folder path as files location')
    parser.add_argument('--crackers-path', type=str, required=False, help='Use given folder path as crackers location')
    parser.add_argument('--hashlists-path', type=str, required=False, help='Use given folder path as hashlists location')
    parser.add_argument('--preprocessors-path', type=str, required=False, help='Use given folder path as preprocessors location')
    parser.add_argument('--zaps-path', type=str, required=False, help='Use given folder path as zaps location')
    parser.add_argument('--cpu-only', action='store_true', help='Force client to register as CPU only and only read out CPU information')
    args = parser.parse_args()

    if args.version:
        if args.number_only:
            print(Initialize.get_version_number())
        else:
            print(Initialize.get_version())
        sys.exit(0)

    if args.de_register:
        init_logging(args)
        session = Session(requests.Session()).s
        session.headers.update({'User-Agent': Initialize.get_version()})
        de_register()
        sys.exit(0)

    try:
        init_logging(args)

        # check if there is a lock file and whether the PID in it still belongs to a running hashtopolis client
        if os.path.exists("lock.pid") and os.path.isfile("lock.pid"):
            pid = file_get_contents("lock.pid")
            logging.info("Found existing lock.pid, checking if python process is running...")
            if psutil.pid_exists(int(pid)):
                try:
                    command = psutil.Process(int(pid)).cmdline()[0].replace('\\', '/').split('/')
                    print(command)
                    if str.startswith(command[-1], "python"):
                        logging.fatal("There is already a hashtopolis agent running in this directory!")
                        sys.exit(-1)
                except Exception:
                    # if we fail to determine the cmd line, we assume it's either not running anymore or another (non-hashtopolis) process
                    pass
            logging.info("Ignoring lock.pid file because PID is not existent anymore or not running python!")

        # create lock file
        with open("lock.pid", 'w') as f:
            f.write(str(os.getpid()))
            f.close()

        init(args)
        loop()
    except KeyboardInterrupt:
        logging.info("Exiting...")

    # if the lock file exists, remove it
    if os.path.exists("lock.pid"):
        os.unlink("lock.pid")
    sys.exit()