# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): headless fragment — this `else:` has no visible `if`, and the
# enclosing `def` plus all indentation have been stripped from this view.
# The block structure must be restored from the original source before running.
else:
# Single-file mode: parse one minidump and key the result by its file path.
logging.info('Parsing file %s' % args.minidumpfile)
try:
mimi = pypykatz.parse_minidump_file(args.minidumpfile)
results[args.minidumpfile] = mimi
except Exception as e:
logging.exception('Error while parsing file %s' % args.minidumpfile)
# halt_on_error: re-raise to abort the whole run; otherwise report and continue.
if args.halt_on_error == True:
raise e
else:
traceback.print_exc()
# Output phase: JSON file when both --outfile and --json were given,
# plain-text file when only --outfile was given.
if args.outfile and args.json:
with open(args.outfile, 'w') as f:
json.dump(results, f, cls = UniversalEncoder, indent=4, sort_keys=True)
elif args.outfile:
with open(args.outfile, 'w') as f:
# One section per parsed file: logon sessions first, then orphaned creds.
for result in results:
f.write('FILE: ======== %s =======\n' % result)
for luid in results[result].logon_sessions:
f.write('\n'+str(results[result].logon_sessions[luid]))
if len(results[result].orphaned_creds) > 0:
f.write('\n== Orphaned credentials ==\n')
for cred in results[result].orphaned_creds:
f.write(str(cred))
# files_with_error is populated outside this fragment — TODO confirm source.
if len(files_with_error) > 0:
f.write('\n== Failed to parse these files:\n')
def process_results(self, results, args):
    """Emit parsed results to the destination selected on the command line.

    When ``args.outfile`` is set, the result object serializes itself to that
    path (JSON when ``args.json`` is also set). Without an outfile the results
    go to stdout, either as JSON or as plain text.

    :param results: parse-result object exposing ``to_file`` and ``to_dict``
    :param args: parsed CLI namespace; only ``outfile`` and ``json`` are read
    :return: None
    """
    # NOTE(review): the original paste had all indentation stripped;
    # structure restored here from the if/elif/else keywords.
    if args.outfile:
        # File destination: the result object owns its own serialization.
        results.to_file(args.outfile, args.json)
    elif args.json:
        # stdout, machine-readable; UniversalEncoder handles project types.
        print(json.dumps(results.to_dict(), cls=UniversalEncoder, indent=4, sort_keys=True))
    else:
        # stdout, human-readable.
        print(str(results))
def dump_masterkeys(self, filename = None):
    """Print or persist the recovered DPAPI masterkeys and backupkeys.

    :param filename: optional output path; when given, both key dictionaries
        are written there as one JSON document, otherwise each GUID/key pair
        is printed to stdout in hex.
    :return: None
    """
    # NOTE(review): the original paste had all indentation stripped;
    # structure restored here from the if/else and for keywords.
    if filename is None:
        # Human-readable dump: one line per GUID/key pair, key bytes as hex.
        for guid in self.masterkeys:
            print('[GUID] %s [MASTERKEY] %s' % (guid, self.masterkeys[guid].hex()))
        for guid in self.backupkeys:
            print('[GUID] %s [BACKUPKEY] %s' % (guid, self.backupkeys[guid].hex()))
    else:
        # Machine-readable dump; UniversalEncoder serializes the raw key bytes.
        with open(filename, 'w', newline='') as f:
            t = {'masterkeys': self.masterkeys, 'backupkeys': self.backupkeys}
            f.write(json.dumps(t, cls=UniversalEncoder, indent=4, sort_keys=True))
# NOTE(review): headless fragment — the enclosing per-result callback and its
# indentation are missing from this view; `target`, `groupname` and `group`
# are defined outside it. Presumably a group-membership enumeration hit.
# Render one hit as a single whitespace-separated line.
result = '%s %s %s %s %s' % (target, groupname, group.domain, group.username, str(group.sid))
# Buffer per target when a file destination exists; print immediately otherwise.
if self.out_file is not None:
if target not in self.results:
self.results[target] = []
self.results[target].append(result)
else:
print(result)
# NOTE(review): headless fragment — looks like a finish()/flush routine whose
# `def` line and indentation are missing; the trailing comment/return pair may
# belong to the next duplicate below — TODO confirm against the original file.
# Nothing buffered: everything was already printed as it arrived.
if self.out_file is None and self.to_json is False:
return
logger.info('Writing results...')
if self.out_file is not None:
with open(self.out_file,'w', newline = '') as f:
if self.to_json is True:
f.write(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))
else:
# Text mode: one CRLF-terminated "<target> <result>" line per entry.
for target in self.results:
for res in self.results[target]:
f.write( '%s %s\r\n' % (target, res))
else:
# to_json without an out_file: dump the whole result set to stdout.
print(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))
# printing already happened as results arrived
return
# NOTE(review): headless fragment — a finish()-style flush that also persists
# self.errors; its guard/`def` lines and indentation are missing from this view.
logger.info('Writing results...')
if self.out_file is not None:
with open(self.out_file,'w', newline = '') as f:
if self.to_json is True:
# JSON mode: results and errors combined into one document.
f.write(json.dumps({'results' : self.results, 'errors': self.errors}, cls = UniversalEncoder, indent=4, sort_keys=True))
else:
# Text mode: CRLF-terminated "<target> <entry>" lines, results then errors.
for target in self.results:
for res in self.results[target]:
f.write( '%s %s\r\n' % (target, res))
for target in self.errors:
f.write( '%s %s\r\n' % (target, self.errors[target]))
else:
print(json.dumps({'results' : self.results, 'errors': self.errors}, cls = UniversalEncoder, indent=4, sort_keys=True))
# NOTE(review): headless fragment — near-duplicate of the buffering and
# flush-with-errors routines above; `target` and `result` come from outside
# this view, and `def` lines plus indentation are missing.
# Buffer per target for file output; print immediately otherwise.
if self.out_file is not None:
if target not in self.results:
self.results[target] = []
self.results[target].append(result)
else:
print(result)
# Flush: nothing buffered when neither a file nor JSON output was requested.
if self.out_file is None and self.to_json is False:
# printing already happened as results arrived
return
logger.info('Writing results...')
if self.out_file is not None:
with open(self.out_file,'w', newline = '') as f:
if self.to_json is True:
# JSON mode: results and errors combined into one document.
f.write(json.dumps({'results' : self.results, 'errors': self.errors}, cls = UniversalEncoder, indent=4, sort_keys=True))
else:
# Text mode: CRLF-terminated "<target> <entry>" lines, results then errors.
for target in self.results:
for res in self.results[target]:
f.write( '%s %s\r\n' % (target, res))
for target in self.errors:
f.write( '%s %s\r\n' % (target, self.errors[target]))
else:
print(json.dumps({'results' : self.results, 'errors': self.errors}, cls = UniversalEncoder, indent=4, sort_keys=True))
# NOTE(review): headless fragment — stray `print(result)` (its `result` comes
# from outside this view) followed by another copy of the simple flush routine;
# `def` lines and indentation are missing.
print(result)
# Nothing buffered when neither a file nor JSON output was requested.
if self.out_file is None and self.to_json is False:
return
logger.info('Writing results...')
if self.out_file is not None:
with open(self.out_file,'w', newline = '') as f:
if self.to_json is True:
f.write(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))
else:
# Text mode: one CRLF-terminated "<target> <result>" line per entry.
for target in self.results:
for res in self.results[target]:
f.write( '%s %s\r\n' % (target, res))
else:
print(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))
def process_result(self, mimi, peer_addr):
    """Persist one peer's parse result under ``self.output_dir``.

    Writes ``<peer>_<rand>.json`` with the serialized result, then creates a
    separate ``<peer>_<rand>/kerberos`` directory and writes the kerberos
    ccache there. Any failure is printed to stderr; errors never propagate
    to the caller (best-effort, must not kill the serving loop).

    :param mimi: parse result; must expose ``kerberos_ccache``
    :param peer_addr: peer address string, sanitized for use in file names
    :return: None
    """
    # NOTE(review): the original paste had all indentation stripped;
    # structure restored here from the try/with keywords.
    try:
        # ':' and '.' are not filesystem-friendly on every platform.
        safe_peer = peer_addr.replace(':', '_').replace('.', '_')
        # Random suffix avoids collisions when the same peer connects twice.
        outfile = os.path.join(self.output_dir, '%s_%s.json' % (safe_peer, os.urandom(4).hex()))
        with open(outfile, 'w') as f:
            json.dump(mimi, f, cls=UniversalEncoder, indent=4, sort_keys=True)
        # NOTE(review): the kerberos directory draws its own random suffix, so
        # it does NOT sit next to the .json of the same run — confirm intended.
        kdir = os.path.join(self.output_dir, '%s_%s' % (safe_peer, os.urandom(4).hex()), 'kerberos')
        os.makedirs(kdir)
        mimi.kerberos_ccache.to_file(os.path.join(kdir, 'tickets.ccache'))
    except Exception:
        traceback.print_exc()
# NOTE(review): headless fragment — identical in shape to the print-and-flush
# fragment above; `result` comes from outside this view and all `def` lines
# and indentation are missing. Likely yet another duplicated paste.
print(result)
# Nothing buffered when neither a file nor JSON output was requested.
if self.out_file is None and self.to_json is False:
return
logger.info('Writing results...')
if self.out_file is not None:
with open(self.out_file,'w', newline = '') as f:
if self.to_json is True:
f.write(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))
else:
# Text mode: one CRLF-terminated "<target> <result>" line per entry.
for target in self.results:
for res in self.results[target]:
f.write( '%s %s\r\n' % (target, res))
else:
print(json.dumps(self.results, cls = UniversalEncoder, indent=4, sort_keys=True))