import os
import shutil
import struct
from datetime import datetime, timedelta

def process_dem(dem):
    # HEADER_FMT, siz and clean_str are defined elsewhere in the original source;
    # opening the demo file here is an assumed reconstruction of the missing lines.
    with open(dem, 'rb') as f:
        b = f.read(siz)
    d = struct.unpack(HEADER_FMT, b)
    client = d[4].split(b'\0', 1)[0].decode('utf8').replace(' ', '_')
    map_name = d[5].split(b'\0', 1)[0].decode('utf8').replace(' ', '_')
    duration = d[7]
    return clean_str(client), clean_str(map_name), duration

def save_dem(dem, out_dir):
    client, map_name, duration = process_dem(dem)
    d = datetime.now() - timedelta(seconds=duration)
    name = '{}-{}-{}.dem'.format(d.strftime('%Y%m%d-%H%M'), map_name, client)
    out = os.path.join(out_dir, name)
    shutil.move(dem, out)
    print('Demo saved to: ' + out)
class DemoHandler(PatternMatchingEventHandler):
    def __init__(self, out_dir, timeout):
        super(DemoHandler, self).__init__(['*.dem'])
        self.watchers = {}
        self.outd = out_dir
        self.timeout = timeout

    def watch_dem(self, dem):
        self.watchers[dem] = DemoWatcher(dem, self.outd, self.timeout)

    def on_created(self, ev):
        dem = ev.src_path
        self.watch_dem(dem)

    def on_modified(self, ev):
        dem = ev.src_path
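# A minimal wiring sketch (assumed, not part of the original snippet): attach
# DemoHandler to a watchdog Observer; the directories and timeout are
# placeholder values.
from watchdog.observers import Observer

observer = Observer()
observer.schedule(DemoHandler('/path/to/saved_demos', timeout=30), '/path/to/demos', recursive=False)
observer.start()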
def _per_file_event_handler(self):
    """Create a Watchdog file event handler that does different things for every file
    """
    file_event_handler = PatternMatchingEventHandler()
    file_event_handler.on_created = self._on_file_created
    file_event_handler.on_modified = self._on_file_modified
    file_event_handler.on_moved = self._on_file_moved
    file_event_handler._patterns = [
        os.path.join(self._run.dir, os.path.normpath('*'))]
    # Ignore hidden files/folders
    file_event_handler._ignore_patterns = [
        '*.tmp',
        os.path.join(self._run.dir, ".*"),
        os.path.join(self._run.dir, "*/.*"),
    ]
    for glob in self._api.settings("ignore_globs"):
        file_event_handler._ignore_patterns.append(
            os.path.join(self._run.dir, glob))
    return file_event_handler
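# PatternMatchingEventHandler also accepts patterns and ignore_patterns as
# constructor arguments; an equivalent sketch of the setup above, with run_dir
# standing in for self._run.dir:
file_event_handler = PatternMatchingEventHandler(
    patterns=[os.path.join(run_dir, '*')],
    ignore_patterns=['*.tmp',
                     os.path.join(run_dir, '.*'),
                     os.path.join(run_dir, '*/.*')],
)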
import configparser

def GetLine(file, phrase):
    for num, line in enumerate(file):
        if phrase in line:
            return num
    return -1

def LoadConfig(path):
    config = configparser.ConfigParser()
    config.read(path)
    options = config.options("config")
    global picoPath
    picoPath = config.get("config", "picopath")
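# LoadConfig above expects an INI-style file with a [config] section holding a
# picopath key; a hypothetical example (the PICO-8 path is a placeholder):
#
# [config]
# picopath = /Applications/PICO-8.app/Contents/MacOS/pico8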
class ChangeHandler(PatternMatchingEventHandler):
    patterns = ["*.lua"]

    def on_modified(self, event):
        print(event.src_path, event.event_type)
        CreatePicoFile()

inputFolder = ""
outputFile = ""
runOnCompile = False
picoPath = ""
configPath = "./config"

try:
    opts, args = getopt.getopt(sys.argv[1:], "h:i:o:r", ["folder=", "output=", "run"])
except getopt.GetoptError:
clientId = "barlynaland"
thingName = "barlynaland"
caPath = "aws-iot-rootCA.crt"
certPath = "7b5fede5e4-certificate.pem.crt"
keyPath = "7b5fede5e4-private.pem.key"
mqttc.tls_set(caPath, certfile=certPath, keyfile=keyPath, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
mqttc.connect(awshost, awsport, keepalive=60)
# start the thread to publish
mqttc.loop_start()
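# With the network loop running in the background thread started above, the
# client can publish from the main thread; the topic and payload here are
# illustrative only.
mqttc.publish("things/barlynaland/status", "online", qos=1)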
"""
class LevelHandler(PatternMatchingEventHandler):
    # class FileHandler(FileSystemEventHandler):
    '''
    Override the methods for creation, deletion, modification, and moving
    to get more information as to what is happening on output
    '''
    patterns = ["*/level.dat"]

    def on_moved(self, event):
        ''' listen to changes in level.dat '''
        if not event.is_directory and event.dest_path.endswith("level.dat"):
            time.sleep(1)
            filename = event.dest_path
            # get the level.dat as a python object
            level = nbt_to_object(filename)
            # let us know something happened
            print("Moved", event.src_path, event.dest_path)
while True:
    # if there is more than one file in the deque then the first one must
    # have finished writing so is ready to map
    if len(file_queue) > 1:
        try:
            map_to_reference(aligner, file_queue.popleft(), reads_per_file, destination_folder)
        except ValueError as err:
            print(err)
    elif self.die_when_done:
        print("mapping thread terminating as there are no more reads in the deque & you have elected not to watch the folder.")
        return
    time.sleep(0.1)

# The Watcher watches the source folder and if a file is created then it pops
# its name into the deque for the mapping thread to deal with.
class Watcher(PatternMatchingEventHandler):
    patterns = ["*.fastq", "*.fasta"]
    file_queue = None

    def __init__(self, file_queue, *args, **kwargs):
        self.file_queue = file_queue
        super(Watcher, self).__init__(*args, **kwargs)

    def process(self, event):
        """
        event.event_type
            'modified' | 'created' | 'moved' | 'deleted'
        event.is_directory
            True | False
        event.src_path
            path/to/observed/file
        """
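# A wiring sketch (assumed, not part of the original snippet): connect the
# Watcher to a shared deque and a watchdog Observer; the watched folder is a
# placeholder path.
from collections import deque
from watchdog.observers import Observer

file_queue = deque()
observer = Observer()
observer.schedule(Watcher(file_queue), "/path/to/reads", recursive=False)
observer.start()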
def setup_file_watcher(path, callback, use_polling=False):
    """Sets up a background thread that watches for source changes and
    automatically sends SIGHUP to the current process whenever a file
    changes.
    """
    if use_polling:
        observer_class = watchdog.observers.polling.PollingObserver
    else:
        observer_class = watchdog_gevent.Observer

    file_event_handler = watchdog.events.PatternMatchingEventHandler(patterns=['*.py'])
    # file_event_handler = watchdog.events.FileSystemEventHandler()
    # monkey patching is perfectly fine.
    file_event_handler.on_any_event = callback

    # start the watcher
    file_watcher = observer_class()
    file_watcher.schedule(file_event_handler, path, recursive=True)
    print("000000000000000000000000")
    file_watcher.start()
    print("&&&&&&&&&&&&&&&&&&&&&&&&")
    return file_watcher
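# Hypothetical usage (not from the original snippet): the callback delivers
# SIGHUP to the current process, matching the behaviour the docstring above
# describes; the watched path is a placeholder.
import os
import signal

def _sighup_self(event):
    os.kill(os.getpid(), signal.SIGHUP)

watcher = setup_file_watcher("./src", _sighup_self)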
    conn.close()

def wipe(self):
    for filename in glob.glob(os.path.join(self._base_dir, '*.db')):
        os.unlink(filename)

def watch(self, run_id, start_cursor, callback):
    watchdog = SqliteEventLogStorageWatchdog(self, run_id, callback, start_cursor)
    self._watchers[run_id] = self._obs.schedule(watchdog, self._base_dir, True)

def end_watch(self, run_id, handler):
    self._obs.remove_handler_for_watch(handler, self._watchers[run_id])
    del self._watchers[run_id]

class SqliteEventLogStorageWatchdog(PatternMatchingEventHandler):
    def __init__(self, event_log_storage, run_id, callback, start_cursor, **kwargs):
        self._event_log_storage = check.inst_param(
            event_log_storage, 'event_log_storage', SqliteEventLogStorage
        )
        self._run_id = check.str_param(run_id, 'run_id')
        self._cb = check.callable_param(callback, 'callback')
        self._log_path = event_log_storage.path_for_run_id(run_id)
        self._cursor = start_cursor
        super(SqliteEventLogStorageWatchdog, self).__init__(patterns=[self._log_path], **kwargs)

    def _process_log(self):
        events = self._event_log_storage.get_logs_for_run(self._run_id, self._cursor)
        self._cursor += len(events)
        for event in events:
            status = self._cb(event)
import io
import sys
import time

import watchdog.events
import watchdog.observers

import threatingestor.extras.queueworker

class FSWatcher(
        watchdog.events.PatternMatchingEventHandler,
        threatingestor.extras.queueworker.QueueWorker):
    """Watch a directory for YARA rule changes.

    Send contents of the changed rule files to the queue.
    """
    # Only match YARA rules.
    patterns = ["*.yar", "*.yara", "*.rule", "*.rules"]

    def process(self, event):
        """Handle a file event."""
        with io.open(event.src_path, 'r') as rule_source:
            rule_content = rule_source.read()

        self.queue.write_one({
            'filename': event.src_path,
            'rules': rule_content,
        })
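# Assumed wiring (not from the original snippet): schedule FSWatcher on the
# rules directory so every change pushes rule text onto the work queue. The
# path is a placeholder and the queue setup required by QueueWorker is elided.
observer = watchdog.observers.Observer()
observer.schedule(FSWatcher(), "/path/to/yara/rules", recursive=True)
observer.start()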
__copyright__ = "Copyright 2018, JK"
__license__ = "GPL3"
__version__ = "0.0.1"

import time

# pip install watchdog
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler

import txCode
import txBase

#######

class TelexNews(txBase.TelexBase):

    class EventHandler(PatternMatchingEventHandler):
        patterns = ["*.txt", "*.rsstx", "*.news"]
        _last_path = ''
        _last_text = ''

        def __init__(self, buffer: list):
            super().__init__()
            self._news_buffer = buffer

        def on_modified(self, event):
            """
            event.event_type
                'modified' | 'created' | 'moved' | 'deleted'
            event.is_directory
                True | False
            event.src_path
            """
priv_16 = re.compile(r"^172\.(1[6-9]|2[0-9]|3[0-1])\.[0-9]{1,3}\.[0-9]{1,3}$")
return (priv_lo.match(ip) is not None or priv_24.match(ip) is not None
        or priv_20.match(ip) is not None or priv_16.match(ip) is not None)
def get_hostname():
    with open('/etc/hostname') as fp:
        hostname = fp.read().strip()
    assert hostname
    return hostname

class FileReloader(PatternMatchingEventHandler):
    def __init__(self, filetowatch, callback):
        super(FileReloader, self).__init__(['*/' + filetowatch])
        self._callback = callback
        self._filetowatch = filetowatch

    @property
    def watchfile(self):
        return self._filetowatch

    def on_any_event(self, event):
        self._callback()
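# A usage sketch (assumed, not from the original snippet): reload application
# settings whenever the watched file changes; the directory, filename and
# reload function are placeholders.
from watchdog.observers import Observer

def reload_settings():
    print("config.yaml changed, reloading")

observer = Observer()
observer.schedule(FileReloader('config.yaml', reload_settings), '/etc/myapp', recursive=False)
observer.start()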