def lock(self):
    return fasteners.InterProcessLock(self.lock_file_name)
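# A minimal sketch of how a lock() helper like the one above is typically consumed.
# The Resource class and the lock file path are illustrative assumptions, not part
# of the original code; fasteners.InterProcessLock supports the context-manager
# protocol, so the lock is released automatically when the block exits.
import fasteners

class Resource:
    def __init__(self, lock_file_name='/tmp/resource.lock'):
        self.lock_file_name = lock_file_name

    def lock(self):
        return fasteners.InterProcessLock(self.lock_file_name)

resource = Resource()
with resource.lock():
    # Only one process at a time can hold the file lock and run this block.
    pass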
print("Deletion unsuccessful")
else:
print("Download unsuccessful")
zipfileObj = zipfile.ZipFile('/local/incomingData.zip', 'r', compression = zipfile.ZIP_DEFLATED)
# We are extracting to the file to incoming_dir in container
zipfileObj.extractall(incoming_dir)
# Create outgoing_dir directory as the plugin container will output data there after processing.
if not os.path.exists(outgoing_dir):
os.makedirs(outgoing_dir)
if __name__ == "__main__":
incoming_dir = os.environ.get("INCOMING_DIR")
# The init-storage container in all the pods should acquire the lock
with fasteners.InterProcessLock("/share/.lockfile"):
# If "/share/.download-failed" exists, exit with an error code immediately
if os.path.exists("/share/.download-failed"):
print("Previous pod failed to download the data. Exiting with failure...")
exit(1)
# If there is some data in incoming_dir but "/share/.download-succeeded" doesn't exist, it is a failure case
# Exit with error code immediately
if os.path.exists(incoming_dir) and len(os.listdir(incoming_dir)) > 0 and not os.path.exists('/share/.download-succeeded'):
print("Some data was downloaded, but '/share/.download-succeeded' file doesn't exist. Exiting with failure...")
exit(1)
# Download the data if "/share/.download-succeeded" does not exist
if not os.path.exists('/share/.download-succeeded'):
try:
print("Lock acquired. Downloading data from Swift...")
getData(containerName=os.environ.get('SWIFT_KEY'), in_dir=incoming_dir, out_dir=os.environ.get('OUTGOING_DIR'))
os.mknod('/local/.download-pod')
except Exception as err:
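# A condensed, hypothetical sketch of the coordination pattern used above: every pod
# grabs the same file lock, the first one to get it does the download and drops a
# marker file, and later pods see the marker and skip the work. The paths and the
# fetch parameter are assumptions for illustration only.
import os
import fasteners

def ensure_data_downloaded(fetch, marker='/share/.download-succeeded',
                           lockfile='/share/.lockfile'):
    """Run fetch() exactly once across all cooperating processes."""
    with fasteners.InterProcessLock(lockfile):
        if os.path.exists(marker):
            return False  # another process already downloaded the data
        fetch()
        open(marker, 'w').close()  # record success for the other processes
        return True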
def compilers(self):
    """Get information about the compilers used by this target configuration.

    Returns:
        InstalledCompilerSet: Collection of installed compilers used by this target.
    """
    if not self._compilers:
        eids = []
        compilers = {}
        for role in Knowledgebase.all_roles():
            try:
                with fasteners.InterProcessLock(os.path.join(PROJECT_STORAGE.prefix, '.lock')):
                    compiler_record = self.populate(role.keyword)
            except KeyError:
                continue
            compilers[role.keyword] = compiler_record.installation()
            LOGGER.debug("compilers[%s] = '%s'", role.keyword, compilers[role.keyword].absolute_path)
            eids.append(compiler_record.eid)
        self._compilers = InstalledCompilerSet('_'.join([str(x) for x in sorted(eids)]), **compilers)
    return self._compilers
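# fasteners also ships an interprocess_locked() decorator; a block like the one in
# compilers() above could be written as a locked helper function instead of an
# inline "with" statement. The lock path and the populate_record() helper below are
# assumed examples, not taken from the project.
import fasteners

@fasteners.interprocess_locked('/tmp/project_storage.lock')
def populate_record(store, keyword):
    # Everything inside the decorated function runs with the file lock held.
    return store.populate(keyword)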
def __init__(self, path, barrier, member_id):
    super(FileLock, self).__init__(path)
    self.acquired = False
    self._lock = fasteners.InterProcessLock(path)
    self._barrier = barrier
    self._member_id = member_id
    self.ref = 0
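# A hedged sketch of how a wrapper like the FileLock above might expose explicit
# acquire/release on top of the wrapped fasteners lock; these methods are an
# assumption for illustration and are not taken from the original class.
import fasteners

class SimpleFileLock:
    def __init__(self, path):
        self.acquired = False
        self._lock = fasteners.InterProcessLock(path)

    def acquire(self, blocking=True):
        # fasteners' acquire() returns True only if the lock was actually obtained.
        self.acquired = self._lock.acquire(blocking=blocking)
        return self.acquired

    def release(self):
        if self.acquired:
            self._lock.release()
            self.acquired = False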
        merged.append(OrderedDict([
            ("Interface", poeInterface['interface']),
            ("POE Status", poeInterface['operation']),
            ("AP", "")
        ]))

    for cdpNeighbor in cdpList:  # add CDP neighbors that have no match
        found = False
        merged.append(OrderedDict([
            ("Interface", cdpNeighbor['local_interface']),
            ("POE Status", 'N/A'),
            ("AP", cdpNeighbor['neighbor'])
        ]))

    try:
        with fasteners.InterProcessLock('/tmp/ansible_lock_file'):
            WriteDictToXl(module.params['dest'], module.params['hostname'], merged)
    except IOError as err:
        # Report the I/O error and fail the module run.
        print("I/O error({0}): {1}".format(err.errno, err.strerror))
        module.fail_json(msg="I/O error({0}): {1}".format(err.errno, err.strerror))

    module.exit_json(changed=True, meta=module.params)
def configure(self):
    """Sets up the Experiment for a new trial.

    Installs or configures TAU and all its dependencies. After calling this
    function, the experiment is ready to operate on the user's application.

    Returns:
        TauInstallation: Object handle for the TAU installation.
    """
    from taucmdr.cf.software.tau_installation import TauInstallation
    LOGGER.debug("Configuring experiment %s", self['name'])
    with fasteners.InterProcessLock(os.path.join(PROJECT_STORAGE.prefix, '.lock')):
        populated = self.populate(defaults=True)
        target = populated['target']
        application = populated['application']
        measurement = populated['measurement']
        baseline = measurement.get_or_default('baseline')
        tau = TauInstallation(
            target.sources(),
            target_arch=target.architecture(),
            target_os=target.operating_system(),
            compilers=target.compilers(),
            # Use a minimal configuration for the baseline measurement
            minimal=baseline,
            # TAU feature support
            application_linkage=application.get_or_default('linkage'),
            openmp_support=application.get_or_default('openmp'),
            pthreads_support=application.get_or_default('pthreads'),
def GetDefaultScheduler() -> GpuScheduler:
    gpus = GPUtil.getGPUs()
    if not gpus:
        raise NoGpuAvailable("No GPUs available")
    if os.environ.get("TEST_TARGET") and os.environ.get("TEST_WITH_GPU") != "1":
        raise NoGpuAvailable("GPUs disabled for tests")
    app.Log(
        2, "Creating default scheduler for %s", humanize.Plural(len(gpus), "GPU")
    )
    return GpuScheduler(
        {gpu: fasteners.InterProcessLock(_LOCK_DIR / str(gpu.id)) for gpu in gpus}
    )
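# A hypothetical sketch of how a scheduler built on per-GPU locks like the ones
# above could claim an idle GPU: try each lock without blocking and keep the first
# one that is acquired. The gpu_locks mapping mirrors the dict passed to
# GpuScheduler but is otherwise an assumption.
import fasteners

def claim_free_gpu(gpu_locks):
    """gpu_locks: mapping of GPU object -> fasteners.InterProcessLock."""
    for gpu, lock in gpu_locks.items():
        # blocking=False returns immediately; True means this process now holds the lock.
        if lock.acquire(blocking=False):
            return gpu, lock
    return None, None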
def _write_to_sheets(log_date, ae_config_rel_path, pc_config_rel_path,
                     description, git_ref,
                     log_dir_root, is_continue):
    try:
        with fasteners.InterProcessLock(sheets_logger.get_lock_file_p()):
            sheets_logger.insert_row(
                log_date + ('c' if is_continue else ''),
                os.environ.get('JOB_ID', 'N/A'),
                ae_config_rel_path, pc_config_rel_path, description, '',
                git_ref,
                log_dir_root)
    except sheets_logger.GoogleSheetsAccessFailedException as e:
        print(e)
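# When the work done under the lock can be slow (as a remote Google Sheets call
# might be), acquiring with a timeout avoids blocking forever behind another
# process. This is a general fasteners pattern, not code from the sheets logger;
# the lock path is an assumed example.
import fasteners

lock = fasteners.InterProcessLock('/tmp/sheets.lock')
if lock.acquire(timeout=30):
    try:
        pass  # perform the logging/write while the lock is held
    finally:
        lock.release()
else:
    print('Could not acquire the log lock within 30 seconds; skipping the write.')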