def test_check_exclusives(self):
  a = MockTarget('a', exclusives={'a': '1', 'b': '1'})
  b = MockTarget('b', exclusives={'a': '1'})
  c = MockTarget('c', exclusives={'a': '2'})
  d = MockTarget('d', dependencies=[a, b])
  e = MockTarget('e', dependencies=[a, c], exclusives={'c': '1'})
  context = Context(CheckExclusivesTest.config, options={}, run_tracker=None, target_roots=[d, e])
  check_exclusives_task = CheckExclusives(context, signal_error=True)
  try:
    check_exclusives_task.execute([d, e])
    self.fail("Expected a conflicting exclusives exception to be thrown.")
  except TaskError:
    pass
@classmethod
def _coerce_to_targets(cls, from_str, to_str):
  if isinstance(from_str, Compatibility.string):
    if not isinstance(to_str, Compatibility.string):
      raise TaskError('Finding paths from string %s to non-string %s' % (from_str, str(to_str)))

    from_address = Address.parse(get_buildroot(), from_str)
    to_address = Address.parse(get_buildroot(), to_str)

    from_target = Target.get(from_address)
    to_target = Target.get(to_address)

    if not from_target:
      raise TaskError('Target %s doesn\'t exist' % from_address.reference())
    if not to_target:
      raise TaskError('Target %s doesn\'t exist' % to_address.reference())

    return from_target, to_target

  elif isinstance(to_str, Compatibility.string):
    raise TaskError('Finding paths from non-string %s to string %s' % (str(from_str), to_str))

  return from_str, to_str
def execute(self, targets):
  # Compute transitive exclusives for each target.
  for t in targets:
    t._propagate_exclusives()

  # Check for exclusives collisions: more than one value for the same key.
  for t in targets:
    excl = t.get_all_exclusives()
    for key in excl:
      if len(excl[key]) > 1:
        msg = 'target %s has more than one exclusives tag for key %s: %s' % \
              (t.address.reference(), key, list(excl[key]))
        if self.signal_error:
          raise TaskError(msg)
        else:
          print('Warning: %s' % msg)

  if self.context.products.is_required_data('exclusives_groups'):
    mapping = ExclusivesMapping(self.context)
    partition_keys = self._compute_exclusives_conflicts(targets)
    for key in partition_keys:
      mapping.add_conflict(key, partition_keys[key])
    mapping._populate_target_maps(targets)
    self.context.products.set_data('exclusives_groups', mapping)
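
For orientation, here is a minimal, self-contained sketch of the conflict the check above detects. The dict literal stands in for a hypothetical get_all_exclusives() result; it is not taken from the snippet.

# Hypothetical exclusives accumulated over a target's closure: key 'a' carries
# two different values, so execute() would raise TaskError (or just print a
# warning when signal_error is False).
excl = {'a': set(['1', '2']), 'b': set(['1'])}
conflicting = [key for key in excl if len(excl[key]) > 1]
assert conflicting == ['a']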
  # Body of the add_jars helper passed to walk() below: symlink each internal
  # jar into the bundle's libs dir and record it for the manifest classpath.
  for basedir, jars in target_jars.items():
    for internaljar in jars:
      os.symlink(os.path.join(basedir, internaljar),
                 os.path.join(libdir, internaljar))
      classpath.add(internaljar)

app.binary.walk(add_jars, lambda t: t.is_internal)

# Add external dependencies to the bundle.
for basedir, externaljar in self.list_jar_dependencies(app.binary):
  path = os.path.join(basedir, externaljar)
  os.symlink(path, os.path.join(libdir, externaljar))
  classpath.add(externaljar)

for basedir, jars in self.context.products.get('jars').get(app.binary).items():
  if len(jars) != 1:
    raise TaskError('Expected 1 mapped binary for %s but found: %s' % (app.binary, jars))

  binary = jars[0]
  binary_jar = os.path.join(basedir, binary)
  bundle_jar = os.path.join(bundledir, binary)
  if not classpath:
    os.symlink(binary_jar, bundle_jar)
  else:
    # Rewrite the manifest Class-Path to point at the bundled libs, copying
    # every other zip entry through unchanged.
    with open_zip(binary_jar, 'r') as src:
      with open_zip(bundle_jar, 'w', compression=ZIP_DEFLATED) as dest:
        for item in src.infolist():
          buffer = src.read(item.filename)
          if Manifest.PATH == item.filename:
            manifest = Manifest(buffer)
            manifest.addentry(Manifest.CLASS_PATH,
                              ' '.join(os.path.join('libs', jar) for jar in classpath))
            buffer = manifest.contents()
          dest.writestr(item, buffer)
if middle_path:
  binary_path = os.path.join(base_path, *(middle_path + [version, name]))
  cached_binary_path = os.path.join(cachedir, binary_path)
  if not os.path.exists(cached_binary_path):
    url = posixpath.join(baseurl, binary_path)
    log.info('Fetching %s binary from: %s' % (name, url))
    # Download to a scratch path and rename into the cache only once the
    # write has completed, so a failed fetch never leaves a partial binary.
    downloadpath = cached_binary_path + '~'
    try:
      with closing(urllib_request.urlopen(url, timeout=timeout_secs)) as binary:
        with safe_open(downloadpath, 'wb') as cached_binary:
          cached_binary.write(binary.read())
      os.rename(downloadpath, cached_binary_path)
      chmod_plus_x(cached_binary_path)
    except (IOError, urllib_error.HTTPError, urllib_error.URLError) as e:
      raise TaskError('Failed to fetch binary from %s: %s' % (url, e))
    finally:
      safe_delete(downloadpath)
  log.debug('Selected %s binary cached at: %s' % (name, cached_binary_path))
  return cached_binary_path

raise TaskError('No %s binary found for: %s' % (name, (sysname, release, machine)))
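
The fetch above uses a download-to-scratch-then-rename pattern so a failed download never poisons the cache. A standalone sketch of just that pattern, using hypothetical names (fetch_atomically, dest) that are not part of the snippet:

import os
from contextlib import closing
try:
  from urllib.request import urlopen   # Python 3
except ImportError:
  from urllib2 import urlopen          # Python 2

def fetch_atomically(url, dest, timeout_secs=30):
  # Write to a scratch path first; rename into place only after the write
  # completes, and always clean up the scratch file.
  downloadpath = dest + '~'
  try:
    with closing(urlopen(url, timeout=timeout_secs)) as response:
      with open(downloadpath, 'wb') as out:
        out.write(response.read())
    os.rename(downloadpath, dest)
  finally:
    if os.path.exists(downloadpath):
      os.remove(downloadpath)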
May symlink instead of copying, when it's OK to do so.

Postcondition: symlinks are of leaf packages only.
"""
self.log.debug('Merging classes dirs into %s' % self.classes_dir)
safe_rmtree(self.classes_dir)
symlinkable_packages = self._symlinkable_packages(state)
for artifact in self.underlying_artifacts:
  classnames_by_package = defaultdict(list)
  for cls in state.classes_by_target.get(artifact.targets[0], []):
    classnames_by_package[os.path.dirname(cls)].append(os.path.basename(cls))

  for package, classnames in classnames_by_package.items():
    if package == "":
      raise TaskError("Found class files %s with empty package" % classnames)
    artifact_package_dir = os.path.join(artifact.classes_dir, package)
    merged_package_dir = os.path.join(self.classes_dir, package)

    if package in symlinkable_packages:
      if os.path.islink(merged_package_dir):
        assert os.readlink(merged_package_dir) == artifact_package_dir
      elif os.path.exists(merged_package_dir):
        safe_rmtree(merged_package_dir)
        os.symlink(artifact_package_dir, merged_package_dir)
      else:
        safe_mkdir(os.path.dirname(merged_package_dir))
        os.symlink(artifact_package_dir, merged_package_dir)
    else:
      safe_mkdir(merged_package_dir)
      for classname in classnames:
        src = os.path.join(artifact_package_dir, classname)
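
A rough illustration of the docstring's postcondition above, with hypothetical package names: a leaf package owned by a single artifact is symlinked wholesale, while a package that cannot be symlinked becomes a real directory whose class files are merged in one by one.

# <classes_dir>/com/foo/leafpkg  -> <artifact>/classes/com/foo/leafpkg  (symlink)
# <classes_dir>/com/foo/shared/     real directory; its class files are
#                                   merged in from each contributing artifact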
def identify_jars(names, jars):
  jars_by_name = {}
  jars_and_filenames = [(x, os.path.basename(x)) for x in jars]

  for name in names:
    jar_for_name = None
    for jar, filename in jars_and_filenames:
      if filename.startswith(name):
        jar_for_name = jar
        break
    if jar_for_name is None:
      raise TaskError('Couldn\'t find jar named %s' % name)
    else:
      jars_by_name[name] = jar_for_name
  return jars_by_name
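
A usage sketch for identify_jars, assuming it and TaskError are in scope; the jar paths below are made up for illustration.

names = ['scala-library', 'scala-compiler']
jars = ['/cache/jars/scala-library-2.9.3.jar',
        '/cache/jars/scala-compiler-2.9.3.jar',
        '/cache/jars/jline-1.0.jar']

jars_by_name = identify_jars(names, jars)
assert jars_by_name['scala-library'] == '/cache/jars/scala-library-2.9.3.jar'
assert jars_by_name['scala-compiler'] == '/cache/jars/scala-compiler-2.9.3.jar'

# A name whose prefix matches no jar filename raises TaskError:
#   identify_jars(['unknown-tool'], jars)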
scala_sources = list()
for target in check_targets:
  def collect(filename):
    if filename.endswith('.scala'):
      scala_sources.append(os.path.join(target.target_base, filename))
  # Use an explicit loop rather than map() so the side effect also runs on Python 3.
  for filename in filter(filter_excludes, target.sources):
    collect(filename)

if scala_sources:
  def call(srcs):
    cp = self._jvm_tool_bootstrapper.get_jvm_tool_classpath(self._scalastyle_bootstrap_key)
    return self.runjava(classpath=cp,
                        main=Scalastyle._MAIN,
                        args=['-c', self._scalastyle_config] + srcs)

  result = Xargs(call).execute(scala_sources)
  if result != 0:
    raise TaskError('java %s ... exited non-zero (%i)' % (Scalastyle._MAIN, result))
def _cautious_rmtree(root):
  real_buildroot = os.path.realpath(os.path.abspath(get_buildroot()))
  real_root = os.path.realpath(os.path.abspath(root))
  if not real_root.startswith(real_buildroot):
    raise TaskError('DANGER: Attempting to delete %s, which is not under the build root!'
                    % real_root)
  safe_rmtree(real_root)
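
A usage sketch for _cautious_rmtree, assuming the build root resolves to the hypothetical path /workspace/repo:

# A path inside the build root is removed via safe_rmtree.
_cautious_rmtree('/workspace/repo/.pants.d/scratch')

# A path outside the build root (or one whose symlinks resolve outside it)
# raises TaskError before anything is deleted:
#   _cautious_rmtree('/tmp/unrelated')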
    for actual_dep in filter(must_be_explicit_dep, actual_deps.get(src, [])):
      actual_dep_tgts = targets_by_file.get(actual_dep)
      # actual_dep_tgts is usually a singleton. If it's not, we only need one of these
      # to be in our declared deps to be OK.
      if actual_dep_tgts is None:
        missing_file_deps.add((src_tgt, actual_dep))
      elif src_tgt not in actual_dep_tgts:  # Obviously intra-target deps are fine.
        canonical_actual_dep_tgt = next(iter(actual_dep_tgts))
        if actual_dep_tgts.isdisjoint(transitive_deps_by_target.get(src_tgt, [])):
          missing_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append((src, actual_dep))
        elif canonical_actual_dep_tgt not in src_tgt.dependencies:
          # The canonical dep is the only one a direct dependency makes sense on.
          missing_direct_tgt_deps_map[(src_tgt, canonical_actual_dep_tgt)].append(
            (src, actual_dep))
  else:
    # No dependency info was recorded for src at all.
    raise TaskError('Requested dep info for unknown source file: %s' % src)

return (list(missing_file_deps),
        missing_tgt_deps_map.items(),
        missing_direct_tgt_deps_map.items())