pkgindex = None
if proc is not None:
if proc.poll() is None:
proc.kill()
proc.wait()
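# --- Hedged sketch (standalone, not portage code): the cleanup pattern shown
# above as a reusable helper. fetch_with_cleanup() and its cmd argument are
# hypothetical names; the point is that a still-running child process is
# killed and reaped, and the temporary download file is removed, no matter
# how the fetch ended.
import os
import subprocess
import tempfile

def fetch_with_cleanup(cmd):
    proc = None
    tmp_fd, tmp_filename = tempfile.mkstemp()
    os.close(tmp_fd)
    try:
        with open(tmp_filename, "wb") as out:
            proc = subprocess.Popen(cmd, stdout=out)
            proc.wait()
        with open(tmp_filename, "rb") as f:
            return f.read()
    finally:
        if proc is not None:
            if proc.poll() is None:          # child still running
                proc.kill()
                proc.wait()                  # reap it to avoid a zombie
        if tmp_filename is not None:
            try:
                os.unlink(tmp_filename)      # best-effort temp file removal
            except OSError:
                pass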
# myid is an (index, data) pair: mydata holds the packed file contents,
# myindex the records giving each entry's name, data offset and length.
mydata = myid[1]
myindexlen = len(myindex)
startpos = 0
while ((startpos + 8) < myindexlen):
namelen = decodeint(myindex[startpos:startpos + 4])
datapos = decodeint(myindex[startpos + 4 + namelen:startpos + 8 + namelen])
datalen = decodeint(myindex[startpos + 8 + namelen:startpos + 12 + namelen])
myname = myindex[startpos + 4:startpos + 4 + namelen]
myname = _unicode_decode(myname,
encoding=_encodings['repo.content'], errors='replace')
filename = os.path.join(mydest, myname.lstrip(os.sep))
filename = normalize_path(filename)
if not filename.startswith(mydest):
# myname contains invalid ../ component(s)
continue
dirname = os.path.dirname(filename)
if dirname:
if not os.path.exists(dirname):
os.makedirs(dirname)
mydat = open(_unicode_encode(filename,
encoding=_encodings['fs'], errors='strict'), 'wb')
mydat.write(mydata[datapos:datapos + datalen])
mydat.close()
startpos = startpos + namelen + 12
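# --- Hedged sketch (standalone, not portage's xpak module): the loop above
# assumes each index record is a 4-byte big-endian name length, the name
# itself, then a 4-byte data offset and a 4-byte data length, i.e.
# namelen + 12 bytes per record, with decodeint() reading a 32-bit big-endian
# integer. unpack_index() re-implements that layout with struct; safe_join()
# mirrors the normalize/startswith guard against "../" components.
import os
import struct

def unpack_index(index):
    """Yield (name, data_offset, data_length) records from an xpak-style index."""
    pos = 0
    while pos + 8 < len(index):
        (namelen,) = struct.unpack(">I", index[pos:pos + 4])
        name = index[pos + 4:pos + 4 + namelen]
        datapos, datalen = struct.unpack(
            ">II", index[pos + 4 + namelen:pos + 12 + namelen])
        yield name, datapos, datalen
        pos += namelen + 12

def safe_join(dest, name):
    """Join name onto dest, refusing paths that escape dest via '..'."""
    dest = os.path.normpath(dest) + os.sep
    path = os.path.normpath(os.path.join(dest, name.lstrip(os.sep)))
    if not path.startswith(dest):
        return None          # name contained invalid ../ component(s)
    return path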
if cfgprot:
mydest = new_protect_filename(mydest, newmd5=mymd5)
# Whether the file is config protected or not, we merge the new file
# the same way, unless moveme=0 (blocking directory).
if moveme:
# Do not hardlink files unless they are in the same
# directory, since otherwise tar may not be able to
# extract a tarball of the resulting hardlinks due to
# 'Invalid cross-device link' errors (depends on layout of
# mount points). Also, don't hardlink zero-byte files since
# it doesn't save any space, and don't hardlink
# CONFIG_PROTECTed files since config files shouldn't be
# hardlinked to each other (for example, shadow installs
# several identical config files inside /etc/pam.d/).
parent_dir = os.path.dirname(myrealdest)
hardlink_key = (parent_dir, mymd5, mystat.st_size,
mystat.st_mode, mystat.st_uid, mystat.st_gid)
hardlink_candidates = None
if not protected and mystat.st_size != 0:
hardlink_candidates = self._md5_merge_map.get(hardlink_key)
if hardlink_candidates is None:
hardlink_candidates = []
self._md5_merge_map[hardlink_key] = hardlink_candidates
mymtime = movefile(mysrc, mydest, newmtime=thismtime,
sstat=mystat, mysettings=self.settings,
hardlink_candidates=hardlink_candidates,
encoding=_encodings['merge'])
if mymtime is None:
return 1
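# --- Hedged sketch (a simplified stand-in, not portage's movefile()): group
# previously merged files by (parent dir, md5, size, mode, uid, gid) and hard
# link a new file against an identical candidate in the same directory
# instead of storing a second copy. As the comment above says, zero-byte and
# protected files are never hardlinked. dest is assumed not to exist yet.
import filecmp
import hashlib
import os
import shutil

_merge_map = {}  # hardlink key -> list of already-merged paths

def merge_with_hardlinks(src, dest, protected=False):
    st = os.stat(src)
    if protected or st.st_size == 0:
        shutil.move(src, dest)              # never hardlink these
        return dest
    with open(src, "rb") as f:
        md5 = hashlib.md5(f.read()).hexdigest()
    key = (os.path.dirname(dest), md5, st.st_size,
           st.st_mode, st.st_uid, st.st_gid)
    candidates = _merge_map.setdefault(key, [])
    for candidate in candidates:
        if filecmp.cmp(candidate, src, shallow=False):
            os.link(candidate, dest)        # same dir + same content: link
            os.unlink(src)
            break
    else:
        shutil.move(src, dest)              # no identical candidate: move
    candidates.append(dest)
    return dest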
proc = None
if tmp_filename is not None:
try:
os.unlink(tmp_filename)
except OSError:
pass
if pkgindex is rmt_idx:
pkgindex.modified = False # don't update the header
pkgindex.header["DOWNLOAD_TIMESTAMP"] = "%d" % time.time()
try:
ensure_dirs(os.path.dirname(pkgindex_file))
f = atomic_ofstream(pkgindex_file)
pkgindex.write(f)
f.close()
except (IOError, PortageException):
if os.access(os.path.dirname(pkgindex_file), os.W_OK):
raise
# The current user doesn't have permission to cache the
# file, but that's alright.
if pkgindex:
remote_base_uri = pkgindex.header.get("URI", base_url)
for d in pkgindex.packages:
cpv = _pkg_str(d["CPV"], metadata=d,
settings=self.settings, db=self.dbapi)
# Local package instances override remote instances
# with the same instance_key.
if self.dbapi.cpv_exists(cpv):
continue
d["CPV"] = cpv
d["BASE_URI"] = remote_base_uri
d["PKGINDEX_URI"] = url
def read_ld_so_conf(path):
for l in grabfile(path):
include_match = _ld_so_include_re.match(l)
if include_match is not None:
subpath = os.path.join(os.path.dirname(path),
include_match.group(1))
for p in glob.glob(subpath):
for r in read_ld_so_conf(p):
yield r
else:
yield l
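# --- Hedged sketch (a standalone stand-in for the helpers used above, where
# grabfile() and _ld_so_include_re are portage internals): expand an
# ld.so.conf-style file, following "include <glob>" directives recursively
# and yielding the library directories it names.
import glob
import os
import re

_include_re = re.compile(r"^include\s+(\S.*)")

def expand_ld_so_conf(path):
    try:
        with open(path, encoding="utf-8", errors="replace") as f:
            lines = [l.strip() for l in f]
    except OSError:
        return
    for line in lines:
        if not line or line.startswith("#"):
            continue
        m = _include_re.match(line)
        if m is not None:
            pattern = os.path.join(os.path.dirname(path), m.group(1))
            for included in glob.glob(pattern):
                for entry in expand_ld_so_conf(included):
                    yield entry
        else:
            yield line

# Example: list(expand_ld_so_conf("/etc/ld.so.conf")) typically yields paths
# such as /usr/local/lib plus everything pulled in from /etc/ld.so.conf.d/.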
dest_path = normalize_path(
os.path.join(destroot, f.lstrip(os.path.sep)))
try:
dest_lstat = os.lstat(dest_path)
except EnvironmentError as e:
if e.errno == errno.ENOENT:
del e
continue
elif e.errno == errno.ENOTDIR:
del e
# A non-directory is in a location where this package
# expects to have a directory.
dest_lstat = None
parent_path = dest_path
while len(parent_path) > len(destroot):
parent_path = os.path.dirname(parent_path)
try:
dest_lstat = os.lstat(parent_path)
break
except EnvironmentError as e:
if e.errno != errno.ENOTDIR:
raise
del e
if not dest_lstat:
raise AssertionError(
"unable to find non-directory " + \
"parent for '%s'" % dest_path)
dest_path = parent_path
f = os.path.sep + dest_path[len(destroot):]
if f in collisions:
continue
else:
	raise
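# --- Hedged sketch (standalone, simplified from the ENOTDIR handling above):
# walk up from a path until something lstat()-able is found, i.e. the
# non-directory ancestor that is blocking the deeper path from existing.
import errno
import os

def blocking_ancestor(path, root):
    """Return (ancestor, lstat_result) for the nearest existing ancestor of
    path that lies below root, or (None, None) if none is found."""
    parent = path
    while len(parent) > len(root):
        parent = os.path.dirname(parent)
        try:
            return parent, os.lstat(parent)
        except OSError as e:
            if e.errno != errno.ENOTDIR:
                raise
    return None, None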
import portage
from portage import normalize_path
from portage import os
from portage.output import green
from portage.util.futures.extendedfutures import ExtendedFuture
from repoman.metadata import get_metadata_xsd
from repoman.modules.commit import repochecks
from repoman.modules.commit import manifest
from repoman.profile import check_profiles, dev_profile_keywords, setup_profile
from repoman.repos import repo_metadata
from repoman.modules.scan.scan import scan
from repoman.modules.vcs.vcs import vcs_files_to_cps
from portage.module import Modules
MODULES_PATH = os.path.join(os.path.dirname(__file__), "modules", "scan")
# initial development debug info
logging.debug("module path: %s", MODULES_PATH)
MODULE_CONTROLLER = Modules(path=MODULES_PATH, namepath="repoman.modules.scan")
MODULE_NAMES = MODULE_CONTROLLER.module_names[:]
# initial development debug info
logging.debug("module_names: %s", MODULE_NAMES)
DATA_TYPES = {'dict': dict, 'Future': ExtendedFuture, 'list': list, 'set': set}
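# --- Hedged sketch (a generic stand-in, not portage.module.Modules, whose
# exact API is not shown above): discover the plugin modules living under a
# package directory and import them by dotted name, similar in spirit to how
# MODULE_CONTROLLER enumerates repoman.modules.scan.*.
import importlib
import pkgutil

def discover_modules(path, namepath):
    """Map module name -> imported module for every module found under path."""
    found = {}
    for _finder, name, _ispkg in pkgutil.iter_modules([path]):
        found[name] = importlib.import_module("%s.%s" % (namepath, name))
    return found

# Example (names assumed): discover_modules(MODULES_PATH, "repoman.modules.scan")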
class Scanner(object):
'''Primary scan class. Operates all the small Q/A tests and checks'''
def __init__(self, repo_settings, myreporoot, config_root, options,
def digests(self, checkdir):
'''Returns the freshly loaded digests
@param checkdir: string, directory path
'''
mf = self.repoman_settings.repositories.get_repo_for_location(
os.path.dirname(os.path.dirname(checkdir)))
mf = mf.load_manifest(checkdir, self.repoman_settings["DISTDIR"])
_digests = mf.getTypeDigests("DIST")
del mf
return _digests
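# --- Hedged sketch: one way the digests returned above could be used to check
# a fetched distfile. It assumes _digests maps each DIST filename to a dict of
# hash-name -> hex digest; the exact Manifest layout is an assumption here,
# and verify_distfile() is a hypothetical helper, not a portage API.
import hashlib
import os

def verify_distfile(distdir, filename, digests):
    expected = digests.get(filename, {})
    path = os.path.join(distdir, filename)
    with open(path, "rb") as f:
        data = f.read()
    for hash_name, hexdigest in expected.items():
        if hash_name.lower() == "size":
            if len(data) != int(hexdigest):
                return False
            continue
        try:
            h = hashlib.new(hash_name.lower())
        except ValueError:
            continue  # hash not supported by hashlib; skip it
        h.update(data)
        if h.hexdigest() != hexdigest:
            return False
    return True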
# This will probably never happen, but it might if one of the
# paths returned from findConsumers() refers to one of the libs
# that should be preserved yet the path is not listed in the
# contents. Such a path might belong to some other package, so
# it shouldn't be preserved here.
showMessage(_("!!! File '%s' will not be preserved "
"due to missing contents entry\n") % (f_abs,),
level=logging.ERROR, noiselevel=-1)
preserve_paths.remove(f)
continue
new_contents[f_abs] = contents_entry
obj_type = contents_entry[0]
showMessage(_(">>> needed %s %s\n") % (obj_type, f_abs),
noiselevel=-1)
# Add parent directories to contents if necessary.
parent_dir = os.path.dirname(f_abs)
while len(parent_dir) > len(root):
new_contents[parent_dir] = ["dir"]
prev = parent_dir
parent_dir = os.path.dirname(parent_dir)
if prev == parent_dir:
break
outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
write_contents(new_contents, root, outfile)
outfile.close()
self._clear_contents_cache()
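# --- Hedged sketch (standalone version of the parent-directory loop above):
# make sure every ancestor of a preserved path, up to but not including root,
# is recorded as a "dir" entry in the contents mapping.
import os

def add_parent_dirs(contents, path, root):
    parent = os.path.dirname(path)
    while len(parent) > len(root):
        contents.setdefault(parent, ["dir"])
        prev = parent
        parent = os.path.dirname(parent)
        if prev == parent:   # reached the filesystem root; stop
            break
    return contents

# Example: add_parent_dirs({}, "/usr/lib64/libfoo.so.1", "/")
# -> {'/usr/lib64': ['dir'], '/usr': ['dir']}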
def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
"""
This method will grab the next COUNTER value and record it back
to the global file. Returns new counter value.
@param myroot: ignored, self._eroot is used instead
"""
myroot = None
counter = self.get_counter_tick_core(mycpv=mycpv) - 1
if incrementing:
#increment counter
counter += 1
# use same permissions as config._init_dirs()
ensure_dirs(os.path.dirname(self._counter_path),
gid=portage_gid, mode=0o2750, mask=0o2)
# update new global counter file
write_atomic(self._counter_path, str(counter))
return counter
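# --- Hedged sketch (a simplified stand-in for write_atomic()/ensure_dirs()):
# read the current counter, bump it, and publish the new value with a
# write-to-temp-then-rename so readers never see a partially written file.
import os
import tempfile

def counter_tick(counter_path, incrementing=True):
    try:
        with open(counter_path, encoding="utf-8") as f:
            counter = int(f.read().strip())
    except (OSError, ValueError):
        counter = 0
    if incrementing:
        counter += 1
    d = os.path.dirname(counter_path) or "."
    os.makedirs(d, exist_ok=True)
    fd, tmp = tempfile.mkstemp(dir=d)
    with os.fdopen(fd, "w", encoding="utf-8") as f:
        f.write(str(counter))
    os.replace(tmp, counter_path)      # atomic rename on POSIX
    return counter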