def get_log_type():
"""
Returns the log type as configured by the deployer.
:returns: the value from VALID_LOGGERS to use as the log type. Defaults to "syslog".
:rtype: str
"""
log_type = config.config.get('server', 'log_type')
if log_type not in VALID_LOGGERS:
print >> sys.stderr, "log_type not properly set. Defaulting to syslog."
log_type = 'syslog'
if log_type == 'syslog':
if not os.path.exists(LOG_PATH):
print >> sys.stderr, "Unable to access log, {log_path}.".format(log_path=LOG_PATH)
sys.exit(os.EX_UNAVAILABLE)
return log_type
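A minimal usage sketch, assuming the caller feeds the configured log type into the standard logging module; the handler choices and the 'pulp' logger name below are illustrative and not taken from the source.
import logging
import logging.handlers
import sys

log_type = get_log_type()
if log_type == 'syslog':
    # assumes LOG_PATH checked above points at the syslog socket
    handler = logging.handlers.SysLogHandler(address=LOG_PATH)
else:
    # any non-syslog value falls back to stdout in this sketch
    handler = logging.StreamHandler(sys.stdout)
logging.getLogger('pulp').addHandler(handler)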
def _check_username_password_ldap(self, username, password=None):
"""
Check a username and password against the ldap server.
Return None if the username and password are not valid
:type username: str
:param username: the login of the user
:type password: str or None
:param password: password of the user, None => do not validate the password
:rtype: L{pulp.server.db.model.auth.User} instance or None
:return: user corresponding to the credentials
"""
ldap_uri = config.get('ldap', 'uri')
ldap_base = config.get('ldap', 'base')
ldap_tls = config.getboolean('ldap', 'tls')
ldap_filter = None
if config.has_option('ldap', 'filter'):
ldap_filter = config.get('ldap', 'filter')
ldap_server = ldap_connection.LDAPConnection(server=ldap_uri, tls=ldap_tls)
ldap_server.connect()
user = ldap_server.authenticate_user(ldap_base, username, password,
filter=ldap_filter)
return user
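For illustration only, a caller might gate this LDAP check on a config switch and fall back to another credential store; the ('ldap', 'enabled') option name and the wrapper itself are assumptions, not part of the snippet.
def authenticate(self, username, password):
    # Hypothetical wrapper around _check_username_password_ldap(); the
    # ('ldap', 'enabled') option is assumed for this sketch.
    if config.has_option('ldap', 'enabled') and config.getboolean('ldap', 'enabled'):
        user = self._check_username_password_ldap(username, password)
        if user is not None:
            return user
    # fall back to a non-LDAP credential check here (not shown)
    return None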
@return: dictionary of data to send to a consumer for a bind
@rtype: dict
'''
if hostnames is None:
hostnames = []
if key_list is None:
key_list = {}
# Add in the pulp server itself as the last host in the list if there are CDS
# instances; if there are none, the pulp server will be the only entry (default case)
server_name = pulp.server.config.config.get('server', 'server_name')
hostnames.append(server_name)
repo_hosted_url = pulp.server.config.config.get('server', 'relative_url')
    # Look up the repo's relative path from its first distributor's configuration
    repo_distributor_manager = manager_factory.repo_distributor_manager()
    distributor = repo_distributor_manager.get_distributors(repo['id'])[0]
    repo_relative_path = distributor['config'].get('relative_url')
repo_urls = []
for host in hostnames:
repo_url = 'https://%s%s/%s' % (host, repo_hosted_url, repo_relative_path)
repo_urls.append(repo_url)
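As a concrete illustration of the URL format assembled above, with all values invented for the example:
# With server_name 'pulp.example.com', relative_url '/pulp/repos' and a
# distributor relative_url of 'demo/repo' (hypothetical values), each entry
# in repo_urls looks like:
#   https://pulp.example.com/pulp/repos/demo/repo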
# add certificates
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
"""
Create a call request list for the synchronization of a repository and the
publishing of any distributors that are configured for auto publish.
@param repo_id: id of the repository to create a sync call request list for
@type repo_id: str
@param overrides: dictionary of configuration overrides for this sync
@type overrides: dict or None
@return: list of call request instances
@rtype: list
"""
repo_sync_manager = manager_factory.repo_sync_manager()
sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
action_tag('sync')]
sync_call_request = CallRequest(repo_sync_manager.sync,
[repo_id],
{'sync_config_override': overrides},
weight=sync_weight,
tags=sync_tags,
archive=True)
sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
sync_call_request.add_life_cycle_callback(dispatch_constants.CALL_ENQUEUE_LIFE_CYCLE_CALLBACK,
repo_sync_manager.prep_sync)
sync_call_request.add_life_cycle_callback(dispatch_constants.CALL_COMPLETE_LIFE_CYCLE_CALLBACK,
repo_sync_manager.post_sync)
call_requests = [sync_call_request]
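A hedged usage sketch: the itinerary is simply a list of CallRequest objects, so a caller builds it and hands it to the dispatch layer; the repository id and override key below are placeholders.
requests = sync_with_auto_publish_itinerary('example-repo',
                                            overrides={'num_threads': 2})
# the resulting call requests are then queued by whatever dispatch/coordinator
# code the caller already uses (outside the scope of this snippet)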
def consumer_content_install_itinerary(consumer_id, units, options):
"""
Create an itinerary for consumer content installation.
@param consumer_id: unique id of the consumer
@type consumer_id: str
@param units: units to install
@type units: list or tuple
@param options: options to pass to the install manager
@type options: dict or None
@return: list of call requests
@rtype: list
"""
manager = managers_factory.consumer_agent_manager()
args = [consumer_id]
kwargs = {'units': units, 'options': options}
weight = pulp_config.config.getint('tasks', 'consumer_content_weight')
tags = [resource_tag(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id),
action_tag('unit_install')]
    call_request = CallRequest(manager.install_content,
                               args,
                               kwargs,
                               weight=weight,
                               tags=tags,
                               archive=True,
                               asynchronous=True)
call_request.reads_resource(dispatch_constants.RESOURCE_CONSUMER_TYPE, consumer_id)
return [call_request]
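An illustrative call with made-up values; the unit follows the usual type_id/unit_key shape and the option key is only an example.
units = [{'type_id': 'rpm', 'unit_key': {'name': 'zsh'}}]
options = {'importkeys': True}  # example option only
requests = consumer_content_install_itinerary('consumer-1', units, options)
# the requests are then handed to the dispatch layer for execution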
@staticmethod
def storage_dir():
"""
The root storage path.
:return: The root storage path.
:rtype: str
"""
return config.get('server', 'storage_dir')
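A small sketch of composing a path under the storage root; the owning class is not shown in the snippet, so it is referred to here as Model (hypothetical), and the subdirectory is an arbitrary example.
import os

# 'Model' stands in for the class that defines storage_dir() above
publish_dir = os.path.join(Model.storage_dir(), 'published', 'http')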
    def sync(self, repo, repo_source, skip_dict=None, progress_callback=None):
        # use None as the default to avoid sharing one mutable dict across calls
        if skip_dict is None:
            skip_dict = {}
        cacert = clicert = clikey = None
if repo['ca'] and repo['cert'] and repo['key']:
cacert = repo['ca'].encode('utf8')
clicert = repo['cert'].encode('utf8')
clikey = repo['key'].encode('utf8')
num_threads = config.config.getint('yum', 'threads')
remove_old = config.config.getboolean('yum', 'remove_old_packages')
num_old_pkgs_keep = config.config.getint('yum', 'num_old_pkgs_keep')
        # check for proxy settings; read each option explicitly, since
        # assigning through vars() does not update a function's local variables
        proxy_url = proxy_port = proxy_user = proxy_pass = None
        if config.config.has_option('yum', 'proxy_url'):
            proxy_url = config.config.get('yum', 'proxy_url')
        if config.config.has_option('yum', 'proxy_port'):
            proxy_port = config.config.get('yum', 'proxy_port')
        if config.config.has_option('yum', 'proxy_user'):
            proxy_user = config.config.get('yum', 'proxy_user')
        if config.config.has_option('yum', 'proxy_pass'):
            proxy_pass = config.config.get('yum', 'proxy_pass')
self.yum_repo_grinder = YumRepoGrinder('', repo_source['url'].encode('ascii', 'ignore'),
num_threads, cacert=cacert, clicert=clicert, clikey=clikey,
packages_location=pulp.server.util.top_package_location(),
remove_old=remove_old, numOldPackages=num_old_pkgs_keep, skip=skip_dict,
proxy_url=proxy_url, proxy_port=proxy_port,
proxy_user=proxy_user, proxy_pass=proxy_pass)
relative_path = repo['relative_path']
if relative_path:
store_path = "%s/%s" % (pulp.server.util.top_repos_location(), relative_path)
else:
store_path = "%s/%s" % (pulp.server.util.top_repos_location(), repo['id'])
report = self.yum_repo_grinder.fetchYumRepo(store_path, callback=progress_callback)
self.progress = yum_rhn_progress_callback(report.last_progress)
log.info("YumSynchronizer reported %s successes, %s downloads, %s errors" \
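For reference, these are the options sync() reads from the [yum] section of the server config, shown here with example values only:
# [yum]
# threads = 5
# remove_old_packages = false
# num_old_pkgs_keep = 2
# # proxy settings are optional and only used when present:
# # proxy_url = http://proxy.example.com
# # proxy_port = 3128
# # proxy_user = someuser
# # proxy_pass = somepass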
def sync_with_auto_publish_itinerary(repo_id, overrides=None):
"""
Create a call request list for the synchronization of a repository and the
publishing of any distributors that are configured for auto publish.
@param repo_id: id of the repository to create a sync call request list for
@type repo_id: str
@param overrides: dictionary of configuration overrides for this sync
@type overrides: dict or None
@return: list of call request instances
@rtype: list
"""
repo_sync_manager = manager_factory.repo_sync_manager()
sync_weight = pulp_config.config.getint('tasks', 'sync_weight')
sync_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
action_tag('sync')]
sync_call_request = CallRequest(repo_sync_manager.sync, # rbarlow_converted
[repo_id],
{'sync_config_override': overrides},
weight=sync_weight,
tags=sync_tags,
archive=True)
sync_call_request.updates_resource(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id)
call_requests = [sync_call_request]
repo_publish_manager = manager_factory.repo_publish_manager()
auto_publish_tags = [resource_tag(dispatch_constants.RESOURCE_REPOSITORY_TYPE, repo_id),
action_tag('auto_publish'), action_tag('publish')]
if repo is None:
raise MissingResource(repo_id)
manager = manager_factory.repo_distributor_manager()
distributor = manager.get_distributors(repo_id)[0]
distconfig = distributor['config']
# Load the repo cert bundle
repo_cert_bundles[repo['id']] = distconfig.get('auth_cert')
repo['name'] = repo['display_name']
repo['relative_path'] = distconfig['relative_url']
repos.append(repo)
# Repository base URL for this pulp server
server_url = constants.SERVER_SCHEME + config.config.get('server', 'server_name')
repo_relative_url = config.config.get('server', 'relative_url')
repo_base_url = '%s/%s' % (server_url, repo_relative_url)
# Global cert bundle, if any (repo cert bundles are handled above)
global_cert_bundle = repo_cert_utils.read_global_cert_bundle()
# Assemble the list of CDS hostnames in the same cluster
if cds['cluster_id'] is not None:
cluster_id = cds['cluster_id']
cds_members = list(self.collection.find({'cluster_id' : cds['cluster_id']}))
member_hostnames = [c['hostname'] for c in cds_members]
else:
cluster_id = None
member_hostnames = None
payload = {
'repos' : repos,