import sys
from email import message_from_string
from email.header import decode_header
from imaplib import IMAP4

import sh

imap = IMAP4(options.imap_server)
imap.login(options.username, options.password)
result, data = imap.select(options.folder)
if result != 'OK':
    raise EmailNotesError(result)
result, messages = imap.search(None, '(UNSEEN)')
if result != 'OK':
    raise EmailNotesError(result)
if messages[0] == '':
    sys.exit()
# TODO there seems to be a bug where notes in other notebooks can be
# deleted; it might happen when a folder is created?
git = sh.git.bake(_cwd=options.repo)
for message_index in messages[0].split(' '):
    _, data = imap.fetch(message_index, '(RFC822)')
    message = message_from_string(data[0][1])
    subject = decode_header(message.get('Subject'))[0]
    if subject[1] is None:
        subject = subject[0]
    else:
        subject = subject[0].decode(subject[1])
    subject = subject.replace('\r\n', '')
    append = False
    if options.folder is None or options.folder.lower() == 'inbox':
        with_notebook = r'^\*Note\*\s(.*)\s@(.*)$'
        without_notebook = r'^\*Note\*\s(.*)$'
    else:
        with_notebook = r'^(.*)\s@(.*)$'
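
# The subject handling above decodes only the first fragment returned by
# decode_header(). As a minimal sketch (not part of the original script;
# the helper name decode_subject is hypothetical), a more robust version
# joins every fragment and falls back to UTF-8:
def decode_subject(raw_subject):
    parts = []
    for fragment, charset in decode_header(raw_subject or ''):
        if isinstance(fragment, bytes):
            # Assume UTF-8 when the header does not declare a charset
            fragment = fragment.decode(charset or 'utf-8', errors='replace')
        parts.append(fragment)
    return ''.join(parts).replace('\r\n', '')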
def start_dev_cycle(catmaid_folder):
    """Prepare changelog and documentation for a new development cycle."""
    git = sh.git.bake(_cwd=catmaid_folder)
    project_root = git("rev-parse", "--show-toplevel").stdout.decode('utf-8').strip()
    log("CATMAID directory: {}".format(project_root))
    # Add a new header to CHANGELOG
    update_file(os.path.join(project_root, 'CHANGELOG.md'), update_changelog)
    # Add a new header to API CHANGELOG
    update_file(os.path.join(project_root, 'API_CHANGELOG.md'), update_api_changelog)
    # Change the reference version
    update_file(os.path.join(project_root, "django/projects/mysite/utils.py"), update_version)
    # Update version references in the documentation
    update_file(os.path.join(project_root, "sphinx-doc/source/conf.py"), update_documentation)
    # Create commit
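
# start_dev_cycle() relies on an update_file(path, transform) helper that
# is not shown here. A minimal sketch, assuming each transform callback
# maps the old file content to the new content:
def update_file(path, transform):
    # Read the file, run it through the transform, and write it back
    with open(path, 'r', encoding='utf-8') as fh:
        content = fh.read()
    with open(path, 'w', encoding='utf-8') as fh:
        fh.write(transform(content))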
project_toprocess = []
for repo in repos:
    repo_dir = os.path.join(datadir, project)
    if len(repos) > 1:
        repo_dir = os.path.join(repo_dir, os.path.split(repo)[1])
    try:
        source_branch, _, _ = refreshrepo(repo, repo_dir, source_branch,
                                          local=local)
    except Exception:
        # The error was already logged by refreshrepo, and the only
        # side effect is that we are not adding this commit to the
        # list of commits to be processed, so we can ignore it and
        # move on to the next repo
        continue
    git = sh.git.bake(_cwd=repo_dir, _tty_out=False)
    # Git gives us commits already sorted in the right order
    if tags_only:
        logger.info('Building tags only for %s' % project)
        if since == '-1':
            # We need two entries, as HEAD will be listed too
            since = '-2'
        lines = filter(
            lambda x: x.find('tag: ') >= 0,
            git.log('--simplify-by-decoration',
                    "--pretty=format:'%ct %H %d'",
                    since, "--first-parent",
                    "--reverse", "%s" % source_branch))
    else:
        lines = git.log("--pretty=format:'%ct %H'",
                        since, "--first-parent",
                        "--reverse")
def git_clone_to_local(dest_directory, webhook_data):
    git = sh.git.bake()
    logger.debug('Making destination directory %s' % dest_directory)
    print('Making destination directory %s' % dest_directory)
    sh.mkdir('-p', dest_directory)
    sh.cd(dest_directory)
    logger.debug("Checking for repo_name %s in %s" % (webhook_data.repo_name, sh.pwd()))
    if not os.path.exists(webhook_data.repo_name):
        logger.debug("Cloning new repository")
        print(git.clone(webhook_data.repo_url, webhook_data.repo_name))
    sh.cd(webhook_data.repo_name)
    print(git.fetch('--all'))
    try:
        # show-ref exits non-zero when the branch does not exist locally
        git('show-ref', '--heads', webhook_data.branch_name)
        branch_exists = True
    except sh.ErrorReturnCode:
        branch_exists = False
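    # A minimal sketch of how the branch_exists flag might be used next,
    # assuming the goal is to end up on webhook_data.branch_name (the
    # original continuation is not shown):
    if branch_exists:
        git.checkout(webhook_data.branch_name)
        git.pull('origin', webhook_data.branch_name)
    else:
        # Create a local branch that tracks the remote branch
        git.checkout('-b', webhook_data.branch_name,
                     'origin/%s' % webhook_data.branch_name)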
import os
import sys
import tempfile

import sh

REPO_PATH = {
    'extras': '/srv/ansible/stable-2.2/lib/ansible/modules/extras',
    'core': '/srv/ansible/stable-2.2/lib/ansible/modules/core',
}

if __name__ == '__main__':
    commit_hash = sys.argv[1]
    which_modules = sys.argv[2]
    git = sh.git.bake('--no-pager', _tty_out=False)
    try:
        # Get the change
        git('checkout', 'devel')
        patch = git('format-patch', '-1', '--stdout', commit_hash).stdout
    finally:
        git('checkout', '-')
    # Transform the change for the new repo
    patch = patch.replace(b'lib/ansible/modules/', b'')
    new_patch = []
    patch_stream = (l for l in patch.split(b'\n'))
    for line in patch_stream:
        if line.strip() == b'---':
            # Record the cherry-pick note at the end of the commit
            # message, just before the '---' separator
            new_patch.append(b'(cherry picked from %s)' % commit_hash.encode('utf-8'))
            new_patch.append(line)
            break
        new_patch.append(line)
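
    # The loop above stops at the first '---' separator. A minimal sketch
    # of finishing up: copy the rest of the patch through unchanged, save
    # it to a temporary file, and apply it in the target repo with
    # `git am` (an assumption about the original continuation, which is
    # not shown):
    new_patch.extend(patch_stream)
    with tempfile.NamedTemporaryFile(suffix='.patch') as patch_file:
        patch_file.write(b'\n'.join(new_patch))
        patch_file.flush()
        dest_git = sh.git.bake('--no-pager', _cwd=REPO_PATH[which_modules],
                               _tty_out=False)
        dest_git('am', patch_file.name)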
    :param version: A string containing the branch/tag/sha to be exported.
    :param debug: An optional bool to toggle debug output.
    :return: None
    """
    if not any(
        (
            _has_branch(version, debug),
            _has_tag(version, debug),
            _has_commit(version, debug),
        )
    ):
        cmd = sh.git.bake("fetch")
        util.run_command(cmd, debug=debug)
    cmd = sh.git.bake("checkout", version)
    util.run_command(cmd, debug=debug)
    cmd = sh.git.bake("clean", "-d", "-x", "-f")
    util.run_command(cmd, debug=debug)
    if _has_branch(version, debug):
        cmd = sh.git.bake("pull", rebase=True, ff_only=True)
        util.run_command(cmd, debug=debug)
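
# The _has_branch/_has_tag/_has_commit predicates are not shown above. A
# minimal sketch of one of them, assuming util.run_command raises
# sh.ErrorReturnCode on a non-zero exit status:
def _has_tag(version, debug=False):
    # `git show-ref --verify` exits non-zero when the tag does not exist
    cmd = sh.git.bake("show-ref", "--verify", "--quiet",
                      "refs/tags/%s" % version)
    try:
        util.run_command(cmd, debug=debug)
        return True
    except sh.ErrorReturnCode:
        return False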
def db_git_commit(commit_desc, archive_list=None):
    """Use 'git add' and 'git commit' to commit any pending edits."""
    _git = sh.git.bake('-C', LINKPAD_DBPATH)  # Run 'git' against this specific repo
    # Track any changes to the database file
    db_file = db_filepath_database_file()
    if os.path.isfile(db_file):
        _git.add(db_file)
    # Track any changes in entry archive files
    if archive_list is not None:
        for entry in archive_list:
            archive_dir = db_filepath_entry_archive_dir(entry['id'])
            if not entry.get('archived', False):
                continue
            if entry.get('hard_deleted', False):
                _git.rm('-r', '-f', archive_dir)
            else:
                _git.add('-A', '-f', archive_dir)
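    # A minimal sketch of the commit step the docstring promises (the
    # original continuation is not shown); `git commit` exits non-zero
    # when nothing is staged, so that case is swallowed deliberately:
    try:
        _git.commit('-m', commit_desc)
    except sh.ErrorReturnCode:
        pass  # nothing staged; leave the repo untouched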
rhpkg = sh.rhpkg.bake(_cwd=distgit_dir, _tty_out=False,
                      _timeout=3600,
                      _err=self._process_koji_output,
                      _out=self._process_koji_output,
                      _env={'PATH': '/usr/bin/'})
if (self.config_options.pkginfo_driver ==
        'dlrn.drivers.downstream.DownstreamInfoDriver' and
        self.config_options.use_upstream_spec):
    # This is a special situation. We are copying the upstream spec
    # over, but then building the srpm and importing it. In this
    # situation, rhpkg import will complain because there are
    # uncommitted changes in the repo, which we will only commit
    # together with the srpm. So let's reset the git repo right
    # before that.
    git = sh.git.bake(_cwd=distgit_dir, _tty_out=False,
                      _timeout=3600,
                      _err=self._process_koji_output,
                      _out=self._process_koji_output,
                      _env={'PATH': '/usr/bin/'})
    git.checkout('--', '*')
with io.open("%s/rhpkgimport.log" % output_dir, 'a',
             encoding='utf-8', errors='replace') as self.koji_fp:
    rhpkg('import', '--skip-diff', src_rpm)
    pkg_date = strftime("%Y-%m-%d-%H%M%S", localtime(time()))
    rhpkg('commit', '-p', '-m',
          'DLRN build at %s\n\nSource SHA: %s\nDist SHA: %s\n' %
          (pkg_date, commit.commit_hash, commit.distro_hash))
# After running rhpkg commit we have a different commit hash, so
# let's update it
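
# A minimal sketch of reading back the new distgit commit hash referenced
# by the comment above; a fresh handle is used because `git` above sends
# its stdout to the koji logging callback (an assumption, not the
# original continuation):
plain_git = sh.git.bake(_cwd=distgit_dir, _tty_out=False)
new_dist_hash = str(plain_git('rev-parse', 'HEAD')).strip()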
def db_format_upgrade_db():
    db_check_format_ver(allow_lower=True)
    format_file = db_filepath_format_file()
    format_ver = db_format_ver()
    _git = sh.git.bake('-C', LINKPAD_DBPATH)  # Run 'git' against this specific repo
    changed = False
    if format_ver == 1:
        click.echo("Upgrading database '{}' to format '{}' ...".format(LINKPAD_DBNAME, 2))
        """
        format=1->2:
        ------------
        Restructure the database 'archive' subdirectory to shard entry
        subdirectories by the first 2 chars, to avoid having a linearly
        increasing number of 'archive' child directories.
        """
        db_archive_dir = os.path.join(LINKPAD_DBPATH, 'archive')
        if os.path.isdir(db_archive_dir):
            for d in os.scandir(db_archive_dir):
                name = os.path.basename(d.path)
                # Ignore non-directories
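                if not d.is_dir():
                    continue
                # A minimal sketch of the sharding step described in the
                # format=1->2 note above: move archive/<id> into
                # archive/<first 2 chars of id>/<id> (an assumption; the
                # original loop body is not shown). `git mv` keeps the
                # file history intact:
                shard_dir = os.path.join(db_archive_dir, name[:2])
                os.makedirs(shard_dir, exist_ok=True)
                _git.mv(d.path, os.path.join(shard_dir, name))
                changed = True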
sanity_hello_world = blueprint_repo_dir / 'openstack' / 'hello_world_sanity.yaml'

## Temp dev workaround
cloudify_conf_path = path.path(os.getcwd()) / '.cloudify'
if cloudify_conf_path.exists():
    cloudify_conf_path.remove()

####################################
## Helper functions and variables
####################################

def out(line): sys.stdout.write(line)
def err(line): sys.stderr.write(line)

cfy = sh.cfy.bake(_out=out, _err=err)
git = sh.git.bake(_out=out, _err=err)
client = CosmoManagerRestClient(management_ip)

def get_manager_state():
    print('Fetch manager state')
    blueprints = {}
    for blueprint in client.list_blueprints():
        blueprints[blueprint.id] = blueprint
    deployments = {}
    for deployment in client.list_deployments():
        deployments[deployment.id] = deployment
    nodes = {}
    for deployment_id in deployments:
        for node in client.list_deployment_nodes(deployment_id).nodes:
            nodes[node.id] = node
    workflows = {}
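    # A minimal sketch of a return value for get_manager_state(), assuming
    # it simply bundles the collected dictionaries (the original
    # continuation, including how `workflows` is populated, is not shown):
    return {
        'blueprints': blueprints,
        'deployments': deployments,
        'nodes': nodes,
        'workflows': workflows,
    }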