# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Fragment: GitHub issue import; the enclosing function starts before this
# chunk, and indentation appears stripped — structure below is inferred.
# `issue' is presumably a GitHub API issue payload with 'closed_at', 'number',
# 'created_at' and 'user_data' keys — TODO confirm against the caller.
closed_at = issue['closed_at']
if closed_at:
# Parse only when set; open issues presumably carry a falsy closed_at.
closed_at = str_to_datetime(issue['closed_at'])
# Idempotent import: look the bug up by its GitHub issue number first.
bug, created = self.object.bugs.get_or_create(
bug_id=str(issue['number']),
defaults={'close_date': closed_at,
'severity': None})
logger.info("%s bug [%s]",
"Imported" if created else "Found",
bug)
if created:
bug_create_time = str_to_datetime(issue['created_at'])
# The issue body is stored as a synthetic first comment ('VIRTUAL-1'),
# mirroring the convention used by the other importers in this file.
comment = bug.comments.create(comment_id='VIRTUAL-1',
author=self.get_user(
issue['user_data']),
timestamp=bug_create_time)
self.record_timestamp(bug_create_time)
logger.info("Imported bug body as [%s]", comment)
# TODO: not supported yet
comments = []
# Import comments
for comment in comments:
# comment['issue_url'] ends in the issue number (e.g. .../issues/42).
issue_number = os.path.basename(comment['issue_url'])
bug = self.object.bugs.get(bug_id=issue_number)
# NOTE(review): the dict literal below is truncated in this chunk.
extrafields = {
'author': self.get_user(comment['user']['login']),
def _import_comment(self, bug, comment_id, author, timestamp):
    """
    Import a single comment into `bug'.

    @arg bug btinfo.models.Bug object to import comment for
    @arg comment_id String representing comment ID
    @arg author Jira user
    @arg timestamp String timestamp in iso8601 form
    @returns True if comment was created, otherwise false
    """
    parsed_time = str_to_datetime(timestamp)
    # Resolve the Jira user record into a local participant; the email
    # address may be absent from the payload.
    participant = get_participant(author['displayName'],
                                  author.get('emailAddress'))
    _, created = bug.comments.get_or_create(
        comment_id=comment_id,
        defaults={
            'timestamp': parsed_time,
            'author': participant})
    if not created:
        return False
    # Only newly created comments advance the import watermark.
    self.record_timestamp(parsed_time)
    return True
def get_commit_authorship(self, commit):
    """
    Return the (author, timestamp) pair for a raw `commit' item.

    @arg commit Mapping with a 'data' dict holding 'Author' and 'CommitDate'
    @returns tuple of (committer, datetime parsed from CommitDate)
    """
    data = commit['data']
    # hg-git adds ext:(....) metadata blocks to GIT_AUTHOR_NAME; strip
    # them before resolving the committer.
    cleaned_userid = self.userid_ext_hggit_regex.sub(' ', data['Author'])
    when = grimoirelab.toolkit.datetime.str_to_datetime(data['CommitDate'])
    who = self.get_committer(userid=cleaned_userid)
    return (who, when)
# --- Fragment: bug/journal import (Redmine-style payload); the enclosing
# function is not visible here — `created', `severity', `bug' and `data'
# come from code outside this chunk.
if not created:
# Existing bug: just refresh the severity.
bug.severity = severity
bug.save()
else:
logger.info("Saving initial comment for [%s]", bug)
# The issue description is stored as a synthetic 'VIRTUAL-1' comment.
# NOTE(review): `self.getuser' here vs `self.get_user' in the GitHub
# fragment above — confirm which spelling the class actually defines.
bug.comments.create(
comment_id='VIRTUAL-1',
author=self.getuser(data['author_data']),
timestamp=str_to_datetime(data['created_on'])
)
# Walk the journal entries; the latest closing entry wins as close date.
last_closed_time = None
for journal in data['journals']:
journal_time = str_to_datetime(journal['created_on'])
comment, created = bug.comments.get_or_create(
comment_id=journal['id'],
author=self.getuser(journal['user_data']),
timestamp=journal_time)
logger.info("%s comment [%s]",
"Created" if created else "Found", comment)
if self.journal_is_closing_entry(journal):
last_closed_time = journal_time
if last_closed_time is not None:
bug.close_date = last_closed_time
bug.save()
# --- Fragment: near-duplicate of the preceding journal-import code, here
# preceded by the get_or_create that logically starts the routine; looks
# like a paste artifact, truncated before the final close_date save.
bug, created = self.object.bugs.get_or_create(bug_id=data['id'],
defaults={'severity': severity})
logger.info("%s bug [%s]",
"Created" if created else "Found", bug)
if not created:
# Existing bug: refresh the severity only.
bug.severity = severity
bug.save()
else:
logger.info("Saving initial comment for [%s]", bug)
# Store the description as the synthetic 'VIRTUAL-1' first comment.
bug.comments.create(
comment_id='VIRTUAL-1',
author=self.getuser(data['author_data']),
timestamp=str_to_datetime(data['created_on'])
)
# Track the latest journal entry that closed the bug.
last_closed_time = None
for journal in data['journals']:
journal_time = str_to_datetime(journal['created_on'])
comment, created = bug.comments.get_or_create(
comment_id=journal['id'],
author=self.getuser(journal['user_data']),
timestamp=journal_time)
logger.info("%s comment [%s]",
"Created" if created else "Found", comment)
if self.journal_is_closing_entry(journal):
last_closed_time = journal_time
# --- Fragment: Bugzilla XML import; enclosing function not visible. `bug'
# is presumably a parsed Bugzilla XML dict (each field a list of nodes with
# '__text__'), `dj_bug' the Django model instance — TODO confirm.
dj_bug.severity = self.translate_severity(bug['bug_severity'][0]['__text__'])
dj_bug.save()
# Bugs without comments lack 'long_desc'; normalise to an empty list.
if 'long_desc' not in bug:
bug['long_desc'] = []
for comment in bug['long_desc']:
comment_id = comment['commentid'][0]['__text__']
logger.debug("Comment is: %s", comment_id)
author = comment['who'][0]
real_name = author['name']
email = author['__text__']
# Bugzilla may redact addresses; treat values without '@' as missing.
if email.find('@') < 0:
email = None
author = get_participant(real_name, email)
posting_date = str_to_datetime(comment['bug_when'][0]['__text__'])
dj_comment, created = dj_bug.comments.get_or_create(
comment_id=comment_id,
defaults=dict(timestamp=posting_date,
author=author))
# Repair drifted fields on already-imported comments.
dirty = False
if dj_comment.timestamp != posting_date:
dirty = True
dj_comment.timestamp = posting_date
logger.warning("Fixing timestamp of [%s]",
dj_comment)
if dj_comment.author != author:
dirty = True
# NOTE(review): the author mismatch is logged as "fixed" but
# dj_comment.author is never reassigned, and `dirty' is never used to
# trigger a save — looks like a missing assignment/save. Confirm.
logger.warning("Fixing author of [%s], from [%s] to [%s]",
dj_comment, dj_comment.author, author)
# None. Check directly to avoid python-bugzilla's
# autorefreshing if it doesn't exist.
if 'cf_last_closed' in bug and bug['cf_last_closed'][0]['__text__']:
last_closed_date = bug['cf_last_closed'][0]['__text__']
# FIXME: For now we assume that the last change time is when the
# bug was closed, but that is not always the case. We should look
# into calling Bug::history to get this information.
elif bug['resolution']:
last_closed_date = bug['delta_ts'][0]['__text__']
else:
last_closed_date = None
if last_closed_date:
last_closed_date = str_to_datetime(last_closed_date)
# Only touch close_date (and the import watermark) when it changed.
if last_closed_date and last_closed_date != dj_bug.close_date:
self.record_timestamp(last_closed_date)
dj_bug.close_date = last_closed_date
# NOTE(review): the lines below repeat the start of this fragment
# (severity / long_desc handling) and are truncated mid-loop — a
# duplicated paste artifact.
dj_bug.severity = self.translate_severity(bug['bug_severity'][0]['__text__'])
dj_bug.save()
if 'long_desc' not in bug:
bug['long_desc'] = []
for comment in bug['long_desc']:
comment_id = comment['commentid'][0]['__text__']
logger.debug("Comment is: %s", comment_id)
author = comment['who'][0]
# NOTE(review): this definition is incomplete in this chunk — the try /
# transaction.atomic() block is cut off before its except clause.
def _run(self):
# Iterate fetched Jira items and import each one as a local Bug.
for bug in self.iter_bugs():
issue = bug['data']
# 'resolutiondate' is empty/None for unresolved issues.
cl_date = issue['fields']['resolutiondate']
if cl_date:
close_date = str_to_datetime(cl_date)
else:
close_date = None
try:
# Import each issue atomically so a failure rolls back cleanly.
with transaction.atomic():
bug, created = self.object.bugs.get_or_create(
bug_id=issue['key'],
defaults={'close_date': close_date})
logger.info("%s bug [%s]",
"Imported" if created else "Found",
issue['key'])
# Create first comment, since that seems to be merged into
# the issue
if created:
# Jira timestamps here are unix epoch values.
def parse_date(timestamp):
return grimoirelab.toolkit.datetime.unixtime_to_datetime(timestamp)
# --- Fragment: Crossminer API client fetch; the enclosing method is not
# visible — `category' and `project' come from outside this chunk, and the
# if-branch matching the `elif' below is cut off above.
api = self.api_projects_url
elif category == CATEGORY_METRIC:
# Get all metrics definitions and then find the values for the current project
api_metrics = self.api_metrics_url
metrics = json.loads(self.fetch(api_metrics))
else:
raise ValueError(category + ' not supported in Crossminer')
if category == CATEGORY_PROJECT:
logger.debug("Crossminer client calls APIv1: %s", api)
projects = self.fetch(api)
yield projects
else:
# One request per metric: /p/<project>/m/<metric_id>.
for metric in metrics:
metric_id = metric['id']
api = urijoin(self.api_projects_url, "/p/%s/m/%s" % (project, metric_id))
logger.debug("Crossminer client calls APIv1: %s", api)
# NOTE(review): debug print left in; prefer logger.debug as above.
print(api)
project_metric = self.fetch(api)
yield project_metric