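# The snippets below are method excerpts from a larger project. The imports
# here are an assumption about what they rely on (libmozdata for hgmozilla and
# Query); project-local helpers such as utils, HG_MAIL, BUG_PAT and
# patch_analysis are referenced but not shown.
import datetime
import functools

from libmozdata import hgmozilla
from libmozdata import utils as lmdutils
from libmozdata.connection import Query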
def get_hg(self, bugs):
    url = hgmozilla.Revision.get_url(self.channel)
    queries = []
    not_landed = set()

    def handler_rev(json, data):
        # Mark the revision as landed when it references the expected bug
        # and has not been backed out.
        info = utils.get_info_from_hg(json)
        if info["bugid"] == data["bugid"] and not info["backedout"]:
            data["ok"] = True

    for info in bugs.values():
        for rev, i in info.get("land", {}).items():
            queries.append(Query(url, {"node": rev}, handler_rev, i))

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    # A bug is "not landed" when none of its revisions were flagged ok.
    for bugid, info in bugs.items():
        if all(not i["ok"] for i in info.get("land", {}).values()):
            not_landed.add(bugid)

    return not_landed
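# Illustrative sketch (hypothetical data and names, not from the project): only
# the final aggregation loop of get_hg is reproduced here, with the hgmozilla
# queries skipped, to show the `bugs` shape it expects.
def not_landed_sketch(bugs):
    # A bug counts as "not landed" when none of its revisions were marked ok.
    not_landed = set()
    for bugid, info in bugs.items():
        if all(not i["ok"] for i in info.get("land", {}).values()):
            not_landed.add(bugid)
    return not_landed

# not_landed_sketch({"1234567": {"land": {"abcdef0": {"ok": False}}}})
# -> {"1234567"}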
def filter_from_hg(self, bzdata, user_info):
    """Get the bugs where an associated revision contains
    the bug id in the description"""

    def handler_rev(bugid, json, data):
        # Keep the revision author when the commit message mentions the bug
        # and the revision is not a backout.
        if bugid in json["desc"] and not utils.is_backout(json):
            user = json["user"]
            if bugid not in data:
                data[bugid] = set()
            m = HG_MAIL.match(user)
            if m:
                hgname = m.group(1).strip()
                hgmail = m.group(2).strip()
                data[bugid].add((hgname, hgmail))

    url = hgmozilla.Revision.get_url("nightly")
    queries = []
    for bugid, info in bzdata.items():
        hdler = functools.partial(handler_rev, bugid)
        for rev in info["revisions"]:
            queries.append(Query(url, {"node": rev}, hdler, self.hgdata))

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    self.set_autofixable(bzdata, user_info)

    return self.hgdata
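# HG_MAIL is not defined in this snippet; handler_rev uses it to split a
# Mercurial "user" field such as "Jane Doe <jdoe@example.com>" into a
# (name, mail) pair. A regex of roughly this shape would do that; the
# project's actual pattern may differ.
import re

HG_MAIL_SKETCH = re.compile(r"^([^<]+)<([^>]+)>$")

# Example: HG_MAIL_SKETCH.match("Jane Doe <jdoe@example.com>") yields the
# groups ("Jane Doe ", "jdoe@example.com"), which .strip() turns into the
# (hgname, hgmail) pair stored above.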
# A second variant of get_hg, which records push dates and backout status
# for each landed revision:
def get_hg(self, bugs):
    url = hgmozilla.Revision.get_url(self.channel)
    queries = []
    def handler_rev(json, data):
        # Record the push date (as UTC) and whether the revision was backed
        # out; clear the bug id when the commit message references a
        # different bug.
        push = json["pushdate"][0]
        push = datetime.datetime.utcfromtimestamp(push)
        push = lmdutils.as_utc(push)
        data["date"] = lmdutils.get_date_str(push)
        data["backedout"] = utils.is_backout(json)
        m = BUG_PAT.search(json["desc"])
        if not m or m.group(1) != data["bugid"]:
            data["bugid"] = ""

    for info in bugs.values():
        for rev, i in info["land"].items():
            queries.append(Query(url, {"node": rev}, handler_rev, i))

    if queries:
        hgmozilla.Revision(queries=queries).wait()

    # Clean: drop revisions with the wrong bug id or pushed outside the
    # [self.date, self.tomorrow) window, then drop bugs left with no
    # revisions at all.
    bug_torm = []
    for bug, info in bugs.items():
        torm = []
        for rev, i in info["land"].items():
            if not i["bugid"] or not (
                self.date <= lmdutils.get_date_ymd(i["date"]) < self.tomorrow
            ):
                torm.append(rev)
        for x in torm:
            del info["land"][x]
        if not info["land"]:
            bug_torm.append(bug)
    for x in bug_torm:
        del bugs[x]
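# For reference, a sketch of the pushdate conversion done in handler_rev above,
# using an assumed timestamp and plain datetime in place of the lmdutils
# helpers: json["pushdate"][0] is a Unix timestamp converted to UTC before
# being formatted.
def pushdate_sketch(push_ts=1700000000):
    # 1700000000 is a hypothetical pushdate value (2023-11-14 22:13:20 UTC).
    push = datetime.datetime.fromtimestamp(push_ts, tz=datetime.timezone.utc)
    return push.strftime("%Y-%m-%d %H:%M:%S")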
def get_hg_patches(self, bugs):
    url = hgmozilla.RawRevision.get_url("nightly")
    queries = []

    def handler(patch, data):
        # Accumulate the patch-analysis counters (e.g. addlines) across all
        # of a bug's non-backed-out revisions.
        info = self.patch_analysis(patch)
        if "addlines" not in data:
            data.update(info)
        else:
            for k, v in info.items():
                data[k] += v

    for info in bugs.values():
        for rev, i in info["land"].items():
            if not i["backedout"]:
                queries.append(Query(url, {"node": rev}, handler, info))

    if queries: