Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
+ "quotes": [2, "single"]
+ }
+}
diff --git a/a.js b/a.js
new file mode 100644
index 0000000..f119a7f
--- /dev/null
+++ b/a.js
@@ -0,0 +1 @@
+console.log("bar")
"""
spec = Specification()
spec.linters.append(ObjectDict(name='eslint', pattern=None))
lint = LintProcessor(pr_context, spec, os.path.join(FIXTURES_PATH, 'eslint'))
patch = PatchSet(diff.split('\n'))
with mock.patch.object(lint, 'load_changes') as load_changes,\
mock.patch.object(lint, 'update_build_status') as build_status,\
mock.patch.object(lint, '_report') as report:
load_changes.return_value = patch
build_status.return_value = None
report.return_value = (1, 2)
lint.problems.set_changes(patch)
lint.process()
assert load_changes.called
assert len(lint.problems) == 1
problem = lint.problems[0]
assert problem.filename == 'a.js'
assert problem.line == 1
index 0000000..fdeea15
--- /dev/null
+++ b/a.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, unicode_literals
+
+
+def add(a, b):
+ return a+ b
"""
spec = Specification()
spec.linters.append(ObjectDict(name='pylint', pattern=None))
lint = LintProcessor(pr_context, spec, os.path.join(FIXTURES_PATH, 'pylint'))
patch = PatchSet(diff.split('\n'))
with mock.patch.object(lint, 'load_changes') as load_changes,\
mock.patch.object(lint, 'update_build_status') as build_status,\
mock.patch.object(lint, '_report') as report:
load_changes.return_value = patch
build_status.return_value = None
report.return_value = (1, 2)
lint.problems.set_changes(patch)
lint.process()
assert load_changes.called
assert len(lint.problems) == 4
problem = lint.problems[0]
assert problem.filename == 'a.py'
def test_patchset_from_string(self):
    """Parsing diff text via from_string() must equal parsing the open file."""
    with codecs.open(self.sample_file, 'r', encoding='utf-8') as handle:
        parsed_from_text = PatchSet.from_string(handle.read())
    with codecs.open(self.sample_file, 'r', encoding='utf-8') as handle:
        parsed_from_stream = PatchSet(handle)
    self.assertEqual(parsed_from_text, parsed_from_stream)
def get_candidates_from_diff(difftext):
    """Return review candidates extracted from a unified diff.

    Newly added files are listed by path alone; modified files are listed as
    ``"path:start-end,start-end"`` with one target-side line range per hunk.

    :param difftext: unified diff content accepted by ``unidiff.PatchSet``.
    :return: list of candidate strings.
    :raises SystemExit: if the unidiff library cannot be imported.
    """
    try:
        import unidiff
    except ImportError as e:
        # Bug fix: the original passed ("…%s", e.message) as two arguments,
        # so the message was never interpolated, and exceptions have no
        # .message attribute on Python 3. Format eagerly instead.
        raise SystemExit("Could not import unidiff library: %s" % e)
    patch = unidiff.PatchSet(difftext, encoding='utf-8')
    candidates = []
    # The identity list-comprehension wrapper around this concatenation was
    # redundant; iterate the combined list directly.
    for patchedfile in patch.added_files + patch.modified_files:
        if patchedfile.source_file == '/dev/null':
            # Brand-new file: the whole file is a candidate.
            candidates.append(patchedfile.path)
        else:
            lines = ",".join("%s-%s" % (hunk.target_start,
                                        hunk.target_start + hunk.target_length)
                             for hunk in patchedfile)
            candidates.append("%s:%s" % (patchedfile.path, lines))
    return candidates
def check_diff(self):
    """Fetch this PR's diff, validate it, and return the parsed pieces.

    Returns None (after recording a problem) when the diff is unavailable
    or no files were touched.
    """
    raw_diff = requests.get(self.pr_info['diff_url'], auth=API_AUTH).text
    # GitHub serves this exact sentinel body when the source fork was deleted.
    if raw_diff == "Sorry, this diff is unavailable.":
        self.add_invalid('Your PR looks like an ORPHAN (you deleted the fork). This cannot be automatically checked. Please close this PR and create a new one without removing the fork.')
        return
    parsed = PatchSet(raw_diff)
    if raw_diff.count("diff --git") < 1:
        self.add_invalid('Less than one file has been added/removed/modified.')
        return
    # Disabled check kept for reference:
    # if any(d.is_modified_file and d.path != "contributors.md" for d in parsed):
    #     self.add_attention('This PR modifies one or more pre-existing files.')
    #     return
    new_file = self.parse_diff(raw_diff.split("\n"))
    return {'lines': new_file, 'diff': parsed, 'diff_file': raw_diff}
def get_save_pkgbuild_diff(self):
    """Fetch the commit-range diff from GitHub and store in ``self.gh_diff``
    the first file whose parent directory matches this package's name
    (empty string when no file matches)."""
    trans = self._trans_obj
    # Nothing to diff unless both SHAs are present or an explicit patch exists.
    if not ((trans.gh_sha_before and trans.gh_sha_after) or trans.gh_patch):
        return
    gh, repo = self._pkg_obj.get_github_api_client()
    comparison = repo.compare_commits(trans.gh_sha_before, trans.gh_sha_after)
    patch = PatchSet(io.StringIO(comparison.diff().decode('UTF-8')))
    matches = [pf for pf in patch if pf.path.split('/')[-2] == self._pkg_obj.pkgname]
    self.gh_diff = str(matches[0]) if matches else ''
def generate_changelog(data, no_functions=False, fill_pr_titles=False):
changelogs = {}
changelog_list = []
prs = []
out = ''
diff = PatchSet(data)
for file in diff:
changelog = find_changelog(file.path)
if changelog not in changelogs:
changelogs[changelog] = []
changelog_list.append(changelog)
changelogs[changelog].append(file)
# Extract PR entries from newly added tests
if 'testsuite' in file.path and file.is_added_file:
# Only search first ten lines as later lines may
# contains commented code which a note that it
# has not been tested due to a certain PR or DR.
for line in list(file)[0][0:10]:
m = pr_regex.search(line.value)
if m:
def get_files_involved_in_pr(repo, pr_number):
    """
    Return a mapping of each file touched by the PR to the line numbers
    added to it (target-side numbering).
    """
    headers = {"Accept": "application/vnd.github.VERSION.diff"}
    query = f"/repos/{repo}/pulls/{pr_number}"
    response = utils.query_request(query, headers=headers)
    patch = unidiff.PatchSet(response.content.splitlines(), encoding=response.encoding)
    files = {}
    for patched_file in patch:
        # target_file is "b/path/to/file"; strip the leading "b".
        name = patched_file.target_file[1:]
        added_lines = []
        for hunk in patched_file:
            added_lines.extend(
                line.target_line_no
                for line in hunk.target_lines()
                if line.is_added
            )
        files[name] = added_lines
    return files
patched_files = {}
# Process all patches provided by Github and save them in a new per file per line representation.
for patched_file in files:
patched_files[patched_file.filename] = {
"status": patched_file.status,
"sha": patched_file.sha,
"deltas": [],
}
patch_str = io.StringIO()
patch_str.write("--- a\n+++ b\n")
if patched_file.patch is not None:
patch_str.write(patched_file.patch)
patch_str.seek(0)
logging.debug(f"Parsing diff\n{patch_str.getvalue()}")
patch = PatchSet(patch_str, encoding=None)
for hunk in patch[0]:
for line in hunk:
if line.is_context:
continue
patched_files[patched_file.filename]["deltas"].append(
vars(line))
return patched_files
def diff(self) -> unidiff.PatchSet:
    """Download this pull request's raw diff and parse it into a PatchSet."""
    url = 'https://patch-diff.githubusercontent.com/raw/%s/pull/%s.diff' % (self._repo, self._pr_number)
    payload = util.request(url)
    return unidiff.PatchSet(payload.content.decode('utf-8'))