Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Applies a jsdiff patch (parsed object, or a unified-diff string) to `data`
// and rebuilds the file contents line by line.
// NOTE(review): this snippet is truncated in the visible source — the two
// forEach callbacks and the enclosing function are never closed here.
exports.applyPatch = function applyPatch(data, patch) {
var lines = data.split('\n');
var newLines = [];
// 1-based cursor into `lines`: how much of the input has been consumed so far.
var dataIndex = 1;
if (!patch.hunks) {
//
// patch is a textual unified diff. parse it
//
patch = JsDiff.parsePatch(patch)[0];
}
patch.hunks.forEach((hunk, hunkIndex) => {
// Copy the unchanged lines that precede this hunk.
for (var i = dataIndex; i < hunk.oldStart; i++) {
newLines.push(lines[i - 1]);
}
hunk.lines.forEach( (line, index) => {
if (line.charAt(0) === '+') {
// new line
newLines.push(line.slice(1));
} else if (line.charAt(0) === '-') {
// removed line
dataIndex++;
}
// NOTE(review): context lines (' ' prefix) are not handled in the visible
// portion, and dataIndex is never advanced past the prefix copied above —
// presumably done in the truncated remainder; verify against the full file.
}
// NOTE(review): fragment truncated at both ends — it begins mid-function
// (editsByUri, FileDiff, parseRepoURI, etc. are declared elsewhere) and the
// applyEdits(...) call at the bottom is never closed in the visible source.
// Builds one FileDiff per URI from a set of workspace edits.
const fileDiffs: FileDiff[] = []
for (const [uri, edits] of editsByUri) {
if (edits.length === 0) {
continue
} else if (edits.length === 1 && edits[0].rawPatch !== undefined) {
// Fast path: use the precomputed diff.
const fullPatch = edits[0].rawPatch
// Drop everything before the first hunk header ('@@') so fresh file
// headers can be prepended below.
const patchWithoutHeader = fullPatch.slice(fullPatch.indexOf('@@'))
const p = parseRepoURI(uri)
fileDiffs.push({
oldPath: uri.toString(),
newPath: uri.toString(),
hunks: parsePatch(fullPatch)
.flatMap(d => d.hunks)
.map(npmDiffToFileDiffHunk),
patch: `Index: ${p.filePath!}\n--- a/${p.filePath!}\n+++ b/${p.filePath!}\n${patchWithoutHeader}`,
patchWithFullURIs: `Index: ${uri}\n--- ${uri}\n+++ ${uri}\n${patchWithoutHeader}`,
})
} else {
// Slow path: re-derive the new text by applying each edit to the old file.
const oldText = await extensionsController.services.fileSystem.readFile(new URL(uri))
const t0 = Date.now()
const newText = applyEdits(
oldText,
edits.map(edit => {
// TODO!(sqs): doesnt account for multiple edits
// Convert the edit's (line, character) range into flat string offsets.
const startOffset = positionToOffset(oldText, edit.range.start)
const endOffset = positionToOffset(oldText, edit.range.end)
return { offset: startOffset, length: endOffset - startOffset, content: edit.newText }
})
// Maps a position inside a unified diff (hunkPosition = line offset within the
// file's diff body, hunk headers included) to a line number in the new ("b/")
// version of fileName.
// NOTE(review): truncated — the for-loop and function are never closed here,
// and there is no visible fallback when no file matches (`file` undefined) or
// when hunkPosition lies beyond the last hunk.
export function getLineNumber (diffStr, fileName, hunkPosition) {
const parsed = diff.parsePatch(diffStr)
// Pick the file whose new-side name matches; [0] assumes at most one match.
const file = parsed.filter(p => p.newFileName === 'b/' + fileName)[0]
let linesParsed = 0
for (let hunk of file.hunks) {
if (linesParsed + hunk.lines.length >= hunkPosition) {
// Target position falls inside this hunk: keep only the lines above it.
const remainingLines = hunk.lines.slice(0, hunkPosition - linesParsed)
// on the right side of the diff,
// context and added lines increment the line number
return remainingLines.reduce((acc, line) => {
return line[0] === '-' ? acc : acc + 1
}, hunk.newStart - 1)
} else {
// include the header for the next hunk (if any)
linesParsed += hunk.lines.length + 1
}
// Computes TextEdits that transform originalText into formattedText by
// diffing the two texts and turning each hunk into delete/insert edits.
// NOTE(review): truncated — only the '-' case of the switch is visible; the
// remaining cases, the loop closers, and the return are outside this snippet.
public static getEditsFromFormattedText(documentUri: string, originalText: string, formattedText: string,
startOffset: number = 0): TextEdit[] {
const diff = producePatch(documentUri, originalText, formattedText);
const unifiedDiffs = parsePatch(diff);
const edits: Edit[] = [];
// Pending edit being accumulated across consecutive changed lines.
let currentEdit: Edit | null = null;
for (const uniDiff of unifiedDiffs) {
for (const hunk of uniDiff.hunks) {
// Hunk line numbers are 1-based and relative to the original text.
let startLine = hunk.oldStart + startOffset;
for (const line of hunk.lines) {
switch (line[0]) {
case '-':
// Start (or extend) a Delete edit covering this removed line.
if (currentEdit === null) {
currentEdit = new Edit(EditAction.Delete, Position.create(startLine - 1, 0));
}
currentEdit.end = Position.create(startLine, 0);
startLine++;
// NOTE(review): fragment truncated at the start — `})` closes an expression
// (presumably a map/filter over patch operations) begun outside this snippet.
// For each op: read its patch file and patch op.dest in place on disk.
}).forEach(function (op) {
var strPatch = fs.readFileSync(op.patch, 'utf-8')
var strDest
try {
strDest = fs.readFileSync(op.dest, 'utf-8')
} catch (err) {
// Destination missing: treat as empty so the patch creates the file.
strDest = ''
}
// NOTE(review): parsePatch returns an array; jsdiff's applyPatch accepts an
// array only when it holds a single file diff — confirm patches are one-file.
fs.writeFileSync(op.dest,
jsdiff.applyPatch(strDest, jsdiff.parsePatch(strPatch)))
console.log('> patched', path.relative(__dirname, op.dest))
})
console.log()
function getEditsFromUnifiedDiffStr(diffstr) {
const unifiedDiffs = jsDiff.parsePatch(diffstr);
const filePatches = parseUniDiffs(unifiedDiffs);
return filePatches;
}
// CommonJS export so callers can require() this helper.
exports.getEditsFromUnifiedDiffStr = getEditsFromUnifiedDiffStr;
function getChangedLines(isIgnored, diff) {
return Diff.parsePatch(diff)
.flatMap(file =>
isIgnored(file.oldFileName) && isIgnored(file.newFileName)
? []
: file.hunks
)
.flatMap(hunk => hunk.lines)
.filter(line => line[0] === "+" || line[0] === "-").length;
}
// Renders a unified diff: a filename heading per file, then for each hunk a
// column of old-side line numbers ('\u00a0' placeholder for added lines, since
// added lines have no old-side number).
// NOTE(review): this JSX fragment is malformed/truncated in the visible source
// (adjacent sibling <div>s without a wrapper and unbalanced closing tags at
// the end); comments are kept outside the JSX to avoid altering the markup.
export const DiffViewer = ( { diff } ) => (
<div>
{ parsePatch( diff ).map( ( file, fileIndex ) => (
<div>
{ filename( file ) }
</div>
<div>
<div>
{ file.hunks.map( ( hunk, hunkIndex ) => {
let lineOffset = 0;
return hunk.lines.map( ( line, index ) => (
<div>
{ line[ 0 ] === '+' ? '\u00a0' : hunk.oldStart + lineOffset++ }
</div>
) );
} ) }
</div>
<div></div></div></div>
fileDiffs {
rawDiff
}
}
}
}
`,
// GraphQL variables for the comparison query above. NOTE(review): the query
// template literal's opening backtick is outside this snippet, so no comment
// can safely precede the closing backtick above.
vars: {
repositoryName: base.repoName,
baseRevSpec: base.rev || base.commitID,
headRevSpec: head.rev || head.commitID,
},
})
)
// Parse the raw unified diff returned by the repository comparison query.
const { rawDiff } = data.repository.comparison.fileDiffs
const fileDiffs = parsePatch(rawDiff)
// Compute diagnostics for every changed file in parallel.
return Promise.all(
fileDiffs.map(async (fileDiff, fileDiffI) => {
const uri = new URL(makeRepoURI({ ...head, filePath: fileDiff.newFileName }))
const doc = await sourcegraph.workspace.openTextDocument(uri)
const diagnostics = [
...markEnsureAuthz(doc),
...markSecurityReviewRequired(doc, fileDiff),
// One-shot suggestions: only on the first file of the first root.
...(rootI === 0 && fileDiffI === 0 ? [suggestChangelogEntry()] : []),
...(rootI === 0 && fileDiffI === 0 ? updateDependents() : []),
...markTODO(doc, fileDiff),
].filter(isDefined)
return [uri, diagnostics] as [URL, sourcegraph.Diagnostic[]]
})
// NOTE(review): any rejection is swallowed into an empty list — confirm
// this best-effort behavior is intended.
).catch(() => [])
})
.filter(isDefined)