Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
const fileList = [
'package.json',
'backend/package.json'
]
const files = await getFiles({ installationId: '123', fullName: 'owner/repo', files: fileList, log })
// returns an Object with the 5 file types
expect(Object.keys(files)).toHaveLength(5)
// The Object has 2 files at the `package.json` key
expect(files['package.json']).toHaveLength(2)
expect(files['package.json'][0].path).toEqual('package.json')
expect(files['package.json'][0].content).toEqual('eyJuYW1lIjoidGVzdCJ9')
expect(files['package.json'][1].path).toEqual('backend/package.json')
expect(files['package.json'][1].content).toEqual('eyJuYW1lIjoidGVzdCJ9')
expect(files['yarn.lock']).toHaveLength(2)
})
/**
 * Returns the packages that need to be (re)built.
 *
 * A package is "valid" (skippable) when its current source hash matches the
 * hash recorded in `cache`. If the cache is missing, or yarn.lock changed
 * since the cache was written, every package is considered invalid.
 *
 * @returns {Promise<Array>} packages lacking an up-to-date cache entry
 */
async function getInvalidPackages() {
  // Collect buildable packages, optionally narrowed to the PF3/PF4 trees
  // (plus the shared `commonPackages`) depending on the CLI flags.
  const packages = (await new Project(__dirname).getPackages())
    .filter(p => p.scripts.build) // Only packages that have a build target
    .filter(p => (isPf3 ? p.location.indexOf('patternfly-3') > 0 || commonPackages.indexOf(p.name) >= 0 : true)) // Based off argv
    .filter(p => (isPf4 ? p.location.indexOf('patternfly-4') > 0 || commonPackages.indexOf(p.name) >= 0 : true)); // Based off argv
  for (const p of packages) {
    p.hash = hashPackageSrc(p.location, p.name);
    // A falsy cache means nothing can be skipped.
    p.valid = cache && cache[p.name] === p.hash;
    if (p.valid) {
      console.info('Skipping', p.name, '(already built).');
    }
  }
  // Invalidate everything if any deps change — or if there is no cache at all.
  // FIX: the original dereferenced `cache` unconditionally here, throwing a
  // TypeError when `cache` is undefined even though the loop above tolerates it.
  if (!cache || cache['yarn.lock'] !== yarnLockHash) {
    return packages;
  }
  return packages.filter(p => !p.valid);
}
Cache.prototype.load = async function() {
// This should never throw, if it does, let it fail the build
const lockfile = await readFile("yarn.lock", "utf-8");
const lockfileHash = hashString(lockfile);
this.updated.checksums["yarn.lock"] = lockfileHash;
try {
const manifest = await readFile(this.manifest, "utf-8");
const { version, checksums, files } = JSON.parse(manifest);
// Ignore the cache if the version changed
assert.equal(this.version, version);
assert.ok(typeof checksums === "object");
// If yarn.lock changed, rebuild everything
assert.equal(lockfileHash, checksums["yarn.lock"]);
this.checksums = checksums;
assert.ok(typeof files === "object");
this.files = files;
/**
 * Loads the build cache manifest from disk into `this.checksums`/`this.files`.
 *
 * Always records the current yarn.lock hash in `this.updated.checksums`.
 * If the manifest is unreadable, malformed, from a different cache version,
 * or was written against a different yarn.lock, the cache starts out empty.
 */
Cache.prototype.load = async function() {
  // Hash yarn.lock up front. This read is intentionally outside the
  // try/catch: if it fails, the build should fail too.
  const lockfileHash = hashString(await readFile("yarn.lock", "utf-8"));
  this.updated.checksums["yarn.lock"] = lockfileHash;
  try {
    const manifestText = await readFile(this.manifest, "utf-8");
    const parsed = JSON.parse(manifestText);
    // Reject the cache outright when its format version changed.
    assert.equal(this.version, parsed.version);
    assert.ok(typeof parsed.checksums === "object");
    // A different yarn.lock hash means dependencies changed: rebuild everything.
    assert.equal(lockfileHash, parsed.checksums["yarn.lock"]);
    this.checksums = parsed.checksums;
    assert.ok(typeof parsed.files === "object");
    this.files = parsed.files;
    // Every manifest entry must map to a list of files.
    for (const fileList of Object.values(this.files)) {
      assert.ok(Array.isArray(fileList));
    }
  } catch (err) {
    // Any read/parse/validation failure above simply means "no usable cache";
    // fall back to an empty one rather than failing the build.
    this.checksums = {};
    this.files = {};
  }
};
const createLockfileCommits = async ({ commits, repoDoc, installationId, commitMessageTemplates, transforms, owner, repoName, branch }, log) => {
const ghqueue = githubQueue(installationId)
const lockfileCommits = []
// we need to iterate over every changed package file, not every package file commit
// we reverse because we want the most recent commit with the all the changes to the file (the last one)
// we clone because we don’t actually want to do the commits backwards
const dedupedCommits = _.uniqBy(_.clone(commits).reverse(), commit => commit.path)
// For yarn workspaces, we have to send the updated packages object (after applying all the update commits).
// So we need to iterate over all commits, replace all updated packages in the packages object,
// send all of them (old and updated together) to the exec server, tell it in which directory to run
// yarn install, and get the old yarn lock from that dir as well
let updatedPackages = _.clone(repoDoc.packages)
let workspaceRootsToUpdate = []
let packageJsonPathsWithWorkspaceDefinitions = []
const isYarn = repoDoc.files['yarn.lock'].length > 0
if (isYarn) {
packageJsonPathsWithWorkspaceDefinitions = Object.keys(repoDoc.packages).filter(path => {
const packageJson = repoDoc.packages[path]
const workspaceDefinition = packageJson.workspaces
// either has simple workspace definition…
if (workspaceDefinition && workspaceDefinition.length > 0) {
return path
}
// or a complex definition
if (workspaceDefinition && workspaceDefinition.packages && workspaceDefinition.packages.length > 0) {
return path
}
})
}
const execTokens = await getExecTokens({
installationId,
.reply(200, {
path: 'package-lock.json',
name: 'package-lock.json',
content: encodePkg({ who: 'cares' })
})
.get('/repos/finnp/test')
.reply(200, {
default_branch: 'custom'
})
const newJob = await createInitialBranch({ repositoryId: 44 })
expect(newJob).toBeFalsy()
const repodoc = await repositories.get('44')
expect(repodoc.files['package.json']).not.toHaveLength(0)
expect(repodoc.files['package-lock.json']).not.toHaveLength(0)
expect(repodoc.files['yarn.lock']).toHaveLength(0)
expect(repodoc.enabled).toBeTruthy()
})
/**
 * Returns a copy of `files` without the npm/yarn lockfile entries.
 *
 * FIX: the original aliased the parameter (`const newFiles = files`) and
 * deleted keys from it, mutating the caller's object despite the name and
 * return value implying a fresh copy. We now shallow-copy first.
 *
 * @param files map of file paths to file entries
 * @returns a new Files object lacking 'package-lock.json' and 'yarn.lock'
 */
function excludeLockFiles(files: Files): Files {
  const newFiles: Files = { ...files };
  delete newFiles['package-lock.json'];
  delete newFiles['yarn.lock'];
  return newFiles;
}
/**
 * Returns a copy of `files` without the npm/yarn lockfile entries.
 *
 * FIX: the original aliased the parameter (`const newFiles = files`) and
 * deleted keys from it, mutating the caller's object despite the name and
 * return value implying a fresh copy. We now shallow-copy first.
 *
 * @param {Object} files map of file paths to file entries
 * @returns {Object} a new object lacking 'package-lock.json' and 'yarn.lock'
 */
function excludeLockFiles(files) {
  const newFiles = { ...files };
  delete newFiles['package-lock.json'];
  delete newFiles['yarn.lock'];
  return newFiles;
}