Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Clone helper: when the spec points at a known git host and we already
// have a resolved URL, download the host's tarball instead of doing a
// real `git clone` (much cheaper).
// NOTE(review): this method is truncated in this chunk — the withTmp
// callback opened below is never closed in the visible lines; only the
// tarball fast-path is shown. The git-clone fallback body is missing.
[_clone] (handler, tarballOk = true) {
  const o = { tmpPrefix: 'git-clone' }
  // Prefer a sha we already resolved over the committish from the spec.
  // NOTE(review): `ref` is unused in the visible lines — presumably it is
  // consumed by the (missing) git-clone fallback; confirm upstream.
  const ref = this.resolvedSha || this.spec.gitCommittish
  const h = this.spec.hosted
  const resolved = this.resolved
  // can be set manually to false to fall back to actual git clone
  // Only take the tarball shortcut when all of these hold: caller allowed
  // it, the spec is a hosted git spec, the resolved URL matches the host's
  // canonical repo URL, and the host knows how to build a tarball URL.
  // (h.tarball is a function here, so `&&` leaves tarballOk truthy.)
  tarballOk = tarballOk &&
  h && resolved === repoUrl(h, { noCommittish: false }) && h.tarball
  return cacache.tmp.withTmp(this.cache, o, tmp => {
  // if we're resolved, and have a tarball url, shell out to RemoteFetcher
  if (tarballOk) {
  const nameat = this.spec.name ? `${this.spec.name}@` : ''
  return new RemoteFetcher(h.tarball({ noCommittish: false }), {
  ...this.opts,
  pkgid: `git:${nameat}${this.resolved}`,
  resolved: this.resolved,
  integrity: null, // it'll always be different, if we have one
  }).extract(tmp).then(() => handler(tmp), er => {
  // fall back to ssh download if tarball fails
  // Only HTTP-flavored errors trigger the retry; anything else is fatal.
  if (er.constructor.name.match(/^Http/))
  // retry the whole clone with the tarball shortcut disabled
  return this[_clone](handler, false)
  else
  throw er
  })
  }
// NOTE(review): the lines below do NOT belong to the method above — they
// read like a fragment of npm's `pack` implementation (references to
// `npm.tmp`, `target`, `dir`, `packlist` and Bluebird's `BB` that are not
// defined anywhere in this chunk). Left byte-identical pending cleanup;
// this file appears to be an accidental concatenation of several sources.
}).then((pkg) => {
// Build the tarball in a scratch dir first, named after the final target.
return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
const tmpTarget = path.join(tmp, path.basename(target))
// node-tar options for a reproducible, portable package tarball.
const tarOpt = {
file: tmpTarget,
cwd: dir,
prefix: 'package/',
portable: true,
// Provide a specific date in the 1980s for the benefit of zip,
// which is confounded by files dated at the Unix epoch 0.
mtime: new Date('1985-10-26T08:15:00.000Z'),
gzip: true
}
// NOTE(review): the promise chain is cut off here — whatever consumed
// the packlist result is missing from this chunk.
return BB.resolve(packlist({ path: dir }))
// NOTE: node-tar does some Magic Stuff depending on prefixes for files
// specifically with @ signs, so we just neutralize that one
// Run `cb` with a throwaway temporary directory, cleaning up afterwards.
// With a cache configured we delegate to cacache's managed tmp facility;
// otherwise we hand-roll one under the OS tmpdir using a Bluebird
// disposer so rimraf always runs when `cb` settles.
function withTmp (opts, cb) {
  if (opts.cache) {
    // cacache has a special facility for working in a tmp dir
    return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
  }
  const parentDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
  const scratchName = uniqueFilename(parentDir, 'git-clone')
  const scratch = mkdirp(scratchName)
    .then(() => scratchName)
    .disposer(rimraf)
  return BB.using(scratch, cb)
}
manifest () {
if (this.package)
return Promise.resolve(this.package)
// have to unpack the tarball for this.
return cacache.tmp.withTmp(this.cache, this.opts, dir =>
this.extract(dir)
.then(() => readPackageJson(dir + '/package.json'))
.then(mani => this.package = {
...mani,
_integrity: String(this.integrity),
_resolved: this.resolved,
_from: this.from,
}))
}
// NOTE(review): fragment — the object literal these entries belong to
// opens before the visible lines (it wraps cacache's API behind an `arg`
// adapter defined elsewhere). Left byte-identical.
// Remaining get.* sub-methods.
byDigest: arg(cacache.get.byDigest),
copy: arg(cacache.get.copy),
info: arg(cacache.get.info),
hasContent: arg(cacache.get.hasContent),
}),
// put + put.stream
put: Object.assign(put, {
stream: arg(cacache.put.stream),
}),
// rm and its targeted variants
rm: Object.assign(rm, {
all: arg(cacache.rm.all),
entry: arg(cacache.rm.entry),
content: arg(cacache.rm.content),
}),
// tmp-dir helpers
tmp: {
mkdir: arg(cacache.tmp.mkdir),
withTmp: arg(cacache.tmp.withTmp),
},
// cache verification
verify: Object.assign(verify, {
lastRun: arg(cacache.verify.lastRun),
}),
}
}
// Publish a package from spec `arg`: pack it into a tarball inside a
// throwaway tmp dir, extract it to read the real manifest, then upload.
// Returns the promise from the tmp-scoped work; the tmp dir is cleaned
// up by cacache when the chain settles.
function publishFromPackage (arg, opts) {
  return cacache.tmp.withTmp(opts.tmp, {tmpPrefix: 'fromPackage'}, tmp => {
    const extracted = path.join(tmp, 'package')
    // FIX: the tarball used to be written to a file named `package.json`,
    // which is misleading — it is a gzipped tarball, not a manifest, and
    // the real package.json is read from `extracted` below. Name it for
    // what it is.
    const target = path.join(tmp, 'package.tgz')
    return tarball.toFile(arg, target, opts)
      .then(() => extract(arg, extracted, opts))
      .then(() => readJson(path.join(extracted, 'package.json')))
      .then((pkg) => {
        return BB.resolve(pack.getContents(pkg, target))
          // tap: run side effects without altering the resolved contents
          .tap((c) => !opts.json && pack.logContents(c))
          .tap(() => upload(pkg, false, target, opts))
      })
  })
}
// NOTE(review): second copy of the cacache-wrapper fragment above — the
// enclosing object opens before these lines; this chunk appears to be a
// concatenation of duplicated sources. Left byte-identical.
// get.* sub-methods.
stream: arg(cacache.get.stream),
byDigest: arg(cacache.get.byDigest),
copy: arg(cacache.get.copy),
info: arg(cacache.get.info),
hasContent: arg(cacache.get.hasContent),
}),
// put + put.stream
put: Object.assign(put, {
stream: arg(cacache.put.stream),
}),
// rm and its targeted variants
rm: Object.assign(rm, {
all: arg(cacache.rm.all),
entry: arg(cacache.rm.entry),
content: arg(cacache.rm.content),
}),
// tmp-dir helpers
tmp: {
mkdir: arg(cacache.tmp.mkdir),
withTmp: arg(cacache.tmp.withTmp),
},
// cache verification
verify: Object.assign(verify, {
lastRun: arg(cacache.verify.lastRun),
}),
}
}