// Parts of this source are modified from lerna:
// lerna: https://github.com/lerna/lerna/blob/master/LICENSE
// @flow
import { BoltError } from './errors';
import * as logger from './logger';
import * as messages from './messages';
import * as processes from './processes';
import * as path from 'path';
import pLimit from 'p-limit';
import slash from 'slash';
import tempWrite from 'temp-write';
import * as os from 'os';
// We should never run more than one git command at a time; git enforces
// this for many commands and will error if two run concurrently.
const gitCommandLimit = pLimit(1);
const GIT_LOG_LINE_FORMAT_FLAG = '--pretty=format:%H %s';
const GIT_LOG_LINE_FORMAT_SPLITTER = /^([a-zA-Z0-9]+) (.*)/;
opaque type CommitHash = string;
export type Commit = {
hash: CommitHash,
message: string
};
function git(args: Array<string>, opts: processes.SpawnOptions) {
return gitCommandLimit(() => {
return processes.spawn('git', args, { silent: true, ...opts });
});
}
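// Illustrative sketch (not part of the original file): because
// gitCommandLimit above was created with a concurrency of 1, git calls that
// are started together are queued and executed strictly one after another,
// so git's own repository locks never collide.
async function exampleSerializedGit(cwd: string) {
  await Promise.all([
    git(['status'], { cwd }),
    git(['add', '.'], { cwd }),
  ]);
}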
const rebuild = (
  input.length === 0
    ? rebuildAll
    : (importers: any, opts: any) => rebuildPkgs(importers, input, opts) // tslint:disable-line
)
if (opts.lockfileDir) {
const importers = await getImporters()
await rebuild(
importers,
{
...rebuildOpts,
pending: opts.pending === true,
},
)
return
}
const limitRebuild = pLimit(opts.workspaceConcurrency ?? 4)
for (const chunk of chunks) {
await Promise.all(chunk.map((rootDir: string) =>
limitRebuild(async () => {
try {
if (opts.ignoredPackages && opts.ignoredPackages.has(rootDir)) {
return
}
const localConfig = await memReadLocalConfig(rootDir)
await rebuild(
[
{
buildIndex: 0,
manifest: manifestsByPath[rootDir].manifest,
rootDir,
},
],
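// Standalone sketch of the chunked pattern above (the names are
// illustrative, not pnpm's API): chunks run sequentially, and within a
// chunk at most `workspaceConcurrency` rebuild tasks run at once.
import pLimit from 'p-limit'

async function runChunked (
  chunks: string[][],
  workspaceConcurrency: number,
  work: (rootDir: string) => Promise<void>,
): Promise<void> {
  const limit = pLimit(workspaceConcurrency)
  for (const chunk of chunks) {
    // Each chunk finishes completely before the next one starts.
    await Promise.all(chunk.map(rootDir => limit(() => work(rootDir))))
  }
}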
public async getUsers(
  userGUIDs: ReadonlyArray<string>,
): Promise<ReadonlyArray<IUaaUser>> {
// Limit number of users fetched from UAA concurrently
const pool = pLimit(CONCURRENCY_LIMIT);
const uaaUsers = Promise.all(
userGUIDs.map(
guid => pool(() => this.getUser(guid)),
),
);
return uaaUsers;
}
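// Generalized sketch of the same fan-out (fetchAll/fetchOne are
// hypothetical names): at most `concurrency` requests are in flight at any
// moment, yet the resolved array preserves the order of the input IDs.
import pLimit from 'p-limit';

async function fetchAll<T>(
  ids: ReadonlyArray<string>,
  fetchOne: (id: string) => Promise<T>,
  concurrency = 5, // assumed default; the original reads CONCURRENCY_LIMIT
): Promise<T[]> {
  const pool = pLimit(concurrency);
  return Promise.all(ids.map(id => pool(() => fetchOne(id))));
}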
import {
  ImportPackageFunction,
  PackageFilesResponse,
} from '@pnpm/store-controller-types'
import makeDir = require('make-dir')
import fs = require('mz/fs')
import ncpCB = require('ncp')
import pLimit from 'p-limit'
import path = require('path')
import exists = require('path-exists')
import pathTemp = require('path-temp')
import renameOverwrite = require('rename-overwrite')
import { promisify } from 'util'
import importIndexedDir from '../fs/importIndexedDir'
const ncp = promisify(ncpCB)
const limitLinking = pLimit(16)
export default (packageImportMethod?: 'auto' | 'hardlink' | 'copy' | 'clone'): ImportPackageFunction => {
const importPackage = createImportPackage(packageImportMethod)
return (from, to, opts) => limitLinking(() => importPackage(from, to, opts))
}
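// Generic form of the wrapper above (illustrative, not pnpm's API): every
// call site shares one limiter, so no matter how many callers there are, at
// most `concurrency` invocations of `fn` touch the filesystem at once.
import pLimit from 'p-limit'

function withLimit<A extends unknown[], R> (
  concurrency: number,
  fn: (...args: A) => Promise<R>,
): (...args: A) => Promise<R> {
  const limit = pLimit(concurrency)
  return (...args: A) => limit(() => fn(...args))
}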
function createImportPackage (packageImportMethod?: 'auto' | 'hardlink' | 'copy' | 'clone') {
// this works in the following way:
// - hardlink: hardlink the packages, no fallback
// - clone: clone the packages, no fallback
// - auto: try to clone or hardlink the packages, if it fails, fallback to copy
// - copy: copy the packages, do not try to link them first
switch (packageImportMethod || 'auto') {
case 'clone':
return clonePkg
case 'hardlink':
? this.storedDescriptors.get(predef)
: descriptor;
if (typeof initial === `undefined`)
throw new Error(`Assertion failed: The descriptor should have been registered`);
const alias = await this.configuration.reduceHook(hooks => {
return hooks.reduceDescriptorAlias;
}, initial, this, descriptor);
cachedAliases.set(descriptor.descriptorHash, alias);
return alias;
};
const limit = pLimit(10);
while (nextResolutionPass.size !== 0) {
const currentResolutionPass = nextResolutionPass;
nextResolutionPass = new Set();
// We remove from the "mustBeResolved" list all packages that have
// already been resolved previously.
for (const descriptorHash of currentResolutionPass)
if (allResolutions.has(descriptorHash))
currentResolutionPass.delete(descriptorHash);
if (currentResolutionPass.size === 0)
break;
// We check that the resolution dependencies have been resolved for all
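// Sketch of how the limiter is typically applied inside such a loop
// (illustrative; the yarn code above is truncated): each pass resolves all
// still-pending descriptors with at most 10 concurrent resolver calls.
import pLimit from 'p-limit';

async function resolvePass<T>(
  pending: Set<string>,
  resolveOne: (hash: string) => Promise<T>,
): Promise<T[]> {
  const limit = pLimit(10);
  return Promise.all([...pending].map(hash => limit(() => resolveOne(hash))));
}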
// @flow
import crossSpawn from 'cross-spawn';
import * as logger from './logger';
import * as cleanUp from './cleanUp';
import type Package from '../Package';
import type Project from '../Project';
import pLimit from 'p-limit';
import os from 'os';
import path from 'path';
const limit = pLimit(os.cpus().length);
const processes = new Set();
export function handleSignals() {
cleanUp.handleAllSignals(() => {
for (let child of processes) {
child.kill('SIGTERM');
}
processes.clear();
});
}
export class ChildProcessError extends Error {
code: number;
stdout: string;
stderr: string;
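// Sketch (not from bolt): sizing the limiter to the CPU count, as above, is
// a common default when every task spawns a child process; roughly one
// child runs per core while the rest wait in the queue.
import * as os from 'os';
import pLimit from 'p-limit';

const cpuBoundLimit = pLimit(os.cpus().length);

function runOnFreeCore<T>(task: () => Promise<T>): Promise<T> {
  return cpuBoundLimit(task);
}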
async function deleteAllBucketsAsync() {
const [buckets] = await storage.getBuckets({prefix: TESTS_PREFIX});
const limit = pLimit(10);
await new Promise(resolve =>
setTimeout(resolve, RETENTION_DURATION_SECONDS * 1000)
);
return Promise.all(
buckets.map(bucket => limit(() => deleteBucketAsync(bucket)))
);
}
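// The same pattern, generalized (names are illustrative): cap concurrent
// delete requests so the storage API is not flooded once the retention
// window has elapsed.
import pLimit from 'p-limit';

async function deleteAllWithCap<T>(
  items: T[],
  deleteOne: (item: T) => Promise<void>,
  cap = 10,
): Promise<void> {
  const limit = pLimit(cap);
  await Promise.all(items.map(item => limit(() => deleteOne(item))));
}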
import { createOrConnectStoreControllerCached, CreateStoreControllerOptions } from '@pnpm/store-connection-manager'
import pLimit from 'p-limit'
import path = require('path')
import pathAbsolute = require('path-absolute')
import R = require('ramda')
import renderHelp = require('render-help')
import {
install,
InstallOptions,
link,
linkToGlobal,
WorkspacePackages,
} from 'supi'
import * as installCommand from './install'
const installLimit = pLimit(4)
export const rcOptionsTypes = cliOptionsTypes
export function cliOptionsTypes () {
return R.pick([
'global-dir',
'global',
'only',
'package-import-method',
'production',
'registry',
'reporter',
'resolution-strategy',
'save-dev',
'save-exact',
'save-optional',
function promiseThrottle(promises: IPromiseTask[], options: IOptions) {
const { concurrency = 2, onRes } = options;
const limit = pLimit(concurrency);
return Promise.all(
promises.map(promise =>
limit(() =>
promise.task(...promise.arguments).then(res => res)
).then(() => onRes(limit))
)
);
}
export default promiseThrottle;
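// Hypothetical usage of promiseThrottle above: three tasks run with at most
// two in flight; onRes fires after each task settles and receives the
// limiter, whose pendingCount/activeCount suit simple progress logging.
const tasks = [
  { task: (n: number) => Promise.resolve(n * 2), arguments: [1] },
  { task: (n: number) => Promise.resolve(n * 2), arguments: [2] },
  { task: (n: number) => Promise.resolve(n * 2), arguments: [3] },
];

promiseThrottle(tasks, {
  concurrency: 2,
  onRes: limit => console.log(`pending: ${limit.pendingCount}`),
});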
import axios from 'axios'
export function createClient({
baseUrl,
concurrency,
password,
user,
defaultLanguage,
}) {
const limit = pLimit(concurrency)
const token = Buffer.from(`${user}:${password}`).toString('base64')
const headers = { Authorization: `Basic ${token}` }
return {
async getContents({ type, slug, lang, query }) {
type = type.toLowerCase()
const params = getParams({ lang: lang || defaultLanguage, type, query })
if (slug) {
  params.slug = slug
}
const { data } = await axios.get(
`${baseUrl}/wp-json/presspack/v1/content/`,
{ params },
)
if (data && data.status !== 'publish') return []
return data
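// Sketch (an assumption, since the fragment above is truncated): the
// `limit` created in createClient is typically used to wrap each request,
// capping concurrent WordPress API calls, e.g.:
//   const { data } = await limit(() => axios.get(url, { params }))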