Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Bootstraps the Linkify experiment: linkifies the existing DOM once,
 * then registers a MutationObserver so content added or changed later
 * is linkified as well.
 *
 * All work is funneled through a single-concurrency PQueue so huge DOMs
 * are processed one job at a time without locking the UI.
 */
function init () {
  // Linkify jobs are executed one by one
  // (fixes race-conditions in huge DOMs, does not lock UI)
  const linkifyJobs = new PQueue({ concurrency: 1 })
  linkifyContainer(document.body, linkifyJobs)
    .then(() => {
      // Watch the whole subtree for text and child-list changes and
      // enqueue a linkify job for each mutation.
      // NOTE: the previous `async` wrapper around the forEach callback was
      // a no-op — forEach ignores returned promises — so it is dropped.
      new MutationObserver((mutations) => {
        mutations.forEach((mutation) => linkifyMutation(mutation, linkifyJobs))
      }).observe(document.body, {
        characterData: true,
        childList: true,
        subtree: true
      })
    })
    // Surface failures instead of leaving an unhandled promise rejection.
    .catch((err) => console.error('[ipfs-companion] Linkify init failed', err))
}
slackApiUrl = 'https://slack.com/api/',
logger = undefined,
logLevel = LogLevel.INFO,
maxRequestConcurrency = 3,
retryConfig = retryPolicies.tenRetriesInAboutThirtyMinutes,
agent = undefined,
tls = undefined,
rejectRateLimitedCalls = false,
headers = {},
}: WebClientOptions = {}) {
super();
this.token = token;
this.slackApiUrl = slackApiUrl;
this.retryConfig = retryConfig;
this.requestQueue = new PQueue({ concurrency: maxRequestConcurrency });
// NOTE: may want to filter the keys to only those acceptable for TLS options
this.tlsConfig = tls !== undefined ? tls : {};
this.rejectRateLimitedCalls = rejectRateLimitedCalls;
// Logging
this.logger = getLogger(WebClient.loggerName, logLevel, logger);
this.axios = axios.create({
baseURL: slackApiUrl,
headers: Object.assign(
{
'User-Agent': getUserAgent(),
},
headers,
),
httpAgent: agent,
import Queue from 'p-queue'
import { getPortPromise } from 'portfinder'
import { DEFAULT_DOCKER_CONTAINER_PORT } from '../../../config/index.js'
/**
 * Hands out free host ports for Docker containers.
 *
 * Every lookup is serialized through a single-concurrency queue, so two
 * concurrent callers can never be handed the same port.
 */
export default class DockerPort {
  /**
   * Resolve the next free port at or above the current scan cursor.
   * @returns {Promise<number>} an available port number
   */
  async get() {
    return DockerPort._queue.add(async () => {
      const freePort = await getPortPromise({ port: DockerPort._portScanStart })
      // Move the cursor past the port we just handed out so the next
      // lookup starts scanning above it.
      DockerPort._portScanStart = freePort + 1
      return freePort
    })
  }
}
// static private
// Shared queue with concurrency 1 — serializes all get() lookups so two
// concurrent callers cannot race and receive the same port.
DockerPort._queue = new Queue({ concurrency: 1 })
// Scan cursor: the first port the next lookup probes; get() advances it
// past each port it hands out.
DockerPort._portScanStart = DEFAULT_DOCKER_CONTAINER_PORT
method: "GET",
path: "/health",
});
Provisioning.addAppServicePath(this.bridge, this);
// TODO(paul): see above; we had to defer this until now
this.stateStorage = new StateLookup({
client: this.bridge.getIntent().client,
eventTypes: ["m.room.member", "m.room.power_levels"],
});
log.info("Fetching teams");
const teams = await this.datastore.getAllTeams();
log.info(`Loaded ${teams.length} teams`);
const teamClients: { [id: string]: WebClient } = {};
const teamPromises = new PQueue({concurrency: STARTUP_TEAM_INIT_CONCURRENCY});
let i = 0;
for (const team of teams) {
i++;
// tslint:disable-next-line: no-floating-promises
teamPromises.add(async () => {
log.info(`[${i}/${teams.length}] Getting team client for ${team.name || team.id}`);
// This will create team clients before we use them for any rooms,
// as a pre-optimisation.
try {
teamClients[team.id] = await this.clientFactory.getTeamClient(team.id);
} catch (ex) {
log.error(`Failed to create client for ${team.id}, some rooms may be unbridgable`);
log.error(ex);
}
// Also start RTM clients for teams.
// Ensure the token is a bot token so that we can actually enable RTM for these teams.
} else {
const persistentlyCached = localStorage.getItem(createStellarTomlCacheKey(domain))
resultMap.set(domain, [persistentlyCached ? JSON.parse(persistentlyCached) : undefined, true])
}
}
return resultMap
}
/**
 * React hook that resolves the stellar.toml data for a single domain.
 *
 * @param domain - The domain to look up, or null/undefined for none.
 * @returns A tuple of [toml data or undefined, loading flag]; a nullish
 *   domain always yields [undefined, false].
 */
export function useStellarToml(domain: string | null | undefined): [StellarToml | undefined, boolean] {
  // The hook must be invoked unconditionally (rules of hooks), so pass an
  // empty list when there is no domain instead of branching around the call.
  const domains = domain ? [domain] : []
  const tomlFiles = useStellarTomlFiles(domains)

  if (!domain) {
    return [undefined, false]
  }
  return tomlFiles.get(domain)!
}
// Limit the number of concurrent fetches
const accountFetchQueue = new PromiseQueue({ concurrency: 8 })
function useAccountDataSet(horizon: Server, accountIDs: string[]): AccountData[] {
const loadingStates = React.useContext(StellarAccountDataCacheContext)
const issuerAccounts = accountIDs.map(
(accountID: string): AccountData => {
const cacheItem = loadingStates.cache.get(accountID)
return cacheItem && cacheItem.state === "resolved" ? cacheItem.data : createEmptyAccountData(accountID)
}
)
React.useEffect(() => {
for (const accountID of accountIDs) {
if (!loadingStates.cache.has(accountID)) {
loadingStates.store(accountID, FetchState.pending())
module.exports = function createDnslinkResolver (getState) {
// DNSLink lookup result cache
const cacheOptions = { max: 1000, maxAge: 1000 * 60 * 60 * 12 }
const cache = new LRU(cacheOptions)
// upper bound for concurrent background lookups done by resolve(url)
const lookupQueue = new PQueue({ concurrency: 4 })
// preload of DNSLink data
const preloadUrlCache = new LRU(cacheOptions)
const preloadQueue = new PQueue({ concurrency: 4 })
const dnslinkResolver = {
get _cache () {
return cache
},
setDnslink (fqdn, value) {
cache.set(fqdn, value)
},
clearCache () {
cache.reset()
}
playlistList.push({
name: '[Album] ' + albumInfo.album.name,
trackIds
})
}
logger.info('Download list:')
playlistList.forEach((item) => logger.info(' ' + item.name))
logger.initBar(Object.keys(trackList).length)
}
// Track processing
const trackDownloadQueue = new PQueue({ concurrency: config('trackDownloadConcurrency', 3) })
const trackCopyQueue = new PQueue({ concurrency: 1 })
for (let trackId in trackList) {
trackId = parseInt(trackId, 10)
let trackInfo = trackList[trackId]
trackDownloadQueue.add(async () => {
const tmpPath = os.tmpdir()
const realPath = path.resolve(__root, sha1(trackId).substr(0, 2))
const savePath = path.resolve(tmpPath, 'CloudMan/', sha1(trackId).substr(0, 2))
if (that.downloaded.has(trackId)) {
logger.info(`Track ${trackId} existed!`)
trackList[trackId].done = true
trackList[trackId].format = that.downloadedFormat[trackId]
logger._bar.tick(1)
return
}
try {
const explorer = cosmiconfig('compress');
const { config: { gzip, brotli, test, threshold } } = (await explorer.search()) || { config: defaultOptions };
const fileTest = new RegExp(test);
/**
 * Recursively yields the names of this bundle and all child bundles whose
 * name matches the surrounding `fileTest` regex (a closure variable built
 * from the plugin's `test` option).
 *
 * @param {object} bundle - Parcel bundle with `.name` and `.childBundles`.
 * @yields {string} matching bundle file names, depth-first.
 */
function* filesToCompress(bundle) {
  if (bundle.name && fileTest.test(bundle.name)) {
    yield bundle.name
  }
  // `const` instead of `var`: the loop variable is never reassigned, and
  // `var` would leak it outside the loop's scope.
  for (const child of bundle.childBundles) {
    yield* filesToCompress(child)
  }
}
const queue = new pQueue({ concurrency: defaultOptions.concurrency });
[...filesToCompress(bundle)].forEach(file => {
queue.add(() => gzipCompress(file, { ...defaultOptions.gzip, threshold, ...gzip }));
queue.add(() => brotliCompress(file, { ...defaultOptions.brotli, threshold, ...brotli }));
});
await queue.onIdle();
const end = new Date().getTime();
const formattedOutput = output.sort(sortResults).map(formatResults);
console.log(chalk.bold.green(`\n⨠Compressed in ${((end - start) / 1000).toFixed(2)}s.\n`));
table(formattedOutput);
} catch (err) {
console.error(chalk.bold.red('β Compression error:\n'), err);
// Power on the BLE adapter, scan for peripherals, and print each one's
// service UUIDs. Discoveries are processed strictly one at a time through a
// single-concurrency queue, since the scan callback can fire faster than
// peripherals can be queried.
(async () => {
  const sblendid = await Sblendid.powerOn();
  const queue = new PQueue({ concurrency: 1 });
  sblendid.startScanning((peripheral) =>
    queue.add(async () => {
      const coloredUuid = chalk.blue(peripheral.uuid);
      const services = await peripheral.getServices();
      const serviceUUIDs = services.map((service) => service.uuid);
      console.log(coloredUuid, serviceUUIDs);
    })
  );
})();
}: ProcessorOpts) => () => (node: any) => {
if (!brokenLinksConfig) {
return null
}
const queue = new PQueue({ concurrency: 4 })
let counter = 0
visit(node, 'link', link => {
const { url } = link
if (!url || !url.match(`https?://.*`)) return
queue
.add(() => fetch(url))
.then(() => {
counter += 1
if (counter % 10 === 0) {
log({
type: 'broken-links/progress',
level: 'info',
payload: {
counter
}