const Apify = require('apify');
const { typeCheck } = require('type-check');

// INPUT_TYPE and PAGE_INPUT_TYPE are type-check format strings defined
// elsewhere in the actor (see the sketch after this snippet).
const log = console.log;

Apify.main(async () => {
    log('Loading data from input');
    try {
        // Fetch the input and check that it has a valid format.
        // You don't need to check the input, but it's a good practice.
        const input = await Apify.getValue('INPUT');
        const isSinglePageInput = typeCheck(PAGE_INPUT_TYPE, input);
        const isMultiPageInput = typeCheck(INPUT_TYPE, input);
        if (!isMultiPageInput && !isSinglePageInput) {
            log('Expected input:');
            log(INPUT_TYPE);
            log('or');
            log(PAGE_INPUT_TYPE);
            log('Received input:');
            console.dir(input);
            throw new Error('Received invalid input');
        }
        if (isMultiPageInput) {
            input.pages.forEach((page) => {
                if (!typeCheck(PAGE_INPUT_TYPE, page) && !isSinglePageInput) {
                    throw new Error('Received invalid page input');
                }
            });
        }
    } catch (error) {
        log('Input validation failed');
        throw error;
    }
});
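For context, a minimal sketch of what the missing type definitions might look like, using the format strings of the type-check npm package. The exact shapes are assumptions, not taken from the original actor:

// consts.js -- hypothetical type definitions for the snippet above.
// type-check format strings: https://www.npmjs.com/package/type-check
exports.PAGE_INPUT_TYPE = '{url: String, method: Maybe String}';
exports.INPUT_TYPE = '{pages: [Object]}';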
const Apify = require('apify');

// createSearchUrls() is an actor-specific helper defined elsewhere.
Apify.main(async () => {
    // Get the queue and enqueue the first URLs.
    const requestQueue = await Apify.openRequestQueue();
    const input = await Apify.getValue('INPUT');
    const env = await Apify.getEnv();

    // Based on the input country and keywords, generate the search URLs.
    const urls = await createSearchUrls(input);
    for (const searchUrl of urls) {
        await requestQueue.addRequest(searchUrl);
    }

    const config = {
        maxConcurrency: input.maxConcurrency || 40,
        maxRequestsPerCrawl: input.maxRequestsPerCrawl || null,
        useApifyProxy: true,
        apifyProxyGroups: input.apifyProxyGroups || null,
        maxRequestRetries: 6,
        // This option is in seconds, not milliseconds: 2.5 minutes = 150 s.
        handlePageTimeoutSecs: 2.5 * 60,
        // Default to true only when liveView is unset; the original
        // `input.liveView ? input.liveView : true` could never yield false.
        liveView: input.liveView !== undefined ? input.liveView : true,
        country: input.country,
    };
    // ... the config is then passed to a crawler (see the sketch below).
});
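The snippet stops before the config object is used; in Apify SDK 0.x actors, options like these were typically fed into a PuppeteerCrawler. A minimal sketch under that assumption (the handlePageFunction body is hypothetical):

const crawler = new Apify.PuppeteerCrawler({
    requestQueue,
    maxConcurrency: config.maxConcurrency,
    maxRequestsPerCrawl: config.maxRequestsPerCrawl || undefined,
    maxRequestRetries: config.maxRequestRetries,
    handlePageTimeoutSecs: config.handlePageTimeoutSecs,
    launchPuppeteerOptions: {
        useApifyProxy: config.useApifyProxy,
        apifyProxyGroups: config.apifyProxyGroups || undefined,
        liveView: config.liveView,
    },
    handlePageFunction: async ({ request, page }) => {
        // ... extract data from the page and push it to the dataset ...
    },
});
await crawler.run();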
const Apify = require('apify');

Apify.main(async () => {
    // Actor INPUT variable
    const input = await Apify.getValue('INPUT');
    // Actor STATE variable, with a default for the first run
    const state = await Apify.getValue('STATE') || { crawled: {} };
    // Flag set when the platform is about to migrate the run to another server
    let migrating = false;
    Apify.events.on('migrating', () => { migrating = true; });

    // Check that all required input attributes are present.
    if (!input.search && !input.startUrls) {
        throw new Error('Missing "search" or "startUrls" attribute in INPUT!');
    } else if (input.search && input.startUrls && input.search.trim().length > 0 && input.startUrls.length > 0) {
        throw new Error('It is not possible to use both "search" and "startUrls" attributes in INPUT!');
    }
    if (!(input.proxyConfig && input.proxyConfig.useApifyProxy)) {
        throw new Error('This actor cannot be used without Apify proxy.');
    }
    if (input.useFilters && input.propertyType !== 'none') {
        // The two filtering mechanisms are mutually exclusive.
        throw new Error('Property type and filters cannot be used at the same time.');
    }
    // ... the crawl itself follows (see the state-persistence sketch below).
});
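The STATE value and the migrating flag only pay off when progress is saved back to the key-value store. A minimal sketch of the usual pattern, assuming state.crawled is keyed by URL (the persistState event is part of the same SDK; the handlePageFunction is hypothetical):

// Persist progress whenever the SDK asks for it (periodically).
Apify.events.on('persistState', async () => {
    await Apify.setValue('STATE', state);
});

// Skip finished URLs, record progress, and stop taking on new work
// once migration has started; the resurrected run continues from STATE.
const handlePageFunction = async ({ request, page }) => {
    if (migrating) return;
    if (state.crawled[request.url]) return;
    // ... scrape the page ...
    state.crawled[request.url] = true;
};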
const _ = require('underscore');

// APIFY_ACT_ID and APIFY_ACT_RUN_ID come from the platform environment;
// deleteNullProperties() and INPUT_DEFAULTS are defined elsewhere in the actor.
const { APIFY_ACT_ID, APIFY_ACT_RUN_ID } = process.env;

const fetchInput = async () => {
    const input = await Apify.getValue('INPUT');
    const crawler = input.crawlerId
        ? await Apify.client.crawlers.getCrawlerSettings({ crawlerId: input.crawlerId })
        : {};

    // NOTE: In old crawler settings, some values can be null; replace them with the defaults.
    deleteNullProperties(crawler);
    deleteNullProperties(input);

    const mergedInput = _.defaults(input, crawler, INPUT_DEFAULTS, {
        actId: APIFY_ACT_ID,
        runId: APIFY_ACT_RUN_ID,
    });
    mergedInput.crawlPurls = mergedInput.crawlPurls || [];
    mergedInput.crawlPurls.forEach((purl) => {
        // ... normalize each pseudo-URL ...
    });
    return mergedInput;
};
// A simpler variant of the same helper: when a crawlerId is given,
// the fetched crawler settings override the raw INPUT values.
const fetchInput = async () => {
    const input = await Apify.getValue('INPUT');
    if (!input.crawlerId) return input;
    const crawler = await Apify.client.crawlers.getCrawlerSettings({ crawlerId: input.crawlerId });
    return Object.assign({}, input, crawler);
};
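Either variant is typically called once at startup so the rest of the actor works with a single merged configuration. A hypothetical usage:

Apify.main(async () => {
    // Merge INPUT with any legacy crawler settings before starting work.
    const input = await fetchInput();
    console.log(`Running with ${(input.crawlPurls || []).length} pseudo-URLs`);
});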
// Thin instance method that delegates to the SDK's key-value store access.
async getValue(...args) {
    return Apify.getValue(...args);
}
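The method above appears without its enclosing class; a minimal sketch of the kind of wrapper it could belong to (the class name and the setValue counterpart are assumptions):

class ApifyStorage {
    async getValue(...args) {
        return Apify.getValue(...args);
    }

    async setValue(...args) {
        return Apify.setValue(...args);
    }
}

Wrapping the static SDK calls in an instance this way makes them straightforward to stub out in unit tests.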
// Resolve to undefined instead of throwing when the key cannot be read,
// and coerce falsy results (e.g. null for a missing key) to undefined.
export const getValueOrUndefined = async (key) => {
    const value = await Apify
        .getValue(key)
        .catch(() => undefined);
    return value || undefined;
};
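A hypothetical usage, falling back to a fresh state when nothing has been stored yet:

const state = (await getValueOrUndefined('STATE')) || { crawled: {} };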