v3("bla", v3.DNS);
v3("bla", v3.URL);
v3([0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea]);
v3(
  [0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea],
  [0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36]
);
// $ExpectError
v3("bla", { yolo: true });
v5.name;
v5("bla");
v5("bla", "bla");
v5("bla", v5.DNS);
v5("bla", v5.URL);
v5([0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea]);
v5(
  [0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea],
  [0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36]
);
// $ExpectError
v5("bla", { yolo: true });
.pipe(mergeMap(id => writeMalware(uuid(`malware--${id}`, uuid.URL)))) // mergeMap to wait for query completion
.pipe(scan(acc => acc + 1, 0)) // Accumulate the number of executions
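In the excerpt above, uuid is the v5 export, so the malware--… names map to stable identifiers while mergeMap waits for each write and scan counts completions. A hedged, self-contained sketch of the same pattern, with a hypothetical writeEntity standing in for the project's writeMalware:

const { from } = require('rxjs');
const { mergeMap, scan } = require('rxjs/operators');
const uuid = require('uuid/v5');

// Hypothetical async writer; in the source it persists the generated entity.
const writeEntity = async stixId => stixId;

from(['abc', 'def'])
  .pipe(mergeMap(id => writeEntity(uuid(`malware--${id}`, uuid.URL)))) // wait for each write to resolve
  .pipe(scan(acc => acc + 1, 0)) // accumulate the number of completed writes
  .subscribe(count => console.log(`${count} written`));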
async function fetchUrl(url, options) {
  options = options || {};
  if (!url) {
    return "";
  }
  const urlFileId = uuid5(url, uuid5.URL) + ".html";
  fs.mkdirSync(urlCacheDir, { recursive: true });
  let urlContent = '';
  const urlFileName = path.join(urlCacheDir, urlFileId);
  if (!options.refetch && fs.existsSync(urlFileName)) {
    console.log("Fetching url from cache", url, "with cacheKey", urlFileName);
    urlContent = fs.readFileSync(urlFileName, { encoding: 'utf-8' });
  } else {
    console.log("Fetching url", url);
    urlContent = await getPageData(url, options.scrollPage);
    fs.writeFileSync(urlFileName, urlContent);
  }
  return urlContent;
}
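A hedged usage sketch for the cache above (fetchUrl, urlCacheDir and getPageData come from the source module; the URL is just an example). Because uuid5(url, uuid5.URL) is deterministic, the same URL always maps to the same .html file, so the second call is served from disk:

(async () => {
  // First call downloads the page and writes <uuid5-of-url>.html to urlCacheDir.
  await fetchUrl('https://example.com/article');

  // Same URL, same uuid5 cache key, so this one is read back from the cache.
  await fetchUrl('https://example.com/article');

  // Pass { refetch: true } to bypass the cache and re-download the page.
  await fetchUrl('https://example.com/article', { refetch: true });
})();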
onAddItem = (fields, hide) => data => {
  const id = uuid(data.name, uuid.URL)
  fields.push({ id, ...data })
  hide()
}
<section>
  <header>
    <h2>Comments</h2>
  </header>
  {isProduction && (
  )}
</section>
<section>
  <header>
    <h2>Read Next</h2>
  </header>
  {next && }
</section>
const uuidGenerator = inStr => uuidv5(inStr, uuidv5.URL);
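The namespace argument does not have to be one of the predefined constants; any UUID is accepted, so a project can derive its own namespace once and key everything under it. A small sketch of that variant, assuming the same uuid/v5 import used elsewhere on this page (the application URL below is made up):

const uuidv5 = require('uuid/v5');

// Derive a project-specific namespace once, then hash names inside it.
const APP_NAMESPACE = uuidv5('https://myapp.example/', uuidv5.URL);
const userId = uuidv5('user-42', APP_NAMESPACE);
console.log(userId === uuidv5('user-42', APP_NAMESPACE)); // true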
const uuid = require('uuid/v5');
const EliteDangerousJournalServer = require('../../src/index.js');
const UUID_NAMESPACE = 'ws://journalserver.dvdagames.com/';
const port = 12345;
const serviceName = 'Example Elite Dangerous Journal Server';
const id = uuid(UUID_NAMESPACE, uuid.URL);
const headers = {
  TESTING: true,
};
const interval = 1000;
const config = {
  port,
  id,
  headers,
  watcher: {
    interval,
  },
  subscriptions: {
    enabled: true,
  },
  discovery: {
function macro(schema, parentSchema, context) {
  const ajv = context.self;
  const $ref = '/' + uuidv5(JSON.stringify(schema), uuidv5.URL);
  if (!ajv.getSchema($ref)) {
    ajv.addSchema(createSchema(schema, $ref), $ref);
  }
  return { $ref };
}
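The $ref is derived from the serialized schema, so the guard around ajv.addSchema only skips schemas that stringify to exactly the same text; key order matters. A small check of that, using a local refOf helper that mirrors the macro's expression for illustration:

const uuidv5 = require('uuid/v5');

const refOf = schema => '/' + uuidv5(JSON.stringify(schema), uuidv5.URL);

// Identical serialization, identical $ref, so addSchema would run only once.
console.log(refOf({ type: 'string' }) === refOf({ type: 'string' })); // true

// The same schema with keys in a different order stringifies differently,
// so it gets a different $ref and would be registered a second time.
console.log(refOf({ minLength: 1, type: 'string' }) === refOf({ type: 'string', minLength: 1 })); // false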
  if (error.message.includes('Invalid URL')) {
    throw new TemplateError('INVALID_URL', repoUrl);
  }
  throw error;
}
const repositories = await this.getRepositories();
if (repositories.find(repo => repo.url === repoUrl)) {
  throw new TemplateError('DUPLICATE_URL', repoUrl);
}
if (!(await doesURLPointToIndexJSON(url))) {
  throw new TemplateError('URL_DOES_NOT_POINT_TO_INDEX_JSON', url);
}
let newRepo = {
  id: uuidv5(url, uuidv5.URL),
  name: repoName,
  url,
  description: repoDescription,
  enabled: true,
}
newRepo = await fetchRepositoryDetails(newRepo);
if (isRepoProtected !== undefined) {
  newRepo.protected = isRepoProtected;
}
try {
  await this.addRepositoryToProviders(newRepo);
}
catch (err) {
  throw new TemplateError('ADD_TO_PROVIDER_FAILURE', url, err.message);
}
addArticles (data, cb) {
  const id = uuid(data.link, uuid.URL)
  const filename = `${id}.html`
  const useDataDirStreams = jetpack.cwd(`${remote.app.getPath('userData')}/streams/`)
  if (!jetpack.exists(useDataDirStreams.path(filename))) {
    try {
      got.stream(data.link).pipe(useDataDirStreams.createWriteStream(filename))
      data.file = useDataDirStreams.path(filename)
    } catch (e) {
    }
  }
  return article.insert(data, (err, docs) => {
    if (err) {
    }
    return cb(docs)
  })
},
fetchCategories (cb) {