Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
data.agent = conf.RPC_HTTP_AGENT;
}
if (conf.RPC_HTTPS_AGENT) {
data.httpsAgent = conf.RPC_HTTPS_AGENT;
}
if (Buffer.isBuffer(requestForm) || typeof requestForm === 'string') {
data.content = requestForm;
} else if (requestForm) {
data.stream = requestForm;
} else {
data.headers['Content-Length'] = 0;
}
var req = urllib.request(requestURI, data, function (respErr, respBody,
respInfo) {
// var end = parseInt(Date.now() / 1000);
// console.log((end - start) + " seconds");
// console.log("queuing:\t" + respInfo.timing.queuing);
// console.log("dnslookup:\t" + respInfo.timing.dnslookup);
// console.log("connected:\t" + respInfo.timing.connected);
// console.log("requestSent:\t" + respInfo.timing.requestSent);
// console.log("waiting:\t" + respInfo.timing.waiting);
// console.log("contentDownload:\t" + respInfo.timing.contentDownload);
callbackFunc(respErr, respBody, respInfo);
});
return req;
}
// Fetch the directory listing for `fullname` from the dist server.
// Doc paths (matched by DOC_API_RE) are listed via their index.html page;
// when document syncing is disabled (`this._syncDocument` falsy) doc paths
// short-circuit to an empty list without issuing a request.
proto.listdir = function* (fullname) {
  var requestUrl = this.disturl + fullname;
  var isDocPath = this.DOC_API_RE.test(fullname);
  if (isDocPath) {
    // skip the network round-trip entirely when doc sync is off
    if (!this._syncDocument) return [];
    requestUrl += 'index.html';
  }
  var res = yield urllib.requestThunk(requestUrl, {
    timeout: 60000,
    dataType: 'text',
    followRedirect: true,
  });
  debug('listdir %s got %s, %j', requestUrl, res.status, res.headers);
  if (isDocPath) return this.parseDocHtml(res, fullname);
  return this.parseDistHtml(res, fullname);
};
// const downloadURLs = {
// linux: '%s/chromium-browser-snapshots/Linux_x64/%d/%s.zip',
// mac: '%s/chromium-browser-snapshots/Mac/%d/%s.zip',
// win32: '%s/chromium-browser-snapshots/Win/%d/%s.zip',
// win64: '%s/chromium-browser-snapshots/Win_x64/%d/%s.zip',
// };
let existsCount = 0;
const existDirResults = yield parentDirs.map(name => this.listExists('/' + name + '/'));
const existDirsMap = {};
for (const rows of existDirResults) {
for (const row of rows) {
existDirsMap[row.parent + row.name] = true;
}
}
const result = yield urllib.request(this._npmPackageUrl, {
timeout: 60000,
dataType: 'json',
gzip: true,
followRedirect: true,
});
const versions = result.data.versions || {};
const chromium_revisions = {};
for (var version in versions) {
const pkg = versions[version];
const puppeteerInfo = pkg.puppeteer || {};
if (!puppeteerInfo.chromium_revision) continue;
if (chromium_revisions[puppeteerInfo.chromium_revision]) continue;
const publish_time = result.data.time[pkg.version];
chromium_revisions[puppeteerInfo.chromium_revision] = publish_time;
}
'use strict';
var http = require('urllib').create();
var shell = require('shelljs');
var path = require('path');
var fs = require('fs');
var logger = require('../utils/logger');
// Bing "HP Image Archive" endpoint used to look up the picture of the day.
// `params` is serialized onto the query string by the HTTP client.
var resource = {
url: 'http://cn.bing.com/HPImageArchive.aspx',
params: {
format: 'js', // respond with JSON instead of XML
idx: 0, // day offset: 0 - today, 1 - yesterday, 2 - the day before yesterday ...
n: 1 // how many images to include in the result
}
};
exports.first = function (cb) {
// 1. get image url
'use strict';
var http = require('urllib').create();
var shell = require('shelljs');
var path = require('path');
var fs = require('fs');
var logger = require('../utils/logger.js');
// Template for a custom wallpaper source: resolve one image and hand its
// filesystem path to `cb`. Left unimplemented on purpose.
exports.first = function (cb) {
// Implement your logic here; cb expects an image path on the local filesystem.
// Then specify --source=your in the command line, and have fun.
//
// Also I would appreciate it if you could share your source at:
// https://github.com/micooz/wallpaper/pulls
//
// cb(file);
};
const debug = require('debug')('autod');
const assert = require('assert');
const glob = require('glob');
const path = require('path');
const fs = require('fs');
const readdir = require('fs-readdir-recursive');
const crequire = require('crequire');
const EventEmitter = require('events');
const co = require('co');
const urllib = require('urllib');
const semver = require('semver');
// Directories skipped when scanning a project for require()/import calls.
const DEFAULT_EXCLUDE = [ '.git', 'cov', 'coverage', '.vscode' ];
// Paths treated as test/example code (their deps become devDependencies).
const DEFAULT_TEST = [ 'test', 'tests', 'test.js', 'benchmark', 'example', 'example.js' ];
// User-Agent sent on registry requests; includes autod and urllib versions.
const USER_AGENT = `autod@${require('./package').version} ${urllib.USER_AGENT}`;
// Extracts the package name from a module path, capturing an optional
// @scope/ prefix (group 1) and the bare package name (group 2).
const MODULE_REG = /^(@[0-9a-zA-Z\-\_][0-9a-zA-Z\.\-\_]*\/)?([0-9a-zA-Z\-\_][0-9a-zA-Z\.\-\_]*)/;
class Autod extends EventEmitter {
constructor(options) {
super();
this.options = Object.assign({}, options);
this.prepare();
}
prepare() {
const options = this.options;
assert(options.root, 'options.root required');
// default options
options.semver = options.semver || {};
options.registry = options.registry || 'https://registry.npm.taobao.org';
proto.listdir = function* (fullname, dirIndex) {
var alwayNewDir = false;
if (typeof this._alwayNewDirIndex === 'number' && this._alwayNewDirIndex === dirIndex) {
alwayNewDir = true;
} else if (Array.isArray(this._alwayNewDirIndex) && this._alwayNewDirIndex.indexOf(dirIndex) >= 0) {
// node-inspector `alwayNewDirIndex = [0, 1]`
alwayNewDir = true;
}
var url = this.disturl + fullname;
var res = yield urllib.requestThunk(url, {
timeout: 60000,
dataType: 'json',
followRedirect: true,
});
debug('listdir %s got %s, %j', url, res.status, res.headers);
if (res.status !== 200) {
var msg = util.format('request %s error, got %s', url, res.status);
throw new Error(msg);
}
return res.data.map(function (file) {
var item = {
isNew: null,
name: file.name,
size: file.size || '-',
date: file.date,
TBase.prototype.apply_auth = function (url, args, user) {
user.authtype = user.authtype || 'oauth';
args.headers = args.headers || {};
if (user.authtype === 'baseauth') {
if (user.username && user.password) {
args.headers.Authorization = urllib.make_base_auth_header(user.username, user.password);
}
} else if (user.authtype === 'oauth' || user.authtype === 'xauth') {
var accessor = {
consumerSecret: this.config.secret
};
// 已通过oauth认证
if (user.oauth_token_secret) {
accessor.tokenSecret = user.oauth_token_secret;
}
var parameters = {};
for (var k in args.data) {
parameters[k] = args.data[k];
if (k.substring(0, 6) === 'oauth_') { // 删除oauth_verifier相关参数
delete args.data[k];
}
export function fetch(config: Config, request: Request,
callback: (error: boolean, ctx: any) => void) {
var parsed_url = urllib.parse(request.url)
var options = {
hostname: parsed_url.hostname,
port: 443,
path: parsed_url.path,
method: request.method,
headers: {
"Accept": "application/json",
"OData-MaxVersion": "4.0",
"OData-Version": "4.0",
}
}
if (['POST', 'PUT', 'PATCH'].indexOf(request.method) >= 0) {
// GL browser should set this itself
//options.headers['Content-Length'] = payload.data.length;
options.headers['Content-Type'] = 'application/json'
}
'use strict';
const HttpClient2 = require('urllib').HttpClient2;
const urllib = new HttpClient2();
exports.urllib = {
keepAlive: false,
foo: null,
bar: undefined,
n: 1,
dd: new Date(),
httpclient: urllib,
};
exports.buffer = new Buffer('1234');
exports.array = [];
exports.console = console;
exports.zero = 0;