// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): excerpt from a Cloud Function handler — `context`,
// `bucketName`, `fileName`, `gcloud`, and `readline` are declared
// outside this excerpt; `count` is presumably reported in a later
// 'close' handler that is not visible here — TODO confirm.
// Guard: the request must name the GCS bucket to read from.
if (!bucketName) {
context.failure(
'Bucket not provided. Make sure you have a \'bucket\' property in ' +
'your request');
return;
}
// Guard: the request must name the file inside that bucket.
if (!fileName) {
context.failure(
'Filename not provided. Make sure you have a \'file\' property in ' +
'your request');
return;
}
// Create a gcs client.
var gcs = gcloud.storage({
// We're using the API from the same project as the Cloud Function.
projectId: process.env.GCP_PROJECT,
});
var bucket = gcs.bucket(bucketName);
var file = bucket.file(fileName);
// Running total of whitespace-separated tokens seen so far.
var count = 0;
// Use the readLine module to read the stream line-by line.
var lineReader = readline.createInterface({
input: file.createReadStream(),
});
// Each emitted line adds its whitespace-separated token count.
// Note: a blank line still adds 1, since ''.split(/\s+/) yields [''].
lineReader.on('line', function(line) {
count += line.trim().split(/\s+/).length;
});
var _getStorageClient = function() {
  // Lazily build the storage client on first use; subsequent calls
  // reuse the cached instance held in the outer `storage` variable.
  if (storage !== null) {
    return storage;
  }
  storage = gcloud.storage({
    // The client targets the same project that hosts this Cloud Function.
    projectId: process.env.GCP_PROJECT,
  });
  return storage;
};
// NOTE(review): property of a Cloud Functions export map; the body
// continues past this excerpt (no closing brace visible here).
// Presumably orchestrates workers by publishing the source file's
// contents to a Pub/Sub topic — confirm against the full file.
'master': function(context, data) {
// Create a gcs client
var gcs = gcloud.storage({
// We're using the API from the same project as the Cloud Function.
projectId: process.env.GCP_PROJECT,
});
// Create a pubsub client to publish the work and read the results of the workers.
var pubsub = gcloud.pubsub({
// We're using the API from the same project as the Cloud Function.
projectId: process.env.GCP_PROJECT,
});
// Get the bucket containing our source file
var bucket = gcs.bucket(data['bucket']);
// The topic we are going to publish to
var inTopic = pubsub.topic(data['in-topic']);
// NOTE(review): truncated at the end of this excerpt — the string
// concatenation on the last line continues on lines not shown here.
// `gcloud`, `logger`, and `context` come from the surrounding file.
var _master = function(context, data) {
// Create a gcs client
var gcs = gcloud.storage({
// We're using the API from the same project as the Cloud Function.
projectId: process.env.GCP_PROJECT,
});
// Get the location (url) of the map function
var fnUrl = data['workerFunctionUrl'];
// Get the bucket containing our source file
var bucket = gcs.bucket(data['bucket']);
// Load the master file using the stream API
logger.log(
'Opening file [' + data['file'] + '] and creating a read stream...');
// Stream the source file; read failures surface via the 'error' handler.
var inStream = bucket.file(data['file']).createReadStream()
.on('error', function(err) {
context.failure('Error reading file stream for ' + data['file'] +
// Build a gcloud storage client from this file's project settings.
// A fresh client is returned on every call; nothing is cached here.
GCSFile.prototype.storage = function () {
  var settings = this.projectSettings;
  return gcloud.storage(settings);
};
// NOTE(review): tail of a constructor whose signature starts before
// this excerpt (the class header and a `projectId` parameter are not
// visible). `requiredOrFromEnvironment` / `fromEnvironmentOrDefault`
// are project helpers defined elsewhere.
keyFilename = requiredOrFromEnvironment('GCP_KEYFILE_PATH', 'keyfile path'),
bucket = requiredOrFromEnvironment('GCS_BUCKET', 'bucket name'),
// Optional settings object: object-key prefix and direct-access flag.
{ bucketPrefix = fromEnvironmentOrDefault('GCS_BUCKET_PREFIX', ''),
directAccess = fromEnvironmentOrDefault('GCS_DIRECT_ACCESS', false) } = {}) {
super();
this._bucket = bucket;
this._bucketPrefix = bucketPrefix;
this._directAccess = directAccess;
// Credential options handed straight to the storage client below.
let options = {
projectId: projectId,
keyFilename: keyFilename
};
this._gcsClient = new storage(options);
}