uploadWithProgress: function(fileBuffer, fileSize, emitter) {
const progressor = progressStream({length: fileSize, speed: 1}, function(progress) {
console.log('Zip upload: Status = ' + parseInt(progress.percentage, 10) + '%');
emitter.emit('upload.progress', progress);
});
const fileBufferStream = new streamBuffer.ReadableStreamBuffer({
// frequency: 100, // in milliseconds.
chunkSize: 4096 // in bytes.
});
fileBufferStream.put(fileBuffer);
fileBufferStream.stop(); // no more data will be queued; lets the pipeline emit 'end'/'finish'
fileBufferStream
.pipe(progressor)
.pipe(fs.createWriteStream(path.join(uploadsPath, 'upload.zip')));
},
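For context, here is a self-contained sketch of the same pattern with the imports the excerpt above omits; the uploadsPath definition and the writeBufferWithProgress name are illustrative only, and it assumes the stream-buffers and progress-stream packages:

const fs = require('fs');
const path = require('path');
const streamBuffers = require('stream-buffers');
const progressStream = require('progress-stream');

const uploadsPath = path.join(__dirname, 'uploads'); // hypothetical destination directory

function writeBufferWithProgress(fileBuffer, onProgress) {
  // progress-stream reports percentage/eta as bytes flow through it
  const progressor = progressStream({ length: fileBuffer.length, time: 100 }, onProgress);
  // ReadableStreamBuffer turns an in-memory Buffer into a readable stream
  const source = new streamBuffers.ReadableStreamBuffer({ chunkSize: 4096 });
  source.put(fileBuffer);
  source.stop(); // no more data; allows 'end'/'finish' to fire downstream
  source.pipe(progressor).pipe(fs.createWriteStream(path.join(uploadsPath, 'upload.zip')));
}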
const isFile = results.isFile;
const result = results.result;
if (isFile) {
const filename = Path.basename(result).replace(/"/g, '\\"');
const contentDisposition = 'attachment; filename="' + filename + '"';
const stream = Fs.createReadStream(result);
return reply(stream)
.header('Content-Disposition', contentDisposition)
.header('Content-Length', results.size);
}
const stream = new StreamBuffers.ReadableStreamBuffer({
frequency: 10, // in milliseconds.
chunkSize: 204800 // 200 KB
});
const pathName = path === '/' ? '' : '_' + require('path').basename(path);
const filename = (course + pathName + '.zip').replace(/"/g, '\\"');
const contentDisposition = 'attachment; filename="' + filename + '"';
stream.put(result);
stream.stop();
return reply(stream)
.type('application/zip')
.header('Content-Disposition', contentDisposition);
const validParams = {
type: 'object',
maxProperties: 1,
required: ['meter_id'],
properties: {
meter_id: {
type: 'number'
}
}
};
if (!validate(req.params, validParams).valid || !req.file || !req.file.buffer) {
res.sendStatus(400);
} else {
try {
const id = parseInt(req.params.meter_id, 10);
const myReadableStreamBuffer = new streamBuffers.ReadableStreamBuffer({
frequency: 10,
chunkSize: 2048
});
myReadableStreamBuffer.put(req.file.buffer);
// stop() indicates we are done putting the data in our readable stream.
myReadableStreamBuffer.stop();
try {
await streamToDB(myReadableStreamBuffer, row => {
const readRate = Number(row[0]);
const endTimestamp = moment(row[1], 'MM/DD/YYYY HH:mm');
const startTimestamp = moment(row[1], 'MM/DD/YYYY HH:mm').subtract(60, 'minutes');
return new Reading(id, readRate, startTimestamp, endTimestamp);
}, (readings, tx) => Reading.insertOrUpdateAll(readings, tx));
res.status(200).json({ success: true });
} catch (e) {
res.status(403).json({ success: false, message: 'Failed to upload data.' });
const createAudioStream = function(file) {
const options = {
frequency: 200,
chunkSize: 32000
};
const audioStream = new streamBuffers.ReadableStreamBuffer(options);
audioStream.put(file);
// append some silence at the end to tell the service that the sentence has ended
audioStream.put(Buffer.alloc(160000)); // Buffer.alloc zero-fills (silence); new Buffer(size) is deprecated
audioStream.stop();
return audioStream;
};
fs.access(filepath, (error) => {
if (error) {
return callback ? callback(new Error(`could not find file ${filepath}`)) : null;
}
absoluteFilepath = path.resolve(filepath);
const options = {
frequency: 100,
chunkSize: 32000
};
const audioStream = new streamBuffers.ReadableStreamBuffer(options);
fs.readFile(absoluteFilepath, (error, file) => {
if (error) return callback ? callback(error) : null;
audioStream.put(file);
// append some silence at the end to tell the service that the sentence has ended
audioStream.put(Buffer.alloc(160000));
audioStream.stop();
audioStream.on('data', (data) => this.sendBytes(data));
audioStream.on('end', () => { if (callback) return callback(); });
});
});
};
exports.createReadStream = function (buffer, options) {
buffer = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
var stream = new streamBuffers.ReadableStreamBuffer(options);
stream.put(buffer);
stream.stop(); // end the stream once the queued buffer has drained
return stream;
};
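A minimal usage sketch for a helper like the one above; the './buffer-stream' module path is hypothetical, and it assumes the helper ends the stream once the buffer has been queued:

const { createReadStream } = require('./buffer-stream'); // hypothetical module path

const chunks = [];
createReadStream('hello from an in-memory buffer', { chunkSize: 8 })
  .on('data', (chunk) => chunks.push(chunk))
  .on('end', () => console.log(Buffer.concat(chunks).toString())); // prints the original text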
function createBufferStream (n, options) {
var stream = new ReadableStreamBuffer(options || { frequency: 1, chunkSize: 1024 * 64 });
var buffer = Buffer.alloc(+n); // Buffer.alloc replaces the deprecated new Buffer(size)
buffer.fill((Math.random() * 100000).toFixed(0));
stream.put(buffer);
stream.stop(); // end the stream once the queued data has drained
return stream;
}
exports.createBufferStream = createBufferStream;
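And a quick way such a generator might be exercised in a test; the output path is hypothetical, and this assumes the stream is stopped once the buffer is queued, as above:

const fs = require('fs');

createBufferStream(1024 * 1024) // roughly 1 MiB of pseudo-random digit data
  .pipe(fs.createWriteStream('/tmp/random-fixture.bin'))
  .on('finish', () => console.log('wrote test fixture'));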
public storeImage(buffer: Buffer): Promise {
const blobStream = new ReadableStreamBuffer();
blobStream.put(buffer);
blobStream.stop();
const storage = gcs({
credentials: process.env.GOOGLE_CLOUD_CREDENTIALS_JSON ? JSON.parse(process.env.GOOGLE_CLOUD_CREDENTIALS_JSON) : undefined,
projectId: process.env.GOOGLE_CLOUD_PROJECT,
});
const bucketName = process.env.GOOGLE_CLOUD_BUCKET;
const bucket = storage.bucket(bucketName);
const key = this.randomPath();
const file = bucket.file(key);
return new Promise((resolve, reject) => {
blobStream
.pipe(file.createWriteStream({public: true}))
export function runIntegratedStager(config: { plan: PlanConfig }): Stager {
const controllerInStream = new StreamBuffers.ReadableStreamBuffer();
const moleculeInStream = new StreamBuffers.ReadableStreamBuffer();
const controllerOutStream = new RedirectWritableStream(
undefined,
moleculeInStream,
);
const moleculeOutStream = new RedirectWritableStream(
undefined,
controllerInStream,
);
const controllerStreams = {
inStream: controllerInStream,
outStream: controllerOutStream,
};
const moleculeStreams = {
d.sftpPut(a, b, function(e) {
  if (!e) return setImmediate(c);
  log.verbose(e);
  if (4 === e.code || 127 === e.code) {
    log.verbose("Session#put()", "sftp is not available, attempt transfering file via streamPut");
    is = new streamBuffers.ReadableStreamBuffer();
    is.pause();
    fs.readFile(a, function(a, e) {
      if (a) return c(a);
      is.put(e);
      d.streamPut(b, is, c);
    });
  } else {
    if (14 === e.code) e = errMsgHndl.changeErrMsg("insufficient free space");
    setImmediate(c, e);
  }
});
},