// Factory for the JitsiStreamBlurEffect: loads a BodyPix model and wraps it.
export async function createBlurEffect() {
    if (!MediaStreamTrack.prototype.getSettings && !MediaStreamTrack.prototype.getConstraints) {
        throw new Error('JitsiStreamBlurEffect not supported!');
    }

    // An output stride of 16 and a multiplier of 0.5 are used for improved
    // performance on a larger range of CPUs.
    const bpModel = await bodyPix.load({
        architecture: 'MobileNetV1',
        outputStride: 16,
        multiplier: 0.50,
        quantBytes: 2
    });

    return new JitsiStreamBlurEffect(bpModel);
}
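For context, a sketch of how the returned effect could be wired to a camera stream. The startEffect()/stopEffect() method names are assumed here (they follow jitsi-meet's stream-effect convention) and are not shown in the snippet above.

// Hypothetical usage of createBlurEffect(); startEffect() is an assumed
// method that returns a new MediaStream carrying the blurred frames.
async function previewBlurredCamera(videoElement) {
    const stream = await navigator.mediaDevices.getUserMedia({ video: true });
    const blurEffect = await createBlurEffect();

    videoElement.srcObject = blurEffect.startEffect(stream);
}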
// Runs one segmentation pass and composites the bokeh (background blur) frame.
async _renderMask() {
    this._maskInProgress = true;
    this._segmentationData = await this._bpModel.segmentPerson(this._inputVideoElement, {
        internalResolution: 'low', // resized to 0.25 times of the original resolution before inference
        maxDetections: 1, // max. number of person poses to detect per image
        segmentationThreshold: 0.7 // represents probability that a pixel belongs to a person
    });
    this._maskInProgress = false;

    bodyPix.drawBokehEffect(
        this._outputCanvasElement,
        this._inputVideoElement,
        this._segmentationData,
        7, // Constant for background blur, integer values between 0-20
        7 // Constant for edge blur, integer values between 0-20
    );
}
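The _maskInProgress flag only pays off if the caller checks it before scheduling another pass, since segmentPerson() can take longer than one frame interval on slow CPUs. A minimal scheduling sketch, assuming the flag and method above; the method name, timer field, and interval are illustrative:

// Hypothetical render loop: skip this tick if the previous segmentation
// pass has not finished yet.
_startRenderLoop() {
    this._renderTimer = setInterval(() => {
        if (!this._maskInProgress) {
            this._renderMask();
        }
    }, 1000 / 30); // target roughly 30 fps
}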
// One frame of the body-pix demo's real-time loop: read the GUI state, run
// person or body-part segmentation, draw the selected effect, and schedule
// the next frame. canvas, state, guiState, and stats come from the
// surrounding demo code.
async function bodySegmentationFrame() {
    // Begin monitoring code for frames per second
    stats.begin();

    const flipHorizontally = guiState.flipHorizontal;

    switch (guiState.estimate) {
        case 'segmentation':
            const multiPersonSegmentation = await estimateSegmentation();
            switch (guiState.segmentation.effect) {
                case 'mask':
                    const ctx = canvas.getContext('2d');
                    const foregroundColor = {r: 255, g: 255, b: 255, a: 255};
                    const backgroundColor = {r: 0, g: 0, b: 0, a: 255};
                    const mask = bodyPix.toMask(
                        multiPersonSegmentation, foregroundColor, backgroundColor,
                        true);

                    bodyPix.drawMask(
                        canvas, state.video, mask, guiState.segmentation.opacity,
                        guiState.segmentation.maskBlurAmount, flipHorizontally);
                    drawPoses(multiPersonSegmentation, flipHorizontally, ctx);
                    break;
                case 'bokeh':
                    bodyPix.drawBokehEffect(
                        canvas, state.video, multiPersonSegmentation,
                        // unary + coerces the GUI's string value to a number
                        +guiState.segmentation.backgroundBlurAmount,
                        guiState.segmentation.edgeBlurAmount, flipHorizontally);
                    break;
            }
            break;
        case 'partmap':
            const ctx = canvas.getContext('2d');
            const multiPersonPartSegmentation = await estimatePartSegmentation();
            const coloredPartImageData = bodyPix.toColoredPartMask(
                multiPersonPartSegmentation,
                partColorScales[guiState.partMap.colorScale]);

            const maskBlurAmount = 0;
            switch (guiState.partMap.effect) {
                case 'pixelation':
                    const pixelCellWidth = 10.0;
                    bodyPix.drawPixelatedMask(
                        canvas, state.video, coloredPartImageData,
                        guiState.partMap.opacity, maskBlurAmount, flipHorizontally,
                        pixelCellWidth);
                    break;
                case 'partMap':
                    bodyPix.drawMask(
                        canvas, state.video, coloredPartImageData, guiState.opacity,
                        maskBlurAmount, flipHorizontally);
                    break;
                case 'blurBodyPart':
                    const blurBodyPartIds = [0, 1];
                    bodyPix.blurBodyPart(
                        canvas, state.video, multiPersonPartSegmentation,
                        blurBodyPartIds, guiState.partMap.blurBodyPartAmount,
                        guiState.partMap.edgeBlurAmount, flipHorizontally);
            }
            drawPoses(multiPersonPartSegmentation, flipHorizontally, ctx);
            break;
        default:
            break;
    }

    // End monitoring code for frames per second
    stats.end();

    requestAnimationFrame(bodySegmentationFrame);
}
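Stripped of the GUI plumbing, the mask branch above reduces to a short pipeline: segment, build a binary mask, composite. A self-contained sketch; the element ids, threshold, and draw parameters are illustrative:

// Minimal mask pipeline using a loaded BodyPix model `net`.
async function drawMaskOnce(net) {
    const video = document.getElementById('video');   // illustrative id
    const canvas = document.getElementById('output'); // illustrative id

    const segmentation = await net.segmentPerson(video, {segmentationThreshold: 0.7});
    const mask = bodyPix.toMask(
        segmentation,
        {r: 255, g: 255, b: 255, a: 255}, // foreground color
        {r: 0, g: 0, b: 0, a: 255});      // background color

    // opacity 0.7, maskBlurAmount 0, no horizontal flip
    bodyPix.drawMask(canvas, video, mask, 0.7, 0, false);
}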
// Reloads the model whenever the GUI-selected architecture settings change.
async function loadBodyPix() {
    toggleLoadingUI(true);
    state.net = await bodyPix.load({
        architecture: guiState.input.architecture,
        outputStride: guiState.input.outputStride,
        multiplier: guiState.input.multiplier,
        quantBytes: guiState.input.quantBytes
    });
    toggleLoadingUI(false);
}
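loadBodyPix() defers every choice to the GUI. For reference, a fixed configuration trading speed for accuracy; the values are one valid combination from the body-pix documentation, picked here for illustration:

// Hypothetical fixed config: ResNet50 is slower but more accurate than
// MobileNetV1; multiplier applies only to MobileNetV1, so it is omitted.
async function loadAccurateNet() {
    return bodyPix.load({
        architecture: 'ResNet50',
        outputStride: 16,
        quantBytes: 4
    });
}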
async loadModel() {
    // BodyPix 1.x API: load() takes the MobileNet multiplier directly
    // rather than a config object.
    this.model = await bp.load(this.config.multiplier);
    this.modelReady = true;
    return this;
}
// Segments a single input and packages background/person masks. The method
// name is assumed; the parameter names follow their use in the body.
async segmentInternal(imgToSegment, segmentationOptions) {
    await this.ready;
    await tf.nextFrame();

    // Wait for video metadata before trying to segment a <video> element.
    if (this.video && this.video.readyState === 0) {
        await new Promise(resolve => {
            this.video.onloadeddata = () => resolve();
        });
    }

    // Per-call options override the instance defaults.
    this.config.outputStride = segmentationOptions.outputStride || this.config.outputStride;
    this.config.segmentationThreshold = segmentationOptions.segmentationThreshold || this.config.segmentationThreshold;

    const segmentation = await this.model.estimatePersonSegmentation(
        imgToSegment, this.config.outputStride, this.config.segmentationThreshold);

    const result = {};
    result.maskBackground = bp.toMaskImageData(segmentation, true);
    result.maskPerson = bp.toMaskImageData(segmentation, false);
    result.raw = segmentation;

    // When running under p5.js, convert the raw masks to p5.Image objects.
    if (p5Utils.checkP5()) {
        const blob1 = await p5Utils.rawToBlob(result.maskBackground.data, segmentation.width, segmentation.height);
        const blob2 = await p5Utils.rawToBlob(result.maskPerson.data, segmentation.width, segmentation.height);
        const p5Image1 = await p5Utils.blobToP5Image(blob1);
        const p5Image2 = await p5Utils.blobToP5Image(blob2);
        result.maskBackground = p5Image1;
        result.maskPerson = p5Image2;
    }
    return result;
}
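A hedged sketch of driving this wrapper end to end; only loadModel() and the segmentation body above come from the code, while the driver function, instance, and option value are illustrative:

// Hypothetical driver; assumes an instance exposing the two methods above.
async function run(wrapper, videoElement) {
    await wrapper.loadModel();
    const result = await wrapper.segmentInternal(videoElement, {
        segmentationThreshold: 0.6 // overrides the instance default
    });
    // result.raw is the BodyPix segmentation; the masks are ImageData
    // (or p5.Image objects when running under p5.js).
    console.log(result.raw.width, result.raw.height);
}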