Add files via upload #3

Open · wants to merge 1 commit into base: webgl
195 changes: 195 additions & 0 deletions BackgroundTransformer.ts
@@ -0,0 +1,195 @@
import * as vision from '@mediapipe/tasks-vision';
import { dependencies } from '../../package.json';
import VideoTransformer from './VideoTransformer';
import { VideoTransformerInitOptions } from './types';

export type SegmenterOptions = Partial<vision.ImageSegmenterOptions['baseOptions']>;

export interface FrameProcessingStats {
processingTimeMs: number;
segmentationTimeMs: number;
filterTimeMs: number;
}

export type BackgroundOptions = {
blurRadius?: number;
imagePath?: string;
backgroundVideoSelector?: string;
  /** cannot be updated via the `update` method; changing it requires a restart */
  segmenterOptions?: SegmenterOptions;
  /** cannot be updated via the `update` method; changing it requires a restart */
  assetPaths?: { tasksVisionFileSet?: string; modelAssetPath?: string };
/** called when a new frame is processed */
onFrameProcessed?: (stats: FrameProcessingStats) => void;
};
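
// Illustrative options literal for BackgroundOptions (the values below are
// assumptions for illustration, not package defaults):
//
//   const opts: BackgroundOptions = {
//     blurRadius: 10,
//     segmenterOptions: { delegate: 'GPU' },
//     onFrameProcessed: (stats) => console.debug('frame took', stats.processingTimeMs, 'ms'),
//   };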

export default class BackgroundProcessor extends VideoTransformer<BackgroundOptions> {
static get isSupported() {
return (
typeof OffscreenCanvas !== 'undefined' &&
typeof VideoFrame !== 'undefined' &&
typeof createImageBitmap !== 'undefined' &&
!!document.createElement('canvas').getContext('webgl2')
);
}

imageSegmenter?: vision.ImageSegmenter;

segmentationResults: vision.ImageSegmenterResult | undefined;

backgroundImage: ImageBitmap | null = null;

backgroundVideo: HTMLVideoElement | null = null;

options: BackgroundOptions;

segmentationTimeMs: number = 0;

constructor(opts: BackgroundOptions) {
super();
this.options = opts;
this.update(opts);
}

  async init({ outputCanvas, inputElement: inputVideo }: VideoTransformerInitOptions) {
    // Let the base class set up the WebGL pipeline first; the MediaPipe segmenter below shares its canvas
    await super.init({ outputCanvas, inputElement: inputVideo });

const fileSet = await vision.FilesetResolver.forVisionTasks(
this.options.assetPaths?.tasksVisionFileSet ??
`https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@${dependencies['@mediapipe/tasks-vision']}/wasm`,
);

this.imageSegmenter = await vision.ImageSegmenter.createFromOptions(fileSet, {
baseOptions: {
modelAssetPath:
this.options.assetPaths?.modelAssetPath ??
'https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter/float16/latest/selfie_segmenter.tflite',
delegate: 'GPU',
...this.options.segmenterOptions,
},
canvas: this.canvas,
runningMode: 'VIDEO',
outputCategoryMask: true,
outputConfidenceMasks: false,
});

    // Skip loading the background image if `update` has already loaded one
    if (this.options.imagePath && !this.backgroundImage) {
await this.loadBackground(this.options.imagePath).catch((err) =>
console.error('Error while loading processor background image: ', err),
);
}
    if (this.options.backgroundVideoSelector && !this.backgroundVideo) {
      await this.setBgVideo(this.options.backgroundVideoSelector);
    }
if (this.options.blurRadius) {
this.gl?.setBlurRadius(this.options.blurRadius);
}
}

  async destroy() {
    await super.destroy();
    await this.imageSegmenter?.close();
    this.backgroundImage = null;
    this.backgroundVideo = null;
  }

async loadBackground(path: string) {
const img = new Image();

await new Promise((resolve, reject) => {
img.crossOrigin = 'Anonymous';
img.onload = () => resolve(img);
img.onerror = (err) => reject(err);
img.src = path;
});
const imageData = await createImageBitmap(img);
this.gl?.setBackgroundImage(imageData);
}

  /** Looks up a `<video>` element by CSS selector and uses it as the background source. */
  async setBgVideo(selector: string) {
    this.backgroundVideo = document.querySelector<HTMLVideoElement>(selector);
    if (this.backgroundVideo) {
      this.gl?.setBackgroundVideo(this.backgroundVideo);
    }
  }

async transform(frame: VideoFrame, controller: TransformStreamDefaultController<VideoFrame>) {
try {
if (!(frame instanceof VideoFrame) || frame.codedWidth === 0 || frame.codedHeight === 0) {
console.debug('empty frame detected, ignoring');
return;
}

if (this.isDisabled) {
controller.enqueue(frame);
return;
}
const frameTimeMs = Date.now();
      if (!this.canvas) {
        throw new TypeError('Canvas needs to be initialized first');
      }
this.canvas.width = frame.displayWidth;
this.canvas.height = frame.displayHeight;
const segmentationPromise = new Promise<void>((resolve, reject) => {
try {
          const segmentationStartTimeMs = performance.now();
this.imageSegmenter?.segmentForVideo(frame, segmentationStartTimeMs, (result) => {
this.segmentationTimeMs = performance.now() - segmentationStartTimeMs;
this.segmentationResults = result;
this.updateMask(result.categoryMask);
result.close();
resolve();
});
} catch (e) {
reject(e);
}
});

const filterStartTimeMs = performance.now();
this.drawFrame(frame);
if (this.canvas && this.canvas.width > 0 && this.canvas.height > 0) {
const newFrame = new VideoFrame(this.canvas, {
timestamp: frame.timestamp || frameTimeMs,
});
controller.enqueue(newFrame);
const filterTimeMs = performance.now() - filterStartTimeMs;
const stats: FrameProcessingStats = {
processingTimeMs: this.segmentationTimeMs + filterTimeMs,
segmentationTimeMs: this.segmentationTimeMs,
filterTimeMs,
};
this.options.onFrameProcessed?.(stats);
} else {
controller.enqueue(frame);
}
await segmentationPromise;
} catch (e) {
console.error('Error while processing frame: ', e);
} finally {
frame.close();
}
}

  async update(opts: BackgroundOptions) {
    this.options = { ...this.options, ...opts };
    if (opts.blurRadius) {
      this.gl?.setBlurRadius(opts.blurRadius);
    } else if (opts.imagePath) {
      await this.loadBackground(opts.imagePath);
    } else if (opts.backgroundVideoSelector) {
      await this.setBgVideo(opts.backgroundVideoSelector);
    }
  }

  private async drawFrame(frame: VideoFrame) {
    if (!this.gl) return;
    this.gl.renderFrame(frame);
  }

  private async updateMask(mask: vision.MPMask | undefined) {
    if (!mask || !this.gl) return;
    this.gl.updateMask(mask.getAsWebGLTexture());
  }
}
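
For context, a transformer like this one is driven by the insertable-streams pipeline that `ProcessorWrapper` wires up around it. Below is a minimal sketch of the equivalent wiring, assuming a source `MediaStreamTrack` named `track`, browser support for `MediaStreamTrackProcessor` / `MediaStreamTrackGenerator`, and TypeScript lib declarations for those types (none of which are part of this file):

// Sketch only: ProcessorWrapper performs this wiring internally.
declare const track: MediaStreamTrack; // assumed video track
declare const transformer: BackgroundProcessor; // the class above

const trackProcessor = new MediaStreamTrackProcessor({ track });
const trackGenerator = new MediaStreamTrackGenerator({ kind: 'video' });

trackProcessor.readable
  .pipeThrough(
    new TransformStream<VideoFrame, VideoFrame>({
      transform: (frame, controller) => transformer.transform(frame, controller),
    }),
  )
  .pipeTo(trackGenerator.writable);

// `trackGenerator` is itself a MediaStreamTrack carrying the processed frames.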
130 changes: 130 additions & 0 deletions index.ts
@@ -0,0 +1,130 @@
import ProcessorWrapper, { ProcessorWrapperOptions } from './ProcessorWrapper';
import BackgroundTransformer, {
BackgroundOptions,
FrameProcessingStats,
SegmenterOptions,
} from './transformers/BackgroundTransformer';

export * from './transformers/types';
export { default as VideoTransformer } from './transformers/VideoTransformer';
export {
ProcessorWrapper,
type BackgroundOptions,
type SegmenterOptions,
BackgroundTransformer,
type ProcessorWrapperOptions,
};

/**
* Determines if the current browser supports background processors
*/
export const supportsBackgroundProcessors = () =>
BackgroundTransformer.isSupported && ProcessorWrapper.isSupported;

/**
* Determines if the current browser supports modern background processors, which yield better performance
*/
export const supportsModernBackgroundProcessors = () =>
BackgroundTransformer.isSupported && ProcessorWrapper.hasModernApiSupport;

export interface BackgroundProcessorOptions extends ProcessorWrapperOptions {
blurRadius?: number;
imagePath?: string;
backgroundVideoSelector?: string;
segmenterOptions?: SegmenterOptions;
assetPaths?: { tasksVisionFileSet?: string; modelAssetPath?: string };
onFrameProcessed?: (stats: FrameProcessingStats) => void;
}

export const BackgroundBlur = (
blurRadius: number = 10,
segmenterOptions?: SegmenterOptions,
onFrameProcessed?: (stats: FrameProcessingStats) => void,
processorOptions?: ProcessorWrapperOptions,
) => {
return BackgroundProcessor(
{
blurRadius,
segmenterOptions,
onFrameProcessed,
...processorOptions,
},
'background-blur',
);
};

export const VirtualBackground = (
imagePath: string,
segmenterOptions?: SegmenterOptions,
onFrameProcessed?: (stats: FrameProcessingStats) => void,
processorOptions?: ProcessorWrapperOptions,
) => {
return BackgroundProcessor(
{
imagePath,
segmenterOptions,
onFrameProcessed,
...processorOptions,
},
'virtual-background',
);
};

export const LiveBackground = (
backgroundVideoSelector: string,
segmenterOptions?: SegmenterOptions,
onFrameProcessed?: (stats: FrameProcessingStats) => void,
processorOptions?: ProcessorWrapperOptions,
) => {
return BackgroundProcessor(
{
backgroundVideoSelector,
segmenterOptions,
onFrameProcessed,
...processorOptions,
},
    'live-background',
);
};
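
// Usage sketch for LiveBackground, the factory added in this PR. The selector,
// element id, and asset path below are assumptions for illustration, not part
// of this package:
//
//   const bgVideo = document.createElement('video');
//   bgVideo.id = 'bg-video';
//   bgVideo.src = '/backgrounds/loop.mp4';
//   bgVideo.muted = true;
//   bgVideo.loop = true;
//   bgVideo.style.display = 'none';
//   document.body.appendChild(bgVideo);
//   await bgVideo.play();
//
//   const processor = LiveBackground('#bg-video');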

export const BackgroundProcessor = (
options: BackgroundProcessorOptions,
name = 'background-processor',
) => {
const isTransformerSupported = BackgroundTransformer.isSupported;
const isProcessorSupported = ProcessorWrapper.isSupported;

if (!isTransformerSupported) {
throw new Error('Background transformer is not supported in this browser');
}

if (!isProcessorSupported) {
throw new Error(
'Neither MediaStreamTrackProcessor nor canvas.captureStream() fallback is supported in this browser',
);
}

// Extract transformer-specific options and processor options
const {
blurRadius,
imagePath,
backgroundVideoSelector,
segmenterOptions,
assetPaths,
onFrameProcessed,
...processorOpts
} = options;

const transformer = new BackgroundTransformer({
blurRadius,
imagePath,
backgroundVideoSelector,
segmenterOptions,
assetPaths,
onFrameProcessed,
});

const processor = new ProcessorWrapper(transformer, name, processorOpts);

return processor;
};
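
Taken together, a hedged end-to-end sketch of how these factories might be consumed. The `@livekit/track-processors` import path and the LiveKit-style `localVideoTrack` with `setProcessor`/`stopProcessor` are assumptions based on this repository's surroundings, not APIs defined in this diff:

import { BackgroundBlur, VirtualBackground, supportsBackgroundProcessors } from '@livekit/track-processors'; // import path is an assumption

// Stand-in for a LiveKit LocalVideoTrack; the real type lives in livekit-client.
declare const localVideoTrack: {
  setProcessor(p: unknown): Promise<void>;
  stopProcessor(): Promise<void>;
};

async function applyEffects() {
  if (!supportsBackgroundProcessors()) return; // bail out on unsupported browsers

  // Start with a blur...
  await localVideoTrack.setProcessor(BackgroundBlur(15));

  // ...then swap to a virtual background image.
  await localVideoTrack.stopProcessor();
  await localVideoTrack.setProcessor(VirtualBackground('/backgrounds/office.jpg'));
}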