From 81e0bdf041e75e7762a335e1b8824537cd8dcbeb Mon Sep 17 00:00:00 2001
From: Suraj Rana <52860258+backendsuraj@users.noreply.github.com>
Date: Wed, 30 Apr 2025 00:56:38 +0400
Subject: [PATCH] Add files via upload webgl/index src/index (non webgl)

---
 BackgroundTransformer.ts | 195 +++++++++++++++++++++++++++++++++++++++
 index.ts                 | 130 ++++++++++++++++++++++++++
 2 files changed, 325 insertions(+)
 create mode 100644 BackgroundTransformer.ts
 create mode 100644 index.ts

diff --git a/BackgroundTransformer.ts b/BackgroundTransformer.ts
new file mode 100644
index 0000000..3854c93
--- /dev/null
+++ b/BackgroundTransformer.ts
@@ -0,0 +1,195 @@
import * as vision from '@mediapipe/tasks-vision';
import { dependencies } from '../../package.json';
import VideoTransformer from './VideoTransformer';
import { VideoTransformerInitOptions } from './types';

/**
 * Options forwarded into the MediaPipe segmenter's `baseOptions`.
 * NOTE(review): the uploaded file contained a bare `Partial` (invalid TS — the
 * generic argument was stripped); reconstructed from how the value is spread
 * into `baseOptions` in `init()` below. Confirm against the original source.
 */
export type SegmenterOptions = Partial<vision.ImageSegmenterOptions['baseOptions']>;

/** Per-frame timing stats reported through `onFrameProcessed`. */
export interface FrameProcessingStats {
  /** segmentation time + filter/compositing time, in milliseconds */
  processingTimeMs: number;
  segmentationTimeMs: number;
  filterTimeMs: number;
}

export type BackgroundOptions = {
  blurRadius?: number;
  imagePath?: string;
  /** CSS selector of a <video> element to use as a live background */
  backgroundVideoSelector?: string;
  /** cannot be updated through the `update` method, needs a restart */
  segmenterOptions?: SegmenterOptions;
  /** cannot be updated through the `update` method, needs a restart */
  assetPaths?: { tasksVisionFileSet?: string; modelAssetPath?: string };
  /** called when a new frame is processed */
  onFrameProcessed?: (stats: FrameProcessingStats) => void;
};

/**
 * Video transformer that segments each frame with MediaPipe and blurs or
 * replaces the background through the WebGL pipeline of the base class.
 */
export default class BackgroundProcessor extends VideoTransformer {
  /** True when every API this processor relies on exists in the current browser. */
  static get isSupported() {
    return (
      typeof OffscreenCanvas !== 'undefined' &&
      typeof VideoFrame !== 'undefined' &&
      typeof createImageBitmap !== 'undefined' &&
      !!document.createElement('canvas').getContext('webgl2')
    );
  }

  imageSegmenter?: vision.ImageSegmenter;

  segmentationResults: vision.ImageSegmenterResult | undefined;

  backgroundImage: ImageBitmap | null = null;

  backgroundVideo: HTMLVideoElement | null = null;

  options: BackgroundOptions;

  segmentationTimeMs: number = 0;

  constructor(opts: BackgroundOptions) {
    super();
    this.options = opts;
    // fire-and-forget: update() only touches GL state that may not exist yet
    void this.update(opts);
  }

  async init({ outputCanvas, inputElement: inputVideo }: VideoTransformerInitOptions) {
    // Initialize WebGL with appropriate options based on our current state
    await super.init({ outputCanvas, inputElement: inputVideo });

    const fileSet = await vision.FilesetResolver.forVisionTasks(
      this.options.assetPaths?.tasksVisionFileSet ??
        `https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@${dependencies['@mediapipe/tasks-vision']}/wasm`,
    );

    this.imageSegmenter = await vision.ImageSegmenter.createFromOptions(fileSet, {
      baseOptions: {
        modelAssetPath:
          this.options.assetPaths?.modelAssetPath ??
          'https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter/float16/latest/selfie_segmenter.tflite',
        delegate: 'GPU',
        ...this.options.segmenterOptions,
      },
      canvas: this.canvas,
      runningMode: 'VIDEO',
      outputCategoryMask: true,
      outputConfidenceMasks: false,
    });

    // Skip loading the image here if update() already loaded the image below
    if (this.options?.imagePath && !this.backgroundImage) {
      await this.loadBackground(this.options.imagePath).catch((err) =>
        console.error('Error while loading processor background image: ', err),
      );
    }
    if (this.options.backgroundVideoSelector && !this.backgroundVideo) {
      await this.setBgVideo(this.options.backgroundVideoSelector);
    }
    if (this.options.blurRadius) {
      this.gl?.setBlurRadius(this.options.blurRadius);
    }
  }

  async destroy() {
    await super.destroy();
    await this.imageSegmenter?.close();
    this.backgroundImage = null;
    // drop the video reference too so the element can be garbage collected
    this.backgroundVideo = null;
  }

  /** Loads the image at `path` into an ImageBitmap and hands it to the GL pipeline. */
  async loadBackground(path: string) {
    const img = new Image();

    await new Promise((resolve, reject) => {
      img.crossOrigin = 'Anonymous';
      img.onload = () => resolve(img);
      img.onerror = (err) => reject(err);
      img.src = path;
    });
    const imageData = await createImageBitmap(img);
    // keep a reference so init() can tell the image is already loaded
    // (previously never assigned, so the skip-reload check in init() was dead)
    this.backgroundImage = imageData;
    this.gl?.setBackgroundImage(imageData);
  }

  /** Resolves the selector to an HTMLVideoElement and uses it as the background. */
  async setBgVideo(videoEl: string) {
    this.backgroundVideo = document.querySelector<HTMLVideoElement>(videoEl);
    if (this.backgroundVideo) {
      this.gl?.setBackgroundVideo(this.backgroundVideo);
    } else {
      console.warn('No video element found for selector: ', videoEl);
    }
  }

  async transform(frame: VideoFrame, controller: TransformStreamDefaultController<VideoFrame>) {
    try {
      if (!(frame instanceof VideoFrame) || frame.codedWidth === 0 || frame.codedHeight === 0) {
        console.debug('empty frame detected, ignoring');
        return;
      }

      if (this.isDisabled) {
        controller.enqueue(frame);
        return;
      }
      const frameTimeMs = Date.now();
      if (!this.canvas) {
        throw TypeError('Canvas needs to be initialized first');
      }
      this.canvas.width = frame.displayWidth;
      this.canvas.height = frame.displayHeight;
      // Promise<void> is required: the executor resolves with no value
      const segmentationPromise = new Promise<void>((resolve, reject) => {
        try {
          const segmentationStartTimeMs = performance.now();
          this.imageSegmenter?.segmentForVideo(frame, segmentationStartTimeMs, (result) => {
            this.segmentationTimeMs = performance.now() - segmentationStartTimeMs;
            this.segmentationResults = result;
            this.updateMask(result.categoryMask);
            result.close();
            resolve();
          });
        } catch (e) {
          reject(e);
        }
      });

      const filterStartTimeMs = performance.now();
      this.drawFrame(frame);
      if (this.canvas && this.canvas.width > 0 && this.canvas.height > 0) {
        const newFrame = new VideoFrame(this.canvas, {
          // `??` instead of `||`: 0 is a valid frame timestamp
          timestamp: frame.timestamp ?? frameTimeMs,
        });
        controller.enqueue(newFrame);
        const filterTimeMs = performance.now() - filterStartTimeMs;
        const stats: FrameProcessingStats = {
          processingTimeMs: this.segmentationTimeMs + filterTimeMs,
          segmentationTimeMs: this.segmentationTimeMs,
          filterTimeMs,
        };
        this.options.onFrameProcessed?.(stats);
      } else {
        controller.enqueue(frame);
      }
      await segmentationPromise;
    } catch (e) {
      console.error('Error while processing frame: ', e);
    } finally {
      frame.close();
    }
  }

  /**
   * Merges `opts` into the current options and applies the ones that can be
   * changed at runtime. `segmenterOptions`/`assetPaths` still need a restart
   * (see BackgroundOptions).
   */
  async update(opts: BackgroundOptions) {
    this.options = { ...this.options, ...opts };
    if (opts.blurRadius) {
      this.gl?.setBlurRadius(opts.blurRadius);
    } else if (opts.imagePath) {
      await this.loadBackground(opts.imagePath);
    } else if (opts.backgroundVideoSelector) {
      // previously ignored here, so a live background could never be swapped
      await this.setBgVideo(opts.backgroundVideoSelector);
    }
  }

  private async drawFrame(frame: VideoFrame) {
    if (!this.gl) return;
    this.gl.renderFrame(frame);
  }

  private async updateMask(mask: vision.MPMask | undefined) {
    if (!mask) return;
    this.gl?.updateMask(mask.getAsWebGLTexture());
  }
}

diff --git a/index.ts b/index.ts
new file mode 100644
index 0000000..1fcc17d
--- /dev/null
+++ b/index.ts
@@ -0,0 +1,130 @@
import ProcessorWrapper, { ProcessorWrapperOptions } from './ProcessorWrapper';
import BackgroundTransformer, {
  BackgroundOptions,
  FrameProcessingStats,
  SegmenterOptions,
} from './transformers/BackgroundTransformer';

export * from './transformers/types';
export { default as VideoTransformer } from './transformers/VideoTransformer';
export {
  ProcessorWrapper,
  type BackgroundOptions,
  type SegmenterOptions,
  BackgroundTransformer,
  type ProcessorWrapperOptions,
};

/**
 * Determines if the current browser supports background processors
 */
export const supportsBackgroundProcessors = () =>
  BackgroundTransformer.isSupported && ProcessorWrapper.isSupported;

/**
 * Determines if the current browser supports modern background processors, which yield better performance
 */
export const supportsModernBackgroundProcessors = () =>
  BackgroundTransformer.isSupported && ProcessorWrapper.hasModernApiSupport;

export interface BackgroundProcessorOptions extends ProcessorWrapperOptions {
  blurRadius?: number;
  imagePath?: string;
  backgroundVideoSelector?: string;
  segmenterOptions?: SegmenterOptions;
  assetPaths?: { tasksVisionFileSet?: string; modelAssetPath?: string };
  onFrameProcessed?: (stats: FrameProcessingStats) => void;
}

/** Creates a processor that blurs the participant's background. */
export const BackgroundBlur = (
  blurRadius: number = 10,
  segmenterOptions?: SegmenterOptions,
  onFrameProcessed?: (stats: FrameProcessingStats) => void,
  processorOptions?: ProcessorWrapperOptions,
) => {
  return BackgroundProcessor(
    {
      blurRadius,
      segmenterOptions,
      onFrameProcessed,
      ...processorOptions,
    },
    'background-blur',
  );
};

/** Creates a processor that replaces the background with a static image. */
export const VirtualBackground = (
  imagePath: string,
  segmenterOptions?: SegmenterOptions,
  onFrameProcessed?: (stats: FrameProcessingStats) => void,
  processorOptions?: ProcessorWrapperOptions,
) => {
  return BackgroundProcessor(
    {
      imagePath,
      segmenterOptions,
      onFrameProcessed,
      ...processorOptions,
    },
    'virtual-background',
  );
};

/** Creates a processor that replaces the background with a playing <video> element. */
export const LiveBackground = (
  backgroundVideoSelector: string,
  segmenterOptions?: SegmenterOptions,
  onFrameProcessed?: (stats: FrameProcessingStats) => void,
  processorOptions?: ProcessorWrapperOptions,
) => {
  return BackgroundProcessor(
    {
      backgroundVideoSelector,
      segmenterOptions,
      onFrameProcessed,
      ...processorOptions,
    },
    // was 'virtual-background' (copy-paste from VirtualBackground); a distinct
    // name keeps the two processor kinds distinguishable at runtime
    'live-background',
  );
};

/**
 * Builds a ProcessorWrapper around a BackgroundTransformer.
 *
 * @throws Error when the browser lacks the required APIs.
 */
export const BackgroundProcessor = (
  options: BackgroundProcessorOptions,
  name = 'background-processor',
) => {
  const isTransformerSupported = BackgroundTransformer.isSupported;
  const isProcessorSupported = ProcessorWrapper.isSupported;

  if (!isTransformerSupported) {
    throw new Error('Background transformer is not supported in this browser');
  }

  if (!isProcessorSupported) {
    throw new Error(
      'Neither MediaStreamTrackProcessor nor canvas.captureStream() fallback is supported in this browser',
    );
  }

  // Extract transformer-specific options and processor options
  const {
    blurRadius,
    imagePath,
    backgroundVideoSelector,
    segmenterOptions,
    assetPaths,
    onFrameProcessed,
    ...processorOpts
  } = options;

  const transformer = new BackgroundTransformer({
    blurRadius,
    imagePath,
    backgroundVideoSelector,
    segmenterOptions,
    assetPaths,
    onFrameProcessed,
  });

  const processor = new ProcessorWrapper(transformer, name, processorOpts);

  return processor;
};