update blur background feature #173

Merged
merged 5 commits on Feb 18, 2024
3 changes: 0 additions & 3 deletions package.json
@@ -15,9 +15,6 @@
"@mui/icons-material": "^5.14.16",
"@mui/material": "^5.14.16",
"@reduxjs/toolkit": "^1.9.7",
"@tensorflow-models/body-pix": "^2.2.1",
"@tensorflow/tfjs": "^4.12.0",
"@tensorflow/tfjs-backend-webgl": "^4.12.0",
"awaitqueue": "^3.0.2",
"bowser": "^2.11.0",
"debug": "^4.3.4",
Binary file removed public/model/selfie_segmenter.tflite
Binary file not shown.
15 changes: 15 additions & 0 deletions src/App.tsx
@@ -17,6 +17,7 @@ import { permissions } from './utils/roles';
import { SnackbarKey, SnackbarProvider, useSnackbar } from 'notistack';
import { IconButton } from '@mui/material';
import { Close } from '@mui/icons-material';
import { meActions } from './store/slices/meSlice';

type AppParams = {
id: string;
@@ -74,6 +75,20 @@ const App = (): JSX.Element => {
navigate('/');
}
}, [ roomState ]);

/**
* Detect WebGL support.
*/
useEffect(() => {
const canvas = document.createElement('canvas');
const gl = canvas.getContext('webgl')
|| canvas.getContext('experimental-webgl');
// Report the result.

if (gl && gl instanceof WebGLRenderingContext) {
dispatch(meActions.setWebGLSupport(true));
}
}, []);

return (
<SnackbarProvider action={
2 changes: 2 additions & 0 deletions src/components/settingsdialog/SettingsDialog.tsx
@@ -18,6 +18,7 @@ const SettingsDialog = (): JSX.Element => {
const dispatch = useAppDispatch();
const settingsOpen = useAppSelector((state) => state.ui.settingsOpen);
const currentSettingsTab = useAppSelector((state) => state.ui.currentSettingsTab);
const closeButtonDisabled = useAppSelector((state) => state.me.videoInProgress || state.me.audioInProgress);

const handleCloseSettings = (): void => {
dispatch(uiActions.setUi({
@@ -54,6 +55,7 @@ const SettingsDialog = (): JSX.Element => {
onClick={handleCloseSettings}
startIcon={<CloseIcon />}
size='small'
disabled={closeButtonDisabled}
>
{ closeLabel()}
</Button>
2 changes: 2 additions & 0 deletions src/components/settingsdialog/SettingsSwitches.tsx
@@ -113,6 +113,7 @@ export const OpusFecSwitch = (): JSX.Element => {
export const BlurSwitch = (): JSX.Element => {
const dispatch = useAppDispatch();
const blurEnabled = useAppSelector((state) => state.settings.blurEnabled);
const blurSwitchDisabled = useAppSelector((state) => state.me.videoInProgress);

return (
<FormControlLabel
@@ -123,6 +124,7 @@ export const BlurSwitch = (): JSX.Element => {
onChange={(event: React.ChangeEvent<HTMLInputElement>): void => {
dispatch(updateVideoSettings({ blurEnabled: event.target.checked }));
}}
disabled={blurSwitchDisabled}
/>
}
label={ backgroundBlurLabel() }
97 changes: 83 additions & 14 deletions src/services/effectsService.tsx
@@ -1,29 +1,76 @@
/**
* ML Backend: TF Lite
* Copyright 2018 Google LLC
* License: Apache 2.0
* https://github.com/google-coral/tflite/blob/master/LICENSE
*
* Model: MediaPipe Selfie Segmentation
* Copyright 2021 Google LLC
* License: Apache 2.0
* https://storage.googleapis.com/mediapipe-assets/Model%20Card%20MediaPipe%20Selfie%20Segmentation.pdf
*/

import EventEmitter from 'events';
import type { BodyPix } from '@tensorflow-models/body-pix';
import { EffectsTrack } from '../utils/EffectsTrack';
import { Logger, timeoutPromise } from 'edumeet-common';
import { BlurBackgroundNotSupportedError, BlurTrack } from '../utils/blurbackground/BlurTrack';
import { EffectsTrack } from '../utils/types';

const logger = new Logger('EffectsService');

declare function createTFLiteModule(): Promise<TFLite>
declare function createTFLiteSIMDModule(): Promise<TFLite>

export interface TFLite {
_getModelBufferMemoryOffset(): number
_getInputMemoryOffset(): number
_getInputHeight(): number
_getInputWidth(): number
_getInputChannelCount(): number
_getOutputMemoryOffset(): number
_getOutputHeight(): number
_getOutputWidth(): number
_getOutputChannelCount(): number
// eslint-disable-next-line no-unused-vars
_loadModel(bufferSize: number): number
_runInference(): number
/* eslint-disable @typescript-eslint/no-explicit-any */
HEAPU8: any
HEAPF32: any
/* eslint-enable @typescript-eslint/no-explicit-any */
}

export const modelConfig = {
path: '/model/selfie_segmenter_landscape.tflite',
width: 256,
height: 144
};

/**
* A service that handles MediaPipe-based effects (background blur) on the video stream; audio effects such as RNNoise are not yet implemented.
*/
export class EffectsService extends EventEmitter {
private bodyPix?: BodyPix;
private effectTracks = new Map<string, EffectsTrack>();
private model?: ArrayBuffer;
public webGLSupport = false;

public async applyEffect(track: MediaStreamTrack): Promise<MediaStreamTrack> {
logger.debug('applyEffect() [track.id %s, kind: %s]', track.id, track.kind);
if (track.kind !== 'video')
throw new Error('Audio effects are not yet implemented.');

if (!this.bodyPix) await this.loadSegmenter();
const MLBackend = await this.createMLBackend();

if (!this.model) this.model = await this.createModel();

// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const effectTrack = new EffectsTrack(this.bodyPix!, track);
const effectTrack = new BlurTrack(MLBackend, this.model, track, this.webGLSupport);

this.effectTracks.set(effectTrack.outputTrack.id, effectTrack);

return effectTrack.outputTrack;
}

public stop(trackId?: string): void {
logger.debug('stop() [trackId %s]', trackId);
if (trackId) {
const track = this.effectTracks.get(trackId);

@@ -38,18 +85,40 @@ export class EffectsService extends EventEmitter {
}
}

private async loadSegmenter() {
/* const tfjs = await import('@tensorflow/tfjs');
const tfjsWasm = await import('@tensorflow/tfjs-backend-wasm');
private async createMLBackend() {
let MLBackend: TFLite | undefined;
const LOAD_BACKEND_TIMEOUT = 10000;

tfjsWasm.setWasmPaths(`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${tfjsWasm.version_wasm}/dist/`);
// First, try the SIMD build if the browser supports it.
try {
MLBackend = await timeoutPromise(createTFLiteSIMDModule(), LOAD_BACKEND_TIMEOUT);
if (!MLBackend) throw new Error('No ML Backend');
} catch (error) {
logger.error(error);
}

// If not, try without SIMD support.
if (!MLBackend) {
try {
MLBackend = await timeoutPromise(createTFLiteModule(), LOAD_BACKEND_TIMEOUT);
if (!MLBackend) throw new Error('No ML Backend');
} catch (error) {
logger.error(error);
}
}

await tfjs.setBackend('wasm'); */
if (!MLBackend) {
throw new BlurBackgroundNotSupportedError('Could not create ML Backend');
}

return MLBackend;
}

await import('@tensorflow/tfjs-backend-webgl');
private async createModel() {
const response = await fetch(modelConfig.path);

const bodyPix = await import('@tensorflow-models/body-pix');
if (!response.ok) throw new BlurBackgroundNotSupportedError('Could not load model');

this.bodyPix = await bodyPix.load();
return await response.arrayBuffer();
}
}
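
For readers unfamiliar with the TFLite WASM backend, here is a minimal sketch (not part of this diff) of how a module exposing the TFLite interface above is typically driven: the model bytes are copied into WASM memory and parsed with _loadModel(), normalized RGB pixels are written at the input offset, _runInference() is called, and the mask is read back from the output offset. The float32 layout, the byte-offset-to-index division by 4, and the single-channel mask output are assumptions based on similar selfie-segmentation pipelines; in this PR the real per-frame loop lives in BlurTrack, and the import path below is illustrative.

```ts
import type { TFLite } from '../services/effectsService';
import { modelConfig } from '../services/effectsService';

export async function loadModelIntoBackend(tflite: TFLite, model: ArrayBuffer): Promise<void> {
	// Copy the raw .tflite bytes into WASM memory and let the backend parse them.
	const modelOffset = tflite._getModelBufferMemoryOffset();

	tflite.HEAPU8.set(new Uint8Array(model), modelOffset);
	tflite._loadModel(model.byteLength);
}

export function segmentFrame(tflite: TFLite, pixels: ImageData): Float32Array {
	// Offsets are byte offsets; HEAPF32 is a float32 view, hence the division by 4.
	const inputOffset = tflite._getInputMemoryOffset() / 4;
	const outputOffset = tflite._getOutputMemoryOffset() / 4;
	const { width, height } = modelConfig;

	// Write normalized RGB values (dropping alpha) into the input tensor.
	// Assumes the frame has already been scaled to modelConfig width/height.
	for (let i = 0; i < width * height; i++) {
		tflite.HEAPF32[inputOffset + (i * 3)] = pixels.data[i * 4] / 255;
		tflite.HEAPF32[inputOffset + (i * 3) + 1] = pixels.data[(i * 4) + 1] / 255;
		tflite.HEAPF32[inputOffset + (i * 3) + 2] = pixels.data[(i * 4) + 2] / 255;
	}

	tflite._runInference();

	// Assumed single-channel output: one float per pixel, the probability that
	// the pixel belongs to the person (check _getOutputChannelCount() at runtime).
	return tflite.HEAPF32.slice(outputOffset, outputOffset + (width * height));
}
```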
1 change: 1 addition & 0 deletions src/store/actions/mediaActions.tsx
@@ -545,6 +545,7 @@ export const updateWebcam = ({

if ((restart && webcamProducer) || start) {
if (webcamProducer) {
effectsService.stop(webcamProducer.track?.id);
dispatch(producersActions.closeProducer({
producerId: webcamProducer.id,
local: true
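
For orientation, a small sketch (not taken from this PR) of the lifecycle the one-line change above completes: the previous effect chain is stopped before the old producer is closed, and the fresh camera track is wrapped with applyEffect() when blur is enabled. The helper name and its parameters are hypothetical; only the two effectsService calls mirror the real code in updateWebcam().

```ts
import { EffectsService } from '../services/effectsService';

// Hypothetical helper illustrating the apply/stop ordering used by updateWebcam():
// release the old BlurTrack before closing the producer, then wrap the new raw
// camera track with applyEffect() when blur is enabled.
export async function swapWebcamTrack(
	effectsService: EffectsService,
	oldTrackId: string | undefined,
	rawTrack: MediaStreamTrack,
	blurEnabled: boolean
): Promise<MediaStreamTrack> {
	// Mirrors the effectsService.stop() call added to updateWebcam() above.
	if (oldTrackId) effectsService.stop(oldTrackId);

	// applyEffect() resolves to a new MediaStreamTrack with the background blurred;
	// it rejects with BlurBackgroundNotSupportedError when no ML backend can be created.
	return blurEnabled ? await effectsService.applyEffect(rawTrack) : rawTrack;
}
```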
29 changes: 29 additions & 0 deletions src/store/middlewares/effectsMiddleware.tsx
@@ -0,0 +1,29 @@
import { Middleware } from '@reduxjs/toolkit';
import { MiddlewareOptions } from '../store';
import { Logger } from 'edumeet-common';
import { meActions } from '../slices/meSlice';

const logger = new Logger('EffectsMiddleware');

/**
* @param options - Middleware options.
* @returns {Middleware} Redux middleware.
*/
const createEffectsMiddleware = ({
effectsService,
}: MiddlewareOptions): Middleware => {
logger.debug('createEffectsMiddleware()');

const middleware: Middleware = () =>
(next) => async (action) => {
if (meActions.setWebGLSupport.match(action)) {
effectsService.webGLSupport = true;
}

return next(action);
};

return middleware;
};

export default createEffectsMiddleware;
5 changes: 5 additions & 0 deletions src/store/slices/meSlice.tsx
@@ -24,6 +24,7 @@ export interface MeState {
videoMuted: boolean;
lostAudio: boolean;
lostVideo: boolean;
webGLSupport: boolean;
// Status flags
audioInProgress: boolean;
videoInProgress: boolean;
@@ -50,6 +51,7 @@ const initialState: MeState = {
videoMuted: false,
lostAudio: false,
lostVideo: false,
webGLSupport: false,
// Status flags
audioInProgress: false,
videoInProgress: false,
@@ -114,6 +116,9 @@
setLostVideo: ((state, action: PayloadAction<boolean>) => {
state.lostVideo = action.payload;
}),
setWebGLSupport: ((state, action: PayloadAction<boolean>) => {
state.webGLSupport = action.payload;
}),
// Status flags
setAudioInProgress: ((state, action: PayloadAction<boolean>) => {
state.audioInProgress = action.payload;
2 changes: 2 additions & 0 deletions src/store/store.tsx
@@ -50,6 +50,7 @@ import { Application, feathers } from '@feathersjs/feathers/lib';
import rest from '@feathersjs/rest-client';
import authentication from '@feathersjs/authentication-client';
import { EffectsService } from '../services/effectsService';
import createEffectsMiddleware from './middlewares/effectsMiddleware';

export interface MiddlewareOptions {
mediaService: MediaService;
@@ -137,6 +138,7 @@ export const store = configureStore({
createRoomMiddleware(middlewareOptions),
createNotificationMiddleware(middlewareOptions),
createRecordingMiddleware(middlewareOptions),
createEffectsMiddleware(middlewareOptions),
...(edumeetConfig.reduxLoggingEnabled ? [ createLogger({
duration: true,
timestamp: false,
114 changes: 0 additions & 114 deletions src/utils/EffectsTrack.tsx

This file was deleted.
