Remove legacy WebRTC metrics and migrate to standardized metrics (#2086)
xuesichao authored Mar 9, 2022
1 parent 103d1d9 commit 6bf4f02
Showing 48 changed files with 4,767 additions and 6,218 deletions.
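
The migration replaces the legacy, callback-based `getStats` call, which only Chromium-based browsers supported, with the standardized, promise-based `getStats` API available in all modern browsers. A minimal sketch of the standardized call the SDK now relies on (the `RTCPeerConnection` here is a placeholder, not the SDK's internal connection):

```typescript
// Sketch of the standardized, promise-based getStats API that replaces the
// legacy callback-based form. Works on Chromium, Firefox, and Safari alike.
async function logOutboundRtpStats(peerConnection: RTCPeerConnection): Promise<void> {
  const report: RTCStatsReport = await peerConnection.getStats();
  report.forEach(stats => {
    // Entries use standardized types such as 'outbound-rtp', 'inbound-rtp', and
    // 'candidate-pair' instead of the legacy goog-prefixed report values.
    if (stats.type === 'outbound-rtp') {
      console.log(stats.id, stats);
    }
  });
}
```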
CHANGELOG.md (5 changes: 5 additions & 0 deletions)
@@ -11,9 +11,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Add compression support when sending and receiving sdp messages.
- Add automatic language identification support from Amazon Transcribe for live transcription APIs.
- Add `rtcStatsReport` property to `DefaultClientMetricReport` to store raw [`RTCStatsReport`](https://developer.mozilla.org/en-US/docs/Web/API/RTCStatsReport) and expose it via `metricsDidReceive` event.

### Removed

- Removed SDP interface.
- Remove [legacy (non-promise-based) `getStats` API](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats#obsolete_syntax) call in `DefaultStatsCollector`. This API was previously used to obtain WebRTC metrics only for Chromium-based browsers. Now SDK obtains WebRTC metrics for all browsers via [standardized (promise-based) `getStats` API](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/getStats#syntax).
- Remove `browserBehavior` from the constructor of `DefaultStatsCollector`.

### Changed

- Change `resolveSpec` and `resolveOptions` in BackgroundBlurVideoFrameProcessor and BackgroundReplacementVideoFrameProcessor to clone parameter objects.
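Taken together, these changelog entries mean applications read WebRTC metrics through the standardized names only, and can additionally inspect the raw report. A minimal sketch of an observer, assuming the `ClientMetricReport` delivered to `metricsDidReceive` exposes the new `rtcStatsReport` property alongside the existing `getObservableMetrics()` (exact typings may differ between SDK versions):

```typescript
import { AudioVideoObserver, ClientMetricReport } from 'amazon-chime-sdk-js';

class MetricsObserver implements AudioVideoObserver {
  metricsDidReceive(clientMetricReport: ClientMetricReport): void {
    const metrics = clientMetricReport.getObservableMetrics();
    // Standardized estimates replace the removed availableSendBandwidth /
    // availableReceiveBandwidth values.
    console.log('uplink estimate (bps):', metrics.availableOutgoingBitrate);
    console.log('downlink estimate (bps):', metrics.availableIncomingBitrate);

    // Raw RTCStatsReport newly stored on DefaultClientMetricReport; cast because
    // the property may not be part of the ClientMetricReport interface itself.
    const raw = (clientMetricReport as any).rtcStatsReport as RTCStatsReport | undefined;
    raw?.forEach(stats => {
      if (stats.type === 'candidate-pair') {
        console.log('candidate pair stats:', stats);
      }
    });
  }
}
```

Such an observer would be registered with `audioVideo.addObserver(...)`, just as the demo below registers its other observers.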
demos/browser/app/meetingV2/meetingV2.ts (82 changes: 36 additions & 46 deletions)
@@ -148,7 +148,7 @@ function getVoiceFocusSpec(joinInfo: any): VoiceFocusSpec {
const es = joinInfo.Meeting.Meeting?.MeetingFeatures?.Audio?.EchoReduction === 'AVAILABLE';
let spec: VoiceFocusSpec = VOICE_FOCUS_SPEC;
if (!spec.name) {
spec.name = es ? voiceFocusName('ns_es') : voiceFocusName('default');
spec.name = es ? voiceFocusName('ns_es') : voiceFocusName('default');
}
return spec;
};
@@ -165,9 +165,9 @@ const BACKGROUND_BLUR_PATHS: BackgroundFilterPaths = BACKGROUND_BLUR_CDN && {
simd: `${BACKGROUND_BLUR_CDN}/bgblur/wasm/_cwt-wasm-simd.wasm`,
};
const BACKGROUND_BLUR_MODEL = BACKGROUND_BLUR_CDN && ModelSpecBuilder.builder()
.withSelfieSegmentationDefaults()
.withPath(`${BACKGROUND_BLUR_CDN}/bgblur/models/selfie_segmentation_landscape.tflite`)
.build();
.withSelfieSegmentationDefaults()
.withPath(`${BACKGROUND_BLUR_CDN}/bgblur/models/selfie_segmentation_landscape.tflite`)
.build();
const BACKGROUND_BLUR_ASSET_SPEC = (BACKGROUND_BLUR_ASSET_GROUP || BACKGROUND_BLUR_REVISION_ID) && {
assetGroup: BACKGROUND_BLUR_ASSET_GROUP,
revisionID: BACKGROUND_BLUR_REVISION_ID,
@@ -250,8 +250,8 @@ export class DemoMeetingApp

attendeeIdPresenceHandler: (undefined | ((attendeeId: string, present: boolean, externalUserId: string, dropped: boolean) => void)) = undefined;
activeSpeakerHandler: (undefined | ((attendeeIds: string[]) => void)) = undefined;
blurObserver: (undefined | BackgroundBlurVideoFrameProcessorObserver ) = undefined;
replacementObserver: (undefined | BackgroundReplacementVideoFrameProcessorObserver ) = undefined;
blurObserver: (undefined | BackgroundBlurVideoFrameProcessorObserver) = undefined;
replacementObserver: (undefined | BackgroundReplacementVideoFrameProcessorObserver) = undefined;

showActiveSpeakerScores = false;
meeting: string | null = null;
@@ -477,13 +477,13 @@ export class DemoMeetingApp
}

async initBackgroundBlur(): Promise<void> {
try {
this.supportsBackgroundBlur = await BackgroundBlurVideoFrameProcessor.isSupported(this.getBackgroundBlurSpec());
}
catch (e) {
this.log(`[DEMO] Does not support background blur: ${e.message}`);
this.supportsBackgroundBlur = false;
}
try {
this.supportsBackgroundBlur = await BackgroundBlurVideoFrameProcessor.isSupported(this.getBackgroundBlurSpec());
}
catch (e) {
this.log(`[DEMO] Does not support background blur: ${e.message}`);
this.supportsBackgroundBlur = false;
}
}

async createReplacementImageBlob(startColor: string, endColor: string): Promise<Blob> {
@@ -496,7 +496,7 @@ export class DemoMeetingApp
grd.addColorStop(1, endColor);
ctx.fillStyle = grd;
ctx.fillRect(0, 0, 500, 500);
const blob = await new Promise<Blob> (resolve => {
const blob = await new Promise<Blob>(resolve => {
canvas.toBlob(resolve);
});
return blob;
@@ -779,7 +779,7 @@ export class DemoMeetingApp
);
});

if(!this.areVideoFiltersSupported()) {
if (!this.areVideoFiltersSupported()) {
document.getElementById('video-input-filter-container').style.display = 'none';
}

@@ -1548,16 +1548,8 @@ export class DemoMeetingApp
const metricReport = clientMetricReport.getObservableMetrics();
this.videoMetricReport = clientMetricReport.getObservableVideoMetrics();

this.displayEstimatedUplinkBandwidth(
metricReport.availableSendBandwidth
? metricReport.availableSendBandwidth
: metricReport.availableOutgoingBitrate
);
this.displayEstimatedDownlinkBandwidth(
metricReport.availableReceiveBandwidth
? metricReport.availableReceiveBandwidth
: metricReport.availableIncomingBitrate
);
this.displayEstimatedUplinkBandwidth(metricReport.availableOutgoingBitrate);
this.displayEstimatedDownlinkBandwidth(metricReport.availableIncomingBitrate);

this.isButtonOn('button-video-stats') && this.videoTileCollection.showVideoWebRTCStats(this.videoMetricReport);
}
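
With the fallback chain removed, the displayed estimate now comes straight from the standardized values. A hypothetical formatting helper (not part of the demo) that tolerates a report where the estimate has not been populated yet might look like this:

```typescript
// Hypothetical helper: format a bandwidth estimate for display. The standardized
// estimates are reported in bits per second and may be missing or NaN before the
// first candidate-pair stats arrive.
function formatBitrateKbps(bitsPerSecond: number | undefined): string {
  if (typeof bitsPerSecond !== 'number' || Number.isNaN(bitsPerSecond)) {
    return 'Unknown';
  }
  return `${(bitsPerSecond / 1000).toFixed(0)} kbps`;
}

// e.g. formatBitrateKbps(metricReport.availableOutgoingBitrate)
```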
@@ -1752,9 +1744,9 @@ export class DemoMeetingApp
this.audioVideo.addContentShareObserver(this);

this.videoTileCollection = new VideoTileCollection(this.audioVideo,
this.meetingLogger,
this.usePriorityBasedDownlinkPolicy ? new VideoPreferenceManager(this.meetingLogger, this.priorityBasedDownlinkPolicy) : undefined,
(document.getElementById('enable-pagination') as HTMLInputElement).checked ? DemoMeetingApp.REDUCED_REMOTE_VIDEO_PAGE_SIZE : DemoMeetingApp.REMOTE_VIDEO_PAGE_SIZE)
this.meetingLogger,
this.usePriorityBasedDownlinkPolicy ? new VideoPreferenceManager(this.meetingLogger, this.priorityBasedDownlinkPolicy) : undefined,
(document.getElementById('enable-pagination') as HTMLInputElement).checked ? DemoMeetingApp.REDUCED_REMOTE_VIDEO_PAGE_SIZE : DemoMeetingApp.REMOTE_VIDEO_PAGE_SIZE)
this.audioVideo.addObserver(this.videoTileCollection);

this.initContentShareDropDownItems();
@@ -1916,7 +1908,7 @@ export class DemoMeetingApp
if (!this.roster[attendeeId] || !this.roster[attendeeId].name) {
this.roster[attendeeId] = {
...this.roster[attendeeId],
... {name: externalUserId.split('#').slice(-1)[0] + (isContentAttendee ? ' «Content»' : '')}
... { name: externalUserId.split('#').slice(-1)[0] + (isContentAttendee ? ' «Content»' : '') }
};
}
this.audioVideo.realtimeSubscribeToVolumeIndicator(
@@ -2225,8 +2217,7 @@ export class DemoMeetingApp

appendNewSpeakerTranscriptDiv = (
segment: TranscriptSegment,
speakerToTranscriptSpanMap: Map<string, HTMLSpanElement>) =>
{
speakerToTranscriptSpanMap: Map<string, HTMLSpanElement>) => {
const speakerTranscriptDiv = document.createElement('div') as HTMLDivElement;
speakerTranscriptDiv.classList.add('transcript');

@@ -2274,15 +2265,15 @@ export class DemoMeetingApp
async startMediaCapture(): Promise<any> {
await fetch(
`${DemoMeetingApp.BASE_URL}startCapture?title=${encodeURIComponent(this.meeting)}`, {
method: 'POST',
});
method: 'POST',
});
}

async stopMediaCapture(): Promise<any> {
await fetch(
`${DemoMeetingApp.BASE_URL}endCapture?title=${encodeURIComponent(this.meeting)}`, {
method: 'POST',
});
method: 'POST',
});
}


@@ -2322,8 +2313,7 @@ export class DemoMeetingApp
// Also note that Firefox has its own device picker, which may be useful
// for the first device selection. Subsequent device selections could use
// a custom UX with a specific device id.
if(!this.defaultBrowserBehaviour.doesNotSupportMediaDeviceLabels())
{
if (!this.defaultBrowserBehaviour.doesNotSupportMediaDeviceLabels()) {
this.audioVideo.setDeviceLabelTrigger(
async (): Promise<MediaStream> => {
if (this.isRecorder() || this.isBroadcaster()) {
Expand All @@ -2334,7 +2324,7 @@ export class DemoMeetingApp
this.switchToFlow('flow-devices');
return stream;
}
);
);
}
}

@@ -2414,7 +2404,7 @@ export class DemoMeetingApp
});
}
if (additionalOptions.length) {
this.createDropdownMenuItem(menu, '──────────', () => {}).classList.add('text-center');
this.createDropdownMenuItem(menu, '──────────', () => { }).classList.add('text-center');
for (const additionalOption of additionalOptions) {
this.createDropdownMenuItem(
menu,
Expand All @@ -2427,15 +2417,15 @@ export class DemoMeetingApp
}
}
if (additionalToggles?.length) {
this.createDropdownMenuItem(menu, '──────────', () => {}).classList.add('text-center');
this.createDropdownMenuItem(menu, '──────────', () => { }).classList.add('text-center');
for (const { name, oncreate, action } of additionalToggles) {
const id = `toggle-${elementId}-${name.replace(/\s/g, '-')}`;
const elem = this.createDropdownMenuItem(menu, name, action, id);
oncreate(elem);
}
}
if (!menu.firstElementChild) {
this.createDropdownMenuItem(menu, 'Device selection unavailable', () => {});
this.createDropdownMenuItem(menu, 'Device selection unavailable', () => { });
}
}

@@ -2479,7 +2469,7 @@ export class DemoMeetingApp
}
}

private async stopVideoProcessor(): Promise<void> {
private async stopVideoProcessor(): Promise<void> {
this.log('Clearing filter variables and stopping the video transform device');
this.chosenVideoFilter = 'None';
this.selectedVideoFilterItem = 'None';
@@ -2529,14 +2519,14 @@ export class DemoMeetingApp
}

private async populateFilterList(isPreviewWindow: boolean, genericName: string, filters: VideoFilterName[]): Promise<void> {
if(isPreviewWindow) {
if (isPreviewWindow) {
this.populateVideoPreviewFilterList(
'video-input-filter',
genericName,
filters
);
}
else {
else {
this.populateInMeetingDeviceList(
'dropdown-menu-filter',
genericName,
@@ -3131,7 +3121,7 @@ export class DemoMeetingApp
};

const cpuUtilization: number = Number(videoFilter.match(/([0-9]{2})%/)[1]);
this.blurProcessor = await BackgroundBlurVideoFrameProcessor.create(this.getBackgroundBlurSpec(), {filterCPUUtilization: cpuUtilization});
this.blurProcessor = await BackgroundBlurVideoFrameProcessor.create(this.getBackgroundBlurSpec(), { filterCPUUtilization: cpuUtilization });
this.blurProcessor.addObserver(this.blurObserver);
return this.blurProcessor;
}
@@ -3365,7 +3355,7 @@ export class DemoMeetingApp
audioVideoDidStartConnecting(reconnecting: boolean): void {
this.log(`session connecting. reconnecting: ${reconnecting}`);
if (reconnecting && this.isAbortingOnReconnect()) {
fatal(Error('reconnect occured with abort-on-reconnect set to true'));
fatal(Error('reconnect occured with abort-on-reconnect set to true'));
}
}

demos/browser/app/meetingV2/video/VideoTileCollection.ts (4 changes: 0 additions & 4 deletions)
@@ -31,8 +31,6 @@ const ConfigLevelToTargetDisplaySize: { [Key in ConfigLevel]: TargetDisplaySize
};

const VideoUpstreamMetricsKeyStats: { [key: string]: string } = {
videoUpstreamGoogFrameHeight: 'Frame Height',
videoUpstreamGoogFrameWidth: 'Frame Width',
videoUpstreamFrameHeight: 'Frame Height',
videoUpstreamFrameWidth: 'Frame Width',
videoUpstreamBitrate: 'Bitrate (bps)',
@@ -42,8 +40,6 @@ const VideoDownstreamMetricsKeyStats: { [key: string]: string } = {
};

const VideoDownstreamMetricsKeyStats: { [key: string]: string } = {
videoDownstreamGoogFrameHeight: 'Frame Height',
videoDownstreamGoogFrameWidth: 'Frame Width',
videoDownstreamFrameHeight: 'Frame Height',
videoDownstreamFrameWidth: 'Frame Width',
videoDownstreamBitrate: 'Bitrate (bps)',
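
With the goog-prefixed keys deleted, only the standardized frame-size entries remain, so a single lookup table now serves every browser. A rough sketch of how such a table could drive a per-stream stats overlay, assuming the per-attendee, per-SSRC shape that the demo's `getObservableVideoMetrics()` / `showVideoWebRTCStats()` path consumes:

```typescript
// Sketch only: videoMetricReport is assumed to be keyed by attendee ID, then by
// stream (SSRC), then by metric name, mirroring what the demo passes to
// showVideoWebRTCStats(). Metrics not listed in keyStatsToShow are skipped.
type VideoMetricReport = {
  [attendeeId: string]: { [ssrc: string]: { [metricName: string]: number } };
};

function formatStreamStats(
  videoMetricReport: VideoMetricReport,
  attendeeId: string,
  keyStatsToShow: { [key: string]: string } // e.g. VideoUpstreamMetricsKeyStats
): string[] {
  const lines: string[] = [];
  const streams = videoMetricReport[attendeeId] ?? {};
  for (const ssrc of Object.keys(streams)) {
    for (const [metricName, label] of Object.entries(keyStatsToShow)) {
      const value = streams[ssrc][metricName];
      if (value !== undefined) {
        lines.push(`${label}: ${value}`);
      }
    }
  }
  return lines;
}
```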