Commit d12e4fe

Record Desktop Audio (#1079)
This PR allows Studio to record desktop audio and resolves #551. Unfortunately, not all browsers are supported yet, as you can see [here](https://caniuse.com/mdn-api_mediadevices_getdisplaymedia_audio_capture_support) and as Lukas mentioned in [#551](#551). After selecting the display as a source, users get a hint about which browsers can record desktop audio.

Output of the recordings available for download:

- **Only camera recorded:** Nothing changed; the finished camera video will contain microphone audio (if recorded).
- **Only display recorded:** If the user recorded both desktop audio and microphone audio, the finished video will have both audio tracks.
- **Display and camera recorded:** The finished desktop video will contain the desktop audio and the finished camera video will contain the microphone audio.

**One thing/problem:** In the "Review & trim" step (also after uploading to Opencast), if both the display and the camera were recorded, you only hear the microphone audio or nothing (depending on whether microphone audio was recorded). I'm not entirely sure why. The audio is correct and audible for each of the videos that can be downloaded in the last step in Studio.
2 parents 59bea17 + a6faf88 commit d12e4fe
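
At its core, the feature relies on asking `getDisplayMedia` for audio and then checking whether the browser actually handed over an audio track, which is also what decides which hint the user sees. A minimal TypeScript sketch of that idea, with an illustrative helper name rather than Studio's actual code:

```typescript
// Sketch only: request display capture including audio, then check whether the
// browser actually shared an audio track. `captureDisplayWithAudio` is an
// illustrative name, not a function from Studio.
async function captureDisplayWithAudio(): Promise<MediaStream> {
  const stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: true, // best effort: unsupported browsers/OSes simply omit the track
  });

  if (stream.getAudioTracks().length === 0) {
    // This is the case where Studio shows the "display audio will not be
    // recorded" hint.
    console.info("No display audio track was shared by this browser/OS.");
  }

  return stream;
}
```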

6 files changed: +89 -33 lines changed

src/capturer.tsx (+1 -1)

```diff
@@ -55,7 +55,7 @@ export async function startDisplayCapture(
       ...videoConstraints,
       ...height,
     },
-    audio: false,
+    audio: true,
   };
 
   try {
```

src/i18n/locales/de.json (+3 -1)

```diff
@@ -55,7 +55,9 @@
     "aspect-ratio-auto": "auto",
     "quality": "Qualität",
     "quality-auto": "auto",
-    "preferences-note": "<0>Hinweis:</0> Dies sind lediglich Präferenzen. Es ist nicht garantiert, dass alle Einstellungen von Ihrem Gerät unterstützt werden. Im Zweifelsfall 'auto' wählen."
+    "preferences-note": "<0>Hinweis:</0> Dies sind lediglich Präferenzen. Es ist nicht garantiert, dass alle Einstellungen von Ihrem Gerät unterstützt werden. Im Zweifelsfall 'auto' wählen.",
+    "display-audio-shared": "Bildschirmton wird aufgezeichnet.",
+    "display-audio-not-shared": "Bildschirmton wird nicht aufgezeichnet. <0>Hinweis</0>: Nicht alle Browser und Betriebssysteme unterstützen die Aufnahme des Bildschirmtons."
   },
   "audio": {
     "label": "Audioquelle auswählen",
```

src/i18n/locales/en.json (+3 -1)

```diff
@@ -55,7 +55,9 @@
     "aspect-ratio-auto": "auto",
     "quality": "Quality",
     "quality-auto": "auto",
-    "preferences-note": "<0>Note:</0> these are merely preferences and it cannot be guaranteed that all options are actually supported on your device. If in doubt, choose 'auto'."
+    "preferences-note": "<0>Note:</0> these are merely preferences and it cannot be guaranteed that all options are actually supported on your device. If in doubt, choose 'auto'.",
+    "display-audio-shared": "Display audio will be recorded.",
+    "display-audio-not-shared": "Display audio will not be recorded. <0>Note</0>: Not all browsers and operating systems support display audio capture."
   },
   "audio": {
     "label": "Select audio source",
```

src/steps/recording/index.tsx (+25 -9)

```diff
@@ -36,12 +36,29 @@ const addRecordOnStop = (
   };
 };
 
-const mixAudioIntoVideo = (audioStream: MediaStream | null, videoStream: MediaStream) => {
-  if (!(audioStream?.getAudioTracks().length)) {
-    return videoStream;
-  }
-  return new MediaStream([...videoStream.getVideoTracks(), ...audioStream.getAudioTracks()]);
-};
+const mixAudioIntoVideo = (audioStreams: (MediaStream | null)[], videoStream: MediaStream) => (
+  audioStreams.reduce<MediaStream>(
+    (stream, audioStream) => audioStream?.getAudioTracks().length
+      ? new MediaStream([
+        ...stream.getVideoTracks(),
+        ...(
+          stream.getAudioTracks().length
+            ? (() => {
+              const audioContext = new AudioContext();
+              const accumulatedAudio = audioContext.createMediaStreamSource(stream);
+              const currentAudio = audioContext.createMediaStreamSource(audioStream);
+              const resultAudio = audioContext.createMediaStreamDestination();
+              accumulatedAudio.connect(resultAudio);
+              currentAudio.connect(resultAudio);
+              return resultAudio.stream;
+            })()
+            : audioStream
+        ).getAudioTracks(),
+      ])
+      : stream,
+    videoStream,
+  )
+);
 
 
 export const Recording: React.FC<StepProps> = ({ goToNextStep, goToPrevStep }) => {
@@ -70,14 +87,13 @@ export const Recording: React.FC<StepProps> = ({ goToNextStep, goToPrevStep }) =
 
     if (displayStream) {
       const onStop = addRecordOnStop(dispatch, "desktop");
-      const stream = mixAudioIntoVideo(state.audioStream, displayStream);
+      const stream = mixAudioIntoVideo([state.audioStream], displayStream);
       desktopRecorder.current = new Recorder(stream, settings.recording, onStop);
      desktopRecorder.current.start();
     }
-
     if (userStream) {
       const onStop = addRecordOnStop(dispatch, "video");
-      const stream = mixAudioIntoVideo(state.audioStream, userStream);
+      const stream = mixAudioIntoVideo([state.audioStream, displayStream], userStream);
       videoRecorder.current = new Recorder(stream, settings.recording, onStop);
       videoRecorder.current.start();
     }
```
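
The reworked `mixAudioIntoVideo` above merges audio through the Web Audio API whenever the accumulated stream already carries an audio track. The same merging step in isolation, as a rough sketch (the helper name is illustrative, and it assumes both inputs contain at least one audio track):

```typescript
// Sketch of the Web Audio merging pattern used above: feed both sources into
// one MediaStreamAudioDestinationNode and use its mixed output stream.
// `mergeAudioStreams` is an illustrative name, not part of Studio's API.
function mergeAudioStreams(a: MediaStream, b: MediaStream): MediaStream {
  const audioContext = new AudioContext();
  const destination = audioContext.createMediaStreamDestination();

  // createMediaStreamSource throws if a stream has no audio track, so both
  // inputs are assumed to carry audio here.
  audioContext.createMediaStreamSource(a).connect(destination);
  audioContext.createMediaStreamSource(b).connect(destination);

  // destination.stream holds a single audio track containing both sources.
  return destination.stream;
}
```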

src/steps/video-setup/prefs.tsx (+2 -16)

```diff
@@ -18,6 +18,7 @@ import {
   stopUserCapture,
 } from "../../capturer";
 import { Select } from "../../ui/Select";
+import { OVERLAY_STYLE } from "./preview";
 
 
 /**
@@ -218,29 +219,14 @@ export const StreamSettings: React.FC<StreamSettingsProps> = ({ isDesktop, strea
       onClick={() => setIsExpanded(old => !old)}
       aria-label={label}
       css={{
-        border: "none",
-        display: "inline-block",
-        backgroundColor: "rgba(0, 0, 0, 0.3)",
-        color: "white",
-        padding: 8,
+        ...OVERLAY_STYLE,
         fontSize: 26,
-        backdropFilter: "invert(0.3) blur(4px)",
-        lineHeight: 0,
-        borderRadius: "10px",
-        cursor: "pointer",
-        "&:hover, &:focus-visible": {
-          backgroundColor: "rgba(0, 0, 0, 0.5)",
-        },
         "> svg": {
           transition: "transform 0.2s",
         },
         "&:hover > svg, &:focus > svg": {
           transform: isExpanded ? "none" : "rotate(45deg)",
         },
-        "&:focus-visible": {
-          outline: "5px dashed white",
-          outlineOffset: -2.5,
-        },
       }}
     >
       {isExpanded ? <FiX /> : <FiSettings />}
```

src/steps/video-setup/preview.tsx (+55 -5)

```diff
@@ -1,12 +1,13 @@
 import { useEffect, useRef } from "react";
-import { Spinner, match, unreachable, useColorScheme } from "@opencast/appkit";
-import { useTranslation } from "react-i18next";
+import { Spinner, WithTooltip, match, unreachable, useColorScheme } from "@opencast/appkit";
+import { Trans, useTranslation } from "react-i18next";
+import { LuInfo, LuVolume2, LuVolumeX } from "react-icons/lu";
 
 import { COLORS, dimensionsOf } from "../../util";
-import { StreamSettings } from "./prefs";
-import { Input } from ".";
 import { VideoBox, useVideoBoxResize } from "../../ui/VideoBox";
 import { ErrorBox } from "../../ui/ErrorBox";
+import { StreamSettings } from "./prefs";
+import { Input } from ".";
 
 
 
@@ -63,7 +64,10 @@ const StreamPreview: React.FC<{ input: Input }> = ({ input }) => {
     },
   }}>
     <PreviewVideo input={input} />
-    {input.stream && <StreamSettings isDesktop={input.isDesktop} stream={input.stream} />}
+    {input.stream && <>
+      {input.isDesktop && <DisplayAudioInfo stream={input.stream} />}
+      <StreamSettings isDesktop={input.isDesktop} stream={input.stream} />
+    </>}
   </div>
 );
};
@@ -145,3 +149,49 @@ const PreviewVideo: React.FC<{ input: Input }> = ({ input }) => {
     </div>
   );
 };
+
+export const DisplayAudioInfo: React.FC<{ stream: MediaStream }> = ({ stream }) => {
+  const hasAudio = stream.getAudioTracks().length;
+
+  return (
+    <div css={{
+      position: "absolute",
+      top: 8,
+      right: 8,
+    }}>
+      <WithTooltip
+        placement="top"
+        tooltip={
+          <Trans i18nKey={
+            `steps.video.${hasAudio ? "display-audio-shared" : "display-audio-not-shared"}`
+          }>
+            <strong>Note:</strong> Explanation.
+          </Trans>
+        }
+      >
+        <div css={{ ...OVERLAY_STYLE, fontSize: 15 }}>
+          <LuInfo /> {hasAudio ? <LuVolume2 /> : <LuVolumeX />}
+        </div>
+      </WithTooltip>
+    </div>
+  );
+};
+
+export const OVERLAY_STYLE = {
+  border: "none",
+  display: "inline-block",
+  backgroundColor: "rgba(0, 0, 0, 0.3)",
+  color: "white",
+  padding: 8,
+  backdropFilter: "invert(0.3) blur(4px)",
+  lineHeight: 0,
+  borderRadius: 10,
+  cursor: "pointer",
+  "&:hover, &:focus-visible": {
+    backgroundColor: "rgba(0, 0, 0, 0.5)",
+  },
+  "&:focus-visible": {
+    outline: "5px dashed white",
+    outlineOffset: -2.5,
+  },
+};
```
