Skip to content

Commit 339a7b5

Browse files
authored
Merge pull request #1813 from HaishinKit/feature/recording-sample
Add recording feature for iOS Example.
2 parents 7114295 + 26c222f commit 339a7b5

File tree

4 files changed

+91
-9
lines changed

4 files changed

+91
-9
lines changed

Examples/Examples.xcodeproj/project.pbxproj

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -870,8 +870,9 @@
870870
GCC_C_LANGUAGE_STANDARD = gnu17;
871871
GENERATE_INFOPLIST_FILE = YES;
872872
INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
873-
INFOPLIST_KEY_NSCameraUsageDescription = "";
874-
INFOPLIST_KEY_NSMicrophoneUsageDescription = "";
873+
INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming.";
874+
INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming.";
875+
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content.";
875876
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
876877
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
877878
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;
@@ -925,8 +926,9 @@
925926
GCC_C_LANGUAGE_STANDARD = gnu17;
926927
GENERATE_INFOPLIST_FILE = YES;
927928
INFOPLIST_KEY_CFBundleDisplayName = HaishinKit;
928-
INFOPLIST_KEY_NSCameraUsageDescription = "";
929-
INFOPLIST_KEY_NSMicrophoneUsageDescription = "";
929+
INFOPLIST_KEY_NSCameraUsageDescription = "Camera access is requested for live streaming.";
930+
INFOPLIST_KEY_NSMicrophoneUsageDescription = "Microphone access is requested for live streaming.";
931+
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Save the captured video and audio content.";
930932
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphoneos*]" = YES;
931933
"INFOPLIST_KEY_UIApplicationSceneManifest_Generation[sdk=iphonesimulator*]" = YES;
932934
"INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents[sdk=iphoneos*]" = YES;

Examples/iOS/PublishView.swift

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -59,24 +59,36 @@ struct PublishView: View {
5959
Text(source.description).tag(source)
6060
}
6161
}
62+
.frame(width: 200)
6263
.background(Color.black.opacity(0.2))
6364
.cornerRadius(16)
6465
.padding(16)
6566
}
6667
Spacer()
67-
Button(action: { Task {
68+
Button(action: {
69+
model.toggleRecording()
70+
}, label: {
71+
Image(systemName: model.isRecording ?
72+
"recordingtape.circle.fill" :
73+
"recordingtape.circle")
74+
.resizable()
75+
.scaledToFit()
76+
.foregroundColor(.white)
77+
.frame(width: 30, height: 30)
78+
})
79+
Button(action: {
6880
model.flipCamera()
69-
}}, label: {
81+
}, label: {
7082
Image(systemName:
7183
"arrow.trianglehead.2.clockwise.rotate.90.camera")
7284
.resizable()
7385
.scaledToFit()
7486
.foregroundColor(.white)
7587
.frame(width: 30, height: 30)
7688
})
77-
Button(action: { Task {
89+
Button(action: {
7890
model.toggleTorch()
79-
}}, label: {
91+
}, label: {
8092
Image(systemName: model.isTorchEnabled ?
8193
"flashlight.on.circle.fill" :
8294
"flashlight.off.circle.fill")

Examples/iOS/PublishViewModel.swift

Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
import AVFoundation
22
import HaishinKit
3+
import Photos
34
import RTCHaishinKit
45
import SwiftUI
56

@@ -20,11 +21,13 @@ final class PublishViewModel: ObservableObject {
2021
}
2122
}
2223
@Published private(set) var audioSources: [AudioSource] = []
24+
@Published private(set) var isRecording = false
2325
// If you want to use the multi-camera feature, please create a MediaMixer with a capture mode.
2426
// let mixer = MediaMixer(captureSessionMode: .multi)
2527
private(set) var mixer = MediaMixer(captureSessionMode: .multi)
2628
private var tasks: [Task<Void, Swift.Error>] = []
2729
private var session: (any Session)?
30+
private var recorder: StreamRecorder?
2831
private var currentPosition: AVCaptureDevice.Position = .back
2932
private var audioSourceService = AudioSourceService()
3033
@ScreenActor private var videoScreenObject: VideoTrackScreenObject?
@@ -65,6 +68,60 @@ final class PublishViewModel: ObservableObject {
6568
}
6669
}
6770

71+
func toggleRecording() {
72+
if isRecording {
73+
Task {
74+
do {
75+
// To use this in a product, you need to consider recovery procedures in case moving to the Photo Library fails.
76+
if let videoFile = try await recorder?.stopRecording() {
77+
Task.detached {
78+
try await PHPhotoLibrary.shared().performChanges {
79+
let creationRequest = PHAssetCreationRequest.forAsset()
80+
creationRequest.addResource(with: .video, fileURL: videoFile, options: nil)
81+
}
82+
}
83+
}
84+
} catch let error as StreamRecorder.Error {
85+
switch error {
86+
case .failedToFinishWriting(let error):
87+
self.error = error
88+
if let error {
89+
logger.warn(error)
90+
}
91+
default:
92+
self.error = error
93+
logger.warn(error)
94+
}
95+
}
96+
recorder = nil
97+
isRecording = false
98+
}
99+
} else {
100+
Task {
101+
let recorder = StreamRecorder()
102+
await mixer.addOutput(recorder)
103+
do {
104+
// Note: when starting a recording while attached to the Xcode debugger, the app may freeze for about 30 seconds (observed with iOS 26 + Xcode 26).
105+
try await recorder.startRecording()
106+
isRecording = true
107+
self.recorder = recorder
108+
} catch {
109+
self.error = error
110+
logger.warn(error)
111+
}
112+
for await error in await recorder.error {
113+
switch error {
114+
case .failedToAppend(let error):
115+
self.error = error
116+
default:
117+
self.error = error
118+
}
119+
break
120+
}
121+
}
122+
}
123+
}
124+
68125
func makeSession(_ preference: PreferenceViewModel) async {
69126
// Make session.
70127
do {

HaishinKit/Sources/Stream/StreamRecorder.swift

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,12 @@ public actor StreamRecorder {
8585
public var outputURL: URL? {
8686
return writer?.outputURL
8787
}
88+
/// An asynchronous stream of errors that occur during recording.
89+
public var error: AsyncStream<StreamRecorder.Error> {
90+
AsyncStream { continuation in
91+
self.continuation = continuation
92+
}
93+
}
8894
/// Indicates whether recording is currently in progress.
8995
public private(set) var isRecording = false
9096
/// The movie fragment interval in seconds.
@@ -111,7 +117,11 @@ public actor StreamRecorder {
111117
return settings.count == writer.inputs.count
112118
}
113119
private var writer: AVAssetWriter?
114-
private var continuation: AsyncStream<Error>.Continuation?
120+
private var continuation: AsyncStream<Error>.Continuation? {
121+
didSet {
122+
oldValue?.finish()
123+
}
124+
}
115125
private var writerInputs: [AVMediaType: AVAssetWriterInput] = [:]
116126
private var audioPresentationTime: CMTime = .zero
117127
private var videoPresentationTime: CMTime = .zero
@@ -206,6 +216,7 @@ public actor StreamRecorder {
206216
}
207217
defer {
208218
isRecording = false
219+
continuation = nil
209220
self.writer = nil
210221
self.writerInputs.removeAll()
211222
}

0 commit comments

Comments
 (0)