Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
128 changes: 128 additions & 0 deletions Examples/iOS/AudioSourceService.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
@preconcurrency import AVFoundation
import Combine

/// Describes a selectable audio input source: a port (e.g. the built-in mic)
/// combined with one of its data sources (e.g. Front/Back), plus whether that
/// data source supports a stereo polar pattern.
///
/// `Equatable` is implied by `Hashable`, so it is not declared separately.
struct AudioSource: Sendable, Hashable, CustomStringConvertible {
    /// Placeholder value used before any real source has been discovered.
    static let empty = AudioSource(portName: "", dataSourceName: "", isSupportedStereo: false)

    /// The audio session port this source belongs to.
    let portName: String
    /// The data source name within the port.
    let dataSourceName: String
    /// True when the data source's supported polar patterns include stereo.
    let isSupportedStereo: Bool

    var description: String {
        // The suffix surfaces channel capability in pickers and logs.
        if isSupportedStereo {
            return "\(portName)(\(dataSourceName))(Stereo)"
        }
        return "\(portName)(\(dataSourceName))(Mono)"
    }
}

/// Observes AVAudioSession route changes and publishes the list of selectable
/// audio input sources (port + data source combinations) as an `AsyncStream`.
actor AudioSourceService {
    enum Error: Swift.Error {
        /// The requested source no longer matches any data source on the
        /// preferred input (e.g. the route changed before selection completed).
        case missingDataSource(_ source: AudioSource)
    }

    /// Currently discovered sources. Observers of `sourcesUpdates()` are
    /// notified only when the list actually changes.
    private(set) var sources: [AudioSource] = [] {
        didSet {
            guard sources != oldValue else {
                return
            }
            continuation?.yield(sources)
        }
    }
    private let session = AVAudioSession.sharedInstance()
    /// Active stream continuation; replacing it finishes the previous stream,
    /// so only the most recent caller of `sourcesUpdates()` stays subscribed.
    private var continuation: AsyncStream<[AudioSource]>.Continuation? {
        didSet {
            oldValue?.finish()
        }
    }

    init() {
        // Hop onto the actor to run isolated setup.
        Task { await _init() }
    }

    private func _init() async {
        sources = makeAudioSources()
        // Rebuild the source list on every audio route change.
        // NOTE(review): this task loops for the process lifetime and retains
        // the actor; add cancellation if the service can ever be discarded.
        Task {
            for await _ in NotificationCenter.default.notifications(named: AVAudioSession.routeChangeNotification)
                .compactMap({ $0.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt })
                .compactMap({ AVAudioSession.RouteChangeReason(rawValue: $0) }) {
                sources = makeAudioSources()
            }
        }
    }

    /// Configures and activates the shared audio session for stereo capture.
    /// Errors are logged, not thrown.
    func setUp() {
        do {
            // NOTE(review): an earlier revision left `mode` unspecified because
            // setting it reportedly disabled stereo capture, yet `.videoRecording`
            // is passed here — confirm stereo capture still works with this mode.
            try session.setCategory(.playAndRecord, mode: .videoRecording, options: [.defaultToSpeaker, .allowBluetooth])
            // It looks like this setting is required on iOS 18.5.
            try session.setPreferredInputNumberOfChannels(2)
            try session.setActive(true)
        } catch {
            logger.error(error)
        }
    }

    /// Returns a stream that immediately yields the current sources and then
    /// every subsequent change. Subscribing replaces any previous subscriber.
    func sourcesUpdates() -> AsyncStream<[AudioSource]> {
        AsyncStream { continuation in
            self.continuation = continuation
            continuation.yield(sources)
        }
    }

    /// Makes `audioSource` the preferred data source, requesting a stereo
    /// polar pattern when the hardware supports it.
    /// - Throws: `Error.missingDataSource` when the source cannot be resolved
    ///   against the current preferred input.
    func selectAudioSource(_ audioSource: AudioSource) throws {
        setPreferredInputBuiltInMic(true)
        guard let preferredInput = session.preferredInput,
              let dataSources = preferredInput.dataSources,
              let newDataSource = dataSources.first(where: { $0.dataSourceName == audioSource.dataSourceName }),
              let supportedPolarPatterns = newDataSource.supportedPolarPatterns else {
            throw Error.missingDataSource(audioSource)
        }
        do {
            if supportedPolarPatterns.contains(.stereo) {
                try newDataSource.setPreferredPolarPattern(.stereo)
            }
            try preferredInput.setPreferredDataSource(newDataSource)
        } catch {
            // Best effort: a failed pattern/data-source change is logged, not fatal.
            logger.warn(error)
        }
    }

    /// Builds the current source list from the session's preferred input.
    private func makeAudioSources() -> [AudioSource] {
        // When no data sources are exposed (e.g. an external mic is attached),
        // clear the built-in-mic preference and use the system default input.
        if session.inputDataSources?.isEmpty == true {
            setPreferredInputBuiltInMic(false)
        } else {
            setPreferredInputBuiltInMic(true)
        }
        guard let preferredInput = session.preferredInput else {
            return []
        }
        // Reuse the bound `preferredInput` rather than re-reading the session.
        return (preferredInput.dataSources ?? []).map { dataSource in
            AudioSource(
                portName: preferredInput.portName,
                dataSourceName: dataSource.dataSourceName,
                isSupportedStereo: dataSource.supportedPolarPatterns?.contains(.stereo) ?? false
            )
        }
    }

    /// Sets the built-in mic as preferred input (`true`) or clears the
    /// preference (`false`). Errors are logged and swallowed.
    private func setPreferredInputBuiltInMic(_ isEnabled: Bool) {
        do {
            if isEnabled {
                guard let availableInputs = session.availableInputs,
                      let builtInMicInput = availableInputs.first(where: { $0.portType == .builtInMic }) else {
                    return
                }
                try session.setPreferredInput(builtInMicInput)
            } else {
                try session.setPreferredInput(nil)
            }
        } catch {
            logger.warn(error)
        }
    }
}
18 changes: 10 additions & 8 deletions Examples/iOS/IngestView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,16 @@ struct IngestView: View {
}
VStack(alignment: .trailing) {
HStack(spacing: 16) {
if !model.audioSources.isEmpty {
Picker("AudioSource", selection: $model.audioSource) {
ForEach(model.audioSources, id: \.description) { source in
Text(source.description).tag(source)
}
}
.background(Color.black.opacity(0.2))
.cornerRadius(16)
.padding(16)
}
Spacer()
Button(action: { Task {
model.flipCamera()
Expand Down Expand Up @@ -137,14 +147,6 @@ struct IngestView: View {
}
}
.onAppear {
let session = AVAudioSession.sharedInstance()
do {
// If you set the "mode" parameter, stereo capture is not possible, so it is left unspecified.
try session.setCategory(.playAndRecord, options: [.defaultToSpeaker, .allowBluetooth])
try session.setActive(true)
} catch {
logger.error(error)
}
model.startRunning(preference)
}
.onDisappear {
Expand Down
33 changes: 32 additions & 1 deletion Examples/iOS/IngestViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,22 @@ final class IngestViewModel: ObservableObject {
@Published var isShowError = false
@Published private(set) var isTorchEnabled = false
@Published private(set) var readyState: SessionReadyState = .closed
@Published var audioSource: AudioSource = .empty {
didSet {
guard audioSource != oldValue else {
return
}
selectAudioSource(audioSource)
}
}
@Published private(set) var audioSources: [AudioSource] = []
// If you want to use the multi-camera feature, please make create a MediaMixer with a capture mode.
// let mixer = MediaMixer(captureSesionMode: .multi)
private(set) var mixer = MediaMixer(captureSessionMode: .multi)
private var tasks: [Task<Void, Swift.Error>] = []
private var session: (any Session)?
private var currentPosition: AVCaptureDevice.Position = .back
private var audioSourceService = AudioSourceService()
@ScreenActor private var videoScreenObject: VideoTrackScreenObject?
@ScreenActor private var currentVideoEffect: VideoEffect?

Expand Down Expand Up @@ -99,6 +109,11 @@ final class IngestViewModel: ObservableObject {

func startRunning(_ preference: PreferenceViewModel) {
Task {
await audioSourceService.setUp()
await mixer.configuration { session in
// It is required for the stereo setting.
session.automaticallyConfiguresApplicationAudioSession = false
}
// SetUp a mixer.
await mixer.setMonitoringEnabled(DeviceUtil.isHeadphoneConnected())
var videoMixerSettings = await mixer.videoMixerSettings
Expand All @@ -107,7 +122,6 @@ final class IngestViewModel: ObservableObject {
// Attach devices
let back = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: currentPosition)
try? await mixer.attachVideo(back, track: 0)
try? await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
let front = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
try? await mixer.attachVideo(front, track: 1) { videoUnit in
videoUnit.isVideoMirrored = true
Expand All @@ -129,6 +143,14 @@ final class IngestViewModel: ObservableObject {
await mixer.screen.backgroundColor = UIColor.black.cgColor
try? await mixer.screen.addChild(videoScreenObject)
}
Task {
for await sources in await audioSourceService.sourcesUpdates() {
audioSources = sources
if let first = sources.first, audioSource == .empty {
audioSource = first
}
}
}
}

func stopRunning() {
Expand Down Expand Up @@ -229,6 +251,15 @@ final class IngestViewModel: ObservableObject {
}
}
}

private func selectAudioSource(_ audioSource: AudioSource) {
Task {
try await audioSourceService.selectAudioSource(audioSource)
await mixer.stopCapturing()
try await mixer.attachAudio(AVCaptureDevice.default(for: .audio))
await mixer.startCapturing()
}
}
}

extension IngestViewModel: MTHKSwiftUiView.PreviewSource {
Expand Down