Skip to content
This repository was archived by the owner on Nov 6, 2025. It is now read-only.

Commit 6a662ee

Browse files
小田喜陽彦
authored and committed
Do not initialize parameters with audio session if audio disabled
1 parent 2ce4105 commit 6a662ee

File tree

1 file changed

+25
-22
lines changed

1 file changed

+25
-22
lines changed

sdk/objc/native/src/audio/audio_device_ios.mm

Lines changed: 25 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -149,18 +149,6 @@ static void LogDeviceInfo() {
149149
#if !defined(NDEBUG)
150150
LogDeviceInfo();
151151
#endif
152-
// Store the preferred sample rate and preferred number of channels already
153-
// here. They have not been set and confirmed yet since configureForWebRTC
154-
// is not called until audio is about to start. However, it makes sense to
155-
// store the parameters now and then verify at a later stage.
156-
RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
157-
playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
158-
record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
159-
// Ensure that the audio device buffer (ADB) knows about the internal audio
160-
// parameters. Note that, even if we are unable to get a mono audio session,
161-
// we will always tell the I/O audio unit to do a channel format conversion
162-
// to guarantee mono on the "input side" of the audio unit.
163-
UpdateAudioDeviceBuffer();
164152
initialized_ = true;
165153
return InitStatus::OK;
166154
}
@@ -727,15 +715,17 @@ static void LogDeviceInfo() {
727715
// AttachAudioBuffer() is called at construction by the main class but check
728716
// just in case.
729717
RTC_DCHECK(audio_device_buffer_) << "AttachAudioBuffer must be called first";
730-
RTC_DCHECK_GT(playout_parameters_.sample_rate(), 0);
731-
RTC_DCHECK_GT(record_parameters_.sample_rate(), 0);
732-
RTC_DCHECK_EQ(playout_parameters_.channels(), 1);
733-
RTC_DCHECK_EQ(record_parameters_.channels(), 1);
734718
// Inform the audio device buffer (ADB) about the new audio format.
735-
audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
736-
audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
737-
audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
738-
audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
719+
if (playout_parameters_.is_valid()) {
720+
RTC_DCHECK_EQ(playout_parameters_.channels(), 1);
721+
audio_device_buffer_->SetPlayoutSampleRate(playout_parameters_.sample_rate());
722+
audio_device_buffer_->SetPlayoutChannels(playout_parameters_.channels());
723+
}
724+
if (record_parameters_.is_valid()) {
725+
RTC_DCHECK_EQ(record_parameters_.channels(), 1);
726+
audio_device_buffer_->SetRecordingSampleRate(record_parameters_.sample_rate());
727+
audio_device_buffer_->SetRecordingChannels(record_parameters_.channels());
728+
}
739729
}
740730

741731
void AudioDeviceIOS::SetupAudioBuffersForActiveAudioSession() {
@@ -771,9 +761,9 @@ static void LogDeviceInfo() {
771761
// number of audio frames.
772762
// Example: IO buffer size = 0.008 seconds <=> 128 audio frames at 16kHz.
773763
// Hence, 128 is the size we expect to see in upcoming render callbacks.
774-
playout_parameters_.reset(sample_rate, playout_parameters_.channels(), io_buffer_duration);
764+
playout_parameters_.reset(sample_rate, webRTCConfig.outputNumberOfChannels, io_buffer_duration);
775765
RTC_DCHECK(playout_parameters_.is_complete());
776-
record_parameters_.reset(sample_rate, record_parameters_.channels(), io_buffer_duration);
766+
record_parameters_.reset(sample_rate, webRTCConfig.inputNumberOfChannels, io_buffer_duration);
777767
RTC_DCHECK(record_parameters_.is_complete());
778768
RTC_LOG(LS_INFO) << " frames per I/O buffer: " << playout_parameters_.frames_per_buffer();
779769
RTC_LOG(LS_INFO) << " bytes per I/O buffer: " << playout_parameters_.GetBytesPerBuffer();
@@ -943,6 +933,19 @@ static void LogDeviceInfo() {
943933
// If we are ready to play or record, and if the audio session can be
944934
// configured, then initialize the audio unit.
945935
if (session.canPlayOrRecord) {
936+
// Store the preferred sample rate and preferred number of channels already
937+
// here. They have not been set and confirmed yet since configureForWebRTC
938+
// is not called until audio is about to start. However, it makes sense to
939+
// store the parameters now and then verify at a later stage.
940+
RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
941+
playout_parameters_.reset(config.sampleRate, config.outputNumberOfChannels);
942+
record_parameters_.reset(config.sampleRate, config.inputNumberOfChannels);
943+
// Ensure that the audio device buffer (ADB) knows about the internal audio
944+
// parameters. Note that, even if we are unable to get a mono audio session,
945+
// we will always tell the I/O audio unit to do a channel format conversion
946+
// to guarantee mono on the "input side" of the audio unit.
947+
UpdateAudioDeviceBuffer();
948+
946949
// There should be no audio unit at this point.
947950
if (!CreateAudioUnit()) {
948951
[session unlockForConfiguration];

0 commit comments

Comments (0)