Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
- Add `Sample::bits_per_sample` method.
- Update `audio_thread_priority` to 0.34.
- AAudio: Configure buffer to ensure consistent callback buffer sizes.
- AAudio: Fix the buffer size range detection by querying the AudioService property correctly.
- ALSA: Improve `BufferSize::Fixed` precision and audio callback performance.
- ALSA: Change `BufferSize::Default` to use the device defaults.
- ALSA: Change card enumeration to work like `aplay -L` does.
Expand Down
62 changes: 0 additions & 62 deletions src/host/aaudio/android_media.rs

This file was deleted.

1 change: 1 addition & 0 deletions src/host/aaudio/java_interface.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
mod audio_features;
mod audio_manager;
mod definitions;
mod devices_info;
mod utils;
Expand Down
33 changes: 33 additions & 0 deletions src/host/aaudio/java_interface/audio_manager.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
use super::{
utils::{
get_context, get_property, get_system_service, with_attached, JNIEnv, JObject, JResult,
},
AudioManager, Context,
};

impl AudioManager {
/// Get the frames per buffer using Android Java API
pub fn get_frames_per_buffer() -> Result<i32, String> {
let context = get_context();

with_attached(context, |env, context| get_frames_per_buffer(env, &context))
.map_err(|error| error.to_string())
}
}

fn get_frames_per_buffer<'j>(env: &mut JNIEnv<'j>, context: &JObject<'j>) -> JResult<i32> {
let audio_manager = get_system_service(env, context, Context::AUDIO_SERVICE)?;

let frames_per_buffer = get_property(
env,
&audio_manager,
AudioManager::PROPERTY_OUTPUT_FRAMES_PER_BUFFER,
)?;

let frames_per_buffer_string = String::from(env.get_string(&frames_per_buffer)?);

// TODO: Use jni::errors::Error::ParseFailed instead of jni::errors::Error::JniCall once jni > v0.21.1 is released
frames_per_buffer_string
.parse::<i32>()
.map_err(|e| jni::errors::Error::JniCall(jni::errors::JniError::Unknown))
}
2 changes: 0 additions & 2 deletions src/host/aaudio/java_interface/definitions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@ impl PackageManager {
pub(crate) struct AudioManager;

impl AudioManager {
pub const PROPERTY_OUTPUT_SAMPLE_RATE: &'static str =
"android.media.property.OUTPUT_SAMPLE_RATE";
pub const PROPERTY_OUTPUT_FRAMES_PER_BUFFER: &'static str =
"android.media.property.OUTPUT_FRAMES_PER_BUFFER";

Expand Down
96 changes: 29 additions & 67 deletions src/host/aaudio/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
use std::cell::RefCell;
use std::cmp;
use std::convert::TryInto;
use std::time::{Duration, Instant};
Expand All @@ -7,30 +6,30 @@ use std::vec::IntoIter as VecIntoIter;
extern crate ndk;

use convert::{stream_instant, to_stream_instant};
use java_interface::{AudioDeviceDirection, AudioDeviceInfo};
use java_interface::{AudioDeviceDirection, AudioDeviceInfo, AudioManager};

use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, InputStreamTimestamp, OutputCallbackInfo,
OutputStreamTimestamp, PauseStreamError, PlayStreamError, SampleFormat, SampleRate,
SizedSample, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig,
SupportedStreamConfigRange, SupportedStreamConfigsError,
};

mod android_media;
mod convert;
mod java_interface;

use self::android_media::{get_audio_record_min_buffer_size, get_audio_track_min_buffer_size};
use self::ndk::audio::AudioStream;

// constants from android.media.AudioFormat
const CHANNEL_OUT_MONO: i32 = 4;
const CHANNEL_OUT_STEREO: i32 = 12;

// Android Java API supports up to 8 channels
// TODO: more channels available in native AAudio
const CHANNEL_MASKS: [i32; 2] = [
android_media::CHANNEL_OUT_MONO,
android_media::CHANNEL_OUT_STEREO,
];
// Maps channel masks to their corresponding channel counts
const CHANNEL_CONFIGS: [(i32, u16); 2] = [(CHANNEL_OUT_MONO, 1), (CHANNEL_OUT_STEREO, 2)];

const SAMPLE_RATES: [i32; 13] = [
5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176_400, 192_000,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I know this isn't yours, but now that I've read it, I think it should also support 12 and 24 kHz.

Expand Down Expand Up @@ -91,18 +90,8 @@ impl HostTrait for Host {
}
}

fn buffer_size_range_for_params(
is_output: bool,
sample_rate: i32,
channel_mask: i32,
android_format: i32,
) -> SupportedBufferSize {
let min_buffer_size = if is_output {
get_audio_track_min_buffer_size(sample_rate, channel_mask, android_format)
} else {
get_audio_record_min_buffer_size(sample_rate, channel_mask, android_format)
};
if min_buffer_size > 0 {
fn buffer_size_range() -> SupportedBufferSize {
if let Ok(min_buffer_size) = AudioManager::get_frames_per_buffer() {
SupportedBufferSize::Range {
min: min_buffer_size as u32,
max: i32::MAX as u32,
Expand All @@ -112,45 +101,29 @@ fn buffer_size_range_for_params(
}
}

fn default_supported_configs(is_output: bool) -> VecIntoIter<SupportedStreamConfigRange> {
// Have to "brute force" the parameter combinations with getMinBufferSize
fn default_supported_configs() -> VecIntoIter<SupportedStreamConfigRange> {
const FORMATS: [SampleFormat; 2] = [SampleFormat::I16, SampleFormat::F32];

let mut output = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_MASKS.len() * FORMATS.len());
let buffer_size = buffer_size_range();
let mut output = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_CONFIGS.len() * FORMATS.len());
for sample_format in &FORMATS {
let android_format = if *sample_format == SampleFormat::I16 {
android_media::ENCODING_PCM_16BIT
} else {
android_media::ENCODING_PCM_FLOAT
};
for (mask_idx, channel_mask) in CHANNEL_MASKS.iter().enumerate() {
let channel_count = mask_idx + 1;
for (_channel_mask, channel_count) in &CHANNEL_CONFIGS {
for sample_rate in &SAMPLE_RATES {
if let SupportedBufferSize::Range { min, max } = buffer_size_range_for_params(
is_output,
*sample_rate,
*channel_mask,
android_format,
) {
output.push(SupportedStreamConfigRange {
channels: channel_count as u16,
min_sample_rate: SampleRate(*sample_rate as u32),
max_sample_rate: SampleRate(*sample_rate as u32),
buffer_size: SupportedBufferSize::Range { min, max },
sample_format: *sample_format,
});
}
output.push(SupportedStreamConfigRange {
channels: *channel_count,
min_sample_rate: SampleRate(*sample_rate as u32),
max_sample_rate: SampleRate(*sample_rate as u32),
buffer_size: buffer_size.clone(),
sample_format: *sample_format,
});
}
}
}

output.into_iter()
}

fn device_supported_configs(
device: &AudioDeviceInfo,
is_output: bool,
) -> VecIntoIter<SupportedStreamConfigRange> {
fn device_supported_configs(device: &AudioDeviceInfo) -> VecIntoIter<SupportedStreamConfigRange> {
let sample_rates = if !device.sample_rates.is_empty() {
device.sample_rates.as_slice()
} else {
Expand All @@ -171,6 +144,7 @@ fn device_supported_configs(
&ALL_FORMATS
};

let buffer_size = buffer_size_range();
let mut output = Vec::with_capacity(sample_rates.len() * channel_counts.len() * formats.len());
for sample_rate in sample_rates {
for channel_count in channel_counts {
Expand All @@ -180,25 +154,13 @@ fn device_supported_configs(
// TODO: more channels available in native AAudio
continue;
}
let channel_mask = CHANNEL_MASKS[*channel_count as usize - 1];
for format in formats {
let (android_format, sample_format) = match format {
SampleFormat::I16 => (android_media::ENCODING_PCM_16BIT, SampleFormat::I16),
SampleFormat::F32 => (android_media::ENCODING_PCM_FLOAT, SampleFormat::F32),
_ => panic!("Unexpected format"),
};
let buffer_size = buffer_size_range_for_params(
is_output,
*sample_rate,
channel_mask,
android_format,
);
output.push(SupportedStreamConfigRange {
channels: cmp::min(*channel_count as u16, 2u16),
min_sample_rate: SampleRate(*sample_rate as u32),
max_sample_rate: SampleRate(*sample_rate as u32),
buffer_size,
sample_format,
buffer_size: buffer_size.clone(),
sample_format: *format,
});
}
}
Expand Down Expand Up @@ -339,19 +301,19 @@ impl DeviceTrait for Device {
&self,
) -> Result<Self::SupportedInputConfigs, SupportedStreamConfigsError> {
if let Some(info) = &self.0 {
Ok(device_supported_configs(info, false))
Ok(device_supported_configs(info))
} else {
Ok(default_supported_configs(false))
Ok(default_supported_configs())
}
}

fn supported_output_configs(
&self,
) -> Result<Self::SupportedOutputConfigs, SupportedStreamConfigsError> {
if let Some(info) = &self.0 {
Ok(device_supported_configs(info, true))
Ok(device_supported_configs(info))
} else {
Ok(default_supported_configs(true))
Ok(default_supported_configs())
}
}

Expand Down