Sort of working audio using queues

transistor 2022-10-08 13:26:17 -07:00
parent 721162684b
commit e4cd3dcd63
6 changed files with 292 additions and 76 deletions

View File

@ -10,5 +10,5 @@ pub mod audio;
pub use self::keys::{Key, KeyEvent};
pub use self::mouse::{MouseButton, MouseEventType, MouseEvent, MouseState};
pub use self::controllers::{ControllerDevice, ControllerEvent};
pub use self::traits::{Host, Tty, WindowUpdater, ControllerUpdater, KeyboardUpdater, MouseUpdater, Audio, BlitableSurface, HostData, DummyAudio};
pub use self::traits::{Host, Tty, WindowUpdater, ControllerUpdater, KeyboardUpdater, MouseUpdater, Audio, BlitableSurface, HostData, ClockedQueue, DummyAudio};

View File

@ -71,7 +71,7 @@ pub trait MouseUpdater: Send {
pub trait Audio {
fn samples_per_second(&self) -> usize;
fn space_available(&self) -> usize;
fn write_samples(&mut self, buffer: &[f32]);
fn write_samples(&mut self, clock: Clock, buffer: &[f32]);
fn flush(&mut self);
}
@ -118,9 +118,21 @@ impl<T: Clone> ClockedQueue<T> {
self.0.lock().unwrap().push_back((clock, data));
}
pub fn pop_next(&self) -> Option<(Clock, T)> {
self.0.lock().unwrap().pop_front()
}
pub fn pop_latest(&self) -> Option<(Clock, T)> {
self.0.lock().unwrap().drain(..).last()
}
pub fn unpop(&mut self, clock: Clock, data: T) {
self.0.lock().unwrap().push_front((clock, data));
}
pub fn peek_clock(&self) -> Option<Clock> {
self.0.lock().unwrap().front().map(|(clock, _)| *clock)
}
}
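For context, here is a minimal sketch of the ClockedQueue type whose methods are extended above, assuming it is a clonable handle to a shared VecDeque of (Clock, item) pairs and that Clock is moa_core's u64 nanosecond counter; the struct definition itself sits outside this hunk, so the exact shape is an assumption:

use std::collections::VecDeque;
use std::sync::{Arc, Mutex};

pub type Clock = u64; // assumed: nanoseconds of simulated time, as used by moa_core

// Assumed shape: cloning the queue clones the Arc, so the sim thread and the
// mixer share the same deque of timestamped items.
#[derive(Clone)]
pub struct ClockedQueue<T>(Arc<Mutex<VecDeque<(Clock, T)>>>);

impl<T: Clone> ClockedQueue<T> {
    pub fn new() -> Self {
        Self(Arc::new(Mutex::new(VecDeque::new())))
    }
}

The producer side pushes frames tagged with the simulated clock, while the consumer pops the oldest frame, peeks the next timestamp, or unpops a partially consumed frame with an adjusted clock, as shown in the hunk above.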
@ -135,7 +147,7 @@ impl Audio for DummyAudio {
4800
}
fn write_samples(&mut self, _buffer: &[f32]) {}
fn write_samples(&mut self, _clock: Clock, _buffer: &[f32]) {}
fn flush(&mut self) {}
}

View File

@ -1,39 +1,87 @@
use std::sync::{Arc, Mutex};
use std::collections::VecDeque;
use cpal::{Sample, Stream, SampleRate, SampleFormat, StreamConfig, traits::{DeviceTrait, HostTrait, StreamTrait}};
use moa_core::host::{HostData, Audio};
use moa_core::Clock;
use moa_core::host::{HostData, Audio, ClockedQueue};
use crate::circularbuf::CircularBuffer;
const SAMPLE_RATE: usize = 48000;
#[derive(Clone)]
pub struct AudioFrame {
data: Vec<f32>,
}
pub struct AudioSource {
id: usize,
sample_rate: usize,
frame_size: usize,
sequence_num: usize,
mixer: HostData<AudioMixer>,
mixer: Arc<Mutex<AudioMixer>>,
buffer: CircularBuffer<f32>,
queue: ClockedQueue<AudioFrame>,
}
impl AudioSource {
pub fn new(mixer: HostData<AudioMixer>) -> Self {
let sample_rate = mixer.lock().sample_rate();
let frame_size = mixer.lock().frame_size();
pub fn new(mixer: Arc<Mutex<AudioMixer>>) -> Self {
let queue = ClockedQueue::new();
let (id, sample_rate, frame_size) = {
let mut mixer = mixer.lock().unwrap();
let id = mixer.add_source(queue.clone());
(
id,
mixer.sample_rate(),
mixer.frame_size(),
)
};
let buffer = CircularBuffer::new(frame_size * 2, 0.0);
Self {
id,
sample_rate,
frame_size,
sequence_num: 0,
mixer,
buffer,
queue,
}
}
pub fn space_available(&self) -> usize {
self.buffer.free_space() / 2
//self.buffer.free_space() / 2
self.frame_size / 2
}
pub fn fill_with(&mut self, clock: Clock, buffer: &[f32]) {
let mut data = vec![];
//if self.buffer.free_space() > buffer.len() * 2 {
for sample in buffer.iter() {
// TODO this is here to keep it quiet for testing, but should be removed later
let sample = 0.5 * *sample;
data.push(sample);
data.push(sample);
}
//}
let frame = AudioFrame {
data, //: Vec::from(buffer)
};
//println!("synthesized {}: {:?}", self.id, frame.data);
self.queue.push(clock, frame);
self.flush();
}
pub fn flush(&mut self) {
self.mixer.lock().unwrap().check_next_frame();
}
/*
pub fn fill_with(&mut self, buffer: &[f32]) {
if self.buffer.free_space() > buffer.len() * 2 {
for sample in buffer.iter() {
@ -53,20 +101,31 @@ impl AudioSource {
let mixer_sequence_num = locked_mixer.sequence_num();
if mixer_sequence_num == self.sequence_num {
println!("repeated seq");
return;
}
self.sequence_num = mixer_sequence_num;
println!("flushing to audio mixer {}", self.sequence_num);
for i in 0..locked_mixer.buffer.len() {
locked_mixer.buffer[i] = (locked_mixer.buffer[i] + self.buffer.next().unwrap_or(0.0)).clamp(-1.0, 1.0);
}
//for i in 0..locked_mixer.buffer.len() {
// locked_mixer.buffer[i] = (locked_mixer.buffer[i] + self.buffer.next().unwrap_or(0.0)).clamp(-1.0, 1.0);
//}
self.queue.push(0, AudioFrame { data: (0..self.frame_size).map(|_| self.buffer.next().unwrap()).collect() });
self.frame_size = locked_mixer.frame_size();
self.buffer.resize(self.frame_size * 2);
}
}
*/
}
// Could have the audio source use the circular buffer and then publish to its queue, and then
// call the mixer to flush if possible, with the mixer (effectively running on the sim thread)
// building the frame and publishing it to its output. Frames in the source queues could even be
// 1 ms long, and the assembler could fetch multiple frames, adjusting for sim time. Either the
// circular buffer alone or the source queue alone would be enough; there is no need for both.
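// Current data flow, as implemented in this commit: a sound chip calls write_samples(clock,
// buffer) on its AudioSource; fill_with duplicates each sample into an interleaved stereo
// AudioFrame and pushes it onto the source's ClockedQueue; flush asks the AudioMixer to
// check_next_frame; assemble_frame merges the queued frames from all sources by timestamp into
// one output frame; and the cpal data_callback pops that frame from the shared AudioOutput.
// The CircularBuffer field is still allocated, but its use in fill_with is commented out.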
impl Audio for AudioSource {
fn samples_per_second(&self) -> usize {
self.sample_rate
@ -76,8 +135,8 @@ impl Audio for AudioSource {
self.space_available()
}
fn write_samples(&mut self, buffer: &[f32]) {
self.fill_with(buffer);
fn write_samples(&mut self, clock: Clock, buffer: &[f32]) {
self.fill_with(clock, buffer);
}
fn flush(&mut self) {
@ -85,34 +144,58 @@ impl Audio for AudioSource {
}
}
use moa_core::host::audio::SquareWave;
#[derive(Clone)]
pub struct AudioMixer {
sample_rate: usize,
//buffer: CircularBuffer<f32>,
buffer: Vec<f32>,
frame_size: usize,
sequence_num: usize,
clock: Clock,
sources: Vec<ClockedQueue<AudioFrame>>,
buffer_underrun: bool,
output: Arc<Mutex<AudioOutput>>,
test: SquareWave,
}
impl AudioMixer {
pub fn new(sample_rate: usize) -> HostData<AudioMixer> {
HostData::new(AudioMixer {
pub fn new(sample_rate: usize) -> Arc<Mutex<AudioMixer>> {
Arc::new(Mutex::new(AudioMixer {
sample_rate,
//buffer: CircularBuffer::new(1280 * 2, 0.0),
buffer: vec![0.0; 1280 * 2],
frame_size: 1280,
sequence_num: 0,
})
clock: 0,
sources: vec![],
buffer_underrun: false,
output: AudioOutput::new(),
test: SquareWave::new(600.0, sample_rate),
}))
}
pub fn new_default() -> HostData<AudioMixer> {
pub fn with_default_rate() -> Arc<Mutex<AudioMixer>> {
AudioMixer::new(SAMPLE_RATE)
}
pub fn add_source(&mut self, source: ClockedQueue<AudioFrame>) -> usize {
self.sources.push(source);
self.sources.len() - 1
}
pub fn get_sink(&mut self) -> Arc<Mutex<AudioOutput>> {
self.output.clone()
}
pub fn sample_rate(&self) -> usize {
self.sample_rate
}
pub fn nanos_per_sample(&self) -> Clock {
1_000_000_000 as Clock / self.sample_rate as Clock
}
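// At the default 48 kHz rate this is 1_000_000_000 / 48_000 = 20_833 ns per sample period
// (integer division). The frame buffers are interleaved stereo, which is why assemble_frame
// below multiplies a clock offset by 2 when converting it into a buffer index.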
pub fn frame_size(&self) -> usize {
self.buffer.len()
//self.buffer.len()
self.frame_size
}
pub fn sequence_num(&self) -> usize {
@ -120,17 +203,111 @@ impl AudioMixer {
}
pub fn resize_frame(&mut self, newlen: usize) {
if self.buffer.len() != newlen {
self.buffer = vec![0.0; newlen];
//if self.buffer.len() != newlen {
// self.buffer = vec![0.0; newlen];
//}
self.frame_size = newlen;
}
pub fn check_next_frame(&mut self) {
if self.output.lock().unwrap().is_empty() {
self.assemble_frame();
}
}
pub fn assemble_frame(&mut self) {
self.frame_size = self.output.lock().unwrap().frame_size;
let nanos_per_sample = self.nanos_per_sample();
let mut data: Vec<f32> = vec![0.0; self.frame_size];
if self.buffer_underrun {
self.buffer_underrun = false;
self.clock += nanos_per_sample * data.len() as Clock;
let empty_frame = AudioFrame { data };
self.output.lock().unwrap().add_frame(empty_frame.clone());
self.output.lock().unwrap().add_frame(empty_frame);
return;
}
/*
for i in (0..data.len()).step_by(2) {
let sample = self.test.next().unwrap() * 0.5;
data[i] = sample;
data[i + 1] = sample;
}
*/
let lowest_clock = self.sources
.iter()
.fold(self.clock, |lowest_clock, source|
source
.peek_clock()
.map_or(lowest_clock, |c| c.min(lowest_clock)));
self.clock = self.clock.min(lowest_clock);
for (id, source) in self.sources.iter_mut().enumerate() {
let mut i = 0;
while i < data.len() {
let (clock, frame) = match source.pop_next() {
Some(frame) => frame,
None => {
println!("buffer underrun");
self.buffer_underrun = true;
break;
},
};
//println!("clock: {} - {} = {}", clock, self.clock, clock - self.clock);
//if clock > self.clock {
let start = ((2 * (clock - self.clock) / nanos_per_sample) as usize).min(data.len() - 1);
let length = frame.data.len().min(data.len() - start);
//println!("source: {}, clock: {}, start: {}, end: {}, length: {}", id, clock, start, start + length, length);
data[start..start + length].iter_mut().zip(frame.data[..length].iter()).for_each(|(d, s)| *d = (*d + s).clamp(-1.0, 1.0));
if length < frame.data.len() {
let adjusted_clock = clock + nanos_per_sample * (length / 2) as Clock;
//println!("unpopping {} {}", clock, adjusted_clock);
source.unpop(adjusted_clock, AudioFrame { data: frame.data[length..].to_vec() });
}
//}
// TODO we need to handle the opposite case
i += length;
}
}
self.clock += nanos_per_sample * data.len() as Clock;
//println!("{:?}", data);
self.output.lock().unwrap().add_frame(AudioFrame { data });
}
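// A simplified, standalone restatement of the alignment step above (an illustrative helper,
// not part of the diff; the name mix_frame_into and the saturating_sub guard are mine): mix one
// timestamped source frame into an interleaved stereo output buffer whose first sample
// corresponds to `base_clock`, returning any unused tail so the caller can unpop it. For
// example, at 48 kHz (20_833 ns per sample) a frame timestamped 41_666 ns after base_clock
// gets start = (2 * 41_666) / 20_833 = 4, i.e. the third stereo sample of the output.
fn mix_frame_into(
    output: &mut [f32],
    base_clock: Clock,
    nanos_per_sample: Clock,
    frame_clock: Clock,
    frame: &[f32],
) -> Option<(Clock, Vec<f32>)> {
    // saturating_sub guards the underflow that assemble_frame avoids by clamping its clock
    // to the lowest source clock before this point.
    let start = ((2 * frame_clock.saturating_sub(base_clock) / nanos_per_sample) as usize)
        .min(output.len() - 1);
    let length = frame.len().min(output.len() - start);
    for (out, sample) in output[start..start + length].iter_mut().zip(&frame[..length]) {
        *out = (*out + sample).clamp(-1.0, 1.0);
    }
    if length < frame.len() {
        // The leftover starts (length / 2) sample periods after the frame's own timestamp.
        let adjusted_clock = frame_clock + nanos_per_sample * (length / 2) as Clock;
        Some((adjusted_clock, frame[length..].to_vec()))
    } else {
        None
    }
}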
/*
pub fn assembly_frame(&mut self, data: &mut [f32]) {
self.resize_frame(data.len());
for i in 0..data.len() {
data[i] = Sample::from(&self.buffer[i]);
self.buffer[i] = 0.0;
println!("assemble audio frame {}", self.sequence_num);
//for i in 0..data.len() {
// data[i] = Sample::from(&self.buffer[i]);
// self.buffer[i] = 0.0;
//}
//self.sources
// .iter()
// .filter_map(|queue| queue.pop_latest())
// .fold(data, |data, frame| {
// data.iter_mut()
// .zip(frame.1.data.iter())
// .for_each(|(d, s)| *d = (*d + s).clamp(-1.0, 1.0));
// data
// });
if let Some((_, last)) = self.output.pop_latest() {
self.last_frame = Some(last);
}
if let Some(last) = &self.last_frame {
data.copy_from_slice(&last.data);
}
println!("frame {} sent", self.sequence_num);
self.sequence_num = self.sequence_num.wrapping_add(1);
/*
@ -166,7 +343,7 @@ impl AudioMixer {
}
*/
}
*/
// TODO you need a way to add data to the mixer... the question is whether you need to keep track of real time
// If you have a counter that calculates the amount of time until the next sample based on the size of
// the buffer given to the data_callback, then when submitting data, the audio sources can know that they
@ -179,12 +356,50 @@ impl AudioMixer {
#[allow(dead_code)]
pub struct AudioOutput {
stream: Stream,
mixer: HostData<AudioMixer>,
frame_size: usize,
sequence_num: usize,
last_frame: Option<AudioFrame>,
output: VecDeque<AudioFrame>,
}
impl AudioOutput {
pub fn create_audio_output(mixer: HostData<AudioMixer>) -> AudioOutput {
pub fn new() -> Arc<Mutex<Self>> {
Arc::new(Mutex::new(Self {
frame_size: 1280,
sequence_num: 0,
last_frame: None,
output: VecDeque::with_capacity(2),
}))
}
pub fn add_frame(&mut self, frame: AudioFrame) {
self.output.push_back(frame);
self.sequence_num = self.sequence_num.wrapping_add(1);
println!("added frame {}", self.sequence_num);
}
pub fn pop_next(&mut self) -> Option<AudioFrame> {
println!("frame {} sent", self.sequence_num);
self.output.pop_front()
}
pub fn pop_latest(&mut self) -> Option<AudioFrame> {
self.output.drain(..).last()
}
pub fn is_empty(&self) -> bool {
self.output.is_empty()
}
}
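// Handshake between the two threads: the cpal data_callback below locks this AudioOutput,
// records the requested frame size, and pops the next assembled frame; on the sim side,
// AudioSource::flush calls AudioMixer::check_next_frame, which only assembles a new frame
// while this queue is empty, so at most one frame is buffered ahead of the audio thread,
// except after an underrun, when two silent frames are queued.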
#[allow(dead_code)]
pub struct CpalAudioOutput {
stream: Stream,
}
impl CpalAudioOutput {
pub fn create_audio_output(output: Arc<Mutex<AudioOutput>>) -> CpalAudioOutput {
let device = cpal::default_host()
.default_output_device()
.expect("No sound output device available");
@ -197,33 +412,21 @@ impl AudioOutput {
.with_sample_rate(SampleRate(SAMPLE_RATE as u32))
.into();
//let channels = config.channels as usize;
let data_callback = move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
let result = if let Ok(mut output) = output.lock() {
output.frame_size = data.len();
output.pop_next()
} else {
return;
};
let data_callback = {
let mixer = mixer.clone();
move |data: &mut [f32], _: &cpal::OutputCallbackInfo| {
mixer.lock().assembly_frame(data);
/*
let mut locked_mixer = mixer.lock();
//println!(">>> {} into {}", locked_mixer.buffer.used_space(), data.len());
// TODO these are quick hacks to delay or shrink the buffer if it's too small or big
if locked_mixer.buffer.used_space() < data.len() {
return;
}
if locked_mixer.buffer.used_space() > data.len() * 2 {
for _ in 0..(locked_mixer.buffer.used_space() - (data.len() * 2)) {
locked_mixer.buffer.next();
}
}
for addr in data.iter_mut() {
let sample = locked_mixer.buffer.next().unwrap_or(0.0);
*addr = Sample::from(&sample);
}
//locked_mixer.buffer.clear();
*/
if let Some(frame) = result {
//println!("needs {}, gets {}", data.len(), frame.data.len());
//println!("{:?}", frame.data);
let length = frame.data.len().min(data.len());
data[..length].copy_from_slice(&frame.data[..length]);
} else {
println!("missed a frame");
}
};
@ -237,9 +440,8 @@ impl AudioOutput {
stream.play().unwrap();
AudioOutput {
CpalAudioOutput {
stream,
mixer,
}
}
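Roughly how these pieces are wired together by the frontend in the next file (a sketch; the helper name setup_audio is hypothetical, and creating the emulated system and attaching the source to a sound chip are elided):

fn setup_audio() -> (Arc<Mutex<AudioMixer>>, AudioSource, CpalAudioOutput) {
    let mixer = AudioMixer::with_default_rate();
    let source = AudioSource::new(mixer.clone());    // registers a ClockedQueue with the mixer
    let output = CpalAudioOutput::create_audio_output(mixer.lock().unwrap().get_sink());
    // `source` implements the Audio trait used by the sound chips below, which call
    // write_samples(system.clock, &buffer) from their step functions.
    (mixer, source, output)
}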

View File

@ -8,10 +8,10 @@ use minifb::{self, Key, MouseMode, MouseButton};
use clap::{App, Arg, ArgMatches};
use moa_core::{System, Error, Clock};
use moa_core::host::{Host, HostData, ControllerUpdater, KeyboardUpdater, KeyEvent, MouseUpdater, MouseState, WindowUpdater, Audio, ControllerDevice};
use moa_core::host::{Host, ControllerUpdater, KeyboardUpdater, KeyEvent, MouseUpdater, MouseState, WindowUpdater, Audio, ControllerDevice};
use moa_core::host::gfx::Frame;
use moa_common::audio::{AudioOutput, AudioMixer, AudioSource};
use moa_common::audio::{AudioOutput, AudioMixer, AudioSource, CpalAudioOutput};
mod keys;
mod controllers;
@ -99,7 +99,7 @@ pub struct MiniFrontendBuilder {
pub controller: Option<Box<dyn ControllerUpdater>>,
pub keyboard: Option<Box<dyn KeyboardUpdater>>,
pub mouse: Option<Box<dyn MouseUpdater>>,
pub mixer: Option<HostData<AudioMixer>>,
pub mixer: Option<Arc<Mutex<AudioMixer>>>,
pub finalized: bool,
}
@ -110,7 +110,7 @@ impl MiniFrontendBuilder {
controller: None,
keyboard: None,
mouse: None,
mixer: Some(AudioMixer::new_default()),
mixer: Some(AudioMixer::with_default_rate()),
finalized: false,
}
}
@ -180,8 +180,8 @@ pub struct MiniFrontend {
pub controller: Option<Box<dyn ControllerUpdater>>,
pub keyboard: Option<Box<dyn KeyboardUpdater>>,
pub mouse: Option<Box<dyn MouseUpdater>>,
pub audio: Option<AudioOutput>,
pub mixer: HostData<AudioMixer>,
pub audio: Option<CpalAudioOutput>,
pub mixer: Arc<Mutex<AudioMixer>>,
}
impl MiniFrontend {
@ -190,7 +190,7 @@ impl MiniFrontend {
controller: Option<Box<dyn ControllerUpdater>>,
keyboard: Option<Box<dyn KeyboardUpdater>>,
mouse: Option<Box<dyn MouseUpdater>>,
mixer: HostData<AudioMixer>,
mixer: Arc<Mutex<AudioMixer>>,
) -> Self {
Self {
modifiers: 0,
@ -210,7 +210,7 @@ impl MiniFrontend {
}
if matches.occurrences_of("disable-audio") <= 0 {
self.audio = Some(AudioOutput::create_audio_output(self.mixer.clone()));
self.audio = Some(CpalAudioOutput::create_audio_output(self.mixer.lock().unwrap().get_sink()));
}
let mut options = minifb::WindowOptions::default();
@ -250,18 +250,19 @@ impl MiniFrontend {
let mut update_timer = Instant::now();
let mut last_frame = Frame::new(size.0, size.1);
while window.is_open() && !window.is_key_down(Key::Escape) {
let frame_time = update_timer.elapsed().as_micros();
let frame_time = update_timer.elapsed();
update_timer = Instant::now();
println!("new frame after {:?}us", frame_time);
println!("new frame after {:?}us", frame_time.as_micros());
let run_timer = Instant::now();
if let Some(system) = system.as_mut() {
system.run_for(nanoseconds_per_frame).unwrap();
//system.run_for(nanoseconds_per_frame).unwrap();
system.run_for(frame_time.as_nanos() as u64).unwrap();
//system.run_until_break().unwrap();
}
let sim_time = run_timer.elapsed().as_micros();
average_time = (average_time + sim_time) / 2;
println!("ran simulation for {:?}us in {:?}us (avg: {:?}us)", nanoseconds_per_frame / 1_000, sim_time, average_time);
println!("ran simulation for {:?}us in {:?}us (avg: {:?}us)", frame_time.as_nanos() / 1_000, sim_time, average_time);
if let Some(keys) = window.get_keys_pressed(minifb::KeyRepeat::No) {
for key in keys {

View File

@ -105,7 +105,7 @@ impl Sn76489 {
}
impl Steppable for Sn76489 {
fn step(&mut self, _system: &System) -> Result<ClockElapsed, Error> {
fn step(&mut self, system: &System) -> Result<ClockElapsed, Error> {
let rate = self.source.samples_per_second();
let available = self.source.space_available();
let samples = if available < rate / 1000 { available } else { rate / 1000 };
@ -128,7 +128,7 @@ impl Steppable for Sn76489 {
buffer[i] = sample.clamp(-1.0, 1.0);
}
self.source.write_samples(&buffer);
self.source.write_samples(system.clock, &buffer);
} else {
self.source.flush();
}

View File

@ -239,7 +239,7 @@ pub fn get_ch_op(bank: usize, reg: usize) -> (usize, usize) {
impl Steppable for Ym2612 {
fn step(&mut self, _system: &System) -> Result<ClockElapsed, Error> {
fn step(&mut self, system: &System) -> Result<ClockElapsed, Error> {
// TODO since you expect this step function to be called every 1ms of simulated time
// you could assume that you should produce (sample_rate / 1000) samples
@ -280,7 +280,8 @@ impl Steppable for Ym2612 {
buffer[i] = sample.clamp(-1.0, 1.0);
}
self.source.write_samples(&buffer);
//println!("synthesized: {:?}", buffer);
self.source.write_samples(system.clock, &buffer);
//}
Ok(1_000_000) // Every 1ms of simulated time
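// Cadence check (my arithmetic, not from the diff): rescheduling every 1_000_000 ns of
// simulated time at the mixer's default 48 kHz rate means each step should emit about
// 48_000 / 1000 = 48 samples, i.e. one ~1 ms AudioFrame per source per step, which is the
// granularity that assemble_frame lines up by timestamp.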