Refactored audio to use ClockedQueue

It now actually checks the clock and tries to mix the audio in sync
relative to the clock, but the cpal output doesn't yet try to sync
to the StreamInstant time.  Sound seems a lot better on Chrome in
wasm, and kind of better on Firefox despite frame skipping not
being supported yet, but it's way slower for some reason (12fps)
This commit is contained in:
transistor 2023-05-07 10:03:25 -07:00
parent 112bd8219b
commit 527f65c69b
14 changed files with 331 additions and 251 deletions

View File

@ -33,13 +33,12 @@ jobs:
run: |
cd emulator/frontends/pixels
just build moa-genesis
cp -R dist ../../../build
cp -R dist/* ../../../build
- name: Build docs
run: |
cargo doc --document-private-items --workspace
mkdir build/doc
cp -R target/doc build/doc
cp -R target/doc build
- name: Upload artifact
uses: actions/upload-pages-artifact@v1

View File

@ -401,3 +401,18 @@ General Work
because the upper bit is 1), so the number is negative. Even an unsigned addition will result in
the correct number at the end. It's just that shift that caused the issue
2023-05-06
- I replaced the controller, keyboard, and mouse updaters with queues to make it easier to implement
I/O devices, and avoid some of the mutability and timing issues of the old updaters.
Unfortunately I can't easily add time to those queues because webassembly doesn't support
std::time. There's another type called instant::Instant which I've used in the pixels frontend
because it does work with webassembly, but it's a bit uncomfortable. I'd rather not integrate
that into the core crate, which makes me want to not put the mixer in core but keep it in common,
where the `instant` dep is isolated to a smaller area
- this all started because I was trying to add a web controller, which I still haven't added due to
mutability and references with the pixels frontend
- for some reason, the new rust updater that calls `set_timeout` doesn't work too well in chrome,
making the fps drop down to 47-50 instead of 60. I'm not sure if that's because the older js
version does some funny clock stuff or if the concept itself adds a lot of overhead for some as
yet unknown reason

View File

@ -1,4 +1,5 @@
use std::time::Duration;
use std::ops::{Add, AddAssign, Sub, SubAssign, Mul, MulAssign, Div, DivAssign};
/// Type to use for storing femtoseconds
@ -184,6 +185,18 @@ impl Div<ClockDuration> for ClockDuration {
}
/// Convert a simulation `ClockDuration` into a std `Duration` by way of
/// whole nanoseconds.
// NOTE(review): ClockDuration stores femtoseconds; presumably `as_nanos`
// truncates sub-nanosecond precision — confirm against ClockDuration's impl.
impl From<ClockDuration> for Duration {
    fn from(value: ClockDuration) -> Self {
        Duration::from_nanos(value.as_nanos())
    }
}
/// Convert a std `Duration` into a simulation `ClockDuration` by way of
/// whole nanoseconds.
///
/// `Duration::as_nanos()` returns a `u128`, which can represent spans far
/// longer than a `u64` of nanoseconds (~584 years).  A plain `as u64` cast
/// would silently wrap in that case, so saturate to `u64::MAX` instead.
impl From<Duration> for ClockDuration {
    fn from(value: Duration) -> Self {
        ClockDuration::from_nanos(u64::try_from(value.as_nanos()).unwrap_or(u64::MAX))
    }
}
/// Represents time from the start of the simulation
///
@ -212,6 +225,14 @@ impl ClockTime {
None => None,
}
}
/// Subtract `duration` from this time, returning `None` on underflow
/// (i.e. when the result would be before the start of the simulation).
#[inline]
pub const fn checked_sub(self, duration: ClockDuration) -> Option<Self> {
    // Manual match instead of `Option::map` because this is a `const fn`,
    // and `Option::map` takes a closure, which isn't callable in const context.
    match self.0.checked_sub(duration) {
        Some(duration) => Some(Self(duration)),
        None => None,
    }
}
}
impl Add<ClockDuration> for ClockTime {

View File

@ -0,0 +1,25 @@
/// A single stereo audio sample: left channel in `.0`, right channel in `.1`.
// Derive Debug and PartialEq as well — public value types should be
// printable in diagnostics and comparable in tests.
#[derive(Copy, Clone, Debug, Default, PartialEq)]
pub struct Sample(pub f32, pub f32);

impl Sample {
    /// Construct a sample from left and right channel values.
    pub fn new(left: f32, right: f32) -> Self {
        Self(left, right)
    }
}
/// A run of stereo samples produced at a given sample rate, used as the
/// unit of data passed through the audio queues.
// Derive Debug as well so frames can appear in logs and error messages.
#[derive(Clone, Debug, Default)]
pub struct AudioFrame {
    /// The sample rate (samples per second) the frame's data was produced at.
    pub sample_rate: usize,
    /// The stereo sample data, in playback order.
    pub data: Vec<Sample>,
}

impl AudioFrame {
    /// Construct a frame from a sample rate and a buffer of stereo samples.
    pub fn new(sample_rate: usize, data: Vec<Sample>) -> Self {
        AudioFrame {
            sample_rate,
            data,
        }
    }
}

View File

@ -0,0 +1,41 @@
use std::sync::{Arc, Mutex};
use std::collections::VecDeque;
/// Create a connected sender/receiver pair that share a single FIFO queue.
pub fn event_queue<T>() -> (EventSender<T>, EventReceiver<T>) {
    let queue = Arc::new(Mutex::new(VecDeque::new()));
    (
        EventSender { queue: Arc::clone(&queue) },
        EventReceiver { queue },
    )
}

/// The sending half of an event queue; pushes events onto the shared FIFO.
pub struct EventSender<T> {
    queue: Arc<Mutex<VecDeque<T>>>,
}

impl<T> EventSender<T> {
    /// Append `event` to the back of the shared queue.
    pub fn send(&self, event: T) {
        self.queue.lock().unwrap().push_back(event);
    }

    //pub fn send_at_instant(&self, instant: Instant, event: T) {
    //    self.queue.lock().unwrap().push_back((instant, event));
    //}
}

/// The receiving half of an event queue; pops events in FIFO order.
pub struct EventReceiver<T> {
    queue: Arc<Mutex<VecDeque<T>>>,
}

impl<T> EventReceiver<T> {
    /// Remove and return the oldest queued event, or `None` if the queue is empty.
    pub fn receive(&self) -> Option<T> {
        self.queue.lock().unwrap().pop_front()
    }
}

View File

@ -1,11 +1,13 @@
mod traits;
mod keys;
mod audio;
mod controllers;
mod gfx;
mod input;
mod controllers;
mod keys;
mod mouse;
mod traits;
pub use self::audio::{Sample, AudioFrame};
pub use self::gfx::{Pixel, PixelEncoding, Frame, FrameSender, FrameReceiver, frame_queue};
pub use self::keys::{Key, KeyEvent};
pub use self::mouse::{MouseButton, MouseEventType, MouseEvent, MouseState};

View File

@ -4,11 +4,13 @@ use std::sync::{Arc, Mutex, MutexGuard};
use crate::{ClockTime, Error};
use crate::host::gfx::FrameReceiver;
use crate::host::audio::Sample;
use crate::host::keys::KeyEvent;
use crate::host::controllers::ControllerEvent;
use crate::host::mouse::MouseEvent;
use crate::host::input::EventSender;
pub trait Host {
fn add_pty(&self) -> Result<Box<dyn Tty>, Error> {
Err(Error::new("This frontend doesn't support PTYs"))
@ -44,9 +46,7 @@ pub trait Tty {
pub trait Audio {
fn samples_per_second(&self) -> usize;
fn space_available(&self) -> usize;
fn write_samples(&mut self, clock: ClockTime, buffer: &[f32]);
fn flush(&mut self);
fn write_samples(&mut self, clock: ClockTime, buffer: &[Sample]);
}
@ -99,13 +99,17 @@ impl<T: Clone> ClockedQueue<T> {
self.0.lock().unwrap().drain(..).last()
}
pub fn unpop(&self, clock: ClockTime, data: T) {
pub fn put_back(&self, clock: ClockTime, data: T) {
self.0.lock().unwrap().push_front((clock, data));
}
pub fn peek_clock(&self) -> Option<ClockTime> {
self.0.lock().unwrap().front().map(|(clock, _)| *clock)
}
pub fn is_empty(&self) -> bool {
self.0.lock().unwrap().is_empty()
}
}
@ -116,12 +120,6 @@ impl Audio for DummyAudio {
48000
}
fn space_available(&self) -> usize {
4800
}
fn write_samples(&mut self, _clock: ClockTime, _buffer: &[f32]) {}
fn flush(&mut self) {}
fn write_samples(&mut self, _clock: ClockTime, _buffer: &[Sample]) {}
}

View File

@ -1,47 +1,35 @@
use std::sync::{Arc, Mutex};
use std::collections::VecDeque;
use std::sync::{Arc, Mutex, MutexGuard};
use moa_core::{ClockTime, ClockDuration};
use moa_core::host::{Audio, ClockedQueue};
use moa_core::host::{Audio, Sample, AudioFrame, ClockedQueue};
pub const SAMPLE_RATE: usize = 48000;
pub struct Sample(f32, f32);
#[derive(Clone, Default)]
pub struct AudioFrame {
//pub sample_rate: usize,
pub data: Vec<(f32, f32)>,
}
pub struct AudioSource {
id: usize,
sample_rate: usize,
frame_size: usize,
mixer: Arc<Mutex<AudioMixer>>,
queue: ClockedQueue<AudioFrame>,
}
impl AudioSource {
pub fn new(mixer: Arc<Mutex<AudioMixer>>) -> Self {
// TODO should you move this to AudioMixer to make the interface easier to use?
// ie. let source: AudioSource = mixer.new_source();
pub fn new(mixer: AudioMixer) -> Self {
let queue = ClockedQueue::new(5000);
let (id, sample_rate, frame_size) = {
let mut mixer = mixer.lock().unwrap();
let (id, sample_rate) = {
let mut mixer = mixer.borrow_mut();
let id = mixer.add_source(queue.clone());
(
id,
mixer.sample_rate(),
mixer.frame_size(),
)
};
Self {
id,
sample_rate,
frame_size,
mixer,
queue,
}
}
@ -50,28 +38,16 @@ impl AudioSource {
self.id
}
pub fn space_available(&self) -> usize {
self.frame_size / 2
}
pub fn add_frame(&mut self, clock: ClockTime, buffer: &[f32]) {
let mut data = vec![];
pub fn add_frame(&mut self, clock: ClockTime, buffer: &[Sample]) {
let mut data = Vec::with_capacity(buffer.len());
for sample in buffer.iter() {
// TODO this is here to keep it quiet for testing, but should be removed later
let sample = 0.5 * *sample;
data.push((sample, sample));
data.push(*sample);
}
let frame = AudioFrame {
data,
};
let frame = AudioFrame::new(self.sample_rate, data);
self.queue.push(clock, frame);
}
pub fn flush(&mut self) {
self.mixer.lock().unwrap().check_next_frame();
}
}
@ -80,48 +56,39 @@ impl Audio for AudioSource {
self.sample_rate
}
fn space_available(&self) -> usize {
self.space_available()
}
fn write_samples(&mut self, clock: ClockTime, buffer: &[f32]) {
fn write_samples(&mut self, clock: ClockTime, buffer: &[Sample]) {
self.add_frame(clock, buffer);
self.flush();
}
fn flush(&mut self) {
self.mixer.lock().unwrap().check_next_frame();
}
}
#[derive(Clone)]
pub struct AudioMixer {
pub struct AudioMixer(Arc<Mutex<AudioMixerInner>>);
pub struct AudioMixerInner {
sample_rate: usize,
frame_size: usize,
sequence_num: usize,
clock: ClockTime,
sources: Vec<ClockedQueue<AudioFrame>>,
buffer_underrun: bool,
output: Arc<Mutex<AudioOutput>>,
output: AudioOutput,
}
impl AudioMixer {
pub fn new(sample_rate: usize) -> Arc<Mutex<AudioMixer>> {
Arc::new(Mutex::new(AudioMixer {
pub fn new(sample_rate: usize) -> AudioMixer {
AudioMixer(Arc::new(Mutex::new(AudioMixerInner {
sample_rate,
frame_size: 1280,
sequence_num: 0,
clock: ClockTime::START,
sources: vec![],
buffer_underrun: false,
output: AudioOutput::new(),
}))
})))
}
pub fn with_default_rate() -> Arc<Mutex<AudioMixer>> {
pub fn with_default_rate() -> AudioMixer {
AudioMixer::new(SAMPLE_RATE)
}
pub fn borrow_mut(&self) -> MutexGuard<'_, AudioMixerInner> {
self.0.lock().unwrap()
}
}
impl AudioMixerInner {
pub fn add_source(&mut self, source: ClockedQueue<AudioFrame>) -> usize {
self.sources.push(source);
self.sources.len() - 1
@ -131,7 +98,7 @@ impl AudioMixer {
self.sources.len()
}
pub fn get_sink(&mut self) -> Arc<Mutex<AudioOutput>> {
pub fn get_sink(&mut self) -> AudioOutput {
self.output.clone()
}
@ -143,120 +110,90 @@ impl AudioMixer {
ClockDuration::from_secs(1) / self.sample_rate as u64
}
pub fn frame_size(&self) -> usize {
self.frame_size
}
pub fn sequence_num(&self) -> usize {
self.sequence_num
}
pub fn resize_frame(&mut self, newlen: usize) {
self.frame_size = newlen;
}
pub fn check_next_frame(&mut self) {
if self.output.lock().unwrap().is_empty() {
self.assemble_frame();
}
}
pub fn assemble_frame(&mut self) {
self.frame_size = self.output.lock().unwrap().frame_size;
fn assemble_frame(&mut self, frame_start: ClockTime, frame_duration: ClockDuration) {
let sample_duration = self.sample_duration();
let mut data: Vec<(f32, f32)> = vec![(0.0, 0.0); self.frame_size];
let samples = (frame_duration / sample_duration) as usize;
if self.buffer_underrun {
self.buffer_underrun = false;
self.clock += sample_duration * data.len() as u64;
let empty_frame = AudioFrame { data };
self.output.lock().unwrap().add_frame(empty_frame.clone());
self.output.lock().unwrap().add_frame(empty_frame);
return;
}
let mut data = vec![Sample(0.0, 0.0); samples];
let lowest_clock = self.sources
.iter()
.fold(self.clock, |lowest_clock, source|
source
.peek_clock()
.map_or(lowest_clock, |c| c.min(lowest_clock)));
self.clock = self.clock.min(lowest_clock);
for source in &mut self.sources {
let mut i = 0;
while i < data.len() {
let (clock, frame) = match source.pop_next() {
Some(frame) => frame,
None => {
println!("buffer underrun");
self.buffer_underrun = true;
break;
},
};
let start = ((clock.duration_since(self.clock) / sample_duration) as usize).min(data.len() - 1);
let length = frame.data.len().min(data.len() - start);
data[start..start + length].iter_mut()
.zip(frame.data[..length].iter())
.for_each(|(d, s)|
*d = (
(d.0 + s.0).clamp(-1.0, 1.0),
(d.1 + s.1).clamp(-1.0, 1.0)
)
);
if length < frame.data.len() {
let adjusted_clock = clock + sample_duration * length as u64;
//println!("unpopping at clock {}, length {}", adjusted_clock, frame.data.len() - length);
source.unpop(adjusted_clock, AudioFrame { data: frame.data[length..].to_vec() });
for source in &self.sources {
let mut index = 0;
while index < data.len() {
if let Some((clock, mut frame)) = source.pop_next() {
index = (clock.duration_since(frame_start) / sample_duration) as usize;
let size = frame.data.len().min(data.len() - index);
frame.data.iter()
.zip(&mut data[index..index + size])
.for_each(|(source, dest)| {
dest.0 += source.0;
dest.1 += source.1;
});
index += size;
if size < frame.data.len() {
frame.data.drain(0..size);
source.put_back(clock, frame);
}
}
i = start + length;
}
}
self.clock += sample_duration * data.len() as u64;
self.output.lock().unwrap().add_frame(AudioFrame { data });
// Average each sample, and clamp it to the 1 to -1 range
for sample in data.iter_mut() {
sample.0 = (sample.0 / self.sources.len() as f32).clamp(-1.0, 1.0);
sample.1 = (sample.1 / self.sources.len() as f32).clamp(-1.0, 1.0);
}
self.output.add_frame(frame_start, AudioFrame::new(self.sample_rate, data));
}
}
use moa_core::{Transmutable, Steppable, Error, System};
impl Steppable for AudioMixer {
    /// Run the mixer as a simulated device: every 1ms of simulated time,
    /// assemble one mixed output frame covering the previous millisecond.
    fn step(&mut self, system: &System) -> Result<ClockDuration, Error> {
        let duration = ClockDuration::from_millis(1);
        // Mix the millisecond *ending* at the current clock, so the audio
        // sources have (hopefully) already queued their samples for that
        // span.  `checked_sub` skips mixing until at least 1ms has elapsed.
        // TODO should you make the clock be even further back to ensure the data is already written
        if let Some(start) = system.clock.checked_sub(duration) {
            self.borrow_mut().assemble_frame(start, duration);
        }
        Ok(duration)
    }
}
impl Transmutable for AudioMixer {
    /// Expose the mixer as a `Steppable` so it can be attached to the
    /// `System` as a device and stepped on the simulation clock.
    fn as_steppable(&mut self) -> Option<&mut dyn Steppable> {
        Some(self)
    }
}
// TODO this should be split up into a sender/receiver
#[derive(Clone)]
pub struct AudioOutput {
frame_size: usize,
sequence_num: usize,
output: VecDeque<AudioFrame>,
queue: ClockedQueue<AudioFrame>,
}
impl AudioOutput {
pub fn new() -> Arc<Mutex<Self>> {
Arc::new(Mutex::new(Self {
frame_size: 0,
sequence_num: 0,
output: VecDeque::with_capacity(2),
}))
pub fn new() -> Self {
Self {
queue: ClockedQueue::new(5000),
}
}
pub fn set_frame_size(&mut self, frame_size: usize) {
self.frame_size = frame_size
pub fn add_frame(&self, clock: ClockTime, frame: AudioFrame) {
self.queue.push(clock, frame);
}
pub fn add_frame(&mut self, frame: AudioFrame) {
self.output.push_back(frame);
self.sequence_num = self.sequence_num.wrapping_add(1);
//println!("added frame {}", self.sequence_num);
pub fn put_back(&self, clock: ClockTime, frame: AudioFrame) {
self.queue.put_back(clock, frame);
}
pub fn pop_next(&mut self) -> Option<AudioFrame> {
//println!("frame {} sent", self.sequence_num);
self.output.pop_front()
}
pub fn pop_latest(&mut self) -> Option<AudioFrame> {
self.output.drain(..).last()
pub fn receive(&self) -> Option<(ClockTime, AudioFrame)> {
self.queue.pop_next()
}
pub fn is_empty(&self) -> bool {
self.output.is_empty()
self.queue.is_empty()
}
}

View File

@ -1,5 +1,4 @@
use std::sync::{Arc, Mutex};
use cpal::{Stream, SampleRate, SampleFormat, StreamConfig, StreamInstant, OutputCallbackInfo, traits::{DeviceTrait, HostTrait, StreamTrait}};
use moa_core::{warn, error};
@ -12,7 +11,7 @@ pub struct CpalAudioOutput {
}
impl CpalAudioOutput {
pub fn create_audio_output(output: Arc<Mutex<AudioOutput>>) -> CpalAudioOutput {
pub fn create_audio_output(output: AudioOutput) -> CpalAudioOutput {
let device = cpal::default_host()
.default_output_device()
.expect("No sound output device available");
@ -26,22 +25,25 @@ impl CpalAudioOutput {
.into();
let data_callback = move |data: &mut [f32], info: &OutputCallbackInfo| {
let result = if let Ok(mut output) = output.lock() {
output.set_frame_size(data.len() / 2);
output.pop_next()
} else {
return;
};
if let Some(frame) = result {
let (start, middle, end) = unsafe { frame.data.align_to::<f32>() };
if !start.is_empty() || !end.is_empty() {
warn!("audio: frame wasn't aligned");
let mut index = 0;
while index < data.len() {
if let Some((clock, mut frame)) = output.receive() {
let size = (frame.data.len() * 2).min(data.len() - index);
frame.data.iter()
.zip(data[index..index + size].chunks_mut(2))
.for_each(|(sample, location)| {
location[0] = sample.0;
location[1] = sample.1;
});
index += size;
if size < frame.data.len() * 2 {
frame.data.drain(0..size / 2);
output.put_back(clock, frame);
}
} else {
warn!("missed an audio frame");
break;
}
let length = middle.len().min(data.len());
data[..length].copy_from_slice(&middle[..length]);
} else {
warn!("missed an audio frame");
}
};

View File

@ -7,7 +7,7 @@ use std::time::{Duration, Instant};
use minifb::{self, Key, MouseMode, MouseButton};
use clap::{App, Arg, ArgMatches};
use moa_core::{System, Error, ClockDuration};
use moa_core::{System, Error, ClockDuration, wrap_transmutable};
use moa_core::host::{Host, Audio, KeyEvent, MouseEvent, MouseState, ControllerDevice, ControllerEvent, EventSender, PixelEncoding, Frame, FrameReceiver};
use moa_common::{AudioMixer, AudioSource};
@ -99,7 +99,7 @@ pub struct MiniFrontendBuilder {
controllers: Option<EventSender<ControllerEvent>>,
keyboard: Option<EventSender<KeyEvent>>,
mouse: Option<EventSender<MouseEvent>>,
mixer: Option<Arc<Mutex<AudioMixer>>>,
mixer: Option<AudioMixer>,
finalized: bool,
}
@ -179,7 +179,7 @@ pub struct MiniFrontend {
pub keyboard: Option<EventSender<KeyEvent>>,
pub mouse: Option<EventSender<MouseEvent>>,
pub audio: Option<CpalAudioOutput>,
pub mixer: Arc<Mutex<AudioMixer>>,
pub mixer: AudioMixer,
}
impl MiniFrontend {
@ -188,7 +188,7 @@ impl MiniFrontend {
controllers: Option<EventSender<ControllerEvent>>,
keyboard: Option<EventSender<KeyEvent>>,
mouse: Option<EventSender<MouseEvent>>,
mixer: Arc<Mutex<AudioMixer>>,
mixer: AudioMixer,
) -> Self {
Self {
modifiers: 0,
@ -214,8 +214,11 @@ impl MiniFrontend {
}
}
if self.mixer.lock().unwrap().num_sources() != 0 && matches.occurrences_of("disable-audio") == 0 {
self.audio = Some(CpalAudioOutput::create_audio_output(self.mixer.lock().unwrap().get_sink()));
if self.mixer.borrow_mut().num_sources() != 0 && matches.occurrences_of("disable-audio") == 0 {
if let Some(system) = system.as_mut() {
system.add_device("mixer", wrap_transmutable(self.mixer.clone())).unwrap();
}
self.audio = Some(CpalAudioOutput::create_audio_output(self.mixer.borrow_mut().get_sink()));
}
let options = minifb::WindowOptions {

View File

@ -1,6 +1,4 @@
use std::sync::{Arc, Mutex};
use instant::Instant;
use pixels::{Pixels, SurfaceTexture};
use winit::event::{Event, VirtualKeyCode, WindowEvent, ElementState};
@ -22,23 +20,24 @@ pub type LoadSystemFn = fn (&mut PixelsFrontend, Vec<u8>) -> Result<System, Erro
pub struct PixelsFrontend {
video: Option<FrameReceiver>,
controllers: Option<EventSender<ControllerEvent>>,
mixer: Arc<Mutex<AudioMixer>>,
audio_output: CpalAudioOutput,
mixer: AudioMixer,
}
impl PixelsFrontend {
pub fn new() -> PixelsFrontend {
settings::get().run = true;
let mixer = AudioMixer::with_default_rate();
let audio_output = CpalAudioOutput::create_audio_output(mixer.lock().unwrap().get_sink());
PixelsFrontend {
video: None,
controllers: None,
mixer,
audio_output,
}
}
pub fn get_mixer(&self) -> AudioMixer {
self.mixer.clone()
}
}
impl Host for PixelsFrontend {
@ -68,6 +67,11 @@ pub async fn run_loop(host: PixelsFrontend) {
receiver.request_encoding(PixelEncoding::ABGR);
}
let mut audio_output = None;
if host.mixer.borrow_mut().num_sources() > 0 {
audio_output = Some(CpalAudioOutput::create_audio_output(host.mixer.borrow_mut().get_sink()));
}
let mut pixels = {
let window_size = window.inner_size();
let surface_texture =
@ -137,11 +141,13 @@ pub async fn run_loop(host: PixelsFrontend) {
}
}
let requested_mute = settings::get().mute;
if requested_mute != mute {
mute = requested_mute;
host.audio_output.set_mute(mute);
log::info!("setting mute to {}", mute);
if let Some(output) = audio_output.as_ref() {
let requested_mute = settings::get().mute;
if requested_mute != mute {
mute = requested_mute;
output.set_mute(mute);
log::info!("setting mute to {}", mute);
}
}
// Check if the run flag is no longer true, and exit the loop

View File

@ -12,7 +12,7 @@ use web_sys::Event;
use wasm_bindgen::JsCast;
use wasm_bindgen::closure::Closure;
use moa_core::{ClockDuration, System};
use moa_core::{ClockDuration, System, wrap_transmutable};
use crate::settings;
use crate::frontend::{self, PixelsFrontend, LoadSystemFn};
@ -104,7 +104,11 @@ impl LoadSystemFnHandle {
#[wasm_bindgen]
pub fn load_system(handle: &mut HostHandle, load: LoadSystemFnHandle) -> SystemHandle {
let system = load.0(&mut handle.0, settings::get().rom_data.clone()).unwrap();
let mut system = load.0(&mut handle.0, settings::get().rom_data.clone()).unwrap();
let mixer = handle.0.get_mixer();
if mixer.borrow_mut().num_sources() > 0 {
system.add_device("mixer", wrap_transmutable(mixer.clone())).unwrap();
}
SystemHandle(system)
}
@ -243,7 +247,7 @@ pub fn create_window<T>(event_loop: &EventLoop<T>) -> Rc<Window> {
#[wasm_bindgen]
pub fn start_system(handle: SystemHandle) -> Handle {
let emulator = Emulator::new(handle.0);
set_timeout(emulator.clone(), 17);
set_timeout(emulator.clone(), 0);
Handle(emulator)
}
@ -254,6 +258,7 @@ pub struct Emulator {
running: bool,
//frontend: PixelsFrontend,
system: System,
last_update: Instant,
}
impl Emulator {
@ -261,6 +266,7 @@ impl Emulator {
Rc::new(RefCell::new(Self {
running: false,
system,
last_update: Instant::now(),
}))
}
@ -269,6 +275,35 @@ impl Emulator {
}
}
/// This updater tries to work like the JS one, by simulating the amount of time that has passed
/// since the last update, but it doesn't work too well yet
fn update(emulator: Rc<RefCell<Emulator>>) {
let run_timer = Instant::now();
let last_update = {
let mut emulator = emulator.borrow_mut();
let last_update = emulator.last_update;
emulator.last_update = run_timer;
last_update
};
let diff = run_timer.duration_since(last_update);
let nanoseconds_per_frame = ClockDuration::from_nanos(diff.as_nanos() as u64);
//let nanoseconds_per_frame = (16_600_000 as f32 * settings::get().speed) as Clock;
if let Err(err) = emulator.borrow_mut().system.run_for(nanoseconds_per_frame) {
log::error!("{:?}", err);
}
let run_time = run_timer.elapsed().as_millis();
log::debug!("ran simulation for {:?}ms in {:?}ms", nanoseconds_per_frame / 1_000_000, run_time);
let running = emulator.borrow().running;
if running {
let remaining = (diff.as_millis() - run_time - (diff.as_millis() / 10)).max(1);
set_timeout(emulator, 16);
}
}
/*
/// This is the constant-time updater which always tries to simulate one frame
fn update(emulator: Rc<RefCell<Emulator>>) {
let run_timer = Instant::now();
let nanoseconds_per_frame = (16_600_000 as f32 * settings::get().speed) as u64;
@ -282,6 +317,7 @@ fn update(emulator: Rc<RefCell<Emulator>>) {
set_timeout(emulator, 17);
}
}
*/
fn set_timeout(emulator: Rc<RefCell<Emulator>>, timeout: i32) {
emulator.borrow_mut().running = true;

View File

@ -1,7 +1,7 @@
use moa_core::{info, warn, debug};
use moa_core::{System, Error, ClockTime, ClockDuration, Frequency, Address, Addressable, Steppable, Transmutable};
use moa_core::host::{Host, Audio};
use moa_core::host::{Host, Audio, Sample};
use moa_audio::SquareWave;
@ -111,31 +111,26 @@ impl Sn76489 {
impl Steppable for Sn76489 {
fn step(&mut self, system: &System) -> Result<ClockDuration, Error> {
let rate = self.source.samples_per_second();
let available = self.source.space_available();
let samples = if available < rate / 1000 { available } else { rate / 1000 };
let samples = rate / 1000;
if samples > 0 {
//if available >= rate / 1000 {
let mut buffer = vec![0.0; samples];
for buffered_sample in buffer.iter_mut().take(samples) {
let mut sample = 0.0;
let mut buffer = vec![Sample(0.0, 0.0); samples];
for buffered_sample in buffer.iter_mut().take(samples) {
let mut sample = 0.0;
for ch in 0..3 {
if self.tones[ch].on {
sample += self.tones[ch].get_sample();
}
for ch in 0..3 {
if self.tones[ch].on {
sample += self.tones[ch].get_sample();
}
if self.noise.on {
sample += self.noise.get_sample();
}
*buffered_sample = sample.clamp(-1.0, 1.0);
}
self.source.write_samples(system.clock, &buffer);
} else {
self.source.flush();
if self.noise.on {
sample += self.noise.get_sample();
}
let sample = sample.clamp(-1.0, 1.0);
*buffered_sample = Sample(sample, sample);
}
self.source.write_samples(system.clock, &buffer);
Ok(ClockDuration::from_millis(1)) // Every 1ms of simulated time
}

View File

@ -21,7 +21,7 @@ use lazy_static::lazy_static;
use moa_core::{debug, warn};
use moa_core::{System, Error, ClockTime, ClockDuration, Frequency, Address, Addressable, Steppable, Transmutable};
use moa_core::host::{Host, Audio};
use moa_core::host::{Host, Audio, Sample};
/// Table of shift values for each possible rate angle
@ -286,9 +286,9 @@ impl EnvelopeGenerator {
// to bitwise-and with 0xFFC instead, which will wrap the number to a 12-bit signed number, which when
// clamped to MAX_ENVELOPE will produce the same results
let new_envelope = self.envelope + (((!self.envelope * increment) as i16) >> 4) as u16;
if self.debug_name == "ch 2, op 0" {
println!("{:4x} {:4x} {:4x} {:4x} {:4x}", self.envelope, update_cycle, rate * 8 + update_cycle as usize, (((!self.envelope * increment) as i16) >> 4) as u16 & 0xFFFC, new_envelope);
}
//if self.debug_name == "ch 2, op 0" {
//println!("{:4x} {:4x} {:4x} {:4x} {:4x}", self.envelope, update_cycle, rate * 8 + update_cycle as usize, (((!self.envelope * increment) as i16) >> 4) as u16 & 0xFFFC, new_envelope);
//}
if new_envelope > self.envelope {
self.envelope_state = EnvelopeState::Decay;
self.envelope = 0;
@ -781,31 +781,31 @@ impl Ym2612 {
impl Steppable for Ym2612 {
fn step(&mut self, system: &System) -> Result<ClockDuration, Error> {
let rate = self.source.samples_per_second();
let available = self.source.space_available();
let samples = if available < rate / 1000 { available } else { rate / 1000 };
let samples = rate / 1000;
let sample_duration = ClockDuration::from_secs(1) / rate as u64;
//if self.source.space_available() >= samples {
let mut sample = 0.0;
let mut buffer = vec![0.0; samples];
for (i, buffered_sample) in buffer.iter_mut().enumerate().take(samples) {
let sample_clock = system.clock + (sample_duration * i as u64);
let fm_clock = sample_clock.as_duration() / self.fm_clock_period;
let mut sample = 0.0;
let mut buffer = vec![Sample(0.0, 0.0); samples];
for (i, buffered_sample) in buffer.iter_mut().enumerate().take(samples) {
let sample_clock = system.clock + (sample_duration * i as u64);
let fm_clock = sample_clock.as_duration() / self.fm_clock_period;
// Simulate each clock cycle, even if we skip one due to aliasing from the unequal sampling rate of 53,267 Hz
for clock in self.next_fm_clock..=fm_clock {
sample = self.get_sample(clock);
}
self.next_fm_clock = fm_clock + 1;
// The DAC uses an 8000 Hz sample rate, so we don't want to skip clocks
if self.dac.enabled {
sample += self.dac.get_sample();
}
*buffered_sample = sample.clamp(-1.0, 1.0);
// Simulate each clock cycle, even if we skip one due to aliasing from the unequal sampling rate of 53,267 Hz
for clock in self.next_fm_clock..=fm_clock {
sample = self.get_sample(clock);
}
self.source.write_samples(system.clock, &buffer);
//}
self.next_fm_clock = fm_clock + 1;
// The DAC uses an 8000 Hz sample rate, so we don't want to skip clocks
if self.dac.enabled {
sample += self.dac.get_sample();
}
// TODO add stereo output, which is supported by ym2612
let sample = sample.clamp(-1.0, 1.0);
*buffered_sample = Sample(sample, sample);
}
self.source.write_samples(system.clock, &buffer);
Ok(ClockDuration::from_millis(1)) // Every 1ms of simulated time
}