Almost working audio fft input for waterfall

This commit is contained in:
2024-05-09 19:23:07 -04:00
parent 63ec587d08
commit fc9e04ffd2
5 changed files with 416 additions and 28 deletions

View File

@@ -1,26 +1,14 @@
use eframe::{egui_glow, glow};
use egui::mutex::Mutex;
use std::sync::mpsc;
use std::sync::Arc;
mod waterfall;
use waterfall::Waterfall;
mod audio_fft;
use audio_fft::AudioFFT;
pub mod turbo_colormap;
mod deadbeef_rand {
    //! Tiny xorshift-style PRNG used to generate placeholder waterfall rows.
    //!
    //! Previously implemented with `static mut` + `unsafe`, which is a data
    //! race (undefined behavior) if `rand()` is ever called from two threads.
    //! Atomics keep the exact same update rule and output sequence while
    //! staying sound. (Relaxed ordering is fine: there is no inter-variable
    //! ordering requirement, only per-call read-modify-write of each cell.)
    use std::sync::atomic::{AtomicU32, Ordering};

    static RNG_SEED: AtomicU32 = AtomicU32::new(0x3d2faba7);
    static RNG_BEEF: AtomicU32 = AtomicU32::new(0xdeadbeef);

    /// Returns the next pseudo-random byte (low byte of the updated seed).
    pub fn rand() -> u8 {
        let seed = RNG_SEED.load(Ordering::Relaxed);
        let beef = RNG_BEEF.load(Ordering::Relaxed);
        // Same recurrence as the original: shift-xor mixed with the
        // companion "beef" stream.
        let new_seed = (seed << 7) ^ ((seed >> 25).wrapping_add(beef));
        let new_beef = (beef << 7) ^ ((beef >> 25).wrapping_add(0xdeadbeef));
        RNG_SEED.store(new_seed, Ordering::Relaxed);
        RNG_BEEF.store(new_beef, Ordering::Relaxed);
        (new_seed & 0xff) as u8
    }
}
use deadbeef_rand::rand;
const WF_SIZE: usize = 1024;
const FFT_SIZE: usize = 1024;
/// We derive Deserialize/Serialize so we can persist app state on shutdown.
pub struct TemplateApp {
@@ -29,7 +17,7 @@ pub struct TemplateApp {
value: f32,
/// Behind an `Arc<Mutex<…>>` so we can pass it to [`egui::PaintCallback`] and paint later.
waterfall: Arc<Mutex<Waterfall>>,
fft_sender: mpsc::Sender<Vec<u8>>,
_stream: AudioFFT,
}
impl TemplateApp {
@@ -41,7 +29,8 @@ impl TemplateApp {
// Load previous app state (if any).
// Note that you must enable the `persistence` feature for this to work.
let (tx, rx) = mpsc::channel();
let (stream, rx) = AudioFFT::new(FFT_SIZE).unwrap();
let wf_size = stream.output_len;
let gl = cc
.gl
.as_ref()
@@ -51,8 +40,8 @@ impl TemplateApp {
// Example stuff:
label: "Hello World!".to_owned(),
value: 2.7,
waterfall: Arc::new(Mutex::new(Waterfall::new(gl, WF_SIZE, WF_SIZE, rx))),
fft_sender: tx,
waterfall: Arc::new(Mutex::new(Waterfall::new(gl, wf_size, wf_size, rx))),
_stream: stream,
}
}
}
@@ -127,12 +116,6 @@ impl eframe::App for TemplateApp {
let _angle = response.drag_motion().x * 0.01;
let mut new_data = vec![0_u8; WF_SIZE];
for data in new_data.iter_mut() {
*data = rand();
}
self.fft_sender.send(new_data).unwrap();
// Clone locals so we can move them into the paint callback:
let waterfall = self.waterfall.clone();

83
src/app/audio_fft.rs Normal file
View File

@@ -0,0 +1,83 @@
use anyhow::{anyhow, Result};
use cpal::{
self,
traits::{DeviceTrait, HostTrait},
BufferSize, StreamConfig,
};
use realfft::RealFftPlanner;
use std::sync::mpsc;
/// Owns the audio input stream and describes the FFT frames it produces.
///
/// Keep this value alive for as long as you want audio to flow: dropping it
/// drops the `cpal::Stream` and stops the capture callback.
pub struct AudioFFT {
    /// The live cpal input stream driving the FFT callback.
    pub stream: cpal::Stream,
    /// Number of bytes per frame sent over the channel; `size / 2 + 1`
    /// (bin count of a real-input FFT of `size` samples).
    pub output_len: usize,
}
impl AudioFFT {
    /// Opens the default audio input device and starts streaming mono
    /// 44.1 kHz samples through a forward real FFT of `size` points.
    ///
    /// Returns the `AudioFFT` (which owns the running `cpal::Stream`) and the
    /// receiving end of a channel that yields one `Vec<u8>` of `output_len`
    /// (`size / 2 + 1`) bins per completed FFT frame.
    ///
    /// # Errors
    /// Fails when no default input device exists or the stream cannot be
    /// built with the requested config.
    pub fn new(size: usize) -> Result<(Self, mpsc::Receiver<Vec<u8>>)> {
        // A real-input FFT of N samples yields N/2 + 1 complex bins.
        let output_len = size / 2 + 1;
        // Channel used to hand finished frames from the audio callback to
        // whoever consumes them (the waterfall).
        let (tx, rx) = mpsc::channel();
        // Plan the forward FFT once up front; the plan is reused per frame.
        let mut fft_planner = RealFftPlanner::<f32>::new();
        let fft = fft_planner.plan_fft_forward(size);
        // Setup audio input
        let host = cpal::default_host();
        let device = host
            .default_input_device()
            // `ok_or_else` so the error is only built on the failure path.
            .ok_or_else(|| anyhow!("No input audio device found"))?;
        // Basic config that 'should' be supported by most devices
        let config = StreamConfig {
            channels: 1,
            sample_rate: cpal::SampleRate(44100),
            buffer_size: BufferSize::Default,
        };
        // Buffers are allocated once here and moved into the callback so the
        // real-time audio thread never allocates per invocation.
        let mut fft_in: Vec<f32> = Vec::with_capacity(size);
        let mut fft_out = fft.make_output_vec();
        let mut fft_scratch = fft.make_scratch_vec();
        let stream = device.build_input_stream(
            &config,
            move |mut data: &[f32], _: &cpal::InputCallbackInfo| {
                // Accumulate incoming samples until a full FFT frame is
                // ready; one callback may complete zero or several frames.
                while data.fill_vec(&mut fft_in, size).is_ok() {
                    assert_eq!(size, fft_in.len());
                    fft.process_with_scratch(&mut fft_in, &mut fft_out, &mut fft_scratch)
                        .unwrap();
                    fft_in.clear();
                    // NOTE(review): this maps each bin's *phase* (`arg()`,
                    // range [-pi, pi]) to u8 — negative phases saturate to 0
                    // in the `as u8` cast. A magnitude mapping is probably
                    // what a waterfall wants; confirm before changing.
                    let output: Vec<u8> =
                        fft_out.iter().map(|c| (c.arg() * 255.0) as u8).collect();
                    assert_eq!(output_len, output.len());
                    // Don't panic inside the audio callback when the UI side
                    // has dropped the receiver (e.g. during shutdown) — just
                    // stop producing frames.
                    if tx.send(output).is_err() {
                        return;
                    }
                }
            },
            move |err| log::error!("Audio Thread Error: {err}"),
            None,
        )?;
        Ok((Self { stream, output_len }, rx))
    }
}
/// Incrementally drain a sample source into a fixed-size staging buffer.
trait FillVec {
    /// Moves elements from the front of `self` into `out_vec` until either
    /// `out_vec` reaches `size` elements or `self` is exhausted.
    ///
    /// Returns `Ok(())` when `out_vec` holds exactly `size` elements, and
    /// `Err` otherwise — including when called with `self` already empty.
    /// On an `Err` after a partial copy, `self` has been fully consumed and
    /// `out_vec` keeps the partial fill for the next call.
    fn fill_vec(&mut self, out_vec: &mut Vec<f32>, size: usize) -> Result<()>;
}
impl FillVec for &[f32] {
    /// Drains as much of `self` as fits into `out_vec` (capped at `size`
    /// total elements), advancing `self` past the copied prefix.
    fn fill_vec(&mut self, out_vec: &mut Vec<f32>, size: usize) -> Result<()> {
        if self.is_empty() {
            anyhow::bail!("Self empty");
        }
        // How many more elements out_vec needs, and how many we can supply.
        let missing = size - out_vec.len();
        let take = missing.min(self.len());
        let (head, tail) = self.split_at(take);
        out_vec.extend_from_slice(head);
        *self = tail;
        if out_vec.len() == size {
            Ok(())
        } else {
            Err(anyhow!("out_vec not full"))
        }
    }
}

View File

@@ -185,7 +185,6 @@ impl Waterfall {
gl.tex_parameter_i32(TEXTURE_2D, TEXTURE_MAG_FILTER, NEAREST as i32);
check_for_gl_errors(&gl, "Set texture params");
//gl.tex_storage_2d(glow::TEXTURE_2D, 1, glow::R8, 300, 300);
gl.tex_image_2d(
glow::TEXTURE_2D,
0,
@@ -195,7 +194,8 @@ impl Waterfall {
0,
glow::RED,
glow::UNSIGNED_BYTE,
Some(&buffer),
//Some(&buffer), // This segfaults with large buffers
None,
);
check_for_gl_errors(&gl, "Initializing Texture");