Compare commits


16 Commits

15 changed files with 2639 additions and 1255 deletions

.gitignore vendored (1 line changed)

@@ -1,3 +1,4 @@
 target
 .DS_Store
 dist
+heaptrack.*.zst

Cargo.lock generated (3093 lines changed)

File diff suppressed because it is too large

Cargo.toml

@@ -7,38 +7,39 @@ edition = "2021"
 # Common dependencies
 [dependencies]
-anyhow = "1.0.83"
+anyhow = "1.0.98"
-cpal = "0.15.3"
+cpal = "0.17.1"
-egui = "0.27.0"
+egui = "0.31"
-egui_plot = "0.27.2"
+egui_plot = "0.32"
-log = "0.4.21"
+log = "0.4.27"
-realfft = "3.3.0"
+realfft = "3.4.0"
 # eframe features for non android targets
 [target.'cfg(not(target_os = "android"))'.dependencies.eframe]
-version = "0.27.0"
+version = "0.31"
 default-features = false
-features = ["accesskit", "default_fonts", "glow"]
+features = ["accesskit", "default_fonts", "glow", "wayland", "x11"]
 # eframe features for android targets
 [target.'cfg(target_os = "android")'.dependencies.eframe]
-version = "0.27.0"
+version = "0.31"
 default-features = false
 features = ["accesskit", "default_fonts", "glow", "android-native-activity"]
 # android only dependencies
 [target.'cfg(target_os = "android")'.dependencies]
-android_logger = "0.13.3"
+android_logger = "0.15.0"
 #android-activity = { version = "0.5", features = ["native-activity"] }
-winit = { version = "0.29.4", features = ["android-native-activity"] }
+winit = { version = "0.30.11", features = ["android-native-activity"] }
 # native only dependencies
 [target.'cfg(all(not(target_arch = "wasm32"), not(target_os = "android")))'.dependencies]
-env_logger = "0.10"
+env_logger = "0.11"
 # web only dependencies
 [target.'cfg(target_arch = "wasm32")'.dependencies]
 wasm-bindgen-futures = "0.4"
+web-sys = "0.3.70" # to access the DOM (to hide the loading text)
 [profile.release]
 opt-level = 2

assets/manifest.json

@@ -3,18 +3,18 @@
   "short_name": "egui-template-pwa",
   "icons": [
     {
-      "src": "./icon-256.png",
+      "src": "./assets/icon-256.png",
       "sizes": "256x256",
       "type": "image/png"
     },
     {
-      "src": "./maskable_icon_x512.png",
+      "src": "./assets/maskable_icon_x512.png",
       "sizes": "512x512",
       "type": "image/png",
       "purpose": "any maskable"
     },
     {
-      "src": "./icon-1024.png",
+      "src": "./assets/icon-1024.png",
       "sizes": "1024x1024",
       "type": "image/png"
     }

index.html

@@ -17,16 +17,16 @@
     <link data-trunk rel="icon" href="assets/favicon.ico">
-    <link data-trunk rel="copy-file" href="assets/sw.js" />
+    <link data-trunk rel="copy-file" href="assets/sw.js"/>
-    <link data-trunk rel="copy-file" href="assets/manifest.json" />
+    <link data-trunk rel="copy-file" href="assets/manifest.json"/>
-    <link data-trunk rel="copy-file" href="assets/icon-1024.png" />
+    <link data-trunk rel="copy-file" href="assets/icon-1024.png" data-target-path="assets"/>
-    <link data-trunk rel="copy-file" href="assets/icon-256.png" />
+    <link data-trunk rel="copy-file" href="assets/icon-256.png" data-target-path="assets"/>
-    <link data-trunk rel="copy-file" href="assets/icon_ios_touch_192.png" />
+    <link data-trunk rel="copy-file" href="assets/icon_ios_touch_192.png" data-target-path="assets"/>
-    <link data-trunk rel="copy-file" href="assets/maskable_icon_x512.png" />
+    <link data-trunk rel="copy-file" href="assets/maskable_icon_x512.png" data-target-path="assets"/>
     <link rel="manifest" href="manifest.json">
-    <link rel="apple-touch-icon" href="icon_ios_touch_192.png">
+    <link rel="apple-touch-icon" href="assets/icon_ios_touch_192.png">
     <meta name="theme-color" media="(prefers-color-scheme: light)" content="white">
     <meta name="theme-color" media="(prefers-color-scheme: dark)" content="#404040">
@@ -60,15 +60,16 @@
            width: 100%;
        }
-        /* Position canvas in center-top: */
+        /* Make canvas fill entire document: */
        canvas {
            margin-right: auto;
            margin-left: auto;
            display: block;
            position: absolute;
-            top: 0%;
+            top: 0;
-            left: 50%;
+            left: 0;
-            transform: translate(-50%, 0%);
+            width: 100%;
+            height: 100%;
        }
        .centered {
@@ -114,7 +115,6 @@
                transform: rotate(360deg);
            }
        }
    </style>
</head>
@@ -123,6 +123,14 @@
    <!-- the id is hardcoded in main.rs . so, make sure both match. -->
    <canvas id="the_canvas_id"></canvas>
+    <!-- the loading spinner will be removed in main.rs -->
+    <div class="centered" id="loading_text">
+        <p style="font-size:16px">
+            Loading…
+        </p>
+        <div class="lds-dual-ring"></div>
+    </div>
    <!--Register Service Worker. this will cache the wasm / js scripts for offline use (for PWA functionality). -->
    <!-- Force refresh (Ctrl + F5) to load the latest files instead of cached files -->
    <script>


@@ -5,6 +5,6 @@
 # to the user in the error, instead of "error: invalid channel name '[toolchain]'".
 [toolchain]
-channel = "1.76.0"
+channel = "1.87.0"
 components = [ "rustfmt", "clippy" ]
 targets = [ "wasm32-unknown-unknown", "aarch64-linux-android" ]

src/app.rs

@@ -1,25 +1,29 @@
 use eframe::{egui_glow, glow};
-use egui::mutex::Mutex;
+use egui::{mutex::Mutex, ScrollArea};
 use std::sync::Arc;
+use crate::backend::{self, Backends};
 pub mod debug_plot;
 use debug_plot::DebugPlots;
 mod waterfall;
 use waterfall::Waterfall;
-mod audio_fft;
+mod fft;
-use audio_fft::AudioFFT;
+use fft::Fft;
 pub mod turbo_colormap;
 const FFT_SIZE: usize = 1024;
 pub struct TemplateApp {
     plots: DebugPlots,
-    // Example stuff:
-    label: String,
-    value: f32,
     /// Behind an `Arc<Mutex<…>>` so we can pass it to [`egui::PaintCallback`] and paint later.
     waterfall: Arc<Mutex<Waterfall>>,
-    _stream: AudioFFT,
+    fft: Fft,
+    backends: backend::Backends,
+    selected_backend: usize,
+    open_device: Option<Box<dyn backend::Device>>,
+    device_window_open: bool,
+    side_panel_open: bool,
 }
 impl TemplateApp {
@@ -32,8 +36,10 @@ impl TemplateApp {
         // Note that you must enable the `persistence` feature for this to work.
         let plots = DebugPlots::new();
-        let (stream, rx) = AudioFFT::new(FFT_SIZE, plots.get_sender()).unwrap();
-        let wf_size = stream.output_len;
+        let (fft, rx) = Fft::new(FFT_SIZE, plots.get_sender()).unwrap();
+        let wf_size = fft.output_len;
         let gl = cc
             .gl
             .as_ref()
@@ -41,11 +47,13 @@ impl TemplateApp {
         Self {
             plots,
-            // Example stuff:
-            label: "Hello World!".to_owned(),
-            value: 2.7,
             waterfall: Arc::new(Mutex::new(Waterfall::new(gl, wf_size, wf_size, rx))),
-            _stream: stream,
+            fft,
+            backends: Backends::default(),
+            selected_backend: 0,
+            open_device: None,
+            device_window_open: true,
+            side_panel_open: false,
         }
     }
 }
@@ -64,81 +72,145 @@ impl eframe::App for TemplateApp {
     /// Called each time the UI needs repainting, which may be many times per second.
     fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
-        // Put your widgets into a `SidePanel`, `TopBottomPanel`, `CentralPanel`, `Window` or `Area`.
-        // For inspiration and more examples, go to https://emilk.github.io/egui
         ctx.request_repaint();
         self.plots.update_plots();
+        // Menu bar panel
         egui::TopBottomPanel::top("top_panel").show(ctx, |ui| {
-            // The top panel is often a good place for a menu bar:
             egui::menu::bar(ui, |ui| {
                 // NOTE: no File->Quit on web pages!
                 let is_web = cfg!(target_arch = "wasm32");
-                if !is_web {
-                    ui.menu_button("File", |ui| {
+                ui.menu_button("File", |ui| {
+                    if ui.button("Open Device").clicked() {
+                        self.device_window_open = true;
+                    }
+                    if self.open_device.is_some() {
+                        if ui.button("Close Device").clicked() {
+                            if let Some(dev) = self.open_device.take() {
+                                dev.close();
+                            }
+                        }
+                    }
+                    if !is_web {
                         if ui.button("Quit").clicked() {
                             ctx.send_viewport_cmd(egui::ViewportCommand::Close);
                         }
-                    });
-                }
+                    }
+                });
+                ui.menu_button("View", |ui| {
+                    ui.checkbox(&mut self.side_panel_open, "Side Panel");
+                });
                 self.plots.render_menu_buttons(ui);
                 ui.add_space(16.0);
-                egui::widgets::global_dark_light_mode_buttons(ui);
+                egui::widgets::global_theme_preference_buttons(ui);
            });
        });
-        self.plots.render_plot_windows(ctx);
-        egui::CentralPanel::default().show(ctx, |ui| {
-            // The central panel the region left after adding TopPanel's and SidePanel's
-            ui.heading("eframe template");
-            ui.horizontal(|ui| {
-                ui.label("Write something: ");
-                ui.text_edit_singleline(&mut self.label);
-            });
-            ui.add(egui::Slider::new(&mut self.value, 0.0..=10.0).text("value"));
-            if ui.button("Increment").clicked() {
-                self.value += 1.0;
-            }
-            ui.separator();
-            ui.horizontal(|ui| {
-                ui.spacing_mut().item_spacing.x = 0.0;
-                ui.label("The texture is being painted using ");
-                ui.hyperlink_to("glow", "https://github.com/grovesNL/glow");
-                ui.label(" (OpenGL).");
-            });
-            ui.with_layout(egui::Layout::bottom_up(egui::Align::LEFT), |ui| {
-                powered_by_egui_and_eframe(ui);
-                egui::warn_if_debug_build(ui);
-                egui::Frame::canvas(ui.style()).show(ui, |ui| {
-                    let available_space = ui.available_size();
-                    let (rect, response) =
-                        ui.allocate_exact_size(available_space, egui::Sense::drag());
+        // Side panel
+        egui::SidePanel::right("Sid panel").show_animated(ctx, self.side_panel_open, |ui| {
+            if let Some(d) = &mut self.open_device {
+                d.show_settings(ui)
+            }
+        });
+        // Central panel
+        egui::CentralPanel::default().show(ctx, |ui| {
+            egui::TopBottomPanel::top("Plot")
+                .resizable(true)
+                .show_inside(ui, |_ui| {
+                    // TODO: Add plot
+                });
+            egui::CentralPanel::default().show_inside(ui, |ui| {
+                ui.with_layout(egui::Layout::bottom_up(egui::Align::LEFT), |ui| {
+                    powered_by_egui_and_eframe(ui);
+                    egui::warn_if_debug_build(ui);
+                    egui::Frame::canvas(ui.style()).show(ui, |ui| {
+                        let available_space = ui.available_size();
+                        let (rect, response) =
+                            ui.allocate_exact_size(available_space, egui::Sense::drag());
                         let _angle = response.drag_motion().x * 0.01;
                         // Clone locals so we can move them into the paint callback:
                         let waterfall = self.waterfall.clone();
                         let callback = egui::PaintCallback {
                             rect,
                             callback: std::sync::Arc::new(egui_glow::CallbackFn::new(
                                 move |_info, painter| {
                                     waterfall.lock().paint(painter.gl(), _angle);
                                 },
                             )),
                         };
                         ui.painter().add(callback);
+                    });
                 });
             });
         });
+        // Update debug plot windows
+        self.plots.render_plot_windows(ctx);
+        // Update device selection window
+        let mut device_window = egui::Window::new("Select Device")
+            .default_width(600.0)
+            .default_height(400.0)
+            .vscroll(false)
+            .resizable(true)
+            .collapsible(false);
+        if self.open_device.is_some() {
+            device_window = device_window.open(&mut self.device_window_open);
+        } else {
+            device_window = device_window.anchor(egui::Align2::CENTER_CENTER, [0., 0.]);
+        }
+        let mut close_device_window = false;
+        device_window.show(ctx, |ui| {
+            egui::SidePanel::left("Select Driver")
+                .resizable(true)
+                .default_width(150.0)
+                .width_range(80.0..=200.0)
+                .show_inside(ui, |ui| {
+                    ScrollArea::vertical().show(ui, |ui| {
+                        ui.with_layout(egui::Layout::top_down_justified(egui::Align::LEFT), |ui| {
+                            for (i, b) in self.backends.0.iter().enumerate() {
+                                ui.selectable_value(
+                                    &mut self.selected_backend,
+                                    i,
+                                    b.display_text(),
+                                );
+                            }
+                        });
+                    });
+                });
+            ui.vertical_centered(|ui| {
+                egui::ScrollArea::vertical().show(ui, |ui| {
+                    //if self._selected_backend < self._backends.0.len() {
+                    if let Some(b) = self.backends.0.get_mut(self.selected_backend) {
+                        //let mut b = &self._backends.0[self._selected_backend];
+                        b.show_device_selection(ui);
+                        if ui.add(egui::Button::new("Apply")).clicked() {
+                            if let Some(dev) = self.open_device.take() {
+                                dev.close()
+                            };
+                            if let Ok(device) =
+                                b.build_device(self.fft.tx.clone(), self.plots.get_sender())
+                            {
+                                self.open_device = Some(device);
+                                close_device_window = true;
+                            }
+                        }
+                    } else {
+                        ui.add(egui::Label::new("Select a Device Driver"));
+                    }
+                });
+            });
+        });
+        if close_device_window {
+            self.device_window_open = false;
+        }
     }
 }
@@ -147,11 +219,13 @@ fn powered_by_egui_and_eframe(ui: &mut egui::Ui) {
         ui.spacing_mut().item_spacing.x = 0.0;
         ui.label("Powered by ");
         ui.hyperlink_to("egui", "https://github.com/emilk/egui");
-        ui.label(" and ");
+        ui.label(", ");
         ui.hyperlink_to(
             "eframe",
             "https://github.com/emilk/egui/tree/master/crates/eframe",
         );
+        ui.label(" and ");
+        ui.hyperlink_to("glow", "https://github.com/grovesNL/glow");
         ui.label(".");
     });
 }

src/app/debug_plot.rs

@@ -1,5 +1,5 @@
 use std::collections::HashMap;
-use std::sync::mpsc::{self, Sender};
+use std::sync::mpsc;
 use egui::{Context, Ui};
 use egui_plot::{Line, Plot, PlotBounds, PlotPoints};
@@ -7,28 +7,47 @@ use realfft::num_complex::Complex32;
 pub enum PlotData {
     U8(Vec<u8>),
-    //F32(Vec<f32>),
+    F32(Vec<f32>),
     Bode32(Vec<Complex32>),
 }
+#[derive(Clone)]
+pub struct DebugPlotSender {
+    tx: mpsc::SyncSender<(&'static str, PlotData)>,
+}
+impl DebugPlotSender {
+    pub fn send(
+        &self,
+        plot_name: &'static str,
+        plot_data: PlotData,
+    ) -> Result<(), mpsc::SendError<PlotData>> {
+        match self.tx.try_send((plot_name, plot_data)) {
+            Err(mpsc::TrySendError::Full(_)) => {
+                log::warn!("Debug buffer is full!");
+                Ok(())
+            }
+            Err(mpsc::TrySendError::Disconnected((_, d))) => Err(mpsc::SendError(d)),
+            Ok(()) => Ok(()),
+        }
+    }
+}
 pub struct DebugPlots {
     plots: HashMap<&'static str, PlotData>,
     plot_en: HashMap<&'static str, bool>,
     rx: mpsc::Receiver<(&'static str, PlotData)>,
-    tx: mpsc::Sender<(&'static str, PlotData)>,
+    tx: DebugPlotSender,
 }
 impl DebugPlots {
     pub fn new() -> Self {
-        let (tx, rx) = mpsc::channel();
+        let (tx, rx) = mpsc::sync_channel(128);
         DebugPlots {
             plots: HashMap::new(),
             plot_en: HashMap::new(),
             rx,
-            tx,
+            tx: DebugPlotSender { tx },
         }
     }
-    pub fn get_sender(&self) -> Sender<(&'static str, PlotData)> {
+    pub fn get_sender(&self) -> DebugPlotSender {
         self.tx.clone()
     }
     pub fn update_plots(&mut self) {
@@ -63,35 +82,61 @@ impl DebugPlots {
             match plot {
                 PlotData::U8(v) => {
                     ui.heading("u8 Plot");
-                    let line = Line::new(PlotPoints::from_iter(
-                        v.iter().enumerate().map(|(i, y)| [i as f64, *y as f64]),
-                    ));
+                    let line = Line::new(
+                        "Data",
+                        PlotPoints::from_iter(
+                            v.iter().enumerate().map(|(i, y)| [i as f64, *y as f64]),
+                        ),
+                    );
                     let plot = Plot::new(title);
                     plot.show(ui, |plot_ui| {
                         plot_ui.line(line);
                         plot_ui.set_plot_bounds(PlotBounds::from_min_max(
                             [-1.0, -1.0],
-                            [(v.len() + 1) as f64, 256.0],
+                            [(v.len() + 1) as f64, core::u8::MAX as f64 + 1.0],
+                        ));
+                    });
+                }
+                PlotData::F32(v) => {
+                    ui.heading("f32 plot");
+                    let line = Line::new("Data", PlotPoints::from_ys_f32(&v));
+                    let plot = Plot::new(title);
+                    plot.show(ui, |plot_ui| {
+                        plot_ui.line(line);
+                        plot_ui.set_plot_bounds(PlotBounds::from_min_max(
+                            [-1.0, -2.0],
+                            [(v.len() + 1) as f64, 2.0],
                         ));
                     });
                 }
                 PlotData::Bode32(v) => {
                     ui.heading("Bode Plot");
-                    let mag_line =
-                        Line::new(PlotPoints::from_iter(v.iter().enumerate().map(|(i, c)| {
-                            [i as f64, ((c.re * c.re) + (c.im * c.im)).sqrt() as f64]
-                        })));
-                    let phase_line = Line::new(PlotPoints::from_iter(
-                        v.iter()
-                            .enumerate()
-                            .map(|(i, c)| [i as f64, c.arg() as f64]),
-                    ));
+                    let mag_line = Line::new(
+                        "Magnitude",
+                        PlotPoints::from_iter(v.iter().enumerate().map(|(i, c)| {
+                            [
+                                i as f64,
+                                ((c.re * c.re) + (c.im * c.im)).sqrt() as f64 / v.len() as f64,
+                            ]
+                        })),
+                    );
+                    let phase_line = Line::new(
+                        "Phase",
+                        PlotPoints::from_iter(
+                            v.iter()
+                                .enumerate()
+                                .map(|(i, c)| [i as f64, c.arg() as f64 / core::f64::consts::PI]),
+                        ),
+                    );
                     let plot = Plot::new(title);
                     plot.show(ui, |plot_ui| {
                         plot_ui.line(mag_line);
                         plot_ui.line(phase_line);
+                        plot_ui.set_plot_bounds(PlotBounds::from_min_max(
+                            [0.0, -1.0],
+                            [(v.len() + 1) as f64, 1.0],
+                        ));
                     });
-                    ui.heading("TODO");
                 }
             };
         });
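
The new DebugPlotSender wraps a bounded sync_channel so producers can publish plot data without ever blocking a real-time thread. A minimal sketch of the producer side, written as if inside this crate; the publish_samples helper and the plot name are hypothetical and not part of the diff:

use crate::app::debug_plot::{DebugPlotSender, PlotData};

// Push one buffer of samples to the debug plots without blocking:
// `send` uses `try_send` internally, logs a warning and silently drops the
// frame when the 128-slot channel is full, and only returns an error once
// the receiving DebugPlots on the UI side has been dropped.
fn publish_samples(plots: &DebugPlotSender, samples: &[f32]) {
    if plots
        .send("Producer samples", PlotData::F32(samples.to_vec()))
        .is_err()
    {
        log::error!("Debug plot receiver disconnected");
    }
}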

src/app/audio_fft.rs → src/app/fft.rs

@@ -1,77 +1,70 @@
 use anyhow::{anyhow, Result};
-use cpal::{
-    self,
-    traits::{DeviceTrait, HostTrait},
-    BufferSize, StreamConfig,
-};
 use realfft::RealFftPlanner;
-use std::sync::mpsc::{self, Sender};
+use std::sync::mpsc::{self, Receiver, SyncSender, TrySendError};
-use super::debug_plot::PlotData;
+use super::debug_plot::{DebugPlotSender, PlotData};
-pub struct AudioFFT {
+pub struct Fft {
-    pub stream: cpal::Stream,
+    pub tx: SyncSender<Vec<f32>>,
     pub output_len: usize,
 }
-impl AudioFFT {
+impl Fft {
-    pub fn new(
-        size: usize,
-        plot_tx: Sender<(&'static str, PlotData)>,
-    ) -> Result<(Self, mpsc::Receiver<Vec<u8>>)> {
+    pub fn new(size: usize, plot_tx: DebugPlotSender) -> Result<(Self, mpsc::Receiver<Vec<u8>>)> {
         let output_len = size / 2 + 1;
         // Create mpsc queue
-        let (tx, rx) = mpsc::channel();
+        let (tx, rx) = mpsc::sync_channel(10);
+        let (in_tx, in_rx): (SyncSender<Vec<f32>>, Receiver<Vec<f32>>) = mpsc::sync_channel(10);
         // Setup fft use f32 for now
         let mut fft_planner = RealFftPlanner::<f32>::new();
         let fft = fft_planner.plan_fft_forward(size);
-        // Setup audio input
-        let host = cpal::default_host();
-        let device = host
-            .default_input_device()
-            .ok_or(anyhow!("No input audio device found"))?;
-        // Basic config that 'should' be suppoted by most devices
-        let config = StreamConfig {
-            channels: 1,
-            sample_rate: cpal::SampleRate(44100),
-            buffer_size: BufferSize::Default,
-        };
         let mut fft_in: Vec<f32> = Vec::with_capacity(size);
         let mut fft_out = fft.make_output_vec();
         let mut fft_scratch = fft.make_scratch_vec();
-        let stream = device.build_input_stream(
-            &config,
-            move |mut data: &[f32], _: &cpal::InputCallbackInfo| {
+        std::thread::spawn(move || {
+            while let Ok(samples) = in_rx.recv() {
+                let mut data = samples.as_slice();
                 while data.fill_vec(&mut fft_in, size).is_ok() {
                     assert_eq!(size, fft_in.len());
                     fft.process_with_scratch(&mut fft_in, &mut fft_out, &mut fft_scratch)
                         .unwrap();
                     plot_tx
-                        .send(("FFT Output", PlotData::Bode32(fft_out.clone())))
+                        .send("FFT Output", PlotData::Bode32(fft_out.clone()))
                         .unwrap();
                     fft_in.clear();
                     let output: Vec<u8> = fft_out
                         .iter()
                         .map(|c| {
-                            (((c.re * c.re) + (c.im * c.im)).sqrt() / size as f32 * 255.0) as u8
+                            (((c.re * c.re) + (c.im * c.im)).sqrt() / output_len as f32 * 255.0)
+                                as u8
                         })
                         .collect();
                     assert_eq!(output_len, output.len());
                     plot_tx
-                        .send(("FFT Processed Output", PlotData::U8(output.clone())))
+                        .send("FFT Processed Output", PlotData::U8(output.clone()))
                         .unwrap();
-                    tx.send(output).unwrap();
+                    match tx.try_send(output) {
+                        Ok(_) => {}
+                        Err(TrySendError::Full(_)) => log::warn!("Waterfall buffer full."),
+                        Err(TrySendError::Disconnected(_)) => {
+                            panic!("The fft thread has disconnected from the waterfall!")
+                        }
+                    }
                 }
-            },
-            move |err| log::error!("Audio Thread Error: {err}"),
-            None,
-        )?;
+            }
+        });
-        Ok((Self { stream, output_len }, rx))
+        Ok((
+            Self {
+                tx: in_tx,
+                output_len,
+            },
+            rx,
+        ))
     }
 }
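
With the cpal stream moved out to the backend module, Fft is now fed through a SyncSender<Vec<f32>> and runs its processing loop on its own thread. A rough sketch of the resulting data flow, written as if inside app.rs where Fft, DebugPlots and FFT_SIZE are in scope; the demo function itself is hypothetical:

fn demo() -> anyhow::Result<()> {
    let plots = DebugPlots::new();
    let (fft, rx) = Fft::new(FFT_SIZE, plots.get_sender())?;
    // Producer side (normally a backend's audio callback): try_send never
    // blocks; a full 10-slot channel just returns an error the backend logs.
    let _ = fft.tx.try_send(vec![0.0_f32; FFT_SIZE]);
    // Consumer side (normally the waterfall): one Vec<u8> of magnitudes per
    // FFT frame, fft.output_len = FFT_SIZE / 2 + 1 entries each.
    if let Ok(row) = rx.try_recv() {
        assert_eq!(row.len(), fft.output_len);
    }
    Ok(())
}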

src/app/waterfall.rs

@@ -17,6 +17,25 @@ unsafe fn check_for_gl_errors(gl: &glow::Context, msg: &str) {
         log::error!("Waterfall {}: GL ERROR {} ({:#X})", msg, err, err);
     }
 }
+unsafe fn clear_texture(gl: &glow::Context, bytes_per_row: usize, n_rows: usize) {
+    let blank_line = vec![0_u8; bytes_per_row];
+    for offset in 0..n_rows {
+        unsafe {
+            gl.tex_sub_image_2d(
+                glow::TEXTURE_2D,
+                0,
+                0,
+                offset as i32,
+                bytes_per_row as i32,
+                1,
+                glow::RED,
+                glow::UNSIGNED_BYTE,
+                PixelUnpackData::Slice(Some(&blank_line)),
+            );
+            check_for_gl_errors(&gl, &format!("clear texture with offset {}", offset));
+        }
+    }
+}
 use crate::app::turbo_colormap;
@@ -29,6 +48,7 @@ pub struct Waterfall {
     ebo: glow::Buffer,
     offset: usize,
     width: usize,
+    height: usize,
     fft_in: Receiver<Vec<u8>>,
 }
@@ -80,14 +100,14 @@ impl Waterfall {
                1,
                glow::RED,
                glow::UNSIGNED_BYTE,
-                PixelUnpackData::Slice(&fft),
+                PixelUnpackData::Slice(Some(&fft)),
            );
-            check_for_gl_errors(&gl, "update texture");
+            check_for_gl_errors(&gl, &format!("update texture with offset {}", self.offset));
-            self.offset = (self.offset + 1) % self.width;
+            self.offset = (self.offset + 1) % self.height;
        }
        if let Some(uniform) = gl.get_uniform_location(self.program, "offset") {
-            gl.uniform_1_f32(Some(&uniform), self.offset as f32 / self.width as f32);
+            gl.uniform_1_f32(Some(&uniform), self.offset as f32 / self.height as f32);
        }
        check_for_gl_errors(&gl, "update uniform");
@@ -195,10 +215,13 @@ impl Waterfall {
            glow::RED,
            glow::UNSIGNED_BYTE,
            //Some(&buffer), // This segfaults with large buffers
-            None,
+            PixelUnpackData::Slice(None),
        );
        check_for_gl_errors(&gl, "Initializing Texture");
+        // Clear the texture
+        clear_texture(gl, width, height);
        let color_lut = gl
            .create_texture()
            .expect("Waterfall: could not create LUT");
@@ -223,7 +246,7 @@ impl Waterfall {
            0,
            glow::RGB,
            UNSIGNED_BYTE,
-            Some(&turbo_colormap::TURBO_SRGB_BYTES),
+            PixelUnpackData::Slice(Some(&turbo_colormap::TURBO_SRGB_BYTES)),
        );
        check_for_gl_errors(&gl, "Initializing LUT");
@@ -317,6 +340,7 @@ impl Waterfall {
            ebo,
            offset: 0_usize,
            width,
+            height,
            fft_in,
        }
    }

src/backend/audio.rs (new file, 124 lines)

@@ -0,0 +1,124 @@
use anyhow::Result;
use cpal::{
    self,
    traits::{DeviceTrait, HostTrait},
    BufferSize,
};
use std::sync::mpsc::{SyncSender, TrySendError};
use crate::app::debug_plot::DebugPlotSender;
pub struct Audio {
    pub _stream: cpal::Stream,
}
impl Audio {
    pub fn new(
        device_id: cpal::DeviceId,
        config: cpal::StreamConfig,
        fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> Result<Self> {
        let host = cpal::default_host();
        let device = host
            .device_by_id(&device_id)
            .ok_or(anyhow::anyhow!("Can't open device."))?;
        let _stream = device.build_input_stream(
            &config,
            move |data: &[f32], _: &cpal::InputCallbackInfo| {
                match fft_input.try_send(data.to_vec()) {
                    Err(TrySendError::Disconnected(_)) => panic!(
                        "Error: Audio backend has lost connection to frontend! Can not continue!"
                    ),
                    Err(TrySendError::Full(_)) => log::warn!("Audio Backend buffer full."),
                    Ok(()) => {}
                };
            },
            move |err| log::error!("Audio Thread Error: {err}"),
            None,
        )?;
        Ok(Self { _stream })
    }
}
impl crate::backend::Device for Audio {
    fn show_settings(&mut self, ui: &mut egui::Ui) {
        ui.label("TODO");
    }
    fn can_tune(&self) -> bool {
        false
    }
    fn tune(&mut self, _freq: usize) -> anyhow::Result<()> {
        anyhow::bail!("Can't tune this device")
    }
    fn close(self: Box<Self>) {
        drop(self);
    }
}
pub struct AudioBackend {
    host: cpal::Host,
    devices: Vec<cpal::Device>,
    current_device: usize,
}
impl AudioBackend {
    pub fn new() -> Self {
        let host = cpal::default_host();
        let devices = host.devices().unwrap().collect();
        let current_device = 0;
        Self {
            host,
            devices,
            current_device,
        }
    }
    fn update_devices(&mut self) {
        self.devices.clear();
        self.devices = self.host.devices().unwrap().collect();
        self.current_device = 0;
    }
}
impl super::Backend for AudioBackend {
    fn display_text(&self) -> &'static str {
        "Audio"
    }
    fn show_device_selection(&mut self, ui: &mut egui::Ui) {
        egui::ComboBox::from_label("Device")
            .selected_text(
                self.devices[self.current_device]
                    .id()
                    .map(|id| id.1)
                    .unwrap_or("UNKNOWN DEVICE".into()),
            )
            .show_index(ui, &mut self.current_device, self.devices.len(), |i| {
                self.devices[i]
                    .id()
                    .map(|id| id.1)
                    .unwrap_or("UNKNOWN DEVICE".into())
            });
        if ui.add(egui::Button::new("Refresh")).clicked() {
            self.update_devices();
        }
    }
    fn build_device(
        &mut self,
        fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> anyhow::Result<Box<dyn super::Device>> {
        let config = cpal::StreamConfig {
            channels: 1,
            sample_rate: 44100,
            buffer_size: BufferSize::Default,
        };
        Ok(Box::new(Audio::new(
            self.devices[self.current_device].id()?,
            config,
            fft_input,
            _plot_tx,
        )?))
    }
}

src/backend/dummy.rs (new file, 109 lines)

@@ -0,0 +1,109 @@
use anyhow::Result;
use core::panic;
use std::{
    sync::mpsc::{self, RecvTimeoutError, SyncSender, TrySendError},
    time::{Duration, Instant},
    usize,
};
use crate::app::debug_plot::{DebugPlotSender, PlotData};
const LUT_LEN: usize = 4096;
pub struct DummyDevice {
    close: SyncSender<()>,
}
impl DummyDevice {
    pub fn new(
        sample_rate: usize,
        fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> Result<Self> {
        let sin_lut: Vec<f32> = (0..LUT_LEN)
            .map(|i| ((i as f32 / LUT_LEN as f32) * std::f32::consts::TAU).sin())
            .collect();
        let (close, close_rx) = mpsc::sync_channel(0);
        let buffer_size: usize = 2048;
        let loop_interval = Duration::from_secs_f32((1. / sample_rate as f32) * buffer_size as f32);
        let freq = (sample_rate / 4) as f32;
        let phase_delta = sin_lut.len() as f32 * (freq / sample_rate as f32);
        std::thread::spawn(move || {
            let mut phase = 0_f32;
            loop {
                let start = Instant::now();
                let samples: Vec<f32> = (0..buffer_size)
                    .map(|_i| {
                        phase = (phase + phase_delta) % sin_lut.len() as f32;
                        sin_lut[phase as usize]
                    })
                    .collect();
                _plot_tx
                    .send("Dummy output", PlotData::F32(samples.clone()))
                    .unwrap();
                match fft_input.try_send(samples) {
                    Ok(_) => {}
                    Err(TrySendError::Full(_)) => log::warn!("Dummy Backend buffer full."),
                    Err(TrySendError::Disconnected(_)) => {
                        panic!("Dummy device lost connection to frontend!")
                    }
                }
                match close_rx.recv_timeout(loop_interval - start.elapsed()) {
                    Ok(_) => break,
                    Err(RecvTimeoutError::Disconnected) => {
                        panic!("Dummy device lost connection to frontend!")
                    }
                    Err(RecvTimeoutError::Timeout) => {}
                }
            }
        });
        Ok(Self { close })
    }
}
impl crate::backend::Device for DummyDevice {
    fn show_settings(&mut self, ui: &mut egui::Ui) {
        ui.label("TODO");
    }
    fn can_tune(&self) -> bool {
        false
    }
    fn tune(&mut self, _freq: usize) -> anyhow::Result<()> {
        anyhow::bail!("Can't tune this device")
    }
    fn close(self: Box<Self>) {
        self.close.send(()).unwrap();
    }
}
pub struct DummyBackend {
    sample_rate: usize,
}
impl DummyBackend {
    pub fn new() -> Self {
        Self { sample_rate: 48000 }
    }
}
impl super::Backend for DummyBackend {
    fn display_text(&self) -> &'static str {
        "Dummy"
    }
    fn show_device_selection(&mut self, ui: &mut egui::Ui) {
        ui.label("TODO");
    }
    fn build_device(
        &mut self,
        fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> anyhow::Result<Box<dyn super::Device>> {
        Ok(Box::new(DummyDevice::new(
            self.sample_rate,
            fft_input,
            _plot_tx,
        )?))
    }
}
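
A quick sanity check on the dummy device's phase step, mirroring the constants above; the check itself is hypothetical and not part of the diff:

fn main() {
    let lut_len = 4096_f32;        // LUT_LEN
    let sample_rate = 48_000_f32;  // DummyBackend::new()
    let freq = sample_rate / 4.0;  // the fs/4 test tone
    // Each output sample advances the LUT index by LUT_LEN * freq / sample_rate,
    // i.e. a quarter of the table (1024 entries) per sample.
    let phase_delta = lut_len * (freq / sample_rate);
    assert_eq!(phase_delta, 1024.0);
    // A 1024-point FFT puts an fs/4 tone in bin 1024 / 4 = 256, the middle of
    // the 513-bin magnitude output, so it should draw a line mid-waterfall.
    println!("phase_delta = {phase_delta}");
}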

src/backend/mod.rs (new file, 47 lines)

@@ -0,0 +1,47 @@
use std::sync::mpsc::SyncSender;
use egui::Ui;
use crate::app::debug_plot::DebugPlotSender;
mod audio;
mod dummy;
pub trait Device {
    fn show_settings(&mut self, ui: &mut Ui);
    fn can_tune(&self) -> bool;
    fn tune(&mut self, freq: usize) -> anyhow::Result<()>;
    fn close(self: Box<Self>);
}
pub trait Backend {
    fn display_text(&self) -> &'static str;
    fn show_device_selection(&mut self, ui: &mut Ui);
    fn build_device(
        &mut self,
        fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> anyhow::Result<Box<dyn Device>>;
}
pub struct Backends(pub Vec<Box<dyn Backend>>);
#[cfg(all(not(target_arch = "wasm32"), not(target_os = "android")))]
impl Default for Backends {
    fn default() -> Self {
        Backends(vec![
            Box::new(audio::AudioBackend::new()),
            Box::new(dummy::DummyBackend::new()),
        ])
    }
}
#[cfg(target_arch = "wasm32")]
impl Default for Backends {
    fn default() -> Self {
        Backends(vec![Box::new(dummy::DummyBackend::new())])
    }
}
#[cfg(target_os = "android")]
impl Default for Backends {
    fn default() -> Self {
        Backends(vec![Box::new(dummy::DummyBackend::new())])
    }
}
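
The Backend/Device split keeps UI-driven device discovery separate from a running capture source. A hypothetical third backend, written inside the crate, would only need these two trait impls plus an entry in the matching Backends::default(); none of the names below exist in the diff:

use std::sync::mpsc::SyncSender;
use crate::app::debug_plot::DebugPlotSender;
use crate::backend::{Backend, Device};

// Placeholder device that produces no samples; a real one would spawn a
// reader thread feeding `fft_input`, like DummyDevice does above.
struct FileDevice;
impl Device for FileDevice {
    fn show_settings(&mut self, ui: &mut egui::Ui) {
        ui.label("No settings");
    }
    fn can_tune(&self) -> bool {
        false
    }
    fn tune(&mut self, _freq: usize) -> anyhow::Result<()> {
        anyhow::bail!("Can't tune this device")
    }
    fn close(self: Box<Self>) {}
}

struct FileBackend;
impl Backend for FileBackend {
    fn display_text(&self) -> &'static str {
        "File playback"
    }
    fn show_device_selection(&mut self, ui: &mut egui::Ui) {
        ui.label("TODO: pick a file");
    }
    fn build_device(
        &mut self,
        _fft_input: SyncSender<Vec<f32>>,
        _plot_tx: DebugPlotSender,
    ) -> anyhow::Result<Box<dyn Device>> {
        Ok(Box::new(FileDevice))
    }
}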

src/lib.rs

@@ -1,12 +1,16 @@
 #![warn(clippy::all, rust_2018_idioms)]
 pub mod app;
+mod backend;
 #[cfg(target_os = "android")]
 #[no_mangle]
 fn android_main(app: winit::platform::android::activity::AndroidApp) {
     use winit::platform::android::activity::WindowManagerFlags;
-    use winit::platform::android::EventLoopBuilderExtAndroid;
+    android_logger::init_once(
+        android_logger::Config::default().with_max_level(log::LevelFilter::Debug),
+    );
     // Disable LAYOUT_IN_SCREEN to keep app from drawing under the status bar
     // winit does not currently do anything with MainEvent::InsetsChanged events
@@ -17,18 +21,15 @@ fn android_main(app: winit::platform::android::activity::AndroidApp) {
     // Alternatively we can hide the system bars by setting the app to fullscreen
     //app.set_window_flags(WindowManagerFlags::FULLSCREEN, WindowManagerFlags::empty());
-    android_logger::init_once(
-        android_logger::Config::default().with_max_level(log::LevelFilter::Debug),
-    );
-    let mut options = eframe::NativeOptions::default();
-    options.event_loop_builder = Some(Box::new(move |builder| {
-        builder.with_android_app(app);
-    }));
+    let options = eframe::NativeOptions {
+        android_app: Some(app),
+        ..Default::default()
+    };
     let res = eframe::run_native(
         "eframe template",
         options,
-        Box::new(|cc| Box::new(app::TemplateApp::new(cc))),
+        Box::new(|cc| Ok(Box::new(app::TemplateApp::new(cc)))),
     );
     if let Err(e) = res {
         log::error!("{e:?}");

src/main.rs

@@ -1,5 +1,6 @@
 mod app;
 use app::TemplateApp;
+mod backend;
 //#[cfg(target_os = "android")]
 //fn main() {}
@@ -22,26 +23,53 @@ fn main() -> eframe::Result<()> {
     eframe::run_native(
         "eframe template",
         native_options,
-        Box::new(|cc| Box::new(TemplateApp::new(cc))),
+        Box::new(|cc| Ok(Box::new(TemplateApp::new(cc)))),
     )
 }
 // When compiling to web using trunk:
 #[cfg(target_arch = "wasm32")]
 fn main() {
+    use eframe::wasm_bindgen::JsCast as _;
     // Redirect `log` message to `console.log` and friends:
     eframe::WebLogger::init(log::LevelFilter::Debug).ok();
     let web_options = eframe::WebOptions::default();
     wasm_bindgen_futures::spawn_local(async {
-        eframe::WebRunner::new()
+        let document = web_sys::window()
+            .expect("No window")
+            .document()
+            .expect("No document");
+        let canvas = document
+            .get_element_by_id("the_canvas_id")
+            .expect("Failed to find the_canvas_id")
+            .dyn_into::<web_sys::HtmlCanvasElement>()
+            .expect("the_canvas_id was not a HtmlCanvasElement");
+        let start_result = eframe::WebRunner::new()
             .start(
-                "the_canvas_id", // hardcode it
+                canvas,
                 web_options,
-                Box::new(|cc| Box::new(TemplateApp::new(cc))),
+                Box::new(|cc| Ok(Box::new(TemplateApp::new(cc)))),
             )
-            .await
-            .expect("failed to start eframe");
+            .await;
+        // Remove the loading text and spinner:
+        if let Some(loading_text) = document.get_element_by_id("loading_text") {
+            match start_result {
+                Ok(_) => {
+                    loading_text.remove();
+                }
+                Err(e) => {
+                    loading_text.set_inner_html(
+                        "<p> The app has crashed. See the developer console for details. </p>",
+                    );
+                    panic!("Failed to start eframe: {e:?}");
+                }
+            }
+        }
     });
 }