Browse Source
It presents the audio stream as a waveform, loads the audio file lazily, and is more adaptive. (branch: fractal-13)
26 changed files with 1764 additions and 384 deletions
|
After Width: | Height: | Size: 490 B |
|
After Width: | Height: | Size: 539 B |
@ -1,8 +0,0 @@
|
||||
using Gtk 4.0; |
||||
using Adw 1; |
||||
|
||||
template $AudioPlayer: Adw.Bin { |
||||
Gtk.MediaControls { |
||||
media-stream: bind template.media-file; |
||||
} |
||||
} |
||||
@ -1,105 +0,0 @@
|
||||
use adw::{prelude::*, subclass::prelude::*}; |
||||
use gtk::{gio, glib}; |
||||
|
||||
use crate::utils::BoundObject; |
||||
|
||||
mod imp { |
||||
use std::cell::Cell; |
||||
|
||||
use glib::subclass::InitializingObject; |
||||
|
||||
use super::*; |
||||
|
||||
#[derive(Debug, Default, gtk::CompositeTemplate, glib::Properties)] |
||||
#[template(resource = "/org/gnome/Fractal/ui/components/media/audio_player.ui")] |
||||
#[properties(wrapper_type = super::AudioPlayer)] |
||||
pub struct AudioPlayer { |
||||
/// The media file to play.
|
||||
#[property(get, set = Self::set_media_file, explicit_notify, nullable)] |
||||
media_file: BoundObject<gtk::MediaFile>, |
||||
/// Whether to play the media automatically.
|
||||
#[property(get, set = Self::set_autoplay, explicit_notify)] |
||||
autoplay: Cell<bool>, |
||||
} |
||||
|
||||
#[glib::object_subclass] |
||||
impl ObjectSubclass for AudioPlayer { |
||||
const NAME: &'static str = "AudioPlayer"; |
||||
type Type = super::AudioPlayer; |
||||
type ParentType = adw::Bin; |
||||
|
||||
fn class_init(klass: &mut Self::Class) { |
||||
Self::bind_template(klass); |
||||
} |
||||
|
||||
fn instance_init(obj: &InitializingObject<Self>) { |
||||
obj.init_template(); |
||||
} |
||||
} |
||||
|
||||
#[glib::derived_properties] |
||||
impl ObjectImpl for AudioPlayer {} |
||||
|
||||
impl WidgetImpl for AudioPlayer {} |
||||
impl BinImpl for AudioPlayer {} |
||||
|
||||
impl AudioPlayer { |
||||
/// Set the media file to play.
|
||||
fn set_media_file(&self, media_file: Option<gtk::MediaFile>) { |
||||
if self.media_file.obj() == media_file { |
||||
return; |
||||
} |
||||
|
||||
self.media_file.disconnect_signals(); |
||||
|
||||
if let Some(media_file) = media_file { |
||||
let mut handlers = Vec::new(); |
||||
|
||||
if self.autoplay.get() { |
||||
let prepared_handler = media_file.connect_prepared_notify(|media_file| { |
||||
if media_file.is_prepared() { |
||||
media_file.play(); |
||||
} |
||||
}); |
||||
handlers.push(prepared_handler); |
||||
} |
||||
|
||||
self.media_file.set(media_file, handlers); |
||||
} |
||||
|
||||
self.obj().notify_media_file(); |
||||
} |
||||
|
||||
/// Set whether to play the media automatically.
|
||||
fn set_autoplay(&self, autoplay: bool) { |
||||
if self.autoplay.get() == autoplay { |
||||
return; |
||||
} |
||||
|
||||
self.autoplay.set(autoplay); |
||||
self.obj().notify_autoplay(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
glib::wrapper! { |
||||
/// A widget displaying a video media file.
|
||||
pub struct AudioPlayer(ObjectSubclass<imp::AudioPlayer>) |
||||
@extends gtk::Widget, adw::Bin, |
||||
@implements gtk::Accessible, gtk::Buildable, gtk::ConstraintTarget; |
||||
} |
||||
|
||||
impl AudioPlayer { |
||||
/// Create a new audio player.
|
||||
pub fn new() -> Self { |
||||
glib::Object::new() |
||||
} |
||||
|
||||
/// Set the file to play.
|
||||
///
|
||||
/// This is a convenience method that calls
|
||||
/// [`AudioPlayer::set_media_file()`].
|
||||
pub(crate) fn set_file(&self, file: Option<&gio::File>) { |
||||
self.set_media_file(file.map(gtk::MediaFile::for_file)); |
||||
} |
||||
} |
||||
@ -0,0 +1,92 @@
|
||||
using Gtk 4.0;
using Adw 1;

template $AudioPlayer: Adw.BreakpointBin {
  margin-start: 6;
  margin-end: 6;
  width-request: 200;
  height-request: 100;

  Gtk.Box {
    orientation: vertical;
    spacing: 6;

    Gtk.Box {
      spacing: 6;

      Gtk.Label position_label {
        styles [
          "caption",
        ]
      }

      Gtk.Overlay {
        $Waveform waveform {
          hexpand: true;
          seek => $seek() swapped;
        }

        [overlay]
        Adw.Spinner spinner {
          visible: false;
          height-request: 20;
          width-request: 20;
          halign: center;
          valign: center;
        }

        [overlay]
        Gtk.Image error_img {
          visible: false;
          icon-name: "error-symbolic";
          halign: center;
          valign: center;
        }
      }

      Gtk.Label remaining_label {
        styles [
          "caption",
        ]
      }
    }

    Gtk.Box bottom_box {
      spacing: 6;

      Adw.Bin play_button_bin {
        child: Gtk.Button play_button {
          halign: center;
          clicked => $toggle_playing() swapped;

          styles [
            "flat",
          ]
        };
      }

      Gtk.Label filename_label {
        hexpand: true;
        xalign: 0.0;
        ellipsize: end;
      }

      Gtk.Label position_label_narrow {
        visible: false;
        halign: end;
        label: bind position_label.label;

        styles [
          "caption",
        ]
      }
    }
  }
}

Gtk.SizeGroup {
  widgets [
    position_label,
    play_button_bin,
  ]
}
||||
@ -0,0 +1,609 @@
|
||||
use std::time::Duration; |
||||
|
||||
use adw::{prelude::*, subclass::prelude::*}; |
||||
use gettextrs::gettext; |
||||
use gtk::{gio, glib, glib::clone}; |
||||
use tracing::warn; |
||||
|
||||
mod waveform; |
||||
mod waveform_paintable; |
||||
|
||||
use self::waveform::Waveform; |
||||
use crate::{ |
||||
session::model::Session, |
||||
spawn, |
||||
utils::{ |
||||
File, LoadingState, |
||||
matrix::{AudioMessageExt, MediaMessage, MessageCacheKey}, |
||||
media::{ |
||||
self, MediaFileError, |
||||
audio::{generate_waveform, load_audio_info}, |
||||
}, |
||||
}, |
||||
}; |
||||
|
||||
mod imp { |
||||
use std::cell::{Cell, RefCell}; |
||||
|
||||
use glib::subclass::InitializingObject; |
||||
|
||||
use super::*; |
||||
|
||||
#[derive(Debug, Default, gtk::CompositeTemplate, glib::Properties)] |
||||
#[template(resource = "/org/gnome/Fractal/ui/components/media/audio_player/mod.ui")] |
||||
#[properties(wrapper_type = super::AudioPlayer)] |
||||
pub struct AudioPlayer { |
||||
#[template_child] |
||||
position_label: TemplateChild<gtk::Label>, |
||||
#[template_child] |
||||
waveform: TemplateChild<Waveform>, |
||||
#[template_child] |
||||
spinner: TemplateChild<adw::Spinner>, |
||||
#[template_child] |
||||
error_img: TemplateChild<gtk::Image>, |
||||
#[template_child] |
||||
remaining_label: TemplateChild<gtk::Label>, |
||||
#[template_child] |
||||
bottom_box: TemplateChild<gtk::Box>, |
||||
#[template_child] |
||||
play_button: TemplateChild<gtk::Button>, |
||||
#[template_child] |
||||
filename_label: TemplateChild<gtk::Label>, |
||||
#[template_child] |
||||
position_label_narrow: TemplateChild<gtk::Label>, |
||||
/// The source to play.
|
||||
source: RefCell<Option<AudioPlayerSource>>, |
||||
/// The API used to play the audio file.
|
||||
#[property(get)] |
||||
media_file: gtk::MediaFile, |
||||
/// The audio file that is currently loaded.
|
||||
///
|
||||
/// This is used to keep a strong reference to the temporary file.
|
||||
file: RefCell<Option<File>>, |
||||
/// Whether the audio player is the main widget of the current view.
|
||||
///
|
||||
/// This hides the filename and centers the play button.
|
||||
#[property(get, set = Self::set_standalone, explicit_notify)] |
||||
standalone: Cell<bool>, |
||||
/// Whether we are in narrow mode.
|
||||
narrow: Cell<bool>, |
||||
/// The state of the audio file.
|
||||
#[property(get, builder(LoadingState::default()))] |
||||
state: Cell<LoadingState>, |
||||
/// The duration of the audio stream, in microseconds.
|
||||
duration: Cell<Duration>, |
||||
} |
||||
|
||||
#[glib::object_subclass] |
||||
impl ObjectSubclass for AudioPlayer { |
||||
const NAME: &'static str = "AudioPlayer"; |
||||
type Type = super::AudioPlayer; |
||||
type ParentType = adw::BreakpointBin; |
||||
|
||||
fn class_init(klass: &mut Self::Class) { |
||||
Self::bind_template(klass); |
||||
Self::bind_template_callbacks(klass); |
||||
|
||||
klass.set_css_name("audio-player"); |
||||
} |
||||
|
||||
fn instance_init(obj: &InitializingObject<Self>) { |
||||
obj.init_template(); |
||||
} |
||||
} |
||||
|
||||
#[glib::derived_properties] |
||||
impl ObjectImpl for AudioPlayer { |
||||
fn constructed(&self) { |
||||
self.parent_constructed(); |
||||
|
||||
let breakpoint = adw::Breakpoint::new(adw::BreakpointCondition::new_length( |
||||
adw::BreakpointConditionLengthType::MaxWidth, |
||||
360.0, |
||||
adw::LengthUnit::Px, |
||||
)); |
||||
breakpoint.connect_apply(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |_| { |
||||
imp.set_narrow(true); |
||||
} |
||||
)); |
||||
breakpoint.connect_unapply(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |_| { |
||||
imp.set_narrow(false); |
||||
} |
||||
)); |
||||
self.obj().add_breakpoint(breakpoint); |
||||
|
||||
self.media_file.connect_duration_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |media_file| { |
||||
if !imp.use_media_file_data() { |
||||
return; |
||||
} |
||||
|
||||
let duration = Duration::from_micros(media_file.duration().cast_unsigned()); |
||||
imp.set_duration(duration); |
||||
} |
||||
)); |
||||
|
||||
self.media_file.connect_timestamp_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |media_file| { |
||||
if !imp.use_media_file_data() { |
||||
return; |
||||
} |
||||
|
||||
let mut duration = media_file.duration(); |
||||
let timestamp = media_file.timestamp(); |
||||
|
||||
// The duration should always be bigger than the timestamp, but let's be safe.
|
||||
if duration != 0 && timestamp > duration { |
||||
duration = timestamp; |
||||
} |
||||
|
||||
let position = if duration == 0 { |
||||
0.0 |
||||
} else { |
||||
(timestamp as f64 / duration as f64) as f32 |
||||
}; |
||||
|
||||
imp.waveform.set_position(position); |
||||
} |
||||
)); |
||||
|
||||
self.media_file.connect_playing_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |_| { |
||||
imp.update_play_button(); |
||||
} |
||||
)); |
||||
|
||||
self.media_file.connect_prepared_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |media_file| { |
||||
if media_file.is_prepared() { |
||||
// The media file should only become prepared after the user clicked play,
|
||||
// so start playing it.
|
||||
media_file.set_playing(true); |
||||
|
||||
// If the user selected a position while we didn't have a media file, seek
|
||||
// to it.
|
||||
let position = imp.waveform.position(); |
||||
if position > 0.0 { |
||||
media_file |
||||
.seek((media_file.duration() as f64 * f64::from(position)) as i64); |
||||
} |
||||
} |
||||
} |
||||
)); |
||||
|
||||
self.media_file.connect_error_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |media_file| { |
||||
if let Some(error) = media_file.error() { |
||||
warn!("Could not read audio file: {error}"); |
||||
imp.set_error(&gettext("Error reading audio file")); |
||||
} |
||||
} |
||||
)); |
||||
|
||||
self.waveform.connect_position_notify(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
move |_| { |
||||
imp.update_position_labels(); |
||||
} |
||||
)); |
||||
|
||||
self.update_play_button(); |
||||
} |
||||
|
||||
fn dispose(&self) { |
||||
self.media_file.clear(); |
||||
} |
||||
} |
||||
|
||||
impl WidgetImpl for AudioPlayer {} |
||||
impl BreakpointBinImpl for AudioPlayer {} |
||||
|
||||
#[gtk::template_callbacks] |
||||
impl AudioPlayer { |
||||
/// Set the source to play.
|
||||
pub(super) fn set_source(&self, source: Option<AudioPlayerSource>) { |
||||
let should_reload = source.as_ref().is_none_or(|source| { |
||||
self.source |
||||
.borrow() |
||||
.as_ref() |
||||
.is_none_or(|old_source| old_source.should_reload(source)) |
||||
}); |
||||
|
||||
if should_reload { |
||||
self.set_state(LoadingState::Initial); |
||||
self.media_file.clear(); |
||||
self.file.take(); |
||||
} |
||||
|
||||
self.source.replace(source); |
||||
|
||||
if should_reload { |
||||
spawn!(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
async move { |
||||
imp.load_source_duration().await; |
||||
} |
||||
)); |
||||
spawn!(clone!( |
||||
#[weak(rename_to = imp)] |
||||
self, |
||||
async move { |
||||
imp.load_source_waveform().await; |
||||
} |
||||
)); |
||||
|
||||
self.update_source_filename(); |
||||
} |
||||
|
||||
self.update_play_button(); |
||||
} |
||||
|
||||
/// Set whether the audio player is the main widget of the current view.
|
||||
fn set_standalone(&self, standalone: bool) { |
||||
if self.standalone.get() == standalone { |
||||
return; |
||||
} |
||||
|
||||
self.standalone.set(standalone); |
||||
self.update_layout(); |
||||
self.obj().notify_standalone(); |
||||
} |
||||
|
||||
/// Set whether we are in narrow mode.
|
||||
fn set_narrow(&self, narrow: bool) { |
||||
if self.narrow.get() == narrow { |
||||
return; |
||||
} |
||||
|
||||
self.narrow.set(narrow); |
||||
self.update_layout(); |
||||
} |
||||
|
||||
/// Update the layout for the current state.
|
||||
fn update_layout(&self) { |
||||
let standalone = self.standalone.get(); |
||||
let narrow = self.narrow.get(); |
||||
|
||||
self.position_label.set_visible(!narrow); |
||||
self.remaining_label.set_visible(!narrow); |
||||
self.filename_label.set_visible(!standalone); |
||||
self.position_label_narrow |
||||
.set_visible(narrow && !standalone); |
||||
|
||||
self.bottom_box.set_halign(if standalone { |
||||
gtk::Align::Center |
||||
} else { |
||||
gtk::Align::Fill |
||||
}); |
||||
} |
||||
|
||||
/// Set the state of the audio stream.
|
||||
fn set_state(&self, state: LoadingState) { |
||||
if self.state.get() == state { |
||||
return; |
||||
} |
||||
|
||||
self.waveform |
||||
.set_sensitive(matches!(state, LoadingState::Initial | LoadingState::Ready)); |
||||
self.spinner |
||||
.set_visible(matches!(state, LoadingState::Loading)); |
||||
self.error_img |
||||
.set_visible(matches!(state, LoadingState::Error)); |
||||
|
||||
self.state.set(state); |
||||
self.obj().notify_state(); |
||||
} |
||||
|
||||
/// Convenience method to set the state to `Error` with the given error
|
||||
/// message.
|
||||
fn set_error(&self, error: &str) { |
||||
self.set_state(LoadingState::Error); |
||||
self.error_img.set_tooltip_text(Some(error)); |
||||
} |
||||
|
||||
/// Whether we should use the source data rather than the `GtkMediaFile`
|
||||
/// data.
|
||||
///
|
||||
/// We cannot use the `GtkMediaFile` data if it doesn't have a `GFile`
|
||||
/// set.
|
||||
fn use_media_file_data(&self) -> bool { |
||||
self.state.get() != LoadingState::Initial |
||||
} |
||||
|
||||
/// Set the duration of the audio stream.
|
||||
fn set_duration(&self, duration: Duration) { |
||||
if self.duration.get() == duration { |
||||
return; |
||||
} |
||||
|
||||
self.duration.set(duration); |
||||
self.update_duration_labels_width(); |
||||
self.update_position_labels(); |
||||
} |
||||
|
||||
/// Update the width of labels presenting a duration.
|
||||
fn update_duration_labels_width(&self) { |
||||
let has_hours = self.duration.get().as_secs() > 60 * 60; |
||||
let time_width = if has_hours { 8 } else { 5 }; |
||||
|
||||
self.position_label.set_width_chars(time_width); |
||||
self.remaining_label.set_width_chars(time_width + 1); |
||||
} |
||||
|
||||
/// Load the duration of the current source.
|
||||
async fn load_source_duration(&self) { |
||||
let Some(source) = self.source.borrow().clone() else { |
||||
self.set_duration(Duration::default()); |
||||
return; |
||||
}; |
||||
|
||||
let duration = source.duration().await; |
||||
self.set_duration(duration.unwrap_or_default()); |
||||
} |
||||
|
||||
/// Load the waveform of the current source.
|
||||
async fn load_source_waveform(&self) { |
||||
let Some(source) = self.source.borrow().clone() else { |
||||
self.waveform.set_waveform(vec![]); |
||||
return; |
||||
}; |
||||
|
||||
let waveform = source.waveform().await; |
||||
self.waveform.set_waveform(waveform.unwrap_or_default()); |
||||
} |
||||
|
||||
/// Update the name of the source.
|
||||
fn update_source_filename(&self) { |
||||
let filename = self |
||||
.source |
||||
.borrow() |
||||
.as_ref() |
||||
.map(AudioPlayerSource::filename) |
||||
.unwrap_or_default(); |
||||
|
||||
self.filename_label.set_label(&filename); |
||||
} |
||||
|
||||
/// Update the labels displaying the position in the audio stream.
|
||||
fn update_position_labels(&self) { |
||||
let duration = self.duration.get(); |
||||
let position = self.waveform.position(); |
||||
|
||||
let position = duration.mul_f32(position); |
||||
let remaining = duration.saturating_sub(position); |
||||
|
||||
self.position_label |
||||
.set_label(&media::time_to_label(&position)); |
||||
self.remaining_label |
||||
.set_label(&format!("-{}", media::time_to_label(&remaining))); |
||||
} |
||||
|
||||
/// Update the play button.
|
||||
fn update_play_button(&self) { |
||||
let is_playing = self.media_file.is_playing(); |
||||
|
||||
let (icon_name, tooltip) = if is_playing { |
||||
("pause-symbolic", gettext("Pause")) |
||||
} else { |
||||
("play-symbolic", gettext("Play")) |
||||
}; |
||||
|
||||
self.play_button.set_icon_name(icon_name); |
||||
self.play_button.set_tooltip_text(Some(&tooltip)); |
||||
|
||||
if is_playing { |
||||
self.set_state(LoadingState::Ready); |
||||
} |
||||
} |
||||
|
||||
/// Set the media file to play.
|
||||
async fn set_file(&self, file: File) { |
||||
let gfile = file.as_gfile(); |
||||
self.media_file.set_file(Some(&gfile)); |
||||
self.file.replace(Some(file)); |
||||
|
||||
// Reload the waveform if we got it from a message, because we cannot trust the
|
||||
// sender.
|
||||
if self |
||||
.source |
||||
.borrow() |
||||
.as_ref() |
||||
.is_some_and(|source| matches!(source, AudioPlayerSource::Message(_))) |
||||
&& let Some(waveform) = generate_waveform(&gfile, None).await |
||||
{ |
||||
self.waveform.set_waveform(waveform); |
||||
} |
||||
} |
||||
|
||||
/// Play or pause the media.
|
||||
#[template_callback] |
||||
async fn toggle_playing(&self) { |
||||
if self.use_media_file_data() { |
||||
self.media_file.set_playing(!self.media_file.is_playing()); |
||||
return; |
||||
} |
||||
|
||||
let Some(source) = self.source.borrow().clone() else { |
||||
return; |
||||
}; |
||||
|
||||
self.set_state(LoadingState::Loading); |
||||
|
||||
match source.to_file().await { |
||||
Ok(file) => { |
||||
self.set_file(file).await; |
||||
} |
||||
Err(error) => { |
||||
warn!("Could not retrieve audio file: {error}"); |
||||
self.set_error(&gettext("Could not retrieve audio file")); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Seek to the given relative position.
|
||||
///
|
||||
/// The position must be a value between 0 and 1.
|
||||
#[template_callback] |
||||
fn seek(&self, new_position: f32) { |
||||
if self.use_media_file_data() { |
||||
let duration = self.duration.get(); |
||||
|
||||
if !duration.is_zero() { |
||||
let timestamp = duration.as_micros() as f64 * f64::from(new_position); |
||||
self.media_file.seek(timestamp as i64); |
||||
} |
||||
} else { |
||||
self.waveform.set_position(new_position); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
glib::wrapper! { |
||||
/// A widget displaying a video media file.
|
||||
pub struct AudioPlayer(ObjectSubclass<imp::AudioPlayer>) |
||||
@extends gtk::Widget, adw::BreakpointBin, |
||||
@implements gtk::Accessible, gtk::Buildable, gtk::ConstraintTarget; |
||||
} |
||||
|
||||
impl AudioPlayer { |
||||
/// Create a new audio player.
|
||||
pub fn new() -> Self { |
||||
glib::Object::new() |
||||
} |
||||
|
||||
/// Set the source to play.
|
||||
pub(crate) fn set_source(&self, source: Option<AudioPlayerSource>) { |
||||
self.imp().set_source(source); |
||||
} |
||||
} |
||||
|
||||
/// The possible sources accepted by the audio player.
|
||||
#[derive(Debug, Clone)] |
||||
pub(crate) enum AudioPlayerSource { |
||||
/// An audio file.
|
||||
File(gio::File), |
||||
/// An audio message.
|
||||
Message(AudioPlayerMessage), |
||||
} |
||||
|
||||
impl AudioPlayerSource { |
||||
/// Get the filename of the source.
|
||||
fn filename(&self) -> String { |
||||
match self { |
||||
Self::File(file) => file |
||||
.path() |
||||
.and_then(|path| path.file_name().map(|s| s.to_string_lossy().into_owned())) |
||||
.unwrap_or_default(), |
||||
Self::Message(message) => message.message.filename(), |
||||
} |
||||
} |
||||
|
||||
/// Whether the source should be reloaded because it has changed.
|
||||
fn should_reload(&self, new_source: &Self) -> bool { |
||||
match (self, new_source) { |
||||
(Self::File(file), Self::File(new_file)) => file != new_file, |
||||
(Self::Message(message), Self::Message(new_message)) => { |
||||
message.cache_key.should_reload(&new_message.cache_key) |
||||
} |
||||
_ => true, |
||||
} |
||||
} |
||||
|
||||
/// Get the duration of this source, if any.
|
||||
async fn duration(&self) -> Option<Duration> { |
||||
match self { |
||||
Self::File(file) => load_audio_info(file).await.duration, |
||||
Self::Message(message) => { |
||||
if let MediaMessage::Audio(content) = &message.message { |
||||
content.info.as_deref().and_then(|info| info.duration) |
||||
} else { |
||||
None |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Get the waveform representation of this source, if any.
|
||||
async fn waveform(&self) -> Option<Vec<f32>> { |
||||
match self { |
||||
Self::File(file) => generate_waveform(file, None).await, |
||||
Self::Message(message) => { |
||||
if let MediaMessage::Audio(content) = &message.message { |
||||
content.normalized_waveform() |
||||
} else { |
||||
None |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Get a file to play this source.
|
||||
async fn to_file(&self) -> Result<File, MediaFileError> { |
||||
match self { |
||||
Self::File(file) => Ok(file.clone().into()), |
||||
Self::Message(message) => { |
||||
let Some(session) = message.session.upgrade() else { |
||||
return Err(MediaFileError::NoSession); |
||||
}; |
||||
|
||||
message |
||||
.message |
||||
.clone() |
||||
.into_tmp_file(&session.client()) |
||||
.await |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// The data required to play an audio message.
|
||||
#[derive(Debug, Clone)] |
||||
pub(crate) struct AudioPlayerMessage { |
||||
/// The audio message.
|
||||
pub(crate) message: MediaMessage, |
||||
/// The session that will be used to load the file.
|
||||
pub(crate) session: glib::WeakRef<Session>, |
||||
/// The cache key for the audio message.
|
||||
///
|
||||
/// The audio is only reloaded if the cache key changes. This is to
|
||||
/// avoid reloading the audio when the local echo is updated to a remote
|
||||
/// echo.
|
||||
pub(crate) cache_key: MessageCacheKey, |
||||
} |
||||
|
||||
impl AudioPlayerMessage { |
||||
/// Construct a new `AudioPlayerMessage`.
|
||||
pub(crate) fn new( |
||||
message: MediaMessage, |
||||
session: &Session, |
||||
cache_key: MessageCacheKey, |
||||
) -> Self { |
||||
let session_weak = glib::WeakRef::new(); |
||||
session_weak.set(Some(session)); |
||||
|
||||
Self { |
||||
message, |
||||
session: session_weak, |
||||
cache_key, |
||||
} |
||||
} |
||||
} |
||||
@ -0,0 +1,453 @@
|
||||
use adw::prelude::*; |
||||
use gtk::{ |
||||
gdk, glib, |
||||
glib::{clone, closure_local}, |
||||
graphene, gsk, |
||||
subclass::prelude::*, |
||||
}; |
||||
use tracing::error; |
||||
|
||||
use super::waveform_paintable::WaveformPaintable; |
||||
|
||||
/// The height of the waveform.
|
||||
pub(super) const WAVEFORM_HEIGHT: f32 = 60.0; |
||||
/// The height of the waveform, as an integer.
|
||||
pub(super) const WAVEFORM_HEIGHT_I32: i32 = 60; |
||||
/// The duration of the animation, in milliseconds.
|
||||
const ANIMATION_DURATION: u32 = 250; |
||||
/// The error margin when comparing two `f32`s.
|
||||
const F32_ERROR_MARGIN: f32 = 0.0001; |
||||
|
||||
mod imp { |
||||
use std::{ |
||||
cell::{Cell, OnceCell, RefCell}, |
||||
sync::LazyLock, |
||||
}; |
||||
|
||||
use glib::subclass::Signal; |
||||
|
||||
use super::*; |
||||
|
||||
#[derive(Debug, Default, glib::Properties)] |
||||
#[properties(wrapper_type = super::Waveform)] |
||||
pub struct Waveform { |
||||
/// The paintable that draws the waveform.
|
||||
#[property(get)] |
||||
paintable: WaveformPaintable, |
||||
/// The current position in the audio stream.
|
||||
///
|
||||
/// Must be a value between 0 and 1.
|
||||
#[property(get, set = Self::set_position, explicit_notify, minimum = 0.0, maximum = 1.0)] |
||||
position: Cell<f32>, |
||||
/// The animation for the transition between waveforms.
|
||||
animation: OnceCell<adw::TimedAnimation>, |
||||
/// The current hover position, if any.
|
||||
hover_position: Cell<Option<f32>>, |
||||
/// The cached paintable.
|
||||
///
|
||||
/// We only need to redraw it when the waveform changes of the widget is
|
||||
/// resized.
|
||||
paintable_cache: RefCell<Option<gdk::Paintable>>, |
||||
} |
||||
|
||||
#[glib::object_subclass] |
||||
impl ObjectSubclass for Waveform { |
||||
const NAME: &'static str = "Waveform"; |
||||
type Type = super::Waveform; |
||||
type ParentType = gtk::Widget; |
||||
|
||||
fn class_init(klass: &mut Self::Class) { |
||||
klass.set_css_name("waveform"); |
||||
klass.set_accessible_role(gtk::AccessibleRole::Slider); |
||||
} |
||||
} |
||||
|
||||
#[glib::derived_properties] |
||||
impl ObjectImpl for Waveform { |
||||
fn signals() -> &'static [Signal] { |
||||
static SIGNALS: LazyLock<Vec<Signal>> = LazyLock::new(|| { |
||||
vec![ |
||||
Signal::builder("seek") |
||||
.param_types([f32::static_type()]) |
||||
.build(), |
||||
] |
||||
}); |
||||
SIGNALS.as_ref() |
||||
} |
||||
|
||||
fn constructed(&self) { |
||||
self.parent_constructed(); |
||||
|
||||
self.init_event_controllers(); |
||||
|
||||
let obj = self.obj(); |
||||
obj.set_focusable(true); |
||||
obj.update_property(&[ |
||||
gtk::accessible::Property::ValueMin(0.0), |
||||
gtk::accessible::Property::ValueMax(1.0), |
||||
gtk::accessible::Property::ValueNow(0.0), |
||||
]); |
||||
|
||||
self.paintable.connect_invalidate_contents(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |_| { |
||||
obj.queue_draw(); |
||||
} |
||||
)); |
||||
} |
||||
} |
||||
|
||||
impl WidgetImpl for Waveform { |
||||
fn request_mode(&self) -> gtk::SizeRequestMode { |
||||
gtk::SizeRequestMode::HeightForWidth |
||||
} |
||||
|
||||
fn measure(&self, orientation: gtk::Orientation, _for_size: i32) -> (i32, i32, i32, i32) { |
||||
if orientation == gtk::Orientation::Vertical { |
||||
// The height is fixed.
|
||||
(WAVEFORM_HEIGHT_I32, WAVEFORM_HEIGHT_I32, -1, -1) |
||||
} else { |
||||
// We accept any width, the optimal width is the default width of the paintable.
|
||||
(0, self.paintable.intrinsic_width(), -1, -1) |
||||
} |
||||
} |
||||
|
||||
fn size_allocate(&self, width: i32, _height: i32, _baseline: i32) { |
||||
if self |
||||
.paintable_cache |
||||
.borrow() |
||||
.as_ref() |
||||
.is_some_and(|paintable| width != paintable.intrinsic_width()) |
||||
{ |
||||
// We need to adjust the waveform to the new width.
|
||||
self.paintable_cache.take(); |
||||
self.obj().queue_draw(); |
||||
} |
||||
} |
||||
|
||||
fn snapshot(&self, snapshot: >k::Snapshot) { |
||||
let obj = self.obj(); |
||||
let width = obj.width(); |
||||
|
||||
if width <= 0 { |
||||
return; |
||||
} |
||||
|
||||
let Some(paintable) = self.paintable() else { |
||||
return; |
||||
}; |
||||
|
||||
let width = width as f32; |
||||
let is_rtl = obj.direction() == gtk::TextDirection::Rtl; |
||||
|
||||
// Use the waveform as a mask that we will apply to the colored rectangles
|
||||
// below.
|
||||
snapshot.push_mask(gsk::MaskMode::Alpha); |
||||
snapshot.save(); |
||||
|
||||
// Invert the paintable horizontally if we are in right-to-left direction.
|
||||
if is_rtl { |
||||
snapshot.translate(&graphene::Point::new(width, 0.0)); |
||||
snapshot.scale(-1.0, 1.0); |
||||
} |
||||
|
||||
paintable.snapshot(snapshot, width.into(), WAVEFORM_HEIGHT.into()); |
||||
|
||||
snapshot.restore(); |
||||
snapshot.pop(); |
||||
|
||||
// Paint three colored rectangles to mark the two positions:
|
||||
//
|
||||
// ----------------------------
|
||||
// | played | hover | remaining |
|
||||
// ----------------------------
|
||||
//
|
||||
// The "played" part stops at the first of the `position` or the
|
||||
// `hover_position` and the "hover" part stops at the last of the
|
||||
// `position` or the `hover_position`.
|
||||
//
|
||||
// The order is inverted in right-to-left direction, and any rectangle that is
|
||||
// not visible (i.e. has a width of 0) is not drawn.
|
||||
let (start, end) = if is_rtl { (width, 0.0) } else { (0.0, width) }; |
||||
let mut position = self.position.get() * width; |
||||
if is_rtl { |
||||
position = width - position; |
||||
} |
||||
let hover_position = self.hover_position.get(); |
||||
|
||||
let (played_end, hover_end) = if let Some(hover_position) = hover_position { |
||||
if (!is_rtl && hover_position > position) || (is_rtl && hover_position < position) { |
||||
(position, hover_position) |
||||
} else { |
||||
(hover_position, position) |
||||
} |
||||
} else { |
||||
(position, position) |
||||
}; |
||||
|
||||
let color = obj.color(); |
||||
let is_high_contrast = adw::StyleManager::default().is_high_contrast(); |
||||
|
||||
if (played_end - start).abs() > F32_ERROR_MARGIN { |
||||
let rect = graphene::Rect::new(start, 0.0, played_end - start, WAVEFORM_HEIGHT); |
||||
snapshot.append_color(&color, &rect); |
||||
} |
||||
|
||||
if (hover_end - played_end).abs() > F32_ERROR_MARGIN { |
||||
let color = color.with_alpha(if is_high_contrast { 0.7 } else { 0.45 }); |
||||
|
||||
let rect = |
||||
graphene::Rect::new(played_end, 0.0, hover_end - played_end, WAVEFORM_HEIGHT); |
||||
snapshot.append_color(&color, &rect); |
||||
} |
||||
|
||||
if (hover_end - end).abs() > F32_ERROR_MARGIN { |
||||
let color = color.with_alpha(if is_high_contrast { 0.4 } else { 0.2 }); |
||||
|
||||
let rect = graphene::Rect::new(hover_end, 0.0, end - hover_end, WAVEFORM_HEIGHT); |
||||
snapshot.append_color(&color, &rect); |
||||
} |
||||
|
||||
snapshot.pop(); |
||||
} |
||||
} |
||||
|
||||
impl Waveform { |
||||
/// Set the waveform to display.
|
||||
///
|
||||
/// The values must be normalized between 0 and 1.
|
||||
pub(super) fn set_waveform(&self, waveform: Vec<f32>) { |
||||
let animate_transition = self.paintable.set_waveform(waveform); |
||||
self.paintable_cache.take(); |
||||
|
||||
if animate_transition { |
||||
self.animation().play(); |
||||
} |
||||
} |
||||
|
||||
/// Set the current position in the audio stream.
|
||||
pub(super) fn set_position(&self, position: f32) { |
||||
if (self.position.get() - position).abs() > F32_ERROR_MARGIN { |
||||
return; |
||||
} |
||||
|
||||
self.position.set(position); |
||||
|
||||
let obj = self.obj(); |
||||
obj.update_property(&[gtk::accessible::Property::ValueNow(position.into())]); |
||||
obj.notify_position(); |
||||
obj.queue_draw(); |
||||
} |
||||
|
||||
/// The animation for the waveform change.
|
||||
fn animation(&self) -> &adw::TimedAnimation { |
||||
self.animation.get_or_init(|| { |
||||
adw::TimedAnimation::builder() |
||||
.widget(&*self.obj()) |
||||
.value_to(1.0) |
||||
.duration(ANIMATION_DURATION) |
||||
.target(&adw::PropertyAnimationTarget::new( |
||||
&self.paintable, |
||||
"transition-progress", |
||||
)) |
||||
.easing(adw::Easing::EaseInOutQuad) |
||||
.build() |
||||
}) |
||||
} |
||||
|
||||
// Get the waveform shape as a monochrome paintable.
|
||||
//
|
||||
// If we are not in a transition phase, we cache it because the shape only
|
||||
// changes if the widget is resized.
|
||||
fn paintable(&self) -> Option<gdk::Paintable> { |
||||
let transition_is_ongoing = self |
||||
.animation |
||||
.get() |
||||
.is_some_and(|animation| animation.state() == adw::AnimationState::Playing); |
||||
|
||||
if !transition_is_ongoing && let Some(paintable) = self.paintable_cache.borrow().clone() |
||||
{ |
||||
return Some(paintable); |
||||
} |
||||
|
||||
let width = self.obj().width() as f32; |
||||
let cache_snapshot = gtk::Snapshot::new(); |
||||
|
||||
self.paintable |
||||
.snapshot(&cache_snapshot, width.into(), WAVEFORM_HEIGHT.into()); |
||||
let Some(paintable) = |
||||
cache_snapshot.to_paintable(Some(&graphene::Size::new(width, WAVEFORM_HEIGHT))) |
||||
else { |
||||
error!("Could not convert snapshot to paintable"); |
||||
return None; |
||||
}; |
||||
|
||||
if !transition_is_ongoing { |
||||
self.paintable_cache.replace(Some(paintable.clone())); |
||||
} |
||||
|
||||
Some(paintable) |
||||
} |
||||
|
||||
/// Convert the given x coordinate on the waveform to a relative
|
||||
/// position.
|
||||
///
|
||||
/// Takes into account the text direction.
|
||||
///
|
||||
/// Returns a value between 0 and 1.
|
||||
fn x_coord_to_position(&self, x: f64) -> f32 { |
||||
let obj = self.obj(); |
||||
|
||||
let mut position = (x / f64::from(obj.width())) as f32; |
||||
|
||||
if obj.direction() == gtk::TextDirection::Rtl { |
||||
position = 1.0 - position; |
||||
} |
||||
|
||||
position |
||||
} |
||||
|
||||
/// Emit the `seek` signal with the given new position.
|
||||
fn emit_seek(&self, new_position: f32) { |
||||
self.obj().emit_by_name::<()>("seek", &[&new_position]); |
||||
} |
||||
|
||||
/// Initialize the event controllers on the waveform.
|
||||
fn init_event_controllers(&self) { |
||||
let obj = self.obj(); |
||||
|
||||
// Show mouse hover effect.
|
||||
let motion = gtk::EventControllerMotion::builder() |
||||
.name("waveform-motion") |
||||
.build(); |
||||
motion.connect_motion(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |_, x, _| { |
||||
obj.imp().hover_position.set(Some(x as f32)); |
||||
obj.queue_draw(); |
||||
} |
||||
)); |
||||
motion.connect_leave(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |_| { |
||||
obj.imp().hover_position.take(); |
||||
obj.queue_draw(); |
||||
} |
||||
)); |
||||
obj.add_controller(motion); |
||||
|
||||
// Handle dragging to seek. This also handles clicks because a click triggers a
|
||||
// drag begin.
|
||||
let drag = gtk::GestureDrag::builder() |
||||
.name("waveform-drag") |
||||
.button(0) |
||||
.build(); |
||||
drag.connect_drag_begin(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |gesture, x, _| { |
||||
gesture.set_state(gtk::EventSequenceState::Claimed); |
||||
|
||||
if !obj.has_focus() { |
||||
obj.grab_focus(); |
||||
} |
||||
|
||||
let imp = obj.imp(); |
||||
imp.emit_seek(imp.x_coord_to_position(x)); |
||||
} |
||||
)); |
||||
drag.connect_drag_update(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |gesture, offset_x, _| { |
||||
gesture.set_state(gtk::EventSequenceState::Claimed); |
||||
|
||||
if !obj.has_focus() { |
||||
obj.grab_focus(); |
||||
} |
||||
|
||||
let x = gesture |
||||
.start_point() |
||||
.expect("ongoing drag should have start point") |
||||
.0 |
||||
+ offset_x; |
||||
|
||||
let imp = obj.imp(); |
||||
imp.emit_seek(imp.x_coord_to_position(x)); |
||||
} |
||||
)); |
||||
obj.add_controller(drag); |
||||
|
||||
// Handle left and right key presses to seek.
|
||||
let key = gtk::EventControllerKey::builder() |
||||
.name("waveform-key") |
||||
.build(); |
||||
key.connect_key_released(clone!( |
||||
#[weak] |
||||
obj, |
||||
move |_, keyval, _, _| { |
||||
let mut delta = match keyval { |
||||
gdk::Key::Left | gdk::Key::KP_Left => -0.05, |
||||
gdk::Key::Right | gdk::Key::KP_Right => 0.05, |
||||
_ => return, |
||||
}; |
||||
|
||||
if obj.direction() == gtk::TextDirection::Rtl { |
||||
delta = -delta; |
||||
} |
||||
|
||||
let imp = obj.imp(); |
||||
let new_position = imp.position.get() + delta; |
||||
|
||||
if (0.0..=1.0).contains(&new_position) { |
||||
imp.emit_seek(new_position); |
||||
} |
||||
} |
||||
)); |
||||
obj.add_controller(key); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Boilerplate: generates the public `Waveform` GObject type that wraps the
// private `imp::Waveform` implementation above.
glib::wrapper! {
    /// A widget displaying a waveform.
    ///
    /// This widget supports seeking with the keyboard and mouse.
    pub struct Waveform(ObjectSubclass<imp::Waveform>)
        @extends gtk::Widget,
        @implements gtk::Accessible, gtk::Buildable, gtk::ConstraintTarget;
}
||||
|
||||
impl Waveform { |
||||
/// Create a new empty `Waveform`.
|
||||
pub fn new() -> Self { |
||||
glib::Object::new() |
||||
} |
||||
|
||||
/// Set the waveform to display.
|
||||
///
|
||||
/// The values must be normalized between 0 and 1.
|
||||
pub(crate) fn set_waveform(&self, waveform: Vec<f32>) { |
||||
self.imp().set_waveform(waveform); |
||||
} |
||||
|
||||
/// Connect to the signal emitted when the user seeks another position.
|
||||
pub fn connect_seek<F: Fn(&Self, f32) + 'static>(&self, f: F) -> glib::SignalHandlerId { |
||||
self.connect_closure( |
||||
"seek", |
||||
true, |
||||
closure_local!(move |obj: Self, position: f32| { |
||||
f(&obj, position); |
||||
}), |
||||
) |
||||
} |
||||
} |
||||
|
||||
impl Default for Waveform { |
||||
fn default() -> Self { |
||||
Self::new() |
||||
} |
||||
} |
||||
@ -0,0 +1,195 @@
|
||||
use std::borrow::Cow; |
||||
|
||||
use gtk::{gdk, glib, graphene, prelude::*, subclass::prelude::*}; |
||||
|
||||
use super::waveform::{WAVEFORM_HEIGHT, WAVEFORM_HEIGHT_I32}; |
||||
use crate::utils::resample_slice; |
||||
|
||||
/// The width of the bars in the waveform.
// NOTE(review): units are presumably logical pixels in the snapshot
// coordinate space — confirm against the GTK snapshot API.
const BAR_WIDTH: f32 = 2.0;
/// The horizontal padding around bars in the waveform.
const BAR_HORIZONTAL_PADDING: f32 = 1.0;
/// The full width of a bar, including its padding.
const BAR_FULL_WIDTH: f32 = BAR_WIDTH + 2.0 * BAR_HORIZONTAL_PADDING;
/// The minimum height of the bars in the waveform.
///
/// We do not want to have holes in the waveform so we restrict the minimum
/// height.
const BAR_MIN_HEIGHT: f32 = 2.0;
/// The waveform used as fallback.
///
/// It will generate a full waveform.
const WAVEFORM_FALLBACK: &[f32] = &[1.0];
||||
|
||||
mod imp { |
||||
use std::cell::{Cell, RefCell}; |
||||
|
||||
use super::*; |
||||
|
||||
#[derive(Debug, glib::Properties)] |
||||
#[properties(wrapper_type = super::WaveformPaintable)] |
||||
pub struct WaveformPaintable { |
||||
/// The waveform to display.
|
||||
///
|
||||
/// The values must be normalized between 0 and 1.
|
||||
waveform: RefCell<Cow<'static, [f32]>>, |
||||
/// The previous waveform that was displayed, if any.
|
||||
///
|
||||
/// Use for the transition between waveforms.
|
||||
previous_waveform: RefCell<Option<Cow<'static, [f32]>>>, |
||||
/// The progress of the transition between waveforms.
|
||||
#[property(get, set = Self::set_transition_progress, explicit_notify)] |
||||
transition_progress: Cell<f64>, |
||||
} |
||||
|
||||
impl Default for WaveformPaintable { |
||||
fn default() -> Self { |
||||
Self { |
||||
waveform: RefCell::new(Cow::Borrowed(WAVEFORM_FALLBACK)), |
||||
previous_waveform: Default::default(), |
||||
transition_progress: Cell::new(1.0), |
||||
} |
||||
} |
||||
} |
||||
|
||||
#[glib::object_subclass] |
||||
impl ObjectSubclass for WaveformPaintable { |
||||
const NAME: &'static str = "WaveformPaintable"; |
||||
type Type = super::WaveformPaintable; |
||||
type Interfaces = (gdk::Paintable,); |
||||
} |
||||
|
||||
#[glib::derived_properties] |
||||
impl ObjectImpl for WaveformPaintable {} |
||||
|
||||
impl PaintableImpl for WaveformPaintable { |
||||
fn intrinsic_width(&self) -> i32 { |
||||
(self.waveform.borrow().len() as f32 * BAR_FULL_WIDTH) as i32 |
||||
} |
||||
|
||||
fn intrinsic_height(&self) -> i32 { |
||||
WAVEFORM_HEIGHT_I32 |
||||
} |
||||
|
||||
fn snapshot(&self, snapshot: &gdk::Snapshot, width: f64, _height: f64) { |
||||
if width <= 0.0 { |
||||
return; |
||||
} |
||||
|
||||
let exact_samples_needed = width as f32 / BAR_FULL_WIDTH; |
||||
|
||||
// If the number of samples has a fractional part, compute a padding to center
|
||||
// the waveform horizontally in the paintable.
|
||||
let waveform_start_padding = (exact_samples_needed.fract() * BAR_FULL_WIDTH).trunc(); |
||||
// We are sure that the number of samples is positive.
|
||||
#[allow(clippy::cast_sign_loss)] |
||||
let samples_needed = exact_samples_needed.trunc() as usize; |
||||
|
||||
let mut waveform = |
||||
resample_slice(self.waveform.borrow().as_ref(), samples_needed).into_owned(); |
||||
|
||||
// If there is a previous waveform, we have an ongoing transition.
|
||||
if let Some(previous_waveform) = self.previous_waveform.borrow().as_ref() |
||||
&& *previous_waveform != waveform |
||||
{ |
||||
let previous_waveform = resample_slice(previous_waveform, samples_needed); |
||||
let progress = self.transition_progress.get() as f32; |
||||
|
||||
// Compute the current waveform for the ongoing transition.
|
||||
waveform = waveform |
||||
.into_iter() |
||||
.zip(previous_waveform.iter()) |
||||
.map(|(current, &previous)| { |
||||
(((current - previous) * progress) + previous).clamp(0.0, 1.0) |
||||
}) |
||||
.collect(); |
||||
} |
||||
|
||||
for (pos, value) in waveform.into_iter().enumerate() { |
||||
if value > 1.0 { |
||||
tracing::error!("Waveform sample value is higher than 1: {value}"); |
||||
} |
||||
|
||||
let x = waveform_start_padding + pos as f32 * (BAR_FULL_WIDTH); |
||||
let height = (WAVEFORM_HEIGHT * value).max(BAR_MIN_HEIGHT); |
||||
// Center the bar vertically.
|
||||
let y = (WAVEFORM_HEIGHT - height) / 2.0; |
||||
|
||||
let rect = graphene::Rect::new(x, y, BAR_WIDTH, height); |
||||
snapshot.append_color(&gdk::RGBA::WHITE, &rect); |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl WaveformPaintable { |
||||
/// Set the values of the bars to display.
|
||||
///
|
||||
/// The values must be normalized between 0 and 1.
|
||||
///
|
||||
/// Returns whether the waveform changed.
|
||||
pub(super) fn set_waveform(&self, waveform: Vec<f32>) -> bool { |
||||
let waveform = if waveform.is_empty() { |
||||
Cow::Borrowed(WAVEFORM_FALLBACK) |
||||
} else { |
||||
Cow::Owned(waveform) |
||||
}; |
||||
|
||||
if *self.waveform.borrow() == waveform { |
||||
return false; |
||||
} |
||||
|
||||
let previous = self.waveform.replace(waveform); |
||||
self.previous_waveform.replace(Some(previous)); |
||||
|
||||
self.obj().invalidate_contents(); |
||||
|
||||
true |
||||
} |
||||
|
||||
/// Set the progress of the transition between waveforms.
|
||||
fn set_transition_progress(&self, progress: f64) { |
||||
if (self.transition_progress.get() - progress).abs() > 0.000_001 { |
||||
return; |
||||
} |
||||
|
||||
self.transition_progress.set(progress); |
||||
|
||||
if (progress - 1.0).abs() > 0.000_001 { |
||||
// This is the end of the transition, we can drop the previous waveform.
|
||||
self.previous_waveform.take(); |
||||
} |
||||
|
||||
let obj = self.obj(); |
||||
obj.notify_transition_progress(); |
||||
obj.invalidate_contents(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// Boilerplate: generates the public `WaveformPaintable` GObject type wrapping
// the private `imp::WaveformPaintable` implementation above.
glib::wrapper! {
    /// A paintable displaying a waveform.
    pub struct WaveformPaintable(ObjectSubclass<imp::WaveformPaintable>)
        @implements gdk::Paintable;
}
||||
|
||||
impl WaveformPaintable { |
||||
/// Create a new empty `WaveformPaintable`.
|
||||
pub fn new() -> Self { |
||||
glib::Object::new() |
||||
} |
||||
|
||||
/// Set the waveform to display.
|
||||
///
|
||||
/// The values must be normalized between 0 and 1.
|
||||
///
|
||||
/// Returns whether the waveform changed.
|
||||
pub(crate) fn set_waveform(&self, waveform: Vec<f32>) -> bool { |
||||
self.imp().set_waveform(waveform) |
||||
} |
||||
} |
||||
|
||||
impl Default for WaveformPaintable { |
||||
fn default() -> Self { |
||||
Self::new() |
||||
} |
||||
} |
||||
@ -1,40 +1,26 @@
|
||||
using Gtk 4.0; |
||||
using Adw 1; |
||||
|
||||
template $ContentMessageAudio: Adw.Bin { |
||||
Gtk.Box { |
||||
orientation: vertical; |
||||
|
||||
Gtk.Box { |
||||
margin-top: 6; |
||||
spacing: 6; |
||||
|
||||
Gtk.Image { |
||||
visible: bind template.compact; |
||||
icon-name: "audio-symbolic"; |
||||
} |
||||
|
||||
Gtk.Label { |
||||
ellipsize: end; |
||||
xalign: 0.0; |
||||
hexpand: true; |
||||
label: bind template.filename; |
||||
} |
||||
template $ContentMessageAudio: Gtk.Box { |
||||
orientation: vertical; |
||||
|
||||
[end] |
||||
Adw.Spinner state_spinner { |
||||
height-request: 20; |
||||
width-request: 20; |
||||
} |
||||
Gtk.Box { |
||||
visible: bind template.compact; |
||||
margin-top: 6; |
||||
spacing: 6; |
||||
|
||||
[end] |
||||
Gtk.Image state_error { |
||||
icon-name: "error-symbolic"; |
||||
} |
||||
Gtk.Image { |
||||
icon-name: "audio-symbolic"; |
||||
} |
||||
|
||||
$AudioPlayer player { |
||||
visible: bind template.compact inverted; |
||||
Gtk.Label { |
||||
ellipsize: end; |
||||
xalign: 0.0; |
||||
hexpand: true; |
||||
label: bind template.filename; |
||||
} |
||||
} |
||||
|
||||
$AudioPlayer player { |
||||
visible: bind template.compact inverted; |
||||
} |
||||
} |
||||
|
||||
@ -0,0 +1,192 @@
|
||||
//! Collection of methods for audio.
|
||||
|
||||
use std::{ |
||||
sync::{Arc, Mutex}, |
||||
time::Duration, |
||||
}; |
||||
|
||||
use futures_channel::oneshot; |
||||
use gst::prelude::*; |
||||
use gtk::{gio, glib, prelude::*}; |
||||
use matrix_sdk::attachment::BaseAudioInfo; |
||||
use tracing::warn; |
||||
|
||||
use super::load_gstreamer_media_info; |
||||
use crate::utils::resample_slice; |
||||
|
||||
/// Load information for the audio in the given file.
|
||||
pub(crate) async fn load_audio_info(file: &gio::File) -> BaseAudioInfo { |
||||
let mut info = BaseAudioInfo::default(); |
||||
|
||||
let Some(media_info) = load_gstreamer_media_info(file).await else { |
||||
return info; |
||||
}; |
||||
|
||||
info.duration = media_info.duration().map(Into::into); |
||||
info |
||||
} |
||||
|
||||
/// Generate a waveform for the given audio file.
///
/// The returned waveform should contain between 30 and 110 samples with a value
/// between 0 and 1.
///
/// Returns `None` when the pipeline cannot be built or started, or when no
/// samples could be collected.
pub(crate) async fn generate_waveform(
    file: &gio::File,
    duration: Option<Duration>,
) -> Option<Vec<f32>> {
    // According to MSC3246, we want at least 30 values and at most 120 values. It
    // should also allow us to have enough samples for drawing our waveform.
    let interval = duration
        .and_then(|duration| {
            // Try to get around 1 sample per second, except if the duration is too short or
            // too long.
            match duration.as_secs() {
                0..30 => duration.checked_div(30),
                30..110 => Some(Duration::from_secs(1)),
                _ => duration.checked_div(110),
            }
        })
        .unwrap_or_else(|| Duration::from_secs(1));

    // Create our pipeline from a pipeline description string.
    // The `level` element posts per-interval peak messages on the bus; the
    // interval property is expressed in nanoseconds.
    let pipeline = match gst::parse::launch(&format!(
        "uridecodebin uri={} ! audioconvert ! audio/x-raw,channels=1 ! level name=level interval={} ! fakesink qos=false sync=false",
        file.uri(),
        interval.as_nanos()
    )) {
        Ok(pipeline) => pipeline
            .downcast::<gst::Pipeline>()
            .expect("GstElement should be a GstPipeline"),
        Err(error) => {
            warn!("Could not create GstPipeline for audio waveform: {error}");
            return None;
        }
    };

    // One-shot channel used to wake this future when the bus watch is done
    // (EOS or error).
    let (sender, receiver) = oneshot::channel();
    let sender = Arc::new(Mutex::new(Some(sender)));
    let samples = Arc::new(Mutex::new(vec![]));
    let bus = pipeline.bus().expect("GstPipeline should have a GstBus");

    let samples_clone = samples.clone();
    // The guard must stay alive for the whole pipeline run; dropping it would
    // remove the watch.
    let _bus_guard = bus
        .add_watch(move |_, message| {
            match message.view() {
                gst::MessageView::Eos(_) => {
                    // We are done collecting the samples.
                    send_empty_signal(&sender);
                    glib::ControlFlow::Break
                }
                gst::MessageView::Error(error) => {
                    warn!("Could not generate audio waveform: {error}");
                    send_empty_signal(&sender);
                    glib::ControlFlow::Break
                }
                gst::MessageView::Element(element) => {
                    if let Some(structure) = element.structure()
                        && structure.has_name("level")
                    {
                        // Single channel (forced by the caps above), so only
                        // the first peak value matters.
                        let peaks_array = structure
                            .get::<&glib::ValueArray>("peak")
                            .expect("peak value should be a GValueArray");
                        let peak = peaks_array[0]
                            .get::<f64>()
                            .expect("GValueArray value should be a double");

                        match samples_clone.lock() {
                            Ok(mut samples) => {
                                // NOTE(review): a NaN peak maps to 0 dB, i.e.
                                // full amplitude — presumably a deliberate
                                // fallback; confirm against the level element
                                // behavior.
                                let value_db = if peak.is_nan() { 0.0 } else { peak };
                                // Convert the decibels to a relative amplitude, to get a value
                                // between 0 and 1.
                                let value = 10.0_f64.powf(value_db / 20.0);

                                samples.push(value);
                            }
                            Err(error) => {
                                warn!("Failed to lock audio waveform samples mutex: {error}");
                            }
                        }
                    }
                    glib::ControlFlow::Continue
                }
                _ => glib::ControlFlow::Continue,
            }
        })
        .expect("Adding GstBus watch should succeed");

    match pipeline.set_state(gst::State::Playing) {
        Ok(_) => {
            // Wait until the bus watch reports EOS or an error.
            let _ = receiver.await;
        }
        Err(error) => {
            warn!("Could not start GstPipeline for audio waveform: {error}");
        }
    }

    // Clean up pipeline.
    let _ = pipeline.set_state(gst::State::Null);
    bus.set_flushing(true);

    let waveform = match samples.lock() {
        Ok(mut samples) => std::mem::take(&mut *samples),
        Err(error) => {
            warn!("Failed to lock audio waveform samples mutex: {error}");
            return None;
        }
    };

    Some(normalize_waveform(waveform)).filter(|waveform| !waveform.is_empty())
}
||||
|
||||
/// Try to send an empty signal through the given sender.
|
||||
fn send_empty_signal(sender: &Mutex<Option<oneshot::Sender<()>>>) { |
||||
let mut sender = match sender.lock() { |
||||
Ok(sender) => sender, |
||||
Err(error) => { |
||||
warn!("Failed to lock audio waveform signal mutex: {error}"); |
||||
return; |
||||
} |
||||
}; |
||||
|
||||
if let Some(sender) = sender.take() |
||||
&& sender.send(()).is_err() |
||||
{ |
||||
warn!("Failed to send audio waveform end through channel"); |
||||
} |
||||
} |
||||
|
||||
/// Normalize the given waveform to have between 30 and 120 samples with a value
|
||||
/// between 0 and 1.
|
||||
///
|
||||
/// All the samples in the waveform must be positive or negative. If they are
|
||||
/// mixed, this will change the waveform because it uses the absolute value of
|
||||
/// the sample.
|
||||
///
|
||||
/// If the waveform was empty, returns an empty vec.
|
||||
pub(crate) fn normalize_waveform(waveform: Vec<f64>) -> Vec<f32> { |
||||
if waveform.is_empty() { |
||||
return vec![]; |
||||
} |
||||
|
||||
let max = waveform |
||||
.iter() |
||||
.copied() |
||||
.map(f64::abs) |
||||
.reduce(f64::max) |
||||
.expect("iterator should contain at least one value"); |
||||
|
||||
// Normalize between 0 and 1, with the highest value as 1.
|
||||
let mut normalized = waveform |
||||
.into_iter() |
||||
.map(f64::abs) |
||||
.map(|value| if max == 0.0 { value } else { value / max } as f32) |
||||
.collect::<Vec<_>>(); |
||||
|
||||
match normalized.len() { |
||||
0..30 => normalized = resample_slice(&normalized, 30).into_owned(), |
||||
30..120 => {} |
||||
_ => normalized = resample_slice(&normalized, 120).into_owned(), |
||||
} |
||||
|
||||
normalized |
||||
} |
||||
Loading…
Reference in new issue