use adw::prelude::*;
use relm4::{WorkerController, prelude::*};

use crate::{
    icon_names,
    open_dialog::{OpenDialog, OpenDialogMsg, OpenDialogOutput},
    player::{Player, PlayerMsg, PlayerOutput},
    preferences_dialog::{PreferencesDialog, PreferencesDialogMsg},
    subtitle_selection_dialog::{
        SubtitleSelectionDialog, SubtitleSelectionDialogMsg, SubtitleSelectionDialogOutput,
        SubtitleSettings,
    },
    subtitle_view::{SubtitleView, SubtitleViewMsg, SubtitleViewOutput},
    subtitles::{
        MetadataCollection, SUBTITLE_TRACKS, StreamIndex, SubtitleCue, SubtitleTrack,
        extraction::{SubtitleExtractor, SubtitleExtractorMsg, SubtitleExtractorOutput},
        state::SubtitleState,
    },
    transcript::{Transcript, TranscriptMsg, TranscriptOutput},
    translation::{DeeplTranslator, deepl::DeeplTranslatorMsg},
    util::Tracker,
};

pub struct App {
    root: adw::ApplicationWindow,
    transcript: Controller<Transcript>,
    player: Controller<Player>,
    subtitle_view: Controller<SubtitleView>,
    extractor: WorkerController<SubtitleExtractor>,
    deepl_translator: AsyncController<DeeplTranslator>,
    preferences: Controller<PreferencesDialog>,
    open_url_dialog: Controller<OpenDialog>,
    subtitle_selection_dialog: Option<Controller<SubtitleSelectionDialog>>,

    primary_subtitle_state: SubtitleState,
    secondary_subtitle_state: SubtitleState,

    // for auto-pausing
    autopaused: bool,
    hovering_primary_cue: bool,
}

#[derive(Debug)]
pub enum AppMsg {
    AddCue(StreamIndex, SubtitleCue),
    SubtitleExtractionComplete,
    ApplySubtitleSettings(SubtitleSettings),
    PositionUpdate(gst::ClockTime),
    SetHoveringSubtitleCue(bool),
    ShowUrlOpenDialog,
    ShowPreferences,
    ShowSubtitleSelectionDialog,
    Play {
        url: String,
        metadata: MetadataCollection,
        whisper_stream_index: Option<StreamIndex>,
    },
}

#[relm4::component(pub)]
impl SimpleComponent for App {
    type Init = ();
    type Input = AppMsg;
    type Output = ();

    view! {
        #[root]
        adw::ApplicationWindow {
            set_title: Some("lleap"),
            set_default_width: 800,
            set_default_height: 600,

            adw::ToolbarView {
                add_top_bar = &adw::HeaderBar {
                    pack_start = &gtk::Button {
                        set_label: "Open...",
                        connect_clicked => AppMsg::ShowUrlOpenDialog,
                    },
                    pack_end = &gtk::Button {
                        set_icon_name: icon_names::SETTINGS,
                        connect_clicked => AppMsg::ShowPreferences,
                    }
                },

                #[wrap(Some)]
                set_content = &gtk::Paned {
                    set_orientation: gtk::Orientation::Vertical,

                    #[wrap(Some)]
                    set_start_child = &gtk::Paned {
                        set_start_child: Some(model.player.widget()),
                        set_end_child: Some(model.transcript.widget()),
                    },

                    set_end_child: Some(model.subtitle_view.widget()),
                    set_shrink_end_child: false,
                }
            }
        }
    }

    fn init(
        _init: Self::Init,
        root: Self::Root,
        sender: ComponentSender<Self>,
    ) -> ComponentParts<Self> {
        let subtitle_view = SubtitleView::builder().launch(()).forward(
            sender.input_sender(),
            |output| match output {
                SubtitleViewOutput::SetHoveringCue(val) => AppMsg::SetHoveringSubtitleCue(val),
            },
        );

        let player = Player::builder()
            .launch(())
            .forward(sender.input_sender(), |output| match output {
                PlayerOutput::PositionUpdate(pos) => AppMsg::PositionUpdate(pos),
                PlayerOutput::SubtitleSelectionButtonPressed => AppMsg::ShowSubtitleSelectionDialog,
            });

        let transcript = Transcript::builder()
            .launch(())
            .forward(player.sender(), |msg| match msg {
                TranscriptOutput::SeekTo(pos) => PlayerMsg::SeekTo(pos),
            });

        let extractor = SubtitleExtractor::builder().detach_worker(()).forward(
            sender.input_sender(),
            |output| match output {
                SubtitleExtractorOutput::NewCue(stream_index, cue) => {
                    AppMsg::AddCue(stream_index, cue)
                }
                SubtitleExtractorOutput::ExtractionComplete => AppMsg::SubtitleExtractionComplete,
            },
        );

        let deepl_translator = DeeplTranslator::builder().launch(()).detach();

        let preferences = PreferencesDialog::builder()
            .launch(root.clone().into())
            .detach();

        let open_url_dialog =
            OpenDialog::builder().launch(root.clone().into()).forward(
                sender.input_sender(),
                |output| match output {
                    OpenDialogOutput::Play {
                        url,
                        metadata,
                        whisper_stream_index,
                    } => AppMsg::Play {
                        url,
                        metadata,
                        whisper_stream_index,
                    },
                },
            );

        let model = Self {
            root: root.clone(),
            player,
            transcript,
            subtitle_view,
            extractor,
            deepl_translator,
            preferences,
            open_url_dialog,
            subtitle_selection_dialog: None,
            primary_subtitle_state: SubtitleState::default(),
            secondary_subtitle_state: SubtitleState::default(),
            autopaused: false,
            hovering_primary_cue: false,
        };

        let widgets = view_output!();

        ComponentParts { model, widgets }
    }

    fn update(&mut self, message: Self::Input, sender: ComponentSender<Self>) {
        match message {
            AppMsg::AddCue(stream_ix, cue) => {
                SUBTITLE_TRACKS
                    .write()
                    .get_mut(&stream_ix)
                    .unwrap()
                    .push_cue(cue.clone());
                self.transcript
                    .sender()
                    .send(TranscriptMsg::NewCue(stream_ix, cue))
                    .unwrap();
            }
            AppMsg::SubtitleExtractionComplete => {
                log::info!("Subtitle extraction complete");
            }
            AppMsg::ApplySubtitleSettings(settings) => {
                self.primary_subtitle_state
                    .set_stream_ix(settings.primary_track_ix);
                self.secondary_subtitle_state
                    .set_stream_ix(settings.secondary_track_ix);
                self.transcript
                    .sender()
                    .send(TranscriptMsg::SelectTrack(settings.primary_track_ix))
                    .unwrap();
                self.deepl_translator
                    .sender()
                    .send(DeeplTranslatorMsg::SelectTrack(settings.primary_track_ix))
                    .unwrap();
                self.subtitle_view
                    .sender()
                    .send(SubtitleViewMsg::ApplySubtitleSettings(settings))
                    .unwrap();
            }
            AppMsg::PositionUpdate(position) => {
                self.update_subtitle_states(position);
            }
            AppMsg::SetHoveringSubtitleCue(hovering) => {
                self.hovering_primary_cue = hovering;
                if !hovering && self.autopaused {
                    self.player.sender().send(PlayerMsg::Play).unwrap();
                    self.autopaused = false;
                }
            }
            AppMsg::ShowUrlOpenDialog => {
                self.open_url_dialog
                    .sender()
                    .send(OpenDialogMsg::Show)
                    .unwrap();
            }
            AppMsg::ShowPreferences => {
                self.preferences
                    .sender()
                    .send(PreferencesDialogMsg::Show)
                    .unwrap();
            }
            AppMsg::ShowSubtitleSelectionDialog => {
                if let Some(ref dialog) = self.subtitle_selection_dialog {
                    dialog
                        .sender()
                        .send(SubtitleSelectionDialogMsg::Show)
                        .unwrap();
                }
            }
            AppMsg::Play {
                url,
                metadata,
                whisper_stream_index,
            } => {
                // start playback and kick off subtitle extraction for the new URL
                self.player
                    .sender()
                    .send(PlayerMsg::SetUrl(url.clone()))
                    .unwrap();
                self.extractor
                    .sender()
                    .send(SubtitleExtractorMsg::ExtractFromUrl {
                        url,
                        whisper_stream_index,
                    })
                    .unwrap();

                // recreate the subtitle selection dialog for the newly loaded stream metadata
                let subtitle_selection_dialog = SubtitleSelectionDialog::builder()
                    .launch((self.root.clone().into(), metadata))
                    .forward(sender.input_sender(), |output| match output {
                        SubtitleSelectionDialogOutput::ApplySubtitleSettings(settings) => {
                            AppMsg::ApplySubtitleSettings(settings)
                        }
                    });
                self.subtitle_selection_dialog = Some(subtitle_selection_dialog);
            }
        }
    }
}

impl App {
    fn update_subtitle_states(&mut self, position: gst::ClockTime) {
        self.update_primary_subtitle_state(position);
        self.update_secondary_subtitle_state(position);
    }

    fn update_primary_subtitle_state(&mut self, position: gst::ClockTime) {
        // sometimes we get a few position update messages after
        // auto-pausing
        if self.autopaused {
            return;
        }

        update_subtitle_state(&mut self.primary_subtitle_state, position);

        // last cue just ended -> auto-pause
        if self.primary_subtitle_state.last_ended_cue_ix.is_dirty() && self.hovering_primary_cue {
            self.player.sender().send(PlayerMsg::Pause).unwrap();
            self.autopaused = true;
            return;
        }

        if self.primary_subtitle_state.is_dirty() {
            let cue = self.primary_subtitle_state.active_cue();
            self.subtitle_view
                .sender()
                .send(SubtitleViewMsg::SetPrimaryCue(cue))
                .unwrap();
        }
        if self.primary_subtitle_state.last_started_cue_ix.is_dirty() {
            if let Some(ix) = *self.primary_subtitle_state.last_started_cue_ix {
                self.transcript
                    .sender()
                    .send(TranscriptMsg::ScrollToCue(ix))
                    .unwrap();
            }
        }

        self.primary_subtitle_state.reset();
    }

    fn update_secondary_subtitle_state(&mut self, position: gst::ClockTime) {
        // sometimes we get a few position update messages after
        // auto-pausing
        if self.autopaused {
            return;
        }

        update_subtitle_state(&mut self.secondary_subtitle_state, position);

        if self.secondary_subtitle_state.is_dirty() {
            let cue = self.secondary_subtitle_state.active_cue();
            self.subtitle_view
                .sender()
                .send(SubtitleViewMsg::SetSecondaryCue(cue))
                .unwrap();
        }

        self.secondary_subtitle_state.reset();
    }
}

fn update_subtitle_state(state: &mut SubtitleState, position: gst::ClockTime) {
    if let Some(stream_ix) = state.stream_ix {
        let lock = SUBTITLE_TRACKS.read();
        let track = lock.get(&stream_ix).unwrap();
        update_last_time_ix(&track.start_times, &mut state.last_started_cue_ix, position);
        update_last_time_ix(&track.end_times, &mut state.last_ended_cue_ix, position);
    }
}

fn update_last_time_ix(
    times: &Vec<gst::ClockTime>,
    last_time_ix: &mut Tracker<Option<usize>>,
    current_time: gst::ClockTime,
) {
    // try to find index quickly (should succeed during normal playback)
    if let Some(ix) = last_time_ix.get() {
        let t0 = times.get(*ix).unwrap();
        match (times.get(ix + 1), times.get(ix + 2)) {
            (None, _) if current_time >= *t0 => {
                return;
            }
            (Some(t1), _) if current_time >= *t0 && current_time < *t1 => {
                return;
            }
            (Some(t1), None) if current_time >= *t1 => {
                last_time_ix.set(Some(ix + 1));
                return;
            }
            (Some(t1), Some(t2)) if current_time >= *t1 && current_time < *t2 => {
                last_time_ix.set(Some(ix + 1));
                return;
            }
            _ => {}
        }
    }

    // if we are before the first timestamp, no need to look further
    if times.is_empty() || current_time < *times.first().unwrap() {
        last_time_ix.set_if_ne(None);
        return;
    }

    // otherwise, search the whole array (e.g. after seeking)
    last_time_ix.set(times.iter().rposition(|time| *time <= current_time));
}
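
// A minimal launch sketch (an assumption, not part of this module): the
// application id "org.example.lleap" is illustrative and the real entry
// point may differ, but roughly, a `main.rs` would initialize GStreamer
// and hand `App` to Relm4:
//
//     fn main() {
//         gst::init().expect("failed to initialize GStreamer");
//         relm4::RelmApp::new("org.example.lleap").run::<App>(());
//     }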