aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/app.rs335
-rw-r--r--src/cue_view.rs81
-rw-r--r--src/main.rs9
-rw-r--r--src/open_dialog.rs58
-rw-r--r--src/preferences_dialog.rs (renamed from src/preferences.rs)29
-rw-r--r--src/settings.rs5
-rw-r--r--src/subtitle_selection_dialog.rs148
-rw-r--r--src/subtitle_view.rs57
-rw-r--r--src/subtitles/extraction/embedded.rs (renamed from src/subtitle_extraction/embedded.rs)18
-rw-r--r--src/subtitles/extraction/mod.rs (renamed from src/subtitle_extraction/mod.rs)12
-rw-r--r--src/subtitles/extraction/whisper.rs (renamed from src/subtitle_extraction/whisper.rs)16
-rw-r--r--src/subtitles/mod.rs86
-rw-r--r--src/subtitles/state.rs63
-rw-r--r--src/track_selector.rs26
-rw-r--r--src/tracks.rs38
-rw-r--r--src/transcript.rs8
-rw-r--r--src/translation/deepl.rs106
-rw-r--r--src/translation/mod.rs11
-rw-r--r--src/util/tracker.rs18
19 files changed, 743 insertions, 381 deletions
diff --git a/src/app.rs b/src/app.rs
index 951392e..bdb2ef9 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -5,33 +5,36 @@ use crate::{
icon_names,
open_dialog::{OpenDialog, OpenDialogMsg, OpenDialogOutput},
player::{Player, PlayerMsg, PlayerOutput},
- preferences::{Preferences, PreferencesMsg},
- subtitle_extraction::{SubtitleExtractor, SubtitleExtractorMsg, SubtitleExtractorOutput},
+ preferences_dialog::{PreferencesDialog, PreferencesDialogMsg},
subtitle_selection_dialog::{
SubtitleSelectionDialog, SubtitleSelectionDialogMsg, SubtitleSelectionDialogOutput,
+ SubtitleSettings,
},
subtitle_view::{SubtitleView, SubtitleViewMsg, SubtitleViewOutput},
- tracks::{SUBTITLE_TRACKS, StreamIndex, SubtitleCue},
+ subtitles::{
+ MetadataCollection, SUBTITLE_TRACKS, StreamIndex, SubtitleCue, SubtitleTrack,
+ extraction::{SubtitleExtractor, SubtitleExtractorMsg, SubtitleExtractorOutput},
+ state::SubtitleState,
+ },
transcript::{Transcript, TranscriptMsg, TranscriptOutput},
+ translation::{DeeplTranslator, deepl::DeeplTranslatorMsg},
util::Tracker,
};
pub struct App {
+ root: adw::ApplicationWindow,
transcript: Controller<Transcript>,
player: Controller<Player>,
subtitle_view: Controller<SubtitleView>,
extractor: WorkerController<SubtitleExtractor>,
+ deepl_translator: AsyncController<DeeplTranslator>,
- preferences: Controller<Preferences>,
+ preferences: Controller<PreferencesDialog>,
open_url_dialog: Controller<OpenDialog>,
- subtitle_selection_dialog: Controller<SubtitleSelectionDialog>,
+ subtitle_selection_dialog: Option<Controller<SubtitleSelectionDialog>>,
- primary_stream_ix: Option<StreamIndex>,
- primary_cue: Tracker<Option<String>>,
- primary_last_cue_ix: Tracker<Option<usize>>,
- secondary_cue: Tracker<Option<String>>,
- secondary_stream_ix: Option<StreamIndex>,
- secondary_last_cue_ix: Tracker<Option<usize>>,
+ primary_subtitle_state: SubtitleState,
+ secondary_subtitle_state: SubtitleState,
// for auto-pausing
autopaused: bool,
@@ -40,10 +43,9 @@ pub struct App {
#[derive(Debug)]
pub enum AppMsg {
- NewCue(StreamIndex, SubtitleCue),
+ AddCue(StreamIndex, SubtitleCue),
SubtitleExtractionComplete,
- PrimarySubtitleTrackSelected(Option<StreamIndex>),
- SecondarySubtitleTrackSelected(Option<StreamIndex>),
+ ApplySubtitleSettings(SubtitleSettings),
PositionUpdate(gst::ClockTime),
SetHoveringSubtitleCue(bool),
ShowUrlOpenDialog,
@@ -51,6 +53,7 @@ pub enum AppMsg {
ShowSubtitleSelectionDialog,
Play {
url: String,
+ metadata: MetadataCollection,
whisper_stream_index: Option<StreamIndex>,
},
}
@@ -123,52 +126,46 @@ impl SimpleComponent for App {
sender.input_sender(),
|output| match output {
SubtitleExtractorOutput::NewCue(stream_index, cue) => {
- AppMsg::NewCue(stream_index, cue)
+ AppMsg::AddCue(stream_index, cue)
}
SubtitleExtractorOutput::ExtractionComplete => AppMsg::SubtitleExtractionComplete,
},
);
- let preferences = Preferences::builder().launch(root.clone().into()).detach();
+ let deepl_translator = DeeplTranslator::builder().launch(()).detach();
+
+ let preferences = PreferencesDialog::builder()
+ .launch(root.clone().into())
+ .detach();
let open_url_dialog = OpenDialog::builder().launch(root.clone().into()).forward(
sender.input_sender(),
|output| match output {
OpenDialogOutput::Play {
url,
+ metadata,
whisper_stream_index,
} => AppMsg::Play {
url,
+ metadata,
whisper_stream_index,
},
},
);
- let subtitle_selection_dialog = SubtitleSelectionDialog::builder()
- .launch(root.clone().into())
- .forward(sender.input_sender(), |output| match output {
- SubtitleSelectionDialogOutput::PrimaryTrackSelected(ix) => {
- AppMsg::PrimarySubtitleTrackSelected(ix)
- }
- SubtitleSelectionDialogOutput::SecondaryTrackSelected(ix) => {
- AppMsg::SecondarySubtitleTrackSelected(ix)
- }
- });
let model = Self {
+ root: root.clone(),
player,
transcript,
subtitle_view,
extractor,
+ deepl_translator,
preferences,
open_url_dialog,
- subtitle_selection_dialog,
+ subtitle_selection_dialog: None,
- primary_stream_ix: None,
- primary_cue: Tracker::new(None),
- primary_last_cue_ix: Tracker::new(None),
- secondary_stream_ix: None,
- secondary_cue: Tracker::new(None),
- secondary_last_cue_ix: Tracker::new(None),
+ primary_subtitle_state: SubtitleState::default(),
+ secondary_subtitle_state: SubtitleState::default(),
autopaused: false,
hovering_primary_cue: false,
@@ -179,94 +176,45 @@ impl SimpleComponent for App {
ComponentParts { model, widgets }
}
- fn update(&mut self, message: Self::Input, _sender: ComponentSender<Self>) {
+ fn update(&mut self, message: Self::Input, sender: ComponentSender<Self>) {
match message {
- AppMsg::NewCue(stream_index, cue) => {
+ AppMsg::AddCue(stream_ix, cue) => {
+ SUBTITLE_TRACKS
+ .write()
+ .get_mut(&stream_ix)
+ .unwrap()
+ .push_cue(cue.clone());
+
self.transcript
.sender()
- .send(TranscriptMsg::NewCue(stream_index, cue))
+ .send(TranscriptMsg::NewCue(stream_ix, cue))
.unwrap();
}
AppMsg::SubtitleExtractionComplete => {
log::info!("Subtitle extraction complete");
}
- AppMsg::PrimarySubtitleTrackSelected(stream_index) => {
- self.primary_stream_ix = stream_index;
+ AppMsg::ApplySubtitleSettings(settings) => {
+ self.primary_subtitle_state
+ .set_stream_ix(settings.primary_track_ix);
+ self.secondary_subtitle_state
+ .set_stream_ix(settings.secondary_track_ix);
self.transcript
.sender()
- .send(TranscriptMsg::SelectTrack(stream_index))
+ .send(TranscriptMsg::SelectTrack(settings.primary_track_ix))
+ .unwrap();
+ self.deepl_translator
+ .sender()
+ .send(DeeplTranslatorMsg::SelectTrack(settings.primary_track_ix))
.unwrap();
- }
- AppMsg::SecondarySubtitleTrackSelected(stream_index) => {
- self.secondary_stream_ix = stream_index;
- }
- AppMsg::PositionUpdate(pos) => {
- if let Some(stream_ix) = self.primary_stream_ix {
- // sometimes we get a few position update messages after
- // auto-pausing; this prevents us from immediately un-autopausing
- // again
- if self.autopaused {
- return;
- }
-
- let cue_was_some = self.primary_cue.get().is_some();
-
- Self::update_cue(
- stream_ix,
- pos,
- &mut self.primary_cue,
- &mut self.primary_last_cue_ix,
- );
-
- if self.primary_cue.is_dirty() {
- // last cue just ended -> auto-pause
- if cue_was_some && self.hovering_primary_cue {
- self.player.sender().send(PlayerMsg::Pause).unwrap();
- self.autopaused = true;
- return;
- }
-
- self.subtitle_view
- .sender()
- .send(SubtitleViewMsg::SetPrimaryCue(
- self.primary_cue.get().clone(),
- ))
- .unwrap();
-
- self.primary_cue.reset();
- }
-
- if self.primary_last_cue_ix.is_dirty() {
- if let Some(ix) = self.primary_last_cue_ix.get() {
- self.transcript
- .sender()
- .send(TranscriptMsg::ScrollToCue(*ix))
- .unwrap();
- }
- self.primary_last_cue_ix.reset();
- }
- }
- if let Some(stream_ix) = self.secondary_stream_ix {
- Self::update_cue(
- stream_ix,
- pos,
- &mut self.secondary_cue,
- &mut self.secondary_last_cue_ix,
- );
-
- if !self.autopaused && self.secondary_cue.is_dirty() {
- self.subtitle_view
- .sender()
- .send(SubtitleViewMsg::SetSecondaryCue(
- self.secondary_cue.get().clone(),
- ))
- .unwrap();
-
- self.secondary_cue.reset();
- }
- }
+ self.subtitle_view
+ .sender()
+ .send(SubtitleViewMsg::ApplySubtitleSettings(settings))
+ .unwrap();
+ }
+ AppMsg::PositionUpdate(position) => {
+ self.update_subtitle_states(position);
}
AppMsg::SetHoveringSubtitleCue(hovering) => {
self.hovering_primary_cue = hovering;
@@ -284,17 +232,20 @@ impl SimpleComponent for App {
AppMsg::ShowPreferences => {
self.preferences
.sender()
- .send(PreferencesMsg::Show)
+ .send(PreferencesDialogMsg::Show)
.unwrap();
}
AppMsg::ShowSubtitleSelectionDialog => {
- self.subtitle_selection_dialog
- .sender()
- .send(SubtitleSelectionDialogMsg::Show)
- .unwrap();
+ if let Some(ref dialog) = self.subtitle_selection_dialog {
+ dialog
+ .sender()
+ .send(SubtitleSelectionDialogMsg::Show)
+ .unwrap();
+ }
}
AppMsg::Play {
url,
+ metadata,
whisper_stream_index,
} => {
self.player
@@ -308,70 +259,128 @@ impl SimpleComponent for App {
whisper_stream_index,
})
.unwrap();
+
+ let subtitle_selection_dialog = SubtitleSelectionDialog::builder()
+ .launch((self.root.clone().into(), metadata))
+ .forward(sender.input_sender(), |output| match output {
+ SubtitleSelectionDialogOutput::ApplySubtitleSettings(settings) => {
+ AppMsg::ApplySubtitleSettings(settings)
+ }
+ });
+ self.subtitle_selection_dialog = Some(subtitle_selection_dialog);
}
}
}
}
impl App {
- fn update_cue(
- stream_ix: StreamIndex,
- position: gst::ClockTime,
- cue: &mut Tracker<Option<String>>,
- last_cue_ix: &mut Tracker<Option<usize>>,
- ) {
- let lock = SUBTITLE_TRACKS.read();
- let track = lock.get(&stream_ix).unwrap();
+ fn update_subtitle_states(&mut self, position: gst::ClockTime) {
+ self.update_primary_subtitle_state(position);
+ self.update_secondary_subtitle_state(position);
+ }
- // try to find current cue quickly (should usually succeed during playback)
- if let Some(ix) = last_cue_ix.get() {
- let last_cue = track.cues.get(*ix).unwrap();
- if last_cue.start <= position && position <= last_cue.end {
- // still at current cue
- return;
- } else if let Some(next_cue) = track.cues.get(ix + 1) {
- if last_cue.end < position && position < next_cue.start {
- // strictly between cues
- cue.set(None);
- return;
- }
- if next_cue.start <= position && position <= next_cue.end {
- // already in next cue (this happens when one cue immediately
- // follows the previous one)
- cue.set(Some(next_cue.text.clone()));
- last_cue_ix.set(Some(ix + 1));
- return;
- }
+ fn update_primary_subtitle_state(&mut self, position: gst::ClockTime) {
+ // sometimes we get a few position update messages after
+ // auto-pausing
+ if self.autopaused {
+ return;
+ }
+
+ update_subtitle_state(&mut self.primary_subtitle_state, position);
+
+ // last cue just ended -> auto-pause
+ if self.primary_subtitle_state.last_ended_cue_ix.is_dirty() && self.hovering_primary_cue {
+ self.player.sender().send(PlayerMsg::Pause).unwrap();
+ self.autopaused = true;
+ return;
+ }
+
+ if self.primary_subtitle_state.is_dirty() {
+ let cue = self.primary_subtitle_state.active_cue();
+
+ self.subtitle_view
+ .sender()
+ .send(SubtitleViewMsg::SetPrimaryCue(cue))
+ .unwrap();
+ }
+
+ if self.primary_subtitle_state.last_started_cue_ix.is_dirty() {
+ if let Some(ix) = *self.primary_subtitle_state.last_started_cue_ix {
+ self.transcript
+ .sender()
+ .send(TranscriptMsg::ScrollToCue(ix))
+ .unwrap();
}
}
- // if we are before the first subtitle, no need to look further
- if track.cues.is_empty() || position < track.cues.first().unwrap().start {
- cue.set(None);
- last_cue_ix.set(None);
+ self.primary_subtitle_state.reset();
+ }
+
+ fn update_secondary_subtitle_state(&mut self, position: gst::ClockTime) {
+ // sometimes we get a few position update messages after
+ // auto-pausing
+ if self.autopaused {
return;
}
- // otherwise, search the whole track (e.g. after seeking)
- match track
- .cues
- .iter()
- .enumerate()
- .rev()
- .find(|(_ix, cue)| cue.start <= position)
- {
- Some((ix, new_cue)) => {
- last_cue_ix.set(Some(ix));
- if position <= new_cue.end {
- cue.set(Some(new_cue.text.clone()));
- } else {
- cue.set(None);
- }
+ update_subtitle_state(&mut self.secondary_subtitle_state, position);
+
+ if self.secondary_subtitle_state.is_dirty() {
+ let cue = self.secondary_subtitle_state.active_cue();
+
+ self.subtitle_view
+ .sender()
+ .send(SubtitleViewMsg::SetSecondaryCue(cue))
+ .unwrap();
+ }
+
+ self.secondary_subtitle_state.reset();
+ }
+}
+
+fn update_subtitle_state(state: &mut SubtitleState, position: gst::ClockTime) {
+ if let Some(stream_ix) = state.stream_ix {
+ let lock = SUBTITLE_TRACKS.read();
+ let track = lock.get(&stream_ix).unwrap();
+
+ update_last_time_ix(&track.start_times, &mut state.last_started_cue_ix, position);
+ update_last_time_ix(&track.end_times, &mut state.last_ended_cue_ix, position);
+ }
+}
+
+fn update_last_time_ix(
+ times: &Vec<gst::ClockTime>,
+ last_time_ix: &mut Tracker<Option<usize>>,
+ current_time: gst::ClockTime,
+) {
+ // try to find index quickly (should succeed during normal playback)
+ if let Some(ix) = last_time_ix.get() {
+ let t0 = times.get(*ix).unwrap();
+ match (times.get(ix + 1), times.get(ix + 2)) {
+ (None, _) if current_time >= *t0 => {
+ return;
}
- None => {
- cue.set(None);
- last_cue_ix.set(None);
+ (Some(t1), _) if current_time >= *t0 && current_time < *t1 => {
+ return;
}
- };
+ (Some(t1), None) if current_time >= *t1 => {
+ last_time_ix.set(Some(ix + 1));
+ return;
+ }
+ (Some(t1), Some(t2)) if current_time >= *t1 && current_time < *t2 => {
+ last_time_ix.set(Some(ix + 1));
+ return;
+ }
+ _ => {}
+ }
+ }
+
+ // if we are before the first timestamp, no need to look further
+ if times.is_empty() || current_time < *times.first().unwrap() {
+ last_time_ix.set_if_ne(None);
+ return;
}
+
+ // otherwise, search the whole array (e.g. after seeking)
+ last_time_ix.set(times.iter().rposition(|time| *time <= current_time));
}
diff --git a/src/cue_view.rs b/src/cue_view.rs
index fbf2520..05c45c4 100644
--- a/src/cue_view.rs
+++ b/src/cue_view.rs
@@ -8,18 +8,25 @@ use relm4::prelude::*;
use relm4::{ComponentParts, SimpleComponent};
use unicode_segmentation::UnicodeSegmentation;
+use crate::subtitles::state::CueAddress;
+use crate::translation::TRANSLATIONS;
use crate::util::Tracker;
-pub struct CueView {
- text: Tracker<Option<String>>,
+pub struct ActiveCueViewState {
+ addr: CueAddress,
+ text: String,
// byte ranges for the words in `text`
word_ranges: Vec<Range<usize>>,
}
+pub struct CueView {
+ state: Tracker<Option<ActiveCueViewState>>,
+}
+
#[derive(Debug)]
pub enum CueViewMsg {
// messages from the app
- SetText(Option<String>),
+ SetCue(Option<CueAddress>),
// messages from UI
MouseMotion,
}
@@ -42,7 +49,7 @@ impl SimpleComponent for CueView {
gtk::Label {
add_controller: event_controller.clone(),
set_use_markup: true,
- set_visible: false,
+ set_sensitive: false,
set_justify: gtk::Justification::Center,
add_css_class: "cue-view",
},
@@ -71,8 +78,7 @@ impl SimpleComponent for CueView {
sender: relm4::ComponentSender<Self>,
) -> relm4::ComponentParts<Self> {
let model = Self {
- text: Tracker::new(None),
- word_ranges: Vec::new(),
+ state: Tracker::new(None),
};
let widgets = view_output!();
@@ -81,19 +87,26 @@ impl SimpleComponent for CueView {
}
fn update(&mut self, message: Self::Input, _sender: relm4::ComponentSender<Self>) {
- match message {
- CueViewMsg::SetText(text) => {
- self.text.set(text);
+ self.state.reset();
- if let Some(text) = self.text.get() {
- self.word_ranges = UnicodeSegmentation::unicode_word_indices(text.as_str())
+ match message {
+ CueViewMsg::SetCue(addr) => {
+ if let Some(addr) = addr {
+ let text = addr.resolve_text();
+ let word_ranges = UnicodeSegmentation::unicode_word_indices(text.as_str())
.map(|(offset, slice)| Range {
start: offset,
end: offset + slice.len(),
})
.collect();
+
+ self.state.set(Some(ActiveCueViewState {
+ addr,
+ text,
+ word_ranges,
+ }))
} else {
- self.word_ranges = Vec::new();
+ self.state.set(None);
}
}
CueViewMsg::MouseMotion => {
@@ -103,11 +116,16 @@ impl SimpleComponent for CueView {
}
fn post_view() {
- if self.text.is_dirty() {
- if let Some(text) = self.text.get() {
+ if self.state.is_dirty() {
+ if let Some(ActiveCueViewState {
+ addr: _,
+ text,
+ word_ranges,
+ }) = self.state.get()
+ {
let mut markup = String::new();
- let mut it = self.word_ranges.iter().enumerate().peekable();
+ let mut it = word_ranges.iter().enumerate().peekable();
if let Some((_, first_word_range)) = it.peek() {
markup.push_str(
glib::markup_escape_text(&text[..first_word_range.start]).as_str(),
@@ -127,24 +145,35 @@ impl SimpleComponent for CueView {
markup.push_str(glib::markup_escape_text(&text[next_gap_range]).as_str());
}
- widgets.label.set_markup(markup.as_str());
- widgets.label.set_visible(true);
+ widgets.label.set_markup(&markup);
+ widgets.label.set_sensitive(true);
} else {
- widgets.label.set_visible(false);
+ // insensitive = invisible by css
+ widgets.label.set_sensitive(false);
}
}
- if let Some(word_ix_str) = widgets.label.current_uri() {
- let range = self
- .word_ranges
- .get(usize::from_str(word_ix_str.as_str()).unwrap())
- .unwrap();
- widgets
- .popover_label
- .set_text(&self.text.get().as_ref().unwrap()[range.clone()]);
+ if let (
+ Some(ActiveCueViewState {
+ addr: CueAddress(stream_ix, cue_ix),
+ text: _,
+ word_ranges,
+ }),
+ Some(word_ix_str),
+ ) = (self.state.get(), widgets.label.current_uri())
+ {
+ let word_ix = usize::from_str(word_ix_str.as_str()).unwrap();
+
+ {
+ // TODO get translation
+ widgets.popover_label.set_text(word_ix_str.as_str());
+ }
+
+ let range = word_ranges.get(word_ix).unwrap();
widgets
.popover
.set_pointing_to(Some(&Self::get_rect_of_byte_range(&widgets.label, &range)));
+
widgets.popover.popup();
} else {
widgets.popover.popdown();
diff --git a/src/main.rs b/src/main.rs
index f010c6a..69ccb38 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,13 +2,14 @@ mod app;
mod cue_view;
mod open_dialog;
mod player;
-mod preferences;
-mod subtitle_extraction;
+mod preferences_dialog;
+mod settings;
mod subtitle_selection_dialog;
mod subtitle_view;
+mod subtitles;
mod track_selector;
-mod tracks;
mod transcript;
+mod translation;
mod util;
use gtk::{CssProvider, STYLE_PROVIDER_PRIORITY_APPLICATION, gdk, glib};
@@ -38,7 +39,7 @@ fn main() {
let css_provider = CssProvider::new();
css_provider.load_from_bytes(&glib::Bytes::from_static(include_bytes!(
- "../resources/style.css"
+ "../data/style.css"
)));
gtk::style_context_add_provider_for_display(
&gdk::Display::default().unwrap(),
diff --git a/src/open_dialog.rs b/src/open_dialog.rs
index 2f17c59..3b822be 100644
--- a/src/open_dialog.rs
+++ b/src/open_dialog.rs
@@ -5,10 +5,10 @@ use gtk::gio;
use gtk::glib::clone;
use relm4::prelude::*;
+use crate::subtitles::{MetadataCollection, StreamIndex, TrackMetadata};
use crate::track_selector::{
TrackInfo, TrackSelector, TrackSelectorInit, TrackSelectorMsg, TrackSelectorOutput,
};
-use crate::tracks::{StreamIndex, TrackMetadata};
use crate::util::Tracker;
pub struct OpenDialog {
@@ -23,6 +23,7 @@ pub struct OpenDialog {
whisper_stream_index: Option<StreamIndex>,
metadata_command_running: bool,
+ metadata: Option<MetadataCollection>,
}
#[derive(Debug)]
@@ -34,7 +35,7 @@ pub enum OpenDialogMsg {
FileSelected(gio::File),
UrlChanged(String),
SetDoWhisperExtraction(bool),
- WhisperTrackSelected(Option<StreamIndex>),
+ WhisperTrackSelected(StreamIndex),
Play,
}
@@ -42,6 +43,7 @@ pub enum OpenDialogMsg {
pub enum OpenDialogOutput {
Play {
url: String,
+ metadata: MetadataCollection,
whisper_stream_index: Option<StreamIndex>,
},
}
@@ -51,7 +53,7 @@ impl Component for OpenDialog {
type Init = adw::ApplicationWindow;
type Input = OpenDialogMsg;
type Output = OpenDialogOutput;
- type CommandOutput = Result<BTreeMap<StreamIndex, TrackMetadata>, ffmpeg::Error>;
+ type CommandOutput = Result<MetadataCollection, ffmpeg::Error>;
view! {
#[root]
@@ -186,6 +188,7 @@ impl Component for OpenDialog {
whisper_stream_index: None,
metadata_command_running: false,
+ metadata: None,
};
let widgets = view_output!();
@@ -227,23 +230,28 @@ impl Component for OpenDialog {
self.url.set(file.uri().into());
}
OpenDialogMsg::Play => {
- sender
- .output(OpenDialogOutput::Play {
- url: self.url.get().clone(),
- whisper_stream_index: if self.do_whisper_extraction {
- self.whisper_stream_index
- } else {
- None
- },
- })
- .unwrap();
- self.dialog.close();
+ if let Some(ref metadata) = self.metadata {
+ sender
+ .output(OpenDialogOutput::Play {
+ url: self.url.get().clone(),
+ metadata: metadata.clone(),
+ whisper_stream_index: if self.do_whisper_extraction {
+ self.whisper_stream_index
+ } else {
+ None
+ },
+ })
+ .unwrap();
+ self.dialog.close();
+ } else {
+ log::error!("metadata is unavailable, can't play");
+ }
}
OpenDialogMsg::SetDoWhisperExtraction(val) => {
self.do_whisper_extraction = val;
}
OpenDialogMsg::WhisperTrackSelected(track_index) => {
- self.whisper_stream_index = track_index;
+ self.whisper_stream_index = Some(track_index);
}
}
}
@@ -259,10 +267,10 @@ impl Component for OpenDialog {
self.metadata_command_running = false;
match message {
- Ok(audio_tracks) => {
+ Ok(metadata) => {
let list_model = gio::ListStore::new::<TrackInfo>();
- for (&stream_index, track) in audio_tracks.iter() {
+ for (&stream_index, track) in metadata.audio.iter() {
let track_info = TrackInfo::new(
stream_index,
track.language.map(|lang| lang.to_name()),
@@ -276,6 +284,8 @@ impl Component for OpenDialog {
.send(TrackSelectorMsg::SetListModel(list_model))
.unwrap();
+ self.metadata = Some(metadata);
+
self.next();
}
Err(e) => {
@@ -302,7 +312,7 @@ impl OpenDialog {
sender.spawn_oneshot_command(move || {
let input = ffmpeg::format::input(&url)?;
- let audio_tracks = input
+ let audio = input
.streams()
.filter_map(|stream| {
if stream.parameters().medium() == ffmpeg::media::Type::Audio {
@@ -312,8 +322,18 @@ impl OpenDialog {
}
})
.collect::<BTreeMap<_, _>>();
+ let subtitles = input
+ .streams()
+ .filter_map(|stream| {
+ if stream.parameters().medium() == ffmpeg::media::Type::Subtitle {
+ Some((stream.index(), TrackMetadata::from_ffmpeg_stream(&stream)))
+ } else {
+ None
+ }
+ })
+ .collect::<BTreeMap<_, _>>();
- Ok(audio_tracks)
+ Ok(MetadataCollection { audio, subtitles })
});
self.metadata_command_running = true;
diff --git a/src/preferences.rs b/src/preferences_dialog.rs
index c5f9bb1..5aacfe8 100644
--- a/src/preferences.rs
+++ b/src/preferences_dialog.rs
@@ -1,25 +1,23 @@
use adw::prelude::*;
-use gtk::gio;
use relm4::prelude::*;
-pub struct Preferences {
+use crate::settings::Settings;
+
+pub struct PreferencesDialog {
parent_window: adw::ApplicationWindow,
dialog: adw::PreferencesDialog,
}
#[derive(Debug)]
-pub enum PreferencesMsg {
+pub enum PreferencesDialogMsg {
Show,
}
-#[derive(Debug)]
-pub enum PreferencesOutput {}
-
#[relm4::component(pub)]
-impl SimpleComponent for Preferences {
+impl SimpleComponent for PreferencesDialog {
type Init = adw::ApplicationWindow;
- type Input = PreferencesMsg;
- type Output = PreferencesOutput;
+ type Input = PreferencesDialogMsg;
+ type Output = ();
view! {
#[root]
@@ -33,12 +31,9 @@ impl SimpleComponent for Preferences {
adw::PreferencesGroup {
set_title: "Machine Translation",
+ #[name(deepl_api_key_row)]
adw::EntryRow {
set_title: "DeepL API key",
- set_text: settings.string("deepl-api-key").as_str(),
- connect_changed[settings] => move |entry| {
- settings.set_string("deepl-api-key", entry.text().as_str()).unwrap()
- }
},
}
}
@@ -49,7 +44,7 @@ impl SimpleComponent for Preferences {
root: Self::Root,
_sender: ComponentSender<Self>,
) -> ComponentParts<Self> {
- let settings = gio::Settings::new("tc.mal.lleap");
+ let settings = Settings::default();
let model = Self {
parent_window,
@@ -58,12 +53,16 @@ impl SimpleComponent for Preferences {
let widgets = view_output!();
+ settings
+ .bind_deepl_api_key(&widgets.deepl_api_key_row, "text")
+ .build();
+
ComponentParts { model, widgets }
}
fn update(&mut self, msg: Self::Input, _sender: ComponentSender<Self>) {
match msg {
- PreferencesMsg::Show => {
+ PreferencesDialogMsg::Show => {
self.dialog.present(Some(&self.parent_window));
}
}
diff --git a/src/settings.rs b/src/settings.rs
new file mode 100644
index 0000000..eb1f6b9
--- /dev/null
+++ b/src/settings.rs
@@ -0,0 +1,5 @@
+use gsettings_macro::gen_settings;
+use gtk::{gio, glib};
+
+#[gen_settings(file = "./data/tc.mal.lleap.gschema.xml", id = "tc.mal.lleap")]
+pub struct Settings;
diff --git a/src/subtitle_selection_dialog.rs b/src/subtitle_selection_dialog.rs
index 6136d56..8e5d283 100644
--- a/src/subtitle_selection_dialog.rs
+++ b/src/subtitle_selection_dialog.rs
@@ -2,37 +2,47 @@ use adw::prelude::*;
use gtk::gio;
use relm4::prelude::*;
+use crate::subtitles::{MetadataCollection, StreamIndex};
use crate::track_selector::{
TrackInfo, TrackSelector, TrackSelectorInit, TrackSelectorMsg, TrackSelectorOutput,
};
-use crate::tracks::{SUBTITLE_TRACKS, StreamIndex};
+
+#[derive(Clone, Copy, Default, Debug)]
+pub struct SubtitleSettings {
+ pub primary_track_ix: Option<StreamIndex>,
+ pub secondary_track_ix: Option<StreamIndex>,
+ pub show_secondary: bool,
+ pub show_machine_translation: bool,
+}
pub struct SubtitleSelectionDialog {
parent_window: adw::ApplicationWindow,
dialog: adw::PreferencesDialog,
- track_list_model: gio::ListStore,
primary_selector: Controller<TrackSelector>,
secondary_selector: Controller<TrackSelector>,
- primary_track_ix: Option<StreamIndex>,
- secondary_track_ix: Option<StreamIndex>,
+
+ settings: SubtitleSettings,
}
#[derive(Debug)]
pub enum SubtitleSelectionDialogMsg {
Show,
- PrimaryTrackChanged(Option<StreamIndex>),
- SecondaryTrackChanged(Option<StreamIndex>),
+ Close,
+ // ui messages
+ PrimaryTrackChanged(StreamIndex),
+ SecondaryTrackChanged(StreamIndex),
+ ShowSecondaryChanged(bool),
+ ShowMachineTranslationChanged(bool),
}
#[derive(Debug)]
pub enum SubtitleSelectionDialogOutput {
- PrimaryTrackSelected(Option<StreamIndex>),
- SecondaryTrackSelected(Option<StreamIndex>),
+ ApplySubtitleSettings(SubtitleSettings),
}
#[relm4::component(pub)]
impl SimpleComponent for SubtitleSelectionDialog {
- type Init = adw::ApplicationWindow;
+ type Init = (adw::ApplicationWindow, MetadataCollection);
type Input = SubtitleSelectionDialogMsg;
type Output = SubtitleSelectionDialogOutput;
@@ -41,22 +51,50 @@ impl SimpleComponent for SubtitleSelectionDialog {
adw::PreferencesDialog {
set_title: "Subtitle Settings",
add: &page,
+ connect_closed => SubtitleSelectionDialogMsg::Close,
},
#[name(page)]
adw::PreferencesPage {
adw::PreferencesGroup {
model.primary_selector.widget(),
- model.secondary_selector.widget(),
+
+ adw::ExpanderRow {
+ set_title: "Show secondary subtitles",
+ set_subtitle: "Enable this if there exist subtitles a language you already know",
+ set_show_enable_switch: true,
+ #[watch]
+ set_enable_expansion: model.settings.show_secondary,
+ connect_enable_expansion_notify[sender] => move |expander_row| {
+ sender.input(SubtitleSelectionDialogMsg::ShowSecondaryChanged(expander_row.enables_expansion()))
+ },
+
+ add_row: model.secondary_selector.widget(),
+ },
+
+ adw::ExpanderRow {
+ set_title: "Show machine translations",
+ set_subtitle: "This is useful in case there are no subtitles in your native language or you prefer a more direct translation of the primary subtitles",
+ set_show_enable_switch: true,
+ #[watch]
+ set_enable_expansion: model.settings.show_machine_translation,
+ connect_enable_expansion_notify[sender] => move |expander_row| {
+ sender.input(SubtitleSelectionDialogMsg::ShowMachineTranslationChanged(expander_row.enables_expansion()))
+ },
+
+ // TODO add row for language selection
+ },
}
},
}
fn init(
- parent_window: Self::Init,
+ init: Self::Init,
root: Self::Root,
sender: ComponentSender<Self>,
) -> ComponentParts<Self> {
+ let (parent_window, metadata) = init;
+
let primary_selector = TrackSelector::builder()
.launch(TrackSelectorInit {
title: "Primary subtitle track",
@@ -81,73 +119,59 @@ impl SimpleComponent for SubtitleSelectionDialog {
let model = Self {
parent_window,
dialog: root.clone(),
- track_list_model: gio::ListStore::new::<TrackInfo>(),
primary_selector,
secondary_selector,
- primary_track_ix: None,
- secondary_track_ix: None,
+ settings: Default::default(),
};
let widgets = view_output!();
+ let track_list_model = gio::ListStore::new::<TrackInfo>();
+ for (&stream_index, track_metadata) in metadata.subtitles.iter() {
+ let track_info = TrackInfo::new(
+ stream_index,
+ track_metadata.language.map(|lang| lang.to_name()),
+ track_metadata.title.clone(),
+ );
+ track_list_model.append(&track_info);
+ }
+
+ model
+ .primary_selector
+ .sender()
+ .send(TrackSelectorMsg::SetListModel(track_list_model.clone()))
+ .unwrap();
+ model
+ .secondary_selector
+ .sender()
+ .send(TrackSelectorMsg::SetListModel(track_list_model.clone()))
+ .unwrap();
+
ComponentParts { model, widgets }
}
fn update(&mut self, msg: Self::Input, sender: ComponentSender<Self>) {
match msg {
SubtitleSelectionDialogMsg::Show => {
- self.update_track_list_model();
-
- self.primary_selector
- .sender()
- .send(TrackSelectorMsg::SetListModel(
- self.track_list_model.clone(),
- ))
- .unwrap();
- self.secondary_selector
- .sender()
- .send(TrackSelectorMsg::SetListModel(
- self.track_list_model.clone(),
- ))
- .unwrap();
-
self.dialog.present(Some(&self.parent_window));
}
- SubtitleSelectionDialogMsg::PrimaryTrackChanged(stream_index) => {
- self.primary_track_ix = stream_index;
- sender
- .output(SubtitleSelectionDialogOutput::PrimaryTrackSelected(
- stream_index,
- ))
- .unwrap();
+ SubtitleSelectionDialogMsg::Close => sender
+ .output(SubtitleSelectionDialogOutput::ApplySubtitleSettings(
+ self.settings,
+ ))
+ .unwrap(),
+ SubtitleSelectionDialogMsg::PrimaryTrackChanged(stream_ix) => {
+ self.settings.primary_track_ix = Some(stream_ix);
}
- SubtitleSelectionDialogMsg::SecondaryTrackChanged(stream_index) => {
- self.secondary_track_ix = stream_index;
- sender
- .output(SubtitleSelectionDialogOutput::SecondaryTrackSelected(
- stream_index,
- ))
- .unwrap();
+ SubtitleSelectionDialogMsg::SecondaryTrackChanged(stream_ix) => {
+ self.settings.secondary_track_ix = Some(stream_ix);
+ }
+ SubtitleSelectionDialogMsg::ShowSecondaryChanged(val) => {
+ self.settings.show_secondary = val;
+ }
+ SubtitleSelectionDialogMsg::ShowMachineTranslationChanged(val) => {
+ self.settings.show_machine_translation = val;
}
- }
- }
-}
-
-impl SubtitleSelectionDialog {
- fn update_track_list_model(&mut self) {
- let tracks = SUBTITLE_TRACKS.read();
-
- // Clear previous entries
- self.track_list_model.remove_all();
-
- // Add all available tracks
- for (&stream_index, track) in tracks.iter() {
- let track_info = TrackInfo::new(
- stream_index,
- track.metadata.language.map(|lang| lang.to_name()),
- track.metadata.title.clone(),
- );
- self.track_list_model.append(&track_info);
}
}
}
diff --git a/src/subtitle_view.rs b/src/subtitle_view.rs
index fd98c60..4de73dd 100644
--- a/src/subtitle_view.rs
+++ b/src/subtitle_view.rs
@@ -1,17 +1,22 @@
use crate::cue_view::{CueView, CueViewMsg, CueViewOutput};
-use crate::util::Tracker;
+use crate::subtitle_selection_dialog::SubtitleSettings;
+use crate::subtitles::state::CueAddress;
use gtk::prelude::*;
use relm4::prelude::*;
pub struct SubtitleView {
primary_cue: Controller<CueView>,
- secondary_cue: Tracker<Option<String>>,
+ secondary_cue: Option<String>,
+ machine_translation: Option<String>,
+ show_secondary: bool,
+ show_machine_translation: bool,
}
#[derive(Debug)]
pub enum SubtitleViewMsg {
- SetPrimaryCue(Option<String>),
- SetSecondaryCue(Option<String>),
+ SetPrimaryCue(Option<CueAddress>),
+ SetSecondaryCue(Option<CueAddress>),
+ ApplySubtitleSettings(SubtitleSettings),
}
#[derive(Debug)]
@@ -39,12 +44,30 @@ impl SimpleComponent for SubtitleView {
model.primary_cue.widget(),
gtk::Box {
+ #[watch]
+ set_visible: model.show_secondary,
set_vexpand: true,
},
gtk::Label {
- #[track = "model.secondary_cue.is_dirty()"]
- set_text: model.secondary_cue.get().as_ref().map(|val| val.as_str()).unwrap_or(""),
+ #[watch]
+ set_text: model.secondary_cue.as_ref().map(|val| val.as_str()).unwrap_or(""),
+ #[watch]
+ set_visible: model.show_secondary,
+ set_justify: gtk::Justification::Center,
+ },
+
+ gtk::Box {
+ #[watch]
+ set_visible: model.show_machine_translation,
+ set_vexpand: true,
+ },
+
+ gtk::Label {
+ #[watch]
+ set_text: model.machine_translation.as_ref().map(|val| val.as_str()).unwrap_or(""),
+ #[watch]
+ set_visible: model.show_machine_translation,
set_justify: gtk::Justification::Center,
},
@@ -67,7 +90,10 @@ impl SimpleComponent for SubtitleView {
CueViewOutput::MouseEnter => SubtitleViewOutput::SetHoveringCue(true),
CueViewOutput::MouseLeave => SubtitleViewOutput::SetHoveringCue(false),
}),
- secondary_cue: Tracker::new(None),
+ secondary_cue: None,
+ machine_translation: None,
+ show_secondary: false,
+ show_machine_translation: false,
};
let widgets = view_output!();
@@ -76,18 +102,21 @@ impl SimpleComponent for SubtitleView {
}
fn update(&mut self, msg: Self::Input, _sender: ComponentSender<Self>) {
- // Reset trackers
- self.secondary_cue.reset();
-
match msg {
- SubtitleViewMsg::SetPrimaryCue(value) => {
+ SubtitleViewMsg::SetPrimaryCue(addr) => {
self.primary_cue
.sender()
- .send(CueViewMsg::SetText(value))
+ .send(CueViewMsg::SetCue(addr))
.unwrap();
+ self.machine_translation = addr.and_then(|a| a.resolve_translation())
+ }
+ SubtitleViewMsg::SetSecondaryCue(addr) => {
+ let text = addr.map(|addr| addr.resolve_text());
+ self.secondary_cue = text;
}
- SubtitleViewMsg::SetSecondaryCue(value) => {
- self.secondary_cue.set(value);
+ SubtitleViewMsg::ApplySubtitleSettings(settings) => {
+ self.show_secondary = settings.show_secondary;
+ self.show_machine_translation = settings.show_machine_translation;
}
}
}
diff --git a/src/subtitle_extraction/embedded.rs b/src/subtitles/extraction/embedded.rs
index 0ba6178..920f52b 100644
--- a/src/subtitle_extraction/embedded.rs
+++ b/src/subtitles/extraction/embedded.rs
@@ -2,7 +2,7 @@ use std::sync::mpsc;
use anyhow::Context;
-use crate::subtitle_extraction::*;
+use crate::{subtitles::SubtitleCue, subtitles::extraction::*};
pub fn extract_embedded_subtitles(
// stream index to use when storing extracted subtitles, this index already
@@ -23,12 +23,6 @@ pub fn extract_embedded_subtitles(
match decoder.decode(&packet, &mut subtitle) {
Ok(true) => {
if let Some(cue) = parse_subtitle(&subtitle, &packet, time_base) {
- SUBTITLE_TRACKS
- .write()
- .get_mut(&stream_ix)
- .unwrap()
- .cues
- .push(cue.clone());
sender
.output(SubtitleExtractorOutput::NewCue(stream_ix, cue))
.unwrap();
@@ -72,10 +66,14 @@ fn parse_subtitle(
.collect::<Vec<String>>()
.join("\n— ");
- let start = pts_to_clock_time(packet.pts()?);
- let end = pts_to_clock_time(packet.pts()? + packet.duration());
+ let start_time = pts_to_clock_time(packet.pts()?);
+ let end_time = pts_to_clock_time(packet.pts()? + packet.duration());
- Some(SubtitleCue { start, end, text })
+ Some(SubtitleCue {
+ text,
+ start_time,
+ end_time,
+ })
}
fn extract_dialogue_text(dialogue_line: &str) -> Option<String> {
diff --git a/src/subtitle_extraction/mod.rs b/src/subtitles/extraction/mod.rs
index 9e7fff4..b012658 100644
--- a/src/subtitle_extraction/mod.rs
+++ b/src/subtitles/extraction/mod.rs
@@ -8,7 +8,7 @@ use std::{collections::BTreeMap, sync::mpsc, thread};
use ffmpeg::Rational;
use relm4::{ComponentSender, Worker};
-use crate::tracks::{SUBTITLE_TRACKS, StreamIndex, SubtitleCue, SubtitleTrack, TrackMetadata};
+use crate::subtitles::{SUBTITLE_TRACKS, StreamIndex, SubtitleCue, SubtitleTrack, TrackMetadata};
pub struct SubtitleExtractor {}
@@ -87,10 +87,7 @@ impl SubtitleExtractor {
if stream.parameters().medium() == ffmpeg::media::Type::Subtitle {
let metadata = TrackMetadata::from_ffmpeg_stream(&stream);
- let track = SubtitleTrack {
- metadata,
- cues: Vec::new(),
- };
+ let track = SubtitleTrack::new(metadata);
SUBTITLE_TRACKS.write().insert(stream_ix, track);
@@ -117,10 +114,7 @@ impl SubtitleExtractor {
None => "Auto-generated from audio (Whisper)".to_string(),
});
- let track = SubtitleTrack {
- metadata,
- cues: Vec::new(),
- };
+ let track = SubtitleTrack::new(metadata);
SUBTITLE_TRACKS.write().insert(stream_ix, track);
diff --git a/src/subtitle_extraction/whisper.rs b/src/subtitles/extraction/whisper.rs
index ffa2e47..bd6fba7 100644
--- a/src/subtitle_extraction/whisper.rs
+++ b/src/subtitles/extraction/whisper.rs
@@ -8,7 +8,10 @@ use anyhow::Context;
use ffmpeg::{filter, frame};
use serde::Deserialize;
-use crate::{subtitle_extraction::*, tracks::StreamIndex};
+use crate::{
+ subtitles::extraction::*,
+ subtitles::{StreamIndex, SubtitleCue},
+};
#[derive(Debug, Deserialize)]
struct WhisperCue {
@@ -117,18 +120,11 @@ fn handle_packet(
let whisper_cue: WhisperCue = serde_json::from_str(&line_buf)?;
let cue = SubtitleCue {
- start: gst::ClockTime::from_mseconds(whisper_cue.start),
- end: gst::ClockTime::from_mseconds(whisper_cue.end),
text: whisper_cue.text,
+ start_time: gst::ClockTime::from_mseconds(whisper_cue.start),
+ end_time: gst::ClockTime::from_mseconds(whisper_cue.end),
};
- // TODO deduplicate this vs. the code in embedded.rs
- SUBTITLE_TRACKS
- .write()
- .get_mut(&stream_ix)
- .unwrap()
- .cues
- .push(cue.clone());
sender
.output(SubtitleExtractorOutput::NewCue(stream_ix, cue))
.unwrap();
diff --git a/src/subtitles/mod.rs b/src/subtitles/mod.rs
new file mode 100644
index 0000000..a545d52
--- /dev/null
+++ b/src/subtitles/mod.rs
@@ -0,0 +1,86 @@
+pub mod extraction;
+pub mod state;
+
+use std::collections::BTreeMap;
+
+use relm4::SharedState;
+
+pub type StreamIndex = usize;
+
+#[derive(Debug, Clone)]
+pub struct MetadataCollection {
+ pub audio: BTreeMap<StreamIndex, TrackMetadata>,
+ pub subtitles: BTreeMap<StreamIndex, TrackMetadata>,
+}
+
+#[derive(Debug, Clone)]
+pub struct TrackMetadata {
+ pub language: Option<isolang::Language>,
+ pub title: Option<String>,
+}
+
+#[derive(Debug, Clone)]
+pub struct SubtitleCue {
+ pub text: String,
+ pub start_time: gst::ClockTime,
+ pub end_time: gst::ClockTime,
+}
+
+#[derive(Debug, Clone)]
+pub struct SubtitleTrack {
+ pub metadata: TrackMetadata,
+ // SoA of cue text, start timestamp, end timestamp
+ pub texts: Vec<String>,
+ pub start_times: Vec<gst::ClockTime>,
+ pub end_times: Vec<gst::ClockTime>,
+}
+
+pub static SUBTITLE_TRACKS: SharedState<BTreeMap<StreamIndex, SubtitleTrack>> = SharedState::new();
+
+impl TrackMetadata {
+ pub fn from_ffmpeg_stream(stream: &ffmpeg::Stream) -> Self {
+ let language_code = stream.metadata().get("language").map(|s| s.to_string());
+ let title = stream.metadata().get("title").map(|s| s.to_string());
+
+ Self {
+ language: language_code.and_then(|code| isolang::Language::from_639_2b(&code)),
+ title,
+ }
+ }
+}
+
+impl SubtitleTrack {
+ pub fn new(metadata: TrackMetadata) -> Self {
+ Self {
+ metadata,
+ texts: Vec::new(),
+ start_times: Vec::new(),
+ end_times: Vec::new(),
+ }
+ }
+
+ pub fn push_cue(&mut self, cue: SubtitleCue) {
+ let SubtitleCue {
+ text,
+ start_time,
+ end_time,
+ } = cue;
+
+ self.texts.push(text);
+ self.start_times.push(start_time);
+ self.end_times.push(end_time);
+ }
+
+ pub fn iter_cloned_cues(&self) -> impl Iterator<Item = SubtitleCue> {
+ self.texts
+ .iter()
+ .cloned()
+ .zip(self.start_times.iter().copied())
+ .zip(self.end_times.iter().copied())
+ .map(|((text, start_time), end_time)| SubtitleCue {
+ text,
+ start_time,
+ end_time,
+ })
+ }
+}
diff --git a/src/subtitles/state.rs b/src/subtitles/state.rs
new file mode 100644
index 0000000..6b1ebda
--- /dev/null
+++ b/src/subtitles/state.rs
@@ -0,0 +1,63 @@
+use crate::{
+ subtitles::{SUBTITLE_TRACKS, StreamIndex},
+ translation::TRANSLATIONS,
+ util::Tracker,
+};
+
+#[derive(Default)]
+pub struct SubtitleState {
+ pub stream_ix: Option<StreamIndex>,
+ pub last_started_cue_ix: Tracker<Option<usize>>,
+ pub last_ended_cue_ix: Tracker<Option<usize>>,
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct CueAddress(pub StreamIndex, pub usize);
+
+impl SubtitleState {
+ pub fn active_cue(&self) -> Option<CueAddress> {
+ if let Some(stream_ix) = self.stream_ix {
+ match (*self.last_started_cue_ix, *self.last_ended_cue_ix) {
+ (None, _) => None,
+ (Some(started_ix), None) => Some(CueAddress(stream_ix, started_ix)),
+ (Some(started_ix), Some(ended_ix)) => {
+ if started_ix > ended_ix {
+ Some(CueAddress(stream_ix, started_ix))
+ } else {
+ None
+ }
+ }
+ }
+ } else {
+ None
+ }
+ }
+
+ pub fn is_dirty(&self) -> bool {
+ self.last_started_cue_ix.is_dirty() || self.last_ended_cue_ix.is_dirty()
+ }
+
+ pub fn reset(&mut self) {
+ self.last_started_cue_ix.reset();
+ self.last_ended_cue_ix.reset();
+ }
+
+ pub fn set_stream_ix(&mut self, stream_ix: Option<StreamIndex>) {
+ self.stream_ix = stream_ix;
+ self.last_started_cue_ix.set(None);
+ self.last_ended_cue_ix.set(None);
+ }
+}
+
+impl CueAddress {
+ pub fn resolve_text(&self) -> String {
+ SUBTITLE_TRACKS.read().get(&self.0).expect("CueAddress stream index must refer to a known track").texts[self.1].clone()
+ }
+
+ pub fn resolve_translation(&self) -> Option<String> {
+ TRANSLATIONS
+ .read()
+ .get(&self.0)
+ .and_then(|tln| tln.get(self.1).cloned())
+ }
+}
diff --git a/src/track_selector.rs b/src/track_selector.rs
index 5c56e4d..ce04d07 100644
--- a/src/track_selector.rs
+++ b/src/track_selector.rs
@@ -2,7 +2,7 @@ use adw::prelude::*;
use gtk::{gio, glib};
use relm4::prelude::*;
-use crate::tracks::StreamIndex;
+use crate::{subtitles::StreamIndex, util::Tracker};
glib::wrapper! {
pub struct TrackInfo(ObjectSubclass<imp::TrackInfo>);
@@ -65,11 +65,12 @@ pub struct TrackSelectorInit {
#[derive(Debug)]
pub enum TrackSelectorMsg {
SetListModel(gio::ListStore),
+ Changed(StreamIndex),
}
#[derive(Debug)]
pub enum TrackSelectorOutput {
- Changed(Option<StreamIndex>),
+ Changed(StreamIndex),
}
#[relm4::component(pub)]
@@ -87,11 +88,15 @@ impl SimpleComponent for TrackSelector {
set_factory: Some(&track_factory),
#[watch]
set_model: Some(&model.track_list_model),
- #[watch]
- set_selected: model.track_ix.map_or(gtk::INVALID_LIST_POSITION, |ix| get_list_ix_from_stream_ix(&model.track_list_model, ix)),
+ // TODO(review): re-enable or delete — programmatic selection disabled, kept for reference:
+ // set_selected: model.track_ix.map_or(gtk::INVALID_LIST_POSITION, |ix| get_list_ix_from_stream_ix(&model.track_list_model, ix)),
connect_selected_notify[sender] => move |combo| {
- let stream_index = get_stream_ix_from_combo(combo);
- sender.output(TrackSelectorOutput::Changed(stream_index)).unwrap();
+ if let Some(stream_ix) = get_stream_ix_from_combo(combo) {
+ log::debug!("selected {}", stream_ix);
+ sender.input(TrackSelectorMsg::Changed(stream_ix));
+ } else {
+ log::debug!("selected none");
+ }
},
},
@@ -155,11 +160,18 @@ impl SimpleComponent for TrackSelector {
ComponentParts { model, widgets }
}
- fn update(&mut self, msg: Self::Input, _sender: ComponentSender<Self>) {
+ fn update(&mut self, msg: Self::Input, sender: ComponentSender<Self>) {
match msg {
TrackSelectorMsg::SetListModel(list_model) => {
self.track_list_model = list_model;
}
+ TrackSelectorMsg::Changed(track_ix) => {
+ log::debug!("changed {:?}", track_ix);
+ self.track_ix = Some(track_ix);
+ sender
+ .output(TrackSelectorOutput::Changed(track_ix))
+ .unwrap();
+ }
}
}
}
diff --git a/src/tracks.rs b/src/tracks.rs
deleted file mode 100644
index 4d69e12..0000000
--- a/src/tracks.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-use std::collections::BTreeMap;
-
-use relm4::SharedState;
-
-pub type StreamIndex = usize;
-
-#[derive(Debug, Clone)]
-pub struct TrackMetadata {
- pub language: Option<isolang::Language>,
- pub title: Option<String>,
-}
-
-#[derive(Debug, Clone)]
-pub struct SubtitleTrack {
- pub metadata: TrackMetadata,
- pub cues: Vec<SubtitleCue>,
-}
-
-#[derive(Debug, Clone)]
-pub struct SubtitleCue {
- pub start: gst::ClockTime,
- pub end: gst::ClockTime,
- pub text: String,
-}
-
-pub static SUBTITLE_TRACKS: SharedState<BTreeMap<StreamIndex, SubtitleTrack>> = SharedState::new();
-
-impl TrackMetadata {
- pub fn from_ffmpeg_stream(stream: &ffmpeg::Stream) -> Self {
- let language_code = stream.metadata().get("language").map(|s| s.to_string());
- let title = stream.metadata().get("title").map(|s| s.to_string());
-
- Self {
- language: language_code.and_then(|code| isolang::Language::from_639_2b(&code)),
- title,
- }
- }
-}
diff --git a/src/transcript.rs b/src/transcript.rs
index a8ae554..602e340 100644
--- a/src/transcript.rs
+++ b/src/transcript.rs
@@ -1,7 +1,7 @@
use gtk::{ListBox, pango::WrapMode, prelude::*};
use relm4::prelude::*;
-use crate::tracks::{SUBTITLE_TRACKS, StreamIndex, SubtitleCue};
+use crate::subtitles::{SUBTITLE_TRACKS, StreamIndex, SubtitleCue};
#[derive(Debug)]
pub enum SubtitleCueOutput {
@@ -20,7 +20,7 @@ impl FactoryComponent for SubtitleCue {
gtk::Button {
inline_css: "padding: 5px; border-radius: 0;",
connect_clicked: {
- let start = self.start;
+ let start = self.start_time;
move |_| {
sender.output(SubtitleCueOutput::SeekTo(start)).unwrap()
}
@@ -124,8 +124,8 @@ impl SimpleComponent for Transcript {
if let Some(stream_ix) = stream_index {
let tracks = SUBTITLE_TRACKS.read();
if let Some(track) = tracks.get(&stream_ix) {
- for cue in &track.cues {
- self.active_cues.guard().push_back(cue.clone());
+ for cue in track.iter_cloned_cues() {
+ self.active_cues.guard().push_back(cue);
}
}
}
diff --git a/src/translation/deepl.rs b/src/translation/deepl.rs
new file mode 100644
index 0000000..f2e84d7
--- /dev/null
+++ b/src/translation/deepl.rs
@@ -0,0 +1,106 @@
+use std::{collections::BTreeMap, time::Duration};
+
+use deepl::DeepLApi;
+use relm4::prelude::*;
+
+use crate::{
+ settings::Settings,
+ subtitles::{SUBTITLE_TRACKS, StreamIndex},
+ translation::TRANSLATIONS,
+};
+
+pub struct DeeplTranslator {
+ stream_ix: Option<StreamIndex>,
+ next_cues_to_translate: BTreeMap<StreamIndex, usize>,
+}
+
+#[derive(Debug)]
+pub enum DeeplTranslatorMsg {
+ SelectTrack(Option<StreamIndex>),
+ // this is only used to drive the async translation
+ DoTranslate,
+}
+
+impl AsyncComponent for DeeplTranslator {
+ type Init = ();
+ type Input = DeeplTranslatorMsg;
+ type Output = ();
+ type CommandOutput = ();
+ type Root = ();
+ type Widgets = ();
+
+ async fn init(
+ _init: Self::Init,
+ _root: Self::Root,
+ sender: relm4::AsyncComponentSender<Self>,
+ ) -> AsyncComponentParts<Self> {
+ let model = Self {
+ stream_ix: None,
+ next_cues_to_translate: BTreeMap::new(),
+ };
+
+ sender.input(DeeplTranslatorMsg::DoTranslate);
+
+ AsyncComponentParts { model, widgets: () }
+ }
+
+ async fn update(
+ &mut self,
+ message: Self::Input,
+ sender: AsyncComponentSender<Self>,
+ _root: &Self::Root,
+ ) {
+ match message {
+ DeeplTranslatorMsg::SelectTrack(stream_ix) => {
+ self.stream_ix = stream_ix;
+ }
+ DeeplTranslatorMsg::DoTranslate => self.do_translate(sender).await,
+ }
+ }
+
+ fn init_root() -> Self::Root {
+ ()
+ }
+}
+
+impl DeeplTranslator {
+ async fn do_translate(&mut self, sender: AsyncComponentSender<Self>) {
+ if let Some(stream_ix) = self.stream_ix {
+ let deepl = DeepLApi::with(&Settings::default().deepl_api_key()).new();
+
+ let next_cue_to_translate = self.next_cues_to_translate.entry(stream_ix).or_insert(0);
+
+ if let Some(cue) = {
+ SUBTITLE_TRACKS
+ .read()
+ .get(&stream_ix)
+ .unwrap()
+ .texts
+ .get(*next_cue_to_translate)
+ .cloned()
+ } {
+ match deepl
+ .translate_text(cue, deepl::Lang::EN)
+ .model_type(deepl::ModelType::PreferQualityOptimized)
+ .await
+ {
+ Ok(mut resp) => {
+ TRANSLATIONS
+ .write()
+ .entry(stream_ix)
+ .or_default()
+ .push(resp.translations.pop().unwrap().text);
+
+ *next_cue_to_translate += 1;
+ }
+ Err(e) => {
+ log::error!("error fetching translation: {}", e)
+ }
+ };
+ }
+ }
+
+ relm4::tokio::time::sleep(Duration::from_secs(1)).await;
+ sender.input(DeeplTranslatorMsg::DoTranslate);
+ }
+}
diff --git a/src/translation/mod.rs b/src/translation/mod.rs
new file mode 100644
index 0000000..4a1b358
--- /dev/null
+++ b/src/translation/mod.rs
@@ -0,0 +1,11 @@
+use std::collections::BTreeMap;
+
+use relm4::SharedState;
+
+use crate::subtitles::StreamIndex;
+
+pub mod deepl;
+
+pub use deepl::DeeplTranslator;
+
+pub static TRANSLATIONS: SharedState<BTreeMap<StreamIndex, Vec<String>>> = SharedState::new();
diff --git a/src/util/tracker.rs b/src/util/tracker.rs
index 69a1c5f..060acae 100644
--- a/src/util/tracker.rs
+++ b/src/util/tracker.rs
@@ -1,3 +1,5 @@
+use std::ops::Deref;
+
pub struct Tracker<T> {
inner: T,
dirty: bool,
@@ -40,8 +42,24 @@ impl<T> Tracker<T> {
}
}
+impl<T> Deref for Tracker<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ self.get()
+ }
+}
+
impl<T: Default> Default for Tracker<T> {
fn default() -> Self {
Self::new(T::default())
}
}
+
+impl<T: Eq> Tracker<T> {
+ pub fn set_if_ne(&mut self, value: T) {
+ if self.inner != value {
+ self.set(value);
+ }
+ }
+}