// tyrolienne/src/ffmpeg.rs

use std::{
    path::{Path, PathBuf},
    process::Stdio,
};
use color_eyre::eyre::{ContextCompat, Result, bail};
use futures::channel::oneshot;
use relm4::{
    Sender,
    tokio::{
        self,
        io::{AsyncBufReadExt, BufReader},
        process::Command,
    },
};
use crate::ProgressMessage;
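
/// Top-level shape of `ffprobe -of json` output: the selected video stream(s)
/// and the container format section.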
#[derive(serde::Deserialize)]
struct FfprobeOut {
    streams: Vec<StreamInfo>,
    format: FormatInfo,
}

#[derive(serde::Deserialize)]
struct StreamInfo {
    width: usize,
    height: usize,
}

#[derive(serde::Deserialize)]
struct FormatInfo {
    duration: String,
}
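
/// Basic video metadata: frame dimensions in pixels and duration in microseconds.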
#[derive(Debug)]
pub struct VideoMeta {
    pub width: usize,
    pub height: usize,
    pub duration_us: usize,
}
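
/// Video codec used for the converted file; VP9 is the default.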
#[derive(Default, Clone, Copy)]
pub enum Codec {
    AV1,
    #[default]
    VP9,
}

impl From<String> for Codec {
    fn from(value: String) -> Self {
        match value.as_str() {
            "AV1" => Self::AV1,
            "VP9" => Self::VP9,
            _ => Default::default(),
        }
    }
}
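
/// Runs `ffprobe` on `path` and returns the width, height and duration
/// (in microseconds) of its first video stream.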
pub async fn get_video_meta(path: &Path) -> Result<VideoMeta> {
    let output = Command::new("ffprobe")
        .args([
            "-v",
            "error",
            "-select_streams",
            "v:0",
            "-show_entries",
            "stream=width,height : format=duration",
            "-of",
            "json",
        ])
        .arg(path)
        .stdout(Stdio::piped())
        .output()
        .await?;
    let str = String::from_utf8(output.stdout)?;
    let output: FfprobeOut = serde_json::from_str(&str)?;
    let stream = output
        .streams
        .first()
        .wrap_err("could not get stream information from ffprobe")?;
    // ffprobe reports the duration as a decimal string in seconds;
    // convert it to microseconds to match ffmpeg's progress units below.
    let duration_sec = output.format.duration.parse::<f64>()?;
    let duration_us = (duration_sec * 1_000_000.0).ceil() as usize;
    Ok(VideoMeta {
        width: stream.width,
        height: stream.height,
        duration_us,
    })
}
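
/// Converts the video at `path` to a WebM file in the system temp directory,
/// reporting absolute progress (microseconds of output written) through
/// `sender`. Returns the path of the converted file.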
pub async fn convert_video(
    path: &Path,
    out_filename: Option<String>,
    out_codec: Codec,
    merge_tracks: bool,
    sender: Sender<ProgressMessage>,
) -> Result<PathBuf> {
    // Default to the input file name, fall back to "out.webm", and always
    // write the result with a .webm extension into the temp directory.
    let out_filename = out_filename
        .or_else(|| {
            path.file_name()
                .and_then(|s| s.to_str())
                .map(|s| s.to_owned())
        })
        .unwrap_or_else(|| "out.webm".into());
    let mut out_path = std::env::temp_dir().join(out_filename);
    out_path.set_extension("webm");
    let codec_args: &[&str] = match out_codec {
        Codec::AV1 => &["-c:v", "libsvtav1"],
        Codec::VP9 => &["-c:v", "libvpx-vp9", "-row-mt", "1"],
    };
    // TODO: maybe check if the video has 2 audio tracks? or at least use a "fail-safe" method
    let merge_args: &[&str] = if merge_tracks {
        &["-ac", "2", "-filter_complex", "amerge=inputs=2"]
    } else {
        &[]
    };
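    // One possible "fail-safe" approach for the TODO above (not wired in here,
    // just a sketch): count the audio streams first, e.g. via
    // `ffprobe -v error -select_streams a -show_entries stream=index -of csv=p=0 <input>`,
    // and only pass the merge arguments when exactly two streams are present.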
    // `-progress -` makes ffmpeg write machine-readable key=value progress
    // lines to stdout, which are parsed in the loop below.
    let mut child = Command::new("ffmpeg")
        .arg("-i")
        .arg(path)
        .args(["-c:a", "libopus", "-b:a", "96k"])
        .args(codec_args)
        .args(merge_args)
        .args(["-y", "-loglevel", "error", "-progress", "-", "-nostats"])
        .arg(&out_path)
        .stdout(Stdio::piped())
        .spawn()?;
    let stdout = child.stdout.take().unwrap();
    let mut reader = BufReader::new(stdout).lines();
    // Wait on the child from a separate task so the process is actually
    // started, awaited and reaped while its progress output is read below.
    // The receiver may already be gone if the progress loop bails out early,
    // so a failed send is ignored rather than unwrapped.
    let (tx, rx) = oneshot::channel();
    tokio::spawn(async move {
        let _ = tx.send(child.wait().await);
    });
    while let Some(line) = reader.next_line().await? {
        if line.starts_with("out_time_us") {
            let (_, current_duration) = line
                .split_once('=')
                .wrap_err_with(|| format!("could not parse ffmpeg output: {line}"))?;
            if current_duration != "N/A" {
                sender.emit(ProgressMessage::AbsProgress(
                    current_duration.parse::<usize>()?,
                ));
            }
        }
    }
    let status = rx.await??;
    if !status.success() {
        bail!("ffmpeg failed: {status}");
    }
    Ok(out_path)
}