Add support for joining streams and fix clippy warnings
AdrianEddy committed Aug 29, 2023
1 parent 7d534c5 commit a4cd137
Showing 3 changed files with 53 additions and 35 deletions.
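The headline change: `join_files` becomes a thin wrapper around a new `join_file_streams` entry point that works on arbitrary `Read + Seek` inputs (paired with their sizes) and any `Read + Write + Seek` output, instead of filesystem paths. A minimal usage sketch with in-memory buffers; the crate name `mp4_merge`, the function name `merge_in_memory`, and the file paths are assumptions for illustration only:

    use std::io::Cursor;

    fn merge_in_memory() -> std::io::Result<Vec<u8>> {
        // Load two source MP4s into memory (paths are illustrative).
        let a = std::fs::read("clip_a.mp4")?;
        let b = std::fs::read("clip_b.mp4")?;
        let (a_len, b_len) = (a.len(), b.len());

        // Inputs are (Read + Seek, size) pairs; the output only needs Read + Write + Seek.
        let mut inputs = vec![
            (Cursor::new(a), a_len),
            (Cursor::new(b), b_len),
        ];
        let mut output = Cursor::new(Vec::new());

        mp4_merge::join_file_streams(&mut inputs, &mut output, |progress| {
            eprintln!("merge progress: {:.1}%", progress * 100.0);
        })?;

        Ok(output.into_inner())
    }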
3 changes: 1 addition & 2 deletions src/desc_reader.rs
@@ -2,7 +2,6 @@
// Copyright © 2022 Adrian <adrian.eddy at gmail>

use std::io::{ Read, Seek, Result, SeekFrom };
use std::path::PathBuf;
use byteorder::{ ReadBytesExt, BigEndian };
use crate::{ fourcc, read_box, typ_to_str };

@@ -27,7 +26,7 @@ pub struct TrackDesc {

#[derive(Default, Clone, Debug)]
pub struct Desc {
pub mdat_position: Vec<(Option<PathBuf>, u64, u64)>, // file path, offset, size
pub mdat_position: Vec<(Option<usize>, u64, u64)>, // file index, offset, size
pub moov_mvhd_duration: u64,
pub moov_tracks: Vec<TrackDesc>,
pub mdat_offset: u64,
35 changes: 23 additions & 12 deletions src/lib.rs
@@ -1,8 +1,8 @@
// SPDX-License-Identifier: MIT OR Apache-2.0
// Copyright © 2022 Adrian <adrian.eddy at gmail>

use std::io::{ Read, Seek, Result };
use std::path::{ Path, PathBuf };
use std::io::{ Read, Seek, Write, Result };
use std::path::Path;
use byteorder::{ ReadBytesExt, WriteBytesExt, BigEndian };
use std::time::Instant;

@@ -48,14 +48,25 @@ pub fn read_box<R: Read + Seek>(reader: &mut R) -> Result<(u32, u64, u64, i64)>
}
}

pub fn join_files<P: AsRef<Path> + AsRef<std::ffi::OsStr>, F: Fn(f64)>(files: &[P], output_file: P, progress_cb: F) -> Result<()> {
pub fn join_files<P: AsRef<Path>, F: Fn(f64)>(files: &[P], output_file: P, progress_cb: F) -> Result<()> {
let mut open_files = Vec::with_capacity(files.len());
for x in files {
let f = std::fs::File::open(x)?;
let size = f.metadata()?.len() as usize;
open_files.push((f, size));
}
join_file_streams(&mut open_files, std::fs::File::create(output_file)?, progress_cb)
}

pub fn join_file_streams<F: Fn(f64), I: Read + Seek, O: Read + Write + Seek>(files: &mut [(I, usize)], output_file: O, progress_cb: F) -> Result<()> {
// Get the merged description from all source files
let mut desc = desc_reader::Desc::default();
desc.moov_tracks.resize(10, Default::default());
let mut total_size = 0;
for (i, path) in files.iter().enumerate() {
let mut fs = std::fs::File::open(path)?;
total_size += fs.metadata()?.len();
let num_files = files.len() as f64;
for (i, fs) in files.iter_mut().enumerate() {
total_size += fs.1;
let mut fs = &mut fs.0;

{ // Find mdat first
while let Ok((typ, offs, size, header_size)) = read_box(&mut fs) {
@@ -74,28 +85,28 @@ pub fn join_files<P: AsRef<Path> + AsRef<std::ffi::OsStr>, F: Fn(f64)>(files: &[
desc_reader::read_desc(&mut fs, &mut desc, 0, u64::MAX, i)?;

if let Some(mdat) = desc.mdat_position.last_mut() {
mdat.0 = Some(PathBuf::from(path));
mdat.0 = Some(i);
desc.mdat_offset += mdat.2;
for t in &mut desc.moov_tracks {
t.sample_offset = t.stsz_count;
t.chunk_offset = t.stco.len() as u32;
}
}

progress_cb(((i as f64 + 1.0) / files.len() as f64) * 0.1);
progress_cb(((i as f64 + 1.0) / num_files) * 0.1);
}

// Write it to the file
let mut f1 = std::fs::File::open(&files[0])?;
let f_out = std::fs::File::create(output_file)?;
let mut debounce = Instant::now();
let mut f_out = ProgressStream::new(f_out, |total| {
let mut f_out = ProgressStream::new(output_file, |total| {
if (Instant::now() - debounce).as_millis() > 20 {
progress_cb((0.1 + ((total as f64 / total_size as f64) * 0.9)).min(0.9999));
debounce = Instant::now();
}
});
writer::rewrite_from_desc(&mut f1, &mut f_out, &mut desc, 0, u64::MAX)?;

writer::get_first(files).seek(std::io::SeekFrom::Start(0))?;
writer::rewrite_from_desc(files, &mut f_out, &mut desc, 0, u64::MAX)?;

// Patch final mdat positions
for track in &desc.moov_tracks {
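`ProgressStream` (used above to drive the progress callback while the output is written) is defined elsewhere in the crate and is not part of this diff. Purely as an assumed shape, such a wrapper might look roughly like the following: a `Write`/`Seek` pass-through that counts bytes written and reports the running total.

    use std::io::{ Result, Seek, SeekFrom, Write };

    // Assumed shape only: forwards writes/seeks and reports the running byte total.
    pub struct ProgressStream<W, F> {
        inner: W,
        total: u64,
        cb: F,
    }

    impl<W, F: FnMut(u64)> ProgressStream<W, F> {
        pub fn new(inner: W, cb: F) -> Self {
            Self { inner, total: 0, cb }
        }
    }

    impl<W: Write, F: FnMut(u64)> Write for ProgressStream<W, F> {
        fn write(&mut self, buf: &[u8]) -> Result<usize> {
            let written = self.inner.write(buf)?;
            self.total += written as u64;
            (self.cb)(self.total);
            Ok(written)
        }
        fn flush(&mut self) -> Result<()> { self.inner.flush() }
    }

    impl<W: Seek, F> Seek for ProgressStream<W, F> {
        fn seek(&mut self, pos: SeekFrom) -> Result<u64> { self.inner.seek(pos) }
    }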
50 changes: 29 additions & 21 deletions src/writer.rs
@@ -5,21 +5,24 @@ use std::io::{ Read, Write, Seek, Result, SeekFrom };
use byteorder::{ ReadBytesExt, WriteBytesExt, BigEndian };
use crate::{ fourcc, read_box, typ_to_str, desc_reader::Desc };

pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file: &mut W, desc: &mut Desc, track: usize, max_read: u64) -> Result<u64> {
pub(crate) fn get_first<R: Read + Seek>(files: &mut [(R, usize)]) -> &mut R { files.get_mut(0).map(|x| &mut x.0).unwrap() }

pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(files: &mut [(R, usize)], output_file: &mut W, desc: &mut Desc, track: usize, max_read: u64) -> Result<u64> {
let mut total_read_size = 0;
let mut total_new_size = 0;
let mut tl_track = track;
while let Ok((typ, offs, size, header_size)) = read_box(d) {
while let Ok((typ, offs, size, header_size)) = read_box(get_first(files)) {
if size == 0 || typ == 0 { break; }

total_read_size += size;
let mut new_size = size;
if crate::has_children(typ, false) {
let d = get_first(files);
// Copy the header
d.seek(SeekFrom::Current(-header_size))?;
let out_pos = output_file.stream_position()?;
std::io::copy(&mut d.take(header_size as u64), output_file)?;
new_size = rewrite_from_desc(d, output_file, desc, tl_track, size - header_size as u64)?;
new_size = rewrite_from_desc(files, output_file, desc, tl_track, size - header_size as u64)?;
new_size += header_size as u64;

if typ == fourcc("trak") {
@@ -33,29 +36,33 @@ pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file
} else if typ == fourcc("mdat") {
log::debug!("Merging mdat's, offset: {}, size: {size}", offs);

output_file.write(&1u32.to_be_bytes())?;
output_file.write(&fourcc("mdat").to_be_bytes())?;
output_file.write_all(&1u32.to_be_bytes())?;
output_file.write_all(&fourcc("mdat").to_be_bytes())?;
let pos = output_file.stream_position()?;
output_file.write(&0u64.to_be_bytes())?;
output_file.write_all(&0u64.to_be_bytes())?;
new_size = 16;

desc.mdat_final_position = output_file.stream_position()?;

// Merge all mdats
for (fpath, mo, ms) in &desc.mdat_position {
if let Some(fpath) = fpath {
let mut f = std::fs::File::open(fpath)?;
f.seek(SeekFrom::Start(*mo))?;
std::io::copy(&mut f.take(*ms), output_file)?;
new_size += ms;
for (file_index, mo, ms) in &desc.mdat_position {
if let Some(file_index) = file_index {
if let Some(f) = files.get_mut(*file_index).map(|x| &mut x.0) {
let prev_pos = f.stream_position()?;
f.seek(SeekFrom::Start(*mo))?;
std::io::copy(&mut f.take(*ms), output_file)?;
f.seek(SeekFrom::Start(prev_pos))?;
new_size += ms;
}
}
}
patch_bytes(output_file, pos, &new_size.to_be_bytes())?;

d.seek(SeekFrom::Current(size as i64 - header_size))?;
get_first(files).seek(SeekFrom::Current(size as i64 - header_size))?;

} else if typ == fourcc("mvhd") || typ == fourcc("tkhd") || typ == fourcc("mdhd") || typ == fourcc("elst") {
log::debug!("Writing {} with patched duration, offset: {}, size: {size}", typ_to_str(typ), offs);
let d = get_first(files);

let (v, _flags) = (d.read_u8()?, d.read_u24::<BigEndian>()?);

@@ -87,22 +94,22 @@ pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file
} else if typ == fourcc("stts") || typ == fourcc("stsz") || typ == fourcc("stss") || typ == fourcc("stco") || typ == fourcc("co64") || typ == fourcc("sdtp") || typ == fourcc("stsc") {
log::debug!("Writing new {}, offset: {}, size: {size}", typ_to_str(typ), offs);

d.seek(SeekFrom::Current(size as i64 - header_size))?;
get_first(files).seek(SeekFrom::Current(size as i64 - header_size))?;

let out_pos = output_file.stream_position()?;
new_size = 12;
output_file.write(&0u32.to_be_bytes())?;
output_file.write_all(&0u32.to_be_bytes())?;
let new_typ = if typ == fourcc("stco") { fourcc("co64") } else { typ };
output_file.write(&new_typ.to_be_bytes())?;
output_file.write(&0u32.to_be_bytes())?; // flags
output_file.write_all(&new_typ.to_be_bytes())?;
output_file.write_all(&0u32.to_be_bytes())?; // flags

let track_desc = desc.moov_tracks.get_mut(tl_track).unwrap();
if typ == fourcc("stts") {
let mut new_stts: Vec<(u32, u32)> = Vec::with_capacity(track_desc.stts.len());
let mut prev_delta = None;
for x in &track_desc.stts {
if let Some(prev_delta) = prev_delta {
if prev_delta == x.1 { (*new_stts.last_mut().unwrap()).0 += x.0; continue; }
if prev_delta == x.1 { new_stts.last_mut().unwrap().0 += x.0; continue; }
}
prev_delta = Some(x.1);
new_stts.push(*x);
@@ -119,12 +126,12 @@ pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file
output_file.write_u32::<BigEndian>(track_desc.stsz_sample_size)?; // sample_size
output_file.write_u32::<BigEndian>(track_desc.stsz_count)?;
new_size += 8;
for x in &track_desc.stsz { output_file.write_u32::<BigEndian>(*x as u32)?; new_size += 4; }
for x in &track_desc.stsz { output_file.write_u32::<BigEndian>(*x)?; new_size += 4; }
}
if typ == fourcc("stss") {
output_file.write_u32::<BigEndian>(track_desc.stss.len() as u32)?;
new_size += 4;
for x in &track_desc.stss { output_file.write_u32::<BigEndian>(*x as u32)?; new_size += 4; }
for x in &track_desc.stss { output_file.write_u32::<BigEndian>(*x)?; new_size += 4; }
}
if typ == fourcc("stco") || typ == fourcc("co64") {
output_file.write_u32::<BigEndian>(track_desc.stco.len() as u32)?;
@@ -151,6 +158,7 @@ pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file
patch_bytes(output_file, out_pos, &(new_size as u32).to_be_bytes())?;
} else {
log::debug!("Writing original {}, offset: {}, size: {size}", typ_to_str(typ), offs);
let d = get_first(files);

// Copy without changes
d.seek(SeekFrom::Current(-header_size))?;
@@ -167,7 +175,7 @@ pub fn rewrite_from_desc<R: Read + Seek, W: Write + Seek>(d: &mut R, output_file
pub fn patch_bytes<W: Write + Seek>(writer: &mut W, position: u64, bytes: &[u8]) -> Result<()> {
let new_pos = writer.stream_position()?;
writer.seek(SeekFrom::Start(position))?;
writer.write(bytes)?;
writer.write_all(bytes)?;
writer.seek(SeekFrom::Start(new_pos))?;
Ok(())
}
