diff --git a/README.md b/README.md
index fa46997..272c5d6 100644
--- a/README.md
+++ b/README.md
@@ -9,7 +9,7 @@
 You need to have ffmpeg installed.
 * For macOS, you could use `brew install ffmpeg`.
 * For Windows, you need to download ffmpeg from [here](https://github.com/BtbN/FFmpeg-Builds/releases).
-Make sure you download a shared library build such as `ffmpeg-n5.1-latest-win64-gpl-5.1.zip`.
+Make sure you download a shared library build such as `ffmpeg-master-latest-win64-gpl-shared.zip`.
 
 Then, simply run `cargo build --release` to build the project. and `cargo run --release` to run it.
diff --git a/src/capture/macos/display.rs b/src/capture/macos/display.rs
index 8ddf26a..3837204 100644
--- a/src/capture/macos/display.rs
+++ b/src/capture/macos/display.rs
@@ -1,8 +1,6 @@
-use failure::format_err;
-
 use crate::capture::DisplayInfo;
 use crate::result::Result;
-use std::mem;
+use failure::format_err;
 
 use super::ffi::*;
 
@@ -36,11 +34,14 @@
     }
 
     pub fn width(self) -> usize {
-        unsafe { CGDisplayModeGetPixelWidth(CGDisplayCopyDisplayMode(self.id())) }
+        unsafe { CGDisplayPixelsWide(self.id()) }
+        // unsafe { CGDisplayModeGetPixelWidth(CGDisplayCopyDisplayMode(self.id())) }
     }
 
     pub fn height(self) -> usize {
-        unsafe { CGDisplayModeGetPixelHeight(CGDisplayCopyDisplayMode(self.id())) }
+        unsafe { CGDisplayPixelsHigh(self.id()) }
+
+        //unsafe { CGDisplayModeGetPixelHeight(CGDisplayCopyDisplayMode(self.id())) }
     }
 }
 
@@ -48,4 +49,7 @@ impl DisplayInfo for Display {
     fn resolution(&self) -> (u32, u32) {
         (self.width() as u32, self.height() as u32)
     }
+    fn dpi_conversion_factor(&self) -> f64 {
+        self.height() as f64 / unsafe { CGDisplayPixelsHigh(self.id()) } as f64
+    }
 }
diff --git a/src/capture/macos/ffi.rs b/src/capture/macos/ffi.rs
index 007a4c5..de0e105 100644
--- a/src/capture/macos/ffi.rs
+++ b/src/capture/macos/ffi.rs
@@ -191,9 +191,16 @@ extern "C" {
         pixel_buffer_attributes: CFDictionaryRef,
         pixel_buffer_out: *mut CVPixelBufferRef,
     ) -> i32;
-    pub fn CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer: CVPixelBufferRef, planeIndex: usize) -> usize;
-    pub fn CVPixelBufferGetBaseAddressOfPlane(pixel_buffer: CVPixelBufferRef, planeIndex: usize) -> *mut c_void;
-    pub fn CVPixelBufferUnlockBaseAddress(pixel_buffer: CVPixelBufferRef, unlock_flags: u32) -> i32;
+    pub fn CVPixelBufferGetBytesPerRowOfPlane(
+        pixel_buffer: CVPixelBufferRef,
+        planeIndex: usize,
+    ) -> usize;
+    pub fn CVPixelBufferGetBaseAddressOfPlane(
+        pixel_buffer: CVPixelBufferRef,
+        planeIndex: usize,
+    ) -> *mut c_void;
+    pub fn CVPixelBufferUnlockBaseAddress(pixel_buffer: CVPixelBufferRef, unlock_flags: u32)
+        -> i32;
 
     // IOSurface
     pub fn IOSurfaceGetAllocSize(buffer: IOSurfaceRef) -> usize;
diff --git a/src/capture/macos/macos_capture.rs b/src/capture/macos/macos_capture.rs
index be1bfd3..ac0659c 100644
--- a/src/capture/macos/macos_capture.rs
+++ b/src/capture/macos/macos_capture.rs
@@ -1,5 +1,5 @@
-use std::{ptr, slice};
 use std::time::Duration;
+use std::{ptr, slice};
 
 use async_trait::async_trait;
 use block::{Block, ConcreteBlock};
@@ -8,17 +8,25 @@ use libc::c_void;
 use tokio::sync::mpsc::Receiver;
 use tokio::sync::mpsc::Sender;
 
-use crate::{OutputSink, ScreenCapture};
 use crate::capture::frame::YUVFrame;
 use crate::capture::macos::config::Config as CaptureConfig;
 use crate::capture::macos::display::Display;
-use crate::capture::macos::ffi::{CFRelease, CGDisplayStreamCreateWithDispatchQueue, CGDisplayStreamFrameStatus, CGDisplayStreamRef, CGDisplayStreamStart, CGDisplayStreamStop, CGDisplayStreamUpdateGetDropCount, CGDisplayStreamUpdateRef, CGError, CVPixelBufferCreateWithIOSurface, CVPixelBufferGetBaseAddressOfPlane, CVPixelBufferGetBytesPerRowOfPlane, CVPixelBufferGetHeight, CVPixelBufferGetWidth, CVPixelBufferLockBaseAddress, CVPixelBufferRelease, CVPixelBufferUnlockBaseAddress, dispatch_queue_create, dispatch_release, DispatchQueue, FrameAvailableHandler, IOSurfaceRef};
 use crate::capture::macos::ffi::CGDisplayStreamFrameStatus::{FrameComplete, Stopped};
 use crate::capture::macos::ffi::PixelFormat::YCbCr420Full;
+use crate::capture::macos::ffi::{
+    dispatch_queue_create, dispatch_release, CFRelease, CGDisplayStreamCreateWithDispatchQueue,
+    CGDisplayStreamFrameStatus, CGDisplayStreamRef, CGDisplayStreamStart, CGDisplayStreamStop,
+    CGDisplayStreamUpdateGetDropCount, CGDisplayStreamUpdateRef, CGError,
+    CVPixelBufferCreateWithIOSurface, CVPixelBufferGetBaseAddressOfPlane,
+    CVPixelBufferGetBytesPerRowOfPlane, CVPixelBufferGetHeight, CVPixelBufferGetWidth,
+    CVPixelBufferLockBaseAddress, CVPixelBufferRelease, CVPixelBufferUnlockBaseAddress,
+    DispatchQueue, FrameAvailableHandler, IOSurfaceRef,
+};
 use crate::config::Config;
 use crate::encoder::{FfmpegEncoder, FrameData};
 use crate::performance_profiler::PerformanceProfiler;
 use crate::result::Result;
+use crate::{OutputSink, ScreenCapture};
 
 pub struct MacOSScreenCapture<'a> {
     stream: CGDisplayStreamRef,
@@ -37,23 +45,17 @@ impl<'a> MacOSScreenCapture<'a> {
         let (sender, receiver) = tokio::sync::mpsc::channel::<YUVFrame>(1);
         let sender = Box::into_raw(Box::new(sender));
 
-        let handler: FrameAvailableHandler =
-            ConcreteBlock::new(
-                move |status: CGDisplayStreamFrameStatus,
-                      display_time: u64,
-                      frame_surface: IOSurfaceRef,
-                      update_ref: CGDisplayStreamUpdateRef| {
-                    unsafe {
-                        frame_available_handler(
-                            display_time,
-                            sender,
-                            status,
-                            frame_surface,
-                            update_ref,
-                        )
-                    }
-                },
-            ).copy();
+        let handler: FrameAvailableHandler = ConcreteBlock::new(
+            move |status: CGDisplayStreamFrameStatus,
+                  display_time: u64,
+                  frame_surface: IOSurfaceRef,
+                  update_ref: CGDisplayStreamUpdateRef| {
+                unsafe {
+                    frame_available_handler(display_time, sender, status, frame_surface, update_ref)
+                }
+            },
+        )
+        .copy();
 
         let queue = unsafe {
             dispatch_queue_create(b"app.mirashare\0".as_ptr() as *const i8, ptr::null_mut())
@@ -66,7 +68,7 @@
             throttle: 1. / (config.max_fps as f64),
             queue_length: 3,
         }
-            .build();
+        .build();
         let stream = CGDisplayStreamCreateWithDispatchQueue(
             display.id(),
             display.width(),
@@ -108,10 +110,7 @@ impl ScreenCapture for MacOSScreenCapture<'_> {
             profiler.accept_frame(frame_time as i64);
             profiler.done_preprocessing();
             let encoded = encoder
-                .encode(
-                    FrameData::NV12(&frame),
-                    frame_time as i64,
-                )
+                .encode(FrameData::NV12(&frame), frame_time as i64)
                 .unwrap();
             let encoded_len = encoded.len();
             profiler.done_encoding();
@@ -150,7 +149,8 @@ unsafe fn frame_available_handler(
         frame_surface,
         ptr::null_mut(),
         &mut pixel_buffer,
-    ) != 0 {
+    ) != 0
+    {
         error!("CVPixelBufferCreateWithIOSurface failed");
         return;
     }
@@ -165,14 +165,16 @@ unsafe fn frame_available_handler(
     let luminance_bytes = slice::from_raw_parts(
         luminance_bytes_address as *mut u8,
        height * luminance_stride,
-    ).to_vec();
+    )
+    .to_vec();
 
     let chrominance_bytes_address = CVPixelBufferGetBaseAddressOfPlane(pixel_buffer, 1);
     let chrominance_stride = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
     let chrominance_bytes = slice::from_raw_parts(
         chrominance_bytes_address as *mut u8,
         height * chrominance_stride / 2,
-    ).to_vec();
+    )
+    .to_vec();
 
     CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
 
diff --git a/src/capture/macos/mod.rs b/src/capture/macos/mod.rs
index 525ddf5..52fa8d8 100644
--- a/src/capture/macos/mod.rs
+++ b/src/capture/macos/mod.rs
@@ -1,6 +1,6 @@
 mod config;
-mod ffi;
 pub mod display;
+mod ffi;
 pub mod macos_capture;
 
 pub use display::Display;
diff --git a/src/capture/mod.rs b/src/capture/mod.rs
index dafecf9..448c991 100644
--- a/src/capture/mod.rs
+++ b/src/capture/mod.rs
@@ -14,6 +14,8 @@ pub trait ScreenCapture {
 pub trait DisplayInfo {
     /// Get the resolution of the display in (width, height)
     fn resolution(&self) -> (u32, u32);
+    /// Get the DPI factor for input handling
+    fn dpi_conversion_factor(&self) -> f64;
 }
 
 mod yuv_converter;
@@ -29,12 +31,12 @@ pub use wgc::display::Display;
 #[cfg(target_os = "windows")]
 pub use wgc::WGCScreenCapture as ScreenCaptureImpl;
 
+mod frame;
 #[cfg(target_os = "macos")]
 mod macos;
-mod frame;
 
+pub use frame::YUVFrame;
 #[cfg(target_os = "macos")]
 pub use macos::display::Display;
 #[cfg(target_os = "macos")]
 pub use macos::MacOSScreenCapture as ScreenCaptureImpl;
-pub use frame::YUVFrame;
diff --git a/src/capture/wgc/display.rs b/src/capture/wgc/display.rs
index b97b787..26215be 100644
--- a/src/capture/wgc/display.rs
+++ b/src/capture/wgc/display.rs
@@ -46,4 +46,7 @@ impl DisplayInfo for GraphicsCaptureItem {
             self.Size().unwrap().Height as u32,
         )
     }
+    fn dpi_conversion_factor(&self) -> f64 {
+        1.0
+    }
 }
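Note on the new `DisplayInfo::dpi_conversion_factor` plumbing above: `main.rs` reads the factor from the selected display and hands it to `InputHandler::new`, which passes its reciprocal into `handle_input_event`, where viewer coordinates are multiplied by it before being sent to enigo (see the `inputs/mod.rs` and `main.rs` hunks further down). A minimal sketch of that mapping, assuming only the trait as defined above (the helper name is hypothetical and not part of the patch):

```rust
use crate::capture::DisplayInfo;

/// Scale a coordinate reported by the viewer into the coordinate space the OS
/// input APIs expect, mirroring the `1. / dpi_factor` inversion done in
/// `InputHandler::new` and the multiplication done in `handle_input_event`.
fn viewer_to_screen(coord: i32, display: &impl DisplayInfo) -> i32 {
    (coord as f64 / display.dpi_conversion_factor()) as i32
}
```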
diff --git a/src/config.rs b/src/config.rs
index 73fca64..ad0ce40 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -84,7 +84,7 @@ pub fn load(path: &Path) -> Result<Config> {
     if !path.exists() {
         let mut file = File::create(path)?;
         let config = toml::from_str::<Config>("")?;
-        file.write_all("# for more sample configs, see https://github.com/mira-screen-share/sharer/tree/main/configs".as_bytes())?;
+        file.write_all("# for more sample configs, see https://github.com/mira-screen-share/sharer/tree/main/configs\n".as_bytes())?;
         file.write_all(toml::to_string(&config)?.as_ref())?;
         return Ok(config);
     }
@@ -123,7 +123,7 @@ fn default_viewer() -> String {
 }
 
 fn default_max_fps() -> u32 {
-    120
+    30
 }
 
 fn default_ice_servers() -> Vec<String> {
diff --git a/src/encoder/ffmpeg.rs b/src/encoder/ffmpeg.rs
index 74923b0..2bf5bce 100644
--- a/src/encoder/ffmpeg.rs
+++ b/src/encoder/ffmpeg.rs
@@ -1,8 +1,8 @@
-use std::sync::Arc;
 use std::sync::atomic::AtomicBool;
+use std::sync::Arc;
 
-use ac_ffmpeg::codec::{Encoder, video};
 use ac_ffmpeg::codec::video::VideoEncoder;
+use ac_ffmpeg::codec::{video, Encoder};
 use ac_ffmpeg::time::{TimeBase, Timestamp};
 use bytes::Bytes;
 use itertools::enumerate;
@@ -88,20 +88,21 @@
         match frame_data {
             FrameData::NV12(nv12) => {
                 assert_eq!(self.pixel_format, "nv12");
-                let encoder_buffer_len = frame.planes_mut()[0].data().len();
-                let encoder_line_size = encoder_buffer_len / self.h;
-                let y = &nv12.luminance_bytes;
-                let uv = &nv12.chrominance_bytes;
-                for (r, row) in enumerate(y.chunks(nv12.luminance_stride as usize)) {
-                    frame.planes_mut()[0]
-                        .data_mut()[r * encoder_line_size..r * encoder_line_size + self.w]
-                        .copy_from_slice(&row[..self.w])
-                }
-                for (r, row) in enumerate(uv.chunks(nv12.chrominance_stride as usize)) {
-                    frame.planes_mut()[1]
-                        .data_mut()[r * encoder_line_size..r * encoder_line_size + self.w]
-                        .copy_from_slice(&row[..self.w])
-                }
+                let encoder_buffer_len = frame.planes_mut()[0].data_mut().len();
+                let encoder_line_size = encoder_buffer_len / self.h as usize;
+
+                self.copy_nv12(
+                    &nv12.luminance_bytes,
+                    nv12.luminance_stride as usize,
+                    encoder_line_size,
+                    frame.planes_mut()[0].data_mut(),
+                );
+                self.copy_nv12(
+                    &nv12.chrominance_bytes,
+                    nv12.chrominance_stride as usize,
+                    encoder_line_size,
+                    frame.planes_mut()[1].data_mut(),
+                );
             }
             FrameData::BGR0(bgr0) => match self.pixel_format.as_str() {
                 "yuv420p" => {
@@ -120,7 +121,6 @@
                 _ => unimplemented!(),
             },
         }
-
         let frame = frame.freeze();
         self.encoder.push(frame.clone())?;
         self.frame_pool.put(frame);
@@ -130,4 +130,17 @@
         }
         Ok(Bytes::from(ret))
     }
+
+    fn copy_nv12(
+        &self,
+        source: &[u8],
+        stride: usize,
+        encoder_line_size: usize,
+        destination: &mut [u8],
+    ) {
+        for (r, row) in enumerate(source.chunks(stride)) {
+            destination[r * encoder_line_size..r * encoder_line_size + self.w as usize]
+                .copy_from_slice(&row[..self.w as usize])
+        }
+    }
 }
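The `copy_nv12` helper introduced above handles the fact that both the captured planes and the encoder's frame planes are row-padded, but with different pitches: source rows are `stride` bytes apart, destination rows are `encoder_line_size` bytes apart, and only `self.w` bytes per row carry pixel data. The same idea as a standalone sketch (a generic illustration, not code from the patch):

```rust
/// Copy `width` payload bytes from each row of a stride-padded source plane
/// into a destination plane whose rows are `dst_pitch` bytes apart.
fn copy_padded_plane(
    src: &[u8],
    src_pitch: usize,
    dst: &mut [u8],
    dst_pitch: usize,
    width: usize,
) {
    for (r, row) in src.chunks(src_pitch).enumerate() {
        dst[r * dst_pitch..r * dst_pitch + width].copy_from_slice(&row[..width]);
    }
}
```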
diff --git a/src/inputs/mod.rs b/src/inputs/mod.rs
index 78fd2c9..7c6a2cc 100644
--- a/src/inputs/mod.rs
+++ b/src/inputs/mod.rs
@@ -41,39 +41,52 @@ pub struct InputHandler {
 }
 
 impl InputHandler {
-    fn handle_input_event(input_msg: Bytes) -> Result<()> {
+    fn handle_input_event(input_msg: Bytes, dpi_factor: f64) -> Result<()> {
+        let scroll_reverse_factor = if cfg!(target_os = "windows") { -1. } else { 1. };
         let mut enigo = enigo::Enigo::new();
         let input_msg = serde_json::from_slice::<InputMessage>(&input_msg)?;
         debug!("Deserialized input message: {:#?}", input_msg);
         match input_msg {
             InputMessage::KeyDown { key } => enigo.key_down(enigo::Key::from_js_key(&key)?),
             InputMessage::KeyUp { key } => enigo.key_up(enigo::Key::from_js_key(&key)?),
-            InputMessage::MouseMove { x, y } => enigo.mouse_move_to(x, y),
+            InputMessage::MouseMove { x, y } => enigo.mouse_move_to(
+                (x as f64 * dpi_factor) as i32,
+                (y as f64 * dpi_factor) as i32,
+            ),
             InputMessage::MouseDown { x, y, button } => {
-                enigo.mouse_move_to(x, y);
+                enigo.mouse_move_to(
+                    (x as f64 * dpi_factor) as i32,
+                    (y as f64 * dpi_factor) as i32,
+                );
                 enigo.mouse_down(button.into())
             }
             InputMessage::MouseUp { x, y, button } => {
-                enigo.mouse_move_to(x, y);
+                enigo.mouse_move_to(
+                    (x as f64 * dpi_factor) as i32,
+                    (y as f64 * dpi_factor) as i32,
+                );
                 enigo.mouse_up(button.into())
             }
             InputMessage::MouseWheel { x, y, dx, dy } => {
-                enigo.mouse_move_to(x, y);
-                enigo.mouse_scroll_y(dy);
-                enigo.mouse_scroll_x(dx);
+                enigo.mouse_move_to(
+                    (x as f64 * dpi_factor) as i32,
+                    (y as f64 * dpi_factor) as i32,
+                );
+                enigo.mouse_scroll_y((dy as f64 / 120. * scroll_reverse_factor) as i32);
+                enigo.mouse_scroll_x((dx as f64 / 120.) as i32);
             }
         };
         Ok(())
     }
 
-    pub fn new(disabled_control: bool) -> Self {
+    pub fn new(disabled_control: bool, dpi_factor: f64) -> Self {
         let (sender, mut receiver) = mpsc::channel::<Bytes>(32);
         tokio::spawn(async move {
             while let Some(msg) = receiver.recv().await {
-                if disabled_control{
+                if disabled_control {
                     continue; // Skip the message if user disabled remote control
                 }
-                if let Err(err) = Self::handle_input_event(msg) {
+                if let Err(err) = Self::handle_input_event(msg, 1. / dpi_factor) {
                     warn!("Error handling input event: {}", err);
                 }
             }
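For the wheel handling above: the viewer's `dx`/`dy` deltas are divided by 120, the classic Windows `WHEEL_DELTA` step, to turn them into whole scroll units for enigo, and the vertical sign is flipped on Windows only via `scroll_reverse_factor`. A hedged sketch of that conversion (the function name is illustrative only):

```rust
/// Convert a wheel delta, assumed to be reported in multiples of 120
/// (the Windows WHEEL_DELTA unit), into whole scroll steps.
/// `reverse` is -1.0 on Windows and 1.0 elsewhere, matching the patch above.
fn wheel_delta_to_steps(delta: f64, reverse: f64) -> i32 {
    (delta / 120. * reverse) as i32
}
```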
diff --git a/src/main.rs b/src/main.rs
index 5d68b9a..99a348d 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -50,11 +50,12 @@ async fn main() -> Result<()> {
     let config = config::load(Path::new(&args.config))?;
 
     let display = Display::online().unwrap()[args.display].select()?;
+    let dpi_conversion_factor = display.dpi_conversion_factor();
     let profiler = PerformanceProfiler::new(args.profiler, config.max_fps);
     let resolution = display.resolution();
     let mut capture = ScreenCaptureImpl::new(display, &config)?;
     let mut encoder = encoder::FfmpegEncoder::new(resolution.0, resolution.1, &config.encoder);
-    let input_handler = Arc::new(inputs::InputHandler::new(args.disable_control));
+    let input_handler = Arc::new(inputs::InputHandler::new(args.disable_control, dpi_conversion_factor));
     let my_uuid = uuid::Uuid::new_v4().to_string();
 
     info!("Resolution: {:?}", resolution);
diff --git a/src/output/webrtc_peer.rs b/src/output/webrtc_peer.rs
index e39fda9..6c743cc 100644
--- a/src/output/webrtc_peer.rs
+++ b/src/output/webrtc_peer.rs
@@ -18,7 +18,6 @@ use rtcp::payload_feedbacks::full_intra_request::FullIntraRequest;
 use rtcp::payload_feedbacks::receiver_estimated_maximum_bitrate::ReceiverEstimatedMaximumBitrate;
 use rtcp::receiver_report::ReceiverReport;
 
-
 pub struct WebRTCPeer {}
 
 impl WebRTCPeer {