From b6288d39ceeb8e6d526d761f11bf37cafb09fb92 Mon Sep 17 00:00:00 2001
From: l1npengtul
Date: Sat, 12 Oct 2024 19:55:16 +0900
Subject: [PATCH] [will not build] split out capture API, add async capture type APIs

---
 nokhwa-bindings-linux/src/lib.rs    |   2 +-
 nokhwa-core/src/buffer.rs           |   2 +-
 nokhwa-core/src/capture.rs          | 112 +++++++++
 nokhwa-core/src/controls.rs         | 210 ++++++----------
 nokhwa-core/src/decoders/general.rs |   4 +-
 nokhwa-core/src/error.rs            |   1 -
 nokhwa-core/src/format_request.rs   |  21 +-
 nokhwa-core/src/frame_format.rs     |  16 +-
 nokhwa-core/src/lib.rs              |   1 +
 nokhwa-core/src/traits.rs           | 369 ----------------------------
 nokhwa-core/src/types.rs            |   5 +-
 11 files changed, 207 insertions(+), 536 deletions(-)
 create mode 100644 nokhwa-core/src/capture.rs

diff --git a/nokhwa-bindings-linux/src/lib.rs b/nokhwa-bindings-linux/src/lib.rs
index 5b016d2..c0892c7 100644
--- a/nokhwa-bindings-linux/src/lib.rs
+++ b/nokhwa-bindings-linux/src/lib.rs
@@ -594,7 +594,7 @@ mod internal {
     fn fourcc_to_frameformat(fourcc: FourCC) -> Option<FrameFormat> {
         match fourcc.str().ok()? {
             "YUYV" => Some(FrameFormat::Yuy2_422),
-            "UYVY" => Some(FrameFormat::Uyvy_422),
+            "UYVY" => Some(FrameFormat::Uyvy422),
             "YV12" => Some(FrameFormat::Yv12),
             "MJPG" => Some(FrameFormat::MJpeg),
             "GRAY" => Some(FrameFormat::Luma8),
diff --git a/nokhwa-core/src/buffer.rs b/nokhwa-core/src/buffer.rs
index 2049bb8..e0418ca 100644
--- a/nokhwa-core/src/buffer.rs
+++ b/nokhwa-core/src/buffer.rs
@@ -53,7 +53,7 @@ impl Buffer {
         &self.buffer
     }
 
-    /// Get a owned version of this buffer.
+    /// Get an owned version of this buffer. Note: this is equivalent to cloning the underlying `Bytes`, which is cheap.
     #[must_use]
     pub fn buffer_bytes(&self) -> Bytes {
         self.buffer.clone()
diff --git a/nokhwa-core/src/capture.rs b/nokhwa-core/src/capture.rs
new file mode 100644
index 0000000..7ba4d27
--- /dev/null
+++ b/nokhwa-core/src/capture.rs
@@ -0,0 +1,112 @@
+use std::collections::HashMap;
+use crate::buffer::Buffer;
+use crate::controls::{CameraProperties, CameraPropertyDescriptor, CameraPropertyId, CameraPropertyValue};
+use crate::error::NokhwaError;
+use crate::types::{CameraFormat, CameraIndex, Resolution};
+
+pub trait Open {
+    fn open(index: CameraIndex) -> Self;
+}
+
+#[cfg(feature = "async")]
+pub trait AsyncOpen {
+    async fn open_async(index: CameraIndex) -> Self;
+}
+
+macro_rules! def_camera_props {
+    ( $($property:ident, )* ) => {
+        paste::paste! {
+            $(
+                fn [<$property:snake>](&self) -> Option<&CameraPropertyDescriptor> {
+                    self.properties().[<$property:snake>]()
+                }
+
+                fn [<set_ $property:snake>](&mut self, value: CameraPropertyValue) -> Result<(), NokhwaError>;
+            )*
+        }
+    };
+}
+
+macro_rules! def_camera_props_async {
+    ( $($property:ident, )* ) => {
+        paste::paste! {
+            $(
+                async fn [<set_ $property:snake>](&mut self, value: CameraPropertyValue) -> Result<(), NokhwaError>;
+            )*
+        }
+    };
+}
+
+pub trait Setting {
+    fn enumerate_formats(&self) -> Vec<CameraFormat>;
+
+    fn enumerate_formats_by_resolution(&self) -> HashMap<Resolution, Vec<CameraFormat>>;
+
+    fn set_format(&self, camera_format: CameraFormat) -> Result<(), NokhwaError>;
+
+    fn properties(&self) -> &CameraProperties;
+
+    fn set_property(&mut self, property: &CameraPropertyId, value: CameraPropertyValue) -> Result<(), NokhwaError>;
+
+    def_camera_props!(
+        Brightness,
+        Contrast,
+        Hue,
+        Saturation,
+        Sharpness,
+        Gamma,
+        WhiteBalance,
+        BacklightCompensation,
+        Gain,
+        Pan,
+        Tilt,
+        Zoom,
+        Exposure,
+        Iris,
+        Focus,
+        Facing,
+    );
+}
+
+#[cfg(feature = "async")]
+pub trait AsyncSetting {
+    async fn set_format(&self, camera_format: CameraFormat) -> Result<(), NokhwaError>;
+
+    async fn set_property(&mut self, property: &CameraPropertyId, value: CameraPropertyValue) -> Result<(), NokhwaError>;
+
+    def_camera_props_async!(
+        Brightness,
+        Contrast,
+        Hue,
+        Saturation,
+        Sharpness,
+        Gamma,
+        WhiteBalance,
+        BacklightCompensation,
+        Gain,
+        Pan,
+        Tilt,
+        Zoom,
+        Exposure,
+        Iris,
+        Focus,
+        Facing,
+    );
+}
+
+pub trait Stream {
+    fn open_stream(&mut self) -> Result<(), NokhwaError>;
+
+    fn poll_frame(&mut self) -> Result<Buffer, NokhwaError>;
+
+    fn close_stream(&mut self) -> Result<(), NokhwaError>;
+}
+
+#[cfg(feature = "async")]
+pub trait AsyncStream {
+    async fn open_stream(&mut self) -> Result<(), NokhwaError>;
+
+    async fn poll_frame(&mut self) -> Result<Buffer, NokhwaError>;
+
+    async fn close_stream(&mut self) -> Result<(), NokhwaError>;
+}
+
+pub trait Capture: Open + Setting + Stream {}
+
+#[cfg(feature = "async")]
+pub trait AsyncCapture: Capture + AsyncOpen + AsyncSetting + AsyncStream {}
diff --git a/nokhwa-core/src/controls.rs b/nokhwa-core/src/controls.rs
index 390b42a..c9135ac 100644
--- a/nokhwa-core/src/controls.rs
+++ b/nokhwa-core/src/controls.rs
@@ -16,159 +16,105 @@ impl From for ControlValidationFailure {
     }
 }
 
+#[derive(Clone, Debug, Hash, Ord, PartialOrd, Eq, PartialEq)]
+pub enum CameraPropertyId {
+    Brightness,
+    Contrast,
+    Hue,
+    Saturation,
+    Sharpness,
+    Gamma,
+    WhiteBalance,
+    BacklightCompensation,
+    Gain,
+    Pan,
+    Tilt,
+    Zoom,
+    Exposure,
+    Iris,
+    Focus,
+    Facing,
+    Custom(String)
+}
+
 // TODO: Replace Controls API with Properties. (this one)
 /// Properties of a Camera.
 ///
 /// If the property is not supported, it is `None`.
-/// Custom or platform-specific properties go into `other`
+/// Custom or platform-specific properties use [`CameraPropertyId::Custom`].
 pub struct CameraProperties {
-    brightness: Option<CameraPropertyDescriptor>,
-    contrast: Option<CameraPropertyDescriptor>,
-    hue: Option<CameraPropertyDescriptor>,
-    saturation: Option<CameraPropertyDescriptor>,
-    sharpness: Option<CameraPropertyDescriptor>,
-    gamma: Option<CameraPropertyDescriptor>,
-    white_balance: Option<CameraPropertyDescriptor>,
-    backlight_compensation: Option<CameraPropertyDescriptor>,
-    gain: Option<CameraPropertyDescriptor>,
-    pan: Option<CameraPropertyDescriptor>,
-    tilt: Option<CameraPropertyDescriptor>,
-    zoom: Option<CameraPropertyDescriptor>,
-    exposure: Option<CameraPropertyDescriptor>,
-    iris: Option<CameraPropertyDescriptor>,
-    focus: Option<CameraPropertyDescriptor>,
-    facing: Option<CameraPropertyDescriptor>,
-    other: HashMap<String, CameraPropertyDescriptor>,
+    props: HashMap<CameraPropertyId, CameraPropertyDescriptor>,
 }
 
-impl CameraProperties {
-    pub fn brightness(&self) -> Option<&CameraPropertyDescriptor> {
-        self.brightness.as_ref()
-    }
-
-    pub fn contrast(&self) -> Option<&CameraPropertyDescriptor> {
-        self.contrast.as_ref()
-    }
-
-    pub fn hue(&self) -> Option<&CameraPropertyDescriptor> {
-        self.hue.as_ref()
-    }
-
-    pub fn saturation(&self) -> Option<&CameraPropertyDescriptor> {
-        self.saturation.as_ref()
-    }
-
-    pub fn sharpness(&self) -> Option<&CameraPropertyDescriptor> {
-        self.sharpness.as_ref()
-    }
-
-    pub fn gamma(&self) -> Option<&CameraPropertyDescriptor> {
-        self.gamma.as_ref()
-    }
-
-    pub fn white_balance(&self) -> Option<&CameraPropertyDescriptor> {
-        self.white_balance.as_ref()
-    }
-
-    pub fn backlight_compensation(&self) -> Option<&CameraPropertyDescriptor> {
-        self.backlight_compensation.as_ref()
-    }
-
-    pub fn gain(&self) -> Option<&CameraPropertyDescriptor> {
-        self.gain.as_ref()
-    }
-
-    pub fn pan(&self) -> Option<&CameraPropertyDescriptor> {
-        self.pan.as_ref()
-    }
-
-    pub fn tilt(&self) -> Option<&CameraPropertyDescriptor> {
-        self.tilt.as_ref()
-    }
-
-    pub fn zoom(&self) -> Option<&CameraPropertyDescriptor> {
-        self.zoom.as_ref()
-    }
-
-    pub fn exposure(&self) -> Option<&CameraPropertyDescriptor> {
-        self.exposure.as_ref()
-    }
-
-    pub fn iris(&self) -> Option<&CameraPropertyDescriptor> {
-        self.iris.as_ref()
-    }
-
-    pub fn focus(&self) -> Option<&CameraPropertyDescriptor> {
-        self.focus.as_ref()
-    }
-
-    pub fn facing(&self) -> Option<&CameraPropertyDescriptor> {
-        self.facing.as_ref()
-    }
-
-    pub fn other(&self, property: &str) -> Option<&CameraPropertyDescriptor> {
-        self.other.get(property)
-    }
-
-    pub fn set_other(&mut self, property: &str, value: CameraPropertyValue) -> Result<(), NokhwaError> {
-        if let Some(prop) = self.other.get_mut(property) {
-            prop.set_value(value)?;
-            return Ok(());
-        }
-
-        Err(
-            NokhwaError::SetPropertyError {
-                property: property.to_string(),
-                value: value.to_string(),
-                error: String::from("Is null."),
-            }
-        )
-    }
-}
-
-macro_rules! generate_property_sets {
-    ( $( $name:ident, )* ) => {
-        {
-            impl CameraProperties {
-                paste::paste! {
-                    $(
-                        pub fn [<set_ $name>](&mut self, value: CameraPropertyValue) -> Result<(), NokhwaError> {
-                            if let Some(descriptor) = self.$name {
-                                descriptor.set_value(value)?;
-                                return Ok(())
-                            }
-                            return Err(
-                                NokhwaError::SetPropertyError {
-                                    property: std::stringify!($name),
-                                    value: value.to_string(),
-                                    error: String::from("Is null."),
-                                }
-                            );
-                        }
-                    )*
-                }
-            }
-        }
-    };
-}
-
-generate_property_sets!( brightness, contrast, hue, saturation, sharpness, gamma, white_balance,
-    backlight_compensation, gain, pan, tilt, zoom, exposure, iris, focus, facing, );
+macro_rules! def_camera_props {
+    ( $($property:ident, )* ) => {
+        paste::paste! {
+            impl CameraProperties {
+                $(
+                    pub fn [<$property:snake>](&self) -> Option<&CameraPropertyDescriptor> {
+                        self.props.get(&CameraPropertyId::$property)
+                    }
+
+                    pub fn [<set_ $property:snake>](&mut self, value: CameraPropertyValue) -> Result<(), NokhwaError> {
+                        self.set_property(&CameraPropertyId::$property, value)
+                    }
+                )*
+            }
+        }
+    };
+}
+
+def_camera_props!(
+    Brightness,
+    Contrast,
+    Hue,
+    Saturation,
+    Sharpness,
+    Gamma,
+    WhiteBalance,
+    BacklightCompensation,
+    Gain,
+    Pan,
+    Tilt,
+    Zoom,
+    Exposure,
+    Iris,
+    Focus,
+    Facing,
+);
+
+impl CameraProperties {
+    pub fn property(&self, property: &CameraPropertyId) -> Option<&CameraPropertyDescriptor> {
+        self.props.get(property)
+    }
+
+    pub fn set_property(&mut self, property: &CameraPropertyId, value: CameraPropertyValue) -> Result<(), NokhwaError> {
+        match self.props.get_mut(property) {
+            Some(prop) => {
+                prop.set_value(value)?;
+                Ok(())
+            }
+            None => {
+                Err(NokhwaError::SetPropertyError {
+                    property: format!("{property:?}"),
+                    value: value.to_string(),
+                    error: String::from("Is null."),
+                })
+            }
+        }
+    }
+}
 
 /// Describes an individual property.
 #[derive(Clone, Debug)]
 pub struct CameraPropertyDescriptor {
     flags: HashSet<CameraPropertyFlag>,
-    platform_specific_id: Option<CameraCustomPropertyPlatformId>,
     range: CameraPropertyRange,
     value: CameraPropertyValue,
 }
 
 impl CameraPropertyDescriptor {
-    pub fn new(flags: &[CameraPropertyFlag], platform_id: Option<CameraCustomPropertyPlatformId>, range: CameraPropertyRange, value: CameraPropertyValue) -> Self {
+    pub fn new(flags: &[CameraPropertyFlag], range: CameraPropertyRange, value: CameraPropertyValue) -> Self {
         CameraPropertyDescriptor {
             flags: HashSet::from(flags),
-            platform_specific_id: platform_id,
             range,
             value,
         }
@@ -206,10 +152,6 @@ impl CameraPropertyDescriptor {
         self.is_disabled()?;
         Ok(&self.flags)
     }
-
-    pub fn platform_specific(&self) -> Option<&CameraCustomPropertyPlatformId> {
-        self.platform_specific_id.as_ref()
-    }
 
     pub fn range(&self) -> &CameraPropertyRange {
         &self.range
@@ -220,7 +162,11 @@ impl CameraPropertyDescriptor {
     }
 
     pub fn set_value(&mut self, value: CameraPropertyValue) -> Result<(), NokhwaError> {
-        self.range.check_value(&value)?;
+        self.range.check_value(&value).map_err(|_| NokhwaError::SetPropertyError {
+            property: "CameraPropertyValue".to_string(),
+            value: value.to_string(),
+            error: "Bad Type".to_string(),
+        })?;
         self.value = value;
         Ok(())
     }
diff --git a/nokhwa-core/src/decoders/general.rs b/nokhwa-core/src/decoders/general.rs
index d51bc52..5a80db0 100644
--- a/nokhwa-core/src/decoders/general.rs
+++ b/nokhwa-core/src/decoders/general.rs
@@ -10,7 +10,7 @@ pub struct GeneralPurposeDecoder<D> where D: PixelWithColorType;
 impl<D> Decoder for GeneralPurposeDecoder<D> where D: PixelWithColorType {
     const ALLOWED_FORMATS: &'static [FrameFormat] = &[
         FrameFormat::MJpeg, FrameFormat::Luma8, FrameFormat::Luma16, FrameFormat::Rgb8, FrameFormat::RgbA8,
-        FrameFormat::Nv12, FrameFormat::Nv21, FrameFormat::Uyvy_422, FrameFormat::Yuy2_422, FrameFormat::Yv12,
+        FrameFormat::Nv12, FrameFormat::Nv21, FrameFormat::Uyvy422, FrameFormat::Yuy2_422, FrameFormat::Yv12,
         FrameFormat::Yuv444, FrameFormat::I420, FrameFormat::I422, FrameFormat::I444
     ];
 
@@ -40,7 +40,7 @@ impl<D> Decoder for GeneralPurposeDecoder<D> where D: PixelWithColorType {
         let source = match buffer.source_frame_format() {
             FrameFormat::MJpeg => PixelFormat::Rgb, // => JPEG decoder
             FrameFormat::Yuy2_422 => PixelFormat::I422,
-            FrameFormat::Uyvy_422 => PixelFormat::I422,
+            FrameFormat::Uyvy422 => PixelFormat::I422,
             FrameFormat::Yuv444 => PixelFormat::I444,
             FrameFormat::Nv12 => PixelFormat::Nv12,
             FrameFormat::Nv21 =>
PixelFormat::Nv12, diff --git a/nokhwa-core/src/error.rs b/nokhwa-core/src/error.rs index d7a9622..aacd93c 100644 --- a/nokhwa-core/src/error.rs +++ b/nokhwa-core/src/error.rs @@ -16,7 +16,6 @@ use crate::{frame_format::FrameFormat, types::ApiBackend}; use thiserror::Error; -use crate::ranges::RangeValidationResult; /// All errors in `nokhwa`. #[allow(clippy::module_name_repetitions)] diff --git a/nokhwa-core/src/format_request.rs b/nokhwa-core/src/format_request.rs index 8379980..116527e 100644 --- a/nokhwa-core/src/format_request.rs +++ b/nokhwa-core/src/format_request.rs @@ -75,36 +75,21 @@ impl FormatRequest { let mut formats = list_of_formats.iter().filter(|x| { frame_format.contains(&x.format()) && frame_rate.in_range(x.frame_rate()) }).collect::>(); - formats.sort_by(|a, b| { - match a.frame_rate().partial_cmp(&b.frame_rate()) { - None | Some(Ordering::Equal) => a.resolution().cmp(&b.resolution()), - Some(ord) => ord, - } - }); + formats.sort(); formats.first().copied().copied() } FormatRequest::HighestResolution { resolution, frame_format } => { let mut formats = list_of_formats.iter().filter(|x| { frame_format.contains(&x.format()) && resolution.in_range(x.resolution()) }).collect::>(); - formats.sort_by(|a, b| { - match a.resolution().partial_cmp(&b.resolution()) { - None | Some(Ordering::Equal) => a.frame_rate().partial_cmp(&b.frame_rate()).unwrap_or(Ordering::Equal), - Some(ord) => ord, - } - }); + formats.sort(); formats.first().copied().copied() } FormatRequest::Exact { resolution, frame_rate, frame_format } => { let mut formats = list_of_formats.iter().filter(|x| { frame_format.contains(&x.format()) && resolution == &x.resolution() && frame_rate == &x.frame_rate() }).collect::>(); - formats.sort_by(|a, b| { - match a.resolution().partial_cmp(&b.resolution()) { - None | Some(Ordering::Equal) => a.frame_rate().partial_cmp(&b.frame_rate()).unwrap_or(Ordering::Equal), - Some(ord) => ord, - } - }); + formats.sort(); formats.first().copied().copied() } } diff --git a/nokhwa-core/src/frame_format.rs b/nokhwa-core/src/frame_format.rs index 557299e..8e6bc0c 100644 --- a/nokhwa-core/src/frame_format.rs +++ b/nokhwa-core/src/frame_format.rs @@ -40,8 +40,8 @@ pub enum FrameFormat { Yuv444, // -> 422 16 BPP - Yuy2_422, - Uyvy_422, + Yuyv422, + Uyvy422, // 420 Nv12, @@ -77,8 +77,8 @@ impl FrameFormat { FrameFormat::XVid, FrameFormat::VP8, FrameFormat::VP9, - FrameFormat::Yuy2_422, - FrameFormat::Uyvy_422, + FrameFormat::Yuyv422, + FrameFormat::Uyvy422, FrameFormat::Nv12, FrameFormat::Nv21, FrameFormat::Yv12, @@ -103,8 +103,8 @@ impl FrameFormat { ]; pub const CHROMA: &'static [FrameFormat] = &[ - FrameFormat::Yuy2_422, - FrameFormat::Uyvy_422, + FrameFormat::Yuyv422, + FrameFormat::Uyvy422, FrameFormat::Nv12, FrameFormat::Nv21, FrameFormat::Yv12, @@ -126,8 +126,8 @@ impl FrameFormat { FrameFormat::XVid, FrameFormat::VP8, FrameFormat::VP9, - FrameFormat::Yuy2_422, - FrameFormat::Uyvy_422, + FrameFormat::Yuyv422, + FrameFormat::Uyvy422, FrameFormat::Nv12, FrameFormat::Nv21, FrameFormat::Yv12, diff --git a/nokhwa-core/src/lib.rs b/nokhwa-core/src/lib.rs index a2951e6..9c19fea 100644 --- a/nokhwa-core/src/lib.rs +++ b/nokhwa-core/src/lib.rs @@ -32,3 +32,4 @@ pub mod decoders; pub mod utils; pub mod ranges; pub mod controls; +mod capture; diff --git a/nokhwa-core/src/traits.rs b/nokhwa-core/src/traits.rs index 3ddb370..c7ed4fb 100644 --- a/nokhwa-core/src/traits.rs +++ b/nokhwa-core/src/traits.rs @@ -14,375 +14,6 @@ * limitations under the License. 
*/ -use crate::{ - buffer::Buffer, error::NokhwaError, format_request::FormatRequest, types::{ - ApiBackend, CameraFormat, CameraIndex, CameraInfo, Resolution - } -}; -use std::{borrow::Cow, collections::HashMap}; -use crate::controls::{CameraControl, ControlValueSetter, KnownCameraControl}; -use crate::frame_format::FrameFormat; -use crate::types::FrameRate; - -/// This trait is for any backend that allows you to grab and take frames from a camera. -/// Many of the backends are **blocking**, if the camera is occupied the library will block while it waits for it to become available. -/// -/// **Note**: -/// - Backends, if not provided with a camera format, will be spawned with 640x480@15 FPS, MJPEG [`CameraFormat`]. -/// - Behaviour can differ from backend to backend. While the Camera struct abstracts most of this away, if you plan to use the raw backend structs please read the `Quirks` section of each backend. -/// - If you call [`stop_stream()`](CaptureTrait::stop_stream()), you will usually need to call [`open_stream()`](CaptureTrait::open_stream()) to get more frames from the camera. -pub trait CaptureTrait { - /// Returns the current backend used. - fn backend(&self) -> ApiBackend; - - /// Gets the camera information such as Name and Index as a [`CameraInfo`]. - fn camera_info(&self) -> &CameraInfo; - - /// Forcefully refreshes the stored camera format, bringing it into sync with "reality" (current camera state) - /// # Errors - /// If the camera can not get its most recent [`CameraFormat`]. this will error. - fn refresh_camera_format(&mut self) -> Result<(), NokhwaError>; - - /// Gets the current [`CameraFormat`]. This will force refresh to the current latest if it has changed. - fn camera_format(&self) -> Option; - - /// Will set the current [`CameraFormat`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new camera format, this will return an error. - fn set_camera_format(&mut self, new_fmt: CameraFormat) -> Result<(), NokhwaError>; - - /// A hashmap of [`Resolution`]s mapped to framerates. Not sorted! - /// # Errors - /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)). - fn compatible_list_by_resolution( - &mut self, - fourcc: FrameFormat, - ) -> Result>, NokhwaError>; - - /// Gets the compatible [`CameraFormat`] of the camera - /// # Errors - /// If it fails to get, this will error. - fn compatible_camera_formats(&mut self) -> Result, NokhwaError> { - let mut compatible_formats = vec![]; - for fourcc in self.compatible_fourcc()? { - for (resolution, fps_list) in self.compatible_list_by_resolution(fourcc)? { - for fps in fps_list { - compatible_formats.push(CameraFormat::new(resolution, fourcc, fps)); - } - } - } - - Ok(compatible_formats) - } - - /// A Vector of compatible [`FrameFormat`]s. Will only return 2 elements at most. - /// # Errors - /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)). - fn compatible_fourcc(&mut self) -> Result, NokhwaError>; - - /// Gets the current camera resolution (See: [`Resolution`], [`CameraFormat`]). This will force refresh to the current latest if it has changed. 
- fn resolution(&self) -> Option; - - /// Will set the current [`Resolution`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new resolution, this will return an error. - fn set_resolution(&mut self, new_res: Resolution) -> Result<(), NokhwaError>; - - /// Gets the current camera framerate (See: [`CameraFormat`]). This will force refresh to the current latest if it has changed. - fn frame_rate(&self) -> Option; - - /// Will set the current framerate - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new framerate, this will return an error. - fn set_frame_rate(&mut self, new_fps: u32) -> Result<(), NokhwaError>; - - /// Gets the current camera's frame format (See: [`FrameFormat`], [`CameraFormat`]). This will force refresh to the current latest if it has changed. - fn frame_format(&self) -> FrameFormat; - - /// Will set the current [`FrameFormat`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new frame format, this will return an error. - fn set_frame_format(&mut self, fourcc: FrameFormat) - -> Result<(), NokhwaError>; - - /// Gets the value of [`KnownCameraControl`]. - /// # Errors - /// If the `control` is not supported or there is an error while getting the camera control values (e.g. unexpected value, too high, etc) - /// this will error. - fn camera_control(&self, control: KnownCameraControl) -> Result; - - /// Gets the current supported list of [`KnownCameraControl`] - /// # Errors - /// If the list cannot be collected, this will error. This can be treated as a "nothing supported". - fn camera_controls(&self) -> Result, NokhwaError>; - - /// Sets the control to `control` in the camera. - /// Usually, the pipeline is calling [`camera_control()`](CaptureTrait::camera_control), getting a camera control that way - /// then calling [`value()`](CameraControl::value()) to get a [`ControlValueSetter`] and setting the value that way. - /// # Errors - /// If the `control` is not supported, the value is invalid (less than min, greater than max, not in step), or there was an error setting the control, - /// this will error. - fn set_camera_control( - &mut self, - id: KnownCameraControl, - value: ControlValueSetter, - ) -> Result<(), NokhwaError>; - - /// Will open the camera stream with set parameters. This will be called internally if you try and call [`frame()`](CaptureTrait::frame()) before you call [`open_stream()`](CaptureTrait::open_stream()). - /// # Errors - /// If the specific backend fails to open the camera (e.g. already taken, busy, doesn't exist anymore) this will error. - fn open_stream(&mut self) -> Result<(), NokhwaError>; - - /// Checks if stream if open. If it is, it will return true. - fn is_stream_open(&self) -> bool; - - /// Will get a frame from the camera as a [`Buffer`]. Depending on the backend, if you have not called [`open_stream()`](CaptureTrait::open_stream()) before you called this, - /// it will either return an error. - /// # Errors - /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), the decoding fails (e.g. 
MJPEG -> u8), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, - /// this will error. - fn frame(&mut self) -> Result; - - /// Will get a frame from the camera **without** any processing applied, meaning you will usually get a frame you need to decode yourself. - /// # Errors - /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, this will error. - fn frame_raw(&mut self) -> Result, NokhwaError>; - - // #[cfg(feature = "wgpu-types")] - // #[cfg_attr(feature = "docs-features", doc(cfg(feature = "wgpu-types")))] - // /// Directly copies a frame to a Wgpu texture. This will automatically convert the frame into a RGBA frame. - // /// # Errors - // /// If the frame cannot be captured or the resolution is 0 on any axis, this will error. - // fn frame_texture<'a>( - // &mut self, - // device: &WgpuDevice, - // queue: &WgpuQueue, - // label: Option<&'a str>, - // ) -> Result { - // use crate::pixel_format::RgbAFormat; - // use std::num::NonZeroU32; - // let frame = self.frame()?.decode_image::()?; - // - // let texture_size = Extent3d { - // width: frame.width(), - // height: frame.height(), - // depth_or_array_layers: 1, - // }; - // - // let texture = device.create_texture(&TextureDescriptor { - // label, - // size: texture_size, - // mip_level_count: 1, - // sample_count: 1, - // dimension: TextureDimension::D2, - // format: TextureFormat::Rgba8UnormSrgb, - // usage: TextureUsages::TEXTURE_BINDING | TextureUsages::COPY_DST, - // }); - // - // let width_nonzero = match NonZeroU32::try_from(4 * frame.width()) { - // Ok(w) => Some(w), - // Err(why) => return Err(NokhwaError::ReadFrameError(why.to_string())), - // }; - // - // let height_nonzero = match NonZeroU32::try_from(frame.height()) { - // Ok(h) => Some(h), - // Err(why) => return Err(NokhwaError::ReadFrameError(why.to_string())), - // }; - // - // queue.write_texture( - // ImageCopyTexture { - // texture: &texture, - // mip_level: 0, - // origin: wgpu::Origin3d::ZERO, - // aspect: TextureAspect::All, - // }, - // &frame, - // ImageDataLayout { - // offset: 0, - // bytes_per_row: width_nonzero, - // rows_per_image: height_nonzero, - // }, - // texture_size, - // ); - // - // Ok(texture) - // } - - /// Will drop the stream. - /// # Errors - /// Please check the `Quirks` section of each backend. - fn stop_stream(&mut self) -> Result<(), NokhwaError>; -} - -/// A trait to open the capture backend. -pub trait OpenCaptureTrait: CaptureTrait { - /// Opens a camera. - /// # Errors - /// Please see implementations for errors. - fn open(index: &CameraIndex, camera_fmt: FormatRequest) -> Result where Self: Sized; -} - -impl From for Box -where - T: CaptureTrait + 'static, -{ - fn from(backend: T) -> Self { - Box::new(backend) - } -} - -#[cfg(feature = "async")] -#[cfg_attr(feature = "async", async_trait::async_trait)] -pub trait AsyncCaptureTrait: CaptureTrait { - /// Forcefully refreshes the stored camera format, bringing it into sync with "reality" (current camera state) - /// # Errors - /// If the camera can not get its most recent [`CameraFormat`]. this will error. - async fn refresh_camera_format_async(&mut self) -> Result<(), NokhwaError>; - - /// Will set the current [`CameraFormat`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. 
- /// # Errors - /// If you started the stream and the camera rejects the new camera format, this will return an error. - async fn set_camera_format_async(&mut self, new_fmt: CameraFormat) -> Result<(), NokhwaError>; - - /// A hashmap of [`Resolution`]s mapped to framerates. Not sorted! - /// # Errors - /// This will error if the camera is not queryable or a query operation has failed. Some backends will error this out as a Unsupported Operation ([`UnsupportedOperationError`](NokhwaError::UnsupportedOperationError)). - async fn compatible_list_by_resolution_async( - &mut self, - fourcc: FrameFormat, - ) -> Result>, NokhwaError>; - - /// Will set the current [`Resolution`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new resolution, this will return an error. - async fn set_resolution_async(&mut self, new_res: Resolution) -> Result<(), NokhwaError>; - - /// Will set the current framerate - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new framerate, this will return an error. - async fn set_frame_rate_async(&mut self, new_fps: u32) -> Result<(), NokhwaError>; - - /// Will set the current [`FrameFormat`] - /// This will reset the current stream if used while stream is opened. - /// - /// This will also update the cache. - /// # Errors - /// If you started the stream and the camera rejects the new frame format, this will return an error. - async fn set_frame_format_async( - &mut self, - fourcc: FrameFormat, - ) -> Result<(), NokhwaError>; - - /// Sets the control to `control` in the camera. - /// Usually, the pipeline is calling [`camera_control()`](CaptureTrait::camera_control), getting a camera control that way - /// then calling [`value()`](CameraControl::value()) to get a [`ControlValueSetter`] and setting the value that way. - /// # Errors - /// If the `control` is not supported, the value is invalid (less than min, greater than max, not in step), or there was an error setting the control, - /// this will error. - async fn set_camera_control_async( - &mut self, - id: KnownCameraControl, - value: ControlValueSetter, - ) -> Result<(), NokhwaError>; - - /// Will open the camera stream with set parameters. This will be called internally if you try and call [`frame()`](CaptureTrait::frame()) before you call [`open_stream()`](CaptureTrait::open_stream()). - /// # Errors - /// If the specific backend fails to open the camera (e.g. already taken, busy, doesn't exist anymore) this will error. - async fn open_stream_async(&mut self) -> Result<(), NokhwaError>; - - /// Will get a frame from the camera as a [`Buffer`]. Depending on the backend, if you have not called [`open_stream()`](CaptureTrait::open_stream()) before you called this, - /// it will either return an error. - /// # Errors - /// If the backend fails to get the frame (e.g. already taken, busy, doesn't exist anymore), the decoding fails (e.g. MJPEG -> u8), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, - /// this will error. - async fn frame_async(&mut self) -> Result; - - /// Will get a frame from the camera **without** any processing applied, meaning you will usually get a frame you need to decode yourself. - /// # Errors - /// If the backend fails to get the frame (e.g. 
already taken, busy, doesn't exist anymore), or [`open_stream()`](CaptureTrait::open_stream()) has not been called yet, this will error. - async fn frame_raw_async(&mut self) -> Result, NokhwaError>; - - /// Will drop the stream. - /// # Errors - /// Please check the `Quirks` section of each backend. - async fn stop_stream_async(&mut self) -> Result<(), NokhwaError>; -} - -#[cfg(feature = "async")] -impl From for Box -where - T: AsyncCaptureTrait + 'static, -{ - fn from(backend: T) -> Self { - Box::new(backend) - } -} - -/// Capture one frame from the camera and immediately stop. -pub trait OneShot: CaptureTrait { - /// Captures one frame from the camera, returning a [`Buffer`] - /// # Errors - /// If opening the stream or closing the stream has an error, this will also fail. - fn one_shot(&mut self) -> Result { - if self.is_stream_open() { - self.frame() - } else { - self.open_stream()?; - let frame = self.frame()?; - self.stop_stream()?; - Ok(frame) - } - } -} - -#[cfg(feature = "async")] -#[cfg_attr(feature = "async", async_trait::async_trait)] -pub trait AsyncOneShot: AsyncCaptureTrait { - async fn one_shot(&mut self) -> Result { - if self.is_stream_open() { - self.frame_async().await - } else { - self.open_stream_async().await?; - let frame = self.frame_async().await?; - self.stop_stream_async().await?; - Ok(frame) - } - } -} - - -#[cfg(feature = "async")] -#[cfg_attr(feature = "async", async_trait::async_trait)] -/// A trait to open the capture backend. -pub trait AsyncOpenCaptureTrait: AsyncCaptureTrait { - /// Opens a camera. - /// # Errors - /// Please see implementations for errors. - async fn open(index: &CameraIndex, camera_fmt: FormatRequest) -> Result where Self: Sized; -} - - // pub trait VirtualBackendTrait {} pub trait Distance where T: PartialEq { diff --git a/nokhwa-core/src/types.rs b/nokhwa-core/src/types.rs index 6b80886..801c01f 100644 --- a/nokhwa-core/src/types.rs +++ b/nokhwa-core/src/types.rs @@ -5,15 +5,12 @@ use crate::{ #[cfg(feature = "serialize")] use serde::{Deserialize, Serialize}; use std::{ - borrow::Borrow, cmp::Ordering, collections::HashSet, fmt::{ + borrow::Borrow, cmp::Ordering, fmt::{ Debug, Display, Formatter }, hash::{Hash, Hasher}, ops::{Add, Deref, DerefMut, Sub} }; -use std::collections::HashMap; -use crate::controls::{CameraControl, CameraPropertyFlag, KnownCameraControl}; -use crate::ranges::Range; use crate::traits::Distance;
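
Reviewer aside (not part of the diff): a minimal usage sketch of the new split traits, assuming the `capture` module ends up publicly exported from `nokhwa-core`. The helper names `grab_one` and `set_brightness_generic` below are illustrative only.

    use nokhwa_core::buffer::Buffer;
    use nokhwa_core::capture::Capture;
    use nokhwa_core::controls::{CameraPropertyId, CameraPropertyValue};
    use nokhwa_core::error::NokhwaError;

    /// Open the stream, pull a single frame, then close the stream again.
    fn grab_one<C: Capture>(camera: &mut C) -> Result<Buffer, NokhwaError> {
        camera.open_stream()?;
        let frame = camera.poll_frame()?;
        camera.close_stream()?;
        Ok(frame)
    }

    /// Set brightness through the generic, id-based entry point on `Setting`.
    fn set_brightness_generic<C: Capture>(
        camera: &mut C,
        value: CameraPropertyValue,
    ) -> Result<(), NokhwaError> {
        camera.set_property(&CameraPropertyId::Brightness, value)
    }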