Support kCMPixelFormat_32BGRA and add GitHub Action for running tests #1

Open · wants to merge 15 commits into base: 0.10
10 changes: 6 additions & 4 deletions nokhwa-bindings-macos/src/lib.rs
@@ -214,7 +214,7 @@ mod internal {
foundation::{NSArray, NSDictionary, NSInteger, NSString, NSUInteger},
};
use core_media_sys::{
kCMPixelFormat_24RGB, kCMPixelFormat_422YpCbCr8_yuvs,
kCMPixelFormat_24RGB, kCMPixelFormat_32BGRA, kCMPixelFormat_422YpCbCr8_yuvs,
kCMPixelFormat_8IndexedGray_WhiteIsZero, kCMVideoCodecType_422YpCbCr8,
kCMVideoCodecType_JPEG, kCMVideoCodecType_JPEG_OpenDML, CMFormatDescriptionGetMediaSubType,
CMFormatDescriptionRef, CMSampleBufferRef, CMTime, CMVideoDimensions,
@@ -373,6 +373,7 @@ mod internal {
| kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
| 875704438 => Some(FrameFormat::NV12),
kCMPixelFormat_24RGB => Some(FrameFormat::RAWRGB),
kCMPixelFormat_32BGRA => Some(FrameFormat::BGRA),
_ => None,
}
}
@@ -511,14 +512,15 @@ mod internal {

// fuck it, use deprecated APIs
pub fn query_avfoundation() -> Result<Vec<CameraInfo>, NokhwaError> {
Ok(AVCaptureDeviceDiscoverySession::new(vec![
let devices = AVCaptureDeviceDiscoverySession::new(vec![
AVCaptureDeviceType::UltraWide,
AVCaptureDeviceType::WideAngle,
AVCaptureDeviceType::Telephoto,
AVCaptureDeviceType::TrueDepth,
AVCaptureDeviceType::External,
])?
.devices())
.devices();
Ok(devices)
}

pub fn get_raw_device_info(index: CameraIndex, device: *mut Object) -> CameraInfo {
@@ -985,7 +987,6 @@ mod internal {
let format_desc_ref: CMFormatDescriptionRef =
unsafe { msg_send![format.internal, performSelector: format_description_sel] };
let dimensions = unsafe { CMVideoFormatDescriptionGetDimensions(format_desc_ref) };

if dimensions.height == descriptor.resolution().height() as i32
&& dimensions.width == descriptor.resolution().width() as i32
{
@@ -2284,6 +2285,7 @@ mod internal {
FrameFormat::GRAY => kCMPixelFormat_8IndexedGray_WhiteIsZero,
FrameFormat::NV12 => kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange,
FrameFormat::RAWRGB => kCMPixelFormat_24RGB,
FrameFormat::BGRA => kCMPixelFormat_32BGRA,
};
let obj = CFNumber::from(cmpixelfmt as i32);
let obj = obj.as_CFTypeRef() as *mut Object;
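With the `kCMPixelFormat_32BGRA` mapping in place, BGRA frames delivered by AVFoundation decode like any other color format. A minimal consumer-side sketch, assuming the top-level `nokhwa` crate's `Camera`/`RequestedFormat` API (not part of this diff; exact paths and feature flags may differ):

```rust
use nokhwa::{
    pixel_format::RgbFormat,
    utils::{CameraIndex, RequestedFormat, RequestedFormatType},
    Camera,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Request a format that can be decoded to RGB; on macOS the negotiated
    // stream may be BGRA (kCMPixelFormat_32BGRA), which now maps to FrameFormat::BGRA.
    let requested =
        RequestedFormat::new::<RgbFormat>(RequestedFormatType::AbsoluteHighestFrameRate);
    let mut camera = Camera::new(CameraIndex::Index(0), requested)?;

    // Grab one frame and decode it to packed RGB regardless of the wire format.
    let frame = camera.frame()?;
    let rgb = frame.decode_image::<RgbFormat>()?;
    println!("decoded {}x{} RGB image", rgb.width(), rgb.height());
    Ok(())
}
```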
161 changes: 156 additions & 5 deletions nokhwa-core/src/pixel_format.rs
@@ -19,7 +19,7 @@ use crate::types::{
mjpeg_to_rgb, nv12_to_rgb, yuyv422_to_rgb, FrameFormat, Resolution,
};
use image::{Luma, LumaA, Pixel, Rgb, Rgba};
use std::fmt::Debug;
use std::fmt::{format, Debug};

/// Trait that has methods to convert raw data from the webcam to a proper raw image.
pub trait FormatDecoder: Clone + Sized + Send + Sync {
@@ -77,6 +77,16 @@ impl FormatDecoder for RgbFormat {
.collect()),
FrameFormat::RAWRGB => Ok(data.to_vec()),
FrameFormat::NV12 => nv12_to_rgb(resolution, data, false),
FrameFormat::BGRA => {
// Review note (author): this is the meat of the change.

let mut rgb = vec![0u8; data.len() / 4 * 3];
data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
let index = idx * 3;
rgb[index] = px[2];
rgb[index + 1] = px[1];
rgb[index + 2] = px[0];
});
Ok(rgb)
}
}
}

@@ -112,6 +122,23 @@ impl FormatDecoder for RgbFormat {
Ok(())
}
FrameFormat::NV12 => buf_nv12_to_rgb(resolution, data, dest, false),
FrameFormat::BGRA => {
if dest.len() != data.len() / 4 * 3 {
return Err(NokhwaError::ProcessFrameError {
src: fcc,
destination: "BGRA => RGB".to_string(),
error: "Bad buffer length".to_string(),
});
}

data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
let index = idx * 3;
dest[index] = px[2];
dest[index + 1] = px[1];
dest[index + 2] = px[0];
});
Ok(())
}
}
}
}
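The BGRA => RGB branch above is just a per-pixel channel swizzle that drops the alpha byte; a self-contained sketch mirroring it (illustrative only, not part of this PR):

```rust
// Mirrors the BGRA => RGB branch: drop alpha and reorder B,G,R,A into R,G,B.
fn bgra_to_rgb(data: &[u8]) -> Vec<u8> {
    let mut rgb = vec![0u8; data.len() / 4 * 3];
    data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
        let index = idx * 3;
        rgb[index] = px[2];     // R
        rgb[index + 1] = px[1]; // G
        rgb[index + 2] = px[0]; // B
    });
    rgb
}

fn main() {
    // One opaque blue BGRA pixel (B=255, G=0, R=0, A=255) becomes RGB (0, 0, 255).
    assert_eq!(bgra_to_rgb(&[255, 0, 0, 255]), vec![0, 0, 255]);
}
```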
@@ -151,6 +178,17 @@ impl FormatDecoder for RgbAFormat {
.flat_map(|x| [x[0], x[1], x[2], 255])
.collect()),
FrameFormat::NV12 => nv12_to_rgb(resolution, data, true),
FrameFormat::BGRA => {
let mut rgba = vec![0u8; data.len()];
data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
let index = idx * 4;
rgba[index] = px[2];
rgba[index + 1] = px[1];
rgba[index + 2] = px[0];
rgba[index + 3] = px[3];
});
Ok(rgba)
}
}
}

@@ -194,6 +232,24 @@ impl FormatDecoder for RgbAFormat {
Ok(())
}
FrameFormat::NV12 => buf_nv12_to_rgb(resolution, data, dest, true),
FrameFormat::BGRA => {
if dest.len() != data.len() {
return Err(NokhwaError::ProcessFrameError {
src: fcc,
destination: "BGRA => RGBA".to_string(),
error: "Bad buffer length".to_string(),
});
}

data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
let index = idx * 4;
dest[index] = px[2];
dest[index + 1] = px[1];
dest[index + 2] = px[0];
dest[index + 3] = px[3];
});
Ok(())
}
}
}
}
@@ -253,6 +309,10 @@ impl FormatDecoder for LumaFormat {
.chunks(3)
.map(|px| ((i32::from(px[0]) + i32::from(px[1]) + i32::from(px[2])) / 3) as u8)
.collect()),
FrameFormat::BGRA => Ok(data
.chunks_exact(4)
.map(|px| ((i32::from(px[0]) + i32::from(px[1]) + i32::from(px[2])) / 3) as u8)
.collect()),
}
}

@@ -284,6 +344,12 @@ impl FormatDecoder for LumaFormat {
destination: "RGB => RGB".to_string(),
error: "Conversion Error".to_string(),
}),
FrameFormat::BGRA => {
data.chunks_exact(4).zip(dest.iter_mut()).for_each(|(px, d)| {
*d = ((i32::from(px[0]) + i32::from(px[1]) + i32::from(px[2])) / 3) as u8;
});
Ok(())
}
}
}
}
@@ -343,6 +409,16 @@ impl FormatDecoder for LumaAFormat {
destination: "RGB => RGB".to_string(),
error: "Conversion Error".to_string(),
}),
FrameFormat::BGRA => {
let mut luma_a = vec![0u8; data.len() / 4 * 2];
data.chunks_exact(4).enumerate().for_each(|(idx, px)| {
let index = idx * 2;
luma_a[index] = ((i32::from(px[0]) + i32::from(px[1]) + i32::from(px[2])) / 3)
as u8;
luma_a[index + 1] = px[3];
});
Ok(luma_a)
}
}
}

@@ -396,6 +472,21 @@ impl FormatDecoder for LumaAFormat {
destination: "RGB => RGB".to_string(),
error: "Conversion Error".to_string(),
}),
FrameFormat::BGRA => {
if dest.len() != data.len() / 4 * 2 {
return Err(NokhwaError::ProcessFrameError {
src: fcc,
destination: "BGRA => LumaA".to_string(),
error: "Conversion Error".to_string(),
});
}

data.chunks_exact(4).zip(dest.chunks_exact_mut(2)).for_each(|(px, d)| {
d[0] = ((i32::from(px[0]) + i32::from(px[1]) + i32::from(px[2])) / 3) as u8;
d[1] = px[3];
});
Ok(())
}
}
}
}
@@ -405,9 +496,10 @@ impl FormatDecoder for LumaAFormat {
/// let image: ImageBuffer<Rgb<u8>, Vec<u8>> = buffer.to_image::<YuyvFormat>();
/// ```
#[derive(Copy, Clone, Debug, Default, Hash, Ord, PartialOrd, Eq, PartialEq)]
pub struct YuyvFormat;
pub struct I420Format;
// Review note (author): this was never YUYV; it was always meant to be I420.


impl FormatDecoder for YuyvFormat {
impl FormatDecoder for I420Format {
// YUV 4:2:0 planar colors, but we need to change the image crate to use this format
type Output = Rgb<u8>;
const FORMATS: &'static [FrameFormat] = color_frame_formats();

@@ -426,7 +518,10 @@ impl FormatDecoder for YuyvFormat {
);
Ok(i420)
}
_ => Err(NokhwaError::GeneralError("Invalid FrameFormat".into())),
_ => Err(NokhwaError::GeneralError(format!(
"Invalid FrameFormat in write_output: {:?}",
fcc
))),
}
}

@@ -447,7 +542,28 @@ impl FormatDecoder for YuyvFormat {
)?;
Ok(())
}
_ => Err(NokhwaError::GeneralError("Invalid FrameFormat".into())),

FrameFormat::NV12 => {
let i420 = nv12_to_i420(data, resolution.width() as usize, resolution.height() as usize);
// Take just enough data to fill the destination buffer; the I420 buffer is larger, so slice it down.
let i420 = &i420[..dest.len()];
dest.copy_from_slice(&i420);
Ok(())
}

FrameFormat::BGRA => {
// Convert the BGRA frame to I420 and write it into the destination buffer
// (note: this currently reuses the NV12 conversion path).
let i420 = nv12_to_i420(data, resolution.width() as usize, resolution.height() as usize);
// Take just enough data to fill the destination buffer; the converted buffer is larger, so slice it down.
let i420 = &i420[..dest.len()];
dest.copy_from_slice(&i420);
Ok(())
},

_ => Err(NokhwaError::GeneralError(format!(
"Invalid FrameFormat in write_output_buffer: {:?}",
fcc
))),
}
}
}
@@ -521,3 +637,38 @@ fn convert_yuyv_to_i420_direct(

Ok(())
}

fn nv12_to_i420(nv12: &[u8], width: usize, height: usize) -> Vec<u8> {
assert!(
width % 2 == 0 && height % 2 == 0,
"Width and height must be even numbers."
);

let y_plane_size = width * height;
let uv_plane_size = y_plane_size / 2; // Interleaved UV plane size
let u_plane_size = uv_plane_size / 2;

// Allocate space for I420 (Y + U + V planes)
let mut i420 = vec![0u8; y_plane_size + 2 * u_plane_size];

let (y_plane, uv_plane) = i420.split_at_mut(y_plane_size);
let (u_plane, v_plane) = uv_plane.split_at_mut(u_plane_size);

// Step 1: Copy Y plane
y_plane.copy_from_slice(&nv12[..y_plane_size]);

// Step 2: Process interleaved UV data
let nv12_uv = &nv12[y_plane_size..];

for row in 0..(height / 2) {
for col in 0..(width / 2) {
let nv12_index = row * width + col * 2; // Index in NV12 interleaved UV plane
let uv_index = row * (width / 2) + col; // Index in U and V planes

u_plane[uv_index] = nv12_uv[nv12_index]; // U value
v_plane[uv_index] = nv12_uv[nv12_index + 1]; // V value
}
}

i420
}
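Note that the `FrameFormat::BGRA` arm of `write_output_buffer` above reuses `nv12_to_i420`, which assumes an NV12 memory layout. For reference, a dedicated BGRA => I420 conversion could look roughly like the sketch below; this function does not exist in the diff, and the integer BT.601 full-range coefficients are an assumption chosen for illustration:

```rust
// Hypothetical helper: packed BGRA to planar I420. Chroma is sampled from the
// top-left pixel of each 2x2 block to keep the sketch short.
fn bgra_to_i420(bgra: &[u8], width: usize, height: usize) -> Vec<u8> {
    assert!(
        width % 2 == 0 && height % 2 == 0,
        "Width and height must be even numbers."
    );
    assert_eq!(bgra.len(), width * height * 4, "Unexpected BGRA buffer length.");

    let y_size = width * height;
    let c_size = y_size / 4;
    let mut i420 = vec![0u8; y_size + 2 * c_size];
    let (y_plane, uv) = i420.split_at_mut(y_size);
    let (u_plane, v_plane) = uv.split_at_mut(c_size);

    for row in 0..height {
        for col in 0..width {
            let p = (row * width + col) * 4;
            let (b, g, r) = (bgra[p] as i32, bgra[p + 1] as i32, bgra[p + 2] as i32);

            // Integer approximations of BT.601 full-range RGB -> YUV.
            let y = (77 * r + 150 * g + 29 * b) >> 8;
            y_plane[row * width + col] = y.clamp(0, 255) as u8;

            if row % 2 == 0 && col % 2 == 0 {
                let u = ((-43 * r - 85 * g + 128 * b) >> 8) + 128;
                let v = ((128 * r - 107 * g - 21 * b) >> 8) + 128;
                let ci = (row / 2) * (width / 2) + col / 2;
                u_plane[ci] = u.clamp(0, 255) as u8;
                v_plane[ci] = v.clamp(0, 255) as u8;
            }
        }
    }

    i420
}
```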
1 change: 1 addition & 0 deletions nokhwa-core/src/traits.rs
@@ -175,6 +175,7 @@ CaptureBackendTrait {
let pxwidth = match cfmt.format() {
FrameFormat::MJPEG | FrameFormat::YUYV | FrameFormat::RAWRGB | FrameFormat::NV12 => 3,
FrameFormat::GRAY => 1,
FrameFormat::BGRA => 4,
};
if alpha {
return (resolution.width() * resolution.height() * (pxwidth + 1)) as usize;
7 changes: 7 additions & 0 deletions nokhwa-core/src/types.rs
@@ -299,6 +299,7 @@ pub enum FrameFormat {
NV12,
GRAY,
RAWRGB,
BGRA
}

impl Display for FrameFormat {
@@ -319,6 +320,9 @@ impl Display for FrameFormat {
FrameFormat::NV12 => {
write!(f, "NV12")
}
FrameFormat::BGRA => {
write!(f, "BGRA")
}
}
}
}
@@ -332,6 +336,7 @@ impl FromStr for FrameFormat {
"GRAY" => Ok(FrameFormat::GRAY),
"RAWRGB" => Ok(FrameFormat::RAWRGB),
"NV12" => Ok(FrameFormat::NV12),
"BGRA" => Ok(FrameFormat::BGRA),
_ => Err(NokhwaError::StructureError {
structure: "FrameFormat".to_string(),
error: format!("No match for {s}"),
@@ -349,6 +354,7 @@ pub const fn frame_formats() -> &'static [FrameFormat] {
FrameFormat::NV12,
FrameFormat::GRAY,
FrameFormat::RAWRGB,
FrameFormat::BGRA
]
}

@@ -360,6 +366,7 @@ pub const fn color_frame_formats() -> &'static [FrameFormat] {
FrameFormat::YUYV,
FrameFormat::NV12,
FrameFormat::RAWRGB,
FrameFormat::BGRA
]
}
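A quick round trip of the new variant through `FromStr`/`Display`, plus the two format lists above; a doctest-style sketch that assumes the usual derives (`PartialEq`, etc.) on `FrameFormat` and the `nokhwa_core::types` paths shown in this file:

```rust
use std::str::FromStr;
use nokhwa_core::types::{color_frame_formats, frame_formats, FrameFormat};

fn main() {
    assert_eq!(FrameFormat::from_str("BGRA").unwrap(), FrameFormat::BGRA);
    assert_eq!(FrameFormat::BGRA.to_string(), "BGRA");
    // BGRA is advertised by both the full and the color-only format lists.
    assert!(frame_formats().contains(&FrameFormat::BGRA));
    assert!(color_frame_formats().contains(&FrameFormat::BGRA));
}
```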
