These crates are intended to provide access to the Linux V4L2 APIs without limitations.
The primary design goal is an optimal balance between safety and overhead. The implementation stays much closer to the system calls than the v4l crate, and interface types wrap kernel types to avoid unnecessary copying.
The secondary goal is to provide the full feature set of the original API.
In the end, this is my modest attempt to do things right.
- linux-video-core - core abstractions and low-level interface (not intended for end users)
- linux-video - sync interface for blocking (synchronous) operation
- tokio-linux-video - async interface for tokio users (see the sketch after this list)
- async-std-linux-video - async interface for async-std users
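The async crates are meant to mirror the sync API with awaitable calls. As a rough illustration only (the exact tokio-linux-video API is assumed here, not verified), enumerating devices with tokio might look like this:

```rust
// Sketch: assumes tokio-linux-video mirrors the sync API below,
// with each call awaited instead of blocking.
use tokio_linux_video::Device;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let mut devs = Device::list().await?;

    while let Some(path) = devs.fetch_next().await? {
        let dev = Device::open(&path).await?;
        let caps = dev.capabilities().await?;

        println!("path: {}, {caps}", path.display());
    }

    Ok(())
}
```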
Enumerating devices:
```rust
use linux_video::Device;

fn main() -> std::io::Result<()> {
    let mut devs = Device::list()?;

    while let Some(path) = devs.fetch_next()? {
        let dev = Device::open(&path)?;
        let caps = dev.capabilities()?;

        println!("path: {}, {caps}", path.display());
    }

    Ok(())
}
```
Getting capabilities and controls:
```rust
use linux_video::Device;

fn main() -> std::io::Result<()> {
    let dev = Device::open("/dev/video0")?;

    let caps = dev.capabilities()?;
    println!("Capabilities: {caps}");

    println!("Controls:");
    let mut controls = dev.controls(None);
    while let Some(ctrl) = controls.fetch_next()? {
        println!("  {ctrl}");

        if let Some(mut items) = dev.control_items(&ctrl) {
            while let Some(item) = items.fetch_next()? {
                println!("    {item}");
            }
        }
    }

    Ok(())
}
```
Getting supported formats:
```rust
use linux_video::{types::BufferType, Device};

fn main() -> std::io::Result<()> {
    let dev = Device::open("/dev/video0")?;

    let caps = dev.capabilities()?;

    for type_ in BufferType::ALL {
        if type_.is_supported(caps.capabilities()) {
            println!("{type_} formats:");

            // Enumerate all formats supported for this buffer type
            let mut fmts = dev.formats(type_);
            while let Some(fmt) = fmts.fetch_next()? {
                println!("  {fmt}");

                if type_.content().is_video() {
                    let mut sizes = dev.sizes(fmt.pixel_format());
                    while let Some(size) = sizes.fetch_next()? {
                        println!("    {size}");

                        for size in size.sizes() {
                            println!("      {size}");

                            let mut intervals =
                                dev.intervals(fmt.pixel_format(), size.width(), size.height());
                            while let Some(interval) = intervals.fetch_next()? {
                                println!("        {interval}");
                            }
                        }
                    }
                }
            }
        }
    }

    Ok(())
}
```
Using controls:
```rust
use linux_video::{types::*, Device};

fn main() -> std::io::Result<()> {
    let dev = Device::open("/dev/video0")?;

    // Get control from device by identifier
    let contrast_ctrl = dev.control(CtrlId::Contrast)?;

    // Create a value for the control
    let mut contrast = Value::from(&contrast_ctrl);

    // Get control value from device
    dev.get_control(&mut contrast)?;

    // Get reference to value data
    let contrast_value = contrast.try_ref::<i32>().unwrap();
    println!("Current contrast: {contrast_value:?}");

    // Set new value by reference
    *contrast.try_mut::<i32>().unwrap() = contrast_value + 10;
    println!("Updated contrast: {:?}", contrast.try_ref::<i32>().unwrap());

    // Set new control value to device
    dev.set_control(&contrast)?;

    Ok(())
}
```
Capturing video data:
```rust
use linux_video::{types::*, Device};

fn main() -> std::io::Result<()> {
    let dev = Device::open("/dev/video0")?;

    // Get the current capture format
    let fmt = dev.format(BufferType::VideoCapture)?;
    println!("  {fmt}");

    // Start a video capture stream with four memory-mapped buffers
    let stream = dev.stream::<In, Mmap>(ContentType::Video, 4)?;

    let mut i = 0;
    while let Ok(buffer) = stream.next() {
        let buffer = buffer.lock();
        println!("#{i} {buffer}");

        // Get a reference to the frame buffer contents
        let _data: &[u8] = buffer.as_ref();

        i += 1;
        if i > 30 {
            break;
        }
    }

    Ok(())
}
```
Outputting video data:
```rust
use linux_video::{types::*, Device};

fn main() -> std::io::Result<()> {
    let dev = Device::open("/dev/video0")?;

    // Get the current output format
    let fmt = dev.format(BufferType::VideoOutput)?;
    println!("  {fmt}");

    // Start a video output stream with four memory-mapped buffers
    let stream = dev.stream::<Out, Mmap>(ContentType::Video, 4)?;

    let mut i = 0;
    while let Ok(mut buffer) = stream.next() {
        let mut buffer = buffer.lock();
        println!("#{i} {buffer}");

        // Get a mutable reference to the frame buffer contents to fill with data
        let _data: &mut [u8] = buffer.as_mut();

        i += 1;
        if i > 30 {
            break;
        }
    }

    Ok(())
}
```