Merge branch '10-bit' into 'master'

Add support for 10-bit packed formats

See merge request postmarketOS/megapixels!10
Benjamin Schaaf 2022-02-01 12:04:56 +00:00
commit 0163c00eaa
10 changed files with 439 additions and 51 deletions

View File

@ -60,7 +60,7 @@ when previewing.
* `width=640` and `height=480` the resolution to use for the sensor
* `rate=15` the refresh rate in fps to use for the sensor
* `fmt=BGGR8` sets the pixel and bus formats used when capturing from the sensor, only BGGR8 is fully supported
* `fmt=BGGR8` sets the pixel and bus formats used when capturing from the sensor.
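To make the options above concrete, here is a purely illustrative sensor block using exactly the keys listed. The `[rear]` section name and the lack of any capture/preview prefix on the keys are assumptions for the sake of the example, not taken from the configuration documentation itself:

```ini
# Hypothetical example only: section name and key layout are assumed,
# only the width/height/rate/fmt keys come from the list above.
[rear]
width=640
height=480
rate=15
fmt=BGGR8
```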
# Post processing

View File

@ -1,29 +1,58 @@
#ifdef GL_ES
precision mediump float;
precision highp float;
#endif
uniform sampler2D texture;
uniform mat3 color_matrix;
#ifdef BITS_10
uniform float row_length;
#endif
varying vec2 top_left_uv;
varying vec2 top_right_uv;
varying vec2 bottom_left_uv;
varying vec2 bottom_right_uv;
#ifdef BITS_10
vec2
skip_5th_pixel(vec2 uv)
{
vec2 new_uv = uv;
new_uv.x *= 0.8;
new_uv.x += floor(uv.x * row_length / 5.0) / row_length;
return new_uv;
}
#endif
void
main()
{
// Note the coordinates for texture samples need to be a varying, as the
// Mali-400 has this as a fast path allowing 32-bit floats. Otherwise
// they end up as 16-bit floats and that's not accurate enough.
#ifdef BITS_10
vec4 samples = vec4(texture2D(texture, skip_5th_pixel(top_left_uv)).r,
texture2D(texture, skip_5th_pixel(top_right_uv)).r,
texture2D(texture, skip_5th_pixel(bottom_left_uv)).r,
texture2D(texture, skip_5th_pixel(bottom_right_uv)).r);
#else
vec4 samples = vec4(texture2D(texture, top_left_uv).r,
texture2D(texture, top_right_uv).r,
texture2D(texture, bottom_left_uv).r,
texture2D(texture, bottom_right_uv).r);
#endif
// Assume BGGR for now. Currently this just takes 3 of the four samples
// for each pixel, there's room here to do some better debayering.
#if defined(CFA_BGGR)
vec3 color = vec3(samples.w, (samples.y + samples.z) / 2.0, samples.x);
#elif defined(CFA_GBRG)
vec3 color = vec3(samples.z, (samples.x + samples.w) / 2.0, samples.y);
#elif defined(CFA_GRBG)
vec3 color = vec3(samples.y, (samples.x + samples.w) / 2.0, samples.z);
#else
vec3 color = vec3(samples.x, (samples.y + samples.z) / 2.0, samples.w);
#endif
// Some crude blacklevel correction to make the preview a bit nicer; this
// should be a uniform
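To see what `skip_5th_pixel()` does to the sampling coordinate, here is a small CPU-side sketch of the same formula (a standalone illustration, not part of the shader). It shows how every 5-byte group of a packed 10-bit row is squeezed onto its first 4 data bytes, so the trailing byte holding the low 2 bits of each pixel is never sampled:

```c
/* Standalone illustration of the skip_5th_pixel() formula above,
 * evaluated on the CPU.  row_length is the packed row width in bytes;
 * texel centres falling on the 5th byte of a group map back into the
 * first 4 bytes of that group. */
#include <math.h>
#include <stdio.h>

static double skip_5th_pixel(double x, double row_length)
{
        return 0.8 * x + floor(x * row_length / 5.0) / row_length;
}

int main(void)
{
        const double row_length = 10.0; /* 8 pixels packed into 10 bytes */
        for (int texel = 0; texel < 10; texel++) {
                double x = (texel + 0.5) / row_length; /* normalized texel centre */
                printf("texel %d -> byte %.1f\n",
                       texel, skip_5th_pixel(x, row_length) * row_length);
        }
        return 0;
}
```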

View File

@ -144,6 +144,64 @@ mp_pixel_format_pixel_depth(MPPixelFormat pixel_format)
}
}
const char *
mp_pixel_format_cfa(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_BGGR10P:
return "BGGR";
break;
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GBRG10P:
return "GBRG";
break;
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_GRBG10P:
return "GRBG";
break;
case MP_PIXEL_FMT_RGGB8:
case MP_PIXEL_FMT_RGGB10P:
return "RGGB";
break;
case MP_PIXEL_FMT_UYVY:
return "UYUV";
break;
case MP_PIXEL_FMT_YUYV:
return "YUYV";
break;
default:
return "unsupported";
}
}
const char *
mp_pixel_format_cfa_pattern(MPPixelFormat pixel_format)
{
g_return_val_if_fail(pixel_format < MP_PIXEL_FMT_MAX, 0);
switch (pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_BGGR10P:
return "\002\001\001\000";
break;
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GBRG10P:
return "\001\002\000\001";
break;
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_GRBG10P:
return "\001\000\002\001";
break;
case MP_PIXEL_FMT_RGGB8:
case MP_PIXEL_FMT_RGGB10P:
return "\000\001\001\002";
break;
default:
return NULL;
}
}
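The octal strings returned here follow the TIFF CFAPattern convention: the 2x2 repeat block is listed row by row with 0 = red, 1 = green, 2 = blue. A hypothetical helper (not part of this commit) that turns such a string back into colour letters makes the encoding easy to check:

```c
/* Hypothetical helper, not from this commit: print a CFAPattern string
 * as colour letters using the 0 = R, 1 = G, 2 = B convention. */
#include <stdio.h>

static void print_cfa_pattern(const char *pattern)
{
        static const char names[3] = { 'R', 'G', 'B' };
        for (int i = 0; i < 4; i++)
                putchar(names[(unsigned char)pattern[i]]);
        putchar('\n');
}

/* print_cfa_pattern("\002\001\001\000") prints "BGGR". */
```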
uint32_t
mp_pixel_format_width_to_bytes(MPPixelFormat pixel_format, uint32_t width)
{
@ -264,7 +322,6 @@ mp_camera_new(int video_fd, int subdev_fd)
bool use_mplane;
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
use_mplane = true;
printf("!!\n");
} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
use_mplane = false;
} else {
@ -394,10 +451,19 @@ mp_camera_get_subdev_fd(MPCamera *camera)
return camera->subdev_fd;
}
static enum v4l2_buf_type
get_buf_type(MPCamera *camera)
{
if (camera->use_mplane) {
return V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
}
return V4L2_BUF_TYPE_VIDEO_CAPTURE;
}
static bool
camera_mode_impl(MPCamera *camera, int request, MPCameraMode *mode)
{
uint32_t pixfmt = mp_pixel_format_from_v4l_pixel_format(mode->pixel_format);
uint32_t pixfmt = mp_pixel_format_to_v4l_pixel_format(mode->pixel_format);
struct v4l2_format fmt = {};
if (camera->use_mplane) {
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
@ -460,7 +526,6 @@ mp_camera_set_mode(MPCamera *camera, MPCameraMode *mode)
VIDIOC_SUBDEV_S_FRAME_INTERVAL,
&interval) == -1) {
errno_printerr("VIDIOC_SUBDEV_S_FRAME_INTERVAL");
return false;
}
bool did_set_frame_rate = interval.interval.numerator ==
@ -522,10 +587,7 @@ mp_camera_start_capture(MPCamera *camera)
g_return_val_if_fail(camera->has_set_mode, false);
g_return_val_if_fail(camera->num_buffers == 0, false);
enum v4l2_buf_type buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (camera->use_mplane) {
buftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
}
const enum v4l2_buf_type buftype = get_buf_type(camera);
// Start by requesting buffers
struct v4l2_requestbuffers req = {};
@ -588,7 +650,7 @@ mp_camera_start_capture(MPCamera *camera)
}
struct v4l2_exportbuffer expbuf = {
.type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
.type = buftype,
.index = i,
};
if (xioctl(camera->video_fd, VIDIOC_EXPBUF, &expbuf) == -1) {
@ -627,7 +689,7 @@ mp_camera_start_capture(MPCamera *camera)
}
// Start capture
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
enum v4l2_buf_type type = buftype;
if (xioctl(camera->video_fd, VIDIOC_STREAMON, &type) == -1) {
errno_printerr("VIDIOC_STREAMON");
goto error;
@ -653,7 +715,7 @@ error:
{
struct v4l2_requestbuffers req = {};
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = buftype;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->video_fd, VIDIOC_REQBUFS, &req) == -1) {
@ -669,7 +731,9 @@ mp_camera_stop_capture(MPCamera *camera)
{
g_return_val_if_fail(camera->num_buffers > 0, false);
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
const enum v4l2_buf_type buftype = get_buf_type(camera);
enum v4l2_buf_type type = buftype;
if (xioctl(camera->video_fd, VIDIOC_STREAMOFF, &type) == -1) {
errno_printerr("VIDIOC_STREAMOFF");
}
@ -690,7 +754,7 @@ mp_camera_stop_capture(MPCamera *camera)
struct v4l2_requestbuffers req = {};
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = buftype;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(camera->video_fd, VIDIOC_REQBUFS, &req) == -1) {
errno_printerr("VIDIOC_REQBUFS");
@ -708,8 +772,10 @@ mp_camera_is_capturing(MPCamera *camera)
bool
mp_camera_capture_buffer(MPCamera *camera, MPBuffer *buffer)
{
const enum v4l2_buf_type buftype = get_buf_type(camera);
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.type = buftype;
buf.memory = V4L2_MEMORY_MMAP;
struct v4l2_plane planes[1];
@ -757,10 +823,19 @@ mp_camera_capture_buffer(MPCamera *camera, MPBuffer *buffer)
bool
mp_camera_release_buffer(MPCamera *camera, uint32_t buffer_index)
{
const enum v4l2_buf_type buftype = get_buf_type(camera);
struct v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.type = buftype;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = buffer_index;
struct v4l2_plane planes[1];
if (camera->use_mplane) {
buf.m.planes = planes;
buf.length = 1;
}
if (xioctl(camera->video_fd, VIDIOC_QBUF, &buf) == -1) {
errno_printerr("VIDIOC_QBUF");
return false;
@ -863,12 +938,14 @@ get_subdev_modes(MPCamera *camera, bool (*check)(MPCamera *, MPCameraMode *))
static MPCameraModeList *
get_video_modes(MPCamera *camera, bool (*check)(MPCamera *, MPCameraMode *))
{
const enum v4l2_buf_type buftype = get_buf_type(camera);
MPCameraModeList *item = NULL;
for (uint32_t fmt_index = 0;; ++fmt_index) {
struct v4l2_fmtdesc fmt = {};
fmt.index = fmt_index;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.type = buftype;
if (xioctl(camera->video_fd, VIDIOC_ENUM_FMT, &fmt) == -1) {
if (errno != EINVAL) {
errno_printerr("VIDIOC_ENUM_FMT");

View File

@ -31,6 +31,8 @@ uint32_t mp_pixel_format_to_v4l_bus_code(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_bits_per_pixel(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_pixel_depth(MPPixelFormat pixel_format);
const char *mp_pixel_format_cfa(MPPixelFormat pixel_format);
const char *mp_pixel_format_cfa_pattern(MPPixelFormat pixel_format);
uint32_t mp_pixel_format_width_to_bytes(MPPixelFormat pixel_format, uint32_t width);
uint32_t mp_pixel_format_width_to_colors(MPPixelFormat pixel_format, uint32_t width);
uint32_t mp_pixel_format_height_to_colors(MPPixelFormat pixel_format,

View File

@ -153,6 +153,12 @@ mp_device_close(MPDevice *device)
free(device);
}
int
mp_device_get_fd(const MPDevice *device)
{
return device->fd;
}
bool
mp_device_setup_link(MPDevice *device,
uint32_t source_pad_id,
@ -375,6 +381,7 @@ mp_device_get_num_links(const MPDevice *device)
struct _MPDeviceList {
MPDevice *device;
MPDeviceList *next;
char path[PATH_MAX];
};
MPDeviceList *
@ -387,8 +394,8 @@ mp_device_list_new()
DIR *d = opendir("/dev");
while ((dir = readdir(d)) != NULL) {
if (strncmp(dir->d_name, "media", 5) == 0) {
char path[261];
snprintf(path, 261, "/dev/%s", dir->d_name);
char path[PATH_MAX];
snprintf(path, PATH_MAX, "/dev/%s", dir->d_name);
MPDevice *device = mp_device_open(path);
@ -396,6 +403,7 @@ mp_device_list_new()
MPDeviceList *next = malloc(sizeof(MPDeviceList));
next->device = device;
next->next = current;
memcpy(next->path, path, sizeof(path));
current = next;
}
}
@ -461,6 +469,12 @@ mp_device_list_get(const MPDeviceList *device_list)
return device_list->device;
}
const char *
mp_device_list_get_path(const MPDeviceList *device_list)
{
return device_list->path;
}
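A minimal usage sketch for the new accessor, assuming only the list API declared in device.h: walk the device list and print each media node path next to its driver name, much as the updated tool further down does. Cleanup of the list is omitted for brevity.

```c
/* Sketch only: iterate the device list and print path + driver.
 * Uses the accessors declared in device.h; error handling and
 * cleanup are omitted for brevity. */
#include <stdio.h>
#include <linux/media.h>
#include "device.h"

static void print_media_devices(void)
{
        MPDeviceList *list = mp_device_list_new();
        while (list) {
                const struct media_device_info *info =
                        mp_device_get_info(mp_device_list_get(list));
                printf("%s: %s\n", mp_device_list_get_path(list), info->driver);
                list = mp_device_list_next(list);
        }
}
```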
MPDeviceList *
mp_device_list_next(const MPDeviceList *device_list)
{

View File

@ -15,6 +15,8 @@ MPDevice *mp_device_open(const char *path);
MPDevice *mp_device_new(int fd);
void mp_device_close(MPDevice *device);
int mp_device_get_fd(const MPDevice *device);
bool mp_device_setup_link(MPDevice *device,
uint32_t source_pad_id,
uint32_t sink_pad_id,
@ -62,4 +64,5 @@ MPDevice *mp_device_list_find_remove(MPDeviceList **device_list,
MPDevice *mp_device_list_remove(MPDeviceList **device_list);
MPDevice *mp_device_list_get(const MPDeviceList *device_list);
const char *mp_device_list_get_path(const MPDeviceList *device_list);
MPDeviceList *mp_device_list_next(const MPDeviceList *device_list);

View File

@ -8,12 +8,15 @@
#define TEX_COORD_ATTRIBUTE 1
struct _GLES2Debayer {
MPPixelFormat format;
GLuint frame_buffer;
GLuint program;
GLuint uniform_transform;
GLuint uniform_pixel_size;
GLuint uniform_texture;
GLuint uniform_color_matrix;
GLuint uniform_row_length;
GLuint quad;
};
@ -21,7 +24,10 @@ struct _GLES2Debayer {
GLES2Debayer *
gles2_debayer_new(MPPixelFormat format)
{
if (format != MP_PIXEL_FMT_BGGR8) {
if (format != MP_PIXEL_FMT_BGGR8 && format != MP_PIXEL_FMT_GBRG8 &&
format != MP_PIXEL_FMT_GRBG8 && format != MP_PIXEL_FMT_RGGB8 &&
format != MP_PIXEL_FMT_BGGR10P && format != MP_PIXEL_FMT_GBRG10P &&
format != MP_PIXEL_FMT_GRBG10P && format != MP_PIXEL_FMT_RGGB10P) {
return NULL;
}
@ -29,6 +35,15 @@ gles2_debayer_new(MPPixelFormat format)
glGenFramebuffers(1, &frame_buffer);
check_gl();
char format_def[64];
snprintf(format_def,
64,
"#define CFA_%s\n#define BITS_%d\n",
mp_pixel_format_cfa(format),
mp_pixel_format_bits_per_pixel(format));
const GLchar *def[1] = { format_def };
GLuint shaders[] = {
gl_util_load_shader("/org/postmarketos/Megapixels/debayer.vert",
GL_VERTEX_SHADER,
@ -36,8 +51,8 @@ gles2_debayer_new(MPPixelFormat format)
0),
gl_util_load_shader("/org/postmarketos/Megapixels/debayer.frag",
GL_FRAGMENT_SHADER,
NULL,
0),
def,
1),
};
GLuint program = gl_util_link_program(shaders, 2);
@ -46,6 +61,8 @@ gles2_debayer_new(MPPixelFormat format)
check_gl();
GLES2Debayer *self = malloc(sizeof(GLES2Debayer));
self->format = format;
self->frame_buffer = frame_buffer;
self->program = program;
@ -54,6 +71,9 @@ gles2_debayer_new(MPPixelFormat format)
self->uniform_texture = glGetUniformLocation(self->program, "texture");
self->uniform_color_matrix =
glGetUniformLocation(self->program, "color_matrix");
if (mp_pixel_format_bits_per_pixel(self->format) == 10)
self->uniform_row_length =
glGetUniformLocation(self->program, "row_length");
check_gl();
self->quad = gl_util_new_quad();
@ -135,6 +155,12 @@ gles2_debayer_configure(GLES2Debayer *self,
self->uniform_color_matrix, 1, GL_FALSE, identity);
}
check_gl();
if (mp_pixel_format_bits_per_pixel(self->format) == 10) {
assert(src_width % 4 == 0);
glUniform1f(self->uniform_row_length, src_width + src_width / 4);
check_gl();
}
}
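The `row_length` uniform set above is the packed bytes-per-row: 10-bit packed formats store 4 pixels in 5 bytes, so a row of `width` pixels occupies width + width / 4 bytes. A quick arithmetic check, using an assumed width of 1280:

```c
/* Quick check of the packed-row arithmetic (the width value is an
 * assumed example): 4 pixels per 5 bytes => bytes per row = width + width / 4. */
#include <assert.h>

int main(void)
{
        const int width = 1280;          /* example width, divisible by 4 */
        assert(width % 4 == 0);          /* same precondition as the code above */
        assert(width + width / 4 == 1600);
        return 0;
}
```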
void

View File

@ -174,6 +174,37 @@ mp_process_pipeline_buffer_get_texture_id(MPProcessPipelineBuffer *buf)
return buf->texture_id;
}
static void
repack_image_sequencial(const uint8_t *src_buf,
uint8_t *dst_buf,
size_t width,
size_t height)
{
uint16_t pixels[4];
/*
* Repack 40 bits stored in sensor format into sequential format
*
* src_buf: 11111111 22222222 33333333 44444444 11223344 ...
* dst_buf: 11111111 11222222 22223333 33333344 44444444 ...
*/
assert(width % 4 == 0);
for (size_t i = 0; i < (width + width / 4) * height; i += 5) {
/* Extract pixels from packed sensor format */
pixels[0] = (src_buf[i] << 2) | (src_buf[i + 4] >> 6);
pixels[1] = (src_buf[i + 1] << 2) | (src_buf[i + 4] >> 4 & 0x03);
pixels[2] = (src_buf[i + 2] << 2) | (src_buf[i + 4] >> 2 & 0x03);
pixels[3] = (src_buf[i + 3] << 2) | (src_buf[i + 4] & 0x03);
/* Pack pixels into sequential format */
dst_buf[i] = (pixels[0] >> 2 & 0xff);
dst_buf[i + 1] = (pixels[0] << 6 & 0xff) | (pixels[1] >> 4 & 0x3f);
dst_buf[i + 2] = (pixels[1] << 4 & 0xff) | (pixels[2] >> 6 & 0x0f);
dst_buf[i + 3] = (pixels[2] << 2 & 0xff) | (pixels[3] >> 8 & 0x03);
dst_buf[i + 4] = (pixels[3] & 0xff);
}
}
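As a sanity check on the bit juggling above, here is a self-contained sketch (not part of the commit) that packs four arbitrary 10-bit values into the 5-byte sensor layout and extracts them with the same shifts the function uses:

```c
/* Round-trip check of the 10-bit extraction above for one 4-pixel
 * group; the sample values are chosen arbitrarily.  The sensor stores
 * the high 8 bits of each pixel first and gathers the low 2 bits of
 * all four pixels into the trailing byte. */
#include <assert.h>
#include <stdint.h>

int main(void)
{
        const uint16_t px[4] = { 0x3FF, 0x000, 0x2AA, 0x155 }; /* 10-bit samples */

        /* Packed sensor layout: 4 high bytes + 1 byte of low bits */
        uint8_t sensor[5] = {
                px[0] >> 2, px[1] >> 2, px[2] >> 2, px[3] >> 2,
                (uint8_t)((px[0] & 3) << 6 | (px[1] & 3) << 4 |
                          (px[2] & 3) << 2 | (px[3] & 3)),
        };

        /* Extract pixels the same way repack_image_sequencial() does */
        uint16_t out[4];
        out[0] = (sensor[0] << 2) | (sensor[4] >> 6);
        out[1] = (sensor[1] << 2) | (sensor[4] >> 4 & 0x03);
        out[2] = (sensor[2] << 2) | (sensor[4] >> 2 & 0x03);
        out[3] = (sensor[3] << 2) | (sensor[4] & 0x03);

        for (int i = 0; i < 4; i++)
                assert(out[i] == px[i]);
        return 0;
}
```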
static GLES2Debayer *gles2_debayer = NULL;
static GdkGLContext *context;
@ -224,14 +255,9 @@ init_gl(MPPipeline *pipeline, GdkSurface **surface)
check_gl();
}
gles2_debayer = gles2_debayer_new(MP_PIXEL_FMT_BGGR8);
check_gl();
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
check_gl();
gles2_debayer_use(gles2_debayer);
for (size_t i = 0; i < NUM_BUFFERS; ++i) {
glGenTextures(1, &output_buffers[i].texture_id);
glBindTexture(GL_TEXTURE_2D, output_buffers[i].texture_id);
@ -297,7 +323,7 @@ process_image_for_preview(const uint8_t *image)
glTexImage2D(GL_TEXTURE_2D,
0,
GL_LUMINANCE,
mode.width,
mp_pixel_format_width_to_bytes(mode.pixel_format, mode.width),
mode.height,
0,
GL_LUMINANCE,
@ -462,9 +488,14 @@ process_image_for_capture(const uint8_t *image, int count)
static const short cfapatterndim[] = { 2, 2 };
TIFFSetField(tif, TIFFTAG_CFAREPEATPATTERNDIM, cfapatterndim);
#if (TIFFLIB_VERSION < 20201219) && !LIBTIFF_CFA_PATTERN
TIFFSetField(tif, TIFFTAG_CFAPATTERN, "\002\001\001\000"); // BGGR
TIFFSetField(tif,
TIFFTAG_CFAPATTERN,
mp_pixel_format_cfa_pattern(mode.pixel_format));
#else
TIFFSetField(tif, TIFFTAG_CFAPATTERN, 4, "\002\001\001\000"); // BGGR
TIFFSetField(tif,
TIFFTAG_CFAPATTERN,
4,
mp_pixel_format_cfa_pattern(mode.pixel_format));
#endif
printf("TIFF version %d\n", TIFFLIB_VERSION);
int whitelevel = camera->whitelevel;
@ -480,10 +511,22 @@ process_image_for_capture(const uint8_t *image, int count)
TIFFCheckpointDirectory(tif);
printf("Writing frame to %s\n", fname);
uint8_t *output_image = (uint8_t *)image;
// Repack 10-bit image from sensor format into a sequential format
if (mp_pixel_format_bits_per_pixel(mode.pixel_format) == 10) {
output_image = malloc(mp_pixel_format_width_to_bytes(
mode.pixel_format, mode.width) *
mode.height);
repack_image_sequencial(
image, output_image, mode.width, mode.height);
}
for (int row = 0; row < mode.height; row++) {
TIFFWriteScanline(
tif,
(void *)image +
(void *)output_image +
(row * mp_pixel_format_width_to_bytes(
mode.pixel_format, mode.width)),
row,
@ -491,6 +534,9 @@ process_image_for_capture(const uint8_t *image, int count)
}
TIFFWriteDirectory(tif);
if (output_image != image)
free(output_image);
// Add an EXIF block to the tiff
TIFFCreateEXIFDirectory(tif);
// 1 = manual, 2 = full auto, 3 = aperture priority, 4 = shutter priority
@ -718,7 +764,7 @@ mp_process_pipeline_capture()
}
static void
on_output_changed()
on_output_changed(bool format_changed)
{
output_buffer_width = mode.width / 2;
output_buffer_height = mode.height / 2;
@ -744,6 +790,17 @@ on_output_changed()
glBindTexture(GL_TEXTURE_2D, 0);
// Create new gles2_debayer on format change
if (format_changed) {
if (gles2_debayer)
gles2_debayer_free(gles2_debayer);
gles2_debayer = gles2_debayer_new(mode.pixel_format);
check_gl();
gles2_debayer_use(gles2_debayer);
}
gles2_debayer_configure(
gles2_debayer,
output_buffer_width,
@ -772,6 +829,8 @@ update_state(MPPipeline *pipeline, const struct mp_process_pipeline_state *state
preview_height != state->preview_height ||
device_rotation != state->device_rotation;
const bool format_changed = mode.pixel_format != state->mode.pixel_format;
camera = state->camera;
mode = state->mode;
@ -793,7 +852,7 @@ update_state(MPPipeline *pipeline, const struct mp_process_pipeline_state *state
if (output_changed) {
camera_rotation = mod(camera->rotate - device_rotation, 360);
on_output_changed();
on_output_changed(format_changed);
}
struct mp_main_state main_state = {

View File

@ -174,7 +174,11 @@ process_image(MPPipeline *pipeline, MPZBarImage **_image)
assert(image->pixel_format == MP_PIXEL_FMT_BGGR8 ||
image->pixel_format == MP_PIXEL_FMT_GBRG8 ||
image->pixel_format == MP_PIXEL_FMT_GRBG8 ||
image->pixel_format == MP_PIXEL_FMT_RGGB8);
image->pixel_format == MP_PIXEL_FMT_RGGB8 ||
image->pixel_format == MP_PIXEL_FMT_BGGR10P ||
image->pixel_format == MP_PIXEL_FMT_GBRG10P ||
image->pixel_format == MP_PIXEL_FMT_GRBG10P ||
image->pixel_format == MP_PIXEL_FMT_RGGB10P);
// Create a grayscale image for scanning from the current preview.
// Rotate/mirror correctly.
@ -182,11 +186,46 @@ process_image(MPPipeline *pipeline, MPZBarImage **_image)
int height = image->height / 2;
uint8_t *data = malloc(width * height * sizeof(uint8_t));
size_t row_length =
mp_pixel_format_width_to_bytes(image->pixel_format, image->width);
size_t i = 0;
for (int y = 0; y < image->height; y += 2) {
for (int x = 0; x < image->width; x += 2) {
data[i++] = image->data[x + image->width * y];
size_t offset;
switch (image->pixel_format) {
case MP_PIXEL_FMT_BGGR8:
case MP_PIXEL_FMT_GBRG8:
case MP_PIXEL_FMT_GRBG8:
case MP_PIXEL_FMT_RGGB8:
for (int y = 0; y < image->height; y += 2) {
for (int x = 0; x < row_length; x += 2) {
data[i++] = image->data[x + row_length * y];
}
}
break;
case MP_PIXEL_FMT_BGGR10P:
case MP_PIXEL_FMT_GBRG10P:
case MP_PIXEL_FMT_GRBG10P:
case MP_PIXEL_FMT_RGGB10P:
// Skip 5th byte of each 4-pixel segment by incrementing an
// offset every time a 5th byte is reached, making the
// X coordinate land on the next byte:
//
// image->data | | | | X | | | | X | | | | X | | | | X | ...
// x 0 2 4 6 8 10 12 14 16 18 20 ...
// offset 0 1 2 3 4 5 ...
// > ---> -----> ------->
// x + offset 0 2 4 6 8 10 12 16 18 ...
for (int y = 0; y < image->height; y += 2) {
offset = 0;
for (int x = 0; x < image->width; x += 2) {
if (x % 4 == 0)
offset += 1;
data[i++] = image->data[x + offset + row_length * y];
}
}
break;
default:
assert(0);
}
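For reference, in this packed layout the byte holding pixel x's high 8 bits sits at offset x + x / 4 within its row, since every completed group of four pixels contributes one extra low-bits byte. A minimal sketch of that index calculation, independent of the running-offset loop above:

```c
/* Minimal sketch (not taken from the code above): byte offset of the
 * high 8 bits of pixel x in a 10-bit packed row, where each group of
 * 4 pixels occupies 5 bytes. */
#include <stddef.h>

static inline size_t packed10_high_byte_index(size_t x)
{
        return x + x / 4;
}
```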
// Create image for zbar

View File

@ -1,7 +1,117 @@
#include "device.h"
#include <assert.h>
#include <linux/limits.h>
#include <linux/media.h>
#include <stdio.h>
const char *
entity_type_str(uint32_t type)
{
switch (type) {
case MEDIA_ENT_F_UNKNOWN:
return "UNKNOWN";
case MEDIA_ENT_F_V4L2_SUBDEV_UNKNOWN:
return "V4L2_SUBDEV_UNKNOWN";
case MEDIA_ENT_F_IO_V4L:
return "IO_V4L";
case MEDIA_ENT_F_IO_VBI:
return "IO_VBI";
case MEDIA_ENT_F_IO_SWRADIO:
return "IO_SWRADIO";
case MEDIA_ENT_F_IO_DTV:
return "IO_DTV";
case MEDIA_ENT_F_DTV_DEMOD:
return "DTV_DEMOD";
case MEDIA_ENT_F_TS_DEMUX:
return "TS_DEMUX";
case MEDIA_ENT_F_DTV_CA:
return "DTV_CA";
case MEDIA_ENT_F_DTV_NET_DECAP:
return "DTV_NET_DECAP";
case MEDIA_ENT_F_CAM_SENSOR:
return "CAM_SENSOR";
case MEDIA_ENT_F_FLASH:
return "FLASH";
case MEDIA_ENT_F_LENS:
return "LENS";
case MEDIA_ENT_F_ATV_DECODER:
return "ATV_DECODER";
case MEDIA_ENT_F_TUNER:
return "TUNER";
case MEDIA_ENT_F_IF_VID_DECODER:
return "IF_VID_DECODER";
case MEDIA_ENT_F_IF_AUD_DECODER:
return "IF_AUD_DECODER";
case MEDIA_ENT_F_AUDIO_CAPTURE:
return "AUDIO_CAPTURE";
case MEDIA_ENT_F_AUDIO_PLAYBACK:
return "AUDIO_PLAYBACK";
case MEDIA_ENT_F_AUDIO_MIXER:
return "AUDIO_MIXER";
case MEDIA_ENT_F_PROC_VIDEO_COMPOSER:
return "PROC_VIDEO_COMPOSER";
case MEDIA_ENT_F_PROC_VIDEO_PIXEL_FORMATTER:
return "PROC_VIDEO_PIXEL_FORMATTER";
case MEDIA_ENT_F_PROC_VIDEO_PIXEL_ENC_CONV:
return "PROC_VIDEO_PIXEL_ENC_CONV";
case MEDIA_ENT_F_PROC_VIDEO_LUT:
return "PROC_VIDEO_LUT";
case MEDIA_ENT_F_PROC_VIDEO_SCALER:
return "PROC_VIDEO_SCALER";
case MEDIA_ENT_F_PROC_VIDEO_STATISTICS:
return "PROC_VIDEO_STATISTICS";
default:
return "invalid type";
}
}
const char *
intf_type_str(uint32_t type)
{
switch (type) {
case MEDIA_INTF_T_DVB_FE:
return "DVB_FE";
case MEDIA_INTF_T_DVB_DEMUX:
return "DVB_DEMUX";
case MEDIA_INTF_T_DVB_DVR:
return "DVB_DVR";
case MEDIA_INTF_T_DVB_CA:
return "DVB_CA";
case MEDIA_INTF_T_DVB_NET:
return "DVB_NET";
case MEDIA_INTF_T_V4L_VIDEO:
return "V4L_VIDEO";
case MEDIA_INTF_T_V4L_VBI:
return "V4L_VBI";
case MEDIA_INTF_T_V4L_RADIO:
return "V4L_RADIO";
case MEDIA_INTF_T_V4L_SUBDEV:
return "V4L_SUBDEV";
case MEDIA_INTF_T_V4L_SWRADIO:
return "V4L_SWRADIO";
case MEDIA_INTF_T_V4L_TOUCH:
return "V4L_TOUCH";
case MEDIA_INTF_T_ALSA_PCM_CAPTURE:
return "ALSA_PCM_CAPTURE";
case MEDIA_INTF_T_ALSA_PCM_PLAYBACK:
return "ALSA_PCM_PLAYBACK";
case MEDIA_INTF_T_ALSA_CONTROL:
return "ALSA_CONTROL";
case MEDIA_INTF_T_ALSA_COMPRESS:
return "ALSA_COMPRESS";
case MEDIA_INTF_T_ALSA_RAWMIDI:
return "ALSA_RAWMIDI";
case MEDIA_INTF_T_ALSA_HWDEP:
return "ALSA_HWDEP";
case MEDIA_INTF_T_ALSA_SEQUENCER:
return "ALSA_SEQUENCER";
case MEDIA_INTF_T_ALSA_TIMER:
return "ALSA_TIMER";
default:
return "invalid type";
}
}
int
main(int argc, char *argv[])
{
@ -12,6 +122,7 @@ main(int argc, char *argv[])
const struct media_device_info *info = mp_device_get_info(device);
printf("%s (%s) %s\n", info->model, info->driver, info->serial);
printf(" Path: %s\n", mp_device_list_get_path(list));
printf(" Bus Info: %s\n", info->bus_info);
printf(" Media Version: %d\n", info->media_version);
printf(" HW Revision: %d\n", info->hw_revision);
@ -22,10 +133,10 @@ main(int argc, char *argv[])
size_t num = mp_device_get_num_entities(device);
printf(" Entities (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d %s (%d)\n",
printf(" %d %s (%s)\n",
entities[i].id,
entities[i].name,
entities[i].function);
entity_type_str(entities[i].function));
}
const struct media_v2_interface *interfaces =
@ -33,33 +144,61 @@ main(int argc, char *argv[])
num = mp_device_get_num_interfaces(device);
printf(" Interfaces (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d (%d - %d) devnode %d:%d\n",
// Unused
assert(interfaces[i].flags == 0);
char buf[PATH_MAX];
buf[0] = '\0';
mp_find_device_path(interfaces[i].devnode, buf, PATH_MAX);
printf(" %d (%s) devnode %d:%d %s\n",
interfaces[i].id,
interfaces[i].intf_type,
interfaces[i].flags,
intf_type_str(interfaces[i].intf_type),
interfaces[i].devnode.major,
interfaces[i].devnode.minor);
interfaces[i].devnode.minor,
buf);
}
const struct media_v2_pad *pads = mp_device_get_pads(device);
num = mp_device_get_num_pads(device);
printf(" Pads (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d for device:%d (%d)\n",
printf(" %d for device:%d (",
pads[i].id,
pads[i].entity_id,
pads[i].flags);
pads[i].entity_id);
if (pads[i].flags & MEDIA_PAD_FL_SINK)
printf("SINK ");
if (pads[i].flags & MEDIA_PAD_FL_SOURCE)
printf("SOURCE ");
if (pads[i].flags & MEDIA_PAD_FL_MUST_CONNECT)
printf("MUST_CONNECT ");
printf(")\n");
}
const struct media_v2_link *links = mp_device_get_links(device);
num = mp_device_get_num_links(device);
printf(" Links (%ld):\n", num);
for (int i = 0; i < num; ++i) {
printf(" %d from:%d to:%d (%d)\n",
printf(" %d from:%d to:%d (",
links[i].id,
links[i].source_id,
links[i].sink_id,
links[i].flags);
links[i].sink_id);
if (links[i].flags & MEDIA_LNK_FL_ENABLED)
printf("ENABLED ");
if (links[i].flags & MEDIA_LNK_FL_IMMUTABLE)
printf("IMMUTABLE ");
if (links[i].flags & MEDIA_LNK_FL_DYNAMIC)
printf("DYNAMIC ");
uint32_t type = links[i].flags & MEDIA_LNK_FL_LINK_TYPE;
if (type == MEDIA_LNK_FL_INTERFACE_LINK) {
printf("INTERFACE)\n");
} else {
assert(type == MEDIA_LNK_FL_DATA_LINK);
printf("DATA)\n");
}
}
list = mp_device_list_next(list);