Remove commented out code in main

Benjamin Schaaf 2020-11-26 22:39:43 +11:00
parent a7e7f802bc
commit dda7367368
1 changed file with 0 additions and 414 deletions

main.c

@@ -173,121 +173,6 @@ void mp_main_capture_completed(const char *fname)
g_free);
}
// static void
// start_capturing(int fd)
// {
// for (int i = 0; i < n_buffers; ++i) {
// struct v4l2_buffer buf = {
// .type = current.type,
// .memory = V4L2_MEMORY_MMAP,
// .index = i,
// };
// if(current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// buf.m.planes = buf_planes;
// buf.length = 1;
// }
// if (xioctl(fd, VIDIOC_QBUF, &buf) == -1) {
// errno_exit("VIDIOC_QBUF");
// }
// }
// if (xioctl(fd, VIDIOC_STREAMON, &current.type) == -1) {
// errno_exit("VIDIOC_STREAMON");
// }
// ready = 1;
// }
// static void
// stop_capturing(int fd)
// {
// int i;
// ready = 0;
// printf("Stopping capture\n");
// if (xioctl(fd, VIDIOC_STREAMOFF, &current.type) == -1) {
// errno_exit("VIDIOC_STREAMOFF");
// }
// for (i = 0; i < n_buffers; ++i) {
// munmap(buffers[i].start, buffers[i].length);
// }
// }
// static void
// init_mmap(int fd)
// {
// struct v4l2_requestbuffers req = {
// .count = 4,
// .type = current.type,
// .memory = V4L2_MEMORY_MMAP,
// };
// if (xioctl(fd, VIDIOC_REQBUFS, &req) == -1) {
// if (errno == EINVAL) {
// fprintf(stderr, "%s does not support memory mapping",
// current.dev_name);
// exit(EXIT_FAILURE);
// } else {
// errno_exit("VIDIOC_REQBUFS");
// }
// }
// if (req.count < 2) {
// fprintf(stderr, "Insufficient buffer memory on %s\n",
// current.dev_name);
// exit(EXIT_FAILURE);
// }
// buffers = calloc(req.count, sizeof(buffers[0]));
// if (!buffers) {
// fprintf(stderr, "Out of memory\\n");
// exit(EXIT_FAILURE);
// }
// for (n_buffers = 0; n_buffers < req.count; ++n_buffers) {
// struct v4l2_buffer buf = {
// .type = current.type,
// .memory = V4L2_MEMORY_MMAP,
// .index = n_buffers,
// };
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// buf.m.planes = buf_planes;
// buf.length = 1;
// }
// if (xioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {
// errno_exit("VIDIOC_QUERYBUF");
// }
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// buffers[n_buffers].length = buf.m.planes[0].length;
// buffers[n_buffers].start = mmap(NULL /* start anywhere */,
// buf.m.planes[0].length,
// PROT_READ | PROT_WRITE /* required */,
// MAP_SHARED /* recommended */,
// fd, buf.m.planes[0].m.mem_offset);
// } else {
// buffers[n_buffers].length = buf.length;
// buffers[n_buffers].start = mmap(NULL /* start anywhere */,
// buf.length,
// PROT_READ | PROT_WRITE /* required */,
// MAP_SHARED /* recommended */,
// fd, buf.m.offset);
// }
// if (MAP_FAILED == buffers[n_buffers].start) {
// errno_exit("mmap");
// }
// }
// }
static void
draw_controls()
{
@@ -370,226 +255,6 @@ draw_controls()
gtk_widget_queue_draw_area(preview, 0, 0, preview_width, 32);
}
// static void
// init_sensor(char *fn, int width, int height, int mbus, int rate)
// {
// int fd;
// struct v4l2_subdev_frame_interval interval = {};
// struct v4l2_subdev_format fmt = {};
// fd = open(fn, O_RDWR);
// g_print("Setting sensor rate to %d\n", rate);
// interval.pad = 0;
// interval.interval.numerator = 1;
// interval.interval.denominator = rate;
// if (xioctl(fd, VIDIOC_SUBDEV_S_FRAME_INTERVAL, &interval) == -1) {
// errno_exit("VIDIOC_SUBDEV_S_FRAME_INTERVAL");
// }
// if (interval.interval.numerator != 1 || interval.interval.denominator != rate)
// g_printerr("Driver chose %d/%d instead\n",
// interval.interval.numerator, interval.interval.denominator);
// g_print("Setting sensor to %dx%d fmt %d\n",
// width, height, mbus);
// fmt.pad = 0;
// fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
// fmt.format.code = mbus;
// fmt.format.width = width;
// fmt.format.height = height;
// fmt.format.field = V4L2_FIELD_ANY;
// if (xioctl(fd, VIDIOC_SUBDEV_S_FMT, &fmt) == -1) {
// errno_exit("VIDIOC_SUBDEV_S_FMT");
// }
// if (fmt.format.width != width || fmt.format.height != height || fmt.format.code != mbus)
// g_printerr("Driver chose %dx%d fmt %d instead\n",
// fmt.format.width, fmt.format.height,
// fmt.format.code);
// // Trigger continuous auto focus if the sensor supports it
// if (v4l2_has_control(fd, V4L2_CID_FOCUS_AUTO)) {
// current.has_af_c = 1;
// v4l2_ctrl_set(fd, V4L2_CID_FOCUS_AUTO, 1);
// }
// if (v4l2_has_control(fd, V4L2_CID_AUTO_FOCUS_START)) {
// current.has_af_s = 1;
// }
// if (v4l2_has_control(fd, V4L2_CID_GAIN)) {
// current.gain_ctrl = V4L2_CID_GAIN;
// current.gain_max = v4l2_ctrl_get_max(fd, V4L2_CID_GAIN);
// }
// if (v4l2_has_control(fd, V4L2_CID_ANALOGUE_GAIN)) {
// current.gain_ctrl = V4L2_CID_ANALOGUE_GAIN;
// current.gain_max = v4l2_ctrl_get_max(fd, V4L2_CID_ANALOGUE_GAIN);
// }
// auto_exposure = 1;
// auto_gain = 1;
// draw_controls();
// close(current.fd);
// current.fd = fd;
// }
// static void
// init_media_entity(char *fn, int width, int height, int mbus)
// {
// int fd;
// struct v4l2_subdev_format fmt = {};
// fd = open(fn, O_RDWR);
// // Apply mode to v4l2 subdev
// g_print("Setting node to %dx%d fmt %d\n",
// width, height, mbus);
// fmt.pad = 0;
// fmt.which = V4L2_SUBDEV_FORMAT_ACTIVE;
// fmt.format.code = mbus;
// fmt.format.width = width;
// fmt.format.height = height;
// fmt.format.field = V4L2_FIELD_ANY;
// if (xioctl(fd, VIDIOC_SUBDEV_S_FMT, &fmt) == -1) {
// errno_exit("VIDIOC_SUBDEV_S_FMT");
// }
// if (fmt.format.width != width || fmt.format.height != height || fmt.format.code != mbus)
// g_printerr("Driver chose %dx%d fmt %d instead\n",
// fmt.format.width, fmt.format.height,
// fmt.format.code);
// }
// static int
// init_device(int fd)
// {
// struct v4l2_capability cap;
// if (xioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
// if (errno == EINVAL) {
// fprintf(stderr, "%s is no V4L2 device\n",
// current.dev_name);
// exit(EXIT_FAILURE);
// } else {
// errno_exit("VIDIOC_QUERYCAP");
// }
// }
// // Detect buffer format for the interface node, preferring normal video capture
// if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
// current.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// } else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
// printf("[%s] Using the MPLANE buffer format\n", current.cfg_name);
// current.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
// } else {
// fprintf(stderr, "%s is no video capture device\n",
// current.dev_name);
// exit(EXIT_FAILURE);
// }
// if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
// fprintf(stderr, "%s does not support streaming i/o\n",
// current.dev_name);
// exit(EXIT_FAILURE);
// }
// /* Select video input, video standard and tune here. */
// struct v4l2_cropcap cropcap = {
// .type = current.type,
// };
// struct v4l2_crop crop = {0};
// if (xioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
// crop.type = current.type;
// crop.c = cropcap.defrect; /* reset to default */
// if (xioctl(fd, VIDIOC_S_CROP, &crop) == -1) {
// switch (errno) {
// case EINVAL:
// /* Cropping not supported. */
// break;
// default:
// /* Errors ignored. */
// break;
// }
// }
// } else {
// /* Errors ignored. */
// }
// // Request a video format
// struct v4l2_format fmt = {
// .type = current.type,
// };
// if (current.width > 0) {
// g_print("Setting camera to %dx%d fmt %d\n",
// current.width, current.height, current.fmt);
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// fmt.fmt.pix_mp.width = current.width;
// fmt.fmt.pix_mp.height = current.height;
// fmt.fmt.pix_mp.pixelformat = current.fmt;
// fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
// } else {
// fmt.fmt.pix.width = current.width;
// fmt.fmt.pix.height = current.height;
// fmt.fmt.pix.pixelformat = current.fmt;
// fmt.fmt.pix.field = V4L2_FIELD_ANY;
// }
// if (xioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {
// g_printerr("VIDIOC_S_FMT failed");
// show_error("Could not set camera mode");
// return -1;
// }
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
// && (fmt.fmt.pix_mp.width != current.width ||
// fmt.fmt.pix_mp.height != current.height ||
// fmt.fmt.pix_mp.pixelformat != current.fmt))
// g_printerr("Driver returned %dx%d fmt %d\n",
// fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
// fmt.fmt.pix_mp.pixelformat);
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE
// && (fmt.fmt.pix.width != current.width ||
// fmt.fmt.pix.height != current.height ||
// fmt.fmt.pix.pixelformat != current.fmt))
// g_printerr("Driver returned %dx%d fmt %d\n",
// fmt.fmt.pix.width, fmt.fmt.pix.height,
// fmt.fmt.pix.pixelformat);
// /* Note VIDIOC_S_FMT may change width and height. */
// } else {
// if (xioctl(fd, VIDIOC_G_FMT, &fmt) == -1) {
// errno_exit("VIDIOC_G_FMT");
// }
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// g_print("Got %dx%d fmt %d from the driver\n",
// fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height,
// fmt.fmt.pix_mp.pixelformat);
// current.width = fmt.fmt.pix.width;
// current.height = fmt.fmt.pix.height;
// } else {
// g_print("Got %dx%d fmt %d from the driver\n",
// fmt.fmt.pix.width, fmt.fmt.pix.height,
// fmt.fmt.pix.pixelformat);
// current.width = fmt.fmt.pix_mp.width;
// current.height = fmt.fmt.pix_mp.height;
// }
// }
// if (current.type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
// current.fmt = fmt.fmt.pix_mp.pixelformat;
// } else {
// current.fmt = fmt.fmt.pix.pixelformat;
// }
// init_mmap(fd);
// return 0;
// }
static gboolean
preview_draw(GtkWidget *widget, cairo_t *cr, gpointer data)
{
@@ -638,85 +303,6 @@ preview_configure(GtkWidget *widget, GdkEventConfigure *event)
return TRUE;
}
// int
// setup_camera(int cid)
// {
// struct media_link_desc link = {0};
// // Kill existing links for cameras in the same graph
// for(int i=0; i<NUM_CAMERAS; i++) {
// if(!cameras[i].exists)
// continue;
// if(i == cid)
// continue;
// if(strcmp(cameras[i].media_dev_fname, cameras[cid].media_dev_fname) != 0)
// continue;
// // Disable the interface<->front link
// link.flags = 0;
// link.source.entity = cameras[i].entity_id;
// link.source.index = 0;
// link.sink.entity = cameras[i].interface_entity_id;
// link.sink.index = 0;
// if (xioctl(cameras[cid].media_fd, MEDIA_IOC_SETUP_LINK, &link) < 0) {
// g_printerr("Could not disable [%s] camera link\n", cameras[i].cfg_name);
// return -1;
// }
// }
// // Enable the interface<->sensor link
// link.flags = MEDIA_LNK_FL_ENABLED;
// link.source.entity = cameras[cid].entity_id;
// link.source.index = 0;
// link.sink.entity = cameras[cid].interface_entity_id;
// link.sink.index = 0;
// current = cameras[cid];
// if (xioctl(cameras[cid].media_fd, MEDIA_IOC_SETUP_LINK, &link) < 0) {
// g_printerr("[%s] Could not enable direct sensor->if link\n", cameras[cid].cfg_name);
// for(int i=0;i<NUM_LINKS; i++) {
// if (!cameras[cid].media_links[i].valid)
// continue;
// if (cameras[cid].media_links[i].source_entity_id < 1) {
// g_printerr("[%s] media entry [%s] not found\n",
// cameras[cid].cfg_name,
// cameras[cid].media_links[i].source_name);
// }
// if (cameras[cid].media_links[i].target_entity_id < 1) {
// g_printerr("[%s] media entry [%s] not found\n",
// cameras[cid].cfg_name,
// cameras[cid].media_links[i].target_name);
// }
// link.flags = MEDIA_LNK_FL_ENABLED;
// link.source.entity = cameras[cid].media_links[i].source_entity_id;
// link.source.index = cameras[cid].media_links[i].source_port;
// link.sink.entity = cameras[cid].media_links[i].target_entity_id;
// link.sink.index = cameras[cid].media_links[i].target_port;
// if (xioctl(cameras[cid].media_fd, MEDIA_IOC_SETUP_LINK, &link) < 0) {
// g_printerr("[%s] Could not link [%s:%d] -> [%s:%d]\n",
// cameras[cid].cfg_name,
// cameras[cid].media_links[i].source_name,
// cameras[cid].media_links[i].source_port,
// cameras[cid].media_links[i].target_name,
// cameras[cid].media_links[i].target_port);
// }
// init_media_entity(cameras[cid].media_links[i].source_fname, current.width, current.height, current.mbus);
// init_media_entity(cameras[cid].media_links[i].target_fname, current.width, current.height, current.mbus);
// }
// }
// // Find camera node
// init_sensor(current.dev_fname, current.width, current.height, current.mbus, current.rate);
// return 0;
// }
void
on_open_last_clicked(GtkWidget *widget, gpointer user_data)
{