Refactor h264 encoder, allow switching codecs at runtime
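
With this change the H.264 path lives in its own VaapiEncoderH264 class (vaapi_encoder_h264.h), run() is templated on the codec, and main() selects the encoder from the command line: no argument encodes H.264 to out.264, a numeric argument selects HEVC to out.265, and a non-zero value additionally enables P010.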

mittorn 2024-10-12 04:26:44 +03:00
parent b9cb95c5e6
commit 88f37f23f5
4 changed files with 498 additions and 15 deletions


@@ -507,7 +507,7 @@ encoder_update_slice_parameter(struct vaapi_recorder *r, int slice_type)
{
r->encoder.param.slice.RefPicList1[i].picture_id = VA_INVALID_ID;
r->encoder.param.slice.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
-r->encoder.param.slice.RefPicList0[i].frame_idx = 0;
+r->encoder.param.slice.RefPicList1[i].frame_idx = 0;
}
if(r->frame_count)
{

vaapi_encoder_h264.h (new file, 475 lines)

@@ -0,0 +1,475 @@
#ifndef VAAPI_ENCODER_H264_H
#define VAAPI_ENCODER_H264_H
#include "vaapi_encoder.h"
#include <string.h>
#include <va/va_enc_h264.h>
#define NAL_REF_IDC_NONE 0
#define NAL_REF_IDC_LOW 1
#define NAL_REF_IDC_MEDIUM 2
#define NAL_REF_IDC_HIGH 3
#define NAL_NON_IDR 1
#define NAL_IDR 5
#define NAL_SPS 7
#define NAL_PPS 8
#define NAL_SEI 6
#define SLICE_TYPE_P 0
#define SLICE_TYPE_B 1
#define SLICE_TYPE_I 2
#define SLICE_TYPE_P_ONLY 5
#define H264_IS_P_SLICE(type) (SLICE_TYPE_P == (type) || SLICE_TYPE_P_ONLY == (type))
#define H264_IS_B_SLICE(type) (SLICE_TYPE_B == (type))
#define H264_IS_I_SLICE(type) (SLICE_TYPE_I == (type))
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
#define H264_PROFILE_IDC_BASELINE 66
#define H264_PROFILE_IDC_MAIN 77
#define H264_PROFILE_IDC_HIGH 100
struct BaseBitstreamH264 : BaseBitstream<8>
{
inline void NalStartCodePrefix()
{
PutUI(0x00000001, 32);
}
inline void NalHeader(int nal_ref_idc, int nal_unit_type)
{
PutUI(0, 1);
PutUI(nal_ref_idc, 2);
PutUI(nal_unit_type, 5);
}
};
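/*
 * Packed SPS writer: start code, SPS NAL header, then the sequence parameter
 * set fields taken from VAEncSequenceParameterBufferH264 (Main profile, no
 * VUI). PutUI/PutUE/PutSE are presumably provided by BaseBitstream<8> in
 * vaapi_encoder.h: PutUI writes a fixed-width field, PutUE writes unsigned
 * Exp-Golomb ue(v) (leading zeros, then value+1), PutSE writes signed
 * Exp-Golomb se(v); e.g. ue(3) -> 00100, se(-2) maps to ue(4) -> 00101.
 */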
struct PackedSPSH264 : BaseBitstreamH264
{
PackedSPSH264(const VAEncSequenceParameterBufferH264 *seq, int constraint_set_flag): BaseBitstreamH264()
{
int i;
NalStartCodePrefix();
NalHeader(NAL_REF_IDC_HIGH, NAL_SPS);
PutUI(H264_PROFILE_IDC_MAIN, 8);
/* constraint_set[0-3] flag */
for (i = 0; i < 4; i++) {
int set = (constraint_set_flag & (1 << i)) ? 1 : 0;
PutUI(set, 1);
}
/* reserved_zero_4bits */
PutUI(0, 4);
PutUI(seq->level_idc, 8);
PutUE(seq->seq_parameter_set_id);
PutUE(seq->seq_fields.bits.log2_max_frame_num_minus4);
PutUE(seq->seq_fields.bits.pic_order_cnt_type);
if(seq->seq_fields.bits.pic_order_cnt_type != 2)
PutUE(seq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);
PutUE(seq->max_num_ref_frames);
/* gaps_in_frame_num_value_allowed_flag */
PutUI(0, 1);
/* pic_width_in_mbs_minus1, pic_height_in_map_units_minus1 */
PutUE(seq->picture_width_in_mbs - 1);
PutUE(seq->picture_height_in_mbs - 1);
PutUI(seq->seq_fields.bits.frame_mbs_only_flag, 1);
PutUI(seq->seq_fields.bits.direct_8x8_inference_flag, 1);
PutUI(seq->frame_cropping_flag, 1);
if (seq->frame_cropping_flag) {
PutUE(seq->frame_crop_left_offset);
PutUE(seq->frame_crop_right_offset);
PutUE(seq->frame_crop_top_offset);
PutUE(seq->frame_crop_bottom_offset);
}
/* vui_parameters_present_flag */
PutUI(0, 1);
#if 0
/* aspect_ratio_info_present_flag */
PutUI(0, 1);
/* overscan_info_present_flag */
PutUI(0, 1);
/* video_signal_type_present_flag */
PutUI(0, 1);
/* chroma_loc_info_present_flag */
PutUI(0, 1);
/* timing_info_present_flag */
PutUI(1, 1);
PutUI(seq->num_units_in_tick, 32);
PutUI(seq->time_scale, 32);
/* fixed_frame_rate_flag */
PutUI(1, 1);
/* nal_hrd_parameters_present_flag */
PutUI(0, 1);
/* vcl_hrd_parameters_present_flag */
PutUI(0, 1);
/* low_delay_hrd_flag */
PutUI(0, 1);
/* pic_struct_present_flag */
PutUI(0, 1);
/* bitstream_restriction_flag */
PutUI(0, 1);
#endif
RBSPTrailingBits();
End();
}
};
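/*
 * Packed PPS writer: start code, PPS NAL header, then the picture parameter
 * set fields from VAEncPictureParameterBufferH264. Slice groups, scaling
 * matrices, constrained intra prediction and redundant_pic_cnt are all
 * hard-coded to zero here.
 */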
struct PackedPPSH264 : BaseBitstreamH264
{
PackedPPSH264(const VAEncPictureParameterBufferH264 *pic): BaseBitstreamH264()
{
NalStartCodePrefix();
NalHeader(NAL_REF_IDC_HIGH, NAL_PPS);
/* pic_parameter_set_id, seq_parameter_set_id */
PutUE(pic->pic_parameter_set_id);
PutUE(pic->seq_parameter_set_id);
PutUI(pic->pic_fields.bits.entropy_coding_mode_flag, 1);
/* pic_order_present_flag: 0 */
PutUI(0, 1);
/* num_slice_groups_minus1 */
PutUE(0);
PutUE(pic->num_ref_idx_l0_active_minus1);
PutUE(pic->num_ref_idx_l1_active_minus1);
PutUI(pic->pic_fields.bits.weighted_pred_flag, 1);
PutUI(pic->pic_fields.bits.weighted_bipred_idc, 2);
/* pic_init_qp_minus26, pic_init_qs_minus26, chroma_qp_index_offset */
PutSE(pic->pic_init_qp - 26);
PutSE(0);
PutSE(0);
PutUI(pic->pic_fields.bits.deblocking_filter_control_present_flag, 1);
/* constrained_intra_pred_flag, redundant_pic_cnt_present_flag */
PutUI(0, 1);
PutUI(0, 1);
PutUI(pic->pic_fields.bits.transform_8x8_mode_flag, 1);
/* pic_scaling_matrix_present_flag */
PutUI(0, 1);
PutSE(pic->second_chroma_qp_index_offset );
RBSPTrailingBits();
End();
}
};
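/*
 * Packed slice header writer. The slice type is a template parameter so the
 * NAL header and IDR-only fields are selected at compile time with
 * `if constexpr`, while the remaining fields (frame_num, POC lsb, QP delta,
 * deblocking) come from the VA-API slice/picture buffers at run time. Only
 * frame pictures are supported: frame_mbs_only_flag must be 1.
 */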
template<int slice_type>
struct PackedSliceH264: BaseBitstreamH264
{
PackedSliceH264(VAEncSequenceParameterBufferH264 *seq,VAEncPictureParameterBufferH264 *pic, VAEncSliceParameterBufferH264 *slice): BaseBitstreamH264()
{
constexpr bool is_idr = true;
constexpr bool is_ref = true;
NalStartCodePrefix();
if constexpr(H264_IS_I_SLICE(slice_type))
NalHeader(NAL_REF_IDC_HIGH, is_idr ? NAL_IDR : NAL_NON_IDR);
else if (H264_IS_P_SLICE(slice->slice_type))
NalHeader(NAL_REF_IDC_MEDIUM, NAL_NON_IDR);
else
NalHeader(is_ref ? NAL_REF_IDC_LOW : NAL_REF_IDC_NONE, NAL_NON_IDR);
int first_mb_in_slice = slice->macroblock_address;
PutUE(first_mb_in_slice); /* first_mb_in_slice: 0 */
PutUE(slice->slice_type); /* slice_type */
PutUE(slice->pic_parameter_set_id); /* pic_parameter_set_id: 0 */
PutUI(pic->frame_num, seq->seq_fields.bits.log2_max_frame_num_minus4 + 4); /* frame_num */
/* frame_mbs_only_flag == 1 */
if (!seq->seq_fields.bits.frame_mbs_only_flag) {
/* FIXME: */
assert(0);
}
if constexpr(H264_IS_I_SLICE(slice_type))
if (pic->pic_fields.bits.idr_pic_flag)
PutUE(slice->idr_pic_id); /* idr_pic_id: 0 */
if (seq->seq_fields.bits.pic_order_cnt_type == 0) {
PutUI(pic->CurrPic.TopFieldOrderCnt, seq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
/* pic_order_present_flag == 0 */
} else {
/* FIXME: */
// assert(0);
}
/* redundant_pic_cnt_present_flag == 0 */
/* slice type */
if constexpr(H264_IS_P_SLICE(slice_type)) {
PutUI(slice->num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
if (slice->num_ref_idx_active_override_flag)
PutUE(slice->num_ref_idx_l0_active_minus1);
/* ref_pic_list_reordering */
PutUI(0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
} else if (H264_IS_B_SLICE(slice_type)) {
PutUI(slice->direct_spatial_mv_pred_flag, 1); /* direct_spatial_mv_pred: 1 */
PutUI(slice->num_ref_idx_active_override_flag, 1); /* num_ref_idx_active_override_flag: */
if (slice->num_ref_idx_active_override_flag) {
PutUE(slice->num_ref_idx_l0_active_minus1);
PutUE(slice->num_ref_idx_l1_active_minus1);
}
/* ref_pic_list_reordering */
PutUI(0, 1); /* ref_pic_list_reordering_flag_l0: 0 */
PutUI(0, 1); /* ref_pic_list_reordering_flag_l1: 0 */
}
if ((pic->pic_fields.bits.weighted_pred_flag &&
H264_IS_P_SLICE(slice->slice_type)) ||
((pic->pic_fields.bits.weighted_bipred_idc == 1) &&
H264_IS_B_SLICE(slice->slice_type))) {
/* FIXME: fill weight/offset table */
assert(0);
}
/* dec_ref_pic_marking */
if (pic->pic_fields.bits.reference_pic_flag) { /* nal_ref_idc != 0 */
unsigned char no_output_of_prior_pics_flag = 0;
unsigned char long_term_reference_flag = 0;
unsigned char adaptive_ref_pic_marking_mode_flag = 0;
if (pic->pic_fields.bits.idr_pic_flag) {
PutUI(no_output_of_prior_pics_flag, 1); /* no_output_of_prior_pics_flag: 0 */
PutUI(long_term_reference_flag, 1); /* long_term_reference_flag: 0 */
} else {
PutUI(adaptive_ref_pic_marking_mode_flag, 1); /* adaptive_ref_pic_marking_mode_flag: 0 */
}
}
if (pic->pic_fields.bits.entropy_coding_mode_flag &&
!H264_IS_I_SLICE(slice->slice_type))
PutUE(slice->cabac_init_idc); /* cabac_init_idc: 0 */
PutSE(slice->slice_qp_delta); /* slice_qp_delta: 0 */
/* ignore for SP/SI */
if (pic->pic_fields.bits.deblocking_filter_control_present_flag) {
PutUE(slice->disable_deblocking_filter_idc); /* disable_deblocking_filter_idc: 0 */
if (slice->disable_deblocking_filter_idc != 1) {
PutSE(slice->slice_alpha_c0_offset_div2); /* slice_alpha_c0_offset_div2: 2 */
PutSE(slice->slice_beta_offset_div2); /* slice_beta_offset_div2: 2 */
}
}
if (pic->pic_fields.bits.entropy_coding_mode_flag) {
ByteAligning(1);
}
End();
}
};
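/*
 * H.264 encoder on top of the shared VaapiEncoder base. It keeps one
 * SPS/PPS/slice parameter set and ping-pongs between two reconstructed
 * surfaces (reference_picture[frame_count % 2]) so every P frame references
 * the previously encoded frame.
 */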
struct VaapiEncoderH264: VaapiEncoder
{
VAEncSequenceParameterBufferH264 seq;
VAEncPictureParameterBufferH264 pic;
VAEncSliceParameterBufferH264 slice;
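// Fill SPS/PPS/slice defaults: level_idc 60, a single reference frame,
// CABAC entropy coding, pic_order_cnt_type 2 (no POC lsb in the slice
// header) and bottom cropping when the height is not a multiple of 16.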
void InitParameters()
{
int width_in_mbs, height_in_mbs;
int frame_cropping_flag = 0;
int frame_crop_bottom_offset = 0;
width_in_mbs = (width + 15) / 16;
height_in_mbs = (height + 15) / 16;
seq.level_idc = 60;
seq.intra_period = 32767; //r->encoder.intra_period;
seq.intra_idr_period = 32767;
seq.max_num_ref_frames = 1;
seq.picture_width_in_mbs = width_in_mbs;
seq.picture_height_in_mbs = height_in_mbs;
seq.seq_fields.bits.chroma_format_idc = 1;
seq.seq_fields.bits.frame_mbs_only_flag = 1;
seq.ip_period = 1;
/* Tc = num_units_in_tick / time_scale */
seq.time_scale = 180;
seq.num_units_in_tick = 15;
if (height_in_mbs * 16 - height > 0) {
frame_cropping_flag = 1;
frame_crop_bottom_offset = (height_in_mbs * 16 - height) / 2;
}
seq.frame_cropping_flag = frame_cropping_flag;
seq.frame_crop_bottom_offset = frame_crop_bottom_offset;
seq.seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = 0;//12;
seq.seq_fields.bits.log2_max_frame_num_minus4 = 4;
seq.seq_fields.bits.pic_order_cnt_type = 2;
pic.pic_init_qp = 11;
/* ENTROPY_MODE_CABAC */
pic.pic_fields.bits.entropy_coding_mode_flag = 1;
pic.pic_fields.bits.deblocking_filter_control_present_flag = 0;
pic.pic_fields.bits.reference_pic_flag = 1;
for(int i = 0; i < 16; i++)
{
pic.ReferenceFrames[i].picture_id = VA_INVALID_ID;
pic.ReferenceFrames[i].flags = VA_PICTURE_H264_INVALID;
pic.ReferenceFrames[i].frame_idx = 0;
}
slice.num_macroblocks = width_in_mbs * height_in_mbs;
slice.macroblock_info = VA_INVALID_ID;
slice.direct_spatial_mv_pred_flag = 1;
for(int i = 0; i < 32; i++)
{
slice.RefPicList0[i].picture_id = VA_INVALID_ID;
slice.RefPicList0[i].flags = VA_PICTURE_H264_INVALID;
slice.RefPicList0[i].frame_idx = 0;
}
for(int i = 0; i < 32; i++)
{
slice.RefPicList1[i].picture_id = VA_INVALID_ID;
slice.RefPicList1[i].flags = VA_PICTURE_H264_INVALID;
slice.RefPicList1[i].frame_idx = 0;
}
}
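// Create the VA context for H.264 Main profile with NV12 input and CBR rate
// control; dmabuf export of the input surfaces is handled by SetupVA()
// (from the shared vaapi_encoder.h base).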
bool Setup(int drm_fd, int width, int height, const char *filename, int *dmabuf_fd, uint64_t *mod, uint32_t *size, uint32_t *offset, uint32_t *pitch1, uint32_t *pitch2, uint64_t *modifiers, int modifierscount, bool p010)
{
VAProfile profile = VAProfileH264Main;
uint32_t format = VA_RT_FORMAT_YUV420;
uint32_t fourcc = VA_FOURCC_NV12;
if(!SetupVA(profile, format, fourcc, drm_fd, width, height, filename, dmabuf_fd, mod, size, offset, pitch1, pitch2, modifiers, modifierscount))
{
VaapiEncoder::Destroy();
return false;
}
if(!CreateContext(profile, format, fourcc, VA_RC_CBR))
{
VaapiEncoder::Destroy();
return false;
}
InitParameters();
return true;
}
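// Encode one IDR frame: a single vaBeginPicture/vaEndPicture pass that
// submits sequence, frame-rate/HRD/rate-control misc, picture and slice
// parameters together with packed SPS, PPS and slice headers, then queues
// the coded buffer through PushOutput().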
inline void EncodeIDR(int idx)
{
VAStatus status = vaBeginPicture(dpy, ctx, inputFrames[idx]);
VABufferID seqb = CreateParamererBuffer(VAEncSequenceParameterBufferType, seq);
slice.slice_type = SLICE_TYPE_I;
slice.RefPicList0[0].picture_id = VA_INVALID_ID;
slice.RefPicList0[0].flags = VA_PICTURE_H264_INVALID;
slice.RefPicList0[0].frame_idx = 0;
slice.pic_order_cnt_lsb = seq.seq_fields.bits.pic_order_cnt_type == 0? frame_count: 0;
VABufferID sliceb = CreateParamererBuffer(VAEncSliceParameterBufferType, slice);
VABufferID output = CreateOutputBuf(width * height);
pic.CurrPic.picture_id = reference_picture[frame_count % 2];
pic.CurrPic.frame_idx = frame_count;
pic.CurrPic.TopFieldOrderCnt = seq.seq_fields.bits.pic_order_cnt_type == 0? frame_count:0;
pic.ReferenceFrames[0].picture_id = VA_INVALID_ID;
pic.ReferenceFrames[0].flags = VA_PICTURE_H264_INVALID;
pic.ReferenceFrames[0].frame_idx = 0;
pic.coded_buf = output;
pic.pic_fields.bits.idr_pic_flag = 1;
VABufferID picb = CreateParamererBuffer(VAEncPictureParameterBufferType, pic);
VABufferID fpsb = CreateMiscParameterBuffer(VAEncMiscParameterTypeFrameRate,VAEncMiscParameterFrameRate{.framerate = 90} );
VABufferID hrdb = CreateMiscParameterBuffer(VAEncMiscParameterTypeHRD, VAEncMiscParameterHRD{});
VABufferID rcb = CreateMiscParameterBuffer(VAEncMiscParameterTypeRateControl, VAEncMiscParameterRateControl{
.bits_per_second = 150*1024*1024,
.target_percentage = 66,
.window_size = 1000,
.initial_qp = 25
});
VABufferID ppps[2];
VABufferID psps[2];
CreatePackedBuffer(psps[0], psps[1], VAEncPackedHeaderSequence, PackedSPSH264(&seq,(1 << 1) /* Annex A.2.2 */));
CreatePackedBuffer(ppps[0], ppps[1], VAEncPackedHeaderPicture, PackedPPSH264(&pic));
VABufferID pslice[2];
CreatePackedBuffer(pslice[0], pslice[1], VAEncPackedHeaderSlice,
PackedSliceH264<SLICE_TYPE_I>(&seq, &pic, &slice));
VABufferID buffers[] = {seqb, psps[0], psps[1], fpsb, hrdb, rcb, ppps[0], ppps[1], picb, pslice[0], pslice[1], sliceb };
vaRenderPicture(dpy, ctx, buffers, sizeof(buffers) / sizeof(buffers[0]) );
status = vaEndPicture(dpy, ctx);
if(status != VA_STATUS_SUCCESS)
abort();
PushOutput(output);
/*status = vaSyncSurface(dpy, inputFrames[idx]);
status = vaSyncBuffer(dpy, output, 1000000000);
WriteOutput(output);*/
for(int i = 0; i < sizeof(buffers) / sizeof(buffers[0]); i++)
vaDestroyBuffer(dpy, buffers[i]);
frame_count++;
}
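// Encode one P frame: reuses the SPS/PPS sent with the last IDR and only
// resubmits picture/slice parameters plus the packed slice header;
// RefPicList0[0] points at the previous frame's reconstructed surface
// (reference_picture[(frame_count - 1) % 2]).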
inline void EncodeP(int idx)
{
VAStatus status = vaBeginPicture(dpy, ctx, inputFrames[idx]);
// todo: chain slice/output buffers, patch POC in slice buffers???
slice.slice_type = SLICE_TYPE_P;
slice.RefPicList0[0].frame_idx = frame_count - 1;
slice.RefPicList0[0].picture_id = reference_picture[(frame_count - 1)% 2];
slice.RefPicList0[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
slice.pic_order_cnt_lsb = seq.seq_fields.bits.pic_order_cnt_type == 0? frame_count: 0;
VABufferID sliceb = CreateParamererBuffer(VAEncSliceParameterBufferType, slice);
VABufferID output = CreateOutputBuf(width * height);
pic.CurrPic.picture_id = reference_picture[frame_count % 2];
pic.CurrPic.frame_idx = frame_count;
pic.CurrPic.TopFieldOrderCnt = seq.seq_fields.bits.pic_order_cnt_type == 0? frame_count:0;
pic.ReferenceFrames[0].picture_id = reference_picture[(frame_count + 1) % 2];
pic.ReferenceFrames[0].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
pic.ReferenceFrames[0].frame_idx = frame_count - 1;
pic.coded_buf = output;
pic.pic_fields.bits.idr_pic_flag = 0;
VABufferID picb = CreateParamererBuffer(VAEncPictureParameterBufferType, pic);
VABufferID pslice[2];
CreatePackedBuffer(pslice[0], pslice[1], VAEncPackedHeaderSlice,
PackedSliceH264<SLICE_TYPE_P>(&seq, &pic, &slice));
VABufferID buffers[] = {picb, pslice[0], pslice[1], sliceb };
vaRenderPicture(dpy, ctx, buffers, sizeof(buffers) / sizeof(buffers[0]) );
status = vaEndPicture(dpy, ctx);
PushOutput(output);
/*status = vaSyncSurface(dpy, inputFrames[idx]);
status = vaSyncBuffer(dpy, output, 1000000000);
WriteOutput(output);*/
for(int i = 0; i < sizeof(buffers) / sizeof(buffers[0]); i++)
vaDestroyBuffer(dpy, buffers[i]);
frame_count++;
}
};
#endif // VAAPI_ENCODER_H264_H

vaapi_encoder_hevc.h

@@ -2,7 +2,7 @@
#define VAAPI_ENCODER_HEVC_H
#include "vaapi_encoder.h"
-#include <cstring>
+#include <string.h>
#include <va/va_enc_hevc.h>
#define NAL_REF_IDC_NONE 0
@@ -16,16 +16,16 @@ enum {
SLICE_P = 1,
SLICE_I = 2,
};
-#define IS_I_SLICE(type) (SLICE_I == (type))
-#define IS_P_SLICE(type) (SLICE_P == (type))
-#define IS_B_SLICE(type) (SLICE_B == (type))
+#define HEVC_IS_I_SLICE(type) (SLICE_I == (type))
+#define HEVC_IS_P_SLICE(type) (SLICE_P == (type))
+#define HEVC_IS_B_SLICE(type) (SLICE_B == (type))
#define ENTROPY_MODE_CAVLC 0
#define ENTROPY_MODE_CABAC 1
-#define PROFILE_IDC_MAIN 1
-#define PROFILE_IDC_MAIN10 2
+#define HEVC_PROFILE_IDC_MAIN 1
+#define HEVC_PROFILE_IDC_MAIN10 2
enum NALUType {
NALU_TRAIL_N = 0x00, // Coded slice segment of a non-TSA, non-STSA trailing picture - slice_segment_layer_rbsp, VLC
@@ -688,7 +688,7 @@ struct VaapiEncoderHEVC: VaapiEncoder
// (none?)
// profile
seq.general_level_idc = 120;
-seq.general_profile_idc = p010? PROFILE_IDC_MAIN10: PROFILE_IDC_MAIN;
+seq.general_profile_idc = p010? HEVC_PROFILE_IDC_MAIN10: HEVC_PROFILE_IDC_MAIN;
// pps
pic.pic_fields.bits.dependent_slice_segments_enabled_flag = 1; // seems to work either way

(main compute application source)

@@ -36,8 +36,8 @@
#include <stdlib.h>
#include <unistd.h>
//#include "vaapi-recorder.h"
#include "vaapi_encoder_h264.h"
#include "vaapi_encoder_hevc.h"
struct DrmHelper
{
char *mapped_buffer;
@@ -1328,7 +1328,8 @@ struct ComputeApplication {
*/
//VK_CHECK_RESULT(vkWaitForFences(device, 1, &fence, VK_TRUE, 100000000000));
}
-void run() {
+template <typename Codec>
+void run(bool p010, const char *filename) {
// Buffer size of the storage buffer that will contain the rendered mandelbrot set.
//bufferSize = sizeof(Pixel) * WIDTH * HEIGHT;
@@ -1347,11 +1348,11 @@ struct ComputeApplication {
uint32_t size, offset, pitch1, pitch2;
int fd[CHAIN_SIZE];
uint64_t modifiers[32];
-bool p010 = true;
int count = getAvailiableModifiersList(modifiers, 32, p010?VK_FORMAT_R16_UNORM:VK_FORMAT_R8_UNORM);
//auto *r = vaapi_recorder_create5(drm_fd, WIDTH, HEIGHT, "out.264", fd, &mod, &size, &offset, &pitch1, &pitch2, modifiers, count);
-VaapiEncoderHEVC enc = {};
-enc.Setup(drm_fd, WIDTH, HEIGHT, "out.265", fd, &mod, &size, &offset, &pitch1, &pitch2, modifiers, count, p010);
+Codec enc = {};
+enc.Setup(drm_fd, WIDTH, HEIGHT, filename, fd, &mod, &size, &offset, &pitch1, &pitch2, modifiers, count, p010);
for(int i = 0; i < CHAIN_SIZE; i++)
{
createUBO(i);
@@ -1464,10 +1465,17 @@ struct ComputeApplication {
}
};
-int main() {
+int main(int argc, char **argv) {
ComputeApplication app;
-app.run();
+bool hevc = argc > 1;
+bool p010 = false;
+if(hevc)
+p010 = atoi(argv[1]);
+if(hevc)
+app.run<VaapiEncoderHEVC>(p010, "out.265");
+else
+app.run<VaapiEncoderH264>(false, "out.264");
return EXIT_SUCCESS;
}