multimedia/ffmpeg: backport rav1e support

https://www.reddit.com/r/AV1/comments/dubsv5/rav1e_support_lands_in_ffmpeg_master/
Jan Beich 2019-11-11 12:36:02 +00:00
parent 653760085c
commit 646e4032d5
Notes: svn2git 2021-03-31 03:12:20 +00:00
svn path=/head/; revision=517258
6 changed files with 766 additions and 3 deletions

View file

@@ -214,6 +214,7 @@
SUBDIR += libquvi-scripts
SUBDIR += libquvi-scripts09
SUBDIR += libquvi09
SUBDIR += librav1e
SUBDIR += librtmp
SUBDIR += libsmacker
SUBDIR += libtheora

View file

@@ -40,7 +40,7 @@ OPTIONS_DEFINE= ALSA AMR_NB AMR_WB AOM ARIBB24 ASS BEIGNET BS2B CACA CDIO CELT C
FREETYPE FREI0R FRIBIDI GME GSM ICONV ILBC JACK KLVANC KVAZAAR LADSPA \
LAME LENSFUN LIBBLURAY LIBRSVG2 LIBXML2 LV2 MODPLUG MYSOFA OPENAL OPENCL OPENCV OPENGL \
OPENH264 OPENJPEG OPENMPT OPTIMIZED_CFLAGS OPUS POCKETSPHINX PULSEAUDIO \
RUBBERBAND RTCPU SDL SMB SNAPPY SNDIO SOXR SPEEX SRT SSH TENSORFLOW \
RAV1E RUBBERBAND RTCPU SDL SMB SNAPPY SNDIO SOXR SPEEX SRT SSH TENSORFLOW \
TESSERACT THEORA TWOLAME V4L VAAPI VAPOURSYNTH VDPAU VIDSTAB VMAF VORBIS \
VO_AMRWBENC VPX WAVPACK WEBP X264 X265 XAVS2 XCB XVID \
XVIDEO ZIMG ZMQ ZVBI
@@ -117,6 +117,7 @@ NONFREE_DESC= Allow use of nonfree code
OPENMPT_DESC= Decoding tracked files via libopenmpt
OPENH264_DESC= H.264 video codec support via OpenH264
POCKETSPHINX_DESC= Automatic Speech Recognition via PocketSphinx
RAV1E_DESC= AV1 encoding via librav1e
RUBBERBAND_DESC=Time-stretching and pitch-shifting with librubberband
RTCPU_DESC= Detect CPU capabilities at runtime
RTMP_DESC= RTMP(T)E protocol support
@@ -391,6 +392,10 @@ POCKETSPHINX_BROKEN= pocketsphinx < 5prealpha is not supported
PULSEAUDIO_LIB_DEPENDS= libpulse.so:audio/pulseaudio
PULSEAUDIO_CONFIGURE_ENABLE= libpulse
# rav1e
RAV1E_LIB_DEPENDS= librav1e.so:multimedia/librav1e
RAV1E_CONFIGURE_ENABLE= librav1e
# rubberband
RUBBERBAND_LIB_DEPENDS= librubberband.so:audio/rubberband
RUBBERBAND_CONFIGURE_ENABLE= librubberband

View file

@@ -0,0 +1,722 @@
https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/d8bf24459b69
https://git.ffmpeg.org/gitweb/ffmpeg.git/commitdiff/3a84081cbd98
--- configure.orig 2019-11-11 11:47:47 UTC
+++ configure
@@ -254,6 +254,7 @@ External library support:
--enable-libopenmpt enable decoding tracked files via libopenmpt [no]
--enable-libopus enable Opus de/encoding via libopus [no]
--enable-libpulse enable Pulseaudio input via libpulse [no]
+ --enable-librav1e enable AV1 encoding via rav1e [no]
--enable-librsvg enable SVG rasterization via librsvg [no]
--enable-librubberband enable rubberband needed for rubberband filter [no]
--enable-librtmp enable RTMP[E] support via librtmp [no]
@@ -1785,6 +1786,7 @@ EXTERNAL_LIBRARY_LIST="
libopenmpt
libopus
libpulse
+ librav1e
librsvg
librtmp
libshine
@@ -3187,6 +3189,8 @@ libopenmpt_demuxer_deps="libopenmpt"
libopus_decoder_deps="libopus"
libopus_encoder_deps="libopus"
libopus_encoder_select="audio_frame_queue"
+librav1e_encoder_deps="librav1e"
+librav1e_encoder_select="extract_extradata_bsf"
librsvg_decoder_deps="librsvg"
libshine_encoder_deps="libshine"
libshine_encoder_select="audio_frame_queue"
@@ -6255,6 +6259,7 @@ enabled libopus && {
}
}
enabled libpulse && require_pkg_config libpulse libpulse pulse/pulseaudio.h pa_context_new
+enabled librav1e && require_pkg_config librav1e "rav1e >= 0.1.0" rav1e.h rav1e_context_new
enabled librsvg && require_pkg_config librsvg librsvg-2.0 librsvg-2.0/librsvg/rsvg.h rsvg_handle_render_cairo
enabled librtmp && require_pkg_config librtmp librtmp librtmp/rtmp.h RTMP_Socket
enabled librubberband && require_pkg_config librubberband "rubberband >= 1.8.1" rubberband/rubberband-c.h rubberband_new -lstdc++ && append librubberband_extralibs "-lstdc++"
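
The configure check added above (require_pkg_config librav1e "rav1e >= 0.1.0" rav1e.h rav1e_context_new) locates rav1e through pkg-config and then verifies that rav1e.h is usable and that the rav1e_context_new symbol links. In effect the probe only passes if a trivial C program along these lines builds against the flags pkg-config reports for rav1e; this is a sketch of the requirement, not the literal test program configure generates:

/* Build roughly as: cc probe.c $(pkg-config --cflags --libs rav1e)
 * Sketch only: mirrors what the configure probe requires, using the
 * same rav1e calls the new encoder wrapper itself uses. */
#include <rav1e.h>

int main(void)
{
    RaConfig *cfg = rav1e_config_default();     /* default encoder settings */
    if (!cfg)
        return 1;

    RaContext *ctx = rav1e_context_new(cfg);    /* the symbol configure checks for */
    if (ctx)
        rav1e_context_unref(ctx);
    rav1e_config_unref(cfg);
    return ctx ? 0 : 1;
}
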
--- doc/encoders.texi.orig 2019-08-05 20:52:21 UTC
+++ doc/encoders.texi
@@ -1378,6 +1378,49 @@ makes it possible to store non-rgb pix_fmts.
@end table
+@section librav1e
+
+rav1e AV1 encoder wrapper.
+
+Requires the presence of the rav1e headers and library during configuration.
+You need to explicitly configure the build with @code{--enable-librav1e}.
+
+@subsection Options
+
+@table @option
+@item qmax
+Sets the maximum quantizer to use when using bitrate mode.
+
+@item qmin
+Sets the minimum quantizer to use when using bitrate mode.
+
+@item qp
+Uses quantizer mode to encode at the given quantizer.
+
+@item speed
+Selects the speed preset (0-10) to encode with.
+
+@item tiles
+Selects how many tiles to encode with.
+
+@item tile-rows
+Selects how many rows of tiles to encode with.
+
+@item tile-columns
+Selects how many columns of tiles to encode with.
+
+@item rav1e-params
+Set rav1e options using a list of @var{key}=@var{value} pairs separated
+by ":". See @command{rav1e --help} for a list of options.
+
+For example to specify librav1e encoding options with @option{-rav1e-params}:
+
+@example
+ffmpeg -i input -c:v librav1e -b:v 500K -rav1e-params speed=5:low_latency=true output.mp4
+@end example
+
+@end table
+
@section libaom-av1
libaom AV1 encoder wrapper.
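
The librav1e options documented above (qp, speed, tiles, rav1e-params, ...) are private options of the wrapper, so from the libavcodec API they are set on the encoder's private data with av_opt_set() (or passed as an AVDictionary to avcodec_open2()) rather than as AVCodecContext fields. A minimal caller-side sketch, assuming a 640x480 yuv420p input at 30 fps; the numbers and the open_librav1e() name are illustrative only:

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

static AVCodecContext *open_librav1e(void)
{
    /* Only present when FFmpeg was configured with --enable-librav1e. */
    const AVCodec *codec = avcodec_find_encoder_by_name("librav1e");
    if (!codec)
        return NULL;

    AVCodecContext *enc = avcodec_alloc_context3(codec);
    if (!enc)
        return NULL;

    enc->width     = 640;
    enc->height    = 480;
    enc->pix_fmt   = AV_PIX_FMT_YUV420P;
    enc->time_base = (AVRational){ 1, 30 };
    enc->bit_rate  = 500000;                 /* bitrate mode, like -b:v 500K */

    /* Wrapper-private options from the table above. */
    av_opt_set(enc->priv_data, "speed", "5", 0);
    av_opt_set(enc->priv_data, "rav1e-params", "low_latency=true", 0);

    if (avcodec_open2(enc, codec, NULL) < 0) {
        avcodec_free_context(&enc);
        return NULL;
    }
    return enc;
}
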
--- doc/general.texi.orig 2019-08-05 20:52:21 UTC
+++ doc/general.texi
@@ -243,6 +243,13 @@ FFmpeg can use the OpenJPEG libraries for decoding/enc
instructions. To enable using OpenJPEG in FFmpeg, pass @code{--enable-libopenjpeg} to
@file{./configure}.
+@section rav1e
+
+FFmpeg can make use of rav1e (Rust AV1 Encoder) via its C bindings to encode videos.
+Go to @url{https://github.com/xiph/rav1e/} and follow the instructions to build
+the C library. To enable using rav1e in FFmpeg, pass @code{--enable-librav1e}
+to @file{./configure}.
+
@section TwoLAME
FFmpeg can make use of the TwoLAME library for MP2 encoding.
--- libavcodec/Makefile.orig 2019-11-11 11:47:47 UTC
+++ libavcodec/Makefile
@@ -988,6 +988,7 @@ OBJS-$(CONFIG_LIBOPUS_DECODER) += libopusde
vorbis_data.o
OBJS-$(CONFIG_LIBOPUS_ENCODER) += libopusenc.o libopus.o \
vorbis_data.o
+OBJS-$(CONFIG_LIBRAV1E_ENCODER) += librav1e.o
OBJS-$(CONFIG_LIBSHINE_ENCODER) += libshine.o
OBJS-$(CONFIG_LIBSPEEX_DECODER) += libspeexdec.o
OBJS-$(CONFIG_LIBSPEEX_ENCODER) += libspeexenc.o
--- libavcodec/allcodecs.c.orig 2019-11-11 11:47:47 UTC
+++ libavcodec/allcodecs.c
@@ -703,6 +703,7 @@ extern AVCodec ff_libopenjpeg_encoder;
extern AVCodec ff_libopenjpeg_decoder;
extern AVCodec ff_libopus_encoder;
extern AVCodec ff_libopus_decoder;
+extern AVCodec ff_librav1e_encoder;
extern AVCodec ff_librsvg_decoder;
extern AVCodec ff_libshine_encoder;
extern AVCodec ff_libspeex_encoder;
--- libavcodec/librav1e.c.orig 2019-11-11 11:47:47 UTC
+++ libavcodec/librav1e.c
@@ -0,0 +1,593 @@
+/*
+ * librav1e encoder
+ *
+ * Copyright (c) 2019 Derek Buitenhuis
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include <rav1e.h>
+
+#include "libavutil/internal.h"
+#include "libavutil/avassert.h"
+#include "libavutil/base64.h"
+#include "libavutil/common.h"
+#include "libavutil/mathematics.h"
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+#include "avcodec.h"
+#include "internal.h"
+
+typedef struct librav1eContext {
+ const AVClass *class;
+
+ RaContext *ctx;
+ AVBSFContext *bsf;
+
+ uint8_t *pass_data;
+ size_t pass_pos;
+ int pass_size;
+
+ char *rav1e_opts;
+ int quantizer;
+ int speed;
+ int tiles;
+ int tile_rows;
+ int tile_cols;
+} librav1eContext;
+
+static inline RaPixelRange range_map(enum AVPixelFormat pix_fmt, enum AVColorRange range)
+{
+ switch (pix_fmt) {
+ case AV_PIX_FMT_YUVJ420P:
+ case AV_PIX_FMT_YUVJ422P:
+ case AV_PIX_FMT_YUVJ444P:
+ return RA_PIXEL_RANGE_FULL;
+ }
+
+ switch (range) {
+ case AVCOL_RANGE_JPEG:
+ return RA_PIXEL_RANGE_FULL;
+ case AVCOL_RANGE_MPEG:
+ default:
+ return RA_PIXEL_RANGE_LIMITED;
+ }
+}
+
+static inline RaChromaSampling pix_fmt_map(enum AVPixelFormat pix_fmt)
+{
+ switch (pix_fmt) {
+ case AV_PIX_FMT_YUV420P:
+ case AV_PIX_FMT_YUVJ420P:
+ case AV_PIX_FMT_YUV420P10:
+ case AV_PIX_FMT_YUV420P12:
+ return RA_CHROMA_SAMPLING_CS420;
+ case AV_PIX_FMT_YUV422P:
+ case AV_PIX_FMT_YUVJ422P:
+ case AV_PIX_FMT_YUV422P10:
+ case AV_PIX_FMT_YUV422P12:
+ return RA_CHROMA_SAMPLING_CS422;
+ case AV_PIX_FMT_YUV444P:
+ case AV_PIX_FMT_YUVJ444P:
+ case AV_PIX_FMT_YUV444P10:
+ case AV_PIX_FMT_YUV444P12:
+ return RA_CHROMA_SAMPLING_CS444;
+ default:
+ av_assert0(0);
+ }
+}
+
+static inline RaChromaSamplePosition chroma_loc_map(enum AVChromaLocation chroma_loc)
+{
+ switch (chroma_loc) {
+ case AVCHROMA_LOC_LEFT:
+ return RA_CHROMA_SAMPLE_POSITION_VERTICAL;
+ case AVCHROMA_LOC_TOPLEFT:
+ return RA_CHROMA_SAMPLE_POSITION_COLOCATED;
+ default:
+ return RA_CHROMA_SAMPLE_POSITION_UNKNOWN;
+ }
+}
+
+static int get_stats(AVCodecContext *avctx, int eos)
+{
+ librav1eContext *ctx = avctx->priv_data;
+ RaData* buf = rav1e_twopass_out(ctx->ctx);
+ if (!buf)
+ return 0;
+
+ if (!eos) {
+ uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,
+ ctx->pass_pos + buf->len);
+ if (!tmp) {
+ rav1e_data_unref(buf);
+ return AVERROR(ENOMEM);
+ }
+
+ ctx->pass_data = tmp;
+ memcpy(ctx->pass_data + ctx->pass_pos, buf->data, buf->len);
+ ctx->pass_pos += buf->len;
+ } else {
+ size_t b64_size = AV_BASE64_SIZE(ctx->pass_pos);
+
+ memcpy(ctx->pass_data, buf->data, buf->len);
+
+ avctx->stats_out = av_malloc(b64_size);
+ if (!avctx->stats_out) {
+ rav1e_data_unref(buf);
+ return AVERROR(ENOMEM);
+ }
+
+ av_base64_encode(avctx->stats_out, b64_size, ctx->pass_data, ctx->pass_pos);
+
+ av_freep(&ctx->pass_data);
+ }
+
+ rav1e_data_unref(buf);
+
+ return 0;
+}
+
+static int set_stats(AVCodecContext *avctx)
+{
+ librav1eContext *ctx = avctx->priv_data;
+ int ret = 1;
+
+ while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {
+ ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);
+ if (ret < 0)
+ return AVERROR_EXTERNAL;
+ ctx->pass_pos += ret;
+ }
+
+ return 0;
+}
+
+static av_cold int librav1e_encode_close(AVCodecContext *avctx)
+{
+ librav1eContext *ctx = avctx->priv_data;
+
+ if (ctx->ctx) {
+ rav1e_context_unref(ctx->ctx);
+ ctx->ctx = NULL;
+ }
+
+ av_bsf_free(&ctx->bsf);
+ av_freep(&ctx->pass_data);
+
+ return 0;
+}
+
+static av_cold int librav1e_encode_init(AVCodecContext *avctx)
+{
+ librav1eContext *ctx = avctx->priv_data;
+ const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
+ RaConfig *cfg = NULL;
+ int rret;
+ int ret = 0;
+
+ cfg = rav1e_config_default();
+ if (!cfg) {
+ av_log(avctx, AV_LOG_ERROR, "Could not allocate rav1e config.\n");
+ return AVERROR_EXTERNAL;
+ }
+
+ rav1e_config_set_time_base(cfg, (RaRational) {
+ avctx->time_base.num * avctx->ticks_per_frame,
+ avctx->time_base.den
+ });
+
+ if (avctx->flags & AV_CODEC_FLAG_PASS2) {
+ if (!avctx->stats_in) {
+ av_log(avctx, AV_LOG_ERROR, "No stats file provided for second pass.\n");
+ ret = AVERROR(EINVAL);
+ goto end;
+ }
+
+ ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;
+ ctx->pass_data = av_malloc(ctx->pass_size);
+ if (!ctx->pass_data) {
+ av_log(avctx, AV_LOG_ERROR, "Could not allocate stats buffer.\n");
+ ret = AVERROR(ENOMEM);
+ goto end;
+ }
+
+ ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);
+ if (ctx->pass_size < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid pass file.\n");
+ ret = AVERROR(EINVAL);
+ goto end;
+ }
+ }
+
+ if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
+ const AVBitStreamFilter *filter = av_bsf_get_by_name("extract_extradata");
+ int bret;
+
+ if (!filter) {
+ av_log(avctx, AV_LOG_ERROR, "extract_extradata bitstream filter "
+ "not found. This is a bug, please report it.\n");
+ ret = AVERROR_BUG;
+ goto end;
+ }
+
+ bret = av_bsf_alloc(filter, &ctx->bsf);
+ if (bret < 0) {
+ ret = bret;
+ goto end;
+ }
+
+ bret = avcodec_parameters_from_context(ctx->bsf->par_in, avctx);
+ if (bret < 0) {
+ ret = bret;
+ goto end;
+ }
+
+ bret = av_bsf_init(ctx->bsf);
+ if (bret < 0) {
+ ret = bret;
+ goto end;
+ }
+ }
+
+ if (ctx->rav1e_opts) {
+ AVDictionary *dict = NULL;
+ AVDictionaryEntry *en = NULL;
+
+ if (!av_dict_parse_string(&dict, ctx->rav1e_opts, "=", ":", 0)) {
+ while (en = av_dict_get(dict, "", en, AV_DICT_IGNORE_SUFFIX)) {
+ int parse_ret = rav1e_config_parse(cfg, en->key, en->value);
+ if (parse_ret < 0)
+ av_log(avctx, AV_LOG_WARNING, "Invalid value for %s: %s.\n", en->key, en->value);
+ }
+ av_dict_free(&dict);
+ }
+ }
+
+ rret = rav1e_config_parse_int(cfg, "width", avctx->width);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid width passed to rav1e.\n");
+ ret = AVERROR_INVALIDDATA;
+ goto end;
+ }
+
+ rret = rav1e_config_parse_int(cfg, "height", avctx->height);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Invalid height passed to rav1e.\n");
+ ret = AVERROR_INVALIDDATA;
+ goto end;
+ }
+
+ rret = rav1e_config_parse_int(cfg, "threads", avctx->thread_count);
+ if (rret < 0)
+ av_log(avctx, AV_LOG_WARNING, "Invalid number of threads, defaulting to auto.\n");
+
+ if (ctx->speed >= 0) {
+ rret = rav1e_config_parse_int(cfg, "speed", ctx->speed);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set speed preset.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ /* rav1e handles precedence between 'tiles' and cols/rows for us. */
+ if (ctx->tiles > 0) {
+ rret = rav1e_config_parse_int(cfg, "tiles", ctx->tiles);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set number of tiles to encode with.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+ if (ctx->tile_rows > 0) {
+ rret = rav1e_config_parse_int(cfg, "tile_rows", ctx->tile_rows);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set number of tile rows to encode with.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+ if (ctx->tile_cols > 0) {
+ rret = rav1e_config_parse_int(cfg, "tile_cols", ctx->tile_cols);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set number of tile cols to encode with.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ if (avctx->gop_size > 0) {
+ rret = rav1e_config_parse_int(cfg, "key_frame_interval", avctx->gop_size);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set max keyint.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ if (avctx->keyint_min > 0) {
+ rret = rav1e_config_parse_int(cfg, "min_key_frame_interval", avctx->keyint_min);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set min keyint.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ if (avctx->bit_rate && ctx->quantizer < 0) {
+ int max_quantizer = avctx->qmax >= 0 ? avctx->qmax : 255;
+
+ rret = rav1e_config_parse_int(cfg, "quantizer", max_quantizer);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set max quantizer.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+
+ if (avctx->qmin >= 0) {
+ rret = rav1e_config_parse_int(cfg, "min_quantizer", avctx->qmin);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set min quantizer.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ rret = rav1e_config_parse_int(cfg, "bitrate", avctx->bit_rate);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set bitrate.\n");
+ ret = AVERROR_INVALIDDATA;
+ goto end;
+ }
+ } else if (ctx->quantizer >= 0) {
+ if (avctx->bit_rate)
+ av_log(avctx, AV_LOG_WARNING, "Both bitrate and quantizer specified. Using quantizer mode.");
+
+ rret = rav1e_config_parse_int(cfg, "quantizer", ctx->quantizer);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not set quantizer.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+ }
+
+ rret = rav1e_config_set_pixel_format(cfg, desc->comp[0].depth,
+ pix_fmt_map(avctx->pix_fmt),
+ chroma_loc_map(avctx->chroma_sample_location),
+ range_map(avctx->pix_fmt, avctx->color_range));
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Failed to set pixel format properties.\n");
+ ret = AVERROR_INVALIDDATA;
+ goto end;
+ }
+
+ /* rav1e's colorspace enums match standard values. */
+ rret = rav1e_config_set_color_description(cfg, (RaMatrixCoefficients) avctx->colorspace,
+ (RaColorPrimaries) avctx->color_primaries,
+ (RaTransferCharacteristics) avctx->color_trc);
+ if (rret < 0) {
+ av_log(avctx, AV_LOG_WARNING, "Failed to set color properties.\n");
+ if (avctx->err_recognition & AV_EF_EXPLODE) {
+ ret = AVERROR_INVALIDDATA;
+ goto end;
+ }
+ }
+
+ ctx->ctx = rav1e_context_new(cfg);
+ if (!ctx->ctx) {
+ av_log(avctx, AV_LOG_ERROR, "Failed to create rav1e encode context.\n");
+ ret = AVERROR_EXTERNAL;
+ goto end;
+ }
+
+ ret = 0;
+
+end:
+
+ rav1e_config_unref(cfg);
+
+ return ret;
+}
+
+static int librav1e_send_frame(AVCodecContext *avctx, const AVFrame *frame)
+{
+ librav1eContext *ctx = avctx->priv_data;
+ RaFrame *rframe = NULL;
+ int ret;
+
+ if (frame) {
+ const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
+
+ rframe = rav1e_frame_new(ctx->ctx);
+ if (!rframe) {
+ av_log(avctx, AV_LOG_ERROR, "Could not allocate new rav1e frame.\n");
+ return AVERROR(ENOMEM);
+ }
+
+ for (int i = 0; i < desc->nb_components; i++) {
+ int shift = i ? desc->log2_chroma_h : 0;
+ int bytes = desc->comp[0].depth == 8 ? 1 : 2;
+ rav1e_frame_fill_plane(rframe, i, frame->data[i],
+ (frame->height >> shift) * frame->linesize[i],
+ frame->linesize[i], bytes);
+ }
+ }
+
+ ret = rav1e_send_frame(ctx->ctx, rframe);
+ if (rframe)
+ rav1e_frame_unref(rframe); /* No need to unref if flushing. */
+
+ switch (ret) {
+ case RA_ENCODER_STATUS_SUCCESS:
+ break;
+ case RA_ENCODER_STATUS_ENOUGH_DATA:
+ return AVERROR(EAGAIN);
+ case RA_ENCODER_STATUS_FAILURE:
+ av_log(avctx, AV_LOG_ERROR, "Could not send frame: %s\n", rav1e_status_to_str(ret));
+ return AVERROR_EXTERNAL;
+ default:
+ av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_send_frame: %s\n", ret, rav1e_status_to_str(ret));
+ return AVERROR_UNKNOWN;
+ }
+
+ return 0;
+}
+
+static int librav1e_receive_packet(AVCodecContext *avctx, AVPacket *pkt)
+{
+ librav1eContext *ctx = avctx->priv_data;
+ RaPacket *rpkt = NULL;
+ int ret;
+
+retry:
+
+ if (avctx->flags & AV_CODEC_FLAG_PASS1) {
+ int sret = get_stats(avctx, 0);
+ if (sret < 0)
+ return sret;
+ } else if (avctx->flags & AV_CODEC_FLAG_PASS2) {
+ int sret = set_stats(avctx);
+ if (sret < 0)
+ return sret;
+ }
+
+ ret = rav1e_receive_packet(ctx->ctx, &rpkt);
+ switch (ret) {
+ case RA_ENCODER_STATUS_SUCCESS:
+ break;
+ case RA_ENCODER_STATUS_LIMIT_REACHED:
+ if (avctx->flags & AV_CODEC_FLAG_PASS1) {
+ int sret = get_stats(avctx, 1);
+ if (sret < 0)
+ return sret;
+ }
+ return AVERROR_EOF;
+ case RA_ENCODER_STATUS_ENCODED:
+ if (avctx->internal->draining)
+ goto retry;
+ return AVERROR(EAGAIN);
+ case RA_ENCODER_STATUS_NEED_MORE_DATA:
+ if (avctx->internal->draining) {
+ av_log(avctx, AV_LOG_ERROR, "Unexpected error when receiving packet after EOF.\n");
+ return AVERROR_EXTERNAL;
+ }
+ return AVERROR(EAGAIN);
+ case RA_ENCODER_STATUS_FAILURE:
+ av_log(avctx, AV_LOG_ERROR, "Could not encode frame: %s\n", rav1e_status_to_str(ret));
+ return AVERROR_EXTERNAL;
+ default:
+ av_log(avctx, AV_LOG_ERROR, "Unknown return code %d from rav1e_receive_packet: %s\n", ret, rav1e_status_to_str(ret));
+ return AVERROR_UNKNOWN;
+ }
+
+ ret = av_new_packet(pkt, rpkt->len);
+ if (ret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "Could not allocate packet.\n");
+ rav1e_packet_unref(rpkt);
+ return ret;
+ }
+
+ memcpy(pkt->data, rpkt->data, rpkt->len);
+
+ if (rpkt->frame_type == RA_FRAME_TYPE_KEY)
+ pkt->flags |= AV_PKT_FLAG_KEY;
+
+ pkt->pts = pkt->dts = rpkt->input_frameno * avctx->ticks_per_frame;
+ rav1e_packet_unref(rpkt);
+
+ if (avctx->flags & AV_CODEC_FLAG_GLOBAL_HEADER) {
+ int ret = av_bsf_send_packet(ctx->bsf, pkt);
+ if (ret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "extradata extraction send failed.\n");
+ av_packet_unref(pkt);
+ return ret;
+ }
+
+ ret = av_bsf_receive_packet(ctx->bsf, pkt);
+ if (ret < 0) {
+ av_log(avctx, AV_LOG_ERROR, "extradata extraction receive failed.\n");
+ av_packet_unref(pkt);
+ return ret;
+ }
+ }
+
+ return 0;
+}
+
+#define OFFSET(x) offsetof(librav1eContext, x)
+#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
+
+static const AVOption options[] = {
+ { "qp", "use constant quantizer mode", OFFSET(quantizer), AV_OPT_TYPE_INT, { .i64 = 100 }, -1, 255, VE },
+ { "speed", "what speed preset to use", OFFSET(speed), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, 10, VE },
+ { "tiles", "number of tiles encode with", OFFSET(tiles), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
+ { "tile-rows", "number of tiles rows to encode with", OFFSET(tile_rows), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
+ { "tile-columns", "number of tiles columns to encode with", OFFSET(tile_cols), AV_OPT_TYPE_INT, { .i64 = 0 }, -1, INT64_MAX, VE },
+ { "rav1e-params", "set the rav1e configuration using a :-separated list of key=value parameters", OFFSET(rav1e_opts), AV_OPT_TYPE_STRING, { 0 }, 0, 0, VE },
+ { NULL }
+};
+
+static const AVCodecDefault librav1e_defaults[] = {
+ { "b", "0" },
+ { "g", "0" },
+ { "keyint_min", "0" },
+ { "qmax", "-1" },
+ { "qmin", "-1" },
+ { NULL }
+};
+
+const enum AVPixelFormat librav1e_pix_fmts[] = {
+ AV_PIX_FMT_YUV420P,
+ AV_PIX_FMT_YUVJ420P,
+ AV_PIX_FMT_YUV420P10,
+ AV_PIX_FMT_YUV420P12,
+ AV_PIX_FMT_YUV422P,
+ AV_PIX_FMT_YUVJ422P,
+ AV_PIX_FMT_YUV422P10,
+ AV_PIX_FMT_YUV422P12,
+ AV_PIX_FMT_YUV444P,
+ AV_PIX_FMT_YUVJ444P,
+ AV_PIX_FMT_YUV444P10,
+ AV_PIX_FMT_YUV444P12,
+ AV_PIX_FMT_NONE
+};
+
+static const AVClass class = {
+ .class_name = "librav1e",
+ .item_name = av_default_item_name,
+ .option = options,
+ .version = LIBAVUTIL_VERSION_INT,
+};
+
+AVCodec ff_librav1e_encoder = {
+ .name = "librav1e",
+ .long_name = NULL_IF_CONFIG_SMALL("librav1e AV1"),
+ .type = AVMEDIA_TYPE_VIDEO,
+ .id = AV_CODEC_ID_AV1,
+ .init = librav1e_encode_init,
+ .send_frame = librav1e_send_frame,
+ .receive_packet = librav1e_receive_packet,
+ .close = librav1e_encode_close,
+ .priv_data_size = sizeof(librav1eContext),
+ .priv_class = &class,
+ .defaults = librav1e_defaults,
+ .pix_fmts = librav1e_pix_fmts,
+ .capabilities = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_AUTO_THREADS,
+ .caps_internal = FF_CODEC_CAP_INIT_CLEANUP,
+ .wrapper_name = "librav1e",
+};

View file

@@ -0,0 +1,27 @@
# $FreeBSD$
PORTREVISION= 0
PKGNAMEPREFIX= lib
MASTERDIR= ${.CURDIR}/../rav1e
PLIST= ${.CURDIR}/pkg-plist
BUILD_DEPENDS= cargo-cbuild:devel/cargo-c
PLIST_FILES= # empty
PLIST_SUB= VERSION=${DISTVERSION:C/-.*//}
do-build:
@${CARGO_CARGO_RUN} cbuild \
${CARGO_BUILD_ARGS}
do-install:
@${CARGO_CARGO_RUN} cinstall \
--destdir "${STAGEDIR}" \
${CARGO_BUILD_ARGS} \
${CARGO_INSTALL_ARGS}
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/lib/*.so
.include "${MASTERDIR}/Makefile"

View file

@@ -0,0 +1,6 @@
include/rav1e/rav1e.h
lib/librav1e.a
lib/librav1e.so
lib/librav1e.so.0
lib/librav1e.so.%%VERSION%%
libdata/pkgconfig/rav1e.pc

View file

@@ -10,13 +10,13 @@ COMMENT= Fast and safe AV1 encoder
LICENSE= BSD2CLAUSE
LICENSE_FILE= ${WRKSRC}/LICENSE
BUILD_DEPENDS= ${BUILD_DEPENDS_${ARCH}}
BUILD_DEPENDS+= ${BUILD_DEPENDS_${ARCH}}
BUILD_DEPENDS_amd64= nasm:devel/nasm
USES= cargo
USE_GITHUB= yes
GH_ACCOUNT= xiph
PLIST_FILES= bin/${PORTNAME}
PLIST_FILES?= bin/${PORTNAME}
CARGO_CRATES= adler32-1.0.4 \
aho-corasick-0.7.6 \
@@ -156,7 +156,9 @@ CARGO_CRATES= adler32-1.0.4 \
wincolor-1.0.2 \
y4m-0.4.0
.if !target(post-install)
post-install:
@${STRIP_CMD} ${STAGEDIR}${PREFIX}/bin/${PORTNAME}
.endif
.include <bsd.port.mk>