ffmpeg: update build with version 3.0.

* untested.
Jérôme Duval, 2016-02-19 21:41:54 +01:00
parent 0d6b5d2667
commit f97f2394ca
8 changed files with 224 additions and 104 deletions


@ -54,6 +54,11 @@
#if LIBAVCODEC_VERSION_INT > ((54 << 16) | (50 << 8))
typedef AVCodecID CodecID;
#endif
#if LIBAVCODEC_VERSION_INT < ((55 << 16) | (45 << 8))
#define av_frame_alloc avcodec_alloc_frame
#define av_frame_unref avcodec_get_frame_defaults
#define av_frame_free avcodec_free_frame
#endif
struct wave_format_ex {
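For reference, the version cutoffs used throughout this commit pack libavcodec's major/minor/micro as (major << 16) | (minor << 8) | micro, which is exactly what FFmpeg's AV_VERSION_INT() macro from libavutil/version.h produces. A minimal equivalent of the guard added above, written with that macro (illustrative only, not part of the diff):

    /* Hedged sketch, equivalent to the guard added in this hunk.
       AV_VERSION_INT(a, b, c) expands to ((a) << 16 | (b) << 8 | (c)),
       so the literal ((55 << 16) | (45 << 8)) means libavcodec 55.45.0. */
    #include <libavutil/version.h>

    #if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55, 45, 0)
    #define av_frame_alloc  avcodec_alloc_frame
    #define av_frame_unref  avcodec_get_frame_defaults
    #define av_frame_free   avcodec_free_frame
    #endif

Plain #define aliases are sufficient here because the old and new helpers take the same kind of argument: avcodec_get_frame_defaults()/av_frame_unref() both take an AVFrame*, and avcodec_free_frame()/av_frame_free() both take an AVFrame**.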
@ -92,9 +97,9 @@ AVCodecDecoder::AVCodecDecoder()
fResampleContext(NULL),
fDecodedData(NULL),
fDecodedDataSizeInBytes(0),
fPostProcessedDecodedPicture(avcodec_alloc_frame()),
fRawDecodedPicture(avcodec_alloc_frame()),
fRawDecodedAudio(avcodec_alloc_frame()),
fPostProcessedDecodedPicture(av_frame_alloc()),
fRawDecodedPicture(av_frame_alloc()),
fRawDecodedAudio(av_frame_alloc()),
fCodecInitDone(false),
@ -118,7 +123,7 @@ AVCodecDecoder::AVCodecDecoder()
fChunkBufferSize(0),
fAudioDecodeError(false),
fDecodedDataBuffer(avcodec_alloc_frame()),
fDecodedDataBuffer(av_frame_alloc()),
fDecodedDataBufferOffset(0),
fDecodedDataBufferSize(0)
{
@ -1091,7 +1096,8 @@ AVCodecDecoder::_DecodeSomeAudioFramesIntoEmptyDecodedDataBuffer()
assert(fDecodedDataBufferSize == 0);
assert(fTempPacket.size > 0);
avcodec_get_frame_defaults(fDecodedDataBuffer);
memset(fDecodedDataBuffer, 0, sizeof(AVFrame));
av_frame_unref(fDecodedDataBuffer);
fDecodedDataBufferOffset = 0;
int gotAudioFrame = 0;
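The buffer reset switches from avcodec_get_frame_defaults() to zeroing the frame and calling av_frame_unref(). For context, a hedged sketch of the packet-based audio decode pattern this function is built around (avcodec_decode_audio4(), available since libavcodec 53.25); everything except the libavcodec calls is illustrative:

    // Sketch only, not the plugin's exact code.
    av_frame_unref(decodedFrame);                 // release the previous frame
    int gotFrame = 0;
    int usedBytes = avcodec_decode_audio4(context, decodedFrame, &gotFrame,
        &packet);
    if (usedBytes < 0) {
        // decoding error: drop the packet or bail out
    } else {
        packet.data += usedBytes;                 // consume the decoded bytes
        packet.size -= usedBytes;
        if (gotFrame) {
            // decodedFrame->nb_samples samples are now available in
            // decodedFrame->data[] (planar or packed, per sample_fmt)
        }
    }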
@ -1618,11 +1624,16 @@ AVCodecDecoder::_DeinterlaceAndColorConvertVideoFrame()
avpicture_alloc(&deinterlacedPicture, fContext->pix_fmt, displayWidth,
displayHeight);
#if LIBAVCODEC_VERSION_INT < ((57 << 16) | (0 << 8))
if (avpicture_deinterlace(&deinterlacedPicture, &rawPicture,
fContext->pix_fmt, displayWidth, displayHeight) < 0) {
TRACE("[v] avpicture_deinterlace() - error\n");
} else
useDeinterlacedPicture = true;
#else
// avpicture_deinterlace is gone
TRACE("[v] avpicture_deinterlace() - not implemented\n");
#endif
}
// Some decoders do not set pix_fmt until they have decoded 1 frame
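With libavcodec 57 the deinterlacing branch is compiled out, because avpicture_deinterlace() no longer exists; the usual replacement is the yadif filter from libavfilter. A rough, untested outline of that route, reusing the names visible in this hunk (fContext, displayWidth, displayHeight, fRawDecodedPicture); everything else is assumed:

    // Rough sketch only; error handling omitted, "deinterlacedFrame" is
    // illustrative. Needs libavfilter/avfilter.h, buffersrc.h, buffersink.h
    // (and avfilter_register_all() once, on older libavfilter).
    AVFilterGraph* graph = avfilter_graph_alloc();
    AVFilterContext* source;
    AVFilterContext* yadif;
    AVFilterContext* sink;
    char args[256];
    snprintf(args, sizeof(args),
        "video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=1/1",
        displayWidth, displayHeight, (int)fContext->pix_fmt,
        fContext->time_base.num, fContext->time_base.den);
    avfilter_graph_create_filter(&source, avfilter_get_by_name("buffer"),
        "in", args, NULL, graph);
    avfilter_graph_create_filter(&yadif, avfilter_get_by_name("yadif"),
        "deint", NULL, NULL, graph);
    avfilter_graph_create_filter(&sink, avfilter_get_by_name("buffersink"),
        "out", NULL, NULL, graph);
    avfilter_link(source, 0, yadif, 0);
    avfilter_link(yadif, 0, sink, 0);
    avfilter_graph_config(graph, NULL);
    // Per decoded frame: push with av_buffersrc_add_frame(source,
    // fRawDecodedPicture), then pull the deinterlaced result with
    // av_buffersink_get_frame(sink, deinterlacedFrame).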


@ -35,6 +35,18 @@ extern "C" {
static const size_t kDefaultChunkBufferSize = 2 * 1024 * 1024;
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
#define AV_PIX_FMT_NONE PIX_FMT_NONE
#define AV_CODEC_ID_NONE CODEC_ID_NONE
#define AV_CODEC_ID_MPEG1VIDEO CODEC_ID_MPEG1VIDEO
#define AV_CODEC_ID_MPEG2VIDEO CODEC_ID_MPEG2VIDEO
#endif
#if LIBAVCODEC_VERSION_INT < ((55 << 16) | (45 << 8))
#define av_frame_alloc avcodec_alloc_frame
#define av_frame_unref avcodec_get_frame_defaults
#define av_frame_free avcodec_free_frame
#endif
AVCodecEncoder::AVCodecEncoder(uint32 codecID, int bitRateScale)
:
@ -45,7 +57,7 @@ AVCodecEncoder::AVCodecEncoder(uint32 codecID, int bitRateScale)
fOwnContext(avcodec_alloc_context3(NULL)),
fContext(fOwnContext),
fCodecInitStatus(CODEC_INIT_NEEDED),
fFrame(avcodec_alloc_frame()),
fFrame(av_frame_alloc()),
fSwsContext(NULL),
fFramesWritten(0)
{
@ -149,9 +161,9 @@ AVCodecEncoder::SetUp(const media_format* inputFormat)
return B_BAD_VALUE;
// Codec IDs for raw-formats may need to be figured out here.
if (fCodec == NULL && fCodecID == CODEC_ID_NONE) {
if (fCodec == NULL && fCodecID == AV_CODEC_ID_NONE) {
fCodecID = raw_audio_codec_id_for(*inputFormat);
if (fCodecID != CODEC_ID_NONE)
if (fCodecID != AV_CODEC_ID_NONE)
fCodec = avcodec_find_encoder(fCodecID);
}
if (fCodec == NULL) {
@ -304,7 +316,7 @@ AVCodecEncoder::_Setup()
// TODO: Fix pixel format or setup conversion method...
if (fCodec->pix_fmts != NULL) {
for (int i = 0; fCodec->pix_fmts[i] != PIX_FMT_NONE; i++) {
for (int i = 0; fCodec->pix_fmts[i] != AV_PIX_FMT_NONE; i++) {
// Use the last supported pixel format, which we hope is the
// one with the best quality.
fContext->pix_fmt = fCodec->pix_fmts[i];
@ -465,10 +477,10 @@ AVCodecEncoder::_Setup()
fEncodeParameters.quality, fContext->bit_rate);
// Add some known fixes from the FFmpeg API example:
if (fContext->codec_id == CODEC_ID_MPEG2VIDEO) {
if (fContext->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
// Just for testing, we also add B frames */
fContext->max_b_frames = 2;
} else if (fContext->codec_id == CODEC_ID_MPEG1VIDEO) {
} else if (fContext->codec_id == AV_CODEC_ID_MPEG1VIDEO) {
// Needed to avoid using macroblocks in which some coeffs overflow.
// This does not happen with normal video, it just happens here as
// the motion of the chroma plane does not match the luma plane.
@ -612,7 +624,7 @@ AVCodecEncoder::_EncodeAudio(const uint8* buffer, size_t bufferSize,
int gotPacket = 0;
if (buffer) {
avcodec_get_frame_defaults(&frame);
av_frame_unref(&frame);
frame.nb_samples = frameCount;
@ -699,9 +711,17 @@ AVCodecEncoder::_EncodeVideo(const void* buffer, int64 frameCount,
fDstFrame.linesize);
// Encode one video chunk/frame.
#if LIBAVCODEC_VERSION_INT < ((55 << 16) | (45 << 8))
int usedBytes = avcodec_encode_video(fContext, fChunkBuffer,
kDefaultChunkBufferSize, fFrame);
#else
int gotPacket;
AVPacket pkt;
pkt.data = NULL;
pkt.size = 0;
av_init_packet(&pkt);
int usedBytes = avcodec_encode_video2(fContext, &pkt, fFrame, &gotPacket);
#endif
// avcodec.h says we need to set it.
fFrame->pts++;
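The new branch trades avcodec_encode_video(), which filled a caller-supplied buffer and returned a byte count, for avcodec_encode_video2(), which fills an AVPacket, returns 0 or a negative error code, and reports through gotPacket whether output was produced; the existing usedBytes < 0 check therefore still catches errors. A hedged sketch of the newer call's overall contract (generic names, not the plugin's code):

    // Sketch of the avcodec_encode_video2() pattern, for reference.
    AVPacket packet;
    av_init_packet(&packet);
    packet.data = NULL;            // let the encoder allocate the payload
    packet.size = 0;
    int gotPacket = 0;
    if (avcodec_encode_video2(context, &packet, frame, &gotPacket) == 0
        && gotPacket) {
        // write packet.data / packet.size; packet.pts carries the timestamp
        av_free_packet(&packet);   // av_packet_unref() in newer FFmpeg
    }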
@ -710,6 +730,7 @@ AVCodecEncoder::_EncodeVideo(const void* buffer, int64 frameCount,
return B_ERROR;
}
#if LIBAVCODEC_VERSION_INT < ((55 << 16) | (45 << 8))
// Maybe we need to use this PTS to calculate start_time:
if (fContext->coded_frame->pts != kNoPTSValue) {
TRACE(" codec frame PTS: %lld (codec time_base: %d/%d)\n",
@ -719,6 +740,17 @@ AVCodecEncoder::_EncodeVideo(const void* buffer, int64 frameCount,
TRACE(" codec frame PTS: N/A (codec time_base: %d/%d)\n",
fContext->time_base.num, fContext->time_base.den);
}
#else
// Maybe we need to use this PTS to calculate start_time:
if (pkt.pts != AV_NOPTS_VALUE) {
TRACE(" codec frame PTS: %lld (codec time_base: %d/%d)\n",
pkt.pts, fContext->time_base.num,
fContext->time_base.den);
} else {
TRACE(" codec frame PTS: N/A (codec time_base: %d/%d)\n",
fContext->time_base.num, fContext->time_base.den);
}
#endif
// Setup media_encode_info, most important is the time stamp.
info->start_time = (bigtime_t)(fFramesWritten * 1000000LL
@ -729,7 +761,11 @@ AVCodecEncoder::_EncodeVideo(const void* buffer, int64 frameCount,
info->flags |= B_MEDIA_KEY_FRAME;
// Write the chunk
#if LIBAVCODEC_VERSION_INT < ((55 << 16) | (45 << 8))
ret = WriteChunk(fChunkBuffer, usedBytes, info);
#else
ret = WriteChunk(pkt.data, pkt.size, info);
#endif
if (ret != B_OK) {
TRACE(" error writing chunk: %s\n", strerror(ret));
break;


@ -46,6 +46,14 @@ extern "C" {
#define ERROR(a...) fprintf(stderr, a)
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
#define AV_CODEC_ID_PCM_S16BE CODEC_ID_PCM_S16BE
#define AV_CODEC_ID_PCM_S16LE CODEC_ID_PCM_S16LE
#define AV_CODEC_ID_PCM_U16BE CODEC_ID_PCM_U16BE
#define AV_CODEC_ID_PCM_U16LE CODEC_ID_PCM_U16LE
#define AV_CODEC_ID_PCM_S8 CODEC_ID_PCM_S8
#define AV_CODEC_ID_PCM_U8 CODEC_ID_PCM_U8
#endif
static const int64 kNoPTSValue = AV_NOPTS_VALUE;
@ -980,8 +988,8 @@ AVFormatReader::Stream::Init(int32 virtualIndex)
// Set format family and type depending on codec_type of the stream.
switch (codecContext->codec_type) {
case AVMEDIA_TYPE_AUDIO:
if ((codecContext->codec_id >= CODEC_ID_PCM_S16LE)
&& (codecContext->codec_id <= CODEC_ID_PCM_U8)) {
if ((codecContext->codec_id >= AV_CODEC_ID_PCM_S16LE)
&& (codecContext->codec_id <= AV_CODEC_ID_PCM_U8)) {
TRACE(" raw audio\n");
format->type = B_MEDIA_RAW_AUDIO;
description.family = B_ANY_FORMAT_FAMILY;
@ -1010,37 +1018,37 @@ AVFormatReader::Stream::Init(int32 virtualIndex)
if (format->type == B_MEDIA_RAW_AUDIO) {
// We cannot describe all raw-audio formats, some are unsupported.
switch (codecContext->codec_id) {
case CODEC_ID_PCM_S16LE:
case AV_CODEC_ID_PCM_S16LE:
format->u.raw_audio.format
= media_raw_audio_format::B_AUDIO_SHORT;
format->u.raw_audio.byte_order
= B_MEDIA_LITTLE_ENDIAN;
break;
case CODEC_ID_PCM_S16BE:
case AV_CODEC_ID_PCM_S16BE:
format->u.raw_audio.format
= media_raw_audio_format::B_AUDIO_SHORT;
format->u.raw_audio.byte_order
= B_MEDIA_BIG_ENDIAN;
break;
case CODEC_ID_PCM_U16LE:
case AV_CODEC_ID_PCM_U16LE:
// format->u.raw_audio.format
// = media_raw_audio_format::B_AUDIO_USHORT;
// format->u.raw_audio.byte_order
// = B_MEDIA_LITTLE_ENDIAN;
return B_NOT_SUPPORTED;
break;
case CODEC_ID_PCM_U16BE:
case AV_CODEC_ID_PCM_U16BE:
// format->u.raw_audio.format
// = media_raw_audio_format::B_AUDIO_USHORT;
// format->u.raw_audio.byte_order
// = B_MEDIA_BIG_ENDIAN;
return B_NOT_SUPPORTED;
break;
case CODEC_ID_PCM_S8:
case AV_CODEC_ID_PCM_S8:
format->u.raw_audio.format
= media_raw_audio_format::B_AUDIO_CHAR;
break;
case CODEC_ID_PCM_U8:
case AV_CODEC_ID_PCM_U8:
format->u.raw_audio.format
= media_raw_audio_format::B_AUDIO_UCHAR;
break;


@ -42,6 +42,11 @@ extern "C" {
#define ERROR(a...) fprintf(stderr, a)
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
#define AV_CODEC_ID_NONE CODEC_ID_NONE
#define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P
#endif
static const size_t kIOBufferSize = 64 * 1024;
// TODO: This could depend on the BMediaFile creation flags, IIRC,
@ -124,7 +129,7 @@ AVFormatWriter::StreamCookie::Init(media_format* format,
// TODO: This is a hack for now! Use avcodec_find_encoder_by_name()
// or something similar...
fStream->codec->codec_id = (CodecID)codecInfo->sub_id;
if (fStream->codec->codec_id == CODEC_ID_NONE)
if (fStream->codec->codec_id == AV_CODEC_ID_NONE)
fStream->codec->codec_id = raw_audio_codec_id_for(*format);
// Setup the stream according to the media format...
@ -162,7 +167,7 @@ AVFormatWriter::StreamCookie::Init(media_format* format,
// AVCodec* codec = fStream->codec->codec;
// for (int i = 0; codec->pix_fmts[i] != PIX_FMT_NONE; i++)
// fStream->codec->pix_fmt = codec->pix_fmts[i];
fStream->codec->pix_fmt = PIX_FMT_YUV420P;
fStream->codec->pix_fmt = AV_PIX_FMT_YUV420P;
} else if (format->type == B_MEDIA_RAW_AUDIO) {
fStream->codec->codec_type = AVMEDIA_TYPE_AUDIO;


@ -14,6 +14,12 @@ extern "C" {
#if LIBAVCODEC_VERSION_INT > ((54 << 16) | (50 << 8))
typedef AVCodecID CodecID;
#else
#define AV_CODEC_ID_NONE CODEC_ID_NONE
#define AV_CODEC_ID_PCM_S16LE CODEC_ID_PCM_S16LE
#define AV_CODEC_ID_RAWVIDEO CODEC_ID_RAWVIDEO
#define AV_CODEC_ID_DVD_SUBTITLE CODEC_ID_DVD_SUBTITLE
#define AV_CODEC_ID_ADPCM_IMA_QT CODEC_ID_ADPCM_IMA_QT
#endif
@ -52,13 +58,13 @@ register_avcodec_tags(media_format_family family, const char *avname, int &index
if (tags == NULL)
continue;
for (; tags->id != CODEC_ID_NONE; tags++) {
for (; tags->id != AV_CODEC_ID_NONE; tags++) {
// XXX: we might want to keep some strange PCM codecs too...
// skip unwanted codec tags
if (tags->tag == CODEC_ID_RAWVIDEO
|| (tags->tag >= CODEC_ID_PCM_S16LE
&& tags->tag < CODEC_ID_ADPCM_IMA_QT)
|| tags->tag >= CODEC_ID_DVD_SUBTITLE)
if (tags->tag == AV_CODEC_ID_RAWVIDEO
|| (tags->tag >= AV_CODEC_ID_PCM_S16LE
&& tags->tag < AV_CODEC_ID_ADPCM_IMA_QT)
|| tags->tag >= AV_CODEC_ID_DVD_SUBTITLE)
continue;
if (index >= sMaxFormatCount) {
@ -70,7 +76,7 @@ register_avcodec_tags(media_format_family family, const char *avname, int &index
media_format format;
// Determine media type
if (tags->tag < CODEC_ID_PCM_S16LE)
if (tags->tag < AV_CODEC_ID_PCM_S16LE)
format.type = B_MEDIA_ENCODED_VIDEO;
else
format.type = B_MEDIA_ENCODED_AUDIO;


@ -7,6 +7,30 @@
#include "EncoderTable.h"
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
#define AV_CODEC_ID_NONE CODEC_ID_NONE
#define AV_CODEC_ID_PCM_F32LE CODEC_ID_PCM_F32LE
#define AV_CODEC_ID_PCM_F64LE CODEC_ID_PCM_F64LE
#define AV_CODEC_ID_PCM_S32LE CODEC_ID_PCM_S32LE
#define AV_CODEC_ID_PCM_S16LE CODEC_ID_PCM_S16LE
#define AV_CODEC_ID_PCM_U8 CODEC_ID_PCM_U8
#define AV_CODEC_ID_PCM_F32BE CODEC_ID_PCM_F32BE
#define AV_CODEC_ID_PCM_F64BE CODEC_ID_PCM_F64BE
#define AV_CODEC_ID_PCM_S32BE CODEC_ID_PCM_S32BE
#define AV_CODEC_ID_PCM_S16BE CODEC_ID_PCM_S16BE
#define AV_CODEC_ID_AAC CODEC_ID_AAC
#define AV_CODEC_ID_AC3 CODEC_ID_AC3
#define AV_CODEC_ID_DVVIDEO CODEC_ID_DVVIDEO
#define AV_CODEC_ID_FLAC CODEC_ID_FLAC
#define AV_CODEC_ID_MJPEG CODEC_ID_MJPEG
#define AV_CODEC_ID_MPEG1VIDEO CODEC_ID_MPEG1VIDEO
#define AV_CODEC_ID_MPEG2VIDEO CODEC_ID_MPEG2VIDEO
#define AV_CODEC_ID_MPEG4 CODEC_ID_MPEG4
#define AV_CODEC_ID_THEORA CODEC_ID_THEORA
#endif
const EncoderDescription gEncoderTable[] = {
// Video codecs
{
@ -14,7 +38,7 @@ const EncoderDescription gEncoderTable[] = {
"MPEG-4 video",
"mpeg4",
0,
CODEC_ID_MPEG4,
AV_CODEC_ID_MPEG4,
{ 0 }
},
B_ANY_FORMAT_FAMILY, // TODO: Hm, actually not really /any/ family...
@ -27,7 +51,7 @@ const EncoderDescription gEncoderTable[] = {
"MPEG-1 video",
"mpeg1video",
0,
CODEC_ID_MPEG1VIDEO,
AV_CODEC_ID_MPEG1VIDEO,
{ 0 }
},
B_MPEG_FORMAT_FAMILY,
@ -40,7 +64,7 @@ const EncoderDescription gEncoderTable[] = {
"MPEG-2 video",
"mpeg2video",
0,
CODEC_ID_MPEG2VIDEO,
AV_CODEC_ID_MPEG2VIDEO,
{ 0 }
},
B_MPEG_FORMAT_FAMILY,
@ -53,7 +77,7 @@ const EncoderDescription gEncoderTable[] = {
"Theora video",
"theora",
0,
CODEC_ID_THEORA,
AV_CODEC_ID_THEORA,
{ 0 }
},
B_ANY_FORMAT_FAMILY,
@ -66,7 +90,7 @@ const EncoderDescription gEncoderTable[] = {
"DV (Digital Video)",
"dvvideo",
0,
CODEC_ID_DVVIDEO,
AV_CODEC_ID_DVVIDEO,
{ 0 }
},
B_MISC_FORMAT_FAMILY,
@ -79,7 +103,7 @@ const EncoderDescription gEncoderTable[] = {
"MJPEG (Motion JPEG)",
"mjpeg",
0,
CODEC_ID_MJPEG,
AV_CODEC_ID_MJPEG,
{ 0 }
},
B_ANY_FORMAT_FAMILY,
@ -94,7 +118,7 @@ const EncoderDescription gEncoderTable[] = {
"Free Lossless Audio Codec (FLAC)",
"flac",
0,
CODEC_ID_FLAC,
AV_CODEC_ID_FLAC,
{ 0 }
},
B_ANY_FORMAT_FAMILY,
@ -107,7 +131,7 @@ const EncoderDescription gEncoderTable[] = {
"Advanced Audio Coding (AAC)",
"aac",
0,
CODEC_ID_AAC,
AV_CODEC_ID_AAC,
{ 0 }
},
B_MPEG_FORMAT_FAMILY,
@ -133,7 +157,7 @@ const EncoderDescription gEncoderTable[] = {
"Dolby Digital (AC-3)",
"ac3",
0,
CODEC_ID_AC3,
AV_CODEC_ID_AC3,
{ 0 }
},
B_ANY_FORMAT_FAMILY,
@ -243,37 +267,37 @@ const size_t gEncoderCount = sizeof(gEncoderTable) / sizeof(EncoderDescription);
raw_audio_codec_id_for(const media_format& format)
{
if (format.type != B_MEDIA_RAW_AUDIO)
return CODEC_ID_NONE;
return AV_CODEC_ID_NONE;
if (format.u.raw_audio.byte_order == B_MEDIA_LITTLE_ENDIAN) {
switch (format.u.raw_audio.format) {
case media_raw_audio_format::B_AUDIO_FLOAT:
return CODEC_ID_PCM_F32LE;
return AV_CODEC_ID_PCM_F32LE;
case media_raw_audio_format::B_AUDIO_DOUBLE:
return CODEC_ID_PCM_F64LE;
return AV_CODEC_ID_PCM_F64LE;
case media_raw_audio_format::B_AUDIO_INT:
return CODEC_ID_PCM_S32LE;
return AV_CODEC_ID_PCM_S32LE;
case media_raw_audio_format::B_AUDIO_SHORT:
return CODEC_ID_PCM_S16LE;
return AV_CODEC_ID_PCM_S16LE;
case media_raw_audio_format::B_AUDIO_UCHAR:
return CODEC_ID_PCM_U8;
return AV_CODEC_ID_PCM_U8;
default:
return CODEC_ID_NONE;
return AV_CODEC_ID_NONE;
}
} else {
switch (format.u.raw_audio.format) {
case media_raw_audio_format::B_AUDIO_FLOAT:
return CODEC_ID_PCM_F32BE;
return AV_CODEC_ID_PCM_F32BE;
case media_raw_audio_format::B_AUDIO_DOUBLE:
return CODEC_ID_PCM_F64BE;
return AV_CODEC_ID_PCM_F64BE;
case media_raw_audio_format::B_AUDIO_INT:
return CODEC_ID_PCM_S32BE;
return AV_CODEC_ID_PCM_S32BE;
case media_raw_audio_format::B_AUDIO_SHORT:
return CODEC_ID_PCM_S16BE;
return AV_CODEC_ID_PCM_S16BE;
case media_raw_audio_format::B_AUDIO_UCHAR:
return CODEC_ID_PCM_U8;
return AV_CODEC_ID_PCM_U8;
default:
return CODEC_ID_NONE;
return AV_CODEC_ID_NONE;
}
}
}
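For reference, a small illustrative use of the mapping above; the Media Kit field names appear in the hunks of this commit, the rest of the setup is assumed (CodecID follows the typedef/defines added elsewhere in this change):

    // Illustrative only.
    media_format format;
    format.type = B_MEDIA_RAW_AUDIO;
    format.u.raw_audio.format = media_raw_audio_format::B_AUDIO_SHORT;
    format.u.raw_audio.byte_order = B_MEDIA_LITTLE_ENDIAN;

    CodecID id = raw_audio_codec_id_for(format);
    // id == AV_CODEC_ID_PCM_S16LE here; B_AUDIO_DOUBLE would yield
    // AV_CODEC_ID_PCM_F64LE, and an unrecognized sample format
    // AV_CODEC_ID_NONE.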


@ -22,11 +22,36 @@ extern "C" {
#define TRACE(a...)
#endif
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
#define AVPixelFormat PixelFormat
#define AV_PIX_FMT_NONE PIX_FMT_NONE
#define AV_PIX_FMT_YUV410P PIX_FMT_YUV410P
#define AV_PIX_FMT_YUV411P PIX_FMT_YUV411P
#define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P
#define AV_PIX_FMT_YUVJ420P PIX_FMT_YUVJ420P
#define AV_PIX_FMT_YUV422P PIX_FMT_YUV422P
#define AV_PIX_FMT_YUVJ422P PIX_FMT_YUVJ422P
#define AV_PIX_FMT_YUYV422 PIX_FMT_YUYV422
#define AV_PIX_FMT_YUV420P10LE PIX_FMT_YUV420P10LE
#define AV_PIX_FMT_YUV444P PIX_FMT_YUV444P
#define AV_PIX_FMT_RGB24 PIX_FMT_RGB24
#define AV_PIX_FMT_BGR24 PIX_FMT_BGR24
#define AV_PIX_FMT_RGB565 PIX_FMT_RGB565
#define AV_PIX_FMT_RGB555 PIX_FMT_RGB555
#define AV_PIX_FMT_GRAY8 PIX_FMT_GRAY8
#define AV_PIX_FMT_MONOBLACK PIX_FMT_MONOBLACK
#define AV_PIX_FMT_PAL8 PIX_FMT_PAL8
#define AV_PIX_FMT_BGR32 PIX_FMT_BGR32
#define AV_PIX_FMT_BGR565 PIX_FMT_BGR565
#define AV_PIX_FMT_BGR555 PIX_FMT_BGR555
#define AV_PIX_FMT_RGB32 PIX_FMT_RGB32
#endif
//! This function will try to find the best colorspaces for both the ff-codec
// and the Media Kit sides.
gfx_convert_func
resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
resolve_colorspace(color_space colorSpace, AVPixelFormat pixelFormat, int width,
int height)
{
CPUCapabilities cpu;
@ -34,18 +59,18 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
switch (colorSpace) {
case B_RGB32:
// Planar Formats
if (pixelFormat == PIX_FMT_YUV410P) {
if (pixelFormat == AV_PIX_FMT_YUV410P) {
TRACE("resolve_colorspace: gfx_conv_yuv410p_rgb32_c\n");
return gfx_conv_yuv410p_rgb32_c;
}
if (pixelFormat == PIX_FMT_YUV411P) {
if (pixelFormat == AV_PIX_FMT_YUV411P) {
TRACE("resolve_colorspace: gfx_conv_yuv411p_rgb32_c\n");
return gfx_conv_yuv411p_rgb32_c;
}
if (pixelFormat == PIX_FMT_YUV420P
|| pixelFormat == PIX_FMT_YUVJ420P) {
if (pixelFormat == AV_PIX_FMT_YUV420P
|| pixelFormat == AV_PIX_FMT_YUVJ420P) {
#ifndef __x86_64__
if (cpu.HasSSSE3() && width % 8 == 0 && height % 2 == 0) {
TRACE("resolve_colorspace: gfx_conv_yuv420p_rgba32_ssse3\n");
@ -63,8 +88,8 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
return gfx_conv_YCbCr420p_RGB32_c;
}
if (pixelFormat == PIX_FMT_YUV422P
|| pixelFormat == PIX_FMT_YUVJ422P) {
if (pixelFormat == AV_PIX_FMT_YUV422P
|| pixelFormat == AV_PIX_FMT_YUVJ422P) {
#ifndef __x86_64__
if (cpu.HasSSSE3() && width % 8 == 0) {
TRACE("resolve_colorspace: gfx_conv_yuv422p_RGB32_ssse3\n");
@ -82,7 +107,7 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
}
// Packed Formats
if (pixelFormat == PIX_FMT_YUYV422) {
if (pixelFormat == AV_PIX_FMT_YUYV422) {
#ifndef __x86_64__
if (cpu.HasSSSE3() && width % 8 == 0) {
return gfx_conv_yuv422_rgba32_ssse3;
@ -96,7 +121,7 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
return gfx_conv_YCbCr422_RGB32_c;
}
if (pixelFormat == PIX_FMT_YUV420P10LE)
if (pixelFormat == AV_PIX_FMT_YUV420P10LE)
return gfx_conv_yuv420p10le_rgb32_c;
TRACE("resolve_colorspace: %s => B_RGB32: NULL\n",
@ -114,23 +139,23 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
return NULL;
case B_YCbCr422:
if (pixelFormat == PIX_FMT_YUV410P) {
if (pixelFormat == AV_PIX_FMT_YUV410P) {
TRACE("resolve_colorspace: gfx_conv_yuv410p_ycbcr422_c\n");
return gfx_conv_yuv410p_ycbcr422_c;
}
if (pixelFormat == PIX_FMT_YUV411P) {
if (pixelFormat == AV_PIX_FMT_YUV411P) {
TRACE("resolve_colorspace: gfx_conv_yuv411p_ycbcr422_c\n");
return gfx_conv_yuv411p_ycbcr422_c;
}
if (pixelFormat == PIX_FMT_YUV420P
|| pixelFormat == PIX_FMT_YUVJ420P) {
if (pixelFormat == AV_PIX_FMT_YUV420P
|| pixelFormat == AV_PIX_FMT_YUVJ420P) {
TRACE("resolve_colorspace: gfx_conv_yuv420p_ycbcr422_c\n");
return gfx_conv_yuv420p_ycbcr422_c;
}
if (pixelFormat == PIX_FMT_YUYV422) {
if (pixelFormat == AV_PIX_FMT_YUYV422) {
TRACE("resolve_colorspace: PIX_FMT_YUV422 => B_YCbCr422: "
"gfx_conv_null\n");
return gfx_conv_null;
@ -150,7 +175,7 @@ resolve_colorspace(color_space colorSpace, PixelFormat pixelFormat, int width,
const char*
pixfmt_to_string(int pixFormat)
{
const char* name = av_get_pix_fmt_name((enum PixelFormat)pixFormat);
const char* name = av_get_pix_fmt_name((enum AVPixelFormat)pixFormat);
if (name == NULL)
return "(unknown)";
return name;
@ -165,92 +190,92 @@ pixfmt_to_colorspace(int pixFormat)
TRACE("No BE API colorspace definition for pixel format "
"\"%s\".\n", pixfmt_to_string(pixFormat));
// Supposed to fall through.
case PIX_FMT_NONE:
case AV_PIX_FMT_NONE:
return B_NO_COLOR_SPACE;
// NOTE: See pixfmt_to_colorspace() for what these are.
case PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV420P:
return B_YUV420;
case PIX_FMT_YUYV422:
case AV_PIX_FMT_YUYV422:
return B_YUV422;
case PIX_FMT_RGB24:
case AV_PIX_FMT_RGB24:
return B_RGB24_BIG;
case PIX_FMT_BGR24:
case AV_PIX_FMT_BGR24:
return B_RGB24;
case PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV422P:
return B_YUV422;
case PIX_FMT_YUV444P:
case AV_PIX_FMT_YUV444P:
return B_YUV444;
case PIX_FMT_RGB32:
case AV_PIX_FMT_RGB32:
return B_RGBA32_BIG;
case PIX_FMT_YUV410P:
case AV_PIX_FMT_YUV410P:
return B_YUV9;
case PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV411P:
return B_YUV12;
case PIX_FMT_RGB565:
case AV_PIX_FMT_RGB565:
return B_RGB16_BIG;
case PIX_FMT_RGB555:
case AV_PIX_FMT_RGB555:
return B_RGB15_BIG;
case PIX_FMT_GRAY8:
case AV_PIX_FMT_GRAY8:
return B_GRAY8;
case PIX_FMT_MONOBLACK:
case AV_PIX_FMT_MONOBLACK:
return B_GRAY1;
case PIX_FMT_PAL8:
case AV_PIX_FMT_PAL8:
return B_CMAP8;
case PIX_FMT_BGR32:
case AV_PIX_FMT_BGR32:
return B_RGB32;
case PIX_FMT_BGR565:
case AV_PIX_FMT_BGR565:
return B_RGB16;
case PIX_FMT_BGR555:
case AV_PIX_FMT_BGR555:
return B_RGB15;
}
}
PixelFormat
AVPixelFormat
colorspace_to_pixfmt(color_space format)
{
switch(format) {
default:
case B_NO_COLOR_SPACE:
return PIX_FMT_NONE;
return AV_PIX_FMT_NONE;
// NOTE: See pixfmt_to_colorspace() for what these are.
case B_YUV420:
return PIX_FMT_YUV420P;
return AV_PIX_FMT_YUV420P;
case B_YUV422:
return PIX_FMT_YUV422P;
return AV_PIX_FMT_YUV422P;
case B_RGB24_BIG:
return PIX_FMT_RGB24;
return AV_PIX_FMT_RGB24;
case B_RGB24:
return PIX_FMT_BGR24;
return AV_PIX_FMT_BGR24;
case B_YUV444:
return PIX_FMT_YUV444P;
return AV_PIX_FMT_YUV444P;
case B_RGBA32_BIG:
case B_RGB32_BIG:
return PIX_FMT_BGR32;
return AV_PIX_FMT_BGR32;
case B_YUV9:
return PIX_FMT_YUV410P;
return AV_PIX_FMT_YUV410P;
case B_YUV12:
return PIX_FMT_YUV411P;
return AV_PIX_FMT_YUV411P;
// TODO: YCbCr color spaces! These are not the same as YUV!
case B_RGB16_BIG:
return PIX_FMT_RGB565;
return AV_PIX_FMT_RGB565;
case B_RGB15_BIG:
return PIX_FMT_RGB555;
return AV_PIX_FMT_RGB555;
case B_GRAY8:
return PIX_FMT_GRAY8;
return AV_PIX_FMT_GRAY8;
case B_GRAY1:
return PIX_FMT_MONOBLACK;
return AV_PIX_FMT_MONOBLACK;
case B_CMAP8:
return PIX_FMT_PAL8;
return AV_PIX_FMT_PAL8;
case B_RGBA32:
case B_RGB32:
return PIX_FMT_RGB32;
return AV_PIX_FMT_RGB32;
case B_RGB16:
return PIX_FMT_BGR565;
return AV_PIX_FMT_BGR565;
case B_RGB15:
return PIX_FMT_BGR555;
return AV_PIX_FMT_BGR555;
}
}
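A couple of worked values for the two helpers, read straight off the switch statements above; note that the two directions are not exact inverses:

    // Examples derived from the tables above (illustrative, not new API).
    AVPixelFormat fmt = colorspace_to_pixfmt(B_RGB32);          // AV_PIX_FMT_RGB32
    color_space space = pixfmt_to_colorspace(AV_PIX_FMT_BGR24); // B_RGB24
    // Asymmetry example: B_YUV422 maps to AV_PIX_FMT_YUV422P, while both
    // AV_PIX_FMT_YUV422P and AV_PIX_FMT_YUYV422 map back to B_YUV422.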


@ -28,6 +28,11 @@ extern "C" {
#include "libavcodec/avcodec.h"
}
#if LIBAVCODEC_VERSION_INT < ((54 << 16) | (50 << 8))
typedef PixelFormat AVPixelFormat;
#endif
// this function will be used by the wrapper to write into
// the Media Kit provided buffer from the self-allocated ffmpeg codec buffer
// it also will do some colorspace and planar/chunky conversions.
@ -38,12 +43,12 @@ typedef void (*gfx_convert_func) (AVFrame *in, AVFrame *out, int width, int heig
// this function will try to find the best colorspaces for both the ff-codec and
// the Media Kit sides.
gfx_convert_func resolve_colorspace(color_space cs, PixelFormat pixelFormat, int width, int height);
gfx_convert_func resolve_colorspace(color_space cs, AVPixelFormat pixelFormat, int width, int height);
const char *pixfmt_to_string(int format);
color_space pixfmt_to_colorspace(int format);
PixelFormat colorspace_to_pixfmt(color_space format);
AVPixelFormat colorspace_to_pixfmt(color_space format);
void dump_ffframe_audio(AVFrame *frame, const char *name);
void dump_ffframe_video(AVFrame *frame, const char *name);