Refactored H264 encoding/decoding

Use YUV primitives now for all H264 operations
akallabeth 2020-10-22 18:11:21 +02:00 committed by akallabeth
parent c67e4df96b
commit 50acf72615
11 changed files with 680 additions and 338 deletions
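At its core the refactor removes per-caller YUV plumbing: the video channel drops its private YUV_CONTEXT and yuv_to_rgb helper, and avc420_decompress/avc444_decompress now convert to RGB internally through a YUV_CONTEXT owned by the H264_CONTEXT. A minimal sketch of the new decode call, assuming the refactored API shown in the diff below; the helper name and locals here are illustrative, not identifiers from this commit:

/* Minimal sketch (not from the diff): decode one AVC420 sample straight into a
 * BGRX buffer; the codec now does the YUV -> RGB conversion internally.
 * Requires the FreeRDP codec headers; all names are illustrative. */
static BOOL decode_sample_to_bgrx(H264_CONTEXT* h264, const BYTE* bitstream, UINT32 size,
                                  BYTE* dst, UINT32 width, UINT32 height)
{
	const RECTANGLE_16 rect = { 0, 0, width, height }; /* full-frame region */
	const INT32 status = avc420_decompress(h264, bitstream, size, dst, PIXEL_FORMAT_BGRX32,
	                                       0 /* nDstStep, as the video channel passes it */,
	                                       width, height, &rect, 1);
	return status >= 0; /* status < 0: decode failed, caller skips the frame */
}

The video channel code below makes essentially this call twice, once for the immediate-publish path and once for delayed frames.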

View File

@@ -76,7 +76,6 @@ struct _VIDEO_PLUGIN
VIDEO_LISTENER_CALLBACK* data_callback;
VideoClientContext* context;
rdpSettings* settings;
BOOL initialized;
};
typedef struct _VIDEO_PLUGIN VIDEO_PLUGIN;
@@ -127,7 +126,6 @@ struct _PresentationContext
UINT64 startTimeStamp;
UINT64 publishOffset;
H264_CONTEXT* h264;
YUV_CONTEXT* yuv;
wStream* currentSample;
UINT64 lastPublishTime, nextPublishTime;
volatile LONG refCounter;
@@ -135,6 +133,7 @@ struct _PresentationContext
VideoSurface* surface;
};
static void PresentationContext_unref(PresentationContext* presentation);
static const char* video_command_name(BYTE cmd)
{
switch (cmd)
@@ -148,28 +147,6 @@ static const char* video_command_name(BYTE cmd)
}
}
static BOOL yuv_to_rgb(PresentationContext* presentation, BYTE* dest)
{
const BYTE* pYUVPoint[3];
H264_CONTEXT* h264 = presentation->h264;
BYTE** ppYUVData;
ppYUVData = h264->pYUVData;
pYUVPoint[0] = ppYUVData[0];
pYUVPoint[1] = ppYUVData[1];
pYUVPoint[2] = ppYUVData[2];
if (!yuv_context_decode(presentation->yuv, pYUVPoint, h264->iStride, PIXEL_FORMAT_BGRX32, dest,
h264->width * 4))
{
WLog_ERR(TAG, "error in yuv_to_rgb conversion");
return FALSE;
}
return TRUE;
}
static void video_client_context_set_geometry(VideoClientContext* video,
GeometryClientContext* geometry)
{
@@ -220,8 +197,7 @@ error_frames:
}
static PresentationContext* PresentationContext_new(VideoClientContext* video, BYTE PresentationId,
UINT32 x, UINT32 y, UINT32 width, UINT32 height,
BOOL disable_threads)
UINT32 x, UINT32 y, UINT32 width, UINT32 height)
{
size_t s;
VideoClientContextPriv* priv = video->priv;
@@ -241,7 +217,7 @@ static PresentationContext* PresentationContext_new(VideoClientContext* video, B
if (!ret->h264)
{
WLog_ERR(TAG, "unable to create a h264 context");
goto error_h264;
goto fail;
}
h264_context_reset(ret->h264, width, height);
@@ -249,44 +225,28 @@ static PresentationContext* PresentationContext_new(VideoClientContext* video, B
if (!ret->currentSample)
{
WLog_ERR(TAG, "unable to create current packet stream");
goto error_currentSample;
goto fail;
}
ret->surfaceData = BufferPool_Take(priv->surfacePool, s);
if (!ret->surfaceData)
{
WLog_ERR(TAG, "unable to allocate surfaceData");
goto error_surfaceData;
goto fail;
}
ret->surface = video->createSurface(video, ret->surfaceData, x, y, width, height);
if (!ret->surface)
{
WLog_ERR(TAG, "unable to create surface");
goto error_surface;
goto fail;
}
ret->yuv = yuv_context_new(FALSE, disable_threads);
if (!ret->yuv)
{
WLog_ERR(TAG, "unable to create YUV decoder");
goto error_yuv;
}
yuv_context_reset(ret->yuv, width, height);
ret->refCounter = 1;
return ret;
error_yuv:
video->deleteSurface(video, ret->surface);
error_surface:
BufferPool_Return(priv->surfacePool, ret->surfaceData);
error_surfaceData:
Stream_Free(ret->currentSample, TRUE);
error_currentSample:
h264_context_free(ret->h264);
error_h264:
free(ret);
fail:
PresentationContext_unref(ret);
return NULL;
}
@@ -298,7 +258,7 @@ static void PresentationContext_unref(PresentationContext* presentation)
if (!presentation)
return;
if (InterlockedDecrement(&presentation->refCounter) != 0)
if (InterlockedDecrement(&presentation->refCounter) > 0)
return;
geometry = presentation->geometry;
@@ -316,7 +276,6 @@ static void PresentationContext_unref(PresentationContext* presentation)
Stream_Free(presentation->currentSample, TRUE);
presentation->video->deleteSurface(presentation->video, presentation->surface);
BufferPool_Return(priv->surfacePool, presentation->surfaceData);
yuv_context_free(presentation->yuv);
free(presentation);
}
@@ -408,8 +367,7 @@ static BOOL video_onMappedGeometryClear(MAPPED_GEOMETRY* geometry)
return TRUE;
}
static UINT video_PresentationRequest(VideoClientContext* video, TSMM_PRESENTATION_REQUEST* req,
BOOL disable_threads)
static UINT video_PresentationRequest(VideoClientContext* video, TSMM_PRESENTATION_REQUEST* req)
{
VideoClientContextPriv* priv = video->priv;
PresentationContext* presentation;
@@ -458,7 +416,7 @@ static UINT video_PresentationRequest(VideoClientContext* video, TSMM_PRESENTATI
WLog_DBG(TAG, "creating presentation 0x%x", req->PresentationId);
presentation = PresentationContext_new(
video, req->PresentationId, geom->topLevelLeft + geom->left,
geom->topLevelTop + geom->top, req->SourceWidth, req->SourceHeight, disable_threads);
geom->topLevelTop + geom->top, req->SourceWidth, req->SourceHeight);
if (!presentation)
{
WLog_ERR(TAG, "unable to create presentation video");
@@ -501,8 +459,7 @@ static UINT video_PresentationRequest(VideoClientContext* video, TSMM_PRESENTATI
return ret;
}
static UINT video_read_tsmm_presentation_req(VideoClientContext* context, wStream* s,
BOOL disable_threads)
static UINT video_read_tsmm_presentation_req(VideoClientContext* context, wStream* s)
{
TSMM_PRESENTATION_REQUEST req;
@@ -546,7 +503,7 @@ static UINT video_read_tsmm_presentation_req(VideoClientContext* context, wStrea
req.SourceHeight, req.ScaledWidth, req.ScaledHeight, req.hnsTimestampOffset,
req.GeometryMappingId);
return video_PresentationRequest(context, &req, disable_threads);
return video_PresentationRequest(context, &req);
}
/**
@@ -579,8 +536,7 @@ static UINT video_control_on_data_received(IWTSVirtualChannelCallback* pChannelC
switch (packetType)
{
case TSMM_PACKET_TYPE_PRESENTATION_REQUEST:
ret = video_read_tsmm_presentation_req(
context, s, video->settings->ThreadingFlags & THREADING_FLAGS_DISABLE_THREADS);
ret = video_read_tsmm_presentation_req(context, s);
break;
default:
WLog_ERR(TAG, "not expecting packet type %" PRIu32 "", packetType);
@@ -761,6 +717,7 @@ static UINT video_VideoData(VideoClientContext* context, TSMM_VIDEO_DATA* data)
VideoClientContextPriv* priv = context->priv;
PresentationContext* presentation;
int status;
const UINT32 format = PIXEL_FORMAT_BGRX32;
presentation = priv->currentPresentation;
if (!presentation)
@@ -790,17 +747,10 @@ static UINT video_VideoData(VideoClientContext* context, TSMM_VIDEO_DATA* data)
UINT64 startTime = GetTickCount64(), timeAfterH264;
MAPPED_GEOMETRY* geom = presentation->geometry;
const RECTANGLE_16 rect = { 0, 0, presentation->SourceWidth, presentation->SourceHeight };
Stream_SealLength(presentation->currentSample);
Stream_SetPosition(presentation->currentSample, 0);
status = h264->subsystem->Decompress(h264, Stream_Pointer(presentation->currentSample),
Stream_Length(presentation->currentSample));
if (status == 0)
return CHANNEL_RC_OK;
if (status < 0)
return CHANNEL_RC_OK;
timeAfterH264 = GetTickCount64();
if (data->SampleNumber == 1)
{
@@ -813,8 +763,13 @@ static UINT video_VideoData(VideoClientContext* context, TSMM_VIDEO_DATA* data)
int dropped = 0;
/* if the frame is to be published in less than 10 ms, let's consider it's now */
yuv_to_rgb(presentation, presentation->surfaceData);
status = avc420_decompress(h264, Stream_Pointer(presentation->currentSample),
Stream_Length(presentation->currentSample),
presentation->surfaceData, format, 0, rect.right,
rect.bottom, &rect, 1);
if (status < 0)
return CHANNEL_RC_OK;
context->showSurface(context, presentation->surface);
priv->publishedFrames++;
@@ -862,14 +817,13 @@ static UINT video_VideoData(VideoClientContext* context, TSMM_VIDEO_DATA* data)
return CHANNEL_RC_NO_MEMORY;
}
if (!yuv_to_rgb(presentation, frame->surfaceData))
{
WLog_ERR(TAG, "error during YUV->RGB conversion");
BufferPool_Return(priv->surfacePool, frame->surfaceData);
mappedGeometryUnref(geom);
free(frame);
return CHANNEL_RC_NO_MEMORY;
}
status =
avc420_decompress(h264, Stream_Pointer(presentation->currentSample),
Stream_Length(presentation->currentSample), frame->surfaceData,
format, 0, rect.right, rect.bottom, &rect, 1);
if (status < 0)
return CHANNEL_RC_OK;
InterlockedIncrement(&presentation->refCounter);
@@ -1151,8 +1105,6 @@ UINT DVCPluginEntry(IDRDYNVC_ENTRY_POINTS* pEntryPoints)
return CHANNEL_RC_NO_MEMORY;
}
videoPlugin->settings = pEntryPoints->GetRdpSettings(pEntryPoints);
videoPlugin->wtsPlugin.Initialize = video_plugin_initialize;
videoPlugin->wtsPlugin.Connected = NULL;
videoPlugin->wtsPlugin.Disconnected = NULL;

View File

@@ -27,23 +27,8 @@
#include <freerdp/channels/rdpgfx.h>
typedef struct _H264_CONTEXT H264_CONTEXT;
typedef BOOL (*pfnH264SubsystemInit)(H264_CONTEXT* h264);
typedef void (*pfnH264SubsystemUninit)(H264_CONTEXT* h264);
typedef int (*pfnH264SubsystemDecompress)(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize);
typedef int (*pfnH264SubsystemCompress)(H264_CONTEXT* h264, const BYTE** pSrcYuv,
const UINT32* pStride, BYTE** ppDstData, UINT32* pDstSize);
struct _H264_CONTEXT_SUBSYSTEM
{
const char* name;
pfnH264SubsystemInit Init;
pfnH264SubsystemUninit Uninit;
pfnH264SubsystemDecompress Decompress;
pfnH264SubsystemCompress Compress;
};
typedef struct _H264_CONTEXT_SUBSYSTEM H264_CONTEXT_SUBSYSTEM;
typedef struct _YUV_CONTEXT YUV_CONTEXT;
enum _H264_RATECONTROL_MODE
{
@@ -75,6 +60,7 @@ struct _H264_CONTEXT
UINT32 numSystemData;
void* pSystemData;
H264_CONTEXT_SUBSYSTEM* subsystem;
YUV_CONTEXT* yuv;
void* lumaData;
wLog* log;
@@ -91,16 +77,17 @@ extern "C"
FREERDP_API INT32 avc420_decompress(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize,
BYTE* pDstData, DWORD DstFormat, UINT32 nDstStep,
UINT32 nDstWidth, UINT32 nDstHeight,
RECTANGLE_16* regionRects, UINT32 numRegionRect);
const RECTANGLE_16* regionRects, UINT32 numRegionRect);
FREERDP_API INT32 avc444_compress(H264_CONTEXT* h264, const BYTE* pSrcData, DWORD SrcFormat,
UINT32 nSrcStep, UINT32 nSrcWidth, UINT32 nSrcHeight,
BYTE version, BYTE* op, BYTE** pDstData, UINT32* pDstSize,
BYTE** pAuxDstData, UINT32* pAuxDstSize);
FREERDP_API INT32 avc444_decompress(H264_CONTEXT* h264, BYTE op, RECTANGLE_16* regionRects,
UINT32 numRegionRect, const BYTE* pSrcData, UINT32 SrcSize,
RECTANGLE_16* auxRegionRects, UINT32 numAuxRegionRect,
FREERDP_API INT32 avc444_decompress(H264_CONTEXT* h264, BYTE op,
const RECTANGLE_16* regionRects, UINT32 numRegionRect,
const BYTE* pSrcData, UINT32 SrcSize,
const RECTANGLE_16* auxRegionRects, UINT32 numAuxRegionRect,
const BYTE* pAuxSrcData, UINT32 AuxSrcSize, BYTE* pDstData,
DWORD DstFormat, UINT32 nDstStep, UINT32 nDstWidth,
UINT32 nDstHeight, UINT32 codecId);

View File

@@ -32,9 +32,25 @@ extern "C"
{
#endif
FREERDP_API BOOL yuv_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3],
UINT32 iStride[3], DWORD DstFormat, BYTE* dest,
UINT32 nDstStep);
FREERDP_API BOOL yuv420_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3],
const UINT32 iStride[3], DWORD DstFormat, BYTE* dest,
UINT32 nDstStep, const RECTANGLE_16* regionRects,
UINT32 numRegionRects);
FREERDP_API BOOL yuv420_context_encode(YUV_CONTEXT* context, const BYTE* rgbData,
UINT32 srcStep, UINT32 srcFormat,
const UINT32 iStride[3], BYTE* yuvData[3],
const RECTANGLE_16* regionRects, UINT32 numRegionRects);
FREERDP_API BOOL yuv444_context_decode(YUV_CONTEXT* context, BYTE type, const BYTE* pYUVData[3],
const UINT32 iStride[3], BYTE* pYUVDstData[3],
const UINT32 iDstStride[3], DWORD DstFormat, BYTE* dest,
UINT32 nDstStep, const RECTANGLE_16* regionRects,
UINT32 numRegionRects);
FREERDP_API BOOL yuv444_context_encode(YUV_CONTEXT* context, BYTE version, const BYTE* pSrcData,
UINT32 nSrcStep, UINT32 SrcFormat,
const UINT32 iStride[3], BYTE* pYUVLumaData[3],
BYTE* pYUVChromaData[3], const RECTANGLE_16* regionRects,
UINT32 numRegionRects);
FREERDP_API void yuv_context_reset(YUV_CONTEXT* context, UINT32 width, UINT32 height);
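These yuv420/yuv444 entry points replace the codec's direct calls to the RGBToYUV*/YUV*ToRGB primitives and add per-rectangle, thread-pool based processing. A minimal sketch of the encode direction, assuming a context created with yuv_context_new(TRUE, 0); the wrapper and argument names are illustrative (the codec itself passes h264->pYUVData and h264->iStride):

/* Minimal sketch (not from the diff): convert one full RGB frame into the three
 * YUV420 planes with the new primitive, as avc420_compress now does.
 * yuv must come from yuv_context_new(TRUE, 0); all names are illustrative. */
static BOOL encode_frame_to_yuv420(YUV_CONTEXT* yuv, const BYTE* rgb, UINT32 rgbStep,
                                   UINT32 rgbFormat, const UINT32 strides[3], BYTE* planes[3],
                                   UINT32 width, UINT32 height)
{
	RECTANGLE_16 rect = { 0 };
	rect.right = width;   /* region covers the whole frame */
	rect.bottom = height;
	return yuv420_context_encode(yuv, rgb, rgbStep, rgbFormat, strides, planes, &rect, 1);
}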

View File

@@ -123,7 +123,7 @@ typedef pstatus_t (*__YUV444ToRGB_8u_P3AC4R_t)(const BYTE* const pSrc[3], const
BYTE* pDst, UINT32 dstStep, UINT32 DstFormat,
const prim_size_t* roi);
typedef pstatus_t (*__RGBToYUV420_8u_P3AC4R_t)(const BYTE* pSrc, UINT32 SrcFormat, UINT32 srcStep,
BYTE* pDst[3], UINT32 dstStep[3],
BYTE* pDst[3], const UINT32 dstStep[3],
const prim_size_t* roi);
typedef pstatus_t (*__RGBToYUV444_8u_P3AC4R_t)(const BYTE* pSrc, UINT32 SrcFormat, UINT32 srcStep,
BYTE* pDst[3], UINT32 dstStep[3],

View File

@@ -30,6 +30,7 @@
#include <freerdp/primitives.h>
#include <freerdp/codec/h264.h>
#include <freerdp/codec/yuv.h>
#include <freerdp/log.h>
#include "h264.h"
@@ -74,102 +75,12 @@ BOOL avc420_ensure_buffer(H264_CONTEXT* h264, UINT32 stride, UINT32 width, UINT3
return TRUE;
}
static BOOL check_rect(const H264_CONTEXT* h264, const RECTANGLE_16* rect, UINT32 nDstWidth,
UINT32 nDstHeight)
{
/* Check, if the output rectangle is valid in decoded h264 frame. */
if ((rect->right > h264->width) || (rect->left > h264->width))
return FALSE;
if ((rect->top > h264->height) || (rect->bottom > h264->height))
return FALSE;
/* Check, if the output rectangle is valid in destination buffer. */
if ((rect->right > nDstWidth) || (rect->left > nDstWidth))
return FALSE;
if ((rect->bottom > nDstHeight) || (rect->top > nDstHeight))
return FALSE;
return TRUE;
}
static BOOL avc_yuv_to_rgb(H264_CONTEXT* h264, const RECTANGLE_16* regionRects,
UINT32 numRegionRects, UINT32 nDstWidth, UINT32 nDstHeight,
UINT32 nDstStep, BYTE* pDstData, DWORD DstFormat, BOOL use444)
{
UINT32 x;
BYTE* pDstPoint;
prim_size_t roi;
INT32 width, height;
const BYTE* pYUVPoint[3];
primitives_t* prims = primitives_get();
for (x = 0; x < numRegionRects; x++)
{
const RECTANGLE_16* rect = &(regionRects[x]);
const UINT32* iStride;
BYTE** ppYUVData;
if (use444)
{
iStride = h264->iYUV444Stride;
ppYUVData = h264->pYUV444Data;
}
else
{
iStride = h264->iStride;
ppYUVData = h264->pYUVData;
}
if (!check_rect(h264, rect, nDstWidth, nDstHeight))
return FALSE;
width = rect->right - rect->left;
height = rect->bottom - rect->top;
pDstPoint = pDstData + rect->top * nDstStep + rect->left * 4;
pYUVPoint[0] = ppYUVData[0] + rect->top * iStride[0] + rect->left;
pYUVPoint[1] = ppYUVData[1];
pYUVPoint[2] = ppYUVData[2];
if (use444)
{
pYUVPoint[1] += rect->top * iStride[1] + rect->left;
pYUVPoint[2] += rect->top * iStride[2] + rect->left;
}
else
{
pYUVPoint[1] += rect->top / 2 * iStride[1] + rect->left / 2;
pYUVPoint[2] += rect->top / 2 * iStride[2] + rect->left / 2;
}
roi.width = width;
roi.height = height;
if (use444)
{
if (prims->YUV444ToRGB_8u_P3AC4R(pYUVPoint, iStride, pDstPoint, nDstStep, DstFormat,
&roi) != PRIMITIVES_SUCCESS)
{
return FALSE;
}
}
else
{
if (prims->YUV420ToRGB_8u_P3AC4R(pYUVPoint, iStride, pDstPoint, nDstStep, DstFormat,
&roi) != PRIMITIVES_SUCCESS)
return FALSE;
}
}
return TRUE;
}
INT32 avc420_decompress(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize, BYTE* pDstData,
DWORD DstFormat, UINT32 nDstStep, UINT32 nDstWidth, UINT32 nDstHeight,
RECTANGLE_16* regionRects, UINT32 numRegionRects)
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
int status;
const BYTE* pYUVData[3];
if (!h264)
return -1001;
@@ -182,8 +93,11 @@ INT32 avc420_decompress(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize
if (status < 0)
return status;
if (!avc_yuv_to_rgb(h264, regionRects, numRegionRects, nDstWidth, nDstHeight, nDstStep,
pDstData, DstFormat, FALSE))
pYUVData[0] = h264->pYUVData[0];
pYUVData[1] = h264->pYUVData[1];
pYUVData[2] = h264->pYUVData[2];
if (!yuv420_context_decode(h264->yuv, pYUVData, h264->iStride, DstFormat, pDstData, nDstStep,
regionRects, numRegionRects))
return -1002;
return 1;
@@ -192,8 +106,8 @@ INT32 avc420_decompress(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize
INT32 avc420_compress(H264_CONTEXT* h264, const BYTE* pSrcData, DWORD SrcFormat, UINT32 nSrcStep,
UINT32 nSrcWidth, UINT32 nSrcHeight, BYTE** ppDstData, UINT32* pDstSize)
{
prim_size_t roi;
primitives_t* prims = primitives_get();
RECTANGLE_16 rect;
const BYTE* pYUVData[3];
if (!h264)
return -1;
@@ -204,25 +118,26 @@ INT32 avc420_compress(H264_CONTEXT* h264, const BYTE* pSrcData, DWORD SrcFormat,
if (!avc420_ensure_buffer(h264, nSrcStep, nSrcWidth, nSrcHeight))
return -1;
roi.width = nSrcWidth;
roi.height = nSrcHeight;
rect.left = 0;
rect.top = 0;
rect.right = nSrcWidth;
rect.bottom = nSrcHeight;
if (prims->RGBToYUV420_8u_P3AC4R(pSrcData, SrcFormat, nSrcStep, h264->pYUVData, h264->iStride,
&roi) != PRIMITIVES_SUCCESS)
if (!yuv420_context_encode(h264->yuv, pSrcData, nSrcStep, SrcFormat, h264->iStride,
h264->pYUVData, &rect, 1))
return -1;
{
const BYTE* pYUVData[3] = { h264->pYUVData[0], h264->pYUVData[1], h264->pYUVData[2] };
return h264->subsystem->Compress(h264, pYUVData, h264->iStride, ppDstData, pDstSize);
}
pYUVData[0] = h264->pYUVData[0];
pYUVData[1] = h264->pYUVData[1];
pYUVData[2] = h264->pYUVData[2];
return h264->subsystem->Compress(h264, pYUVData, h264->iStride, ppDstData, pDstSize);
}
INT32 avc444_compress(H264_CONTEXT* h264, const BYTE* pSrcData, DWORD SrcFormat, UINT32 nSrcStep,
UINT32 nSrcWidth, UINT32 nSrcHeight, BYTE version, BYTE* op, BYTE** ppDstData,
UINT32* pDstSize, BYTE** ppAuxDstData, UINT32* pAuxDstSize)
{
prim_size_t roi;
primitives_t* prims = primitives_get();
RECTANGLE_16 rect = { 0 };
BYTE* coded;
UINT32 codedSize;
@@ -238,30 +153,12 @@ INT32 avc444_compress(H264_CONTEXT* h264, const BYTE* pSrcData, DWORD SrcFormat,
if (!avc444_ensure_buffer(h264, nSrcHeight))
return -1;
roi.width = nSrcWidth;
roi.height = nSrcHeight;
rect.right = nSrcWidth;
rect.bottom = nSrcHeight;
switch (version)
{
case 1:
if (prims->RGBToAVC444YUV(pSrcData, SrcFormat, nSrcStep, h264->pYUV444Data,
h264->iStride, h264->pYUVData, h264->iStride,
&roi) != PRIMITIVES_SUCCESS)
return -1;
break;
case 2:
if (prims->RGBToAVC444YUVv2(pSrcData, SrcFormat, nSrcStep, h264->pYUV444Data,
h264->iStride, h264->pYUVData, h264->iStride,
&roi) != PRIMITIVES_SUCCESS)
return -1;
break;
default:
return -1;
}
if (!yuv444_context_encode(h264->yuv, version, pSrcData, nSrcStep, SrcFormat, h264->iStride,
h264->pYUV444Data, h264->pYUVData, &rect, 1))
return -1;
{
const BYTE* pYUV444Data[3] = { h264->pYUV444Data[0], h264->pYUV444Data[1],
@@ -349,37 +246,26 @@ static BOOL avc444_process_rects(H264_CONTEXT* h264, const BYTE* pSrcData, UINT3
UINT32 nDstWidth, UINT32 nDstHeight, const RECTANGLE_16* rects,
UINT32 nrRects, avc444_frame_type type)
{
const primitives_t* prims = primitives_get();
UINT32 x;
const BYTE* pYUVData[3];
BYTE* pYUVDstData[3];
UINT32* piDstStride = h264->iYUV444Stride;
BYTE** ppYUVDstData = h264->pYUV444Data;
const UINT32* piStride = h264->iStride;
const BYTE* const* ppYUVData = (const BYTE* const*)h264->pYUVData;
if (h264->subsystem->Decompress(h264, pSrcData, SrcSize) < 0)
return FALSE;
pYUVData[0] = h264->pYUVData[0];
pYUVData[1] = h264->pYUVData[1];
pYUVData[2] = h264->pYUVData[2];
if (!avc444_ensure_buffer(h264, nDstHeight))
return FALSE;
for (x = 0; x < nrRects; x++)
{
const RECTANGLE_16* rect = &rects[x];
const UINT32 alignedWidth =
h264->width + ((h264->width % 16 != 0) ? 16 - h264->width % 16 : 0);
const UINT32 alignedHeight =
h264->height + ((h264->height % 16 != 0) ? 16 - h264->height % 16 : 0);
if (!check_rect(h264, rect, nDstWidth, nDstHeight))
continue;
if (prims->YUV420CombineToYUV444(type, ppYUVData, piStride, alignedWidth, alignedHeight,
ppYUVDstData, piDstStride, rect) != PRIMITIVES_SUCCESS)
return FALSE;
}
if (!avc_yuv_to_rgb(h264, rects, nrRects, nDstWidth, nDstHeight, nDstStep, pDstData, DstFormat,
TRUE))
pYUVDstData[0] = ppYUVDstData[0];
pYUVDstData[1] = ppYUVDstData[1];
pYUVDstData[2] = ppYUVDstData[2];
if (!yuv444_context_decode(h264->yuv, type, pYUVData, piStride, pYUVDstData, piDstStride,
DstFormat, pDstData, nDstStep, rects, nrRects))
return FALSE;
return TRUE;
@@ -401,9 +287,9 @@ static double avg(UINT64* count, double old, double size)
}
#endif
INT32 avc444_decompress(H264_CONTEXT* h264, BYTE op, RECTANGLE_16* regionRects,
INT32 avc444_decompress(H264_CONTEXT* h264, BYTE op, const RECTANGLE_16* regionRects,
UINT32 numRegionRects, const BYTE* pSrcData, UINT32 SrcSize,
RECTANGLE_16* auxRegionRects, UINT32 numAuxRegionRect,
const RECTANGLE_16* auxRegionRects, UINT32 numAuxRegionRect,
const BYTE* pAuxSrcData, UINT32 AuxSrcSize, BYTE* pDstData, DWORD DstFormat,
UINT32 nDstStep, UINT32 nDstWidth, UINT32 nDstHeight, UINT32 codecId)
{
@@ -563,33 +449,38 @@ BOOL h264_context_reset(H264_CONTEXT* h264, UINT32 width, UINT32 height)
h264->width = width;
h264->height = height;
yuv_context_reset(h264->yuv, width, height);
return TRUE;
}
H264_CONTEXT* h264_context_new(BOOL Compressor)
{
H264_CONTEXT* h264;
h264 = (H264_CONTEXT*)calloc(1, sizeof(H264_CONTEXT));
H264_CONTEXT* h264 = (H264_CONTEXT*)calloc(1, sizeof(H264_CONTEXT));
if (!h264)
return NULL;
h264->Compressor = Compressor;
if (Compressor)
if (h264)
{
h264->Compressor = Compressor;
if (Compressor)
{
/* Default compressor settings, may be changed by caller */
h264->BitRate = 1000000;
h264->FrameRate = 30;
}
if (!h264_context_init(h264))
{
free(h264);
return NULL;
}
/* Default compressor settings, may be changed by caller */
h264->BitRate = 1000000;
h264->FrameRate = 30;
}
if (!h264_context_init(h264))
goto fail;
h264->yuv = yuv_context_new(Compressor, 0);
if (!h264->yuv)
goto fail;
return h264;
fail:
h264_context_free(h264);
return NULL;
}
void h264_context_free(H264_CONTEXT* h264)
@@ -601,6 +492,7 @@ void h264_context_free(H264_CONTEXT* h264)
_aligned_free(h264->pYUV444Data[1]);
_aligned_free(h264->pYUV444Data[2]);
_aligned_free(h264->lumaData);
yuv_context_free(h264->yuv);
free(h264);
}
}

View File

@@ -24,6 +24,22 @@
#include <freerdp/api.h>
#include <freerdp/codec/h264.h>
typedef BOOL (*pfnH264SubsystemInit)(H264_CONTEXT* h264);
typedef void (*pfnH264SubsystemUninit)(H264_CONTEXT* h264);
typedef int (*pfnH264SubsystemDecompress)(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize);
typedef int (*pfnH264SubsystemCompress)(H264_CONTEXT* h264, const BYTE** pSrcYuv,
const UINT32* pStride, BYTE** ppDstData, UINT32* pDstSize);
struct _H264_CONTEXT_SUBSYSTEM
{
const char* name;
pfnH264SubsystemInit Init;
pfnH264SubsystemUninit Uninit;
pfnH264SubsystemDecompress Decompress;
pfnH264SubsystemCompress Compress;
};
FREERDP_LOCAL BOOL avc420_ensure_buffer(H264_CONTEXT* h264, UINT32 stride, UINT32 width,
UINT32 height);

View File

@@ -28,6 +28,8 @@
#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>
#include "h264.h"
#ifdef WITH_VAAPI
#if LIBAVUTIL_VERSION_INT >= AV_VERSION_INT(55, 9, 0)
#include <libavutil/hwcontext.h>

View File

@@ -30,6 +30,8 @@
#include "wels/codec_api.h"
#include "wels/codec_ver.h"
#include "h264.h"
typedef void (*pWelsGetCodecVersionEx)(OpenH264Version* pVersion);
typedef long (*pWelsCreateDecoder)(ISVCDecoder** ppDecoder);

View File

@@ -26,21 +26,78 @@ struct _YUV_PROCESS_WORK_PARAM
DWORD DstFormat;
BYTE* dest;
UINT32 nDstStep;
UINT32 y;
UINT32 height;
RECTANGLE_16 rect;
};
typedef struct _YUV_PROCESS_WORK_PARAM YUV_PROCESS_WORK_PARAM;
static void CALLBACK yuv_process_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
struct _YUV_COMBINE_WORK_PARAM
{
YUV_CONTEXT* context;
const BYTE* pYUVData[3];
UINT32 iStride[3];
BYTE* pYUVDstData[3];
UINT32 iDstStride[3];
RECTANGLE_16 rect;
BYTE type;
};
typedef struct _YUV_COMBINE_WORK_PARAM YUV_COMBINE_WORK_PARAM;
struct _YUV_ENCODE_WORK_PARAM
{
YUV_CONTEXT* context;
const BYTE* pSrcData;
DWORD SrcFormat;
UINT32 nSrcStep;
RECTANGLE_16 rect;
BYTE version;
BYTE* pYUVLumaData[3];
BYTE* pYUVChromaData[3];
UINT32 iStride[3];
};
typedef struct _YUV_ENCODE_WORK_PARAM YUV_ENCODE_WORK_PARAM;
static void CALLBACK yuv420_process_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
prim_size_t roi;
YUV_PROCESS_WORK_PARAM* param = (YUV_PROCESS_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
roi.width = param->context->width;
roi.height = param->height;
if (prims->YUV420ToRGB_8u_P3AC4R(param->pYUVData, param->iStride, param->dest, param->nDstStep,
const BYTE* pYUVData[3];
BYTE* dest = param->dest + param->rect.top * param->nDstStep +
param->rect.left * GetBytesPerPixel(param->DstFormat);
pYUVData[0] = param->pYUVData[0] + param->iStride[0] * param->rect.top + param->rect.left;
pYUVData[1] =
param->pYUVData[1] + param->iStride[1] * param->rect.top / 2 + param->rect.left / 2;
pYUVData[2] =
param->pYUVData[2] + param->iStride[2] * param->rect.top / 2 + param->rect.left / 2;
roi.width = param->rect.right - param->rect.left;
roi.height = param->rect.bottom - param->rect.top;
if (prims->YUV420ToRGB_8u_P3AC4R(pYUVData, param->iStride, dest, param->nDstStep,
param->DstFormat, &roi) != PRIMITIVES_SUCCESS)
{
WLog_ERR(TAG, "error when decoding lines");
}
}
static void CALLBACK yuv444_process_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
prim_size_t roi;
YUV_PROCESS_WORK_PARAM* param = (YUV_PROCESS_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
const BYTE* pYUVData[3];
BYTE* dest = param->dest + param->rect.top * param->nDstStep +
param->rect.left * GetBytesPerPixel(param->DstFormat);
pYUVData[0] = param->pYUVData[0] + param->iStride[0] * param->rect.top + param->rect.left;
pYUVData[1] = param->pYUVData[1] + param->iStride[1] * param->rect.top + param->rect.left;
pYUVData[2] = param->pYUVData[2] + param->iStride[2] * param->rect.top + param->rect.left;
roi.width = param->rect.right - param->rect.left;
roi.height = param->rect.bottom - param->rect.top;
if (prims->YUV444ToRGB_8u_P3AC4R(pYUVData, param->iStride, dest, param->nDstStep,
param->DstFormat, &roi) != PRIMITIVES_SUCCESS)
{
WLog_ERR(TAG, "error when decoding lines");
@@ -94,12 +151,14 @@ YUV_CONTEXT* yuv_context_new(BOOL encoder, UINT32 ThreadingFlags)
return ret;
error_threadpool:
free(ret);
yuv_context_free(ret);
return NULL;
}
void yuv_context_free(YUV_CONTEXT* context)
{
if (!context)
return;
if (context->useThreads)
{
CloseThreadpool(context->threadPool);
@@ -108,70 +167,65 @@ void yuv_context_free(YUV_CONTEXT* context)
free(context);
}
BOOL yuv_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3], UINT32 iStride[3],
DWORD DstFormat, BYTE* dest, UINT32 nDstStep)
static INLINE YUV_PROCESS_WORK_PARAM pool_decode_param(const RECTANGLE_16* rect,
YUV_CONTEXT* context,
const BYTE* pYUVData[3],
const UINT32 iStride[3], UINT32 DstFormat,
BYTE* dest, UINT32 nDstStep)
{
UINT32 y, nobjects, i;
PTP_WORK* work_objects = NULL;
YUV_PROCESS_WORK_PARAM* params;
UINT32 waitCount = 0;
BOOL ret = TRUE;
primitives_t* prims = primitives_get();
YUV_PROCESS_WORK_PARAM current = { 0 };
if (!context->useThreads || (primitives_flags(prims) & PRIM_FLAGS_HAVE_EXTGPU))
current.context = context;
current.DstFormat = DstFormat;
current.pYUVData[0] = pYUVData[0];
current.pYUVData[1] = pYUVData[1];
current.pYUVData[2] = pYUVData[2];
current.iStride[0] = iStride[0];
current.iStride[1] = iStride[1];
current.iStride[2] = iStride[2];
current.nDstStep = nDstStep;
current.dest = dest;
current.rect = *rect;
return current;
}
static BOOL allocate_objects(PTP_WORK** work, void** params, size_t size, UINT32 count)
{
{
prim_size_t roi;
roi.width = context->width;
roi.height = context->height;
return prims->YUV420ToRGB_8u_P3AC4R(pYUVData, iStride, dest, nDstStep, DstFormat, &roi) ==
PRIMITIVES_SUCCESS;
PTP_WORK* tmp;
PTP_WORK* cur = *work;
tmp = realloc(cur, sizeof(PTP_WORK*) * count);
if (!tmp)
return FALSE;
*work = tmp;
}
/* case where we use threads */
nobjects = (context->height + context->heightStep - 1) / context->heightStep;
work_objects = (PTP_WORK*)calloc(nobjects, sizeof(PTP_WORK));
if (!work_objects)
{
void* cur = *params;
void* tmp = realloc(cur, size * count);
if (!tmp)
return FALSE;
*params = tmp;
}
return TRUE;
}
static BOOL submit_object(PTP_WORK* work_object, PTP_WORK_CALLBACK cb, const void* param,
YUV_CONTEXT* context)
{
if (!work_object || !param || !context)
return FALSE;
}
params = (YUV_PROCESS_WORK_PARAM*)calloc(nobjects, sizeof(*params));
if (!params)
{
free(work_objects);
*work_object = CreateThreadpoolWork(cb, (void*)param, &context->ThreadPoolEnv);
if (!*work_object)
return FALSE;
}
for (i = 0, y = 0; i < nobjects; i++, y += context->heightStep, waitCount++)
{
params[i].context = context;
params[i].DstFormat = DstFormat;
params[i].pYUVData[0] = pYUVData[0] + (y * iStride[0]);
params[i].pYUVData[1] = pYUVData[1] + ((y / 2) * iStride[1]);
params[i].pYUVData[2] = pYUVData[2] + ((y / 2) * iStride[2]);
params[i].iStride[0] = iStride[0];
params[i].iStride[1] = iStride[1];
params[i].iStride[2] = iStride[2];
params[i].nDstStep = nDstStep;
params[i].dest = dest + (nDstStep * y);
params[i].y = y;
if (y + context->heightStep <= context->height)
params[i].height = context->heightStep;
else
params[i].height = context->height % context->heightStep;
work_objects[i] = CreateThreadpoolWork(yuv_process_work_callback, (void*)&params[i],
&context->ThreadPoolEnv);
if (!work_objects[i])
{
ret = FALSE;
break;
}
SubmitThreadpoolWork(work_objects[i]);
}
SubmitThreadpoolWork(*work_object);
return TRUE;
}
static void free_objects(PTP_WORK* work_objects, void* params, UINT32 waitCount)
{
UINT32 i;
for (i = 0; i < waitCount; i++)
{
WaitForThreadpoolWorkCallbacks(work_objects[i], FALSE);
@@ -180,6 +234,426 @@ BOOL yuv_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3], UINT32 iS
free(work_objects);
free(params);
return ret;
}
static BOOL pool_decode(YUV_CONTEXT* context, PTP_WORK_CALLBACK cb, const BYTE* pYUVData[3],
const UINT32 iStride[3], UINT32 DstFormat, BYTE* dest, UINT32 nDstStep,
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
BOOL rc = FALSE;
UINT32 x, y, nobjects;
PTP_WORK* work_objects = NULL;
YUV_PROCESS_WORK_PARAM* params = NULL;
UINT32 waitCount = 0;
primitives_t* prims = primitives_get();
if (!context->useThreads || (primitives_flags(prims) & PRIM_FLAGS_HAVE_EXTGPU))
{
for (y = 0; y < numRegionRects; y++)
{
YUV_PROCESS_WORK_PARAM current = pool_decode_param(&regionRects[y], context, pYUVData,
iStride, DstFormat, dest, nDstStep);
cb(NULL, &current, NULL);
}
return TRUE;
}
/* case where we use threads */
nobjects = (context->height + context->heightStep - 1) / context->heightStep;
if (!allocate_objects(&work_objects, (void**)&params, sizeof(YUV_PROCESS_WORK_PARAM), nobjects))
goto fail;
for (x = 0; x < numRegionRects; x++)
{
const RECTANGLE_16* rect = &regionRects[x];
const UINT32 height = rect->bottom - rect->top;
const UINT32 steps = (height + context->heightStep / 2) / context->heightStep;
if (waitCount + steps >= nobjects)
{
nobjects *= 2;
if (!allocate_objects(&work_objects, (void**)&params, sizeof(YUV_PROCESS_WORK_PARAM),
nobjects))
goto fail;
}
for (y = 0; y < steps; y++)
{
YUV_PROCESS_WORK_PARAM* cur = &params[waitCount];
RECTANGLE_16 r = *rect;
r.top += y * context->heightStep;
*cur = pool_decode_param(&r, context, pYUVData, iStride, DstFormat, dest, nDstStep);
if (!submit_object(&work_objects[waitCount], cb, cur, context))
goto fail;
waitCount++;
}
}
rc = TRUE;
fail:
free_objects(work_objects, params, waitCount);
return rc;
}
static INLINE BOOL check_rect(const YUV_CONTEXT* yuv, const RECTANGLE_16* rect, UINT32 nDstWidth,
UINT32 nDstHeight)
{
/* Check if the output rectangle is valid in the decoded h264 frame. */
if ((rect->right > yuv->width) || (rect->left > yuv->width))
return FALSE;
if ((rect->top > yuv->height) || (rect->bottom > yuv->height))
return FALSE;
/* Check if the output rectangle is valid in the destination buffer. */
if ((rect->right > nDstWidth) || (rect->left > nDstWidth))
return FALSE;
if ((rect->bottom > nDstHeight) || (rect->top > nDstHeight))
return FALSE;
return TRUE;
}
static void CALLBACK yuv444_combine_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
YUV_COMBINE_WORK_PARAM* param = (YUV_COMBINE_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
YUV_CONTEXT* yuv = param->context;
const RECTANGLE_16* rect = &param->rect;
const UINT32 alignedWidth = yuv->width + ((yuv->width % 16 != 0) ? 16 - yuv->width % 16 : 0);
const UINT32 alignedHeight =
yuv->height + ((yuv->height % 16 != 0) ? 16 - yuv->height % 16 : 0);
WINPR_UNUSED(instance);
WINPR_UNUSED(work);
if (!check_rect(yuv, rect, yuv->width, yuv->height))
return;
if (prims->YUV420CombineToYUV444(param->type, param->pYUVData, param->iStride, alignedWidth,
alignedHeight, param->pYUVDstData, param->iDstStride,
rect) != PRIMITIVES_SUCCESS)
WLog_WARN(TAG, "YUV420CombineToYUV444 failed");
}
static INLINE YUV_COMBINE_WORK_PARAM pool_decode_rect_param(
const RECTANGLE_16* rect, YUV_CONTEXT* context, BYTE type, const BYTE* pYUVData[3],
const UINT32 iStride[3], BYTE* pYUVDstData[3], const UINT32 iDstStride[3])
{
YUV_COMBINE_WORK_PARAM current = { 0 };
current.context = context;
current.pYUVData[0] = pYUVData[0];
current.pYUVData[1] = pYUVData[1];
current.pYUVData[2] = pYUVData[2];
current.pYUVDstData[0] = pYUVDstData[0];
current.pYUVDstData[1] = pYUVDstData[1];
current.pYUVDstData[2] = pYUVDstData[2];
current.iStride[0] = iStride[0];
current.iStride[1] = iStride[1];
current.iStride[2] = iStride[2];
current.iDstStride[0] = iDstStride[0];
current.iDstStride[1] = iDstStride[1];
current.iDstStride[2] = iDstStride[2];
current.type = type;
current.rect = *rect;
return current;
}
static BOOL pool_decode_rect(YUV_CONTEXT* context, BYTE type, const BYTE* pYUVData[3],
const UINT32 iStride[3], BYTE* pYUVDstData[3],
const UINT32 iDstStride[3], const RECTANGLE_16* regionRects,
UINT32 numRegionRects)
{
BOOL rc = FALSE;
UINT32 y;
PTP_WORK* work_objects = NULL;
YUV_COMBINE_WORK_PARAM* params = NULL;
UINT32 waitCount = 0;
PTP_WORK_CALLBACK cb = yuv444_combine_work_callback;
primitives_t* prims = primitives_get();
if (!context->useThreads || (primitives_flags(prims) & PRIM_FLAGS_HAVE_EXTGPU))
{
for (y = 0; y < numRegionRects; y++)
{
YUV_COMBINE_WORK_PARAM current = pool_decode_rect_param(
&regionRects[y], context, type, pYUVData, iStride, pYUVDstData, iDstStride);
cb(NULL, &current, NULL);
}
return TRUE;
}
/* case where we use threads */
if (!allocate_objects(&work_objects, (void**)&params, sizeof(YUV_COMBINE_WORK_PARAM),
numRegionRects))
goto fail;
for (waitCount = 0; waitCount < numRegionRects; waitCount++)
{
YUV_COMBINE_WORK_PARAM* current = &params[waitCount];
*current = pool_decode_rect_param(&regionRects[waitCount], context, type, pYUVData, iStride,
pYUVDstData, iDstStride);
if (!submit_object(&work_objects[waitCount], cb, current, context))
goto fail;
}
rc = TRUE;
fail:
free_objects(work_objects, params, waitCount);
return rc;
}
BOOL yuv444_context_decode(YUV_CONTEXT* context, BYTE type, const BYTE* pYUVData[3],
const UINT32 iStride[3], BYTE* pYUVDstData[3],
const UINT32 iDstStride[3], DWORD DstFormat, BYTE* dest, UINT32 nDstStep,
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
const BYTE* pYUVCDstData[3];
if (!pool_decode_rect(context, type, pYUVData, iStride, pYUVDstData, iDstStride, regionRects,
numRegionRects))
return FALSE;
pYUVCDstData[0] = pYUVDstData[0];
pYUVCDstData[1] = pYUVDstData[1];
pYUVCDstData[2] = pYUVDstData[2];
return pool_decode(context, yuv444_process_work_callback, pYUVCDstData, iDstStride, DstFormat,
dest, nDstStep, regionRects, numRegionRects);
}
BOOL yuv420_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3], const UINT32 iStride[3],
DWORD DstFormat, BYTE* dest, UINT32 nDstStep,
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
return pool_decode(context, yuv420_process_work_callback, pYUVData, iStride, DstFormat, dest,
nDstStep, regionRects, numRegionRects);
}
static void CALLBACK yuv420_encode_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
prim_size_t roi;
YUV_ENCODE_WORK_PARAM* param = (YUV_ENCODE_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
BYTE* pYUVData[3];
const BYTE* src;
WINPR_UNUSED(instance);
WINPR_UNUSED(work);
roi.width = param->rect.right - param->rect.left;
roi.height = param->rect.bottom - param->rect.top;
src = param->pSrcData + param->nSrcStep * param->rect.top +
param->rect.left * GetBytesPerPixel(param->SrcFormat);
pYUVData[0] = param->pYUVLumaData[0] + param->rect.top * param->iStride[0] + param->rect.left;
pYUVData[1] =
param->pYUVLumaData[1] + param->rect.top / 2 * param->iStride[1] + param->rect.left / 2;
pYUVData[2] =
param->pYUVLumaData[2] + param->rect.top / 2 * param->iStride[2] + param->rect.left / 2;
if (prims->RGBToYUV420_8u_P3AC4R(src, param->SrcFormat, param->nSrcStep, pYUVData,
param->iStride, &roi) != PRIMITIVES_SUCCESS)
{
WLog_ERR(TAG, "error when decoding lines");
}
}
static void CALLBACK yuv444v1_encode_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
prim_size_t roi;
YUV_ENCODE_WORK_PARAM* param = (YUV_ENCODE_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
BYTE* pYUVLumaData[3];
BYTE* pYUVChromaData[3];
const BYTE* src;
WINPR_UNUSED(instance);
WINPR_UNUSED(work);
roi.width = param->rect.right - param->rect.left;
roi.height = param->rect.bottom - param->rect.top;
src = param->pSrcData + param->nSrcStep * param->rect.top +
param->rect.left * GetBytesPerPixel(param->SrcFormat);
pYUVLumaData[0] =
param->pYUVLumaData[0] + param->rect.top * param->iStride[0] + param->rect.left;
pYUVLumaData[1] =
param->pYUVLumaData[1] + param->rect.top / 2 * param->iStride[1] + param->rect.left / 2;
pYUVLumaData[2] =
param->pYUVLumaData[2] + param->rect.top / 2 * param->iStride[2] + param->rect.left / 2;
pYUVChromaData[0] =
param->pYUVChromaData[0] + param->rect.top * param->iStride[0] + param->rect.left;
pYUVChromaData[1] =
param->pYUVChromaData[1] + param->rect.top / 2 * param->iStride[1] + param->rect.left / 2;
pYUVChromaData[2] =
param->pYUVChromaData[2] + param->rect.top / 2 * param->iStride[2] + param->rect.left / 2;
if (prims->RGBToAVC444YUV(src, param->SrcFormat, param->nSrcStep, pYUVLumaData, param->iStride,
pYUVChromaData, param->iStride, &roi) != PRIMITIVES_SUCCESS)
{
WLog_ERR(TAG, "error when decoding lines");
}
}
static void CALLBACK yuv444v2_encode_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
PTP_WORK work)
{
prim_size_t roi;
YUV_ENCODE_WORK_PARAM* param = (YUV_ENCODE_WORK_PARAM*)context;
primitives_t* prims = primitives_get();
BYTE* pYUVLumaData[3];
BYTE* pYUVChromaData[3];
const BYTE* src;
WINPR_UNUSED(instance);
WINPR_UNUSED(work);
roi.width = param->rect.right - param->rect.left;
roi.height = param->rect.bottom - param->rect.top;
src = param->pSrcData + param->nSrcStep * param->rect.top +
param->rect.left * GetBytesPerPixel(param->SrcFormat);
pYUVLumaData[0] =
param->pYUVLumaData[0] + param->rect.top * param->iStride[0] + param->rect.left;
pYUVLumaData[1] =
param->pYUVLumaData[1] + param->rect.top / 2 * param->iStride[1] + param->rect.left / 2;
pYUVLumaData[2] =
param->pYUVLumaData[2] + param->rect.top / 2 * param->iStride[2] + param->rect.left / 2;
pYUVChromaData[0] =
param->pYUVChromaData[0] + param->rect.top * param->iStride[0] + param->rect.left;
pYUVChromaData[1] =
param->pYUVChromaData[1] + param->rect.top / 2 * param->iStride[1] + param->rect.left / 2;
pYUVChromaData[2] =
param->pYUVChromaData[2] + param->rect.top / 2 * param->iStride[2] + param->rect.left / 2;
if (prims->RGBToAVC444YUVv2(src, param->SrcFormat, param->nSrcStep, pYUVLumaData,
param->iStride, pYUVChromaData, param->iStride,
&roi) != PRIMITIVES_SUCCESS)
{
WLog_ERR(TAG, "error when decoding lines");
}
}
static INLINE YUV_ENCODE_WORK_PARAM pool_encode_fill(const RECTANGLE_16* rect, YUV_CONTEXT* context,
const BYTE* pSrcData, UINT32 nSrcStep,
UINT32 SrcFormat, const UINT32 iStride[],
BYTE* pYUVLumaData[], BYTE* pYUVChromaData[])
{
YUV_ENCODE_WORK_PARAM current = { 0 };
current.context = context;
current.pSrcData = pSrcData;
current.SrcFormat = SrcFormat;
current.nSrcStep = nSrcStep;
current.pYUVLumaData[0] = pYUVLumaData[0];
current.pYUVLumaData[1] = pYUVLumaData[1];
current.pYUVLumaData[2] = pYUVLumaData[2];
if (pYUVChromaData)
{
current.pYUVChromaData[0] = pYUVChromaData[0];
current.pYUVChromaData[1] = pYUVChromaData[1];
current.pYUVChromaData[2] = pYUVChromaData[2];
}
current.iStride[0] = iStride[0];
current.iStride[1] = iStride[1];
current.iStride[2] = iStride[2];
current.rect = *rect;
return current;
}
static BOOL pool_encode(YUV_CONTEXT* context, PTP_WORK_CALLBACK cb, const BYTE* pSrcData,
UINT32 nSrcStep, UINT32 SrcFormat, const UINT32 iStride[],
BYTE* pYUVLumaData[], BYTE* pYUVChromaData[],
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
BOOL rc = FALSE;
primitives_t* prims = primitives_get();
UINT32 x, y, nobjects;
PTP_WORK* work_objects = NULL;
YUV_ENCODE_WORK_PARAM* params = NULL;
UINT32 waitCount = 0;
if (!context->useThreads || (primitives_flags(prims) & PRIM_FLAGS_HAVE_EXTGPU))
{
for (x = 0; x < numRegionRects; x++)
{
YUV_ENCODE_WORK_PARAM current =
pool_encode_fill(&regionRects[x], context, pSrcData, nSrcStep, SrcFormat, iStride,
pYUVLumaData, pYUVChromaData);
cb(NULL, &current, NULL);
}
return TRUE;
}
/* case where we use threads */
nobjects = (context->height + context->heightStep - 1) / context->heightStep;
if (!allocate_objects(&work_objects, (void**)&params, sizeof(YUV_ENCODE_WORK_PARAM), nobjects))
goto fail;
for (x = 0; x < numRegionRects; x++)
{
const RECTANGLE_16* rect = &regionRects[x];
const UINT32 height = rect->bottom - rect->top;
const UINT32 steps = (height + context->heightStep / 2) / context->heightStep;
if (waitCount + steps >= nobjects)
{
nobjects *= 2;
if (!allocate_objects(&work_objects, (void**)&params, sizeof(YUV_ENCODE_WORK_PARAM),
nobjects))
goto fail;
}
for (y = 0; y < steps; y++)
{
RECTANGLE_16 r = *rect;
YUV_ENCODE_WORK_PARAM* current = &params[waitCount];
r.top += y * context->heightStep;
*current = pool_encode_fill(&r, context, pSrcData, nSrcStep, SrcFormat, iStride,
pYUVLumaData, pYUVChromaData);
if (!submit_object(&work_objects[waitCount], cb, current, context))
goto fail;
waitCount++;
}
}
rc = TRUE;
fail:
free_objects(work_objects, params, waitCount);
return rc;
}
BOOL yuv420_context_encode(YUV_CONTEXT* context, const BYTE* pSrcData, UINT32 nSrcStep,
UINT32 SrcFormat, const UINT32 iStride[], BYTE* pYUVData[],
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
if (!context || !pSrcData || !iStride || !pYUVData || !regionRects)
return FALSE;
return pool_encode(context, yuv420_encode_work_callback, pSrcData, nSrcStep, SrcFormat, iStride,
pYUVData, NULL, regionRects, numRegionRects);
}
BOOL yuv444_context_encode(YUV_CONTEXT* context, BYTE version, const BYTE* pSrcData,
UINT32 nSrcStep, UINT32 SrcFormat, const UINT32 iStride[],
BYTE* pYUVLumaData[], BYTE* pYUVChromaData[],
const RECTANGLE_16* regionRects, UINT32 numRegionRects)
{
PTP_WORK_CALLBACK cb;
switch (version)
{
case 1:
cb = yuv444v1_encode_work_callback;
break;
case 2:
cb = yuv444v2_encode_work_callback;
break;
default:
return FALSE;
}
return pool_encode(context, cb, pSrcData, nSrcStep, SrcFormat, iStride, pYUVLumaData,
pYUVChromaData, regionRects, numRegionRects);
}

View File

@@ -640,7 +640,7 @@ static pstatus_t general_RGBToYUV444_8u_P3AC4R(const BYTE* pSrc, UINT32 SrcForma
}
static INLINE pstatus_t general_RGBToYUV420_BGRX(const BYTE* pSrc, UINT32 srcStep, BYTE* pDst[3],
UINT32 dstStep[3], const prim_size_t* roi)
const UINT32 dstStep[3], const prim_size_t* roi)
{
UINT32 x, y, i;
size_t x1 = 0, x2 = 4, x3 = srcStep, x4 = srcStep + 4;
@@ -706,7 +706,7 @@ static INLINE pstatus_t general_RGBToYUV420_BGRX(const BYTE* pSrc, UINT32 srcSte
}
static INLINE pstatus_t general_RGBToYUV420_RGBX(const BYTE* pSrc, UINT32 srcStep, BYTE* pDst[3],
UINT32 dstStep[3], const prim_size_t* roi)
const UINT32 dstStep[3], const prim_size_t* roi)
{
UINT32 x, y, i;
size_t x1 = 0, x2 = 4, x3 = srcStep, x4 = srcStep + 4;
@@ -772,7 +772,7 @@ static INLINE pstatus_t general_RGBToYUV420_RGBX(const BYTE* pSrc, UINT32 srcSte
}
static INLINE pstatus_t general_RGBToYUV420_ANY(const BYTE* pSrc, UINT32 srcFormat, UINT32 srcStep,
BYTE* pDst[3], UINT32 dstStep[3],
BYTE* pDst[3], const UINT32 dstStep[3],
const prim_size_t* roi)
{
const UINT32 bpp = GetBytesPerPixel(srcFormat);
@@ -849,7 +849,7 @@ static INLINE pstatus_t general_RGBToYUV420_ANY(const BYTE* pSrc, UINT32 srcForm
}
static pstatus_t general_RGBToYUV420_8u_P3AC4R(const BYTE* pSrc, UINT32 srcFormat, UINT32 srcStep,
BYTE* pDst[3], UINT32 dstStep[3],
BYTE* pDst[3], const UINT32 dstStep[3],
const prim_size_t* roi)
{
switch (srcFormat)

View File

@@ -245,6 +245,7 @@ static int shadow_encoder_init_h264(rdpShadowEncoder* encoder)
encoder->h264->BitRate = encoder->server->h264BitRate;
encoder->h264->FrameRate = encoder->server->h264FrameRate;
encoder->h264->QP = encoder->server->h264QP;
encoder->codecs |= FREERDP_CODEC_AVC420 | FREERDP_CODEC_AVC444;
return 1;
fail: