video: an implementation of MS-RDPEVOR for X11

Implements the decoding of video streams using common H264 decoders, together with a
trivial client-side frame-rate feedback algorithm.

Sponsored by: Rangee GmbH (http://www.rangee.de)
This commit is contained in:
David Fort 2017-12-15 11:15:24 +01:00
parent b84839b21c
commit a07efb73ec
14 changed files with 938 additions and 6 deletions

View File

@ -1,7 +1,7 @@
# FreeRDP: A Remote Desktop Protocol Implementation
# FreeRDP cmake build script
#
# Copyright 2013 Marc-Andre Moreau <marcandre.moreau@gmail.com>
# Copyright 2018 David Fort <contact@hardening-consulting.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.

View File

@ -100,7 +100,7 @@ static UINT video_read_tsmm_presentation_req(VideoClientContext *context, wStrea
Stream_Read_UINT8(s, req.PresentationId);
Stream_Read_UINT8(s, req.Version);
Stream_Read_UINT8(s, req.Command);
Stream_Seek_UINT8(s); /* FrameRate - reserved and ignored */
Stream_Read_UINT8(s, req.FrameRate); /* FrameRate - reserved and ignored */
Stream_Seek_UINT16(s); /* AverageBitrateKbps reserved and ignored */
Stream_Seek_UINT16(s); /* reserved */

View File

@ -50,6 +50,8 @@ set(${MODULE_PREFIX}_SRCS
xf_graphics.h
xf_keyboard.c
xf_keyboard.h
xf_video.c
xf_video.h
xf_window.c
xf_window.h
xf_client.c

View File

@ -31,7 +31,7 @@
#include "xf_rail.h"
#include "xf_cliprdr.h"
#include "xf_disp.h"
#include "xf_video.h"
void xf_OnChannelConnectedEventHandler(rdpContext* context, ChannelConnectedEventArgs* e)
{
@ -69,6 +69,18 @@ void xf_OnChannelConnectedEventHandler(rdpContext* context, ChannelConnectedEven
{
xf_disp_init(xfc, (DispClientContext*)e->pInterface);
}
else if (strcmp(e->name, GEOMETRY_DVC_CHANNEL_NAME) == 0)
{
xf_video_geometry_init(xfc, (GeometryClientContext*)e->pInterface);
}
else if (strcmp(e->name, VIDEO_CONTROL_DVC_CHANNEL_NAME) == 0)
{
xf_video_control_init(xfc, (VideoClientContext*)e->pInterface);
}
else if (strcmp(e->name, VIDEO_DATA_DVC_CHANNEL_NAME) == 0)
{
xf_video_data_init(xfc, (VideoClientContext*)e->pInterface);
}
}
void xf_OnChannelDisconnectedEventHandler(rdpContext* context, ChannelDisconnectedEventArgs* e)

View File

@ -29,6 +29,8 @@
#include <freerdp/client/rdpgfx.h>
#include <freerdp/client/encomsp.h>
#include <freerdp/client/disp.h>
#include <freerdp/client/geometry.h>
#include <freerdp/client/video.h>
int xf_on_channel_connected(freerdp* instance, const char* name, void* pInterface);
int xf_on_channel_disconnected(freerdp* instance, const char* name, void* pInterface);

View File

@ -97,6 +97,7 @@
#include "xf_input.h"
#include "xf_cliprdr.h"
#include "xf_disp.h"
#include "xf_video.h"
#include "xf_monitor.h"
#include "xf_graphics.h"
#include "xf_keyboard.h"
@ -1279,6 +1280,13 @@ static BOOL xf_post_connect(freerdp* instance)
return FALSE;
}
if (!(xfc->xfVideo = xf_video_new(xfc)))
{
xf_clipboard_free(xfc->clipboard);
xf_disp_free(xfc->xfDisp);
return FALSE;
}
EventArgsInit(&e, "xfreerdp");
e.width = settings->DesktopWidth;
e.height = settings->DesktopHeight;
@ -1529,7 +1537,7 @@ static void* xf_client_thread(void* param)
due.QuadPart = 0;
if (!SetWaitableTimer(timer, &due, 100, NULL, NULL, FALSE))
if (!SetWaitableTimer(timer, &due, 20, NULL, NULL, FALSE))
{
goto disconnect;
}

631
client/X11/xf_video.c Normal file
View File

@ -0,0 +1,631 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* Video Optimized Remoting Virtual Channel Extension for X11
*
* Copyright 2017 David Fort <contact@hardening-consulting.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/sysinfo.h>
#include <winpr/interlocked.h>
#include <freerdp/client/geometry.h>
#include <freerdp/client/video.h>
#include <freerdp/primitives.h>
#include <freerdp/codec/h264.h>
#include <freerdp/codec/yuv.h>
#include "xf_video.h"
#define TAG CLIENT_TAG("video")

/* Rate value meaning "no frame-rate limit" in FramerateOverride feedback
 * notifications (see xf_video_timer). */
#define XF_VIDEO_UNLIMITED_RATE 31

/* Serialized form of the MFVideoFormat_H264 media sub-type GUID ('H264'
 * FourCC followed by the standard MediaFoundation sub-type suffix), stored
 * as raw bytes so it can be memcmp()ed directly against the 16-byte
 * VideoSubtypeId field of a TSMM_PRESENTATION_REQUEST. */
BYTE MFVideoFormat_H264[] = {'H', '2', '6', '4',
                             0x00, 0x00,
                             0x10, 0x00,
                             0x80, 0x00,
                             0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71};
typedef struct _xfPresentationContext xfPresentationContext;

/* Per-client video state, one per xfContext (see xf_video_new). */
struct _xfVideoContext
{
	xfContext *xfc;
	wQueue *frames;               /* frames scheduled for later display, consumed by xf_video_timer */
	CRITICAL_SECTION framesLock;  /* protects `frames` between channel thread and timer */
	wBufferPool *surfacePool;     /* recycled pixel buffers (width * height * 4 bytes) */
	UINT32 publishedFrames;       /* frames shown since the last feedback message */
	UINT32 droppedFrames;         /* frames skipped since the last feedback message */
	UINT32 lastSentRate;          /* last frame rate sent to the server (XF_VIDEO_UNLIMITED_RATE = unlimited) */
	UINT64 nextFeedbackTime;      /* tick count after which feedback may be sent again */
	xfPresentationContext *currentPresentation;
};

/* A decoded frame waiting in xfVideoContext.frames for its publish time. */
struct _xfVideoFrame
{
	UINT64 publishTime;           /* tick count at which the frame should be shown */
	UINT64 hnsDuration;           /* duration in 100ns units, as received from the server */
	UINT32 x, y, w, h;            /* destination rectangle on the drawable */
	BYTE *surfaceData;            /* BGRX pixels, owned, returned to surfacePool on free */
	xfPresentationContext *presentation; /* ref-counted owner presentation */
};
typedef struct _xfVideoFrame xfVideoFrame;

/* One active TSMM presentation: decoder state plus the X11 surface it is
 * rendered to. Reference counted: the queue holds one ref per pending frame. */
struct _xfPresentationContext
{
	xfContext *xfc;
	xfVideoContext *xfVideo;
	BYTE PresentationId;
	UINT32 SourceWidth, SourceHeight;   /* size of the decoded video */
	UINT32 ScaledWidth, ScaledHeight;   /* requested output size (not used for scaling yet) */
	MAPPED_GEOMETRY *geometry;          /* where to blit, from the geometry channel; may be cleared */
	UINT64 startTimeStamp;
	UINT64 publishOffset;
	H264_CONTEXT *h264;
	YUV_CONTEXT *yuv;
	wStream *currentSample;             /* reassembly buffer for multi-packet samples */
	BYTE *surfaceData;                  /* pixel storage behind `surface`, from surfacePool */
	XImage *surface;
	UINT64 lastPublishTime, nextPublishTime;
	volatile LONG refCounter;           /* InterlockedIncrement/Decrement */
};
/**
 * Drop one reference on a presentation context; frees it when the counter
 * reaches zero. Safe to call with NULL.
 */
static void xfPresentationContext_unref(xfPresentationContext *c)
{
	xfVideoContext *xfVideo;

	if (!c)
		return;

	if (InterlockedDecrement(&c->refCounter) != 0)
		return;

	xfVideo = c->xfVideo;

	/* detach from the mapped geometry so no further callbacks reach us */
	if (c->geometry)
	{
		c->geometry->MappedGeometryUpdate = NULL;
		c->geometry->MappedGeometryClear = NULL;
		c->geometry->custom = NULL;
	}

	h264_context_free(c->h264);
	Stream_Free(c->currentSample, TRUE);
	/* XFree (not XDestroyImage) on purpose: the pixel data belongs to the
	 * buffer pool and must not be freed by Xlib. */
	XFree(c->surface);
	BufferPool_Return(xfVideo->surfacePool, c->surfaceData);
	yuv_context_free(c->yuv);
	free(c);
}
/**
 * Allocate a presentation context with its H264 decoder, YUV converter,
 * sample reassembly stream and X11 surface.
 *
 * @param xfc            client context (must have xfVideo initialized)
 * @param PresentationId id assigned by the server
 * @param width, height  source video dimensions
 * @return a context with refCounter == 1, or NULL on failure
 */
static xfPresentationContext *xfPresentationContext_new(xfContext *xfc, BYTE PresentationId, UINT32 width, UINT32 height)
{
	xfVideoContext *xfVideo = xfc->xfVideo;
	xfPresentationContext *ret = calloc(1, sizeof(*ret));
	if (!ret)
		return NULL;

	ret->xfc = xfc;
	ret->PresentationId = PresentationId;

	ret->h264 = h264_context_new(FALSE);
	if (!ret->h264)
	{
		WLog_ERR(TAG, "unable to create a h264 context");
		goto error_h264;
	}
	h264_context_reset(ret->h264, width, height);

	ret->currentSample = Stream_New(NULL, 4096);
	if (!ret->currentSample)
	{
		WLog_ERR(TAG, "unable to create current packet stream");
		goto error_currentSample;
	}

	/* 4 bytes per pixel (BGRX) */
	ret->surfaceData = BufferPool_Take(xfVideo->surfacePool, width * height * 4);
	if (!ret->surfaceData)
	{
		WLog_ERR(TAG, "unable to allocate surfaceData");
		goto error_surfaceData;
	}

	ret->surface = XCreateImage(xfc->display, xfc->visual, xfc->depth, ZPixmap, 0,
			(char *)ret->surfaceData, width, height, 8, width * 4);
	if (!ret->surface)
	{
		WLog_ERR(TAG, "unable to create surface");
		goto error_surface;
	}

	ret->yuv = yuv_context_new(FALSE);
	if (!ret->yuv)
	{
		WLog_ERR(TAG, "unable to create YUV decoder");
		goto error_yuv;
	}

	yuv_context_reset(ret->yuv, width, height);

	ret->refCounter = 1;
	return ret;

	/* cleanup in reverse acquisition order */
error_yuv:
	XFree(ret->surface);
error_surface:
	BufferPool_Return(xfVideo->surfacePool, ret->surfaceData);
error_surfaceData:
	Stream_Free(ret->currentSample, TRUE);
error_currentSample:
	h264_context_free(ret->h264);
error_h264:
	free(ret);
	return NULL;
}
/**
 * Allocate the per-client video context: the pending-frame queue, the shared
 * surface buffer pool and the lock protecting the queue.
 *
 * @param xfc owning client context
 * @return a new xfVideoContext (caller owns it), or NULL on failure
 */
xfVideoContext *xf_video_new(xfContext *xfc)
{
	xfVideoContext *video = calloc(1, sizeof(xfVideoContext));
	if (!video)
		return NULL;

	video->frames = Queue_New(TRUE, 10, 2);
	if (!video->frames)
	{
		WLog_ERR(TAG, "unable to allocate frames queue");
		free(video);
		return NULL;
	}

	video->surfacePool = BufferPool_New(FALSE, 0, 16);
	if (!video->surfacePool)
	{
		WLog_ERR(TAG, "unable to create surface pool");
		Queue_Free(video->frames);
		free(video);
		return NULL;
	}

	if (!InitializeCriticalSectionAndSpinCount(&video->framesLock, 4000))
	{
		WLog_ERR(TAG, "unable to initialize frames lock");
		BufferPool_Free(video->surfacePool);
		Queue_Free(video->frames);
		free(video);
		return NULL;
	}

	video->xfc = xfc;
	/* start unconstrained; the feedback loop will lower the rate if needed */
	video->lastSentRate = XF_VIDEO_UNLIMITED_RATE;
	return video;
}
/** Record the geometry DVC channel interface once it is connected. */
void xf_video_geometry_init(xfContext *xfc, GeometryClientContext *geom)
{
	xfc->geometry = geom;
}
/* Geometry updates need no action: the destination rectangle is re-read from
 * the mapped geometry each time a frame is displayed. */
static BOOL xf_video_onMappedGeometryUpdate(MAPPED_GEOMETRY *geometry)
{
	return TRUE;
}
/**
 * Called when the server clears a mapped geometry: detach it from the
 * presentation so we stop rendering into a stale rectangle.
 */
static BOOL xf_video_onMappedGeometryClear(MAPPED_GEOMETRY *geometry)
{
	xfPresentationContext *presentation = (xfPresentationContext *)geometry->custom;

	/* xfPresentationContext_unref() detaches from the geometry by clearing
	 * geometry->custom; guard against a clear notification arriving after
	 * that detach instead of dereferencing NULL. */
	if (!presentation)
		return TRUE;

	presentation->geometry = NULL;
	return TRUE;
}
/**
 * Handle a TSMM_PRESENTATION_REQUEST (MS-RDPEVOR): start a new H264
 * presentation bound to a mapped geometry, or stop the current one.
 *
 * Malformed or unsupported requests are logged and ignored (CHANNEL_RC_OK)
 * so a misbehaving server does not kill the channel.
 *
 * @return CHANNEL_RC_OK, CHANNEL_RC_NO_MEMORY, or the PresentationResponse
 *         transmission status for a successful start.
 */
static UINT xf_video_PresentationRequest(VideoClientContext* context, TSMM_PRESENTATION_REQUEST *req)
{
	xfContext *xfc = context->custom;
	xfVideoContext *xfVideo = xfc->xfVideo;
	xfPresentationContext *presentation;
	UINT ret = CHANNEL_RC_OK;

	presentation = xfVideo->currentPresentation;

	if (req->Command == TSMM_START_PRESENTATION)
	{
		MAPPED_GEOMETRY *geom;
		TSMM_PRESENTATION_RESPONSE resp;

		/* only H264 sub-type streams are supported */
		if (memcmp(req->VideoSubtypeId, MFVideoFormat_H264, 16) != 0)
		{
			WLog_ERR(TAG, "not a H264 video, ignoring request");
			return CHANNEL_RC_OK;
		}

		if (presentation)
		{
			if (presentation->PresentationId == req->PresentationId)
			{
				WLog_ERR(TAG, "ignoring start request for existing presentation %d", req->PresentationId);
				return CHANNEL_RC_OK;
			}

			/* fix: log the id of the presentation being released, not the
			 * id of the incoming request */
			WLog_ERR(TAG, "releasing current presentation %d", presentation->PresentationId);
			xfPresentationContext_unref(presentation);
			presentation = xfVideo->currentPresentation = NULL;
		}

		if (!xfc->geometry)
		{
			WLog_ERR(TAG, "geometry channel not ready, ignoring request");
			return CHANNEL_RC_OK;
		}

		geom = HashTable_GetItemValue(xfc->geometry->geometries, &(req->GeometryMappingId));
		if (!geom)
		{
			WLog_ERR(TAG, "geometry mapping 0x%"PRIx64" not registered", req->GeometryMappingId);
			return CHANNEL_RC_OK;
		}

		WLog_DBG(TAG, "creating presentation 0x%x", req->PresentationId);
		presentation = xfPresentationContext_new(xfc, req->PresentationId, req->SourceWidth, req->SourceHeight);
		if (!presentation)
		{
			WLog_ERR(TAG, "unable to create presentation context");
			return CHANNEL_RC_NO_MEMORY;
		}

		xfVideo->currentPresentation = presentation;
		presentation->xfVideo = xfVideo;
		presentation->geometry = geom;
		presentation->SourceWidth = req->SourceWidth;
		presentation->SourceHeight = req->SourceHeight;
		presentation->ScaledWidth = req->ScaledWidth;
		presentation->ScaledHeight = req->ScaledHeight;

		/* attach to the mapped geometry so we receive update/clear events */
		geom->custom = presentation;
		geom->MappedGeometryUpdate = xf_video_onMappedGeometryUpdate;
		geom->MappedGeometryClear = xf_video_onMappedGeometryClear;

		/* send back response */
		resp.PresentationId = req->PresentationId;
		ret = context->PresentationResponse(context, &resp);
	}
	else if (req->Command == TSMM_STOP_PRESENTATION)
	{
		WLog_DBG(TAG, "stopping presentation 0x%x", req->PresentationId);
		if (!presentation)
		{
			WLog_ERR(TAG, "unknown presentation to stop %d", req->PresentationId);
			return CHANNEL_RC_OK;
		}

		xfVideo->currentPresentation = NULL;
		xfPresentationContext_unref(presentation);
	}

	/* fix: propagate the PresentationResponse status instead of discarding
	 * it and always returning CHANNEL_RC_OK */
	return ret;
}
/**
 * Convert the last frame decoded by the presentation's H264 context to BGRX
 * pixels in `dest` (stride = decoded width * 4).
 *
 * @return TRUE on success, FALSE if the YUV conversion failed
 */
static BOOL yuv_to_rgb(xfPresentationContext *presentation, BYTE *dest)
{
	H264_CONTEXT *h264 = presentation->h264;
	const BYTE *planes[3];

	planes[0] = h264->pYUVData[0];
	planes[1] = h264->pYUVData[1];
	planes[2] = h264->pYUVData[2];

	if (!yuv_context_decode(presentation->yuv, planes, h264->iStride,
	                        PIXEL_FORMAT_BGRX32, dest, h264->width * 4))
	{
		WLog_ERR(TAG, "error in yuv_to_rgb conversion");
		return FALSE;
	}

	return TRUE;
}
/**
 * Free a queued frame: return its pixel buffer to the pool, drop its
 * presentation reference, and NULL out the caller's pointer.
 */
static void xf_video_frame_free(xfVideoFrame **pframe)
{
	xfVideoFrame *frame = *pframe;

	/* Return the pixel buffer BEFORE dropping the presentation reference:
	 * the unref may be the last one and free the presentation, after which
	 * frame->presentation->xfVideo would be a use-after-free. */
	BufferPool_Return(frame->presentation->xfVideo->surfacePool, frame->surfaceData);
	xfPresentationContext_unref(frame->presentation);
	free(frame);
	*pframe = NULL;
}
/**
 * Periodic timer: display the most recent frame whose publish time has
 * passed (dropping older overdue frames), then roughly once per second send
 * frame-rate feedback to the server based on the published/dropped counts.
 */
static void xf_video_timer(xfContext *xfc, TimerEventArgs *timer)
{
	xfVideoContext *xfVideo = xfc->xfVideo;
	xfPresentationContext *presentation = xfVideo->currentPresentation;
	xfVideoFrame *peekFrame, *frame = NULL;
	BYTE feedbackId;

	if (!presentation)
		return;

	/* Capture the id for the feedback notification now. Previously
	 * `presentation` was read uninitialized when no frame was due this
	 * tick, and read after its last reference may have been dropped. */
	feedbackId = presentation->PresentationId;

	/* pop every overdue frame; keep only the newest, count the rest dropped */
	EnterCriticalSection(&xfVideo->framesLock);
	do
	{
		peekFrame = (xfVideoFrame *)Queue_Peek(xfVideo->frames);
		if (!peekFrame)
			break;

		if (peekFrame->publishTime > timer->now)
			break;

		if (frame)
		{
			/* free skipped frame */
			WLog_DBG(TAG, "dropping frame @%"PRIu64, frame->publishTime);
			xfVideo->droppedFrames++;
			xf_video_frame_free(&frame);
		}
		frame = peekFrame;
		Queue_Dequeue(xfVideo->frames);
	}
	while (1);
	LeaveCriticalSection(&xfVideo->framesLock);

	if (!frame)
		goto treat_feedback;

	presentation = frame->presentation;
	feedbackId = presentation->PresentationId;
	xfVideo->publishedFrames++;

	/* copy decoded pixels to the presentation surface and blit it */
	memcpy(presentation->surfaceData, frame->surfaceData, frame->w * frame->h * 4);
	XPutImage(xfc->display, xfc->drawable, xfc->gc, presentation->surface,
			0, 0,
			frame->x, frame->y, frame->w, frame->h);

	/* may drop the last reference on the presentation */
	xfPresentationContext_unref(presentation);
	BufferPool_Return(xfVideo->surfacePool, frame->surfaceData);
	free(frame);

treat_feedback:
	if (xfVideo->nextFeedbackTime < timer->now)
	{
		/* we can compute some feedback only if we have some published frames */
		if (xfVideo->publishedFrames)
		{
			UINT32 computedRate;

			if (xfVideo->droppedFrames)
			{
				/**
				 * some dropped frames, looks like we're asking too many frames per seconds,
				 * try lowering rate. We go directly from unlimited rate to 24 frames/seconds
				 * otherwise we lower rate by 2 frames by seconds
				 */
				if (xfVideo->lastSentRate == XF_VIDEO_UNLIMITED_RATE)
					computedRate = 24;
				else
				{
					computedRate = xfVideo->lastSentRate - 2;
					if (!computedRate)
						computedRate = 2;
				}
			}
			else
			{
				/**
				 * we treat all frames ok, so either ask the server to send more,
				 * or stay unlimited
				 */
				if (xfVideo->lastSentRate == XF_VIDEO_UNLIMITED_RATE)
					computedRate = XF_VIDEO_UNLIMITED_RATE; /* stay unlimited */
				else
				{
					computedRate = xfVideo->lastSentRate + 2;
					if (computedRate > XF_VIDEO_UNLIMITED_RATE)
						computedRate = XF_VIDEO_UNLIMITED_RATE;
				}
			}

			if (computedRate != xfVideo->lastSentRate)
			{
				TSMM_CLIENT_NOTIFICATION notif;
				notif.PresentationId = feedbackId;
				notif.NotificationType = TSMM_CLIENT_NOTIFICATION_TYPE_FRAMERATE_OVERRIDE;
				if (computedRate == XF_VIDEO_UNLIMITED_RATE)
				{
					notif.FramerateOverride.Flags = 0x01;
					notif.FramerateOverride.DesiredFrameRate = 0x00;
				}
				else
				{
					notif.FramerateOverride.Flags = 0x02;
					notif.FramerateOverride.DesiredFrameRate = computedRate;
				}

				xfVideo->xfc->video->ClientNotification(xfVideo->xfc->video, &notif);
				xfVideo->lastSentRate = computedRate;

				WLog_DBG(TAG, "server notified with rate %d published=%d dropped=%d", xfVideo->lastSentRate,
						xfVideo->publishedFrames, xfVideo->droppedFrames);
			}
		}

		WLog_DBG(TAG, "currentRate=%d published=%d dropped=%d", xfVideo->lastSentRate,
				xfVideo->publishedFrames, xfVideo->droppedFrames);

		xfVideo->droppedFrames = 0;
		xfVideo->publishedFrames = 0;
		/* next feedback in one second */
		xfVideo->nextFeedbackTime = timer->now + 1000;
	}
}
/**
 * Handle a TSMM_VIDEO_DATA packet: reassemble multi-packet samples, decode
 * the completed sample with H264, and either display it immediately (if due
 * within ~10ms) or queue it for the timer with a computed publish time.
 *
 * @return CHANNEL_RC_OK (decode errors are tolerated) or CHANNEL_RC_NO_MEMORY
 */
static UINT xf_video_VideoData(VideoClientContext* context, TSMM_VIDEO_DATA *data)
{
	xfContext *xfc = context->custom;
	xfVideoContext *xfVideo = xfc->xfVideo;
	xfPresentationContext *presentation;
	int status;

	presentation = xfVideo->currentPresentation;
	if (!presentation)
	{
		WLog_ERR(TAG, "no current presentation");
		return CHANNEL_RC_OK;
	}

	if (presentation->PresentationId != data->PresentationId)
	{
		WLog_ERR(TAG, "current presentation id=%d doesn't match data id=%d", presentation->PresentationId,
				data->PresentationId);
		return CHANNEL_RC_OK;
	}

	/* accumulate this packet into the current sample */
	if (!Stream_EnsureRemainingCapacity(presentation->currentSample, data->cbSample))
	{
		WLog_ERR(TAG, "unable to expand the current packet");
		return CHANNEL_RC_NO_MEMORY;
	}

	Stream_Write(presentation->currentSample, data->pSample, data->cbSample);

	/* sample complete once the last packet has arrived */
	if (data->CurrentPacketIndex == data->PacketsInSample)
	{
		H264_CONTEXT *h264 = presentation->h264;
		UINT64 startTime = GetTickCount64(), timeAfterH264;
		MAPPED_GEOMETRY *geom = presentation->geometry;

		Stream_SealLength(presentation->currentSample);
		Stream_SetPosition(presentation->currentSample, 0);

		status = h264->subsystem->Decompress(h264, Stream_Pointer(presentation->currentSample),
				Stream_Length(presentation->currentSample));
		if (status == 0)
			return CHANNEL_RC_OK;

		/* decode errors are tolerated so one bad sample doesn't kill the channel */
		if (status < 0)
			return CHANNEL_RC_OK;

		timeAfterH264 = GetTickCount64();
		if (data->SampleNumber == 1)
		{
			/* first sample of the presentation anchors the timeline */
			presentation->lastPublishTime = startTime;
		}

		/* hnsDuration is in 100ns units -> milliseconds */
		presentation->lastPublishTime += (data->hnsDuration / 10000);
		if (presentation->lastPublishTime <= timeAfterH264 + 10)
		{
			int dropped = 0;

			/* if the frame is to be published in less than 10 ms, let's consider it's now */
			yuv_to_rgb(presentation, presentation->surfaceData);

			XPutImage(xfc->display, xfc->drawable, xfc->gc, presentation->surface,
					0, 0,
					geom->topLevelLeft + geom->left + geom->geometry.boundingRect.x,
					geom->topLevelTop + geom->top + geom->geometry.boundingRect.y,
					presentation->SourceWidth, presentation->SourceHeight);

			xfVideo->publishedFrames++;

			/* cleanup previously scheduled frames */
			EnterCriticalSection(&xfVideo->framesLock);
			while (Queue_Count(xfVideo->frames) > 0)
			{
				xfVideoFrame *frame = Queue_Dequeue(xfVideo->frames);
				if (frame)
				{
					xfVideo->droppedFrames++;
					xf_video_frame_free(&frame);
					dropped++;
				}
			}
			LeaveCriticalSection(&xfVideo->framesLock);

			if (dropped)
				WLog_DBG(TAG, "showing frame (%d dropped)", dropped);
		}
		else
		{
			BOOL enqueueResult;
			xfVideoFrame *frame = calloc(1, sizeof(*frame));
			if (!frame)
			{
				WLog_ERR(TAG, "unable to create frame");
				return CHANNEL_RC_NO_MEMORY;
			}

			frame->presentation = presentation;
			frame->publishTime = presentation->lastPublishTime;
			frame->x = geom->topLevelLeft + geom->left + geom->geometry.boundingRect.x;
			frame->y = geom->topLevelTop + geom->top + geom->geometry.boundingRect.y;
			frame->w = presentation->SourceWidth;
			frame->h = presentation->SourceHeight;

			frame->surfaceData = BufferPool_Take(xfVideo->surfacePool, frame->w * frame->h * 4);
			if (!frame->surfaceData)
			{
				WLog_ERR(TAG, "unable to allocate frame data");
				free(frame);
				return CHANNEL_RC_NO_MEMORY;
			}

			if (!yuv_to_rgb(presentation, frame->surfaceData))
			{
				WLog_ERR(TAG, "error during YUV->RGB conversion");
				BufferPool_Return(xfVideo->surfacePool, frame->surfaceData);
				free(frame);
				return CHANNEL_RC_NO_MEMORY;
			}

			/* the queued frame holds a reference on the presentation */
			InterlockedIncrement(&presentation->refCounter);

			EnterCriticalSection(&xfVideo->framesLock);
			enqueueResult = Queue_Enqueue(xfVideo->frames, frame);
			LeaveCriticalSection(&xfVideo->framesLock);

			if (!enqueueResult)
			{
				WLog_ERR(TAG, "unable to enqueue frame");
				xf_video_frame_free(&frame);
				return CHANNEL_RC_NO_MEMORY;
			}

			/* fix: the difference of two UINT64s needs PRIu64, not PRIu32
			 * (mismatched printf specifier is undefined behavior) */
			WLog_DBG(TAG, "scheduling frame in %"PRIu64" ms", (frame->publishTime-startTime));
		}
	}

	return CHANNEL_RC_OK;
}
/** Wire the VIDEO control DVC channel interface into the client context. */
void xf_video_control_init(xfContext *xfc, VideoClientContext *video)
{
	xfc->video = video;
	video->custom = xfc;
	video->PresentationRequest = xf_video_PresentationRequest;
}
/** Wire the VIDEO data DVC channel interface and start the display timer. */
void xf_video_data_init(xfContext *xfc, VideoClientContext *video)
{
	video->VideoData = xf_video_VideoData;
	PubSub_SubscribeTimer(xfc->context.pubSub, (pTimerEventHandler)xf_video_timer);
}
/** Unsubscribe the display timer (counterpart of xf_video_data_init). */
void xf_video_data_uninit(xfVideoContext *xfVideo)
{
	PubSub_UnsubscribeTimer(xfVideo->xfc->context.pubSub, (pTimerEventHandler)xf_video_timer);
}

39
client/X11/xf_video.h Normal file
View File

@ -0,0 +1,39 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* Video Optimized Remoting Virtual Channel Extension for X11
*
* Copyright 2017 David Fort <contact@hardening-consulting.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CLIENT_X11_XF_VIDEO_H_
#define CLIENT_X11_XF_VIDEO_H_

#include "xfreerdp.h"

#include <freerdp/channels/geometry.h>
#include <freerdp/channels/video.h>

/* Opaque per-client video state, allocated by xf_video_new(). */
struct _xfVideoContext;
typedef struct _xfVideoContext xfVideoContext;

/* Wire the GEOMETRY / VIDEO-control / VIDEO-data DVC interfaces into the
 * X11 client context once the corresponding channel connects. */
void xf_video_geometry_init(xfContext *xfc, GeometryClientContext *geom);
void xf_video_control_init(xfContext *xfc, VideoClientContext *video);
void xf_video_data_init(xfContext *xfc, VideoClientContext *video);
void xf_video_data_uninit(xfVideoContext *xfVideo);

/* fix: prototype now matches the definition in xf_video.c — an empty `()`
 * parameter list in C declares unspecified arguments and disables
 * type-checking at call sites. */
xfVideoContext *xf_video_new(xfContext *xfc);

#endif /* CLIENT_X11_XF_VIDEO_H_ */

View File

@ -82,6 +82,7 @@ typedef struct xf_glyph xfGlyph;
typedef struct xf_clipboard xfClipboard;
typedef struct _xfDispContext xfDispContext;
typedef struct _xfVideoContext xfVideoContext;
/* Value of the first logical button number in X11 which must be */
/* subtracted to go from a button number in X11 to an index into */
@ -215,6 +216,9 @@ struct xf_context
TsmfClientContext* tsmf;
xfClipboard* clipboard;
CliprdrClientContext* cliprdr;
xfVideoContext *xfVideo;
GeometryClientContext *geometry;
VideoClientContext *video;
RdpeiClientContext* rdpei;
EncomspClientContext* encomsp;
xfDispContext* xfDisp;

View File

@ -48,6 +48,7 @@ struct _TSMM_PRESENTATION_REQUEST
BYTE PresentationId;
BYTE Version;
BYTE Command;
BYTE FrameRate;
UINT32 SourceWidth, SourceHeight;
UINT32 ScaledWidth, ScaledHeight;
UINT64 hnsTimestampOffset;
@ -89,12 +90,14 @@ struct _TSMM_VIDEO_DATA
};
typedef struct _TSMM_VIDEO_DATA TSMM_VIDEO_DATA;
/** @brief values for NotificationType in TSMM_CLIENT_NOTIFICATION */
enum
{
TSMM_CLIENT_NOTIFICATION_TYPE_NETWORK_ERROR = 1,
TSMM_CLIENT_NOTIFICATION_TYPE_FRAMERATE_OVERRIDE = 2
};
/** @brief struct used when NotificationType is FRAMERATE_OVERRIDE */
struct _TSMM_CLIENT_NOTIFICATION_FRAMERATE_OVERRIDE
{
UINT32 Flags;
@ -102,6 +105,7 @@ struct _TSMM_CLIENT_NOTIFICATION_FRAMERATE_OVERRIDE
};
typedef struct _TSMM_CLIENT_NOTIFICATION_FRAMERATE_OVERRIDE TSMM_CLIENT_NOTIFICATION_FRAMERATE_OVERRIDE;
/** @brief a client to server notification struct */
struct _TSMM_CLIENT_NOTIFICATION
{
BYTE PresentationId;

View File

@ -0,0 +1,48 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* YUV decoder
*
* Copyright 2017 David Fort <contact@hardening-consulting.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FREERDP_CODEC_YUV_H
#define FREERDP_CODEC_YUV_H

/* Opaque YUV conversion context (definition in libfreerdp/codec/yuv.c). */
typedef struct _YUV_CONTEXT YUV_CONTEXT;

#include <freerdp/api.h>
#include <freerdp/types.h>
#include <freerdp/freerdp.h>
#include <freerdp/constants.h>

#ifdef __cplusplus
extern "C" {
#endif

/** Convert YUV420 planes to `DstFormat` pixels in `dest`, possibly using a
 *  thread pool on multi-core hosts. Returns TRUE on success. */
FREERDP_API BOOL yuv_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3], UINT32 iStride[3],
		DWORD DstFormat, BYTE *dest, UINT32 nDstStep);

/** Set the frame dimensions; must be called before yuv_context_decode(). */
FREERDP_API void yuv_context_reset(YUV_CONTEXT* context, UINT32 width, UINT32 height);

/** Allocate a context. `encoder` selects the direction (only decoding is
 *  implemented so far). Free with yuv_context_free(). */
FREERDP_API YUV_CONTEXT* yuv_context_new(BOOL encoder);
FREERDP_API void yuv_context_free(YUV_CONTEXT* context);

#ifdef __cplusplus
}
#endif

#endif /* FREERDP_CODEC_YUV_H */

View File

@ -130,7 +130,8 @@ set(CODEC_SRCS
codec/zgfx.c
codec/clear.c
codec/jpeg.c
codec/h264.c)
codec/h264.c
codec/yuv.c)
set(CODEC_SSE2_SRCS
codec/rfx_sse2.c

View File

@ -180,7 +180,6 @@ INT32 avc420_decompress(H264_CONTEXT* h264, const BYTE* pSrcData, UINT32 SrcSize
return -1001;
status = h264->subsystem->Decompress(h264, pSrcData, SrcSize);
if (status == 0)
return 1;

182
libfreerdp/codec/yuv.c Normal file
View File

@ -0,0 +1,182 @@
#include <winpr/sysinfo.h>
#include <winpr/pool.h>
#include <freerdp/primitives.h>
#include <freerdp/log.h>
#include <freerdp/codec/yuv.h>
#define TAG FREERDP_TAG("codec")

/* YUV conversion context: frame geometry plus an optional Windows-style
 * thread pool used to convert horizontal strips in parallel. */
struct _YUV_CONTEXT
{
	UINT32 width, height;
	BOOL useThreads;              /* TRUE when more than one CPU is available */
	UINT32 nthreads;              /* number of worker strips per frame */
	UINT32 heightStep;            /* rows handled by each strip (height / nthreads) */
	PTP_POOL threadPool;
	TP_CALLBACK_ENVIRON ThreadPoolEnv;
};

/* Per-strip work item handed to the thread pool: pre-offset plane pointers
 * and destination for one horizontal band of the frame. */
struct _YUV_PROCESS_WORK_PARAM
{
	YUV_CONTEXT* context;
	const BYTE* pYUVData[3];      /* Y/U/V plane pointers, already offset to row `y` */
	UINT32 iStride[3];
	DWORD DstFormat;
	BYTE *dest;                   /* destination, already offset to row `y` */
	UINT32 nDstStep;
	UINT32 y;                     /* first source row of this strip */
	UINT32 height;                /* number of rows in this strip */
};
typedef struct _YUV_PROCESS_WORK_PARAM YUV_PROCESS_WORK_PARAM;
/**
 * Thread-pool callback: convert one horizontal strip of YUV420 data to RGB
 * using the primitives library. Errors are logged but not propagated (the
 * caller has no per-strip status channel).
 */
static void CALLBACK yuv_process_work_callback(PTP_CALLBACK_INSTANCE instance, void* context,
		PTP_WORK work)
{
	YUV_PROCESS_WORK_PARAM* job = (YUV_PROCESS_WORK_PARAM*)context;
	primitives_t* prims = primitives_get();
	prim_size_t roi;

	/* strips span the full frame width but only `job->height` rows */
	roi.width = job->context->width;
	roi.height = job->height;

	if (prims->YUV420ToRGB_8u_P3AC4R(job->pYUVData, job->iStride, job->dest,
			job->nDstStep, job->DstFormat, &roi) != PRIMITIVES_SUCCESS)
	{
		WLog_ERR(TAG, "error when decoding lines");
	}
}
/**
 * Set the frame dimensions and recompute the per-worker strip height.
 * Must be called before yuv_context_decode().
 */
void yuv_context_reset(YUV_CONTEXT* context, UINT32 width, UINT32 height)
{
	context->width = width;
	context->height = height;
	/* Clamp the strip height to at least 1: when height < nthreads the
	 * integer division yields 0 and yuv_context_decode() would divide by
	 * zero when computing the number of work objects. */
	context->heightStep = (height / context->nthreads);
	if (context->heightStep == 0)
		context->heightStep = 1;
}
/**
 * Allocate a YUV context. On multi-processor hosts a thread pool sized to
 * the CPU count is created so frames can be converted strip-by-strip in
 * parallel; single-CPU hosts convert synchronously.
 *
 * @param encoder selects the direction — currently unused by this
 *        implementation (only decoding paths exist)
 * @return a new context, or NULL on allocation/pool failure
 */
YUV_CONTEXT* yuv_context_new(BOOL encoder)
{
	SYSTEM_INFO sysInfos;
	YUV_CONTEXT* ret = calloc(1, sizeof(*ret));
	if (!ret)
		return NULL;

	/** do it here to avoid a race condition between threads */
	primitives_get();

	GetNativeSystemInfo(&sysInfos);
	ret->useThreads = (sysInfos.dwNumberOfProcessors > 1);
	if (ret->useThreads)
	{
		ret->nthreads = sysInfos.dwNumberOfProcessors;
		ret->threadPool = CreateThreadpool(NULL);
		if (!ret->threadPool)
		{
			goto error_threadpool;
		}

		InitializeThreadpoolEnvironment(&ret->ThreadPoolEnv);
		SetThreadpoolCallbackPool(&ret->ThreadPoolEnv, ret->threadPool);
	}
	else
	{
		/* synchronous fallback: decode treats the frame as one strip */
		ret->nthreads = 1;
	}

	return ret;

error_threadpool:
	free(ret);
	return NULL;
}
/**
 * Release a YUV context and its thread pool. Accepts NULL (like free() and
 * the other *_context_free functions in this codebase), so cleanup paths
 * can call it unconditionally.
 */
void yuv_context_free(YUV_CONTEXT* context)
{
	if (!context)
		return;

	if (context->useThreads)
	{
		CloseThreadpool(context->threadPool);
		DestroyThreadpoolEnvironment(&context->ThreadPoolEnv);
	}

	free(context);
}
/**
 * Convert a YUV420 frame to `DstFormat` pixels in `dest`.
 *
 * Single-CPU contexts convert synchronously in one call; threaded contexts
 * split the frame into horizontal strips of `heightStep` rows, submit one
 * thread-pool work object per strip, and wait for all of them.
 *
 * @param pYUVData  the three source planes (Y, U, V)
 * @param iStride   per-plane strides in bytes
 * @param dest      destination buffer, `nDstStep` bytes per row
 * @return TRUE on success; FALSE on allocation or work-object failure
 *         (already-submitted strips are still waited on, so `dest` may be
 *         partially written)
 */
BOOL yuv_context_decode(YUV_CONTEXT* context, const BYTE* pYUVData[3], UINT32 iStride[3],
		DWORD DstFormat, BYTE *dest, UINT32 nDstStep)
{
	UINT32 y, nobjects, i;
	PTP_WORK *work_objects = NULL;
	YUV_PROCESS_WORK_PARAM *params;
	int waitCount = 0;
	BOOL ret = TRUE;

	if (!context->useThreads)
	{
		/* synchronous path: whole frame in one primitives call */
		primitives_t* prims = primitives_get();
		prim_size_t roi;
		roi.width = context->width;
		roi.height = context->height;
		return prims->YUV420ToRGB_8u_P3AC4R(pYUVData, iStride, dest, nDstStep,
				DstFormat, &roi) == PRIMITIVES_SUCCESS;
	}

	/* case where we use threads */
	/* ceil(height / heightStep) strips cover the frame */
	nobjects = (context->height + context->heightStep - 1) / context->heightStep;
	work_objects = (PTP_WORK *)calloc(nobjects, sizeof(PTP_WORK));
	if (!work_objects)
	{
		return FALSE;
	}

	params = (YUV_PROCESS_WORK_PARAM *)calloc(nobjects, sizeof(*params));
	if (!params)
	{
		free(work_objects);
		return FALSE;
	}

	for (i = 0, y = 0; i < nobjects; i++, y += context->heightStep, waitCount++)
	{
		params[i].context = context;
		params[i].DstFormat = DstFormat;
		/* offset plane pointers to this strip's first row; U and V planes
		 * are subsampled vertically by 2 in YUV420 */
		params[i].pYUVData[0] = pYUVData[0] + (y * iStride[0]);
		params[i].pYUVData[1] = pYUVData[1] + ((y / 2) * iStride[1]);
		params[i].pYUVData[2] = pYUVData[2] + ((y / 2) * iStride[2]);
		params[i].iStride[0] = iStride[0];
		params[i].iStride[1] = iStride[1];
		params[i].iStride[2] = iStride[2];
		params[i].nDstStep = nDstStep;
		params[i].dest = dest + (nDstStep * y);
		params[i].y = y;
		/* last strip may be shorter than heightStep */
		if (y + context->heightStep <= context->height)
			params[i].height = context->heightStep;
		else
			params[i].height = context->height % context->heightStep;

		work_objects[i] = CreateThreadpoolWork((PTP_WORK_CALLBACK)yuv_process_work_callback,
				(void*) &params[i], &context->ThreadPoolEnv);
		if (!work_objects[i])
		{
			/* stop submitting, but still wait for what was submitted */
			ret = FALSE;
			break;
		}

		SubmitThreadpoolWork(work_objects[i]);
	}

	for (i = 0; i < waitCount; i++)
	{
		WaitForThreadpoolWorkCallbacks(work_objects[i], FALSE);
		CloseThreadpoolWork(work_objects[i]);
	}

	free(work_objects);
	free(params);
	return ret;
}