RDPECAM client: new rdpecam channel

oleg0421 2024-06-09 16:00:51 -07:00 committed by akallabeth
parent 0af0f31e27
commit a81d111ac4
No known key found for this signature in database
GPG Key ID: A49454A3FC909FD5
8 changed files with 2632 additions and 1 deletion


@@ -1,6 +1,6 @@
set(OPTION_DEFAULT ON)
-set(OPTION_CLIENT_DEFAULT OFF)
+set(OPTION_CLIENT_DEFAULT ON)
set(OPTION_SERVER_DEFAULT ON)
define_channel_options(NAME "rdpecam" TYPE "dynamic"


@@ -0,0 +1,59 @@
# FreeRDP: A Remote Desktop Protocol Implementation
# FreeRDP cmake build script
#
# Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
option(WITH_MS_RDPECAM_CLIENT "Build with video camera redirect client" OFF)
if(WITH_MS_RDPECAM_CLIENT)
define_channel_client("rdpecam")
# swscale is required; it is either part of FFmpeg or available as a standalone library
if(NOT WITH_FFMPEG)
find_package(SWScale REQUIRED)
endif()
if(NOT WITH_OPENH264 AND NOT WITH_FFMPEG)
message(FATAL_ERROR "WITH_OPENH264 or WITH_FFMPEG required for WITH_MS_RDPECAM_CLIENT")
endif()
# the camera redirect client is currently supported only on platforms with Video4Linux
find_package(V4L)
if(V4L_FOUND)
set(WITH_V4L ON)
add_definitions("-DWITH_V4L")
else()
message(FATAL_ERROR "libv4l-dev required for WITH_MS_RDPECAM_CLIENT")
endif()
set(${MODULE_PREFIX}_SRCS
camera_device_enum_main.c
camera_device_main.c
encoding.c
)
set(${MODULE_PREFIX}_LIBS
freerdp winpr
)
add_channel_client_library(${MODULE_PREFIX} ${MODULE_NAME} ${CHANNEL_NAME} TRUE "DVCPluginEntry")
if(V4L_FOUND)
add_channel_client_subsystem(${MODULE_PREFIX} ${CHANNEL_NAME} "v4l" "")
endif()
endif()
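# Illustrative note (not part of the patch): with the options above, the channel
# would typically be enabled at configure time along the lines of
#   cmake -DWITH_MS_RDPECAM_CLIENT=ON -DWITH_FFMPEG=ON <source dir>
# or, when building against OpenH264 plus a standalone swscale,
#   cmake -DWITH_MS_RDPECAM_CLIENT=ON -DWITH_OPENH264=ON <source dir>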


@@ -0,0 +1,188 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, main header file
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef FREERDP_CLIENT_CAMERA_H
#define FREERDP_CLIENT_CAMERA_H
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <libswscale/swscale.h>
#include <libavutil/imgutils.h>
#include <winpr/wlog.h>
#include <freerdp/api.h>
#include <freerdp/types.h>
#include <freerdp/client/channels.h>
#include <freerdp/channels/log.h>
#include <freerdp/channels/rdpecam.h>
#include <freerdp/codecs.h>
#include <freerdp/primitives.h>
#define ECAM_PROTO_VERSION 0x02
/* currently supporting 1 stream per device */
#define ECAM_DEVICE_MAX_STREAMS 1
#define ECAM_MAX_MEDIA_TYPE_DESCRIPTORS 256
/* Allow sending up to this many unsolicited samples.
 * For example, to sustain 30 fps with a 250 ms round trip,
 * ECAM_MAX_SAMPLE_CREDITS has to be at least 8.
 */
#define ECAM_MAX_SAMPLE_CREDITS 8
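/* Worked out: at 30 fps, a 250 ms round trip keeps roughly 30 * 0.250 = 7.5
 * samples in flight before the next SampleRequest can replenish credits,
 * hence the minimum of 8.
 */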
/* Having this hardcoded allows the sample response buffer to be preallocated
 * and reused. The generous size makes sure any sample fits,
 * even at the highest resolution.
 */
#define ECAM_SAMPLE_RESPONSE_BUFFER_SIZE (1024 * 4050)
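/* 1024 * 4050 = 4147200 bytes, i.e. roughly 4 MB per stream. */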
/* 4 Mbps max encoded bitrate seems to produce reasonably
* good quality with H264_RATECONTROL_VBR.
*/
#define ECAM_H264_ENCODED_BITRATE 4000000
typedef struct s_ICamHal ICamHal;
typedef struct
{
IWTSPlugin iface;
IWTSListener* listener;
GENERIC_LISTENER_CALLBACK* hlistener;
/* HAL interface */
ICamHal* ihal;
char* subsystem;
BOOL initialized;
BOOL attached;
UINT32 version;
wHashTable* devices;
} CameraPlugin;
typedef struct
{
CAM_MEDIA_FORMAT inputFormat; /* camera side */
CAM_MEDIA_FORMAT outputFormat; /* network side */
} CAM_MEDIA_FORMAT_INFO;
typedef struct
{
BOOL streaming;
CAM_MEDIA_FORMAT_INFO formats;
CAM_MEDIA_TYPE_DESCRIPTION currMediaType;
GENERIC_CHANNEL_CALLBACK* hSampleReqChannel;
INT nSampleCredits;
wStream* sampleRespBuffer;
H264_CONTEXT* h264;
struct SwsContext* sws;
int srcLineSizes[4];
} CameraDeviceStream;
static INLINE CAM_MEDIA_FORMAT streamInputFormat(CameraDeviceStream* stream)
{
return stream->formats.inputFormat;
}
static INLINE CAM_MEDIA_FORMAT streamOutputFormat(CameraDeviceStream* stream)
{
return stream->formats.outputFormat;
}
typedef struct
{
IWTSListener* listener;
GENERIC_LISTENER_CALLBACK* hlistener;
CameraPlugin* ecam;
ICamHal* ihal; /* HAL interface, same as used by CameraPlugin */
char deviceId[32];
CameraDeviceStream streams[ECAM_DEVICE_MAX_STREAMS];
} CameraDevice;
/**
* Subsystem (Hardware Abstraction Layer, HAL) Interface
*/
typedef UINT (*ICamHalEnumCallback)(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
const char* deviceId, const char* deviceName);
/* may run in context of different thread */
typedef UINT (*ICamHalSampleCapturedCallback)(CameraDevice* dev, int streamIndex,
const BYTE* sample, size_t size);
struct s_ICamHal
{
UINT(*Enumerate)
(ICamHal* ihal, ICamHalEnumCallback callback, CameraPlugin* ecam,
GENERIC_CHANNEL_CALLBACK* hchannel);
INT16(*GetMediaTypeDescriptions)
(ICamHal* ihal, const char* deviceId, int streamIndex,
const CAM_MEDIA_FORMAT_INFO* supportedFormats, size_t nSupportedFormats,
CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes, size_t* nMediaTypes);
UINT(*StartStream)
(ICamHal* ihal, CameraDevice* dev, int streamIndex, const CAM_MEDIA_TYPE_DESCRIPTION* mediaType,
ICamHalSampleCapturedCallback callback);
UINT (*StopStream)(ICamHal* ihal, const char* deviceId, int streamIndex);
UINT (*Free)(ICamHal* hal);
};
typedef UINT (*PREGISTERCAMERAHAL)(IWTSPlugin* plugin, ICamHal* hal);
typedef struct
{
IWTSPlugin* plugin;
PREGISTERCAMERAHAL pRegisterCameraHal;
CameraPlugin* ecam;
const ADDIN_ARGV* args;
} FREERDP_CAMERA_HAL_ENTRY_POINTS;
typedef FREERDP_CAMERA_HAL_ENTRY_POINTS* PFREERDP_CAMERA_HAL_ENTRY_POINTS;
/* entry point called by addin manager */
typedef UINT (*PFREERDP_CAMERA_HAL_ENTRY)(PFREERDP_CAMERA_HAL_ENTRY_POINTS pEntryPoints);
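/* Illustrative sketch (not part of this commit): the minimal shape of a HAL
 * subsystem built on the interface above. Every "fake_" identifier below is
 * hypothetical; only ICamHal, the callback typedefs and PREGISTERCAMERAHAL
 * come from the real definitions.
 */
#if 0
typedef struct
{
    ICamHal iHal; /* must be the first member so the struct can be passed as ICamHal* */
} FakeCamHal;

static UINT fake_enumerate(ICamHal* ihal, ICamHalEnumCallback callback, CameraPlugin* ecam,
                           GENERIC_CHANNEL_CALLBACK* hchannel)
{
    /* report a single fictitious capture device to the plugin */
    callback(ecam, hchannel, "fake-device-0", "Fake Camera");
    return 1; /* number of devices reported */
}

static UINT fake_free(ICamHal* ihal)
{
    free(ihal);
    return CHANNEL_RC_OK;
}

/* entry point of the kind resolved via freerdp_load_channel_addin_entry() */
static UINT fake_camera_hal_entry(PFREERDP_CAMERA_HAL_ENTRY_POINTS pEntryPoints)
{
    FakeCamHal* hal = (FakeCamHal*)calloc(1, sizeof(FakeCamHal));
    if (!hal)
        return CHANNEL_RC_NO_MEMORY;
    hal->iHal.Enumerate = fake_enumerate;
    hal->iHal.Free = fake_free;
    /* GetMediaTypeDescriptions, StartStream and StopStream omitted for brevity */
    return pEntryPoints->pRegisterCameraHal(pEntryPoints->plugin, &hal->iHal);
}
#endif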
/* common functions */
UINT ecam_channel_send_generic_msg(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_MSG_ID msg);
UINT ecam_channel_send_error_response(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_ERROR_CODE code);
UINT ecam_channel_write(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel, CAM_MSG_ID msg,
wStream* out, BOOL freeStream);
/* ecam device interface */
CameraDevice* ecam_dev_create(CameraPlugin* ecam, const char* deviceId, const char* deviceName);
void ecam_dev_destroy(void* dev);
/* video encoding interface */
BOOL ecam_encoder_context_init(CameraDeviceStream* stream);
BOOL ecam_encoder_context_free(CameraDeviceStream* stream);
BOOL ecam_encoder_compress(CameraDeviceStream* stream, const BYTE* srcData, size_t srcSize,
BYTE** ppDstData, size_t* pDstSize);
#endif /* FREERDP_CLIENT_CAMERA_H */


@@ -0,0 +1,551 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Device Enumeration Channel
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-enum.client")
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
UINT ecam_channel_send_error_response(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_ERROR_CODE code)
{
CAM_MSG_ID msg = CAM_MSG_ID_ErrorResponse;
WINPR_ASSERT(ecam);
wStream* s = Stream_New(NULL, CAM_HEADER_SIZE + 4);
if (!s)
{
WLog_ERR(TAG, "Stream_New failed!");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, ecam->version);
Stream_Write_UINT8(s, msg);
Stream_Write_UINT32(s, code);
return ecam_channel_write(ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
UINT ecam_channel_send_generic_msg(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_MSG_ID msg)
{
WINPR_ASSERT(ecam);
wStream* s = Stream_New(NULL, CAM_HEADER_SIZE);
if (!s)
{
WLog_ERR(TAG, "Stream_New failed!");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, ecam->version);
Stream_Write_UINT8(s, msg);
return ecam_channel_write(ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
UINT ecam_channel_write(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel, CAM_MSG_ID msg,
wStream* out, BOOL freeStream)
{
if (!hchannel || !out)
return ERROR_INVALID_PARAMETER;
Stream_SealLength(out);
WINPR_ASSERT(Stream_Length(out) <= ULONG_MAX);
WLog_DBG(TAG, "ChannelId=%d, MessageId=0x%02" PRIx8 ", Length=%d",
hchannel->channel_mgr->GetChannelId(hchannel->channel), msg, Stream_Length(out));
const UINT error = hchannel->channel->Write(hchannel->channel, (ULONG)Stream_Length(out),
Stream_Buffer(out), NULL);
if (freeStream)
Stream_Free(out, TRUE);
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_send_device_added_notification(CameraPlugin* ecam,
GENERIC_CHANNEL_CALLBACK* hchannel,
const char* deviceName, const char* channelName)
{
CAM_MSG_ID msg = CAM_MSG_ID_DeviceAddedNotification;
WINPR_ASSERT(ecam);
wStream* s = Stream_New(NULL, 256);
if (!s)
{
WLog_ERR(TAG, "Stream_New failed!");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, ecam->version);
Stream_Write_UINT8(s, msg);
size_t devNameLen = strlen(deviceName);
if (Stream_Write_UTF16_String_From_UTF8(s, devNameLen + 1, deviceName, devNameLen, TRUE) < 0)
{
Stream_Free(s, TRUE);
return ERROR_INTERNAL_ERROR;
}
Stream_Write(s, channelName, strlen(channelName) + 1);
return ecam_channel_write(ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_ihal_device_added_callback(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel,
const char* deviceId, const char* deviceName)
{
WLog_DBG(TAG, "deviceId=%s, deviceName=%s", deviceId, deviceName);
if (!HashTable_ContainsKey(ecam->devices, deviceId))
{
CameraDevice* dev = ecam_dev_create(ecam, deviceId, deviceName);
HashTable_Insert(ecam->devices, deviceId, dev);
}
else
{
WLog_DBG(TAG, "Device %s already exists", deviceId);
}
ecam_send_device_added_notification(ecam, hchannel, deviceName, deviceId /*channelName*/);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_enumerate_devices(CameraPlugin* ecam, GENERIC_CHANNEL_CALLBACK* hchannel)
{
ecam->ihal->Enumerate(ecam->ihal, ecam_ihal_device_added_callback, ecam, hchannel);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_process_select_version_response(CameraPlugin* ecam,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s,
BYTE serverVersion)
{
const BYTE clientVersion = ECAM_PROTO_VERSION;
/* check remaining s capacity */
WLog_DBG(TAG, "ServerVersion=%" PRIu8 ", ClientVersion=%" PRIu8, serverVersion, clientVersion);
if (serverVersion > clientVersion)
{
WLog_ERR(TAG,
"Incompatible protocol version server=%" PRIu8 ", client supports version=%" PRIu8,
serverVersion, clientVersion);
return CHANNEL_RC_OK;
}
ecam->version = serverVersion;
if (ecam->ihal)
ecam_enumerate_devices(ecam, hchannel);
else
WLog_ERR(TAG, "No HAL registered");
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_on_data_received(IWTSVirtualChannelCallback* pChannelCallback, wStream* data)
{
UINT error = CHANNEL_RC_OK;
BYTE version = 0;
BYTE messageId = 0;
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
if (!hchannel || !data)
return ERROR_INVALID_PARAMETER;
CameraPlugin* ecam = (CameraPlugin*)hchannel->plugin;
if (!ecam)
return ERROR_INTERNAL_ERROR;
if (!Stream_CheckAndLogRequiredCapacity(TAG, data, CAM_HEADER_SIZE))
return ERROR_NO_DATA;
Stream_Read_UINT8(data, version);
Stream_Read_UINT8(data, messageId);
WLog_DBG(TAG, "ChannelId=%d, MessageId=0x%02" PRIx8 ", Version=%d",
hchannel->channel_mgr->GetChannelId(hchannel->channel), messageId, version);
switch (messageId)
{
case CAM_MSG_ID_SelectVersionResponse:
error = ecam_process_select_version_response(ecam, hchannel, data, version);
break;
default:
WLog_WARN(TAG, "unknown MessageId=0x%02" PRIx8 "", messageId);
error = ERROR_INVALID_DATA;
ecam_channel_send_error_response(ecam, hchannel, CAM_ERROR_CODE_OperationNotSupported);
break;
}
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_on_open(IWTSVirtualChannelCallback* pChannelCallback)
{
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
WINPR_ASSERT(hchannel);
CameraPlugin* ecam = (CameraPlugin*)hchannel->plugin;
WINPR_ASSERT(ecam);
WLog_DBG(TAG, "entered");
return ecam_channel_send_generic_msg(ecam, hchannel, CAM_MSG_ID_SelectVersionRequest);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_on_close(IWTSVirtualChannelCallback* pChannelCallback)
{
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
WINPR_ASSERT(hchannel);
WLog_DBG(TAG, "entered");
free(hchannel);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_on_new_channel_connection(IWTSListenerCallback* pListenerCallback,
IWTSVirtualChannel* pChannel, BYTE* Data, BOOL* pbAccept,
IWTSVirtualChannelCallback** ppCallback)
{
GENERIC_LISTENER_CALLBACK* hlistener = (GENERIC_LISTENER_CALLBACK*)pListenerCallback;
if (!hlistener || !hlistener->plugin)
return ERROR_INTERNAL_ERROR;
WLog_DBG(TAG, "entered");
GENERIC_CHANNEL_CALLBACK* hchannel =
(GENERIC_CHANNEL_CALLBACK*)calloc(1, sizeof(GENERIC_CHANNEL_CALLBACK));
if (!hchannel)
{
WLog_ERR(TAG, "calloc failed!");
return CHANNEL_RC_NO_MEMORY;
}
hchannel->iface.OnDataReceived = ecam_on_data_received;
hchannel->iface.OnOpen = ecam_on_open;
hchannel->iface.OnClose = ecam_on_close;
hchannel->plugin = hlistener->plugin;
hchannel->channel_mgr = hlistener->channel_mgr;
hchannel->channel = pChannel;
*ppCallback = (IWTSVirtualChannelCallback*)hchannel;
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_plugin_initialize(IWTSPlugin* pPlugin, IWTSVirtualChannelManager* pChannelMgr)
{
CameraPlugin* ecam = (CameraPlugin*)pPlugin;
WLog_DBG(TAG, "entered");
if (!ecam || !pChannelMgr)
return ERROR_INVALID_PARAMETER;
if (ecam->initialized)
{
WLog_ERR(TAG, "[%s] plugin initialized twice, aborting", RDPECAM_CONTROL_DVC_CHANNEL_NAME);
return ERROR_INVALID_DATA;
}
ecam->version = ECAM_PROTO_VERSION;
ecam->devices = HashTable_New(FALSE);
if (!ecam->devices)
{
WLog_ERR(TAG, "HashTable_New failed!");
return CHANNEL_RC_NO_MEMORY;
}
HashTable_SetupForStringData(ecam->devices, FALSE);
wObject* obj = HashTable_ValueObject(ecam->devices);
WINPR_ASSERT(obj);
obj->fnObjectFree = ecam_dev_destroy;
ecam->hlistener = (GENERIC_LISTENER_CALLBACK*)calloc(1, sizeof(GENERIC_LISTENER_CALLBACK));
if (!ecam->hlistener)
{
WLog_ERR(TAG, "calloc failed!");
return CHANNEL_RC_NO_MEMORY;
}
ecam->hlistener->iface.OnNewChannelConnection = ecam_on_new_channel_connection;
ecam->hlistener->plugin = pPlugin;
ecam->hlistener->channel_mgr = pChannelMgr;
const UINT rc = pChannelMgr->CreateListener(pChannelMgr, RDPECAM_CONTROL_DVC_CHANNEL_NAME, 0,
&ecam->hlistener->iface, &ecam->listener);
ecam->initialized = (rc == CHANNEL_RC_OK);
return rc;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_plugin_terminated(IWTSPlugin* pPlugin)
{
CameraPlugin* ecam = (CameraPlugin*)pPlugin;
if (!ecam)
return ERROR_INVALID_DATA;
WLog_DBG(TAG, "entered");
if (ecam->hlistener)
{
IWTSVirtualChannelManager* mgr = ecam->hlistener->channel_mgr;
if (mgr)
IFCALL(mgr->DestroyListener, mgr, ecam->listener);
}
free(ecam->hlistener);
HashTable_Free(ecam->devices);
if (ecam->ihal)
ecam->ihal->Free(ecam->ihal);
free(ecam);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_plugin_attached(IWTSPlugin* pPlugin)
{
CameraPlugin* ecam = (CameraPlugin*)pPlugin;
UINT error = CHANNEL_RC_OK;
if (!ecam)
return ERROR_INVALID_PARAMETER;
ecam->attached = TRUE;
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_plugin_detached(IWTSPlugin* pPlugin)
{
CameraPlugin* ecam = (CameraPlugin*)pPlugin;
UINT error = CHANNEL_RC_OK;
if (!ecam)
return ERROR_INVALID_PARAMETER;
ecam->attached = FALSE;
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_register_hal_plugin(IWTSPlugin* pPlugin, ICamHal* ihal)
{
CameraPlugin* ecam = (CameraPlugin*)pPlugin;
WINPR_ASSERT(ecam);
if (ecam->ihal)
{
WLog_DBG(TAG, "already registered");
return ERROR_ALREADY_EXISTS;
}
WLog_DBG(TAG, "HAL registered");
ecam->ihal = ihal;
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_load_hal_plugin(CameraPlugin* ecam, const char* name, const ADDIN_ARGV* args)
{
WINPR_ASSERT(ecam);
FREERDP_CAMERA_HAL_ENTRY_POINTS entryPoints = { 0 };
UINT error = ERROR_INTERNAL_ERROR;
const PFREERDP_CAMERA_HAL_ENTRY entry =
(const PFREERDP_CAMERA_HAL_ENTRY)freerdp_load_channel_addin_entry(RDPECAM_CHANNEL_NAME,
name, NULL, 0);
if (entry == NULL)
{
WLog_ERR(TAG,
"freerdp_load_channel_addin_entry did not return any function pointers for %s ",
name);
return ERROR_INVALID_FUNCTION;
}
entryPoints.plugin = &ecam->iface;
entryPoints.pRegisterCameraHal = ecam_register_hal_plugin;
entryPoints.args = args;
entryPoints.ecam = ecam;
if ((error = entry(&entryPoints)))
{
WLog_ERR(TAG, "%s entry returned error %" PRIu32 ".", name, error);
return error;
}
WLog_INFO(TAG, "Loaded %s HAL for ecam", name);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
FREERDP_ENTRY_POINT(UINT rdpecam_DVCPluginEntry(IDRDYNVC_ENTRY_POINTS* pEntryPoints))
{
UINT error = CHANNEL_RC_INITIALIZATION_ERROR;
WINPR_ASSERT(pEntryPoints);
WINPR_ASSERT(pEntryPoints->GetPlugin);
CameraPlugin* ecam = (CameraPlugin*)pEntryPoints->GetPlugin(pEntryPoints, RDPECAM_CHANNEL_NAME);
if (ecam != NULL)
return CHANNEL_RC_ALREADY_INITIALIZED;
ecam = (CameraPlugin*)calloc(1, sizeof(CameraPlugin));
if (!ecam)
{
WLog_ERR(TAG, "calloc failed!");
return CHANNEL_RC_NO_MEMORY;
}
ecam->attached = TRUE;
ecam->iface.Initialize = ecam_plugin_initialize;
ecam->iface.Connected = NULL; /* server connects to client */
ecam->iface.Disconnected = NULL;
ecam->iface.Terminated = ecam_plugin_terminated;
ecam->iface.Attached = ecam_plugin_attached;
ecam->iface.Detached = ecam_plugin_detached;
/* TODO: camera redirect only supported for platforms with Video4Linux */
#if defined(WITH_V4L)
ecam->subsystem = "v4l";
#else
ecam->subsystem = NULL;
#endif
if (ecam->subsystem)
{
if ((error = ecam_load_hal_plugin(ecam, ecam->subsystem, NULL /*args*/)))
{
WLog_ERR(TAG,
"Unable to load camera redirection subsystem %s because of error %" PRIu32 "",
ecam->subsystem, error);
goto out;
}
}
error = pEntryPoints->RegisterPlugin(pEntryPoints, RDPECAM_CHANNEL_NAME, &ecam->iface);
if (error == CHANNEL_RC_OK)
return error;
out:
ecam_plugin_terminated(&ecam->iface);
return error;
}


@@ -0,0 +1,821 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Device Channels
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-device.client")
/* supported formats in preference order:
* passthrough, I420 (used as input for H264 encoder), other YUV based, RGB based
*/
static const CAM_MEDIA_FORMAT_INFO supportedFormats[] = {
/* inputFormat, outputFormat */
{ CAM_MEDIA_FORMAT_H264, CAM_MEDIA_FORMAT_H264 }, /* passthrough: comment out to disable */
{ CAM_MEDIA_FORMAT_I420, CAM_MEDIA_FORMAT_H264 },
{ CAM_MEDIA_FORMAT_YUY2, CAM_MEDIA_FORMAT_H264 },
{ CAM_MEDIA_FORMAT_NV12, CAM_MEDIA_FORMAT_H264 },
{ CAM_MEDIA_FORMAT_RGB24, CAM_MEDIA_FORMAT_H264 },
{ CAM_MEDIA_FORMAT_RGB32, CAM_MEDIA_FORMAT_H264 },
};
static const size_t nSupportedFormats = ARRAYSIZE(supportedFormats);
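/* When a row maps a format onto itself (the H264 passthrough entry above),
 * ecam_dev_sample_captured_callback() forwards captured samples unchanged;
 * for all other rows the sample is transcoded to H264 via
 * ecam_encoder_compress() before being sent to the server.
 */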
/**
* Function description
*
* @return void
*/
static void ecam_dev_write_media_type(wStream* s, CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
WINPR_ASSERT(mediaType);
Stream_Write_UINT8(s, mediaType->Format);
Stream_Write_UINT32(s, mediaType->Width);
Stream_Write_UINT32(s, mediaType->Height);
Stream_Write_UINT32(s, mediaType->FrameRateNumerator);
Stream_Write_UINT32(s, mediaType->FrameRateDenominator);
Stream_Write_UINT32(s, mediaType->PixelAspectRatioNumerator);
Stream_Write_UINT32(s, mediaType->PixelAspectRatioDenominator);
Stream_Write_UINT8(s, mediaType->Flags);
}
/**
* Function description
*
* @return TRUE if success
*/
static BOOL ecam_dev_read_media_type(wStream* s, CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
WINPR_ASSERT(mediaType);
Stream_Read_UINT8(s, mediaType->Format);
Stream_Read_UINT32(s, mediaType->Width);
Stream_Read_UINT32(s, mediaType->Height);
Stream_Read_UINT32(s, mediaType->FrameRateNumerator);
Stream_Read_UINT32(s, mediaType->FrameRateDenominator);
Stream_Read_UINT32(s, mediaType->PixelAspectRatioNumerator);
Stream_Read_UINT32(s, mediaType->PixelAspectRatioDenominator);
Stream_Read_UINT8(s, mediaType->Flags);
return TRUE;
}
/**
* Function description
*
* @return void
*/
static void ecam_dev_print_media_type(CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
WINPR_ASSERT(mediaType);
WLog_DBG(TAG, "Format: %d, width: %d, height: %d, fps: %d, flags: %d", mediaType->Format,
mediaType->Width, mediaType->Height, mediaType->FrameRateNumerator, mediaType->Flags);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_send_sample_response(CameraDevice* dev, size_t streamIndex, const BYTE* sample,
size_t size)
{
WINPR_ASSERT(dev);
CameraDeviceStream* stream = &dev->streams[streamIndex];
CAM_MSG_ID msg = CAM_MSG_ID_SampleResponse;
Stream_SetPosition(stream->sampleRespBuffer, 0);
Stream_Write_UINT8(stream->sampleRespBuffer, dev->ecam->version);
Stream_Write_UINT8(stream->sampleRespBuffer, msg);
Stream_Write_UINT8(stream->sampleRespBuffer, streamIndex);
Stream_Write(stream->sampleRespBuffer, sample, size);
/* channel write is protected by critical section in dvcman_write_channel */
return ecam_channel_write(dev->ecam, stream->hSampleReqChannel, msg, stream->sampleRespBuffer,
FALSE /* don't free stream */);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_sample_captured_callback(CameraDevice* dev, int streamIndex,
const BYTE* sample, size_t size)
{
BYTE* encodedSample = NULL;
size_t encodedSize = 0;
WINPR_ASSERT(dev);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
return ERROR_INVALID_INDEX;
CameraDeviceStream* stream = &dev->streams[streamIndex];
if (!stream->streaming)
return CHANNEL_RC_OK;
if (streamInputFormat(stream) != streamOutputFormat(stream))
{
if (!ecam_encoder_compress(stream, sample, size, &encodedSample, &encodedSize))
{
WLog_DBG(TAG, "Frame drop or error in ecam_encoder_compress");
return CHANNEL_RC_OK;
}
if (!stream->streaming)
return CHANNEL_RC_OK;
}
else /* passthrough */
{
encodedSample = (BYTE*)sample;
encodedSize = size;
}
if (stream->nSampleCredits == 0)
{
WLog_DBG(TAG, "Skip sample: no credits left");
return CHANNEL_RC_OK;
}
stream->nSampleCredits--;
return ecam_dev_send_sample_response(dev, streamIndex, encodedSample, encodedSize);
}
/**
* Function description
*
* @return void
*/
static void ecam_dev_stop_stream(CameraDevice* dev, size_t streamIndex)
{
WINPR_ASSERT(dev);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
return;
CameraDeviceStream* stream = &dev->streams[streamIndex];
if (stream->streaming)
{
stream->streaming = FALSE;
dev->ihal->StopStream(dev->ihal, dev->deviceId, 0);
}
if (stream->sampleRespBuffer)
{
Stream_Free(stream->sampleRespBuffer, TRUE);
stream->sampleRespBuffer = NULL;
}
ecam_encoder_context_free(stream);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_stop_streams_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
WINPR_ASSERT(dev);
WINPR_UNUSED(s);
for (size_t i = 0; i < ECAM_DEVICE_MAX_STREAMS; i++)
ecam_dev_stop_stream(dev, i);
return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_start_streams_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
BYTE streamIndex = 0;
CAM_MEDIA_TYPE_DESCRIPTION mediaType = { 0 };
WINPR_ASSERT(dev);
if (!Stream_CheckAndLogRequiredLength(TAG, s, 1 + 26))
return ERROR_INVALID_DATA;
Stream_Read_UINT8(s, streamIndex);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
{
WLog_ERR(TAG, "Incorrect streamIndex %" PRIuz, streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
return ERROR_INVALID_INDEX;
}
if (!ecam_dev_read_media_type(s, &mediaType))
{
WLog_ERR(TAG, "Unable to read MEDIA_TYPE_DESCRIPTION");
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidMessage);
return ERROR_INVALID_DATA;
}
ecam_dev_print_media_type(&mediaType);
CameraDeviceStream* stream = &dev->streams[streamIndex];
if (stream->streaming)
{
WLog_ERR(TAG, "Streaming already in progress, device %s, streamIndex %d", dev->deviceId,
streamIndex);
return CAM_ERROR_CODE_UnexpectedError;
}
/* saving media type description for CurrentMediaTypeRequest,
* to be done before calling ecam_encoder_context_init
*/
stream->currMediaType = mediaType;
/* initialize encoder, if input and output formats differ */
if (streamInputFormat(stream) != streamOutputFormat(stream) &&
!ecam_encoder_context_init(stream))
{
WLog_ERR(TAG, "stream_ecam_encoder_init failed");
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_UnexpectedError);
return ERROR_INVALID_DATA;
}
stream->sampleRespBuffer = Stream_New(NULL, ECAM_SAMPLE_RESPONSE_BUFFER_SIZE);
if (!stream->sampleRespBuffer)
{
WLog_ERR(TAG, "Stream_New failed");
ecam_dev_stop_stream(dev, streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
return ERROR_INVALID_DATA;
}
/* replacing outputFormat with inputFormat in mediaType before starting stream */
mediaType.Format = streamInputFormat(stream);
stream->nSampleCredits = 0;
UINT error = dev->ihal->StartStream(dev->ihal, dev, streamIndex, &mediaType,
ecam_dev_sample_captured_callback);
if (error)
{
WLog_ERR(TAG, "StartStream failure");
ecam_dev_stop_stream(dev, streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, error);
return ERROR_INVALID_DATA;
}
stream->streaming = TRUE;
return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_property_list_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
WINPR_ASSERT(dev);
// TODO: supported properties implementation
return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_PropertyListResponse);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_send_current_media_type_response(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_MEDIA_TYPE_DESCRIPTION* mediaType)
{
CAM_MSG_ID msg = CAM_MSG_ID_CurrentMediaTypeResponse;
WINPR_ASSERT(dev);
wStream* s = Stream_New(NULL, CAM_HEADER_SIZE + sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
if (!s)
{
WLog_ERR(TAG, "Stream_New failed");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, dev->ecam->version);
Stream_Write_UINT8(s, msg);
ecam_dev_write_media_type(s, mediaType);
return ecam_channel_write(dev->ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_sample_request(CameraDevice* dev, GENERIC_CHANNEL_CALLBACK* hchannel,
wStream* s)
{
BYTE streamIndex = 0;
WINPR_ASSERT(dev);
if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
return ERROR_INVALID_DATA;
Stream_Read_UINT8(s, streamIndex);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
{
WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
return ERROR_INVALID_INDEX;
}
CameraDeviceStream* stream = &dev->streams[streamIndex];
/* need to save the channel because responses are asynchronous and come from the capture thread */
if (stream->hSampleReqChannel != hchannel)
stream->hSampleReqChannel = hchannel;
/* allow sending that many unsolicited samples */
stream->nSampleCredits = ECAM_MAX_SAMPLE_CREDITS;
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_current_media_type_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel,
wStream* s)
{
BYTE streamIndex = 0;
WINPR_ASSERT(dev);
if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
return ERROR_INVALID_DATA;
Stream_Read_UINT8(s, streamIndex);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
{
WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
return ERROR_INVALID_INDEX;
}
CameraDeviceStream* stream = &dev->streams[streamIndex];
if (stream->currMediaType.Format == 0)
{
WLog_ERR(TAG, "Current media type unknown for streamIndex %d", streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_NotInitialized);
return ERROR_DEVICE_REINITIALIZATION_NEEDED;
}
return ecam_dev_send_current_media_type_response(dev, hchannel, &stream->currMediaType);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_send_media_type_list_response(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel,
CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes,
size_t nMediaTypes)
{
CAM_MSG_ID msg = CAM_MSG_ID_MediaTypeListResponse;
WINPR_ASSERT(dev);
wStream* s = Stream_New(NULL, CAM_HEADER_SIZE + ECAM_MAX_MEDIA_TYPE_DESCRIPTORS *
sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
if (!s)
{
WLog_ERR(TAG, "Stream_New failed");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, dev->ecam->version);
Stream_Write_UINT8(s, msg);
for (size_t i = 0; i < nMediaTypes; i++, mediaTypes++)
{
ecam_dev_write_media_type(s, mediaTypes);
}
return ecam_channel_write(dev->ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_media_type_list_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
UINT error = CHANNEL_RC_OK;
BYTE streamIndex = 0;
CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes = NULL;
size_t nMediaTypes = ECAM_MAX_MEDIA_TYPE_DESCRIPTORS;
WINPR_ASSERT(dev);
if (!Stream_CheckAndLogRequiredLength(TAG, s, 1))
return ERROR_INVALID_DATA;
Stream_Read_UINT8(s, streamIndex);
if (streamIndex >= ECAM_DEVICE_MAX_STREAMS)
{
WLog_ERR(TAG, "Incorrect streamIndex %d", streamIndex);
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_InvalidStreamNumber);
return ERROR_INVALID_INDEX;
}
CameraDeviceStream* stream = &dev->streams[streamIndex];
mediaTypes =
(CAM_MEDIA_TYPE_DESCRIPTION*)calloc(nMediaTypes, sizeof(CAM_MEDIA_TYPE_DESCRIPTION));
if (!mediaTypes)
{
WLog_ERR(TAG, "calloc failed");
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_OutOfMemory);
return CHANNEL_RC_NO_MEMORY;
}
INT16 formatIndex =
dev->ihal->GetMediaTypeDescriptions(dev->ihal, dev->deviceId, streamIndex, supportedFormats,
nSupportedFormats, mediaTypes, &nMediaTypes);
if (formatIndex == -1 || nMediaTypes == 0)
{
WLog_ERR(TAG, "Camera doesn't support any compatible video formats");
ecam_channel_send_error_response(dev->ecam, hchannel, CAM_ERROR_CODE_ItemNotFound);
error = ERROR_DEVICE_FEATURE_NOT_SUPPORTED;
goto error;
}
stream->formats = supportedFormats[formatIndex];
/* replacing inputFormat with outputFormat in mediaTypes before sending response */
for (int i = 0; i < nMediaTypes; i++)
{
mediaTypes[i].Format = streamOutputFormat(stream);
mediaTypes[i].Flags = CAM_MEDIA_TYPE_DESCRIPTION_FLAG_DecodingRequired;
}
if (stream->currMediaType.Format == 0)
{
/* saving 1st media type description for CurrentMediaTypeRequest */
stream->currMediaType = mediaTypes[0];
}
error = ecam_dev_send_media_type_list_response(dev, hchannel, mediaTypes, nMediaTypes);
error:
free(mediaTypes);
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_send_stream_list_response(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel)
{
CAM_MSG_ID msg = CAM_MSG_ID_StreamListResponse;
WINPR_ASSERT(dev);
wStream* s = Stream_New(NULL, CAM_HEADER_SIZE + sizeof(CAM_STREAM_DESCRIPTION));
if (!s)
{
WLog_ERR(TAG, "Stream_New failed");
return ERROR_NOT_ENOUGH_MEMORY;
}
Stream_Write_UINT8(s, dev->ecam->version);
Stream_Write_UINT8(s, msg);
/* single stream description */
Stream_Write_UINT16(s, CAM_STREAM_FRAME_SOURCE_TYPE_Color);
Stream_Write_UINT8(s, CAM_STREAM_CATEGORY_Capture);
Stream_Write_UINT8(s, TRUE /* Selected */);
Stream_Write_UINT8(s, FALSE /* CanBeShared */);
return ecam_channel_write(dev->ecam, hchannel, msg, s, TRUE);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_stream_list_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
return ecam_dev_send_stream_list_response(dev, hchannel);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_activate_device_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel, wStream* s)
{
WINPR_ASSERT(dev);
/* TODO: TBD if this is required */
return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_process_deactivate_device_request(CameraDevice* dev,
GENERIC_CHANNEL_CALLBACK* hchannel,
wStream* s)
{
WINPR_ASSERT(dev);
WINPR_UNUSED(s);
for (size_t i = 0; i < ECAM_DEVICE_MAX_STREAMS; i++)
ecam_dev_stop_stream(dev, i);
return ecam_channel_send_generic_msg(dev->ecam, hchannel, CAM_MSG_ID_SuccessResponse);
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_on_data_received(IWTSVirtualChannelCallback* pChannelCallback, wStream* data)
{
UINT error = CHANNEL_RC_OK;
BYTE version = 0;
BYTE messageId = 0;
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
if (!hchannel || !data)
return ERROR_INVALID_PARAMETER;
CameraDevice* dev = (CameraDevice*)hchannel->plugin;
if (!dev)
return ERROR_INTERNAL_ERROR;
if (!Stream_CheckAndLogRequiredCapacity(TAG, data, CAM_HEADER_SIZE))
return ERROR_NO_DATA;
Stream_Read_UINT8(data, version);
Stream_Read_UINT8(data, messageId);
WLog_DBG(TAG, "ChannelId=%d, MessageId=0x%02" PRIx8 ", Version=%d",
hchannel->channel_mgr->GetChannelId(hchannel->channel), messageId, version);
switch (messageId)
{
case CAM_MSG_ID_ActivateDeviceRequest:
error = ecam_dev_process_activate_device_request(dev, hchannel, data);
break;
case CAM_MSG_ID_DeactivateDeviceRequest:
error = ecam_dev_process_deactivate_device_request(dev, hchannel, data);
break;
case CAM_MSG_ID_StreamListRequest:
error = ecam_dev_process_stream_list_request(dev, hchannel, data);
break;
case CAM_MSG_ID_MediaTypeListRequest:
error = ecam_dev_process_media_type_list_request(dev, hchannel, data);
break;
case CAM_MSG_ID_CurrentMediaTypeRequest:
error = ecam_dev_process_current_media_type_request(dev, hchannel, data);
break;
case CAM_MSG_ID_PropertyListRequest:
error = ecam_dev_process_property_list_request(dev, hchannel, data);
break;
case CAM_MSG_ID_StartStreamsRequest:
error = ecam_dev_process_start_streams_request(dev, hchannel, data);
break;
case CAM_MSG_ID_StopStreamsRequest:
error = ecam_dev_process_stop_streams_request(dev, hchannel, data);
break;
case CAM_MSG_ID_SampleRequest:
error = ecam_dev_process_sample_request(dev, hchannel, data);
break;
default:
WLog_WARN(TAG, "unknown MessageId=0x%02" PRIx8 "", messageId);
error = ERROR_INVALID_DATA;
ecam_channel_send_error_response(dev->ecam, hchannel,
CAM_ERROR_CODE_OperationNotSupported);
break;
}
return error;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_on_open(IWTSVirtualChannelCallback* pChannelCallback)
{
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
WINPR_ASSERT(hchannel);
CameraDevice* dev = (CameraDevice*)hchannel->plugin;
WINPR_ASSERT(dev);
WLog_DBG(TAG, "entered");
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_on_close(IWTSVirtualChannelCallback* pChannelCallback)
{
GENERIC_CHANNEL_CALLBACK* hchannel = (GENERIC_CHANNEL_CALLBACK*)pChannelCallback;
WINPR_ASSERT(hchannel);
CameraDevice* dev = (CameraDevice*)hchannel->plugin;
WINPR_ASSERT(dev);
WLog_DBG(TAG, "entered");
/* make sure this channel is not used for sample responses */
for (size_t i = 0; i < ECAM_DEVICE_MAX_STREAMS; i++)
if (dev->streams[i].hSampleReqChannel == hchannel)
dev->streams[i].hSampleReqChannel = NULL;
free(hchannel);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT ecam_dev_on_new_channel_connection(IWTSListenerCallback* pListenerCallback,
IWTSVirtualChannel* pChannel, BYTE* Data,
BOOL* pbAccept,
IWTSVirtualChannelCallback** ppCallback)
{
GENERIC_LISTENER_CALLBACK* hlistener = (GENERIC_LISTENER_CALLBACK*)pListenerCallback;
if (!hlistener || !hlistener->plugin)
return ERROR_INTERNAL_ERROR;
WLog_DBG(TAG, "entered");
GENERIC_CHANNEL_CALLBACK* hchannel =
(GENERIC_CHANNEL_CALLBACK*)calloc(1, sizeof(GENERIC_CHANNEL_CALLBACK));
if (!hchannel)
{
WLog_ERR(TAG, "calloc failed");
return CHANNEL_RC_NO_MEMORY;
}
hchannel->iface.OnDataReceived = ecam_dev_on_data_received;
hchannel->iface.OnOpen = ecam_dev_on_open;
hchannel->iface.OnClose = ecam_dev_on_close;
hchannel->plugin = hlistener->plugin;
hchannel->channel_mgr = hlistener->channel_mgr;
hchannel->channel = pChannel;
*ppCallback = (IWTSVirtualChannelCallback*)hchannel;
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return CameraDevice pointer or NULL in case of error
*/
CameraDevice* ecam_dev_create(CameraPlugin* ecam, const char* deviceId, const char* deviceName)
{
WINPR_ASSERT(ecam);
WINPR_ASSERT(ecam->hlistener);
IWTSVirtualChannelManager* pChannelMgr = ecam->hlistener->channel_mgr;
WINPR_ASSERT(pChannelMgr);
WLog_DBG(TAG, "entered for %s", deviceId);
CameraDevice* dev = (CameraDevice*)calloc(1, sizeof(CameraDevice));
if (!dev)
{
WLog_ERR(TAG, "calloc failed");
return NULL;
}
dev->ecam = ecam;
dev->ihal = ecam->ihal;
strncpy(dev->deviceId, deviceId, sizeof(dev->deviceId) - 1);
dev->hlistener = (GENERIC_LISTENER_CALLBACK*)calloc(1, sizeof(GENERIC_LISTENER_CALLBACK));
if (!dev->hlistener)
{
free(dev);
WLog_ERR(TAG, "calloc failed");
return NULL;
}
dev->hlistener->iface.OnNewChannelConnection = ecam_dev_on_new_channel_connection;
dev->hlistener->plugin = (IWTSPlugin*)dev;
dev->hlistener->channel_mgr = pChannelMgr;
if (CHANNEL_RC_OK != pChannelMgr->CreateListener(pChannelMgr, deviceId, 0,
&dev->hlistener->iface, &dev->listener))
{
free(dev->hlistener);
free(dev);
WLog_ERR(TAG, "CreateListener failed");
return NULL;
}
return dev;
}
/**
* Function description
*
* OBJECT_FREE_FN for devices hash table value
*
* @return void
*/
void ecam_dev_destroy(void* obj)
{
CameraDevice* dev = (CameraDevice*)obj;
if (!dev)
return;
WLog_DBG(TAG, "entered for %s", dev->deviceId);
if (dev->hlistener)
{
IWTSVirtualChannelManager* mgr = dev->hlistener->channel_mgr;
if (mgr)
IFCALL(mgr->DestroyListener, mgr, dev->listener);
}
free(dev->hlistener);
for (int i = 0; i < ECAM_DEVICE_MAX_STREAMS; i++)
ecam_dev_stop_stream(dev, i);
free(dev);
return;
}


@@ -0,0 +1,242 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, Video Encoding
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <winpr/assert.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-video.client")
/**
* Function description
*
* @return enum AVPixelFormat value
*/
static enum AVPixelFormat ecamToAVPixFormat(CAM_MEDIA_FORMAT ecamFormat)
{
switch (ecamFormat)
{
case CAM_MEDIA_FORMAT_YUY2:
return AV_PIX_FMT_YUYV422;
case CAM_MEDIA_FORMAT_NV12:
return AV_PIX_FMT_NV12;
case CAM_MEDIA_FORMAT_I420:
return AV_PIX_FMT_YUV420P;
case CAM_MEDIA_FORMAT_RGB24:
return AV_PIX_FMT_RGB24;
case CAM_MEDIA_FORMAT_RGB32:
return AV_PIX_FMT_RGB32;
default:
WLog_ERR(TAG, "Unsupported ecamFormat %d", ecamFormat);
return AV_PIX_FMT_NONE;
}
}
/**
* Function description
*
* @return success/failure
*/
static BOOL ecam_encoder_compress_h264(CameraDeviceStream* stream, const BYTE* srcData,
size_t srcSize, BYTE** ppDstData, size_t* pDstSize)
{
UINT32 dstSize = 0;
BYTE* srcSlice[4] = { 0 };
BYTE* yuv420pData[3] = { 0 };
UINT32 yuv420pStride[3] = { 0 };
prim_size_t size = { 0 };
size.width = stream->currMediaType.Width;
size.height = stream->currMediaType.Height;
CAM_MEDIA_FORMAT inputFormat = streamInputFormat(stream);
enum AVPixelFormat pixFormat = ecamToAVPixFormat(inputFormat);
/* get buffers for YUV420P */
if (h264_get_yuv_buffer(stream->h264, stream->srcLineSizes[0], size.width, size.height,
yuv420pData, yuv420pStride) < 0)
return FALSE;
/* convert from source format to YUV420P */
if (av_image_fill_pointers(srcSlice, pixFormat, (int)size.height, (BYTE*)srcData,
stream->srcLineSizes) < 0)
return FALSE;
const BYTE* cSrcSlice[4] = { srcSlice[0], srcSlice[1], srcSlice[2], srcSlice[3] };
if (sws_scale(stream->sws, cSrcSlice, stream->srcLineSizes, 0, (int)size.height, yuv420pData,
(int*)yuv420pStride) <= 0)
return FALSE;
/* encode from YUV420P to H264 */
if (h264_compress(stream->h264, ppDstData, &dstSize) < 0)
return FALSE;
*pDstSize = dstSize;
return TRUE;
}
/**
* Function description
*
*/
static void ecam_encoder_context_free_h264(CameraDeviceStream* stream)
{
WINPR_ASSERT(stream);
if (stream->sws)
{
sws_freeContext(stream->sws);
stream->sws = NULL;
}
if (stream->h264)
{
h264_context_free(stream->h264);
stream->h264 = NULL;
}
}
/**
* Function description
*
* @return success/failure
*/
static BOOL ecam_encoder_context_init_h264(CameraDeviceStream* stream)
{
WINPR_ASSERT(stream);
if (!stream->h264)
stream->h264 = h264_context_new(TRUE);
if (!stream->h264)
{
WLog_ERR(TAG, "h264_context_new failed");
return FALSE;
}
if (!h264_context_reset(stream->h264, stream->currMediaType.Width,
stream->currMediaType.Height))
goto fail;
if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_USAGETYPE,
H264_CAMERA_VIDEO_REAL_TIME))
goto fail;
if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_FRAMERATE,
stream->currMediaType.FrameRateNumerator))
goto fail;
if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_BITRATE,
ECAM_H264_ENCODED_BITRATE))
goto fail;
if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_RATECONTROL,
H264_RATECONTROL_VBR))
goto fail;
if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_QP, 0))
goto fail;
/* initialize libswscale */
{
const int width = (int)stream->currMediaType.Width;
const int height = (int)stream->currMediaType.Height;
CAM_MEDIA_FORMAT inputFormat = streamInputFormat(stream);
enum AVPixelFormat pixFormat = ecamToAVPixFormat(inputFormat);
if (av_image_fill_linesizes(stream->srcLineSizes, pixFormat, width) < 0)
{
WLog_ERR(TAG, "av_image_fill_linesizes failed");
goto fail;
}
if (!stream->sws)
{
stream->sws = sws_getContext(width, height, pixFormat, width, height,
AV_PIX_FMT_YUV420P, 0, NULL, NULL, NULL);
}
if (!stream->sws)
{
WLog_ERR(TAG, "sws_getContext failed");
goto fail;
}
}
return TRUE;
fail:
ecam_encoder_context_free_h264(stream);
return FALSE;
}
/**
* Function description
*
* @return success/failure
*/
BOOL ecam_encoder_context_init(CameraDeviceStream* stream)
{
CAM_MEDIA_FORMAT format = streamOutputFormat(stream);
switch (format)
{
case CAM_MEDIA_FORMAT_H264:
return ecam_encoder_context_init_h264(stream);
default:
WLog_ERR(TAG, "Unsupported output format %d", format);
return FALSE;
}
}
/**
* Function description
*
* @return success/failure
*/
BOOL ecam_encoder_context_free(CameraDeviceStream* stream)
{
CAM_MEDIA_FORMAT format = streamOutputFormat(stream);
switch (format)
{
case CAM_MEDIA_FORMAT_H264:
ecam_encoder_context_free_h264(stream);
break;
default:
return FALSE;
}
return TRUE;
}
/**
* Function description
*
* @return success/failure
*/
BOOL ecam_encoder_compress(CameraDeviceStream* stream, const BYTE* srcData, size_t srcSize,
BYTE** ppDstData, size_t* pDstSize)
{
CAM_MEDIA_FORMAT format = streamOutputFormat(stream);
switch (format)
{
case CAM_MEDIA_FORMAT_H264:
return ecam_encoder_compress_h264(stream, srcData, srcSize, ppDstData, pDstSize);
default:
WLog_ERR(TAG, "Unsupported output format %d", format);
return FALSE;
}
}


@@ -0,0 +1,36 @@
# FreeRDP: A Remote Desktop Protocol Implementation
# FreeRDP cmake build script
#
# Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if(WITH_MS_RDPECAM_CLIENT AND WITH_V4L)
define_channel_client_subsystem("rdpecam" "v4l" "")
set(${MODULE_PREFIX}_SRCS
camera_v4l.c
)
set(${MODULE_PREFIX}_LIBS
winpr
freerdp
${V4L_TARGETS}
)
include_directories(..)
add_channel_client_subsystem_library(${MODULE_PREFIX} ${MODULE_NAME} ${CHANNEL_NAME} "" TRUE "")
endif()


@@ -0,0 +1,734 @@
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* MS-RDPECAM Implementation, V4L Interface
*
* Copyright 2024 Oleg Turovski <oleg2104@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <errno.h>
#include <fcntl.h>
#include <poll.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
/* v4l includes */
#include <linux/videodev2.h>
#include "camera.h"
#define TAG CHANNELS_TAG("rdpecam-v4l.client")
#define CAM_V4L2_BUFFERS_COUNT 4
#define CAM_V4L2_CAPTURE_THREAD_SLEEP_MS 1000
typedef struct
{
void* start;
size_t length;
} CamV4lBuffer;
typedef struct
{
CRITICAL_SECTION lock;
/* members used to call the callback */
CameraDevice* dev;
int streamIndex;
ICamHalSampleCapturedCallback sampleCallback;
BOOL streaming;
int fd;
size_t nBuffers;
CamV4lBuffer* buffers;
} CamV4lStream;
typedef struct
{
ICamHal iHal;
wHashTable* streams; /* Index: deviceId, Value: CamV4lStream */
} CamV4lHal;
/**
* Function description
*
* @return NULL-terminated fourcc string
*/
static const char* cam_v4l_get_fourcc_str(unsigned int fourcc)
{
static char buf[5];
buf[0] = (fourcc & 0xFF);
buf[1] = (fourcc >> 8) & 0xFF;
buf[2] = (fourcc >> 16) & 0xFF;
buf[3] = (fourcc >> 24) & 0xFF;
buf[4] = 0;
return buf;
}
/**
* Function description
*
* @return one of V4L2_PIX_FMT
*/
static UINT32 ecamToV4L2PixFormat(CAM_MEDIA_FORMAT ecamFormat)
{
switch (ecamFormat)
{
case CAM_MEDIA_FORMAT_H264:
return V4L2_PIX_FMT_H264;
case CAM_MEDIA_FORMAT_MJPG:
return V4L2_PIX_FMT_MJPEG;
case CAM_MEDIA_FORMAT_YUY2:
return V4L2_PIX_FMT_YUYV;
case CAM_MEDIA_FORMAT_NV12:
return V4L2_PIX_FMT_NV12;
case CAM_MEDIA_FORMAT_I420:
return V4L2_PIX_FMT_YUV420;
case CAM_MEDIA_FORMAT_RGB24:
return V4L2_PIX_FMT_RGB24;
case CAM_MEDIA_FORMAT_RGB32:
return V4L2_PIX_FMT_RGB32;
default:
WLog_ERR(TAG, "Unsupported CAM_MEDIA_FORMAT %d", ecamFormat);
return 0;
}
}
/**
* Function description
*
* @return file descriptor
*/
static int cam_v4l_open_device(const char* deviceId, int flags)
{
UINT n;
char device[20];
int fd = -1;
struct v4l2_capability cap;
if (!deviceId)
return -1;
if (0 == strncmp(deviceId, "/dev/video", 10))
return open(deviceId, flags);
for (n = 0; n < 64; n++)
{
snprintf(device, sizeof(device), "/dev/video%d", n);
if ((fd = open(device, flags)) == -1)
continue;
/* query device capabilities and make sure this is a video capture device */
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE))
{
close(fd);
continue;
}
if (cap.bus_info[0] != 0 && 0 == strcmp((const char*)cap.bus_info, deviceId))
return fd;
close(fd);
}
return -1; /* no matching device found; any fd opened above was already closed */
}
/**
* Function description
*
* @return -1 if error, otherwise index of supportedFormats array and mediaTypes/nMediaTypes filled
* in
*/
static INT16 cam_v4l_get_media_type_descriptions(ICamHal* hal, const char* deviceId,
int streamIndex,
const CAM_MEDIA_FORMAT_INFO* supportedFormats,
size_t nSupportedFormats,
CAM_MEDIA_TYPE_DESCRIPTION* mediaTypes,
size_t* nMediaTypes)
{
int fd;
size_t maxMediaTypes = *nMediaTypes;
size_t nTypes = 0;
int formatIndex;
BOOL formatFound = FALSE;
struct v4l2_format video_fmt = { 0 };
video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_fmt.fmt.pix.sizeimage = 0;
unsigned int videoSizes[][2] = { { 160, 90 }, { 160, 120 }, { 320, 180 }, { 320, 240 },
{ 432, 240 }, { 352, 288 }, { 640, 360 }, { 800, 448 },
{ 640, 480 }, { 848, 480 }, { 864, 480 }, { 960, 540 },
{ 1024, 576 }, { 800, 600 }, { 960, 720 }, { 1280, 720 },
{ 1024, 768 }, { 1600, 896 }, { 1440, 1080 }, { 1920, 1080 } };
const int totalSizes = sizeof(videoSizes) / sizeof(unsigned int[2]);
if ((fd = cam_v4l_open_device(deviceId, O_RDONLY)) == -1)
{
WLog_ERR(TAG, "Unable to open device %s", deviceId);
return -1;
}
for (formatIndex = 0; formatIndex < nSupportedFormats; formatIndex++)
{
UINT32 pixelFormat = ecamToV4L2PixFormat(supportedFormats[formatIndex].inputFormat);
WINPR_ASSERT(pixelFormat != 0);
for (int i = 0; i < totalSizes; i++)
{
video_fmt.fmt.pix.pixelformat = pixelFormat;
video_fmt.fmt.pix.width = videoSizes[i][0];
video_fmt.fmt.pix.height = videoSizes[i][1];
if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) < 0 ||
video_fmt.fmt.pix.pixelformat != pixelFormat ||
video_fmt.fmt.pix.width != videoSizes[i][0] ||
video_fmt.fmt.pix.height != videoSizes[i][1])
continue;
formatFound = TRUE;
mediaTypes->Width = video_fmt.fmt.pix.width;
mediaTypes->Height = video_fmt.fmt.pix.height;
mediaTypes->Format = supportedFormats[formatIndex].inputFormat;
/* V4L2 does not provide a reliable way to query the frame rate here, so 30 fps is assumed */
mediaTypes->FrameRateNumerator = 30;
mediaTypes->FrameRateDenominator = 1;
mediaTypes->PixelAspectRatioNumerator = mediaTypes->PixelAspectRatioDenominator = 1;
WLog_DBG(
TAG, "Camera capability %d: width: %d, height: %d, fourcc: %s, type: %d, fps: %d",
nTypes, mediaTypes->Width, mediaTypes->Height, cam_v4l_get_fourcc_str(pixelFormat),
mediaTypes->Format, mediaTypes->FrameRateNumerator);
mediaTypes++;
nTypes++;
if (nTypes == maxMediaTypes)
{
WLog_ERR(TAG, "Media types reached buffer maximum %" PRIu32 "", maxMediaTypes);
goto error;
}
}
if (formatFound)
{
/* we are interested in 1st supported format only, with all supported sizes */
break;
}
}
error:
*nMediaTypes = nTypes;
close(fd);
return formatIndex;
}
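/* Illustrative sketch, not part of this commit: where the driver supports it,
 * the nominal frame rate could be probed with VIDIOC_G_PARM instead of the
 * fixed 30 fps assumption used above. Error handling omitted;
 * cam_v4l_probe_frame_rate is a hypothetical helper name.
 */
#if 0
static void cam_v4l_probe_frame_rate(int fd, UINT32* fpsNum, UINT32* fpsDen)
{
    struct v4l2_streamparm parm = { 0 };
    parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    *fpsNum = 30; /* fall back to the assumption used above */
    *fpsDen = 1;
    if (ioctl(fd, VIDIOC_G_PARM, &parm) == 0 &&
        (parm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) &&
        parm.parm.capture.timeperframe.numerator != 0)
    {
        /* timeperframe is seconds per frame, so fps = denominator / numerator */
        *fpsNum = parm.parm.capture.timeperframe.denominator;
        *fpsDen = parm.parm.capture.timeperframe.numerator;
    }
}
#endif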
/**
* Function description
*
* @return number of video capture devices
*/
static UINT cam_v4l_enumerate(ICamHal* ihal, ICamHalEnumCallback callback, CameraPlugin* ecam,
GENERIC_CHANNEL_CALLBACK* hchannel)
{
UINT n, count = 0;
char device[20];
int fd = -1;
struct v4l2_capability cap;
char *deviceName, *deviceId;
for (n = 0; n < 64; n++)
{
snprintf(device, sizeof(device), "/dev/video%d", n);
if ((fd = open(device, O_RDONLY)) == -1)
continue;
/* query device capabilities and make sure this is a video capture device */
if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0 || !(cap.device_caps & V4L2_CAP_VIDEO_CAPTURE))
{
close(fd);
continue;
}
count++;
deviceName = (char*)cap.card;
if (cap.bus_info[0] != 0) /* may not be available in all drivers */
deviceId = (char*)cap.bus_info;
else
deviceId = device;
IFCALL(callback, ecam, hchannel, deviceId, deviceName);
close(fd);
}
return count;
}
/**
* Function description
*
* @return void
*/
static void cam_v4l_stream_free_buffers(CamV4lStream* stream)
{
if (!stream || !stream->buffers)
return;
/* unmap buffers */
for (int i = 0; i < stream->nBuffers; i++)
{
if (stream->buffers[i].length && stream->buffers[i].start != MAP_FAILED)
{
munmap(stream->buffers[i].start, stream->buffers[i].length);
}
}
free(stream->buffers);
stream->buffers = NULL;
stream->nBuffers = 0;
return;
}
/**
* Function description
*
* @return 0 on failure, otherwise allocated buffer size
*/
static size_t cam_v4l_stream_alloc_buffers(CamV4lStream* stream)
{
struct v4l2_requestbuffers rbuffer = { 0 };
rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
rbuffer.memory = V4L2_MEMORY_MMAP;
rbuffer.count = CAM_V4L2_BUFFERS_COUNT;
if (ioctl(stream->fd, VIDIOC_REQBUFS, &rbuffer) < 0 || rbuffer.count == 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_REQBUFS, errno %d, count %d", errno, rbuffer.count);
return 0;
}
stream->nBuffers = rbuffer.count;
/* Map the buffers */
stream->buffers = (CamV4lBuffer*)calloc(rbuffer.count, sizeof(CamV4lBuffer));
if (!stream->buffers)
{
WLog_ERR(TAG, "Failure in calloc");
return 0;
}
for (unsigned int i = 0; i < rbuffer.count; i++)
{
struct v4l2_buffer buffer = { 0 };
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
if (ioctl(stream->fd, VIDIOC_QUERYBUF, &buffer) < 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_QUERYBUF, errno %d", errno);
cam_v4l_stream_free_buffers(stream);
return 0;
}
stream->buffers[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
stream->fd, buffer.m.offset);
if (MAP_FAILED == stream->buffers[i].start)
{
WLog_ERR(TAG, "Failure in mmap, errno %d", errno);
cam_v4l_stream_free_buffers(stream);
return 0;
}
stream->buffers[i].length = buffer.length;
WLog_DBG(TAG, "Buffer %d mapped, size: %d", i, buffer.length);
if (ioctl(stream->fd, VIDIOC_QBUF, &buffer) < 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_QBUF, errno %d", errno);
cam_v4l_stream_free_buffers(stream);
return 0;
}
}
return stream->buffers[0].length;
}
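/*
 * Capture follows the standard V4L2 memory-mapped streaming sequence:
 *   VIDIOC_REQBUFS             - request driver-allocated buffers
 *   VIDIOC_QUERYBUF + mmap     - map each buffer into the process
 *   VIDIOC_QBUF                - queue the buffers for capture
 *   VIDIOC_STREAMON            - start capturing
 *   poll + VIDIOC_DQBUF/QBUF   - fetch filled frames, then requeue them
 *   VIDIOC_STREAMOFF + munmap  - stop and release everything on teardown
 */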
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT cam_v4l_stream_capture_thread(void* param)
{
CamV4lStream* stream = (CamV4lStream*)param;
int fd = stream->fd;
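/* Poll the device until streaming stops: when a frame is ready, dequeue it,
 * hand it to the sample callback and immediately requeue the buffer. */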
do
{
int retVal;
struct pollfd pfd = { 0 };
pfd.fd = fd;
pfd.events = POLLIN;
retVal = poll(&pfd, 1, CAM_V4L2_CAPTURE_THREAD_SLEEP_MS);
if (retVal == 0)
{
/* poll timed out */
continue;
}
else if (retVal < 0)
{
WLog_DBG(TAG, "Failure in poll, errno %d", errno);
Sleep(CAM_V4L2_CAPTURE_THREAD_SLEEP_MS); /* trying to recover */
continue;
}
else if (!(pfd.revents & POLLIN))
{
WLog_DBG(TAG, "poll reported non-read event %d", pfd.revents);
Sleep(CAM_V4L2_CAPTURE_THREAD_SLEEP_MS); /* also trying to recover */
continue;
}
EnterCriticalSection(&stream->lock);
if (stream->streaming)
{
struct v4l2_buffer buf = { 0 };
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
/* dequeue buffers until empty */
while (ioctl(fd, VIDIOC_DQBUF, &buf) != -1)
{
stream->sampleCallback(stream->dev, stream->streamIndex,
stream->buffers[buf.index].start, buf.bytesused);
/* enqueue buffer back */
if (ioctl(fd, VIDIOC_QBUF, &buf) == -1)
{
WLog_ERR(TAG, "Failure in VIDIOC_QBUF, errno %d", errno);
}
}
}
LeaveCriticalSection(&stream->lock);
} while (stream->streaming);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return void
*/
static void cam_v4l_stream_close_device(CamV4lStream* stream)
{
if (stream->fd != -1)
{
close(stream->fd);
stream->fd = -1;
}
}
/**
* Function description
*
* @return Null on failure, otherwise pointer to new CamV4lStream
*/
static CamV4lStream* cam_v4l_stream_create(CameraDevice* dev, int streamIndex,
ICamHalSampleCapturedCallback callback)
{
CamV4lStream* stream = calloc(1, sizeof(CamV4lStream));
if (!stream)
{
WLog_ERR(TAG, "Failure in calloc");
return NULL;
}
stream->dev = dev;
stream->streamIndex = streamIndex;
stream->sampleCallback = callback;
stream->fd = -1;
if (!InitializeCriticalSectionEx(&stream->lock, 0, 0))
{
WLog_ERR(TAG, "Failure in calloc");
free(stream);
return NULL;
}
return stream;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT cam_v4l_stream_stop(CamV4lStream* stream)
{
if (!stream || !stream->streaming)
return CHANNEL_RC_OK;
stream->streaming = FALSE; /* this will terminate capture thread */
EnterCriticalSection(&stream->lock);
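/* Holding the lock guarantees the capture thread is not inside the sample
 * callback while the stream is torn down and the buffers are unmapped. */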
/* stop streaming */
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(stream->fd, VIDIOC_STREAMOFF, &type) < 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_STREAMOFF, errno %d", errno);
}
cam_v4l_stream_free_buffers(stream);
cam_v4l_stream_close_device(stream);
LeaveCriticalSection(&stream->lock);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise CAM_ERROR_CODE
*/
static UINT cam_v4l_stream_start(ICamHal* ihal, CameraDevice* dev, int streamIndex,
const CAM_MEDIA_TYPE_DESCRIPTION* mediaType,
ICamHalSampleCapturedCallback callback)
{
CamV4lHal* hal = (CamV4lHal*)ihal;
CamV4lStream* stream = (CamV4lStream*)HashTable_GetItemValue(hal->streams, dev->deviceId);
if (!stream)
{
stream = cam_v4l_stream_create(dev, streamIndex, callback);
if (!stream)
return CAM_ERROR_CODE_OutOfMemory;
HashTable_Insert(hal->streams, dev->deviceId, stream);
}
if (stream->streaming)
{
WLog_ERR(TAG, "Streaming already in progress, device %s, streamIndex %d", dev->deviceId,
streamIndex);
return CAM_ERROR_CODE_UnexpectedError;
}
if ((stream->fd = cam_v4l_open_device(dev->deviceId, O_RDWR | O_NONBLOCK)) == -1)
{
WLog_ERR(TAG, "Unable to open device %s", dev->deviceId);
return CAM_ERROR_CODE_UnexpectedError;
}
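/* negotiate pixel format and frame size with the driver, using the values
 * from the selected media type */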
struct v4l2_format video_fmt = { 0 };
video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
video_fmt.fmt.pix.sizeimage = 0;
video_fmt.fmt.pix.width = mediaType->Width;
video_fmt.fmt.pix.height = mediaType->Height;
UINT32 pixelFormat = ecamToV4L2PixFormat(mediaType->Format);
if (pixelFormat == 0)
{
cam_v4l_stream_close_device(stream);
return CAM_ERROR_CODE_InvalidMediaType;
}
video_fmt.fmt.pix.pixelformat = pixelFormat;
/* set format and frame size */
if (ioctl(stream->fd, VIDIOC_S_FMT, &video_fmt) < 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_S_FMT, errno %d", errno);
cam_v4l_stream_close_device(stream);
return CAM_ERROR_CODE_InvalidMediaType;
}
/* try to set the frame rate, if the driver supports it */
struct v4l2_streamparm sp1 = { 0 }, sp2 = { 0 };
sp1.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(stream->fd, VIDIOC_G_PARM, &sp1) < 0 ||
!(sp1.parm.capture.capability & V4L2_CAP_TIMEPERFRAME))
{
WLog_INFO(TAG, "Driver doesn't support setting framerate");
}
else
{
sp2.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
/* timeperframe is the reciprocal of the frame rate, so numerator and denominator are swapped */
sp2.parm.capture.timeperframe.numerator = mediaType->FrameRateDenominator;
sp2.parm.capture.timeperframe.denominator = mediaType->FrameRateNumerator;
if (ioctl(stream->fd, VIDIOC_S_PARM, &sp2) < 0)
{
WLog_INFO(TAG, "Failed to set the framerate, errno %d", errno);
}
}
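/* allocate, map and queue the capture buffers before starting the stream */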
size_t maxSample = cam_v4l_stream_alloc_buffers(stream);
if (maxSample == 0)
{
WLog_ERR(TAG, "Failure to allocate video buffers");
cam_v4l_stream_close_device(stream);
return CAM_ERROR_CODE_OutOfMemory;
}
HANDLE captureThread = CreateThread(NULL, 0, cam_v4l_stream_capture_thread, stream, 0, NULL);
if (!captureThread)
{
WLog_ERR(TAG, "CreateThread failure");
cam_v4l_stream_free_buffers(stream);
cam_v4l_stream_close_device(stream);
return CAM_ERROR_CODE_OutOfMemory;
}
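/* the capture thread is already polling; setting the flag before STREAMON
 * lets it start dequeuing as soon as the driver delivers frames */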
stream->streaming = TRUE;
/* start streaming */
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(stream->fd, VIDIOC_STREAMON, &type) < 0)
{
WLog_ERR(TAG, "Failure in VIDIOC_STREAMON, errno %d", errno);
cam_v4l_stream_stop(stream);
return CAM_ERROR_CODE_UnexpectedError;
}
WLog_INFO(TAG, "Camera format: %s, width: %d, height: %d, fps: %d",
cam_v4l_get_fourcc_str(pixelFormat), mediaType->Width, mediaType->Height,
mediaType->FrameRateNumerator);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT cam_v4l_stream_stop_by_device_id(ICamHal* ihal, const char* deviceId, int streamIndex)
{
CamV4lHal* hal = (CamV4lHal*)ihal;
CamV4lStream* stream = (CamV4lStream*)HashTable_GetItemValue(hal->streams, deviceId);
if (!stream)
return CHANNEL_RC_OK;
return cam_v4l_stream_stop(stream);
}
/**
* Function description
*
* OBJECT_FREE_FN for streams hash table value
*
* @return void
*/
static void cam_v4l_stream_free(void* obj)
{
CamV4lStream* stream = (CamV4lStream*)obj;
if (!stream)
return;
cam_v4l_stream_stop(stream);
DeleteCriticalSection(&stream->lock);
free(stream);
return;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
static UINT cam_v4l_free(ICamHal* ihal)
{
CamV4lHal* hal = (CamV4lHal*)ihal;
if (hal == NULL)
return ERROR_INVALID_PARAMETER;
HashTable_Free(hal->streams);
free(hal);
return CHANNEL_RC_OK;
}
/**
* Function description
*
* @return 0 on success, otherwise a Win32 error code
*/
FREERDP_ENTRY_POINT(
UINT v4l_freerdp_rdpecam_client_subsystem_entry(PFREERDP_CAMERA_HAL_ENTRY_POINTS pEntryPoints))
{
UINT ret = CHANNEL_RC_OK;
WINPR_ASSERT(pEntryPoints);
CamV4lHal* hal = (CamV4lHal*)calloc(1, sizeof(CamV4lHal));
if (hal == NULL)
return CHANNEL_RC_NO_MEMORY;
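/* wire up the ICamHal function table that the rdpecam plugin calls into */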
hal->iHal.Enumerate = cam_v4l_enumerate;
hal->iHal.GetMediaTypeDescriptions = cam_v4l_get_media_type_descriptions;
hal->iHal.StartStream = cam_v4l_stream_start;
hal->iHal.StopStream = cam_v4l_stream_stop_by_device_id;
hal->iHal.Free = cam_v4l_free;
hal->streams = HashTable_New(FALSE);
if (!hal->streams)
{
free(hal);
return CHANNEL_RC_NO_MEMORY;
}
HashTable_SetupForStringData(hal->streams, FALSE);
wObject* obj = HashTable_ValueObject(hal->streams);
WINPR_ASSERT(obj);
obj->fnObjectFree = cam_v4l_stream_free;
if ((ret = pEntryPoints->pRegisterCameraHal(pEntryPoints->plugin, &hal->iHal)))
{
WLog_ERR(TAG, "RegisterCameraHal failed with error %" PRIu32 "", ret);
goto error;
}
return ret;
error:
cam_v4l_free(&hal->iHal);
return ret;
}