[channel,rdpecam] framerate support and h264 bitrate tuneup

oleg0421 2024-06-15 19:05:35 -07:00
parent bcd663b7d9
commit f48c8eacad
3 changed files with 99 additions and 39 deletions

View File

@@ -56,11 +56,6 @@
  */
 #define ECAM_SAMPLE_RESPONSE_BUFFER_SIZE (1024 * 4050)
 
-/* 4 Mbps max encoded bitrate seems to produce reasonably
- * good quality with H264_RATECONTROL_VBR.
- */
-#define ECAM_H264_ENCODED_BITRATE 4000000
-
 typedef struct s_ICamHal ICamHal;
 
 typedef struct

View File

@@ -23,6 +23,44 @@
 #define TAG CHANNELS_TAG("rdpecam-video.client")
 
+/**
+ * Function description
+ *
+ * @return bitrate in bps
+ */
+static UINT32 ecam_encoder_h264_get_max_bitrate(CameraDeviceStream* stream)
+{
+    static struct Bitrates
+    {
+        UINT32 height;
+        UINT32 bitrate; /* kbps */
+    } bitrates[] = {
+        /* source: https://livekit.io/webrtc/bitrate-guide (webcam streaming)
+         *
+         * sorted by height in descending order
+         */
+        { 1080, 2700 }, { 720, 1250 }, { 480, 700 }, { 360, 400 },
+        { 240, 170 },   { 180, 140 },  { 0, 100 },
+    };
+    const size_t nBitrates = ARRAYSIZE(bitrates);
+
+    UINT32 height = stream->currMediaType.Height;
+
+    for (size_t i = 0; i < nBitrates; i++)
+    {
+        if (height >= bitrates[i].height)
+        {
+            UINT32 bitrate = bitrates[i].bitrate;
+            WLog_DBG(TAG, "Setting h264 max bitrate: %u kbps", bitrate);
+            return bitrate * 1000;
+        }
+    }
+
+    WINPR_ASSERT(FALSE);
+    return 0;
+}
+
 /**
  * Function description
  *
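Note on the lookup above (annotation, not part of the patch): the table is ordered by descending height and ends with a catch-all { 0, 100 } row, so the first row whose height the stream meets or exceeds wins, and the WINPR_ASSERT(FALSE) path is unreachable. For a 1280x720 media type this yields 1,250,000 bps, replacing the previous fixed 4 Mbps for every resolution. A minimal standalone sketch of the same mapping, with made-up test heights:

/* Standalone sketch (illustrative heights only); mirrors the height -> kbps
 * table and the ">=" walk used by ecam_encoder_h264_get_max_bitrate(). */
#include <stdio.h>
#include <stddef.h>

static unsigned max_bitrate_bps(unsigned height)
{
    static const struct
    {
        unsigned height;
        unsigned kbps;
    } rates[] = {
        { 1080, 2700 }, { 720, 1250 }, { 480, 700 }, { 360, 400 },
        { 240, 170 },   { 180, 140 },  { 0, 100 },
    };

    for (size_t i = 0; i < sizeof(rates) / sizeof(rates[0]); i++)
    {
        if (height >= rates[i].height)
            return rates[i].kbps * 1000; /* kbps -> bps */
    }
    return 0; /* unreachable: the { 0, 100 } row matches any height */
}

int main(void)
{
    /* prints: 1250000 700000 100000 */
    printf("%u %u %u\n", max_bitrate_bps(720), max_bitrate_bps(480), max_bitrate_bps(120));
    return 0;
}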
@@ -136,11 +174,12 @@ static BOOL ecam_encoder_context_init_h264(CameraDeviceStream* stream)
         goto fail;
 
     if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_FRAMERATE,
-                                 stream->currMediaType.FrameRateNumerator))
+                                 stream->currMediaType.FrameRateNumerator /
+                                     stream->currMediaType.FrameRateDenominator))
         goto fail;
 
     if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_BITRATE,
-                                 ECAM_H264_ENCODED_BITRATE))
+                                 ecam_encoder_h264_get_max_bitrate(stream)))
         goto fail;
 
     if (!h264_context_set_option(stream->h264, H264_CONTEXT_OPTION_RATECONTROL,
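Annotation (not part of the patch): the frame rate handed to H264_CONTEXT_OPTION_FRAMERATE is now derived from the media type's numerator/denominator pair. Both fields are unsigned integers, so the division truncates to a whole frame rate. A quick illustrative check:

#include <stdio.h>

int main(void)
{
    /* an NTSC-style camera rate reported as a fraction */
    unsigned num = 30000, den = 1001;
    printf("fps passed to the encoder: %u\n", num / den); /* prints 29 */
    return 0;
}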

View File

@@ -33,6 +33,9 @@
 #define CAM_V4L2_BUFFERS_COUNT 4
 #define CAM_V4L2_CAPTURE_THREAD_SLEEP_MS 1000
 
+#define CAM_V4L2_FRAMERATE_NUMERATOR_DEFAULT 30
+#define CAM_V4L2_FRAMERATE_DENOMINATOR_DEFAULT 1
+
 typedef struct
 {
     void* start;
@@ -114,6 +117,24 @@ static UINT32 ecamToV4L2PixFormat(CAM_MEDIA_FORMAT ecamFormat)
     }
 }
 
+/**
+ * Function description
+ *
+ * @return TRUE or FALSE
+ */
+static BOOL cam_v4l_format_supported(int fd, UINT32 format)
+{
+    struct v4l2_fmtdesc fmtdesc = { 0 };
+    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+    for (fmtdesc.index = 0; ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0; fmtdesc.index++)
+    {
+        if (fmtdesc.pixelformat == format)
+            return TRUE;
+    }
+    return FALSE;
+}
+
 /**
  * Function description
  *
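Annotation (not part of the patch): the helper above is the standard VIDIOC_ENUM_FMT enumeration loop, walking indices until the driver returns an error. A self-contained sketch of the same loop, handy for checking what a capture node advertises; the /dev/video0 path is an assumption:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDONLY);
    if (fd < 0)
    {
        perror("open");
        return 1;
    }

    struct v4l2_fmtdesc fmtdesc;
    memset(&fmtdesc, 0, sizeof(fmtdesc));
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    /* index increases until VIDIOC_ENUM_FMT fails, i.e. no more formats */
    for (fmtdesc.index = 0; ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) == 0; fmtdesc.index++)
        printf("format %u: %.4s (%s)\n", fmtdesc.index, (const char*)&fmtdesc.pixelformat,
               (const char*)fmtdesc.description);

    close(fd);
    return 0;
}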
@@ -172,17 +193,6 @@ static INT16 cam_v4l_get_media_type_descriptions(ICamHal* hal, const char* devic
     size_t nTypes = 0;
     int formatIndex;
     BOOL formatFound = FALSE;
 
-    struct v4l2_format video_fmt = { 0 };
-    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-    video_fmt.fmt.pix.sizeimage = 0;
-
-    unsigned int videoSizes[][2] = { { 160, 90 },   { 160, 120 },  { 320, 180 },   { 320, 240 },
-                                     { 432, 240 },  { 352, 288 },  { 640, 360 },   { 800, 448 },
-                                     { 640, 480 },  { 848, 480 },  { 864, 480 },   { 960, 540 },
-                                     { 1024, 576 }, { 800, 600 },  { 960, 720 },   { 1280, 720 },
-                                     { 1024, 768 }, { 1600, 896 }, { 1440, 1080 }, { 1920, 1080 } };
-    const int totalSizes = sizeof(videoSizes) / sizeof(unsigned int[2]);
-
     if ((fd = cam_v4l_open_device(deviceId, O_RDONLY)) == -1)
     {
@@ -194,32 +204,48 @@
     {
         UINT32 pixelFormat = ecamToV4L2PixFormat(supportedFormats[formatIndex].inputFormat);
         WINPR_ASSERT(pixelFormat != 0);
+        struct v4l2_frmsizeenum frmsize = { 0 };
 
-        for (int i = 0; i < totalSizes; i++)
+        if (!cam_v4l_format_supported(fd, pixelFormat))
+            continue;
+
+        frmsize.pixel_format = pixelFormat;
+        for (frmsize.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0; frmsize.index++)
         {
-            video_fmt.fmt.pix.pixelformat = pixelFormat;
-            video_fmt.fmt.pix.width = videoSizes[i][0];
-            video_fmt.fmt.pix.height = videoSizes[i][1];
+            struct v4l2_frmivalenum frmival = { 0 };
 
-            if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) < 0 ||
-                video_fmt.fmt.pix.pixelformat != pixelFormat ||
-                video_fmt.fmt.pix.width != videoSizes[i][0] ||
-                video_fmt.fmt.pix.height != videoSizes[i][1])
-                continue;
+            if (frmsize.type != V4L2_FRMSIZE_TYPE_DISCRETE)
+                break; /* don't support size types other than discrete */
 
             formatFound = TRUE;
-            mediaTypes->Width = video_fmt.fmt.pix.width;
-            mediaTypes->Height = video_fmt.fmt.pix.height;
+            mediaTypes->Width = frmsize.discrete.width;
+            mediaTypes->Height = frmsize.discrete.height;
             mediaTypes->Format = supportedFormats[formatIndex].inputFormat;
 
-            /* V4l2 does not have a stable method of knowing fps so we use 30 */
-            mediaTypes->FrameRateNumerator = 30;
-            mediaTypes->FrameRateDenominator = 1;
+            /* query frame rate (1st is highest fps supported) */
+            frmival.index = 0;
+            frmival.pixel_format = pixelFormat;
+            frmival.width = frmsize.discrete.width;
+            frmival.height = frmsize.discrete.height;
+
+            if (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) == 0 &&
+                frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
+            {
+                /* inverse of a fraction */
+                mediaTypes->FrameRateNumerator = frmival.discrete.denominator;
+                mediaTypes->FrameRateDenominator = frmival.discrete.numerator;
+            }
+            else
+            {
+                WLog_DBG(TAG, "VIDIOC_ENUM_FRAMEINTERVALS failed, using default framerate");
+                mediaTypes->FrameRateNumerator = CAM_V4L2_FRAMERATE_NUMERATOR_DEFAULT;
+                mediaTypes->FrameRateDenominator = CAM_V4L2_FRAMERATE_DENOMINATOR_DEFAULT;
+            }
+
             mediaTypes->PixelAspectRatioNumerator = mediaTypes->PixelAspectRatioDenominator = 1;
 
-            WLog_DBG(
-                TAG, "Camera capability %d: width: %d, height: %d, fourcc: %s, type: %d, fps: %d",
-                nTypes, mediaTypes->Width, mediaTypes->Height, cam_v4l_get_fourcc_str(pixelFormat),
-                mediaTypes->Format, mediaTypes->FrameRateNumerator);
+            WLog_DBG(TAG, "Camera format: %s, width: %u, height: %u, fps: %u/%u",
+                     cam_v4l_get_fourcc_str(pixelFormat), mediaTypes->Width, mediaTypes->Height,
+                     mediaTypes->FrameRateNumerator, mediaTypes->FrameRateDenominator);
 
             mediaTypes++;
             nTypes++;
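Annotation (not part of the patch): the new enumeration replaces probing a fixed list of resolutions with VIDIOC_TRY_FMT by asking the driver directly for its discrete frame sizes and, per size, its first (highest-fps) frame interval. A standalone sketch of the same pattern; the device path and the YUYV pixel format are assumptions:

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main(void)
{
    int fd = open("/dev/video0", O_RDONLY);
    if (fd < 0)
    {
        perror("open");
        return 1;
    }

    struct v4l2_frmsizeenum frmsize;
    memset(&frmsize, 0, sizeof(frmsize));
    frmsize.pixel_format = V4L2_PIX_FMT_YUYV;

    for (frmsize.index = 0; ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize) == 0; frmsize.index++)
    {
        if (frmsize.type != V4L2_FRMSIZE_TYPE_DISCRETE)
            break; /* stepwise/continuous sizes are skipped, as in the patch */

        struct v4l2_frmivalenum frmival;
        memset(&frmival, 0, sizeof(frmival));
        frmival.pixel_format = V4L2_PIX_FMT_YUYV;
        frmival.width = frmsize.discrete.width;
        frmival.height = frmsize.discrete.height;

        /* index 0 is the shortest interval, i.e. the highest frame rate */
        if (ioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &frmival) == 0 &&
            frmival.type == V4L2_FRMIVAL_TYPE_DISCRETE)
        {
            /* fps is the inverse of the interval fraction */
            printf("%ux%u @ %u/%u fps\n", frmsize.discrete.width, frmsize.discrete.height,
                   frmival.discrete.denominator, frmival.discrete.numerator);
        }
        else
        {
            printf("%ux%u (interval query failed, assume 30/1)\n", frmsize.discrete.width,
                   frmsize.discrete.height);
        }
    }

    close(fd);
    return 0;
}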
@@ -615,7 +641,7 @@ static UINT cam_v4l_stream_start(ICamHal* ihal, CameraDevice* dev, int streamInd
     if (maxSample == 0)
     {
         WLog_ERR(TAG, "Failure to allocate video buffers");
-        cam_v4l_stream_stop(stream);
+        cam_v4l_stream_close_device(stream);
         return CAM_ERROR_CODE_OutOfMemory;
     }
@@ -639,9 +665,9 @@
         return CAM_ERROR_CODE_OutOfMemory;
     }
 
-    WLog_INFO(TAG, "Camera format: %s, width: %d, height: %d, fps: %d",
-              cam_v4l_get_fourcc_str(pixelFormat), mediaType->Width, mediaType->Height,
-              mediaType->FrameRateNumerator);
+    WLog_INFO(TAG, "Camera format: %s, width: %u, height: %u, fps: %u/%u",
+              cam_v4l_get_fourcc_str(pixelFormat), mediaType->Width, mediaType->Height,
+              mediaType->FrameRateNumerator, mediaType->FrameRateDenominator);
 
     return CHANNEL_RC_OK;
 }