/**
 * FreeRDP: A Remote Desktop Protocol Implementation
 * Video Redirection Virtual Channel - Codec
 *
 * Copyright 2010-2011 Vic Lee
 * Copyright 2012 Hewlett-Packard Development Company, L.P.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <winpr/crt.h>
#include <winpr/stream.h>
#include <winpr/print.h>
#include "tsmf_decoder.h"
#include "tsmf_constants.h"
#include "tsmf_types.h"
#include "tsmf_codec.h"

#include <freerdp/log.h>

#define TAG CHANNELS_TAG("tsmf.client")
/* Maps a 16-byte on-wire GUID to a human-readable name (debug logging only)
 * and the internal TSMF_* type constant used by the decoder layer. */
typedef struct _TSMFMediaTypeMap
{
	BYTE guid[16];    /* raw GUID bytes as received from the server */
	const char* name; /* display name, used only in DEBUG_TSMF output */
	int type;         /* one of TSMF_MAJOR_TYPE_* / TSMF_SUB_TYPE_* / TSMF_FORMAT_TYPE_* */
} TSMFMediaTypeMap;
/* Known major media types. The all-zero GUID entry terminates the table;
 * lookup loops in this file stop on the *_UNKNOWN type. */
static const TSMFMediaTypeMap tsmf_major_type_map[] =
{
	/* 73646976-0000-0010-8000-00AA00389B71 */
	{
		{ 0x76, 0x69, 0x64, 0x73, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIATYPE_Video",
		TSMF_MAJOR_TYPE_VIDEO
	},
	/* 73647561-0000-0010-8000-00AA00389B71 */
	{
		{ 0x61, 0x75, 0x64, 0x73, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIATYPE_Audio",
		TSMF_MAJOR_TYPE_AUDIO
	},
	/* Terminator */
	{
		{ 0 },
		"Unknown",
		TSMF_MAJOR_TYPE_UNKNOWN
	}
};
/* Known media subtypes. The all-zero GUID entry terminates the table.
 * Lookup takes the FIRST matching entry (see tsmf_codec_parse_media_type). */
static const TSMFMediaTypeMap tsmf_sub_type_map[] =
{
	/* 31435657-0000-0010-8000-00AA00389B71 */
	{
		{ 0x57, 0x56, 0x43, 0x31, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WVC1",
		TSMF_SUB_TYPE_WVC1
	},
	/* 00000160-0000-0010-8000-00AA00389B71 */
	{
		{ 0x60, 0x01, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMAudioV1", /* V7, V8 has the same GUID */
		TSMF_SUB_TYPE_WMA1
	},
	/* 00000161-0000-0010-8000-00AA00389B71 */
	{
		{ 0x61, 0x01, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMAudioV2", /* V7, V8 has the same GUID */
		TSMF_SUB_TYPE_WMA2
	},
	/* 00000162-0000-0010-8000-00AA00389B71 */
	{
		{ 0x62, 0x01, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMAudioV9",
		TSMF_SUB_TYPE_WMA9
	},
	/* 00000055-0000-0010-8000-00AA00389B71 */
	{
		{ 0x55, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MP3",
		TSMF_SUB_TYPE_MP3
	},
	/* E06D802B-DB46-11CF-B4D1-00805F6CBBEA */
	{
		{ 0x2B, 0x80, 0x6D, 0xE0, 0x46, 0xDB, 0xCF, 0x11, 0xB4, 0xD1, 0x00, 0x80, 0x5F, 0x6C, 0xBB, 0xEA },
		"MEDIASUBTYPE_MPEG2_AUDIO",
		TSMF_SUB_TYPE_MP2A
	},
	/* E06D8026-DB46-11CF-B4D1-00805F6CBBEA */
	{
		{ 0x26, 0x80, 0x6D, 0xE0, 0x46, 0xDB, 0xCF, 0x11, 0xB4, 0xD1, 0x00, 0x80, 0x5F, 0x6C, 0xBB, 0xEA },
		"MEDIASUBTYPE_MPEG2_VIDEO",
		TSMF_SUB_TYPE_MP2V
	},
	/* 31564D57-0000-0010-8000-00AA00389B71 */
	{
		{ 0x57, 0x4D, 0x56, 0x31, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMV1",
		TSMF_SUB_TYPE_WMV1
	},
	/* 32564D57-0000-0010-8000-00AA00389B71 */
	{
		{ 0x57, 0x4D, 0x56, 0x32, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMV2",
		TSMF_SUB_TYPE_WMV2
	},
	/* 33564D57-0000-0010-8000-00AA00389B71 */
	{
		{ 0x57, 0x4D, 0x56, 0x33, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_WMV3",
		TSMF_SUB_TYPE_WMV3
	},
	/* 00001610-0000-0010-8000-00AA00389B71 */
	{
		{ 0x10, 0x16, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MPEG_HEAAC",
		TSMF_SUB_TYPE_AAC
	},
	/* 34363248-0000-0010-8000-00AA00389B71 */
	{
		{ 0x48, 0x32, 0x36, 0x34, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_H264",
		TSMF_SUB_TYPE_H264
	},
	/* 31435641-0000-0010-8000-00AA00389B71 */
	{
		{ 0x41, 0x56, 0x43, 0x31, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_AVC1",
		TSMF_SUB_TYPE_AVC1
	},
	/* 3334504D-0000-0010-8000-00AA00389B71 */
	{
		{ 0x4D, 0x50, 0x34, 0x33, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MP43",
		TSMF_SUB_TYPE_MP43
	},
	/* 5634504D-0000-0010-8000-00AA00389B71 */
	{
		{ 0x4D, 0x50, 0x34, 0x56, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MP4S",
		TSMF_SUB_TYPE_MP4S
	},
	/* 3234504D-0000-0010-8000-00AA00389B71 */
	{
		{ 0x4D, 0x50, 0x34, 0x32, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MP42",
		TSMF_SUB_TYPE_MP42
	},
	/* 3253344D-0000-0010-8000-00AA00389B71 */
	{
		{ 0x4D, 0x34, 0x53, 0x32, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_M4S2", /* was mislabeled "MEDIASUBTYPE_MP42"; GUID fourcc is 'M4S2' */
		TSMF_SUB_TYPE_M4S2
	},
	/* E436EB81-524F-11CE-9F53-0020AF0BA770 */
	{
		{ 0x81, 0xEB, 0x36, 0xE4, 0x4F, 0x52, 0xCE, 0x11, 0x9F, 0x53, 0x00, 0x20, 0xAF, 0x0B, 0xA7, 0x70 },
		"MEDIASUBTYPE_MP1V",
		TSMF_SUB_TYPE_MP1V
	},
	/* 00000050-0000-0010-8000-00AA00389B71 */
	{
		{ 0x50, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_MP1A",
		TSMF_SUB_TYPE_MP1A
	},
	/* E06D802C-DB46-11CF-B4D1-00805F6CBBEA */
	{
		{ 0x2C, 0x80, 0x6D, 0xE0, 0x46, 0xDB, 0xCF, 0x11, 0xB4, 0xD1, 0x00, 0x80, 0x5F, 0x6C, 0xBB, 0xEA },
		"MEDIASUBTYPE_DOLBY_AC3",
		TSMF_SUB_TYPE_AC3
	},
	/* 32595559-0000-0010-8000-00AA00389B71 */
	{
		{ 0x59, 0x55, 0x59, 0x32, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_YUY2",
		TSMF_SUB_TYPE_YUY2
	},
	/* Opencodec IDS */
	{
		{ 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_FLAC",
		TSMF_SUB_TYPE_FLAC
	},
	{
		{ 0x61, 0x34, 0x70, 0x6D, 0x7A, 0x76, 0x4D, 0x49, 0xB4, 0x78, 0xF2, 0x9D, 0x25, 0xDC, 0x90, 0x37 },
		"MEDIASUBTYPE_OGG",
		TSMF_SUB_TYPE_OGG
	},
	/* NOTE(review): this GUID is byte-identical to the M4S2 entry above, so this
	 * H263 mapping can never match (lookup stops at the first hit). Confirm the
	 * intended H263 GUID (fourcc '33363248') before changing the bytes. */
	{
		{ 0x4D, 0x34, 0x53, 0x32, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_H263",
		TSMF_SUB_TYPE_H263
	},
	/* WebMMF codec IDS */
	{
		{ 0x56, 0x50, 0x38, 0x30, 0x00, 0x00, 0x10, 0x00, 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9B, 0x71 },
		"MEDIASUBTYPE_VP8",
		TSMF_SUB_TYPE_VP8
	},
	{
		{ 0x0B, 0xD1, 0x2F, 0x8D, 0x41, 0x58, 0x6B, 0x4A, 0x89, 0x05, 0x58, 0x8F, 0xEC, 0x1A, 0xDE, 0xD9 },
		"MEDIASUBTYPE_OGG",
		TSMF_SUB_TYPE_OGG
	},
	/* Terminator */
	{
		{ 0 },
		"Unknown",
		TSMF_SUB_TYPE_UNKNOWN
	}
};
/* Known format-block type GUIDs (the FormatType field of the media type),
 * determining how tsmf_read_format_type() parses cbFormat bytes.
 * The all-zero GUID entry terminates the table. */
static const TSMFMediaTypeMap tsmf_format_type_map[] =
{
	/* AED4AB2D-7326-43CB-9464-C879CAB9C43D */
	{
		{ 0x2D, 0xAB, 0xD4, 0xAE, 0x26, 0x73, 0xCB, 0x43, 0x94, 0x64, 0xC8, 0x79, 0xCA, 0xB9, 0xC4, 0x3D },
		"FORMAT_MFVideoFormat",
		TSMF_FORMAT_TYPE_MFVIDEOFORMAT
	},
	/* 05589F81-C356-11CE-BF01-00AA0055595A */
	{
		{ 0x81, 0x9F, 0x58, 0x05, 0x56, 0xC3, 0xCE, 0x11, 0xBF, 0x01, 0x00, 0xAA, 0x00, 0x55, 0x59, 0x5A },
		"FORMAT_WaveFormatEx",
		TSMF_FORMAT_TYPE_WAVEFORMATEX
	},
	/* E06D80E3-DB46-11CF-B4D1-00805F6CBBEA */
	{
		{ 0xE3, 0x80, 0x6D, 0xE0, 0x46, 0xDB, 0xCF, 0x11, 0xB4, 0xD1, 0x00, 0x80, 0x5F, 0x6C, 0xBB, 0xEA },
		"FORMAT_MPEG2_VIDEO",
		TSMF_FORMAT_TYPE_MPEG2VIDEOINFO
	},
	/* F72A76A0-EB0A-11D0-ACE4-0000C0CC16BA */
	{
		{ 0xA0, 0x76, 0x2A, 0xF7, 0x0A, 0xEB, 0xD0, 0x11, 0xAC, 0xE4, 0x00, 0x00, 0xC0, 0xCC, 0x16, 0xBA },
		"FORMAT_VideoInfo2",
		TSMF_FORMAT_TYPE_VIDEOINFO2
	},
	/* 05589F82-C356-11CE-BF01-00AA0055595A */
	{
		{ 0x82, 0x9F, 0x58, 0x05, 0x56, 0xC3, 0xCE, 0x11, 0xBF, 0x01, 0x00, 0xAA, 0x00, 0x55, 0x59, 0x5A },
		"FORMAT_MPEG1_VIDEO",
		TSMF_FORMAT_TYPE_MPEG1VIDEOINFO
	},
	/* Terminator */
	{
		{ 0 },
		"Unknown",
		TSMF_FORMAT_TYPE_UNKNOWN
	}
};
/* Log a 16-byte GUID in registry string form ("XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX").
 * The first three GUID fields are stored little-endian on the wire, the last
 * two big-endian, hence the byte ordering table. No-op unless WITH_DEBUG_TSMF. */
static void tsmf_print_guid(const BYTE* guid)
{
#ifdef WITH_DEBUG_TSMF
	/* Output position of each raw byte: fields 1-3 reversed, rest in order. */
	static const int order[16] = { 3, 2, 1, 0, 5, 4, 7, 6, 8, 9, 10, 11, 12, 13, 14, 15 };
	char text[37];
	size_t pos = 0;
	int i;

	for (i = 0; i < 16; i++)
	{
		/* Dashes precede the 2nd, 3rd, 4th and 5th groups. */
		if (i == 4 || i == 6 || i == 8 || i == 10)
			text[pos++] = '-';

		pos += snprintf(&text[pos], sizeof(text) - pos, "%02"PRIX8, guid[order[i]]);
	}

	WLog_INFO(TAG, "%s", text);
#endif
}
/* http://msdn.microsoft.com/en-us/library/dd318229.aspx */
/*
 * Parse a BITMAPINFOHEADER from the stream, filling in the media type's
 * Width/Height if they are not already set.
 *
 * bypass: when TRUE, also skip past any extended header bytes (biSize > 40)
 *         so the stream is positioned after the whole header.
 * Returns the number of bytes consumed (biSize when bypassing, else the 40
 * fixed bytes), or 0 on a short/invalid stream.
 */
static UINT32 tsmf_codec_parse_BITMAPINFOHEADER(TS_AM_MEDIA_TYPE* mediatype, wStream* s, BOOL bypass)
{
	UINT32 biSize;
	UINT32 biWidth;
	UINT32 biHeight;

	/* The fixed part of a BITMAPINFOHEADER is 40 bytes. */
	if (Stream_GetRemainingLength(s) < 40)
		return 0;

	Stream_Read_UINT32(s, biSize);
	Stream_Read_UINT32(s, biWidth);
	Stream_Read_UINT32(s, biHeight);
	/* Skip the remaining fixed fields (biPlanes .. biClrImportant). */
	Stream_Seek(s, 28);

	/* Only adopt bitmap dimensions when the enclosing header did not set them. */
	if (mediatype->Width == 0)
		mediatype->Width = biWidth;

	if (mediatype->Height == 0)
		mediatype->Height = biHeight;

	/* Assume there will be no color table for video? */
	/* Reject biSize smaller than the fixed header or extensions past the stream end. */
	if ((biSize < 40) || (Stream_GetRemainingLength(s) < (biSize-40)))
		return 0;

	if (bypass && biSize > 40)
		Stream_Seek(s, biSize - 40);

	return (bypass ? biSize : 40);
}
/* http://msdn.microsoft.com/en-us/library/dd407326.aspx */
2014-08-19 20:26:39 +04:00
static UINT32 tsmf_codec_parse_VIDEOINFOHEADER2(TS_AM_MEDIA_TYPE* mediatype, wStream* s)
2011-09-19 18:54:09 +04:00
{
2012-10-09 11:26:39 +04:00
UINT64 AvgTimePerFrame;
2011-09-19 18:54:09 +04:00
/* VIDEOINFOHEADER2.rcSource, RECT(LONG left, LONG top, LONG right, LONG bottom) */
if (Stream_GetRemainingLength(s) < 72)
return 0;
Stream_Seek_UINT32(s);
Stream_Seek_UINT32(s);
2013-05-09 00:09:16 +04:00
Stream_Read_UINT32(s, mediatype->Width);
Stream_Read_UINT32(s, mediatype->Height);
2011-09-19 18:54:09 +04:00
/* VIDEOINFOHEADER2.rcTarget */
Stream_Seek(s, 16);
2011-09-19 18:54:09 +04:00
/* VIDEOINFOHEADER2.dwBitRate */
2013-05-09 00:09:16 +04:00
Stream_Read_UINT32(s, mediatype->BitRate);
2011-09-19 18:54:09 +04:00
/* VIDEOINFOHEADER2.dwBitErrorRate */
Stream_Seek_UINT32(s);
2011-09-19 18:54:09 +04:00
/* VIDEOINFOHEADER2.AvgTimePerFrame */
2013-05-09 00:09:16 +04:00
Stream_Read_UINT64(s, AvgTimePerFrame);
2011-09-19 18:54:09 +04:00
mediatype->SamplesPerSecond.Numerator = 1000000;
mediatype->SamplesPerSecond.Denominator = (int)(AvgTimePerFrame / 10LL);
/* Remaining fields before bmiHeader */
Stream_Seek(s, 24);
2011-09-19 18:54:09 +04:00
return 72;
}
/* http://msdn.microsoft.com/en-us/library/dd390700.aspx */
2014-08-19 20:26:39 +04:00
static UINT32 tsmf_codec_parse_VIDEOINFOHEADER(TS_AM_MEDIA_TYPE* mediatype, wStream* s)
{
2014-05-23 15:51:47 +04:00
/*
typedef struct tagVIDEOINFOHEADER {
RECT rcSource; //16
RECT rcTarget; //16 32
DWORD dwBitRate; //4 36
DWORD dwBitErrorRate; //4 40
REFERENCE_TIME AvgTimePerFrame; //8 48
BITMAPINFOHEADER bmiHeader;
} VIDEOINFOHEADER;
*/
2012-10-09 11:26:39 +04:00
UINT64 AvgTimePerFrame;
if (Stream_GetRemainingLength(s) < 48)
return 0;
/* VIDEOINFOHEADER.rcSource, RECT(LONG left, LONG top, LONG right, LONG bottom) */
Stream_Seek_UINT32(s);
Stream_Seek_UINT32(s);
2013-05-09 00:09:16 +04:00
Stream_Read_UINT32(s, mediatype->Width);
Stream_Read_UINT32(s, mediatype->Height);
/* VIDEOINFOHEADER.rcTarget */
Stream_Seek(s, 16);
/* VIDEOINFOHEADER.dwBitRate */
2013-05-09 00:09:16 +04:00
Stream_Read_UINT32(s, mediatype->BitRate);
/* VIDEOINFOHEADER.dwBitErrorRate */
Stream_Seek_UINT32(s);
/* VIDEOINFOHEADER.AvgTimePerFrame */
2013-05-09 00:09:16 +04:00
Stream_Read_UINT64(s, AvgTimePerFrame);
mediatype->SamplesPerSecond.Numerator = 1000000;
mediatype->SamplesPerSecond.Denominator = (int)(AvgTimePerFrame / 10LL);
return 48;
}
/*
 * Parse the format block (cbFormat bytes) according to mediatype->FormatType,
 * filling in width/height/rate fields and pointing ExtraData into the stream
 * (no copy - ExtraData aliases the stream buffer).
 * Returns FALSE on a truncated/malformed block; unknown format types are
 * logged and treated as success.
 */
static BOOL tsmf_read_format_type(TS_AM_MEDIA_TYPE* mediatype, wStream* s, UINT32 cbFormat)
{
	UINT32 i, j;

	switch (mediatype->FormatType)
	{
		case TSMF_FORMAT_TYPE_MFVIDEOFORMAT:
			/* http://msdn.microsoft.com/en-us/library/aa473808.aspx */
			if (Stream_GetRemainingLength(s) < 176)
				return FALSE;

			Stream_Seek(s, 8); /* dwSize and ? */
			Stream_Read_UINT32(s, mediatype->Width); /* videoInfo.dwWidth */
			Stream_Read_UINT32(s, mediatype->Height); /* videoInfo.dwHeight */
			Stream_Seek(s, 32);
			/* videoInfo.FramesPerSecond */
			Stream_Read_UINT32(s, mediatype->SamplesPerSecond.Numerator);
			Stream_Read_UINT32(s, mediatype->SamplesPerSecond.Denominator);
			Stream_Seek(s, 80);
			Stream_Read_UINT32(s, mediatype->BitRate); /* compressedInfo.AvgBitrate */
			Stream_Seek(s, 36);

			if (cbFormat > 176)
			{
				mediatype->ExtraDataSize = cbFormat - 176;

				/* Bounds check added: every other case validates ExtraDataSize
				 * against the remaining stream; this one did not, allowing an
				 * out-of-bounds ExtraData window on malformed input. */
				if (Stream_GetRemainingLength(s) < mediatype->ExtraDataSize)
					return FALSE;

				mediatype->ExtraData = Stream_Pointer(s);
			}
			break;

		case TSMF_FORMAT_TYPE_WAVEFORMATEX:
			/* http://msdn.microsoft.com/en-us/library/dd757720.aspx */
			if (Stream_GetRemainingLength(s) < 18)
				return FALSE;

			Stream_Seek_UINT16(s); /* wFormatTag */
			Stream_Read_UINT16(s, mediatype->Channels);
			Stream_Read_UINT32(s, mediatype->SamplesPerSecond.Numerator);
			mediatype->SamplesPerSecond.Denominator = 1;
			Stream_Read_UINT32(s, mediatype->BitRate); /* nAvgBytesPerSec */
			mediatype->BitRate *= 8; /* bytes/s -> bits/s */
			Stream_Read_UINT16(s, mediatype->BlockAlign);
			Stream_Read_UINT16(s, mediatype->BitsPerSample);
			Stream_Read_UINT16(s, mediatype->ExtraDataSize); /* cbSize */

			if (mediatype->ExtraDataSize > 0)
			{
				if (Stream_GetRemainingLength(s) < mediatype->ExtraDataSize)
					return FALSE;

				mediatype->ExtraData = Stream_Pointer(s);
			}
			break;

		case TSMF_FORMAT_TYPE_MPEG1VIDEOINFO:
			/* http://msdn.microsoft.com/en-us/library/dd390700.aspx */
			i = tsmf_codec_parse_VIDEOINFOHEADER(mediatype, s);

			if (!i)
				return FALSE;

			j = tsmf_codec_parse_BITMAPINFOHEADER(mediatype, s, TRUE);

			if (!j)
				return FALSE;

			i += j;

			if (cbFormat > i)
			{
				mediatype->ExtraDataSize = cbFormat - i;

				if (Stream_GetRemainingLength(s) < mediatype->ExtraDataSize)
					return FALSE;

				mediatype->ExtraData = Stream_Pointer(s);
			}
			break;

		case TSMF_FORMAT_TYPE_MPEG2VIDEOINFO:
			/* http://msdn.microsoft.com/en-us/library/dd390707.aspx */
			i = tsmf_codec_parse_VIDEOINFOHEADER2(mediatype, s);

			if (!i)
				return FALSE;

			j = tsmf_codec_parse_BITMAPINFOHEADER(mediatype, s, TRUE);

			if (!j)
				return FALSE;

			i += j;

			if (cbFormat > i)
			{
				mediatype->ExtraDataSize = cbFormat - i;

				if (Stream_GetRemainingLength(s) < mediatype->ExtraDataSize)
					return FALSE;

				mediatype->ExtraData = Stream_Pointer(s);
			}
			break;

		case TSMF_FORMAT_TYPE_VIDEOINFO2:
			i = tsmf_codec_parse_VIDEOINFOHEADER2(mediatype, s);

			if (!i)
				return FALSE;

			/* bypass FALSE: stay positioned inside the bitmap header extension */
			j = tsmf_codec_parse_BITMAPINFOHEADER(mediatype, s, FALSE);

			if (!j)
				return FALSE;

			i += j;

			if (cbFormat > i)
			{
				mediatype->ExtraDataSize = cbFormat - i;

				if (Stream_GetRemainingLength(s) < mediatype->ExtraDataSize)
					return FALSE;

				mediatype->ExtraData = Stream_Pointer(s);
			}
			break;

		default:
			WLog_INFO(TAG, "unhandled format type 0x%x", mediatype->FormatType);
			break;
	}

	return TRUE;
}
/*
 * Parse a TS_AM_MEDIA_TYPE from the stream: major type GUID, subtype GUID,
 * three skipped fields, format type GUID, cbFormat and the format block.
 * Returns FALSE if any GUID is unknown or the format block is malformed;
 * the mediatype is still filled in as far as parsing got.
 */
BOOL tsmf_codec_parse_media_type(TS_AM_MEDIA_TYPE* mediatype, wStream* s)
{
	UINT32 cbFormat;
	BOOL ret = TRUE;
	int i;

	ZeroMemory(mediatype, sizeof(TS_AM_MEDIA_TYPE));

	/* MajorType */
	DEBUG_TSMF("MediaMajorType:");

	if (Stream_GetRemainingLength(s) < 16)
		return FALSE;

	tsmf_print_guid(Stream_Pointer(s));

	/* Linear scan; the terminator entry guarantees the loop stops. */
	for (i = 0; tsmf_major_type_map[i].type != TSMF_MAJOR_TYPE_UNKNOWN; i++)
	{
		if (memcmp(tsmf_major_type_map[i].guid, Stream_Pointer(s), 16) == 0)
			break;
	}

	mediatype->MajorType = tsmf_major_type_map[i].type;

	if (mediatype->MajorType == TSMF_MAJOR_TYPE_UNKNOWN)
		ret = FALSE;

	DEBUG_TSMF("MediaMajorType %s", tsmf_major_type_map[i].name);
	Stream_Seek(s, 16);

	/* SubType */
	DEBUG_TSMF("MediaSubType:");

	if (Stream_GetRemainingLength(s) < 16)
		return FALSE;

	tsmf_print_guid(Stream_Pointer(s));

	for (i = 0; tsmf_sub_type_map[i].type != TSMF_SUB_TYPE_UNKNOWN; i++)
	{
		if (memcmp(tsmf_sub_type_map[i].guid, Stream_Pointer(s), 16) == 0)
			break;
	}

	mediatype->SubType = tsmf_sub_type_map[i].type;

	if (mediatype->SubType == TSMF_SUB_TYPE_UNKNOWN)
		ret = FALSE;

	DEBUG_TSMF("MediaSubType %s", tsmf_sub_type_map[i].name);
	Stream_Seek(s, 16);

	/* bFixedSizeSamples, bTemporalCompression, SampleSize */
	if (Stream_GetRemainingLength(s) < 12)
		return FALSE;

	Stream_Seek(s, 12);

	/* FormatType */
	DEBUG_TSMF("FormatType:");

	if (Stream_GetRemainingLength(s) < 16)
		return FALSE;

	tsmf_print_guid(Stream_Pointer(s));

	for (i = 0; tsmf_format_type_map[i].type != TSMF_FORMAT_TYPE_UNKNOWN; i++)
	{
		if (memcmp(tsmf_format_type_map[i].guid, Stream_Pointer(s), 16) == 0)
			break;
	}

	mediatype->FormatType = tsmf_format_type_map[i].type;

	if (mediatype->FormatType == TSMF_FORMAT_TYPE_UNKNOWN)
		ret = FALSE;

	DEBUG_TSMF("FormatType %s", tsmf_format_type_map[i].name);
	Stream_Seek(s, 16);

	/* cbFormat */
	if (Stream_GetRemainingLength(s) < 4)
		return FALSE;

	Stream_Read_UINT32(s, cbFormat);
	DEBUG_TSMF("cbFormat %"PRIu32"", cbFormat);
#ifdef WITH_DEBUG_TSMF
	{
		/* cbFormat comes from the wire; clamp the dump so a malformed value
		 * cannot read past the end of the stream buffer. */
		size_t dumpLength = Stream_GetRemainingLength(s);

		if (cbFormat < dumpLength)
			dumpLength = cbFormat;

		winpr_HexDump(TAG, WLOG_DEBUG, Stream_Pointer(s), dumpLength);
	}
#endif

	/* Do not clobber an earlier FALSE (unknown major/sub/format type):
	 * the previous `ret = tsmf_read_format_type(...)` overwrote it, letting
	 * unrecognized media types report success. */
	if (!tsmf_read_format_type(mediatype, s, cbFormat))
		ret = FALSE;

	/* Guard against division by zero downstream. */
	if (mediatype->SamplesPerSecond.Numerator == 0)
		mediatype->SamplesPerSecond.Numerator = 1;

	if (mediatype->SamplesPerSecond.Denominator == 0)
		mediatype->SamplesPerSecond.Denominator = 1;

	return ret;
}
/*
 * Check whether the media type at the current stream position can be handled
 * by the named decoder. The stream position is restored before returning.
 */
BOOL tsmf_codec_check_media_type(const char* decoder_name, wStream* s)
{
	static BOOL decoderAvailable = FALSE;
	static BOOL firstRun = TRUE;
	BYTE* savedPointer;
	BOOL ret = FALSE;
	TS_AM_MEDIA_TYPE mediatype;

	/* Probe for the decoder once and cache the result.
	 * NOTE(review): the static flags make this cache process-wide and keyed to
	 * the first decoder_name seen; confirm callers serialize the first call. */
	if (firstRun)
	{
		firstRun = FALSE;
		decoderAvailable = tsmf_check_decoder_available(decoder_name);
	}

	Stream_GetPointer(s, savedPointer);

	if (decoderAvailable)
		ret = tsmf_codec_parse_media_type(&mediatype, s);

	/* Peek only: rewind so the caller can re-parse the same data. */
	Stream_SetPointer(s, savedPointer);

	if (ret)
	{
		ITSMFDecoder* decoder = tsmf_load_decoder(decoder_name, &mediatype);

		if (decoder)
		{
			decoder->Free(decoder);
		}
		else
		{
			WLog_WARN(TAG, "Format not supported by decoder %s", decoder_name);
			ret = FALSE;
		}
	}

	return ret;
}