Pictures they move and everything and hark what is that noise

git-svn-id: file:///srv/svn/repos/haiku/haiku/trunk@30497 a95241bf-73f2-0310-859d-f6bbb57e9c96
David McPaul 2009-04-30 10:21:34 +00:00
parent 797f475dba
commit 94793d4d86
8 changed files with 402 additions and 163 deletions


@@ -92,11 +92,19 @@ ASFFileReader::ParseFile()
}
uint32 totalStreams = getStreamCount();
StreamHeader streamHeader;
StreamEntry streamEntry;
for (int i=0;i < totalStreams;i++) {
streamHeader.streamIndex = i;
streams.push_back(streamHeader);
for (uint32 i=0;i < totalStreams;i++) {
streamEntry.streamIndex = i;
streams.push_back(streamEntry);
}
ParseIndex();
// load the first packet
if (asf_get_packet(asfFile, packet) < 0) {
printf("Could not get first packet\n");
return B_ERROR;
}
return B_OK;
@@ -194,15 +202,19 @@ ASFFileReader::getVideoFormat(uint32 streamIndex, ASFVideoFormat *format)
bigtime_t
ASFFileReader::getVideoDuration(uint32 streamIndex)
ASFFileReader::getStreamDuration(uint32 streamIndex)
{
if (streamIndex < streams.size()) {
return streams[streamIndex].getDuration();
}
asf_stream_t *stream;
stream = asf_get_stream(asfFile, streamIndex);
if (stream) {
if (stream->flags & ASF_STREAM_FLAG_EXTENDED) {
printf("VIDEO end time %Ld, start time %Ld\n",stream->extended->end_time, stream->extended->start_time);
printf("STREAM %ld end time %Ld, start time %Ld\n",streamIndex, stream->extended->end_time, stream->extended->start_time);
if (stream->extended->end_time - stream->extended->start_time > 0) {
return stream->extended->end_time - stream->extended->start_time;
}
@@ -212,45 +224,14 @@ ASFFileReader::getVideoDuration(uint32 streamIndex)
return asf_get_duration(asfFile) / 10;
}
bigtime_t
ASFFileReader::getAudioDuration(uint32 streamIndex)
{
asf_stream_t *stream;
stream = asf_get_stream(asfFile, streamIndex);
if (stream) {
if (stream->flags & ASF_STREAM_FLAG_EXTENDED) {
printf("AUDIO end time %Ld, start time %Ld\n",stream->extended->end_time, stream->extended->start_time);
if (stream->extended->end_time - stream->extended->start_time > 0) {
return stream->extended->end_time - stream->extended->start_time;
}
}
}
return asf_get_duration(asfFile) / 10; // convert from 100 nanosecond units to microseconds
}
bigtime_t
ASFFileReader::getMaxDuration()
{
return asf_get_duration(asfFile) / 10;
}
// Not really frame count, really total data packets
uint32
ASFFileReader::getFrameCount(uint32 streamIndex)
{
return asf_get_data_packets(asfFile);
}
if (streamIndex < streams.size()) {
return streams[streamIndex].getFrameCount();
}
uint32
ASFFileReader::getAudioChunkCount(uint32 streamIndex)
{
return asf_get_data_packets(asfFile);
return 0;
}
bool
@@ -282,12 +263,6 @@ ASFFileReader::IsAudio(uint32 streamIndex)
return false;
}
void
ASFFileReader::AddIndex(uint32 streamIndex, uint32 frameNo, bool keyFrame, bigtime_t pts, uint8 *data, uint32 size)
{
streams[streamIndex].AddIndex(frameNo, keyFrame, pts, data, size);
}
IndexEntry
ASFFileReader::GetIndex(uint32 streamIndex, uint32 frameNo)
{
@@ -304,31 +279,115 @@ ASFFileReader::HasIndex(uint32 streamIndex, uint32 frameNo)
return false;
}
uint32
ASFFileReader::GetFrameForTime(uint32 streamIndex, bigtime_t time)
{
if (streamIndex < streams.size()) {
return streams[streamIndex].GetIndex(time).frameNo;
}
return 0;
}
void
ASFFileReader::ParseIndex() {
// Try to build some sort of useful index
// packet->send_time seems to be a better presentation time stamp than pts though
if (asf_seek_to_msec(asfFile,0) < 0) {
printf("Seek to start of stream failed\n");
}
asf_payload_t *payload;
while (asf_get_packet(asfFile, packet) > 0) {
for (int i=0;i<packet->payload_count;i++) {
payload = (asf_payload_t *)(&packet->payloads[i]);
// printf("Payload %d Stream %d Keyframe %d send time %ld pts %ld id %d size %d\n",i+1,payload->stream_number,payload->key_frame, packet->send_time * 1000L, payload->pts * 1000L, payload->media_object_number, payload->datalen);
if (payload->stream_number < streams.size()) {
streams[payload->stream_number].AddPayload(payload->media_object_number, payload->key_frame, packet->send_time * 1000, payload->datalen, false);
}
}
}
for (uint32 i=0;i<streams.size();i++) {
streams[i].AddPayload(0, false, 0, 0, true);
streams[i].setDuration((packet->send_time + packet->duration) * 1000);
}
if (asf_seek_to_msec(asfFile,0) < 0) {
printf("Seek to start of stream failed\n");
}
}
bool
ASFFileReader::GetNextChunkInfo(uint32 streamIndex, uint32 pFrameNo,
char **buffer, uint32 *size, bool *keyframe, bigtime_t *pts)
{
// Ok, Need to join payloads together that have the same payload->pts
// packet->send_time seems to be a better presentation time stamp than pts though
// Ok, Need to join payloads together that have the same payload->media_object_number
asf_payload_t *payload;
while (!HasIndex(streamIndex, pFrameNo+1) && asf_get_packet(asfFile, packet) > 0) {
for (int i=0;i<packet->payload_count;i++) {
payload = (asf_payload_t *)(&packet->payloads[i]);
printf("Payload %d ",i+1);
printf("Stream %d Keyframe %d pts %d frame %d, size %d\n",payload->stream_number,payload->key_frame, payload->pts * 1000, payload->media_object_number, payload->datalen);
AddIndex(payload->stream_number, payload->media_object_number, payload->key_frame, packet->send_time * 1000, payload->data, payload->datalen);
IndexEntry indexEntry = GetIndex(streamIndex, pFrameNo);
if (indexEntry.noPayloads == 0) {
// No index entry
return false;
}
while (packet->send_time * 1000 < indexEntry.pts) {
if (asf_get_packet(asfFile, packet) < 0) {
return false;
}
}
if (HasIndex(streamIndex, pFrameNo+1)) {
IndexEntry indexEntry = GetIndex(streamIndex, pFrameNo+1);
*buffer = (char *)indexEntry.data;
*size = indexEntry.size;
*keyframe = indexEntry.keyFrame;
*pts = indexEntry.pts;
return true;
if (packet->send_time * 1000 > indexEntry.pts) {
// seek back to pts
printf("seeking back to %Ld status %Ld\n",indexEntry.pts, asf_seek_to_msec(asfFile, indexEntry.pts/1000));
if (asf_get_packet(asfFile, packet) < 0) {
return false;
}
}
// fill in some details
*size = indexEntry.dataSize;
*keyframe = indexEntry.keyFrame;
*pts = indexEntry.pts;
uint32 expectedPayloads = indexEntry.noPayloads;
uint32 offset = 0;
for (int i=0;i<packet->payload_count;i++) {
payload = (asf_payload_t *)(&packet->payloads[i]);
// find the first payload matching the id we want and then
// combine the next x payloads where x is the noPayloads in indexEntry
if (payload->media_object_number == indexEntry.id && payload->stream_number == streamIndex) {
// copy data to buffer
memcpy(*buffer + offset, payload->data, payload->datalen);
offset += payload->datalen;
expectedPayloads--;
if (expectedPayloads == 0) {
return true;
}
}
}
// combine packets into a single buffer
while ((asf_get_packet(asfFile, packet) > 0) && (expectedPayloads > 0)) {
for (int i=0;i<packet->payload_count;i++) {
payload = (asf_payload_t *)(&packet->payloads[i]);
// find the first payload matching the id we want and then
// combine the next x payloads where x is the noPayloads in indexEntry
if (payload->media_object_number == indexEntry.id && payload->stream_number == streamIndex) {
// copy data to buffer
memcpy(*buffer + offset, payload->data, payload->datalen);
offset += payload->datalen;
expectedPayloads--;
if (expectedPayloads == 0) {
return true;
}
}
}
}
return false;


@@ -35,6 +35,7 @@
extern "C" {
#include "libasf/asf.h"
}
#include "ASFIndex.h"
struct ASFAudioFormat {
uint16 Compression;
@@ -60,71 +61,6 @@ struct ASFVideoFormat {
uint8 *extraData;
};
class IndexEntry {
public:
IndexEntry() {frameNo = 0;data=NULL;size=0;pts=0;keyFrame=false;};
uint32 frameNo; // frame_no or sample_no
uint8 * data; // The data for this frame
uint32 size; // The size of the data available
bigtime_t pts; // Presentation Time Stamp for this frame
bool keyFrame; // Is this a keyframe.
};
class StreamHeader {
public:
StreamHeader() {streamIndex = 0;};
~StreamHeader() {index.clear();};
void AddIndex(uint32 frameNo, bool keyFrame, bigtime_t pts, uint8 *data, uint32 size)
{
IndexEntry indexEntry;
// Should be in a constructor
indexEntry.frameNo = frameNo;
indexEntry.data = data;
indexEntry.size = size;
indexEntry.pts = pts;
indexEntry.keyFrame = keyFrame;
index.push_back(indexEntry);
};
IndexEntry GetIndex(uint32 frameNo)
{
IndexEntry indexEntry;
for (std::vector<IndexEntry>::iterator itr = index.begin();
itr != index.end();
++itr) {
if (itr->frameNo == frameNo) {
indexEntry = *itr;
index.erase(itr);
break;
}
}
return indexEntry;
};
bool HasIndex(uint32 frameNo)
{
if (!index.empty()) {
for (std::vector<IndexEntry>::iterator itr = index.begin();
itr != index.end();
++itr) {
if (itr->frameNo == frameNo) {
return true;
}
}
}
return false;
};
uint16 streamIndex;
std::vector<IndexEntry> index;
};
// ASF file reader
class ASFFileReader {
private:
@@ -132,7 +68,9 @@ private:
BPositionIO *theStream;
asf_file_t *asfFile;
asf_packet_t *packet;
std::vector<StreamHeader> streams;
std::vector<StreamEntry> streams;
void ParseIndex();
public:
ASFFileReader(BPositionIO *pStream);
@@ -150,20 +88,15 @@ public:
// How many tracks in file
uint32 getStreamCount();
// The first video track duration indexed by streamIndex
bigtime_t getVideoDuration(uint32 streamIndex);
// the first audio track duration indexed by streamIndex
bigtime_t getAudioDuration(uint32 streamIndex);
// the max of all active audio or video durations
bigtime_t getMaxDuration();
// the Stream duration indexed by streamIndex
bigtime_t getStreamDuration(uint32 streamIndex);
bool getAudioFormat(uint32 streamIndex, ASFAudioFormat *format);
bool getVideoFormat(uint32 streamIndex, ASFVideoFormat *format);
// The no of frames in the video track indexed by streamIndex
// The no of frames in the Stream indexed by streamIndex
uint32 getFrameCount(uint32 streamIndex);
// The no of chunks in the audio track indexed by streamIndex
uint32 getAudioChunkCount(uint32 streamIndex);
// Is stream (track) a video track
bool IsVideo(uint32 streamIndex);
// Is stream (track) a audio track
@@ -171,17 +104,16 @@ public:
BPositionIO *Source() {return theStream;};
void AddIndex(uint32 streamIndex, uint32 frameNo, bool keyFrame, bigtime_t pts, uint8 *data, uint32 size);
IndexEntry GetIndex(uint32 streamIndex, uint32 frameNo);
bool HasIndex(uint32 streamIndex, uint32 frameNo);
uint32 GetFrameForTime(uint32 streamIndex, bigtime_t time);
bool GetNextChunkInfo(uint32 streamIndex, uint32 pFrameNo, char **buffer, uint32 *size, bool *keyframe, bigtime_t *pts);
static int32_t read(void *opaque, void *buffer, int32_t size);
static int32_t write(void *opaque, void *buffer, int32_t size);
static int64_t seek(void *opaque, int64_t offset);
};
#endif


@@ -0,0 +1,158 @@
/*
* Copyright (c) 2009, David McPaul
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "ASFIndex.h"
void
IndexEntry::Clear()
{
frameNo=0;
noPayloads=0;
dataSize=0;
pts=0;
keyFrame=false;
}
void
IndexEntry::AddPayload(uint32 pdataSize)
{
noPayloads++;
dataSize += pdataSize;
}
StreamEntry::StreamEntry()
{
lastID = 0;
frameCount = 0;
maxPTS = 0;
duration = 0;
}
StreamEntry::~StreamEntry()
{
index.clear();
}
void
StreamEntry::setDuration(bigtime_t pduration)
{
duration = pduration;
}
IndexEntry
StreamEntry::GetIndex(uint32 frameNo)
{
IndexEntry indexEntry;
for (std::vector<IndexEntry>::iterator itr = index.begin(); itr != index.end(); ++itr) {
if (itr->frameNo == frameNo) {
indexEntry = *itr;
index.erase(itr);
break;
}
}
return indexEntry;
}
bool
StreamEntry::HasIndex(uint32 frameNo)
{
if (!index.empty()) {
for (std::vector<IndexEntry>::iterator itr = index.begin();
itr != index.end();
++itr) {
if (itr->frameNo == frameNo) {
return true;
}
}
}
return false;
}
IndexEntry
StreamEntry::GetIndex(bigtime_t pts)
{
IndexEntry indexEntry;
for (std::vector<IndexEntry>::iterator itr = index.begin(); itr != index.end(); ++itr) {
if (pts <= itr->pts) {
indexEntry = *itr;
index.erase(itr);
break;
}
}
return indexEntry;
}
bool
StreamEntry::HasIndex(bigtime_t pts)
{
if (!index.empty()) {
for (std::vector<IndexEntry>::iterator itr = index.begin();
itr != index.end();
++itr) {
if (pts <= itr->pts) {
return true;
}
}
}
return false;
}
/*
Combine payloads with the same id into a single IndexEntry
When isLast flag is set then no more payloads are available (ie add current entry to index)
*/
void
StreamEntry::AddPayload(uint32 id, bool keyFrame, bigtime_t pts, uint32 dataSize, bool isLast)
{
if (isLast) {
maxPTS = indexEntry.pts;
index.push_back(indexEntry);
printf("Stream Index Loaded for Stream %d Max Index %ld Max PTS %Ld\n",streamIndex, frameCount, maxPTS);
} else {
if (id > lastID) {
if (lastID != 0) {
// add indexEntry to Index
index.push_back(indexEntry);
}
lastID = id;
indexEntry.Clear();
indexEntry.frameNo = frameCount++;
indexEntry.keyFrame = keyFrame;
indexEntry.pts = pts;
indexEntry.dataSize = dataSize;
indexEntry.noPayloads = 1;
indexEntry.id = id;
} else {
indexEntry.AddPayload(dataSize);
}
}
}


@@ -0,0 +1,76 @@
/*
* Copyright (c) 2009, David McPaul
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _ASF_INDEX_H
#define _ASF_INDEX_H
#include <SupportDefs.h>
#include <vector>
class IndexEntry {
public:
IndexEntry() {frameNo = 0;noPayloads=0;dataSize=0;pts=0;keyFrame=false;};
uint32 frameNo; // frame_no or sample_no
uint8 noPayloads; // The number of payloads that make up this frame
uint32 dataSize; // The size of the data available
bigtime_t pts; // Presentation Time Stamp for this frame
bool keyFrame; // Is this a keyframe.
uint8 id; // The id for this Entry
void Clear();
void AddPayload(uint32 pdataSize);
};
class StreamEntry {
public:
uint16 streamIndex;
StreamEntry();
~StreamEntry();
IndexEntry GetIndex(uint32 frameNo);
bool HasIndex(uint32 frameNo);
IndexEntry GetIndex(bigtime_t pts);
bool HasIndex(bigtime_t pts);
bigtime_t getMaxPTS() {return maxPTS;};
uint32 getFrameCount() {return frameCount;};
bigtime_t getDuration() {return duration;};
void setDuration(bigtime_t pduration);
void AddPayload(uint32 id, bool keyFrame, bigtime_t pts, uint32 dataSize, bool isLast);
private:
std::vector<IndexEntry> index;
IndexEntry indexEntry;
uint32 lastID;
uint32 frameCount;
bigtime_t maxPTS;
bigtime_t duration;
};
#endif
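
To make the new index API above easier to follow, here is a minimal standalone sketch (not part of the commit) of how a StreamEntry is meant to be fed and queried, in the same way ParseFile()/ParseIndex() and GetNextChunkInfo() drive it. The stream number, media object ids, timestamps and payload sizes are made-up illustration values, and it assumes this ASFIndex.h and Haiku's SupportDefs.h are on the include path.

#include <stdio.h>

#include "ASFIndex.h"	// the header added in this commit

int
main()
{
	StreamEntry stream;
	stream.streamIndex = 1;	// hypothetical stream number

	// Three payloads sharing media object id 1 are merged into frame 0,
	// then object id 2 starts frame 1 (a keyframe at pts 40000 us).
	stream.AddPayload(1, false, 0, 4096, false);
	stream.AddPayload(1, false, 0, 4096, false);	// same id: payload merged
	stream.AddPayload(1, false, 0, 2048, false);	// same id: payload merged
	stream.AddPayload(2, true, 40000, 8192, false);	// new id: frame 0 pushed to index
	stream.AddPayload(0, false, 0, 0, true);	// isLast: frame 1 pushed to index
	stream.setDuration(80000);	// microseconds, made up for the example

	// frameCount is now 2 and maxPTS is 40000
	IndexEntry entry = stream.GetIndex((uint32)0);	// fetch (and remove) frame 0
	printf("frame %lu: %u payloads, %lu bytes, pts %lld, keyframe %d\n",
		(unsigned long)entry.frameNo, (unsigned)entry.noPayloads,
		(unsigned long)entry.dataSize, (long long)entry.pts, entry.keyFrame);
	// expected: frame 0: 3 payloads, 10240 bytes, pts 0, keyframe 0

	return 0;
}

Note that both GetIndex() overloads erase the returned entry from the index, so each frame can only be fetched once.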


@@ -9,6 +9,7 @@ SubDirHdrs [ FDirName $(SUBDIR) libasf ] ;
Addon asf_reader :
asf_reader.cpp
ASFFileReader.cpp
ASFIndex.cpp
:
libasfreader.a
be libmedia.so $(TARGET_LIBSTDC++)


@@ -185,22 +185,27 @@ asfReader::AllocateCookie(int32 streamNumber, void **_cookie)
cookie->line_count = videoFormat.VideoHeight;
cookie->frame_size = 1;
cookie->duration = theFileReader->getVideoDuration(streamNumber);
cookie->duration = theFileReader->getStreamDuration(streamNumber);
cookie->frame_count = theFileReader->getFrameCount(streamNumber);
TRACE("frame_count %Ld\n", cookie->frame_count);
TRACE("duration %.6f (%Ld)\n", cookie->duration / 1E6, cookie->duration);
// asf does not have a frame rate! The extended descriptor defines an average time per frame.
if (videoFormat.FrameScale && videoFormat.FrameRate) {
cookie->frames_per_sec_rate = videoFormat.FrameRate;
cookie->frames_per_sec_scale = videoFormat.FrameScale;
TRACE("frames_per_sec_rate %ld, frames_per_sec_scale %ld (using both)\n", cookie->frames_per_sec_rate, cookie->frames_per_sec_scale);
} else {
cookie->frames_per_sec_rate = 25;
cookie->frames_per_sec_scale = 1;
TRACE("frames_per_sec_rate %ld, frames_per_sec_scale %ld (using fallback)\n", cookie->frames_per_sec_rate, cookie->frames_per_sec_scale);
}
// asf does not have a frame rate! The extended descriptor defines an average time per frame which is generally useless.
cookie->frames_per_sec_rate = cookie->frame_count;
cookie->frames_per_sec_scale = cookie->duration / 1000000LL;
TRACE("frames_per_sec_rate %ld, frames_per_sec_scale %ld (using both)\n", cookie->frames_per_sec_rate, cookie->frames_per_sec_scale);
// if (videoFormat.FrameScale && videoFormat.FrameRate) {
// cookie->frames_per_sec_rate = videoFormat.FrameRate;
// cookie->frames_per_sec_scale = videoFormat.FrameScale;
// TRACE("frames_per_sec_rate %ld, frames_per_sec_scale %ld (using both)\n", cookie->frames_per_sec_rate, cookie->frames_per_sec_scale);
// } else {
// cookie->frames_per_sec_rate = 25;
// cookie->frames_per_sec_scale = 1;
// TRACE("frames_per_sec_rate %ld, frames_per_sec_scale %ld (using fallback)\n", cookie->frames_per_sec_rate, cookie->frames_per_sec_scale);
// }
description.family = B_AVI_FORMAT_FAMILY;
description.u.avi.codec = videoFormat.Compression;
@@ -277,6 +282,9 @@ asfReader::AllocateCookie(int32 streamNumber, void **_cookie)
*(uint32 *)format->user_data = codecID; format->user_data[4] = 0;
}
cookie->buffer_size = ((videoFormat.VideoWidth * videoFormat.VideoHeight * 4) + 15) & ~15; // WRONG Find max input buffer size needed
cookie->buffer = new char [cookie->buffer_size];
return B_OK;
}
@@ -293,15 +301,16 @@ asfReader::AllocateCookie(int32 streamNumber, void **_cookie)
uint32 sampleSize = (audioFormat.NoChannels * audioFormat.BitsPerSample / 8);
cookie->audio = true;
cookie->duration = theFileReader->getAudioDuration(streamNumber);
cookie->frame_count = (cookie->duration * audioFormat.SamplesPerSec) / sampleSize / 1000000LL;
cookie->duration = theFileReader->getStreamDuration(streamNumber);
// Calculate sample count using duration
cookie->frame_count = (cookie->duration * audioFormat.SamplesPerSec) / 1000000LL;
cookie->frame_pos = 0;
cookie->frames_per_sec_rate = audioFormat.SamplesPerSec;
cookie->frames_per_sec_scale = 1;
cookie->bytes_per_sec_rate = audioFormat.AvgBytesPerSec;
cookie->bytes_per_sec_scale = 1;
TRACE("Chunk Count %ld\n", theFileReader->getAudioChunkCount(streamNumber));
TRACE("Chunk Count %ld\n", theFileReader->getFrameCount(streamNumber));
TRACE("audio frame_count %Ld, duration %.6f\n", cookie->frame_count, cookie->duration / 1E6 );
if (audioFormat.Compression == 0x0001) {
@@ -349,6 +358,9 @@ asfReader::AllocateCookie(int32 streamNumber, void **_cookie)
cookie->frame_size);
}
cookie->buffer_size = (audioFormat.BlockAlign + 15) & ~15;
cookie->buffer = new char [cookie->buffer_size];
// TODO: this doesn't seem to work (it's not even a fourcc)
format->user_data_type = B_CODEC_TYPE_INFO;
*(uint32 *)format->user_data = audioFormat.Compression; format->user_data[4] = 0;
@@ -386,7 +398,6 @@ status_t
asfReader::FreeCookie(void *_cookie)
{
asf_cookie *cookie = (asf_cookie *)_cookie;
cookie->buffer = NULL; // we don't own the buffer
delete [] cookie->buffer;
@@ -436,13 +447,13 @@ asfReader::Seek(void *cookie, uint32 flags, int64 *frame, bigtime_t *time)
if (flags & B_MEDIA_SEEK_TO_TIME) {
// frame = (time * rate) / fps / 1000000LL
*frame = ((*time * asfCookie->frames_per_sec_rate) / (int64)asfCookie->frames_per_sec_scale) / 1000000LL;
asfCookie->frame_pos = *frame;
asfCookie->frame_pos = theFileReader->GetFrameForTime(asfCookie->stream,*time);
}
if (flags & B_MEDIA_SEEK_TO_FRAME) {
// time = frame * 1000000LL * fps / rate
*time = (*frame * 1000000LL * (int64)asfCookie->frames_per_sec_scale) / asfCookie->frames_per_sec_rate;
asfCookie->frame_pos = *frame;
asfCookie->frame_pos = theFileReader->GetFrameForTime(asfCookie->stream,*time);
}
TRACE("asfReader::Seek: seekTo%s%s%s%s, time %Ld, frame %Ld\n",
@@ -514,9 +525,10 @@ asfReader::GetNextChunk(void *_cookie, const void **chunkBuffer,
mediaHeader->u.encoded_video.field_number = 0;
mediaHeader->u.encoded_video.field_sequence = cookie->frame_pos;
}
TRACE(" stream %d: frame %ld start time %.6f Size %ld key frame %s\n",cookie->stream, cookie->frame_pos, mediaHeader->start_time / 1000000.0, size, keyframe ? "true" : "false");
cookie->frame_pos ++;
cookie->frame_pos++;
*chunkBuffer = cookie->buffer;
*chunkSize = size;


@@ -243,7 +243,8 @@ asf_seek_to_msec(asf_file_t *file, int64_t msec)
}
/* Index structure is missing, check if we can still seek */
if (file->index == NULL) {
// DLM we can always seek to 0
if (file->index == NULL && msec > 0) {
int i, audiocount;
audiocount = 0;


@@ -31,14 +31,14 @@
INLINE void
debug_printf(char *fmt, ...)
{
//#ifdef DEBUG
#ifdef DEBUG
va_list argp;
va_start(argp, fmt);
vfprintf(stderr, fmt, argp);
va_end(argp);
fprintf(stderr, "\n");
//#endif
#endif
}
#endif