Click here to Skip to main content
15,896,269 members
Articles / Desktop Programming / MFC

DirectShow Editing Services (DES) and combining AVI files

Rate me:
Please Sign up or sign in to vote.
4.64/5 (4 votes)
9 Sep 2011 · CPOL · 5 min read · 35.9K views · 5.8K downloads · 12 bookmarks
A sample C++ project that uses DES to combine two or more AVI files.
#include <stdafx.h>

#include <fstream>
#include <iomanip>
#include <ios>
#include <limits.h>
#include <map>
#include <stdexcept>
#include <sstream>

#include <initguid.h>

#include <dshow.h>

#include "DESCombine.h"
#include "Messages.h"
#include "Utility.h"
#include "Vmr.h"

#define THROW_EXCEPTION( msg )	throwException( msg, __FILE__, __FUNCTION__, __LINE__ ) 

DEFINE_GUID( 
	CLSID_DefaultDirectSoundRenderer,
	0x79376820, 0x07D0, 0x11CF, 0xA2, 0x4D, 0x00, 0x20, 0xAF, 0xD7, 0x97, 0x67
);

DECLARE_USER_MESSAGE( UWM_ShowMessage )
DECLARE_USER_MESSAGE( UWM_GraphNotify )

_COM_SMARTPTR_TYPEDEF( IEnumMediaTypes,		IID_IEnumMediaTypes );
_COM_SMARTPTR_TYPEDEF( IAMTimelineComp,		IID_IAMTimelineComp );
_COM_SMARTPTR_TYPEDEF( IAMTimelineGroup,	IID_IAMTimelineGroup );
_COM_SMARTPTR_TYPEDEF( IAMTimelineObj,		IID_IAMTimelineObj );
_COM_SMARTPTR_TYPEDEF( IAMTimelineSrc,		IID_IAMTimelineSrc );
_COM_SMARTPTR_TYPEDEF( IAMSetErrorLog,		IID_IAMSetErrorLog );
_COM_SMARTPTR_TYPEDEF( IConfigInterleaving, IID_IConfigInterleaving );
_COM_SMARTPTR_TYPEDEF( IMediaEventSink,		IID_IMediaEventSink );
_COM_SMARTPTR_TYPEDEF( ISampleGrabber,		IID_ISampleGrabber );
_COM_SMARTPTR_TYPEDEF( IXml2Dex,			IID_IXml2Dex );

namespace DESCombineLib {

static Log s_generalLog;
static CCriticalSection s_csLog;

static void s_logGeneralMessage( const std::wstring& src );
static void s_writeLogFile( const std::wstring& logFilename, const Log& log );

/*
	=====================================================================
	=====================================================================
	MediaFile
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
// Default constructor: no input file is associated yet; the frame length
// stays zero until initialize() is called with a real media file.
MediaFile::MediaFile()
: _frameLength( 0 )
{
}
/*
	=====================================================================
	=====================================================================
*/
// Convenience constructor: immediately probes 'inputFile' via initialize().
// Throws (see initialize) if the filename is empty or a DirectShow call fails.
MediaFile::MediaFile( const InputFile& inputFile )
: _frameLength( 0 )
{
	initialize( inputFile );
}
/*
	=====================================================================
	=====================================================================
*/
int MediaFile::getMaximumFPS() const
{
	int fps = 0;
	StreamsInformation::const_iterator it;
	for( it = _streamsInformation.begin();  it != _streamsInformation.end();  ++it ) {
		const StreamInformation& si = *it;
		const VideoInformation& vi = si.getVideoInformation();
		if( fps < vi.fps )
			fps = vi.fps;
	}

	return fps;
}
/*
	=====================================================================
	=====================================================================
*/
// Returns the length of the longest stream in this file (100ns UNITS,
// as stored by initialize()); 0 when the file has no streams.
LONGLONG MediaFile::getMaximumStreamLength() const
{
	LONGLONG maximum = 0;

	StreamsInformation::const_iterator itStream;
	for( itStream = _streamsInformation.begin();  itStream != _streamsInformation.end();  ++itStream ) {
		const LONGLONG candidate = itStream->getLength();
		if( candidate > maximum )
			maximum = candidate;
	}

	return maximum;
}
/*
	=====================================================================
	=====================================================================
*/
// Returns the VideoInformation with the largest image size across all of
// this file's streams (a default-constructed value when none is larger).
VideoInformation MediaFile::getMaximumVideoInformation() const
{
	VideoInformation best;

	StreamsInformation::const_iterator itStream;
	for( itStream = _streamsInformation.begin();  itStream != _streamsInformation.end();  ++itStream ) {
		VideoInformation candidate = itStream->getVideoInformation();
		if( best.getImageSize() < candidate.getImageSize() )
			best = candidate;
	}

	return best;
}
/*
	=====================================================================
	=====================================================================
*/
// Returns a copy of the information for stream 'xStream'.
// Throws when the index is outside [0, stream count).
StreamInformation MediaFile::getStreamInformation( int xStream ) const
{
	const bool validIndex = ( 0 <= xStream )  &&  ( xStream < (int)_streamsInformation.size() );
	if( !validIndex )
		THROW_EXCEPTION( "out of range stream index" );

	return _streamsInformation[xStream];
}
/*
	=====================================================================
	=====================================================================
*/
int MediaFile::getStreamNumber( int xStream ) const
{
	CMediaType streamMT = getStreamInformation( xStream ).getMediaType();

	int streamNumber = -1;
	int ctStreams = getStreamCount();
	for( int x = 0;  x < ctStreams  &&  x <= xStream;  ++x ) {
		CMediaType nextMT = getStreamInformation( x ).getMediaType();
		if( nextMT == streamMT )
			++streamNumber;
	}

	return streamNumber;
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Probes 'inputFile' with IMediaDet and fills _streamsInformation: each
	stream's length (converted from seconds to 100ns UNITS) and media type,
	plus width/height/bit-depth/fps for video streams with a VIDEOINFOHEADER
	format block.
	Throws on an empty filename or (via CHECK_HR) any failed DirectShow call.

	Fixes vs. the original: guards against AvgTimePerFrame == 0 (some files
	report an unknown frame rate, which previously caused an integer divide
	by zero) and against a NULL format block.
*/
void MediaFile::initialize( const InputFile& inputFile )
{
	HRESULT hr;

	if( inputFile.mediaFilename.empty() )
		THROW_EXCEPTION( "filename.empty()" );
	_inputFile = inputFile;

	// the media detector extracts stream information without building a graph
	IMediaDetPtr mediaDet;
	hr = mediaDet.CreateInstance( CLSID_MediaDet );
	CHECK_HR( hr, "CLSID_MediaDet" );

	_bstr_t bstr( inputFile.mediaFilename.c_str() );
	hr = mediaDet->put_Filename( bstr );
	CHECK_HR( hr, "put_Filename" );

	long ctStreams = -1;
	hr = mediaDet->get_OutputStreams( &ctStreams );
	CHECK_HR( hr, "get_OutputStreams" );

	_streamsInformation.clear();
	_streamsInformation.resize( ctStreams );
	for( long xStream = 0;  xStream < ctStreams;  ++xStream ) {
		HRESULT hr = mediaDet->put_CurrentStream( xStream );
		CHECK_HR( hr, "put_CurrentStream" );

		// IMediaDet reports the length in seconds; store it in 100ns UNITS
		double length;
		hr = mediaDet->get_StreamLength( &length );
		CHECK_HR( hr, "get_StreamLength" );

		CMediaType mediaType;
		hr = mediaDet->get_StreamMediaType( &mediaType );
		CHECK_HR( hr, "get_StreamMediaType" );

		StreamInformation& si = _streamsInformation[xStream];
		si.setLength( (LONGLONG)( length * UNITS) );
		si.setMediaType( mediaType );

		// video-specific details only apply to video streams
		if( !IsEqualGUID( mediaType.majortype, MEDIATYPE_Video ) ) 
			continue;

		if( FORMAT_VideoInfo == mediaType.formattype ) {
			VIDEOINFOHEADER* viHeader = (VIDEOINFOHEADER*)mediaType.pbFormat;
			if( !viHeader )
				continue;	// malformed media type: format block missing

			BITMAPINFOHEADER& biHeader = viHeader->bmiHeader;

			CRect rt( 0,0, biHeader.biWidth, biHeader.biHeight );

			VideoInformation vi = si.getVideoInformation();
			if( vi.width < rt.Width() )
				vi.width = rt.Width();
			if( vi.height < rt.Height() )
				vi.height = rt.Height();
			if( vi.ctBits < biHeader.biBitCount )
				vi.ctBits = biHeader.biBitCount;

			// AvgTimePerFrame may legitimately be 0 (unknown frame rate);
			// dividing by it crashed the original code
			if( 0 < viHeader->AvgTimePerFrame ) {
				int fps = (int)( UNITS / viHeader->AvgTimePerFrame );
				if( vi.fps < fps )
					vi.fps = fps;
			}
			si.setVideoInformation( vi );
		}
	} // for( long xStream = 0; ...
}
/*
	=====================================================================
	=====================================================================
*/
// Copy assignment with self-assignment guard; copies the input-file
// description, all stream information, and the cached frame length.
MediaFile& MediaFile::operator=( const MediaFile& src )
{
	if( &src == this )
		return *this;

	_inputFile = src._inputFile;
	_streamsInformation = src._streamsInformation;
	_frameLength = src._frameLength;

	return *this;
}

/*
	=====================================================================
	=====================================================================
	MediaFiles
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
int MediaFiles::getMaximumFPS() const
{
	int fps = 0;
	for( const_iterator it = begin();  it != end();  ++it ) {
		const MediaFile& mediaFile = *it;
		int fps2 = mediaFile.getMaximumFPS();
		if( fps < fps2 )
			fps = fps2;
	}

	return fps;
}
/*
	=====================================================================
	=====================================================================
*/
// Returns the VideoInformation with the largest image size across every
// file in the collection (default-constructed when none is larger).
VideoInformation MediaFiles::getMaximumVideoInformation() const
{
	VideoInformation best;

	for( const_iterator itFile = begin();  itFile != end();  ++itFile ) {
		VideoInformation candidate = itFile->getMaximumVideoInformation();
		if( best.getImageSize() < candidate.getImageSize() )
			best = candidate;
	}

	return best;
}
/*
	=====================================================================
	=====================================================================
*/
void MediaFiles::initialize( const InputFiles& inputs )
{
	clear();

	InputFiles::const_iterator it;
	for( it = inputs.begin();  it != inputs.end();  ++it ) {
		const InputFile& inputFile = *it;
		push_back( MediaFile( inputFile ) );
	}
}

/*
	=====================================================================
	=====================================================================
	Group
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
// Default constructor: empty group with zero accumulated length and an
// undefined (zero) frame rate; initialize() must run before add().
Group::Group()
: _length( 0 )
, _fps( 0 )
{
}
/*
	=====================================================================
	=====================================================================
*/
// Debug-only destructor: releases the COM references explicitly via clear()
// so reference-count problems surface while debugging.  In release builds
// the compiler-generated destructor releases the same smart-pointer members.
#ifdef _DEBUG
Group::~Group()
{
	clear();
}
#endif
/*
	=====================================================================
	=====================================================================
*/
// Convenience constructor: immediately creates the timeline group/track
// via initialize().  See initialize() for the argument semantics.
Group::Group( const CMediaType& mtype, const IAMTimelinePtr& timeline, double fps )
: _length( 0 )
, _fps( 0 )
{
	initialize( mtype, timeline, fps );
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Appends one stream of 'mediaFile' to the end of this group's track.
	'streamNumber' selects which stream of the source file to use;
	'start'/'end' are media times in 100ns UNITS, and a negative 'end'
	means "to the end of the file's longest stream".  Updates the running
	group length and records the rendered frame count on the stored
	MediaFile copy (used later by SampleGrabberCB to detect file
	boundaries).  Throws via CHECK_HR on any failed DirectShow call.
*/
void Group::add( const MediaFile& mediaFile, long streamNumber, LONGLONG start, LONGLONG end )
{
	_files.push_back( mediaFile );

	// negative 'end' is a sentinel meaning "use the whole file"
	if( 0 > end )
		end = mediaFile.getMaximumStreamLength();
	LONGLONG length = end - start;

	IAMTimelineObjPtr sourceObj;
	HRESULT hr = _timeline->CreateEmptyNode( &sourceObj, TIMELINE_MAJOR_TYPE_SOURCE );
	CHECK_HR( hr, "CreateEmptyNode" );

	// place the clip on the timeline directly after everything added so far
	hr = sourceObj->SetStartStop( _length, _length + length );
	CHECK_HR( hr, "SetStartStop" );

	IAMTimelineSrcPtr source;
	hr = sourceObj->QueryInterface( IID_IAMTimelineSrc, (void **)&source );
	CHECK_HR( hr, "IID_IAMTimelineSrc" );

	const InputFile inputFile = mediaFile.getInputFile();
	hr = source->SetMediaName( _bstr_t( inputFile.mediaFilename.c_str() ) );
	CHECK_HR( hr, "SetMediaName" );

	hr = source->SetStreamNumber( streamNumber );
	CHECK_HR( hr, "SetStreamNumber" );

	hr = source->SetMediaTimes( start, end );
	CHECK_HR( hr, "SetMediaTimes" );

	hr = _track->SrcAdd( sourceObj );
	CHECK_HR( hr, "SrcAdd" );

	// snap the media times to frame boundaries, then derive how many frames
	// of this clip will be rendered at the group's frame rate
	hr = source->FixMediaTimes( &start, &end );
	CHECK_HR( hr, "FixMediaTimes" );

	double d1 = (double)( end - start );	// clip duration (100ns UNITS)
	double d2 = UNITS / _fps;				// duration of one frame
	double d3 = d1 / d2;					// frames in the clip
	int ctFrames = (int)d3;
	_files.rbegin()->setFrameLength( ctFrames );

	_length += length;
}
/*
	=====================================================================
	=====================================================================
*/
// Releases the COM references (timeline, track) and resets every member
// to its default-constructed value.
void Group::clear()
{
	_timeline = NULL;
	_track = NULL;
	_files.clear();
	_fps = 0;
	_length = 0;
}
/*
	=====================================================================
	=====================================================================
*/
// Returns a mutable reference to the xFile-th media file added to this
// group; throws when the index is outside [0, file count).
MediaFile& Group::file( int xFile )
{
	const bool inRange = ( 0 <= xFile )  &&  ( xFile < (int)_files.size() );
	if( !inRange )
		THROW_EXCEPTION( "out of range 'xFile'" );

	return _files[ xFile ];
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Creates a DES timeline group of media type 'mtype' on 'timeline',
	inserts a single (virtual) track into the group's composition, and
	caches the track in '_track' so add() can append source clips.
	'fps' is remembered for frame-count calculations in add().
	Throws via CHECK_HR on any failed DirectShow call.
*/
void Group::initialize( const CMediaType& mtype, const IAMTimelinePtr& timeline, double fps )
{
	ASSERT( timeline );
	_timeline = timeline;
	_fps = fps;

	HRESULT hr;

	// create the group node and assign it the group's media type
	IAMTimelineObjPtr groupObj;
	hr = _timeline->CreateEmptyNode( &groupObj, TIMELINE_MAJOR_TYPE_GROUP );
	CHECK_HR( hr, "CreateEmptyNode" );

	IAMTimelineGroupPtr group;
	hr = groupObj->QueryInterface( IID_IAMTimelineGroup, (void**)&group );
	CHECK_HR( hr, "IID_IAMTimelineGroup" );

	hr = group->SetMediaType( (AM_MEDIA_TYPE*) &mtype );
	CHECK_HR( hr, "SetMediaType" );

	hr = _timeline->AddGroup( groupObj );
	CHECK_HR( hr, "AddGroup" );

	// create one track and insert it into the group's composition
	// (-1: track priority chosen by DES -- see VTrackInsBefore docs)
	IAMTimelineObjPtr trackObj;
	hr = _timeline->CreateEmptyNode( &trackObj, TIMELINE_MAJOR_TYPE_TRACK );
	CHECK_HR( hr, "CreateEmptyNode" );

	IAMTimelineCompPtr composition;
	hr = group->QueryInterface( IID_IAMTimelineComp, (void **)&composition );
	CHECK_HR( hr, "IID_IAMTimelineComp" );

	hr = composition->VTrackInsBefore( trackObj, -1 );
	CHECK_HR( hr, "VTrackInsBefore" );

	// keep the track interface for later use by add()
	hr = trackObj->QueryInterface( IID_IAMTimelineTrack, (void **)&_track );
	CHECK_HR( hr, "IID_IAMTimelineTrack" );
}
/*
	=====================================================================
	=====================================================================
*/
// Copy assignment with self-assignment guard; copies the bookkeeping values
// and shares the COM references (smart pointers AddRef on assignment).
Group& Group::operator=( const Group& src )
{
	if( &src == this )
		return *this;

	_length = src._length;
	_files = src._files;
	_track = src._track;
	_fps = src._fps;
	_timeline = src._timeline;

	return *this;
}

/*
	=====================================================================
	=====================================================================
	SampleGrabberCB
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
// Default constructor: all counters zeroed, no callback and no event sink;
// initialize() must run before this object is attached to a sample grabber.
SampleGrabberCB::SampleGrabberCB()
: CUnknown( NAME("SampleGrabberCB"), NULL )
, _desCombineCB( NULL )
, _endOfFileEventCode( 0 )
, _xCurrentFile( 0 )
, _xCurrentFrame( 0 )
, _ctMaximumFrames( 0 )
{
}
/*
	=====================================================================
	=====================================================================
*/
// Convenience constructor: delegates to initialize(); see initialize()
// for the meaning of the arguments.
SampleGrabberCB::SampleGrabberCB( 
	const Group& group, 
	IDESCombineCB* desCombineCB, 
	IMediaEventSinkPtr& sink, 
	int endOfFileEventCode 
) 
: CUnknown( NAME("SampleGrabberCB"), NULL )
, _desCombineCB( NULL )
, _endOfFileEventCode( 0 )
, _xCurrentFile( 0 )
, _xCurrentFrame( 0 )
, _ctMaximumFrames( 0 )
{
	initialize( group, desCombineCB, sink, endOfFileEventCode );
}
/*
	=====================================================================
	=====================================================================
*/
// Copy constructor: delegates to operator= so both paths share one copy routine.
SampleGrabberCB::SampleGrabberCB( const SampleGrabberCB& src )
: CUnknown( NAME("SampleGrabberCB"), NULL )
{
	operator=( src );
}
/*
	=====================================================================
	=====================================================================
*/
/*
	ISampleGrabberCB buffer callback: receives a copy of each buffer that
	passes through the grabber.  Forwards the buffer to the optional
	application callback, counts frames, and posts '_endOfFileEventCode'
	into the graph's event queue when the current file's frames are
	exhausted, then advances to the next file in the group.
	Always returns S_OK.
*/
STDMETHODIMP SampleGrabberCB::BufferCB( double sampleTime, BYTE *buffer, long bufferLength )
{
	AUTOTRACE;

	if( _desCombineCB ) {
		// provide real-time GUI update by calling our callback function 
		_desCombineCB->BufferCB( _currentFilename.c_str(), sampleTime, buffer, bufferLength );
	}

	// have we finished the current file?
	if( ++_xCurrentFrame >= _ctMaximumFrames ) {
		// notify application that current file has been processed
		HRESULT hr = _mediaEventSink->Notify(
			(long)_endOfFileEventCode, (long)_xCurrentFile, (long)_xCurrentFrame 
		);
		ASSERT( SUCCEEDED(hr) );	// failure deliberately ignored in release builds

		// identify next file to be processed
		if( ++_xCurrentFile < _group.getCount() ) {
			const MediaFile& mediaFile = _group.file( _xCurrentFile );
			const InputFile inputFile = mediaFile.getInputFile();
			_currentFilename = inputFile.mediaFilename;
			// _ctMaximumFrames is a running total across files, matching
			// the monotonically increasing _xCurrentFrame
			_ctMaximumFrames += mediaFile.getFrameLength();
		}
		else {
			// all files have been processed
			_ctMaximumFrames = INT_MAX;
		}
	}

	return S_OK;
}
/*
	=====================================================================
	=====================================================================
*/
// Releases the callback/sink references and resets every member to its
// default-constructed value.
void SampleGrabberCB::clear()
{
	AUTOTRACE;

	_currentFilename.clear();
	_ctMaximumFrames = 0;
	_xCurrentFrame = 0;
	_xCurrentFile = 0;

	_endOfFileEventCode = 0;
	_mediaEventSink = NULL;
	_desCombineCB = NULL;
	_group.clear();
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Binds this callback to a timeline 'group'.  'desCombineCB' (may be NULL)
	receives a notification per buffer; 'sink' receives 'endOfFileEventCode'
	each time a file in the group finishes.  Seeds the current-file state
	from the group's first file, or — for an empty group — sets the frame
	limit to INT_MAX so end-of-file is never reported.
*/
void SampleGrabberCB::initialize( 
	const Group& group, IDESCombineCB* desCombineCB, IMediaEventSinkPtr& sink, int endOfFileEventCode 
) {
	AUTOTRACE;

	_group = group;
	_desCombineCB = desCombineCB;
	_mediaEventSink = sink;
	_endOfFileEventCode = endOfFileEventCode;

	if( _group.getCount() > _xCurrentFile ) {
		// start with the first file of the group
		const MediaFile& mediaFile = _group.file( _xCurrentFile );
		const InputFile inputFile = mediaFile.getInputFile();
		_currentFilename = inputFile.mediaFilename;
		_ctMaximumFrames = mediaFile.getFrameLength();
	}
	else {
		// empty group: never trigger the end-of-file path
		_ctMaximumFrames = INT_MAX; 
	}
}
/*
	=====================================================================
	=====================================================================
*/
// CUnknown plumbing: expose ISampleGrabberCB from this object and delegate
// every other IID (including IUnknown) to the base class.
STDMETHODIMP SampleGrabberCB::NonDelegatingQueryInterface( REFIID riid, void** ppv )
{
	CheckPointer( ppv, E_POINTER );

	if( IID_ISampleGrabberCB == riid )
		return GetInterface( (ISampleGrabberCB*)this, ppv );

	return inherited::NonDelegatingQueryInterface( riid, ppv );
}
/*
	=====================================================================
	=====================================================================
*/
// Copy assignment with self-assignment guard; note the CUnknown base
// (COM reference count) is intentionally left untouched.
SampleGrabberCB& SampleGrabberCB::operator=( const SampleGrabberCB& src )
{
	if( &src == this )
		return *this;

	_group = src._group;
	_desCombineCB = src._desCombineCB;
	_mediaEventSink = src._mediaEventSink;
	_endOfFileEventCode = src._endOfFileEventCode;

	_xCurrentFile = src._xCurrentFile;
	_xCurrentFrame = src._xCurrentFrame;
	_ctMaximumFrames = src._ctMaximumFrames;
	_currentFilename = src._currentFilename;

	return *this;
}
/*
	=====================================================================
	=====================================================================
*/
/*
	ISampleGrabberCB sample callback: receives the IMediaSample itself
	(no buffer copy).  Logs frame/timing information, counts frames, and
	posts '_endOfFileEventCode' into the graph's event queue when the
	current file's frames are exhausted, then advances to the next file.
	Always returns S_OK.
*/
STDMETHODIMP SampleGrabberCB::SampleCB( double sampleTime, IMediaSample *sample )
{
	//AUTOTRACE;
	HRESULT hr = S_OK;

	// log information 
	std::wstring type = _getType();
	std::wostringstream os;
	os	<< type << L" " 
		<< std::setw( 3 ) << std::dec << _xCurrentFrame << L" " 
		<< _ctMaximumFrames << L" " 
		<< std::setw( 8 ) << std::setprecision( 8 ) << sampleTime;
	s_logGeneralMessage( os.str() );

	// media times are optional; a GetMediaTime failure is logged-around, not fatal
	LONGLONG startTime, endTime;
	hr = sample->GetMediaTime( &startTime, &endTime );
	if( FAILED(hr) ) {
		//_logError( L"sample->GetMediaTime", hr );
		//sample->Release();
		//return hr;
	}
	else {
		std::wostringstream os;
		os	<< L"\t" << std::dec 
			<< L"startTime = " << startTime 
			<< L", endTime = " << endTime;
		s_logGeneralMessage( os.str() );
	}

	// have we finished the current file?
	if( ++_xCurrentFrame >= _ctMaximumFrames ) {
		// notify application that current file has been processed
		hr = _mediaEventSink->Notify( (long)_endOfFileEventCode, (long)_xCurrentFile, (long)_xCurrentFrame );

		// identify next file to be processed
		if( ++_xCurrentFile < _group.getCount() ) {
			const MediaFile& mediaFile = _group.file( _xCurrentFile );
			const InputFile inputFile = mediaFile.getInputFile();
			_currentFilename = inputFile.mediaFilename;
			_ctMaximumFrames += mediaFile.getFrameLength();

			std::wostringstream os;
			os << std::endl << type << L" " << _currentFilename << std::endl;
			s_logGeneralMessage( os.str() );
		}
		else {
			// all files have been processed
			_ctMaximumFrames = INT_MAX;
		}
	}
	// NOTE(review): MSDN says SampleCB implementations should not Release the
	// sample unless they AddRef'd it first; this Release looks like a port of
	// Marshal.ReleaseComObject from the DirectShow.NET sample (where it IS
	// required) -- verify it does not over-release in the C++ build.
	sample->Release();
	return S_OK;
}

/*
	=====================================================================
	=====================================================================
	SampleGrabberCB - private functions
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
// Maps the configured end-of-file event code to a human-readable stream
// label ("audio"/"video"/"undefined") for log output.
std::wstring SampleGrabberCB::_getType()
{
	if( EC_AudioFileComplete == _endOfFileEventCode )
		return L"audio";

	if( EC_VideoFileComplete == _endOfFileEventCode )
		return L"video";

	return L"undefined";
}
/*
	=====================================================================
	=====================================================================
*/
// Formats "ERROR: <msg>, <system text>(0x<hr>)" and appends it to the
// shared general log.
void SampleGrabberCB::_logError( const std::wstring& msg, HRESULT hr )
{
	std::wostringstream stream;
	stream << L"ERROR: " << msg << L", ";
	stream << getMessageWStr( hr );
	stream << L"(0x" << std::hex << hr << L")";

	s_logGeneralMessage( stream.str() );
}

/*
	=====================================================================
	=====================================================================
	DESCombine
	=====================================================================
	=====================================================================
*/
DECLARE_USER_MESSAGE( UWM_Completed )
DECLARE_USER_MESSAGE( UWM_FileCompleted )

/*
	=====================================================================
	=====================================================================
*/
// Default constructor: state starts as exUndefined; no notification or
// display windows are attached until the render methods set them.
DESCombine::DESCombine()
: _state( exUndefined )
, _wndNotify( 0 )
, _wndDisplay( 0 )
{
}
/*
	=====================================================================
	=====================================================================
*/
// Destructor: stops any in-progress render and releases all graph/engine
// resources via cleanUp().
DESCombine::~DESCombine()
{
	cleanUp();
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Cancels an in-progress render: stops the graph and moves to the
	exCancelled state.  The guard presumably relies on the state enum
	values being ordered by lifecycle (states up to exGraphStarted pass) --
	TODO confirm against the enum declaration in the header.
*/
void DESCombine::cancel()
{
	AUTOTRACE;
	CSingleLock sl( &_criticalSection, TRUE );

	if( exGraphStarted >= _state ) {
		_stopRendering();
		_changeState( exCancelled );
	}
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Tears down all rendering resources: stops the graph, detaches event
	notification, and destroys the render engine.  When the object had
	reached (at most) the graph-started state, marks the run as completed
	and flushes the shared in-memory log to "DESCombine.log".
	Also invoked from the destructor.
*/
void DESCombine::cleanUp()
{
	AUTOTRACE;
	CSingleLock sl( &_criticalSection, TRUE );

	_stopRendering();
	_disableEventNotification();
	_cleanUpRenderEngine();

	if( exGraphStarted >= _state ) {
		_changeState( exGraphCompleted );
		s_writeLogFile( L"DESCombine.log", s_generalLog );
	}
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Drains every pending filter-graph event, appending each event code to
	'dst'.  Uses a zero timeout so the call never blocks; the loop ends
	when the queue is empty (GetEvent returns E_ABORT).  Throws via
	CHECK_HR on any other GetEvent failure.  No-op when event
	notification is not active.

	Fix vs. the original: IMediaEvent::FreeEventParams is now called only
	after a SUCCESSFUL GetEvent.  The original called it unconditionally,
	passing the uninitialized locals when GetEvent timed out (E_ABORT),
	which could free garbage pointer values.
*/
void DESCombine::getEventCodes( EventCodes& dst )
{
	AUTOTRACE;
	CSingleLock sl( &_criticalSection, TRUE );

	if( !_mediaEvent )
		return;
	ASSERT( _graphBuilder );

	HRESULT hr;
	long eventCode, timeout = 0;
	LONG_PTR param1, param2;

	while( 1 ) {
		hr = _mediaEvent->GetEvent( &eventCode, &param1, &param2, timeout );
		if( E_ABORT == hr )
			break;	// queue is empty -- nothing was retrieved, nothing to free
		CHECK_HR( hr, "GetEvent" );

		dst.push_back( eventCode );

		// release the BSTRs/interfaces associated with the retrieved event
		_mediaEvent->FreeEventParams( eventCode, param1, param2 );
	}
}
/*
	=====================================================================
	=====================================================================
*/
// Returns the timeline's duration as reported by IAMTimeline::GetDuration.
// The timeline must already have been created (see initialize());
// throws via CHECK_HR on failure.
LONGLONG DESCombine::getTimelineDuration()
{
	AUTOTRACE;
	CSingleLock sl( &_criticalSection, TRUE );

	ASSERT( _timeline );
	LONGLONG length;
	HRESULT hr = _timeline->GetDuration( &length );
	CHECK_HR( hr, "GetDuration" );

	return length;
}
/*
	=====================================================================
	=====================================================================
*/
/*
	Serializes the timeline to its XML representation using the Xml2Dex
	helper object and returns it as a wide string.
	NOTE(review): the guard throws when _state is PAST exRenderSelected,
	yet the message says no render method has been selected -- verify the
	intended gating against the state enum ordering.
*/
std::wstring DESCombine::getXML()
{
	AUTOTRACE;
	CSingleLock sl( &_criticalSection, TRUE );

	if( exRenderSelected < _state )
		THROW_EXCEPTION( "No render method has been selected." );

	IXml2DexPtr xml2dex;
	HRESULT hr = xml2dex.CreateInstance( CLSID_Xml2Dex );
	CHECK_HR( hr, "CLSID_Xml2Dex" );

	BSTR str;
	hr = xml2dex->WriteXML( _timeline, &str );
	CHECK_HR( hr, "WriteXML" );
	// FALSE = attach without copying, so the BSTR is freed when str2 goes out of scope
	bstr_t str2( str, FALSE );

	return std::wstring( str2 );
}
/*
	=====================================================================
	This function creates a timeline, adds groups to the timeline and 
	then adds the media files to timeline groups.
	=====================================================================
*/
/*
	Builds the DES timeline for the given input files: probes every file,
	derives the shared audio/video group media types, creates the timeline
	with the correct default frame rate, then creates the groups and adds
	the source clips.  Leaves the object in state exConstructed.
	Throws via CHECK_HR / initialize() helpers on failure.
*/
void DESCombine::initialize( const InputFiles& inputs )
{
	AUTOTRACE;
	s_generalLog.clear();

	// extract information from media files
	MediaFiles mediaFiles;
	mediaFiles.initialize( inputs );

	// get audio/video media types that will be used to define the timeline groups
	CMediaType audioMT = _getGroupAudioMediaType();
	CMediaType videoMT = _getGroupVideoMediaType( mediaFiles );
	int fps = _getFPS( videoMT );

	/* Initialize the timeline.  Note that the timeline default video frame rate 
		(i.e. the call to SetDefaultFPS) MUST be defined before the timeline groups
		are defined otherwise the video playback rate in the output file will be 
		incorrect.
	*/
	HRESULT hr = _timeline.CreateInstance( CLSID_AMTimeline );
	CHECK_HR( hr, "CLSID_AMTimeline" );
	hr = _timeline->SetDefaultFPS( fps );
	CHECK_HR( hr, "SetDefaultFPS" );
	_setErrorLog();

	// add media files to the timeline
	_initializeTimelineGroups( mediaFiles, fps, audioMT, videoMT );
	_addMediaFilesToTimeline( mediaFiles );

	_changeState( exConstructed );
}
/*
	=====================================================================
	The code in this file instructs DES to send the message 'UWM_GraphNotify' to the 
	parent window to signal that a DES event(s) need to be processed.  The parent window
	calls this function to process the event notifications.  The set of event codes
	that were received/processed is returned to the parent in the function argument 
	'eventCodes'.
	=====================================================================
*/
/*
	Handles the UWM_GraphNotify message posted by the filter graph: drains
	all queued DES events into 'eventCodes' (returned to the caller) and
	reacts to the ones this class cares about -- abort, completion, and the
	custom per-file-complete codes.  'wParam'/'lParam' are forwarded from
	the window message; only wParam is re-posted with UWM_FileCompleted.
	Ignored entirely once the object is past the graph-started state.
*/
void DESCombine::onGraphNotify( EventCodes& eventCodes, WPARAM wParam, LPARAM lParam )
{
	AUTOTRACE;
	if( exGraphStarted < _state )
		return;

	getEventCodes( eventCodes );

	EventCodes::const_iterator it;
	for( it = eventCodes.begin();  it != eventCodes.end();  ++it ) {
		long eventCode = *it;

		switch( eventCode ) {
			case EC_USERABORT :
			case EC_ERRORABORT :
				// file processing has been aborted
				_stopRendering();
				_disableEventNotification();

				_changeState( exCancelled );
				break;

			case EC_COMPLETE :
				// DES has completed processing all files 
				_stopRendering();
				_disableEventNotification();

				_changeState( exGraphCompleted );
				break;

			case EC_AudioFileComplete :
			case EC_VideoFileComplete :
				// instruct application that DES has just finished processing a file
				_postMessage( UWM_FileCompleted, eventCode, (LPARAM)wParam );
				break;
		}
	} // for( it = eventCodes.begin() ...
}
/*
	=====================================================================
	This function renders the timeline to an AVI file.
	=====================================================================
*/
/*
	Renders the timeline to the AVI file 'outputMediaFilename'.
	'videoEncoderCB'/'audioEncoderCB' (optional) supply encoder filters to
	compress each stream; 'videoCallback'/'audioCallback' (optional)
	receive per-buffer notifications during rendering.  For each timeline
	group, a SampleGrabberCB is attached and the group's output pin is
	routed (optionally through an encoder) into the AVI multiplexor.
	Throws via CHECK_HR on any failed DirectShow call.
*/
void DESCombine::renderToAVI( 
	const std::wstring& outputMediaFilename, 
	GetEncoderCallbackFunction videoEncoderCB,
	GetEncoderCallbackFunction audioEncoderCB,
	IDESCombineCB* videoCallback, 
	IDESCombineCB* audioCallback
) {
	AUTOTRACE;
	ASSERT( outputMediaFilename.size() );
	_initializeRenderEngine();

	// create the MUX filter used to write to the AVI file
	ICaptureGraphBuilder2Ptr captureGraphBuilder2;
	HRESULT hr = captureGraphBuilder2.CreateInstance(  CLSID_CaptureGraphBuilder2 );
	CHECK_HR( hr, "CLSID_CaptureGraphBuilder2" );
	hr = captureGraphBuilder2->SetFiltergraph( _graphBuilder );
	CHECK_HR( hr, "SetFiltergraph" );
	IBaseFilterPtr mux;
	hr = captureGraphBuilder2->SetOutputFileName( &MEDIASUBTYPE_Avi, outputMediaFilename.c_str(), &mux, NULL );
	CHECK_HR( hr, "SetOutputFileName" );

	/*	NOTE: This function was originally ported from the DESCombine DirectShow.NET
		sample project.  The .NET code did not explicitly call
		IConfigInterleaving::put_Mode().  This call is required here, otherwise
		audio is not synchronized with video during playback.
	*/
	IConfigInterleavingPtr configInterleaving;
	hr = mux->QueryInterface( IID_IConfigInterleaving, (void**)&configInterleaving );
	CHECK_HR( hr, "IID_IConfigInterleaving" );
	hr = configInterleaving->put_Mode( INTERLEAVE_FULL );
	CHECK_HR( hr, "INTERLEAVE_FULL" );

	/* create the IMediaEventSink filter that our IDESCombineCB callback functions will use 
		to notify the application when each file has been processed (e.g. see 
		IMediaEventSink::Notify() in SampleGrabberCB::BufferCB()).
	*/
	IMediaEventSinkPtr sink;
	hr = _graphBuilder->QueryInterface( IID_IMediaEventSink, (void**)&sink );
	CHECK_HR( hr, "IID_IMediaEventSink" );

	/* Get number of groups that have been added to the timeline.  For this example,
		there should be two groups (i.e. one group contains all video and one group contains
		all audio).
	*/ 
	long ctGroups;
	hr = _timeline->GetGroupCount( &ctGroups );
	CHECK_HR( hr, "GetGroupCount" );

	// connect the output pin from each timeline group to the multiplexor (MUX)
	for( long xGroup = 0;  xGroup < ctGroups;  ++xGroup ) {
		// instruct DES that we are writing to a file (and not previewing in a window)
		IAMTimelineObjPtr groupObj;
		HRESULT hr = _timeline->GetGroup( &groupObj, xGroup );
		CHECK_HR( hr, "GetGroup" );
		IAMTimelineGroupPtr group;
		hr = groupObj->QueryInterface( IID_IAMTimelineGroup, (void**)&group );
		CHECK_HR( hr, "IID_IAMTimelineGroup" );
		hr = group->SetPreviewMode( FALSE );
		CHECK_HR( hr, "SetPreviewMode" );

		/* get output pin from current timeline group and determine whether it should
			be connected to an audio or a video renderer filter
		*/
		IPinPtr groupOutputPin;
		hr = _renderEngine->GetGroupOutputPin( xGroup, &groupOutputPin );
		CHECK_HR( hr, "GetGroupOutputPin" );
		
		// NOTE(review): the SampleGrabberCB instances below are raw-new'd and
		// handed to _buildFilterGraph; presumably the sample grabber filter
		// takes ownership via COM reference counting -- verify they are not
		// leaked if _buildFilterGraph throws.
		IBaseFilterPtr encoder;
		if( _isVideo( groupOutputPin ) ) {
			// build the filter graph so it outputs video
			SampleGrabberCB* sampleGrabberCB = new SampleGrabberCB;
			ASSERT( sampleGrabberCB );
			sampleGrabberCB->initialize( 
				_timelineGroups[xGroup], videoCallback, sink, EC_VideoFileComplete 
			);
			if( videoEncoderCB )
				encoder = (*videoEncoderCB)();
			_buildFilterGraph( 
				captureGraphBuilder2, sampleGrabberCB, L"Video", groupOutputPin, encoder, mux 
			);
		}
		else {
			// build the filter graph so it outputs audio
			SampleGrabberCB* sampleGrabberCB = new SampleGrabberCB;
			ASSERT( sampleGrabberCB );
			sampleGrabberCB->initialize( 
				_timelineGroups[xGroup], audioCallback, sink, EC_AudioFileComplete 
			);
			if( audioEncoderCB )
				encoder = (*audioEncoderCB)();
			_buildFilterGraph( 
				captureGraphBuilder2, sampleGrabberCB, L"Audio", groupOutputPin, encoder, mux 
			);
		}
	} // for( long xGroup = 0; ... 
}
/*
	=====================================================================
	This function renders the timeline to a window so it can be previewed.
	=====================================================================
*/
void DESCombine::renderToWindow( CWnd* wndDisplay, IDESCombineCB* videoCallback, IDESCombineCB* audioCallback )
{
	/* Render the timeline into 'wndDisplay' for previewing (as opposed to
		writing it to a file).  Each video group is connected to its own VMR9
		renderer tiled within the display window; each audio group is connected
		to a DirectSound renderer.  'videoCallback'/'audioCallback' are optional
		per-sample callbacks (may be NULL).
	*/
	AUTOTRACE;
	ASSERT( wndDisplay );
	ASSERT( ::IsWindow( wndDisplay->m_hWnd ) );
	_wndDisplay = wndDisplay;

	_initializeRenderEngine();

	// the capture graph builder performs the pin-to-filter connections below
	ICaptureGraphBuilder2Ptr captureGraphBuilder2;
	HRESULT hr = captureGraphBuilder2.CreateInstance(  CLSID_CaptureGraphBuilder2 );
	CHECK_HR( hr, "CLSID_CaptureGraphBuilder2" );
	hr = captureGraphBuilder2->SetFiltergraph( _graphBuilder );
	CHECK_HR( hr, "SetFiltergraph" );

	// event sink handed to each sample-grabber callback object below
	IMediaEventSinkPtr sink;
	hr = _graphBuilder->QueryInterface( IID_IMediaEventSink, (void**)&sink );
	CHECK_HR( hr, "IID_IMediaEventSink" );

	// one display rectangle per video stream, tiled within the display window
	VideoDisplayAreas videoDisplayAreas;
	_getVideoDisplayAreas( videoDisplayAreas );

	/* Get number of groups that have been added to the timeline.  For this example,
		there should be two groups (i.e. one group contains all video and one group contains
		all audio).
	*/ 
	long ctGroups;
	hr = _timeline->GetGroupCount( &ctGroups );
	CHECK_HR( hr, "GetGroupCount" );

	int xVmrRenderer = -1;	// index of the next display rectangle to assign
	for( long xGroup = 0;  xGroup < ctGroups;  ++xGroup ) {
		// instruct DES that we are previewing the timeline (and not writing it to disk)
		IAMTimelineObjPtr groupObj;
		HRESULT hr = _timeline->GetGroup( &groupObj, xGroup );
		CHECK_HR( hr, "GetGroup" );
		IAMTimelineGroupPtr group;
		hr = groupObj->QueryInterface( IID_IAMTimelineGroup, (void**)&group );
		CHECK_HR( hr, "IID_IAMTimelineGroup" );
		hr = group->SetPreviewMode( TRUE );
		CHECK_HR( hr, "SetPreviewMode" );

		/* get output pin from current timeline group and determine whether it should
			be connected to an audio or a video renderer filter
		*/
		IPinPtr groupOutputPin;
		hr = _renderEngine->GetGroupOutputPin( xGroup, &groupOutputPin );
		CHECK_HR( hr, "GetGroupOutputPin" );

		if( _isVideo( groupOutputPin ) ) {
			// build the filter graph using the video renderer
			IBaseFilterPtr vmrRenderer;
			hr = vmrRenderer.CreateInstance( CLSID_VideoMixingRenderer9 );
			CHECK_HR( hr, "CLSID_VideoMixingRenderer9" );

			vmrInitialize( vmrRenderer, _wndDisplay );

			// NOTE(review): the sink and EC_VideoFileComplete are handed to the
			// grabber callback — presumably signalled when the stream ends; confirm
			// against SampleGrabberCB::initialize
			SampleGrabberCB* sampleGrabberCB = new SampleGrabberCB;
			ASSERT( sampleGrabberCB );
			sampleGrabberCB->initialize( 
				_timelineGroups[xGroup], videoCallback, sink, EC_VideoFileComplete 
			);
			_buildFilterGraph( 
				captureGraphBuilder2, sampleGrabberCB, L"Video", groupOutputPin, vmrRenderer 
			);

			// position this video stream within its tile of the display window
			vmrSetDisplayArea( vmrRenderer, _wndDisplay, videoDisplayAreas[++xVmrRenderer], true );
		}
		else {
			// build the filter graph using the audio renderer
			IBaseFilterPtr renderer;
			hr = renderer.CreateInstance( CLSID_DefaultDirectSoundRenderer );
			CHECK_HR( hr, "CLSID_DefaultDirectSoundRenderer" );

			SampleGrabberCB* sampleGrabberCB = new SampleGrabberCB;
			ASSERT( sampleGrabberCB );
			sampleGrabberCB->initialize( 
				_timelineGroups[xGroup], audioCallback, sink, EC_AudioFileComplete 
			);
			_buildFilterGraph( 
				captureGraphBuilder2, sampleGrabberCB, L"Audio", groupOutputPin, renderer 
			);
		}
	} // for( long xGroup = 0 ...
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::startRendering( CWnd* wndNotify )
{
	// Start running the previously-built filter graph, posting graph events
	// to 'wndNotify'.  A no-op when the graph is already started.
	AUTOTRACE;
	if( _state >= exGraphStarted )
		return;

	ASSERT( wndNotify );
	_wndNotify = wndNotify;

	// a rendering method (file or window) must have been chosen first
	if( _state < exRenderSelected )
		THROW_EXCEPTION( "Render method not selected." );

	_changeState( exGraphStarted );
	_enableEventNotification();
	_writeFilterGraph( L"debug.DESCombine.grf" );
	_startRendering();
}

/*
	=====================================================================
	=====================================================================
	DESCombine - private functions
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_addMediaFilesToTimeline( const MediaFiles& src )
{
	MediaFiles::const_iterator it;
	for( it = src.begin();  it != src.end();  ++it ) {
		const MediaFile& mediaFile = *it;

		const InputFile inputFile = mediaFile.getInputFile();
		LONGLONG startTime = 0 >= inputFile.startTime
			? 0LL
			: LONGLONG( inputFile.startTime * UNITS );
		LONGLONG endTime = 0 >= inputFile.endTime
			? -1LL
			: LONGLONG( inputFile.endTime * UNITS );

		_addMediaFileToTimeline( mediaFile, startTime, endTime );
	}
}
/*
	=====================================================================
	This function adds a new source media file to the timeline.  All source
	media files added to the timeline MUST have the same number of streams and 
	matching stream media types (i.e. the media type of stream #1 is the same
	in all files, the media type of stream #2 is the same in all files, etc.).
	Accordingly, the number of groups defined for the timeline equals the number
	of streams in the source media files.
	=====================================================================
*/
void DESCombine::_addMediaFileToTimeline( const MediaFile& mediaFile, LONGLONG start, LONGLONG stop )
{
	if( exFilesAdded < _state )
		THROW_EXCEPTION( "Can't add files since rendering method already selected" );

	int ctGroups = (int)_timelineGroups.size();
	ASSERT( mediaFile.getStreamCount() == ctGroups );

	for( int x = 0;  x < ctGroups;  ++x ) 
		_timelineGroups[x].add( mediaFile, mediaFile.getStreamNumber( x ), start, stop );

	_changeState( exFilesAdded );
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_buildFilterGraph(
	ICaptureGraphBuilder2Ptr& captureGraphBuilder2,
	SampleGrabberCB* myCallback,
	const std::wstring& type,
	IPinPtr& groupPin,
	IBaseFilterPtr& compressor,
	IBaseFilterPtr& renderer
) {
	/* Connect one timeline group's output pin to its renderer, optionally
		routing the stream through a sample-grabber filter (for per-sample
		callbacks) and a compressor filter.
			captureGraphBuilder2 - builder used to connect the filters
			myCallback           - optional sample-grabber callback; may be NULL
			type                 - L"Video" or L"Audio"; used only to label filters
			groupPin             - output pin of the timeline group
			compressor           - optional encoder filter; may be NULL
			renderer             - final filter in the chain (MUX or renderer)
	*/
	HRESULT hr = 0;

	// if a callback object is supplied, insert an ISampleGrabber configured to call it
	IBaseFilterPtr sampleGrabber;
	if( myCallback ) {
		ISampleGrabberPtr sampleGrabberFilter;
		hr = sampleGrabberFilter.CreateInstance( CLSID_SampleGrabber );
		CHECK_HR( hr, "CLSID_SampleGrabber" );

		// second argument 1 selects the BufferCB callback method
		hr = sampleGrabberFilter->SetCallback( myCallback, 1 );
		CHECK_HR( hr, "SetCallback" );

		hr = sampleGrabberFilter->QueryInterface( IID_IBaseFilter, (void**)&sampleGrabber );
		CHECK_HR( hr, "IID_IBaseFilter" );

		std::wostringstream os;
		os << type << L" sample grabber";
		hr = _graphBuilder->AddFilter( sampleGrabber, os.str().c_str() );
		CHECK_HR( hr, "AddFilter" );
	}

	if( compressor ) {
		/* a compressor filter is defined so:
			1) connect the group pin to the sample grabber filter 
			2) connect the sample grabber filter to the compressor filter
			3) connect the compressor filter to the renderer filter
		*/
		std::wostringstream os;
		os << type << L" Compressor";
		// FIX: do not redeclare 'hr' here (the original shadowed the outer variable)
		hr = _graphBuilder->AddFilter( compressor, os.str().c_str() );
		CHECK_HR( hr, "AddFilter" );

		hr = captureGraphBuilder2->RenderStream( NULL, NULL, groupPin, sampleGrabber, compressor );
		CHECK_HR( hr, "RenderStream 1" );

		hr = captureGraphBuilder2->RenderStream( NULL, NULL, compressor, NULL, renderer );
		CHECK_HR( hr, "RenderStream 2" );
	}
	else {
		/* a compressor filter is not defined so: 
			1) connect the group pin to the sample grabber filter 
			2) connect the sample grabber filter to the renderer filter
		*/
		hr = captureGraphBuilder2->RenderStream( NULL, NULL, groupPin, sampleGrabber, renderer );
		// FIX: error label was "RenderStream 2" (copy/paste from the branch above),
		// which made failures here indistinguishable from the compressor path
		CHECK_HR( hr, "RenderStream" );
	}
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_buildFilterGraph( 
	ICaptureGraphBuilder2Ptr& captureGraphBuilder2,
	SampleGrabberCB* myCallback,
	const std::wstring& type,
	IPinPtr& groupPin,
	IBaseFilterPtr& renderer
) {
	// Convenience overload: add the renderer filter to the graph, then delegate
	// to the full overload with an empty (NULL) compressor filter.
	std::wostringstream name;
	name << type << L" Renderer";
	HRESULT hr = _graphBuilder->AddFilter( renderer, name.str().c_str() );
	CHECK_HR( hr, "AddFilter" );

	IBaseFilterPtr noCompressor;
	_buildFilterGraph( captureGraphBuilder2, myCallback, type, groupPin, noCompressor, renderer );
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_cleanUpRenderEngine()
{
	// Tear down the render engine and drop the timeline/graph references.
	// Safe to call when no render engine exists (it simply returns).
	if( !_renderEngine )
		return;

	AUTOTRACE;
	HRESULT hr = _renderEngine->ScrapIt();
	CHECK_HR( hr, "ScrapIt" );

	// remove the filter graph's entry from the Running Object Table
	_runningObjectTable.remove();

	/* The timeline is to be released AFTER calling IRenderEngine::ScrapIt() as stated in the 
		documentation (see http://msdn.microsoft.com/en-us/library/dd390661(v=vs.85).aspx):
			"The render engine does not keep a reference count on the timeline. Do not release 
			the timeline before you are done using it, and always call ScrapIt on the render 
			engine first."
	*/
	_renderEngine = NULL;
	_graphBuilder = NULL;
	_timeline = NULL;
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_disableEventNotification()
{
	// Nothing to undo unless notification was previously enabled.
	if( !_mediaEvent  ||  !_wndNotify )
		return;

	AUTOTRACE;
	ASSERT( _mediaEvent );
	// a NULL window handle cancels event notification
	HRESULT hr = _mediaEvent->SetNotifyWindow( 0, 0, NULL );
	CHECK_HR( hr, "SetNotifyWindow" );

	_mediaEvent = NULL;
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_enableEventNotification()
{
	if( !_wndNotify )
		return;

	ASSERT( _graphBuilder );
	HRESULT hr = _graphBuilder->QueryInterface( IID_IMediaEventEx, (void**)&_mediaEvent );
	CHECK_HR( hr, "IMediaEventEx" );
	ASSERT( _mediaEvent );

	hr = _mediaEvent->SetNotifyWindow( (OAHWND)_wndNotify->m_hWnd, UWM_GraphNotify, 0 );
	CHECK_HR( hr, "SetNotifyWindow" );
}
/*
	=====================================================================
	Currently, the only audio media type supported is 16-bit stereo PCM audio (MEDIASUBTYPE_PCM). 
	(see ms-help://MS.VSCC.v90/MS.MSDNQTR.v90.en/directshow/htm/iamtimelinegroupsetmediatype.htm)
	=====================================================================
*/
CMediaType DESCombine::_getGroupAudioMediaType()
{
	// Build the media type for the timeline's audio group: 16-bit, stereo,
	// 44.1 kHz PCM (the only audio format DES supports — see header comment).
	CMediaType mt;
	mt.InitMediaType();
	mt.majortype = MEDIATYPE_Audio;
	mt.subtype = MEDIASUBTYPE_PCM;
	mt.formattype = FORMAT_WaveFormatEx;
	mt.bFixedSizeSamples = FALSE; // TRUE;

	if( NULL == mt.AllocFormatBuffer( sizeof(WAVEFORMATEX) ) )
		THROW_EXCEPTION( "AllocFormatBuffer" );

	WAVEFORMATEX* wfx = (WAVEFORMATEX*)mt.pbFormat;
	::ZeroMemory( (PVOID)wfx, sizeof(WAVEFORMATEX) );
	wfx->wFormatTag = WAVE_FORMAT_PCM;
	wfx->nChannels = 2;						// stereo
	wfx->nSamplesPerSec = 44100;
	wfx->wBitsPerSample = 16;
	wfx->nBlockAlign = (wfx->nChannels * wfx->wBitsPerSample) / 8;
	wfx->nAvgBytesPerSec = wfx->nBlockAlign * wfx->nSamplesPerSec;
	wfx->cbSize = 0;						// no extra format bytes follow the struct

	return mt;
}
/*
	=====================================================================
	=====================================================================
*/
int DESCombine::_getFPS( const CMediaType& src ) const
{
	ASSERT( IsEqualGUID( MEDIATYPE_Video, src.majortype ) );
	VIDEOINFOHEADER* viHeader = (VIDEOINFOHEADER*)src.pbFormat;
	int fps = (int)( UNITS / viHeader->AvgTimePerFrame );
	return fps;
}
/*
	=====================================================================
	The MSDN documentation states the following:
	(see ms-help://MS.VSCC.v90/MS.MSDNQTR.v90.en/directshow/htm/iamtimelinegroupsetmediatype.htm) 

	Currently, the following media types are supported: 
		- uncompressed RGB video 
			16 bits per pixel, 555 format (MEDIASUBTYPE_RGB555) 
			24 bits per pixel (MEDIASUBTYPE_RGB24) 
			32 bits per pixel, with alpha (MEDIASUBTYPE_ARGB32, not MEDIASUBTYPE_RGB32) 
		- 16-bit stereo PCM audio (MEDIASUBTYPE_PCM) 
		- Video types must use FORMAT_VideoInfo for the format type and VIDEOINFOHEADER for the format 
			block. The VIDEOINFOHEADER2 format is not supported. Also, top-down video formats 
			(biHeight < 0) are not supported.
		To specify a compression format for the group, call the 
			IAMTimelineGroup::SetSmartRecompressFormat method.

	NOTE: This function was originally copied from the sample DESCombine DirectShow.NET sample 
	project.  The .NET version of this function did not explicitly initialize the following 
	variables:
		VIDEOINFOHEADER::rcSource
		VIDEOINFOHEADER::rcTarget
		VIDEOINFOHEADER::dwBitRate
		VIDEOINFOHEADER::AvgTimePerFrame
	These variables must be initialized in the C++ code below otherwise DES simply generates 
	the very obscure runtime error of E_FAIL when it attempts to render the timeline.  DES does 
	not provide any other information that links this error code to the fact that certain variables 
	had not been defined. 
	=====================================================================
*/
CMediaType DESCombine::_getGroupVideoMediaType( const MediaFiles& src )
{	
	/* Build the uncompressed-RGB media type for the timeline's video group,
		sized from the common video information of the source files.  Per the
		header comment above, rcSource/rcTarget/dwBitRate/AvgTimePerFrame MUST
		be initialized or DES fails with an undocumented E_FAIL.
	*/
	VideoInformation srcVI = _getVideoInformation( src );
	ASSERT( srcVI.isDefined() );

	CMediaType mt;
	mt.InitMediaType();

	// map the source bit depth onto the only RGB subtypes DES supports
	GUID mediaSubType;
	switch( srcVI.ctBits ) {
		case 16 :
			mediaSubType = MEDIASUBTYPE_RGB555;
			break;

		case 24 :
			mediaSubType = MEDIASUBTYPE_RGB24;
			break;

		case 32 :
			// note: DES requires ARGB32, not RGB32 (see header comment)
			mediaSubType = MEDIASUBTYPE_ARGB32;
			break;

		default:
			THROW_EXCEPTION( "invalid bitCount" );
	}

	mt.majortype = MEDIATYPE_Video;
	mt.subtype = mediaSubType;
	mt.formattype = FORMAT_VideoInfo;
	mt.bFixedSizeSamples = TRUE;

	if( mt.AllocFormatBuffer( sizeof(VIDEOINFOHEADER) ) == NULL )
		THROW_EXCEPTION( "AllocFormatBuffer" );

	VIDEOINFOHEADER* viHeader = (VIDEOINFOHEADER*)mt.pbFormat;
	viHeader->rcSource = CRect( 0,0, srcVI.width, srcVI.height );
	viHeader->rcTarget = CRect( 0,0, srcVI.width, srcVI.height );
	viHeader->dwBitRate = 0; 
	viHeader->AvgTimePerFrame = UNITS / srcVI.fps;	// 100ns units per frame

	BITMAPINFOHEADER& biHeader = viHeader->bmiHeader;
	ZeroMemory((PVOID)&biHeader, sizeof(biHeader));
	biHeader.biSize = sizeof( BITMAPINFOHEADER );
	biHeader.biWidth = srcVI.width;
	biHeader.biHeight = srcVI.height;
	biHeader.biPlanes = 1;
	biHeader.biBitCount = srcVI.ctBits;
	// FIX: use the named constant BI_RGB (uncompressed) instead of FALSE —
	// same value (0), but states the intent of the field correctly
	biHeader.biCompression = BI_RGB;
	biHeader.biSizeImage = biHeader.biWidth * biHeader.biHeight * (biHeader.biBitCount / 8);

	mt.lSampleSize = biHeader.biSizeImage;

	return mt;
}
/*
	=====================================================================
	=====================================================================
*/
int DESCombine::_getVideoCount() const
{
	// Count how many timeline groups expose a video output pin.
	long ctGroups = 0;
	HRESULT hr = _timeline->GetGroupCount( &ctGroups );
	CHECK_HR( hr, "GetGroupCount" );

	int ctVideo = 0;
	for( long xGroup = 0;  xGroup < ctGroups;  ++xGroup ) {
		IPinPtr pin;
		hr = _renderEngine->GetGroupOutputPin( xGroup, &pin );
		CHECK_HR( hr, "GetGroupOutputPin" );

		if( _isVideo( pin ) )
			ctVideo += 1;
	}

	return ctVideo;
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_getVideoDisplayAreas( VideoDisplayAreas& dst ) const
{
	/* Partition the display window's client area into a grid of equal-sized
		rectangles, one per video stream: a single row for one or two videos,
		two rows otherwise.  'dst' is resized to the number of video streams.
	*/
	ASSERT( _wndDisplay );

	int ctVideo = _getVideoCount();
	dst.resize( ctVideo );

	// FIX: with zero video groups (e.g. an audio-only timeline) there is
	// nothing to lay out, and the grid math below would divide by a zero
	// column count
	if( 0 == ctVideo )
		return;

	CRect displayArea;
	_wndDisplay->GetClientRect( displayArea );
	displayArea.DeflateRect( 8, 16, 8, 16 );	// leave a border around the grid

	int ctRows = 2 >= ctVideo ? 1 : 2;
	int ctColumns = ctVideo / ctRows;
	if( 1 < ctRows )
		ctColumns += (ctVideo % 2);		// odd count: extra column to fit the remainder
	int columnWidth = displayArea.Width() / ctColumns;
	int rowHeight = displayArea.Height() / ctRows;

	// fill dst left-to-right, top-to-bottom
	for( int x = 0;  x < ctVideo;  ++x ) {
		CRect& rt = dst[x];
		rt.left = displayArea.left + (x % ctColumns) * columnWidth;
		rt.right = rt.left + columnWidth - 1;
		rt.top = displayArea.top + (x / ctColumns) * rowHeight;
		rt.bottom = rt.top + rowHeight - 1;
	}
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_initializeRenderEngine()
{
	// Create the DES render engine, attach the timeline and connect the
	// source ("front end") side of the filter graph.  Transitions the object
	// into the exRenderSelected state.
	if( exRenderSelected <= _state )
		THROW_EXCEPTION( "Graph rendering has already been selected." );
	// NOTE(review): this condition looks inverted for the message it throws —
	// "no files added" would normally be _state < exFilesAdded; confirm the
	// intended state ordering
	if( exFilesAdded < _state )
		THROW_EXCEPTION( "No files added to render." );

	_changeState( exRenderSelected );

	// create the render engine
	HRESULT hr = CoCreateInstance(
		CLSID_RenderEngine,
		NULL,
		CLSCTX_INPROC_SERVER,
		IID_IRenderEngine,
		(void**) &_renderEngine
	);
	CHECK_HR( hr, "render engine" );
	ASSERT( _renderEngine );

	// tell the render engine about the timeline it should use
	hr = _renderEngine->SetTimelineObject( _timeline );
	CHECK_HR( hr, "SetTimelineObject" );

	// connect up the front end; S_WARN_OUTPUTRESET is a warning, not a
	// failure, so it is tolerated here
 	hr = _renderEngine->ConnectFrontEnd();
	if( S_WARN_OUTPUTRESET != hr ) 
		CHECK_HR( hr, "ConnectFrontEnd" );

	hr = _renderEngine->GetFilterGraph( &_graphBuilder ); 
	CHECK_HR( hr, "GetFilterGraph" );

	// register the graph in the Running Object Table
	// NOTE(review): presumably for debugging with external graph tools — confirm
	_runningObjectTable.add( _graphBuilder );
}
/*
	=====================================================================
	This function adds a new group to the timeline for each stream identified in the 
	source media files.  All source files MUST have the same number of streams
	and matching stream media types (i.e. the media type of stream #1 is the same 
	in all files, the media type of stream #2 is the same in all files, etc.).
	So the number of groups added to the timeline will equal the number of streams in 
	the source media files.

	Note: this function does NOT add the media files to the timeline groups (adding
	media files to the timeline is done by calling the function '_addMediaFilesToTimeline').
	=====================================================================
*/
void DESCombine::_initializeTimelineGroups( 
	const MediaFiles& mediaFiles,
	const int fps,
	const CMediaType& audioMT,
	const CMediaType& videoMT
){
	// Create one timeline group per stream of the FIRST media file (audio
	// groups use audioMT, video groups use videoMT), then verify every other
	// file has the same stream count and matching per-stream media types.
	// Throws when a file's stream layout does not match the first file's.
	ASSERT( mediaFiles.size() );
	_timelineGroups.clear();

	std::deque< CMediaType > groupsMediaType;
		// local copy of media types associated with each group added to the timeline

	// create a timeline group for each stream identified in the first media file
	MediaFiles::const_iterator it = mediaFiles.begin();
	{	const MediaFile& mediaFile = *it;
		int ctStreams = mediaFile.getStreamCount();
		ASSERT( 0 < ctStreams );

		for( int xStream = 0;  xStream < ctStreams;  ++xStream ) {
			CMediaType streamMT = mediaFile.getStreamInformation( xStream ).getMediaType();
			// remember the stream's media type so later files can be validated against it
			groupsMediaType.push_back( streamMT );

			// initialize new timeline group
			_timelineGroups.push_back( Group() );
			Group& group = *_timelineGroups.rbegin();
			if( IsEqualGUID( MEDIATYPE_Audio, streamMT.majortype ) ) {
				// add new audio group to timeline
				group.initialize( audioMT, _timeline, fps );
			}
			else if( IsEqualGUID( MEDIATYPE_Video, streamMT.majortype ) ) {
				// add new video group to timeline
				group.initialize( videoMT, _timeline, fps );
			}
			else {
				// only audio and video streams are supported
				THROW_EXCEPTION( "unrecognized stream media type" );
			}
		} // for( int xStream = 0; ....
	}

	// confirm all other media files have the same number of streams and stream media types
	for( ++it;  it != mediaFiles.end();  ++it ) {
		const MediaFile& mediaFile = *it;
		int ctStreams = mediaFile.getStreamCount();
		ASSERT( 0 < ctStreams );

		// confirm number of streams in current media file is the same as in the first media file
		if( (int)_timelineGroups.size() != ctStreams ) 
			THROW_EXCEPTION( "invalid number of streams" );

		// confirm stream media types in current media file are the same as in the first media file
		for( int xStream = 0;  xStream < ctStreams;  ++xStream ) {
			CMediaType streamMT = mediaFile.getStreamInformation( xStream ).getMediaType();
			if( streamMT != groupsMediaType[xStream] ) {
				THROW_EXCEPTION( "unexpected stream media type" );
			}
		} // for( int xStream = 0; ...
	} // for( ++it; ...
}
/*
	=====================================================================
	=====================================================================
*/
bool DESCombine::_isVideo( IPinPtr& pin ) const
{
	// Enumerate the pin's preferred media types and report whether any of
	// them carries a MEDIATYPE_Video major type.
	IEnumMediaTypesPtr mediaEnum;
	HRESULT hr = pin->EnumMediaTypes( &mediaEnum );
	CHECK_HR( hr, "EnumMediaTypes" );

	mediaEnum->Reset();
	bool isVideo = false;
	for( ;; ) {
		AM_MEDIA_TYPE* mediaType = NULL;
		hr = mediaEnum->Next( 1, &mediaType, NULL );
		if( S_FALSE == hr  ||  FAILED(hr) )
			break;		// enumeration exhausted (or failed)
		CHECK_HR( hr, "Next" );

		isVideo = IsEqualGUID( MEDIATYPE_Video, mediaType->majortype ) ? true : false;
		::DeleteMediaType( mediaType );		// Next() allocates; the caller must free

		if( isVideo )
			break;
	}

	return isVideo;
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_postMessage( UINT message, WPARAM wParam, LPARAM lParam )
{
	/* Post 'message' (with its WPARAM/LPARAM) to the notification window,
		when one is registered and its handle is still valid.  PostMessage
		(rather than SendMessage) is used so the call never blocks.
	*/
	if( _wndNotify  &&  ::IsWindow( _wndNotify->m_hWnd ) )
		// BUG FIX: the original passed 'message' as the WPARAM argument,
		// silently discarding the caller-supplied wParam
		_wndNotify->PostMessage( message, wParam, lParam );
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_postString( const wchar_t* message )
{
	/* Post a copy of 'message' to the notification window as UWM_ShowMessage.
		The receiver takes ownership of the heap-allocated std::wstring passed
		in WPARAM and is responsible for deleting it.
	*/
	if( !message )
		return;

	if( _wndNotify  &&  ::IsWindow( _wndNotify->m_hWnd ) ) {
		std::wstring* str = new std::wstring( message );
		// FIX: if the post fails the receiver never sees the string, so it
		// must be freed here to avoid a leak (PostMessage returns FALSE on failure)
		if( !_wndNotify->PostMessage( UWM_ShowMessage, (WPARAM)str, 0 ) )
			delete str;
	}
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_setErrorLog()
{
	// Create an ErrorLog object and install it on the timeline via
	// IAMSetErrorLog so DES reports detailed errors through IAMErrorLog.
	HRESULT hr = 0;

	ErrorLog* errorLog = new ErrorLog( _T("DESCombine"), NULL, &hr );
	CHECK_HR( hr, "ErrorLog" );
	ASSERT( errorLog );
	// NOTE(review): assumes _errorLog is a COM smart pointer that manages the
	// reference on the new object — confirm its declared type
	_errorLog = (IAMErrorLog*)errorLog;

	ASSERT( _timeline );
	IAMSetErrorLogPtr setLog;
	hr = _timeline->QueryInterface( IID_IAMSetErrorLog, (void **)&setLog );
	CHECK_HR( hr, "IID_IAMSetErrorLog" );
	hr = setLog->put_ErrorLog( _errorLog );
	CHECK_HR( hr, "put_ErrorLog" );
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_startRendering()
{
	// Obtain the graph's media-control interface, then start the graph.
	HRESULT hr = _graphBuilder->QueryInterface( IID_IMediaControl, (void**)&_mediaControl );
	CHECK_HR( hr, "IID_IMediaControl" );

	hr = _mediaControl->Run();
	CHECK_HR( hr, "Run" );
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_stopRendering()
{
	// Quietly ignore the call when the graph was never started.
	if( !_mediaControl )
		return;

	AUTOTRACE;
	ASSERT( _mediaControl );
	HRESULT hr = _mediaControl->Stop();
	CHECK_HR( hr, "Stop" );

	_mediaControl = NULL;	// drop our media-control reference once stopped
}
/*
	=====================================================================
	=====================================================================
*/
void DESCombine::_writeFilterGraph( const std::wstring& filename )
{
	if( !_graphBuilder )
		THROW_EXCEPTION( "undefined filter graph" );

	IXml2DexPtr xml2dex;
	HRESULT hr = xml2dex.CreateInstance( CLSID_Xml2Dex );
	CHECK_HR( hr, "CLSID_Xml2Dex" );

	ASSERT( _graphBuilder );
	bstr_t str( filename.c_str() );
	hr = xml2dex->WriteGrfFile( _graphBuilder, str );
	CHECK_HR( hr, "WriteGrfFile" );
}

/*
	=====================================================================
	=====================================================================
	static functions
	=====================================================================
	=====================================================================
*/
/*
	=====================================================================
	=====================================================================
*/
static void s_logGeneralMessage( const std::wstring& src )
{
	// Append the message to the in-memory log (under the log lock) and echo
	// it to the debugger output.
	CSingleLock lock( &s_csLog, TRUE );
	s_generalLog.push_back( src );

	::OutputDebugString( L"\n" );
	::OutputDebugString( src.c_str() );
}
/*
	=====================================================================
	=====================================================================
*/
static void s_writeLogFile( const std::wstring& logFilename, const Log& log )
{
	if( log.empty() )
		return;

	ASSERT( logFilename.size() );
	std::wofstream file( logFilename.c_str(), std::ios_base::out | std::ios_base::trunc );
	Log::const_iterator it;
	for( it = log.begin();  it != log.end();  ++it ) {
		file << *it << std::endl;
		ASSERT( file.bad() == false );
	}
}

} // namespace DESCombineLib {

By viewing downloads associated with this article you agree to the Terms of Service and the article's licence.

If a file you wish to view isn't highlighted, and is a text file (not binary), please let us know and we'll add colourisation support for it.

License

This article, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)


Written By
Retired
Canada Canada
This member has not yet provided a Biography. Assume it's interesting and varied, and probably something to do with programming.

Comments and Discussions