I have written directshow filter to unite two images from two input pins and put resulting image to output pin. The only media, i accept, is (FORMAT_VideoInfo, MEDIATYPE_Video, RGB32). In the output is the same media with width = 2*input_pin_1_width. Here is the code
ALLOCATOR_PROPERTIES Request, Actual;
HRESULT hr;
if ( m_pVideoTransformerFilter->m_SelInputpin ){
if (m_pVideoTransformerFilter->m_SelInputpin->IsConnected()) {
hr = m_pVideoTransformerFilter->m_SelInputpin->GetAllocator()->GetProperties(&Request);
if (FAILED(hr)) {
return hr;
}
} else {
ZeroMemory(&Request, sizeof(Request));
Request.cBuffers = 1;
Request.cbBuffer = 1;
}
DbgLog((LOG_MEMORY,1,TEXT("Setting Allocator Requirements")));
DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d"),
Request.cBuffers, Request.cbBuffer));
ppropInputRequest->cBuffers = Request.cBuffers;
ppropInputRequest->cbBuffer = Request.cbBuffer*2;
ppropInputRequest->cbAlign = Request.cbAlign;
if (ppropInputRequest->cBuffers<=0) {ppropInputRequest->cBuffers = 1; }
if (ppropInputRequest->cbBuffer<=0) {ppropInputRequest->cbBuffer = 1; }
hr = pAlloc->SetProperties(ppropInputRequest, &Actual);
if (FAILED(hr)) {
return hr;
}
DbgLog((LOG_MEMORY,1,TEXT("Obtained Allocator Requirements")));
DbgLog((LOG_MEMORY,1,TEXT("Count %d, Size %d, Alignment %d"),
Actual.cBuffers, Actual.cbBuffer, Actual.cbAlign));
if ( (Request.cBuffers > Actual.cBuffers)
|| (Request.cbBuffer*2 > Actual.cbBuffer)
|| (Request.cbAlign > Actual.cbAlign)
) {
return E_FAIL;
}
return NOERROR;
}
return E_FAIL;
}
Here i request buffer_size = 2*input_pin_1_bufffer_size.
unsigned __stdcall deliver_thread(void *data_parameter)
{
CVideoTransformerFilter *local_filter = (CVideoTransformerFilter*)data_parameter;
if(local_filter!=NULL)
{
while(true)
{
if(WaitForSingleObject(local_filter->receive_event[0],INFINITE) != WAIT_OBJECT_0)
{
continue;
}
if(WaitForSingleObject(local_filter->receive_event[1],INFINITE) != WAIT_OBJECT_0)
{
continue;
}
ResetEvent(local_filter->receive_event[0]);
ResetEvent(local_filter->receive_event[1]);
if
(
local_filter->samples_list_pin_1.size()!=0
&&
local_filter->samples_list_pin_2.size()!=0
)
{
IMediaSample *local_sample_1 = *local_filter->samples_list_pin_1.begin();
IMediaSample *local_sample_2 = *local_filter->samples_list_pin_2.begin();
local_filter->samples_list_pin_1.erase(local_filter->samples_list_pin_1.begin());
local_filter->samples_list_pin_2.erase(local_filter->samples_list_pin_2.begin());
if (local_filter->m_outputpin!=NULL)
{
CComPtr<IMediaSample> local_output_sample;
REFERENCE_TIME local_start_time,local_end_time;
if(local_sample_1->GetTime(&local_start_time,&local_end_time)==S_OK)
{
if(local_filter->m_outputpin->GetAllocator()->GetBuffer(&local_output_sample,&local_start_time,&local_end_time,AM_GBF_NOTASYNCPOINT)==S_OK)
{
local_output_sample->SetTime(&local_start_time,&local_end_time);
AM_MEDIA_TYPE local_media_type_1;
AM_MEDIA_TYPE local_media_type_2;
AM_MEDIA_TYPE local_media_type_output;
local_filter->m_Arrinputpin[0]->ConnectionMediaType(&local_media_type_1);
local_filter->m_Arrinputpin[1]->ConnectionMediaType(&local_media_type_2);
local_filter->m_outputpin->ConnectionMediaType(&local_media_type_output);
if
(
local_media_type_1.formattype==FORMAT_VideoInfo
&&
local_media_type_2.formattype==FORMAT_VideoInfo
&&
local_media_type_output.formattype==FORMAT_VideoInfo
)
{
VIDEOINFOHEADER *pvi_1 = (VIDEOINFOHEADER *) local_media_type_1.pbFormat;
VIDEOINFOHEADER *pvi_2 = (VIDEOINFOHEADER *) local_media_type_2.pbFormat;
VIDEOINFOHEADER *pvi_output = (VIDEOINFOHEADER *) local_media_type_output.pbFormat;
BYTE *pData_1;
BYTE *pData_2;
BYTE *pData_output;
long lDataLen_1;
long lDataLen_2;
long lDataLen_output;
RGBQUAD *prgb_1;
RGBQUAD *prgb_2;
RGBQUAD *prgb_output;
HRESULT local_result = S_OK;
local_sample_1->GetPointer(&pData_1);
local_sample_2->GetPointer(&pData_2);
local_result=local_output_sample->GetPointer(&pData_output);
lDataLen_1 = local_sample_1->GetSize();
lDataLen_2 = local_sample_1->GetSize();
lDataLen_output = local_output_sample->GetSize();
if(lDataLen_output==lDataLen_1+lDataLen_2)
{
int iPixelSize_1 = pvi_1->bmiHeader.biBitCount / 8;
int cxImage_1 = pvi_1->bmiHeader.biWidth;
int cyImage_1 = pvi_1->bmiHeader.biHeight;
int cbImage_1 = cyImage_1 * cxImage_1 * iPixelSize_1;
int numPixels_1 = cxImage_1 * cyImage_1;
int iPixelSize_2 = pvi_2->bmiHeader.biBitCount / 8;
int cxImage_2 = pvi_2->bmiHeader.biWidth;
int cyImage_2 = pvi_2->bmiHeader.biHeight;
int cbImage_2 = cyImage_2 * cxImage_2 * iPixelSize_2;
int numPixels_2 = cxImage_2 * cyImage_2;
int iPixelSize_output = pvi_output->bmiHeader.biBitCount / 8;
int cxImage_output = pvi_output->bmiHeader.biWidth;
int cyImage_output = pvi_output->bmiHeader.biHeight;
int cbImage_output = cyImage_output * cxImage_output * iPixelSize_output;
int numPixels_output = cxImage_output * cyImage_output;
prgb_1 = (RGBQUAD*) pData_1;
prgb_2 = (RGBQUAD*) pData_2;
prgb_output = (RGBQUAD*) pData_output;
ValidateWritePtr(pData_output,lDataLen_output);
if
(
cxImage_1 == cxImage_2
&&
cyImage_1 == cyImage_2
&&
cxImage_output == cxImage_1 + cxImage_2
&&
cyImage_output == cyImage_1
)
{
int cxImage = cxImage_1;
int cyImage = cyImage_1;
for (int local_counter = 0;local_counter<cyImage;local_counter++)
{
memcpy(&prgb_output[(cyImage-local_counter)*cxImage*2],&prgb_1[local_counter*cxImage],cxImage*sizeof(RGBQUAD));
memcpy(&prgb_output[cxImage+(cyImage-local_counter)*cxImage*2],&prgb_2[local_counter*cxImage],cxImage*sizeof(RGBQUAD));
}
}
}
}
local_filter->m_outputpin->Deliver(local_output_sample);
}
}
}
local_sample_1->Release();
local_sample_2->Release();
}
}
}
local_filter->thread_handle = 0;
return 0;
}
Here is deliver media thread.
With some filters i get error when copying data to output buffer (memcpy)! Even it has right size!
With some filters it works fine.
How should i set right buffer size?
What should i do, to make filter work fine with all filters, it connects?
What should i do else in function DecideBufferSize?
Why it works with some filters and does not work with others?