I'm trying to make a transform filter DLL which takes side-by-side (parallel) video frames and displays them in fullscreen as NVIDIA 3D stereo. I use GraphEdit to check the results, but when I connect it to a video renderer filter it shows me the "Hard coded break point" error:

"CTransformFilter::Transform() should never be called"
At line 67 of x:\program files\microsoft sdks\windows\v7.1\samples\multimedia\directshow\baseclasses\transfrm.cpp

Continue? (Cancel to debug)


I'm not sure what to do now.
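
For reference, as far as I can tell the breakpoint fires from the stock CTransformFilter::Transform() body in the base classes, and the virtual that a derived filter is supposed to override is the two-argument one below (this is just how I read my copy of transfrm.h, so please correct me if I have it wrong). My Filter.h declares a three-argument Transform instead, so maybe that is related?

// From transfrm.h (DirectShow base classes), as far as I can tell -- the output pin
// calls this for every sample, and its default body is what raises the breakpoint:
virtual HRESULT Transform(IMediaSample *pIn, IMediaSample *pOut);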

These are the files in my project:

Filter.h
C++
/** 
 *  Filter DirectShow Interface 
 *  andresin87@gmail.com 
 *  All rights reserved. 
 */  
  
#ifndef TFILTER_H  
#define TFILTER_H  
#pragma once  
  
#include <windows.h>  
#include <initguid.h>  
#include <streams.h>

#include <d3d9.h>
#include <d3dx9.h>
  
#define TFILTER_NAME L"tFilter (auto-loading version)"  
  
  
// {C50020E3-71AC-48F1-AC40-9ADA98BDBE9D}
DEFINE_GUID(CLSID_TFilter, 
0xc50020e3, 0x71ac, 0x48f1, 0xac, 0x40, 0x9a, 0xda, 0x98, 0xbd, 0xbe, 0x9d);
  
class TFilter : public CTransformFilter {  
public:  
    DECLARE_IUNKNOWN;
	static CUnknown * WINAPI CreateInstance(LPUNKNOWN pUnk, HRESULT *phr); 

	// -- CTransformFilter overrides --
    HRESULT CheckInputType(const CMediaType *mtIn);
	HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
    HRESULT CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut);
    HRESULT DecideBufferSize(IMemAllocator *pAllocator, ALLOCATOR_PROPERTIES *pProp);
    HRESULT Transform(IMediaSample *pSample, AM_MEDIA_TYPE* media_type, LPDIRECT3DDEVICE9 direct_3D_device);

	TFilter(LPUNKNOWN pUnk, HRESULT *phr);
};  
  
#endif // TFILTER_H


Filter.cpp
/** 
 *  Filter DirectShow Interface 
 *  andresin87@gmail.com 
 *  All rights reserved. 
 */

#include "Filter.h"

int buffers_size;
RGBTRIPLE *member_cash_buffer;
RGBTRIPLE *member_buffer;
RGBTRIPLE *local_member_buffer_1;
RGBTRIPLE *local_member_buffer_2;
RGBTRIPLE *local_member_entered_buffer;
int member_valid_cash;
  
  
TFilter::TFilter(LPUNKNOWN pUnk, HRESULT *phr)   
	: CTransformFilter(TEXT("TFilter"), pUnk, CLSID_TFilter){
}
  
CUnknown *TFilter::CreateInstance(LPUNKNOWN pUnk, HRESULT *phr) {
	TFilter *pNewObject = new TFilter(pUnk, phr);
    if (NULL == pNewObject)
	{
		if (phr)
			*phr = E_OUTOFMEMORY;
	}
    return pNewObject;  
}  
  
HRESULT TFilter::CheckInputType(const CMediaType *mtIn) {  
    if ((mtIn->majortype != MEDIATYPE_Video) ||
        (mtIn->subtype != MEDIASUBTYPE_RGB8) ||
        (mtIn->formattype != FORMAT_VideoInfo) || 
        (mtIn->cbFormat < sizeof(VIDEOINFOHEADER)))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    VIDEOINFOHEADER *pVih = 
        reinterpret_cast<videoinfoheader*>(mtIn->pbFormat);
    if ((pVih->bmiHeader.biBitCount != 8) ||
        (pVih->bmiHeader.biCompression != BI_RGB))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Check the palette table.
    if (pVih->bmiHeader.biClrUsed > PALETTE_ENTRIES(pVih))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    DWORD cbPalette = pVih->bmiHeader.biClrUsed * sizeof(RGBQUAD);
    if (mtIn->cbFormat < sizeof(VIDEOINFOHEADER) + cbPalette)
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Everything is good.
    return S_OK; 
}

HRESULT TFilter::GetMediaType(int iPosition, CMediaType *pMediaType)
{
	ASSERT(m_pInput->IsConnected());
    if (iPosition < 0)
    {
        return E_INVALIDARG;
    }
    if (iPosition == 0)
    {
        HRESULT hr = m_pInput->ConnectionMediaType(pMediaType);
        if (FAILED(hr))
        {
            return hr;
        }
        FOURCCMap fccMap = FOURCCMap('MRLE'); 
        pMediaType->subtype = static_cast<GUID>(fccMap);
        pMediaType->SetVariableSize();
        pMediaType->SetTemporalCompression(FALSE);

        ASSERT(pMediaType->formattype == FORMAT_VideoInfo);
        VIDEOINFOHEADER *pVih =
            reinterpret_cast<VIDEOINFOHEADER*>(pMediaType->pbFormat);
        pVih->bmiHeader.biCompression = BI_RLE8;
        pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader); 
        return S_OK;
    }
    // else
    return VFW_S_NO_MORE_ITEMS;
}

HRESULT TFilter::CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut)
{
    // Check the major type.
    if (mtOut->majortype != MEDIATYPE_Video)
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Check the subtype and format type.
    FOURCCMap fccMap = FOURCCMap('MRLE'); 
    if (mtOut->subtype != static_cast<GUID>(fccMap))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    if ((mtOut->formattype != FORMAT_VideoInfo) || 
        (mtOut->cbFormat < sizeof(VIDEOINFOHEADER)))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Compare the bitmap information against the input type.
    ASSERT(mtIn->formattype == FORMAT_VideoInfo);
    BITMAPINFOHEADER *pBmiOut = HEADER(mtOut->pbFormat);
    BITMAPINFOHEADER *pBmiIn = HEADER(mtIn->pbFormat);
    if ((pBmiOut->biPlanes != 1) ||
        (pBmiOut->biBitCount != 8) ||
        (pBmiOut->biCompression != BI_RLE8) ||
        (pBmiOut->biWidth != pBmiIn->biWidth) ||
        (pBmiOut->biHeight != pBmiIn->biHeight))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Compare source and target rectangles.
    RECT rcImg;
    SetRect(&rcImg, 0, 0, pBmiIn->biWidth, pBmiIn->biHeight);
    RECT *prcSrc = &((VIDEOINFOHEADER*)(mtIn->pbFormat))->rcSource;
    RECT *prcTarget = &((VIDEOINFOHEADER*)(mtOut->pbFormat))->rcTarget;
    if (!IsRectEmpty(prcSrc) && !EqualRect(prcSrc, &rcImg))
    {
        return VFW_E_INVALIDMEDIATYPE;
    }
    if (!IsRectEmpty(prcTarget) && !EqualRect(prcTarget, &rcImg))
    {
        return VFW_E_INVALIDMEDIATYPE;
    }

    // Check the palette table.
    if (pBmiOut->biClrUsed != pBmiIn->biClrUsed)
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    DWORD cbPalette = pBmiOut->biClrUsed * sizeof(RGBQUAD);
    if (mtOut->cbFormat < sizeof(VIDEOINFOHEADER) + cbPalette)
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }
    if (0 != memcmp(pBmiOut + 1, pBmiIn + 1, cbPalette))
    {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    // Everything is good.
    return S_OK;
}

HRESULT TFilter::DecideBufferSize(IMemAllocator *pAllocator, ALLOCATOR_PROPERTIES *pProp)
{
	if (!m_pInput->IsConnected() || !m_pOutput->IsConnected())
        return E_UNEXPECTED;

    ASSERT(m_pOutput->CurrentMediaType().formattype == FORMAT_VideoInfo);

    VIDEOINFOHEADER *pVihOut = 
        reinterpret_cast<VIDEOINFOHEADER*>(m_pOutput->CurrentMediaType().Format());
    CheckPointer(pVihOut, E_UNEXPECTED);

    pProp->cBuffers = 1;
    pProp->cbBuffer = GetBitmapSize(&(pVihOut->bmiHeader));
    pProp->cbAlign = 1;
    pProp->cbPrefix = 0;

	ALLOCATOR_PROPERTIES actProp;
    HRESULT hr = pAllocator->SetProperties(pProp, &actProp);
    if (FAILED(hr))
        return hr;
    if (pProp->cBuffers > actProp.cBuffers || pProp->cbBuffer > actProp.cbBuffer)
        return E_FAIL;
	return S_OK;
}

HRESULT TFilter::Transform(IMediaSample *pSample, AM_MEDIA_TYPE* media_type, LPDIRECT3DDEVICE9 direct_3D_device)
{  
    //CAutoLock lock(m_pLock);  
    //CheckPointer(pSample, E_POINTER);  
    // do some transformation  
    //return S_OK;
	// Transform code here
	if (pSample == NULL || media_type == NULL || direct_3D_device == NULL)
	{
		return E_POINTER;
	}
	AM_MEDIA_TYPE* pType = media_type;
    VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pType->pbFormat;
    BYTE *pData;                // Pointer to the actual image buffer
    long lDataLen;              // Holds length of any given sample
    RGBTRIPLE *prgb;            // Holds a pointer to the current pixel
	
    pSample->GetPointer(&pData);
    lDataLen = pSample->GetSize();
    // Get the image properties from the BITMAPINFOHEADER
	
    int iPixelSize = pvi->bmiHeader.biBitCount / 8;
    int cxImage    = pvi->bmiHeader.biWidth;
    int cyImage    = pvi->bmiHeader.biHeight;
    int cbImage    = cyImage * cxImage * iPixelSize;
    int numPixels  = cxImage * cyImage;
	
	prgb = (RGBTRIPLE*) pData;
	int pixels_shift = 2*cxImage/100;
	REFERENCE_TIME rtStart, rtEnd;
	pSample->GetTime(&rtStart, &rtEnd);
	{
		if(buffers_size!=cxImage*cyImage)
		{
			buffers_size = cxImage*cyImage;
			delete []member_cash_buffer;
			delete []member_buffer;
			delete []local_member_buffer_1;
			delete []local_member_buffer_2;
			delete []local_member_entered_buffer;
			member_cash_buffer = new RGBTRIPLE[buffers_size];
			member_buffer = new RGBTRIPLE[buffers_size];
			local_member_buffer_1 = new RGBTRIPLE[buffers_size];
			local_member_buffer_2 = new RGBTRIPLE[buffers_size];
			local_member_entered_buffer = new RGBTRIPLE[buffers_size];
			member_valid_cash = 0;
		}
	}
 
#define RGB_BYTE_ORDER(r, g ,b)  ((DWORD) (((BYTE) (b) | ((WORD) (g) << 8)) | (((DWORD) (BYTE) (r)) << 16)))
	HRESULT local_handle_result = S_OK;
	{
		IDirect3DSurface9* gImageSrc = NULL;
		
		{
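			// Note: the offscreen surface is created one row taller than the frame;
			// the extra bottom row will hold the NVSTEREOIMAGEHEADER signature written below.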
			local_handle_result = direct_3D_device->CreateOffscreenPlainSurface(
				cxImage,
				cyImage+1,
				D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, // Surface is in video memory
				&gImageSrc, NULL);
			if(local_handle_result!=S_OK)
			{
				return local_handle_result;
			}
			if(gImageSrc==NULL)
			{
				return local_handle_result;
			}
			{
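				// Copy the 24-bit frame into a 32-bit buffer, flipping the rows vertically
				// (DirectShow RGB frames are normally bottom-up) so CreateBitmap() gets a top-down image.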
				DWORD *local_bit_map_buffer;
				local_bit_map_buffer = new DWORD[cxImage*cyImage];				
				
				for(int local_counter_width=0;local_counter_width<cxImage;local_counter_width++)
				{
					for(int local_counter_height=0;local_counter_height<cyImage;local_counter_height++)
					{
						int local_couter = local_counter_width+local_counter_height*cxImage;
						int local_bit_map_couter = local_counter_width+(cyImage-(local_counter_height+1))*cxImage;
						local_bit_map_buffer[local_bit_map_couter] = RGB_BYTE_ORDER(prgb[local_couter].rgbtRed,prgb[local_couter].rgbtGreen,prgb[local_couter].rgbtBlue);
					}
				}
				
				HBITMAP handle_bit_map = NULL;
				
				handle_bit_map = CreateBitmap(
					cxImage, 
					cyImage, 
					1,
					32, 
					local_bit_map_buffer);
				
				delete []local_bit_map_buffer;
				
				HRESULT local_handle_result;
				
				
				
				HDC hdc; 
				gImageSrc->GetDC(&hdc);
				
				HDC hdc_compatible = CreateCompatibleDC(hdc);
				
				SelectObject(hdc_compatible,handle_bit_map);
				BitBlt(hdc, 0  ,0 ,cxImage  , cyImage  , hdc_compatible, 0, 0, SRCCOPY);
				gImageSrc->ReleaseDC(hdc);
				DeleteDC(hdc_compatible);
				
				bool local_result = DeleteObject(handle_bit_map);
			}
				
			int gImageWidth= cxImage; // Source image width
			int gImageHeight= cyImage;// Source image height
			
			
			RECT srcRect= { 0, 0, gImageWidth, gImageHeight+1};
			RECT dstRect= { 0, 0, gImageWidth, gImageHeight};
			
			// Stereo Blitdefines
#define NVSTEREO_IMAGE_SIGNATURE 0x4433564e //NV3D
			typedef struct _Nv_Stereo_Image_Header
			{
				unsigned int dwSignature;
				unsigned int dwWidth;
				unsigned int dwHeight;
				unsigned int dwBPP;
				unsigned int dwFlags;
			} NVSTEREOIMAGEHEADER, *LPNVSTEREOIMAGEHEADER;
			// ORedflags in the dwFlagsfielsof the _Nv_Stereo_Image_Headerstructure above
#define SIH_SWAP_EYES 0x00000001
#define SIH_SCALE_TO_FIT 0x00000002
			// Lock the stereo image
			D3DLOCKED_RECT lr;
			gImageSrc->LockRect(&lr,NULL,0);
			// write stereo signature in the last raw of the stereo image
			LPNVSTEREOIMAGEHEADER pSIH=
				(LPNVSTEREOIMAGEHEADER)(((unsigned char *) lr.pBits) + (lr.Pitch* gImageHeight));
			// Update the signature header values
			pSIH->dwSignature= NVSTEREO_IMAGE_SIGNATURE;
			pSIH->dwBPP= 32;
			pSIH->dwFlags= SIH_SWAP_EYES; // Src image has left on left and right on right
			pSIH->dwWidth= gImageWidth;
			pSIH->dwHeight= gImageHeight;
			// Unlock surface
			gImageSrc->UnlockRect();
	
			
			D3DVIEWPORT9 local_view_port;
			direct_3D_device->GetViewport(&local_view_port);
			RECT local_view_port_rect = {0,0,local_view_port.Width,local_view_port.Height};
			
			{
				direct_3D_device->Clear (0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB (0, 0, 0), 0.0f, 0);
				direct_3D_device->BeginScene ();
				
				IDirect3DSurface9* pDestSurface = NULL;
				direct_3D_device->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &pDestSurface);
				
				if(pDestSurface)
				{
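					// Stretch the stereo surface (including its signature row) onto the
					// back buffer, scaled to the current viewport rectangle.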
					direct_3D_device->StretchRect(gImageSrc, &srcRect, pDestSurface, &local_view_port_rect, D3DTEXF_LINEAR);
				}
				direct_3D_device->EndScene ();
//				direct_3D_device->Present (NULL, NULL, NULL, NULL);
				if(pDestSurface)
				{
					pDestSurface->Release();
				}
				
				if(gImageSrc)
				{
					gImageSrc->Release();
				}
			}
		}
	}
	return S_OK;
}  
  
// COM things  
const AMOVIESETUP_MEDIATYPE sudPinTypes[] = {  
    {  
        &MEDIATYPE_Video,       // Major type  
        &MEDIASUBTYPE_NULL      // Minor type  
    }  
};  
  
const AMOVIESETUP_PIN sudpPins [] = {
    {
        L"Input",             // Pins string name  
        FALSE,                // Is it rendered  
        FALSE,                // Is it an output  
        FALSE,                // Are we allowed none  
        FALSE,                // And allowed many  
        &CLSID_NULL,          // Connects to filter  
        NULL,                 // Connects to pin  
        1,                    // Number of types  
        sudPinTypes           // Pin information  
    },  
    {  
        L"Output",            // Pins string name  
        FALSE,                // Is it rendered  
        TRUE,                 // Is it an output  
        FALSE,                // Are we allowed none  
        FALSE,                // And allowed many  
        &CLSID_NULL,          // Connects to filter  
        NULL,                 // Connects to pin  
        1,                    // Number of types  
        sudPinTypes          // Pin information  
    }  
};  
  
const AMOVIESETUP_FILTER sudTcasFilter = {  
    &CLSID_TFilter,      // Filter CLSID  
    TFILTER_NAME,        // String name  
    MERIT_DO_NOT_USE,       // Filter merit  
    2,                      // Number of pins  
    sudpPins                // Pin information  
};  
  
CFactoryTemplate g_Templates[] = {  
    { TFILTER_NAME,   
      &CLSID_TFilter,   
      TFilter::CreateInstance,   
      NULL,   
      &sudTcasFilter   
    }  
};  
  
int g_cTemplates = sizeof(g_Templates) / sizeof(g_Templates[0]);  
  
STDAPI DllRegisterServer() {  
    return AMovieDllRegisterServer2(TRUE);  
}  
  
STDAPI DllUnregisterServer() {  
    return AMovieDllRegisterServer2(FALSE);  
}  
  
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);  

BOOL APIENTRY DllMain(HANDLE hModule, DWORD  dwReason, LPVOID lpReserved) {
    return DllEntryPoint((HINSTANCE)hModule, dwReason, lpReserved);  
}


Please reply.

Thanks in advance.