Implementation of MuxRenderer


    #ifndef __AMCMUXER_H__
    #define __AMCMUXER_H__

    #include <streams.h>
    #include <amextra.h>
    #include "amc_strmif.h"

    class CAMCMuxInputPin;
    class CAMCMuxRenderer;

    // AMCMuxer class
    class CAMCMuxRenderer : public CBaseFilter,
                            public IFileSinkFilter2,
                            public IAMFilterMiscFlags,
                            public IAMCRecord
    {
        friend class CAMCMuxInputPin;

    public:
        // Constructor
        CAMCMuxRenderer(TCHAR* pszName, LPUNKNOWN pUnk, HRESULT* phr, REFCLSID clsid);
        virtual ~CAMCMuxRenderer();

        DECLARE_IUNKNOWN

        // Overridden to say what interfaces we support and where
        STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void** ppv);

    #ifdef DEBUG
        ULONG STDMETHODCALLTYPE NonDelegatingRelease();
    #endif

        // Pin enumeration
        virtual CBasePin* GetPin(int n);
        virtual int GetPinCount();

        virtual HRESULT CheckInputType(const CMediaType* mtIn);

        // Open and close the file as necessary
        virtual STDMETHODIMP Run(REFERENCE_TIME tStart);
        virtual STDMETHODIMP Pause();
        virtual STDMETHODIMP Stop();

        // Receive a sample
        virtual STDMETHODIMP Receive(IMediaSample* pSample, const GUID* pMediaType);

        // Implements the IFileSinkFilter interface
        virtual STDMETHODIMP SetFileName(LPCOLESTR pszFileName, const AM_MEDIA_TYPE* pmt);
        virtual STDMETHODIMP GetCurFile(LPOLESTR* ppszFileName, AM_MEDIA_TYPE* pmt);

        // IFileSinkFilter2
        virtual HRESULT STDMETHODCALLTYPE SetMode(DWORD dwFlags);
        virtual HRESULT STDMETHODCALLTYPE GetMode(DWORD* pdwFlags);

        // IAMFilterMiscFlags
        virtual ULONG STDMETHODCALLTYPE GetMiscFlags(void);

        // IAMCRecord interface
        STDMETHODIMP SetCallback(PFNRECORDCALLBACK pfnRecordCallback, LONG lUserData);

        HRESULT Callback();

    protected:
        WCHAR*            m_pFileName;
        CCritSec          m_csFilter;
        CCritSec          m_csReceive;

        CAMCMuxInputPin*  m_pAudioPin;
        CAMCMuxInputPin*  m_pVideoPin;

        WAVEFORMATEX      m_vf;

        LONG              m_lUserData;
        DWORD             m_dwRecordStatus;
        DWORD             m_dwRecordSize;
        DWORD             m_dwRecordTime;
        DWORD             m_dwDumpSize;

        DWORD             m_dwAudioTime;
        DWORD             m_dwVideoTime;

        PFNRECORDCALLBACK m_pFnCallback;

        DWORD             m_dwMode;
    };
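The renderer's output file is selected through the standard DirectShow IFileSinkFilter/IFileSinkFilter2 interfaces it implements, and an application can additionally register a progress callback through the custom IAMCRecord interface declared in amc_strmif.h (not shown in this post). Below is a minimal client-side sketch of the file-sink part only; the filter instance and the output path are placeholders, and the IAMCRecord GUID and callback type are left out because their definitions are not included here.

    // A minimal sketch, assuming pMuxFilter already points at an instance of this
    // renderer that has been added to the graph. Only the standard DirectShow
    // IFileSinkFilter interface is used; the custom IAMCRecord callback would be
    // queried the same way, but its GUID and callback type live in amc_strmif.h.
    #include <dshow.h>

    HRESULT ConfigureMuxOutput(IBaseFilter* pMuxFilter)
    {
        IFileSinkFilter* pSink = NULL;
        HRESULT hr = pMuxFilter->QueryInterface(IID_IFileSinkFilter, (void**)&pSink);
        if (FAILED(hr))
            return hr;

        // Example path only; pick the real output file for your device/app.
        hr = pSink->SetFileName(L"\\My Documents\\capture.mp4", NULL);
        pSink->Release();
        return hr;
    }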

    // Pin object
    class CAMCMuxInputPin : public CRenderedInputPin, public CBaseStreamControl
    {
    public:
        CAMCMuxInputPin(TCHAR* pObjectName,
                        CBaseFilter* pFilter,
                        CCritSec* pLock,
                        HRESULT* phr);
        ~CAMCMuxInputPin();

        DECLARE_IUNKNOWN

        // Overridden to say what interfaces we support and where
        STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void** ppv);

        // Do something with this media sample
        virtual STDMETHODIMP Receive(IMediaSample* pSample);
        virtual STDMETHODIMP EndOfStream(void);
        virtual STDMETHODIMP ReceiveCanBlock();

        // Check whether the end of stream has been reached
        virtual BOOL IsEndOfStream();

        // Check if the pin can support this specific proposed type and format
        virtual HRESULT CheckMediaType(const CMediaType* mtIn);

        virtual HRESULT GetMediaType(int iPosition, CMediaType* pMediaType);
        virtual HRESULT AddMediaType(const CMediaType* pMediaType);

        // Track NewSegment
        STDMETHODIMP NewSegment(REFERENCE_TIME tStart,
                                REFERENCE_TIME tStop,
                                double dRate);

        // Default handling for BeginFlush - call it at the beginning of your
        // implementation (it makes sure that all Receive calls fail). After
        // calling this, free any queued data and then call downstream.
        STDMETHODIMP BeginFlush(void);

        // Default handling for EndFlush - call it at the end of your
        // implementation. Before calling this, ensure that there is no queued
        // data and no thread pushing any more without a further Receive, then
        // call downstream, then call this method to clear the m_bFlushing flag
        // and re-enable receives.
        STDMETHODIMP EndFlush(void);

    protected:
        // List of media types we support
        CGenericList<CMediaType> m_lTypes;

        CAMCMuxRenderer* m_pMuxRenderer;
    };

    #endif // __AMCMUXER_H__
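The pin keeps its acceptable formats in m_lTypes, a CGenericList<CMediaType> from the DirectShow base classes, and exposes AddMediaType/CheckMediaType to populate and query that list. The original implementation is not shown in the post; the sketch below is one plausible way the pair could work, assuming an exact CMediaType match, so treat it as an illustration rather than the actual source.

    // A sketch, not the original code: AddMediaType copies the proposed type into
    // the list, CheckMediaType accepts a connection only if the proposed type
    // exactly matches a stored one. A real muxer pin may match more loosely
    // (e.g. by major type and subtype only).
    HRESULT CAMCMuxInputPin::AddMediaType(const CMediaType* pMediaType)
    {
        CheckPointer(pMediaType, E_POINTER);

        // The list holds pointers, so keep our own copy of the type.
        CMediaType* pmtCopy = new CMediaType(*pMediaType);
        if (pmtCopy == NULL)
            return E_OUTOFMEMORY;

        if (m_lTypes.AddTail(pmtCopy) == NULL)
        {
            delete pmtCopy;
            return E_OUTOFMEMORY;
        }
        return S_OK;
    }

    HRESULT CAMCMuxInputPin::CheckMediaType(const CMediaType* mtIn)
    {
        CheckPointer(mtIn, E_POINTER);

        POSITION pos = m_lTypes.GetHeadPosition();
        while (pos != NULL)
        {
            CMediaType* pmt = m_lTypes.GetNext(pos);
            if (*pmt == *mtIn)
                return S_OK;
        }
        return VFW_E_TYPE_NOT_ACCEPTED;
    }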

     

    //------------------------------------------------------------------------------
    // File: Mp4MuxerRender.h
    //
    // Desc: DirectShow sample code - definitions for dump renderer.
    //
    // Copyright (c) Microsoft Corporation.  All rights reserved.
    //------------------------------------------------------------------------------
    #ifndef __MP4_MUXERRENDER_H__
    #define __MP4_MUXERRENDER_H__

    #include <streams.h>
    #include <amextra.h>

    class CMP4Muxer;    // the class to write a .mp4/.3gp/.3g2 file

    class CMP4Renderer : public CAMCMuxRenderer
    {
    public:
        static CUnknown* WINAPI CreateInstance(LPUNKNOWN punk, HRESULT* phr);

        // Constructor
        CMP4Renderer(LPUNKNOWN pUnk, HRESULT* phr);
        ~CMP4Renderer();

        // Pin enumeration
        CBasePin* GetPin(int n);
        int GetPinCount();

        HRESULT CheckInputType(const CMediaType* mtIn);

        // Open and close the file as necessary
        STDMETHODIMP Stop();

        STDMETHODIMP JoinFilterGraph(IFilterGraph* pGraph, LPCWSTR pName);

        // Receive a sample
        STDMETHODIMP Receive(IMediaSample* pSample, const GUID* pMediaType);

    private:
        HRESULT CheckVideoInput(const CMediaType* mtIn);
        HRESULT CheckAudioInput(const CMediaType* mtIn);
        HRESULT GetRegistryInfo();

    private:
        CMP4Muxer*      m_pMuxer;

        MEDIAAUDIOINFO* m_pAudioInfo;
        MEDIAVIDEOINFO* m_pVideoInfo;

        REFERENCE_TIME  m_tLast;
        DWORD           m_dwWidth;
        DWORD           m_dwHeight;

        DWORD           m_dwRotate;

        // The following data members are used to work around a problem in DMO.
        // In DMO (the Picture & Video app), when the user pauses the video
        // capture process the app does not actually call the pause function; it
        // simply stops passing the raw data. But the stream time still runs, so
        // when capture is resumed the time stamps jump forward by a long
        // interval. That is fine for the audio stream, because the muxer dumps
        // audio data according to its duration, but for the video stream it
        // causes a big problem.
        DWORD   m_dwSampleA;    // audio samples that have been dumped
        DWORD   m_dwStartA;     // audio stream start time; note that the time stamp of the first audio sample may not start from zero
        LONG    m_lDeltaA;      // delta time
        DWORD   m_dwSampleV;    // video samples that have been dumped
    };
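The comment above only outlines the intended fix: accumulate the time "lost" while the capture app withheld samples, and pull later video time stamps back by that amount so the written track stays contiguous. The sketch below illustrates that idea in isolation; the helper, the millisecond units, and the 1-second threshold are all hypothetical, since the actual implementation is not part of this post.

    // A standalone sketch of the gap-compensation idea, not the original code.
    // The helper name, the millisecond units and the 1-second threshold are
    // assumptions made for illustration.
    #include <windows.h>

    static DWORD AdjustVideoStamp(DWORD dwStampMs,      // stamp of the incoming frame, in ms
                                  DWORD dwExpectedMs,   // stamp expected from the previous frame + frame duration
                                  LONG* plAccumGapMs)   // running total of "paused" time
    {
        const DWORD GAP_THRESHOLD_MS = 1000;   // assumed: a jump longer than 1 s means capture was paused

        if (dwStampMs > dwExpectedMs + GAP_THRESHOLD_MS)
        {
            // Fold the missing interval into the accumulated gap so the written
            // video track stays contiguous instead of freezing for the gap.
            *plAccumGapMs += (LONG)(dwStampMs - dwExpectedMs);
        }

        // Shift the stamp back by everything accumulated so far.
        return dwStampMs - (DWORD)*plAccumGapMs;
    }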

    class CMP4Muxer
    {
    public:
        CMP4Muxer();
        ~CMP4Muxer();

        HRESULT DumpVideoFrame(PBYTE pbData, LONG lDataLength, BOOL bSyncFrame, DWORD dwTimeStamp);
        HRESULT DumpAudioFrame(PBYTE pbData, LONG lDataLength, DWORD dwDuration, DWORD dwTimeSpan);

        // Open and write to the file
        HRESULT OpenFile(TCHAR* szFileName);
        HRESULT CloseFile();

        HRESULT SetAudioInfo(MEDIAAUDIOINFO* pAudioInfo);
        HRESULT SetVideoInfo(MEDIAVIDEOINFO* pVideoInfo);

        BOOL  HasEnoughSpace();
        DWORD GetDumpSize();

    public:
        MMP4HANDLE      m_hMux;

        MEDIAAUDIOINFO* m_pAudioInfo;
        MEDIAVIDEOINFO* m_pVideoInfo;
        BOOL            m_bHasAudio;
        BOOL            m_bHasVideo;

        LONG            m_lVideoTime;
        LONG            m_lAudioTime;

        BYTE*           m_pAudioBuf;
        DWORD           m_dwAudOffset;
        DWORD           m_dwAudDuration;

        LONG            m_lAudDelTime;
        DWORD           m_dwStartTime;
        BOOL            m_bVideoRec;

        TCHAR*          m_pFileName;
    };

    #endif // __MP4_MUXERRENDER_H__
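Read together, the CMP4Muxer declaration implies a simple call sequence: describe the audio and video streams, open the output file, dump frames with their time stamps while there is space left, and close the file. The sketch below shows that sequence only; the MEDIAAUDIOINFO/MEDIAVIDEOINFO structures and MMP4HANDLE come from headers not shown in this post, so they are passed through opaquely, and the output path and time-stamp units are placeholders.

    // A usage sketch of the call order implied by the declaration above, not
    // production code: the output path is an example and error handling is
    // reduced to the bare minimum.
    #include <windows.h>
    #include "Mp4MuxerRender.h"

    HRESULT WriteOneFramePair(MEDIAAUDIOINFO* pAudioInfo, MEDIAVIDEOINFO* pVideoInfo,
                              PBYTE pbVideo, LONG cbVideo, DWORD dwVideoStamp,
                              PBYTE pbAudio, LONG cbAudio, DWORD dwAudioDuration,
                              DWORD dwAudioTimeSpan)
    {
        CMP4Muxer muxer;

        // Describe the streams before opening the output file.
        HRESULT hr = muxer.SetAudioInfo(pAudioInfo);
        if (SUCCEEDED(hr))
            hr = muxer.SetVideoInfo(pVideoInfo);
        if (SUCCEEDED(hr))
            hr = muxer.OpenFile(TEXT("\\My Documents\\clip.3gp"));   // example path only
        if (FAILED(hr))
            return hr;

        // Dump one sync video frame and one audio frame. A real renderer would
        // do this from Receive() for every sample and keep checking
        // HasEnoughSpace() as recording goes on.
        if (muxer.HasEnoughSpace())
        {
            muxer.DumpVideoFrame(pbVideo, cbVideo, TRUE, dwVideoStamp);
            muxer.DumpAudioFrame(pbAudio, cbAudio, dwAudioDuration, dwAudioTimeSpan);
        }

        return muxer.CloseFile();
    }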

