Thanks for your reply. Here is the code I wrote.For MPEG1, it plays very
shortly.If I save the frames, all those are saved correctly.Didn't I set
correct frame time? Pathos> From: [EMAIL PROTECTED]> To:
[email protected]> Date: Mon, 7 Jul 2008 18:08:29 -0400> Subject: Re:
[libav-user] MPEG 1/2 directshow filter> > Perhaps an error in the code? The
one I wrote works with MPEG1, MPEG2 and > several other formats...> > > -----
Original Message ----- > From: 'John Smith' <[EMAIL PROTECTED]>> To:
'libav-user' <[email protected]>> Sent: Monday, July 07, 2008 1:52 PM>
Subject: [libav-user] MPEG 1/2 directshow filter> > > >> > I wrote a simple
directshow fileter for mpeg using libavcodcec.> >> > For mpeg2 it works, but
not for mpeg1> >> > What is the reason?> >> >
_________________________________________________________________> > Making the
world a better place one message at a time.> >
http://www.imtalkathon.com/?source=EML_WLH_Talkathon_BetterPlace> >
_______________________________________________> > libav-user mailing list> >
[email protected]> >
https://lists.mplayerhq.hu/mailman/listinfo/libav-user> > > >
_______________________________________________> libav-user mailing list>
[email protected]> https://lists.mplayerhq.hu/mailman/listinfo/libav-user
_________________________________________________________________
It’s a talkathon – but it’s not just talk.
http://www.imtalkathon.com/?source=EML_WLH_Talkathon_JustTalk
#include "stdafx.h"
#include "MyDecFilter.h"
FILE *pFile;
// {30CC0FD0-6418-47b8-AD8F-2895C8939CAD}
DEFINE_GUID(CLSID_MyMPEGDec,
0x30cc0fd0, 0x6418, 0x47b8, 0xad, 0x8f, 0x28, 0x95, 0xc8, 0x93, 0x9c, 0xad);
static bool USE_IYUV;
static bool USE_YV12;
static bool USE_YUY2;
static bool USE_YVYU;
static bool USE_UYVY;
static bool USE_RGB32;
static bool USE_RGB24;
static bool USE_RG555;
static bool USE_RG565;
const AMOVIESETUP_MEDIATYPE sudInputPinTypes[] =
{
{&MEDIATYPE_Video, &MEDIASUBTYPE_MPEG1Packet},
{&MEDIATYPE_Video, &MEDIASUBTYPE_MPEG1Payload},
{&MEDIATYPE_Video, &MEDIASUBTYPE_MPEG2_VIDEO}
};
const AMOVIESETUP_MEDIATYPE sudOutputPinTypes[] =
{
{&MEDIATYPE_Video, &MEDIASUBTYPE_IYUV}
};
const AMOVIESETUP_PIN psudPins[] =
{
{
L"Input", // String pin name
FALSE, // Is it rendered
FALSE, // Is it an output
FALSE, // Allowed none
FALSE, // Allowed many
&CLSID_NULL, // Connects to filter
L"Output", // Connects to pin
sizeof(sudInputPinTypes) / sizeof(AMOVIESETUP_MEDIATYPE), //
Number of types
sudInputPinTypes // The pin details
},
{
L"Output", // String pin name
FALSE, // Is it rendered
TRUE, // Is it an output
FALSE, // Allowed none
FALSE, // Allowed many
&CLSID_NULL, // Connects to filter
L"Input", // Connects to pin
sizeof(sudOutputPinTypes) / sizeof(AMOVIESETUP_MEDIATYPE),
// Number of types
sudOutputPinTypes // The pin details
}
};
const AMOVIESETUP_FILTER sudMyDecFilter =
{
&CLSID_MyMPEGDec, // Filter CLSID
MY_FILTER_NAME_L, // Filter name
MERIT_PREFERRED, // Its merit
sizeof(psudPins) / sizeof(AMOVIESETUP_PIN), // Number of pins
psudPins // Pin details
};
CFactoryTemplate g_Templates[] =
{
{
MY_FILTER_NAME_L,
&CLSID_MyMPEGDec,
CMyDecFilter::CreateInstance,
NULL,
&sudMyDecFilter
},
};
int g_cTemplates = sizeof(g_Templates) / sizeof(CFactoryTemplate);
STDAPI DllRegisterServer()
{
return AMovieDllRegisterServer2(TRUE);
}
STDAPI DllUnregisterServer()
{
return AMovieDllRegisterServer2(FALSE);
}
/* create instance */
CUnknown * WINAPI CMyDecFilter::CreateInstance(LPUNKNOWN punk, HRESULT *phr)
{
CMyDecFilter *pNewObject = new CMyDecFilter(punk, phr);
if (pNewObject == NULL)
{
*phr = E_OUTOFMEMORY;
}
return pNewObject;
}
/* query interfaces */
STDMETHODIMP CMyDecFilter::NonDelegatingQueryInterface(REFIID riid, void **ppv)
{
CheckPointer(ppv, E_POINTER);
return CTransformFilter::NonDelegatingQueryInterface(riid, ppv);
}
/* Constructor */
CMyDecFilter::CMyDecFilter(LPUNKNOWN punk, HRESULT *phr) :
CVideoTransformFilter((TCHAR *) NAME("CMyDecFilter"), punk, CLSID_MyMPEGDec)
{
if(FAILED(*phr)) return;
USE_IYUV = TRUE;
USE_YV12 = TRUE;
USE_YUY2 = FALSE;
USE_YVYU = FALSE;
USE_UYVY = FALSE;
USE_RGB32 = TRUE;
USE_RGB24 = TRUE;
USE_RG555 = TRUE;
USE_RG565 = TRUE;
}
/* Destructor */
CMyDecFilter::~CMyDecFilter()
{
}
/* Check input type */
HRESULT CMyDecFilter::CheckInputType(const CMediaType *mtIn)
{
if (*mtIn->Type() == MEDIATYPE_Video) {
if ((*mtIn->Subtype() == MEDIASUBTYPE_MPEG1Packet) ||
(*mtIn->Subtype() == MEDIASUBTYPE_MPEG1Payload))
{
if(mtIn->cbFormat >= sizeof(MPEG1VIDEOINFO))
{
MPEG1VIDEOINFO *pvi = (MPEG1VIDEOINFO *)
mtIn->Format();
size.cx = pvi->hdr.bmiHeader.biWidth;
size.cy = abs(pvi->hdr.bmiHeader.biHeight);
rtFrameTime = pvi->hdr.AvgTimePerFrame;
codecID = CODEC_ID_MPEG1VIDEO;
return S_OK;
}
}
else if (*mtIn->Subtype() == MEDIASUBTYPE_MPEG2_VIDEO){
if(mtIn->cbFormat >= sizeof(MPEG2VIDEOINFO))
{
MPEG2VIDEOINFO *pvi = (MPEG2VIDEOINFO *)
mtIn->Format();
size.cx = pvi->hdr.bmiHeader.biWidth;
size.cy = abs(pvi->hdr.bmiHeader.biHeight);
rtFrameTime = pvi->hdr.AvgTimePerFrame;
codecID = CODEC_ID_MPEG2VIDEO;
return S_OK;
}
}
}
return VFW_E_TYPE_NOT_ACCEPTED;
}
/* Get list of supported output colorspaces */
HRESULT CMyDecFilter::GetMediaType(int iPosition, CMediaType *mtOut)
{
BITMAPINFOHEADER * bmih;
if (m_pInput->IsConnected() == FALSE)
return E_UNEXPECTED;
mtOut->InitMediaType();
VIDEOINFOHEADER * vih = (VIDEOINFOHEADER *)
mtOut->AllocFormatBuffer(sizeof(VIDEOINFOHEADER));
if (vih == NULL)
return E_OUTOFMEMORY;
ZeroMemory(vih, sizeof (VIDEOINFOHEADER));
vih->AvgTimePerFrame = rtFrameTime;
bmih = &(vih->bmiHeader);
mtOut->SetFormatType(&FORMAT_VideoInfo);
bmih->biSize = sizeof(BITMAPINFOHEADER);
bmih->biWidth = size.cx;
bmih->biHeight = size.cy;
bmih->biPlanes = 1;
if (iPosition < 0) return E_INVALIDARG;
switch (iPosition) {
case 0:
if (USE_YUY2) {
bmih->biCompression = MEDIASUBTYPE_YUY2.Data1;
bmih->biBitCount = 16;
mtOut->SetSubtype(&MEDIASUBTYPE_YUY2);
break;
}
case 1 :
if (USE_YVYU) {
bmih->biCompression = MEDIASUBTYPE_YVYU.Data1;
bmih->biBitCount = 16;
mtOut->SetSubtype(&MEDIASUBTYPE_YVYU);
break;
}
case 2 :
if (USE_UYVY) {
bmih->biCompression = MEDIASUBTYPE_UYVY.Data1;
bmih->biBitCount = 16;
mtOut->SetSubtype(&MEDIASUBTYPE_UYVY);
break;
}
case 3 :
//if (USE_IYUV) {
// bmih->biCompression =
CLSID_MEDIASUBTYPE_IYUV.Data1;
// bmih->biBitCount = 12;
// mtOut->SetSubtype(&CLSID_MEDIASUBTYPE_IYUV);
break;
case 4 :
if (USE_YV12) {
bmih->biCompression = MEDIASUBTYPE_YV12.Data1;
bmih->biBitCount = 12;
mtOut->SetSubtype(&MEDIASUBTYPE_YV12);
break;
}
case 5 :
if (USE_RGB32) {
bmih->biCompression = BI_RGB;
bmih->biBitCount = 32;
mtOut->SetSubtype(&MEDIASUBTYPE_RGB32);
break;
}
case 6 :
if (USE_RGB24) {
bmih->biCompression = BI_RGB;
bmih->biBitCount = 24;
mtOut->SetSubtype(&MEDIASUBTYPE_RGB24);
break;
}
case 7 :
if (USE_RG555) {
bmih->biCompression = BI_RGB;
bmih->biBitCount = 16;
mtOut->SetSubtype(&MEDIASUBTYPE_RGB555);
break;
}
case 8 :
if (USE_RG565) {
bmih->biCompression = BI_RGB;
bmih->biBitCount = 16;
mtOut->SetSubtype(&MEDIASUBTYPE_RGB565);
break;
}
default :
return VFW_S_NO_MORE_ITEMS;
}
bmih->biSizeImage = GetBitmapSize(bmih);
mtOut->SetType(&MEDIATYPE_Video);
mtOut->SetTemporalCompression(FALSE);
mtOut->SetSampleSize(bmih->biSizeImage);
return S_OK;
}
/* Check input<->output compatiblity */
HRESULT CMyDecFilter::CheckTransform(const CMediaType *mtIn, const CMediaType
*mtOut)
{
//Check the major type.
if (mtOut->majortype != MEDIATYPE_Video)
{
return VFW_E_TYPE_NOT_ACCEPTED;
}
if ((mtOut->formattype != FORMAT_VideoInfo) ||
(mtOut->cbFormat < sizeof(VIDEOINFOHEADER)))
{
return VFW_E_TYPE_NOT_ACCEPTED;
}
VIDEOINFOHEADER *pVIH = (VIDEOINFOHEADER *) mtOut->pbFormat;
stride = pVIH->bmiHeader.biWidth;
return S_OK;
}
/* Allocate output buffer */
HRESULT CMyDecFilter::DecideBufferSize(IMemAllocator *pAlloc,
ALLOCATOR_PROPERTIES *pProp)
{
AM_MEDIA_TYPE mt;
HRESULT hr = m_pOutput->ConnectionMediaType(&mt);
if (FAILED(hr))
{
return hr;
}
pProp->cBuffers = 1;
pProp->cbBuffer = size.cx * size.cy * 4;
pProp->cbPrefix = 0;
// Set allocator properties.
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProp, &Actual);
if (hr != S_OK)
{
return hr;
}
if (pProp->cbBuffer > Actual.cbBuffer)
{
return E_FAIL;
}
return S_OK;
}
HRESULT CMyDecFilter::CompleteConnect(PIN_DIRECTION direction, IPin
*pReceivePin)
{
HRESULT hr = CTransformFilter::CompleteConnect(direction, pReceivePin);
if (direction == PINDIR_OUTPUT)
{
hr = pReceivePin->QueryInterface(__uuidof(m_pNextFilterInputpin),
(VOID**)&m_pNextFilterInputpin);
ASSERT(hr == S_OK);
// we do not want to hold the reference of the input pin
m_pNextFilterInputpin->Release();
}
return hr;
}
HRESULT CMyDecFilter::StartStreaming()
{
HRESULT hr = CTransformFilter::StartStreaming();
if (FAILED(hr)) return hr;
avcodec = NULL;
avctx = NULL;
frame = NULL;
avcodec_init();
avcodec_register_all();
ffbuf = NULL;
ffbuflen = 0;
avcodec = avcodec_find_decoder(codecID);
if (!avcodec)
return S_FALSE;
avctx = avcodec_alloc_context();
avctx->width = size.cx;
avctx->height= size.cy;
if (codecID == CODEC_ID_MPEG1VIDEO)
avctx->codec_tag = mmioFOURCC('M','P','G','1');
else if (codecID == CODEC_ID_MPEG2VIDEO)
avctx->codec_tag = mmioFOURCC('M','P','G','2');
else
return S_FALSE;
if(avcodec->capabilities & CODEC_CAP_TRUNCATED)
avctx->flags |= CODEC_FLAG_TRUNCATED;
if (avcodec_open(avctx, avcodec) < 0)
return S_FALSE;
frame = avcodec_alloc_frame();
pFile = fopen("D:\\Work\\RAC\\Filter1\\MyMPEGDec\\MyMPEGDec\\cyj.dbg",
"wb");
if (pFile == NULL)
return S_FALSE;
my = 0;
rtLastTime = 0;
return hr;
}
HRESULT CMyDecFilter::Receive(IMediaSample* pIn)
{
uint8_t *src, *dest;
HRESULT hr;
AM_MEDIA_TYPE* pmt;
if (SUCCEEDED(pIn->GetMediaType(&pmt)) && pmt)
{
CMediaType mt(*pmt);
m_pInput->SetMediaType(&mt);
DeleteMediaType(pmt);
}
ASSERT (m_pOutput != NULL);
IMediaSample *pOut;
hr = InitializeOutputSample(pIn, &pOut);
if (hr != S_OK)
return S_OK;
pIn->GetPointer(&src);
int srclen = pIn->GetActualDataLength();
if (srclen <= 0)
return S_OK;
if(srclen <= 0) return S_OK;
if (pOut->GetPointer(&dest) != S_OK)
return S_OK;
while (srclen > 0)
{
int got_picture, used_bytes;
unsigned int neededsize = srclen + FF_INPUT_BUFFER_PADDING_SIZE;
if (ffbuflen < neededsize)
ffbuf = (unsigned char *) realloc(ffbuf, ffbuflen =
neededsize);
memcpy(ffbuf, src, srclen);
memset(ffbuf + srclen, 0, FF_INPUT_BUFFER_PADDING_SIZE);
used_bytes = avcodec_decode_video(avctx, frame, &got_picture,
ffbuf, srclen);
if (used_bytes < 0)
break;
src += used_bytes;
srclen -= used_bytes;
if (got_picture && frame->data[0])
{
uint8_t *out[3];
out[0] = dest;
out[2] = out[0] + stride * size.cy;
out[1] = out[2] + stride * (size.cy >> 2);
for (int i = 0; i < size.cy >> 1; i++)
{
CopyMemory(out[0], frame->data[0] + 2 * i *
frame->linesize[0], frame->linesize[0]);
out[0] += stride;
CopyMemory(out[0], frame->data[0] + (2 * i + 1)
* frame->linesize[0], frame->linesize[0]);
out[0] += stride;
CopyMemory(out[1], frame->data[1] + i *
frame->linesize[1], frame->linesize[1]);
out[1] += stride >> 1;
CopyMemory(out[2], frame->data[2] + i *
frame->linesize[2], frame->linesize[1]);
out[2] += stride >> 1;
}
int dstlen = size.cy * stride * 3 / 2;
pOut->SetActualDataLength(dstlen);
REFERENCE_TIME rtStart, rtStop;
pIn->GetTime(&rtStart, &rtStop);
rtStop = rtStart + rtFrameTime;
pOut->SetTime(&rtStart, &rtStop);
my++;
hr = m_pOutput->Deliver(pOut);
}
}
pOut->Release();
return S_OK;
}
HRESULT CMyDecFilter::StopStreaming()
{
if (avctx)
avcodec_close(avctx);
av_free(avctx); avctx = NULL;
av_free(frame); frame = NULL;
avcodec = NULL;
// Write header
fprintf(pFile, "Frame count=%d\n", my);
fclose(pFile);
return CTransformFilter::StopStreaming();
}
void CMyDecFilter::SaveFrame(int iFrame)
{
FILE *pFile;
char szFilename[256];
int y;
// Open file
sprintf(szFilename,
"D:\\Work\\RAC\\Filter1\\MyMPEGDec\\MyMPEGDec\\test\\frame%d.ppm", iFrame);
pFile = fopen(szFilename, "wb");
if (pFile == NULL)
return;
// Write header
fprintf(pFile, "P6\n%d %d\n255\n", size.cx, size.cy);
// Write pixel data
for (y = 0; y < size.cy; y++)
fwrite(frame->data[0] + y * frame->linesize[0], 1, 3 * size.cx,
pFile);
// Close file
fclose(pFile);
}
_______________________________________________
libav-user mailing list
[email protected]
https://lists.mplayerhq.hu/mailman/listinfo/libav-user