Yes, I essentially have that (your message only reached my inbox after I had 
sent my code).
I don't know why map() would fail, or why bits() would not be valid.

I'm on OSX, reading from an MP4...
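
For what it's worth, the next thing I'll try is checking the return value of 
map() directly - if map() is returning false (maybe the frame arrives with a 
non-CPU handle on OSX; that's just a guess), then calling unmap() afterwards 
would explain the "more times than map()" warning, and bits() would be 
garbage. Rough sketch of what I mean (a drop-in for the present() in my class 
below; names are just illustrative):

    bool present(const QVideoFrame &frame) override {
        QVideoFrame copy(frame);

        // map() returns false when the underlying buffer can't be mapped;
        // bailing out here avoids the unbalanced unmap() warning.
        if (!copy.map(QAbstractVideoBuffer::ReadOnly)) {
            qDebug() << "map failed, handle:" << copy.handleType()
                     << "format:" << copy.pixelFormat();
            return true; // keep the surface running
        }

        // bits() is only valid while the frame is mapped, so take a deep
        // copy of the image before unmapping.
        QImage view(copy.bits(), copy.width(), copy.height(), copy.bytesPerLine(),
                    QVideoFrame::imageFormatFromPixelFormat(copy.pixelFormat()));
        QImage image = view.copy();

        copy.unmap();

        // ... save / emit 'image' here ...
        return true;
    }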


> Sent: Thursday, June 14, 2018 at 7:12 AM
> From: "Igor Mironchik" <[email protected]>
> To: [email protected]
> Subject: Re: [Interest] (code) Cannot map video frames for 
> QAbstractVideoSurface::present
>
> Hi,
> 
> Start from here:
> 
> bool
> Frames::present( const QVideoFrame & frame )
> {
>    if( !isActive() )
>      return false;
> 
>    QVideoFrame f = frame;
>    f.map( QAbstractVideoBuffer::ReadOnly );
> 
>    QImage image( f.bits(), f.width(), f.height(), f.bytesPerLine(),
>      QVideoFrame::imageFormatFromPixelFormat( f.pixelFormat() ) );
> 
>    f.unmap();
> 
>    emit newFrame( image.copy( image.rect() ) );
> 
>    return true;
> }
> 
> This works for me...
> 
> 
> 14-Jun-18 17:52, Jason H wrote:
> > I have no idea what I'm doing wrong. Can someone check this and let me know?
> > The frames I'm getting are BGR32, so I have special code for that.
> > But I'm getting the message "QVideoFrame::unmap() was called more times 
> > then QVideoFrame::map()"
> > Which is obviously not the case.
> >
> > To restate the problems:
> > 1. I only get to present() once
> > 2. It's failing to map frames
> > 3. "then" in the message should be "than"
> >
> > Here's the first 3 lines of the .pro and complete code:
> > extractframes.pro-----
> > QT += multimedia
> > CONFIG += c++11 console
> > CONFIG -= app_bundle
> >
> > main.cpp--------------
> > #include <QAbstractVideoSurface>
> > #include <QGuiApplication>
> > #include <QMediaPlayer>
> > #include <QVideoSurfaceFormat>
> >
> > #include <QtDebug>
> >
> > class VideoSurface: public QAbstractVideoSurface {
> >             int _frameNumber;
> >             QVideoSurfaceFormat _format;
> >             QString _prefix;
> >             int _frameInterval;
> > public:
> >             VideoSurface(const QString &prefix, int frameInterval):
> >                     _prefix(prefix), _frameInterval(frameInterval) { }
> >
> >     bool present(const QVideoFrame &frame) override {
> >             qDebug() << Q_FUNC_INFO << _frameNumber << frame.isValid();
> >
> >             if (_frameNumber % _frameInterval == 0) {
> >                     QVideoFrame frame2(frame);
> >                     frame2.map(QAbstractVideoBuffer::ReadOnly);
> >                     QImage image;
> >
> >                     if (frame2.pixelFormat() == QVideoFrame::Format_BGR32) {
> >                              image =  QImage(frame2.bits(), frame2.width(), 
> > frame2.height(), QImage::Format_RGB32);
> >                              image = image.rgbSwapped();
> >                     } else {
> >                             image =  QImage(frame2.bits(), frame2.width(), 
> > frame2.height(), 
> > QVideoFrame::imageFormatFromPixelFormat(frame2.pixelFormat()));
> >                     }
> >
> >                     frame2.unmap();
> >                     QString filename = QString(_prefix).arg(_frameNumber);
> >                     bool result = image.save(filename);
> >                     qDebug() << Q_FUNC_INFO << filename << result;
> >             }
> >             _frameNumber ++;
> >             return true;
> >     }
> >
> >     bool start(const QVideoSurfaceFormat &format) override {
> >             qDebug() << Q_FUNC_INFO;
> >             _format = format;
> >             _frameNumber = 0;
> >             QAbstractVideoSurface::start(format);
> >             return true;
> >     }
> >     void stop() override {
> >             qDebug() << Q_FUNC_INFO;
> >             QAbstractVideoSurface::stop();
> >     }
> >     QList<QVideoFrame::PixelFormat> 
> > supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const 
> > override
> >     {
> >             Q_UNUSED(handleType);
> >             return QList<QVideoFrame::PixelFormat>()
> >                     << QVideoFrame::Format_ARGB32
> >                     << QVideoFrame::Format_ARGB32_Premultiplied
> >                     << QVideoFrame::Format_RGB32
> >                     << QVideoFrame::Format_RGB24
> >                     << QVideoFrame::Format_RGB565
> >                     << QVideoFrame::Format_RGB555
> >                     << QVideoFrame::Format_ARGB8565_Premultiplied
> >                     << QVideoFrame::Format_BGRA32
> >                     << QVideoFrame::Format_BGRA32_Premultiplied
> >                     << QVideoFrame::Format_BGR32
> >                     << QVideoFrame::Format_BGR24
> >                     << QVideoFrame::Format_BGR565
> >                     << QVideoFrame::Format_BGR555
> >                     << QVideoFrame::Format_BGRA5658_Premultiplied
> >                     << QVideoFrame::Format_AYUV444
> >                     << QVideoFrame::Format_AYUV444_Premultiplied
> >                     << QVideoFrame::Format_YUV444
> >                     << QVideoFrame::Format_YUV420P
> >                     << QVideoFrame::Format_YV12
> >                     << QVideoFrame::Format_UYVY
> >                     << QVideoFrame::Format_YUYV
> >                     << QVideoFrame::Format_NV12
> >                     << QVideoFrame::Format_NV21
> >                     << QVideoFrame::Format_IMC1
> >                     << QVideoFrame::Format_IMC2
> >                     << QVideoFrame::Format_IMC3
> >                     << QVideoFrame::Format_IMC4
> >                     << QVideoFrame::Format_Y8
> >                     << QVideoFrame::Format_Y16
> >                     << QVideoFrame::Format_Jpeg
> >                     << QVideoFrame::Format_CameraRaw
> >                     << QVideoFrame::Format_AdobeDng;
> >     }
> >
> >
> > };
> >
> > // Command line: extractframes filename format interval
> > // format: QString, with %1 = frame number
> > // Example:
> > // extractframes filename.mp4 image_%1.png 30
> >
> > int main(int argc, char *argv[])
> > {
> >     QGuiApplication a(argc, argv);
> >     QMediaPlayer player;
> >     VideoSurface surface(argv[2], QString(argv[3]).toInt());
> >     QObject::connect(&player, &QMediaPlayer::mediaStatusChanged, [=]( 
> > QMediaPlayer::MediaStatus status ) {
> >             qDebug() << Q_FUNC_INFO << status;
> >     });
> >     QObject::connect(&player, 
> > QOverload<QMediaPlayer::Error>::of(&QMediaPlayer::error), [=]( 
> > QMediaPlayer::Error error ) {
> >             qDebug() << Q_FUNC_INFO << error;
> >     });
> >     QObject::connect(&player, &QMediaPlayer::stateChanged, [=]( const 
> > QMediaPlayer::State &state ) {
> >             qDebug() << Q_FUNC_INFO << state;
> >             if (QMediaPlayer::StoppedState == state) qApp->quit();
> >     });
> >
> >     player.setMedia(QUrl::fromLocalFile(argv[1]));
> >     player.setVideoOutput(&surface);
> >     player.setMuted(true);
> >
> >     player.play();
> >
> >     return a.exec();
> > }
>
_______________________________________________
Interest mailing list
[email protected]
http://lists.qt-project.org/mailman/listinfo/interest
