From faded5af32526a07136095016ed3781e8ca405bc Mon Sep 17 00:00:00 2001
From: Rogerio Pimentel
Date: Thu, 14 Jun 2012 08:53:38 -0500
Subject: [PATCH] Add QT4.7.4 patch to support Freescale codecs

This patch adds a QT4.7.4 patch file to add support for Freescale
Multimedia codecs

Signed-off-by: Rogerio Pimentel
---
 .../qt4/qt4-embedded_4.7.4.bbappend           |  10 +
 ...dd-support-for-i.MX-codecs-to-phonon.patch | 453 ++++++++++++++++++
 2 files changed, 463 insertions(+)
 create mode 100644 meta-fsl-arm/recipes-qt/qt4/qt4-embedded_4.7.4.bbappend
 create mode 100644 meta-fsl-arm/recipes-qt/qt4/qt4/0001-Add-support-for-i.MX-codecs-to-phonon.patch

diff --git a/meta-fsl-arm/recipes-qt/qt4/qt4-embedded_4.7.4.bbappend b/meta-fsl-arm/recipes-qt/qt4/qt4-embedded_4.7.4.bbappend
new file mode 100644
index 00000000..cebb203b
--- /dev/null
+++ b/meta-fsl-arm/recipes-qt/qt4/qt4-embedded_4.7.4.bbappend
@@ -0,0 +1,10 @@
+#Freescale
+FILESEXTRAPATHS_prepend := "${THISDIR}/qt4:"
+
+SRC_URI_append_mx5 += "file://0001-Add-support-for-i.MX-codecs-to-phonon.patch"
+SRC_URI_append_mx6 += "file://0001-Add-support-for-i.MX-codecs-to-phonon.patch"
+
+PACKAGE_ARCH_mx5 = "${MACHINE_ARCH}"
+PACKAGE_ARCH_mx6 = "${MACHINE_ARCH}"
+
+PRINC := "${@int(PRINC) + 1}"
diff --git a/meta-fsl-arm/recipes-qt/qt4/qt4/0001-Add-support-for-i.MX-codecs-to-phonon.patch b/meta-fsl-arm/recipes-qt/qt4/qt4/0001-Add-support-for-i.MX-codecs-to-phonon.patch
new file mode 100644
index 00000000..c8c12e0e
--- /dev/null
+++ b/meta-fsl-arm/recipes-qt/qt4/qt4/0001-Add-support-for-i.MX-codecs-to-phonon.patch
@@ -0,0 +1,453 @@
+From d46b70c7679e6706a001771c322185fcc95a981c Mon Sep 17 00:00:00 2001
+From: Rogerio Pimentel
+Date: Mon, 4 Jun 2012 14:25:31 -0300
+Subject: [PATCH] Add support for i.MX codecs to phonon
+
+Add support for i.MX codecs to phonon
+
+Signed-off-by: Daniele Dall'Acqua
+Signed-off-by: Rogerio Pimentel
+---
+ src/3rdparty/phonon/gstreamer/abstractrenderer.h |   1 +
+ src/3rdparty/phonon/gstreamer/mediaobject.cpp    |   4 +
+ src/3rdparty/phonon/gstreamer/videowidget.cpp    |  57 ++------
+ src/3rdparty/phonon/gstreamer/videowidget.h      |   1 +
+ src/3rdparty/phonon/gstreamer/widgetrenderer.cpp | 166 +++++++++++++---------
+ src/3rdparty/phonon/gstreamer/widgetrenderer.h   |  17 ++-
+ src/3rdparty/phonon/gstreamer/x11renderer.cpp    |  23 +---
+ 7 files changed, 136 insertions(+), 133 deletions(-)
+
+diff --git a/src/3rdparty/phonon/gstreamer/abstractrenderer.h b/src/3rdparty/phonon/gstreamer/abstractrenderer.h
+index 10a2822..4901530 100644
+--- a/src/3rdparty/phonon/gstreamer/abstractrenderer.h
++++ b/src/3rdparty/phonon/gstreamer/abstractrenderer.h
+@@ -49,6 +49,7 @@ public:
+     virtual bool eventFilter(QEvent *) = 0;
+     virtual void handlePaint(QPaintEvent *) {}
+     virtual bool paintsOnWidget() { return true; } // Controls overlays
++    virtual void handleMove(QMoveEvent * event ) = 0;
+ 
+ protected:
+     VideoWidget *m_videoWidget;
+diff --git a/src/3rdparty/phonon/gstreamer/mediaobject.cpp b/src/3rdparty/phonon/gstreamer/mediaobject.cpp
+index 23a60c0..f806d64 100644
+--- a/src/3rdparty/phonon/gstreamer/mediaobject.cpp
++++ b/src/3rdparty/phonon/gstreamer/mediaobject.cpp
+@@ -515,6 +515,9 @@ void MediaObject::createPipeline()
+     // reduce buffer overruns as these are not gracefully handled at the moment.
+     m_audioPipe = gst_element_factory_make("queue", NULL);
+     g_object_set(G_OBJECT(m_audioPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
++    g_object_set(G_OBJECT(m_audioPipe), "max-size-time", 0, (const char*)NULL);
++    g_object_set(G_OBJECT(m_audioPipe), "max-size-buffers", 0, (const char*)NULL);
++    g_object_set(G_OBJECT(m_audioPipe), "max-size-bytes", 0, (const char*)NULL);
+     gst_bin_add(GST_BIN(m_audioGraph), m_audioPipe);
+     GstPad *audiopad = gst_element_get_pad (m_audioPipe, "sink");
+     gst_element_add_pad (m_audioGraph, gst_ghost_pad_new ("sink", audiopad));
+@@ -527,6 +530,7 @@
+ 
+     m_videoPipe = gst_element_factory_make("queue", NULL);
+     g_object_set(G_OBJECT(m_videoPipe), "max-size-time", MAX_QUEUE_TIME, (const char*)NULL);
++    g_object_set(G_OBJECT(m_videoPipe), "max-size-time", 33000, (const char*)NULL);
+     gst_bin_add(GST_BIN(m_videoGraph), m_videoPipe);
+     GstPad *videopad = gst_element_get_pad (m_videoPipe, "sink");
+     gst_element_add_pad (m_videoGraph, gst_ghost_pad_new ("sink", videopad));
+diff --git a/src/3rdparty/phonon/gstreamer/videowidget.cpp b/src/3rdparty/phonon/gstreamer/videowidget.cpp
+index e1f0ec9..6c9862b 100644
+--- a/src/3rdparty/phonon/gstreamer/videowidget.cpp
++++ b/src/3rdparty/phonon/gstreamer/videowidget.cpp
+@@ -81,48 +81,17 @@ void VideoWidget::setupVideoBin()
+     Q_ASSERT(m_videoBin);
+     gst_object_ref (GST_OBJECT (m_videoBin)); //Take ownership
+     gst_object_sink (GST_OBJECT (m_videoBin));
++    gst_bin_add_many (GST_BIN (m_videoBin), videoSink, NULL);
++    GstPad *videopad = gst_element_get_pad (videoSink,"sink");
++    gst_element_add_pad (m_videoBin, gst_ghost_pad_new ("sink", videopad));
++    gst_object_unref (videopad);
++    QWidget *parentWidget = qobject_cast<QWidget*>(parent());
+ 
+-    //The videoplug element is the final element before the pluggable videosink
+-    m_videoplug = gst_element_factory_make ("identity", NULL);
+-
+-    //Colorspace ensures that the output of the stream matches the input format accepted by our video sink
+-    m_colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
+-
+-    //Video scale is used to prepare the correct aspect ratio and scale.
+-    GstElement *videoScale = gst_element_factory_make ("videoscale", NULL);
+-
+-    //We need a queue to support the tee from parent node
+-    GstElement *queue = gst_element_factory_make ("queue", NULL);
+-
+-    if (queue && m_videoBin && videoScale && m_colorspace && videoSink && m_videoplug) {
+-        //Ensure that the bare essentials are prepared
+-        gst_bin_add_many (GST_BIN (m_videoBin), queue, m_colorspace, m_videoplug, videoScale, videoSink, (const char*)NULL);
+-        bool success = false;
+-        //Video balance controls color/sat/hue in the YUV colorspace
+-        m_videoBalance = gst_element_factory_make ("videobalance", NULL);
+-        if (m_videoBalance) {
+-            // For video balance to work we have to first ensure that the video is in YUV colorspace,
+-            // then hand it off to the videobalance filter before finally converting it back to RGB.
+-            // Hence we nede a videoFilter to convert the colorspace before and after videobalance
+-            GstElement *m_colorspace2 = gst_element_factory_make ("ffmpegcolorspace", NULL);
+-            gst_bin_add_many(GST_BIN(m_videoBin), m_videoBalance, m_colorspace2, (const char*)NULL);
+-            success = gst_element_link_many(queue, m_colorspace, m_videoBalance, m_colorspace2, videoScale, m_videoplug, videoSink, (const char*)NULL);
+-        } else {
+-            //If video balance is not available, just connect to sink directly
+-            success = gst_element_link_many(queue, m_colorspace, videoScale, m_videoplug, videoSink, (const char*)NULL);
+-        }
++    if (parentWidget)
++        parentWidget->winId();  // Due to some existing issues with alien in 4.4,
++                                // we must currently force the creation of a parent widget.
++    m_isValid = true; //initialization ok, accept input
+ 
+-        if (success) {
+-            GstPad *videopad = gst_element_get_pad (queue, "sink");
+-            gst_element_add_pad (m_videoBin, gst_ghost_pad_new ("sink", videopad));
+-            gst_object_unref (videopad);
+-            QWidget *parentWidget = qobject_cast<QWidget*>(parent());
+-            if (parentWidget)
+-                parentWidget->winId();  // Due to some existing issues with alien in 4.4,
+-                                        // we must currently force the creation of a parent widget.
+-            m_isValid = true; //initialization ok, accept input
+-        }
+-    }
+ }
+ 
+ void VideoWidget::paintEvent(QPaintEvent *event)
+@@ -131,6 +100,12 @@
+     m_renderer->handlePaint(event);
+ }
+ 
++void VideoWidget::moveEvent(QMoveEvent * event )
++{
++    Q_ASSERT(m_renderer);
++    m_renderer->handleMove(event);
++}
++
+ void VideoWidget::setVisible(bool val) {
+     Q_ASSERT(m_renderer);
+ 
+diff --git a/src/3rdparty/phonon/gstreamer/videowidget.h b/src/3rdparty/phonon/gstreamer/videowidget.h
+index 8603f6a..38c7b17 100644
+--- a/src/3rdparty/phonon/gstreamer/videowidget.h
++++ b/src/3rdparty/phonon/gstreamer/videowidget.h
+@@ -65,6 +65,7 @@ public:
+     qreal saturation() const;
+     void setSaturation(qreal);
+     void setMovieSize(const QSize &size);
++    void moveEvent(QMoveEvent * event );
+     QSize sizeHint() const;
+     QRect scaleToAspect(QRect srcRect, int w, int h) const;
+     QRect calculateDrawFrameRect() const;
+diff --git a/src/3rdparty/phonon/gstreamer/widgetrenderer.cpp b/src/3rdparty/phonon/gstreamer/widgetrenderer.cpp
+index 423af9d..de524ec 100644
+--- a/src/3rdparty/phonon/gstreamer/widgetrenderer.cpp
++++ b/src/3rdparty/phonon/gstreamer/widgetrenderer.cpp
+@@ -15,7 +15,9 @@
+     along with this library.  If not, see <http://www.gnu.org/licenses/>.
+ */
+ 
++#include
+ #include
++#include
+ #include
+ #include "common.h"
+ #include "message.h"
+@@ -24,6 +26,19 @@
+ #include "widgetrenderer.h"
+ #include "qrgb.h"
+ 
++#include
++#include
++#include
++#include
++#include
++#include
++#include
++
++#include
++
++#define MXCFB_GBL_ALPHA 255
++#define MXCFB_CLR_KEY 0x00000000 // ARGB8888
++
+ // support old OpenGL installations (1.2)
+ // assume that if TEXTURE0 isn't defined, none are
+ #ifndef GL_TEXTURE0
+@@ -35,26 +50,6 @@
+ #ifndef QT_NO_PHONON_VIDEO
+ QT_BEGIN_NAMESPACE
+ 
+-static void frameRendered()
+-{
+-    static QString displayFps = qgetenv("PHONON_GST_FPS");
+-    if (displayFps.isEmpty())
+-        return;
+-
+-    static int frames = 0;
+-    static QTime lastTime = QTime::currentTime();
+-    QTime time = QTime::currentTime();
+-
+-    int delta = lastTime.msecsTo(time);
+-    if (delta > 2000) {
+-        printf("FPS: %f\n", 1000.0 * frames / qreal(delta));
+-        lastTime = time;
+-        frames = 0;
+-    }
+-
+-    ++frames;
+-}
+-
+ namespace Phonon
+ {
+ namespace Gstreamer
+@@ -62,20 +57,12 @@
+ 
+ WidgetRenderer::WidgetRenderer(VideoWidget *videoWidget)
+     : AbstractRenderer(videoWidget)
+-    , m_width(0)
+-    , m_height(0)
+ {
+-    videoWidget->backend()->logMessage("Creating QWidget renderer");
+-    if ((m_videoSink = GST_ELEMENT(g_object_new(get_type_RGB(), NULL)))) {
+-        gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
++    if ((m_videoSink = gst_element_factory_make("mfw_v4lsink", NULL)) && m_videoSink != NULL) {
++        gst_object_ref (GST_OBJECT (m_videoSink)); //Take ownership
+         gst_object_sink (GST_OBJECT (m_videoSink));
+-
+-        QWidgetVideoSinkBase* sink = reinterpret_cast<QWidgetVideoSinkBase*>(m_videoSink);
+-        // Let the videosink know which widget to direct frame updates to
+-        sink->renderWidget = videoWidget;
+     }
+ 
+-    // Clear the background with black by default
+     QPalette palette;
+     palette.setColor(QPalette::Background, Qt::black);
+     m_videoWidget->setPalette(palette);
+@@ -84,65 +71,116 @@ WidgetRenderer::WidgetRenderer(VideoWidget *videoWidget)
+     m_videoWidget->setAttribute(Qt::WA_PaintOnScreen, false);
+ }
+ 
+-void WidgetRenderer::setNextFrame(const QByteArray &array, int w, int h)
++WidgetRenderer::~WidgetRenderer()
+ {
+-    if (m_videoWidget->root()->state() == Phonon::LoadingState)
+-        return;
++    if (m_videoSink) {
++        gst_object_unref (GST_OBJECT (m_videoSink));
++        m_videoSink = 0;
++    }
++}
+ 
+-    m_frame = QImage();
+-    {
+-        m_frame = QImage((uchar *)array.constData(), w, h, QImage::Format_RGB32);
+-    }
++void WidgetRenderer::setVideoSize(void)
++{
++
++    int adj_x;
++    int adj_y;
++
++    QSize wSize = m_videoWidget->size();
++    m_drawFrameRect = m_videoWidget->calculateDrawFrameRect();
++    framePos = m_videoWidget->mapToGlobal(QPoint(0,0));
+ 
+-    m_array = array;
+-    m_width = w;
+-    m_height = h;
++    //Center the video in the widget
+ 
+-    m_videoWidget->update();
++    adj_x = (wSize.width()/2) - (m_drawFrameRect.width()/2);
++    adj_y = (wSize.height()/2) - (m_drawFrameRect.height()/2);
++
++    g_object_set(G_OBJECT(m_videoSink), "axis-left",adj_x + framePos.x(),(const char*)NULL);
++    g_object_set(G_OBJECT(m_videoSink), "axis-top", adj_y + framePos.y(), (const char*)NULL);
++    g_object_set(G_OBJECT(m_videoSink), "disp-width", m_drawFrameRect.width(), (const char*)NULL);
++    g_object_set(G_OBJECT(m_videoSink), "disp-height", m_drawFrameRect.height(), (const char*)NULL);
++    g_object_set(G_OBJECT(m_videoSink), "setpara", 1, (const char*)NULL);
+ }
+ 
+ void WidgetRenderer::handleMediaNodeEvent(const MediaNodeEvent *event)
+ {
+     switch (event->type()) {
+-    case MediaNodeEvent::SourceChanged:
+-    {
+-        clearFrame();
+-        break;
+-    }
+     default:
+         break;
+     }
+ }
+ 
+-void WidgetRenderer::clearFrame()
++void WidgetRenderer::handlePaint(QPaintEvent *event)
+ {
+-    m_frame = QImage();
+-    m_array = QByteArray();
+-    m_videoWidget->update();
++    Q_UNUSED(event);
++    QPainter painter(m_videoWidget);
++    painter.fillRect(m_videoWidget->rect(), m_videoWidget->palette().background());
+ }
+ 
+-const QImage &WidgetRenderer::currentFrame() const
++int WidgetRenderer::setOverlay(void)
+ {
+-    return m_frame;
++    struct mxcfb_color_key color_key;
++    struct mxcfb_gbl_alpha alpha;
++    int fd_fb;
++
++    if ((fd_fb = open("/dev/fb0", O_RDWR, 0)) < 0)
++    {
++        printf("Unable to open %s\n", "/dev/fb0");
++        return -1;
++    }
++
++    alpha.alpha = MXCFB_GBL_ALPHA;
++    alpha.enable = 1;
++
++    if (ioctl(fd_fb, MXCFB_SET_GBL_ALPHA, &alpha) < 0) {
++        printf("Error in applying Alpha\n");
++    }
++
++    color_key.color_key = MXCFB_CLR_KEY & 0x00FFFFFF;
++    color_key.enable = 1;
++    if ( ioctl(fd_fb, MXCFB_SET_CLR_KEY, &color_key) < 0) {
++        printf("Error in applying Color Key\n");
++        return -1;
++    }
++    close (fd_fb);
++    return 0;
+ }
+ 
+-void WidgetRenderer::handlePaint(QPaintEvent *event)
++void WidgetRenderer::handleMove( QMoveEvent * event)
+ {
+-    Q_UNUSED(event);
+-    QPainter painter(m_videoWidget);
+-    m_drawFrameRect = m_videoWidget->calculateDrawFrameRect();
+-    painter.drawImage(drawFrameRect(), currentFrame());
+-    frameRendered();
++    Q_UNUSED(event);
++
++    if (framePos != m_videoWidget->mapToGlobal(QPoint(0,0)))
++        setVideoSize();
+ }
+ 
+ bool WidgetRenderer::eventFilter(QEvent * event)
+ {
+-    if (event->type() == QEvent::User) {
+-        NewFrameEvent *frameEvent= static_cast<NewFrameEvent *>(event);
+-        setNextFrame(frameEvent->frame, frameEvent->width, frameEvent->height);
+-        return true;
+-    }
+-    return false;
++    if (event->type() == QEvent::Show) {
++        setOverlay();
++        return true;
++    } else if (event->type() == QEvent::Resize) {
++        setVideoSize();
++        return true;
++    }
++    if (framePos != m_videoWidget->mapToGlobal(QPoint(0,0)))
++        setVideoSize();
++    return false;
++}
++
++void WidgetRenderer::aspectRatioChanged(Phonon::VideoWidget::AspectRatio)
++{
++    setVideoSize();
++}
++
++void WidgetRenderer::scaleModeChanged(Phonon::VideoWidget::ScaleMode)
++{
++    setVideoSize();
++}
++
++void WidgetRenderer::movieSizeChanged(const QSize &movieSize)
++{
++    Q_UNUSED(movieSize);
++    setVideoSize();
+ }
+ 
+ }
+diff --git a/src/3rdparty/phonon/gstreamer/widgetrenderer.h b/src/3rdparty/phonon/gstreamer/widgetrenderer.h
+index 03ee9c0..886975a 100644
+--- a/src/3rdparty/phonon/gstreamer/widgetrenderer.h
++++ b/src/3rdparty/phonon/gstreamer/widgetrenderer.h
+@@ -40,20 +40,21 @@ class WidgetRenderer : public AbstractRenderer
+ {
+ public:
+     WidgetRenderer(VideoWidget *videoWidget);
++    ~WidgetRenderer(void);
+     bool eventFilter(QEvent * event);
+     void handlePaint(QPaintEvent *paintEvent);
+     void handleMediaNodeEvent(const MediaNodeEvent *event);
+-    const QImage& currentFrame() const;
+     QRect drawFrameRect() const { return m_drawFrameRect; }
+-    void setNextFrame(const QByteArray &array, int width, int height);
+-    bool frameIsSet() { return !m_array.isNull(); }
+-    void clearFrame();
++    void aspectRatioChanged(Phonon::VideoWidget::AspectRatio aspectRatio);
++    void scaleModeChanged(Phonon::VideoWidget::ScaleMode scaleMode);
++    void movieSizeChanged(const QSize &movieSize);
++    void setVideoSize(void);
++    int setOverlay(void);
++    void handleMove(QMoveEvent* event);
+ private:
+-    mutable QImage m_frame;
+-    QByteArray m_array;
+-    int m_width;
+-    int m_height;
++    void paintEvent ( QPaintEvent * event );
+     QRect m_drawFrameRect;
++    QPoint framePos;
+ };
+ 
+ }
+diff --git a/src/3rdparty/phonon/gstreamer/x11renderer.cpp b/src/3rdparty/phonon/gstreamer/x11renderer.cpp
+index 968f3a8..2119840 100644
+--- a/src/3rdparty/phonon/gstreamer/x11renderer.cpp
++++ b/src/3rdparty/phonon/gstreamer/x11renderer.cpp
+@@ -31,6 +31,8 @@
+ #include "mediaobject.h"
+ #include "message.h"
+ 
++#define FSL_GSTREAMER 1
++
+ QT_BEGIN_NAMESPACE
+ 
+ namespace Phonon
+@@ -83,26 +85,7 @@ X11Renderer::~X11Renderer()
+ 
+ GstElement* X11Renderer::createVideoSink()
+ {
+-    GstElement *videoSink = gst_element_factory_make ("xvimagesink", NULL);
+-    if (videoSink) {
+-        // Check if the xv sink is usable
+-        if (gst_element_set_state(videoSink, GST_STATE_READY) != GST_STATE_CHANGE_SUCCESS) {
+-            gst_object_unref(GST_OBJECT(videoSink));
+-            videoSink = 0;
+-        } else {
+-            // Note that this should not really be necessary as these are
+-            // default values, though under certain conditions values are retained
+-            // even between application instances. (reproducible on 0.10.16/Gutsy)
+-            g_object_set(G_OBJECT(videoSink), "brightness", 0, (const char*)NULL);
+-            g_object_set(G_OBJECT(videoSink), "contrast", 0, (const char*)NULL);
+-            g_object_set(G_OBJECT(videoSink), "hue", 0, (const char*)NULL);
+-            g_object_set(G_OBJECT(videoSink), "saturation", 0, (const char*)NULL);
+-        }
+-    }
+-
+-    if (!videoSink)
+-        videoSink = gst_element_factory_make ("ximagesink", NULL);
+-
++    GstElement *videoSink = gst_element_factory_make ("mfw_v4lsink", NULL);
+     gst_object_ref (GST_OBJECT (videoSink)); //Take ownership
+     gst_object_sink (GST_OBJECT (videoSink));
+ 
+-- 
+1.7.1
+
-- 
2.40.1