SVN commit 844875 by mkretz:

make VideoDataOutput work using Experimental::VideoDataOutputInterface
add some debug on connect

 M  +1 -1      CMakeLists.txt
 M  +10 -8     backend.cpp
 M  +124 -35   videodataoutput.cpp
 M  +4 -0      videodataoutput.h


--- trunk/KDE/kdebase/runtime/phonon/xine/CMakeLists.txt #844874:844875
@@ -14,7 +14,7 @@
    effect.cpp
    audiooutput.cpp
    mediaobject.cpp
-   #videodataoutput.cpp
+   videodataoutput.cpp
    visualization.cpp
    backend.cpp
    volumefadereffect.cpp

--- trunk/KDE/kdebase/runtime/phonon/xine/backend.cpp #844874:844875
@@ -20,7 +20,7 @@
  */

 #include "backend.h"
-//#include 
+#include 
 #include "mediaobject.h"
 #include "effect.h"
 #include "events.h"
@@ -29,7 +29,7 @@
 #include "nullsink.h"
 #include "visualization.h"
 #include "volumefadereffect.h"
-//#include "videodataoutput.h"
+#include "videodataoutput.h"
 #include "videowidget.h"
 #include "wirecall.h"
 #include "xinethread.h"
@@ -157,10 +157,9 @@
         return new AudioDataOutput(parent);
     case VisualizationClass:
         return new Visualization(parent);
-    case VideoDataOutputClass:
-        //case Phonon::Experimental::BackendInterface::VideoDataOutputClass:
-        //return new VideoDataOutput(parent);
-        return 0;
+    //case VideoDataOutputClass:
+    case Phonon::Experimental::BackendInterface::VideoDataOutputClass:
+        return new VideoDataOutput(parent);
     case EffectClass:
         {
             Q_ASSERT(args.size() == 1);
@@ -352,7 +351,7 @@

 bool Backend::connectNodes(QObject *_source, QObject *_sink)
 {
-    kDebug(610);
+    kDebug(610) << _source << "->" << _sink;
     SourceNode *source = qobject_cast<SourceNode *>(_source);
     SinkNode *sink = qobject_cast<SinkNode *>(_sink);
     if (!source || !sink) {
@@ -391,12 +390,13 @@

 bool Backend::disconnectNodes(QObject *_source, QObject *_sink)
 {
-    kDebug(610);
+    kDebug(610) << _source << "XX" << _sink;
     SourceNode *source = qobject_cast<SourceNode *>(_source);
     SinkNode *sink = qobject_cast<SinkNode *>(_sink);
     if (!source || !sink) {
         return false;
     }
+    kDebug(610) << source->threadSafeObject().data() << "XX" << sink->threadSafeObject().data();
     const MediaStreamTypes types = source->outputMediaStreamTypes() & sink->inputMediaStreamTypes();
     if (!source->sinks().contains(sink) || sink->source() != source) {
         return false;
@@ -464,6 +464,8 @@
             }
         }
         sink->findXineEngine();
+    } else if (!source) {
+        kDebug(610) << q << "is neither a source nor a sink";
     }
     ConnectNotificationInterface *connectNotify = qobject_cast<ConnectNotificationInterface *>(q);
     if (connectNotify) {

--- trunk/KDE/kdebase/runtime/phonon/xine/videodataoutput.cpp #844874:844875
@@ -1,5 +1,5 @@
 /*  This file is part of the KDE project
-    Copyright (C) 2006 Matthias Kretz
+    Copyright (C) 2006,2008 Matthias Kretz

     This program is free software; you can redistribute it and/or
     modify it under the terms of the GNU Library General Public
@@ -19,12 +19,20 @@
  */

 #include "videodataoutput.h"
-#include 
+
+#include "events.h"
+#include "keepreference.h"
 #include "sourcenode.h"
+#include "wirecall.h"
+#include "xinethread.h"
+
+#include 
+
+#include 
+
 #include 
+#include 

-#define K_XT(type) (static_cast<type *>(SinkNode::threadSafeObject().data()))
-
 namespace Phonon
 {
 namespace Xine
@@ -34,49 +42,44 @@
     public:
         VideoDataOutputXT();
         ~VideoDataOutputXT();
-        xine_video_port_t *videoPort() const { return m_videoPort; }
+        xine_video_port_t *videoPort() const;
        void rewireTo(SourceNodeXT *);
+        bool setFrontendObject(Experimental::AbstractVideoDataOutput *x);

         Phonon::Experimental::AbstractVideoDataOutput *m_frontend;
     private:
-        struct Frame
-        {
-            int format;
-            int width;
-            int height;
-            double aspectRatio;
-            void *data0;
-            void *data1;
-            void *data2;
-        };
+#ifdef XINE_VISUAL_TYPE_RAW
         static void raw_output_cb(void *user_data, int frame_format, int frame_width,
                 int frame_height, double frame_aspect, void *data0, void *data1, void *data2);
         static void raw_overlay_cb(void *user_data, int num_ovl, raw_overlay_t *overlay_array);
-
-#ifdef XINE_VISUAL_TYPE_RAW
         raw_visual_t m_visual;
 #endif
+        int m_supported_formats;
+
+    public:
+        bool m_needNewPort;
         xine_video_port_t *m_videoPort;
 };

+#ifdef XINE_VISUAL_TYPE_RAW
 void VideoDataOutputXT::raw_output_cb(void *user_data, int format, int width,
         int height, double aspect, void *data0, void *data1, void *data2)
 {
+    kDebug(610);
     VideoDataOutputXT* vw = reinterpret_cast<VideoDataOutputXT *>(user_data);
-    const Experimental::VideoFrame f = {
+    const Experimental::VideoFrame2 f = {
         width, height, aspect,
-        ((format == XINE_VORAW_YV12) ? Experimental::VideoFrame::Format_YV12 :
-         (format == XINE_VORAW_YUY2) ? Experimental::VideoFrame::Format_YUY2 :
-         (format == XINE_VORAW_RGB ) ? Experimental::VideoFrame::Format_RGB888 :
-         Experimental::VideoFrame::Format_Invalid),
+        ((format == XINE_VORAW_YV12) ? Experimental::VideoFrame2::Format_YV12 :
+         (format == XINE_VORAW_YUY2) ? Experimental::VideoFrame2::Format_YUY2 :
+         (format == XINE_VORAW_RGB ) ? Experimental::VideoFrame2::Format_RGB888 :
+         Experimental::VideoFrame2::Format_Invalid),
         QByteArray::fromRawData(reinterpret_cast<const char *>(data0),
                 ((format == XINE_VORAW_RGB) ? 3 : (format == XINE_VORAW_YUY2) ? 2 : 1) * width * height),
         QByteArray::fromRawData(reinterpret_cast<const char *>(data1),
                 (format == XINE_VORAW_YV12) ? (width >> 1) + (height >> 1) : 0),
         QByteArray::fromRawData(reinterpret_cast<const char *>(data2),
                 (format == XINE_VORAW_YV12) ? (width >> 1) + (height >> 1) : 0)
     };
     if (vw->m_frontend) {
-        //kDebug(610) << "send frame to frontend";
+        kDebug(610) << "calling frameReady on the frontend";
         vw->m_frontend->frameReady(f);
     }
 }
@@ -87,19 +90,19 @@
     Q_UNUSED(vw);
     Q_UNUSED(num_ovl);
     Q_UNUSED(overlay_array);
+    // TODO do we want to handle overlays? How?
 }
+#endif

 VideoDataOutputXT::VideoDataOutputXT()
     : m_frontend(0),
+#ifdef XINE_VISUAL_TYPE_RAW
+    m_supported_formats(XINE_VORAW_YV12 | XINE_VORAW_YUY2 | XINE_VORAW_RGB),
+    m_needNewPort(true),
+#endif
     m_videoPort(0)
 {
-#ifdef XINE_VISUAL_TYPE_RAW
-    m_visual.user_data = static_cast<void *>(this);
-    m_visual.raw_output_cb = &Phonon::Xine::VideoDataOutputXT::raw_output_cb;
-    m_visual.raw_overlay_cb = &Phonon::Xine::VideoDataOutputXT::raw_overlay_cb;
-    m_visual.supported_formats = /*XINE_VORAW_YV12 | XINE_VORAW_YUY2 |*/ XINE_VORAW_RGB; // TODO
-    m_videoPort = xine_open_video_driver(XineEngine::xine(), "auto", XINE_VISUAL_TYPE_RAW, static_cast<void *>(&m_visual));
-#endif
+    m_xine = Backend::xine();
 }

 VideoDataOutputXT::~VideoDataOutputXT()
@@ -108,7 +111,9 @@

         xine_video_port_t *vp = m_videoPort;
         m_videoPort = 0;
-        xine_close_video_driver(XineEngine::xine(), vp);
+        if (vp) {
+            xine_close_video_driver(m_xine, vp);
+        }
     }
 }
@@ -122,26 +127,110 @@
 {
 }

+xine_video_port_t *VideoDataOutputXT::videoPort() const
+{
+#ifdef XINE_VISUAL_TYPE_RAW
+    if (m_needNewPort) {
+        VideoDataOutputXT *that = const_cast<VideoDataOutputXT *>(this);
+        that->m_needNewPort = false;
+        that->m_visual.user_data = static_cast<void *>(that);
+        that->m_visual.supported_formats = m_supported_formats;
+        that->m_visual.raw_output_cb = &Phonon::Xine::VideoDataOutputXT::raw_output_cb;
+        that->m_visual.raw_overlay_cb = &Phonon::Xine::VideoDataOutputXT::raw_overlay_cb;
+        kDebug(610) << "create new raw video port with supported_formats =" << that->m_visual.supported_formats;
+        xine_video_port_t *newVideoPort = xine_open_video_driver(m_xine, "auto", XINE_VISUAL_TYPE_RAW, static_cast<void *>(&that->m_visual));
+        if (m_videoPort) {
+            // TODO delayed destruction of m_videoPort
+        }
+        that->m_videoPort = newVideoPort;
+        return newVideoPort;
+    }
+#endif
+    return m_videoPort;
+}
+
 void VideoDataOutputXT::rewireTo(SourceNodeXT *source)
 {
+    kDebug(610);
     if (!source->videoOutputPort()) {
         return;
     }
+    kDebug(610) << "do something";
     xine_post_wire_video_port(source->videoOutputPort(), videoPort());
 }

 Experimental::AbstractVideoDataOutput *VideoDataOutput::frontendObject() const
 {
-    return K_XT(const VideoDataOutputXT)->m_frontend;
+    K_XT(const VideoDataOutput);
+    return xt->m_frontend;
 }

+bool VideoDataOutputXT::setFrontendObject(Experimental::AbstractVideoDataOutput *x)
+{
+    m_frontend = x;
+#ifdef XINE_VISUAL_TYPE_RAW
+    if (m_frontend) {
+        int supported_formats = 0;
+        if (m_frontend->allowedFormats().contains(Experimental::VideoFrame2::Format_RGB888)) {
+            supported_formats |= XINE_VORAW_RGB;
+        }
+        if (m_frontend->allowedFormats().contains(Experimental::VideoFrame2::Format_YV12)) {
+            supported_formats |= XINE_VORAW_YV12;
+        }
+        if (m_frontend->allowedFormats().contains(Experimental::VideoFrame2::Format_YUY2)) {
+            supported_formats |= XINE_VORAW_YUY2;
+        }
+        if (m_supported_formats != supported_formats) {
+            m_supported_formats = supported_formats;
+            m_needNewPort = true;
+            return true;
+        }
+    }
+#endif
+    return false;
+}
+
 void VideoDataOutput::setFrontendObject(Experimental::AbstractVideoDataOutput *x)
 {
-    K_XT(VideoDataOutputXT)->m_frontend = x;
+    K_XT(VideoDataOutput);
+    if (xt->setFrontendObject(x)) {
+        // we need to recreate and rewire the output
+        SourceNode *src = source();
+        if (src) {
+            QList<WireCall> wireCall;
+            wireCall << WireCall(src, this);
+            QCoreApplication::postEvent(XineThread::instance(), new RewireEvent(wireCall, QList<WireCall>()));
+        }
+    }
 }

+void VideoDataOutput::aboutToChangeXineEngine()
+{
+    kDebug();
+    K_XT(VideoDataOutput);
+    if (xt->m_videoPort) {
+        VideoDataOutputXT *xt2 = new VideoDataOutputXT();
+        xt2->m_xine = xt->m_xine;
+        xt2->m_videoPort = xt->m_videoPort;
+        xt2->m_needNewPort = false;
+        xt->m_needNewPort = true;
+        xt->m_videoPort = 0;
+        KeepReference<> *keep = new KeepReference<>;
+        keep->addObject(xt2);
+        keep->ready();
+    }
+}
+
+void VideoDataOutput::xineEngineChanged()
+{
+//X     kDebug();
+//X     K_XT(VideoDataOutput);
+//X     if (xt->m_xine) {
+//X         Q_ASSERT(!xt->m_videoPort);
+//X         xt->createVideoPort();
+//X     }
+}
+
 }} //namespace Phonon::Xine

-#undef K_XT
-
 #include "videodataoutput.moc"

--- trunk/KDE/kdebase/runtime/phonon/xine/videodataoutput.h #844874:844875
@@ -50,6 +50,10 @@

         Experimental::AbstractVideoDataOutput *frontendObject() const;
         void setFrontendObject(Experimental::AbstractVideoDataOutput *);
+
+    protected:
+        void aboutToChangeXineEngine();
+        void xineEngineChanged();
 };

 }} //namespace Phonon::Xine
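
For reference, a minimal sketch of what a frontend consumer of this output could look like. The diff above only guarantees that the backend queries allowedFormats() and calls frameReady() on the frontend object; the include paths, the QSet-based return type of allowedFormats() and the exact frameReady() signature below are assumptions, and any further pure virtuals of AbstractVideoDataOutput would also need implementing.

// Hypothetical frontend consumer, for illustration only.
// Assumptions: include paths, QSet return type of allowedFormats(),
// and the frameReady(const VideoFrame2 &) signature.
#include <phonon/experimental/abstractvideodataoutput.h>
#include <phonon/experimental/videoframe2.h>

#include <QtCore/QSet>
#include <QtCore/QtGlobal>

#include <kdebug.h>

class FrameLogger : public Phonon::Experimental::AbstractVideoDataOutput
{
    public:
        // Ask for RGB frames only; VideoDataOutputXT::setFrontendObject() maps
        // this to the supported_formats mask of the raw xine visual and, if the
        // mask changed, requests a new video port plus a rewire.
        QSet<Phonon::Experimental::VideoFrame2::Format> allowedFormats() const
        {
            return QSet<Phonon::Experimental::VideoFrame2::Format>()
                << Phonon::Experimental::VideoFrame2::Format_RGB888;
        }

        // Invoked from raw_output_cb() for every decoded frame.
        void frameReady(const Phonon::Experimental::VideoFrame2 &frame)
        {
            Q_UNUSED(frame);
            kDebug() << "received a raw video frame";
        }
};

Handing such an object to the backend node via setFrontendObject() is what drives the supported_formats negotiation, the creation of a new raw video port in videoPort(), and the RewireEvent posted to the xine thread.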